[
  {
    "path": ".github/workflows/ccpp.yml",
    "content": "name: C/C++ CI\n\non: [push]\n\njobs:\n  build:\n\n    runs-on: ubuntu-latest\n    \n    steps:\n    - uses: actions/checkout@v1\n    - name: build_and_test\n      run: ./build.sh"
  },
  {
    "path": ".gitignore",
    "content": "*.o\n*.log\nmytest\nerror\n"
  },
  {
    "path": "CMakeLists.txt",
    "content": "\r\nset(CMAKE_ARCHIVE_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/lib)\r\nset(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/lib)\r\nset(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/bin)\r\n\r\nadd_subdirectory(src)\r\nadd_subdirectory(test)"
  },
  {
    "path": "README.md",
    "content": "inifile2\n========\n\n## 一个轻量级的inifile文件解析库\n\n### 一. 使用介绍\n* 支持解析ini文件\n* 支持修改、保存ini文件\n* 支持自定义注释符，默认为“#”（可通过SetCommentDelimiter修改）\n* 支持参数名重复\n* 使用gtest和valgrind做过测试\n\n使用很简单，生成一个ini文件test.ini\n```\n>cat > test.ini\n#this is comment\n[COMMON]\nDB = mysql\nPASSWD=root\n```\n\n首先指定头文件和命名空间 然后使用Load函数打开ini文件 GetStringValue可以获取指定段的指定项的值\n\n```\n#include \"inifile.h\"\nusing namespace inifile;\n\nstring filepath = \"test.ini\";\n\nIniFile ini;\nini.Load(filepath);\n\n//获取指定段的指定项的值\nstring db_name;\nint ret = ini.GetStringValue(\"COMMON\", \"DB\", &db_name);\n```\n\n### 二. 本地测试构建方式\n\n* 构建: `./build.sh`\n* 清理: `./build.sh clean`\n* 帮助: `./build.sh -h` 或者 `./build.sh --help`\n\n```\n$./build.sh --help\nusage: ./build.sh [clean] [-h|--help]\n```\n\n### 三. 如何引用inifile库\n\n1. 源码引用:\n\n编译时将src目录下的inifile.cpp 和inifile.h 拷贝到您的源码中直接进行编译即可,对于编译选项或者编译器有要求的建议使用这种源码拷贝的方式.\n\n2. 二进制库引用:libinifile.so或者libinifile.a 引用\n\n执行`./build.sh`后在build/lib目录下会生成libinifile.so和libinifile.a,您可以将这两个库添加到您的工程中，对应的头文件为inifile.h.\n"
  },
  {
    "path": "build.sh",
    "content": "#!/bin/bash\n\nset -eu\n\nfunction build_clean()\n{\n    rm -rf build\n}\n\nfunction build_test()\n{\n    mkdir -p build\n    cd build\n    cmake .. \n    make -j8 \n    make test\n}\n\nfunction usage()\n{\n    echo \"usage: ./build.sh [clean] [-h|--help]\"\n}\n\nopt=\"\"\nif [ $# -ge 1 ];then\n    opt=$1\nfi\n\ncase \"${opt}\" in\n    clean)\n        build_clean\n        ;;\n    -h|--help)\n        usage\n        ;;\n    *)\n        build_test\n        ;;\nesac\n"
  },
  {
    "path": "src/CMakeLists.txt",
    "content": "add_library(inifile STATIC inifile.cpp)\r\nadd_library(inifile_share SHARED inifile.cpp)\r\nadd_custom_target(rename_share_so COMMAND mv ${CMAKE_LIBRARY_OUTPUT_DIRECTORY}/libinifile_share.so ${CMAKE_LIBRARY_OUTPUT_DIRECTORY}/libinifile.so )\r\nadd_dependencies(rename_share_so inifile_share)\r\nadd_dependencies(inifile rename_share_so)"
  },
  {
    "path": "src/inifile.cpp",
    "content": "/* Copyright (c) 2014-2019 WinnerHust\n * All rights reserved.\n * Redistribution and use in source and binary forms, with or without\n * modification, are permitted provided that the following conditions are met:\n * \n * Redistributions of source code must retain the above copyright notice,\n * this list of conditions and the following disclaimer.\n * \n * Redistributions in binary form must reproduce the above copyright notice,\n * this list of conditions and the following disclaimer in the documentation\n * and/or other materials provided with the distribution.\n * \n * The names of its contributors may be used to endorse or promote products\n * derived from this software without specific prior written permission.\n * \n * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\"\n * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,\n * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR \n * PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS\n * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,\n * OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF\n * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS\n * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,\n * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR\n * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,\n * EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n */\n\n#include <stdlib.h>\n#include <stdio.h>\n#include <ctype.h>\n#include <iostream>\n#include <fstream>\n#include \"inifile.h\"\n\nnamespace inifile\n{\n\n// 构造函数，会初始化注释字符集合flags_（容器），目前只使用#和;作为注释前缀\nIniFile::IniFile()\n:commentDelimiter(\"#\")\n{\n}\n\n//解析一行数据，得到键值\n/* --------------------------------------------------------------------------*/\n/**\n * @brief   parse\n *\n * @param   content 数据源指针\n * @param   key     键名\n * @param   value   键值\n * @param   c       分隔符\n *\n * @return  bool\n */\n/* ----------------------------------------------------------------------------*/\nbool IniFile::parse(const string &content, string *key, string *value)\n{\n    return split(content, \"=\", key, value);\n}\n\nint IniFile::UpdateSection(const string &cleanLine, const string &comment,\n                           const string &rightComment, IniSection **section)\n{\n    IniSection *newSection;\n    // 查找右中括号\n    size_t index = cleanLine.find_first_of(']');\n    if (index == string::npos) {\n        errMsg = string(\"no matched ] found\");\n        return ERR_UNMATCHED_BRACKETS;\n    }\n\n    int len = index - 1;\n    // 若段名为空，继续下一行\n    if (len <= 0) {\n        errMsg = string(\"section name is empty\");\n        return ERR_SECTION_EMPTY;\n    }\n\n    // 取段名\n    string s(cleanLine, 1, len);\n\n    trim(s);\n\n    //检查段是否已存在\n    if (getSection(s) != NULL) {\n        errMsg = string(\"section \") + s + string(\"already exist\");\n    
    return ERR_SECTION_ALREADY_EXISTS;\n    }\n\n    // 申请一个新段，由于map容器会自动排序，打乱原有顺序，因此改用vector存储（sections_vt）\n    newSection = new IniSection();\n    // 填充段名\n    newSection->name = s;\n    // 填充段开头的注释\n    newSection->comment = comment;\n    newSection->rightComment = rightComment;\n\n    sections_vt.push_back(newSection);\n\n    *section = newSection;\n\n    return 0;\n}\n\nint IniFile::AddKeyValuePair(const string &cleanLine, const string &comment,\n                             const string &rightComment, IniSection *section)\n{\n    string key, value;\n\n    if (!parse(cleanLine, &key, &value)) {\n        errMsg = string(\"parse line failed:\") + cleanLine;\n        return ERR_PARSE_KEY_VALUE_FAILED;\n    }\n\n    IniItem item;\n    item.key = key;\n    item.value = value;\n    item.comment = comment;\n    item.rightComment = rightComment;\n\n    section->items.push_back(item);\n\n    return 0;\n}\n\nint IniFile::Load(const string &filePath)\n{\n    int err;\n    string line;  // 带注释的行\n    string cleanLine;  // 去掉注释后的行\n    string comment;\n    string rightComment;\n    IniSection *currSection = NULL;  // 初始化一个字段指针\n\n    release();\n\n    iniFilePath = filePath;\n    std::ifstream ifs(iniFilePath);\n    if (!ifs.is_open()) {\n        errMsg = string(\"open\") +iniFilePath+ string(\" file failed\");\n        return ERR_OPEN_FILE_FAILED;\n    }\n\n    //增加默认段，即 无名段\"\"\n    currSection = new IniSection();\n    currSection->name = \"\";\n    sections_vt.push_back(currSection);\n\n    // 每次读取一行内容到line\n    while (std::getline(ifs, line)) {\n        trim(line);\n\n        // step 0，空行处理，如果长度为0，说明是空行，添加到comment，当作是注释的一部分\n        if (line.length() <= 0) {\n            comment += delim;\n            continue;\n        }\n\n        // step 1\n        // 如果行首不是注释，查找行尾是否存在注释\n        // 如果该行以注释开头，添加到comment，跳过当前循环，continue\n        if (IsCommentLine(line)) {\n            comment += line + delim;\n            continue;\n        }\n\n        // 
如果行首不是注释，查找行尾是否存在注释，若存在，切割该行，将注释内容添加到rightComment\n        split(line, commentDelimiter, &cleanLine, &rightComment);\n\n        // step 2，判断line内容是否为段或键\n        //段开头查找 [\n        if (cleanLine[0] == '[') {\n            err = UpdateSection(cleanLine, comment, rightComment, &currSection);\n        } else {\n            // 如果该行是键值，添加到section段的items容器\n             err = AddKeyValuePair(cleanLine, comment, rightComment, currSection);\n        }\n\n        if (err != 0) {\n            ifs.close();\n            return err;\n        }\n\n        // comment清零\n        comment = \"\";\n        rightComment = \"\";\n    }\n\n    ifs.close();\n\n    return 0;\n}\n\nint IniFile::Save()\n{\n    return SaveAs(iniFilePath);\n}\n\nint IniFile::SaveAs(const string &filePath)\n{\n    string data = \"\";\n\n    /* 载入section数据 */\n    for (IniSection_it sect = sections_vt.begin(); sect != sections_vt.end(); ++sect) {\n        if ((*sect)->comment != \"\") {\n            data += (*sect)->comment;\n        }\n\n        if ((*sect)->name != \"\") {\n            data += string(\"[\") + (*sect)->name + string(\"]\");\n            data += delim;\n        }\n\n        if ((*sect)->rightComment != \"\") {\n            data += \" \" + commentDelimiter +(*sect)->rightComment;\n        }\n\n        /* 载入item数据 */\n        for (IniSection::IniItem_it item = (*sect)->items.begin(); item != (*sect)->items.end(); ++item) {\n            if (item->comment != \"\") {\n                data += item->comment;\n                if (data[data.length()-1] != '\\n') {\n                    data += delim;\n                }\n            }\n\n            data += item->key + \"=\" + item->value;\n\n            if (item->rightComment != \"\") {\n                data += \" \" + commentDelimiter + item->rightComment;\n            }\n\n            if (data[data.length()-1] != '\\n') {\n                data += delim;\n            }\n        }\n    }\n\n    std::ofstream ofs(filePath);\n    ofs << data;\n    
ofs.close();\n    return 0;\n}\n\nIniSection *IniFile::getSection(const string &section /*=\"\"*/)\n{\n    for (IniSection_it it = sections_vt.begin(); it != sections_vt.end(); ++it) {\n        if ((*it)->name == section) {\n            return *it;\n        }\n    }\n\n    return NULL;\n}\n\nint IniFile::GetSections(vector<string> *sections)\n{\n    for (IniSection_it it = sections_vt.begin(); it != sections_vt.end(); ++it) {\n        sections->push_back((*it)->name);\n    }\n\n    return sections->size();\n}\n\nint IniFile::GetSectionNum()\n{\n    return sections_vt.size();\n}\n\nint IniFile::GetStringValue(const string &section, const string &key, string *value)\n{\n    return getValue(section, key, value);\n}\n\nint IniFile::GetIntValue(const string &section, const string &key, int *intValue)\n{\n    int err;\n    string strValue;\n\n    err = getValue(section, key, &strValue);\n\n    *intValue = atoi(strValue.c_str());\n\n    return err;\n}\n\nint IniFile::GetDoubleValue(const string &section, const string &key, double *value)\n{\n    int err;\n    string strValue;\n\n    err = getValue(section, key, &strValue);\n\n    *value = atof(strValue.c_str());\n\n    return err;\n}\n\nint IniFile::GetBoolValue(const string &section, const string &key, bool *value)\n{\n    int err;\n    string strValue;\n\n    err = getValue(section, key, &strValue);\n\n    if (StringCmpIgnoreCase(strValue, \"true\") || StringCmpIgnoreCase(strValue, \"1\")) {\n        *value = true;\n    } else if (StringCmpIgnoreCase(strValue, \"false\") || StringCmpIgnoreCase(strValue, \"0\")) {\n        *value = false;\n    }\n\n    return err;\n}\n\n/* 获取section段第一个键为key的string值，成功返回获取的值，否则返回默认值 */\nvoid IniFile::GetStringValueOrDefault(const string &section, const string &key,\n                                      string *value, const string &defaultValue)\n{\n    if (GetStringValue(section, key, value) != 0) {\n        *value = defaultValue;\n    }\n\n    return;\n}\n\n/* 
获取section段第一个键为key的int值，成功返回获取的值，否则返回默认值 */\nvoid IniFile::GetIntValueOrDefault(const string &section, const string &key, int *value, int defaultValue)\n{\n    if (GetIntValue(section, key, value) != 0) {\n        *value = defaultValue;\n    }\n\n    return;\n}\n\n/* 获取section段第一个键为key的double值，成功返回获取的值，否则返回默认值 */\nvoid IniFile::GetDoubleValueOrDefault(const string &section, const string &key, double *value, double defaultValue)\n{\n    if (GetDoubleValue(section, key, value) != 0) {\n        *value = defaultValue;\n    }\n\n    return;\n}\n\n/* 获取section段第一个键为key的bool值，成功返回获取的值，否则返回默认值 */\nvoid IniFile::GetBoolValueOrDefault(const string &section, const string &key, bool *value, bool defaultValue)\n{\n    if (GetBoolValue(section, key, value) != 0) {\n        *value = defaultValue;\n    }\n\n    return;\n}\n\n/* 获取注释，如果key=\"\"则获取段注释 */\nint IniFile::GetComment(const string &section, const string &key, string *comment)\n{\n    IniSection *sect = getSection(section);\n\n    if (sect == NULL) {\n        errMsg = string(\"not find the section \")+section;\n        return ERR_NOT_FOUND_SECTION;\n    }\n\n    if (key == \"\") {\n        *comment = sect->comment;\n        return RET_OK;\n    }\n\n    for (IniSection::IniItem_it it = sect->begin(); it != sect->end(); ++it) {\n        if (it->key == key) {\n            *comment = it->comment;\n            return RET_OK;\n        }\n    }\n\n    errMsg = string(\"not find the key \")+section;\n    return ERR_NOT_FOUND_KEY;\n}\n\n/* 获取行尾注释，如果key=\"\"则获取段的行尾注释 */\nint IniFile::GetRightComment(const string &section, const string &key, string *rightComment)\n{\n    IniSection *sect = getSection(section);\n\n    if (sect == NULL) {\n        errMsg = string(\"not find the section \")+section;\n        return ERR_NOT_FOUND_SECTION;\n    }\n\n    if (key == \"\") {\n        *rightComment = sect->rightComment;\n        return RET_OK;\n    }\n\n    for (IniSection::IniItem_it it = sect->begin(); it != sect->end(); ++it) {\n        if 
(it->key == key) {\n            *rightComment = it->rightComment;\n            return RET_OK;\n        }\n    }\n\n    errMsg = string(\"not find the key \")+key;\n    return ERR_NOT_FOUND_KEY;\n}\n\nint IniFile::getValue(const string &section, const string &key, string *value)\n{\n    string comment;\n    return getValue(section, key, value, &comment);\n}\n\nint IniFile::getValue(const string &section, const string &key, string *value, string *comment)\n{\n    IniSection *sect = getSection(section);\n\n    if (sect == NULL) {\n        errMsg = string(\"not find the section \")+section;\n        return ERR_NOT_FOUND_SECTION;\n    }\n\n    for (IniSection::IniItem_it it = sect->begin(); it != sect->end(); ++it) {\n        if (it->key == key) {\n            *value = it->value;\n            *comment = it->comment;\n            return RET_OK;\n        }\n    }\n\n    errMsg = string(\"not find the key \")+key;\n    return ERR_NOT_FOUND_KEY;\n}\n\nint IniFile::GetValues(const string &section, const string &key, vector<string> *values)\n{\n    vector<string> comments;\n    return GetValues(section, key, values, &comments);\n}\n\nint IniFile::GetValues(const string &section, const string &key, vector<string> *values, vector<string> *comments)\n{\n    string value, comment;\n\n    values->clear();\n    comments->clear();\n\n    IniSection *sect = getSection(section);\n\n    if (sect == NULL) {\n        errMsg = string(\"not find the section \")+section;\n        return ERR_NOT_FOUND_SECTION;\n    }\n\n    for (IniSection::IniItem_it it = sect->begin(); it != sect->end(); ++it) {\n        if (it->key == key) {\n            value = it->value;\n            comment = it->comment;\n\n            values->push_back(value);\n            comments->push_back(comment);\n        }\n    }\n\n    if (values->size() == 0) {\n        errMsg = string(\"not find the key \")+key;\n        return ERR_NOT_FOUND_KEY;\n    }\n\n    return RET_OK;\n}\n\nbool IniFile::HasSection(const string 
&section)\n{\n    return (getSection(section) != NULL);\n}\n\nbool IniFile::HasKey(const string &section, const string &key)\n{\n    IniSection *sect = getSection(section);\n\n    if (sect != NULL) {\n        for (IniSection::IniItem_it it = sect->begin(); it != sect->end(); ++it) {\n            if (it->key == key) {\n                return true;\n            }\n        }\n    }\n\n    return false;\n}\n\nint IniFile::setValue(const string &section, const string &key, const string &value, const string &comment /*=\"\"*/)\n{\n    IniSection *sect = getSection(section);\n\n    string comt = comment;\n\n    if (comt != \"\") {\n        comt = commentDelimiter + comt;\n    }\n\n    if (sect == NULL) {\n        //如果段不存在，新建一个\n        sect = new IniSection();\n\n        if (sect == NULL) {\n            errMsg = string(\"no enough memory!\");\n            return ERR_NO_ENOUGH_MEMORY;\n        }\n\n        sect->name = section;\n        if (sect->name == \"\") {\n            // 确保空section在第一个\n            sections_vt.insert(sections_vt.begin(), sect);\n        } else {\n            sections_vt.push_back(sect);\n        }\n    }\n\n    for (IniSection::IniItem_it it = sect->begin(); it != sect->end(); ++it) {\n        if (it->key == key) {\n            it->value = value;\n            it->comment = comt;\n            return RET_OK;\n        }\n    }\n\n    // not found key\n    IniItem item;\n    item.key = key;\n    item.value = value;\n    item.comment = comt;\n\n    sect->items.push_back(item);\n\n    return RET_OK;\n}\n\nint IniFile::SetStringValue(const string &section, const string &key, const string &value)\n{\n    return setValue(section, key, value);\n}\n\nint IniFile::SetIntValue(const string &section, const string &key, int value)\n{\n    char buf[64] = {0};\n    snprintf(buf, sizeof(buf), \"%d\", value);\n    return setValue(section, key, buf);\n}\n\nint IniFile::SetDoubleValue(const string &section, const string &key, double value)\n{\n    char buf[64] = {0};\n  
  snprintf(buf, sizeof(buf), \"%f\", value);\n    return setValue(section, key, buf);\n}\n\nint IniFile::SetBoolValue(const string &section, const string &key, bool value)\n{\n    if (value) {\n        return setValue(section, key, \"true\");\n    } else {\n        return setValue(section, key, \"false\");\n    }\n}\n\nint IniFile::SetComment(const string &section, const string &key, const string &comment)\n{\n    IniSection *sect = getSection(section);\n\n    if (sect == NULL) {\n        errMsg = string(\"Not find the section \")+section;\n        return ERR_NOT_FOUND_SECTION;\n    }\n\n    if (key == \"\") {\n        sect->comment = comment;\n        return RET_OK;\n    }\n\n    for (IniSection::IniItem_it it = sect->begin(); it != sect->end(); ++it) {\n        if (it->key == key) {\n            it->comment = comment;\n            return RET_OK;\n        }\n    }\n\n    errMsg = string(\"not find the key \")+key;\n    return ERR_NOT_FOUND_KEY;\n}\n\nint IniFile::SetRightComment(const string &section, const string &key, const string &rightComment)\n{\n    IniSection *sect = getSection(section);\n\n    if (sect == NULL) {\n        errMsg = string(\"Not find the section \")+section;\n        return ERR_NOT_FOUND_SECTION;\n    }\n\n    if (key == \"\") {\n        sect->rightComment = rightComment;\n        return RET_OK;\n    }\n\n    for (IniSection::IniItem_it it = sect->begin(); it != sect->end(); ++it) {\n        if (it->key == key) {\n            it->rightComment = rightComment;\n            return RET_OK;\n        }\n    }\n\n    errMsg = string(\"not find the key \")+key;\n    return ERR_NOT_FOUND_KEY;\n}\n\nvoid IniFile::SetCommentDelimiter(const string &delimiter)\n{\n    commentDelimiter = delimiter;\n}\n\nvoid IniFile::DeleteSection(const string &section)\n{\n    for (IniSection_it it = sections_vt.begin(); it != sections_vt.end(); ) {\n        if ((*it)->name == section) {\n            delete *it;\n            it = sections_vt.erase(it);\n            
break;\n        } else {\n            it++;\n        }\n    }\n}\n\nvoid IniFile::DeleteKey(const string &section, const string &key)\n{\n    IniSection *sect = getSection(section);\n\n    if (sect != NULL) {\n        for (IniSection::IniItem_it it = sect->begin(); it != sect->end();) {\n            if (it->key == key) {\n                it = sect->items.erase(it);\n                break;\n            } else {\n                it++;\n            }\n        }\n    }\n}\n\n/*-------------------------------------------------------------------------*/\n/**\n  @brief    release: 释放全部资源，清空容器\n  @param    none\n  @return   none\n */\n/*--------------------------------------------------------------------------*/\nvoid IniFile::release()\n{\n    iniFilePath = \"\";\n\n    for (IniSection_it it = sections_vt.begin(); it != sections_vt.end(); ++it) {\n        delete (*it);  // 清除section\n    }\n\n    sections_vt.clear();\n}\n\n/*-------------------------------------------------------------------------*/\n/**\n  @brief    判断是否是注释\n  @param    str 一个string变量\n  @return   如果是注释则为真\n */\n/*--------------------------------------------------------------------------*/\nbool IniFile::IsCommentLine(const string &str)\n{\n    return StartWith(str, commentDelimiter);\n}\n\n/*-------------------------------------------------------------------------*/\n/**\n  @brief    print for debug\n  @param    none\n  @return   none\n */\n/*--------------------------------------------------------------------------*/\nvoid IniFile::print()\n{\n    printf(\"############ print start ############\\n\");\n    printf(\"filePath:[%s]\\n\", iniFilePath.c_str());\n    printf(\"commentDelimiter:[%s]\\n\", commentDelimiter.c_str());\n\n    for (IniSection_it it = sections_vt.begin(); it != sections_vt.end(); ++it) {\n        printf(\"comment :[\\n%s]\\n\", (*it)->comment.c_str());\n        printf(\"section :\\n[%s]\\n\", (*it)->name.c_str());\n        if ((*it)->rightComment != \"\") {\n            
printf(\"rightComment:\\n%s\", (*it)->rightComment.c_str());\n        }\n\n        for (IniSection::IniItem_it i = (*it)->items.begin(); i != (*it)->items.end(); ++i) {\n            printf(\"    comment :[\\n%s]\\n\", i->comment.c_str());\n            printf(\"    parm    :%s=%s\\n\", i->key.c_str(), i->value.c_str());\n            if (i->rightComment != \"\") {\n                printf(\"    rcomment:[\\n%s]\\n\", i->rightComment.c_str());\n            }\n        }\n    }\n\n    printf(\"############ print end ############\\n\");\n    return;\n}\n\nconst string & IniFile::GetErrMsg()\n{\n    return errMsg;\n}\n\nbool IniFile::StartWith(const string &str, const string &prefix)\n{\n    if (strncmp(str.c_str(), prefix.c_str(), prefix.size()) == 0) {\n        return true;\n    }\n\n    return false;\n}\n\nvoid IniFile::trimleft(string &str, char c /*=' '*/)\n{\n    int len = str.length();\n\n    int i = 0;\n\n    while (str[i] == c && str[i] != '\\0') {\n        i++;\n    }\n\n    if (i != 0) {\n        str = string(str, i, len - i);\n    }\n}\n\nvoid IniFile::trimright(string &str, char c /*=' '*/)\n{\n    int i = 0;\n    int len = str.length();\n\n    for (i = len - 1; i >= 0; --i) {\n        if (str[i] != c) {\n            break;\n        }\n    }\n\n    str = string(str, 0, i + 1);\n}\n\n/*-------------------------------------------------------------------------*/\n/**\n  @brief    trim，整理一行字符串，去掉首尾空格\n  @param    str string变量\n */\n/*--------------------------------------------------------------------------*/\nvoid IniFile::trim(string &str)\n{\n    int len = str.length();\n\n    int i = 0;\n\n    while ((i < len) && isspace(str[i]) && (str[i] != '\\0')) {\n        i++;\n    }\n\n    if (i != 0) {\n        str = string(str, i, len - i);\n    }\n\n    len = str.length();\n\n    for (i = len - 1; i >= 0; --i) {\n        if (!isspace(str[i])) {\n            break;\n        }\n    }\n\n    str = string(str, 0, i + 
1);\n}\n\n/*-------------------------------------------------------------------------*/\n/**\n  @brief    split，用分隔符切割字符串\n  @param    str 输入字符串\n  @param    left_str 分隔后得到的左字符串\n  @param    right_str 分隔后得到的右字符串（不包含seperator）\n  @param    seperator 分隔符\n  @return   pos\n */\n/*--------------------------------------------------------------------------*/\nbool IniFile::split(const string &str, const string &sep, string *pleft, string *pright)\n{\n    size_t pos = str.find(sep);\n    string left, right;\n\n    if (pos != string::npos) {\n        left = string(str, 0, pos);\n        right = string(str, pos+1);\n\n        trim(left);\n        trim(right);\n\n        *pleft = left;\n        *pright = right;\n        return true;\n    } else {\n        left = str;\n        right = \"\";\n\n        trim(left);\n\n        *pleft = left;\n        *pright = right;\n        return false;\n    }\n}\n\nbool IniFile::StringCmpIgnoreCase(const string &str1, const string &str2)\n{\n    string a = str1;\n    string b = str2;\n    transform(a.begin(), a.end(), a.begin(), towupper);\n    transform(b.begin(), b.end(), b.begin(), towupper);\n\n    return (a == b);\n}\n\n}  /* namespace inifile */\n"
  },
  {
    "path": "src/inifile.h",
    "content": "/* Copyright (c) 2014-2019 WinnerHust\n * All rights reserved.\n * Redistribution and use in source and binary forms, with or without\n * modification, are permitted provided that the following conditions are met:\n * \n * Redistributions of source code must retain the above copyright notice,\n * this list of conditions and the following disclaimer.\n * \n * Redistributions in binary form must reproduce the above copyright notice,\n * this list of conditions and the following disclaimer in the documentation\n * and/or other materials provided with the distribution.\n * \n * The names of its contributors may be used to endorse or promote products\n * derived from this software without specific prior written permission.\n * \n * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\"\n * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,\n * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR \n * PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS\n * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,\n * OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF\n * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS\n * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,\n * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR\n * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,\n * EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n */\n\n#ifndef INIFILE_H_\n#define INIFILE_H_\n\n#include <vector>\n#include <algorithm>\n#include <string>\n#include <string.h>\n\nusing std::string;\nusing std::vector;\n\n#define RET_OK 0\n// 没有找到匹配的]\n#define ERR_UNMATCHED_BRACKETS 2\n// 段为空\n#define ERR_SECTION_EMPTY 3\n// 段名已经存在\n#define ERR_SECTION_ALREADY_EXISTS 4\n// 解析key value失败\n#define ERR_PARSE_KEY_VALUE_FAILED 5\n// 打开文件失败\n#define ERR_OPEN_FILE_FAILED 6\n// 内存不足\n#define ERR_NO_ENOUGH_MEMORY 7\n// 没有找到对应的key\n#define ERR_NOT_FOUND_KEY 8\n// 没有找到对应的section\n#define ERR_NOT_FOUND_SECTION 9\n\nnamespace inifile\n{\nconst char delim[] = \"\\n\";\nstruct IniItem {\n    string key;\n    string value;\n    string comment;  // 每个键的注释，都是指该行上方的内容\n    string rightComment;\n};\n\nstruct IniSection {\n    typedef vector<IniItem>::iterator IniItem_it;  // 定义一个迭代器，即指向键元素的指针\n    IniItem_it begin() {\n        return items.begin();  // 段结构体的begin元素指向items的头指针\n    }\n\n    IniItem_it end() {\n        return items.end();  // 段结构体的begin元素指向items的尾指针\n    }\n\n    string name;\n    string comment;  // 每个段的注释，都是指该行上方的内容\n    string rightComment;\n    vector<IniItem> items;  // 键值项数组，一个段可以有多个键值，所有用vector来储存\n};\n\nclass IniFile\n{\n public:\n    IniFile();\n    ~IniFile() {\n        release();\n    }\n\n public:\n    /* 打开并解析一个名为fname的INI文件 */\n    int Load(const string &fname);\n    /* 将内容保存到当前文件 */\n    int Save();\n    /* 将内容另存到一个名为fname的文件 */\n    int SaveAs(const string &fname);\n\n 
   /* 获取section段第一个键为key的string值，成功返回0，否则返回错误码 */\n    int GetStringValue(const string &section, const string &key, string *value);\n    /* 获取section段第一个键为key的int值，成功返回0，否则返回错误码 */\n    int GetIntValue(const string &section, const string &key, int *value);\n    /* 获取section段第一个键为key的double值，成功返回0，否则返回错误码 */\n    int GetDoubleValue(const string &section, const string &key, double *value);\n    /* 获取section段第一个键为key的bool值，成功返回0，否则返回错误码 */\n    int GetBoolValue(const string &section, const string &key, bool *value);\n    /* 获取注释，如果key=\"\"则获取段注释 */\n    int GetComment(const string &section, const string &key, string *comment);\n    /* 获取行尾注释，如果key=\"\"则获取段的行尾注释 */\n    int GetRightComment(const string &section, const string &key, string *rightComment);\n\n    /* 获取section段第一个键为key的string值，成功返回获取的值，否则返回默认值 */\n    void GetStringValueOrDefault(const string &section, const string &key, string *value, const string &defaultValue);\n    /* 获取section段第一个键为key的int值，成功返回获取的值，否则返回默认值 */\n    void GetIntValueOrDefault(const string &section, const string &key, int *value, int defaultValue);\n    /* 获取section段第一个键为key的double值，成功返回获取的值，否则返回默认值 */\n    void GetDoubleValueOrDefault(const string &section, const string &key, double *value, double defaultValue);\n    /* 获取section段第一个键为key的bool值，成功返回获取的值，否则返回默认值 */\n    void GetBoolValueOrDefault(const string &section, const string &key, bool *value, bool defaultValue);\n\n    /* 获取section段所有键为key的值,并将值赋到values的vector中 */\n    int GetValues(const string &section, const string &key, vector<string> *values);\n\n    /* 获取section列表,并返回section个数 */\n    int GetSections(vector<string> *sections);\n    /* 获取section个数，至少存在一个空section */\n    int GetSectionNum();\n    /* 是否存在某个section */\n    bool HasSection(const string &section);\n    /* 获取指定section的所有ken=value信息 */\n    IniSection *getSection(const string &section = \"\");\n    \n    /* 是否存在某个key */\n    bool HasKey(const string &section, const string &key);\n\n    /* 设置字符串值 */\n    int 
SetStringValue(const string &section, const string &key, const string &value);\n    /* 设置整形值 */\n    int SetIntValue(const string &section, const string &key, int value);\n    /* 设置浮点数值 */\n    int SetDoubleValue(const string &section, const string &key, double value);\n    /* 设置布尔值 */\n    int SetBoolValue(const string &section, const string &key, bool value);\n    /* 设置注释，如果key=\"\"则设置段注释 */\n    int SetComment(const string &section, const string &key, const string &comment);\n    /* 设置行尾注释，如果key=\"\"则设置段的行尾注释 */\n    int SetRightComment(const string &section, const string &key, const string &rightComment);\n\n    /* 删除段 */\n    void DeleteSection(const string &section);\n    /* 删除特定段的特定参数 */\n    void DeleteKey(const string &section, const string &key);\n    /*设置注释分隔符，默认为\"#\" */\n    void SetCommentDelimiter(const string &delimiter);\n\n    const string &GetErrMsg();\n private:\n    /* 获取section段所有键为key的值,并将值赋到values的vector中,,将注释赋到comments的vector中 */\n    int GetValues(const string &section, const string &key, vector<string> *value, vector<string> *comments);\n\n    /* 同时设置值和注释 */\n    int setValue(const string &section, const string &key, const string &value, const string &comment = \"\");\n\n    /* 去掉str前面的c字符 */\n    static void trimleft(string &str, char c = ' ');\n    /* 去掉str后面的c字符 */\n    static void trimright(string &str, char c = ' ');\n    /* 去掉str前面和后面的空格符,Tab符等空白符 */\n    static void trim(string &str);\n    /* 将字符串str按分割符delim分割成多个子串 */\n private:\n    int UpdateSection(const string &cleanLine, const string &comment,\n                      const string &rightComment, IniSection **section);\n    int AddKeyValuePair(const string &cleanLine, const string &comment,\n                        const string &rightComment, IniSection *section);\n\n    void release();\n    bool split(const string &str, const string &sep, string *left, string *right);\n    bool IsCommentLine(const string &str);\n    bool parse(const string &content, string *key, string 
*value);\n    // for debug\n    void print();\n\n private:\n    /* 获取section段第一个键为key的值,并将值赋到value中 */\n    int getValue(const string &section, const string &key, string *value);\n    /* 获取section段第一个键为key的值,并将值赋到value中,将注释赋到comment中 */\n    int getValue(const string &section, const string &key, string *value, string *comment);\n\n    bool StringCmpIgnoreCase(const string &str1, const string &str2);\n    bool StartWith(const string &str, const string &prefix);\n\n private:\n    typedef vector<IniSection *>::iterator IniSection_it;\n    vector<IniSection *> sections_vt;  // 用于存储段集合的vector容器\n    string iniFilePath;\n    string commentDelimiter;\n    string errMsg;  // 保存出错时的错误信息\n};\n\n}  // endof namespace inifile\n\n#endif  // INIFILE_H_\n\n"
  },
  {
    "path": "test/CMakeLists.txt",
    "content": "enable_testing()\r\n\r\nadd_subdirectory(googletest)\r\n\r\ninclude_directories(${CMAKE_CURRENT_SOURCE_DIR}/googletest/include)\r\ninclude_directories(${CMAKE_CURRENT_SOURCE_DIR}/../src)\r\n\r\nadd_executable(inifile_test inifile_test.cpp)\r\nadd_compile_options(\"-g -Wall\")\r\ntarget_link_libraries(inifile_test inifile gtest gtest_main)\r\nadd_dependencies(inifile_test inifile gtest gtest_main)\r\n\r\n\r\n#规避make test不生效的问题\r\nadd_test(NAME testcase COMMAND inifile_test)\r\n#设置-V，可以在执行测试用例的同时将用例信息打印出来\r\nadd_custom_target(test COMMAND ${CMAKE_CTEST_COMMAND} -V DEPENDS inifile_test)\r\n\r\n"
  },
  {
    "path": "test/googletest/CMakeLists.txt",
    "content": "########################################################################\n# Note: CMake support is community-based. The maintainers do not use CMake\n# internally.\n#\n# CMake build script for Google Test.\n#\n# To run the tests for Google Test itself on Linux, use 'make test' or\n# ctest.  You can select which tests to run using 'ctest -R regex'.\n# For more options, run 'ctest --help'.\n\n# When other libraries are using a shared version of runtime libraries,\n# Google Test also has to use one.\noption(\n  gtest_force_shared_crt\n  \"Use shared (DLL) run-time lib even when Google Test is built as static lib.\"\n  OFF)\n\noption(gtest_build_tests \"Build all of gtest's own tests.\" OFF)\n\noption(gtest_build_samples \"Build gtest's sample programs.\" OFF)\n\noption(gtest_disable_pthreads \"Disable uses of pthreads in gtest.\" OFF)\n\noption(\n  gtest_hide_internal_symbols\n  \"Build gtest with internal symbols hidden in shared libraries.\"\n  OFF)\n\n# Defines pre_project_set_up_hermetic_build() and set_up_hermetic_build().\ninclude(cmake/hermetic_build.cmake OPTIONAL)\n\nif (COMMAND pre_project_set_up_hermetic_build)\n  pre_project_set_up_hermetic_build()\nendif()\n\n########################################################################\n#\n# Project-wide settings\n\n# Name of the project.\n#\n# CMake files in this project can refer to the root source directory\n# as ${gtest_SOURCE_DIR} and to the root binary directory as\n# ${gtest_BINARY_DIR}.\n# Language \"C\" is required for find_package(Threads).\n\n# Project version:\n\nif (CMAKE_VERSION VERSION_LESS 3.0)\n  project(gtest CXX C)\n  set(PROJECT_VERSION ${GOOGLETEST_VERSION})\nelse()\n  cmake_policy(SET CMP0048 NEW)\n  project(gtest VERSION ${GOOGLETEST_VERSION} LANGUAGES CXX C)\nendif()\ncmake_minimum_required(VERSION 2.6.4)\n\nif (POLICY CMP0063) # Visibility\n  cmake_policy(SET CMP0063 NEW)\nendif (POLICY CMP0063)\n\nif (COMMAND set_up_hermetic_build)\n  
set_up_hermetic_build()\nendif()\n\n# These commands only run if this is the main project\nif(CMAKE_PROJECT_NAME STREQUAL \"gtest\" OR CMAKE_PROJECT_NAME STREQUAL \"googletest-distribution\")\n\n  # BUILD_SHARED_LIBS is a standard CMake variable, but we declare it here to\n  # make it prominent in the GUI.\n  option(BUILD_SHARED_LIBS \"Build shared libraries (DLLs).\" OFF)\n\nelse()\n\n  mark_as_advanced(\n    gtest_force_shared_crt\n    gtest_build_tests\n    gtest_build_samples\n    gtest_disable_pthreads\n    gtest_hide_internal_symbols)\n\nendif()\n\n\nif (gtest_hide_internal_symbols)\n  set(CMAKE_CXX_VISIBILITY_PRESET hidden)\n  set(CMAKE_VISIBILITY_INLINES_HIDDEN 1)\nendif()\n\n# Define helper functions and macros used by Google Test.\ninclude(cmake/internal_utils.cmake)\n\nconfig_compiler_and_linker()  # Defined in internal_utils.cmake.\n\n# Create the CMake package file descriptors.\nif (INSTALL_GTEST)\n  include(CMakePackageConfigHelpers)\n  set(cmake_package_name GTest)\n  set(targets_export_name ${cmake_package_name}Targets CACHE INTERNAL \"\")\n  set(generated_dir \"${CMAKE_CURRENT_BINARY_DIR}/generated\" CACHE INTERNAL \"\")\n  set(cmake_files_install_dir \"${CMAKE_INSTALL_LIBDIR}/cmake/${cmake_package_name}\")\n  set(version_file \"${generated_dir}/${cmake_package_name}ConfigVersion.cmake\")\n  write_basic_package_version_file(${version_file} VERSION ${GOOGLETEST_VERSION} COMPATIBILITY AnyNewerVersion)\n  install(EXPORT ${targets_export_name}\n    NAMESPACE ${cmake_package_name}::\n    DESTINATION ${cmake_files_install_dir})\n  set(config_file \"${generated_dir}/${cmake_package_name}Config.cmake\")\n  configure_package_config_file(\"${gtest_SOURCE_DIR}/cmake/Config.cmake.in\"\n    \"${config_file}\" INSTALL_DESTINATION ${cmake_files_install_dir})\n  install(FILES ${version_file} ${config_file}\n    DESTINATION ${cmake_files_install_dir})\nendif()\n\n# Where Google Test's .h files can be found.\nset(gtest_build_include_dirs\n  
\"${gtest_SOURCE_DIR}/include\"\n  \"${gtest_SOURCE_DIR}\")\ninclude_directories(${gtest_build_include_dirs})\n\n########################################################################\n#\n# Defines the gtest & gtest_main libraries.  User tests should link\n# with one of them.\n\n# Google Test libraries.  We build them using more strict warnings than what\n# are used for other targets, to ensure that gtest can be compiled by a user\n# aggressive about warnings.\ncxx_library(gtest \"${cxx_strict}\" src/gtest-all.cc)\ncxx_library(gtest_main \"${cxx_strict}\" src/gtest_main.cc)\n# If the CMake version supports it, attach header directory information\n# to the targets for when we are part of a parent build (ie being pulled\n# in via add_subdirectory() rather than being a standalone build).\nif (DEFINED CMAKE_VERSION AND NOT \"${CMAKE_VERSION}\" VERSION_LESS \"2.8.11\")\n  target_include_directories(gtest SYSTEM INTERFACE\n    \"$<BUILD_INTERFACE:${gtest_build_include_dirs}>\"\n    \"$<INSTALL_INTERFACE:$<INSTALL_PREFIX>/${CMAKE_INSTALL_INCLUDEDIR}>\")\n  target_include_directories(gtest_main SYSTEM INTERFACE\n    \"$<BUILD_INTERFACE:${gtest_build_include_dirs}>\"\n    \"$<INSTALL_INTERFACE:$<INSTALL_PREFIX>/${CMAKE_INSTALL_INCLUDEDIR}>\")\nendif()\ntarget_link_libraries(gtest_main PUBLIC gtest)\n\n########################################################################\n#\n# Install rules\ninstall_project(gtest gtest_main)\n\n########################################################################\n#\n# Samples on how to link user tests with gtest or gtest_main.\n#\n# They are not built by default.  To build them, set the\n# gtest_build_samples option to ON.  
You can do it by running ccmake\n# or specifying the -Dgtest_build_samples=ON flag when running cmake.\n\nif (gtest_build_samples)\n  cxx_executable(sample1_unittest samples gtest_main samples/sample1.cc)\n  cxx_executable(sample2_unittest samples gtest_main samples/sample2.cc)\n  cxx_executable(sample3_unittest samples gtest_main)\n  cxx_executable(sample4_unittest samples gtest_main samples/sample4.cc)\n  cxx_executable(sample5_unittest samples gtest_main samples/sample1.cc)\n  cxx_executable(sample6_unittest samples gtest_main)\n  cxx_executable(sample7_unittest samples gtest_main)\n  cxx_executable(sample8_unittest samples gtest_main)\n  cxx_executable(sample9_unittest samples gtest)\n  cxx_executable(sample10_unittest samples gtest)\nendif()\n\n########################################################################\n#\n# Google Test's own tests.\n#\n# You can skip this section if you aren't interested in testing\n# Google Test itself.\n#\n# The tests are not built by default.  To build them, set the\n# gtest_build_tests option to ON.  
You can do it by running ccmake\n# or specifying the -Dgtest_build_tests=ON flag when running cmake.\n\nif (gtest_build_tests)\n  # This must be set in the root directory for the tests to be run by\n  # 'make test' or ctest.\n  enable_testing()\n\n  if (WIN32)\n    file(GENERATE OUTPUT \"${CMAKE_CURRENT_BINARY_DIR}/$<CONFIG>/RunTest.ps1\"\n         CONTENT\n\"$project_bin = \\\"${CMAKE_BINARY_DIR}/bin/$<CONFIG>\\\"\n$env:Path = \\\"$project_bin;$env:Path\\\"\n& $args\")\n  elseif (MINGW OR CYGWIN)\n    file(GENERATE OUTPUT \"${CMAKE_CURRENT_BINARY_DIR}/RunTest.ps1\"\n         CONTENT\n\"$project_bin = (cygpath --windows ${CMAKE_BINARY_DIR}/bin)\n$env:Path = \\\"$project_bin;$env:Path\\\"\n& $args\")\n  endif()\n\n  ############################################################\n  # C++ tests built with standard compiler flags.\n\n  cxx_test(googletest-death-test-test gtest_main)\n  cxx_test(gtest_environment_test gtest)\n  cxx_test(googletest-filepath-test gtest_main)\n  cxx_test(googletest-listener-test gtest_main)\n  cxx_test(gtest_main_unittest gtest_main)\n  cxx_test(googletest-message-test gtest_main)\n  cxx_test(gtest_no_test_unittest gtest)\n  cxx_test(googletest-options-test gtest_main)\n  cxx_test(googletest-param-test-test gtest\n    test/googletest-param-test2-test.cc)\n  cxx_test(googletest-port-test gtest_main)\n  cxx_test(gtest_pred_impl_unittest gtest_main)\n  cxx_test(gtest_premature_exit_test gtest\n    test/gtest_premature_exit_test.cc)\n  cxx_test(googletest-printers-test gtest_main)\n  cxx_test(gtest_prod_test gtest_main\n    test/production.cc)\n  cxx_test(gtest_repeat_test gtest)\n  cxx_test(gtest_sole_header_test gtest_main)\n  cxx_test(gtest_stress_test gtest)\n  cxx_test(googletest-test-part-test gtest_main)\n  cxx_test(gtest_throw_on_failure_ex_test gtest)\n  cxx_test(gtest-typed-test_test gtest_main\n    test/gtest-typed-test2_test.cc)\n  cxx_test(gtest_unittest gtest_main)\n  cxx_test(gtest-unittest-api_test gtest)\n  
cxx_test(gtest_skip_in_environment_setup_test gtest_main)\n  cxx_test(gtest_skip_test gtest_main)\n\n  ############################################################\n  # C++ tests built with non-standard compiler flags.\n\n  # MSVC 7.1 does not support STL with exceptions disabled.\n  if (NOT MSVC OR MSVC_VERSION GREATER 1310)\n    cxx_library(gtest_no_exception \"${cxx_no_exception}\"\n      src/gtest-all.cc)\n    cxx_library(gtest_main_no_exception \"${cxx_no_exception}\"\n      src/gtest-all.cc src/gtest_main.cc)\n  endif()\n  cxx_library(gtest_main_no_rtti \"${cxx_no_rtti}\"\n    src/gtest-all.cc src/gtest_main.cc)\n\n  cxx_test_with_flags(gtest-death-test_ex_nocatch_test\n    \"${cxx_exception} -DGTEST_ENABLE_CATCH_EXCEPTIONS_=0\"\n    gtest test/googletest-death-test_ex_test.cc)\n  cxx_test_with_flags(gtest-death-test_ex_catch_test\n    \"${cxx_exception} -DGTEST_ENABLE_CATCH_EXCEPTIONS_=1\"\n    gtest test/googletest-death-test_ex_test.cc)\n\n  cxx_test_with_flags(gtest_no_rtti_unittest \"${cxx_no_rtti}\"\n    gtest_main_no_rtti test/gtest_unittest.cc)\n\n  cxx_shared_library(gtest_dll \"${cxx_default}\"\n    src/gtest-all.cc src/gtest_main.cc)\n\n  cxx_executable_with_flags(gtest_dll_test_ \"${cxx_default}\"\n    gtest_dll test/gtest_all_test.cc)\n  set_target_properties(gtest_dll_test_\n                        PROPERTIES\n                        COMPILE_DEFINITIONS \"GTEST_LINKED_AS_SHARED_LIBRARY=1\")\n\n  ############################################################\n  # Python tests.\n\n  cxx_executable(googletest-break-on-failure-unittest_ test gtest)\n  py_test(googletest-break-on-failure-unittest)\n\n  py_test(gtest_skip_check_output_test)\n  py_test(gtest_skip_environment_check_output_test)\n\n  # Visual Studio .NET 2003 does not support STL with exceptions disabled.\n  if (NOT MSVC OR MSVC_VERSION GREATER 1310)  # 1310 is Visual Studio .NET 2003\n    cxx_executable_with_flags(\n      googletest-catch-exceptions-no-ex-test_\n      
\"${cxx_no_exception}\"\n      gtest_main_no_exception\n      test/googletest-catch-exceptions-test_.cc)\n  endif()\n\n  cxx_executable_with_flags(\n    googletest-catch-exceptions-ex-test_\n    \"${cxx_exception}\"\n    gtest_main\n    test/googletest-catch-exceptions-test_.cc)\n  py_test(googletest-catch-exceptions-test)\n\n  cxx_executable(googletest-color-test_ test gtest)\n  py_test(googletest-color-test)\n\n  cxx_executable(googletest-env-var-test_ test gtest)\n  py_test(googletest-env-var-test)\n\n  cxx_executable(googletest-filter-unittest_ test gtest)\n  py_test(googletest-filter-unittest)\n\n  cxx_executable(gtest_help_test_ test gtest_main)\n  py_test(gtest_help_test)\n\n  cxx_executable(googletest-list-tests-unittest_ test gtest)\n  py_test(googletest-list-tests-unittest)\n\n  cxx_executable(googletest-output-test_ test gtest)\n  py_test(googletest-output-test --no_stacktrace_support)\n\n  cxx_executable(googletest-shuffle-test_ test gtest)\n  py_test(googletest-shuffle-test)\n\n  # MSVC 7.1 does not support STL with exceptions disabled.\n  if (NOT MSVC OR MSVC_VERSION GREATER 1310)\n    cxx_executable(googletest-throw-on-failure-test_ test gtest_no_exception)\n    set_target_properties(googletest-throw-on-failure-test_\n      PROPERTIES\n      COMPILE_FLAGS \"${cxx_no_exception}\")\n    py_test(googletest-throw-on-failure-test)\n  endif()\n\n  cxx_executable(googletest-uninitialized-test_ test gtest)\n  py_test(googletest-uninitialized-test)\n\n  cxx_executable(gtest_xml_outfile1_test_ test gtest_main)\n  cxx_executable(gtest_xml_outfile2_test_ test gtest_main)\n  py_test(gtest_xml_outfiles_test)\n  py_test(googletest-json-outfiles-test)\n\n  cxx_executable(gtest_xml_output_unittest_ test gtest)\n  py_test(gtest_xml_output_unittest --no_stacktrace_support)\n  py_test(googletest-json-output-unittest --no_stacktrace_support)\nendif()\n"
  },
  {
    "path": "test/googletest/CONTRIBUTORS",
    "content": "# This file contains a list of people who've made non-trivial\n# contribution to the Google C++ Testing Framework project.  People\n# who commit code to the project are encouraged to add their names\n# here.  Please keep the list sorted by first names.\n\nAjay Joshi <jaj@google.com>\nBalázs Dán <balazs.dan@gmail.com>\nBharat Mediratta <bharat@menalto.com>\nChandler Carruth <chandlerc@google.com>\nChris Prince <cprince@google.com>\nChris Taylor <taylorc@google.com>\nDan Egnor <egnor@google.com>\nEric Roman <eroman@chromium.org>\nHady Zalek <hady.zalek@gmail.com>\nJeffrey Yasskin <jyasskin@google.com>\nJói Sigurðsson <joi@google.com>\nKeir Mierle <mierle@gmail.com>\nKeith Ray <keith.ray@gmail.com>\nKenton Varda <kenton@google.com>\nKrystian Kuzniarek <krystian.kuzniarek@gmail.com>\nManuel Klimek <klimek@google.com>\nMarkus Heule <markus.heule@gmail.com>\nMika Raento <mikie@iki.fi>\nMiklós Fazekas <mfazekas@szemafor.com>\nPasi Valminen <pasi.valminen@gmail.com>\nPatrick Hanna <phanna@google.com>\nPatrick Riley <pfr@google.com>\nPeter Kaminski <piotrk@google.com>\nPreston Jackson <preston.a.jackson@gmail.com>\nRainer Klaffenboeck <rainer.klaffenboeck@dynatrace.com>\nRuss Cox <rsc@google.com>\nRuss Rufer <russ@pentad.com>\nSean Mcafee <eefacm@gmail.com>\nSigurður Ásgeirsson <siggi@google.com>\nTracy Bialik <tracy@pentad.com>\nVadim Berman <vadimb@google.com>\nVlad Losev <vladl@google.com>\nZhanyong Wan <wan@google.com>\n"
  },
  {
    "path": "test/googletest/LICENSE",
    "content": "Copyright 2008, Google Inc.\nAll rights reserved.\n\nRedistribution and use in source and binary forms, with or without\nmodification, are permitted provided that the following conditions are\nmet:\n\n    * Redistributions of source code must retain the above copyright\nnotice, this list of conditions and the following disclaimer.\n    * Redistributions in binary form must reproduce the above\ncopyright notice, this list of conditions and the following disclaimer\nin the documentation and/or other materials provided with the\ndistribution.\n    * Neither the name of Google Inc. nor the names of its\ncontributors may be used to endorse or promote products derived from\nthis software without specific prior written permission.\n\nTHIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n\"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\nLIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\nA PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\nOWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\nSPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\nLIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\nDATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\nTHEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\nOF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n"
  },
  {
    "path": "test/googletest/README.md",
    "content": "### Generic Build Instructions\n\n#### Setup\n\nTo build Google Test and your tests that use it, you need to tell your build\nsystem where to find its headers and source files. The exact way to do it\ndepends on which build system you use, and is usually straightforward.\n\n### Build with CMake\n\nGoogle Test comes with a CMake build script\n([CMakeLists.txt](https://github.com/google/googletest/blob/master/CMakeLists.txt))\nthat can be used on a wide range of platforms (\"C\" stands for cross-platform.).\nIf you don't have CMake installed already, you can download it for free from\n<http://www.cmake.org/>.\n\nCMake works by generating native makefiles or build projects that can be used in\nthe compiler environment of your choice. You can either build Google Test as a\nstandalone project or it can be incorporated into an existing CMake build for\nanother project.\n\n#### Standalone CMake Project\n\nWhen building Google Test as a standalone project, the typical workflow starts\nwith:\n\n    mkdir mybuild       # Create a directory to hold the build output.\n    cd mybuild\n    cmake ${GTEST_DIR}  # Generate native build scripts.\n\nIf you want to build Google Test's samples, you should replace the last command\nwith\n\n    cmake -Dgtest_build_samples=ON ${GTEST_DIR}\n\nIf you are on a \\*nix system, you should now see a Makefile in the current\ndirectory. Just type 'make' to build gtest.\n\nIf you use Windows and have Visual Studio installed, a `gtest.sln` file and\nseveral `.vcproj` files will be created. 
You can then build them using Visual\nStudio.\n\nOn Mac OS X with Xcode installed, a `.xcodeproj` file will be generated.\n\n#### Incorporating Into An Existing CMake Project\n\nIf you want to use gtest in a project which already uses CMake, then a more\nrobust and flexible approach is to build gtest as part of that project directly.\nThis is done by making the GoogleTest source code available to the main build\nand adding it using CMake's `add_subdirectory()` command. This has the\nsignificant advantage that the same compiler and linker settings are used\nbetween gtest and the rest of your project, so issues associated with using\nincompatible libraries (eg debug/release), etc. are avoided. This is\nparticularly useful on Windows. Making GoogleTest's source code available to the\nmain build can be done a few different ways:\n\n*   Download the GoogleTest source code manually and place it at a known\n    location. This is the least flexible approach and can make it more difficult\n    to use with continuous integration systems, etc.\n*   Embed the GoogleTest source code as a direct copy in the main project's\n    source tree. This is often the simplest approach, but is also the hardest to\n    keep up to date. Some organizations may not permit this method.\n*   Add GoogleTest as a git submodule or equivalent. This may not always be\n    possible or appropriate. Git submodules, for example, have their own set of\n    advantages and drawbacks.\n*   Use CMake to download GoogleTest as part of the build's configure step. This\n    is just a little more complex, but doesn't have the limitations of the other\n    methods.\n\nThe last of the above methods is implemented with a small piece of CMake code in\na separate file (e.g. `CMakeLists.txt.in`) which is copied to the build area and\nthen invoked as a sub-build _during the CMake stage_. That directory is then\npulled into the main build with `add_subdirectory()`. 
For example:\n\nNew file `CMakeLists.txt.in`:\n\n```cmake\ncmake_minimum_required(VERSION 2.8.2)\n\nproject(googletest-download NONE)\n\ninclude(ExternalProject)\nExternalProject_Add(googletest\n  GIT_REPOSITORY    https://github.com/google/googletest.git\n  GIT_TAG           master\n  SOURCE_DIR        \"${CMAKE_CURRENT_BINARY_DIR}/googletest-src\"\n  BINARY_DIR        \"${CMAKE_CURRENT_BINARY_DIR}/googletest-build\"\n  CONFIGURE_COMMAND \"\"\n  BUILD_COMMAND     \"\"\n  INSTALL_COMMAND   \"\"\n  TEST_COMMAND      \"\"\n)\n```\n\nExisting build's `CMakeLists.txt`:\n\n```cmake\n# Download and unpack googletest at configure time\nconfigure_file(CMakeLists.txt.in googletest-download/CMakeLists.txt)\nexecute_process(COMMAND ${CMAKE_COMMAND} -G \"${CMAKE_GENERATOR}\" .\n  RESULT_VARIABLE result\n  WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/googletest-download )\nif(result)\n  message(FATAL_ERROR \"CMake step for googletest failed: ${result}\")\nendif()\nexecute_process(COMMAND ${CMAKE_COMMAND} --build .\n  RESULT_VARIABLE result\n  WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/googletest-download )\nif(result)\n  message(FATAL_ERROR \"Build step for googletest failed: ${result}\")\nendif()\n\n# Prevent overriding the parent project's compiler/linker\n# settings on Windows\nset(gtest_force_shared_crt ON CACHE BOOL \"\" FORCE)\n\n# Add googletest directly to our build. This defines\n# the gtest and gtest_main targets.\nadd_subdirectory(${CMAKE_CURRENT_BINARY_DIR}/googletest-src\n                 ${CMAKE_CURRENT_BINARY_DIR}/googletest-build\n                 EXCLUDE_FROM_ALL)\n\n# The gtest/gtest_main targets carry header search path\n# dependencies automatically when using CMake 2.8.11 or\n# later. Otherwise we have to add them here ourselves.\nif (CMAKE_VERSION VERSION_LESS 2.8.11)\n  include_directories(\"${gtest_SOURCE_DIR}/include\")\nendif()\n\n# Now simply link against gtest or gtest_main as needed. 
Eg\nadd_executable(example example.cpp)\ntarget_link_libraries(example gtest_main)\nadd_test(NAME example_test COMMAND example)\n```\n\nNote that this approach requires CMake 2.8.2 or later due to its use of the\n`ExternalProject_Add()` command. The above technique is discussed in more detail\nin [this separate article](http://crascit.com/2015/07/25/cmake-gtest/) which\nalso contains a link to a fully generalized implementation of the technique.\n\n##### Visual Studio Dynamic vs Static Runtimes\n\nBy default, new Visual Studio projects link the C runtimes dynamically but\nGoogle Test links them statically. This will generate an error that looks\nsomething like the following: gtest.lib(gtest-all.obj) : error LNK2038: mismatch\ndetected for 'RuntimeLibrary': value 'MTd_StaticDebug' doesn't match value\n'MDd_DynamicDebug' in main.obj\n\nGoogle Test already has a CMake option for this: `gtest_force_shared_crt`\n\nEnabling this option will make gtest link the runtimes dynamically too, and\nmatch the project in which it is included.\n\n#### C++ Standard Version\n\nAn environment that supports C++11 is required in order to successfully build\nGoogle Test. One way to ensure this is to specify the standard in the top-level\nproject, for example by using the `set(CMAKE_CXX_STANDARD 11)` command. If this\nis not feasible, for example in a C project using Google Test for validation,\nthen it can be specified by adding it to the options for cmake via the\n`DCMAKE_CXX_FLAGS` option.\n\n### Tweaking Google Test\n\nGoogle Test can be used in diverse environments. The default configuration may\nnot work (or may not work well) out of the box in some environments. However,\nyou can easily tweak Google Test by defining control macros on the compiler\ncommand line. Generally, these macros are named like `GTEST_XYZ` and you define\nthem to either 1 or 0 to enable or disable a certain feature.\n\nWe list the most frequently used macros below. 
For a complete list, see file\n[include/gtest/internal/gtest-port.h](https://github.com/google/googletest/blob/master/googletest/include/gtest/internal/gtest-port.h).\n\n### Multi-threaded Tests\n\nGoogle Test is thread-safe where the pthread library is available. After\n`#include \"gtest/gtest.h\"`, you can check the\n`GTEST_IS_THREADSAFE` macro to see whether this is the case (yes if the macro is\n`#defined` to 1, no if it's undefined.).\n\nIf Google Test doesn't correctly detect whether pthread is available in your\nenvironment, you can force it with\n\n    -DGTEST_HAS_PTHREAD=1\n\nor\n\n    -DGTEST_HAS_PTHREAD=0\n\nWhen Google Test uses pthread, you may need to add flags to your compiler and/or\nlinker to select the pthread library, or you'll get link errors. If you use the\nCMake script or the deprecated Autotools script, this is taken care of for you.\nIf you use your own build script, you'll need to read your compiler and linker's\nmanual to figure out what flags to add.\n\n### As a Shared Library (DLL)\n\nGoogle Test is compact, so most users can build and link it as a static library\nfor the simplicity. You can choose to use Google Test as a shared library (known\nas a DLL on Windows) if you prefer.\n\nTo compile *gtest* as a shared library, add\n\n    -DGTEST_CREATE_SHARED_LIBRARY=1\n\nto the compiler flags. You'll also need to tell the linker to produce a shared\nlibrary instead - consult your linker's manual for how to do it.\n\nTo compile your *tests* that use the gtest shared library, add\n\n    -DGTEST_LINKED_AS_SHARED_LIBRARY=1\n\nto the compiler flags.\n\nNote: while the above steps aren't technically necessary today when using some\ncompilers (e.g. GCC), they may become necessary in the future, if we decide to\nimprove the speed of loading the library (see\n<http://gcc.gnu.org/wiki/Visibility> for details). 
Therefore you are recommended\nto always add the above flags when using Google Test as a shared library.\nOtherwise a future release of Google Test may break your build script.\n\n### Avoiding Macro Name Clashes\n\nIn C++, macros don't obey namespaces. Therefore two libraries that both define a\nmacro of the same name will clash if you `#include` both definitions. In case a\nGoogle Test macro clashes with another library, you can force Google Test to\nrename its macro to avoid the conflict.\n\nSpecifically, if both Google Test and some other code define macro FOO, you can\nadd\n\n    -DGTEST_DONT_DEFINE_FOO=1\n\nto the compiler flags to tell Google Test to change the macro's name from `FOO`\nto `GTEST_FOO`. Currently `FOO` can be `FAIL`, `SUCCEED`, or `TEST`. For\nexample, with `-DGTEST_DONT_DEFINE_TEST=1`, you'll need to write\n\n    GTEST_TEST(SomeTest, DoesThis) { ... }\n\ninstead of\n\n    TEST(SomeTest, DoesThis) { ... }\n\nin order to define a test.\n"
  },
  {
    "path": "test/googletest/cmake/Config.cmake.in",
    "content": "@PACKAGE_INIT@\ninclude(CMakeFindDependencyMacro)\nif (@GTEST_HAS_PTHREAD@)\n  set(THREADS_PREFER_PTHREAD_FLAG @THREADS_PREFER_PTHREAD_FLAG@)\n  find_dependency(Threads)\nendif()\n\ninclude(\"${CMAKE_CURRENT_LIST_DIR}/@targets_export_name@.cmake\")\ncheck_required_components(\"@project_name@\")\n"
  },
  {
    "path": "test/googletest/cmake/gtest.pc.in",
    "content": "libdir=@CMAKE_INSTALL_FULL_LIBDIR@\nincludedir=@CMAKE_INSTALL_FULL_INCLUDEDIR@\n\nName: gtest\nDescription: GoogleTest (without main() function)\nVersion: @PROJECT_VERSION@\nURL: https://github.com/google/googletest\nLibs: -L${libdir} -lgtest @CMAKE_THREAD_LIBS_INIT@\nCflags: -I${includedir} @GTEST_HAS_PTHREAD_MACRO@\n"
  },
  {
    "path": "test/googletest/cmake/gtest_main.pc.in",
    "content": "libdir=@CMAKE_INSTALL_FULL_LIBDIR@\nincludedir=@CMAKE_INSTALL_FULL_INCLUDEDIR@\n\nName: gtest_main\nDescription: GoogleTest (with main() function)\nVersion: @PROJECT_VERSION@\nURL: https://github.com/google/googletest\nRequires: gtest\nLibs: -L${libdir} -lgtest_main @CMAKE_THREAD_LIBS_INIT@\nCflags: -I${includedir} @GTEST_HAS_PTHREAD_MACRO@\n"
  },
  {
    "path": "test/googletest/cmake/internal_utils.cmake",
    "content": "# Defines functions and macros useful for building Google Test and\n# Google Mock.\n#\n# Note:\n#\n# - This file will be run twice when building Google Mock (once via\n#   Google Test's CMakeLists.txt, and once via Google Mock's).\n#   Therefore it shouldn't have any side effects other than defining\n#   the functions and macros.\n#\n# - The functions/macros defined in this file may depend on Google\n#   Test and Google Mock's option() definitions, and thus must be\n#   called *after* the options have been defined.\n\nif (POLICY CMP0054)\n  cmake_policy(SET CMP0054 NEW)\nendif (POLICY CMP0054)\n\n# Tweaks CMake's default compiler/linker settings to suit Google Test's needs.\n#\n# This must be a macro(), as inside a function string() can only\n# update variables in the function scope.\nmacro(fix_default_compiler_settings_)\n  if (MSVC)\n    # For MSVC, CMake sets certain flags to defaults we want to override.\n    # This replacement code is taken from sample in the CMake Wiki at\n    # https://gitlab.kitware.com/cmake/community/wikis/FAQ#dynamic-replace.\n    foreach (flag_var\n             CMAKE_C_FLAGS CMAKE_C_FLAGS_DEBUG CMAKE_C_FLAGS_RELEASE\n             CMAKE_C_FLAGS_MINSIZEREL CMAKE_C_FLAGS_RELWITHDEBINFO\n             CMAKE_CXX_FLAGS CMAKE_CXX_FLAGS_DEBUG CMAKE_CXX_FLAGS_RELEASE\n             CMAKE_CXX_FLAGS_MINSIZEREL CMAKE_CXX_FLAGS_RELWITHDEBINFO)\n      if (NOT BUILD_SHARED_LIBS AND NOT gtest_force_shared_crt)\n        # When Google Test is built as a shared library, it should also use\n        # shared runtime libraries.  Otherwise, it may end up with multiple\n        # copies of runtime library data in different modules, resulting in\n        # hard-to-find crashes. When it is built as a static library, it is\n        # preferable to use CRT as static libraries, as we don't have to rely\n        # on CRT DLLs being available. 
CMake always defaults to using shared\n        # CRT libraries, so we override that default here.\n        string(REPLACE \"/MD\" \"-MT\" ${flag_var} \"${${flag_var}}\")\n      endif()\n\n      # We prefer more strict warning checking for building Google Test.\n      # Replaces /W3 with /W4 in defaults.\n      string(REPLACE \"/W3\" \"/W4\" ${flag_var} \"${${flag_var}}\")\n\n      # Prevent D9025 warning for targets that have exception handling\n      # turned off (/EHs-c- flag). Where required, exceptions are explicitly\n      # re-enabled using the cxx_exception_flags variable.\n      string(REPLACE \"/EHsc\" \"\" ${flag_var} \"${${flag_var}}\")\n    endforeach()\n  endif()\nendmacro()\n\n# Defines the compiler/linker flags used to build Google Test and\n# Google Mock.  You can tweak these definitions to suit your need.  A\n# variable's value is empty before it's explicitly assigned to.\nmacro(config_compiler_and_linker)\n  # Note: pthreads on MinGW is not supported, even if available\n  # instead, we use windows threading primitives\n  unset(GTEST_HAS_PTHREAD)\n  if (NOT gtest_disable_pthreads AND NOT MINGW)\n    # Defines CMAKE_USE_PTHREADS_INIT and CMAKE_THREAD_LIBS_INIT.\n    find_package(Threads)\n    if (CMAKE_USE_PTHREADS_INIT)\n      set(GTEST_HAS_PTHREAD ON)\n    endif()\n  endif()\n\n  fix_default_compiler_settings_()\n  if (MSVC)\n    # Newlines inside flags variables break CMake's NMake generator.\n    # TODO(vladl@google.com): Add -RTCs and -RTCu to debug builds.\n    set(cxx_base_flags \"-GS -W4 -WX -wd4251 -wd4275 -nologo -J -Zi\")\n    set(cxx_base_flags \"${cxx_base_flags} -D_UNICODE -DUNICODE -DWIN32 -D_WIN32\")\n    set(cxx_base_flags \"${cxx_base_flags} -DSTRICT -DWIN32_LEAN_AND_MEAN\")\n    set(cxx_exception_flags \"-EHsc -D_HAS_EXCEPTIONS=1\")\n    set(cxx_no_exception_flags \"-EHs-c- -D_HAS_EXCEPTIONS=0\")\n    set(cxx_no_rtti_flags \"-GR-\")\n    # Suppress \"unreachable code\" warning\n    # http://stackoverflow.com/questions/3232669 
explains the issue.\n    set(cxx_base_flags \"${cxx_base_flags} -wd4702\")\n  elseif (CMAKE_CXX_COMPILER_ID STREQUAL \"Clang\")\n    set(cxx_base_flags \"-Wall -Wshadow -Werror -Wconversion\")\n    set(cxx_exception_flags \"-fexceptions\")\n    set(cxx_no_exception_flags \"-fno-exceptions\")\n    set(cxx_strict_flags \"-W -Wpointer-arith -Wreturn-type -Wcast-qual -Wwrite-strings -Wswitch -Wunused-parameter -Wcast-align -Wchar-subscripts -Winline -Wredundant-decls\")\n    set(cxx_no_rtti_flags \"-fno-rtti\")\n  elseif (CMAKE_COMPILER_IS_GNUCXX)\n    set(cxx_base_flags \"-Wall -Wshadow -Werror\")\n    if(NOT CMAKE_CXX_COMPILER_VERSION VERSION_LESS 7.0.0)\n      set(cxx_base_flags \"${cxx_base_flags} -Wno-error=dangling-else\")\n    endif()\n    set(cxx_exception_flags \"-fexceptions\")\n    set(cxx_no_exception_flags \"-fno-exceptions\")\n    # Until version 4.3.2, GCC doesn't define a macro to indicate\n    # whether RTTI is enabled.  Therefore we define GTEST_HAS_RTTI\n    # explicitly.\n    set(cxx_no_rtti_flags \"-fno-rtti -DGTEST_HAS_RTTI=0\")\n    set(cxx_strict_flags\n      \"-Wextra -Wno-unused-parameter -Wno-missing-field-initializers\")\n  elseif (CMAKE_CXX_COMPILER_ID STREQUAL \"SunPro\")\n    set(cxx_exception_flags \"-features=except\")\n    # Sun Pro doesn't provide macros to indicate whether exceptions and\n    # RTTI are enabled, so we define GTEST_HAS_* explicitly.\n    set(cxx_no_exception_flags \"-features=no%except -DGTEST_HAS_EXCEPTIONS=0\")\n    set(cxx_no_rtti_flags \"-features=no%rtti -DGTEST_HAS_RTTI=0\")\n  elseif (CMAKE_CXX_COMPILER_ID STREQUAL \"VisualAge\" OR\n      CMAKE_CXX_COMPILER_ID STREQUAL \"XL\")\n    # CMake 2.8 changes Visual Age's compiler ID to \"XL\".\n    set(cxx_exception_flags \"-qeh\")\n    set(cxx_no_exception_flags \"-qnoeh\")\n    # Until version 9.0, Visual Age doesn't define a macro to indicate\n    # whether RTTI is enabled.  
Therefore we define GTEST_HAS_RTTI\n    # explicitly.\n    set(cxx_no_rtti_flags \"-qnortti -DGTEST_HAS_RTTI=0\")\n  elseif (CMAKE_CXX_COMPILER_ID STREQUAL \"HP\")\n    set(cxx_base_flags \"-AA -mt\")\n    set(cxx_exception_flags \"-DGTEST_HAS_EXCEPTIONS=1\")\n    set(cxx_no_exception_flags \"+noeh -DGTEST_HAS_EXCEPTIONS=0\")\n    # RTTI can not be disabled in HP aCC compiler.\n    set(cxx_no_rtti_flags \"\")\n  endif()\n\n  # The pthreads library is available and allowed?\n  if (DEFINED GTEST_HAS_PTHREAD)\n    set(GTEST_HAS_PTHREAD_MACRO \"-DGTEST_HAS_PTHREAD=1\")\n  else()\n    set(GTEST_HAS_PTHREAD_MACRO \"-DGTEST_HAS_PTHREAD=0\")\n  endif()\n  set(cxx_base_flags \"${cxx_base_flags} ${GTEST_HAS_PTHREAD_MACRO}\")\n\n  # For building gtest's own tests and samples.\n  set(cxx_exception \"${cxx_base_flags} ${cxx_exception_flags}\")\n  set(cxx_no_exception\n    \"${CMAKE_CXX_FLAGS} ${cxx_base_flags} ${cxx_no_exception_flags}\")\n  set(cxx_default \"${cxx_exception}\")\n  set(cxx_no_rtti \"${cxx_default} ${cxx_no_rtti_flags}\")\n\n  # For building the gtest libraries.\n  set(cxx_strict \"${cxx_default} ${cxx_strict_flags}\")\nendmacro()\n\n# Defines the gtest & gtest_main libraries.  
User tests should link\n# with one of them.\nfunction(cxx_library_with_type name type cxx_flags)\n  # type can be either STATIC or SHARED to denote a static or shared library.\n  # ARGN refers to additional arguments after 'cxx_flags'.\n  add_library(${name} ${type} ${ARGN})\n  set_target_properties(${name}\n    PROPERTIES\n    COMPILE_FLAGS \"${cxx_flags}\")\n  # Generate debug library name with a postfix.\n  set_target_properties(${name}\n    PROPERTIES\n    DEBUG_POSTFIX \"d\")\n  # Set the output directory for build artifacts\n  set_target_properties(${name}\n    PROPERTIES\n    RUNTIME_OUTPUT_DIRECTORY \"${CMAKE_BINARY_DIR}/bin\"\n    LIBRARY_OUTPUT_DIRECTORY \"${CMAKE_BINARY_DIR}/lib\"\n    ARCHIVE_OUTPUT_DIRECTORY \"${CMAKE_BINARY_DIR}/lib\"\n    PDB_OUTPUT_DIRECTORY \"${CMAKE_BINARY_DIR}/bin\")\n  # make PDBs match library name\n  get_target_property(pdb_debug_postfix ${name} DEBUG_POSTFIX)\n  set_target_properties(${name}\n    PROPERTIES\n    PDB_NAME \"${name}\"\n    PDB_NAME_DEBUG \"${name}${pdb_debug_postfix}\"\n    COMPILE_PDB_NAME \"${name}\"\n    COMPILE_PDB_NAME_DEBUG \"${name}${pdb_debug_postfix}\")\n\n  if (BUILD_SHARED_LIBS OR type STREQUAL \"SHARED\")\n    set_target_properties(${name}\n      PROPERTIES\n      COMPILE_DEFINITIONS \"GTEST_CREATE_SHARED_LIBRARY=1\")\n    if (NOT \"${CMAKE_VERSION}\" VERSION_LESS \"2.8.11\")\n      target_compile_definitions(${name} INTERFACE\n        $<INSTALL_INTERFACE:GTEST_LINKED_AS_SHARED_LIBRARY=1>)\n    endif()\n  endif()\n  if (DEFINED GTEST_HAS_PTHREAD)\n    if (\"${CMAKE_VERSION}\" VERSION_LESS \"3.1.0\")\n      set(threads_spec ${CMAKE_THREAD_LIBS_INIT})\n    else()\n      set(threads_spec Threads::Threads)\n    endif()\n    target_link_libraries(${name} PUBLIC ${threads_spec})\n  endif()\nendfunction()\n\n########################################################################\n#\n# Helper functions for creating build targets.\n\nfunction(cxx_shared_library name cxx_flags)\n  
cxx_library_with_type(${name} SHARED \"${cxx_flags}\" ${ARGN})\nendfunction()\n\nfunction(cxx_library name cxx_flags)\n  cxx_library_with_type(${name} \"\" \"${cxx_flags}\" ${ARGN})\nendfunction()\n\n# cxx_executable_with_flags(name cxx_flags libs srcs...)\n#\n# creates a named C++ executable that depends on the given libraries and\n# is built from the given source files with the given compiler flags.\nfunction(cxx_executable_with_flags name cxx_flags libs)\n  add_executable(${name} ${ARGN})\n  if (MSVC)\n    # BigObj required for tests.\n    set(cxx_flags \"${cxx_flags} -bigobj\")\n  endif()\n  if (cxx_flags)\n    set_target_properties(${name}\n      PROPERTIES\n      COMPILE_FLAGS \"${cxx_flags}\")\n  endif()\n  if (BUILD_SHARED_LIBS)\n    set_target_properties(${name}\n      PROPERTIES\n      COMPILE_DEFINITIONS \"GTEST_LINKED_AS_SHARED_LIBRARY=1\")\n  endif()\n  # To support mixing linking in static and dynamic libraries, link each\n  # library in with an extra call to target_link_libraries.\n  foreach (lib \"${libs}\")\n    target_link_libraries(${name} ${lib})\n  endforeach()\nendfunction()\n\n# cxx_executable(name dir lib srcs...)\n#\n# creates a named target that depends on the given libs and is built\n# from the given source files.  
dir/name.cc is implicitly included in\n# the source file list.\nfunction(cxx_executable name dir libs)\n  cxx_executable_with_flags(\n    ${name} \"${cxx_default}\" \"${libs}\" \"${dir}/${name}.cc\" ${ARGN})\nendfunction()\n\n# Sets PYTHONINTERP_FOUND and PYTHON_EXECUTABLE.\nfind_package(PythonInterp)\n\n# cxx_test_with_flags(name cxx_flags libs srcs...)\n#\n# creates a named C++ test that depends on the given libs and is built\n# from the given source files with the given compiler flags.\nfunction(cxx_test_with_flags name cxx_flags libs)\n  cxx_executable_with_flags(${name} \"${cxx_flags}\" \"${libs}\" ${ARGN})\n  if (WIN32 OR MINGW)\n    add_test(NAME ${name}\n      COMMAND \"powershell\" \"-Command\" \"${CMAKE_CURRENT_BINARY_DIR}/$<CONFIG>/RunTest.ps1\" \"$<TARGET_FILE:${name}>\")\n  else()\n    add_test(NAME ${name}\n      COMMAND \"$<TARGET_FILE:${name}>\")\n  endif()\nendfunction()\n\n# cxx_test(name libs srcs...)\n#\n# creates a named test target that depends on the given libs and is\n# built from the given source files.  Unlike cxx_test_with_flags,\n# test/name.cc is already implicitly included in the source file list.\nfunction(cxx_test name libs)\n  cxx_test_with_flags(\"${name}\" \"${cxx_default}\" \"${libs}\"\n    \"test/${name}.cc\" ${ARGN})\nendfunction()\n\n# py_test(name)\n#\n# creates a Python test with the given name whose main module is in\n# test/name.py.  
It does nothing if Python is not installed.\nfunction(py_test name)\n  if (PYTHONINTERP_FOUND)\n    if (\"${CMAKE_MAJOR_VERSION}.${CMAKE_MINOR_VERSION}\" VERSION_GREATER 3.1)\n      if (CMAKE_CONFIGURATION_TYPES)\n        # Multi-configuration build generators as for Visual Studio save\n        # output in a subdirectory of CMAKE_CURRENT_BINARY_DIR (Debug,\n        # Release etc.), so we have to provide it here.\n        if (WIN32 OR MINGW)\n          add_test(NAME ${name}\n            COMMAND powershell -Command ${CMAKE_CURRENT_BINARY_DIR}/$<CONFIG>/RunTest.ps1\n              ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/test/${name}.py\n              --build_dir=${CMAKE_CURRENT_BINARY_DIR}/$<CONFIG> ${ARGN})\n        else()\n          add_test(NAME ${name}\n            COMMAND ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/test/${name}.py\n              --build_dir=${CMAKE_CURRENT_BINARY_DIR}/$<CONFIG> ${ARGN})\n        endif()\n      else (CMAKE_CONFIGURATION_TYPES)\n        # Single-configuration build generators like Makefile generators\n        # don't have subdirs below CMAKE_CURRENT_BINARY_DIR.\n        if (WIN32 OR MINGW)\n          add_test(NAME ${name}\n            COMMAND powershell -Command ${CMAKE_CURRENT_BINARY_DIR}/RunTest.ps1\n              ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/test/${name}.py\n              --build_dir=${CMAKE_CURRENT_BINARY_DIR} ${ARGN})\n        else()\n          add_test(NAME ${name}\n            COMMAND ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/test/${name}.py\n              --build_dir=${CMAKE_CURRENT_BINARY_DIR} ${ARGN})\n        endif()\n      endif (CMAKE_CONFIGURATION_TYPES)\n    else()\n      # ${CMAKE_CURRENT_BINARY_DIR} is known at configuration time, so we can\n      # directly bind it from cmake. 
${CTEST_CONFIGURATION_TYPE} is known\n      # only at ctest runtime (by calling ctest -c <Configuration>), so\n      # we have to escape $ to delay variable substitution here.\n      if (WIN32 OR MINGW)\n        add_test(NAME ${name}\n          COMMAND powershell -Command ${CMAKE_CURRENT_BINARY_DIR}/RunTest.ps1\n            ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/test/${name}.py\n            --build_dir=${CMAKE_CURRENT_BINARY_DIR}/\\${CTEST_CONFIGURATION_TYPE} ${ARGN})\n      else()\n        add_test(NAME ${name}\n          COMMAND ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/test/${name}.py\n            --build_dir=${CMAKE_CURRENT_BINARY_DIR}/\\${CTEST_CONFIGURATION_TYPE} ${ARGN})\n      endif()\n    endif()\n  endif(PYTHONINTERP_FOUND)\nendfunction()\n\n# install_project(targets...)\n#\n# Installs the specified targets and configures the associated pkgconfig files.\nfunction(install_project)\n  if(INSTALL_GTEST)\n    install(DIRECTORY \"${PROJECT_SOURCE_DIR}/include/\"\n      DESTINATION \"${CMAKE_INSTALL_INCLUDEDIR}\")\n    # Install the project targets.\n    install(TARGETS ${ARGN}\n      EXPORT ${targets_export_name}\n      RUNTIME DESTINATION \"${CMAKE_INSTALL_BINDIR}\"\n      ARCHIVE DESTINATION \"${CMAKE_INSTALL_LIBDIR}\"\n      LIBRARY DESTINATION \"${CMAKE_INSTALL_LIBDIR}\")\n    if(CMAKE_CXX_COMPILER_ID MATCHES \"MSVC\")\n      # Install PDBs\n      foreach(t ${ARGN})\n        get_target_property(t_pdb_name ${t} COMPILE_PDB_NAME)\n        get_target_property(t_pdb_name_debug ${t} COMPILE_PDB_NAME_DEBUG)\n        get_target_property(t_pdb_output_directory ${t} PDB_OUTPUT_DIRECTORY)\n        install(FILES\n          \"${t_pdb_output_directory}/\\${CMAKE_INSTALL_CONFIG_NAME}/$<$<CONFIG:Debug>:${t_pdb_name_debug}>$<$<NOT:$<CONFIG:Debug>>:${t_pdb_name}>.pdb\"\n          DESTINATION ${CMAKE_INSTALL_LIBDIR}\n          OPTIONAL)\n      endforeach()\n    endif()\n    # Configure and install pkgconfig files.\n    foreach(t ${ARGN})\n      
set(configured_pc \"${generated_dir}/${t}.pc\")\n      configure_file(\"${PROJECT_SOURCE_DIR}/cmake/${t}.pc.in\"\n        \"${configured_pc}\" @ONLY)\n      install(FILES \"${configured_pc}\"\n        DESTINATION \"${CMAKE_INSTALL_LIBDIR}/pkgconfig\")\n    endforeach()\n  endif()\nendfunction()\n"
  },
  {
    "path": "test/googletest/cmake/libgtest.la.in",
    "content": "# libgtest.la - a libtool library file\n# Generated by libtool (GNU libtool) 2.4.6\n\n# Please DO NOT delete this file!\n# It is necessary for linking the library.\n\n# Names of this library.\nlibrary_names='libgtest.so'\n\n# Is this an already installed library?\ninstalled=yes\n\n# Should we warn about portability when linking against -modules?\nshouldnotlink=no\n\n# Files to dlopen/dlpreopen\ndlopen=''\ndlpreopen=''\n\n# Directory that this library needs to be installed in:\nlibdir='@CMAKE_INSTALL_FULL_LIBDIR@'\n"
  },
  {
    "path": "test/googletest/docs/advanced.md",
    "content": "# Advanced googletest Topics\n\n<!-- GOOGLETEST_CM0016 DO NOT DELETE -->\n\n## Introduction\n\nNow that you have read the [googletest Primer](primer.md) and learned how to\nwrite tests using googletest, it's time to learn some new tricks. This document\nwill show you more assertions as well as how to construct complex failure\nmessages, propagate fatal failures, reuse and speed up your test fixtures, and\nuse various flags with your tests.\n\n## More Assertions\n\nThis section covers some less frequently used, but still significant,\nassertions.\n\n### Explicit Success and Failure\n\nThese three assertions do not actually test a value or expression. Instead, they\ngenerate a success or failure directly. Like the macros that actually perform a\ntest, you may stream a custom failure message into them.\n\n```c++\nSUCCEED();\n```\n\nGenerates a success. This does **NOT** make the overall test succeed. A test is\nconsidered successful only if none of its assertions fail during its execution.\n\nNOTE: `SUCCEED()` is purely documentary and currently doesn't generate any\nuser-visible output. However, we may add `SUCCEED()` messages to googletest's\noutput in the future.\n\n```c++\nFAIL();\nADD_FAILURE();\nADD_FAILURE_AT(\"file_path\", line_number);\n```\n\n`FAIL()` generates a fatal failure, while `ADD_FAILURE()` and `ADD_FAILURE_AT()`\ngenerate a nonfatal failure. These are useful when control flow, rather than a\nBoolean expression, determines the test's success or failure. For example, you\nmight want to write something like:\n\n```c++\nswitch(expression) {\n  case 1:\n     ... some checks ...\n  case 2:\n     ... some other checks ...\n  default:\n     FAIL() << \"We shouldn't get here.\";\n}\n```\n\nNOTE: you can only use `FAIL()` in functions that return `void`. 
See the\n[Assertion Placement section](#assertion-placement) for more information.\n\n### Exception Assertions\n\nThese are for verifying that a piece of code throws (or does not throw) an\nexception of the given type:\n\nFatal assertion                            | Nonfatal assertion                         | Verifies\n------------------------------------------ | ------------------------------------------ | --------\n`ASSERT_THROW(statement, exception_type);` | `EXPECT_THROW(statement, exception_type);` | `statement` throws an exception of the given type\n`ASSERT_ANY_THROW(statement);`             | `EXPECT_ANY_THROW(statement);`             | `statement` throws an exception of any type\n`ASSERT_NO_THROW(statement);`              | `EXPECT_NO_THROW(statement);`              | `statement` doesn't throw any exception\n\nExamples:\n\n```c++\nASSERT_THROW(Foo(5), bar_exception);\n\nEXPECT_NO_THROW({\n  int n = 5;\n  Bar(&n);\n});\n```\n\n**Availability**: requires exceptions to be enabled in the build environment\n\n### Predicate Assertions for Better Error Messages\n\nEven though googletest has a rich set of assertions, they can never be complete,\nas it's impossible (nor a good idea) to anticipate all scenarios a user might\nrun into. Therefore, sometimes a user has to use `EXPECT_TRUE()` to check a\ncomplex expression, for lack of a better macro. This has the problem of not\nshowing you the values of the parts of the expression, making it hard to\nunderstand what went wrong. As a workaround, some users choose to construct the\nfailure message by themselves, streaming it into `EXPECT_TRUE()`. 
However, this\nis awkward especially when the expression has side-effects or is expensive to\nevaluate.\n\ngoogletest gives you three different options to solve this problem:\n\n#### Using an Existing Boolean Function\n\nIf you already have a function or functor that returns `bool` (or a type that\ncan be implicitly converted to `bool`), you can use it in a *predicate\nassertion* to get the function arguments printed for free:\n\n<!-- mdformat off(github rendering does not support multiline tables) -->\n\n| Fatal assertion                   | Nonfatal assertion                | Verifies                    |\n| --------------------------------- | --------------------------------- | --------------------------- |\n| `ASSERT_PRED1(pred1, val1)`       | `EXPECT_PRED1(pred1, val1)`       | `pred1(val1)` is true       |\n| `ASSERT_PRED2(pred2, val1, val2)` | `EXPECT_PRED2(pred2, val1, val2)` | `pred2(val1, val2)` is true |\n| `...`                             | `...`                             | `...`                       |\n\n<!-- mdformat on-->\nIn the above, `predn` is an `n`-ary predicate function or functor, where `val1`,\n`val2`, ..., and `valn` are its arguments. The assertion succeeds if the\npredicate returns `true` when applied to the given arguments, and fails\notherwise. When the assertion fails, it prints the value of each argument. In\neither case, the arguments are evaluated exactly once.\n\nHere's an example. Given\n\n```c++\n// Returns true if m and n have no common divisors except 1.\nbool MutuallyPrime(int m, int n) { ... }\n\nconst int a = 3;\nconst int b = 4;\nconst int c = 10;\n```\n\nthe assertion\n\n```c++\n  EXPECT_PRED2(MutuallyPrime, a, b);\n```\n\nwill succeed, while the assertion\n\n```c++\n  EXPECT_PRED2(MutuallyPrime, b, c);\n```\n\nwill fail with the message\n\n```none\nMutuallyPrime(b, c) is false, where\nb is 4\nc is 10\n```\n\n> NOTE:\n>\n> 1.  
If you see a compiler error \"no matching function to call\" when using\n>     `ASSERT_PRED*` or `EXPECT_PRED*`, please see\n>     [this](faq.md#the-compiler-complains-no-matching-function-to-call-when-i-use-assert-pred-how-do-i-fix-it)\n>     for how to resolve it.\n\n#### Using a Function That Returns an AssertionResult\n\nWhile `EXPECT_PRED*()` and friends are handy for a quick job, the syntax is not\nsatisfactory: you have to use different macros for different arities, and it\nfeels more like Lisp than C++. The `::testing::AssertionResult` class solves\nthis problem.\n\nAn `AssertionResult` object represents the result of an assertion (whether it's\na success or a failure, and an associated message). You can create an\n`AssertionResult` using one of these factory functions:\n\n```c++\nnamespace testing {\n\n// Returns an AssertionResult object to indicate that an assertion has\n// succeeded.\nAssertionResult AssertionSuccess();\n\n// Returns an AssertionResult object to indicate that an assertion has\n// failed.\nAssertionResult AssertionFailure();\n\n}\n```\n\nYou can then use the `<<` operator to stream messages to the `AssertionResult`\nobject.\n\nTo provide more readable messages in Boolean assertions (e.g. `EXPECT_TRUE()`),\nwrite a predicate function that returns `AssertionResult` instead of `bool`. 
For\nexample, if you define `IsEven()` as:\n\n```c++\n::testing::AssertionResult IsEven(int n) {\n  if ((n % 2) == 0)\n     return ::testing::AssertionSuccess();\n  else\n     return ::testing::AssertionFailure() << n << \" is odd\";\n}\n```\n\ninstead of:\n\n```c++\nbool IsEven(int n) {\n  return (n % 2) == 0;\n}\n```\n\nthe failed assertion `EXPECT_TRUE(IsEven(Fib(4)))` will print:\n\n```none\nValue of: IsEven(Fib(4))\n  Actual: false (3 is odd)\nExpected: true\n```\n\ninstead of a more opaque\n\n```none\nValue of: IsEven(Fib(4))\n  Actual: false\nExpected: true\n```\n\nIf you want informative messages in `EXPECT_FALSE` and `ASSERT_FALSE` as well\n(one third of Boolean assertions in the Google code base are negative ones), and\nare fine with making the predicate slower in the success case, you can supply a\nsuccess message:\n\n```c++\n::testing::AssertionResult IsEven(int n) {\n  if ((n % 2) == 0)\n     return ::testing::AssertionSuccess() << n << \" is even\";\n  else\n     return ::testing::AssertionFailure() << n << \" is odd\";\n}\n```\n\nThen the statement `EXPECT_FALSE(IsEven(Fib(6)))` will print\n\n```none\n  Value of: IsEven(Fib(6))\n     Actual: true (8 is even)\n  Expected: false\n```\n\n#### Using a Predicate-Formatter\n\nIf you find the default message generated by `(ASSERT|EXPECT)_PRED*` and\n`(ASSERT|EXPECT)_(TRUE|FALSE)` unsatisfactory, or some arguments to your\npredicate do not support streaming to `ostream`, you can instead use the\nfollowing *predicate-formatter assertions* to *fully* customize how the message\nis formatted:\n\nFatal assertion                                  | Nonfatal assertion                               | Verifies\n------------------------------------------------ | ------------------------------------------------ | --------\n`ASSERT_PRED_FORMAT1(pred_format1, val1);`       | `EXPECT_PRED_FORMAT1(pred_format1, val1);`       | `pred_format1(val1)` is successful\n`ASSERT_PRED_FORMAT2(pred_format2, val1, val2);` | 
`EXPECT_PRED_FORMAT2(pred_format2, val1, val2);` | `pred_format2(val1, val2)` is successful\n`...`                                            | `...`                                            | ...\n\nThe difference between this and the previous group of macros is that instead of\na predicate, `(ASSERT|EXPECT)_PRED_FORMAT*` take a *predicate-formatter*\n(`pred_formatn`), which is a function or functor with the signature:\n\n```c++\n::testing::AssertionResult PredicateFormattern(const char* expr1,\n                                               const char* expr2,\n                                               ...\n                                               const char* exprn,\n                                               T1 val1,\n                                               T2 val2,\n                                               ...\n                                               Tn valn);\n```\n\nwhere `val1`, `val2`, ..., and `valn` are the values of the predicate arguments,\nand `expr1`, `expr2`, ..., and `exprn` are the corresponding expressions as they\nappear in the source code. The types `T1`, `T2`, ..., and `Tn` can be either\nvalue types or reference types. For example, if an argument has type `Foo`, you\ncan declare it as either `Foo` or `const Foo&`, whichever is appropriate.\n\nAs an example, let's improve the failure message in `MutuallyPrime()`, which was\nused with `EXPECT_PRED2()`:\n\n```c++\n// Returns the smallest prime common divisor of m and n,\n// or 1 when m and n are mutually prime.\nint SmallestPrimeCommonDivisor(int m, int n) { ... 
}\n\n// A predicate-formatter for asserting that two integers are mutually prime.\n::testing::AssertionResult AssertMutuallyPrime(const char* m_expr,\n                                               const char* n_expr,\n                                               int m,\n                                               int n) {\n  if (MutuallyPrime(m, n)) return ::testing::AssertionSuccess();\n\n  return ::testing::AssertionFailure() << m_expr << \" and \" << n_expr\n      << \" (\" << m << \" and \" << n << \") are not mutually prime, \"\n      << \"as they have a common divisor \" << SmallestPrimeCommonDivisor(m, n);\n}\n```\n\nWith this predicate-formatter, we can use\n\n```c++\n  EXPECT_PRED_FORMAT2(AssertMutuallyPrime, b, c);\n```\n\nto generate the message\n\n```none\nb and c (4 and 10) are not mutually prime, as they have a common divisor 2.\n```\n\nAs you may have realized, many of the built-in assertions we introduced earlier\nare special cases of `(EXPECT|ASSERT)_PRED_FORMAT*`. In fact, most of them are\nindeed defined using `(EXPECT|ASSERT)_PRED_FORMAT*`.\n\n### Floating-Point Comparison\n\nComparing floating-point numbers is tricky. Due to round-off errors, it is very\nunlikely that two floating-points will match exactly. Therefore, `ASSERT_EQ` 's\nnaive comparison usually doesn't work. And since floating-points can have a wide\nvalue range, no single fixed error bound works. It's better to compare by a\nfixed relative error bound, except for values close to 0 due to the loss of\nprecision there.\n\nIn general, for floating-point comparison to make sense, the user needs to\ncarefully choose the error bound. If they don't want or care to, comparing in\nterms of Units in the Last Place (ULPs) is a good default, and googletest\nprovides assertions to do this. 
Full details about ULPs are quite long; if you\nwant to learn more, see\n[here](https://randomascii.wordpress.com/2012/02/25/comparing-floating-point-numbers-2012-edition/).\n\n#### Floating-Point Macros\n\n<!-- mdformat off(github rendering does not support multiline tables) -->\n\n| Fatal assertion                 | Nonfatal assertion              | Verifies                                 |\n| ------------------------------- | ------------------------------- | ---------------------------------------- |\n| `ASSERT_FLOAT_EQ(val1, val2);`  | `EXPECT_FLOAT_EQ(val1, val2);`  | the two `float` values are almost equal  |\n| `ASSERT_DOUBLE_EQ(val1, val2);` | `EXPECT_DOUBLE_EQ(val1, val2);` | the two `double` values are almost equal |\n\n<!-- mdformat on-->\n\nBy \"almost equal\" we mean the values are within 4 ULP's from each other.\n\nThe following assertions allow you to choose the acceptable error bound:\n\n<!-- mdformat off(github rendering does not support multiline tables) -->\n\n| Fatal assertion                       | Nonfatal assertion                    | Verifies                                                                         |\n| ------------------------------------- | ------------------------------------- | -------------------------------------------------------------------------------- |\n| `ASSERT_NEAR(val1, val2, abs_error);` | `EXPECT_NEAR(val1, val2, abs_error);` | the difference between `val1` and `val2` doesn't exceed the given absolute error |\n\n<!-- mdformat on-->\n\n#### Floating-Point Predicate-Format Functions\n\nSome floating-point operations are useful, but not that often used. In order to\navoid an explosion of new macros, we provide them as predicate-format functions\nthat can be used in predicate assertion macros (e.g. 
`EXPECT_PRED_FORMAT2`,\netc).\n\n```c++\nEXPECT_PRED_FORMAT2(::testing::FloatLE, val1, val2);\nEXPECT_PRED_FORMAT2(::testing::DoubleLE, val1, val2);\n```\n\nVerifies that `val1` is less than, or almost equal to, `val2`. You can replace\n`EXPECT_PRED_FORMAT2` in the above table with `ASSERT_PRED_FORMAT2`.\n\n### Asserting Using gMock Matchers\n\n[gMock](../../googlemock) comes with a library of matchers for validating\narguments passed to mock objects. A gMock *matcher* is basically a predicate\nthat knows how to describe itself. It can be used in these assertion macros:\n\n<!-- mdformat off(github rendering does not support multiline tables) -->\n\n| Fatal assertion                | Nonfatal assertion             | Verifies              |\n| ------------------------------ | ------------------------------ | --------------------- |\n| `ASSERT_THAT(value, matcher);` | `EXPECT_THAT(value, matcher);` | value matches matcher |\n\n<!-- mdformat on-->\n\nFor example, `StartsWith(prefix)` is a matcher that matches a string starting\nwith `prefix`, and you can write:\n\n```c++\nusing ::testing::StartsWith;\n...\n    // Verifies that Foo() returns a string starting with \"Hello\".\n    EXPECT_THAT(Foo(), StartsWith(\"Hello\"));\n```\n\nRead this\n[recipe](../../googlemock/docs/cook_book.md#using-matchers-in-googletest-assertions)\nin the gMock Cookbook for more details.\n\ngMock has a rich set of matchers. You can do many things googletest cannot do\nalone with them. For a list of matchers gMock provides, read\n[this](../../googlemock/docs/cook_book.md##using-matchers). It's easy to write\nyour [own matchers](../../googlemock/docs/cook_book.md#NewMatchers) too.\n\ngMock is bundled with googletest, so you don't need to add any build dependency\nin order to take advantage of this. 
Just include `\"testing/base/public/gmock.h\"`\nand you're ready to go.\n\n### More String Assertions\n\n(Please read the [previous](#asserting-using-gmock-matchers) section first if\nyou haven't.)\n\nYou can use the gMock\n[string matchers](../../googlemock/docs/cheat_sheet.md#string-matchers) with\n`EXPECT_THAT()` or `ASSERT_THAT()` to do more string comparison tricks\n(sub-string, prefix, suffix, regular expression, and etc). For example,\n\n```c++\nusing ::testing::HasSubstr;\nusing ::testing::MatchesRegex;\n...\n  ASSERT_THAT(foo_string, HasSubstr(\"needle\"));\n  EXPECT_THAT(bar_string, MatchesRegex(\"\\\\w*\\\\d+\"));\n```\n\nIf the string contains a well-formed HTML or XML document, you can check whether\nits DOM tree matches an\n[XPath expression](http://www.w3.org/TR/xpath/#contents):\n\n```c++\n// Currently still in //template/prototemplate/testing:xpath_matcher\n#include \"template/prototemplate/testing/xpath_matcher.h\"\nusing prototemplate::testing::MatchesXPath;\nEXPECT_THAT(html_string, MatchesXPath(\"//a[text()='click here']\"));\n```\n\n### Windows HRESULT assertions\n\nThese assertions test for `HRESULT` success or failure.\n\nFatal assertion                        | Nonfatal assertion                     | Verifies\n-------------------------------------- | -------------------------------------- | --------\n`ASSERT_HRESULT_SUCCEEDED(expression)` | `EXPECT_HRESULT_SUCCEEDED(expression)` | `expression` is a success `HRESULT`\n`ASSERT_HRESULT_FAILED(expression)`    | `EXPECT_HRESULT_FAILED(expression)`    | `expression` is a failure `HRESULT`\n\nThe generated output contains the human-readable error message associated with\nthe `HRESULT` code returned by `expression`.\n\nYou might use them like this:\n\n```c++\nCComPtr<IShellDispatch2> shell;\nASSERT_HRESULT_SUCCEEDED(shell.CoCreateInstance(L\"Shell.Application\"));\nCComVariant empty;\nASSERT_HRESULT_SUCCEEDED(shell->ShellExecute(CComBSTR(url), empty, empty, empty, empty));\n```\n\n### Type 
Assertions\n\nYou can call the function\n\n```c++\n::testing::StaticAssertTypeEq<T1, T2>();\n```\n\nto assert that types `T1` and `T2` are the same. The function does nothing if\nthe assertion is satisfied. If the types are different, the function call will\nfail to compile, the compiler error message will say that\n`T1 and T2 are not the same type` and most likely (depending on the compiler)\nshow you the actual values of `T1` and `T2`. This is mainly useful inside\ntemplate code.\n\n**Caveat**: When used inside a member function of a class template or a function\ntemplate, `StaticAssertTypeEq<T1, T2>()` is effective only if the function is\ninstantiated. For example, given:\n\n```c++\ntemplate <typename T> class Foo {\n public:\n  void Bar() { ::testing::StaticAssertTypeEq<int, T>(); }\n};\n```\n\nthe code:\n\n```c++\nvoid Test1() { Foo<bool> foo; }\n```\n\nwill not generate a compiler error, as `Foo<bool>::Bar()` is never actually\ninstantiated. Instead, you need:\n\n```c++\nvoid Test2() { Foo<bool> foo; foo.Bar(); }\n```\n\nto cause a compiler error.\n\n### Assertion Placement\n\nYou can use assertions in any C++ function. In particular, it doesn't have to be\na method of the test fixture class. The one constraint is that assertions that\ngenerate a fatal failure (`FAIL*` and `ASSERT_*`) can only be used in\nvoid-returning functions. This is a consequence of Google's not using\nexceptions. By placing it in a non-void function you'll get a confusing compile\nerror like `\"error: void value not ignored as it ought to be\"` or `\"cannot\ninitialize return object of type 'bool' with an rvalue of type 'void'\"` or\n`\"error: no viable conversion from 'void' to 'string'\"`.\n\nIf you need to use fatal assertions in a function that returns non-void, one\noption is to make the function return the value in an out parameter instead. For\nexample, you can rewrite `T2 Foo(T1 x)` to `void Foo(T1 x, T2* result)`. 
You\nneed to make sure that `*result` contains some sensible value even when the\nfunction returns prematurely. As the function now returns `void`, you can use\nany assertion inside of it.\n\nIf changing the function's type is not an option, you should just use assertions\nthat generate non-fatal failures, such as `ADD_FAILURE*` and `EXPECT_*`.\n\nNOTE: Constructors and destructors are not considered void-returning functions,\naccording to the C++ language specification, and so you may not use fatal\nassertions in them; you'll get a compilation error if you try. Instead, either\ncall `abort` and crash the entire test executable, or put the fatal assertion in\na `SetUp`/`TearDown` function; see\n[constructor/destructor vs. `SetUp`/`TearDown`](faq.md#CtorVsSetUp)\n\nWARNING: A fatal assertion in a helper function (private void-returning method)\ncalled from a constructor or destructor does not terminate the current\ntest, as your intuition might suggest: it merely returns from the constructor or\ndestructor early, possibly leaving your object in a partially-constructed or\npartially-destructed state! You almost certainly want to `abort` or use\n`SetUp`/`TearDown` instead.\n\n## Teaching googletest How to Print Your Values\n\nWhen a test assertion such as `EXPECT_EQ` fails, googletest prints the argument\nvalues to help you debug. It does this using a user-extensible value printer.\n\nThis printer knows how to print built-in C++ types, native arrays, STL\ncontainers, and any type that supports the `<<` operator. For other types, it\nprints the raw bytes in the value and hopes that you the user can figure it out.\n\nAs mentioned earlier, the printer is *extensible*. That means you can teach it\nto do a better job at printing your particular type than to dump the bytes. 
To\ndo that, define `<<` for your type:\n\n```c++\n#include <ostream>\n\nnamespace foo {\n\nclass Bar {  // We want googletest to be able to print instances of this.\n...\n  // Create a free inline friend function.\n  friend std::ostream& operator<<(std::ostream& os, const Bar& bar) {\n    return os << bar.DebugString();  // whatever needed to print bar to os\n  }\n};\n\n// If you can't declare the function in the class it's important that the\n// << operator is defined in the SAME namespace that defines Bar.  C++'s look-up\n// rules rely on that.\nstd::ostream& operator<<(std::ostream& os, const Bar& bar) {\n  return os << bar.DebugString();  // whatever needed to print bar to os\n}\n\n}  // namespace foo\n```\n\nSometimes, this might not be an option: your team may consider it bad style to\nhave a `<<` operator for `Bar`, or `Bar` may already have a `<<` operator that\ndoesn't do what you want (and you cannot change it). If so, you can instead\ndefine a `PrintTo()` function like this:\n\n```c++\n#include <ostream>\n\nnamespace foo {\n\nclass Bar {\n  ...\n  friend void PrintTo(const Bar& bar, std::ostream* os) {\n    *os << bar.DebugString();  // whatever needed to print bar to os\n  }\n};\n\n// If you can't declare the function in the class it's important that PrintTo()\n// is defined in the SAME namespace that defines Bar.  C++'s look-up rules rely\n// on that.\nvoid PrintTo(const Bar& bar, std::ostream* os) {\n  *os << bar.DebugString();  // whatever needed to print bar to os\n}\n\n}  // namespace foo\n```\n\nIf you have defined both `<<` and `PrintTo()`, the latter will be used when\ngoogletest is concerned. 
This allows you to customize how the value appears in\ngoogletest's output without affecting code that relies on the behavior of its\n`<<` operator.\n\nIf you want to print a value `x` using googletest's value printer yourself, just\ncall `::testing::PrintToString(x)`, which returns an `std::string`:\n\n```c++\nvector<pair<Bar, int> > bar_ints = GetBarIntVector();\n\nEXPECT_TRUE(IsCorrectBarIntVector(bar_ints))\n    << \"bar_ints = \" << ::testing::PrintToString(bar_ints);\n```\n\n## Death Tests\n\nIn many applications, there are assertions that can cause application failure if\na condition is not met. These sanity checks, which ensure that the program is in\na known good state, are there to fail at the earliest possible time after some\nprogram state is corrupted. If the assertion checks the wrong condition, then\nthe program may proceed in an erroneous state, which could lead to memory\ncorruption, security holes, or worse. Hence it is vitally important to test that\nsuch assertion statements work as expected.\n\nSince these precondition checks cause the processes to die, we call such tests\n_death tests_. More generally, any test that checks that a program terminates\n(except by throwing an exception) in an expected fashion is also a death test.\n\nNote that if a piece of code throws an exception, we don't consider it \"death\"\nfor the purpose of death tests, as the caller of the code could catch the\nexception and avoid the crash. 
If you want to verify exceptions thrown by your\ncode, see [Exception Assertions](#ExceptionAssertions).\n\nIf you want to test `EXPECT_*()/ASSERT_*()` failures in your test code, see\nCatching Failures\n\n### How to Write a Death Test\n\ngoogletest has the following macros to support death tests:\n\nFatal assertion                                  | Nonfatal assertion                               | Verifies\n------------------------------------------------ | ------------------------------------------------ | --------\n`ASSERT_DEATH(statement, matcher);`              | `EXPECT_DEATH(statement, matcher);`              | `statement` crashes with the given error\n`ASSERT_DEATH_IF_SUPPORTED(statement, matcher);` | `EXPECT_DEATH_IF_SUPPORTED(statement, matcher);` | if death tests are supported, verifies that `statement` crashes with the given error; otherwise verifies nothing\n`ASSERT_EXIT(statement, predicate, matcher);`    | `EXPECT_EXIT(statement, predicate, matcher);`    | `statement` exits with the given error, and its exit code matches `predicate`\n\nwhere `statement` is a statement that is expected to cause the process to die,\n`predicate` is a function or function object that evaluates an integer exit\nstatus, and `matcher` is either a gMock matcher matching a `const std::string&`\nor a (Perl) regular expression - either of which is matched against the stderr\noutput of `statement`. For legacy reasons, a bare string (i.e. with no matcher)\nis interpreted as `ContainsRegex(str)`, **not** `Eq(str)`. Note that `statement`\ncan be *any valid statement* (including *compound statement*) and doesn't have\nto be an expression.\n\nAs usual, the `ASSERT` variants abort the current test function, while the\n`EXPECT` variants do not.\n\n> NOTE: We use the word \"crash\" here to mean that the process terminates with a\n> *non-zero* exit status code. 
There are two possibilities: either the process\n> has called `exit()` or `_exit()` with a non-zero value, or it may be killed by\n> a signal.\n>\n> This means that if *`statement`* terminates the process with a 0 exit code, it\n> is *not* considered a crash by `EXPECT_DEATH`. Use `EXPECT_EXIT` instead if\n> this is the case, or if you want to restrict the exit code more precisely.\n\nA predicate here must accept an `int` and return a `bool`. The death test\nsucceeds only if the predicate returns `true`. googletest defines a few\npredicates that handle the most common cases:\n\n```c++\n::testing::ExitedWithCode(exit_code)\n```\n\nThis expression is `true` if the program exited normally with the given exit\ncode.\n\n```c++\n::testing::KilledBySignal(signal_number)  // Not available on Windows.\n```\n\nThis expression is `true` if the program was killed by the given signal.\n\nThe `*_DEATH` macros are convenient wrappers for `*_EXIT` that use a predicate\nthat verifies the process' exit code is non-zero.\n\nNote that a death test only cares about three things:\n\n1.  does `statement` abort or exit the process?\n2.  (in the case of `ASSERT_EXIT` and `EXPECT_EXIT`) does the exit status\n    satisfy `predicate`? Or (in the case of `ASSERT_DEATH` and `EXPECT_DEATH`)\n    is the exit status non-zero? And\n3.  does the stderr output match `matcher`?\n\nIn particular, if `statement` generates an `ASSERT_*` or `EXPECT_*` failure, it\nwill **not** cause the death test to fail, as googletest assertions don't abort\nthe process.\n\nTo write a death test, simply use one of the above macros inside your test\nfunction. 
For example,\n\n```c++\nTEST(MyDeathTest, Foo) {\n  // This death test uses a compound statement.\n  ASSERT_DEATH({\n    int n = 5;\n    Foo(&n);\n  }, \"Error on line .* of Foo()\");\n}\n\nTEST(MyDeathTest, NormalExit) {\n  EXPECT_EXIT(NormalExit(), ::testing::ExitedWithCode(0), \"Success\");\n}\n\nTEST(MyDeathTest, KillMyself) {\n  EXPECT_EXIT(KillMyself(), ::testing::KilledBySignal(SIGKILL),\n              \"Sending myself unblockable signal\");\n}\n```\n\nverifies that:\n\n*   calling `Foo(5)` causes the process to die with the given error message,\n*   calling `NormalExit()` causes the process to print `\"Success\"` to stderr and\n    exit with exit code 0, and\n*   calling `KillMyself()` kills the process with signal `SIGKILL`.\n\nThe test function body may contain other assertions and statements as well, if\nnecessary.\n\n### Death Test Naming\n\nIMPORTANT: We strongly recommend you to follow the convention of naming your\n**test suite** (not test) `*DeathTest` when it contains a death test, as\ndemonstrated in the above example. The\n[Death Tests And Threads](#death-tests-and-threads) section below explains why.\n\nIf a test fixture class is shared by normal tests and death tests, you can use\n`using` or `typedef` to introduce an alias for the fixture class and avoid\nduplicating its code:\n\n```c++\nclass FooTest : public ::testing::Test { ... };\n\nusing FooDeathTest = FooTest;\n\nTEST_F(FooTest, DoesThis) {\n  // normal test\n}\n\nTEST_F(FooDeathTest, DoesThat) {\n  // death test\n}\n```\n\n### Regular Expression Syntax\n\nOn POSIX systems (e.g. Linux, Cygwin, and Mac), googletest uses the\n[POSIX extended regular expression](http://www.opengroup.org/onlinepubs/009695399/basedefs/xbd_chap09.html#tag_09_04)\nsyntax. To learn about this syntax, you may want to read this\n[Wikipedia entry](http://en.wikipedia.org/wiki/Regular_expression#POSIX_Extended_Regular_Expressions).\n\nOn Windows, googletest uses its own simple regular expression implementation. 
It\nlacks many features. For example, we don't support union (`\"x|y\"`), grouping\n(`\"(xy)\"`), brackets (`\"[xy]\"`), and repetition count (`\"x{5,7}\"`), among\nothers. Below is what we do support (`A` denotes a literal character, period\n(`.`), or a single `\\\\ ` escape sequence; `x` and `y` denote regular\nexpressions.):\n\nExpression | Meaning\n---------- | --------------------------------------------------------------\n`c`        | matches any literal character `c`\n`\\\\d`      | matches any decimal digit\n`\\\\D`      | matches any character that's not a decimal digit\n`\\\\f`      | matches `\\f`\n`\\\\n`      | matches `\\n`\n`\\\\r`      | matches `\\r`\n`\\\\s`      | matches any ASCII whitespace, including `\\n`\n`\\\\S`      | matches any character that's not a whitespace\n`\\\\t`      | matches `\\t`\n`\\\\v`      | matches `\\v`\n`\\\\w`      | matches any letter, `_`, or decimal digit\n`\\\\W`      | matches any character that `\\\\w` doesn't match\n`\\\\c`      | matches any literal character `c`, which must be a punctuation\n`.`        | matches any single character except `\\n`\n`A?`       | matches 0 or 1 occurrences of `A`\n`A*`       | matches 0 or many occurrences of `A`\n`A+`       | matches 1 or many occurrences of `A`\n`^`        | matches the beginning of a string (not that of each line)\n`$`        | matches the end of a string (not that of each line)\n`xy`       | matches `x` followed by `y`\n\nTo help you determine which capability is available on your system, googletest\ndefines macros to govern which regular expression it is using. The macros are:\n`GTEST_USES_SIMPLE_RE=1` or `GTEST_USES_POSIX_RE=1`. If you want your death\ntests to work in all cases, you can either `#if` on these macros or use the more\nlimited syntax only.\n\n### How It Works\n\nUnder the hood, `ASSERT_EXIT()` spawns a new process and executes the death test\nstatement in that process. 
The details of how precisely that happens depend on\nthe platform and the variable ::testing::GTEST_FLAG(death_test_style) (which is\ninitialized from the command-line flag `--gtest_death_test_style`).\n\n*   On POSIX systems, `fork()` (or `clone()` on Linux) is used to spawn the\n    child, after which:\n    *   If the variable's value is `\"fast\"`, the death test statement is\n        immediately executed.\n    *   If the variable's value is `\"threadsafe\"`, the child process re-executes\n        the unit test binary just as it was originally invoked, but with some\n        extra flags to cause just the single death test under consideration to\n        be run.\n*   On Windows, the child is spawned using the `CreateProcess()` API, and\n    re-executes the binary to cause just the single death test under\n    consideration to be run - much like the `threadsafe` mode on POSIX.\n\nOther values for the variable are illegal and will cause the death test to fail.\nCurrently, the flag's default value is **\"fast\"**.\n\nIn either case, the parent process waits for the child process to complete, and\nchecks that:\n\n1.  the child's exit status satisfies the predicate, and\n2.  the child's stderr matches the regular expression.\n\nIf the death test statement runs to completion without dying, the child process\nwill nonetheless terminate, and the assertion fails.\n\n### Death Tests And Threads\n\nThe reason for the two death test styles has to do with thread safety. Due to\nwell-known problems with forking in the presence of threads, death tests should\nbe run in a single-threaded context. Sometimes, however, it isn't feasible to\narrange that kind of environment. For example, statically-initialized modules\nmay start threads before main is ever reached. Once threads have been created,\nit may be difficult or impossible to clean them up.\n\ngoogletest has three features intended to raise awareness of threading issues.\n\n1.  A warning is emitted if multiple threads are running when a death test is\n    encountered.\n2.  
Test suites with a name ending in \"DeathTest\" are run before all other\n    tests.\n3.  It uses `clone()` instead of `fork()` to spawn the child process on Linux\n    (`clone()` is not available on Cygwin and Mac), as `fork()` is more likely\n    to cause the child to hang when the parent process has multiple threads.\n\nIt's perfectly fine to create threads inside a death test statement; they are\nexecuted in a separate process and cannot affect the parent.\n\n### Death Test Styles\n\nThe \"threadsafe\" death test style was introduced in order to help mitigate the\nrisks of testing in a possibly multithreaded environment. It trades increased\ntest execution time (potentially dramatically so) for improved thread safety.\n\nThe automated testing framework does not set the style flag. You can choose a\nparticular style of death tests by setting the flag programmatically:\n\n```c++\ntesting::FLAGS_gtest_death_test_style=\"threadsafe\"\n```\n\nYou can do this in `main()` to set the style for all death tests in the binary,\nor in individual tests. Recall that flags are saved before running each test and\nrestored afterwards, so you need not do that yourself. For example:\n\n```c++\nint main(int argc, char** argv) {\n  InitGoogle(argv[0], &argc, &argv, true);\n  ::testing::FLAGS_gtest_death_test_style = \"fast\";\n  return RUN_ALL_TESTS();\n}\n\nTEST(MyDeathTest, TestOne) {\n  ::testing::FLAGS_gtest_death_test_style = \"threadsafe\";\n  // This test is run in the \"threadsafe\" style:\n  ASSERT_DEATH(ThisShouldDie(), \"\");\n}\n\nTEST(MyDeathTest, TestTwo) {\n  // This test is run in the \"fast\" style:\n  ASSERT_DEATH(ThisShouldDie(), \"\");\n}\n```\n\n### Caveats\n\nThe `statement` argument of `ASSERT_EXIT()` can be any valid C++ statement. If\nit leaves the current function via a `return` statement or by throwing an\nexception, the death test is considered to have failed. Some googletest macros\nmay return from the current function (e.g. 
`ASSERT_TRUE()`), so be sure to avoid\nthem in `statement`.\n\nSince `statement` runs in the child process, any in-memory side effect (e.g.\nmodifying a variable, releasing memory, etc) it causes will *not* be observable\nin the parent process. In particular, if you release memory in a death test,\nyour program will fail the heap check as the parent process will never see the\nmemory reclaimed. To solve this problem, you can\n\n1.  try not to free memory in a death test;\n2.  free the memory again in the parent process; or\n3.  do not use the heap checker in your program.\n\nDue to an implementation detail, you cannot place multiple death test assertions\non the same line; otherwise, compilation will fail with an unobvious error\nmessage.\n\nDespite the improved thread safety afforded by the \"threadsafe\" style of death\ntest, thread problems such as deadlock are still possible in the presence of\nhandlers registered with `pthread_atfork(3)`.\n\n\n## Using Assertions in Sub-routines\n\n### Adding Traces to Assertions\n\nIf a test sub-routine is called from several places, when an assertion inside it\nfails, it can be hard to tell which invocation of the sub-routine the failure is\nfrom. You can alleviate this problem using extra logging or custom failure\nmessages, but that usually clutters up your tests. A better solution is to use\nthe `SCOPED_TRACE` macro or the `ScopedTrace` utility:\n\n```c++\nSCOPED_TRACE(message);\n```\n```c++\nScopedTrace trace(\"file_path\", line_number, message);\n```\n\nwhere `message` can be anything streamable to `std::ostream`. `SCOPED_TRACE`\nmacro will cause the current file name, line number, and the given message to be\nadded in every failure message. `ScopedTrace` accepts explicit file name and\nline number in arguments, which is useful for writing test helpers. 
The effect\nwill be undone when the control leaves the current lexical scope.\n\nFor example,\n\n```c++\n10: void Sub1(int n) {\n11:   EXPECT_EQ(Bar(n), 1);\n12:   EXPECT_EQ(Bar(n + 1), 2);\n13: }\n14:\n15: TEST(FooTest, Bar) {\n16:   {\n17:     SCOPED_TRACE(\"A\");  // This trace point will be included in\n18:                         // every failure in this scope.\n19:     Sub1(1);\n20:   }\n21:   // Now it won't.\n22:   Sub1(9);\n23: }\n```\n\ncould result in messages like these:\n\n```none\npath/to/foo_test.cc:11: Failure\nValue of: Bar(n)\nExpected: 1\n  Actual: 2\n   Trace:\npath/to/foo_test.cc:17: A\n\npath/to/foo_test.cc:12: Failure\nValue of: Bar(n + 1)\nExpected: 2\n  Actual: 3\n```\n\nWithout the trace, it would've been difficult to know which invocation of\n`Sub1()` the two failures come from respectively. (You could add an extra\nmessage to each assertion in `Sub1()` to indicate the value of `n`, but that's\ntedious.)\n\nSome tips on using `SCOPED_TRACE`:\n\n1.  With a suitable message, it's often enough to use `SCOPED_TRACE` at the\n    beginning of a sub-routine, instead of at each call site.\n2.  When calling sub-routines inside a loop, make the loop iterator part of the\n    message in `SCOPED_TRACE` such that you can know which iteration the failure\n    is from.\n3.  Sometimes the line number of the trace point is enough for identifying the\n    particular invocation of a sub-routine. In this case, you don't have to\n    choose a unique message for `SCOPED_TRACE`. You can simply use `\"\"`.\n4.  You can use `SCOPED_TRACE` in an inner scope when there is one in the outer\n    scope. In this case, all active trace points will be included in the failure\n    messages, in reverse order they are encountered.\n5.  
The trace dump is clickable in Emacs - hit `return` on a line number and\n    you'll be taken to that line in the source file!\n\n### Propagating Fatal Failures\n\nA common pitfall when using `ASSERT_*` and `FAIL*` is not understanding that\nwhen they fail they only abort the _current function_, not the entire test. For\nexample, the following test will segfault:\n\n```c++\nvoid Subroutine() {\n  // Generates a fatal failure and aborts the current function.\n  ASSERT_EQ(1, 2);\n\n  // The following won't be executed.\n  ...\n}\n\nTEST(FooTest, Bar) {\n  Subroutine();  // The intended behavior is for the fatal failure\n                 // in Subroutine() to abort the entire test.\n\n  // The actual behavior: the function goes on after Subroutine() returns.\n  int* p = NULL;\n  *p = 3;  // Segfault!\n}\n```\n\nTo alleviate this, googletest provides three different solutions. You could use\neither exceptions, the `(ASSERT|EXPECT)_NO_FATAL_FAILURE` assertions or the\n`HasFatalFailure()` function. They are described in the following three\nsubsections.\n\n#### Asserting on Subroutines with an exception\n\nThe following code can turn ASSERT-failure into an exception:\n\n```c++\nclass ThrowListener : public testing::EmptyTestEventListener {\n  void OnTestPartResult(const testing::TestPartResult& result) override {\n    if (result.type() == testing::TestPartResult::kFatalFailure) {\n      throw testing::AssertionException(result);\n    }\n  }\n};\nint main(int argc, char** argv) {\n  ...\n  testing::UnitTest::GetInstance()->listeners().Append(new ThrowListener);\n  return RUN_ALL_TESTS();\n}\n```\n\nThis listener should be added after other listeners if you have any, otherwise\nthey won't see failed `OnTestPartResult`.\n\n#### Asserting on Subroutines\n\nAs shown above, if your test calls a subroutine that has an `ASSERT_*` failure\nin it, the test will continue after the subroutine returns. 
This may not be what\nyou want.\n\nOften people want fatal failures to propagate like exceptions. For that\ngoogletest offers the following macros:\n\nFatal assertion                       | Nonfatal assertion                    | Verifies\n------------------------------------- | ------------------------------------- | --------\n`ASSERT_NO_FATAL_FAILURE(statement);` | `EXPECT_NO_FATAL_FAILURE(statement);` | `statement` doesn't generate any new fatal failures in the current thread.\n\nOnly failures in the thread that executes the assertion are checked to determine\nthe result of this type of assertions. If `statement` creates new threads,\nfailures in these threads are ignored.\n\nExamples:\n\n```c++\nASSERT_NO_FATAL_FAILURE(Foo());\n\nint i;\nEXPECT_NO_FATAL_FAILURE({\n  i = Bar();\n});\n```\n\nAssertions from multiple threads are currently not supported on Windows.\n\n#### Checking for Failures in the Current Test\n\n`HasFatalFailure()` in the `::testing::Test` class returns `true` if an\nassertion in the current test has suffered a fatal failure. 
This allows\nfunctions to catch fatal failures in a sub-routine and return early.\n\n```c++\nclass Test {\n public:\n  ...\n  static bool HasFatalFailure();\n};\n```\n\nThe typical usage, which basically simulates the behavior of a thrown exception,\nis:\n\n```c++\nTEST(FooTest, Bar) {\n  Subroutine();\n  // Aborts if Subroutine() had a fatal failure.\n  if (HasFatalFailure()) return;\n\n  // The following won't be executed.\n  ...\n}\n```\n\nIf `HasFatalFailure()` is used outside of `TEST()` , `TEST_F()` , or a test\nfixture, you must add the `::testing::Test::` prefix, as in:\n\n```c++\nif (::testing::Test::HasFatalFailure()) return;\n```\n\nSimilarly, `HasNonfatalFailure()` returns `true` if the current test has at\nleast one non-fatal failure, and `HasFailure()` returns `true` if the current\ntest has at least one failure of either kind.\n\n## Logging Additional Information\n\nIn your test code, you can call `RecordProperty(\"key\", value)` to log additional\ninformation, where `value` can be either a string or an `int`. The *last* value\nrecorded for a key will be emitted to the\n[XML output](#generating-an-xml-report) if you specify one. For example, the\ntest\n\n```c++\nTEST_F(WidgetUsageTest, MinAndMaxWidgets) {\n  RecordProperty(\"MaximumWidgets\", ComputeMaxUsage());\n  RecordProperty(\"MinimumWidgets\", ComputeMinUsage());\n}\n```\n\nwill output XML like this:\n\n```xml\n  ...\n    <testcase name=\"MinAndMaxWidgets\" status=\"run\" time=\"0.006\" classname=\"WidgetUsageTest\" MaximumWidgets=\"12\" MinimumWidgets=\"9\" />\n  ...\n```\n\n> NOTE:\n>\n> *   `RecordProperty()` is a static member of the `Test` class. 
Therefore it\n>     needs to be prefixed with `::testing::Test::` if used outside of the\n>     `TEST` body and the test fixture class.\n> *   *`key`* must be a valid XML attribute name, and cannot conflict with the\n>     ones already used by googletest (`name`, `status`, `time`, `classname`,\n>     `type_param`, and `value_param`).\n> *   Calling `RecordProperty()` outside of the lifespan of a test is allowed.\n>     If it's called outside of a test but between a test suite's\n>     `SetUpTestSuite()` and `TearDownTestSuite()` methods, it will be\n>     attributed to the XML element for the test suite. If it's called outside\n>     of all test suites (e.g. in a test environment), it will be attributed to\n>     the top-level XML element.\n\n## Sharing Resources Between Tests in the Same Test Suite\n\ngoogletest creates a new test fixture object for each test in order to make\ntests independent and easier to debug. However, sometimes tests use resources\nthat are expensive to set up, making the one-copy-per-test model prohibitively\nexpensive.\n\nIf the tests don't change the resource, there's no harm in their sharing a\nsingle resource copy. So, in addition to per-test set-up/tear-down, googletest\nalso supports per-test-suite set-up/tear-down. To use it:\n\n1.  In your test fixture class (say `FooTest` ), declare as `static` some member\n    variables to hold the shared resources.\n2.  Outside your test fixture class (typically just below it), define those\n    member variables, optionally giving them initial values.\n3.  In the same test fixture class, define a `static void SetUpTestSuite()`\n    function (remember not to spell it as **`SetupTestSuite`** with a small\n    `u`!) to set up the shared resources and a `static void TearDownTestSuite()`\n    function to tear them down.\n\nThat's it! googletest automatically calls `SetUpTestSuite()` before running the\n*first test* in the `FooTest` test suite (i.e. 
before creating the first\n`FooTest` object), and calls `TearDownTestSuite()` after running the *last test*\nin it (i.e. after deleting the last `FooTest` object). In between, the tests can\nuse the shared resources.\n\nRemember that the test order is undefined, so your code can't depend on a test\npreceding or following another. Also, the tests must either not modify the state\nof any shared resource, or, if they do modify the state, they must restore the\nstate to its original value before passing control to the next test.\n\nHere's an example of per-test-suite set-up and tear-down:\n\n```c++\nclass FooTest : public ::testing::Test {\n protected:\n  // Per-test-suite set-up.\n  // Called before the first test in this test suite.\n  // Can be omitted if not needed.\n  static void SetUpTestSuite() {\n    shared_resource_ = new ...;\n  }\n\n  // Per-test-suite tear-down.\n  // Called after the last test in this test suite.\n  // Can be omitted if not needed.\n  static void TearDownTestSuite() {\n    delete shared_resource_;\n    shared_resource_ = NULL;\n  }\n\n  // You can define per-test set-up logic as usual.\n  virtual void SetUp() { ... }\n\n  // You can define per-test tear-down logic as usual.\n  virtual void TearDown() { ... }\n\n  // Some expensive resource shared by all tests.\n  static T* shared_resource_;\n};\n\nT* FooTest::shared_resource_ = NULL;\n\nTEST_F(FooTest, Test1) {\n  ... you can refer to shared_resource_ here ...\n}\n\nTEST_F(FooTest, Test2) {\n  ... you can refer to shared_resource_ here ...\n}\n```\n\nNOTE: Though the above code declares `SetUpTestSuite()` protected, it may\nsometimes be necessary to declare it public, such as when using it with\n`TEST_P`.\n\n## Global Set-Up and Tear-Down\n\nJust as you can do set-up and tear-down at the test level and the test suite\nlevel, you can also do it at the test program level. 
Here's how.\n\nFirst, you subclass the `::testing::Environment` class to define a test\nenvironment, which knows how to set-up and tear-down:\n\n```c++\nclass Environment : public ::testing::Environment {\n public:\n  virtual ~Environment() {}\n\n  // Override this to define how to set up the environment.\n  void SetUp() override {}\n\n  // Override this to define how to tear down the environment.\n  void TearDown() override {}\n};\n```\n\nThen, you register an instance of your environment class with googletest by\ncalling the `::testing::AddGlobalTestEnvironment()` function:\n\n```c++\nEnvironment* AddGlobalTestEnvironment(Environment* env);\n```\n\nNow, when `RUN_ALL_TESTS()` is called, it first calls the `SetUp()` method of\neach environment object, then runs the tests if none of the environments\nreported fatal failures and `GTEST_SKIP()` was not called. `RUN_ALL_TESTS()`\nalways calls `TearDown()` with each environment object, regardless of whether or\nnot the tests were run.\n\nIt's OK to register multiple environment objects. In this suite, their `SetUp()`\nwill be called in the order they are registered, and their `TearDown()` will be\ncalled in the reverse order.\n\nNote that googletest takes ownership of the registered environment objects.\nTherefore **do not delete them** by yourself.\n\nYou should call `AddGlobalTestEnvironment()` before `RUN_ALL_TESTS()` is called,\nprobably in `main()`. If you use `gtest_main`, you need to call this before\n`main()` starts for it to take effect. 
One way to do this is to define a global\nvariable like this:\n\n```c++\n::testing::Environment* const foo_env =\n    ::testing::AddGlobalTestEnvironment(new FooEnvironment);\n```\n\nHowever, we strongly recommend you to write your own `main()` and call\n`AddGlobalTestEnvironment()` there, as relying on initialization of global\nvariables makes the code harder to read and may cause problems when you register\nmultiple environments from different translation units and the environments have\ndependencies among them (remember that the compiler doesn't guarantee the order\nin which global variables from different translation units are initialized).\n\n## Value-Parameterized Tests\n\n*Value-parameterized tests* allow you to test your code with different\nparameters without writing multiple copies of the same test. This is useful in a\nnumber of situations, for example:\n\n*   You have a piece of code whose behavior is affected by one or more\n    command-line flags. You want to make sure your code performs correctly for\n    various values of those flags.\n*   You want to test different implementations of an OO interface.\n*   You want to test your code over various inputs (a.k.a. data-driven testing).\n    This feature is easy to abuse, so please exercise your good sense when doing\n    it!\n\n### How to Write Value-Parameterized Tests\n\nTo write value-parameterized tests, first you should define a fixture class. It\nmust be derived from both `testing::Test` and `testing::WithParamInterface<T>`\n(the latter is a pure interface), where `T` is the type of your parameter\nvalues. For convenience, you can just derive the fixture class from\n`testing::TestWithParam<T>`, which itself is derived from both `testing::Test`\nand `testing::WithParamInterface<T>`. `T` can be any copyable type. 
If it's a\nraw pointer, you are responsible for managing the lifespan of the pointed\nvalues.\n\nNOTE: If your test fixture defines `SetUpTestSuite()` or `TearDownTestSuite()`\nthey must be declared **public** rather than **protected** in order to use\n`TEST_P`.\n\n```c++\nclass FooTest :\n    public testing::TestWithParam<const char*> {\n  // You can implement all the usual fixture class members here.\n  // To access the test parameter, call GetParam() from class\n  // TestWithParam<T>.\n};\n\n// Or, when you want to add parameters to a pre-existing fixture class:\nclass BaseTest : public testing::Test {\n  ...\n};\nclass BarTest : public BaseTest,\n                public testing::WithParamInterface<const char*> {\n  ...\n};\n```\n\nThen, use the `TEST_P` macro to define as many test patterns using this fixture\nas you want. The `_P` suffix is for \"parameterized\" or \"pattern\", whichever you\nprefer to think.\n\n```c++\nTEST_P(FooTest, DoesBlah) {\n  // Inside a test, access the test parameter with the GetParam() method\n  // of the TestWithParam<T> class:\n  EXPECT_TRUE(foo.Blah(GetParam()));\n  ...\n}\n\nTEST_P(FooTest, HasBlahBlah) {\n  ...\n}\n```\n\nFinally, you can use `INSTANTIATE_TEST_SUITE_P` to instantiate the test suite\nwith any set of parameters you want. googletest defines a number of functions\nfor generating test parameters. They return what we call (surprise!) *parameter\ngenerators*. 
Here is a summary of them, which are all in the `testing`\nnamespace:\n\n<!-- mdformat off(github rendering does not support multiline tables) -->\n\n| Parameter Generator                                                                       | Behavior                                                                                                          |\n| ----------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------- |\n| `Range(begin, end [, step])`                                                              | Yields values `{begin, begin+step, begin+step+step, ...}`. The values do not include `end`. `step` defaults to 1. |\n| `Values(v1, v2, ..., vN)`                                                                 | Yields values `{v1, v2, ..., vN}`.                                                                                |\n| `ValuesIn(container)` and  `ValuesIn(begin,end)`                                          | Yields values from a C-style array, an  STL-style container, or an iterator range `[begin, end)`                  |\n| `Bool()`                                                                                  | Yields sequence `{false, true}`.                                                                                  |\n| `Combine(g1, g2, ..., gN)`                                                                | Yields all combinations (Cartesian product) as std\\:\\:tuples of the values generated by the `N` generators.       
|\n\n<!-- mdformat on-->\n\nFor more details, see the comments at the definitions of these functions.\n\nThe following statement will instantiate tests from the `FooTest` test suite\neach with parameter values `\"meeny\"`, `\"miny\"`, and `\"moe\"`.\n\n```c++\nINSTANTIATE_TEST_SUITE_P(InstantiationName,\n                         FooTest,\n                         testing::Values(\"meeny\", \"miny\", \"moe\"));\n```\n\nNOTE: The code above must be placed at global or namespace scope, not at\nfunction scope.\n\nNOTE: Don't forget this step! If you do your test will silently pass, but none\nof its suites will ever run!\n\nTo distinguish different instances of the pattern (yes, you can instantiate it\nmore than once), the first argument to `INSTANTIATE_TEST_SUITE_P` is a prefix\nthat will be added to the actual test suite name. Remember to pick unique\nprefixes for different instantiations. The tests from the instantiation above\nwill have these names:\n\n*   `InstantiationName/FooTest.DoesBlah/0` for `\"meeny\"`\n*   `InstantiationName/FooTest.DoesBlah/1` for `\"miny\"`\n*   `InstantiationName/FooTest.DoesBlah/2` for `\"moe\"`\n*   `InstantiationName/FooTest.HasBlahBlah/0` for `\"meeny\"`\n*   `InstantiationName/FooTest.HasBlahBlah/1` for `\"miny\"`\n*   `InstantiationName/FooTest.HasBlahBlah/2` for `\"moe\"`\n\nYou can use these names in [`--gtest_filter`](#running-a-subset-of-the-tests).\n\nThis statement will instantiate all tests from `FooTest` again, each with\nparameter values `\"cat\"` and `\"dog\"`:\n\n```c++\nconst char* pets[] = {\"cat\", \"dog\"};\nINSTANTIATE_TEST_SUITE_P(AnotherInstantiationName, FooTest,\n                         testing::ValuesIn(pets));\n```\n\nThe tests from the instantiation above will have these names:\n\n*   `AnotherInstantiationName/FooTest.DoesBlah/0` for `\"cat\"`\n*   `AnotherInstantiationName/FooTest.DoesBlah/1` for `\"dog\"`\n*   `AnotherInstantiationName/FooTest.HasBlahBlah/0` for `\"cat\"`\n*   
`AnotherInstantiationName/FooTest.HasBlahBlah/1` for `\"dog\"`\n\nPlease note that `INSTANTIATE_TEST_SUITE_P` will instantiate *all* tests in the\ngiven test suite, whether their definitions come before or *after* the\n`INSTANTIATE_TEST_SUITE_P` statement.\n\nYou can see [sample7_unittest.cc] and [sample8_unittest.cc] for more examples.\n\n[sample7_unittest.cc]: ../samples/sample7_unittest.cc \"Parameterized Test example\"\n[sample8_unittest.cc]: ../samples/sample8_unittest.cc \"Parameterized Test example with multiple parameters\"\n\n### Creating Value-Parameterized Abstract Tests\n\nIn the above, we define and instantiate `FooTest` in the *same* source file.\nSometimes you may want to define value-parameterized tests in a library and let\nother people instantiate them later. This pattern is known as *abstract tests*.\nAs an example of its application, when you are designing an interface you can\nwrite a standard suite of abstract tests (perhaps using a factory function as\nthe test parameter) that all implementations of the interface are expected to\npass. When someone implements the interface, they can instantiate your suite to\nget all the interface-conformance tests for free.\n\nTo define abstract tests, you should organize your code like this:\n\n1.  Put the definition of the parameterized test fixture class (e.g. `FooTest`)\n    in a header file, say `foo_param_test.h`. Think of this as *declaring* your\n    abstract tests.\n2.  Put the `TEST_P` definitions in `foo_param_test.cc`, which includes\n    `foo_param_test.h`. Think of this as *implementing* your abstract tests.\n\nOnce they are defined, you can instantiate them by including `foo_param_test.h`,\ninvoking `INSTANTIATE_TEST_SUITE_P()`, and depending on the library target that\ncontains `foo_param_test.cc`. 
You can instantiate the same abstract test suite\nmultiple times, possibly in different source files.\n\n### Specifying Names for Value-Parameterized Test Parameters\n\nThe optional last argument to `INSTANTIATE_TEST_SUITE_P()` allows the user to\nspecify a function or functor that generates custom test name suffixes based on\nthe test parameters. The function should accept one argument of type\n`testing::TestParamInfo<class ParamType>`, and return `std::string`.\n\n`testing::PrintToStringParamName` is a builtin test suffix generator that\nreturns the value of `testing::PrintToString(GetParam())`. It does not work for\n`std::string` or C strings.\n\nNOTE: test names must be non-empty, unique, and may only contain ASCII\nalphanumeric characters. In particular, they\n[should not contain underscores](faq.md#why-should-test-suite-names-and-test-names-not-contain-underscore)\n\n```c++\nclass MyTestSuite : public testing::TestWithParam<int> {};\n\nTEST_P(MyTestSuite, MyTest)\n{\n  std::cout << \"Example Test Param: \" << GetParam() << std::endl;\n}\n\nINSTANTIATE_TEST_SUITE_P(MyGroup, MyTestSuite, testing::Range(0, 10),\n                         testing::PrintToStringParamName());\n```\n\nProviding a custom functor allows for more control over test parameter name\ngeneration, especially for types where the automatic conversion does not\ngenerate helpful parameter names (e.g. strings as demonstrated above). The\nfollowing example illustrates this for multiple parameters, an enumeration type\nand a string, and also demonstrates how to combine generators. 
It uses a lambda\nfor conciseness:\n\n```c++\nenum class MyType { MY_FOO = 0, MY_BAR = 1 };\n\nclass MyTestSuite : public testing::TestWithParam<std::tuple<MyType, string>> {\n};\n\nINSTANTIATE_TEST_SUITE_P(\n    MyGroup, MyTestSuite,\n    testing::Combine(\n        testing::Values(MyType::MY_FOO, MyType::MY_BAR),\n        testing::Values(\"A\", \"B\")),\n    [](const testing::TestParamInfo<MyTestSuite::ParamType>& info) {\n      string name = absl::StrCat(\n          std::get<0>(info.param) == MyType::MY_FOO ? \"Foo\" : \"Bar\", \"_\",\n          std::get<1>(info.param));\n      absl::c_replace_if(name, [](char c) { return !std::isalnum(c); }, '_');\n      return name;\n    });\n```\n\n## Typed Tests\n\nSuppose you have multiple implementations of the same interface and want to make\nsure that all of them satisfy some common requirements. Or, you may have defined\nseveral types that are supposed to conform to the same \"concept\" and you want to\nverify it. In both cases, you want the same test logic repeated for different\ntypes.\n\nWhile you can write one `TEST` or `TEST_F` for each type you want to test (and\nyou may even factor the test logic into a function template that you invoke from\nthe `TEST`), it's tedious and doesn't scale: if you want `m` tests over `n`\ntypes, you'll end up writing `m*n` `TEST`s.\n\n*Typed tests* allow you to repeat the same test logic over a list of types. You\nonly need to write the test logic once, although you must know the type list\nwhen writing typed tests. Here's how you do it:\n\nFirst, define a fixture class template. 
It should be parameterized by a type.\nRemember to derive it from `::testing::Test`:\n\n```c++\ntemplate <typename T>\nclass FooTest : public ::testing::Test {\n public:\n  ...\n  typedef std::list<T> List;\n  static T shared_;\n  T value_;\n};\n```\n\nNext, associate a list of types with the test suite, which will be repeated for\neach type in the list:\n\n```c++\nusing MyTypes = ::testing::Types<char, int, unsigned int>;\nTYPED_TEST_SUITE(FooTest, MyTypes);\n```\n\nThe type alias (`using` or `typedef`) is necessary for the `TYPED_TEST_SUITE`\nmacro to parse correctly. Otherwise the compiler will think that each comma in\nthe type list introduces a new macro argument.\n\nThen, use `TYPED_TEST()` instead of `TEST_F()` to define a typed test for this\ntest suite. You can repeat this as many times as you want:\n\n```c++\nTYPED_TEST(FooTest, DoesBlah) {\n  // Inside a test, refer to the special name TypeParam to get the type\n  // parameter.  Since we are inside a derived class template, C++ requires\n  // us to visit the members of FooTest via 'this'.\n  TypeParam n = this->value_;\n\n  // To visit static members of the fixture, add the 'TestFixture::'\n  // prefix.\n  n += TestFixture::shared_;\n\n  // To refer to typedefs in the fixture, add the 'typename TestFixture::'\n  // prefix.  The 'typename' is required to satisfy the compiler.\n  typename TestFixture::List values;\n\n  values.push_back(n);\n  ...\n}\n\nTYPED_TEST(FooTest, HasPropertyA) { ... }\n```\n\nYou can see [sample6_unittest.cc] for a complete example.\n\n[sample6_unittest.cc]: ../samples/sample6_unittest.cc \"Typed Test example\"\n\n## Type-Parameterized Tests\n\n*Type-parameterized tests* are like typed tests, except that they don't require\nyou to know the list of types ahead of time. Instead, you can define the test\nlogic first and instantiate it with different type lists later. 
You can even\ninstantiate it more than once in the same program.\n\nIf you are designing an interface or concept, you can define a suite of\ntype-parameterized tests to verify properties that any valid implementation of\nthe interface/concept should have. Then, the author of each implementation can\njust instantiate the test suite with their type to verify that it conforms to\nthe requirements, without having to write similar tests repeatedly. Here's an\nexample:\n\nFirst, define a fixture class template, as we did with typed tests:\n\n```c++\ntemplate <typename T>\nclass FooTest : public ::testing::Test {\n  ...\n};\n```\n\nNext, declare that you will define a type-parameterized test suite:\n\n```c++\nTYPED_TEST_SUITE_P(FooTest);\n```\n\nThen, use `TYPED_TEST_P()` to define a type-parameterized test. You can repeat\nthis as many times as you want:\n\n```c++\nTYPED_TEST_P(FooTest, DoesBlah) {\n  // Inside a test, refer to TypeParam to get the type parameter.\n  TypeParam n = 0;\n  ...\n}\n\nTYPED_TEST_P(FooTest, HasPropertyA) { ... }\n```\n\nNow the tricky part: you need to register all test patterns using the\n`REGISTER_TYPED_TEST_SUITE_P` macro before you can instantiate them. The first\nargument of the macro is the test suite name; the rest are the names of the\ntests in this test suite:\n\n```c++\nREGISTER_TYPED_TEST_SUITE_P(FooTest,\n                            DoesBlah, HasPropertyA);\n```\n\nFinally, you are free to instantiate the pattern with the types you want. If you\nput the above code in a header file, you can `#include` it in multiple C++\nsource files and instantiate it multiple times.\n\n```c++\ntypedef ::testing::Types<char, int, unsigned int> MyTypes;\nINSTANTIATE_TYPED_TEST_SUITE_P(My, FooTest, MyTypes);\n```\n\nTo distinguish different instances of the pattern, the first argument to the\n`INSTANTIATE_TYPED_TEST_SUITE_P` macro is a prefix that will be added to the\nactual test suite name. 
Remember to pick unique prefixes for different\ninstances.\n\nIn the special case where the type list contains only one type, you can write\nthat type directly without `::testing::Types<...>`, like this:\n\n```c++\nINSTANTIATE_TYPED_TEST_SUITE_P(My, FooTest, int);\n```\n\nYou can see [sample6_unittest.cc] for a complete example.\n\n## Testing Private Code\n\nIf you change your software's internal implementation, your tests should not\nbreak as long as the change is not observable by users. Therefore, **per the\nblack-box testing principle, most of the time you should test your code through\nits public interfaces.**\n\n**If you still find yourself needing to test internal implementation code,\nconsider if there's a better design.** The desire to test internal\nimplementation is often a sign that the class is doing too much. Consider\nextracting an implementation class, and testing it. Then use that implementation\nclass in the original class.\n\nIf you absolutely have to test non-public interface code though, you can. There\nare two cases to consider:\n\n*   Static functions ( *not* the same as static member functions!) or unnamed\n    namespaces, and\n*   Private or protected class members\n\nTo test them, we use the following special techniques:\n\n*   Both static functions and definitions/declarations in an unnamed namespace\n    are only visible within the same translation unit. To test them, you can\n    `#include` the entire `.cc` file being tested in your `*_test.cc` file.\n    (#including `.cc` files is not a good way to reuse code - you should not do\n    this in production code!)\n\n    However, a better approach is to move the private code into the\n    `foo::internal` namespace, where `foo` is the namespace your project\n    normally uses, and put the private declarations in a `*-internal.h` file.\n    Your production `.cc` files and your tests are allowed to include this\n    internal header, but your clients are not. 
This way, you can fully test your\n    internal implementation without leaking it to your clients.\n\n*   Private class members are only accessible from within the class or by\n    friends. To access a class' private members, you can declare your test\n    fixture as a friend to the class and define accessors in your fixture. Tests\n    using the fixture can then access the private members of your production\n    class via the accessors in the fixture. Note that even though your fixture\n    is a friend to your production class, your tests are not automatically\n    friends to it, as they are technically defined in sub-classes of the\n    fixture.\n\n    Another way to test private members is to refactor them into an\n    implementation class, which is then declared in a `*-internal.h` file. Your\n    clients aren't allowed to include this header but your tests can. Such is\n    called the\n    [Pimpl](https://www.gamedev.net/articles/programming/general-and-gameplay-programming/the-c-pimpl-r1794/)\n    (Private Implementation) idiom.\n\n    Or, you can declare an individual test as a friend of your class by adding\n    this line in the class body:\n\n    ```c++\n        FRIEND_TEST(TestSuiteName, TestName);\n    ```\n\n    For example,\n\n    ```c++\n    // foo.h\n    class Foo {\n      ...\n     private:\n      FRIEND_TEST(FooTest, BarReturnsZeroOnNull);\n\n      int Bar(void* x);\n    };\n\n    // foo_test.cc\n    ...\n    TEST(FooTest, BarReturnsZeroOnNull) {\n      Foo foo;\n      EXPECT_EQ(foo.Bar(NULL), 0);  // Uses Foo's private member Bar().\n    }\n    ```\n\n    Pay special attention when your class is defined in a namespace, as you\n    should define your test fixtures and tests in the same namespace if you want\n    them to be friends of your class. 
For example, if the code to be tested\n    looks like:\n\n    ```c++\n    namespace my_namespace {\n\n    class Foo {\n      friend class FooTest;\n      FRIEND_TEST(FooTest, Bar);\n      FRIEND_TEST(FooTest, Baz);\n      ... definition of the class Foo ...\n    };\n\n    }  // namespace my_namespace\n    ```\n\n    Your test code should be something like:\n\n    ```c++\n    namespace my_namespace {\n\n    class FooTest : public ::testing::Test {\n     protected:\n      ...\n    };\n\n    TEST_F(FooTest, Bar) { ... }\n    TEST_F(FooTest, Baz) { ... }\n\n    }  // namespace my_namespace\n    ```\n\n## \"Catching\" Failures\n\nIf you are building a testing utility on top of googletest, you'll want to test\nyour utility. What framework would you use to test it? googletest, of course.\n\nThe challenge is to verify that your testing utility reports failures correctly.\nIn frameworks that report a failure by throwing an exception, you could catch\nthe exception and assert on it. But googletest doesn't use exceptions, so how do\nwe test that a piece of code generates an expected failure?\n\ngunit-spi.h contains some constructs to do this. After #including this header,\nyou can use\n\n```c++\n  EXPECT_FATAL_FAILURE(statement, substring);\n```\n\nto assert that `statement` generates a fatal (e.g. `ASSERT_*`) failure in the\ncurrent thread whose message contains the given `substring`, or use\n\n```c++\n  EXPECT_NONFATAL_FAILURE(statement, substring);\n```\n\nif you are expecting a non-fatal (e.g. `EXPECT_*`) failure.\n\nOnly failures in the current thread are checked to determine the result of this\ntype of expectations. If `statement` creates new threads, failures in these\nthreads are also ignored. 
If you want to catch failures in other threads as\nwell, use one of the following macros instead:\n\n```c++\n  EXPECT_FATAL_FAILURE_ON_ALL_THREADS(statement, substring);\n  EXPECT_NONFATAL_FAILURE_ON_ALL_THREADS(statement, substring);\n```\n\nNOTE: Assertions from multiple threads are currently not supported on Windows.\n\nFor technical reasons, there are some caveats:\n\n1.  You cannot stream a failure message to either macro.\n\n2.  `statement` in `EXPECT_FATAL_FAILURE{_ON_ALL_THREADS}()` cannot reference\n    local non-static variables or non-static members of `this` object.\n\n3.  `statement` in `EXPECT_FATAL_FAILURE{_ON_ALL_THREADS}()` cannot return a\n    value.\n\n## Registering tests programmatically\n\nThe `TEST` macros handle the vast majority of all use cases, but there are few\nwhere runtime registration logic is required. For those cases, the framework\nprovides the `::testing::RegisterTest` that allows callers to register arbitrary\ntests dynamically.\n\nThis is an advanced API only to be used when the `TEST` macros are insufficient.\nThe macros should be preferred when possible, as they avoid most of the\ncomplexity of calling this function.\n\nIt provides the following signature:\n\n```c++\ntemplate <typename Factory>\nTestInfo* RegisterTest(const char* test_suite_name, const char* test_name,\n                       const char* type_param, const char* value_param,\n                       const char* file, int line, Factory factory);\n```\n\nThe `factory` argument is a factory callable (move-constructible) object or\nfunction pointer that creates a new instance of the Test object. It handles\nownership to the caller. The signature of the callable is `Fixture*()`, where\n`Fixture` is the test fixture class for the test. All tests registered with the\nsame `test_suite_name` must return the same fixture type. 
This is checked at\nruntime.\n\nThe framework will infer the fixture class from the factory and will call the\n`SetUpTestSuite` and `TearDownTestSuite` for it.\n\nMust be called before `RUN_ALL_TESTS()` is invoked, otherwise behavior is\nundefined.\n\nUse case example:\n\n```c++\nclass MyFixture : public ::testing::Test {\n public:\n  // All of these optional, just like in regular macro usage.\n  static void SetUpTestSuite() { ... }\n  static void TearDownTestSuite() { ... }\n  void SetUp() override { ... }\n  void TearDown() override { ... }\n};\n\nclass MyTest : public MyFixture {\n public:\n  explicit MyTest(int data) : data_(data) {}\n  void TestBody() override { ... }\n\n private:\n  int data_;\n};\n\nvoid RegisterMyTests(const std::vector<int>& values) {\n  for (int v : values) {\n    ::testing::RegisterTest(\n        \"MyFixture\", (\"Test\" + std::to_string(v)).c_str(), nullptr,\n        std::to_string(v).c_str(),\n        __FILE__, __LINE__,\n        // Important to use the fixture type as the return type here.\n        [=]() -> MyFixture* { return new MyTest(v); });\n  }\n}\n...\nint main(int argc, char** argv) {\n  std::vector<int> values_to_test = LoadValuesFromConfig();\n  RegisterMyTests(values_to_test);\n  ...\n  return RUN_ALL_TESTS();\n}\n```\n## Getting the Current Test's Name\n\nSometimes a function may need to know the name of the currently running test.\nFor example, you may be using the `SetUp()` method of your test fixture to set\nthe golden file name based on which test is running. 
The `::testing::TestInfo`\nclass has this information:\n\n```c++\nnamespace testing {\n\nclass TestInfo {\n public:\n  // Returns the test suite name and the test name, respectively.\n  //\n  // Do NOT delete or free the return value - it's managed by the\n  // TestInfo class.\n  const char* test_suite_name() const;\n  const char* name() const;\n};\n\n}\n```\n\nTo obtain a `TestInfo` object for the currently running test, call\n`current_test_info()` on the `UnitTest` singleton object:\n\n```c++\n  // Gets information about the currently running test.\n  // Do NOT delete the returned object - it's managed by the UnitTest class.\n  const ::testing::TestInfo* const test_info =\n    ::testing::UnitTest::GetInstance()->current_test_info();\n\n  printf(\"We are in test %s of test suite %s.\\n\",\n         test_info->name(),\n         test_info->test_suite_name());\n```\n\n`current_test_info()` returns a null pointer if no test is running. In\nparticular, you cannot find the test suite name in `SetUpTestSuite()`,\n`TearDownTestSuite()` (where you know the test suite name implicitly), or\nfunctions called from them.\n\n## Extending googletest by Handling Test Events\n\ngoogletest provides an **event listener API** to let you receive notifications\nabout the progress of a test program and test failures. The events you can\nlisten to include the start and end of the test program, a test suite, or a test\nmethod, among others. You may use this API to augment or replace the standard\nconsole output, replace the XML output, or provide a completely different form\nof output, such as a GUI or a database. 
You can also use test events as\ncheckpoints to implement a resource leak checker, for example.\n\n### Defining Event Listeners\n\nTo define an event listener, you subclass either testing::TestEventListener or\ntesting::EmptyTestEventListener. The former is an (abstract) interface, where\n*each pure virtual method can be overridden to handle a test event* (For\nexample, when a test starts, the `OnTestStart()` method will be called.). The\nlatter provides an empty implementation of all methods in the interface, such\nthat a subclass only needs to override the methods it cares about.\n\nWhen an event is fired, its context is passed to the handler function as an\nargument. The following argument types are used:\n\n*   UnitTest reflects the state of the entire test program,\n*   TestSuite has information about a test suite, which can contain one or more\n    tests,\n*   TestInfo contains the state of a test, and\n*   TestPartResult represents the result of a test assertion.\n\nAn event handler function can examine the argument it receives to find out\ninteresting information about the event and the test program's state.\n\nHere's an example:\n\n```c++\n  class MinimalistPrinter : public ::testing::EmptyTestEventListener {\n    // Called before a test starts.\n    virtual void OnTestStart(const ::testing::TestInfo& test_info) {\n      printf(\"*** Test %s.%s starting.\\n\",\n             test_info.test_suite_name(), test_info.name());\n    }\n\n    // Called after a failed assertion or a SUCCESS().\n    virtual void OnTestPartResult(const ::testing::TestPartResult& test_part_result) {\n      printf(\"%s in %s:%d\\n%s\\n\",\n             test_part_result.failed() ? 
\"*** Failure\" : \"Success\",\n             test_part_result.file_name(),\n             test_part_result.line_number(),\n             test_part_result.summary());\n    }\n\n    // Called after a test ends.\n    virtual void OnTestEnd(const ::testing::TestInfo& test_info) {\n      printf(\"*** Test %s.%s ending.\\n\",\n             test_info.test_suite_name(), test_info.name());\n    }\n  };\n```\n\n### Using Event Listeners\n\nTo use the event listener you have defined, add an instance of it to the\ngoogletest event listener list (represented by class TestEventListeners - note\nthe \"s\" at the end of the name) in your `main()` function, before calling\n`RUN_ALL_TESTS()`:\n\n```c++\nint main(int argc, char** argv) {\n  ::testing::InitGoogleTest(&argc, argv);\n  // Gets hold of the event listener list.\n  ::testing::TestEventListeners& listeners =\n        ::testing::UnitTest::GetInstance()->listeners();\n  // Adds a listener to the end.  googletest takes the ownership.\n  listeners.Append(new MinimalistPrinter);\n  return RUN_ALL_TESTS();\n}\n```\n\nThere's only one problem: the default test result printer is still in effect, so\nits output will mingle with the output from your minimalist printer. To suppress\nthe default printer, just release it from the event listener list and delete it.\nYou can do so by adding one line:\n\n```c++\n  ...\n  delete listeners.Release(listeners.default_result_printer());\n  listeners.Append(new MinimalistPrinter);\n  return RUN_ALL_TESTS();\n```\n\nNow, sit back and enjoy a completely different output from your tests. For more\ndetails, see [sample9_unittest.cc].\n\n[sample9_unittest.cc]: ../samples/sample9_unittest.cc \"Event listener example\"\n\nYou may append more than one listener to the list. 
When an `On*Start()` or\n`OnTestPartResult()` event is fired, the listeners will receive it in the order\nthey appear in the list (since new listeners are added to the end of the list,\nthe default text printer and the default XML generator will receive the event\nfirst). An `On*End()` event will be received by the listeners in the *reverse*\norder. This allows output by listeners added later to be framed by output from\nlisteners added earlier.\n\n### Generating Failures in Listeners\n\nYou may use failure-raising macros (`EXPECT_*()`, `ASSERT_*()`, `FAIL()`, etc)\nwhen processing an event. There are some restrictions:\n\n1.  You cannot generate any failure in `OnTestPartResult()` (otherwise it will\n    cause `OnTestPartResult()` to be called recursively).\n2.  A listener that handles `OnTestPartResult()` is not allowed to generate any\n    failure.\n\nWhen you add listeners to the listener list, you should put listeners that\nhandle `OnTestPartResult()` *before* listeners that can generate failures. This\nensures that failures generated by the latter are attributed to the right test\nby the former.\n\nSee [sample10_unittest.cc] for an example of a failure-raising listener.\n\n[sample10_unittest.cc]: ../samples/sample10_unittest.cc \"Failure-raising listener example\"\n\n## Running Test Programs: Advanced Options\n\ngoogletest test programs are ordinary executables. Once built, you can run them\ndirectly and affect their behavior via the following environment variables\nand/or command line flags. For the flags to work, your programs must call\n`::testing::InitGoogleTest()` before calling `RUN_ALL_TESTS()`.\n\nTo see a list of supported flags and their usage, please run your test program\nwith the `--help` flag. 
You can also use `-h`, `-?`, or `/?` for short.\n\nIf an option is specified both by an environment variable and by a flag, the\nlatter takes precedence.\n\n### Selecting Tests\n\n#### Listing Test Names\n\nSometimes it is necessary to list the available tests in a program before\nrunning them so that a filter may be applied if needed. Including the flag\n`--gtest_list_tests` overrides all other flags and lists tests in the following\nformat:\n\n```none\nTestSuite1.\n  TestName1\n  TestName2\nTestSuite2.\n  TestName\n```\n\nNone of the tests listed are actually run if the flag is provided. There is no\ncorresponding environment variable for this flag.\n\n#### Running a Subset of the Tests\n\nBy default, a googletest program runs all tests the user has defined. Sometimes,\nyou want to run only a subset of the tests (e.g. for debugging or quickly\nverifying a change). If you set the `GTEST_FILTER` environment variable or the\n`--gtest_filter` flag to a filter string, googletest will only run the tests\nwhose full names (in the form of `TestSuiteName.TestName`) match the filter.\n\nThe format of a filter is a '`:`'-separated list of wildcard patterns (called\nthe *positive patterns*) optionally followed by a '`-`' and another\n'`:`'-separated pattern list (called the *negative patterns*). A test matches\nthe filter if and only if it matches any of the positive patterns but does not\nmatch any of the negative patterns.\n\nA pattern may contain `'*'` (matches any string) or `'?'` (matches any single\ncharacter). 
For convenience, the filter `'*-NegativePatterns'` can be also\nwritten as `'-NegativePatterns'`.\n\nFor example:\n\n*   `./foo_test` Has no flag, and thus runs all its tests.\n*   `./foo_test --gtest_filter=*` Also runs everything, due to the single\n    match-everything `*` value.\n*   `./foo_test --gtest_filter=FooTest.*` Runs everything in test suite\n    `FooTest` .\n*   `./foo_test --gtest_filter=*Null*:*Constructor*` Runs any test whose full\n    name contains either `\"Null\"` or `\"Constructor\"` .\n*   `./foo_test --gtest_filter=-*DeathTest.*` Runs all non-death tests.\n*   `./foo_test --gtest_filter=FooTest.*-FooTest.Bar` Runs everything in test\n    suite `FooTest` except `FooTest.Bar`.\n*   `./foo_test --gtest_filter=FooTest.*:BarTest.*-FooTest.Bar:BarTest.Foo` Runs\n    everything in test suite `FooTest` except `FooTest.Bar` and everything in\n    test suite `BarTest` except `BarTest.Foo`.\n\n#### Temporarily Disabling Tests\n\nIf you have a broken test that you cannot fix right away, you can add the\n`DISABLED_` prefix to its name. This will exclude it from execution. This is\nbetter than commenting out the code or using `#if 0`, as disabled tests are\nstill compiled (and thus won't rot).\n\nIf you need to disable all tests in a test suite, you can either add `DISABLED_`\nto the front of the name of each test, or alternatively add it to the front of\nthe test suite name.\n\nFor example, the following tests won't be run by googletest, even though they\nwill still be compiled:\n\n```c++\n// Tests that Foo does Abc.\nTEST(FooTest, DISABLED_DoesAbc) { ... }\n\nclass DISABLED_BarTest : public ::testing::Test { ... };\n\n// Tests that Bar does Xyz.\nTEST_F(DISABLED_BarTest, DoesXyz) { ... }\n```\n\nNOTE: This feature should only be used for temporary pain-relief. You still have\nto fix the disabled tests at a later date. 
As a reminder, googletest will print\na banner warning you if a test program contains any disabled tests.\n\nTIP: You can easily count the number of disabled tests you have using `gsearch`\nand/or `grep`. This number can be used as a metric for improving your test\nquality.\n\n#### Temporarily Enabling Disabled Tests\n\nTo include disabled tests in test execution, just invoke the test program with\nthe `--gtest_also_run_disabled_tests` flag or set the\n`GTEST_ALSO_RUN_DISABLED_TESTS` environment variable to a value other than `0`.\nYou can combine this with the `--gtest_filter` flag to further select which\ndisabled tests to run.\n\n### Repeating the Tests\n\nOnce in a while you'll run into a test whose result is hit-or-miss. Perhaps it\nwill fail only 1% of the time, making it rather hard to reproduce the bug under\na debugger. This can be a major source of frustration.\n\nThe `--gtest_repeat` flag allows you to repeat all (or selected) test methods in\na program many times. Hopefully, a flaky test will eventually fail and give you\na chance to debug. Here's how to use it:\n\n```none\n$ foo_test --gtest_repeat=1000\nRepeat foo_test 1000 times and don't stop at failures.\n\n$ foo_test --gtest_repeat=-1\nA negative count means repeating forever.\n\n$ foo_test --gtest_repeat=1000 --gtest_break_on_failure\nRepeat foo_test 1000 times, stopping at the first failure.  This\nis especially useful when running under a debugger: when the test\nfails, it will drop into the debugger and you can then inspect\nvariables and stacks.\n\n$ foo_test --gtest_repeat=1000 --gtest_filter=FooBar.*\nRepeat the tests whose name matches the filter 1000 times.\n```\n\nIf your test program contains\n[global set-up/tear-down](#global-set-up-and-tear-down) code, it will be\nrepeated in each iteration as well, as the flakiness may be in it. 
You can also\nspecify the repeat count by setting the `GTEST_REPEAT` environment variable.\n\n### Shuffling the Tests\n\nYou can specify the `--gtest_shuffle` flag (or set the `GTEST_SHUFFLE`\nenvironment variable to `1`) to run the tests in a program in a random order.\nThis helps to reveal bad dependencies between tests.\n\nBy default, googletest uses a random seed calculated from the current time.\nTherefore you'll get a different order every time. The console output includes\nthe random seed value, such that you can reproduce an order-related test failure\nlater. To specify the random seed explicitly, use the `--gtest_random_seed=SEED`\nflag (or set the `GTEST_RANDOM_SEED` environment variable), where `SEED` is an\ninteger in the range [0, 99999]. The seed value 0 is special: it tells\ngoogletest to do the default behavior of calculating the seed from the current\ntime.\n\nIf you combine this with `--gtest_repeat=N`, googletest will pick a different\nrandom seed and re-shuffle the tests in each iteration.\n\n### Controlling Test Output\n\n#### Colored Terminal Output\n\ngoogletest can use colors in its terminal output to make it easier to spot the\nimportant information:\n\n<code>\n...<br/>\n  <font color=\"green\">[----------]</font><font color=\"black\"> 1 test from\n  FooTest</font><br/>\n  <font color=\"green\">[ RUN &nbsp; &nbsp; &nbsp;]</font><font color=\"black\">\n  FooTest.DoesAbc</font><br/>\n  <font color=\"green\">[ &nbsp; &nbsp; &nbsp; OK ]</font><font color=\"black\">\n  FooTest.DoesAbc </font><br/>\n  <font color=\"green\">[----------]</font><font color=\"black\">\n  2 tests from BarTest</font><br/>\n  <font color=\"green\">[ RUN &nbsp; &nbsp; &nbsp;]</font><font color=\"black\">\n  BarTest.HasXyzProperty </font><br/>\n  <font color=\"green\">[ &nbsp; &nbsp; &nbsp; OK ]</font><font color=\"black\">\n  BarTest.HasXyzProperty</font><br/>\n  <font color=\"green\">[ RUN &nbsp; &nbsp; &nbsp;]</font><font color=\"black\">\n  
BarTest.ReturnsTrueOnSuccess ... some error messages ...</font><br/>\n  <font color=\"red\">[ &nbsp; FAILED ]</font><font color=\"black\">\n  BarTest.ReturnsTrueOnSuccess ...</font><br/>\n  <font color=\"green\">[==========]</font><font color=\"black\">\n  30 tests from 14 test suites ran.</font><br/>\n  <font color=\"green\">[ &nbsp; PASSED ]</font><font color=\"black\">\n  28 tests.</font><br/>\n  <font color=\"red\">[ &nbsp; FAILED ]</font><font color=\"black\">\n  2 tests, listed below:</font><br/>\n  <font color=\"red\">[ &nbsp; FAILED ]</font><font color=\"black\">\n  BarTest.ReturnsTrueOnSuccess</font><br/>\n  <font color=\"red\">[ &nbsp; FAILED ]</font><font color=\"black\">\n  AnotherTest.DoesXyz<br/>\n<br/>\n  2 FAILED TESTS\n  </font>\n</code>\n\nYou can set the `GTEST_COLOR` environment variable or the `--gtest_color`\ncommand line flag to `yes`, `no`, or `auto` (the default) to enable colors,\ndisable colors, or let googletest decide. When the value is `auto`, googletest\nwill use colors if and only if the output goes to a terminal and (on non-Windows\nplatforms) the `TERM` environment variable is set to `xterm` or `xterm-color`.\n\n#### Suppressing the Elapsed Time\n\nBy default, googletest prints the time it takes to run each test. To disable\nthat, run the test program with the `--gtest_print_time=0` command line flag, or\nset the GTEST_PRINT_TIME environment variable to `0`.\n\n#### Suppressing UTF-8 Text Output\n\nIn case of assertion failures, googletest prints expected and actual values of\ntype `string` both as hex-encoded strings as well as in readable UTF-8 text if\nthey contain valid non-ASCII UTF-8 characters. 
If you want to suppress the UTF-8\ntext because, for example, you don't have an UTF-8 compatible output medium, run\nthe test program with `--gtest_print_utf8=0` or set the `GTEST_PRINT_UTF8`\nenvironment variable to `0`.\n\n\n\n#### Generating an XML Report\n\ngoogletest can emit a detailed XML report to a file in addition to its normal\ntextual output. The report contains the duration of each test, and thus can help\nyou identify slow tests. The report is also used by the http://unittest\ndashboard to show per-test-method error messages.\n\nTo generate the XML report, set the `GTEST_OUTPUT` environment variable or the\n`--gtest_output` flag to the string `\"xml:path_to_output_file\"`, which will\ncreate the file at the given location. You can also just use the string `\"xml\"`,\nin which case the output can be found in the `test_detail.xml` file in the\ncurrent directory.\n\nIf you specify a directory (for example, `\"xml:output/directory/\"` on Linux or\n`\"xml:output\\directory\\\"` on Windows), googletest will create the XML file in\nthat directory, named after the test executable (e.g. `foo_test.xml` for test\nprogram `foo_test` or `foo_test.exe`). If the file already exists (perhaps left\nover from a previous run), googletest will pick a different name (e.g.\n`foo_test_1.xml`) to avoid overwriting it.\n\nThe report is based on the `junitreport` Ant task. 
Since that format was\noriginally intended for Java, a little interpretation is required to make it\napply to googletest tests, as shown here:\n\n```xml\n<testsuites name=\"AllTests\" ...>\n  <testsuite name=\"test_case_name\" ...>\n    <testcase    name=\"test_name\" ...>\n      <failure message=\"...\"/>\n      <failure message=\"...\"/>\n      <failure message=\"...\"/>\n    </testcase>\n  </testsuite>\n</testsuites>\n```\n\n*   The root `<testsuites>` element corresponds to the entire test program.\n*   `<testsuite>` elements correspond to googletest test suites.\n*   `<testcase>` elements correspond to googletest test functions.\n\nFor instance, the following program\n\n```c++\nTEST(MathTest, Addition) { ... }\nTEST(MathTest, Subtraction) { ... }\nTEST(LogicTest, NonContradiction) { ... }\n```\n\ncould generate this report:\n\n```xml\n<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<testsuites tests=\"3\" failures=\"1\" errors=\"0\" time=\"0.035\" timestamp=\"2011-10-31T18:52:42\" name=\"AllTests\">\n  <testsuite name=\"MathTest\" tests=\"2\" failures=\"1\" errors=\"0\" time=\"0.015\">\n    <testcase name=\"Addition\" status=\"run\" time=\"0.007\" classname=\"\">\n      <failure message=\"Value of: add(1, 1)&#x0A;  Actual: 3&#x0A;Expected: 2\" type=\"\">...</failure>\n      <failure message=\"Value of: add(1, -1)&#x0A;  Actual: 1&#x0A;Expected: 0\" type=\"\">...</failure>\n    </testcase>\n    <testcase name=\"Subtraction\" status=\"run\" time=\"0.005\" classname=\"\">\n    </testcase>\n  </testsuite>\n  <testsuite name=\"LogicTest\" tests=\"1\" failures=\"0\" errors=\"0\" time=\"0.005\">\n    <testcase name=\"NonContradiction\" status=\"run\" time=\"0.005\" classname=\"\">\n    </testcase>\n  </testsuite>\n</testsuites>\n```\n\nThings to note:\n\n*   The `tests` attribute of a `<testsuites>` or `<testsuite>` element tells how\n    many test functions the googletest program or test suite contains, while the\n    `failures` attribute tells how many of them 
failed.\n\n*   The `time` attribute expresses the duration of the test, test suite, or\n    entire test program in seconds.\n\n*   The `timestamp` attribute records the local date and time of the test\n    execution.\n\n*   Each `<failure>` element corresponds to a single failed googletest\n    assertion.\n\n#### Generating a JSON Report\n\ngoogletest can also emit a JSON report as an alternative format to XML. To\ngenerate the JSON report, set the `GTEST_OUTPUT` environment variable or the\n`--gtest_output` flag to the string `\"json:path_to_output_file\"`, which will\ncreate the file at the given location. You can also just use the string\n`\"json\"`, in which case the output can be found in the `test_detail.json` file\nin the current directory.\n\nThe report format conforms to the following JSON Schema:\n\n```json\n{\n  \"$schema\": \"http://json-schema.org/schema#\",\n  \"type\": \"object\",\n  \"definitions\": {\n    \"TestCase\": {\n      \"type\": \"object\",\n      \"properties\": {\n        \"name\": { \"type\": \"string\" },\n        \"tests\": { \"type\": \"integer\" },\n        \"failures\": { \"type\": \"integer\" },\n        \"disabled\": { \"type\": \"integer\" },\n        \"time\": { \"type\": \"string\" },\n        \"testsuite\": {\n          \"type\": \"array\",\n          \"items\": {\n            \"$ref\": \"#/definitions/TestInfo\"\n          }\n        }\n      }\n    },\n    \"TestInfo\": {\n      \"type\": \"object\",\n      \"properties\": {\n        \"name\": { \"type\": \"string\" },\n        \"status\": {\n          \"type\": \"string\",\n          \"enum\": [\"RUN\", \"NOTRUN\"]\n        },\n        \"time\": { \"type\": \"string\" },\n        \"classname\": { \"type\": \"string\" },\n        \"failures\": {\n          \"type\": \"array\",\n          \"items\": {\n            \"$ref\": \"#/definitions/Failure\"\n          }\n        }\n      }\n    },\n    \"Failure\": {\n      \"type\": \"object\",\n      \"properties\": {\n        
\"failures\": { \"type\": \"string\" },\n        \"type\": { \"type\": \"string\" }\n      }\n    }\n  },\n  \"properties\": {\n    \"tests\": { \"type\": \"integer\" },\n    \"failures\": { \"type\": \"integer\" },\n    \"disabled\": { \"type\": \"integer\" },\n    \"errors\": { \"type\": \"integer\" },\n    \"timestamp\": {\n      \"type\": \"string\",\n      \"format\": \"date-time\"\n    },\n    \"time\": { \"type\": \"string\" },\n    \"name\": { \"type\": \"string\" },\n    \"testsuites\": {\n      \"type\": \"array\",\n      \"items\": {\n        \"$ref\": \"#/definitions/TestCase\"\n      }\n    }\n  }\n}\n```\n\nThe report uses the format that conforms to the following Proto3 using the\n[JSON encoding](https://developers.google.com/protocol-buffers/docs/proto3#json):\n\n```proto\nsyntax = \"proto3\";\n\npackage googletest;\n\nimport \"google/protobuf/timestamp.proto\";\nimport \"google/protobuf/duration.proto\";\n\nmessage UnitTest {\n  int32 tests = 1;\n  int32 failures = 2;\n  int32 disabled = 3;\n  int32 errors = 4;\n  google.protobuf.Timestamp timestamp = 5;\n  google.protobuf.Duration time = 6;\n  string name = 7;\n  repeated TestCase testsuites = 8;\n}\n\nmessage TestCase {\n  string name = 1;\n  int32 tests = 2;\n  int32 failures = 3;\n  int32 disabled = 4;\n  int32 errors = 5;\n  google.protobuf.Duration time = 6;\n  repeated TestInfo testsuite = 7;\n}\n\nmessage TestInfo {\n  string name = 1;\n  enum Status {\n    RUN = 0;\n    NOTRUN = 1;\n  }\n  Status status = 2;\n  google.protobuf.Duration time = 3;\n  string classname = 4;\n  message Failure {\n    string failures = 1;\n    string type = 2;\n  }\n  repeated Failure failures = 5;\n}\n```\n\nFor instance, the following program\n\n```c++\nTEST(MathTest, Addition) { ... }\nTEST(MathTest, Subtraction) { ... }\nTEST(LogicTest, NonContradiction) { ... 
}\n```\n\ncould generate this report:\n\n```json\n{\n  \"tests\": 3,\n  \"failures\": 1,\n  \"errors\": 0,\n  \"time\": \"0.035s\",\n  \"timestamp\": \"2011-10-31T18:52:42Z\",\n  \"name\": \"AllTests\",\n  \"testsuites\": [\n    {\n      \"name\": \"MathTest\",\n      \"tests\": 2,\n      \"failures\": 1,\n      \"errors\": 0,\n      \"time\": \"0.015s\",\n      \"testsuite\": [\n        {\n          \"name\": \"Addition\",\n          \"status\": \"RUN\",\n          \"time\": \"0.007s\",\n          \"classname\": \"\",\n          \"failures\": [\n            {\n              \"message\": \"Value of: add(1, 1)\\n  Actual: 3\\nExpected: 2\",\n              \"type\": \"\"\n            },\n            {\n              \"message\": \"Value of: add(1, -1)\\n  Actual: 1\\nExpected: 0\",\n              \"type\": \"\"\n            }\n          ]\n        },\n        {\n          \"name\": \"Subtraction\",\n          \"status\": \"RUN\",\n          \"time\": \"0.005s\",\n          \"classname\": \"\"\n        }\n      ]\n    },\n    {\n      \"name\": \"LogicTest\",\n      \"tests\": 1,\n      \"failures\": 0,\n      \"errors\": 0,\n      \"time\": \"0.005s\",\n      \"testsuite\": [\n        {\n          \"name\": \"NonContradiction\",\n          \"status\": \"RUN\",\n          \"time\": \"0.005s\",\n          \"classname\": \"\"\n        }\n      ]\n    }\n  ]\n}\n```\n\nIMPORTANT: The exact format of the JSON document is subject to change.\n\n### Controlling How Failures Are Reported\n\n#### Turning Assertion Failures into Break-Points\n\nWhen running test programs under a debugger, it's very convenient if the\ndebugger can catch an assertion failure and automatically drop into interactive\nmode. googletest's *break-on-failure* mode supports this behavior.\n\nTo enable it, set the `GTEST_BREAK_ON_FAILURE` environment variable to a value\nother than `0`. 
Alternatively, you can use the `--gtest_break_on_failure`\ncommand line flag.\n\n#### Disabling Catching Test-Thrown Exceptions\n\ngoogletest can be used either with or without exceptions enabled. If a test\nthrows a C++ exception or (on Windows) a structured exception (SEH), by default\ngoogletest catches it, reports it as a test failure, and continues with the next\ntest method. This maximizes the coverage of a test run. Also, on Windows an\nuncaught exception will cause a pop-up window, so catching the exceptions allows\nyou to run the tests automatically.\n\nWhen debugging the test failures, however, you may instead want the exceptions\nto be handled by the debugger, such that you can examine the call stack when an\nexception is thrown. To achieve that, set the `GTEST_CATCH_EXCEPTIONS`\nenvironment variable to `0`, or use the `--gtest_catch_exceptions=0` flag when\nrunning the tests.\n"
  },
  {
    "path": "test/googletest/docs/faq.md",
    "content": "# Googletest FAQ\n\n<!-- GOOGLETEST_CM0014 DO NOT DELETE -->\n\n## Why should test suite names and test names not contain underscore?\n\nUnderscore (`_`) is special, as C++ reserves the following to be used by the\ncompiler and the standard library:\n\n1.  any identifier that starts with an `_` followed by an upper-case letter, and\n2.  any identifier that contains two consecutive underscores (i.e. `__`)\n    *anywhere* in its name.\n\nUser code is *prohibited* from using such identifiers.\n\nNow let's look at what this means for `TEST` and `TEST_F`.\n\nCurrently `TEST(TestSuiteName, TestName)` generates a class named\n`TestSuiteName_TestName_Test`. What happens if `TestSuiteName` or `TestName`\ncontains `_`?\n\n1.  If `TestSuiteName` starts with an `_` followed by an upper-case letter (say,\n    `_Foo`), we end up with `_Foo_TestName_Test`, which is reserved and thus\n    invalid.\n2.  If `TestSuiteName` ends with an `_` (say, `Foo_`), we get\n    `Foo__TestName_Test`, which is invalid.\n3.  If `TestName` starts with an `_` (say, `_Bar`), we get\n    `TestSuiteName__Bar_Test`, which is invalid.\n4.  If `TestName` ends with an `_` (say, `Bar_`), we get\n    `TestSuiteName_Bar__Test`, which is invalid.\n\nSo clearly `TestSuiteName` and `TestName` cannot start or end with `_`\n(Actually, `TestSuiteName` can start with `_` -- as long as the `_` isn't\nfollowed by an upper-case letter. But that's getting complicated. So for\nsimplicity we just say that it cannot start with `_`.).\n\nIt may seem fine for `TestSuiteName` and `TestName` to contain `_` in the\nmiddle. However, consider this:\n\n```c++\nTEST(Time, Flies_Like_An_Arrow) { ... }\nTEST(Time_Flies, Like_An_Arrow) { ... }\n```\n\nNow, the two `TEST`s will both generate the same class\n(`Time_Flies_Like_An_Arrow_Test`). That's not good.\n\nSo for simplicity, we just ask the users to avoid `_` in `TestSuiteName` and\n`TestName`. 
The rule is more constraining than necessary, but it's simple and\neasy to remember. It also gives googletest some wiggle room in case its\nimplementation needs to change in the future.\n\nIf you violate the rule, there may not be immediate consequences, but your test\nmay (just may) break with a new compiler (or a new version of the compiler you\nare using) or with a new version of googletest. Therefore it's best to follow\nthe rule.\n\n## Why does googletest support `EXPECT_EQ(NULL, ptr)` and `ASSERT_EQ(NULL, ptr)` but not `EXPECT_NE(NULL, ptr)` and `ASSERT_NE(NULL, ptr)`?\n\nFirst of all you can use `EXPECT_NE(nullptr, ptr)` and `ASSERT_NE(nullptr,\nptr)`. This is the preferred syntax in the style guide because nullptr does not\nhave the type problems that NULL does. Which is why NULL does not work.\n\nDue to some peculiarity of C++, it requires some non-trivial template meta\nprogramming tricks to support using `NULL` as an argument of the `EXPECT_XX()`\nand `ASSERT_XX()` macros. Therefore we only do it where it's most needed\n(otherwise we make the implementation of googletest harder to maintain and more\nerror-prone than necessary).\n\nThe `EXPECT_EQ()` macro takes the *expected* value as its first argument and the\n*actual* value as the second. It's reasonable that someone wants to write\n`EXPECT_EQ(NULL, some_expression)`, and this indeed was requested several times.\nTherefore we implemented it.\n\nThe need for `EXPECT_NE(NULL, ptr)` isn't nearly as strong. When the assertion\nfails, you already know that `ptr` must be `NULL`, so it doesn't add any\ninformation to print `ptr` in this case. That means `EXPECT_TRUE(ptr != NULL)`\nworks just as well.\n\nIf we were to support `EXPECT_NE(NULL, ptr)`, for consistency we'll have to\nsupport `EXPECT_NE(ptr, NULL)` as well, as unlike `EXPECT_EQ`, we don't have a\nconvention on the order of the two arguments for `EXPECT_NE`. 
This means using\nthe template meta programming tricks twice in the implementation, making it even\nharder to understand and maintain. We believe the benefit doesn't justify the\ncost.\n\nFinally, with the growth of the gMock matcher library, we are encouraging people\nto use the unified `EXPECT_THAT(value, matcher)` syntax more often in tests. One\nsignificant advantage of the matcher approach is that matchers can be easily\ncombined to form new matchers, while the `EXPECT_NE`, etc, macros cannot be\neasily combined. Therefore we want to invest more in the matchers than in the\n`EXPECT_XX()` macros.\n\n## I need to test that different implementations of an interface satisfy some common requirements. Should I use typed tests or value-parameterized tests?\n\nFor testing various implementations of the same interface, either typed tests or\nvalue-parameterized tests can get it done. It's really up to you the user to\ndecide which is more convenient for you, depending on your particular case. Some\nrough guidelines:\n\n*   Typed tests can be easier to write if instances of the different\n    implementations can be created the same way, modulo the type. For example,\n    if all these implementations have a public default constructor (such that\n    you can write `new TypeParam`), or if their factory functions have the same\n    form (e.g. `CreateInstance<TypeParam>()`).\n*   Value-parameterized tests can be easier to write if you need different code\n    patterns to create different implementations' instances, e.g. `new Foo` vs\n    `new Bar(5)`. To accommodate for the differences, you can write factory\n    function wrappers and pass these function pointers to the tests as their\n    parameters.\n*   When a typed test fails, the default output includes the name of the type,\n    which can help you quickly identify which implementation is wrong.\n    Value-parameterized tests only show the number of the failed iteration by\n    default. 
You will need to define a function that returns the iteration name\n    and pass it as the third parameter to INSTANTIATE_TEST_SUITE_P to have more\n    useful output.\n*   When using typed tests, you need to make sure you are testing against the\n    interface type, not the concrete types (in other words, you want to make\n    sure `implicit_cast<MyInterface*>(my_concrete_impl)` works, not just that\n    `my_concrete_impl` works). It's less likely to make mistakes in this area\n    when using value-parameterized tests.\n\nI hope I didn't confuse you more. :-) If you don't mind, I'd suggest you to give\nboth approaches a try. Practice is a much better way to grasp the subtle\ndifferences between the two tools. Once you have some concrete experience, you\ncan much more easily decide which one to use the next time.\n\n## I got some run-time errors about invalid proto descriptors when using `ProtocolMessageEquals`. Help!\n\n**Note:** `ProtocolMessageEquals` and `ProtocolMessageEquiv` are *deprecated*\nnow. Please use `EqualsProto`, etc instead.\n\n`ProtocolMessageEquals` and `ProtocolMessageEquiv` were redefined recently and\nare now less tolerant of invalid protocol buffer definitions. In particular, if\nyou have a `foo.proto` that doesn't fully qualify the type of a protocol message\nit references (e.g. `message<Bar>` where it should be `message<blah.Bar>`), you\nwill now get run-time errors like:\n\n```\n... descriptor.cc:...] Invalid proto descriptor for file \"path/to/foo.proto\":\n... descriptor.cc:...]  blah.MyMessage.my_field: \".Bar\" is not defined.\n```\n\nIf you see this, your `.proto` file is broken and needs to be fixed by making\nthe types fully qualified. The new definition of `ProtocolMessageEquals` and\n`ProtocolMessageEquiv` just happen to reveal your bug.\n\n## My death test modifies some state, but the change seems lost after the death test finishes. Why?\n\nDeath tests (`EXPECT_DEATH`, etc) are executed in a sub-process s.t. 
the\nexpected crash won't kill the test program (i.e. the parent process). As a\nresult, any in-memory side effects they incur are observable in their respective\nsub-processes, but not in the parent process. You can think of them as running\nin a parallel universe, more or less.\n\nIn particular, if you use mocking and the death test statement invokes some mock\nmethods, the parent process will think the calls have never occurred. Therefore,\nyou may want to move your `EXPECT_CALL` statements inside the `EXPECT_DEATH`\nmacro.\n\n## EXPECT_EQ(htonl(blah), blah_blah) generates weird compiler errors in opt mode. Is this a googletest bug?\n\nActually, the bug is in `htonl()`.\n\nAccording to `'man htonl'`, `htonl()` is a *function*, which means it's valid to\nuse `htonl` as a function pointer. However, in opt mode `htonl()` is defined as\na *macro*, which breaks this usage.\n\nWorse, the macro definition of `htonl()` uses a `gcc` extension and is *not*\nstandard C++. That hacky implementation has some ad hoc limitations. In\nparticular, it prevents you from writing `Foo<sizeof(htonl(x))>()`, where `Foo`\nis a template that has an integral argument.\n\nThe implementation of `EXPECT_EQ(a, b)` uses `sizeof(... a ...)` inside a\ntemplate argument, and thus doesn't compile in opt mode when `a` contains a call\nto `htonl()`. It is difficult to make `EXPECT_EQ` bypass the `htonl()` bug, as\nthe solution must work with different compilers on various platforms.\n\n`htonl()` has some other problems as described in `//util/endian/endian.h`,\nwhich defines `ghtonl()` to replace it. `ghtonl()` does the same thing `htonl()`\ndoes, only without its problems. We suggest you to use `ghtonl()` instead of\n`htonl()`, both in your tests and production code.\n\n`//util/endian/endian.h` also defines `ghtons()`, which solves similar problems\nin `htons()`.\n\nDon't forget to add `//util/endian` to the list of dependencies in the `BUILD`\nfile wherever `ghtonl()` and `ghtons()` are used. 
The library consists of a\nsingle header file and will not bloat your binary.\n\n## The compiler complains about \"undefined references\" to some static const member variables, but I did define them in the class body. What's wrong?\n\nIf your class has a static data member:\n\n```c++\n// foo.h\nclass Foo {\n  ...\n  static const int kBar = 100;\n};\n```\n\nYou also need to define it *outside* of the class body in `foo.cc`:\n\n```c++\nconst int Foo::kBar;  // No initializer here.\n```\n\nOtherwise your code is **invalid C++**, and may break in unexpected ways. In\nparticular, using it in googletest comparison assertions (`EXPECT_EQ`, etc) will\ngenerate an \"undefined reference\" linker error. The fact that \"it used to work\"\ndoesn't mean it's valid. It just means that you were lucky. :-)\n\n## Can I derive a test fixture from another?\n\nYes.\n\nEach test fixture has a corresponding and same named test suite. This means only\none test suite can use a particular fixture. Sometimes, however, multiple test\ncases may want to use the same or slightly different fixtures. For example, you\nmay want to make sure that all of a GUI library's test suites don't leak\nimportant system resources like fonts and brushes.\n\nIn googletest, you share a fixture among test suites by putting the shared logic\nin a base test fixture, then deriving from that base a separate fixture for each\ntest suite that wants to use this common logic. You then use `TEST_F()` to write\ntests using each derived fixture.\n\nTypically, your code looks like this:\n\n```c++\n// Defines a base test fixture.\nclass BaseTest : public ::testing::Test {\n protected:\n  ...\n};\n\n// Derives a fixture FooTest from BaseTest.\nclass FooTest : public BaseTest {\n protected:\n  void SetUp() override {\n    BaseTest::SetUp();  // Sets up the base fixture first.\n    ... additional set-up work ...\n  }\n\n  void TearDown() override {\n    ... 
clean-up work for FooTest ...\n    BaseTest::TearDown();  // Remember to tear down the base fixture\n                           // after cleaning up FooTest!\n  }\n\n  ... functions and variables for FooTest ...\n};\n\n// Tests that use the fixture FooTest.\nTEST_F(FooTest, Bar) { ... }\nTEST_F(FooTest, Baz) { ... }\n\n... additional fixtures derived from BaseTest ...\n```\n\nIf necessary, you can continue to derive test fixtures from a derived fixture.\ngoogletest has no limit on how deep the hierarchy can be.\n\nFor a complete example using derived test fixtures, see\n[sample5_unittest.cc](../samples/sample5_unittest.cc).\n\n## My compiler complains \"void value not ignored as it ought to be.\" What does this mean?\n\nYou're probably using an `ASSERT_*()` in a function that doesn't return `void`.\n`ASSERT_*()` can only be used in `void` functions, due to exceptions being\ndisabled by our build system. Please see more details\n[here](advanced.md#assertion-placement).\n\n## My death test hangs (or seg-faults). How do I fix it?\n\nIn googletest, death tests are run in a child process and the way they work is\ndelicate. To write death tests you really need to understand how they work.\nPlease make sure you have read [this](advanced.md#how-it-works).\n\nIn particular, death tests don't like having multiple threads in the parent\nprocess. So the first thing you can try is to eliminate creating threads outside\nof `EXPECT_DEATH()`. For example, you may want to use mocks or fake objects\ninstead of real ones in your tests.\n\nSometimes this is impossible as some library you must use may be creating\nthreads before `main()` is even reached. In this case, you can try to minimize\nthe chance of conflicts by either moving as many activities as possible inside\n`EXPECT_DEATH()` (in the extreme case, you want to move everything inside), or\nleaving as few things as possible in it. 
Also, you can try to set the death test\nstyle to `\"threadsafe\"`, which is safer but slower, and see if it helps.\n\nIf you go with thread-safe death tests, remember that they rerun the test\nprogram from the beginning in the child process. Therefore make sure your\nprogram can run side-by-side with itself and is deterministic.\n\nIn the end, this boils down to good concurrent programming. You have to make\nsure that there is no race conditions or dead locks in your program. No silver\nbullet - sorry!\n\n## Should I use the constructor/destructor of the test fixture or SetUp()/TearDown()? {#CtorVsSetUp}\n\nThe first thing to remember is that googletest does **not** reuse the same test\nfixture object across multiple tests. For each `TEST_F`, googletest will create\na **fresh** test fixture object, immediately call `SetUp()`, run the test body,\ncall `TearDown()`, and then delete the test fixture object.\n\nWhen you need to write per-test set-up and tear-down logic, you have the choice\nbetween using the test fixture constructor/destructor or `SetUp()/TearDown()`.\nThe former is usually preferred, as it has the following benefits:\n\n*   By initializing a member variable in the constructor, we have the option to\n    make it `const`, which helps prevent accidental changes to its value and\n    makes the tests more obviously correct.\n*   In case we need to subclass the test fixture class, the subclass'\n    constructor is guaranteed to call the base class' constructor *first*, and\n    the subclass' destructor is guaranteed to call the base class' destructor\n    *afterward*. 
With `SetUp()/TearDown()`, a subclass may make the mistake of\n    forgetting to call the base class' `SetUp()/TearDown()` or call them at the\n    wrong time.\n\nYou may still want to use `SetUp()/TearDown()` in the following cases:\n\n*   C++ does not allow virtual function calls in constructors and destructors.\n    You can call a method declared as virtual, but it will not use dynamic\n    dispatch, it will use the definition from the class the constructor of which\n    is currently executing. This is because calling a virtual method before the\n    derived class constructor has a chance to run is very dangerous - the\n    virtual method might operate on uninitialized data. Therefore, if you need\n    to call a method that will be overridden in a derived class, you have to use\n    `SetUp()/TearDown()`.\n*   In the body of a constructor (or destructor), it's not possible to use the\n    `ASSERT_xx` macros. Therefore, if the set-up operation could cause a fatal\n    test failure that should prevent the test from running, it's necessary to\n    use `abort` <!-- GOOGLETEST_CM0015 DO NOT DELETE --> and abort the whole test executable,\n    or to use `SetUp()` instead of a constructor.\n*   If the tear-down operation could throw an exception, you must use\n    `TearDown()` as opposed to the destructor, as throwing in a destructor leads\n    to undefined behavior and usually will kill your program right away. Note\n    that many standard libraries (like STL) may throw when exceptions are\n    enabled in the compiler. Therefore you should prefer `TearDown()` if you\n    want to write portable tests that work with or without exceptions.\n*   The googletest team is considering making the assertion macros throw on\n    platforms where exceptions are enabled (e.g. Windows, Mac OS, and Linux\n    client-side), which will eliminate the need for the user to propagate\n    failures from a subroutine to its caller. 
Therefore, you shouldn't use\n    googletest assertions in a destructor if your code could run on such a\n    platform.\n\n## The compiler complains \"no matching function to call\" when I use ASSERT_PRED*. How do I fix it?\n\nIf the predicate function you use in `ASSERT_PRED*` or `EXPECT_PRED*` is\noverloaded or a template, the compiler will have trouble figuring out which\noverloaded version it should use. `ASSERT_PRED_FORMAT*` and\n`EXPECT_PRED_FORMAT*` don't have this problem.\n\nIf you see this error, you might want to switch to\n`(ASSERT|EXPECT)_PRED_FORMAT*`, which will also give you a better failure\nmessage. If, however, that is not an option, you can resolve the problem by\nexplicitly telling the compiler which version to pick.\n\nFor example, suppose you have\n\n```c++\nbool IsPositive(int n) {\n  return n > 0;\n}\n\nbool IsPositive(double x) {\n  return x > 0;\n}\n```\n\nyou will get a compiler error if you write\n\n```c++\nEXPECT_PRED1(IsPositive, 5);\n```\n\nHowever, this will work:\n\n```c++\nEXPECT_PRED1(static_cast<bool (*)(int)>(IsPositive), 5);\n```\n\n(The stuff inside the angled brackets for the `static_cast` operator is the type\nof the function pointer for the `int`-version of `IsPositive()`.)\n\nAs another example, when you have a template function\n\n```c++\ntemplate <typename T>\nbool IsNegative(T x) {\n  return x < 0;\n}\n```\n\nyou can use it in a predicate assertion like this:\n\n```c++\nASSERT_PRED1(IsNegative<int>, -5);\n```\n\nThings are more interesting if your template has more than one parameters. The\nfollowing won't compile:\n\n```c++\nASSERT_PRED2(GreaterThan<int, int>, 5, 0);\n```\n\nas the C++ pre-processor thinks you are giving `ASSERT_PRED2` 4 arguments, which\nis one more than expected. The workaround is to wrap the predicate function in\nparentheses:\n\n```c++\nASSERT_PRED2((GreaterThan<int, int>), 5, 0);\n```\n\n## My compiler complains about \"ignoring return value\" when I call RUN_ALL_TESTS(). 
Why?\n\nSome people had been ignoring the return value of `RUN_ALL_TESTS()`. That is,\ninstead of\n\n```c++\n  return RUN_ALL_TESTS();\n```\n\nthey write\n\n```c++\n  RUN_ALL_TESTS();\n```\n\nThis is **wrong and dangerous**. The testing services needs to see the return\nvalue of `RUN_ALL_TESTS()` in order to determine if a test has passed. If your\n`main()` function ignores it, your test will be considered successful even if it\nhas a googletest assertion failure. Very bad.\n\nWe have decided to fix this (thanks to Michael Chastain for the idea). Now, your\ncode will no longer be able to ignore `RUN_ALL_TESTS()` when compiled with\n`gcc`. If you do so, you'll get a compiler error.\n\nIf you see the compiler complaining about you ignoring the return value of\n`RUN_ALL_TESTS()`, the fix is simple: just make sure its value is used as the\nreturn value of `main()`.\n\nBut how could we introduce a change that breaks existing tests? Well, in this\ncase, the code was already broken in the first place, so we didn't break it. :-)\n\n## My compiler complains that a constructor (or destructor) cannot return a value. What's going on?\n\nDue to a peculiarity of C++, in order to support the syntax for streaming\nmessages to an `ASSERT_*`, e.g.\n\n```c++\n  ASSERT_EQ(1, Foo()) << \"blah blah\" << foo;\n```\n\nwe had to give up using `ASSERT*` and `FAIL*` (but not `EXPECT*` and\n`ADD_FAILURE*`) in constructors and destructors. The workaround is to move the\ncontent of your constructor/destructor to a private void member function, or\nswitch to `EXPECT_*()` if that works. This\n[section](advanced.md#assertion-placement) in the user's guide explains it.\n\n## My SetUp() function is not called. Why?\n\nC++ is case-sensitive. 
Did you spell it as `Setup()`?\n\nSimilarly, sometimes people spell `SetUpTestSuite()` as `SetupTestSuite()` and\nwonder why it's never called.\n\n\n## I have several test suites which share the same test fixture logic, do I have to define a new test fixture class for each of them? This seems pretty tedious.\n\nYou don't have to. Instead of\n\n```c++\nclass FooTest : public BaseTest {};\n\nTEST_F(FooTest, Abc) { ... }\nTEST_F(FooTest, Def) { ... }\n\nclass BarTest : public BaseTest {};\n\nTEST_F(BarTest, Abc) { ... }\nTEST_F(BarTest, Def) { ... }\n```\n\nyou can simply `typedef` the test fixtures:\n\n```c++\ntypedef BaseTest FooTest;\n\nTEST_F(FooTest, Abc) { ... }\nTEST_F(FooTest, Def) { ... }\n\ntypedef BaseTest BarTest;\n\nTEST_F(BarTest, Abc) { ... }\nTEST_F(BarTest, Def) { ... }\n```\n\n## googletest output is buried in a whole bunch of LOG messages. What do I do?\n\nThe googletest output is meant to be a concise and human-friendly report. If\nyour test generates textual output itself, it will mix with the googletest\noutput, making it hard to read. However, there is an easy solution to this\nproblem.\n\nSince `LOG` messages go to stderr, we decided to let googletest output go to\nstdout. This way, you can easily separate the two using redirection. For\nexample:\n\n```shell\n$ ./my_test > gtest_output.txt\n```\n\n## Why should I prefer test fixtures over global variables?\n\nThere are several good reasons:\n\n1.  It's likely your test needs to change the states of its global variables.\n    This makes it difficult to keep side effects from escaping one test and\n    contaminating others, making debugging difficult. By using fixtures, each\n    test has a fresh set of variables that's different (but with the same\n    names). Thus, tests are kept independent of each other.\n2.  Global variables pollute the global namespace.\n3.  Test fixtures can be reused via subclassing, which cannot be done easily\n    with global variables. 
This is useful if many test suites have something in\n    common.\n\n## What can the statement argument in ASSERT_DEATH() be?\n\n`ASSERT_DEATH(statement, matcher)` (or any death assertion macro) can be used\nwherever *`statement`* is valid. So basically *`statement`* can be any C++\nstatement that makes sense in the current context. In particular, it can\nreference global and/or local variables, and can be:\n\n*   a simple function call (often the case),\n*   a complex expression, or\n*   a compound statement.\n\nSome examples are shown here:\n\n```c++\n// A death test can be a simple function call.\nTEST(MyDeathTest, FunctionCall) {\n  ASSERT_DEATH(Xyz(5), \"Xyz failed\");\n}\n\n// Or a complex expression that references variables and functions.\nTEST(MyDeathTest, ComplexExpression) {\n  const bool c = Condition();\n  ASSERT_DEATH((c ? Func1(0) : object2.Method(\"test\")),\n               \"(Func1|Method) failed\");\n}\n\n// Death assertions can be used anywhere in a function.  In\n// particular, they can be inside a loop.\nTEST(MyDeathTest, InsideLoop) {\n  // Verifies that Foo(0), Foo(1), ..., and Foo(4) all die.\n  for (int i = 0; i < 5; i++) {\n    EXPECT_DEATH_M(Foo(i), \"Foo has \\\\d+ errors\",\n                   ::testing::Message() << \"where i is \" << i);\n  }\n}\n\n// A death assertion can contain a compound statement.\nTEST(MyDeathTest, CompoundStatement) {\n  // Verifies that at least one of Bar(0), Bar(1), ..., and\n  // Bar(4) dies.\n  ASSERT_DEATH({\n    for (int i = 0; i < 5; i++) {\n      Bar(i);\n    }\n  },\n  \"Bar has \\\\d+ errors\");\n}\n```\n\ngtest-death-test_test.cc contains more examples if you are interested.\n\n## I have a fixture class `FooTest`, but `TEST_F(FooTest, Bar)` gives me error ``\"no matching function for call to `FooTest::FooTest()'\"``. Why?\n\nGoogletest needs to be able to create objects of your test fixture class, so it\nmust have a default constructor. 
Normally the compiler will define one for you.\nHowever, there are cases where you have to define your own:\n\n*   If you explicitly declare a non-default constructor for class `FooTest`\n    (`DISALLOW_EVIL_CONSTRUCTORS()` does this), then you need to define a\n    default constructor, even if it would be empty.\n*   If `FooTest` has a const non-static data member, then you have to define the\n    default constructor *and* initialize the const member in the initializer\n    list of the constructor. (Early versions of `gcc` don't force you to\n    initialize the const member. It's a bug that has been fixed in `gcc 4`.)\n\n## Why does ASSERT_DEATH complain about previous threads that were already joined?\n\nWith the Linux pthread library, there is no turning back once you cross the line\nfrom single thread to multiple threads. The first time you create a thread, a\nmanager thread is created in addition, so you get 3, not 2, threads. Later when\nthe thread you create joins the main thread, the thread count decrements by 1,\nbut the manager thread will never be killed, so you still have 2 threads, which\nmeans you cannot safely run a death test.\n\nThe new NPTL thread library doesn't suffer from this problem, as it doesn't\ncreate a manager thread. However, if you don't control which machine your test\nruns on, you shouldn't depend on this.\n\n## Why does googletest require the entire test suite, instead of individual tests, to be named *DeathTest when it uses ASSERT_DEATH?\n\ngoogletest does not interleave tests from different test suites. That is, it\nruns all tests in one test suite first, and then runs all tests in the next test\nsuite, and so on. googletest does this because it needs to set up a test suite\nbefore the first test in it is run, and tear it down afterwards. 
Splitting up\nthe test case would require multiple set-up and tear-down processes, which is\ninefficient and makes the semantics unclean.\n\nIf we were to determine the order of tests based on test name instead of test\ncase name, then we would have a problem with the following situation:\n\n```c++\nTEST_F(FooTest, AbcDeathTest) { ... }\nTEST_F(FooTest, Uvw) { ... }\n\nTEST_F(BarTest, DefDeathTest) { ... }\nTEST_F(BarTest, Xyz) { ... }\n```\n\nSince `FooTest.AbcDeathTest` needs to run before `BarTest.Xyz`, and we don't\ninterleave tests from different test suites, we need to run all tests in the\n`FooTest` case before running any test in the `BarTest` case. This contradicts\nwith the requirement to run `BarTest.DefDeathTest` before `FooTest.Uvw`.\n\n## But I don't like calling my entire test suite \\*DeathTest when it contains both death tests and non-death tests. What do I do?\n\nYou don't have to, but if you like, you may split up the test suite into\n`FooTest` and `FooDeathTest`, where the names make it clear that they are\nrelated:\n\n```c++\nclass FooTest : public ::testing::Test { ... };\n\nTEST_F(FooTest, Abc) { ... }\nTEST_F(FooTest, Def) { ... }\n\nusing FooDeathTest = FooTest;\n\nTEST_F(FooDeathTest, Uvw) { ... EXPECT_DEATH(...) ... }\nTEST_F(FooDeathTest, Xyz) { ... ASSERT_DEATH(...) ... }\n```\n\n## googletest prints the LOG messages in a death test's child process only when the test fails. How can I see the LOG messages when the death test succeeds?\n\nPrinting the LOG messages generated by the statement inside `EXPECT_DEATH()`\nmakes it harder to search for real problems in the parent's log. Therefore,\ngoogletest only prints them when the death test has failed.\n\nIf you really need to see such LOG messages, a workaround is to temporarily\nbreak the death test (e.g. by changing the regex pattern it is expected to\nmatch). Admittedly, this is a hack. 
We'll consider a more permanent solution\nafter the fork-and-exec-style death tests are implemented.\n\n## The compiler complains about \"no match for 'operator<<'\" when I use an assertion. What gives?\n\nIf you use a user-defined type `FooType` in an assertion, you must make sure\nthere is an `std::ostream& operator<<(std::ostream&, const FooType&)` function\ndefined such that we can print a value of `FooType`.\n\nIn addition, if `FooType` is declared in a name space, the `<<` operator also\nneeds to be defined in the *same* name space. See https://abseil.io/tips/49 for details.\n\n## How do I suppress the memory leak messages on Windows?\n\nSince the statically initialized googletest singleton requires allocations on\nthe heap, the Visual C++ memory leak detector will report memory leaks at the\nend of the program run. The easiest way to avoid this is to use the\n`_CrtMemCheckpoint` and `_CrtMemDumpAllObjectsSince` calls to not report any\nstatically initialized heap objects. See MSDN for more details and additional\nheap check/debug routines.\n\n## How can my code detect if it is running in a test?\n\nIf you write code that sniffs whether it's running in a test and does different\nthings accordingly, you are leaking test-only logic into production code and\nthere is no easy way to ensure that the test-only code paths aren't run by\nmistake in production. Such cleverness also leads to\n[Heisenbugs](https://en.wikipedia.org/wiki/Heisenbug). Therefore we strongly\nadvise against the practice, and googletest doesn't provide a way to do it.\n\nIn general, the recommended way to cause the code to behave differently under\ntest is [Dependency Injection](https://en.wikipedia.org/wiki/Dependency_injection). You can inject\ndifferent functionality from the test and from the production code. 
Since your\nproduction code doesn't link in the for-test logic at all (the\n[`testonly`](https://docs.bazel.build/versions/master/be/common-definitions.html#common.testonly) attribute for BUILD targets helps to ensure\nthat), there is no danger in accidentally running it.\n\nHowever, if you *really*, *really*, *really* have no choice, and if you follow\nthe rule of ending your test program names with `_test`, you can use the\n*horrible* hack of sniffing your executable name (`argv[0]` in `main()`) to know\nwhether the code is under test.\n\n## How do I temporarily disable a test?\n\nIf you have a broken test that you cannot fix right away, you can add the\nDISABLED_ prefix to its name. This will exclude it from execution. This is\nbetter than commenting out the code or using #if 0, as disabled tests are still\ncompiled (and thus won't rot).\n\nTo include disabled tests in test execution, just invoke the test program with\nthe --gtest_also_run_disabled_tests flag.\n\n## Is it OK if I have two separate `TEST(Foo, Bar)` test methods defined in different namespaces?\n\nYes.\n\nThe rule is **all test methods in the same test suite must use the same fixture\nclass.** This means that the following is **allowed** because both tests use the\nsame fixture class (`::testing::Test`).\n\n```c++\nnamespace foo {\nTEST(CoolTest, DoSomething) {\n  SUCCEED();\n}\n}  // namespace foo\n\nnamespace bar {\nTEST(CoolTest, DoSomething) {\n  SUCCEED();\n}\n}  // namespace bar\n```\n\nHowever, the following code is **not allowed** and will produce a runtime error\nfrom googletest because the test methods are using different test fixture\nclasses with the same test suite name.\n\n```c++\nnamespace foo {\nclass CoolTest : public ::testing::Test {};  // Fixture foo::CoolTest\nTEST_F(CoolTest, DoSomething) {\n  SUCCEED();\n}\n}  // namespace foo\n\nnamespace bar {\nclass CoolTest : public ::testing::Test {};  // Fixture: bar::CoolTest\nTEST_F(CoolTest, DoSomething) {\n  SUCCEED();\n}\n}  // 
namespace bar\n```\n"
  },
  {
    "path": "test/googletest/docs/pkgconfig.md",
    "content": "## Using GoogleTest from various build systems\n\nGoogleTest comes with pkg-config files that can be used to determine all\nnecessary flags for compiling and linking to GoogleTest (and GoogleMock).\nPkg-config is a standardised plain-text format containing\n\n*   the includedir (-I) path\n*   necessary macro (-D) definitions\n*   further required flags (-pthread)\n*   the library (-L) path\n*   the library (-l) to link to\n\nAll current build systems support pkg-config in one way or another. For all\nexamples here we assume you want to compile the sample\n`samples/sample3_unittest.cc`.\n\n### CMake\n\nUsing `pkg-config` in CMake is fairly easy:\n\n```cmake\ncmake_minimum_required(VERSION 3.0)\n\ncmake_policy(SET CMP0048 NEW)\nproject(my_gtest_pkgconfig VERSION 0.0.1 LANGUAGES CXX)\n\nfind_package(PkgConfig)\npkg_search_module(GTEST REQUIRED gtest_main)\n\nadd_executable(testapp samples/sample3_unittest.cc)\ntarget_link_libraries(testapp ${GTEST_LDFLAGS})\ntarget_compile_options(testapp PUBLIC ${GTEST_CFLAGS})\n\ninclude(CTest)\nadd_test(first_and_only_test testapp)\n```\n\nIt is generally recommended that you use `target_compile_options` + `_CFLAGS`\nover `target_include_directories` + `_INCLUDE_DIRS` as the former includes not\njust -I flags (GoogleTest might require a macro indicating to internal headers\nthat all libraries have been compiled with threading enabled. In addition,\nGoogleTest might also require `-pthread` in the compiling step, and as such\nsplitting the pkg-config `Cflags` variable into include dirs and macros for\n`target_compile_definitions()` might still miss this). 
The same recommendation\ngoes for using `_LDFLAGS` over the more commonplace `_LIBRARIES`, which happens\nto discard `-L` flags and `-pthread`.\n\n### Autotools\n\nFinding GoogleTest in Autoconf and using it from Automake is also fairly easy:\n\nIn your `configure.ac`:\n\n```\nAC_PREREQ([2.69])\nAC_INIT([my_gtest_pkgconfig], [0.0.1])\nAC_CONFIG_SRCDIR([samples/sample3_unittest.cc])\nAC_PROG_CXX\n\nPKG_CHECK_MODULES([GTEST], [gtest_main])\n\nAM_INIT_AUTOMAKE([foreign subdir-objects])\nAC_CONFIG_FILES([Makefile])\nAC_OUTPUT\n```\n\nand in your `Makefile.am`:\n\n```\ncheck_PROGRAMS = testapp\nTESTS = $(check_PROGRAMS)\n\ntestapp_SOURCES = samples/sample3_unittest.cc\ntestapp_CXXFLAGS = $(GTEST_CFLAGS)\ntestapp_LDADD = $(GTEST_LIBS)\n```\n\n### Meson\n\nMeson natively uses pkgconfig to query dependencies:\n\n```\nproject('my_gtest_pkgconfig', 'cpp', version : '0.0.1')\n\ngtest_dep = dependency('gtest_main')\n\ntestapp = executable(\n  'testapp',\n  files(['samples/sample3_unittest.cc']),\n  dependencies : gtest_dep,\n  install : false)\n\ntest('first_and_only_test', testapp)\n```\n\n### Plain Makefiles\n\nSince `pkg-config` is a small Unix command-line utility, it can be used in\nhandwritten `Makefile`s too:\n\n```makefile\nGTEST_CFLAGS = `pkg-config --cflags gtest_main`\nGTEST_LIBS = `pkg-config --libs gtest_main`\n\n.PHONY: tests all\n\ntests: all\n  ./testapp\n\nall: testapp\n\ntestapp: testapp.o\n  $(CXX) $(CXXFLAGS) $(LDFLAGS) $< -o $@ $(GTEST_LIBS)\n\ntestapp.o: samples/sample3_unittest.cc\n  $(CXX) $(CPPFLAGS) $(CXXFLAGS) $< -c -o $@ $(GTEST_CFLAGS)\n```\n\n### Help! pkg-config can't find GoogleTest!\n\nLet's say you have a `CMakeLists.txt` along the lines of the one in this\ntutorial and you try to run `cmake`. 
It is very possible that you get a failure\nalong the lines of:\n\n```\n-- Checking for one of the modules 'gtest_main'\nCMake Error at /usr/share/cmake/Modules/FindPkgConfig.cmake:640 (message):\n  None of the required 'gtest_main' found\n```\n\nThese failures are common if you installed GoogleTest yourself and have not\nsourced it from a distro or other package manager. If so, you need to tell\npkg-config where it can find the `.pc` files containing the information. Say you\ninstalled GoogleTest to `/usr/local`, then it might be that the `.pc` files are\ninstalled under `/usr/local/lib64/pkgconfig`. If you set\n\n```\nexport PKG_CONFIG_PATH=/usr/local/lib64/pkgconfig\n```\n\npkg-config will also try to look in `PKG_CONFIG_PATH` to find `gtest_main.pc`.\n\n### Using pkg-config in a cross-compilation setting\n\nPkg-config can be used in a cross-compilation setting too. To do this, let's\nassume the final prefix of the cross-compiled installation will be `/usr`, and\nyour sysroot is `/home/MYUSER/sysroot`. 
Configure and install GTest using\n\n```\nmkdir build && cmake -DCMAKE_INSTALL_PREFIX=/usr ..\n```\n\nInstall into the sysroot using `DESTDIR`:\n\n```\nmake -j install DESTDIR=/home/MYUSER/sysroot\n```\n\nBefore we continue, it is recommended to **always** define the following two\nvariables for pkg-config in a cross-compilation setting:\n\n```\nexport PKG_CONFIG_ALLOW_SYSTEM_CFLAGS=yes\nexport PKG_CONFIG_ALLOW_SYSTEM_LIBS=yes\n```\n\notherwise `pkg-config` will filter `-I` and `-L` flags against standard prefixes\nsuch as `/usr` (see https://bugs.freedesktop.org/show_bug.cgi?id=28264#c3 for\nreasons why this stripping needs to occur usually).\n\nIf you look at the generated pkg-config file, it will look something like\n\n```\nlibdir=/usr/lib64\nincludedir=/usr/include\n\nName: gtest\nDescription: GoogleTest (without main() function)\nVersion: 1.10.0\nURL: https://github.com/google/googletest\nLibs: -L${libdir} -lgtest -lpthread\nCflags: -I${includedir} -DGTEST_HAS_PTHREAD=1 -lpthread\n```\n\nNotice that the sysroot is not included in `libdir` and `includedir`! If you try\nto run `pkg-config` with the correct\n`PKG_CONFIG_LIBDIR=/home/MYUSER/sysroot/usr/lib64/pkgconfig` against this `.pc`\nfile, you will get\n\n```\n$ pkg-config --cflags gtest\n-DGTEST_HAS_PTHREAD=1 -lpthread -I/usr/include\n$ pkg-config --libs gtest\n-L/usr/lib64 -lgtest -lpthread\n```\n\nwhich is obviously wrong and points to the `CBUILD` and not `CHOST` root. In\norder to use this in a cross-compilation setting, we need to tell pkg-config to\ninject the actual sysroot into `-I` and `-L` variables. 
Let us now tell\npkg-config about the actual sysroot\n\n```\nexport PKG_CONFIG_DIR=\nexport PKG_CONFIG_SYSROOT_DIR=/home/MYUSER/sysroot\nexport PKG_CONFIG_LIBDIR=${PKG_CONFIG_SYSROOT_DIR}/usr/lib64/pkgconfig\n```\n\nand running `pkg-config` again we get\n\n```\n$ pkg-config --cflags gtest\n-DGTEST_HAS_PTHREAD=1 -lpthread -I/home/MYUSER/sysroot/usr/include\n$ pkg-config --libs gtest\n-L/home/MYUSER/sysroot/usr/lib64 -lgtest -lpthread\n```\n\nwhich contains the correct sysroot now. For a more comprehensive guide to also\nincluding `${CHOST}` in build system calls, see the excellent tutorial by Diego\nElio Pettenò: https://autotools.io/pkgconfig/cross-compiling.html\n"
  },
  {
    "path": "test/googletest/docs/primer.md",
    "content": "# Googletest Primer\n\n## Introduction: Why googletest?\n\n*googletest* helps you write better C++ tests.\n\ngoogletest is a testing framework developed by the Testing Technology team with\nGoogle's specific requirements and constraints in mind. Whether you work on\nLinux, Windows, or a Mac, if you write C++ code, googletest can help you. And it\nsupports *any* kind of tests, not just unit tests.\n\nSo what makes a good test, and how does googletest fit in? We believe:\n\n1.  Tests should be *independent* and *repeatable*. It's a pain to debug a test\n    that succeeds or fails as a result of other tests. googletest isolates the\n    tests by running each of them on a different object. When a test fails,\n    googletest allows you to run it in isolation for quick debugging.\n2.  Tests should be well *organized* and reflect the structure of the tested\n    code. googletest groups related tests into test suites that can share data\n    and subroutines. This common pattern is easy to recognize and makes tests\n    easy to maintain. Such consistency is especially helpful when people switch\n    projects and start to work on a new code base.\n3.  Tests should be *portable* and *reusable*. Google has a lot of code that is\n    platform-neutral; its tests should also be platform-neutral. googletest\n    works on different OSes, with different compilers, with or without\n    exceptions, so googletest tests can work with a variety of configurations.\n4.  When tests fail, they should provide as much *information* about the problem\n    as possible. googletest doesn't stop at the first test failure. Instead, it\n    only stops the current test and continues with the next. You can also set up\n    tests that report non-fatal failures after which the current test continues.\n    Thus, you can detect and fix multiple bugs in a single run-edit-compile\n    cycle.\n5.  
The testing framework should liberate test writers from housekeeping chores\n    and let them focus on the test *content*. googletest automatically keeps\n    track of all tests defined, and doesn't require the user to enumerate them\n    in order to run them.\n6.  Tests should be *fast*. With googletest, you can reuse shared resources\n    across tests and pay for the set-up/tear-down only once, without making\n    tests depend on each other.\n\nSince googletest is based on the popular xUnit architecture, you'll feel right\nat home if you've used JUnit or PyUnit before. If not, it will take you about 10\nminutes to learn the basics and get started. So let's go!\n\n## Beware of the nomenclature\n\n_Note:_ There might be some confusion arising from different definitions of the\nterms _Test_, _Test Case_ and _Test Suite_, so beware of misunderstanding these.\n\nHistorically, googletest started to use the term _Test Case_ for grouping\nrelated tests, whereas current publications, including International Software\nTesting Qualifications Board ([ISTQB](http://www.istqb.org/)) materials and\nvarious textbooks on software quality, use the term\n_[Test Suite][istqb test suite]_ for this.\n\nThe related term _Test_, as it is used in googletest, corresponds to the term\n_[Test Case][istqb test case]_ of ISTQB and others.\n\nThe term _Test_ is commonly of broad enough sense, including ISTQB's definition\nof _Test Case_, so it's not much of a problem here. But the term _Test Case_ as\nwas used in Google Test is of contradictory sense and thus confusing.\n\ngoogletest recently started replacing the term _Test Case_ with _Test Suite_.\nThe preferred API is *TestSuite*. 
The older TestCase API is being slowly\ndeprecated and refactored away.\n\nSo please be aware of the different definitions of the terms:\n\n<!-- mdformat off(github rendering does not support multiline tables) -->\n\nMeaning                                                                              | googletest Term         | [ISTQB](http://www.istqb.org/) Term\n:----------------------------------------------------------------------------------- | :---------------------- | :----------------------------------\nExercise a particular program path with specific input values and verify the results | [TEST()](#simple-tests) | [Test Case][istqb test case]\n\n<!-- mdformat on -->\n\n[istqb test case]: http://glossary.istqb.org/en/search/test%20case\n[istqb test suite]: http://glossary.istqb.org/en/search/test%20suite\n\n## Basic Concepts\n\nWhen using googletest, you start by writing *assertions*, which are statements\nthat check whether a condition is true. An assertion's result can be *success*,\n*nonfatal failure*, or *fatal failure*. If a fatal failure occurs, it aborts the\ncurrent function; otherwise the program continues normally.\n\n*Tests* use assertions to verify the tested code's behavior. If a test crashes\nor has a failed assertion, then it *fails*; otherwise it *succeeds*.\n\nA *test suite* contains one or many tests. You should group your tests into test\nsuites that reflect the structure of the tested code. When multiple tests in a\ntest suite need to share common objects and subroutines, you can put them into a\n*test fixture* class.\n\nA *test program* can contain multiple test suites.\n\nWe'll now explain how to write a test program, starting at the individual\nassertion level and building up to tests and test suites.\n\n## Assertions\n\ngoogletest assertions are macros that resemble function calls. You test a class\nor function by making assertions about its behavior. 
When an assertion fails,\ngoogletest prints the assertion's source file and line number location, along\nwith a failure message. You may also supply a custom failure message which will\nbe appended to googletest's message.\n\nThe assertions come in pairs that test the same thing but have different effects\non the current function. `ASSERT_*` versions generate fatal failures when they\nfail, and **abort the current function**. `EXPECT_*` versions generate nonfatal\nfailures, which don't abort the current function. Usually `EXPECT_*` are\npreferred, as they allow more than one failure to be reported in a test.\nHowever, you should use `ASSERT_*` if it doesn't make sense to continue when the\nassertion in question fails.\n\nSince a failed `ASSERT_*` returns from the current function immediately,\npossibly skipping clean-up code that comes after it, it may cause a space leak.\nDepending on the nature of the leak, it may or may not be worth fixing - so keep\nthis in mind if you get a heap checker error in addition to assertion errors.\n\nTo provide a custom failure message, simply stream it into the macro using the\n`<<` operator or a sequence of such operators. An example:\n\n```c++\nASSERT_EQ(x.size(), y.size()) << \"Vectors x and y are of unequal length\";\n\nfor (int i = 0; i < x.size(); ++i) {\n  EXPECT_EQ(x[i], y[i]) << \"Vectors x and y differ at index \" << i;\n}\n```\n\nAnything that can be streamed to an `ostream` can be streamed to an assertion\nmacro--in particular, C strings and `string` objects. 
If a wide string\n(`wchar_t*`, `TCHAR*` in `UNICODE` mode on Windows, or `std::wstring`) is\nstreamed to an assertion, it will be translated to UTF-8 when printed.\n\n### Basic Assertions\n\nThese assertions do basic true/false condition testing.\n\nFatal assertion            | Nonfatal assertion         | Verifies\n-------------------------- | -------------------------- | --------------------\n`ASSERT_TRUE(condition);`  | `EXPECT_TRUE(condition);`  | `condition` is true\n`ASSERT_FALSE(condition);` | `EXPECT_FALSE(condition);` | `condition` is false\n\nRemember, when they fail, `ASSERT_*` yields a fatal failure and returns from the\ncurrent function, while `EXPECT_*` yields a nonfatal failure, allowing the\nfunction to continue running. In either case, an assertion failure means its\ncontaining test fails.\n\n**Availability**: Linux, Windows, Mac.\n\n### Binary Comparison\n\nThis section describes assertions that compare two values.\n\nFatal assertion          | Nonfatal assertion       | Verifies\n------------------------ | ------------------------ | --------------\n`ASSERT_EQ(val1, val2);` | `EXPECT_EQ(val1, val2);` | `val1 == val2`\n`ASSERT_NE(val1, val2);` | `EXPECT_NE(val1, val2);` | `val1 != val2`\n`ASSERT_LT(val1, val2);` | `EXPECT_LT(val1, val2);` | `val1 < val2`\n`ASSERT_LE(val1, val2);` | `EXPECT_LE(val1, val2);` | `val1 <= val2`\n`ASSERT_GT(val1, val2);` | `EXPECT_GT(val1, val2);` | `val1 > val2`\n`ASSERT_GE(val1, val2);` | `EXPECT_GE(val1, val2);` | `val1 >= val2`\n\nValue arguments must be comparable by the assertion's comparison operator or\nyou'll get a compiler error. We used to require the arguments to support the\n`<<` operator for streaming to an `ostream`, but this is no longer necessary. 
If\n`<<` is supported, it will be called to print the arguments when the assertion\nfails; otherwise googletest will attempt to print them in the best way it can.\nFor more details and how to customize the printing of the arguments, see the\n[documentation](../../googlemock/docs/cook_book.md#teaching-gmock-how-to-print-your-values).\n\nThese assertions can work with a user-defined type, but only if you define the\ncorresponding comparison operator (e.g., `==` or `<`). Since this is discouraged\nby the Google\n[C++ Style Guide](https://google.github.io/styleguide/cppguide.html#Operator_Overloading),\nyou may need to use `ASSERT_TRUE()` or `EXPECT_TRUE()` to assert the equality of\ntwo objects of a user-defined type.\n\nHowever, when possible, `ASSERT_EQ(actual, expected)` is preferred to\n`ASSERT_TRUE(actual == expected)`, since it tells you `actual` and `expected`'s\nvalues on failure.\n\nArguments are always evaluated exactly once. Therefore, it's OK for the\narguments to have side effects. However, as with any ordinary C/C++ function,\nthe arguments' evaluation order is undefined (i.e., the compiler is free to\nchoose any order), and your code should not depend on any particular argument\nevaluation order.\n\n`ASSERT_EQ()` does pointer equality on pointers. If used on two C strings, it\ntests if they are in the same memory location, not if they have the same value.\nTherefore, if you want to compare C strings (e.g. `const char*`) by value, use\n`ASSERT_STREQ()`, which will be described later on. In particular, to assert\nthat a C string is `NULL`, use `ASSERT_STREQ(c_string, NULL)`. Consider using\n`ASSERT_EQ(c_string, nullptr)` if c++11 is supported. To compare two `string`\nobjects, you should use `ASSERT_EQ`.\n\nWhen doing pointer comparisons use `*_EQ(ptr, nullptr)` and `*_NE(ptr, nullptr)`\ninstead of `*_EQ(ptr, NULL)` and `*_NE(ptr, NULL)`. This is because `nullptr` is\ntyped, while `NULL` is not. 
See the [FAQ](faq.md) for more details.\n\nIf you're working with floating point numbers, you may want to use the floating\npoint variations of some of these macros in order to avoid problems caused by\nrounding. See [Advanced googletest Topics](advanced.md) for details.\n\nMacros in this section work with both narrow and wide string objects (`string`\nand `wstring`).\n\n**Availability**: Linux, Windows, Mac.\n\n**Historical note**: Before February 2016 `*_EQ` had a convention of calling it\nas `ASSERT_EQ(expected, actual)`, so lots of existing code uses this order. Now\n`*_EQ` treats both parameters in the same way.\n\n### String Comparison\n\nThe assertions in this group compare two **C strings**. If you want to compare\ntwo `string` objects, use `EXPECT_EQ`, `EXPECT_NE`, and etc instead.\n\n<!-- mdformat off(github rendering does not support multiline tables) -->\n\n| Fatal assertion                | Nonfatal assertion             | Verifies                                                 |\n| --------------------------     | ------------------------------ | -------------------------------------------------------- |\n| `ASSERT_STREQ(str1,str2);`     | `EXPECT_STREQ(str1,str2);`     | the two C strings have the same content   \t\t     |\n| `ASSERT_STRNE(str1,str2);`     | `EXPECT_STRNE(str1,str2);`     | the two C strings have different contents \t\t     |\n| `ASSERT_STRCASEEQ(str1,str2);` | `EXPECT_STRCASEEQ(str1,str2);` | the two C strings have the same content, ignoring case   |\n| `ASSERT_STRCASENE(str1,str2);` | `EXPECT_STRCASENE(str1,str2);` | the two C strings have different contents, ignoring case |\n\n<!-- mdformat on-->\n\nNote that \"CASE\" in an assertion name means that case is ignored. A `NULL`\npointer and an empty string are considered *different*.\n\n`*STREQ*` and `*STRNE*` also accept wide C strings (`wchar_t*`). 
If a comparison\nof two wide strings fails, their values will be printed as UTF-8 narrow strings.\n\n**Availability**: Linux, Windows, Mac.\n\n**See also**: For more string comparison tricks (substring, prefix, suffix, and\nregular expression matching, for example), see [this](advanced.md) in the\nAdvanced googletest Guide.\n\n## Simple Tests\n\nTo create a test:\n\n1.  Use the `TEST()` macro to define and name a test function. These are\n    ordinary C++ functions that don't return a value.\n2.  In this function, along with any valid C++ statements you want to include,\n    use the various googletest assertions to check values.\n3.  The test's result is determined by the assertions; if any assertion in the\n    test fails (either fatally or non-fatally), or if the test crashes, the\n    entire test fails. Otherwise, it succeeds.\n\n```c++\nTEST(TestSuiteName, TestName) {\n  ... test body ...\n}\n```\n\n`TEST()` arguments go from general to specific. The *first* argument is the name\nof the test suite, and the *second* argument is the test's name within the test\ncase. Both names must be valid C++ identifiers, and they should not contain\nany underscores (`_`). A test's *full name* consists of its containing test suite and\nits individual name. 
Tests from different test suites can have the same\nindividual name.\n\nFor example, let's take a simple integer function:\n\n```c++\nint Factorial(int n);  // Returns the factorial of n\n```\n\nA test suite for this function might look like:\n\n```c++\n// Tests factorial of 0.\nTEST(FactorialTest, HandlesZeroInput) {\n  EXPECT_EQ(Factorial(0), 1);\n}\n\n// Tests factorial of positive numbers.\nTEST(FactorialTest, HandlesPositiveInput) {\n  EXPECT_EQ(Factorial(1), 1);\n  EXPECT_EQ(Factorial(2), 2);\n  EXPECT_EQ(Factorial(3), 6);\n  EXPECT_EQ(Factorial(8), 40320);\n}\n```\n\ngoogletest groups the test results by test suites, so logically related tests\nshould be in the same test suite; in other words, the first argument to their\n`TEST()` should be the same. In the above example, we have two tests,\n`HandlesZeroInput` and `HandlesPositiveInput`, that belong to the same test\nsuite `FactorialTest`.\n\nWhen naming your test suites and tests, you should follow the same convention as\nfor\n[naming functions and classes](https://google.github.io/styleguide/cppguide.html#Function_Names).\n\n**Availability**: Linux, Windows, Mac.\n\n## Test Fixtures: Using the Same Data Configuration for Multiple Tests {#same-data-multiple-tests}\n\nIf you find yourself writing two or more tests that operate on similar data, you\ncan use a *test fixture*. This allows you to reuse the same configuration of\nobjects for several different tests.\n\nTo create a fixture:\n\n1.  Derive a class from `::testing::Test` . Start its body with `protected:`, as\n    we'll want to access fixture members from sub-classes.\n2.  Inside the class, declare any objects you plan to use.\n3.  If necessary, write a default constructor or `SetUp()` function to prepare\n    the objects for each test. A common mistake is to spell `SetUp()` as\n    **`Setup()`** with a small `u` - Use `override` in C++11 to make sure you\n    spelled it correctly.\n4.  
If necessary, write a destructor or `TearDown()` function to release any\n    resources you allocated in `SetUp()` . To learn when you should use the\n    constructor/destructor and when you should use `SetUp()/TearDown()`, read\n    the [FAQ](faq.md#CtorVsSetUp).\n5.  If needed, define subroutines for your tests to share.\n\nWhen using a fixture, use `TEST_F()` instead of `TEST()` as it allows you to\naccess objects and subroutines in the test fixture:\n\n```c++\nTEST_F(TestFixtureName, TestName) {\n  ... test body ...\n}\n```\n\nLike `TEST()`, the first argument is the test suite name, but for `TEST_F()`\nthis must be the name of the test fixture class. You've probably guessed: `_F`\nis for fixture.\n\nUnfortunately, the C++ macro system does not allow us to create a single macro\nthat can handle both types of tests. Using the wrong macro causes a compiler\nerror.\n\nAlso, you must first define a test fixture class before using it in a\n`TEST_F()`, or you'll get the compiler error \"`virtual outside class\ndeclaration`\".\n\nFor each test defined with `TEST_F()`, googletest will create a *fresh* test\nfixture at runtime, immediately initialize it via `SetUp()`, run the test,\nclean up by calling `TearDown()`, and then delete the test fixture. Note that\ndifferent tests in the same test suite have different test fixture objects, and\ngoogletest always deletes a test fixture before it creates the next one.\ngoogletest does **not** reuse the same test fixture for multiple tests. Any\nchanges one test makes to the fixture do not affect other tests.\n\nAs an example, let's write tests for a FIFO queue class named `Queue`, which has\nthe following interface:\n\n```c++\ntemplate <typename E>  // E is the element type.\nclass Queue {\n public:\n  Queue();\n  void Enqueue(const E& element);\n  E* Dequeue();  // Returns NULL if the queue is empty.\n  size_t size() const;\n  ...\n};\n```\n\nFirst, define a fixture class. 
By convention, you should give it the name\n`FooTest` where `Foo` is the class being tested.\n\n```c++\nclass QueueTest : public ::testing::Test {\n protected:\n  void SetUp() override {\n     q1_.Enqueue(1);\n     q2_.Enqueue(2);\n     q2_.Enqueue(3);\n  }\n\n  // void TearDown() override {}\n\n  Queue<int> q0_;\n  Queue<int> q1_;\n  Queue<int> q2_;\n};\n```\n\nIn this case, `TearDown()` is not needed since we don't have to clean up after\neach test, other than what's already done by the destructor.\n\nNow we'll write tests using `TEST_F()` and this fixture.\n\n```c++\nTEST_F(QueueTest, IsEmptyInitially) {\n  EXPECT_EQ(q0_.size(), 0);\n}\n\nTEST_F(QueueTest, DequeueWorks) {\n  int* n = q0_.Dequeue();\n  EXPECT_EQ(n, nullptr);\n\n  n = q1_.Dequeue();\n  ASSERT_NE(n, nullptr);\n  EXPECT_EQ(*n, 1);\n  EXPECT_EQ(q1_.size(), 0);\n  delete n;\n\n  n = q2_.Dequeue();\n  ASSERT_NE(n, nullptr);\n  EXPECT_EQ(*n, 2);\n  EXPECT_EQ(q2_.size(), 1);\n  delete n;\n}\n```\n\nThe above uses both `ASSERT_*` and `EXPECT_*` assertions. The rule of thumb is\nto use `EXPECT_*` when you want the test to continue to reveal more errors after\nthe assertion failure, and use `ASSERT_*` when continuing after failure doesn't\nmake sense. For example, the second assertion in the `Dequeue` test is\n`ASSERT_NE(n, nullptr)`, as we need to dereference the pointer `n` later, which\nwould lead to a segfault when `n` is `NULL`.\n\nWhen these tests run, the following happens:\n\n1.  googletest constructs a `QueueTest` object (let's call it `t1`).\n2.  `t1.SetUp()` initializes `t1`.\n3.  The first test (`IsEmptyInitially`) runs on `t1`.\n4.  `t1.TearDown()` cleans up after the test finishes.\n5.  `t1` is destructed.\n6.  The above steps are repeated on another `QueueTest` object, this time\n    running the `DequeueWorks` test.\n\n**Availability**: Linux, Windows, Mac.\n\n## Invoking the Tests\n\n`TEST()` and `TEST_F()` implicitly register their tests with googletest. 
So,\nunlike with many other C++ testing frameworks, you don't have to re-list all\nyour defined tests in order to run them.\n\nAfter defining your tests, you can run them with `RUN_ALL_TESTS()`, which\nreturns `0` if all the tests are successful, or `1` otherwise. Note that\n`RUN_ALL_TESTS()` runs *all tests* in your link unit--they can be from\ndifferent test suites, or even different source files.\n\nWhen invoked, the `RUN_ALL_TESTS()` macro:\n\n*   Saves the state of all googletest flags.\n\n*   Creates a test fixture object for the first test.\n\n*   Initializes it via `SetUp()`.\n\n*   Runs the test on the fixture object.\n\n*   Cleans up the fixture via `TearDown()`.\n\n*   Deletes the fixture.\n\n*   Restores the state of all googletest flags.\n\n*   Repeats the above steps for the next test, until all tests have run.\n\nIf a fatal failure happens the subsequent steps will be skipped.\n\n> IMPORTANT: You must **not** ignore the return value of `RUN_ALL_TESTS()`, or\n> you will get a compiler error. The rationale for this design is that the\n> automated testing service determines whether a test has passed based on its\n> exit code, not on its stdout/stderr output; thus your `main()` function must\n> return the value of `RUN_ALL_TESTS()`.\n>\n> Also, you should call `RUN_ALL_TESTS()` only **once**. Calling it more than\n> once conflicts with some advanced googletest features (e.g., thread-safe\n> [death tests](advanced.md#death-tests)) and thus is not supported.\n\n**Availability**: Linux, Windows, Mac.\n\n## Writing the main() Function\n\nMost users should _not_ need to write their own `main` function and instead link\nwith `gtest_main` (as opposed to with `gtest`), which defines a suitable entry\npoint. See the end of this section for details. 
The remainder of this section\nshould only apply when you need to do something custom before the tests run that\ncannot be expressed within the framework of fixtures and test suites.\n\nIf you write your own `main` function, it should return the value of\n`RUN_ALL_TESTS()`.\n\nYou can start from this boilerplate:\n\n```c++\n#include \"this/package/foo.h\"\n\n#include \"gtest/gtest.h\"\n\nnamespace my {\nnamespace project {\nnamespace {\n\n// The fixture for testing class Foo.\nclass FooTest : public ::testing::Test {\n protected:\n  // You can remove any or all of the following functions if their bodies would\n  // be empty.\n\n  FooTest() {\n     // You can do set-up work for each test here.\n  }\n\n  ~FooTest() override {\n     // You can do clean-up work that doesn't throw exceptions here.\n  }\n\n  // If the constructor and destructor are not enough for setting up\n  // and cleaning up each test, you can define the following methods:\n\n  void SetUp() override {\n     // Code here will be called immediately after the constructor (right\n     // before each test).\n  }\n\n  void TearDown() override {\n     // Code here will be called immediately after each test (right\n     // before the destructor).\n  }\n\n  // Class members declared here can be used by all tests in the test suite\n  // for Foo.\n};\n\n// Tests that the Foo::Bar() method does Abc.\nTEST_F(FooTest, MethodBarDoesAbc) {\n  const std::string input_filepath = \"this/package/testdata/myinputfile.dat\";\n  const std::string output_filepath = \"this/package/testdata/myoutputfile.dat\";\n  Foo f;\n  EXPECT_EQ(f.Bar(input_filepath, output_filepath), 0);\n}\n\n// Tests that Foo does Xyz.\nTEST_F(FooTest, DoesXyz) {\n  // Exercises the Xyz feature of Foo.\n}\n\n}  // namespace\n}  // namespace project\n}  // namespace my\n\nint main(int argc, char **argv) {\n  ::testing::InitGoogleTest(&argc, argv);\n  return RUN_ALL_TESTS();\n}\n```\n\nThe `::testing::InitGoogleTest()` function parses the command line 
for\ngoogletest flags, and removes all recognized flags. This allows the user to\ncontrol a test program's behavior via various flags, which we'll cover in\nthe [AdvancedGuide](advanced.md). You **must** call this function before calling\n`RUN_ALL_TESTS()`, or the flags won't be properly initialized.\n\nOn Windows, `InitGoogleTest()` also works with wide strings, so it can be used\nin programs compiled in `UNICODE` mode as well.\n\nBut maybe you think that writing all those `main` functions is too much work? We\nagree with you completely, and that's why Google Test provides a basic\nimplementation of main(). If it fits your needs, then just link your test with\nthe `gtest_main` library and you are good to go.\n\nNOTE: `ParseGUnitFlags()` is deprecated in favor of `InitGoogleTest()`.\n\n## Known Limitations\n\n*   Google Test is designed to be thread-safe. The implementation is thread-safe\n    on systems where the `pthreads` library is available. It is currently\n    _unsafe_ to use Google Test assertions from two threads concurrently on\n    other systems (e.g. Windows). In most tests this is not an issue as usually\n    the assertions are done in the main thread. If you want to help, you can\n    volunteer to implement the necessary synchronization primitives in\n    `gtest-port.h` for your platform.\n"
  },
  {
    "path": "test/googletest/docs/samples.md",
    "content": "# Googletest Samples {#samples}\n\nIf you're like us, you'd like to look at\n[googletest samples.](https://github.com/google/googletest/tree/master/googletest/samples)\nThe sample directory has a number of well-commented samples showing how to use a\nvariety of googletest features.\n\n*   Sample #1 shows the basic steps of using googletest to test C++ functions.\n*   Sample #2 shows a more complex unit test for a class with multiple member\n    functions.\n*   Sample #3 uses a test fixture.\n*   Sample #4 teaches you how to use googletest and `googletest.h` together to\n    get the best of both libraries.\n*   Sample #5 puts shared testing logic in a base test fixture, and reuses it in\n    derived fixtures.\n*   Sample #6 demonstrates type-parameterized tests.\n*   Sample #7 teaches the basics of value-parameterized tests.\n*   Sample #8 shows using `Combine()` in value-parameterized tests.\n*   Sample #9 shows use of the listener API to modify Google Test's console\n    output and the use of its reflection API to inspect test results.\n*   Sample #10 shows use of the listener API to implement a primitive memory\n    leak checker.\n"
  },
  {
    "path": "test/googletest/include/gtest/gtest-death-test.h",
    "content": "// Copyright 2005, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n//\n// The Google C++ Testing and Mocking Framework (Google Test)\n//\n// This header file defines the public API for death tests.  
It is\n// #included by gtest.h so a user doesn't need to include this\n// directly.\n// GOOGLETEST_CM0001 DO NOT DELETE\n\n#ifndef GTEST_INCLUDE_GTEST_GTEST_DEATH_TEST_H_\n#define GTEST_INCLUDE_GTEST_GTEST_DEATH_TEST_H_\n\n#include \"gtest/internal/gtest-death-test-internal.h\"\n\nnamespace testing {\n\n// This flag controls the style of death tests.  Valid values are \"threadsafe\",\n// meaning that the death test child process will re-execute the test binary\n// from the start, running only a single death test, or \"fast\",\n// meaning that the child process will execute the test logic immediately\n// after forking.\nGTEST_DECLARE_string_(death_test_style);\n\n#if GTEST_HAS_DEATH_TEST\n\nnamespace internal {\n\n// Returns a Boolean value indicating whether the caller is currently\n// executing in the context of the death test child process.  Tools such as\n// Valgrind heap checkers may need this to modify their behavior in death\n// tests.  IMPORTANT: This is an internal utility.  Using it may break the\n// implementation of death tests.  User code MUST NOT use it.\nGTEST_API_ bool InDeathTestChild();\n\n}  // namespace internal\n\n// The following macros are useful for writing death tests.\n\n// Here's what happens when an ASSERT_DEATH* or EXPECT_DEATH* is\n// executed:\n//\n//   1. It generates a warning if there is more than one active\n//   thread.  This is because it's safe to fork() or clone() only\n//   when there is a single thread.\n//\n//   2. The parent process clone()s a sub-process and runs the death\n//   test in it; the sub-process exits with code 0 at the end of the\n//   death test, if it hasn't exited already.\n//\n//   3. The parent process waits for the sub-process to terminate.\n//\n//   4. 
The parent process checks the exit code and error message of\n//   the sub-process.\n//\n// Examples:\n//\n//   ASSERT_DEATH(server.SendMessage(56, \"Hello\"), \"Invalid port number\");\n//   for (int i = 0; i < 5; i++) {\n//     EXPECT_DEATH(server.ProcessRequest(i),\n//                  \"Invalid request .* in ProcessRequest()\")\n//                  << \"Failed to die on request \" << i;\n//   }\n//\n//   ASSERT_EXIT(server.ExitNow(), ::testing::ExitedWithCode(0), \"Exiting\");\n//\n//   bool KilledBySIGHUP(int exit_code) {\n//     return WIFSIGNALED(exit_code) && WTERMSIG(exit_code) == SIGHUP;\n//   }\n//\n//   ASSERT_EXIT(client.HangUpServer(), KilledBySIGHUP, \"Hanging up!\");\n//\n// On the regular expressions used in death tests:\n//\n//   GOOGLETEST_CM0005 DO NOT DELETE\n//   On POSIX-compliant systems (*nix), we use the <regex.h> library,\n//   which uses the POSIX extended regex syntax.\n//\n//   On other platforms (e.g. Windows or Mac), we only support a simple regex\n//   syntax implemented as part of Google Test.  This limited\n//   implementation should be enough most of the time when writing\n//   death tests; though it lacks many features you can find in PCRE\n//   or POSIX extended regex syntax.  For example, we don't support\n//   union (\"x|y\"), grouping (\"(xy)\"), brackets (\"[xy]\"), and\n//   repetition count (\"x{5,7}\"), among others.\n//\n//   Below is the syntax that we do support.  We chose it to be a\n//   subset of both PCRE and POSIX extended regex, so it's easy to\n//   learn wherever you come from.  
In the following: 'A' denotes a\n//   literal character, period (.), or a single \\\\ escape sequence;\n//   'x' and 'y' denote regular expressions; 'm' and 'n' are for\n//   natural numbers.\n//\n//     c     matches any literal character c\n//     \\\\d   matches any decimal digit\n//     \\\\D   matches any character that's not a decimal digit\n//     \\\\f   matches \\f\n//     \\\\n   matches \\n\n//     \\\\r   matches \\r\n//     \\\\s   matches any ASCII whitespace, including \\n\n//     \\\\S   matches any character that's not a whitespace\n//     \\\\t   matches \\t\n//     \\\\v   matches \\v\n//     \\\\w   matches any letter, _, or decimal digit\n//     \\\\W   matches any character that \\\\w doesn't match\n//     \\\\c   matches any literal character c, which must be a punctuation\n//     .     matches any single character except \\n\n//     A?    matches 0 or 1 occurrences of A\n//     A*    matches 0 or many occurrences of A\n//     A+    matches 1 or many occurrences of A\n//     ^     matches the beginning of a string (not that of each line)\n//     $     matches the end of a string (not that of each line)\n//     xy    matches x followed by y\n//\n//   If you accidentally use PCRE or POSIX extended regex features\n//   not implemented by us, you will get a run-time failure.  In that\n//   case, please try to rewrite your regular expression within the\n//   above syntax.\n//\n//   This implementation is *not* meant to be as highly tuned or robust\n//   as a compiled regex library, but should perform well enough for a\n//   death test, which already incurs significant overhead by launching\n//   a child process.\n//\n// Known caveats:\n//\n//   A \"threadsafe\" style death test obtains the path to the test\n//   program from argv[0] and re-executes it in the sub-process.  For\n//   simplicity, the current implementation doesn't search the PATH\n//   when launching the sub-process.  
This means that the user must\n//   invoke the test program via a path that contains at least one\n//   path separator (e.g. path/to/foo_test and\n//   /absolute/path/to/bar_test are fine, but foo_test is not).  This\n//   is rarely a problem as people usually don't put the test binary\n//   directory in PATH.\n//\n\n// Asserts that a given statement causes the program to exit, with an\n// integer exit status that satisfies predicate, and emitting error output\n// that matches regex.\n# define ASSERT_EXIT(statement, predicate, regex) \\\n    GTEST_DEATH_TEST_(statement, predicate, regex, GTEST_FATAL_FAILURE_)\n\n// Like ASSERT_EXIT, but continues on to successive tests in the\n// test suite, if any:\n# define EXPECT_EXIT(statement, predicate, regex) \\\n    GTEST_DEATH_TEST_(statement, predicate, regex, GTEST_NONFATAL_FAILURE_)\n\n// Asserts that a given statement causes the program to exit, either by\n// explicitly exiting with a nonzero exit code or being killed by a\n// signal, and emitting error output that matches regex.\n# define ASSERT_DEATH(statement, regex) \\\n    ASSERT_EXIT(statement, ::testing::internal::ExitedUnsuccessfully, regex)\n\n// Like ASSERT_DEATH, but continues on to successive tests in the\n// test suite, if any:\n# define EXPECT_DEATH(statement, regex) \\\n    EXPECT_EXIT(statement, ::testing::internal::ExitedUnsuccessfully, regex)\n\n// Two predicate classes that can be used in {ASSERT,EXPECT}_EXIT*:\n\n// Tests that an exit code describes a normal exit with a given exit code.\nclass GTEST_API_ ExitedWithCode {\n public:\n  explicit ExitedWithCode(int exit_code);\n  bool operator()(int exit_status) const;\n private:\n  // No implementation - assignment is unsupported.\n  void operator=(const ExitedWithCode& other);\n\n  const int exit_code_;\n};\n\n# if !GTEST_OS_WINDOWS && !GTEST_OS_FUCHSIA\n// Tests that an exit code describes an exit due to termination by a\n// given signal.\n// GOOGLETEST_CM0006 DO NOT DELETE\nclass GTEST_API_ 
KilledBySignal {\n public:\n  explicit KilledBySignal(int signum);\n  bool operator()(int exit_status) const;\n private:\n  const int signum_;\n};\n# endif  // !GTEST_OS_WINDOWS\n\n// EXPECT_DEBUG_DEATH asserts that the given statements die in debug mode.\n// The death testing framework causes this to have interesting semantics,\n// since the sideeffects of the call are only visible in opt mode, and not\n// in debug mode.\n//\n// In practice, this can be used to test functions that utilize the\n// LOG(DFATAL) macro using the following style:\n//\n// int DieInDebugOr12(int* sideeffect) {\n//   if (sideeffect) {\n//     *sideeffect = 12;\n//   }\n//   LOG(DFATAL) << \"death\";\n//   return 12;\n// }\n//\n// TEST(TestSuite, TestDieOr12WorksInDgbAndOpt) {\n//   int sideeffect = 0;\n//   // Only asserts in dbg.\n//   EXPECT_DEBUG_DEATH(DieInDebugOr12(&sideeffect), \"death\");\n//\n// #ifdef NDEBUG\n//   // opt-mode has sideeffect visible.\n//   EXPECT_EQ(12, sideeffect);\n// #else\n//   // dbg-mode no visible sideeffect.\n//   EXPECT_EQ(0, sideeffect);\n// #endif\n// }\n//\n// This will assert that DieInDebugReturn12InOpt() crashes in debug\n// mode, usually due to a DCHECK or LOG(DFATAL), but returns the\n// appropriate fallback value (12 in this case) in opt mode. If you\n// need to test that a function has appropriate side-effects in opt\n// mode, include assertions against the side-effects.  
A general\n// pattern for this is:\n//\n// EXPECT_DEBUG_DEATH({\n//   // Side-effects here will have an effect after this statement in\n//   // opt mode, but none in debug mode.\n//   EXPECT_EQ(12, DieInDebugOr12(&sideeffect));\n// }, \"death\");\n//\n# ifdef NDEBUG\n\n#  define EXPECT_DEBUG_DEATH(statement, regex) \\\n  GTEST_EXECUTE_STATEMENT_(statement, regex)\n\n#  define ASSERT_DEBUG_DEATH(statement, regex) \\\n  GTEST_EXECUTE_STATEMENT_(statement, regex)\n\n# else\n\n#  define EXPECT_DEBUG_DEATH(statement, regex) \\\n  EXPECT_DEATH(statement, regex)\n\n#  define ASSERT_DEBUG_DEATH(statement, regex) \\\n  ASSERT_DEATH(statement, regex)\n\n# endif  // NDEBUG for EXPECT_DEBUG_DEATH\n#endif  // GTEST_HAS_DEATH_TEST\n\n// This macro is used for implementing macros such as\n// EXPECT_DEATH_IF_SUPPORTED and ASSERT_DEATH_IF_SUPPORTED on systems where\n// death tests are not supported. Those macros must compile on such systems\n// if and only if EXPECT_DEATH and ASSERT_DEATH compile with the same parameters\n// on systems that support death tests. This allows one to write such a macro on\n// a system that does not support death tests and be sure that it will compile\n// on a death-test supporting system. It is exposed publicly so that systems\n// that have death-tests with stricter requirements than GTEST_HAS_DEATH_TEST\n// can write their own equivalent of EXPECT_DEATH_IF_SUPPORTED and\n// ASSERT_DEATH_IF_SUPPORTED.\n//\n// Parameters:\n//   statement -  A statement that a macro such as EXPECT_DEATH would test\n//                for program termination. This macro has to make sure this\n//                statement is compiled but not executed, to ensure that\n//                EXPECT_DEATH_IF_SUPPORTED compiles with a certain\n//                parameter if and only if EXPECT_DEATH compiles with it.\n//   regex     -  A regex that a macro such as EXPECT_DEATH would use to test\n//                the output of statement.  
This parameter has to be\n//                compiled but not evaluated by this macro, to ensure that\n//                this macro only accepts expressions that a macro such as\n//                EXPECT_DEATH would accept.\n//   terminator - Must be an empty statement for EXPECT_DEATH_IF_SUPPORTED\n//                and a return statement for ASSERT_DEATH_IF_SUPPORTED.\n//                This ensures that ASSERT_DEATH_IF_SUPPORTED will not\n//                compile inside functions where ASSERT_DEATH doesn't\n//                compile.\n//\n//  The branch that has an always false condition is used to ensure that\n//  statement and regex are compiled (and thus syntactically correct) but\n//  never executed. The unreachable code macro protects the terminator\n//  statement from generating an 'unreachable code' warning in case\n//  statement unconditionally returns or throws. The Message constructor at\n//  the end allows the syntax of streaming additional messages into the\n//  macro, for compilational compatibility with EXPECT_DEATH/ASSERT_DEATH.\n# define GTEST_UNSUPPORTED_DEATH_TEST(statement, regex, terminator) \\\n    GTEST_AMBIGUOUS_ELSE_BLOCKER_ \\\n    if (::testing::internal::AlwaysTrue()) { \\\n      GTEST_LOG_(WARNING) \\\n          << \"Death tests are not supported on this platform.\\n\" \\\n          << \"Statement '\" #statement \"' cannot be verified.\"; \\\n    } else if (::testing::internal::AlwaysFalse()) { \\\n      ::testing::internal::RE::PartialMatch(\".*\", (regex)); \\\n      GTEST_SUPPRESS_UNREACHABLE_CODE_WARNING_BELOW_(statement); \\\n      terminator; \\\n    } else \\\n      ::testing::Message()\n\n// EXPECT_DEATH_IF_SUPPORTED(statement, regex) and\n// ASSERT_DEATH_IF_SUPPORTED(statement, regex) expand to real death tests if\n// death tests are supported; otherwise they just issue a warning.  
This is\n// useful when you are combining death test assertions with normal test\n// assertions in one test.\n#if GTEST_HAS_DEATH_TEST\n# define EXPECT_DEATH_IF_SUPPORTED(statement, regex) \\\n    EXPECT_DEATH(statement, regex)\n# define ASSERT_DEATH_IF_SUPPORTED(statement, regex) \\\n    ASSERT_DEATH(statement, regex)\n#else\n# define EXPECT_DEATH_IF_SUPPORTED(statement, regex) \\\n    GTEST_UNSUPPORTED_DEATH_TEST(statement, regex, )\n# define ASSERT_DEATH_IF_SUPPORTED(statement, regex) \\\n    GTEST_UNSUPPORTED_DEATH_TEST(statement, regex, return)\n#endif\n\n}  // namespace testing\n\n#endif  // GTEST_INCLUDE_GTEST_GTEST_DEATH_TEST_H_\n"
  },
  {
    "path": "test/googletest/include/gtest/gtest-matchers.h",
    "content": "// Copyright 2007, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n// The Google C++ Testing and Mocking Framework (Google Test)\n//\n// This file implements just enough of the matcher interface to allow\n// EXPECT_DEATH and friends to accept a matcher argument.\n\n// IWYU pragma: private, include \"testing/base/public/gunit.h\"\n// IWYU pragma: friend third_party/googletest/googlemock/.*\n// IWYU pragma: friend third_party/googletest/googletest/.*\n\n#ifndef GTEST_INCLUDE_GTEST_GTEST_MATCHERS_H_\n#define GTEST_INCLUDE_GTEST_GTEST_MATCHERS_H_\n\n#include <memory>\n#include <ostream>\n#include <string>\n#include <type_traits>\n\n#include \"gtest/gtest-printers.h\"\n#include \"gtest/internal/gtest-internal.h\"\n#include \"gtest/internal/gtest-port.h\"\n\n// MSVC warning C5046 is new as of VS2017 version 15.8.\n#if defined(_MSC_VER) && _MSC_VER >= 1915\n#define GTEST_MAYBE_5046_ 5046\n#else\n#define GTEST_MAYBE_5046_\n#endif\n\nGTEST_DISABLE_MSC_WARNINGS_PUSH_(\n    4251 GTEST_MAYBE_5046_ /* class A needs to have dll-interface to be used by\n                              clients of class B */\n    /* Symbol involving type with internal linkage not defined */)\n\nnamespace testing {\n\n// To implement a matcher Foo for type T, define:\n//   1. a class FooMatcherImpl that implements the\n//      MatcherInterface<T> interface, and\n//   2. 
a factory function that creates a Matcher<T> object from a\n//      FooMatcherImpl*.\n//\n// The two-level delegation design makes it possible to allow a user\n// to write \"v\" instead of \"Eq(v)\" where a Matcher is expected, which\n// is impossible if we pass matchers by pointers.  It also eases\n// ownership management as Matcher objects can now be copied like\n// plain values.\n\n// MatchResultListener is an abstract class.  Its << operator can be\n// used by a matcher to explain why a value matches or doesn't match.\n//\nclass MatchResultListener {\n public:\n  // Creates a listener object with the given underlying ostream.  The\n  // listener does not own the ostream, and does not dereference it\n  // in the constructor or destructor.\n  explicit MatchResultListener(::std::ostream* os) : stream_(os) {}\n  virtual ~MatchResultListener() = 0;  // Makes this class abstract.\n\n  // Streams x to the underlying ostream; does nothing if the ostream\n  // is NULL.\n  template <typename T>\n  MatchResultListener& operator<<(const T& x) {\n    if (stream_ != nullptr) *stream_ << x;\n    return *this;\n  }\n\n  // Returns the underlying ostream.\n  ::std::ostream* stream() { return stream_; }\n\n  // Returns true if and only if the listener is interested in an explanation\n  // of the match result.  A matcher's MatchAndExplain() method can use\n  // this information to avoid generating the explanation when no one\n  // intends to hear it.\n  bool IsInterested() const { return stream_ != nullptr; }\n\n private:\n  ::std::ostream* const stream_;\n\n  GTEST_DISALLOW_COPY_AND_ASSIGN_(MatchResultListener);\n};\n\ninline MatchResultListener::~MatchResultListener() {\n}\n\n// An instance of a subclass of this knows how to describe itself as a\n// matcher.\nclass MatcherDescriberInterface {\n public:\n  virtual ~MatcherDescriberInterface() {}\n\n  // Describes this matcher to an ostream.  
The function should print\n  // a verb phrase that describes the property a value matching this\n  // matcher should have.  The subject of the verb phrase is the value\n  // being matched.  For example, the DescribeTo() method of the Gt(7)\n  // matcher prints \"is greater than 7\".\n  virtual void DescribeTo(::std::ostream* os) const = 0;\n\n  // Describes the negation of this matcher to an ostream.  For\n  // example, if the description of this matcher is \"is greater than\n  // 7\", the negated description could be \"is not greater than 7\".\n  // You are not required to override this when implementing\n  // MatcherInterface, but it is highly advised so that your matcher\n  // can produce good error messages.\n  virtual void DescribeNegationTo(::std::ostream* os) const {\n    *os << \"not (\";\n    DescribeTo(os);\n    *os << \")\";\n  }\n};\n\n// The implementation of a matcher.\ntemplate <typename T>\nclass MatcherInterface : public MatcherDescriberInterface {\n public:\n  // Returns true if and only if the matcher matches x; also explains the\n  // match result to 'listener' if necessary (see the next paragraph), in\n  // the form of a non-restrictive relative clause (\"which ...\",\n  // \"whose ...\", etc) that describes x.  For example, the\n  // MatchAndExplain() method of the Pointee(...) matcher should\n  // generate an explanation like \"which points to ...\".\n  //\n  // Implementations of MatchAndExplain() should add an explanation of\n  // the match result *if and only if* they can provide additional\n  // information that's not already present (or not obvious) in the\n  // print-out of x and the matcher's description.  Whether the match\n  // succeeds is not a factor in deciding whether an explanation is\n  // needed, as sometimes the caller needs to print a failure message\n  // when the match succeeds (e.g. 
when the matcher is used inside\n  // Not()).\n  //\n  // For example, a \"has at least 10 elements\" matcher should explain\n  // what the actual element count is, regardless of the match result,\n  // as it is useful information to the reader; on the other hand, an\n  // \"is empty\" matcher probably only needs to explain what the actual\n  // size is when the match fails, as it's redundant to say that the\n  // size is 0 when the value is already known to be empty.\n  //\n  // You should override this method when defining a new matcher.\n  //\n  // It's the responsibility of the caller (Google Test) to guarantee\n  // that 'listener' is not NULL.  This helps to simplify a matcher's\n  // implementation when it doesn't care about the performance, as it\n  // can talk to 'listener' without checking its validity first.\n  // However, in order to implement dummy listeners efficiently,\n  // listener->stream() may be NULL.\n  virtual bool MatchAndExplain(T x, MatchResultListener* listener) const = 0;\n\n  // Inherits these methods from MatcherDescriberInterface:\n  //   virtual void DescribeTo(::std::ostream* os) const = 0;\n  //   virtual void DescribeNegationTo(::std::ostream* os) const;\n};\n\nnamespace internal {\n\n// Converts a MatcherInterface<T> to a MatcherInterface<const T&>.\ntemplate <typename T>\nclass MatcherInterfaceAdapter : public MatcherInterface<const T&> {\n public:\n  explicit MatcherInterfaceAdapter(const MatcherInterface<T>* impl)\n      : impl_(impl) {}\n  ~MatcherInterfaceAdapter() override { delete impl_; }\n\n  void DescribeTo(::std::ostream* os) const override { impl_->DescribeTo(os); }\n\n  void DescribeNegationTo(::std::ostream* os) const override {\n    impl_->DescribeNegationTo(os);\n  }\n\n  bool MatchAndExplain(const T& x,\n                       MatchResultListener* listener) const override {\n    return impl_->MatchAndExplain(x, listener);\n  }\n\n private:\n  const MatcherInterface<T>* const impl_;\n\n  
GTEST_DISALLOW_COPY_AND_ASSIGN_(MatcherInterfaceAdapter);\n};\n\nstruct AnyEq {\n  template <typename A, typename B>\n  bool operator()(const A& a, const B& b) const { return a == b; }\n};\nstruct AnyNe {\n  template <typename A, typename B>\n  bool operator()(const A& a, const B& b) const { return a != b; }\n};\nstruct AnyLt {\n  template <typename A, typename B>\n  bool operator()(const A& a, const B& b) const { return a < b; }\n};\nstruct AnyGt {\n  template <typename A, typename B>\n  bool operator()(const A& a, const B& b) const { return a > b; }\n};\nstruct AnyLe {\n  template <typename A, typename B>\n  bool operator()(const A& a, const B& b) const { return a <= b; }\n};\nstruct AnyGe {\n  template <typename A, typename B>\n  bool operator()(const A& a, const B& b) const { return a >= b; }\n};\n\n// A match result listener that ignores the explanation.\nclass DummyMatchResultListener : public MatchResultListener {\n public:\n  DummyMatchResultListener() : MatchResultListener(nullptr) {}\n\n private:\n  GTEST_DISALLOW_COPY_AND_ASSIGN_(DummyMatchResultListener);\n};\n\n// A match result listener that forwards the explanation to a given\n// ostream.  The difference between this and MatchResultListener is\n// that the former is concrete.\nclass StreamMatchResultListener : public MatchResultListener {\n public:\n  explicit StreamMatchResultListener(::std::ostream* os)\n      : MatchResultListener(os) {}\n\n private:\n  GTEST_DISALLOW_COPY_AND_ASSIGN_(StreamMatchResultListener);\n};\n\n// An internal class for implementing Matcher<T>, which will derive\n// from it.  
We put functionalities common to all Matcher<T>\n// specializations here to avoid code duplication.\ntemplate <typename T>\nclass MatcherBase {\n public:\n  // Returns true if and only if the matcher matches x; also explains the\n  // match result to 'listener'.\n  bool MatchAndExplain(const T& x, MatchResultListener* listener) const {\n    return impl_->MatchAndExplain(x, listener);\n  }\n\n  // Returns true if and only if this matcher matches x.\n  bool Matches(const T& x) const {\n    DummyMatchResultListener dummy;\n    return MatchAndExplain(x, &dummy);\n  }\n\n  // Describes this matcher to an ostream.\n  void DescribeTo(::std::ostream* os) const { impl_->DescribeTo(os); }\n\n  // Describes the negation of this matcher to an ostream.\n  void DescribeNegationTo(::std::ostream* os) const {\n    impl_->DescribeNegationTo(os);\n  }\n\n  // Explains why x matches, or doesn't match, the matcher.\n  void ExplainMatchResultTo(const T& x, ::std::ostream* os) const {\n    StreamMatchResultListener listener(os);\n    MatchAndExplain(x, &listener);\n  }\n\n  // Returns the describer for this matcher object; retains ownership\n  // of the describer, which is only guaranteed to be alive when\n  // this matcher object is alive.\n  const MatcherDescriberInterface* GetDescriber() const {\n    return impl_.get();\n  }\n\n protected:\n  MatcherBase() {}\n\n  // Constructs a matcher from its implementation.\n  explicit MatcherBase(const MatcherInterface<const T&>* impl) : impl_(impl) {}\n\n  template <typename U>\n  explicit MatcherBase(\n      const MatcherInterface<U>* impl,\n      typename std::enable_if<!std::is_same<U, const U&>::value>::type* =\n          nullptr)\n      : impl_(new internal::MatcherInterfaceAdapter<U>(impl)) {}\n\n  MatcherBase(const MatcherBase&) = default;\n  MatcherBase& operator=(const MatcherBase&) = default;\n  MatcherBase(MatcherBase&&) = default;\n  MatcherBase& operator=(MatcherBase&&) = default;\n\n  virtual ~MatcherBase() {}\n\n private:\n  
std::shared_ptr<const MatcherInterface<const T&>> impl_;\n};\n\n}  // namespace internal\n\n// A Matcher<T> is a copyable and IMMUTABLE (except by assignment)\n// object that can check whether a value of type T matches.  The\n// implementation of Matcher<T> is just a std::shared_ptr to const\n// MatcherInterface<T>.  Don't inherit from Matcher!\ntemplate <typename T>\nclass Matcher : public internal::MatcherBase<T> {\n public:\n  // Constructs a null matcher.  Needed for storing Matcher objects in STL\n  // containers.  A default-constructed matcher is not yet initialized.  You\n  // cannot use it until a valid value has been assigned to it.\n  explicit Matcher() {}  // NOLINT\n\n  // Constructs a matcher from its implementation.\n  explicit Matcher(const MatcherInterface<const T&>* impl)\n      : internal::MatcherBase<T>(impl) {}\n\n  template <typename U>\n  explicit Matcher(\n      const MatcherInterface<U>* impl,\n      typename std::enable_if<!std::is_same<U, const U&>::value>::type* =\n          nullptr)\n      : internal::MatcherBase<T>(impl) {}\n\n  // Implicit constructor here allows people to write\n  // EXPECT_CALL(foo, Bar(5)) instead of EXPECT_CALL(foo, Bar(Eq(5))) sometimes\n  Matcher(T value);  // NOLINT\n};\n\n// The following two specializations allow the user to write str\n// instead of Eq(str) and \"foo\" instead of Eq(\"foo\") when a std::string\n// matcher is expected.\ntemplate <>\nclass GTEST_API_ Matcher<const std::string&>\n    : public internal::MatcherBase<const std::string&> {\n public:\n  Matcher() {}\n\n  explicit Matcher(const MatcherInterface<const std::string&>* impl)\n      : internal::MatcherBase<const std::string&>(impl) {}\n\n  // Allows the user to write str instead of Eq(str) sometimes, where\n  // str is a std::string object.\n  Matcher(const std::string& s);  // NOLINT\n\n  // Allows the user to write \"foo\" instead of Eq(\"foo\") sometimes.\n  Matcher(const char* s);  // NOLINT\n};\n\ntemplate <>\nclass GTEST_API_ 
Matcher<std::string>\n    : public internal::MatcherBase<std::string> {\n public:\n  Matcher() {}\n\n  explicit Matcher(const MatcherInterface<const std::string&>* impl)\n      : internal::MatcherBase<std::string>(impl) {}\n  explicit Matcher(const MatcherInterface<std::string>* impl)\n      : internal::MatcherBase<std::string>(impl) {}\n\n  // Allows the user to write str instead of Eq(str) sometimes, where\n  // str is a string object.\n  Matcher(const std::string& s);  // NOLINT\n\n  // Allows the user to write \"foo\" instead of Eq(\"foo\") sometimes.\n  Matcher(const char* s);  // NOLINT\n};\n\n#if GTEST_HAS_ABSL\n// The following two specializations allow the user to write str\n// instead of Eq(str) and \"foo\" instead of Eq(\"foo\") when a absl::string_view\n// matcher is expected.\ntemplate <>\nclass GTEST_API_ Matcher<const absl::string_view&>\n    : public internal::MatcherBase<const absl::string_view&> {\n public:\n  Matcher() {}\n\n  explicit Matcher(const MatcherInterface<const absl::string_view&>* impl)\n      : internal::MatcherBase<const absl::string_view&>(impl) {}\n\n  // Allows the user to write str instead of Eq(str) sometimes, where\n  // str is a std::string object.\n  Matcher(const std::string& s);  // NOLINT\n\n  // Allows the user to write \"foo\" instead of Eq(\"foo\") sometimes.\n  Matcher(const char* s);  // NOLINT\n\n  // Allows the user to pass absl::string_views directly.\n  Matcher(absl::string_view s);  // NOLINT\n};\n\ntemplate <>\nclass GTEST_API_ Matcher<absl::string_view>\n    : public internal::MatcherBase<absl::string_view> {\n public:\n  Matcher() {}\n\n  explicit Matcher(const MatcherInterface<const absl::string_view&>* impl)\n      : internal::MatcherBase<absl::string_view>(impl) {}\n  explicit Matcher(const MatcherInterface<absl::string_view>* impl)\n      : internal::MatcherBase<absl::string_view>(impl) {}\n\n  // Allows the user to write str instead of Eq(str) sometimes, where\n  // str is a std::string object.\n  
Matcher(const std::string& s);  // NOLINT\n\n  // Allows the user to write \"foo\" instead of Eq(\"foo\") sometimes.\n  Matcher(const char* s);  // NOLINT\n\n  // Allows the user to pass absl::string_views directly.\n  Matcher(absl::string_view s);  // NOLINT\n};\n#endif  // GTEST_HAS_ABSL\n\n// Prints a matcher in a human-readable format.\ntemplate <typename T>\nstd::ostream& operator<<(std::ostream& os, const Matcher<T>& matcher) {\n  matcher.DescribeTo(&os);\n  return os;\n}\n\n// The PolymorphicMatcher class template makes it easy to implement a\n// polymorphic matcher (i.e. a matcher that can match values of more\n// than one type, e.g. Eq(n) and NotNull()).\n//\n// To define a polymorphic matcher, a user should provide an Impl\n// class that has a DescribeTo() method and a DescribeNegationTo()\n// method, and define a member function (or member function template)\n//\n//   bool MatchAndExplain(const Value& value,\n//                        MatchResultListener* listener) const;\n//\n// See the definition of NotNull() for a complete example.\ntemplate <class Impl>\nclass PolymorphicMatcher {\n public:\n  explicit PolymorphicMatcher(const Impl& an_impl) : impl_(an_impl) {}\n\n  // Returns a mutable reference to the underlying matcher\n  // implementation object.\n  Impl& mutable_impl() { return impl_; }\n\n  // Returns an immutable reference to the underlying matcher\n  // implementation object.\n  const Impl& impl() const { return impl_; }\n\n  template <typename T>\n  operator Matcher<T>() const {\n    return Matcher<T>(new MonomorphicImpl<const T&>(impl_));\n  }\n\n private:\n  template <typename T>\n  class MonomorphicImpl : public MatcherInterface<T> {\n   public:\n    explicit MonomorphicImpl(const Impl& impl) : impl_(impl) {}\n\n    void DescribeTo(::std::ostream* os) const override { impl_.DescribeTo(os); }\n\n    void DescribeNegationTo(::std::ostream* os) const override {\n      impl_.DescribeNegationTo(os);\n    }\n\n    bool MatchAndExplain(T x, 
MatchResultListener* listener) const override {\n      return impl_.MatchAndExplain(x, listener);\n    }\n\n   private:\n    const Impl impl_;\n  };\n\n  Impl impl_;\n};\n\n// Creates a matcher from its implementation.\n// DEPRECATED: Especially in the generic code, prefer:\n//   Matcher<T>(new MyMatcherImpl<const T&>(...));\n//\n// MakeMatcher may create a Matcher that accepts its argument by value, which\n// leads to unnecessary copies & lack of support for non-copyable types.\ntemplate <typename T>\ninline Matcher<T> MakeMatcher(const MatcherInterface<T>* impl) {\n  return Matcher<T>(impl);\n}\n\n// Creates a polymorphic matcher from its implementation.  This is\n// easier to use than the PolymorphicMatcher<Impl> constructor as it\n// doesn't require you to explicitly write the template argument, e.g.\n//\n//   MakePolymorphicMatcher(foo);\n// vs\n//   PolymorphicMatcher<TypeOfFoo>(foo);\ntemplate <class Impl>\ninline PolymorphicMatcher<Impl> MakePolymorphicMatcher(const Impl& impl) {\n  return PolymorphicMatcher<Impl>(impl);\n}\n\nnamespace internal {\n// Implements a matcher that compares a given value with a\n// pre-supplied value using one of the ==, <=, <, etc, operators.  The\n// two values being compared don't have to have the same type.\n//\n// The matcher defined here is polymorphic (for example, Eq(5) can be\n// used to match an int, a short, a double, etc).  Therefore we use\n// a template type conversion operator in the implementation.\n//\n// The following template definition assumes that the Rhs parameter is\n// a \"bare\" type (i.e. 
neither 'const T' nor 'T&').\ntemplate <typename D, typename Rhs, typename Op>\nclass ComparisonBase {\n public:\n  explicit ComparisonBase(const Rhs& rhs) : rhs_(rhs) {}\n  template <typename Lhs>\n  operator Matcher<Lhs>() const {\n    return Matcher<Lhs>(new Impl<const Lhs&>(rhs_));\n  }\n\n private:\n  template <typename T>\n  static const T& Unwrap(const T& v) { return v; }\n  template <typename T>\n  static const T& Unwrap(std::reference_wrapper<T> v) { return v; }\n\n  template <typename Lhs, typename = Rhs>\n  class Impl : public MatcherInterface<Lhs> {\n   public:\n    explicit Impl(const Rhs& rhs) : rhs_(rhs) {}\n    bool MatchAndExplain(Lhs lhs,\n                         MatchResultListener* /* listener */) const override {\n      return Op()(lhs, Unwrap(rhs_));\n    }\n    void DescribeTo(::std::ostream* os) const override {\n      *os << D::Desc() << \" \";\n      UniversalPrint(Unwrap(rhs_), os);\n    }\n    void DescribeNegationTo(::std::ostream* os) const override {\n      *os << D::NegatedDesc() <<  \" \";\n      UniversalPrint(Unwrap(rhs_), os);\n    }\n\n   private:\n    Rhs rhs_;\n  };\n  Rhs rhs_;\n};\n\ntemplate <typename Rhs>\nclass EqMatcher : public ComparisonBase<EqMatcher<Rhs>, Rhs, AnyEq> {\n public:\n  explicit EqMatcher(const Rhs& rhs)\n      : ComparisonBase<EqMatcher<Rhs>, Rhs, AnyEq>(rhs) { }\n  static const char* Desc() { return \"is equal to\"; }\n  static const char* NegatedDesc() { return \"isn't equal to\"; }\n};\ntemplate <typename Rhs>\nclass NeMatcher : public ComparisonBase<NeMatcher<Rhs>, Rhs, AnyNe> {\n public:\n  explicit NeMatcher(const Rhs& rhs)\n      : ComparisonBase<NeMatcher<Rhs>, Rhs, AnyNe>(rhs) { }\n  static const char* Desc() { return \"isn't equal to\"; }\n  static const char* NegatedDesc() { return \"is equal to\"; }\n};\ntemplate <typename Rhs>\nclass LtMatcher : public ComparisonBase<LtMatcher<Rhs>, Rhs, AnyLt> {\n public:\n  explicit LtMatcher(const Rhs& rhs)\n      : ComparisonBase<LtMatcher<Rhs>, Rhs, 
AnyLt>(rhs) { }\n  static const char* Desc() { return \"is <\"; }\n  static const char* NegatedDesc() { return \"isn't <\"; }\n};\ntemplate <typename Rhs>\nclass GtMatcher : public ComparisonBase<GtMatcher<Rhs>, Rhs, AnyGt> {\n public:\n  explicit GtMatcher(const Rhs& rhs)\n      : ComparisonBase<GtMatcher<Rhs>, Rhs, AnyGt>(rhs) { }\n  static const char* Desc() { return \"is >\"; }\n  static const char* NegatedDesc() { return \"isn't >\"; }\n};\ntemplate <typename Rhs>\nclass LeMatcher : public ComparisonBase<LeMatcher<Rhs>, Rhs, AnyLe> {\n public:\n  explicit LeMatcher(const Rhs& rhs)\n      : ComparisonBase<LeMatcher<Rhs>, Rhs, AnyLe>(rhs) { }\n  static const char* Desc() { return \"is <=\"; }\n  static const char* NegatedDesc() { return \"isn't <=\"; }\n};\ntemplate <typename Rhs>\nclass GeMatcher : public ComparisonBase<GeMatcher<Rhs>, Rhs, AnyGe> {\n public:\n  explicit GeMatcher(const Rhs& rhs)\n      : ComparisonBase<GeMatcher<Rhs>, Rhs, AnyGe>(rhs) { }\n  static const char* Desc() { return \"is >=\"; }\n  static const char* NegatedDesc() { return \"isn't >=\"; }\n};\n\n// Implements polymorphic matchers MatchesRegex(regex) and\n// ContainsRegex(regex), which can be used as a Matcher<T> as long as\n// T can be converted to a string.\nclass MatchesRegexMatcher {\n public:\n  MatchesRegexMatcher(const RE* regex, bool full_match)\n      : regex_(regex), full_match_(full_match) {}\n\n#if GTEST_HAS_ABSL\n  bool MatchAndExplain(const absl::string_view& s,\n                       MatchResultListener* listener) const {\n    return MatchAndExplain(std::string(s), listener);\n  }\n#endif  // GTEST_HAS_ABSL\n\n  // Accepts pointer types, particularly:\n  //   const char*\n  //   char*\n  //   const wchar_t*\n  //   wchar_t*\n  template <typename CharType>\n  bool MatchAndExplain(CharType* s, MatchResultListener* listener) const {\n    return s != nullptr && MatchAndExplain(std::string(s), listener);\n  }\n\n  // Matches anything that can convert to std::string.\n  //\n 
 // This is a template, not just a plain function with const std::string&,\n  // because absl::string_view has some interfering non-explicit constructors.\n  template <class MatcheeStringType>\n  bool MatchAndExplain(const MatcheeStringType& s,\n                       MatchResultListener* /* listener */) const {\n    const std::string& s2(s);\n    return full_match_ ? RE::FullMatch(s2, *regex_)\n                       : RE::PartialMatch(s2, *regex_);\n  }\n\n  void DescribeTo(::std::ostream* os) const {\n    *os << (full_match_ ? \"matches\" : \"contains\") << \" regular expression \";\n    UniversalPrinter<std::string>::Print(regex_->pattern(), os);\n  }\n\n  void DescribeNegationTo(::std::ostream* os) const {\n    *os << \"doesn't \" << (full_match_ ? \"match\" : \"contain\")\n        << \" regular expression \";\n    UniversalPrinter<std::string>::Print(regex_->pattern(), os);\n  }\n\n private:\n  const std::shared_ptr<const RE> regex_;\n  const bool full_match_;\n};\n}  // namespace internal\n\n// Matches a string that fully matches regular expression 'regex'.\n// The matcher takes ownership of 'regex'.\ninline PolymorphicMatcher<internal::MatchesRegexMatcher> MatchesRegex(\n    const internal::RE* regex) {\n  return MakePolymorphicMatcher(internal::MatchesRegexMatcher(regex, true));\n}\ninline PolymorphicMatcher<internal::MatchesRegexMatcher> MatchesRegex(\n    const std::string& regex) {\n  return MatchesRegex(new internal::RE(regex));\n}\n\n// Matches a string that contains regular expression 'regex'.\n// The matcher takes ownership of 'regex'.\ninline PolymorphicMatcher<internal::MatchesRegexMatcher> ContainsRegex(\n    const internal::RE* regex) {\n  return MakePolymorphicMatcher(internal::MatchesRegexMatcher(regex, false));\n}\ninline PolymorphicMatcher<internal::MatchesRegexMatcher> ContainsRegex(\n    const std::string& regex) {\n  return ContainsRegex(new internal::RE(regex));\n}\n\n// Creates a polymorphic matcher that matches anything equal to x.\n// 
Note: if the parameter of Eq() were declared as const T&, Eq(\"foo\")\n// wouldn't compile.\ntemplate <typename T>\ninline internal::EqMatcher<T> Eq(T x) { return internal::EqMatcher<T>(x); }\n\n// Constructs a Matcher<T> from a 'value' of type T.  The constructed\n// matcher matches any value that's equal to 'value'.\ntemplate <typename T>\nMatcher<T>::Matcher(T value) { *this = Eq(value); }\n\n// Creates a monomorphic matcher that matches anything with type Lhs\n// and equal to rhs.  A user may need to use this instead of Eq(...)\n// in order to resolve an overloading ambiguity.\n//\n// TypedEq<T>(x) is just a convenient short-hand for Matcher<T>(Eq(x))\n// or Matcher<T>(x), but more readable than the latter.\n//\n// We could define similar monomorphic matchers for other comparison\n// operations (e.g. TypedLt, TypedGe, and etc), but decided not to do\n// it yet as those are used much less than Eq() in practice.  A user\n// can always write Matcher<T>(Lt(5)) to be explicit about the type,\n// for example.\ntemplate <typename Lhs, typename Rhs>\ninline Matcher<Lhs> TypedEq(const Rhs& rhs) { return Eq(rhs); }\n\n// Creates a polymorphic matcher that matches anything >= x.\ntemplate <typename Rhs>\ninline internal::GeMatcher<Rhs> Ge(Rhs x) {\n  return internal::GeMatcher<Rhs>(x);\n}\n\n// Creates a polymorphic matcher that matches anything > x.\ntemplate <typename Rhs>\ninline internal::GtMatcher<Rhs> Gt(Rhs x) {\n  return internal::GtMatcher<Rhs>(x);\n}\n\n// Creates a polymorphic matcher that matches anything <= x.\ntemplate <typename Rhs>\ninline internal::LeMatcher<Rhs> Le(Rhs x) {\n  return internal::LeMatcher<Rhs>(x);\n}\n\n// Creates a polymorphic matcher that matches anything < x.\ntemplate <typename Rhs>\ninline internal::LtMatcher<Rhs> Lt(Rhs x) {\n  return internal::LtMatcher<Rhs>(x);\n}\n\n// Creates a polymorphic matcher that matches anything != x.\ntemplate <typename Rhs>\ninline internal::NeMatcher<Rhs> Ne(Rhs x) {\n  return 
internal::NeMatcher<Rhs>(x);\n}\n}  // namespace testing\n\nGTEST_DISABLE_MSC_WARNINGS_POP_()  //  4251 5046\n\n#endif  // GTEST_INCLUDE_GTEST_GTEST_MATCHERS_H_\n"
  },
  {
    "path": "test/googletest/include/gtest/gtest-message.h",
    "content": "// Copyright 2005, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n//\n// The Google C++ Testing and Mocking Framework (Google Test)\n//\n// This header file defines the Message class.\n//\n// IMPORTANT NOTE: Due to limitation of the C++ language, we have to\n// leave some internal implementation details in this header file.\n// They are clearly marked by comments like this:\n//\n//   // INTERNAL IMPLEMENTATION - DO NOT USE IN A USER PROGRAM.\n//\n// Such code is NOT meant to be used by a user directly, and is subject\n// to CHANGE WITHOUT NOTICE.  Therefore DO NOT DEPEND ON IT in a user\n// program!\n\n// GOOGLETEST_CM0001 DO NOT DELETE\n\n#ifndef GTEST_INCLUDE_GTEST_GTEST_MESSAGE_H_\n#define GTEST_INCLUDE_GTEST_GTEST_MESSAGE_H_\n\n#include <limits>\n#include <memory>\n#include <sstream>\n\n#include \"gtest/internal/gtest-port.h\"\n\nGTEST_DISABLE_MSC_WARNINGS_PUSH_(4251 \\\n/* class A needs to have dll-interface to be used by clients of class B */)\n\n// Ensures that there is at least one operator<< in the global namespace.\n// See Message& operator<<(...) below for why.\nvoid operator<<(const testing::internal::Secret&, int);\n\nnamespace testing {\n\n// The Message class works like an ostream repeater.\n//\n// Typical usage:\n//\n//   1. You stream a bunch of values to a Message object.\n//      It will remember the text in a stringstream.\n//   2. 
Then you stream the Message object to an ostream.\n//      This causes the text in the Message to be streamed\n//      to the ostream.\n//\n// For example;\n//\n//   testing::Message foo;\n//   foo << 1 << \" != \" << 2;\n//   std::cout << foo;\n//\n// will print \"1 != 2\".\n//\n// Message is not intended to be inherited from.  In particular, its\n// destructor is not virtual.\n//\n// Note that stringstream behaves differently in gcc and in MSVC.  You\n// can stream a NULL char pointer to it in the former, but not in the\n// latter (it causes an access violation if you do).  The Message\n// class hides this difference by treating a NULL char pointer as\n// \"(null)\".\nclass GTEST_API_ Message {\n private:\n  // The type of basic IO manipulators (endl, ends, and flush) for\n  // narrow streams.\n  typedef std::ostream& (*BasicNarrowIoManip)(std::ostream&);\n\n public:\n  // Constructs an empty Message.\n  Message();\n\n  // Copy constructor.\n  Message(const Message& msg) : ss_(new ::std::stringstream) {  // NOLINT\n    *ss_ << msg.GetString();\n  }\n\n  // Constructs a Message from a C-string.\n  explicit Message(const char* str) : ss_(new ::std::stringstream) {\n    *ss_ << str;\n  }\n\n  // Streams a non-pointer value to this object.\n  template <typename T>\n  inline Message& operator <<(const T& val) {\n    // Some libraries overload << for STL containers.  These\n    // overloads are defined in the global namespace instead of ::std.\n    //\n    // C++'s symbol lookup rule (i.e. Koenig lookup) says that these\n    // overloads are visible in either the std namespace or the global\n    // namespace, but not other namespaces, including the testing\n    // namespace which Google Test's Message class is in.\n    //\n    // To allow STL containers (and other types that has a << operator\n    // defined in the global namespace) to be used in Google Test\n    // assertions, testing::Message must access the custom << operator\n    // from the global namespace.  
With this using declaration,\n    // overloads of << defined in the global namespace and those\n    // visible via Koenig lookup are both exposed in this function.\n    using ::operator <<;\n    *ss_ << val;\n    return *this;\n  }\n\n  // Streams a pointer value to this object.\n  //\n  // This function is an overload of the previous one.  When you\n  // stream a pointer to a Message, this definition will be used as it\n  // is more specialized.  (The C++ Standard, section\n  // [temp.func.order].)  If you stream a non-pointer, then the\n  // previous definition will be used.\n  //\n  // The reason for this overload is that streaming a NULL pointer to\n  // ostream is undefined behavior.  Depending on the compiler, you\n  // may get \"0\", \"(nil)\", \"(null)\", or an access violation.  To\n  // ensure consistent result across compilers, we always treat NULL\n  // as \"(null)\".\n  template <typename T>\n  inline Message& operator <<(T* const& pointer) {  // NOLINT\n    if (pointer == nullptr) {\n      *ss_ << \"(null)\";\n    } else {\n      *ss_ << pointer;\n    }\n    return *this;\n  }\n\n  // Since the basic IO manipulators are overloaded for both narrow\n  // and wide streams, we have to provide this specialized definition\n  // of operator <<, even though its body is the same as the\n  // templatized version above.  Without this definition, streaming\n  // endl or other basic IO manipulators to Message will confuse the\n  // compiler.\n  Message& operator <<(BasicNarrowIoManip val) {\n    *ss_ << val;\n    return *this;\n  }\n\n  // Instead of 1/0, we want to see true/false for bool values.\n  Message& operator <<(bool b) {\n    return *this << (b ? 
\"true\" : \"false\");\n  }\n\n  // These two overloads allow streaming a wide C string to a Message\n  // using the UTF-8 encoding.\n  Message& operator <<(const wchar_t* wide_c_str);\n  Message& operator <<(wchar_t* wide_c_str);\n\n#if GTEST_HAS_STD_WSTRING\n  // Converts the given wide string to a narrow string using the UTF-8\n  // encoding, and streams the result to this Message object.\n  Message& operator <<(const ::std::wstring& wstr);\n#endif  // GTEST_HAS_STD_WSTRING\n\n  // Gets the text streamed to this object so far as an std::string.\n  // Each '\\0' character in the buffer is replaced with \"\\\\0\".\n  //\n  // INTERNAL IMPLEMENTATION - DO NOT USE IN A USER PROGRAM.\n  std::string GetString() const;\n\n private:\n  // We'll hold the text streamed to this object here.\n  const std::unique_ptr< ::std::stringstream> ss_;\n\n  // We declare (but don't implement) this to prevent the compiler\n  // from implementing the assignment operator.\n  void operator=(const Message&);\n};\n\n// Streams a Message to an ostream.\ninline std::ostream& operator <<(std::ostream& os, const Message& sb) {\n  return os << sb.GetString();\n}\n\nnamespace internal {\n\n// Converts a streamable value to an std::string.  A NULL pointer is\n// converted to \"(null)\".  When the input value is a ::string,\n// ::std::string, ::wstring, or ::std::wstring object, each NUL\n// character in it is replaced with \"\\\\0\".\ntemplate <typename T>\nstd::string StreamableToString(const T& streamable) {\n  return (Message() << streamable).GetString();\n}\n\n}  // namespace internal\n}  // namespace testing\n\nGTEST_DISABLE_MSC_WARNINGS_POP_()  //  4251\n\n#endif  // GTEST_INCLUDE_GTEST_GTEST_MESSAGE_H_\n"
  },
  {
    "path": "test/googletest/include/gtest/gtest-param-test.h",
    "content": "// Copyright 2008, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n//\n// Macros and functions for implementing parameterized tests\n// in Google C++ Testing and Mocking Framework (Google Test)\n//\n// This file is generated by a SCRIPT.  
DO NOT EDIT BY HAND!\n//\n// GOOGLETEST_CM0001 DO NOT DELETE\n#ifndef GTEST_INCLUDE_GTEST_GTEST_PARAM_TEST_H_\n#define GTEST_INCLUDE_GTEST_GTEST_PARAM_TEST_H_\n\n\n// Value-parameterized tests allow you to test your code with different\n// parameters without writing multiple copies of the same test.\n//\n// Here is how you use value-parameterized tests:\n\n#if 0\n\n// To write value-parameterized tests, first you should define a fixture\n// class. It is usually derived from testing::TestWithParam<T> (see below for\n// another inheritance scheme that's sometimes useful in more complicated\n// class hierarchies), where the type of your parameter values.\n// TestWithParam<T> is itself derived from testing::Test. T can be any\n// copyable type. If it's a raw pointer, you are responsible for managing the\n// lifespan of the pointed values.\n\nclass FooTest : public ::testing::TestWithParam<const char*> {\n  // You can implement all the usual class fixture members here.\n};\n\n// Then, use the TEST_P macro to define as many parameterized tests\n// for this fixture as you want. The _P suffix is for \"parameterized\"\n// or \"pattern\", whichever you prefer to think. The arguments to the\n// TEST_P macro are the test_suite_name and test_case (both which must be\n// non-empty) that will form the test name.\n\nTEST_P(FooTest, DoesBlah) {\n  // Inside a test, access the test parameter with the GetParam() method\n  // of the TestWithParam<T> class:\n  EXPECT_TRUE(foo.Blah(GetParam()));\n  ...\n}\n\nTEST_P(FooTest, HasBlahBlah) {\n  ...\n}\n\n// Finally, you can use INSTANTIATE_TEST_SUITE_P to instantiate the test\n// case with any set of parameters you want. Google Test defines a number\n// of functions for generating test parameters. They return what we call\n// (surprise!) parameter generators. 
Here is a summary of them, which\n// are all in the testing namespace:\n//\n//\n//  Range(begin, end [, step]) - Yields values {begin, begin+step,\n//                               begin+step+step, ...}. The values do not\n//                               include end. step defaults to 1.\n//  Values(v1, v2, ..., vN)    - Yields values {v1, v2, ..., vN}.\n//  ValuesIn(container)        - Yields values from a C-style array, an STL\n//  ValuesIn(begin,end)          container, or an iterator range [begin, end).\n//  Bool()                     - Yields sequence {false, true}.\n//  Combine(g1, g2, ..., gN)   - Yields all combinations (the Cartesian product\n//                               for the math savvy) of the values generated\n//                               by the N generators.\n//\n// For more details, see comments at the definitions of these functions below\n// in this file.\n//\n// The following statement will instantiate tests from the FooTest test suite\n// each with parameter values \"meeny\", \"miny\", and \"moe\".\n\nINSTANTIATE_TEST_SUITE_P(InstantiationName,\n                         FooTest,\n                         Values(\"meeny\", \"miny\", \"moe\"));\n\n// To distinguish different instances of the pattern, (yes, you\n// can instantiate it more than once) the first argument to the\n// INSTANTIATE_TEST_SUITE_P macro is a prefix (which must be non-empty) that\n// will be added to the actual test suite name. Remember to pick unique prefixes\n// for different instantiations. 
The tests from the instantiation above will\n// have these names:\n//\n//    * InstantiationName/FooTest.DoesBlah/0 for \"meeny\"\n//    * InstantiationName/FooTest.DoesBlah/1 for \"miny\"\n//    * InstantiationName/FooTest.DoesBlah/2 for \"moe\"\n//    * InstantiationName/FooTest.HasBlahBlah/0 for \"meeny\"\n//    * InstantiationName/FooTest.HasBlahBlah/1 for \"miny\"\n//    * InstantiationName/FooTest.HasBlahBlah/2 for \"moe\"\n//\n// You can use these names in --gtest_filter.\n//\n// This statement will instantiate all tests from FooTest again, each\n// with parameter values \"cat\" and \"dog\":\n\nconst char* pets[] = {\"cat\", \"dog\"};\nINSTANTIATE_TEST_SUITE_P(AnotherInstantiationName, FooTest, ValuesIn(pets));\n\n// The tests from the instantiation above will have these names:\n//\n//    * AnotherInstantiationName/FooTest.DoesBlah/0 for \"cat\"\n//    * AnotherInstantiationName/FooTest.DoesBlah/1 for \"dog\"\n//    * AnotherInstantiationName/FooTest.HasBlahBlah/0 for \"cat\"\n//    * AnotherInstantiationName/FooTest.HasBlahBlah/1 for \"dog\"\n//\n// Please note that INSTANTIATE_TEST_SUITE_P will instantiate all tests\n// in the given test suite, whether their definitions come before or\n// AFTER the INSTANTIATE_TEST_SUITE_P statement.\n//\n// Please also note that generator expressions (including parameters to the\n// generators) are evaluated in InitGoogleTest(), after main() has started.\n// This allows the user on one hand, to adjust generator parameters in order\n// to dynamically determine a set of tests to run and on the other hand,\n// give the user a chance to inspect the generated tests with Google Test\n// reflection API before RUN_ALL_TESTS() is executed.\n//\n// You can see samples/sample7_unittest.cc and samples/sample8_unittest.cc\n// for more examples.\n//\n// In the future, we plan to publish the API for defining new parameter\n// generators. 
But for now this interface remains part of the internal\n// implementation and is subject to change.\n//\n//\n// A parameterized test fixture must be derived from testing::Test and from\n// testing::WithParamInterface<T>, where T is the type of the parameter\n// values. Inheriting from TestWithParam<T> satisfies that requirement because\n// TestWithParam<T> inherits from both Test and WithParamInterface. In more\n// complicated hierarchies, however, it is occasionally useful to inherit\n// separately from Test and WithParamInterface. For example:\n\nclass BaseTest : public ::testing::Test {\n  // You can inherit all the usual members for a non-parameterized test\n  // fixture here.\n};\n\nclass DerivedTest : public BaseTest, public ::testing::WithParamInterface<int> {\n  // The usual test fixture members go here too.\n};\n\nTEST_F(BaseTest, HasFoo) {\n  // This is an ordinary non-parameterized test.\n}\n\nTEST_P(DerivedTest, DoesBlah) {\n  // GetParam works just the same here as if you inherit from TestWithParam.\n  EXPECT_TRUE(foo.Blah(GetParam()));\n}\n\n#endif  // 0\n\n#include <iterator>\n#include <utility>\n\n#include \"gtest/internal/gtest-internal.h\"\n#include \"gtest/internal/gtest-param-util.h\"\n#include \"gtest/internal/gtest-port.h\"\n\nnamespace testing {\n\n// Functions producing parameter generators.\n//\n// Google Test uses these generators to produce parameters for value-\n// parameterized tests. When a parameterized test suite is instantiated\n// with a particular generator, Google Test creates and runs tests\n// for each element in the sequence produced by the generator.\n//\n// In the following sample, tests from test suite FooTest are instantiated\n// each three times with parameter values 3, 5, and 8:\n//\n// class FooTest : public TestWithParam<int> { ... 
};\n//\n// TEST_P(FooTest, TestThis) {\n// }\n// TEST_P(FooTest, TestThat) {\n// }\n// INSTANTIATE_TEST_SUITE_P(TestSequence, FooTest, Values(3, 5, 8));\n//\n\n// Range() returns generators providing sequences of values in a range.\n//\n// Synopsis:\n// Range(start, end)\n//   - returns a generator producing a sequence of values {start, start+1,\n//     start+2, ..., }.\n// Range(start, end, step)\n//   - returns a generator producing a sequence of values {start, start+step,\n//     start+step+step, ..., }.\n// Notes:\n//   * The generated sequences never include end. For example, Range(1, 5)\n//     returns a generator producing a sequence {1, 2, 3, 4}. Range(1, 9, 2)\n//     returns a generator producing {1, 3, 5, 7}.\n//   * start and end must have the same type. That type may be any integral or\n//     floating-point type or a user defined type satisfying these conditions:\n//     * It must be assignable (have operator=() defined).\n//     * It must have operator+() (operator+(int-compatible type) for\n//       two-operand version).\n//     * It must have operator<() defined.\n//     Elements in the resulting sequences will also have that type.\n//   * Condition start < end must be satisfied in order for resulting sequences\n//     to contain any elements.\n//\ntemplate <typename T, typename IncrementT>\ninternal::ParamGenerator<T> Range(T start, T end, IncrementT step) {\n  return internal::ParamGenerator<T>(\n      new internal::RangeGenerator<T, IncrementT>(start, end, step));\n}\n\ntemplate <typename T>\ninternal::ParamGenerator<T> Range(T start, T end) {\n  return Range(start, end, 1);\n}\n\n// ValuesIn() function allows generation of tests with parameters coming from\n// a container.\n//\n// Synopsis:\n// ValuesIn(const T (&array)[N])\n//   - returns a generator producing sequences with elements from\n//     a C-style array.\n// ValuesIn(const Container& container)\n//   - returns a generator producing sequences with elements from\n//     an STL-style 
container.\n// ValuesIn(Iterator begin, Iterator end)\n//   - returns a generator producing sequences with elements from\n//     a range [begin, end) defined by a pair of STL-style iterators. These\n//     iterators can also be plain C pointers.\n//\n// Please note that ValuesIn copies the values from the containers\n// passed in and keeps them to generate tests in RUN_ALL_TESTS().\n//\n// Examples:\n//\n// This instantiates tests from test suite StringTest\n// each with C-string values of \"foo\", \"bar\", and \"baz\":\n//\n// const char* strings[] = {\"foo\", \"bar\", \"baz\"};\n// INSTANTIATE_TEST_SUITE_P(StringSequence, StringTest, ValuesIn(strings));\n//\n// This instantiates tests from test suite StlStringTest\n// each with STL strings with values \"a\" and \"b\":\n//\n// ::std::vector< ::std::string> GetParameterStrings() {\n//   ::std::vector< ::std::string> v;\n//   v.push_back(\"a\");\n//   v.push_back(\"b\");\n//   return v;\n// }\n//\n// INSTANTIATE_TEST_SUITE_P(CharSequence,\n//                          StlStringTest,\n//                          ValuesIn(GetParameterStrings()));\n//\n//\n// This will also instantiate tests from CharTest\n// each with parameter values 'a' and 'b':\n//\n// ::std::list<char> GetParameterChars() {\n//   ::std::list<char> list;\n//   list.push_back('a');\n//   list.push_back('b');\n//   return list;\n// }\n// ::std::list<char> l = GetParameterChars();\n// INSTANTIATE_TEST_SUITE_P(CharSequence2,\n//                          CharTest,\n//                          ValuesIn(l.begin(), l.end()));\n//\ntemplate <typename ForwardIterator>\ninternal::ParamGenerator<\n    typename std::iterator_traits<ForwardIterator>::value_type>\nValuesIn(ForwardIterator begin, ForwardIterator end) {\n  typedef typename std::iterator_traits<ForwardIterator>::value_type ParamType;\n  return internal::ParamGenerator<ParamType>(\n      new internal::ValuesInIteratorRangeGenerator<ParamType>(begin, end));\n}\n\ntemplate <typename T, size_t 
N>\ninternal::ParamGenerator<T> ValuesIn(const T (&array)[N]) {\n  return ValuesIn(array, array + N);\n}\n\ntemplate <class Container>\ninternal::ParamGenerator<typename Container::value_type> ValuesIn(\n    const Container& container) {\n  return ValuesIn(container.begin(), container.end());\n}\n\n// Values() allows generating tests from explicitly specified list of\n// parameters.\n//\n// Synopsis:\n// Values(T v1, T v2, ..., T vN)\n//   - returns a generator producing sequences with elements v1, v2, ..., vN.\n//\n// For example, this instantiates tests from test suite BarTest each\n// with values \"one\", \"two\", and \"three\":\n//\n// INSTANTIATE_TEST_SUITE_P(NumSequence,\n//                          BarTest,\n//                          Values(\"one\", \"two\", \"three\"));\n//\n// This instantiates tests from test suite BazTest each with values 1, 2, 3.5.\n// The exact type of values will depend on the type of parameter in BazTest.\n//\n// INSTANTIATE_TEST_SUITE_P(FloatingNumbers, BazTest, Values(1, 2, 3.5));\n//\n//\ntemplate <typename... T>\ninternal::ValueArray<T...> Values(T... v) {\n  return internal::ValueArray<T...>(std::move(v)...);\n}\n\n// Bool() allows generating tests with parameters in a set of (false, true).\n//\n// Synopsis:\n// Bool()\n//   - returns a generator producing sequences with elements {false, true}.\n//\n// It is useful when testing code that depends on Boolean flags. 
Combinations\n// of multiple flags can be tested when several Bool()'s are combined using\n// Combine() function.\n//\n// In the following example all tests in the test suite FlagDependentTest\n// will be instantiated twice with parameters false and true.\n//\n// class FlagDependentTest : public testing::TestWithParam<bool> {\n//   virtual void SetUp() {\n//     external_flag = GetParam();\n//   }\n// }\n// INSTANTIATE_TEST_SUITE_P(BoolSequence, FlagDependentTest, Bool());\n//\ninline internal::ParamGenerator<bool> Bool() {\n  return Values(false, true);\n}\n\n// Combine() allows the user to combine two or more sequences to produce\n// values of a Cartesian product of those sequences' elements.\n//\n// Synopsis:\n// Combine(gen1, gen2, ..., genN)\n//   - returns a generator producing sequences with elements coming from\n//     the Cartesian product of elements from the sequences generated by\n//     gen1, gen2, ..., genN. The sequence elements will have a type of\n//     std::tuple<T1, T2, ..., TN> where T1, T2, ..., TN are the types\n//     of elements from sequences produces by gen1, gen2, ..., genN.\n//\n// Combine can have up to 10 arguments.\n//\n// Example:\n//\n// This will instantiate tests in test suite AnimalTest each one with\n// the parameter values tuple(\"cat\", BLACK), tuple(\"cat\", WHITE),\n// tuple(\"dog\", BLACK), and tuple(\"dog\", WHITE):\n//\n// enum Color { BLACK, GRAY, WHITE };\n// class AnimalTest\n//     : public testing::TestWithParam<std::tuple<const char*, Color> > {...};\n//\n// TEST_P(AnimalTest, AnimalLooksNice) {...}\n//\n// INSTANTIATE_TEST_SUITE_P(AnimalVariations, AnimalTest,\n//                          Combine(Values(\"cat\", \"dog\"),\n//                                  Values(BLACK, WHITE)));\n//\n// This will instantiate tests in FlagDependentTest with all variations of two\n// Boolean flags:\n//\n// class FlagDependentTest\n//     : public testing::TestWithParam<std::tuple<bool, bool> > {\n//   virtual void SetUp() {\n//   
  // Assigns external_flag_1 and external_flag_2 values from the tuple.\n//     std::tie(external_flag_1, external_flag_2) = GetParam();\n//   }\n// };\n//\n// TEST_P(FlagDependentTest, TestFeature1) {\n//   // Test your code using external_flag_1 and external_flag_2 here.\n// }\n// INSTANTIATE_TEST_SUITE_P(TwoBoolSequence, FlagDependentTest,\n//                          Combine(Bool(), Bool()));\n//\ntemplate <typename... Generator>\ninternal::CartesianProductHolder<Generator...> Combine(const Generator&... g) {\n  return internal::CartesianProductHolder<Generator...>(g...);\n}\n\n#define TEST_P(test_suite_name, test_name)                                     \\\n  static_assert(sizeof(GTEST_STRINGIFY_(test_suite_name)) > 1,                 \\\n                \"test_suite_name must not be empty\");                          \\\n  static_assert(sizeof(GTEST_STRINGIFY_(test_name)) > 1,                       \\\n                \"test_name must not be empty\");                                \\\n  class GTEST_TEST_CLASS_NAME_(test_suite_name, test_name)                     \\\n      : public test_suite_name {                                               \\\n   public:                                                                     \\\n    GTEST_TEST_CLASS_NAME_(test_suite_name, test_name)() {}                    \\\n    void TestBody() override;                                                  \\\n                                                                               \\\n   private:                                                                    \\\n    static int AddToRegistry() {                                               \\\n      ::testing::UnitTest::GetInstance()                                       \\\n          ->parameterized_test_registry()                                      \\\n          .GetTestSuitePatternHolder<test_suite_name>(                         \\\n              GTEST_STRINGIFY_(test_suite_name),                               
\\\n              ::testing::internal::CodeLocation(__FILE__, __LINE__))           \\\n          ->AddTestPattern(                                                    \\\n              GTEST_STRINGIFY_(test_suite_name), GTEST_STRINGIFY_(test_name),  \\\n              new ::testing::internal::TestMetaFactory<GTEST_TEST_CLASS_NAME_( \\\n                  test_suite_name, test_name)>());                             \\\n      return 0;                                                                \\\n    }                                                                          \\\n    static int gtest_registering_dummy_ GTEST_ATTRIBUTE_UNUSED_;               \\\n    GTEST_DISALLOW_COPY_AND_ASSIGN_(GTEST_TEST_CLASS_NAME_(test_suite_name,    \\\n                                                           test_name));        \\\n  };                                                                           \\\n  int GTEST_TEST_CLASS_NAME_(test_suite_name,                                  \\\n                             test_name)::gtest_registering_dummy_ =            \\\n      GTEST_TEST_CLASS_NAME_(test_suite_name, test_name)::AddToRegistry();     \\\n  void GTEST_TEST_CLASS_NAME_(test_suite_name, test_name)::TestBody()\n\n// The last argument to INSTANTIATE_TEST_SUITE_P allows the user to specify\n// generator and an optional function or functor that generates custom test name\n// suffixes based on the test parameters. Such a function or functor should\n// accept one argument of type testing::TestParamInfo<class ParamType>, and\n// return std::string.\n//\n// testing::PrintToStringParamName is a builtin test suffix generator that\n// returns the value of testing::PrintToString(GetParam()).\n//\n// Note: test names must be non-empty, unique, and may only contain ASCII\n// alphanumeric characters or underscore. 
Because PrintToString adds quotes\n// to std::string and C strings, it won't work for these types.\n\n#define GTEST_EXPAND_(arg) arg\n#define GTEST_GET_FIRST_(first, ...) first\n#define GTEST_GET_SECOND_(first, second, ...) second\n\n#define INSTANTIATE_TEST_SUITE_P(prefix, test_suite_name, ...)                \\\n  static_assert(sizeof(GTEST_STRINGIFY_(test_suite_name)) > 1,                \\\n                \"test_suite_name must not be empty\");                         \\\n  static_assert(sizeof(GTEST_STRINGIFY_(prefix)) > 1,                         \\\n                \"prefix must not be empty\");                                  \\\n  static ::testing::internal::ParamGenerator<test_suite_name::ParamType>      \\\n      gtest_##prefix##test_suite_name##_EvalGenerator_() {                    \\\n    return GTEST_EXPAND_(GTEST_GET_FIRST_(__VA_ARGS__, DUMMY_PARAM_));        \\\n  }                                                                           \\\n  static ::std::string gtest_##prefix##test_suite_name##_EvalGenerateName_(   \\\n      const ::testing::TestParamInfo<test_suite_name::ParamType>& info) {     \\\n    if (::testing::internal::AlwaysFalse()) {                                 \\\n      ::testing::internal::TestNotEmpty(GTEST_EXPAND_(GTEST_GET_SECOND_(      \\\n          __VA_ARGS__,                                                        \\\n          ::testing::internal::DefaultParamName<test_suite_name::ParamType>,  \\\n          DUMMY_PARAM_)));                                                    \\\n      auto t = std::make_tuple(__VA_ARGS__);                                  \\\n      static_assert(std::tuple_size<decltype(t)>::value <= 2,                 \\\n                    \"Too Many Args!\");                                        \\\n    }                                                                         \\\n    return ((GTEST_EXPAND_(GTEST_GET_SECOND_(                                 \\\n        __VA_ARGS__,                  
                                        \\\n        ::testing::internal::DefaultParamName<test_suite_name::ParamType>,    \\\n        DUMMY_PARAM_))))(info);                                               \\\n  }                                                                           \\\n  static int gtest_##prefix##test_suite_name##_dummy_                         \\\n      GTEST_ATTRIBUTE_UNUSED_ =                                               \\\n          ::testing::UnitTest::GetInstance()                                  \\\n              ->parameterized_test_registry()                                 \\\n              .GetTestSuitePatternHolder<test_suite_name>(                    \\\n                  GTEST_STRINGIFY_(test_suite_name),                          \\\n                  ::testing::internal::CodeLocation(__FILE__, __LINE__))      \\\n              ->AddTestSuiteInstantiation(                                    \\\n                  GTEST_STRINGIFY_(prefix),                                   \\\n                  &gtest_##prefix##test_suite_name##_EvalGenerator_,          \\\n                  &gtest_##prefix##test_suite_name##_EvalGenerateName_,       \\\n                  __FILE__, __LINE__)\n\n// Legacy API is deprecated but still available\n#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_\n#define INSTANTIATE_TEST_CASE_P                                            \\\n  static_assert(::testing::internal::InstantiateTestCase_P_IsDeprecated(), \\\n                \"\");                                                       \\\n  INSTANTIATE_TEST_SUITE_P\n#endif  // GTEST_REMOVE_LEGACY_TEST_CASEAPI_\n\n}  // namespace testing\n\n#endif  // GTEST_INCLUDE_GTEST_GTEST_PARAM_TEST_H_\n"
  },
  {
    "path": "test/googletest/include/gtest/gtest-printers.h",
    "content": "// Copyright 2007, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\n// Google Test - The Google C++ Testing and Mocking Framework\n//\n// This file implements a universal value printer that can print a\n// value of any type T:\n//\n//   void ::testing::internal::UniversalPrinter<T>::Print(value, ostream_ptr);\n//\n// A user can teach this function how to print a class type T by\n// defining either operator<<() or PrintTo() in the namespace that\n// defines T.  More specifically, the FIRST defined function in the\n// following list will be used (assuming T is defined in namespace\n// foo):\n//\n//   1. foo::PrintTo(const T&, ostream*)\n//   2. operator<<(ostream&, const T&) defined in either foo or the\n//      global namespace.\n//\n// However if T is an STL-style container then it is printed element-wise\n// unless foo::PrintTo(const T&, ostream*) is defined. Note that\n// operator<<() is ignored for container types.\n//\n// If none of the above is defined, it will print the debug string of\n// the value if it is a protocol buffer, or print the raw bytes in the\n// value otherwise.\n//\n// To aid debugging: when T is a reference type, the address of the\n// value is also printed; when T is a (const) char pointer, both the\n// pointer value and the NUL-terminated string it points to are\n// printed.\n//\n// We also provide some convenient wrappers:\n//\n//   // Prints a value to a string.  
For a (const or not) char\n//   // pointer, the NUL-terminated string (but not the pointer) is\n//   // printed.\n//   std::string ::testing::PrintToString(const T& value);\n//\n//   // Prints a value tersely: for a reference type, the referenced\n//   // value (but not the address) is printed; for a (const or not) char\n//   // pointer, the NUL-terminated string (but not the pointer) is\n//   // printed.\n//   void ::testing::internal::UniversalTersePrint(const T& value, ostream*);\n//\n//   // Prints value using the type inferred by the compiler.  The difference\n//   // from UniversalTersePrint() is that this function prints both the\n//   // pointer and the NUL-terminated string for a (const or not) char pointer.\n//   void ::testing::internal::UniversalPrint(const T& value, ostream*);\n//\n//   // Prints the fields of a tuple tersely to a string vector, one\n//   // element for each field. Tuple support must be enabled in\n//   // gtest-port.h.\n//   std::vector<string> UniversalTersePrintTupleFieldsToStrings(\n//       const Tuple& value);\n//\n// Known limitation:\n//\n// The print primitives print the elements of an STL-style container\n// using the compiler-inferred type of *iter where iter is a\n// const_iterator of the container.  When const_iterator is an input\n// iterator but not a forward iterator, this inferred type may not\n// match value_type, and the print output may be incorrect.  In\n// practice, this is rarely a problem as for most containers\n// const_iterator is a forward iterator.  We'll fix this if there's an\n// actual need for it.  
Note that this fix cannot rely on value_type\n// being defined as many user-defined container types don't have\n// value_type.\n\n// GOOGLETEST_CM0001 DO NOT DELETE\n\n#ifndef GTEST_INCLUDE_GTEST_GTEST_PRINTERS_H_\n#define GTEST_INCLUDE_GTEST_GTEST_PRINTERS_H_\n\n#include <functional>\n#include <ostream>  // NOLINT\n#include <sstream>\n#include <string>\n#include <tuple>\n#include <type_traits>\n#include <utility>\n#include <vector>\n#include \"gtest/internal/gtest-internal.h\"\n#include \"gtest/internal/gtest-port.h\"\n\n#if GTEST_HAS_ABSL\n#include \"absl/strings/string_view.h\"\n#include \"absl/types/optional.h\"\n#include \"absl/types/variant.h\"\n#endif  // GTEST_HAS_ABSL\n\nnamespace testing {\n\n// Definitions in the 'internal' and 'internal2' name spaces are\n// subject to change without notice.  DO NOT USE THEM IN USER CODE!\nnamespace internal2 {\n\n// Prints the given number of bytes in the given object to the given\n// ostream.\nGTEST_API_ void PrintBytesInObjectTo(const unsigned char* obj_bytes,\n                                     size_t count,\n                                     ::std::ostream* os);\n\n// For selecting which printer to use when a given type has neither <<\n// nor PrintTo().\nenum TypeKind {\n  kProtobuf,              // a protobuf type\n  kConvertibleToInteger,  // a type implicitly convertible to BiggestInt\n                          // (e.g. 
a named or unnamed enum type)\n#if GTEST_HAS_ABSL\n  kConvertibleToStringView,  // a type implicitly convertible to\n                             // absl::string_view\n#endif\n  kOtherType  // anything else\n};\n\n// TypeWithoutFormatter<T, kTypeKind>::PrintValue(value, os) is called\n// by the universal printer to print a value of type T when neither\n// operator<< nor PrintTo() is defined for T, where kTypeKind is the\n// \"kind\" of T as defined by enum TypeKind.\ntemplate <typename T, TypeKind kTypeKind>\nclass TypeWithoutFormatter {\n public:\n  // This default version is called when kTypeKind is kOtherType.\n  static void PrintValue(const T& value, ::std::ostream* os) {\n    PrintBytesInObjectTo(\n        static_cast<const unsigned char*>(\n            reinterpret_cast<const void*>(std::addressof(value))),\n        sizeof(value), os);\n  }\n};\n\n// We print a protobuf using its ShortDebugString() when the string\n// doesn't exceed this many characters; otherwise we print it using\n// DebugString() for better readability.\nconst size_t kProtobufOneLinerMaxLength = 50;\n\ntemplate <typename T>\nclass TypeWithoutFormatter<T, kProtobuf> {\n public:\n  static void PrintValue(const T& value, ::std::ostream* os) {\n    std::string pretty_str = value.ShortDebugString();\n    if (pretty_str.length() > kProtobufOneLinerMaxLength) {\n      pretty_str = \"\\n\" + value.DebugString();\n    }\n    *os << (\"<\" + pretty_str + \">\");\n  }\n};\n\ntemplate <typename T>\nclass TypeWithoutFormatter<T, kConvertibleToInteger> {\n public:\n  // Since T has no << operator or PrintTo() but can be implicitly\n  // converted to BiggestInt, we print it as a BiggestInt.\n  //\n  // Most likely T is an enum type (either named or unnamed), in which\n  // case printing it as an integer is the desired behavior.  
In case\n  // T is not an enum, printing it as an integer is the best we can do\n  // given that it has no user-defined printer.\n  static void PrintValue(const T& value, ::std::ostream* os) {\n    const internal::BiggestInt kBigInt = value;\n    *os << kBigInt;\n  }\n};\n\n#if GTEST_HAS_ABSL\ntemplate <typename T>\nclass TypeWithoutFormatter<T, kConvertibleToStringView> {\n public:\n  // Since T has neither operator<< nor PrintTo() but can be implicitly\n  // converted to absl::string_view, we print it as a absl::string_view.\n  //\n  // Note: the implementation is further below, as it depends on\n  // internal::PrintTo symbol which is defined later in the file.\n  static void PrintValue(const T& value, ::std::ostream* os);\n};\n#endif\n\n// Prints the given value to the given ostream.  If the value is a\n// protocol message, its debug string is printed; if it's an enum or\n// of a type implicitly convertible to BiggestInt, it's printed as an\n// integer; otherwise the bytes in the value are printed.  This is\n// what UniversalPrinter<T>::Print() does when it knows nothing about\n// type T and T has neither << operator nor PrintTo().\n//\n// A user can override this behavior for a class type Foo by defining\n// a << operator in the namespace where Foo is defined.\n//\n// We put this operator in namespace 'internal2' instead of 'internal'\n// to simplify the implementation, as much code in 'internal' needs to\n// use << in STL, which would conflict with our own << were it defined\n// in 'internal'.\n//\n// Note that this operator<< takes a generic std::basic_ostream<Char,\n// CharTraits> type instead of the more restricted std::ostream.  
If\n// we define it to take an std::ostream instead, we'll get an\n// \"ambiguous overloads\" compiler error when trying to print a type\n// Foo that supports streaming to std::basic_ostream<Char,\n// CharTraits>, as the compiler cannot tell whether\n// operator<<(std::ostream&, const T&) or\n// operator<<(std::basic_stream<Char, CharTraits>, const Foo&) is more\n// specific.\ntemplate <typename Char, typename CharTraits, typename T>\n::std::basic_ostream<Char, CharTraits>& operator<<(\n    ::std::basic_ostream<Char, CharTraits>& os, const T& x) {\n  TypeWithoutFormatter<T, (internal::IsAProtocolMessage<T>::value\n                               ? kProtobuf\n                               : std::is_convertible<\n                                     const T&, internal::BiggestInt>::value\n                                     ? kConvertibleToInteger\n                                     :\n#if GTEST_HAS_ABSL\n                                     std::is_convertible<\n                                         const T&, absl::string_view>::value\n                                         ? kConvertibleToStringView\n                                         :\n#endif\n                                         kOtherType)>::PrintValue(x, &os);\n  return os;\n}\n\n}  // namespace internal2\n}  // namespace testing\n\n// This namespace MUST NOT BE NESTED IN ::testing, or the name look-up\n// magic needed for implementing UniversalPrinter won't work.\nnamespace testing_internal {\n\n// Used to print a value that is not an STL-style container when the\n// user doesn't define PrintTo() for it.\ntemplate <typename T>\nvoid DefaultPrintNonContainerTo(const T& value, ::std::ostream* os) {\n  // With the following statement, during unqualified name lookup,\n  // testing::internal2::operator<< appears as if it was declared in\n  // the nearest enclosing namespace that contains both\n  // ::testing_internal and ::testing::internal2, i.e. the global\n  // namespace.  
For more details, refer to the C++ Standard section\n  // 7.3.4-1 [namespace.udir].  This allows us to fall back onto\n  // testing::internal2::operator<< in case T doesn't come with a <<\n  // operator.\n  //\n  // We cannot write 'using ::testing::internal2::operator<<;', which\n  // gcc 3.3 fails to compile due to a compiler bug.\n  using namespace ::testing::internal2;  // NOLINT\n\n  // Assuming T is defined in namespace foo, in the next statement,\n  // the compiler will consider all of:\n  //\n  //   1. foo::operator<< (thanks to Koenig look-up),\n  //   2. ::operator<< (as the current namespace is enclosed in ::),\n  //   3. testing::internal2::operator<< (thanks to the using statement above).\n  //\n  // The operator<< whose type matches T best will be picked.\n  //\n  // We deliberately allow #2 to be a candidate, as sometimes it's\n  // impossible to define #1 (e.g. when foo is ::std, defining\n  // anything in it is undefined behavior unless you are a compiler\n  // vendor.).\n  *os << value;\n}\n\n}  // namespace testing_internal\n\nnamespace testing {\nnamespace internal {\n\n// FormatForComparison<ToPrint, OtherOperand>::Format(value) formats a\n// value of type ToPrint that is an operand of a comparison assertion\n// (e.g. ASSERT_EQ).  OtherOperand is the type of the other operand in\n// the comparison, and is used to help determine the best way to\n// format the value.  In particular, when the value is a C string\n// (char pointer) and the other operand is an STL string object, we\n// want to format the C string as a string, since we know it is\n// compared by value with the string object.  
If the value is a char\n// pointer but the other operand is not an STL string object, we don't\n// know whether the pointer is supposed to point to a NUL-terminated\n// string, and thus want to print it as a pointer to be safe.\n//\n// INTERNAL IMPLEMENTATION - DO NOT USE IN A USER PROGRAM.\n\n// The default case.\ntemplate <typename ToPrint, typename OtherOperand>\nclass FormatForComparison {\n public:\n  static ::std::string Format(const ToPrint& value) {\n    return ::testing::PrintToString(value);\n  }\n};\n\n// Array.\ntemplate <typename ToPrint, size_t N, typename OtherOperand>\nclass FormatForComparison<ToPrint[N], OtherOperand> {\n public:\n  static ::std::string Format(const ToPrint* value) {\n    return FormatForComparison<const ToPrint*, OtherOperand>::Format(value);\n  }\n};\n\n// By default, print C string as pointers to be safe, as we don't know\n// whether they actually point to a NUL-terminated string.\n\n#define GTEST_IMPL_FORMAT_C_STRING_AS_POINTER_(CharType)                \\\n  template <typename OtherOperand>                                      \\\n  class FormatForComparison<CharType*, OtherOperand> {                  \\\n   public:                                                              \\\n    static ::std::string Format(CharType* value) {                      \\\n      return ::testing::PrintToString(static_cast<const void*>(value)); \\\n    }                                                                   \\\n  }\n\nGTEST_IMPL_FORMAT_C_STRING_AS_POINTER_(char);\nGTEST_IMPL_FORMAT_C_STRING_AS_POINTER_(const char);\nGTEST_IMPL_FORMAT_C_STRING_AS_POINTER_(wchar_t);\nGTEST_IMPL_FORMAT_C_STRING_AS_POINTER_(const wchar_t);\n\n#undef GTEST_IMPL_FORMAT_C_STRING_AS_POINTER_\n\n// If a C string is compared with an STL string object, we know it's meant\n// to point to a NUL-terminated string, and thus can print it as a string.\n\n#define GTEST_IMPL_FORMAT_C_STRING_AS_STRING_(CharType, OtherStringType) \\\n  template <>                         
                                  \\\n  class FormatForComparison<CharType*, OtherStringType> {               \\\n   public:                                                              \\\n    static ::std::string Format(CharType* value) {                      \\\n      return ::testing::PrintToString(value);                           \\\n    }                                                                   \\\n  }\n\nGTEST_IMPL_FORMAT_C_STRING_AS_STRING_(char, ::std::string);\nGTEST_IMPL_FORMAT_C_STRING_AS_STRING_(const char, ::std::string);\n\n#if GTEST_HAS_STD_WSTRING\nGTEST_IMPL_FORMAT_C_STRING_AS_STRING_(wchar_t, ::std::wstring);\nGTEST_IMPL_FORMAT_C_STRING_AS_STRING_(const wchar_t, ::std::wstring);\n#endif\n\n#undef GTEST_IMPL_FORMAT_C_STRING_AS_STRING_\n\n// Formats a comparison assertion (e.g. ASSERT_EQ, EXPECT_LT, and etc)\n// operand to be used in a failure message.  The type (but not value)\n// of the other operand may affect the format.  This allows us to\n// print a char* as a raw pointer when it is compared against another\n// char* or void*, and print it as a C string when it is compared\n// against an std::string object, for example.\n//\n// INTERNAL IMPLEMENTATION - DO NOT USE IN A USER PROGRAM.\ntemplate <typename T1, typename T2>\nstd::string FormatForComparisonFailureMessage(\n    const T1& value, const T2& /* other_operand */) {\n  return FormatForComparison<T1, T2>::Format(value);\n}\n\n// UniversalPrinter<T>::Print(value, ostream_ptr) prints the given\n// value to the given ostream.  
The caller must ensure that\n// 'ostream_ptr' is not NULL, or the behavior is undefined.\n//\n// We define UniversalPrinter as a class template (as opposed to a\n// function template), as we need to partially specialize it for\n// reference types, which cannot be done with function templates.\ntemplate <typename T>\nclass UniversalPrinter;\n\ntemplate <typename T>\nvoid UniversalPrint(const T& value, ::std::ostream* os);\n\nenum DefaultPrinterType {\n  kPrintContainer,\n  kPrintPointer,\n  kPrintFunctionPointer,\n  kPrintOther,\n};\ntemplate <DefaultPrinterType type> struct WrapPrinterType {};\n\n// Used to print an STL-style container when the user doesn't define\n// a PrintTo() for it.\ntemplate <typename C>\nvoid DefaultPrintTo(WrapPrinterType<kPrintContainer> /* dummy */,\n                    const C& container, ::std::ostream* os) {\n  const size_t kMaxCount = 32;  // The maximum number of elements to print.\n  *os << '{';\n  size_t count = 0;\n  for (typename C::const_iterator it = container.begin();\n       it != container.end(); ++it, ++count) {\n    if (count > 0) {\n      *os << ',';\n      if (count == kMaxCount) {  // Enough has been printed.\n        *os << \" ...\";\n        break;\n      }\n    }\n    *os << ' ';\n    // We cannot call PrintTo(*it, os) here as PrintTo() doesn't\n    // handle *it being a native array.\n    internal::UniversalPrint(*it, os);\n  }\n\n  if (count > 0) {\n    *os << ' ';\n  }\n  *os << '}';\n}\n\n// Used to print a pointer that is neither a char pointer nor a member\n// pointer, when the user doesn't define PrintTo() for it.  (A member\n// variable pointer or member function pointer doesn't really point to\n// a location in the address space.  Their representation is\n// implementation-defined.  
Therefore they will be printed as raw\n// bytes.)\ntemplate <typename T>\nvoid DefaultPrintTo(WrapPrinterType<kPrintPointer> /* dummy */,\n                    T* p, ::std::ostream* os) {\n  if (p == nullptr) {\n    *os << \"NULL\";\n  } else {\n    // T is not a function type.  We just call << to print p,\n    // relying on ADL to pick up user-defined << for their pointer\n    // types, if any.\n    *os << p;\n  }\n}\ntemplate <typename T>\nvoid DefaultPrintTo(WrapPrinterType<kPrintFunctionPointer> /* dummy */,\n                    T* p, ::std::ostream* os) {\n  if (p == nullptr) {\n    *os << \"NULL\";\n  } else {\n    // T is a function type, so '*os << p' doesn't do what we want\n    // (it just prints p as bool).  We want to print p as a const\n    // void*.\n    *os << reinterpret_cast<const void*>(p);\n  }\n}\n\n// Used to print a non-container, non-pointer value when the user\n// doesn't define PrintTo() for it.\ntemplate <typename T>\nvoid DefaultPrintTo(WrapPrinterType<kPrintOther> /* dummy */,\n                    const T& value, ::std::ostream* os) {\n  ::testing_internal::DefaultPrintNonContainerTo(value, os);\n}\n\n// Prints the given value using the << operator if it has one;\n// otherwise prints the bytes in it.  This is what\n// UniversalPrinter<T>::Print() does when PrintTo() is not specialized\n// or overloaded for type T.\n//\n// A user can override this behavior for a class type Foo by defining\n// an overload of PrintTo() in the namespace where Foo is defined.  We\n// give the user this option as sometimes defining a << operator for\n// Foo is not desirable (e.g. the coding style may prevent doing it,\n// or there is already a << operator but it doesn't do what the user\n// wants).\ntemplate <typename T>\nvoid PrintTo(const T& value, ::std::ostream* os) {\n  // DefaultPrintTo() is overloaded.  
The type of its first argument\n  // determines which version will be picked.\n  //\n  // Note that we check for container types here, prior to we check\n  // for protocol message types in our operator<<.  The rationale is:\n  //\n  // For protocol messages, we want to give people a chance to\n  // override Google Mock's format by defining a PrintTo() or\n  // operator<<.  For STL containers, other formats can be\n  // incompatible with Google Mock's format for the container\n  // elements; therefore we check for container types here to ensure\n  // that our format is used.\n  //\n  // Note that MSVC and clang-cl do allow an implicit conversion from\n  // pointer-to-function to pointer-to-object, but clang-cl warns on it.\n  // So don't use ImplicitlyConvertible if it can be helped since it will\n  // cause this warning, and use a separate overload of DefaultPrintTo for\n  // function pointers so that the `*os << p` in the object pointer overload\n  // doesn't cause that warning either.\n  DefaultPrintTo(\n      WrapPrinterType <\n                  (sizeof(IsContainerTest<T>(0)) == sizeof(IsContainer)) &&\n              !IsRecursiveContainer<T>::value\n          ? kPrintContainer\n          : !std::is_pointer<T>::value\n                ? kPrintOther\n                : std::is_function<typename std::remove_pointer<T>::type>::value\n                      ? kPrintFunctionPointer\n                      : kPrintPointer > (),\n      value, os);\n}\n\n// The following list of PrintTo() overloads tells\n// UniversalPrinter<T>::Print() how to print standard types (built-in\n// types, strings, plain arrays, and pointers).\n\n// Overloads for various char types.\nGTEST_API_ void PrintTo(unsigned char c, ::std::ostream* os);\nGTEST_API_ void PrintTo(signed char c, ::std::ostream* os);\ninline void PrintTo(char c, ::std::ostream* os) {\n  // When printing a plain char, we always treat it as unsigned.  
This\n  // way, the output won't be affected by whether the compiler thinks\n  // char is signed or not.\n  PrintTo(static_cast<unsigned char>(c), os);\n}\n\n// Overloads for other simple built-in types.\ninline void PrintTo(bool x, ::std::ostream* os) {\n  *os << (x ? \"true\" : \"false\");\n}\n\n// Overload for wchar_t type.\n// Prints a wchar_t as a symbol if it is printable or as its internal\n// code otherwise and also as its decimal code (except for L'\\0').\n// The L'\\0' char is printed as \"L'\\\\0'\". The decimal code is printed\n// as signed integer when wchar_t is implemented by the compiler\n// as a signed type and is printed as an unsigned integer when wchar_t\n// is implemented as an unsigned type.\nGTEST_API_ void PrintTo(wchar_t wc, ::std::ostream* os);\n\n// Overloads for C strings.\nGTEST_API_ void PrintTo(const char* s, ::std::ostream* os);\ninline void PrintTo(char* s, ::std::ostream* os) {\n  PrintTo(ImplicitCast_<const char*>(s), os);\n}\n\n// signed/unsigned char is often used for representing binary data, so\n// we print pointers to it as void* to be safe.\ninline void PrintTo(const signed char* s, ::std::ostream* os) {\n  PrintTo(ImplicitCast_<const void*>(s), os);\n}\ninline void PrintTo(signed char* s, ::std::ostream* os) {\n  PrintTo(ImplicitCast_<const void*>(s), os);\n}\ninline void PrintTo(const unsigned char* s, ::std::ostream* os) {\n  PrintTo(ImplicitCast_<const void*>(s), os);\n}\ninline void PrintTo(unsigned char* s, ::std::ostream* os) {\n  PrintTo(ImplicitCast_<const void*>(s), os);\n}\n\n// MSVC can be configured to define wchar_t as a typedef of unsigned\n// short.  It defines _NATIVE_WCHAR_T_DEFINED when wchar_t is a native\n// type.  
When wchar_t is a typedef, defining an overload for const\n// wchar_t* would cause unsigned short* be printed as a wide string,\n// possibly causing invalid memory accesses.\n#if !defined(_MSC_VER) || defined(_NATIVE_WCHAR_T_DEFINED)\n// Overloads for wide C strings\nGTEST_API_ void PrintTo(const wchar_t* s, ::std::ostream* os);\ninline void PrintTo(wchar_t* s, ::std::ostream* os) {\n  PrintTo(ImplicitCast_<const wchar_t*>(s), os);\n}\n#endif\n\n// Overload for C arrays.  Multi-dimensional arrays are printed\n// properly.\n\n// Prints the given number of elements in an array, without printing\n// the curly braces.\ntemplate <typename T>\nvoid PrintRawArrayTo(const T a[], size_t count, ::std::ostream* os) {\n  UniversalPrint(a[0], os);\n  for (size_t i = 1; i != count; i++) {\n    *os << \", \";\n    UniversalPrint(a[i], os);\n  }\n}\n\n// Overloads for ::std::string.\nGTEST_API_ void PrintStringTo(const ::std::string&s, ::std::ostream* os);\ninline void PrintTo(const ::std::string& s, ::std::ostream* os) {\n  PrintStringTo(s, os);\n}\n\n// Overloads for ::std::wstring.\n#if GTEST_HAS_STD_WSTRING\nGTEST_API_ void PrintWideStringTo(const ::std::wstring&s, ::std::ostream* os);\ninline void PrintTo(const ::std::wstring& s, ::std::ostream* os) {\n  PrintWideStringTo(s, os);\n}\n#endif  // GTEST_HAS_STD_WSTRING\n\n#if GTEST_HAS_ABSL\n// Overload for absl::string_view.\ninline void PrintTo(absl::string_view sp, ::std::ostream* os) {\n  PrintTo(::std::string(sp), os);\n}\n#endif  // GTEST_HAS_ABSL\n\ninline void PrintTo(std::nullptr_t, ::std::ostream* os) { *os << \"(nullptr)\"; }\n\ntemplate <typename T>\nvoid PrintTo(std::reference_wrapper<T> ref, ::std::ostream* os) {\n  UniversalPrinter<T&>::Print(ref.get(), os);\n}\n\n// Helper function for printing a tuple.  
T must be instantiated with\n// a tuple type.\ntemplate <typename T>\nvoid PrintTupleTo(const T&, std::integral_constant<size_t, 0>,\n                  ::std::ostream*) {}\n\ntemplate <typename T, size_t I>\nvoid PrintTupleTo(const T& t, std::integral_constant<size_t, I>,\n                  ::std::ostream* os) {\n  PrintTupleTo(t, std::integral_constant<size_t, I - 1>(), os);\n  GTEST_INTENTIONAL_CONST_COND_PUSH_()\n  if (I > 1) {\n    GTEST_INTENTIONAL_CONST_COND_POP_()\n    *os << \", \";\n  }\n  UniversalPrinter<typename std::tuple_element<I - 1, T>::type>::Print(\n      std::get<I - 1>(t), os);\n}\n\ntemplate <typename... Types>\nvoid PrintTo(const ::std::tuple<Types...>& t, ::std::ostream* os) {\n  *os << \"(\";\n  PrintTupleTo(t, std::integral_constant<size_t, sizeof...(Types)>(), os);\n  *os << \")\";\n}\n\n// Overload for std::pair.\ntemplate <typename T1, typename T2>\nvoid PrintTo(const ::std::pair<T1, T2>& value, ::std::ostream* os) {\n  *os << '(';\n  // We cannot use UniversalPrint(value.first, os) here, as T1 may be\n  // a reference type.  
The same for printing value.second.\n  UniversalPrinter<T1>::Print(value.first, os);\n  *os << \", \";\n  UniversalPrinter<T2>::Print(value.second, os);\n  *os << ')';\n}\n\n// Implements printing a non-reference type T by letting the compiler\n// pick the right overload of PrintTo() for T.\ntemplate <typename T>\nclass UniversalPrinter {\n public:\n  // MSVC warns about adding const to a function type, so we want to\n  // disable the warning.\n  GTEST_DISABLE_MSC_WARNINGS_PUSH_(4180)\n\n  // Note: we deliberately don't call this PrintTo(), as that name\n  // conflicts with ::testing::internal::PrintTo in the body of the\n  // function.\n  static void Print(const T& value, ::std::ostream* os) {\n    // By default, ::testing::internal::PrintTo() is used for printing\n    // the value.\n    //\n    // Thanks to Koenig look-up, if T is a class and has its own\n    // PrintTo() function defined in its namespace, that function will\n    // be visible here.  Since it is more specific than the generic ones\n    // in ::testing::internal, it will be picked by the compiler in the\n    // following statement - exactly what we want.\n    PrintTo(value, os);\n  }\n\n  GTEST_DISABLE_MSC_WARNINGS_POP_()\n};\n\n#if GTEST_HAS_ABSL\n\n// Printer for absl::optional\n\ntemplate <typename T>\nclass UniversalPrinter<::absl::optional<T>> {\n public:\n  static void Print(const ::absl::optional<T>& value, ::std::ostream* os) {\n    *os << '(';\n    if (!value) {\n      *os << \"nullopt\";\n    } else {\n      UniversalPrint(*value, os);\n    }\n    *os << ')';\n  }\n};\n\n// Printer for absl::variant\n\ntemplate <typename... 
T>\nclass UniversalPrinter<::absl::variant<T...>> {\n public:\n  static void Print(const ::absl::variant<T...>& value, ::std::ostream* os) {\n    *os << '(';\n    absl::visit(Visitor{os}, value);\n    *os << ')';\n  }\n\n private:\n  struct Visitor {\n    template <typename U>\n    void operator()(const U& u) const {\n      *os << \"'\" << GetTypeName<U>() << \"' with value \";\n      UniversalPrint(u, os);\n    }\n    ::std::ostream* os;\n  };\n};\n\n#endif  // GTEST_HAS_ABSL\n\n// UniversalPrintArray(begin, len, os) prints an array of 'len'\n// elements, starting at address 'begin'.\ntemplate <typename T>\nvoid UniversalPrintArray(const T* begin, size_t len, ::std::ostream* os) {\n  if (len == 0) {\n    *os << \"{}\";\n  } else {\n    *os << \"{ \";\n    const size_t kThreshold = 18;\n    const size_t kChunkSize = 8;\n    // If the array has more than kThreshold elements, we'll have to\n    // omit some details by printing only the first and the last\n    // kChunkSize elements.\n    if (len <= kThreshold) {\n      PrintRawArrayTo(begin, len, os);\n    } else {\n      PrintRawArrayTo(begin, kChunkSize, os);\n      *os << \", ..., \";\n      PrintRawArrayTo(begin + len - kChunkSize, kChunkSize, os);\n    }\n    *os << \" }\";\n  }\n}\n// This overload prints a (const) char array compactly.\nGTEST_API_ void UniversalPrintArray(\n    const char* begin, size_t len, ::std::ostream* os);\n\n// This overload prints a (const) wchar_t array compactly.\nGTEST_API_ void UniversalPrintArray(\n    const wchar_t* begin, size_t len, ::std::ostream* os);\n\n// Implements printing an array type T[N].\ntemplate <typename T, size_t N>\nclass UniversalPrinter<T[N]> {\n public:\n  // Prints the given array, omitting some elements when there are too\n  // many.\n  static void Print(const T (&a)[N], ::std::ostream* os) {\n    UniversalPrintArray(a, N, os);\n  }\n};\n\n// Implements printing a reference type T&.\ntemplate <typename T>\nclass UniversalPrinter<T&> {\n public:\n  // MSVC 
warns about adding const to a function type, so we want to\n  // disable the warning.\n  GTEST_DISABLE_MSC_WARNINGS_PUSH_(4180)\n\n  static void Print(const T& value, ::std::ostream* os) {\n    // Prints the address of the value.  We use reinterpret_cast here\n    // as static_cast doesn't compile when T is a function type.\n    *os << \"@\" << reinterpret_cast<const void*>(&value) << \" \";\n\n    // Then prints the value itself.\n    UniversalPrint(value, os);\n  }\n\n  GTEST_DISABLE_MSC_WARNINGS_POP_()\n};\n\n// Prints a value tersely: for a reference type, the referenced value\n// (but not the address) is printed; for a (const) char pointer, the\n// NUL-terminated string (but not the pointer) is printed.\n\ntemplate <typename T>\nclass UniversalTersePrinter {\n public:\n  static void Print(const T& value, ::std::ostream* os) {\n    UniversalPrint(value, os);\n  }\n};\ntemplate <typename T>\nclass UniversalTersePrinter<T&> {\n public:\n  static void Print(const T& value, ::std::ostream* os) {\n    UniversalPrint(value, os);\n  }\n};\ntemplate <typename T, size_t N>\nclass UniversalTersePrinter<T[N]> {\n public:\n  static void Print(const T (&value)[N], ::std::ostream* os) {\n    UniversalPrinter<T[N]>::Print(value, os);\n  }\n};\ntemplate <>\nclass UniversalTersePrinter<const char*> {\n public:\n  static void Print(const char* str, ::std::ostream* os) {\n    if (str == nullptr) {\n      *os << \"NULL\";\n    } else {\n      UniversalPrint(std::string(str), os);\n    }\n  }\n};\ntemplate <>\nclass UniversalTersePrinter<char*> {\n public:\n  static void Print(char* str, ::std::ostream* os) {\n    UniversalTersePrinter<const char*>::Print(str, os);\n  }\n};\n\n#if GTEST_HAS_STD_WSTRING\ntemplate <>\nclass UniversalTersePrinter<const wchar_t*> {\n public:\n  static void Print(const wchar_t* str, ::std::ostream* os) {\n    if (str == nullptr) {\n      *os << \"NULL\";\n    } else {\n      UniversalPrint(::std::wstring(str), os);\n    }\n  }\n};\n#endif\n\ntemplate 
<>\nclass UniversalTersePrinter<wchar_t*> {\n public:\n  static void Print(wchar_t* str, ::std::ostream* os) {\n    UniversalTersePrinter<const wchar_t*>::Print(str, os);\n  }\n};\n\ntemplate <typename T>\nvoid UniversalTersePrint(const T& value, ::std::ostream* os) {\n  UniversalTersePrinter<T>::Print(value, os);\n}\n\n// Prints a value using the type inferred by the compiler.  The\n// difference between this and UniversalTersePrint() is that for a\n// (const) char pointer, this prints both the pointer and the\n// NUL-terminated string.\ntemplate <typename T>\nvoid UniversalPrint(const T& value, ::std::ostream* os) {\n  // A workarond for the bug in VC++ 7.1 that prevents us from instantiating\n  // UniversalPrinter with T directly.\n  typedef T T1;\n  UniversalPrinter<T1>::Print(value, os);\n}\n\ntypedef ::std::vector< ::std::string> Strings;\n\n  // Tersely prints the first N fields of a tuple to a string vector,\n  // one element for each field.\ntemplate <typename Tuple>\nvoid TersePrintPrefixToStrings(const Tuple&, std::integral_constant<size_t, 0>,\n                               Strings*) {}\ntemplate <typename Tuple, size_t I>\nvoid TersePrintPrefixToStrings(const Tuple& t,\n                               std::integral_constant<size_t, I>,\n                               Strings* strings) {\n  TersePrintPrefixToStrings(t, std::integral_constant<size_t, I - 1>(),\n                            strings);\n  ::std::stringstream ss;\n  UniversalTersePrint(std::get<I - 1>(t), &ss);\n  strings->push_back(ss.str());\n}\n\n// Prints the fields of a tuple tersely to a string vector, one\n// element for each field.  
See the comment before\n// UniversalTersePrint() for how we define \"tersely\".\ntemplate <typename Tuple>\nStrings UniversalTersePrintTupleFieldsToStrings(const Tuple& value) {\n  Strings result;\n  TersePrintPrefixToStrings(\n      value, std::integral_constant<size_t, std::tuple_size<Tuple>::value>(),\n      &result);\n  return result;\n}\n\n}  // namespace internal\n\n#if GTEST_HAS_ABSL\nnamespace internal2 {\ntemplate <typename T>\nvoid TypeWithoutFormatter<T, kConvertibleToStringView>::PrintValue(\n    const T& value, ::std::ostream* os) {\n  internal::PrintTo(absl::string_view(value), os);\n}\n}  // namespace internal2\n#endif\n\ntemplate <typename T>\n::std::string PrintToString(const T& value) {\n  ::std::stringstream ss;\n  internal::UniversalTersePrinter<T>::Print(value, &ss);\n  return ss.str();\n}\n\n}  // namespace testing\n\n// Include any custom printer added by the local installation.\n// We must include this header at the end to make sure it can use the\n// declarations from this file.\n#include \"gtest/internal/custom/gtest-printers.h\"\n\n#endif  // GTEST_INCLUDE_GTEST_GTEST_PRINTERS_H_\n"
  },
  {
    "path": "test/googletest/include/gtest/gtest-spi.h",
    "content": "// Copyright 2007, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n//\n// Utilities for testing Google Test itself and code that uses Google Test\n// (e.g. 
frameworks built on top of Google Test).\n\n// GOOGLETEST_CM0004 DO NOT DELETE\n\n#ifndef GTEST_INCLUDE_GTEST_GTEST_SPI_H_\n#define GTEST_INCLUDE_GTEST_GTEST_SPI_H_\n\n#include \"gtest/gtest.h\"\n\nGTEST_DISABLE_MSC_WARNINGS_PUSH_(4251 \\\n/* class A needs to have dll-interface to be used by clients of class B */)\n\nnamespace testing {\n\n// This helper class can be used to mock out Google Test failure reporting\n// so that we can test Google Test or code that builds on Google Test.\n//\n// An object of this class appends a TestPartResult object to the\n// TestPartResultArray object given in the constructor whenever a Google Test\n// failure is reported. It can either intercept only failures that are\n// generated in the same thread that created this object or it can intercept\n// all generated failures. The scope of this mock object can be controlled with\n// the second argument to the two arguments constructor.\nclass GTEST_API_ ScopedFakeTestPartResultReporter\n    : public TestPartResultReporterInterface {\n public:\n  // The two possible mocking modes of this object.\n  enum InterceptMode {\n    INTERCEPT_ONLY_CURRENT_THREAD,  // Intercepts only thread local failures.\n    INTERCEPT_ALL_THREADS           // Intercepts all failures.\n  };\n\n  // The c'tor sets this object as the test part result reporter used\n  // by Google Test.  The 'result' parameter specifies where to report the\n  // results. This reporter will only catch failures generated in the current\n  // thread. 
DEPRECATED\n  explicit ScopedFakeTestPartResultReporter(TestPartResultArray* result);\n\n  // Same as above, but you can choose the interception scope of this object.\n  ScopedFakeTestPartResultReporter(InterceptMode intercept_mode,\n                                   TestPartResultArray* result);\n\n  // The d'tor restores the previous test part result reporter.\n  ~ScopedFakeTestPartResultReporter() override;\n\n  // Appends the TestPartResult object to the TestPartResultArray\n  // received in the constructor.\n  //\n  // This method is from the TestPartResultReporterInterface\n  // interface.\n  void ReportTestPartResult(const TestPartResult& result) override;\n\n private:\n  void Init();\n\n  const InterceptMode intercept_mode_;\n  TestPartResultReporterInterface* old_reporter_;\n  TestPartResultArray* const result_;\n\n  GTEST_DISALLOW_COPY_AND_ASSIGN_(ScopedFakeTestPartResultReporter);\n};\n\nnamespace internal {\n\n// A helper class for implementing EXPECT_FATAL_FAILURE() and\n// EXPECT_NONFATAL_FAILURE().  Its destructor verifies that the given\n// TestPartResultArray contains exactly one failure that has the given\n// type and contains the given substring.  If that's not the case, a\n// non-fatal failure will be generated.\nclass GTEST_API_ SingleFailureChecker {\n public:\n  // The constructor remembers the arguments.\n  SingleFailureChecker(const TestPartResultArray* results,\n                       TestPartResult::Type type, const std::string& substr);\n  ~SingleFailureChecker();\n private:\n  const TestPartResultArray* const results_;\n  const TestPartResult::Type type_;\n  const std::string substr_;\n\n  GTEST_DISALLOW_COPY_AND_ASSIGN_(SingleFailureChecker);\n};\n\n}  // namespace internal\n\n}  // namespace testing\n\nGTEST_DISABLE_MSC_WARNINGS_POP_()  //  4251\n\n// A set of macros for testing Google Test assertions or code that's expected\n// to generate Google Test fatal failures.  
It verifies that the given\n// statement will cause exactly one fatal Google Test failure with 'substr'\n// being part of the failure message.\n//\n// There are two different versions of this macro. EXPECT_FATAL_FAILURE only\n// affects and considers failures generated in the current thread and\n// EXPECT_FATAL_FAILURE_ON_ALL_THREADS does the same but for all threads.\n//\n// The verification of the assertion is done correctly even when the statement\n// throws an exception or aborts the current function.\n//\n// Known restrictions:\n//   - 'statement' cannot reference local non-static variables or\n//     non-static members of the current object.\n//   - 'statement' cannot return a value.\n//   - You cannot stream a failure message to this macro.\n//\n// Note that even though the implementations of the following two\n// macros are much alike, we cannot refactor them to use a common\n// helper macro, due to some peculiarity in how the preprocessor\n// works.  The AcceptsMacroThatExpandsToUnprotectedComma test in\n// gtest_unittest.cc will fail to compile if we do that.\n#define EXPECT_FATAL_FAILURE(statement, substr) \\\n  do { \\\n    class GTestExpectFatalFailureHelper {\\\n     public:\\\n      static void Execute() { statement; }\\\n    };\\\n    ::testing::TestPartResultArray gtest_failures;\\\n    ::testing::internal::SingleFailureChecker gtest_checker(\\\n        &gtest_failures, ::testing::TestPartResult::kFatalFailure, (substr));\\\n    {\\\n      ::testing::ScopedFakeTestPartResultReporter gtest_reporter(\\\n          ::testing::ScopedFakeTestPartResultReporter:: \\\n          INTERCEPT_ONLY_CURRENT_THREAD, &gtest_failures);\\\n      GTestExpectFatalFailureHelper::Execute();\\\n    }\\\n  } while (::testing::internal::AlwaysFalse())\n\n#define EXPECT_FATAL_FAILURE_ON_ALL_THREADS(statement, substr) \\\n  do { \\\n    class GTestExpectFatalFailureHelper {\\\n     public:\\\n      static void Execute() { statement; }\\\n    };\\\n    
::testing::TestPartResultArray gtest_failures;\\\n    ::testing::internal::SingleFailureChecker gtest_checker(\\\n        &gtest_failures, ::testing::TestPartResult::kFatalFailure, (substr));\\\n    {\\\n      ::testing::ScopedFakeTestPartResultReporter gtest_reporter(\\\n          ::testing::ScopedFakeTestPartResultReporter:: \\\n          INTERCEPT_ALL_THREADS, &gtest_failures);\\\n      GTestExpectFatalFailureHelper::Execute();\\\n    }\\\n  } while (::testing::internal::AlwaysFalse())\n\n// A macro for testing Google Test assertions or code that's expected to\n// generate Google Test non-fatal failures.  It asserts that the given\n// statement will cause exactly one non-fatal Google Test failure with 'substr'\n// being part of the failure message.\n//\n// There are two different versions of this macro. EXPECT_NONFATAL_FAILURE only\n// affects and considers failures generated in the current thread and\n// EXPECT_NONFATAL_FAILURE_ON_ALL_THREADS does the same but for all threads.\n//\n// 'statement' is allowed to reference local variables and members of\n// the current object.\n//\n// The verification of the assertion is done correctly even when the statement\n// throws an exception or aborts the current function.\n//\n// Known restrictions:\n//   - You cannot stream a failure message to this macro.\n//\n// Note that even though the implementations of the following two\n// macros are much alike, we cannot refactor them to use a common\n// helper macro, due to some peculiarity in how the preprocessor\n// works.  If we do that, the code won't compile when the user gives\n// EXPECT_NONFATAL_FAILURE() a statement that contains a macro that\n// expands to code containing an unprotected comma.  
The\n// AcceptsMacroThatExpandsToUnprotectedComma test in gtest_unittest.cc\n// catches that.\n//\n// For the same reason, we have to write\n//   if (::testing::internal::AlwaysTrue()) { statement; }\n// instead of\n//   GTEST_SUPPRESS_UNREACHABLE_CODE_WARNING_BELOW_(statement)\n// to avoid an MSVC warning on unreachable code.\n#define EXPECT_NONFATAL_FAILURE(statement, substr) \\\n  do {\\\n    ::testing::TestPartResultArray gtest_failures;\\\n    ::testing::internal::SingleFailureChecker gtest_checker(\\\n        &gtest_failures, ::testing::TestPartResult::kNonFatalFailure, \\\n        (substr));\\\n    {\\\n      ::testing::ScopedFakeTestPartResultReporter gtest_reporter(\\\n          ::testing::ScopedFakeTestPartResultReporter:: \\\n          INTERCEPT_ONLY_CURRENT_THREAD, &gtest_failures);\\\n      if (::testing::internal::AlwaysTrue()) { statement; }\\\n    }\\\n  } while (::testing::internal::AlwaysFalse())\n\n#define EXPECT_NONFATAL_FAILURE_ON_ALL_THREADS(statement, substr) \\\n  do {\\\n    ::testing::TestPartResultArray gtest_failures;\\\n    ::testing::internal::SingleFailureChecker gtest_checker(\\\n        &gtest_failures, ::testing::TestPartResult::kNonFatalFailure, \\\n        (substr));\\\n    {\\\n      ::testing::ScopedFakeTestPartResultReporter gtest_reporter(\\\n          ::testing::ScopedFakeTestPartResultReporter::INTERCEPT_ALL_THREADS, \\\n          &gtest_failures);\\\n      if (::testing::internal::AlwaysTrue()) { statement; }\\\n    }\\\n  } while (::testing::internal::AlwaysFalse())\n\n#endif  // GTEST_INCLUDE_GTEST_GTEST_SPI_H_\n"
  },
  {
    "path": "test/googletest/include/gtest/gtest-test-part.h",
    "content": "// Copyright 2008, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n//\n// GOOGLETEST_CM0001 DO NOT DELETE\n\n#ifndef GTEST_INCLUDE_GTEST_GTEST_TEST_PART_H_\n#define GTEST_INCLUDE_GTEST_GTEST_TEST_PART_H_\n\n#include <iosfwd>\n#include <vector>\n#include \"gtest/internal/gtest-internal.h\"\n#include \"gtest/internal/gtest-string.h\"\n\nGTEST_DISABLE_MSC_WARNINGS_PUSH_(4251 \\\n/* class A needs to have dll-interface to be used by clients of class B */)\n\nnamespace testing {\n\n// A copyable object representing the result of a test part (i.e. an\n// assertion or an explicit FAIL(), ADD_FAILURE(), or SUCCESS()).\n//\n// Don't inherit from TestPartResult as its destructor is not virtual.\nclass GTEST_API_ TestPartResult {\n public:\n  // The possible outcomes of a test part (i.e. an assertion or an\n  // explicit SUCCEED(), FAIL(), or ADD_FAILURE()).\n  enum Type {\n    kSuccess,          // Succeeded.\n    kNonFatalFailure,  // Failed but the test can continue.\n    kFatalFailure,     // Failed and the test should be terminated.\n    kSkip              // Skipped.\n  };\n\n  // C'tor.  TestPartResult does NOT have a default constructor.\n  // Always use this constructor (with parameters) to create a\n  // TestPartResult object.\n  TestPartResult(Type a_type, const char* a_file_name, int a_line_number,\n                 const char* a_message)\n      : type_(a_type),\n        file_name_(a_file_name == nullptr ? 
\"\" : a_file_name),\n        line_number_(a_line_number),\n        summary_(ExtractSummary(a_message)),\n        message_(a_message) {}\n\n  // Gets the outcome of the test part.\n  Type type() const { return type_; }\n\n  // Gets the name of the source file where the test part took place, or\n  // NULL if it's unknown.\n  const char* file_name() const {\n    return file_name_.empty() ? nullptr : file_name_.c_str();\n  }\n\n  // Gets the line in the source file where the test part took place,\n  // or -1 if it's unknown.\n  int line_number() const { return line_number_; }\n\n  // Gets the summary of the failure message.\n  const char* summary() const { return summary_.c_str(); }\n\n  // Gets the message associated with the test part.\n  const char* message() const { return message_.c_str(); }\n\n  // Returns true if and only if the test part was skipped.\n  bool skipped() const { return type_ == kSkip; }\n\n  // Returns true if and only if the test part passed.\n  bool passed() const { return type_ == kSuccess; }\n\n  // Returns true if and only if the test part non-fatally failed.\n  bool nonfatally_failed() const { return type_ == kNonFatalFailure; }\n\n  // Returns true if and only if the test part fatally failed.\n  bool fatally_failed() const { return type_ == kFatalFailure; }\n\n  // Returns true if and only if the test part failed.\n  bool failed() const { return fatally_failed() || nonfatally_failed(); }\n\n private:\n  Type type_;\n\n  // Gets the summary of the failure message by omitting the stack\n  // trace in it.\n  static std::string ExtractSummary(const char* message);\n\n  // The name of the source file where the test part took place, or\n  // \"\" if the source file is unknown.\n  std::string file_name_;\n  // The line in the source file where the test part took place, or -1\n  // if the line number is unknown.\n  int line_number_;\n  std::string summary_;  // The test failure summary.\n  std::string message_;  // The test failure 
message.\n};\n\n// Prints a TestPartResult object.\nstd::ostream& operator<<(std::ostream& os, const TestPartResult& result);\n\n// An array of TestPartResult objects.\n//\n// Don't inherit from TestPartResultArray as its destructor is not\n// virtual.\nclass GTEST_API_ TestPartResultArray {\n public:\n  TestPartResultArray() {}\n\n  // Appends the given TestPartResult to the array.\n  void Append(const TestPartResult& result);\n\n  // Returns the TestPartResult at the given index (0-based).\n  const TestPartResult& GetTestPartResult(int index) const;\n\n  // Returns the number of TestPartResult objects in the array.\n  int size() const;\n\n private:\n  std::vector<TestPartResult> array_;\n\n  GTEST_DISALLOW_COPY_AND_ASSIGN_(TestPartResultArray);\n};\n\n// This interface knows how to report a test part result.\nclass GTEST_API_ TestPartResultReporterInterface {\n public:\n  virtual ~TestPartResultReporterInterface() {}\n\n  virtual void ReportTestPartResult(const TestPartResult& result) = 0;\n};\n\nnamespace internal {\n\n// This helper class is used by {ASSERT|EXPECT}_NO_FATAL_FAILURE to check if a\n// statement generates new fatal failures. To do so it registers itself as the\n// current test part result reporter. 
Besides checking if fatal failures were\n// reported, it only delegates the reporting to the former result reporter.\n// The original result reporter is restored in the destructor.\n// INTERNAL IMPLEMENTATION - DO NOT USE IN A USER PROGRAM.\nclass GTEST_API_ HasNewFatalFailureHelper\n    : public TestPartResultReporterInterface {\n public:\n  HasNewFatalFailureHelper();\n  ~HasNewFatalFailureHelper() override;\n  void ReportTestPartResult(const TestPartResult& result) override;\n  bool has_new_fatal_failure() const { return has_new_fatal_failure_; }\n private:\n  bool has_new_fatal_failure_;\n  TestPartResultReporterInterface* original_reporter_;\n\n  GTEST_DISALLOW_COPY_AND_ASSIGN_(HasNewFatalFailureHelper);\n};\n\n}  // namespace internal\n\n}  // namespace testing\n\nGTEST_DISABLE_MSC_WARNINGS_POP_()  //  4251\n\n#endif  // GTEST_INCLUDE_GTEST_GTEST_TEST_PART_H_\n"
  },
  {
    "path": "test/googletest/include/gtest/gtest-typed-test.h",
    "content": "// Copyright 2008 Google Inc.\n// All Rights Reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n// GOOGLETEST_CM0001 DO NOT DELETE\n\n#ifndef GTEST_INCLUDE_GTEST_GTEST_TYPED_TEST_H_\n#define GTEST_INCLUDE_GTEST_GTEST_TYPED_TEST_H_\n\n// This header implements typed tests and type-parameterized tests.\n\n// Typed (aka type-driven) tests repeat the same test for types in a\n// list.  You must know which types you want to test with when writing\n// typed tests. 
Here's how you do it:\n\n#if 0\n\n// First, define a fixture class template.  It should be parameterized\n// by a type.  Remember to derive it from testing::Test.\ntemplate <typename T>\nclass FooTest : public testing::Test {\n public:\n  ...\n  typedef std::list<T> List;\n  static T shared_;\n  T value_;\n};\n\n// Next, associate a list of types with the test suite, which will be\n// repeated for each type in the list.  The typedef is necessary for\n// the macro to parse correctly.\ntypedef testing::Types<char, int, unsigned int> MyTypes;\nTYPED_TEST_SUITE(FooTest, MyTypes);\n\n// If the type list contains only one type, you can write that type\n// directly without Types<...>:\n//   TYPED_TEST_SUITE(FooTest, int);\n\n// Then, use TYPED_TEST() instead of TEST_F() to define as many typed\n// tests for this test suite as you want.\nTYPED_TEST(FooTest, DoesBlah) {\n  // Inside a test, refer to the special name TypeParam to get the type\n  // parameter.  Since we are inside a derived class template, C++ requires\n  // us to visit the members of FooTest via 'this'.\n  TypeParam n = this->value_;\n\n  // To visit static members of the fixture, add the TestFixture::\n  // prefix.\n  n += TestFixture::shared_;\n\n  // To refer to typedefs in the fixture, add the \"typename\n  // TestFixture::\" prefix.\n  typename TestFixture::List values;\n  values.push_back(n);\n  ...\n}\n\nTYPED_TEST(FooTest, HasPropertyA) { ... }\n\n// TYPED_TEST_SUITE takes an optional third argument which allows to specify a\n// class that generates custom test name suffixes based on the type. This should\n// be a class which has a static template function GetName(int index) returning\n// a string for each type. The provided integer index equals the index of the\n// type in the provided type list. 
In many cases the index can be ignored.\n//\n// For example:\n//   class MyTypeNames {\n//    public:\n//     template <typename T>\n//     static std::string GetName(int) {\n//       if (std::is_same<T, char>()) return \"char\";\n//       if (std::is_same<T, int>()) return \"int\";\n//       if (std::is_same<T, unsigned int>()) return \"unsignedInt\";\n//     }\n//   };\n//   TYPED_TEST_SUITE(FooTest, MyTypes, MyTypeNames);\n\n#endif  // 0\n\n// Type-parameterized tests are abstract test patterns parameterized\n// by a type.  Compared with typed tests, type-parameterized tests\n// allow you to define the test pattern without knowing what the type\n// parameters are.  The defined pattern can be instantiated with\n// different types any number of times, in any number of translation\n// units.\n//\n// If you are designing an interface or concept, you can define a\n// suite of type-parameterized tests to verify properties that any\n// valid implementation of the interface/concept should have.  Then,\n// each implementation can easily instantiate the test suite to verify\n// that it conforms to the requirements, without having to write\n// similar tests repeatedly.  Here's an example:\n\n#if 0\n\n// First, define a fixture class template.  It should be parameterized\n// by a type.  Remember to derive it from testing::Test.\ntemplate <typename T>\nclass FooTest : public testing::Test {\n  ...\n};\n\n// Next, declare that you will define a type-parameterized test suite\n// (the _P suffix is for \"parameterized\" or \"pattern\", whichever you\n// prefer):\nTYPED_TEST_SUITE_P(FooTest);\n\n// Then, use TYPED_TEST_P() to define as many type-parameterized tests\n// for this type-parameterized test suite as you want.\nTYPED_TEST_P(FooTest, DoesBlah) {\n  // Inside a test, refer to TypeParam to get the type parameter.\n  TypeParam n = 0;\n  ...\n}\n\nTYPED_TEST_P(FooTest, HasPropertyA) { ... 
}\n\n// Now the tricky part: you need to register all test patterns before\n// you can instantiate them.  The first argument of the macro is the\n// test suite name; the rest are the names of the tests in this test\n// case.\nREGISTER_TYPED_TEST_SUITE_P(FooTest,\n                            DoesBlah, HasPropertyA);\n\n// Finally, you are free to instantiate the pattern with the types you\n// want.  If you put the above code in a header file, you can #include\n// it in multiple C++ source files and instantiate it multiple times.\n//\n// To distinguish different instances of the pattern, the first\n// argument to the INSTANTIATE_* macro is a prefix that will be added\n// to the actual test suite name.  Remember to pick unique prefixes for\n// different instances.\ntypedef testing::Types<char, int, unsigned int> MyTypes;\nINSTANTIATE_TYPED_TEST_SUITE_P(My, FooTest, MyTypes);\n\n// If the type list contains only one type, you can write that type\n// directly without Types<...>:\n//   INSTANTIATE_TYPED_TEST_SUITE_P(My, FooTest, int);\n//\n// Similar to the optional argument of TYPED_TEST_SUITE above,\n// INSTANTIATE_TEST_SUITE_P takes an optional fourth argument which allows to\n// generate custom names.\n//   INSTANTIATE_TYPED_TEST_SUITE_P(My, FooTest, MyTypes, MyTypeNames);\n\n#endif  // 0\n\n#include \"gtest/internal/gtest-internal.h\"\n#include \"gtest/internal/gtest-port.h\"\n#include \"gtest/internal/gtest-type-util.h\"\n\n// Implements typed tests.\n\n#if GTEST_HAS_TYPED_TEST\n\n// INTERNAL IMPLEMENTATION - DO NOT USE IN USER CODE.\n//\n// Expands to the name of the typedef for the type parameters of the\n// given test suite.\n#define GTEST_TYPE_PARAMS_(TestSuiteName) gtest_type_params_##TestSuiteName##_\n\n// Expands to the name of the typedef for the NameGenerator, responsible for\n// creating the suffixes of the name.\n#define GTEST_NAME_GENERATOR_(TestSuiteName) \\\n  gtest_type_params_##TestSuiteName##_NameGenerator\n\n#define TYPED_TEST_SUITE(CaseName, 
Types, ...)                          \\\n  typedef ::testing::internal::GenerateTypeList<Types>::type            \\\n      GTEST_TYPE_PARAMS_(CaseName);                                     \\\n  typedef ::testing::internal::NameGeneratorSelector<__VA_ARGS__>::type \\\n      GTEST_NAME_GENERATOR_(CaseName)\n\n#define TYPED_TEST(CaseName, TestName)                                        \\\n  static_assert(sizeof(GTEST_STRINGIFY_(TestName)) > 1,                       \\\n                \"test-name must not be empty\");                               \\\n  template <typename gtest_TypeParam_>                                        \\\n  class GTEST_TEST_CLASS_NAME_(CaseName, TestName)                            \\\n      : public CaseName<gtest_TypeParam_> {                                   \\\n   private:                                                                   \\\n    typedef CaseName<gtest_TypeParam_> TestFixture;                           \\\n    typedef gtest_TypeParam_ TypeParam;                                       \\\n    void TestBody() override;                                                 \\\n  };                                                                          \\\n  static bool gtest_##CaseName##_##TestName##_registered_                     \\\n      GTEST_ATTRIBUTE_UNUSED_ = ::testing::internal::TypeParameterizedTest<   \\\n          CaseName,                                                           \\\n          ::testing::internal::TemplateSel<GTEST_TEST_CLASS_NAME_(CaseName,   \\\n                                                                  TestName)>, \\\n          GTEST_TYPE_PARAMS_(                                                 \\\n              CaseName)>::Register(\"\",                                        \\\n                                   ::testing::internal::CodeLocation(         \\\n                                       __FILE__, __LINE__),                   \\\n                                   
GTEST_STRINGIFY_(CaseName),                \\\n                                   GTEST_STRINGIFY_(TestName), 0,             \\\n                                   ::testing::internal::GenerateNames<        \\\n                                       GTEST_NAME_GENERATOR_(CaseName),       \\\n                                       GTEST_TYPE_PARAMS_(CaseName)>());      \\\n  template <typename gtest_TypeParam_>                                        \\\n  void GTEST_TEST_CLASS_NAME_(CaseName,                                       \\\n                              TestName)<gtest_TypeParam_>::TestBody()\n\n// Legacy API is deprecated but still available\n#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_\n#define TYPED_TEST_CASE                                                \\\n  static_assert(::testing::internal::TypedTestCaseIsDeprecated(), \"\"); \\\n  TYPED_TEST_SUITE\n#endif  // GTEST_REMOVE_LEGACY_TEST_CASEAPI_\n\n#endif  // GTEST_HAS_TYPED_TEST\n\n// Implements type-parameterized tests.\n\n#if GTEST_HAS_TYPED_TEST_P\n\n// INTERNAL IMPLEMENTATION - DO NOT USE IN USER CODE.\n//\n// Expands to the namespace name that the type-parameterized tests for\n// the given type-parameterized test suite are defined in.  
The exact\n// name of the namespace is subject to change without notice.\n#define GTEST_SUITE_NAMESPACE_(TestSuiteName) gtest_suite_##TestSuiteName##_\n\n// INTERNAL IMPLEMENTATION - DO NOT USE IN USER CODE.\n//\n// Expands to the name of the variable used to remember the names of\n// the defined tests in the given test suite.\n#define GTEST_TYPED_TEST_SUITE_P_STATE_(TestSuiteName) \\\n  gtest_typed_test_suite_p_state_##TestSuiteName##_\n\n// INTERNAL IMPLEMENTATION - DO NOT USE IN USER CODE DIRECTLY.\n//\n// Expands to the name of the variable used to remember the names of\n// the registered tests in the given test suite.\n#define GTEST_REGISTERED_TEST_NAMES_(TestSuiteName) \\\n  gtest_registered_test_names_##TestSuiteName##_\n\n// The variables defined in the type-parameterized test macros are\n// static as typically these macros are used in a .h file that can be\n// #included in multiple translation units linked together.\n#define TYPED_TEST_SUITE_P(SuiteName)              \\\n  static ::testing::internal::TypedTestSuitePState \\\n      GTEST_TYPED_TEST_SUITE_P_STATE_(SuiteName)\n\n// Legacy API is deprecated but still available\n#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_\n#define TYPED_TEST_CASE_P                                                 \\\n  static_assert(::testing::internal::TypedTestCase_P_IsDeprecated(), \"\"); \\\n  TYPED_TEST_SUITE_P\n#endif  // GTEST_REMOVE_LEGACY_TEST_CASEAPI_\n\n#define TYPED_TEST_P(SuiteName, TestName)                             \\\n  namespace GTEST_SUITE_NAMESPACE_(SuiteName) {                       \\\n    template <typename gtest_TypeParam_>                              \\\n    class TestName : public SuiteName<gtest_TypeParam_> {             \\\n     private:                                                         \\\n      typedef SuiteName<gtest_TypeParam_> TestFixture;                \\\n      typedef gtest_TypeParam_ TypeParam;                             \\\n      void TestBody() override;                             
          \\\n    };                                                                \\\n    static bool gtest_##TestName##_defined_ GTEST_ATTRIBUTE_UNUSED_ = \\\n        GTEST_TYPED_TEST_SUITE_P_STATE_(SuiteName).AddTestName(       \\\n            __FILE__, __LINE__, GTEST_STRINGIFY_(SuiteName),          \\\n            GTEST_STRINGIFY_(TestName));                              \\\n  }                                                                   \\\n  template <typename gtest_TypeParam_>                                \\\n  void GTEST_SUITE_NAMESPACE_(                                        \\\n      SuiteName)::TestName<gtest_TypeParam_>::TestBody()\n\n// Note: this won't work correctly if the trailing arguments are macros.\n#define REGISTER_TYPED_TEST_SUITE_P(SuiteName, ...)                         \\\n  namespace GTEST_SUITE_NAMESPACE_(SuiteName) {                             \\\n    typedef ::testing::internal::Templates<__VA_ARGS__> gtest_AllTests_;    \\\n  }                                                                         \\\n  static const char* const GTEST_REGISTERED_TEST_NAMES_(                    \\\n      SuiteName) GTEST_ATTRIBUTE_UNUSED_ =                                  \\\n      GTEST_TYPED_TEST_SUITE_P_STATE_(SuiteName).VerifyRegisteredTestNames( \\\n          __FILE__, __LINE__, #__VA_ARGS__)\n\n// Legacy API is deprecated but still available\n#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_\n#define REGISTER_TYPED_TEST_CASE_P                                           \\\n  static_assert(::testing::internal::RegisterTypedTestCase_P_IsDeprecated(), \\\n                \"\");                                                         \\\n  REGISTER_TYPED_TEST_SUITE_P\n#endif  // GTEST_REMOVE_LEGACY_TEST_CASEAPI_\n\n#define INSTANTIATE_TYPED_TEST_SUITE_P(Prefix, SuiteName, Types, ...)       
\\\n  static_assert(sizeof(GTEST_STRINGIFY_(Prefix)) > 1,                       \\\n                \"test-suit-prefix must not be empty\");                      \\\n  static bool gtest_##Prefix##_##SuiteName GTEST_ATTRIBUTE_UNUSED_ =        \\\n      ::testing::internal::TypeParameterizedTestSuite<                      \\\n          SuiteName, GTEST_SUITE_NAMESPACE_(SuiteName)::gtest_AllTests_,    \\\n          ::testing::internal::GenerateTypeList<Types>::type>::             \\\n          Register(GTEST_STRINGIFY_(Prefix),                                \\\n                   ::testing::internal::CodeLocation(__FILE__, __LINE__),   \\\n                   &GTEST_TYPED_TEST_SUITE_P_STATE_(SuiteName),             \\\n                   GTEST_STRINGIFY_(SuiteName),                             \\\n                   GTEST_REGISTERED_TEST_NAMES_(SuiteName),                 \\\n                   ::testing::internal::GenerateNames<                      \\\n                       ::testing::internal::NameGeneratorSelector<          \\\n                           __VA_ARGS__>::type,                              \\\n                       ::testing::internal::GenerateTypeList<Types>::type>())\n\n// Legacy API is deprecated but still available\n#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_\n#define INSTANTIATE_TYPED_TEST_CASE_P                                      \\\n  static_assert(                                                           \\\n      ::testing::internal::InstantiateTypedTestCase_P_IsDeprecated(), \"\"); \\\n  INSTANTIATE_TYPED_TEST_SUITE_P\n#endif  // GTEST_REMOVE_LEGACY_TEST_CASEAPI_\n\n#endif  // GTEST_HAS_TYPED_TEST_P\n\n#endif  // GTEST_INCLUDE_GTEST_GTEST_TYPED_TEST_H_\n"
  },
  {
    "path": "test/googletest/include/gtest/gtest.h",
    "content": "// Copyright 2005, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n//\n// The Google C++ Testing and Mocking Framework (Google Test)\n//\n// This header file defines the public API for Google Test.  
It should be\n// included by any test program that uses Google Test.\n//\n// IMPORTANT NOTE: Due to limitation of the C++ language, we have to\n// leave some internal implementation details in this header file.\n// They are clearly marked by comments like this:\n//\n//   // INTERNAL IMPLEMENTATION - DO NOT USE IN A USER PROGRAM.\n//\n// Such code is NOT meant to be used by a user directly, and is subject\n// to CHANGE WITHOUT NOTICE.  Therefore DO NOT DEPEND ON IT in a user\n// program!\n//\n// Acknowledgment: Google Test borrowed the idea of automatic test\n// registration from Barthelemy Dagenais' (barthelemy@prologique.com)\n// easyUnit framework.\n\n// GOOGLETEST_CM0001 DO NOT DELETE\n\n#ifndef GTEST_INCLUDE_GTEST_GTEST_H_\n#define GTEST_INCLUDE_GTEST_GTEST_H_\n\n#include <cstddef>\n#include <limits>\n#include <memory>\n#include <ostream>\n#include <type_traits>\n#include <vector>\n\n#include \"gtest/internal/gtest-internal.h\"\n#include \"gtest/internal/gtest-string.h\"\n#include \"gtest/gtest-death-test.h\"\n#include \"gtest/gtest-matchers.h\"\n#include \"gtest/gtest-message.h\"\n#include \"gtest/gtest-param-test.h\"\n#include \"gtest/gtest-printers.h\"\n#include \"gtest/gtest_prod.h\"\n#include \"gtest/gtest-test-part.h\"\n#include \"gtest/gtest-typed-test.h\"\n\nGTEST_DISABLE_MSC_WARNINGS_PUSH_(4251 \\\n/* class A needs to have dll-interface to be used by clients of class B */)\n\nnamespace testing {\n\n// Silence C4100 (unreferenced formal parameter) and 4805\n// unsafe mix of type 'const int' and type 'const bool'\n#ifdef _MSC_VER\n# pragma warning(push)\n# pragma warning(disable:4805)\n# pragma warning(disable:4100)\n#endif\n\n\n// Declares the flags.\n\n// This flag temporary enables the disabled tests.\nGTEST_DECLARE_bool_(also_run_disabled_tests);\n\n// This flag brings the debugger on an assertion failure.\nGTEST_DECLARE_bool_(break_on_failure);\n\n// This flag controls whether Google Test catches all test-thrown exceptions\n// and logs them as 
failures.\nGTEST_DECLARE_bool_(catch_exceptions);\n\n// This flag enables using colors in terminal output. Available values are\n// \"yes\" to enable colors, \"no\" (disable colors), or \"auto\" (the default)\n// to let Google Test decide.\nGTEST_DECLARE_string_(color);\n\n// This flag sets up the filter to select by name using a glob pattern\n// the tests to run. If the filter is not given all tests are executed.\nGTEST_DECLARE_string_(filter);\n\n// This flag controls whether Google Test installs a signal handler that dumps\n// debugging information when fatal signals are raised.\nGTEST_DECLARE_bool_(install_failure_signal_handler);\n\n// This flag causes the Google Test to list tests. None of the tests listed\n// are actually run if the flag is provided.\nGTEST_DECLARE_bool_(list_tests);\n\n// This flag controls whether Google Test emits a detailed XML report to a file\n// in addition to its normal textual output.\nGTEST_DECLARE_string_(output);\n\n// This flags control whether Google Test prints the elapsed time for each\n// test.\nGTEST_DECLARE_bool_(print_time);\n\n// This flags control whether Google Test prints UTF8 characters as text.\nGTEST_DECLARE_bool_(print_utf8);\n\n// This flag specifies the random number seed.\nGTEST_DECLARE_int32_(random_seed);\n\n// This flag sets how many times the tests are repeated. The default value\n// is 1. 
If the value is -1 the tests are repeating forever.\nGTEST_DECLARE_int32_(repeat);\n\n// This flag controls whether Google Test includes Google Test internal\n// stack frames in failure stack traces.\nGTEST_DECLARE_bool_(show_internal_stack_frames);\n\n// When this flag is specified, tests' order is randomized on every iteration.\nGTEST_DECLARE_bool_(shuffle);\n\n// This flag specifies the maximum number of stack frames to be\n// printed in a failure message.\nGTEST_DECLARE_int32_(stack_trace_depth);\n\n// When this flag is specified, a failed assertion will throw an\n// exception if exceptions are enabled, or exit the program with a\n// non-zero code otherwise. For use with an external test framework.\nGTEST_DECLARE_bool_(throw_on_failure);\n\n// When this flag is set with a \"host:port\" string, on supported\n// platforms test results are streamed to the specified port on\n// the specified host machine.\nGTEST_DECLARE_string_(stream_result_to);\n\n#if GTEST_USE_OWN_FLAGFILE_FLAG_\nGTEST_DECLARE_string_(flagfile);\n#endif  // GTEST_USE_OWN_FLAGFILE_FLAG_\n\n// The upper limit for valid stack trace depths.\nconst int kMaxStackTraceDepth = 100;\n\nnamespace internal {\n\nclass AssertHelper;\nclass DefaultGlobalTestPartResultReporter;\nclass ExecDeathTest;\nclass NoExecDeathTest;\nclass FinalSuccessChecker;\nclass GTestFlagSaver;\nclass StreamingListenerTest;\nclass TestResultAccessor;\nclass TestEventListenersAccessor;\nclass TestEventRepeater;\nclass UnitTestRecordPropertyTestHelper;\nclass WindowsDeathTest;\nclass FuchsiaDeathTest;\nclass UnitTestImpl* GetUnitTestImpl();\nvoid ReportFailureInUnknownLocation(TestPartResult::Type result_type,\n                                    const std::string& message);\n\n}  // namespace internal\n\n// The friend relationship of some of these classes is cyclic.\n// If we don't forward declare them the compiler might confuse the classes\n// in friendship clauses with same named classes on the scope.\nclass Test;\nclass 
TestSuite;\n\n// Old API is still available but deprecated\n#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_\nusing TestCase = TestSuite;\n#endif\nclass TestInfo;\nclass UnitTest;\n\n// A class for indicating whether an assertion was successful.  When\n// the assertion wasn't successful, the AssertionResult object\n// remembers a non-empty message that describes how it failed.\n//\n// To create an instance of this class, use one of the factory functions\n// (AssertionSuccess() and AssertionFailure()).\n//\n// This class is useful for two purposes:\n//   1. Defining predicate functions to be used with Boolean test assertions\n//      EXPECT_TRUE/EXPECT_FALSE and their ASSERT_ counterparts\n//   2. Defining predicate-format functions to be\n//      used with predicate assertions (ASSERT_PRED_FORMAT*, etc).\n//\n// For example, if you define IsEven predicate:\n//\n//   testing::AssertionResult IsEven(int n) {\n//     if ((n % 2) == 0)\n//       return testing::AssertionSuccess();\n//     else\n//       return testing::AssertionFailure() << n << \" is odd\";\n//   }\n//\n// Then the failed expectation EXPECT_TRUE(IsEven(Fib(5)))\n// will print the message\n//\n//   Value of: IsEven(Fib(5))\n//     Actual: false (5 is odd)\n//   Expected: true\n//\n// instead of a more opaque\n//\n//   Value of: IsEven(Fib(5))\n//     Actual: false\n//   Expected: true\n//\n// in case IsEven is a simple Boolean predicate.\n//\n// If you expect your predicate to be reused and want to support informative\n// messages in EXPECT_FALSE and ASSERT_FALSE (negative assertions show up\n// about half as often as positive ones in our tests), supply messages for\n// both success and failure cases:\n//\n//   testing::AssertionResult IsEven(int n) {\n//     if ((n % 2) == 0)\n//       return testing::AssertionSuccess() << n << \" is even\";\n//     else\n//       return testing::AssertionFailure() << n << \" is odd\";\n//   }\n//\n// Then a statement EXPECT_FALSE(IsEven(Fib(6))) will print\n//\n//   Value 
of: IsEven(Fib(6))\n//     Actual: true (8 is even)\n//   Expected: false\n//\n// NB: Predicates that support negative Boolean assertions have reduced\n// performance in positive ones so be careful not to use them in tests\n// that have lots (tens of thousands) of positive Boolean assertions.\n//\n// To use this class with EXPECT_PRED_FORMAT assertions such as:\n//\n//   // Verifies that Foo() returns an even number.\n//   EXPECT_PRED_FORMAT1(IsEven, Foo());\n//\n// you need to define:\n//\n//   testing::AssertionResult IsEven(const char* expr, int n) {\n//     if ((n % 2) == 0)\n//       return testing::AssertionSuccess();\n//     else\n//       return testing::AssertionFailure()\n//         << \"Expected: \" << expr << \" is even\\n  Actual: it's \" << n;\n//   }\n//\n// If Foo() returns 5, you will see the following message:\n//\n//   Expected: Foo() is even\n//     Actual: it's 5\n//\nclass GTEST_API_ AssertionResult {\n public:\n  // Copy constructor.\n  // Used in EXPECT_TRUE/FALSE(assertion_result).\n  AssertionResult(const AssertionResult& other);\n\n#if defined(_MSC_VER) && _MSC_VER < 1910\n  GTEST_DISABLE_MSC_WARNINGS_PUSH_(4800 /* forcing value to bool */)\n#endif\n\n  // Used in the EXPECT_TRUE/FALSE(bool_expression).\n  //\n  // T must be contextually convertible to bool.\n  //\n  // The second parameter prevents this overload from being considered if\n  // the argument is implicitly convertible to AssertionResult. 
In that case\n  // we want AssertionResult's copy constructor to be used.\n  template <typename T>\n  explicit AssertionResult(\n      const T& success,\n      typename std::enable_if<\n          !std::is_convertible<T, AssertionResult>::value>::type*\n      /*enabler*/\n      = nullptr)\n      : success_(success) {}\n\n#if defined(_MSC_VER) && _MSC_VER < 1910\n  GTEST_DISABLE_MSC_WARNINGS_POP_()\n#endif\n\n  // Assignment operator.\n  AssertionResult& operator=(AssertionResult other) {\n    swap(other);\n    return *this;\n  }\n\n  // Returns true if and only if the assertion succeeded.\n  operator bool() const { return success_; }  // NOLINT\n\n  // Returns the assertion's negation. Used with EXPECT/ASSERT_FALSE.\n  AssertionResult operator!() const;\n\n  // Returns the text streamed into this AssertionResult. Test assertions\n  // use it when they fail (i.e., the predicate's outcome doesn't match the\n  // assertion's expectation). When nothing has been streamed into the\n  // object, returns an empty string.\n  const char* message() const {\n    return message_.get() != nullptr ? 
message_->c_str() : \"\";\n  }\n  // Deprecated; please use message() instead.\n  const char* failure_message() const { return message(); }\n\n  // Streams a custom failure message into this object.\n  template <typename T> AssertionResult& operator<<(const T& value) {\n    AppendMessage(Message() << value);\n    return *this;\n  }\n\n  // Allows streaming basic output manipulators such as endl or flush into\n  // this object.\n  AssertionResult& operator<<(\n      ::std::ostream& (*basic_manipulator)(::std::ostream& stream)) {\n    AppendMessage(Message() << basic_manipulator);\n    return *this;\n  }\n\n private:\n  // Appends the contents of message to message_.\n  void AppendMessage(const Message& a_message) {\n    if (message_.get() == nullptr) message_.reset(new ::std::string);\n    message_->append(a_message.GetString().c_str());\n  }\n\n  // Swap the contents of this AssertionResult with other.\n  void swap(AssertionResult& other);\n\n  // Stores result of the assertion predicate.\n  bool success_;\n  // Stores the message describing the condition in case the expectation\n  // construct is not satisfied with the predicate's outcome.\n  // Referenced via a pointer to avoid taking too much stack frame space\n  // with test assertions.\n  std::unique_ptr< ::std::string> message_;\n};\n\n// Makes a successful assertion result.\nGTEST_API_ AssertionResult AssertionSuccess();\n\n// Makes a failed assertion result.\nGTEST_API_ AssertionResult AssertionFailure();\n\n// Makes a failed assertion result with the given failure message.\n// Deprecated; use AssertionFailure() << msg.\nGTEST_API_ AssertionResult AssertionFailure(const Message& msg);\n\n}  // namespace testing\n\n// Includes the auto-generated header that implements a family of generic\n// predicate assertion macros. 
This include comes late because it relies on\n// APIs declared above.\n#include \"gtest/gtest_pred_impl.h\"\n\nnamespace testing {\n\n// The abstract class that all tests inherit from.\n//\n// In Google Test, a unit test program contains one or many TestSuites, and\n// each TestSuite contains one or many Tests.\n//\n// When you define a test using the TEST macro, you don't need to\n// explicitly derive from Test - the TEST macro automatically does\n// this for you.\n//\n// The only time you derive from Test is when defining a test fixture\n// to be used in a TEST_F.  For example:\n//\n//   class FooTest : public testing::Test {\n//    protected:\n//     void SetUp() override { ... }\n//     void TearDown() override { ... }\n//     ...\n//   };\n//\n//   TEST_F(FooTest, Bar) { ... }\n//   TEST_F(FooTest, Baz) { ... }\n//\n// Test is not copyable.\nclass GTEST_API_ Test {\n public:\n  friend class TestInfo;\n\n  // The d'tor is virtual as we intend to inherit from Test.\n  virtual ~Test();\n\n  // Sets up the stuff shared by all tests in this test case.\n  //\n  // Google Test will call Foo::SetUpTestSuite() before running the first\n  // test in test case Foo.  Hence a sub-class can define its own\n  // SetUpTestSuite() method to shadow the one defined in the super\n  // class.\n  // Failures that happen during SetUpTestSuite are logged but otherwise\n  // ignored.\n  static void SetUpTestSuite() {}\n\n  // Tears down the stuff shared by all tests in this test suite.\n  //\n  // Google Test will call Foo::TearDownTestSuite() after running the last\n  // test in test case Foo.  
Hence a sub-class can define its own\n  // TearDownTestSuite() method to shadow the one defined in the super\n  // class.\n  // Failures that happen during TearDownTestSuite are logged but otherwise\n  // ignored.\n  static void TearDownTestSuite() {}\n\n  // Legacy API is deprecated but still available\n#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_\n  static void TearDownTestCase() {}\n  static void SetUpTestCase() {}\n#endif  // GTEST_REMOVE_LEGACY_TEST_CASEAPI_\n\n  // Returns true if and only if the current test has a fatal failure.\n  static bool HasFatalFailure();\n\n  // Returns true if and only if the current test has a non-fatal failure.\n  static bool HasNonfatalFailure();\n\n  // Returns true if and only if the current test was skipped.\n  static bool IsSkipped();\n\n  // Returns true if and only if the current test has a (either fatal or\n  // non-fatal) failure.\n  static bool HasFailure() { return HasFatalFailure() || HasNonfatalFailure(); }\n\n  // Logs a property for the current test, test suite, or for the entire\n  // invocation of the test program when used outside of the context of a\n  // test suite.  Only the last value for a given key is remembered.  These\n  // are public static so they can be called from utility functions that are\n  // not members of the test fixture.  Calls to RecordProperty made during\n  // lifespan of the test (from the moment its constructor starts to the\n  // moment its destructor finishes) will be output in XML as attributes of\n  // the <testcase> element.  Properties recorded from fixture's\n  // SetUpTestSuite or TearDownTestSuite are logged as attributes of the\n  // corresponding <testsuite> element.  
Calls to RecordProperty made in the\n  // global context (before or after invocation of RUN_ALL_TESTS and from\n  // SetUp/TearDown method of Environment objects registered with Google\n  // Test) will be output as attributes of the <testsuites> element.\n  static void RecordProperty(const std::string& key, const std::string& value);\n  static void RecordProperty(const std::string& key, int value);\n\n protected:\n  // Creates a Test object.\n  Test();\n\n  // Sets up the test fixture.\n  virtual void SetUp();\n\n  // Tears down the test fixture.\n  virtual void TearDown();\n\n private:\n  // Returns true if and only if the current test has the same fixture class\n  // as the first test in the current test suite.\n  static bool HasSameFixtureClass();\n\n  // Runs the test after the test fixture has been set up.\n  //\n  // A sub-class must implement this to define the test logic.\n  //\n  // DO NOT OVERRIDE THIS FUNCTION DIRECTLY IN A USER PROGRAM.\n  // Instead, use the TEST or TEST_F macro.\n  virtual void TestBody() = 0;\n\n  // Sets up, executes, and tears down the test.\n  void Run();\n\n  // Deletes self.  We deliberately pick an unusual name for this\n  // internal method to avoid clashing with names used in user TESTs.\n  void DeleteSelf_() { delete this; }\n\n  const std::unique_ptr<GTEST_FLAG_SAVER_> gtest_flag_saver_;\n\n  // Often a user misspells SetUp() as Setup() and spends a long time\n  // wondering why it is never called by Google Test.  
The declaration of\n  // the following method is solely for catching such an error at\n  // compile time:\n  //\n  //   - The return type is deliberately chosen to be not void, so it\n  //   will be a conflict if void Setup() is declared in the user's\n  //   test fixture.\n  //\n  //   - This method is private, so it will be another compiler error\n  //   if the method is called from the user's test fixture.\n  //\n  // DO NOT OVERRIDE THIS FUNCTION.\n  //\n  // If you see an error about overriding the following function or\n  // about it being private, you have mis-spelled SetUp() as Setup().\n  struct Setup_should_be_spelled_SetUp {};\n  virtual Setup_should_be_spelled_SetUp* Setup() { return nullptr; }\n\n  // We disallow copying Tests.\n  GTEST_DISALLOW_COPY_AND_ASSIGN_(Test);\n};\n\ntypedef internal::TimeInMillis TimeInMillis;\n\n// A copyable object representing a user specified test property which can be\n// output as a key/value string pair.\n//\n// Don't inherit from TestProperty as its destructor is not virtual.\nclass TestProperty {\n public:\n  // C'tor.  TestProperty does NOT have a default constructor.\n  // Always use this constructor (with parameters) to create a\n  // TestProperty object.\n  TestProperty(const std::string& a_key, const std::string& a_value) :\n    key_(a_key), value_(a_value) {\n  }\n\n  // Gets the user supplied key.\n  const char* key() const {\n    return key_.c_str();\n  }\n\n  // Gets the user supplied value.\n  const char* value() const {\n    return value_.c_str();\n  }\n\n  // Sets a new value, overriding the one supplied in the constructor.\n  void SetValue(const std::string& new_value) {\n    value_ = new_value;\n  }\n\n private:\n  // The key supplied by the user.\n  std::string key_;\n  // The value supplied by the user.\n  std::string value_;\n};\n\n// The result of a single Test.  
This includes a list of\n// TestPartResults, a list of TestProperties, a count of how many\n// death tests there are in the Test, and how much time it took to run\n// the Test.\n//\n// TestResult is not copyable.\nclass GTEST_API_ TestResult {\n public:\n  // Creates an empty TestResult.\n  TestResult();\n\n  // D'tor.  Do not inherit from TestResult.\n  ~TestResult();\n\n  // Gets the number of all test parts.  This is the sum of the number\n  // of successful test parts and the number of failed test parts.\n  int total_part_count() const;\n\n  // Returns the number of the test properties.\n  int test_property_count() const;\n\n  // Returns true if and only if the test passed (i.e. no test part failed).\n  bool Passed() const { return !Skipped() && !Failed(); }\n\n  // Returns true if and only if the test was skipped.\n  bool Skipped() const;\n\n  // Returns true if and only if the test failed.\n  bool Failed() const;\n\n  // Returns true if and only if the test fatally failed.\n  bool HasFatalFailure() const;\n\n  // Returns true if and only if the test has a non-fatal failure.\n  bool HasNonfatalFailure() const;\n\n  // Returns the elapsed time, in milliseconds.\n  TimeInMillis elapsed_time() const { return elapsed_time_; }\n\n  // Gets the time of the test case start, in ms from the start of the\n  // UNIX epoch.\n  TimeInMillis start_timestamp() const { return start_timestamp_; }\n\n  // Returns the i-th test part result among all the results. i can range from 0\n  // to total_part_count() - 1. If i is not in that range, aborts the program.\n  const TestPartResult& GetTestPartResult(int i) const;\n\n  // Returns the i-th test property. i can range from 0 to\n  // test_property_count() - 1. 
If i is not in that range, aborts the\n  // program.\n  const TestProperty& GetTestProperty(int i) const;\n\n private:\n  friend class TestInfo;\n  friend class TestSuite;\n  friend class UnitTest;\n  friend class internal::DefaultGlobalTestPartResultReporter;\n  friend class internal::ExecDeathTest;\n  friend class internal::TestResultAccessor;\n  friend class internal::UnitTestImpl;\n  friend class internal::WindowsDeathTest;\n  friend class internal::FuchsiaDeathTest;\n\n  // Gets the vector of TestPartResults.\n  const std::vector<TestPartResult>& test_part_results() const {\n    return test_part_results_;\n  }\n\n  // Gets the vector of TestProperties.\n  const std::vector<TestProperty>& test_properties() const {\n    return test_properties_;\n  }\n\n  // Sets the start time.\n  void set_start_timestamp(TimeInMillis start) { start_timestamp_ = start; }\n\n  // Sets the elapsed time.\n  void set_elapsed_time(TimeInMillis elapsed) { elapsed_time_ = elapsed; }\n\n  // Adds a test property to the list. The property is validated and may add\n  // a non-fatal failure if invalid (e.g., if it conflicts with reserved\n  // key names). If a property is already recorded for the same key, the\n  // value will be updated, rather than storing multiple values for the same\n  // key.  xml_element specifies the element for which the property is being\n  // recorded and is used for validation.\n  void RecordProperty(const std::string& xml_element,\n                      const TestProperty& test_property);\n\n  // Adds a failure if the key is a reserved attribute of Google Test\n  // testsuite tags.  
Returns true if the property is valid.\n  // FIXME: Validate attribute names are legal and human readable.\n  static bool ValidateTestProperty(const std::string& xml_element,\n                                   const TestProperty& test_property);\n\n  // Adds a test part result to the list.\n  void AddTestPartResult(const TestPartResult& test_part_result);\n\n  // Returns the death test count.\n  int death_test_count() const { return death_test_count_; }\n\n  // Increments the death test count, returning the new count.\n  int increment_death_test_count() { return ++death_test_count_; }\n\n  // Clears the test part results.\n  void ClearTestPartResults();\n\n  // Clears the object.\n  void Clear();\n\n  // Protects mutable state of the property vector and of owned\n  // properties, whose values may be updated.\n  internal::Mutex test_properites_mutex_;\n\n  // The vector of TestPartResults\n  std::vector<TestPartResult> test_part_results_;\n  // The vector of TestProperties\n  std::vector<TestProperty> test_properties_;\n  // Running count of death tests.\n  int death_test_count_;\n  // The start time, in milliseconds since UNIX Epoch.\n  TimeInMillis start_timestamp_;\n  // The elapsed time, in milliseconds.\n  TimeInMillis elapsed_time_;\n\n  // We disallow copying TestResult.\n  GTEST_DISALLOW_COPY_AND_ASSIGN_(TestResult);\n};  // class TestResult\n\n// A TestInfo object stores the following information about a test:\n//\n//   Test suite name\n//   Test name\n//   Whether the test should be run\n//   A function pointer that creates the test object when invoked\n//   Test result\n//\n// The constructor of TestInfo registers itself with the UnitTest\n// singleton such that the RUN_ALL_TESTS() macro knows which tests to\n// run.\nclass GTEST_API_ TestInfo {\n public:\n  // Destructs a TestInfo object.  
This function is not virtual, so\n  // don't inherit from TestInfo.\n  ~TestInfo();\n\n  // Returns the test suite name.\n  const char* test_suite_name() const { return test_suite_name_.c_str(); }\n\n// Legacy API is deprecated but still available\n#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_\n  const char* test_case_name() const { return test_suite_name(); }\n#endif  // GTEST_REMOVE_LEGACY_TEST_CASEAPI_\n\n  // Returns the test name.\n  const char* name() const { return name_.c_str(); }\n\n  // Returns the name of the parameter type, or NULL if this is not a typed\n  // or a type-parameterized test.\n  const char* type_param() const {\n    if (type_param_.get() != nullptr) return type_param_->c_str();\n    return nullptr;\n  }\n\n  // Returns the text representation of the value parameter, or NULL if this\n  // is not a value-parameterized test.\n  const char* value_param() const {\n    if (value_param_.get() != nullptr) return value_param_->c_str();\n    return nullptr;\n  }\n\n  // Returns the file name where this test is defined.\n  const char* file() const { return location_.file.c_str(); }\n\n  // Returns the line where this test is defined.\n  int line() const { return location_.line; }\n\n  // Return true if this test should not be run because it's in another shard.\n  bool is_in_another_shard() const { return is_in_another_shard_; }\n\n  // Returns true if this test should run, that is if the test is not\n  // disabled (or it is disabled but the also_run_disabled_tests flag has\n  // been specified) and its full name matches the user-specified filter.\n  //\n  // Google Test allows the user to filter the tests by their full names.\n  // The full name of a test Bar in test suite Foo is defined as\n  // \"Foo.Bar\".  Only the tests that match the filter will run.\n  //\n  // A filter is a colon-separated list of glob (not regex) patterns,\n  // optionally followed by a '-' and a colon-separated list of\n  // negative patterns (tests to exclude).  
A test is run if it\n  // matches one of the positive patterns and does not match any of\n  // the negative patterns.\n  //\n  // For example, *A*:Foo.* is a filter that matches any string that\n  // contains the character 'A' or starts with \"Foo.\".\n  bool should_run() const { return should_run_; }\n\n  // Returns true if and only if this test will appear in the XML report.\n  bool is_reportable() const {\n    // The XML report includes tests matching the filter, excluding those\n    // run in other shards.\n    return matches_filter_ && !is_in_another_shard_;\n  }\n\n  // Returns the result of the test.\n  const TestResult* result() const { return &result_; }\n\n private:\n#if GTEST_HAS_DEATH_TEST\n  friend class internal::DefaultDeathTestFactory;\n#endif  // GTEST_HAS_DEATH_TEST\n  friend class Test;\n  friend class TestSuite;\n  friend class internal::UnitTestImpl;\n  friend class internal::StreamingListenerTest;\n  friend TestInfo* internal::MakeAndRegisterTestInfo(\n      const char* test_suite_name, const char* name, const char* type_param,\n      const char* value_param, internal::CodeLocation code_location,\n      internal::TypeId fixture_class_id, internal::SetUpTestSuiteFunc set_up_tc,\n      internal::TearDownTestSuiteFunc tear_down_tc,\n      internal::TestFactoryBase* factory);\n\n  // Constructs a TestInfo object. 
The newly constructed instance assumes\n  // ownership of the factory object.\n  TestInfo(const std::string& test_suite_name, const std::string& name,\n           const char* a_type_param,   // NULL if not a type-parameterized test\n           const char* a_value_param,  // NULL if not a value-parameterized test\n           internal::CodeLocation a_code_location,\n           internal::TypeId fixture_class_id,\n           internal::TestFactoryBase* factory);\n\n  // Increments the number of death tests encountered in this test so\n  // far.\n  int increment_death_test_count() {\n    return result_.increment_death_test_count();\n  }\n\n  // Creates the test object, runs it, records its result, and then\n  // deletes it.\n  void Run();\n\n  static void ClearTestResult(TestInfo* test_info) {\n    test_info->result_.Clear();\n  }\n\n  // These fields are immutable properties of the test.\n  const std::string test_suite_name_;    // test suite name\n  const std::string name_;               // Test name\n  // Name of the parameter type, or NULL if this is not a typed or a\n  // type-parameterized test.\n  const std::unique_ptr<const ::std::string> type_param_;\n  // Text representation of the value parameter, or NULL if this is not a\n  // value-parameterized test.\n  const std::unique_ptr<const ::std::string> value_param_;\n  internal::CodeLocation location_;\n  const internal::TypeId fixture_class_id_;  // ID of the test fixture class\n  bool should_run_;           // True if and only if this test should run\n  bool is_disabled_;          // True if and only if this test is disabled\n  bool matches_filter_;       // True if this test matches the\n                              // user-specified filter.\n  bool is_in_another_shard_;  // Will be run in another shard.\n  internal::TestFactoryBase* const factory_;  // The factory that creates\n                                              // the test object\n\n  // This field is mutable and needs to be reset before running 
the\n  // test for the second time.\n  TestResult result_;\n\n  GTEST_DISALLOW_COPY_AND_ASSIGN_(TestInfo);\n};\n\n// A test suite, which consists of a vector of TestInfos.\n//\n// TestSuite is not copyable.\nclass GTEST_API_ TestSuite {\n public:\n  // Creates a TestSuite with the given name.\n  //\n  // TestSuite does NOT have a default constructor.  Always use this\n  // constructor to create a TestSuite object.\n  //\n  // Arguments:\n  //\n  //   name:         name of the test suite\n  //   a_type_param: the name of the test's type parameter, or NULL if\n  //                 this is not a type-parameterized test.\n  //   set_up_tc:    pointer to the function that sets up the test suite\n  //   tear_down_tc: pointer to the function that tears down the test suite\n  TestSuite(const char* name, const char* a_type_param,\n            internal::SetUpTestSuiteFunc set_up_tc,\n            internal::TearDownTestSuiteFunc tear_down_tc);\n\n  // Destructor of TestSuite.\n  virtual ~TestSuite();\n\n  // Gets the name of the TestSuite.\n  const char* name() const { return name_.c_str(); }\n\n  // Returns the name of the parameter type, or NULL if this is not a\n  // type-parameterized test suite.\n  const char* type_param() const {\n    if (type_param_.get() != nullptr) return type_param_->c_str();\n    return nullptr;\n  }\n\n  // Returns true if any test in this test suite should run.\n  bool should_run() const { return should_run_; }\n\n  // Gets the number of successful tests in this test suite.\n  int successful_test_count() const;\n\n  // Gets the number of skipped tests in this test suite.\n  int skipped_test_count() const;\n\n  // Gets the number of failed tests in this test suite.\n  int failed_test_count() const;\n\n  // Gets the number of disabled tests that will be reported in the XML report.\n  int reportable_disabled_test_count() const;\n\n  // Gets the number of disabled tests in this test suite.\n  int disabled_test_count() const;\n\n  // Gets the number of 
tests to be printed in the XML report.\n  int reportable_test_count() const;\n\n  // Get the number of tests in this test suite that should run.\n  int test_to_run_count() const;\n\n  // Gets the number of all tests in this test suite.\n  int total_test_count() const;\n\n  // Returns true if and only if the test suite passed.\n  bool Passed() const { return !Failed(); }\n\n  // Returns true if and only if the test suite failed.\n  bool Failed() const { return failed_test_count() > 0; }\n\n  // Returns the elapsed time, in milliseconds.\n  TimeInMillis elapsed_time() const { return elapsed_time_; }\n\n  // Gets the time of the test suite start, in ms from the start of the\n  // UNIX epoch.\n  TimeInMillis start_timestamp() const { return start_timestamp_; }\n\n  // Returns the i-th test among all the tests. i can range from 0 to\n  // total_test_count() - 1. If i is not in that range, returns NULL.\n  const TestInfo* GetTestInfo(int i) const;\n\n  // Returns the TestResult that holds test properties recorded during\n  // execution of SetUpTestSuite and TearDownTestSuite.\n  const TestResult& ad_hoc_test_result() const { return ad_hoc_test_result_; }\n\n private:\n  friend class Test;\n  friend class internal::UnitTestImpl;\n\n  // Gets the (mutable) vector of TestInfos in this TestSuite.\n  std::vector<TestInfo*>& test_info_list() { return test_info_list_; }\n\n  // Gets the (immutable) vector of TestInfos in this TestSuite.\n  const std::vector<TestInfo*>& test_info_list() const {\n    return test_info_list_;\n  }\n\n  // Returns the i-th test among all the tests. i can range from 0 to\n  // total_test_count() - 1. If i is not in that range, returns NULL.\n  TestInfo* GetMutableTestInfo(int i);\n\n  // Sets the should_run member.\n  void set_should_run(bool should) { should_run_ = should; }\n\n  // Adds a TestInfo to this test suite.  
Will delete the TestInfo upon\n  // destruction of the TestSuite object.\n  void AddTestInfo(TestInfo * test_info);\n\n  // Clears the results of all tests in this test suite.\n  void ClearResult();\n\n  // Clears the results of all tests in the given test suite.\n  static void ClearTestSuiteResult(TestSuite* test_suite) {\n    test_suite->ClearResult();\n  }\n\n  // Runs every test in this TestSuite.\n  void Run();\n\n  // Runs SetUpTestSuite() for this TestSuite.  This wrapper is needed\n  // for catching exceptions thrown from SetUpTestSuite().\n  void RunSetUpTestSuite() {\n    if (set_up_tc_ != nullptr) {\n      (*set_up_tc_)();\n    }\n  }\n\n  // Runs TearDownTestSuite() for this TestSuite.  This wrapper is\n  // needed for catching exceptions thrown from TearDownTestSuite().\n  void RunTearDownTestSuite() {\n    if (tear_down_tc_ != nullptr) {\n      (*tear_down_tc_)();\n    }\n  }\n\n  // Returns true if and only if test passed.\n  static bool TestPassed(const TestInfo* test_info) {\n    return test_info->should_run() && test_info->result()->Passed();\n  }\n\n  // Returns true if and only if test skipped.\n  static bool TestSkipped(const TestInfo* test_info) {\n    return test_info->should_run() && test_info->result()->Skipped();\n  }\n\n  // Returns true if and only if test failed.\n  static bool TestFailed(const TestInfo* test_info) {\n    return test_info->should_run() && test_info->result()->Failed();\n  }\n\n  // Returns true if and only if the test is disabled and will be reported in\n  // the XML report.\n  static bool TestReportableDisabled(const TestInfo* test_info) {\n    return test_info->is_reportable() && test_info->is_disabled_;\n  }\n\n  // Returns true if and only if test is disabled.\n  static bool TestDisabled(const TestInfo* test_info) {\n    return test_info->is_disabled_;\n  }\n\n  // Returns true if and only if this test will appear in the XML report.\n  static bool TestReportable(const TestInfo* test_info) {\n    return 
test_info->is_reportable();\n  }\n\n  // Returns true if the given test should run.\n  static bool ShouldRunTest(const TestInfo* test_info) {\n    return test_info->should_run();\n  }\n\n  // Shuffles the tests in this test suite.\n  void ShuffleTests(internal::Random* random);\n\n  // Restores the test order to before the first shuffle.\n  void UnshuffleTests();\n\n  // Name of the test suite.\n  std::string name_;\n  // Name of the parameter type, or NULL if this is not a typed or a\n  // type-parameterized test.\n  const std::unique_ptr<const ::std::string> type_param_;\n  // The vector of TestInfos in their original order.  It owns the\n  // elements in the vector.\n  std::vector<TestInfo*> test_info_list_;\n  // Provides a level of indirection for the test list to allow easy\n  // shuffling and restoring the test order.  The i-th element in this\n  // vector is the index of the i-th test in the shuffled test list.\n  std::vector<int> test_indices_;\n  // Pointer to the function that sets up the test suite.\n  internal::SetUpTestSuiteFunc set_up_tc_;\n  // Pointer to the function that tears down the test suite.\n  internal::TearDownTestSuiteFunc tear_down_tc_;\n  // True if and only if any test in this test suite should run.\n  bool should_run_;\n  // The start time, in milliseconds since UNIX Epoch.\n  TimeInMillis start_timestamp_;\n  // Elapsed time, in milliseconds.\n  TimeInMillis elapsed_time_;\n  // Holds test properties recorded during execution of SetUpTestSuite and\n  // TearDownTestSuite.\n  TestResult ad_hoc_test_result_;\n\n  // We disallow copying TestSuites.\n  GTEST_DISALLOW_COPY_AND_ASSIGN_(TestSuite);\n};\n\n// An Environment object is capable of setting up and tearing down an\n// environment.  You should subclass this to define your own\n// environment(s).\n//\n// An Environment object does the set-up and tear-down in virtual\n// methods SetUp() and TearDown() instead of the constructor and the\n// destructor, as:\n//\n//   1. 
You cannot safely throw from a destructor.  This is a problem\n//      as in some cases Google Test is used where exceptions are enabled, and\n//      we may want to implement ASSERT_* using exceptions where they are\n//      available.\n//   2. You cannot use ASSERT_* directly in a constructor or\n//      destructor.\nclass Environment {\n public:\n  // The d'tor is virtual as we need to subclass Environment.\n  virtual ~Environment() {}\n\n  // Override this to define how to set up the environment.\n  virtual void SetUp() {}\n\n  // Override this to define how to tear down the environment.\n  virtual void TearDown() {}\n private:\n  // If you see an error about overriding the following function or\n  // about it being private, you have mis-spelled SetUp() as Setup().\n  struct Setup_should_be_spelled_SetUp {};\n  virtual Setup_should_be_spelled_SetUp* Setup() { return nullptr; }\n};\n\n#if GTEST_HAS_EXCEPTIONS\n\n// Exception which can be thrown from TestEventListener::OnTestPartResult.\nclass GTEST_API_ AssertionException\n    : public internal::GoogleTestFailureException {\n public:\n  explicit AssertionException(const TestPartResult& result)\n      : GoogleTestFailureException(result) {}\n};\n\n#endif  // GTEST_HAS_EXCEPTIONS\n\n// The interface for tracing execution of tests. The methods are organized in\n// the order the corresponding events are fired.\nclass TestEventListener {\n public:\n  virtual ~TestEventListener() {}\n\n  // Fired before any test activity starts.\n  virtual void OnTestProgramStart(const UnitTest& unit_test) = 0;\n\n  // Fired before each iteration of tests starts.  There may be more than\n  // one iteration if GTEST_FLAG(repeat) is set. 
iteration is the iteration\n  // index, starting from 0.\n  virtual void OnTestIterationStart(const UnitTest& unit_test,\n                                    int iteration) = 0;\n\n  // Fired before environment set-up for each iteration of tests starts.\n  virtual void OnEnvironmentsSetUpStart(const UnitTest& unit_test) = 0;\n\n  // Fired after environment set-up for each iteration of tests ends.\n  virtual void OnEnvironmentsSetUpEnd(const UnitTest& unit_test) = 0;\n\n  // Fired before the test suite starts.\n  virtual void OnTestSuiteStart(const TestSuite& /*test_suite*/) {}\n\n  //  Legacy API is deprecated but still available\n#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_\n  virtual void OnTestCaseStart(const TestCase& /*test_case*/) {}\n#endif  //  GTEST_REMOVE_LEGACY_TEST_CASEAPI_\n\n  // Fired before the test starts.\n  virtual void OnTestStart(const TestInfo& test_info) = 0;\n\n  // Fired after a failed assertion or a SUCCEED() invocation.\n  // If you want to throw an exception from this function to skip to the next\n  // TEST, it must be AssertionException defined above, or inherited from it.\n  virtual void OnTestPartResult(const TestPartResult& test_part_result) = 0;\n\n  // Fired after the test ends.\n  virtual void OnTestEnd(const TestInfo& test_info) = 0;\n\n  // Fired after the test suite ends.\n  virtual void OnTestSuiteEnd(const TestSuite& /*test_suite*/) {}\n\n//  Legacy API is deprecated but still available\n#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_\n  virtual void OnTestCaseEnd(const TestCase& /*test_case*/) {}\n#endif  //  GTEST_REMOVE_LEGACY_TEST_CASEAPI_\n\n  // Fired before environment tear-down for each iteration of tests starts.\n  virtual void OnEnvironmentsTearDownStart(const UnitTest& unit_test) = 0;\n\n  // Fired after environment tear-down for each iteration of tests ends.\n  virtual void OnEnvironmentsTearDownEnd(const UnitTest& unit_test) = 0;\n\n  // Fired after each iteration of tests finishes.\n  virtual void 
OnTestIterationEnd(const UnitTest& unit_test,\n                                  int iteration) = 0;\n\n  // Fired after all test activities have ended.\n  virtual void OnTestProgramEnd(const UnitTest& unit_test) = 0;\n};\n\n// The convenience class for users who need to override just one or two\n// methods and are not concerned that a possible change to a signature of\n// the methods they override will not be caught during the build.  For\n// comments about each method please see the definition of TestEventListener\n// above.\nclass EmptyTestEventListener : public TestEventListener {\n public:\n  void OnTestProgramStart(const UnitTest& /*unit_test*/) override {}\n  void OnTestIterationStart(const UnitTest& /*unit_test*/,\n                            int /*iteration*/) override {}\n  void OnEnvironmentsSetUpStart(const UnitTest& /*unit_test*/) override {}\n  void OnEnvironmentsSetUpEnd(const UnitTest& /*unit_test*/) override {}\n  void OnTestSuiteStart(const TestSuite& /*test_suite*/) override {}\n//  Legacy API is deprecated but still available\n#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_\n  void OnTestCaseStart(const TestCase& /*test_case*/) override {}\n#endif  //  GTEST_REMOVE_LEGACY_TEST_CASEAPI_\n\n  void OnTestStart(const TestInfo& /*test_info*/) override {}\n  void OnTestPartResult(const TestPartResult& /*test_part_result*/) override {}\n  void OnTestEnd(const TestInfo& /*test_info*/) override {}\n  void OnTestSuiteEnd(const TestSuite& /*test_suite*/) override {}\n#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_\n  void OnTestCaseEnd(const TestCase& /*test_case*/) override {}\n#endif  //  GTEST_REMOVE_LEGACY_TEST_CASEAPI_\n\n  void OnEnvironmentsTearDownStart(const UnitTest& /*unit_test*/) override {}\n  void OnEnvironmentsTearDownEnd(const UnitTest& /*unit_test*/) override {}\n  void OnTestIterationEnd(const UnitTest& /*unit_test*/,\n                          int /*iteration*/) override {}\n  void OnTestProgramEnd(const UnitTest& /*unit_test*/) override 
{}\n};\n\n// TestEventListeners lets users add listeners to track events in Google Test.\nclass GTEST_API_ TestEventListeners {\n public:\n  TestEventListeners();\n  ~TestEventListeners();\n\n  // Appends an event listener to the end of the list. Google Test assumes\n  // the ownership of the listener (i.e. it will delete the listener when\n  // the test program finishes).\n  void Append(TestEventListener* listener);\n\n  // Removes the given event listener from the list and returns it.  It then\n  // becomes the caller's responsibility to delete the listener. Returns\n  // NULL if the listener is not found in the list.\n  TestEventListener* Release(TestEventListener* listener);\n\n  // Returns the standard listener responsible for the default console\n  // output.  Can be removed from the listeners list to shut down default\n  // console output.  Note that removing this object from the listener list\n  // with Release transfers its ownership to the caller and makes this\n  // function return NULL the next time.\n  TestEventListener* default_result_printer() const {\n    return default_result_printer_;\n  }\n\n  // Returns the standard listener responsible for the default XML output\n  // controlled by the --gtest_output=xml flag.  Can be removed from the\n  // listeners list by users who want to shut down the default XML output\n  // controlled by this flag and substitute it with custom one.  
Note that\n  // removing this object from the listener list with Release transfers its\n  // ownership to the caller and makes this function return NULL the next\n  // time.\n  TestEventListener* default_xml_generator() const {\n    return default_xml_generator_;\n  }\n\n private:\n  friend class TestSuite;\n  friend class TestInfo;\n  friend class internal::DefaultGlobalTestPartResultReporter;\n  friend class internal::NoExecDeathTest;\n  friend class internal::TestEventListenersAccessor;\n  friend class internal::UnitTestImpl;\n\n  // Returns repeater that broadcasts the TestEventListener events to all\n  // subscribers.\n  TestEventListener* repeater();\n\n  // Sets the default_result_printer attribute to the provided listener.\n  // The listener is also added to the listener list and previous\n  // default_result_printer is removed from it and deleted. The listener can\n  // also be NULL in which case it will not be added to the list. Does\n  // nothing if the previous and the current listener objects are the same.\n  void SetDefaultResultPrinter(TestEventListener* listener);\n\n  // Sets the default_xml_generator attribute to the provided listener.  The\n  // listener is also added to the listener list and previous\n  // default_xml_generator is removed from it and deleted. The listener can\n  // also be NULL in which case it will not be added to the list. 
Does\n  // nothing if the previous and the current listener objects are the same.\n  void SetDefaultXmlGenerator(TestEventListener* listener);\n\n  // Controls whether events will be forwarded by the repeater to the\n  // listeners in the list.\n  bool EventForwardingEnabled() const;\n  void SuppressEventForwarding();\n\n  // The actual list of listeners.\n  internal::TestEventRepeater* repeater_;\n  // Listener responsible for the standard result output.\n  TestEventListener* default_result_printer_;\n  // Listener responsible for the creation of the XML output file.\n  TestEventListener* default_xml_generator_;\n\n  // We disallow copying TestEventListeners.\n  GTEST_DISALLOW_COPY_AND_ASSIGN_(TestEventListeners);\n};\n\n// A UnitTest consists of a vector of TestSuites.\n//\n// This is a singleton class.  The only instance of UnitTest is\n// created when UnitTest::GetInstance() is first called.  This\n// instance is never deleted.\n//\n// UnitTest is not copyable.\n//\n// This class is thread-safe as long as the methods are called\n// according to their specification.\nclass GTEST_API_ UnitTest {\n public:\n  // Gets the singleton UnitTest object.  The first time this method\n  // is called, a UnitTest object is constructed and returned.\n  // Consecutive calls will return the same object.\n  static UnitTest* GetInstance();\n\n  // Runs all tests in this UnitTest object and prints the result.\n  // Returns 0 if successful, or 1 otherwise.\n  //\n  // This method can only be called from the main thread.\n  //\n  // INTERNAL IMPLEMENTATION - DO NOT USE IN A USER PROGRAM.\n  int Run() GTEST_MUST_USE_RESULT_;\n\n  // Returns the working directory when the first TEST() or TEST_F()\n  // was executed.  
The UnitTest object owns the string.\n  const char* original_working_dir() const;\n\n  // Returns the TestSuite object for the test that's currently running,\n  // or NULL if no test is running.\n  const TestSuite* current_test_suite() const GTEST_LOCK_EXCLUDED_(mutex_);\n\n// Legacy API is still available but deprecated\n#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_\n  const TestCase* current_test_case() const GTEST_LOCK_EXCLUDED_(mutex_);\n#endif\n\n  // Returns the TestInfo object for the test that's currently running,\n  // or NULL if no test is running.\n  const TestInfo* current_test_info() const\n      GTEST_LOCK_EXCLUDED_(mutex_);\n\n  // Returns the random seed used at the start of the current test run.\n  int random_seed() const;\n\n  // Returns the ParameterizedTestSuiteRegistry object used to keep track of\n  // value-parameterized tests and instantiate and register them.\n  //\n  // INTERNAL IMPLEMENTATION - DO NOT USE IN A USER PROGRAM.\n  internal::ParameterizedTestSuiteRegistry& parameterized_test_registry()\n      GTEST_LOCK_EXCLUDED_(mutex_);\n\n  // Gets the number of successful test suites.\n  int successful_test_suite_count() const;\n\n  // Gets the number of failed test suites.\n  int failed_test_suite_count() const;\n\n  // Gets the number of all test suites.\n  int total_test_suite_count() const;\n\n  // Gets the number of all test suites that contain at least one test\n  // that should run.\n  int test_suite_to_run_count() const;\n\n  //  Legacy API is deprecated but still available\n#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_\n  int successful_test_case_count() const;\n  int failed_test_case_count() const;\n  int total_test_case_count() const;\n  int test_case_to_run_count() const;\n#endif  //  GTEST_REMOVE_LEGACY_TEST_CASEAPI_\n\n  // Gets the number of successful tests.\n  int successful_test_count() const;\n\n  // Gets the number of skipped tests.\n  int skipped_test_count() const;\n\n  // Gets the number of failed tests.\n  int 
failed_test_count() const;\n\n  // Gets the number of disabled tests that will be reported in the XML report.\n  int reportable_disabled_test_count() const;\n\n  // Gets the number of disabled tests.\n  int disabled_test_count() const;\n\n  // Gets the number of tests to be printed in the XML report.\n  int reportable_test_count() const;\n\n  // Gets the number of all tests.\n  int total_test_count() const;\n\n  // Gets the number of tests that should run.\n  int test_to_run_count() const;\n\n  // Gets the time of the test program start, in ms from the start of the\n  // UNIX epoch.\n  TimeInMillis start_timestamp() const;\n\n  // Gets the elapsed time, in milliseconds.\n  TimeInMillis elapsed_time() const;\n\n  // Returns true if and only if the unit test passed (i.e. all test suites\n  // passed).\n  bool Passed() const;\n\n  // Returns true if and only if the unit test failed (i.e. some test suite\n  // failed or something outside of all tests failed).\n  bool Failed() const;\n\n  // Gets the i-th test suite among all the test suites. i can range from 0 to\n  // total_test_suite_count() - 1. If i is not in that range, returns NULL.\n  const TestSuite* GetTestSuite(int i) const;\n\n//  Legacy API is deprecated but still available\n#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_\n  const TestCase* GetTestCase(int i) const;\n#endif  //  GTEST_REMOVE_LEGACY_TEST_CASEAPI_\n\n  // Returns the TestResult containing information on test failures and\n  // properties logged outside of individual test suites.\n  const TestResult& ad_hoc_test_result() const;\n\n  // Returns the list of event listeners that can be used to track events\n  // inside Google Test.\n  TestEventListeners& listeners();\n\n private:\n  // Registers and returns a global test environment.  When a test\n  // program is run, all global test environments will be set-up in\n  // the order they were registered.  
After all tests in the program\n  // have finished, all global test environments will be torn-down in\n  // the *reverse* order they were registered.\n  //\n  // The UnitTest object takes ownership of the given environment.\n  //\n  // This method can only be called from the main thread.\n  Environment* AddEnvironment(Environment* env);\n\n  // Adds a TestPartResult to the current TestResult object.  All\n  // Google Test assertion macros (e.g. ASSERT_TRUE, EXPECT_EQ, etc)\n  // eventually call this to report their results.  The user code\n  // should use the assertion macros instead of calling this directly.\n  void AddTestPartResult(TestPartResult::Type result_type,\n                         const char* file_name,\n                         int line_number,\n                         const std::string& message,\n                         const std::string& os_stack_trace)\n      GTEST_LOCK_EXCLUDED_(mutex_);\n\n  // Adds a TestProperty to the current TestResult object when invoked from\n  // inside a test, to current TestSuite's ad_hoc_test_result_ when invoked\n  // from SetUpTestSuite or TearDownTestSuite, or to the global property set\n  // when invoked elsewhere.  If the result already contains a property with\n  // the same key, the value will be updated.\n  void RecordProperty(const std::string& key, const std::string& value);\n\n  // Gets the i-th test suite among all the test suites. i can range from 0 to\n  // total_test_suite_count() - 1. 
If i is not in that range, returns NULL.\n  TestSuite* GetMutableTestSuite(int i);\n\n  // Accessors for the implementation object.\n  internal::UnitTestImpl* impl() { return impl_; }\n  const internal::UnitTestImpl* impl() const { return impl_; }\n\n  // These classes and functions are friends as they need to access private\n  // members of UnitTest.\n  friend class ScopedTrace;\n  friend class Test;\n  friend class internal::AssertHelper;\n  friend class internal::StreamingListenerTest;\n  friend class internal::UnitTestRecordPropertyTestHelper;\n  friend Environment* AddGlobalTestEnvironment(Environment* env);\n  friend internal::UnitTestImpl* internal::GetUnitTestImpl();\n  friend void internal::ReportFailureInUnknownLocation(\n      TestPartResult::Type result_type,\n      const std::string& message);\n\n  // Creates an empty UnitTest.\n  UnitTest();\n\n  // D'tor\n  virtual ~UnitTest();\n\n  // Pushes a trace defined by SCOPED_TRACE() on to the per-thread\n  // Google Test trace stack.\n  void PushGTestTrace(const internal::TraceInfo& trace)\n      GTEST_LOCK_EXCLUDED_(mutex_);\n\n  // Pops a trace from the per-thread Google Test trace stack.\n  void PopGTestTrace()\n      GTEST_LOCK_EXCLUDED_(mutex_);\n\n  // Protects mutable state in *impl_.  This is mutable as some const\n  // methods need to lock it too.\n  mutable internal::Mutex mutex_;\n\n  // Opaque implementation object.  This field is never changed once\n  // the object is constructed.  We don't mark it as const here, as\n  // doing so will cause a warning in the constructor of UnitTest.\n  // Mutable state in *impl_ is protected by mutex_.\n  internal::UnitTestImpl* impl_;\n\n  // We disallow copying UnitTest.\n  GTEST_DISALLOW_COPY_AND_ASSIGN_(UnitTest);\n};\n\n// A convenient wrapper for adding an environment for the test\n// program.\n//\n// You should call this before RUN_ALL_TESTS() is called, probably in\n// main().  
If you use gtest_main, you need to call this before main()\n// starts for it to take effect.  For example, you can define a global\n// variable like this:\n//\n//   testing::Environment* const foo_env =\n//       testing::AddGlobalTestEnvironment(new FooEnvironment);\n//\n// However, we strongly recommend you to write your own main() and\n// call AddGlobalTestEnvironment() there, as relying on initialization\n// of global variables makes the code harder to read and may cause\n// problems when you register multiple environments from different\n// translation units and the environments have dependencies among them\n// (remember that the compiler doesn't guarantee the order in which\n// global variables from different translation units are initialized).\ninline Environment* AddGlobalTestEnvironment(Environment* env) {\n  return UnitTest::GetInstance()->AddEnvironment(env);\n}\n\n// Initializes Google Test.  This must be called before calling\n// RUN_ALL_TESTS().  In particular, it parses a command line for the\n// flags that Google Test recognizes.  Whenever a Google Test flag is\n// seen, it is removed from argv, and *argc is decremented.\n//\n// No value is returned.  Instead, the Google Test flag variables are\n// updated.\n//\n// Calling the function for the second time has no user-visible effect.\nGTEST_API_ void InitGoogleTest(int* argc, char** argv);\n\n// This overloaded version can be used in Windows programs compiled in\n// UNICODE mode.\nGTEST_API_ void InitGoogleTest(int* argc, wchar_t** argv);\n\n// This overloaded version can be used on Arduino/embedded platforms where\n// there is no argc/argv.\nGTEST_API_ void InitGoogleTest();\n\nnamespace internal {\n\n// Separate the error generating code from the code path to reduce the stack\n// frame size of CmpHelperEQ. 
This helps reduce the overhead of some sanitizers\n// when calling EXPECT_* in a tight loop.\ntemplate <typename T1, typename T2>\nAssertionResult CmpHelperEQFailure(const char* lhs_expression,\n                                   const char* rhs_expression,\n                                   const T1& lhs, const T2& rhs) {\n  return EqFailure(lhs_expression,\n                   rhs_expression,\n                   FormatForComparisonFailureMessage(lhs, rhs),\n                   FormatForComparisonFailureMessage(rhs, lhs),\n                   false);\n}\n\n// This block of code defines operator==/!=\n// to block lexical scope lookup.\n// It prevents using invalid operator==/!= defined at namespace scope.\nstruct faketype {};\ninline bool operator==(faketype, faketype) { return true; }\ninline bool operator!=(faketype, faketype) { return false; }\n\n// The helper function for {ASSERT|EXPECT}_EQ.\ntemplate <typename T1, typename T2>\nAssertionResult CmpHelperEQ(const char* lhs_expression,\n                            const char* rhs_expression,\n                            const T1& lhs,\n                            const T2& rhs) {\n  if (lhs == rhs) {\n    return AssertionSuccess();\n  }\n\n  return CmpHelperEQFailure(lhs_expression, rhs_expression, lhs, rhs);\n}\n\n// With this overloaded version, we allow anonymous enums to be used\n// in {ASSERT|EXPECT}_EQ when compiled with gcc 4, as anonymous enums\n// can be implicitly cast to BiggestInt.\nGTEST_API_ AssertionResult CmpHelperEQ(const char* lhs_expression,\n                                       const char* rhs_expression,\n                                       BiggestInt lhs,\n                                       BiggestInt rhs);\n\nclass EqHelper {\n public:\n  // This templatized version is for the general case.\n  template <\n      typename T1, typename T2,\n      // Disable this overload for cases where one argument is a pointer\n      // and the other is the null pointer constant.\n      typename 
std::enable_if<!std::is_integral<T1>::value ||\n                              !std::is_pointer<T2>::value>::type* = nullptr>\n  static AssertionResult Compare(const char* lhs_expression,\n                                 const char* rhs_expression, const T1& lhs,\n                                 const T2& rhs) {\n    return CmpHelperEQ(lhs_expression, rhs_expression, lhs, rhs);\n  }\n\n  // With this overloaded version, we allow anonymous enums to be used\n  // in {ASSERT|EXPECT}_EQ when compiled with gcc 4, as anonymous\n  // enums can be implicitly cast to BiggestInt.\n  //\n  // Even though its body looks the same as the above version, we\n  // cannot merge the two, as it will make anonymous enums unhappy.\n  static AssertionResult Compare(const char* lhs_expression,\n                                 const char* rhs_expression,\n                                 BiggestInt lhs,\n                                 BiggestInt rhs) {\n    return CmpHelperEQ(lhs_expression, rhs_expression, lhs, rhs);\n  }\n\n  template <typename T>\n  static AssertionResult Compare(\n      const char* lhs_expression, const char* rhs_expression,\n      // Handle cases where '0' is used as a null pointer literal.\n      std::nullptr_t /* lhs */, T* rhs) {\n    // We already know that 'lhs' is a null pointer.\n    return CmpHelperEQ(lhs_expression, rhs_expression, static_cast<T*>(nullptr),\n                       rhs);\n  }\n};\n\n// Separate the error generating code from the code path to reduce the stack\n// frame size of CmpHelperOP. 
This helps reduce the overhead of some sanitizers\n// when calling EXPECT_OP in a tight loop.\ntemplate <typename T1, typename T2>\nAssertionResult CmpHelperOpFailure(const char* expr1, const char* expr2,\n                                   const T1& val1, const T2& val2,\n                                   const char* op) {\n  return AssertionFailure()\n         << \"Expected: (\" << expr1 << \") \" << op << \" (\" << expr2\n         << \"), actual: \" << FormatForComparisonFailureMessage(val1, val2)\n         << \" vs \" << FormatForComparisonFailureMessage(val2, val1);\n}\n\n// A macro for implementing the helper functions needed to implement\n// ASSERT_?? and EXPECT_??.  It is here just to avoid copy-and-paste\n// of similar code.\n//\n// For each templatized helper function, we also define an overloaded\n// version for BiggestInt in order to reduce code bloat and allow\n// anonymous enums to be used with {ASSERT|EXPECT}_?? when compiled\n// with gcc 4.\n//\n// INTERNAL IMPLEMENTATION - DO NOT USE IN A USER PROGRAM.\n\n#define GTEST_IMPL_CMP_HELPER_(op_name, op)\\\ntemplate <typename T1, typename T2>\\\nAssertionResult CmpHelper##op_name(const char* expr1, const char* expr2, \\\n                                   const T1& val1, const T2& val2) {\\\n  if (val1 op val2) {\\\n    return AssertionSuccess();\\\n  } else {\\\n    return CmpHelperOpFailure(expr1, expr2, val1, val2, #op);\\\n  }\\\n}\\\nGTEST_API_ AssertionResult CmpHelper##op_name(\\\n    const char* expr1, const char* expr2, BiggestInt val1, BiggestInt val2)\n\n// INTERNAL IMPLEMENTATION - DO NOT USE IN A USER PROGRAM.\n\n// Implements the helper function for {ASSERT|EXPECT}_NE\nGTEST_IMPL_CMP_HELPER_(NE, !=);\n// Implements the helper function for {ASSERT|EXPECT}_LE\nGTEST_IMPL_CMP_HELPER_(LE, <=);\n// Implements the helper function for {ASSERT|EXPECT}_LT\nGTEST_IMPL_CMP_HELPER_(LT, <);\n// Implements the helper function for {ASSERT|EXPECT}_GE\nGTEST_IMPL_CMP_HELPER_(GE, >=);\n// Implements the 
helper function for {ASSERT|EXPECT}_GT\nGTEST_IMPL_CMP_HELPER_(GT, >);\n\n#undef GTEST_IMPL_CMP_HELPER_\n\n// The helper function for {ASSERT|EXPECT}_STREQ.\n//\n// INTERNAL IMPLEMENTATION - DO NOT USE IN A USER PROGRAM.\nGTEST_API_ AssertionResult CmpHelperSTREQ(const char* s1_expression,\n                                          const char* s2_expression,\n                                          const char* s1,\n                                          const char* s2);\n\n// The helper function for {ASSERT|EXPECT}_STRCASEEQ.\n//\n// INTERNAL IMPLEMENTATION - DO NOT USE IN A USER PROGRAM.\nGTEST_API_ AssertionResult CmpHelperSTRCASEEQ(const char* s1_expression,\n                                              const char* s2_expression,\n                                              const char* s1,\n                                              const char* s2);\n\n// The helper function for {ASSERT|EXPECT}_STRNE.\n//\n// INTERNAL IMPLEMENTATION - DO NOT USE IN A USER PROGRAM.\nGTEST_API_ AssertionResult CmpHelperSTRNE(const char* s1_expression,\n                                          const char* s2_expression,\n                                          const char* s1,\n                                          const char* s2);\n\n// The helper function for {ASSERT|EXPECT}_STRCASENE.\n//\n// INTERNAL IMPLEMENTATION - DO NOT USE IN A USER PROGRAM.\nGTEST_API_ AssertionResult CmpHelperSTRCASENE(const char* s1_expression,\n                                              const char* s2_expression,\n                                              const char* s1,\n                                              const char* s2);\n\n\n// Helper function for *_STREQ on wide strings.\n//\n// INTERNAL IMPLEMENTATION - DO NOT USE IN A USER PROGRAM.\nGTEST_API_ AssertionResult CmpHelperSTREQ(const char* s1_expression,\n                                          const char* s2_expression,\n                                          const wchar_t* s1,\n                                
          const wchar_t* s2);\n\n// Helper function for *_STRNE on wide strings.\n//\n// INTERNAL IMPLEMENTATION - DO NOT USE IN A USER PROGRAM.\nGTEST_API_ AssertionResult CmpHelperSTRNE(const char* s1_expression,\n                                          const char* s2_expression,\n                                          const wchar_t* s1,\n                                          const wchar_t* s2);\n\n}  // namespace internal\n\n// IsSubstring() and IsNotSubstring() are intended to be used as the\n// first argument to {EXPECT,ASSERT}_PRED_FORMAT2(), not by\n// themselves.  They check whether needle is a substring of haystack\n// (NULL is considered a substring of itself only), and return an\n// appropriate error message when they fail.\n//\n// The {needle,haystack}_expr arguments are the stringified\n// expressions that generated the two real arguments.\nGTEST_API_ AssertionResult IsSubstring(\n    const char* needle_expr, const char* haystack_expr,\n    const char* needle, const char* haystack);\nGTEST_API_ AssertionResult IsSubstring(\n    const char* needle_expr, const char* haystack_expr,\n    const wchar_t* needle, const wchar_t* haystack);\nGTEST_API_ AssertionResult IsNotSubstring(\n    const char* needle_expr, const char* haystack_expr,\n    const char* needle, const char* haystack);\nGTEST_API_ AssertionResult IsNotSubstring(\n    const char* needle_expr, const char* haystack_expr,\n    const wchar_t* needle, const wchar_t* haystack);\nGTEST_API_ AssertionResult IsSubstring(\n    const char* needle_expr, const char* haystack_expr,\n    const ::std::string& needle, const ::std::string& haystack);\nGTEST_API_ AssertionResult IsNotSubstring(\n    const char* needle_expr, const char* haystack_expr,\n    const ::std::string& needle, const ::std::string& haystack);\n\n#if GTEST_HAS_STD_WSTRING\nGTEST_API_ AssertionResult IsSubstring(\n    const char* needle_expr, const char* haystack_expr,\n    const ::std::wstring& needle, const ::std::wstring& 
haystack);\nGTEST_API_ AssertionResult IsNotSubstring(\n    const char* needle_expr, const char* haystack_expr,\n    const ::std::wstring& needle, const ::std::wstring& haystack);\n#endif  // GTEST_HAS_STD_WSTRING\n\nnamespace internal {\n\n// Helper template function for comparing floating-points.\n//\n// Template parameter:\n//\n//   RawType: the raw floating-point type (either float or double)\n//\n// INTERNAL IMPLEMENTATION - DO NOT USE IN A USER PROGRAM.\ntemplate <typename RawType>\nAssertionResult CmpHelperFloatingPointEQ(const char* lhs_expression,\n                                         const char* rhs_expression,\n                                         RawType lhs_value,\n                                         RawType rhs_value) {\n  const FloatingPoint<RawType> lhs(lhs_value), rhs(rhs_value);\n\n  if (lhs.AlmostEquals(rhs)) {\n    return AssertionSuccess();\n  }\n\n  ::std::stringstream lhs_ss;\n  lhs_ss << std::setprecision(std::numeric_limits<RawType>::digits10 + 2)\n         << lhs_value;\n\n  ::std::stringstream rhs_ss;\n  rhs_ss << std::setprecision(std::numeric_limits<RawType>::digits10 + 2)\n         << rhs_value;\n\n  return EqFailure(lhs_expression,\n                   rhs_expression,\n                   StringStreamToString(&lhs_ss),\n                   StringStreamToString(&rhs_ss),\n                   false);\n}\n\n// Helper function for implementing ASSERT_NEAR.\n//\n// INTERNAL IMPLEMENTATION - DO NOT USE IN A USER PROGRAM.\nGTEST_API_ AssertionResult DoubleNearPredFormat(const char* expr1,\n                                                const char* expr2,\n                                                const char* abs_error_expr,\n                                                double val1,\n                                                double val2,\n                                                double abs_error);\n\n// INTERNAL IMPLEMENTATION - DO NOT USE IN USER CODE.\n// A class that enables one to stream messages to 
assertion macros\nclass GTEST_API_ AssertHelper {\n public:\n  // Constructor.\n  AssertHelper(TestPartResult::Type type,\n               const char* file,\n               int line,\n               const char* message);\n  ~AssertHelper();\n\n  // Message assignment is a semantic trick to enable assertion\n  // streaming; see the GTEST_MESSAGE_ macro below.\n  void operator=(const Message& message) const;\n\n private:\n  // We put our data in a struct so that the size of the AssertHelper class can\n  // be as small as possible.  This is important because gcc is incapable of\n  // re-using stack space even for temporary variables, so every EXPECT_EQ\n  // reserves stack space for another AssertHelper.\n  struct AssertHelperData {\n    AssertHelperData(TestPartResult::Type t,\n                     const char* srcfile,\n                     int line_num,\n                     const char* msg)\n        : type(t), file(srcfile), line(line_num), message(msg) { }\n\n    TestPartResult::Type const type;\n    const char* const file;\n    int const line;\n    std::string const message;\n\n   private:\n    GTEST_DISALLOW_COPY_AND_ASSIGN_(AssertHelperData);\n  };\n\n  AssertHelperData* const data_;\n\n  GTEST_DISALLOW_COPY_AND_ASSIGN_(AssertHelper);\n};\n\nenum GTestColor { COLOR_DEFAULT, COLOR_RED, COLOR_GREEN, COLOR_YELLOW };\n\nGTEST_API_ GTEST_ATTRIBUTE_PRINTF_(2, 3) void ColoredPrintf(GTestColor color,\n                                                            const char* fmt,\n                                                            ...);\n\n}  // namespace internal\n\n// The pure interface class that all value-parameterized tests inherit from.\n// A value-parameterized class must inherit from both ::testing::Test and\n// ::testing::WithParamInterface. 
In most cases that just means inheriting\n// from ::testing::TestWithParam, but more complicated test hierarchies\n// may need to inherit from Test and WithParamInterface at different levels.\n//\n// This interface has support for accessing the test parameter value via\n// the GetParam() method.\n//\n// Use it with one of the parameter generator defining functions, like Range(),\n// Values(), ValuesIn(), Bool(), and Combine().\n//\n// class FooTest : public ::testing::TestWithParam<int> {\n//  protected:\n//   FooTest() {\n//     // Can use GetParam() here.\n//   }\n//   ~FooTest() override {\n//     // Can use GetParam() here.\n//   }\n//   void SetUp() override {\n//     // Can use GetParam() here.\n//   }\n//   void TearDown override {\n//     // Can use GetParam() here.\n//   }\n// };\n// TEST_P(FooTest, DoesBar) {\n//   // Can use GetParam() method here.\n//   Foo foo;\n//   ASSERT_TRUE(foo.DoesBar(GetParam()));\n// }\n// INSTANTIATE_TEST_SUITE_P(OneToTenRange, FooTest, ::testing::Range(1, 10));\n\ntemplate <typename T>\nclass WithParamInterface {\n public:\n  typedef T ParamType;\n  virtual ~WithParamInterface() {}\n\n  // The current parameter value. Is also available in the test fixture's\n  // constructor.\n  static const ParamType& GetParam() {\n    GTEST_CHECK_(parameter_ != nullptr)\n        << \"GetParam() can only be called inside a value-parameterized test \"\n        << \"-- did you intend to write TEST_P instead of TEST_F?\";\n    return *parameter_;\n  }\n\n private:\n  // Sets parameter value. 
The caller is responsible for making sure the value\n  // remains alive and unchanged throughout the current test.\n  static void SetParam(const ParamType* parameter) {\n    parameter_ = parameter;\n  }\n\n  // Static value used for accessing parameter during a test lifetime.\n  static const ParamType* parameter_;\n\n  // TestClass must be a subclass of WithParamInterface<T> and Test.\n  template <class TestClass> friend class internal::ParameterizedTestFactory;\n};\n\ntemplate <typename T>\nconst T* WithParamInterface<T>::parameter_ = nullptr;\n\n// Most value-parameterized classes can ignore the existence of\n// WithParamInterface, and can just inherit from ::testing::TestWithParam.\n\ntemplate <typename T>\nclass TestWithParam : public Test, public WithParamInterface<T> {\n};\n\n// Macros for indicating success/failure in test code.\n\n// Skips test in runtime.\n// Skipping test aborts current function.\n// Skipped tests are neither successful nor failed.\n#define GTEST_SKIP() GTEST_SKIP_(\"\")\n\n// ADD_FAILURE unconditionally adds a failure to the current test.\n// SUCCEED generates a success - it doesn't automatically make the\n// current test successful, as a test is only successful when it has\n// no failure.\n//\n// EXPECT_* verifies that a certain condition is satisfied.  If not,\n// it behaves like ADD_FAILURE.  In particular:\n//\n//   EXPECT_TRUE  verifies that a Boolean condition is true.\n//   EXPECT_FALSE verifies that a Boolean condition is false.\n//\n// FAIL and ASSERT_* are similar to ADD_FAILURE and EXPECT_*, except\n// that they will also abort the current function on failure.  
People\n// usually want the fail-fast behavior of FAIL and ASSERT_*, but those\n// writing data-driven tests often find themselves using ADD_FAILURE\n// and EXPECT_* more.\n\n// Generates a nonfatal failure with a generic message.\n#define ADD_FAILURE() GTEST_NONFATAL_FAILURE_(\"Failed\")\n\n// Generates a nonfatal failure at the given source file location with\n// a generic message.\n#define ADD_FAILURE_AT(file, line) \\\n  GTEST_MESSAGE_AT_(file, line, \"Failed\", \\\n                    ::testing::TestPartResult::kNonFatalFailure)\n\n// Generates a fatal failure with a generic message.\n#define GTEST_FAIL() GTEST_FATAL_FAILURE_(\"Failed\")\n\n// Like GTEST_FAIL(), but at the given source file location.\n#define GTEST_FAIL_AT(file, line)         \\\n  GTEST_MESSAGE_AT_(file, line, \"Failed\", \\\n                    ::testing::TestPartResult::kFatalFailure)\n\n// Define this macro to 1 to omit the definition of FAIL(), which is a\n// generic name and clashes with some other libraries.\n#if !GTEST_DONT_DEFINE_FAIL\n# define FAIL() GTEST_FAIL()\n#endif\n\n// Generates a success with a generic message.\n#define GTEST_SUCCEED() GTEST_SUCCESS_(\"Succeeded\")\n\n// Define this macro to 1 to omit the definition of SUCCEED(), which\n// is a generic name and clashes with some other libraries.\n#if !GTEST_DONT_DEFINE_SUCCEED\n# define SUCCEED() GTEST_SUCCEED()\n#endif\n\n// Macros for testing exceptions.\n//\n//    * {ASSERT|EXPECT}_THROW(statement, expected_exception):\n//         Tests that the statement throws the expected exception.\n//    * {ASSERT|EXPECT}_NO_THROW(statement):\n//         Tests that the statement doesn't throw any exception.\n//    * {ASSERT|EXPECT}_ANY_THROW(statement):\n//         Tests that the statement throws an exception.\n\n#define EXPECT_THROW(statement, expected_exception) \\\n  GTEST_TEST_THROW_(statement, expected_exception, GTEST_NONFATAL_FAILURE_)\n#define EXPECT_NO_THROW(statement) \\\n  GTEST_TEST_NO_THROW_(statement, 
GTEST_NONFATAL_FAILURE_)\n#define EXPECT_ANY_THROW(statement) \\\n  GTEST_TEST_ANY_THROW_(statement, GTEST_NONFATAL_FAILURE_)\n#define ASSERT_THROW(statement, expected_exception) \\\n  GTEST_TEST_THROW_(statement, expected_exception, GTEST_FATAL_FAILURE_)\n#define ASSERT_NO_THROW(statement) \\\n  GTEST_TEST_NO_THROW_(statement, GTEST_FATAL_FAILURE_)\n#define ASSERT_ANY_THROW(statement) \\\n  GTEST_TEST_ANY_THROW_(statement, GTEST_FATAL_FAILURE_)\n\n// Boolean assertions. Condition can be either a Boolean expression or an\n// AssertionResult. For more information on how to use AssertionResult with\n// these macros see comments on that class.\n#define EXPECT_TRUE(condition) \\\n  GTEST_TEST_BOOLEAN_(condition, #condition, false, true, \\\n                      GTEST_NONFATAL_FAILURE_)\n#define EXPECT_FALSE(condition) \\\n  GTEST_TEST_BOOLEAN_(!(condition), #condition, true, false, \\\n                      GTEST_NONFATAL_FAILURE_)\n#define ASSERT_TRUE(condition) \\\n  GTEST_TEST_BOOLEAN_(condition, #condition, false, true, \\\n                      GTEST_FATAL_FAILURE_)\n#define ASSERT_FALSE(condition) \\\n  GTEST_TEST_BOOLEAN_(!(condition), #condition, true, false, \\\n                      GTEST_FATAL_FAILURE_)\n\n// Macros for testing equalities and inequalities.\n//\n//    * {ASSERT|EXPECT}_EQ(v1, v2): Tests that v1 == v2\n//    * {ASSERT|EXPECT}_NE(v1, v2): Tests that v1 != v2\n//    * {ASSERT|EXPECT}_LT(v1, v2): Tests that v1 < v2\n//    * {ASSERT|EXPECT}_LE(v1, v2): Tests that v1 <= v2\n//    * {ASSERT|EXPECT}_GT(v1, v2): Tests that v1 > v2\n//    * {ASSERT|EXPECT}_GE(v1, v2): Tests that v1 >= v2\n//\n// When they are not, Google Test prints both the tested expressions and\n// their actual values.  The values must be compatible built-in types,\n// or you will get a compiler error.  By \"compatible\" we mean that the\n// values can be compared by the respective operator.\n//\n// Note:\n//\n//   1. 
It is possible to make a user-defined type work with\n//   {ASSERT|EXPECT}_??(), but that requires overloading the\n//   comparison operators and is thus discouraged by the Google C++\n//   Usage Guide.  Therefore, you are advised to use the\n//   {ASSERT|EXPECT}_TRUE() macro to assert that two objects are\n//   equal.\n//\n//   2. The {ASSERT|EXPECT}_??() macros do pointer comparisons on\n//   pointers (in particular, C strings).  Therefore, if you use it\n//   with two C strings, you are testing how their locations in memory\n//   are related, not how their content is related.  To compare two C\n//   strings by content, use {ASSERT|EXPECT}_STR*().\n//\n//   3. {ASSERT|EXPECT}_EQ(v1, v2) is preferred to\n//   {ASSERT|EXPECT}_TRUE(v1 == v2), as the former tells you\n//   what the actual value is when it fails, and similarly for the\n//   other comparisons.\n//\n//   4. Do not depend on the order in which {ASSERT|EXPECT}_??()\n//   evaluate their arguments, which is undefined.\n//\n//   5. 
These macros evaluate their arguments exactly once.\n//\n// Examples:\n//\n//   EXPECT_NE(Foo(), 5);\n//   EXPECT_EQ(a_pointer, NULL);\n//   ASSERT_LT(i, array_size);\n//   ASSERT_GT(records.size(), 0) << \"There is no record left.\";\n\n#define EXPECT_EQ(val1, val2) \\\n  EXPECT_PRED_FORMAT2(::testing::internal::EqHelper::Compare, val1, val2)\n#define EXPECT_NE(val1, val2) \\\n  EXPECT_PRED_FORMAT2(::testing::internal::CmpHelperNE, val1, val2)\n#define EXPECT_LE(val1, val2) \\\n  EXPECT_PRED_FORMAT2(::testing::internal::CmpHelperLE, val1, val2)\n#define EXPECT_LT(val1, val2) \\\n  EXPECT_PRED_FORMAT2(::testing::internal::CmpHelperLT, val1, val2)\n#define EXPECT_GE(val1, val2) \\\n  EXPECT_PRED_FORMAT2(::testing::internal::CmpHelperGE, val1, val2)\n#define EXPECT_GT(val1, val2) \\\n  EXPECT_PRED_FORMAT2(::testing::internal::CmpHelperGT, val1, val2)\n\n#define GTEST_ASSERT_EQ(val1, val2) \\\n  ASSERT_PRED_FORMAT2(::testing::internal::EqHelper::Compare, val1, val2)\n#define GTEST_ASSERT_NE(val1, val2) \\\n  ASSERT_PRED_FORMAT2(::testing::internal::CmpHelperNE, val1, val2)\n#define GTEST_ASSERT_LE(val1, val2) \\\n  ASSERT_PRED_FORMAT2(::testing::internal::CmpHelperLE, val1, val2)\n#define GTEST_ASSERT_LT(val1, val2) \\\n  ASSERT_PRED_FORMAT2(::testing::internal::CmpHelperLT, val1, val2)\n#define GTEST_ASSERT_GE(val1, val2) \\\n  ASSERT_PRED_FORMAT2(::testing::internal::CmpHelperGE, val1, val2)\n#define GTEST_ASSERT_GT(val1, val2) \\\n  ASSERT_PRED_FORMAT2(::testing::internal::CmpHelperGT, val1, val2)\n\n// Define macro GTEST_DONT_DEFINE_ASSERT_XY to 1 to omit the definition of\n// ASSERT_XY(), which clashes with some users' own code.\n\n#if !GTEST_DONT_DEFINE_ASSERT_EQ\n# define ASSERT_EQ(val1, val2) GTEST_ASSERT_EQ(val1, val2)\n#endif\n\n#if !GTEST_DONT_DEFINE_ASSERT_NE\n# define ASSERT_NE(val1, val2) GTEST_ASSERT_NE(val1, val2)\n#endif\n\n#if !GTEST_DONT_DEFINE_ASSERT_LE\n# define ASSERT_LE(val1, val2) GTEST_ASSERT_LE(val1, val2)\n#endif\n\n#if 
!GTEST_DONT_DEFINE_ASSERT_LT\n# define ASSERT_LT(val1, val2) GTEST_ASSERT_LT(val1, val2)\n#endif\n\n#if !GTEST_DONT_DEFINE_ASSERT_GE\n# define ASSERT_GE(val1, val2) GTEST_ASSERT_GE(val1, val2)\n#endif\n\n#if !GTEST_DONT_DEFINE_ASSERT_GT\n# define ASSERT_GT(val1, val2) GTEST_ASSERT_GT(val1, val2)\n#endif\n\n// C-string Comparisons.  All tests treat NULL and any non-NULL string\n// as different.  Two NULLs are equal.\n//\n//    * {ASSERT|EXPECT}_STREQ(s1, s2):     Tests that s1 == s2\n//    * {ASSERT|EXPECT}_STRNE(s1, s2):     Tests that s1 != s2\n//    * {ASSERT|EXPECT}_STRCASEEQ(s1, s2): Tests that s1 == s2, ignoring case\n//    * {ASSERT|EXPECT}_STRCASENE(s1, s2): Tests that s1 != s2, ignoring case\n//\n// For wide or narrow string objects, you can use the\n// {ASSERT|EXPECT}_??() macros.\n//\n// Don't depend on the order in which the arguments are evaluated,\n// which is undefined.\n//\n// These macros evaluate their arguments exactly once.\n\n#define EXPECT_STREQ(s1, s2) \\\n  EXPECT_PRED_FORMAT2(::testing::internal::CmpHelperSTREQ, s1, s2)\n#define EXPECT_STRNE(s1, s2) \\\n  EXPECT_PRED_FORMAT2(::testing::internal::CmpHelperSTRNE, s1, s2)\n#define EXPECT_STRCASEEQ(s1, s2) \\\n  EXPECT_PRED_FORMAT2(::testing::internal::CmpHelperSTRCASEEQ, s1, s2)\n#define EXPECT_STRCASENE(s1, s2)\\\n  EXPECT_PRED_FORMAT2(::testing::internal::CmpHelperSTRCASENE, s1, s2)\n\n#define ASSERT_STREQ(s1, s2) \\\n  ASSERT_PRED_FORMAT2(::testing::internal::CmpHelperSTREQ, s1, s2)\n#define ASSERT_STRNE(s1, s2) \\\n  ASSERT_PRED_FORMAT2(::testing::internal::CmpHelperSTRNE, s1, s2)\n#define ASSERT_STRCASEEQ(s1, s2) \\\n  ASSERT_PRED_FORMAT2(::testing::internal::CmpHelperSTRCASEEQ, s1, s2)\n#define ASSERT_STRCASENE(s1, s2)\\\n  ASSERT_PRED_FORMAT2(::testing::internal::CmpHelperSTRCASENE, s1, s2)\n\n// Macros for comparing floating-point numbers.\n//\n//    * {ASSERT|EXPECT}_FLOAT_EQ(val1, val2):\n//         Tests that two float values are almost equal.\n//    * {ASSERT|EXPECT}_DOUBLE_EQ(val1, 
val2):\n//         Tests that two double values are almost equal.\n//    * {ASSERT|EXPECT}_NEAR(v1, v2, abs_error):\n//         Tests that v1 and v2 are within the given distance to each other.\n//\n// Google Test uses ULP-based comparison to automatically pick a default\n// error bound that is appropriate for the operands.  See the\n// FloatingPoint template class in gtest-internal.h if you are\n// interested in the implementation details.\n\n#define EXPECT_FLOAT_EQ(val1, val2)\\\n  EXPECT_PRED_FORMAT2(::testing::internal::CmpHelperFloatingPointEQ<float>, \\\n                      val1, val2)\n\n#define EXPECT_DOUBLE_EQ(val1, val2)\\\n  EXPECT_PRED_FORMAT2(::testing::internal::CmpHelperFloatingPointEQ<double>, \\\n                      val1, val2)\n\n#define ASSERT_FLOAT_EQ(val1, val2)\\\n  ASSERT_PRED_FORMAT2(::testing::internal::CmpHelperFloatingPointEQ<float>, \\\n                      val1, val2)\n\n#define ASSERT_DOUBLE_EQ(val1, val2)\\\n  ASSERT_PRED_FORMAT2(::testing::internal::CmpHelperFloatingPointEQ<double>, \\\n                      val1, val2)\n\n#define EXPECT_NEAR(val1, val2, abs_error)\\\n  EXPECT_PRED_FORMAT3(::testing::internal::DoubleNearPredFormat, \\\n                      val1, val2, abs_error)\n\n#define ASSERT_NEAR(val1, val2, abs_error)\\\n  ASSERT_PRED_FORMAT3(::testing::internal::DoubleNearPredFormat, \\\n                      val1, val2, abs_error)\n\n// These predicate format functions work on floating-point values, and\n// can be used in {ASSERT|EXPECT}_PRED_FORMAT2*(), e.g.\n//\n//   EXPECT_PRED_FORMAT2(testing::DoubleLE, Foo(), 5.0);\n\n// Asserts that val1 is less than, or almost equal to, val2.  Fails\n// otherwise.  
In particular, it fails if either val1 or val2 is NaN.\nGTEST_API_ AssertionResult FloatLE(const char* expr1, const char* expr2,\n                                   float val1, float val2);\nGTEST_API_ AssertionResult DoubleLE(const char* expr1, const char* expr2,\n                                    double val1, double val2);\n\n\n#if GTEST_OS_WINDOWS\n\n// Macros that test for HRESULT failure and success, these are only useful\n// on Windows, and rely on Windows SDK macros and APIs to compile.\n//\n//    * {ASSERT|EXPECT}_HRESULT_{SUCCEEDED|FAILED}(expr)\n//\n// When expr unexpectedly fails or succeeds, Google Test prints the\n// expected result and the actual result with both a human-readable\n// string representation of the error, if available, as well as the\n// hex result code.\n# define EXPECT_HRESULT_SUCCEEDED(expr) \\\n    EXPECT_PRED_FORMAT1(::testing::internal::IsHRESULTSuccess, (expr))\n\n# define ASSERT_HRESULT_SUCCEEDED(expr) \\\n    ASSERT_PRED_FORMAT1(::testing::internal::IsHRESULTSuccess, (expr))\n\n# define EXPECT_HRESULT_FAILED(expr) \\\n    EXPECT_PRED_FORMAT1(::testing::internal::IsHRESULTFailure, (expr))\n\n# define ASSERT_HRESULT_FAILED(expr) \\\n    ASSERT_PRED_FORMAT1(::testing::internal::IsHRESULTFailure, (expr))\n\n#endif  // GTEST_OS_WINDOWS\n\n// Macros that execute statement and check that it doesn't generate new fatal\n// failures in the current thread.\n//\n//   * {ASSERT|EXPECT}_NO_FATAL_FAILURE(statement);\n//\n// Examples:\n//\n//   EXPECT_NO_FATAL_FAILURE(Process());\n//   ASSERT_NO_FATAL_FAILURE(Process()) << \"Process() failed\";\n//\n#define ASSERT_NO_FATAL_FAILURE(statement) \\\n    GTEST_TEST_NO_FATAL_FAILURE_(statement, GTEST_FATAL_FAILURE_)\n#define EXPECT_NO_FATAL_FAILURE(statement) \\\n    GTEST_TEST_NO_FATAL_FAILURE_(statement, GTEST_NONFATAL_FAILURE_)\n\n// Causes a trace (including the given source file path and line number,\n// and the given message) to be included in every test failure message generated\n// by code 
in the scope of the lifetime of an instance of this class. The effect\n// is undone with the destruction of the instance.\n//\n// The message argument can be anything streamable to std::ostream.\n//\n// Example:\n//   testing::ScopedTrace trace(\"file.cc\", 123, \"message\");\n//\nclass GTEST_API_ ScopedTrace {\n public:\n  // The c'tor pushes the given source file location and message onto\n  // a trace stack maintained by Google Test.\n\n  // Template version. Uses Message() to convert the values into strings.\n  // Slow, but flexible.\n  template <typename T>\n  ScopedTrace(const char* file, int line, const T& message) {\n    PushTrace(file, line, (Message() << message).GetString());\n  }\n\n  // Optimize for some known types.\n  ScopedTrace(const char* file, int line, const char* message) {\n    PushTrace(file, line, message ? message : \"(null)\");\n  }\n\n  ScopedTrace(const char* file, int line, const std::string& message) {\n    PushTrace(file, line, message);\n  }\n\n  // The d'tor pops the info pushed by the c'tor.\n  //\n  // Note that the d'tor is not virtual in order to be efficient.\n  // Don't inherit from ScopedTrace!\n  ~ScopedTrace();\n\n private:\n  void PushTrace(const char* file, int line, std::string message);\n\n  GTEST_DISALLOW_COPY_AND_ASSIGN_(ScopedTrace);\n} GTEST_ATTRIBUTE_UNUSED_;  // A ScopedTrace object does its job in its\n                            // c'tor and d'tor.  Therefore it doesn't\n                            // need to be used otherwise.\n\n// Causes a trace (including the source file path, the current line\n// number, and the given message) to be included in every test failure\n// message generated by code in the current scope.  
The effect is\n// undone when the control leaves the current scope.\n//\n// The message argument can be anything streamable to std::ostream.\n//\n// In the implementation, we include the current line number as part\n// of the dummy variable name, thus allowing multiple SCOPED_TRACE()s\n// to appear in the same block - as long as they are on different\n// lines.\n//\n// Assuming that each thread maintains its own stack of traces.\n// Therefore, a SCOPED_TRACE() would (correctly) only affect the\n// assertions in its own thread.\n#define SCOPED_TRACE(message) \\\n  ::testing::ScopedTrace GTEST_CONCAT_TOKEN_(gtest_trace_, __LINE__)(\\\n    __FILE__, __LINE__, (message))\n\n// Compile-time assertion for type equality.\n// StaticAssertTypeEq<type1, type2>() compiles if and only if type1 and type2\n// are the same type.  The value it returns is not interesting.\n//\n// Instead of making StaticAssertTypeEq a class template, we make it a\n// function template that invokes a helper class template.  This\n// prevents a user from misusing StaticAssertTypeEq<T1, T2> by\n// defining objects of that type.\n//\n// CAVEAT:\n//\n// When used inside a method of a class template,\n// StaticAssertTypeEq<T1, T2>() is effective ONLY IF the method is\n// instantiated.  For example, given:\n//\n//   template <typename T> class Foo {\n//    public:\n//     void Bar() { testing::StaticAssertTypeEq<int, T>(); }\n//   };\n//\n// the code:\n//\n//   void Test1() { Foo<bool> foo; }\n//\n// will NOT generate a compiler error, as Foo<bool>::Bar() is never\n// actually instantiated.  
Instead, you need:\n//\n//   void Test2() { Foo<bool> foo; foo.Bar(); }\n//\n// to cause a compiler error.\ntemplate <typename T1, typename T2>\nconstexpr bool StaticAssertTypeEq() noexcept {\n  static_assert(std::is_same<T1, T2>::value, \"T1 and T2 are not the same type\");\n  return true;\n}\n\n// Defines a test.\n//\n// The first parameter is the name of the test suite, and the second\n// parameter is the name of the test within the test suite.\n//\n// The convention is to end the test suite name with \"Test\".  For\n// example, a test suite for the Foo class can be named FooTest.\n//\n// Test code should appear between braces after an invocation of\n// this macro.  Example:\n//\n//   TEST(FooTest, InitializesCorrectly) {\n//     Foo foo;\n//     EXPECT_TRUE(foo.StatusIsOK());\n//   }\n\n// Note that we call GetTestTypeId() instead of GetTypeId<\n// ::testing::Test>() here to get the type ID of testing::Test.  This\n// is to work around a suspected linker bug when using Google Test as\n// a framework on Mac OS X.  The bug causes GetTypeId<\n// ::testing::Test>() to return different values depending on whether\n// the call is from the Google Test framework itself or from user test\n// code.  GetTestTypeId() is guaranteed to always return the same\n// value, as it always calls GetTypeId<>() from the Google Test\n// framework.\n#define GTEST_TEST(test_suite_name, test_name)             \\\n  GTEST_TEST_(test_suite_name, test_name, ::testing::Test, \\\n              ::testing::internal::GetTestTypeId())\n\n// Define this macro to 1 to omit the definition of TEST(), which\n// is a generic name and clashes with some other libraries.\n#if !GTEST_DONT_DEFINE_TEST\n#define TEST(test_suite_name, test_name) GTEST_TEST(test_suite_name, test_name)\n#endif\n\n// Defines a test that uses a test fixture.\n//\n// The first parameter is the name of the test fixture class, which\n// also doubles as the test suite name.  
The second parameter is the\n// name of the test within the test suite.\n//\n// A test fixture class must be declared earlier.  The user should put\n// the test code between braces after using this macro.  Example:\n//\n//   class FooTest : public testing::Test {\n//    protected:\n//     void SetUp() override { b_.AddElement(3); }\n//\n//     Foo a_;\n//     Foo b_;\n//   };\n//\n//   TEST_F(FooTest, InitializesCorrectly) {\n//     EXPECT_TRUE(a_.StatusIsOK());\n//   }\n//\n//   TEST_F(FooTest, ReturnsElementCountCorrectly) {\n//     EXPECT_EQ(a_.size(), 0);\n//     EXPECT_EQ(b_.size(), 1);\n//   }\n//\n// GOOGLETEST_CM0011 DO NOT DELETE\n#define TEST_F(test_fixture, test_name)\\\n  GTEST_TEST_(test_fixture, test_name, test_fixture, \\\n              ::testing::internal::GetTypeId<test_fixture>())\n\n// Returns a path to temporary directory.\n// Tries to determine an appropriate directory for the platform.\nGTEST_API_ std::string TempDir();\n\n#ifdef _MSC_VER\n#  pragma warning(pop)\n#endif\n\n// Dynamically registers a test with the framework.\n//\n// This is an advanced API only to be used when the `TEST` macros are\n// insufficient. The macros should be preferred when possible, as they avoid\n// most of the complexity of calling this function.\n//\n// The `factory` argument is a factory callable (move-constructible) object or\n// function pointer that creates a new instance of the Test object. It\n// handles ownership to the caller. The signature of the callable is\n// `Fixture*()`, where `Fixture` is the test fixture class for the test. All\n// tests registered with the same `test_suite_name` must return the same\n// fixture type. 
This is checked at runtime.\n//\n// The framework will infer the fixture class from the factory and will call\n// the `SetUpTestSuite` and `TearDownTestSuite` for it.\n//\n// Must be called before `RUN_ALL_TESTS()` is invoked, otherwise behavior is\n// undefined.\n//\n// Use case example:\n//\n// class MyFixture : public ::testing::Test {\n//  public:\n//   // All of these optional, just like in regular macro usage.\n//   static void SetUpTestSuite() { ... }\n//   static void TearDownTestSuite() { ... }\n//   void SetUp() override { ... }\n//   void TearDown() override { ... }\n// };\n//\n// class MyTest : public MyFixture {\n//  public:\n//   explicit MyTest(int data) : data_(data) {}\n//   void TestBody() override { ... }\n//\n//  private:\n//   int data_;\n// };\n//\n// void RegisterMyTests(const std::vector<int>& values) {\n//   for (int v : values) {\n//     ::testing::RegisterTest(\n//         \"MyFixture\", (\"Test\" + std::to_string(v)).c_str(), nullptr,\n//         std::to_string(v).c_str(),\n//         __FILE__, __LINE__,\n//         // Important to use the fixture type as the return type here.\n//         [=]() -> MyFixture* { return new MyTest(v); });\n//   }\n// }\n// ...\n// int main(int argc, char** argv) {\n//   std::vector<int> values_to_test = LoadValuesFromConfig();\n//   RegisterMyTests(values_to_test);\n//   ...\n//   return RUN_ALL_TESTS();\n// }\n//\ntemplate <int&... 
ExplicitParameterBarrier, typename Factory>\nTestInfo* RegisterTest(const char* test_suite_name, const char* test_name,\n                       const char* type_param, const char* value_param,\n                       const char* file, int line, Factory factory) {\n  using TestT = typename std::remove_pointer<decltype(factory())>::type;\n\n  class FactoryImpl : public internal::TestFactoryBase {\n   public:\n    explicit FactoryImpl(Factory f) : factory_(std::move(f)) {}\n    Test* CreateTest() override { return factory_(); }\n\n   private:\n    Factory factory_;\n  };\n\n  return internal::MakeAndRegisterTestInfo(\n      test_suite_name, test_name, type_param, value_param,\n      internal::CodeLocation(file, line), internal::GetTypeId<TestT>(),\n      internal::SuiteApiResolver<TestT>::GetSetUpCaseOrSuite(file, line),\n      internal::SuiteApiResolver<TestT>::GetTearDownCaseOrSuite(file, line),\n      new FactoryImpl{std::move(factory)});\n}\n\n}  // namespace testing\n\n// Use this function in main() to run all tests.  It returns 0 if all\n// tests are successful, or 1 otherwise.\n//\n// RUN_ALL_TESTS() should be invoked after the command line has been\n// parsed by InitGoogleTest().\n//\n// This function was formerly a macro; thus, it is in the global\n// namespace and has an all-caps name.\nint RUN_ALL_TESTS() GTEST_MUST_USE_RESULT_;\n\ninline int RUN_ALL_TESTS() {\n  return ::testing::UnitTest::GetInstance()->Run();\n}\n\nGTEST_DISABLE_MSC_WARNINGS_POP_()  //  4251\n\n#endif  // GTEST_INCLUDE_GTEST_GTEST_H_\n"
  },
  {
    "path": "test/googletest/include/gtest/gtest_pred_impl.h",
    "content": "// Copyright 2006, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n// This file is AUTOMATICALLY GENERATED on 01/02/2019 by command\n// 'gen_gtest_pred_impl.py 5'.  
DO NOT EDIT BY HAND!\n//\n// Implements a family of generic predicate assertion macros.\n// GOOGLETEST_CM0001 DO NOT DELETE\n\n#ifndef GTEST_INCLUDE_GTEST_GTEST_PRED_IMPL_H_\n#define GTEST_INCLUDE_GTEST_GTEST_PRED_IMPL_H_\n\n#include \"gtest/gtest.h\"\n\nnamespace testing {\n\n// This header implements a family of generic predicate assertion\n// macros:\n//\n//   ASSERT_PRED_FORMAT1(pred_format, v1)\n//   ASSERT_PRED_FORMAT2(pred_format, v1, v2)\n//   ...\n//\n// where pred_format is a function or functor that takes n (in the\n// case of ASSERT_PRED_FORMATn) values and their source expression\n// text, and returns a testing::AssertionResult.  See the definition\n// of ASSERT_EQ in gtest.h for an example.\n//\n// If you don't care about formatting, you can use the more\n// restrictive version:\n//\n//   ASSERT_PRED1(pred, v1)\n//   ASSERT_PRED2(pred, v1, v2)\n//   ...\n//\n// where pred is an n-ary function or functor that returns bool,\n// and the values v1, v2, ..., must support the << operator for\n// streaming to std::ostream.\n//\n// We also define the EXPECT_* variations.\n//\n// For now we only support predicates whose arity is at most 5.\n// Please email googletestframework@googlegroups.com if you need\n// support for higher arities.\n\n// GTEST_ASSERT_ is the basic statement to which all of the assertions\n// in this file reduce.  Don't use this in your code.\n\n#define GTEST_ASSERT_(expression, on_failure) \\\n  GTEST_AMBIGUOUS_ELSE_BLOCKER_ \\\n  if (const ::testing::AssertionResult gtest_ar = (expression)) \\\n    ; \\\n  else \\\n    on_failure(gtest_ar.failure_message())\n\n\n// Helper function for implementing {EXPECT|ASSERT}_PRED1.  
Don't use\n// this in your code.\ntemplate <typename Pred,\n          typename T1>\nAssertionResult AssertPred1Helper(const char* pred_text,\n                                  const char* e1,\n                                  Pred pred,\n                                  const T1& v1) {\n  if (pred(v1)) return AssertionSuccess();\n\n  return AssertionFailure()\n         << pred_text << \"(\" << e1 << \") evaluates to false, where\"\n         << \"\\n\"\n         << e1 << \" evaluates to \" << ::testing::PrintToString(v1);\n}\n\n// Internal macro for implementing {EXPECT|ASSERT}_PRED_FORMAT1.\n// Don't use this in your code.\n#define GTEST_PRED_FORMAT1_(pred_format, v1, on_failure)\\\n  GTEST_ASSERT_(pred_format(#v1, v1), \\\n                on_failure)\n\n// Internal macro for implementing {EXPECT|ASSERT}_PRED1.  Don't use\n// this in your code.\n#define GTEST_PRED1_(pred, v1, on_failure)\\\n  GTEST_ASSERT_(::testing::AssertPred1Helper(#pred, \\\n                                             #v1, \\\n                                             pred, \\\n                                             v1), on_failure)\n\n// Unary predicate assertion macros.\n#define EXPECT_PRED_FORMAT1(pred_format, v1) \\\n  GTEST_PRED_FORMAT1_(pred_format, v1, GTEST_NONFATAL_FAILURE_)\n#define EXPECT_PRED1(pred, v1) \\\n  GTEST_PRED1_(pred, v1, GTEST_NONFATAL_FAILURE_)\n#define ASSERT_PRED_FORMAT1(pred_format, v1) \\\n  GTEST_PRED_FORMAT1_(pred_format, v1, GTEST_FATAL_FAILURE_)\n#define ASSERT_PRED1(pred, v1) \\\n  GTEST_PRED1_(pred, v1, GTEST_FATAL_FAILURE_)\n\n\n\n// Helper function for implementing {EXPECT|ASSERT}_PRED2.  
Don't use\n// this in your code.\ntemplate <typename Pred,\n          typename T1,\n          typename T2>\nAssertionResult AssertPred2Helper(const char* pred_text,\n                                  const char* e1,\n                                  const char* e2,\n                                  Pred pred,\n                                  const T1& v1,\n                                  const T2& v2) {\n  if (pred(v1, v2)) return AssertionSuccess();\n\n  return AssertionFailure()\n         << pred_text << \"(\" << e1 << \", \" << e2\n         << \") evaluates to false, where\"\n         << \"\\n\"\n         << e1 << \" evaluates to \" << ::testing::PrintToString(v1) << \"\\n\"\n         << e2 << \" evaluates to \" << ::testing::PrintToString(v2);\n}\n\n// Internal macro for implementing {EXPECT|ASSERT}_PRED_FORMAT2.\n// Don't use this in your code.\n#define GTEST_PRED_FORMAT2_(pred_format, v1, v2, on_failure)\\\n  GTEST_ASSERT_(pred_format(#v1, #v2, v1, v2), \\\n                on_failure)\n\n// Internal macro for implementing {EXPECT|ASSERT}_PRED2.  
Don't use\n// this in your code.\n#define GTEST_PRED2_(pred, v1, v2, on_failure)\\\n  GTEST_ASSERT_(::testing::AssertPred2Helper(#pred, \\\n                                             #v1, \\\n                                             #v2, \\\n                                             pred, \\\n                                             v1, \\\n                                             v2), on_failure)\n\n// Binary predicate assertion macros.\n#define EXPECT_PRED_FORMAT2(pred_format, v1, v2) \\\n  GTEST_PRED_FORMAT2_(pred_format, v1, v2, GTEST_NONFATAL_FAILURE_)\n#define EXPECT_PRED2(pred, v1, v2) \\\n  GTEST_PRED2_(pred, v1, v2, GTEST_NONFATAL_FAILURE_)\n#define ASSERT_PRED_FORMAT2(pred_format, v1, v2) \\\n  GTEST_PRED_FORMAT2_(pred_format, v1, v2, GTEST_FATAL_FAILURE_)\n#define ASSERT_PRED2(pred, v1, v2) \\\n  GTEST_PRED2_(pred, v1, v2, GTEST_FATAL_FAILURE_)\n\n\n\n// Helper function for implementing {EXPECT|ASSERT}_PRED3.  Don't use\n// this in your code.\ntemplate <typename Pred,\n          typename T1,\n          typename T2,\n          typename T3>\nAssertionResult AssertPred3Helper(const char* pred_text,\n                                  const char* e1,\n                                  const char* e2,\n                                  const char* e3,\n                                  Pred pred,\n                                  const T1& v1,\n                                  const T2& v2,\n                                  const T3& v3) {\n  if (pred(v1, v2, v3)) return AssertionSuccess();\n\n  return AssertionFailure()\n         << pred_text << \"(\" << e1 << \", \" << e2 << \", \" << e3\n         << \") evaluates to false, where\"\n         << \"\\n\"\n         << e1 << \" evaluates to \" << ::testing::PrintToString(v1) << \"\\n\"\n         << e2 << \" evaluates to \" << ::testing::PrintToString(v2) << \"\\n\"\n         << e3 << \" evaluates to \" << ::testing::PrintToString(v3);\n}\n\n// Internal macro for implementing 
{EXPECT|ASSERT}_PRED_FORMAT3.\n// Don't use this in your code.\n#define GTEST_PRED_FORMAT3_(pred_format, v1, v2, v3, on_failure)\\\n  GTEST_ASSERT_(pred_format(#v1, #v2, #v3, v1, v2, v3), \\\n                on_failure)\n\n// Internal macro for implementing {EXPECT|ASSERT}_PRED3.  Don't use\n// this in your code.\n#define GTEST_PRED3_(pred, v1, v2, v3, on_failure)\\\n  GTEST_ASSERT_(::testing::AssertPred3Helper(#pred, \\\n                                             #v1, \\\n                                             #v2, \\\n                                             #v3, \\\n                                             pred, \\\n                                             v1, \\\n                                             v2, \\\n                                             v3), on_failure)\n\n// Ternary predicate assertion macros.\n#define EXPECT_PRED_FORMAT3(pred_format, v1, v2, v3) \\\n  GTEST_PRED_FORMAT3_(pred_format, v1, v2, v3, GTEST_NONFATAL_FAILURE_)\n#define EXPECT_PRED3(pred, v1, v2, v3) \\\n  GTEST_PRED3_(pred, v1, v2, v3, GTEST_NONFATAL_FAILURE_)\n#define ASSERT_PRED_FORMAT3(pred_format, v1, v2, v3) \\\n  GTEST_PRED_FORMAT3_(pred_format, v1, v2, v3, GTEST_FATAL_FAILURE_)\n#define ASSERT_PRED3(pred, v1, v2, v3) \\\n  GTEST_PRED3_(pred, v1, v2, v3, GTEST_FATAL_FAILURE_)\n\n\n\n// Helper function for implementing {EXPECT|ASSERT}_PRED4.  
Don't use\n// this in your code.\ntemplate <typename Pred,\n          typename T1,\n          typename T2,\n          typename T3,\n          typename T4>\nAssertionResult AssertPred4Helper(const char* pred_text,\n                                  const char* e1,\n                                  const char* e2,\n                                  const char* e3,\n                                  const char* e4,\n                                  Pred pred,\n                                  const T1& v1,\n                                  const T2& v2,\n                                  const T3& v3,\n                                  const T4& v4) {\n  if (pred(v1, v2, v3, v4)) return AssertionSuccess();\n\n  return AssertionFailure()\n         << pred_text << \"(\" << e1 << \", \" << e2 << \", \" << e3 << \", \" << e4\n         << \") evaluates to false, where\"\n         << \"\\n\"\n         << e1 << \" evaluates to \" << ::testing::PrintToString(v1) << \"\\n\"\n         << e2 << \" evaluates to \" << ::testing::PrintToString(v2) << \"\\n\"\n         << e3 << \" evaluates to \" << ::testing::PrintToString(v3) << \"\\n\"\n         << e4 << \" evaluates to \" << ::testing::PrintToString(v4);\n}\n\n// Internal macro for implementing {EXPECT|ASSERT}_PRED_FORMAT4.\n// Don't use this in your code.\n#define GTEST_PRED_FORMAT4_(pred_format, v1, v2, v3, v4, on_failure)\\\n  GTEST_ASSERT_(pred_format(#v1, #v2, #v3, #v4, v1, v2, v3, v4), \\\n                on_failure)\n\n// Internal macro for implementing {EXPECT|ASSERT}_PRED4.  
Don't use\n// this in your code.\n#define GTEST_PRED4_(pred, v1, v2, v3, v4, on_failure)\\\n  GTEST_ASSERT_(::testing::AssertPred4Helper(#pred, \\\n                                             #v1, \\\n                                             #v2, \\\n                                             #v3, \\\n                                             #v4, \\\n                                             pred, \\\n                                             v1, \\\n                                             v2, \\\n                                             v3, \\\n                                             v4), on_failure)\n\n// 4-ary predicate assertion macros.\n#define EXPECT_PRED_FORMAT4(pred_format, v1, v2, v3, v4) \\\n  GTEST_PRED_FORMAT4_(pred_format, v1, v2, v3, v4, GTEST_NONFATAL_FAILURE_)\n#define EXPECT_PRED4(pred, v1, v2, v3, v4) \\\n  GTEST_PRED4_(pred, v1, v2, v3, v4, GTEST_NONFATAL_FAILURE_)\n#define ASSERT_PRED_FORMAT4(pred_format, v1, v2, v3, v4) \\\n  GTEST_PRED_FORMAT4_(pred_format, v1, v2, v3, v4, GTEST_FATAL_FAILURE_)\n#define ASSERT_PRED4(pred, v1, v2, v3, v4) \\\n  GTEST_PRED4_(pred, v1, v2, v3, v4, GTEST_FATAL_FAILURE_)\n\n\n\n// Helper function for implementing {EXPECT|ASSERT}_PRED5.  
Don't use\n// this in your code.\ntemplate <typename Pred,\n          typename T1,\n          typename T2,\n          typename T3,\n          typename T4,\n          typename T5>\nAssertionResult AssertPred5Helper(const char* pred_text,\n                                  const char* e1,\n                                  const char* e2,\n                                  const char* e3,\n                                  const char* e4,\n                                  const char* e5,\n                                  Pred pred,\n                                  const T1& v1,\n                                  const T2& v2,\n                                  const T3& v3,\n                                  const T4& v4,\n                                  const T5& v5) {\n  if (pred(v1, v2, v3, v4, v5)) return AssertionSuccess();\n\n  return AssertionFailure()\n         << pred_text << \"(\" << e1 << \", \" << e2 << \", \" << e3 << \", \" << e4\n         << \", \" << e5 << \") evaluates to false, where\"\n         << \"\\n\"\n         << e1 << \" evaluates to \" << ::testing::PrintToString(v1) << \"\\n\"\n         << e2 << \" evaluates to \" << ::testing::PrintToString(v2) << \"\\n\"\n         << e3 << \" evaluates to \" << ::testing::PrintToString(v3) << \"\\n\"\n         << e4 << \" evaluates to \" << ::testing::PrintToString(v4) << \"\\n\"\n         << e5 << \" evaluates to \" << ::testing::PrintToString(v5);\n}\n\n// Internal macro for implementing {EXPECT|ASSERT}_PRED_FORMAT5.\n// Don't use this in your code.\n#define GTEST_PRED_FORMAT5_(pred_format, v1, v2, v3, v4, v5, on_failure)\\\n  GTEST_ASSERT_(pred_format(#v1, #v2, #v3, #v4, #v5, v1, v2, v3, v4, v5), \\\n                on_failure)\n\n// Internal macro for implementing {EXPECT|ASSERT}_PRED5.  
Don't use\n// this in your code.\n#define GTEST_PRED5_(pred, v1, v2, v3, v4, v5, on_failure)\\\n  GTEST_ASSERT_(::testing::AssertPred5Helper(#pred, \\\n                                             #v1, \\\n                                             #v2, \\\n                                             #v3, \\\n                                             #v4, \\\n                                             #v5, \\\n                                             pred, \\\n                                             v1, \\\n                                             v2, \\\n                                             v3, \\\n                                             v4, \\\n                                             v5), on_failure)\n\n// 5-ary predicate assertion macros.\n#define EXPECT_PRED_FORMAT5(pred_format, v1, v2, v3, v4, v5) \\\n  GTEST_PRED_FORMAT5_(pred_format, v1, v2, v3, v4, v5, GTEST_NONFATAL_FAILURE_)\n#define EXPECT_PRED5(pred, v1, v2, v3, v4, v5) \\\n  GTEST_PRED5_(pred, v1, v2, v3, v4, v5, GTEST_NONFATAL_FAILURE_)\n#define ASSERT_PRED_FORMAT5(pred_format, v1, v2, v3, v4, v5) \\\n  GTEST_PRED_FORMAT5_(pred_format, v1, v2, v3, v4, v5, GTEST_FATAL_FAILURE_)\n#define ASSERT_PRED5(pred, v1, v2, v3, v4, v5) \\\n  GTEST_PRED5_(pred, v1, v2, v3, v4, v5, GTEST_FATAL_FAILURE_)\n\n\n\n}  // namespace testing\n\n#endif  // GTEST_INCLUDE_GTEST_GTEST_PRED_IMPL_H_\n"
  },
  {
    "path": "test/googletest/include/gtest/gtest_prod.h",
    "content": "// Copyright 2006, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n//\n// Google C++ Testing and Mocking Framework definitions useful in production code.\n// GOOGLETEST_CM0003 DO NOT DELETE\n\n#ifndef GTEST_INCLUDE_GTEST_GTEST_PROD_H_\n#define GTEST_INCLUDE_GTEST_GTEST_PROD_H_\n\n// When you need to test the private or protected members of a class,\n// use the FRIEND_TEST macro to declare your tests as friends of the\n// class.  
For example:\n//\n// class MyClass {\n//  private:\n//   void PrivateMethod();\n//   FRIEND_TEST(MyClassTest, PrivateMethodWorks);\n// };\n//\n// class MyClassTest : public testing::Test {\n//   // ...\n// };\n//\n// TEST_F(MyClassTest, PrivateMethodWorks) {\n//   // Can call MyClass::PrivateMethod() here.\n// }\n//\n// Note: The test class must be in the same namespace as the class being tested.\n// For example, putting MyClassTest in an anonymous namespace will not work.\n\n#define FRIEND_TEST(test_case_name, test_name)\\\nfriend class test_case_name##_##test_name##_Test\n\n#endif  // GTEST_INCLUDE_GTEST_GTEST_PROD_H_\n"
  },
  {
    "path": "test/googletest/include/gtest/internal/custom/README.md",
    "content": "# Customization Points\n\nThe custom directory is an injection point for custom user configurations.\n\n## Header `gtest.h`\n\n### The following macros can be defined:\n\n*   `GTEST_OS_STACK_TRACE_GETTER_` - The name of an implementation of\n    `OsStackTraceGetterInterface`.\n*   `GTEST_CUSTOM_TEMPDIR_FUNCTION_` - An override for `testing::TempDir()`. See\n    `testing::TempDir` for semantics and signature.\n\n## Header `gtest-port.h`\n\nThe following macros can be defined:\n\n### Flag related macros:\n\n*   `GTEST_FLAG(flag_name)`\n*   `GTEST_USE_OWN_FLAGFILE_FLAG_` - Define to 0 when the system provides its\n    own flagfile flag parsing.\n*   `GTEST_DECLARE_bool_(name)`\n*   `GTEST_DECLARE_int32_(name)`\n*   `GTEST_DECLARE_string_(name)`\n*   `GTEST_DEFINE_bool_(name, default_val, doc)`\n*   `GTEST_DEFINE_int32_(name, default_val, doc)`\n*   `GTEST_DEFINE_string_(name, default_val, doc)`\n\n### Logging:\n\n*   `GTEST_LOG_(severity)`\n*   `GTEST_CHECK_(condition)`\n*   Functions `LogToStderr()` and `FlushInfoLog()` have to be provided too.\n\n### Threading:\n\n*   `GTEST_HAS_NOTIFICATION_` - Enabled if Notification is already provided.\n*   `GTEST_HAS_MUTEX_AND_THREAD_LOCAL_` - Enabled if `Mutex` and `ThreadLocal`\n    are already provided. Must also provide `GTEST_DECLARE_STATIC_MUTEX_(mutex)`\n    and `GTEST_DEFINE_STATIC_MUTEX_(mutex)`\n*   `GTEST_EXCLUSIVE_LOCK_REQUIRED_(locks)`\n*   `GTEST_LOCK_EXCLUDED_(locks)`\n\n### Underlying library support features\n\n*   `GTEST_HAS_CXXABI_H_`\n\n### Exporting API symbols:\n\n*   `GTEST_API_` - Specifier for exported symbols.\n\n## Header `gtest-printers.h`\n\n*   See documentation at `gtest/gtest-printers.h` for details on how to define a\n    custom printer.\n"
  },
  {
    "path": "test/googletest/include/gtest/internal/custom/gtest-port.h",
    "content": "// Copyright 2015, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n//\n// Injection point for custom user configurations. See README for details\n//\n// ** Custom implementation starts here **\n\n#ifndef GTEST_INCLUDE_GTEST_INTERNAL_CUSTOM_GTEST_PORT_H_\n#define GTEST_INCLUDE_GTEST_INTERNAL_CUSTOM_GTEST_PORT_H_\n\n#endif  // GTEST_INCLUDE_GTEST_INTERNAL_CUSTOM_GTEST_PORT_H_\n"
  },
  {
    "path": "test/googletest/include/gtest/internal/custom/gtest-printers.h",
    "content": "// Copyright 2015, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n//\n// This file provides an injection point for custom printers in a local\n// installation of gTest.\n// It will be included from gtest-printers.h and the overrides in this file\n// will be visible to everyone.\n//\n// Injection point for custom user configurations. 
See README for details\n//\n// ** Custom implementation starts here **\n\n#ifndef GTEST_INCLUDE_GTEST_INTERNAL_CUSTOM_GTEST_PRINTERS_H_\n#define GTEST_INCLUDE_GTEST_INTERNAL_CUSTOM_GTEST_PRINTERS_H_\n\n#endif  // GTEST_INCLUDE_GTEST_INTERNAL_CUSTOM_GTEST_PRINTERS_H_\n"
  },
  {
    "path": "test/googletest/include/gtest/internal/custom/gtest.h",
    "content": "// Copyright 2015, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n//\n// Injection point for custom user configurations. See README for details\n//\n// ** Custom implementation starts here **\n\n#ifndef GTEST_INCLUDE_GTEST_INTERNAL_CUSTOM_GTEST_H_\n#define GTEST_INCLUDE_GTEST_INTERNAL_CUSTOM_GTEST_H_\n\n#endif  // GTEST_INCLUDE_GTEST_INTERNAL_CUSTOM_GTEST_H_\n"
  },
  {
    "path": "test/googletest/include/gtest/internal/gtest-death-test-internal.h",
    "content": "// Copyright 2005, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n//\n// The Google C++ Testing and Mocking Framework (Google Test)\n//\n// This header file defines internal utilities needed for implementing\n// death tests.  
They are subject to change without notice.\n// GOOGLETEST_CM0001 DO NOT DELETE\n\n#ifndef GTEST_INCLUDE_GTEST_INTERNAL_GTEST_DEATH_TEST_INTERNAL_H_\n#define GTEST_INCLUDE_GTEST_INTERNAL_GTEST_DEATH_TEST_INTERNAL_H_\n\n#include \"gtest/gtest-matchers.h\"\n#include \"gtest/internal/gtest-internal.h\"\n\n#include <stdio.h>\n#include <memory>\n\nnamespace testing {\nnamespace internal {\n\nGTEST_DECLARE_string_(internal_run_death_test);\n\n// Names of the flags (needed for parsing Google Test flags).\nconst char kDeathTestStyleFlag[] = \"death_test_style\";\nconst char kDeathTestUseFork[] = \"death_test_use_fork\";\nconst char kInternalRunDeathTestFlag[] = \"internal_run_death_test\";\n\n#if GTEST_HAS_DEATH_TEST\n\nGTEST_DISABLE_MSC_WARNINGS_PUSH_(4251 \\\n/* class A needs to have dll-interface to be used by clients of class B */)\n\n// DeathTest is a class that hides much of the complexity of the\n// GTEST_DEATH_TEST_ macro.  It is abstract; its static Create method\n// returns a concrete class that depends on the prevailing death test\n// style, as defined by the --gtest_death_test_style and/or\n// --gtest_internal_run_death_test flags.\n\n// In describing the results of death tests, these terms are used with\n// the corresponding definitions:\n//\n// exit status:  The integer exit information in the format specified\n//               by wait(2)\n// exit code:    The integer code passed to exit(3), _exit(2), or\n//               returned from main()\nclass GTEST_API_ DeathTest {\n public:\n  // Create returns false if there was an error determining the\n  // appropriate action to take for the current death test; for example,\n  // if the gtest_death_test_style flag is set to an invalid value.\n  // The LastMessage method will return a more detailed message in that\n  // case.  Otherwise, the DeathTest pointer pointed to by the \"test\"\n  // argument is set.  
If the death test should be skipped, the pointer\n  // is set to NULL; otherwise, it is set to the address of a new concrete\n  // DeathTest object that controls the execution of the current test.\n  static bool Create(const char* statement, Matcher<const std::string&> matcher,\n                     const char* file, int line, DeathTest** test);\n  DeathTest();\n  virtual ~DeathTest() { }\n\n  // A helper class that aborts a death test when it's deleted.\n  class ReturnSentinel {\n   public:\n    explicit ReturnSentinel(DeathTest* test) : test_(test) { }\n    ~ReturnSentinel() { test_->Abort(TEST_ENCOUNTERED_RETURN_STATEMENT); }\n   private:\n    DeathTest* const test_;\n    GTEST_DISALLOW_COPY_AND_ASSIGN_(ReturnSentinel);\n  } GTEST_ATTRIBUTE_UNUSED_;\n\n  // An enumeration of possible roles that may be taken when a death\n  // test is encountered.  EXECUTE means that the death test logic should\n  // be executed immediately.  OVERSEE means that the program should prepare\n  // the appropriate environment for a child process to execute the death\n  // test, then wait for it to complete.\n  enum TestRole { OVERSEE_TEST, EXECUTE_TEST };\n\n  // An enumeration of the three reasons that a test might be aborted.\n  enum AbortReason {\n    TEST_ENCOUNTERED_RETURN_STATEMENT,\n    TEST_THREW_EXCEPTION,\n    TEST_DID_NOT_DIE\n  };\n\n  // Assumes one of the above roles.\n  virtual TestRole AssumeRole() = 0;\n\n  // Waits for the death test to finish and returns its status.\n  virtual int Wait() = 0;\n\n  // Returns true if the death test passed; that is, the test process\n  // exited during the test, its exit status matches a user-supplied\n  // predicate, and its stderr output matches a user-supplied regular\n  // expression.\n  // The user-supplied predicate may be a macro expression rather\n  // than a function pointer or functor, or else Wait and Passed could\n  // be combined.\n  virtual bool Passed(bool exit_status_ok) = 0;\n\n  // Signals that the death test did not 
die as expected.\n  virtual void Abort(AbortReason reason) = 0;\n\n  // Returns a human-readable outcome message regarding the outcome of\n  // the last death test.\n  static const char* LastMessage();\n\n  static void set_last_death_test_message(const std::string& message);\n\n private:\n  // A string containing a description of the outcome of the last death test.\n  static std::string last_death_test_message_;\n\n  GTEST_DISALLOW_COPY_AND_ASSIGN_(DeathTest);\n};\n\nGTEST_DISABLE_MSC_WARNINGS_POP_()  //  4251\n\n// Factory interface for death tests.  May be mocked out for testing.\nclass DeathTestFactory {\n public:\n  virtual ~DeathTestFactory() { }\n  virtual bool Create(const char* statement,\n                      Matcher<const std::string&> matcher, const char* file,\n                      int line, DeathTest** test) = 0;\n};\n\n// A concrete DeathTestFactory implementation for normal use.\nclass DefaultDeathTestFactory : public DeathTestFactory {\n public:\n  bool Create(const char* statement, Matcher<const std::string&> matcher,\n              const char* file, int line, DeathTest** test) override;\n};\n\n// Returns true if exit_status describes a process that was terminated\n// by a signal, or exited normally with a nonzero exit code.\nGTEST_API_ bool ExitedUnsuccessfully(int exit_status);\n\n// A string passed to EXPECT_DEATH (etc.) 
is caught by one of these overloads\n// and interpreted as a regex (rather than an Eq matcher) for legacy\n// compatibility.\ninline Matcher<const ::std::string&> MakeDeathTestMatcher(\n    ::testing::internal::RE regex) {\n  return ContainsRegex(regex.pattern());\n}\ninline Matcher<const ::std::string&> MakeDeathTestMatcher(const char* regex) {\n  return ContainsRegex(regex);\n}\ninline Matcher<const ::std::string&> MakeDeathTestMatcher(\n    const ::std::string& regex) {\n  return ContainsRegex(regex);\n}\n\n// If a Matcher<const ::std::string&> is passed to EXPECT_DEATH (etc.), it's\n// used directly.\ninline Matcher<const ::std::string&> MakeDeathTestMatcher(\n    Matcher<const ::std::string&> matcher) {\n  return matcher;\n}\n\n// Traps C++ exceptions escaping statement and reports them as test\n// failures. Note that trapping SEH exceptions is not implemented here.\n# if GTEST_HAS_EXCEPTIONS\n#  define GTEST_EXECUTE_DEATH_TEST_STATEMENT_(statement, death_test) \\\n  try { \\\n    GTEST_SUPPRESS_UNREACHABLE_CODE_WARNING_BELOW_(statement); \\\n  } catch (const ::std::exception& gtest_exception) { \\\n    fprintf(\\\n        stderr, \\\n        \"\\n%s: Caught std::exception-derived exception escaping the \" \\\n        \"death test statement. Exception message: %s\\n\", \\\n        ::testing::internal::FormatFileLocation(__FILE__, __LINE__).c_str(), \\\n        gtest_exception.what()); \\\n    fflush(stderr); \\\n    death_test->Abort(::testing::internal::DeathTest::TEST_THREW_EXCEPTION); \\\n  } catch (...) 
{ \\\n    death_test->Abort(::testing::internal::DeathTest::TEST_THREW_EXCEPTION); \\\n  }\n\n# else\n#  define GTEST_EXECUTE_DEATH_TEST_STATEMENT_(statement, death_test) \\\n  GTEST_SUPPRESS_UNREACHABLE_CODE_WARNING_BELOW_(statement)\n\n# endif\n\n// This macro is for implementing ASSERT_DEATH*, EXPECT_DEATH*,\n// ASSERT_EXIT*, and EXPECT_EXIT*.\n#define GTEST_DEATH_TEST_(statement, predicate, regex_or_matcher, fail)        \\\n  GTEST_AMBIGUOUS_ELSE_BLOCKER_                                                \\\n  if (::testing::internal::AlwaysTrue()) {                                     \\\n    ::testing::internal::DeathTest* gtest_dt;                                  \\\n    if (!::testing::internal::DeathTest::Create(                               \\\n            #statement,                                                        \\\n            ::testing::internal::MakeDeathTestMatcher(regex_or_matcher),       \\\n            __FILE__, __LINE__, &gtest_dt)) {                                  \\\n      goto GTEST_CONCAT_TOKEN_(gtest_label_, __LINE__);                        \\\n    }                                                                          \\\n    if (gtest_dt != nullptr) {                                                 \\\n      std::unique_ptr< ::testing::internal::DeathTest> gtest_dt_ptr(gtest_dt); \\\n      switch (gtest_dt->AssumeRole()) {                                        \\\n        case ::testing::internal::DeathTest::OVERSEE_TEST:                     \\\n          if (!gtest_dt->Passed(predicate(gtest_dt->Wait()))) {                \\\n            goto GTEST_CONCAT_TOKEN_(gtest_label_, __LINE__);                  \\\n          }                                                                    \\\n          break;                                                               \\\n        case ::testing::internal::DeathTest::EXECUTE_TEST: {                   \\\n          ::testing::internal::DeathTest::ReturnSentinel gtest_sentinel( 
      \\\n              gtest_dt);                                                       \\\n          GTEST_EXECUTE_DEATH_TEST_STATEMENT_(statement, gtest_dt);            \\\n          gtest_dt->Abort(::testing::internal::DeathTest::TEST_DID_NOT_DIE);   \\\n          break;                                                               \\\n        }                                                                      \\\n        default:                                                               \\\n          break;                                                               \\\n      }                                                                        \\\n    }                                                                          \\\n  } else                                                                       \\\n    GTEST_CONCAT_TOKEN_(gtest_label_, __LINE__)                                \\\n        : fail(::testing::internal::DeathTest::LastMessage())\n// The symbol \"fail\" here expands to something into which a message\n// can be streamed.\n\n// This macro is for implementing ASSERT/EXPECT_DEBUG_DEATH when compiled in\n// NDEBUG mode. 
In this case we need the statements to be executed and the macro\n// must accept a streamed message even though the message is never printed.\n// The regex object is not evaluated, but it is used to prevent \"unused\"\n// warnings and to avoid an expression that doesn't compile in debug mode.\n#define GTEST_EXECUTE_STATEMENT_(statement, regex_or_matcher)    \\\n  GTEST_AMBIGUOUS_ELSE_BLOCKER_                                  \\\n  if (::testing::internal::AlwaysTrue()) {                       \\\n    GTEST_SUPPRESS_UNREACHABLE_CODE_WARNING_BELOW_(statement);   \\\n  } else if (!::testing::internal::AlwaysTrue()) {               \\\n    ::testing::internal::MakeDeathTestMatcher(regex_or_matcher); \\\n  } else                                                         \\\n    ::testing::Message()\n\n// A class representing the parsed contents of the\n// --gtest_internal_run_death_test flag, as it existed when\n// RUN_ALL_TESTS was called.\nclass InternalRunDeathTestFlag {\n public:\n  InternalRunDeathTestFlag(const std::string& a_file,\n                           int a_line,\n                           int an_index,\n                           int a_write_fd)\n      : file_(a_file), line_(a_line), index_(an_index),\n        write_fd_(a_write_fd) {}\n\n  ~InternalRunDeathTestFlag() {\n    if (write_fd_ >= 0)\n      posix::Close(write_fd_);\n  }\n\n  const std::string& file() const { return file_; }\n  int line() const { return line_; }\n  int index() const { return index_; }\n  int write_fd() const { return write_fd_; }\n\n private:\n  std::string file_;\n  int line_;\n  int index_;\n  int write_fd_;\n\n  GTEST_DISALLOW_COPY_AND_ASSIGN_(InternalRunDeathTestFlag);\n};\n\n// Returns a newly created InternalRunDeathTestFlag object with fields\n// initialized from the GTEST_FLAG(internal_run_death_test) flag if\n// the flag is specified; otherwise returns NULL.\nInternalRunDeathTestFlag* ParseInternalRunDeathTestFlag();\n\n#endif  // GTEST_HAS_DEATH_TEST\n\n}  // namespace 
internal\n}  // namespace testing\n\n#endif  // GTEST_INCLUDE_GTEST_INTERNAL_GTEST_DEATH_TEST_INTERNAL_H_\n"
  },
  {
    "path": "test/googletest/include/gtest/internal/gtest-filepath.h",
    "content": "// Copyright 2008, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n//\n// Google Test filepath utilities\n//\n// This header file declares classes and functions used internally by\n// Google Test.  
They are subject to change without notice.\n//\n// This file is #included in gtest/internal/gtest-internal.h.\n// Do not include this header file separately!\n\n// GOOGLETEST_CM0001 DO NOT DELETE\n\n#ifndef GTEST_INCLUDE_GTEST_INTERNAL_GTEST_FILEPATH_H_\n#define GTEST_INCLUDE_GTEST_INTERNAL_GTEST_FILEPATH_H_\n\n#include \"gtest/internal/gtest-string.h\"\n\nGTEST_DISABLE_MSC_WARNINGS_PUSH_(4251 \\\n/* class A needs to have dll-interface to be used by clients of class B */)\n\nnamespace testing {\nnamespace internal {\n\n// FilePath - a class for file and directory pathname manipulation which\n// handles platform-specific conventions (like the pathname separator).\n// Used for helper functions for naming files in a directory for xml output.\n// Except for Set methods, all methods are const or static, which provides an\n// \"immutable value object\" -- useful for peace of mind.\n// A FilePath with a value ending in a path separator (\"like/this/\") represents\n// a directory, otherwise it is assumed to represent a file. In either case,\n// it may or may not represent an actual file or directory in the file system.\n// Names are NOT checked for syntax correctness -- no checking for illegal\n// characters, malformed paths, etc.\n\nclass GTEST_API_ FilePath {\n public:\n  FilePath() : pathname_(\"\") { }\n  FilePath(const FilePath& rhs) : pathname_(rhs.pathname_) { }\n\n  explicit FilePath(const std::string& pathname) : pathname_(pathname) {\n    Normalize();\n  }\n\n  FilePath& operator=(const FilePath& rhs) {\n    Set(rhs);\n    return *this;\n  }\n\n  void Set(const FilePath& rhs) {\n    pathname_ = rhs.pathname_;\n  }\n\n  const std::string& string() const { return pathname_; }\n  const char* c_str() const { return pathname_.c_str(); }\n\n  // Returns the current working directory, or \"\" if unsuccessful.\n  static FilePath GetCurrentDir();\n\n  // Given directory = \"dir\", base_name = \"test\", number = 0,\n  // extension = \"xml\", returns \"dir/test.xml\". 
If number is greater\n  // than zero (e.g., 12), returns \"dir/test_12.xml\".\n  // On Windows platform, uses \\ as the separator rather than /.\n  static FilePath MakeFileName(const FilePath& directory,\n                               const FilePath& base_name,\n                               int number,\n                               const char* extension);\n\n  // Given directory = \"dir\", relative_path = \"test.xml\",\n  // returns \"dir/test.xml\".\n  // On Windows, uses \\ as the separator rather than /.\n  static FilePath ConcatPaths(const FilePath& directory,\n                              const FilePath& relative_path);\n\n  // Returns a pathname for a file that does not currently exist. The pathname\n  // will be directory/base_name.extension or\n  // directory/base_name_<number>.extension if directory/base_name.extension\n  // already exists. The number will be incremented until a pathname is found\n  // that does not already exist.\n  // Examples: 'dir/foo_test.xml' or 'dir/foo_test_1.xml'.\n  // There could be a race condition if two or more processes are calling this\n  // function at the same time -- they could both pick the same filename.\n  static FilePath GenerateUniqueFileName(const FilePath& directory,\n                                         const FilePath& base_name,\n                                         const char* extension);\n\n  // Returns true if and only if the path is \"\".\n  bool IsEmpty() const { return pathname_.empty(); }\n\n  // If input name has a trailing separator character, removes it and returns\n  // the name, otherwise return the name string unmodified.\n  // On Windows platform, uses \\ as the separator, other platforms use /.\n  FilePath RemoveTrailingPathSeparator() const;\n\n  // Returns a copy of the FilePath with the directory part removed.\n  // Example: FilePath(\"path/to/file\").RemoveDirectoryName() returns\n  // FilePath(\"file\"). 
If there is no directory part (\"just_a_file\"), it returns\n  // the FilePath unmodified. If there is no file part (\"just_a_dir/\") it\n  // returns an empty FilePath (\"\").\n  // On Windows platform, '\\' is the path separator, otherwise it is '/'.\n  FilePath RemoveDirectoryName() const;\n\n  // RemoveFileName returns the directory path with the filename removed.\n  // Example: FilePath(\"path/to/file\").RemoveFileName() returns \"path/to/\".\n  // If the FilePath is \"a_file\" or \"/a_file\", RemoveFileName returns\n  // FilePath(\"./\") or, on Windows, FilePath(\".\\\\\"). If the filepath does\n  // not have a file, like \"just/a/dir/\", it returns the FilePath unmodified.\n  // On Windows platform, '\\' is the path separator, otherwise it is '/'.\n  FilePath RemoveFileName() const;\n\n  // Returns a copy of the FilePath with the case-insensitive extension removed.\n  // Example: FilePath(\"dir/file.exe\").RemoveExtension(\"EXE\") returns\n  // FilePath(\"dir/file\"). If a case-insensitive extension is not\n  // found, returns a copy of the original FilePath.\n  FilePath RemoveExtension(const char* extension) const;\n\n  // Creates directories so that path exists. Returns true if successful or if\n  // the directories already exist; returns false if unable to create\n  // directories for any reason. Will also return false if the FilePath does\n  // not represent a directory (that is, it doesn't end with a path separator).\n  bool CreateDirectoriesRecursively() const;\n\n  // Create the directory so that path exists. Returns true if successful or\n  // if the directory already exists; returns false if unable to create the\n  // directory for any reason, including if the parent directory does not\n  // exist. 
Not named \"CreateDirectory\" because that's a macro on Windows.\n  bool CreateFolder() const;\n\n  // Returns true if FilePath describes something in the file-system,\n  // either a file, directory, or whatever, and that something exists.\n  bool FileOrDirectoryExists() const;\n\n  // Returns true if pathname describes a directory in the file-system\n  // that exists.\n  bool DirectoryExists() const;\n\n  // Returns true if FilePath ends with a path separator, which indicates that\n  // it is intended to represent a directory. Returns false otherwise.\n  // This does NOT check that a directory (or file) actually exists.\n  bool IsDirectory() const;\n\n  // Returns true if pathname describes a root directory. (Windows has one\n  // root directory per disk drive.)\n  bool IsRootDirectory() const;\n\n  // Returns true if pathname describes an absolute path.\n  bool IsAbsolutePath() const;\n\n private:\n  // Replaces multiple consecutive separators with a single separator.\n  // For example, \"bar///foo\" becomes \"bar/foo\". Does not eliminate other\n  // redundancies that might be in a pathname involving \".\" or \"..\".\n  //\n  // A pathname with multiple consecutive separators may occur either through\n  // user error or as a result of some scripts or APIs that generate a pathname\n  // with a trailing separator. On other platforms the same API or script\n  // may NOT generate a pathname with a trailing \"/\". Then elsewhere that\n  // pathname may have another \"/\" and pathname components added to it,\n  // without checking for the separator already being there.\n  // The script language and operating system may allow paths like \"foo//bar\"\n  // but some of the functions in FilePath will not handle that correctly. 
In\n  // particular, RemoveTrailingPathSeparator() only removes one separator, and\n  // it is called in CreateDirectoriesRecursively() assuming that it will change\n  // a pathname from directory syntax (trailing separator) to filename syntax.\n  //\n  // On Windows this method also replaces the alternate path separator '/' with\n  // the primary path separator '\\\\', so that for example \"bar\\\\/\\\\foo\" becomes\n  // \"bar\\\\foo\".\n\n  void Normalize();\n\n  // Returns a pointer to the last occurence of a valid path separator in\n  // the FilePath. On Windows, for example, both '/' and '\\' are valid path\n  // separators. Returns NULL if no path separator was found.\n  const char* FindLastPathSeparator() const;\n\n  std::string pathname_;\n};  // class FilePath\n\n}  // namespace internal\n}  // namespace testing\n\nGTEST_DISABLE_MSC_WARNINGS_POP_()  //  4251\n\n#endif  // GTEST_INCLUDE_GTEST_INTERNAL_GTEST_FILEPATH_H_\n"
  },
  {
    "path": "test/googletest/include/gtest/internal/gtest-internal.h",
    "content": "// Copyright 2005, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n//\n// The Google C++ Testing and Mocking Framework (Google Test)\n//\n// This header file declares functions and macros used internally by\n// Google Test.  
They are subject to change without notice.\n\n// GOOGLETEST_CM0001 DO NOT DELETE\n\n#ifndef GTEST_INCLUDE_GTEST_INTERNAL_GTEST_INTERNAL_H_\n#define GTEST_INCLUDE_GTEST_INTERNAL_GTEST_INTERNAL_H_\n\n#include \"gtest/internal/gtest-port.h\"\n\n#if GTEST_OS_LINUX\n# include <stdlib.h>\n# include <sys/types.h>\n# include <sys/wait.h>\n# include <unistd.h>\n#endif  // GTEST_OS_LINUX\n\n#if GTEST_HAS_EXCEPTIONS\n# include <stdexcept>\n#endif\n\n#include <ctype.h>\n#include <float.h>\n#include <string.h>\n#include <iomanip>\n#include <limits>\n#include <map>\n#include <set>\n#include <string>\n#include <type_traits>\n#include <vector>\n\n#include \"gtest/gtest-message.h\"\n#include \"gtest/internal/gtest-filepath.h\"\n#include \"gtest/internal/gtest-string.h\"\n#include \"gtest/internal/gtest-type-util.h\"\n\n// Due to C++ preprocessor weirdness, we need double indirection to\n// concatenate two tokens when one of them is __LINE__.  Writing\n//\n//   foo ## __LINE__\n//\n// will result in the token foo__LINE__, instead of foo followed by\n// the current line number.  
For more details, see\n// http://www.parashift.com/c++-faq-lite/misc-technical-issues.html#faq-39.6\n#define GTEST_CONCAT_TOKEN_(foo, bar) GTEST_CONCAT_TOKEN_IMPL_(foo, bar)\n#define GTEST_CONCAT_TOKEN_IMPL_(foo, bar) foo ## bar\n\n// Stringifies its argument.\n#define GTEST_STRINGIFY_(name) #name\n\nnamespace proto2 { class Message; }\n\nnamespace testing {\n\n// Forward declarations.\n\nclass AssertionResult;                 // Result of an assertion.\nclass Message;                         // Represents a failure message.\nclass Test;                            // Represents a test.\nclass TestInfo;                        // Information about a test.\nclass TestPartResult;                  // Result of a test part.\nclass UnitTest;                        // A collection of test suites.\n\ntemplate <typename T>\n::std::string PrintToString(const T& value);\n\nnamespace internal {\n\nstruct TraceInfo;                      // Information about a trace point.\nclass TestInfoImpl;                    // Opaque implementation of TestInfo\nclass UnitTestImpl;                    // Opaque implementation of UnitTest\n\n// The text used in failure messages to indicate the start of the\n// stack trace.\nGTEST_API_ extern const char kStackTraceMarker[];\n\n// An IgnoredValue object can be implicitly constructed from ANY value.\nclass IgnoredValue {\n  struct Sink {};\n public:\n  // This constructor template allows any value to be implicitly\n  // converted to IgnoredValue.  The object has no data member and\n  // doesn't try to remember anything about the argument.  
We\n  // deliberately omit the 'explicit' keyword in order to allow the\n  // conversion to be implicit.\n  // Disable the conversion if T already has a magical conversion operator.\n  // Otherwise we get ambiguity.\n  template <typename T,\n            typename std::enable_if<!std::is_convertible<T, Sink>::value,\n                                    int>::type = 0>\n  IgnoredValue(const T& /* ignored */) {}  // NOLINT(runtime/explicit)\n};\n\n// Appends the user-supplied message to the Google-Test-generated message.\nGTEST_API_ std::string AppendUserMessage(\n    const std::string& gtest_msg, const Message& user_msg);\n\n#if GTEST_HAS_EXCEPTIONS\n\nGTEST_DISABLE_MSC_WARNINGS_PUSH_(4275 \\\n/* an exported class was derived from a class that was not exported */)\n\n// This exception is thrown by (and only by) a failed Google Test\n// assertion when GTEST_FLAG(throw_on_failure) is true (if exceptions\n// are enabled).  We derive it from std::runtime_error, which is for\n// errors presumably detectable only at run time.  
Since\n// std::runtime_error inherits from std::exception, many testing\n// frameworks know how to extract and print the message inside it.\nclass GTEST_API_ GoogleTestFailureException : public ::std::runtime_error {\n public:\n  explicit GoogleTestFailureException(const TestPartResult& failure);\n};\n\nGTEST_DISABLE_MSC_WARNINGS_POP_()  //  4275\n\n#endif  // GTEST_HAS_EXCEPTIONS\n\nnamespace edit_distance {\n// Returns the optimal edits to go from 'left' to 'right'.\n// All edits cost the same, with replace having lower priority than\n// add/remove.\n// Simple implementation of the Wagner-Fischer algorithm.\n// See http://en.wikipedia.org/wiki/Wagner-Fischer_algorithm\nenum EditType { kMatch, kAdd, kRemove, kReplace };\nGTEST_API_ std::vector<EditType> CalculateOptimalEdits(\n    const std::vector<size_t>& left, const std::vector<size_t>& right);\n\n// Same as above, but the input is represented as strings.\nGTEST_API_ std::vector<EditType> CalculateOptimalEdits(\n    const std::vector<std::string>& left,\n    const std::vector<std::string>& right);\n\n// Create a diff of the input strings in Unified diff format.\nGTEST_API_ std::string CreateUnifiedDiff(const std::vector<std::string>& left,\n                                         const std::vector<std::string>& right,\n                                         size_t context = 2);\n\n}  // namespace edit_distance\n\n// Calculate the diff between 'left' and 'right' and return it in unified diff\n// format.\n// If not null, stores in 'total_line_count' the total number of lines found\n// in left + right.\nGTEST_API_ std::string DiffStrings(const std::string& left,\n                                   const std::string& right,\n                                   size_t* total_line_count);\n\n// Constructs and returns the message for an equality assertion\n// (e.g. ASSERT_EQ, EXPECT_STREQ, etc) failure.\n//\n// The first four parameters are the expressions used in the assertion\n// and their values, as strings.  
For example, for ASSERT_EQ(foo, bar)\n// where foo is 5 and bar is 6, we have:\n//\n//   expected_expression: \"foo\"\n//   actual_expression:   \"bar\"\n//   expected_value:      \"5\"\n//   actual_value:        \"6\"\n//\n// The ignoring_case parameter is true if and only if the assertion is a\n// *_STRCASEEQ*.  When it's true, the string \" (ignoring case)\" will\n// be inserted into the message.\nGTEST_API_ AssertionResult EqFailure(const char* expected_expression,\n                                     const char* actual_expression,\n                                     const std::string& expected_value,\n                                     const std::string& actual_value,\n                                     bool ignoring_case);\n\n// Constructs a failure message for Boolean assertions such as EXPECT_TRUE.\nGTEST_API_ std::string GetBoolAssertionFailureMessage(\n    const AssertionResult& assertion_result,\n    const char* expression_text,\n    const char* actual_predicate_value,\n    const char* expected_predicate_value);\n\n// This template class represents an IEEE floating-point number\n// (either single-precision or double-precision, depending on the\n// template parameters).\n//\n// The purpose of this class is to do more sophisticated number\n// comparison.  (Due to round-off error, etc, it's very unlikely that\n// two floating-points will be equal exactly.  
Hence a naive\n// comparison by the == operation often doesn't work.)\n//\n// Format of IEEE floating-point:\n//\n//   The most-significant bit being the leftmost, an IEEE\n//   floating-point looks like\n//\n//     sign_bit exponent_bits fraction_bits\n//\n//   Here, sign_bit is a single bit that designates the sign of the\n//   number.\n//\n//   For float, there are 8 exponent bits and 23 fraction bits.\n//\n//   For double, there are 11 exponent bits and 52 fraction bits.\n//\n//   More details can be found at\n//   http://en.wikipedia.org/wiki/IEEE_floating-point_standard.\n//\n// Template parameter:\n//\n//   RawType: the raw floating-point type (either float or double)\ntemplate <typename RawType>\nclass FloatingPoint {\n public:\n  // Defines the unsigned integer type that has the same size as the\n  // floating point number.\n  typedef typename TypeWithSize<sizeof(RawType)>::UInt Bits;\n\n  // Constants.\n\n  // # of bits in a number.\n  static const size_t kBitCount = 8*sizeof(RawType);\n\n  // # of fraction bits in a number.\n  static const size_t kFractionBitCount =\n    std::numeric_limits<RawType>::digits - 1;\n\n  // # of exponent bits in a number.\n  static const size_t kExponentBitCount = kBitCount - 1 - kFractionBitCount;\n\n  // The mask for the sign bit.\n  static const Bits kSignBitMask = static_cast<Bits>(1) << (kBitCount - 1);\n\n  // The mask for the fraction bits.\n  static const Bits kFractionBitMask =\n    ~static_cast<Bits>(0) >> (kExponentBitCount + 1);\n\n  // The mask for the exponent bits.\n  static const Bits kExponentBitMask = ~(kSignBitMask | kFractionBitMask);\n\n  // How many ULP's (Units in the Last Place) we want to tolerate when\n  // comparing two numbers.  The larger the value, the more error we\n  // allow.  A 0 value means that two numbers must be exactly the same\n  // to be considered equal.\n  //\n  // The maximum error of a single floating-point operation is 0.5\n  // units in the last place.  
On Intel CPU's, all floating-point\n  // calculations are done with 80-bit precision, while double has 64\n  // bits.  Therefore, 4 should be enough for ordinary use.\n  //\n  // See the following article for more details on ULP:\n  // http://randomascii.wordpress.com/2012/02/25/comparing-floating-point-numbers-2012-edition/\n  static const size_t kMaxUlps = 4;\n\n  // Constructs a FloatingPoint from a raw floating-point number.\n  //\n  // On an Intel CPU, passing a non-normalized NAN (Not a Number)\n  // around may change its bits, although the new value is guaranteed\n  // to be also a NAN.  Therefore, don't expect this constructor to\n  // preserve the bits in x when x is a NAN.\n  explicit FloatingPoint(const RawType& x) { u_.value_ = x; }\n\n  // Static methods\n\n  // Reinterprets a bit pattern as a floating-point number.\n  //\n  // This function is needed to test the AlmostEquals() method.\n  static RawType ReinterpretBits(const Bits bits) {\n    FloatingPoint fp(0);\n    fp.u_.bits_ = bits;\n    return fp.u_.value_;\n  }\n\n  // Returns the floating-point number that represent positive infinity.\n  static RawType Infinity() {\n    return ReinterpretBits(kExponentBitMask);\n  }\n\n  // Returns the maximum representable finite floating-point number.\n  static RawType Max();\n\n  // Non-static methods\n\n  // Returns the bits that represents this number.\n  const Bits &bits() const { return u_.bits_; }\n\n  // Returns the exponent bits of this number.\n  Bits exponent_bits() const { return kExponentBitMask & u_.bits_; }\n\n  // Returns the fraction bits of this number.\n  Bits fraction_bits() const { return kFractionBitMask & u_.bits_; }\n\n  // Returns the sign bit of this number.\n  Bits sign_bit() const { return kSignBitMask & u_.bits_; }\n\n  // Returns true if and only if this is NAN (not a number).\n  bool is_nan() const {\n    // It's a NAN if the exponent bits are all ones and the fraction\n    // bits are not entirely zeros.\n    return 
(exponent_bits() == kExponentBitMask) && (fraction_bits() != 0);\n  }\n\n  // Returns true if and only if this number is at most kMaxUlps ULP's away\n  // from rhs.  In particular, this function:\n  //\n  //   - returns false if either number is (or both are) NAN.\n  //   - treats really large numbers as almost equal to infinity.\n  //   - thinks +0.0 and -0.0 are 0 DLP's apart.\n  bool AlmostEquals(const FloatingPoint& rhs) const {\n    // The IEEE standard says that any comparison operation involving\n    // a NAN must return false.\n    if (is_nan() || rhs.is_nan()) return false;\n\n    return DistanceBetweenSignAndMagnitudeNumbers(u_.bits_, rhs.u_.bits_)\n        <= kMaxUlps;\n  }\n\n private:\n  // The data type used to store the actual floating-point number.\n  union FloatingPointUnion {\n    RawType value_;  // The raw floating-point number.\n    Bits bits_;      // The bits that represent the number.\n  };\n\n  // Converts an integer from the sign-and-magnitude representation to\n  // the biased representation.  
More precisely, let N be 2 to the\n  // power of (kBitCount - 1), an integer x is represented by the\n  // unsigned number x + N.\n  //\n  // For instance,\n  //\n  //   -N + 1 (the most negative number representable using\n  //          sign-and-magnitude) is represented by 1;\n  //   0      is represented by N; and\n  //   N - 1  (the biggest number representable using\n  //          sign-and-magnitude) is represented by 2N - 1.\n  //\n  // Read http://en.wikipedia.org/wiki/Signed_number_representations\n  // for more details on signed number representations.\n  static Bits SignAndMagnitudeToBiased(const Bits &sam) {\n    if (kSignBitMask & sam) {\n      // sam represents a negative number.\n      return ~sam + 1;\n    } else {\n      // sam represents a positive number.\n      return kSignBitMask | sam;\n    }\n  }\n\n  // Given two numbers in the sign-and-magnitude representation,\n  // returns the distance between them as an unsigned number.\n  static Bits DistanceBetweenSignAndMagnitudeNumbers(const Bits &sam1,\n                                                     const Bits &sam2) {\n    const Bits biased1 = SignAndMagnitudeToBiased(sam1);\n    const Bits biased2 = SignAndMagnitudeToBiased(sam2);\n    return (biased1 >= biased2) ? (biased1 - biased2) : (biased2 - biased1);\n  }\n\n  FloatingPointUnion u_;\n};\n\n// We cannot use std::numeric_limits<T>::max() as it clashes with the max()\n// macro defined by <windows.h>.\ntemplate <>\ninline float FloatingPoint<float>::Max() { return FLT_MAX; }\ntemplate <>\ninline double FloatingPoint<double>::Max() { return DBL_MAX; }\n\n// Typedefs the instances of the FloatingPoint template class that we\n// care to use.\ntypedef FloatingPoint<float> Float;\ntypedef FloatingPoint<double> Double;\n\n// In order to catch the mistake of putting tests that use different\n// test fixture classes in the same test suite, we need to assign\n// unique IDs to fixture classes and compare them.  
The TypeId type is\n// used to hold such IDs.  The user should treat TypeId as an opaque\n// type: the only operation allowed on TypeId values is to compare\n// them for equality using the == operator.\ntypedef const void* TypeId;\n\ntemplate <typename T>\nclass TypeIdHelper {\n public:\n  // dummy_ must not have a const type.  Otherwise an overly eager\n  // compiler (e.g. MSVC 7.1 & 8.0) may try to merge\n  // TypeIdHelper<T>::dummy_ for different Ts as an \"optimization\".\n  static bool dummy_;\n};\n\ntemplate <typename T>\nbool TypeIdHelper<T>::dummy_ = false;\n\n// GetTypeId<T>() returns the ID of type T.  Different values will be\n// returned for different types.  Calling the function twice with the\n// same type argument is guaranteed to return the same ID.\ntemplate <typename T>\nTypeId GetTypeId() {\n  // The compiler is required to allocate a different\n  // TypeIdHelper<T>::dummy_ variable for each T used to instantiate\n  // the template.  Therefore, the address of dummy_ is guaranteed to\n  // be unique.\n  return &(TypeIdHelper<T>::dummy_);\n}\n\n// Returns the type ID of ::testing::Test.  Always call this instead\n// of GetTypeId< ::testing::Test>() to get the type ID of\n// ::testing::Test, as the latter may give the wrong result due to a\n// suspected linker bug when compiling Google Test as a Mac OS X\n// framework.\nGTEST_API_ TypeId GetTestTypeId();\n\n// Defines the abstract factory interface that creates instances\n// of a Test object.\nclass TestFactoryBase {\n public:\n  virtual ~TestFactoryBase() {}\n\n  // Creates a test instance to run. 
The instance is both created and destroyed\n  // within TestInfoImpl::Run()\n  virtual Test* CreateTest() = 0;\n\n protected:\n  TestFactoryBase() {}\n\n private:\n  GTEST_DISALLOW_COPY_AND_ASSIGN_(TestFactoryBase);\n};\n\n// This class provides implementation of TeastFactoryBase interface.\n// It is used in TEST and TEST_F macros.\ntemplate <class TestClass>\nclass TestFactoryImpl : public TestFactoryBase {\n public:\n  Test* CreateTest() override { return new TestClass; }\n};\n\n#if GTEST_OS_WINDOWS\n\n// Predicate-formatters for implementing the HRESULT checking macros\n// {ASSERT|EXPECT}_HRESULT_{SUCCEEDED|FAILED}\n// We pass a long instead of HRESULT to avoid causing an\n// include dependency for the HRESULT type.\nGTEST_API_ AssertionResult IsHRESULTSuccess(const char* expr,\n                                            long hr);  // NOLINT\nGTEST_API_ AssertionResult IsHRESULTFailure(const char* expr,\n                                            long hr);  // NOLINT\n\n#endif  // GTEST_OS_WINDOWS\n\n// Types of SetUpTestSuite() and TearDownTestSuite() functions.\nusing SetUpTestSuiteFunc = void (*)();\nusing TearDownTestSuiteFunc = void (*)();\n\nstruct CodeLocation {\n  CodeLocation(const std::string& a_file, int a_line)\n      : file(a_file), line(a_line) {}\n\n  std::string file;\n  int line;\n};\n\n//  Helper to identify which setup function for TestCase / TestSuite to call.\n//  Only one function is allowed, either TestCase or TestSute but not both.\n\n// Utility functions to help SuiteApiResolver\nusing SetUpTearDownSuiteFuncType = void (*)();\n\ninline SetUpTearDownSuiteFuncType GetNotDefaultOrNull(\n    SetUpTearDownSuiteFuncType a, SetUpTearDownSuiteFuncType def) {\n  return a == def ? nullptr : a;\n}\n\ntemplate <typename T>\n//  Note that SuiteApiResolver inherits from T because\n//  SetUpTestSuite()/TearDownTestSuite() could be protected. 
Ths way\n//  SuiteApiResolver can access them.\nstruct SuiteApiResolver : T {\n  // testing::Test is only forward declared at this point. So we make it a\n  // dependend class for the compiler to be OK with it.\n  using Test =\n      typename std::conditional<sizeof(T) != 0, ::testing::Test, void>::type;\n\n  static SetUpTearDownSuiteFuncType GetSetUpCaseOrSuite(const char* filename,\n                                                        int line_num) {\n    SetUpTearDownSuiteFuncType test_case_fp =\n        GetNotDefaultOrNull(&T::SetUpTestCase, &Test::SetUpTestCase);\n    SetUpTearDownSuiteFuncType test_suite_fp =\n        GetNotDefaultOrNull(&T::SetUpTestSuite, &Test::SetUpTestSuite);\n\n    GTEST_CHECK_(!test_case_fp || !test_suite_fp)\n        << \"Test can not provide both SetUpTestSuite and SetUpTestCase, please \"\n           \"make sure there is only one present at \"\n        << filename << \":\" << line_num;\n\n    return test_case_fp != nullptr ? test_case_fp : test_suite_fp;\n  }\n\n  static SetUpTearDownSuiteFuncType GetTearDownCaseOrSuite(const char* filename,\n                                                           int line_num) {\n    SetUpTearDownSuiteFuncType test_case_fp =\n        GetNotDefaultOrNull(&T::TearDownTestCase, &Test::TearDownTestCase);\n    SetUpTearDownSuiteFuncType test_suite_fp =\n        GetNotDefaultOrNull(&T::TearDownTestSuite, &Test::TearDownTestSuite);\n\n    GTEST_CHECK_(!test_case_fp || !test_suite_fp)\n        << \"Test can not provide both TearDownTestSuite and TearDownTestCase,\"\n           \" please make sure there is only one present at\"\n        << filename << \":\" << line_num;\n\n    return test_case_fp != nullptr ? 
test_case_fp : test_suite_fp;\n  }\n};\n\n// Creates a new TestInfo object and registers it with Google Test;\n// returns the created object.\n//\n// Arguments:\n//\n//   test_suite_name:   name of the test suite\n//   name:             name of the test\n//   type_param        the name of the test's type parameter, or NULL if\n//                     this is not a typed or a type-parameterized test.\n//   value_param       text representation of the test's value parameter,\n//                     or NULL if this is not a type-parameterized test.\n//   code_location:    code location where the test is defined\n//   fixture_class_id: ID of the test fixture class\n//   set_up_tc:        pointer to the function that sets up the test suite\n//   tear_down_tc:     pointer to the function that tears down the test suite\n//   factory:          pointer to the factory that creates a test object.\n//                     The newly created TestInfo instance will assume\n//                     ownership of the factory object.\nGTEST_API_ TestInfo* MakeAndRegisterTestInfo(\n    const char* test_suite_name, const char* name, const char* type_param,\n    const char* value_param, CodeLocation code_location,\n    TypeId fixture_class_id, SetUpTestSuiteFunc set_up_tc,\n    TearDownTestSuiteFunc tear_down_tc, TestFactoryBase* factory);\n\n// If *pstr starts with the given prefix, modifies *pstr to be right\n// past the prefix and returns true; otherwise leaves *pstr unchanged\n// and returns false.  
None of pstr, *pstr, and prefix can be NULL.\nGTEST_API_ bool SkipPrefix(const char* prefix, const char** pstr);\n\n#if GTEST_HAS_TYPED_TEST || GTEST_HAS_TYPED_TEST_P\n\nGTEST_DISABLE_MSC_WARNINGS_PUSH_(4251 \\\n/* class A needs to have dll-interface to be used by clients of class B */)\n\n// State of the definition of a type-parameterized test suite.\nclass GTEST_API_ TypedTestSuitePState {\n public:\n  TypedTestSuitePState() : registered_(false) {}\n\n  // Adds the given test name to defined_test_names_ and return true\n  // if the test suite hasn't been registered; otherwise aborts the\n  // program.\n  bool AddTestName(const char* file, int line, const char* case_name,\n                   const char* test_name) {\n    if (registered_) {\n      fprintf(stderr,\n              \"%s Test %s must be defined before \"\n              \"REGISTER_TYPED_TEST_SUITE_P(%s, ...).\\n\",\n              FormatFileLocation(file, line).c_str(), test_name, case_name);\n      fflush(stderr);\n      posix::Abort();\n    }\n    registered_tests_.insert(\n        ::std::make_pair(test_name, CodeLocation(file, line)));\n    return true;\n  }\n\n  bool TestExists(const std::string& test_name) const {\n    return registered_tests_.count(test_name) > 0;\n  }\n\n  const CodeLocation& GetCodeLocation(const std::string& test_name) const {\n    RegisteredTestsMap::const_iterator it = registered_tests_.find(test_name);\n    GTEST_CHECK_(it != registered_tests_.end());\n    return it->second;\n  }\n\n  // Verifies that registered_tests match the test names in\n  // defined_test_names_; returns registered_tests if successful, or\n  // aborts the program otherwise.\n  const char* VerifyRegisteredTestNames(\n      const char* file, int line, const char* registered_tests);\n\n private:\n  typedef ::std::map<std::string, CodeLocation> RegisteredTestsMap;\n\n  bool registered_;\n  RegisteredTestsMap registered_tests_;\n};\n\n//  Legacy API is deprecated but still available\n#ifndef 
GTEST_REMOVE_LEGACY_TEST_CASEAPI_\nusing TypedTestCasePState = TypedTestSuitePState;\n#endif  //  GTEST_REMOVE_LEGACY_TEST_CASEAPI_\n\nGTEST_DISABLE_MSC_WARNINGS_POP_()  //  4251\n\n// Skips to the first non-space char after the first comma in 'str';\n// returns NULL if no comma is found in 'str'.\ninline const char* SkipComma(const char* str) {\n  const char* comma = strchr(str, ',');\n  if (comma == nullptr) {\n    return nullptr;\n  }\n  while (IsSpace(*(++comma))) {}\n  return comma;\n}\n\n// Returns the prefix of 'str' before the first comma in it; returns\n// the entire string if it contains no comma.\ninline std::string GetPrefixUntilComma(const char* str) {\n  const char* comma = strchr(str, ',');\n  return comma == nullptr ? str : std::string(str, comma);\n}\n\n// Splits a given string on a given delimiter, populating a given\n// vector with the fields.\nvoid SplitString(const ::std::string& str, char delimiter,\n                 ::std::vector< ::std::string>* dest);\n\n// The default argument to the template below for the case when the user does\n// not provide a name generator.\nstruct DefaultNameGenerator {\n  template <typename T>\n  static std::string GetName(int i) {\n    return StreamableToString(i);\n  }\n};\n\ntemplate <typename Provided = DefaultNameGenerator>\nstruct NameGeneratorSelector {\n  typedef Provided type;\n};\n\ntemplate <typename NameGenerator>\nvoid GenerateNamesRecursively(internal::None, std::vector<std::string>*, int) {}\n\ntemplate <typename NameGenerator, typename Types>\nvoid GenerateNamesRecursively(Types, std::vector<std::string>* result, int i) {\n  result->push_back(NameGenerator::template GetName<typename Types::Head>(i));\n  GenerateNamesRecursively<NameGenerator>(typename Types::Tail(), result,\n                                          i + 1);\n}\n\ntemplate <typename NameGenerator, typename Types>\nstd::vector<std::string> GenerateNames() {\n  std::vector<std::string> result;\n  
GenerateNamesRecursively<NameGenerator>(Types(), &result, 0);\n  return result;\n}\n\n// TypeParameterizedTest<Fixture, TestSel, Types>::Register()\n// registers a list of type-parameterized tests with Google Test.  The\n// return value is insignificant - we just need to return something\n// such that we can call this function in a namespace scope.\n//\n// Implementation note: The GTEST_TEMPLATE_ macro declares a template\n// template parameter.  It's defined in gtest-type-util.h.\ntemplate <GTEST_TEMPLATE_ Fixture, class TestSel, typename Types>\nclass TypeParameterizedTest {\n public:\n  // 'index' is the index of the test in the type list 'Types'\n  // specified in INSTANTIATE_TYPED_TEST_SUITE_P(Prefix, TestSuite,\n  // Types).  Valid values for 'index' are [0, N - 1] where N is the\n  // length of Types.\n  static bool Register(const char* prefix, const CodeLocation& code_location,\n                       const char* case_name, const char* test_names, int index,\n                       const std::vector<std::string>& type_names =\n                           GenerateNames<DefaultNameGenerator, Types>()) {\n    typedef typename Types::Head Type;\n    typedef Fixture<Type> FixtureClass;\n    typedef typename GTEST_BIND_(TestSel, Type) TestClass;\n\n    // First, registers the first type-parameterized test in the type\n    // list.\n    MakeAndRegisterTestInfo(\n        (std::string(prefix) + (prefix[0] == '\\0' ? 
\"\" : \"/\") + case_name +\n         \"/\" + type_names[static_cast<size_t>(index)])\n            .c_str(),\n        StripTrailingSpaces(GetPrefixUntilComma(test_names)).c_str(),\n        GetTypeName<Type>().c_str(),\n        nullptr,  // No value parameter.\n        code_location, GetTypeId<FixtureClass>(),\n        SuiteApiResolver<TestClass>::GetSetUpCaseOrSuite(\n            code_location.file.c_str(), code_location.line),\n        SuiteApiResolver<TestClass>::GetTearDownCaseOrSuite(\n            code_location.file.c_str(), code_location.line),\n        new TestFactoryImpl<TestClass>);\n\n    // Next, recurses (at compile time) with the tail of the type list.\n    return TypeParameterizedTest<Fixture, TestSel,\n                                 typename Types::Tail>::Register(prefix,\n                                                                 code_location,\n                                                                 case_name,\n                                                                 test_names,\n                                                                 index + 1,\n                                                                 type_names);\n  }\n};\n\n// The base case for the compile time recursion.\ntemplate <GTEST_TEMPLATE_ Fixture, class TestSel>\nclass TypeParameterizedTest<Fixture, TestSel, internal::None> {\n public:\n  static bool Register(const char* /*prefix*/, const CodeLocation&,\n                       const char* /*case_name*/, const char* /*test_names*/,\n                       int /*index*/,\n                       const std::vector<std::string>& =\n                           std::vector<std::string>() /*type_names*/) {\n    return true;\n  }\n};\n\n// TypeParameterizedTestSuite<Fixture, Tests, Types>::Register()\n// registers *all combinations* of 'Tests' and 'Types' with Google\n// Test.  
The return value is insignificant - we just need to return\n// something such that we can call this function in a namespace scope.\ntemplate <GTEST_TEMPLATE_ Fixture, typename Tests, typename Types>\nclass TypeParameterizedTestSuite {\n public:\n  static bool Register(const char* prefix, CodeLocation code_location,\n                       const TypedTestSuitePState* state, const char* case_name,\n                       const char* test_names,\n                       const std::vector<std::string>& type_names =\n                           GenerateNames<DefaultNameGenerator, Types>()) {\n    std::string test_name = StripTrailingSpaces(\n        GetPrefixUntilComma(test_names));\n    if (!state->TestExists(test_name)) {\n      fprintf(stderr, \"Failed to get code location for test %s.%s at %s.\",\n              case_name, test_name.c_str(),\n              FormatFileLocation(code_location.file.c_str(),\n                                 code_location.line).c_str());\n      fflush(stderr);\n      posix::Abort();\n    }\n    const CodeLocation& test_location = state->GetCodeLocation(test_name);\n\n    typedef typename Tests::Head Head;\n\n    // First, register the first test in 'Test' for each type in 'Types'.\n    TypeParameterizedTest<Fixture, Head, Types>::Register(\n        prefix, test_location, case_name, test_names, 0, type_names);\n\n    // Next, recurses (at compile time) with the tail of the test list.\n    return TypeParameterizedTestSuite<Fixture, typename Tests::Tail,\n                                      Types>::Register(prefix, code_location,\n                                                       state, case_name,\n                                                       SkipComma(test_names),\n                                                       type_names);\n  }\n};\n\n// The base case for the compile time recursion.\ntemplate <GTEST_TEMPLATE_ Fixture, typename Types>\nclass TypeParameterizedTestSuite<Fixture, internal::None, Types> {\n public:\n  
static bool Register(const char* /*prefix*/, const CodeLocation&,\n                       const TypedTestSuitePState* /*state*/,\n                       const char* /*case_name*/, const char* /*test_names*/,\n                       const std::vector<std::string>& =\n                           std::vector<std::string>() /*type_names*/) {\n    return true;\n  }\n};\n\n#endif  // GTEST_HAS_TYPED_TEST || GTEST_HAS_TYPED_TEST_P\n\n// Returns the current OS stack trace as an std::string.\n//\n// The maximum number of stack frames to be included is specified by\n// the gtest_stack_trace_depth flag.  The skip_count parameter\n// specifies the number of top frames to be skipped, which doesn't\n// count against the number of frames to be included.\n//\n// For example, if Foo() calls Bar(), which in turn calls\n// GetCurrentOsStackTraceExceptTop(..., 1), Foo() will be included in\n// the trace but Bar() and GetCurrentOsStackTraceExceptTop() won't.\nGTEST_API_ std::string GetCurrentOsStackTraceExceptTop(\n    UnitTest* unit_test, int skip_count);\n\n// Helpers for suppressing warnings on unreachable code or constant\n// condition.\n\n// Always returns true.\nGTEST_API_ bool AlwaysTrue();\n\n// Always returns false.\ninline bool AlwaysFalse() { return !AlwaysTrue(); }\n\n// Helper for suppressing false warning from Clang on a const char*\n// variable declared in a conditional expression always being NULL in\n// the else branch.\nstruct GTEST_API_ ConstCharPtr {\n  ConstCharPtr(const char* str) : value(str) {}\n  operator bool() const { return true; }\n  const char* value;\n};\n\n// Helper for declaring std::string within 'if' statement\n// in pre C++17 build environment.\nstruct GTEST_API_ TrueWithString {\n  TrueWithString() = default;\n  explicit TrueWithString(const char* str) : value(str) {}\n  explicit TrueWithString(const std::string& str) : value(str) {}\n  explicit operator bool() const { return true; }\n  std::string value;\n};\n\n// A simple Linear Congruential 
Generator for generating random\n// numbers with a uniform distribution.  Unlike rand() and srand(), it\n// doesn't use global state (and therefore can't interfere with user\n// code).  Unlike rand_r(), it's portable.  An LCG isn't very random,\n// but it's good enough for our purposes.\nclass GTEST_API_ Random {\n public:\n  static const UInt32 kMaxRange = 1u << 31;\n\n  explicit Random(UInt32 seed) : state_(seed) {}\n\n  void Reseed(UInt32 seed) { state_ = seed; }\n\n  // Generates a random number from [0, range).  Crashes if 'range' is\n  // 0 or greater than kMaxRange.\n  UInt32 Generate(UInt32 range);\n\n private:\n  UInt32 state_;\n  GTEST_DISALLOW_COPY_AND_ASSIGN_(Random);\n};\n\n// Turns const U&, U&, const U, and U all into U.\n#define GTEST_REMOVE_REFERENCE_AND_CONST_(T) \\\n  typename std::remove_const<typename std::remove_reference<T>::type>::type\n\n// IsAProtocolMessage<T>::value is a compile-time bool constant that's\n// true if and only if T is type proto2::Message or a subclass of it.\ntemplate <typename T>\nstruct IsAProtocolMessage\n    : public std::is_convertible<const T*, const ::proto2::Message*> {};\n\n// When the compiler sees expression IsContainerTest<C>(0), if C is an\n// STL-style container class, the first overload of IsContainerTest\n// will be viable (since both C::iterator* and C::const_iterator* are\n// valid types and NULL can be implicitly converted to them).  It will\n// be picked over the second overload as 'int' is a perfect match for\n// the type of argument 0.  If C::iterator or C::const_iterator is not\n// a valid type, the first overload is not viable, and the second\n// overload will be picked.  
Therefore, we can determine whether C is\n// a container class by checking the type of IsContainerTest<C>(0).\n// The value of the expression is insignificant.\n//\n// In C++11 mode we check the existence of a const_iterator and that an\n// iterator is properly implemented for the container.\n//\n// For pre-C++11 that we look for both C::iterator and C::const_iterator.\n// The reason is that C++ injects the name of a class as a member of the\n// class itself (e.g. you can refer to class iterator as either\n// 'iterator' or 'iterator::iterator').  If we look for C::iterator\n// only, for example, we would mistakenly think that a class named\n// iterator is an STL container.\n//\n// Also note that the simpler approach of overloading\n// IsContainerTest(typename C::const_iterator*) and\n// IsContainerTest(...) doesn't work with Visual Age C++ and Sun C++.\ntypedef int IsContainer;\ntemplate <class C,\n          class Iterator = decltype(::std::declval<const C&>().begin()),\n          class = decltype(::std::declval<const C&>().end()),\n          class = decltype(++::std::declval<Iterator&>()),\n          class = decltype(*::std::declval<Iterator>()),\n          class = typename C::const_iterator>\nIsContainer IsContainerTest(int /* dummy */) {\n  return 0;\n}\n\ntypedef char IsNotContainer;\ntemplate <class C>\nIsNotContainer IsContainerTest(long /* dummy */) { return '\\0'; }\n\n// Trait to detect whether a type T is a hash table.\n// The heuristic used is that the type contains an inner type `hasher` and does\n// not contain an inner type `reverse_iterator`.\n// If the container is iterable in reverse, then order might actually matter.\ntemplate <typename T>\nstruct IsHashTable {\n private:\n  template <typename U>\n  static char test(typename U::hasher*, typename U::reverse_iterator*);\n  template <typename U>\n  static int test(typename U::hasher*, ...);\n  template <typename U>\n  static char test(...);\n\n public:\n  static const bool value = 
sizeof(test<T>(nullptr, nullptr)) == sizeof(int);\n};\n\ntemplate <typename T>\nconst bool IsHashTable<T>::value;\n\ntemplate <typename C,\n          bool = sizeof(IsContainerTest<C>(0)) == sizeof(IsContainer)>\nstruct IsRecursiveContainerImpl;\n\ntemplate <typename C>\nstruct IsRecursiveContainerImpl<C, false> : public std::false_type {};\n\n// Since the IsRecursiveContainerImpl depends on the IsContainerTest we need to\n// obey the same inconsistencies as the IsContainerTest, namely check if\n// something is a container is relying on only const_iterator in C++11 and\n// is relying on both const_iterator and iterator otherwise\ntemplate <typename C>\nstruct IsRecursiveContainerImpl<C, true> {\n  using value_type = decltype(*std::declval<typename C::const_iterator>());\n  using type =\n      std::is_same<typename std::remove_const<\n                       typename std::remove_reference<value_type>::type>::type,\n                   C>;\n};\n\n// IsRecursiveContainer<Type> is a unary compile-time predicate that\n// evaluates whether C is a recursive container type. A recursive container\n// type is a container type whose value_type is equal to the container type\n// itself. An example for a recursive container type is\n// boost::filesystem::path, whose iterator has a value_type that is equal to\n// boost::filesystem::path.\ntemplate <typename C>\nstruct IsRecursiveContainer : public IsRecursiveContainerImpl<C>::type {};\n\n// Utilities for native arrays.\n\n// ArrayEq() compares two k-dimensional native arrays using the\n// elements' operator==, where k can be any integer >= 0.  
When k is\n// 0, ArrayEq() degenerates into comparing a single pair of values.\n\ntemplate <typename T, typename U>\nbool ArrayEq(const T* lhs, size_t size, const U* rhs);\n\n// This generic version is used when k is 0.\ntemplate <typename T, typename U>\ninline bool ArrayEq(const T& lhs, const U& rhs) { return lhs == rhs; }\n\n// This overload is used when k >= 1.\ntemplate <typename T, typename U, size_t N>\ninline bool ArrayEq(const T(&lhs)[N], const U(&rhs)[N]) {\n  return internal::ArrayEq(lhs, N, rhs);\n}\n\n// This helper reduces code bloat.  If we instead put its logic inside\n// the previous ArrayEq() function, arrays with different sizes would\n// lead to different copies of the template code.\ntemplate <typename T, typename U>\nbool ArrayEq(const T* lhs, size_t size, const U* rhs) {\n  for (size_t i = 0; i != size; i++) {\n    if (!internal::ArrayEq(lhs[i], rhs[i]))\n      return false;\n  }\n  return true;\n}\n\n// Finds the first element in the iterator range [begin, end) that\n// equals elem.  Element may be a native array type itself.\ntemplate <typename Iter, typename Element>\nIter ArrayAwareFind(Iter begin, Iter end, const Element& elem) {\n  for (Iter it = begin; it != end; ++it) {\n    if (internal::ArrayEq(*it, elem))\n      return it;\n  }\n  return end;\n}\n\n// CopyArray() copies a k-dimensional native array using the elements'\n// operator=, where k can be any integer >= 0.  When k is 0,\n// CopyArray() degenerates into copying a single value.\n\ntemplate <typename T, typename U>\nvoid CopyArray(const T* from, size_t size, U* to);\n\n// This generic version is used when k is 0.\ntemplate <typename T, typename U>\ninline void CopyArray(const T& from, U* to) { *to = from; }\n\n// This overload is used when k >= 1.\ntemplate <typename T, typename U, size_t N>\ninline void CopyArray(const T(&from)[N], U(*to)[N]) {\n  internal::CopyArray(from, N, *to);\n}\n\n// This helper reduces code bloat.  
If we instead put its logic inside\n// the previous CopyArray() function, arrays with different sizes\n// would lead to different copies of the template code.\ntemplate <typename T, typename U>\nvoid CopyArray(const T* from, size_t size, U* to) {\n  for (size_t i = 0; i != size; i++) {\n    internal::CopyArray(from[i], to + i);\n  }\n}\n\n// The relation between an NativeArray object (see below) and the\n// native array it represents.\n// We use 2 different structs to allow non-copyable types to be used, as long\n// as RelationToSourceReference() is passed.\nstruct RelationToSourceReference {};\nstruct RelationToSourceCopy {};\n\n// Adapts a native array to a read-only STL-style container.  Instead\n// of the complete STL container concept, this adaptor only implements\n// members useful for Google Mock's container matchers.  New members\n// should be added as needed.  To simplify the implementation, we only\n// support Element being a raw type (i.e. having no top-level const or\n// reference modifier).  It's the client's responsibility to satisfy\n// this requirement.  Element can be an array type itself (hence\n// multi-dimensional arrays are supported).\ntemplate <typename Element>\nclass NativeArray {\n public:\n  // STL-style container typedefs.\n  typedef Element value_type;\n  typedef Element* iterator;\n  typedef const Element* const_iterator;\n\n  // Constructs from a native array. References the source.\n  NativeArray(const Element* array, size_t count, RelationToSourceReference) {\n    InitRef(array, count);\n  }\n\n  // Constructs from a native array. 
Copies the source.\n  NativeArray(const Element* array, size_t count, RelationToSourceCopy) {\n    InitCopy(array, count);\n  }\n\n  // Copy constructor.\n  NativeArray(const NativeArray& rhs) {\n    (this->*rhs.clone_)(rhs.array_, rhs.size_);\n  }\n\n  ~NativeArray() {\n    if (clone_ != &NativeArray::InitRef)\n      delete[] array_;\n  }\n\n  // STL-style container methods.\n  size_t size() const { return size_; }\n  const_iterator begin() const { return array_; }\n  const_iterator end() const { return array_ + size_; }\n  bool operator==(const NativeArray& rhs) const {\n    return size() == rhs.size() &&\n        ArrayEq(begin(), size(), rhs.begin());\n  }\n\n private:\n  static_assert(!std::is_const<Element>::value, \"Type must not be const\");\n  static_assert(!std::is_reference<Element>::value,\n                \"Type must not be a reference\");\n\n  // Initializes this object with a copy of the input.\n  void InitCopy(const Element* array, size_t a_size) {\n    Element* const copy = new Element[a_size];\n    CopyArray(array, a_size, copy);\n    array_ = copy;\n    size_ = a_size;\n    clone_ = &NativeArray::InitCopy;\n  }\n\n  // Initializes this object with a reference of the input.\n  void InitRef(const Element* array, size_t a_size) {\n    array_ = array;\n    size_ = a_size;\n    clone_ = &NativeArray::InitRef;\n  }\n\n  const Element* array_;\n  size_t size_;\n  void (NativeArray::*clone_)(const Element*, size_t);\n\n  GTEST_DISALLOW_ASSIGN_(NativeArray);\n};\n\n// Backport of std::index_sequence.\ntemplate <size_t... Is>\nstruct IndexSequence {\n  using type = IndexSequence;\n};\n\n// Double the IndexSequence, and one if plus_one is true.\ntemplate <bool plus_one, typename T, size_t sizeofT>\nstruct DoubleSequence;\ntemplate <size_t... I, size_t sizeofT>\nstruct DoubleSequence<true, IndexSequence<I...>, sizeofT> {\n  using type = IndexSequence<I..., (sizeofT + I)..., 2 * sizeofT>;\n};\ntemplate <size_t... 
I, size_t sizeofT>\nstruct DoubleSequence<false, IndexSequence<I...>, sizeofT> {\n  using type = IndexSequence<I..., (sizeofT + I)...>;\n};\n\n// Backport of std::make_index_sequence.\n// It uses O(ln(N)) instantiation depth.\ntemplate <size_t N>\nstruct MakeIndexSequence\n    : DoubleSequence<N % 2 == 1, typename MakeIndexSequence<N / 2>::type,\n                     N / 2>::type {};\n\ntemplate <>\nstruct MakeIndexSequence<0> : IndexSequence<> {};\n\ntemplate <size_t>\nstruct Ignore {\n  Ignore(...);  // NOLINT\n};\n\ntemplate <typename>\nstruct ElemFromListImpl;\ntemplate <size_t... I>\nstruct ElemFromListImpl<IndexSequence<I...>> {\n  // We make Ignore a template to solve a problem with MSVC.\n  // A non-template Ignore would work fine with `decltype(Ignore(I))...`, but\n  // MSVC doesn't understand how to deal with that pack expansion.\n  // Use `0 * I` to have a single instantiation of Ignore.\n  template <typename R>\n  static R Apply(Ignore<0 * I>..., R (*)(), ...);\n};\n\ntemplate <size_t N, typename... T>\nstruct ElemFromList {\n  using type =\n      decltype(ElemFromListImpl<typename MakeIndexSequence<N>::type>::Apply(\n          static_cast<T (*)()>(nullptr)...));\n};\n\ntemplate <typename... T>\nclass FlatTuple;\n\ntemplate <typename Derived, size_t I>\nstruct FlatTupleElemBase;\n\ntemplate <typename... T, size_t I>\nstruct FlatTupleElemBase<FlatTuple<T...>, I> {\n  using value_type = typename ElemFromList<I, T...>::type;\n  FlatTupleElemBase() = default;\n  explicit FlatTupleElemBase(value_type t) : value(std::move(t)) {}\n  value_type value;\n};\n\ntemplate <typename Derived, typename Idx>\nstruct FlatTupleBase;\n\ntemplate <size_t... Idx, typename... T>\nstruct FlatTupleBase<FlatTuple<T...>, IndexSequence<Idx...>>\n    : FlatTupleElemBase<FlatTuple<T...>, Idx>... {\n  using Indices = IndexSequence<Idx...>;\n  FlatTupleBase() = default;\n  explicit FlatTupleBase(T... t)\n      : FlatTupleElemBase<FlatTuple<T...>, Idx>(std::move(t))... 
{}\n};\n\n// Analog to std::tuple but with different tradeoffs.\n// This class minimizes the template instantiation depth, thus allowing more\n// elements that std::tuple would. std::tuple has been seen to require an\n// instantiation depth of more than 10x the number of elements in some\n// implementations.\n// FlatTuple and ElemFromList are not recursive and have a fixed depth\n// regardless of T...\n// MakeIndexSequence, on the other hand, it is recursive but with an\n// instantiation depth of O(ln(N)).\ntemplate <typename... T>\nclass FlatTuple\n    : private FlatTupleBase<FlatTuple<T...>,\n                            typename MakeIndexSequence<sizeof...(T)>::type> {\n  using Indices = typename FlatTuple::FlatTupleBase::Indices;\n\n public:\n  FlatTuple() = default;\n  explicit FlatTuple(T... t) : FlatTuple::FlatTupleBase(std::move(t)...) {}\n\n  template <size_t I>\n  const typename ElemFromList<I, T...>::type& Get() const {\n    return static_cast<const FlatTupleElemBase<FlatTuple, I>*>(this)->value;\n  }\n\n  template <size_t I>\n  typename ElemFromList<I, T...>::type& Get() {\n    return static_cast<FlatTupleElemBase<FlatTuple, I>*>(this)->value;\n  }\n};\n\n// Utility functions to be called with static_assert to induce deprecation\n// warnings.\nGTEST_INTERNAL_DEPRECATED(\n    \"INSTANTIATE_TEST_CASE_P is deprecated, please use \"\n    \"INSTANTIATE_TEST_SUITE_P\")\nconstexpr bool InstantiateTestCase_P_IsDeprecated() { return true; }\n\nGTEST_INTERNAL_DEPRECATED(\n    \"TYPED_TEST_CASE_P is deprecated, please use \"\n    \"TYPED_TEST_SUITE_P\")\nconstexpr bool TypedTestCase_P_IsDeprecated() { return true; }\n\nGTEST_INTERNAL_DEPRECATED(\n    \"TYPED_TEST_CASE is deprecated, please use \"\n    \"TYPED_TEST_SUITE\")\nconstexpr bool TypedTestCaseIsDeprecated() { return true; }\n\nGTEST_INTERNAL_DEPRECATED(\n    \"REGISTER_TYPED_TEST_CASE_P is deprecated, please use \"\n    \"REGISTER_TYPED_TEST_SUITE_P\")\nconstexpr bool RegisterTypedTestCase_P_IsDeprecated() 
{ return true; }\n\nGTEST_INTERNAL_DEPRECATED(\n    \"INSTANTIATE_TYPED_TEST_CASE_P is deprecated, please use \"\n    \"INSTANTIATE_TYPED_TEST_SUITE_P\")\nconstexpr bool InstantiateTypedTestCase_P_IsDeprecated() { return true; }\n\n}  // namespace internal\n}  // namespace testing\n\n#define GTEST_MESSAGE_AT_(file, line, message, result_type) \\\n  ::testing::internal::AssertHelper(result_type, file, line, message) \\\n    = ::testing::Message()\n\n#define GTEST_MESSAGE_(message, result_type) \\\n  GTEST_MESSAGE_AT_(__FILE__, __LINE__, message, result_type)\n\n#define GTEST_FATAL_FAILURE_(message) \\\n  return GTEST_MESSAGE_(message, ::testing::TestPartResult::kFatalFailure)\n\n#define GTEST_NONFATAL_FAILURE_(message) \\\n  GTEST_MESSAGE_(message, ::testing::TestPartResult::kNonFatalFailure)\n\n#define GTEST_SUCCESS_(message) \\\n  GTEST_MESSAGE_(message, ::testing::TestPartResult::kSuccess)\n\n#define GTEST_SKIP_(message) \\\n  return GTEST_MESSAGE_(message, ::testing::TestPartResult::kSkip)\n\n// Suppress MSVC warning 4072 (unreachable code) for the code following\n// statement if it returns or throws (or doesn't return or throw in some\n// situations).\n#define GTEST_SUPPRESS_UNREACHABLE_CODE_WARNING_BELOW_(statement) \\\n  if (::testing::internal::AlwaysTrue()) { statement; }\n\n#define GTEST_TEST_THROW_(statement, expected_exception, fail) \\\n  GTEST_AMBIGUOUS_ELSE_BLOCKER_ \\\n  if (::testing::internal::ConstCharPtr gtest_msg = \"\") { \\\n    bool gtest_caught_expected = false; \\\n    try { \\\n      GTEST_SUPPRESS_UNREACHABLE_CODE_WARNING_BELOW_(statement); \\\n    } \\\n    catch (expected_exception const&) { \\\n      gtest_caught_expected = true; \\\n    } \\\n    catch (...) 
{ \\\n      gtest_msg.value = \\\n          \"Expected: \" #statement \" throws an exception of type \" \\\n          #expected_exception \".\\n  Actual: it throws a different type.\"; \\\n      goto GTEST_CONCAT_TOKEN_(gtest_label_testthrow_, __LINE__); \\\n    } \\\n    if (!gtest_caught_expected) { \\\n      gtest_msg.value = \\\n          \"Expected: \" #statement \" throws an exception of type \" \\\n          #expected_exception \".\\n  Actual: it throws nothing.\"; \\\n      goto GTEST_CONCAT_TOKEN_(gtest_label_testthrow_, __LINE__); \\\n    } \\\n  } else \\\n    GTEST_CONCAT_TOKEN_(gtest_label_testthrow_, __LINE__): \\\n      fail(gtest_msg.value)\n\n#if GTEST_HAS_EXCEPTIONS\n\n#define GTEST_TEST_NO_THROW_CATCH_STD_EXCEPTION_() \\\n  catch (std::exception const& e) { \\\n    gtest_msg.value = ( \\\n      \"it throws std::exception-derived exception with description: \\\"\" \\\n    ); \\\n    gtest_msg.value += e.what(); \\\n    gtest_msg.value += \"\\\".\"; \\\n    goto GTEST_CONCAT_TOKEN_(gtest_label_testnothrow_, __LINE__); \\\n  }\n\n#else  // GTEST_HAS_EXCEPTIONS\n\n#define GTEST_TEST_NO_THROW_CATCH_STD_EXCEPTION_()\n\n#endif  // GTEST_HAS_EXCEPTIONS\n\n#define GTEST_TEST_NO_THROW_(statement, fail) \\\n  GTEST_AMBIGUOUS_ELSE_BLOCKER_ \\\n  if (::testing::internal::TrueWithString gtest_msg{}) { \\\n    try { \\\n      GTEST_SUPPRESS_UNREACHABLE_CODE_WARNING_BELOW_(statement); \\\n    } \\\n    GTEST_TEST_NO_THROW_CATCH_STD_EXCEPTION_() \\\n    catch (...) 
{ \\\n      gtest_msg.value = \"it throws.\"; \\\n      goto GTEST_CONCAT_TOKEN_(gtest_label_testnothrow_, __LINE__); \\\n    } \\\n  } else \\\n    GTEST_CONCAT_TOKEN_(gtest_label_testnothrow_, __LINE__): \\\n      fail((\"Expected: \" #statement \" doesn't throw an exception.\\n\" \\\n            \"  Actual: \" + gtest_msg.value).c_str())\n\n#define GTEST_TEST_ANY_THROW_(statement, fail) \\\n  GTEST_AMBIGUOUS_ELSE_BLOCKER_ \\\n  if (::testing::internal::AlwaysTrue()) { \\\n    bool gtest_caught_any = false; \\\n    try { \\\n      GTEST_SUPPRESS_UNREACHABLE_CODE_WARNING_BELOW_(statement); \\\n    } \\\n    catch (...) { \\\n      gtest_caught_any = true; \\\n    } \\\n    if (!gtest_caught_any) { \\\n      goto GTEST_CONCAT_TOKEN_(gtest_label_testanythrow_, __LINE__); \\\n    } \\\n  } else \\\n    GTEST_CONCAT_TOKEN_(gtest_label_testanythrow_, __LINE__): \\\n      fail(\"Expected: \" #statement \" throws an exception.\\n\" \\\n           \"  Actual: it doesn't.\")\n\n\n// Implements Boolean test assertions such as EXPECT_TRUE. expression can be\n// either a boolean expression or an AssertionResult. 
text is a textual\n// represenation of expression as it was passed into the EXPECT_TRUE.\n#define GTEST_TEST_BOOLEAN_(expression, text, actual, expected, fail) \\\n  GTEST_AMBIGUOUS_ELSE_BLOCKER_ \\\n  if (const ::testing::AssertionResult gtest_ar_ = \\\n      ::testing::AssertionResult(expression)) \\\n    ; \\\n  else \\\n    fail(::testing::internal::GetBoolAssertionFailureMessage(\\\n        gtest_ar_, text, #actual, #expected).c_str())\n\n#define GTEST_TEST_NO_FATAL_FAILURE_(statement, fail) \\\n  GTEST_AMBIGUOUS_ELSE_BLOCKER_ \\\n  if (::testing::internal::AlwaysTrue()) { \\\n    ::testing::internal::HasNewFatalFailureHelper gtest_fatal_failure_checker; \\\n    GTEST_SUPPRESS_UNREACHABLE_CODE_WARNING_BELOW_(statement); \\\n    if (gtest_fatal_failure_checker.has_new_fatal_failure()) { \\\n      goto GTEST_CONCAT_TOKEN_(gtest_label_testnofatal_, __LINE__); \\\n    } \\\n  } else \\\n    GTEST_CONCAT_TOKEN_(gtest_label_testnofatal_, __LINE__): \\\n      fail(\"Expected: \" #statement \" doesn't generate new fatal \" \\\n           \"failures in the current thread.\\n\" \\\n           \"  Actual: it does.\")\n\n// Expands to the name of the class that implements the given test.\n#define GTEST_TEST_CLASS_NAME_(test_suite_name, test_name) \\\n  test_suite_name##_##test_name##_Test\n\n// Helper macro for defining tests.\n#define GTEST_TEST_(test_suite_name, test_name, parent_class, parent_id)      \\\n  static_assert(sizeof(GTEST_STRINGIFY_(test_suite_name)) > 1,                \\\n                \"test_suite_name must not be empty\");                         \\\n  static_assert(sizeof(GTEST_STRINGIFY_(test_name)) > 1,                      \\\n                \"test_name must not be empty\");                               \\\n  class GTEST_TEST_CLASS_NAME_(test_suite_name, test_name)                    \\\n      : public parent_class {                                                 \\\n   public:                                                                    
\\\n    GTEST_TEST_CLASS_NAME_(test_suite_name, test_name)() {}                   \\\n                                                                              \\\n   private:                                                                   \\\n    void TestBody() override;                                                 \\\n    static ::testing::TestInfo* const test_info_ GTEST_ATTRIBUTE_UNUSED_;     \\\n    GTEST_DISALLOW_COPY_AND_ASSIGN_(GTEST_TEST_CLASS_NAME_(test_suite_name,   \\\n                                                           test_name));       \\\n  };                                                                          \\\n                                                                              \\\n  ::testing::TestInfo* const GTEST_TEST_CLASS_NAME_(test_suite_name,          \\\n                                                    test_name)::test_info_ =  \\\n      ::testing::internal::MakeAndRegisterTestInfo(                           \\\n          #test_suite_name, #test_name, nullptr, nullptr,                     \\\n          ::testing::internal::CodeLocation(__FILE__, __LINE__), (parent_id), \\\n          ::testing::internal::SuiteApiResolver<                              \\\n              parent_class>::GetSetUpCaseOrSuite(__FILE__, __LINE__),         \\\n          ::testing::internal::SuiteApiResolver<                              \\\n              parent_class>::GetTearDownCaseOrSuite(__FILE__, __LINE__),      \\\n          new ::testing::internal::TestFactoryImpl<GTEST_TEST_CLASS_NAME_(    \\\n              test_suite_name, test_name)>);                                  \\\n  void GTEST_TEST_CLASS_NAME_(test_suite_name, test_name)::TestBody()\n\n#endif  // GTEST_INCLUDE_GTEST_INTERNAL_GTEST_INTERNAL_H_\n"
  },
  {
    "path": "test/googletest/include/gtest/internal/gtest-param-util.h",
    "content": "// Copyright 2008 Google Inc.\n// All Rights Reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\n// Type and function utilities for implementing parameterized tests.\n\n// GOOGLETEST_CM0001 DO NOT DELETE\n\n#ifndef GTEST_INCLUDE_GTEST_INTERNAL_GTEST_PARAM_UTIL_H_\n#define GTEST_INCLUDE_GTEST_INTERNAL_GTEST_PARAM_UTIL_H_\n\n#include <ctype.h>\n\n#include <cassert>\n#include <iterator>\n#include <memory>\n#include <set>\n#include <tuple>\n#include <utility>\n#include <vector>\n\n#include \"gtest/internal/gtest-internal.h\"\n#include \"gtest/internal/gtest-port.h\"\n#include \"gtest/gtest-printers.h\"\n\nnamespace testing {\n// Input to a parameterized test name generator, describing a test parameter.\n// Consists of the parameter value and the integer parameter index.\ntemplate <class ParamType>\nstruct TestParamInfo {\n  TestParamInfo(const ParamType& a_param, size_t an_index) :\n    param(a_param),\n    index(an_index) {}\n  ParamType param;\n  size_t index;\n};\n\n// A builtin parameterized test name generator which returns the result of\n// testing::PrintToString.\nstruct PrintToStringParamName {\n  template <class ParamType>\n  std::string operator()(const TestParamInfo<ParamType>& info) const {\n    return PrintToString(info.param);\n  }\n};\n\nnamespace internal {\n\n// INTERNAL IMPLEMENTATION - DO NOT USE IN USER CODE.\n// Utility Functions\n\n// Outputs a message explaining invalid registration of different\n// fixture class for the same test suite. 
This may happen when\n// TEST_P macro is used to define two tests with the same name\n// but in different namespaces.\nGTEST_API_ void ReportInvalidTestSuiteType(const char* test_suite_name,\n                                           CodeLocation code_location);\n\ntemplate <typename> class ParamGeneratorInterface;\ntemplate <typename> class ParamGenerator;\n\n// Interface for iterating over elements provided by an implementation\n// of ParamGeneratorInterface<T>.\ntemplate <typename T>\nclass ParamIteratorInterface {\n public:\n  virtual ~ParamIteratorInterface() {}\n  // A pointer to the base generator instance.\n  // Used only for the purposes of iterator comparison\n  // to make sure that two iterators belong to the same generator.\n  virtual const ParamGeneratorInterface<T>* BaseGenerator() const = 0;\n  // Advances iterator to point to the next element\n  // provided by the generator. The caller is responsible\n  // for not calling Advance() on an iterator equal to\n  // BaseGenerator()->End().\n  virtual void Advance() = 0;\n  // Clones the iterator object. Used for implementing copy semantics\n  // of ParamIterator<T>.\n  virtual ParamIteratorInterface* Clone() const = 0;\n  // Dereferences the current iterator and provides (read-only) access\n  // to the pointed value. It is the caller's responsibility not to call\n  // Current() on an iterator equal to BaseGenerator()->End().\n  // Used for implementing ParamGenerator<T>::operator*().\n  virtual const T* Current() const = 0;\n  // Determines whether the given iterator and other point to the same\n  // element in the sequence generated by the generator.\n  // Used for implementing ParamGenerator<T>::operator==().\n  virtual bool Equals(const ParamIteratorInterface& other) const = 0;\n};\n\n// Class iterating over elements provided by an implementation of\n// ParamGeneratorInterface<T>. 
It wraps ParamIteratorInterface<T>\n// and implements the const forward iterator concept.\ntemplate <typename T>\nclass ParamIterator {\n public:\n  typedef T value_type;\n  typedef const T& reference;\n  typedef ptrdiff_t difference_type;\n\n  // ParamIterator assumes ownership of the impl_ pointer.\n  ParamIterator(const ParamIterator& other) : impl_(other.impl_->Clone()) {}\n  ParamIterator& operator=(const ParamIterator& other) {\n    if (this != &other)\n      impl_.reset(other.impl_->Clone());\n    return *this;\n  }\n\n  const T& operator*() const { return *impl_->Current(); }\n  const T* operator->() const { return impl_->Current(); }\n  // Prefix version of operator++.\n  ParamIterator& operator++() {\n    impl_->Advance();\n    return *this;\n  }\n  // Postfix version of operator++.\n  ParamIterator operator++(int /*unused*/) {\n    ParamIteratorInterface<T>* clone = impl_->Clone();\n    impl_->Advance();\n    return ParamIterator(clone);\n  }\n  bool operator==(const ParamIterator& other) const {\n    return impl_.get() == other.impl_.get() || impl_->Equals(*other.impl_);\n  }\n  bool operator!=(const ParamIterator& other) const {\n    return !(*this == other);\n  }\n\n private:\n  friend class ParamGenerator<T>;\n  explicit ParamIterator(ParamIteratorInterface<T>* impl) : impl_(impl) {}\n  std::unique_ptr<ParamIteratorInterface<T> > impl_;\n};\n\n// ParamGeneratorInterface<T> is the binary interface to access generators\n// defined in other translation units.\ntemplate <typename T>\nclass ParamGeneratorInterface {\n public:\n  typedef T ParamType;\n\n  virtual ~ParamGeneratorInterface() {}\n\n  // Generator interface definition\n  virtual ParamIteratorInterface<T>* Begin() const = 0;\n  virtual ParamIteratorInterface<T>* End() const = 0;\n};\n\n// Wraps ParamGeneratorInterface<T> and provides general generator syntax\n// compatible with the STL Container concept.\n// This class implements copy initialization semantics and the contained\n// 
ParamGeneratorInterface<T> instance is shared among all copies\n// of the original object. This is possible because that instance is immutable.\ntemplate<typename T>\nclass ParamGenerator {\n public:\n  typedef ParamIterator<T> iterator;\n\n  explicit ParamGenerator(ParamGeneratorInterface<T>* impl) : impl_(impl) {}\n  ParamGenerator(const ParamGenerator& other) : impl_(other.impl_) {}\n\n  ParamGenerator& operator=(const ParamGenerator& other) {\n    impl_ = other.impl_;\n    return *this;\n  }\n\n  iterator begin() const { return iterator(impl_->Begin()); }\n  iterator end() const { return iterator(impl_->End()); }\n\n private:\n  std::shared_ptr<const ParamGeneratorInterface<T> > impl_;\n};\n\n// Generates values from a range of two comparable values. Can be used to\n// generate sequences of user-defined types that implement operator+() and\n// operator<().\n// This class is used in the Range() function.\ntemplate <typename T, typename IncrementT>\nclass RangeGenerator : public ParamGeneratorInterface<T> {\n public:\n  RangeGenerator(T begin, T end, IncrementT step)\n      : begin_(begin), end_(end),\n        step_(step), end_index_(CalculateEndIndex(begin, end, step)) {}\n  ~RangeGenerator() override {}\n\n  ParamIteratorInterface<T>* Begin() const override {\n    return new Iterator(this, begin_, 0, step_);\n  }\n  ParamIteratorInterface<T>* End() const override {\n    return new Iterator(this, end_, end_index_, step_);\n  }\n\n private:\n  class Iterator : public ParamIteratorInterface<T> {\n   public:\n    Iterator(const ParamGeneratorInterface<T>* base, T value, int index,\n             IncrementT step)\n        : base_(base), value_(value), index_(index), step_(step) {}\n    ~Iterator() override {}\n\n    const ParamGeneratorInterface<T>* BaseGenerator() const override {\n      return base_;\n    }\n    void Advance() override {\n      value_ = static_cast<T>(value_ + step_);\n      index_++;\n    }\n    ParamIteratorInterface<T>* Clone() const override 
{\n      return new Iterator(*this);\n    }\n    const T* Current() const override { return &value_; }\n    bool Equals(const ParamIteratorInterface<T>& other) const override {\n      // Having the same base generator guarantees that the other\n      // iterator is of the same type and we can downcast.\n      GTEST_CHECK_(BaseGenerator() == other.BaseGenerator())\n          << \"The program attempted to compare iterators \"\n          << \"from different generators.\" << std::endl;\n      const int other_index =\n          CheckedDowncastToActualType<const Iterator>(&other)->index_;\n      return index_ == other_index;\n    }\n\n   private:\n    Iterator(const Iterator& other)\n        : ParamIteratorInterface<T>(),\n          base_(other.base_), value_(other.value_), index_(other.index_),\n          step_(other.step_) {}\n\n    // No implementation - assignment is unsupported.\n    void operator=(const Iterator& other);\n\n    const ParamGeneratorInterface<T>* const base_;\n    T value_;\n    int index_;\n    const IncrementT step_;\n  };  // class RangeGenerator::Iterator\n\n  static int CalculateEndIndex(const T& begin,\n                               const T& end,\n                               const IncrementT& step) {\n    int end_index = 0;\n    for (T i = begin; i < end; i = static_cast<T>(i + step))\n      end_index++;\n    return end_index;\n  }\n\n  // No implementation - assignment is unsupported.\n  void operator=(const RangeGenerator& other);\n\n  const T begin_;\n  const T end_;\n  const IncrementT step_;\n  // The index for the end() iterator. All the elements in the generated\n  // sequence are indexed (0-based) to aid iterator comparison.\n  const int end_index_;\n};  // class RangeGenerator\n\n\n// Generates values from a pair of STL-style iterators. Used in the\n// ValuesIn() function. 
The elements are copied from the source range\n// since the source can be located on the stack, and the generator\n// is likely to persist beyond that stack frame.\ntemplate <typename T>\nclass ValuesInIteratorRangeGenerator : public ParamGeneratorInterface<T> {\n public:\n  template <typename ForwardIterator>\n  ValuesInIteratorRangeGenerator(ForwardIterator begin, ForwardIterator end)\n      : container_(begin, end) {}\n  ~ValuesInIteratorRangeGenerator() override {}\n\n  ParamIteratorInterface<T>* Begin() const override {\n    return new Iterator(this, container_.begin());\n  }\n  ParamIteratorInterface<T>* End() const override {\n    return new Iterator(this, container_.end());\n  }\n\n private:\n  typedef typename ::std::vector<T> ContainerType;\n\n  class Iterator : public ParamIteratorInterface<T> {\n   public:\n    Iterator(const ParamGeneratorInterface<T>* base,\n             typename ContainerType::const_iterator iterator)\n        : base_(base), iterator_(iterator) {}\n    ~Iterator() override {}\n\n    const ParamGeneratorInterface<T>* BaseGenerator() const override {\n      return base_;\n    }\n    void Advance() override {\n      ++iterator_;\n      value_.reset();\n    }\n    ParamIteratorInterface<T>* Clone() const override {\n      return new Iterator(*this);\n    }\n    // We need to use cached value referenced by iterator_ because *iterator_\n    // can return a temporary object (and of type other then T), so just\n    // having \"return &*iterator_;\" doesn't work.\n    // value_ is updated here and not in Advance() because Advance()\n    // can advance iterator_ beyond the end of the range, and we cannot\n    // detect that fact. 
The client code, on the other hand, is\n    // responsible for not calling Current() on an out-of-range iterator.\n    const T* Current() const override {\n      if (value_.get() == nullptr) value_.reset(new T(*iterator_));\n      return value_.get();\n    }\n    bool Equals(const ParamIteratorInterface<T>& other) const override {\n      // Having the same base generator guarantees that the other\n      // iterator is of the same type and we can downcast.\n      GTEST_CHECK_(BaseGenerator() == other.BaseGenerator())\n          << \"The program attempted to compare iterators \"\n          << \"from different generators.\" << std::endl;\n      return iterator_ ==\n          CheckedDowncastToActualType<const Iterator>(&other)->iterator_;\n    }\n\n   private:\n    Iterator(const Iterator& other)\n          // The explicit constructor call suppresses a false warning\n          // emitted by gcc when supplied with the -Wextra option.\n        : ParamIteratorInterface<T>(),\n          base_(other.base_),\n          iterator_(other.iterator_) {}\n\n    const ParamGeneratorInterface<T>* const base_;\n    typename ContainerType::const_iterator iterator_;\n    // A cached value of *iterator_. 
We keep it here to allow access by\n    // pointer in the wrapping iterator's operator->().\n    // value_ needs to be mutable to be accessed in Current().\n    // Use of std::unique_ptr helps manage cached value's lifetime,\n    // which is bound by the lifespan of the iterator itself.\n    mutable std::unique_ptr<const T> value_;\n  };  // class ValuesInIteratorRangeGenerator::Iterator\n\n  // No implementation - assignment is unsupported.\n  void operator=(const ValuesInIteratorRangeGenerator& other);\n\n  const ContainerType container_;\n};  // class ValuesInIteratorRangeGenerator\n\n// INTERNAL IMPLEMENTATION - DO NOT USE IN USER CODE.\n//\n// Default parameterized test name generator, returns a string containing the\n// integer test parameter index.\ntemplate <class ParamType>\nstd::string DefaultParamName(const TestParamInfo<ParamType>& info) {\n  Message name_stream;\n  name_stream << info.index;\n  return name_stream.GetString();\n}\n\ntemplate <typename T = int>\nvoid TestNotEmpty() {\n  static_assert(sizeof(T) == 0, \"Empty arguments are not allowed.\");\n}\ntemplate <typename T = int>\nvoid TestNotEmpty(const T&) {}\n\n// INTERNAL IMPLEMENTATION - DO NOT USE IN USER CODE.\n//\n// Stores a parameter value and later creates tests parameterized with that\n// value.\ntemplate <class TestClass>\nclass ParameterizedTestFactory : public TestFactoryBase {\n public:\n  typedef typename TestClass::ParamType ParamType;\n  explicit ParameterizedTestFactory(ParamType parameter) :\n      parameter_(parameter) {}\n  Test* CreateTest() override {\n    TestClass::SetParam(&parameter_);\n    return new TestClass();\n  }\n\n private:\n  const ParamType parameter_;\n\n  GTEST_DISALLOW_COPY_AND_ASSIGN_(ParameterizedTestFactory);\n};\n\n// INTERNAL IMPLEMENTATION - DO NOT USE IN USER CODE.\n//\n// TestMetaFactoryBase is a base class for meta-factories that create\n// test factories for passing into MakeAndRegisterTestInfo function.\ntemplate <class ParamType>\nclass 
TestMetaFactoryBase {\n public:\n  virtual ~TestMetaFactoryBase() {}\n\n  virtual TestFactoryBase* CreateTestFactory(ParamType parameter) = 0;\n};\n\n// INTERNAL IMPLEMENTATION - DO NOT USE IN USER CODE.\n//\n// TestMetaFactory creates test factories for passing into\n// MakeAndRegisterTestInfo function. Since MakeAndRegisterTestInfo receives\n// ownership of test factory pointer, same factory object cannot be passed\n// into that method twice. But ParameterizedTestSuiteInfo is going to call\n// it for each Test/Parameter value combination. Thus it needs meta factory\n// creator class.\ntemplate <class TestSuite>\nclass TestMetaFactory\n    : public TestMetaFactoryBase<typename TestSuite::ParamType> {\n public:\n  using ParamType = typename TestSuite::ParamType;\n\n  TestMetaFactory() {}\n\n  TestFactoryBase* CreateTestFactory(ParamType parameter) override {\n    return new ParameterizedTestFactory<TestSuite>(parameter);\n  }\n\n private:\n  GTEST_DISALLOW_COPY_AND_ASSIGN_(TestMetaFactory);\n};\n\n// INTERNAL IMPLEMENTATION - DO NOT USE IN USER CODE.\n//\n// ParameterizedTestSuiteInfoBase is a generic interface\n// to ParameterizedTestSuiteInfo classes. ParameterizedTestSuiteInfoBase\n// accumulates test information provided by TEST_P macro invocations\n// and generators provided by INSTANTIATE_TEST_SUITE_P macro invocations\n// and uses that information to register all resulting test instances\n// in RegisterTests method. 
The ParameterizeTestSuiteRegistry class holds\n// a collection of pointers to the ParameterizedTestSuiteInfo objects\n// and calls RegisterTests() on each of them when asked.\nclass ParameterizedTestSuiteInfoBase {\n public:\n  virtual ~ParameterizedTestSuiteInfoBase() {}\n\n  // Base part of test suite name for display purposes.\n  virtual const std::string& GetTestSuiteName() const = 0;\n  // Test case id to verify identity.\n  virtual TypeId GetTestSuiteTypeId() const = 0;\n  // UnitTest class invokes this method to register tests in this\n  // test suite right before running them in RUN_ALL_TESTS macro.\n  // This method should not be called more than once on any single\n  // instance of a ParameterizedTestSuiteInfoBase derived class.\n  virtual void RegisterTests() = 0;\n\n protected:\n  ParameterizedTestSuiteInfoBase() {}\n\n private:\n  GTEST_DISALLOW_COPY_AND_ASSIGN_(ParameterizedTestSuiteInfoBase);\n};\n\n// INTERNAL IMPLEMENTATION - DO NOT USE IN USER CODE.\n//\n// ParameterizedTestSuiteInfo accumulates tests obtained from TEST_P\n// macro invocations for a particular test suite and generators\n// obtained from INSTANTIATE_TEST_SUITE_P macro invocations for that\n// test suite. 
It registers tests with all values generated by all\n// generators when asked.\ntemplate <class TestSuite>\nclass ParameterizedTestSuiteInfo : public ParameterizedTestSuiteInfoBase {\n public:\n  // ParamType and GeneratorCreationFunc are private types but are required\n  // for declarations of public methods AddTestPattern() and\n  // AddTestSuiteInstantiation().\n  using ParamType = typename TestSuite::ParamType;\n  // A function that returns an instance of appropriate generator type.\n  typedef ParamGenerator<ParamType>(GeneratorCreationFunc)();\n  using ParamNameGeneratorFunc = std::string(const TestParamInfo<ParamType>&);\n\n  explicit ParameterizedTestSuiteInfo(const char* name,\n                                      CodeLocation code_location)\n      : test_suite_name_(name), code_location_(code_location) {}\n\n  // Test case base name for display purposes.\n  const std::string& GetTestSuiteName() const override {\n    return test_suite_name_;\n  }\n  // Test case id to verify identity.\n  TypeId GetTestSuiteTypeId() const override { return GetTypeId<TestSuite>(); }\n  // TEST_P macro uses AddTestPattern() to record information\n  // about a single test in a LocalTestInfo structure.\n  // test_suite_name is the base name of the test suite (without invocation\n  // prefix). test_base_name is the name of an individual test without\n  // parameter index. 
For the test SequenceA/FooTest.DoBar/1 FooTest is\n  // test suite base name and DoBar is test base name.\n  void AddTestPattern(const char* test_suite_name, const char* test_base_name,\n                      TestMetaFactoryBase<ParamType>* meta_factory) {\n    tests_.push_back(std::shared_ptr<TestInfo>(\n        new TestInfo(test_suite_name, test_base_name, meta_factory)));\n  }\n  // INSTANTIATE_TEST_SUITE_P macro uses AddGenerator() to record information\n  // about a generator.\n  int AddTestSuiteInstantiation(const std::string& instantiation_name,\n                                GeneratorCreationFunc* func,\n                                ParamNameGeneratorFunc* name_func,\n                                const char* file, int line) {\n    instantiations_.push_back(\n        InstantiationInfo(instantiation_name, func, name_func, file, line));\n    return 0;  // Return value used only to run this method in namespace scope.\n  }\n  // UnitTest class invokes this method to register tests in this test suite\n  // test suites right before running tests in RUN_ALL_TESTS macro.\n  // This method should not be called more than once on any single\n  // instance of a ParameterizedTestSuiteInfoBase derived class.\n  // UnitTest has a guard to prevent from calling this method more than once.\n  void RegisterTests() override {\n    for (typename TestInfoContainer::iterator test_it = tests_.begin();\n         test_it != tests_.end(); ++test_it) {\n      std::shared_ptr<TestInfo> test_info = *test_it;\n      for (typename InstantiationContainer::iterator gen_it =\n               instantiations_.begin(); gen_it != instantiations_.end();\n               ++gen_it) {\n        const std::string& instantiation_name = gen_it->name;\n        ParamGenerator<ParamType> generator((*gen_it->generator)());\n        ParamNameGeneratorFunc* name_func = gen_it->name_func;\n        const char* file = gen_it->file;\n        int line = gen_it->line;\n\n        std::string test_suite_name;\n  
      if ( !instantiation_name.empty() )\n          test_suite_name = instantiation_name + \"/\";\n        test_suite_name += test_info->test_suite_base_name;\n\n        size_t i = 0;\n        std::set<std::string> test_param_names;\n        for (typename ParamGenerator<ParamType>::iterator param_it =\n                 generator.begin();\n             param_it != generator.end(); ++param_it, ++i) {\n          Message test_name_stream;\n\n          std::string param_name = name_func(\n              TestParamInfo<ParamType>(*param_it, i));\n\n          GTEST_CHECK_(IsValidParamName(param_name))\n              << \"Parameterized test name '\" << param_name\n              << \"' is invalid, in \" << file\n              << \" line \" << line << std::endl;\n\n          GTEST_CHECK_(test_param_names.count(param_name) == 0)\n              << \"Duplicate parameterized test name '\" << param_name\n              << \"', in \" << file << \" line \" << line << std::endl;\n\n          test_param_names.insert(param_name);\n\n          test_name_stream << test_info->test_base_name << \"/\" << param_name;\n          MakeAndRegisterTestInfo(\n              test_suite_name.c_str(), test_name_stream.GetString().c_str(),\n              nullptr,  // No type parameter.\n              PrintToString(*param_it).c_str(), code_location_,\n              GetTestSuiteTypeId(),\n              SuiteApiResolver<TestSuite>::GetSetUpCaseOrSuite(file, line),\n              SuiteApiResolver<TestSuite>::GetTearDownCaseOrSuite(file, line),\n              test_info->test_meta_factory->CreateTestFactory(*param_it));\n        }  // for param_it\n      }  // for gen_it\n    }  // for test_it\n  }    // RegisterTests\n\n private:\n  // LocalTestInfo structure keeps information about a single test registered\n  // with TEST_P macro.\n  struct TestInfo {\n    TestInfo(const char* a_test_suite_base_name, const char* a_test_base_name,\n             TestMetaFactoryBase<ParamType>* a_test_meta_factory)\n        : 
test_suite_base_name(a_test_suite_base_name),\n          test_base_name(a_test_base_name),\n          test_meta_factory(a_test_meta_factory) {}\n\n    const std::string test_suite_base_name;\n    const std::string test_base_name;\n    const std::unique_ptr<TestMetaFactoryBase<ParamType> > test_meta_factory;\n  };\n  using TestInfoContainer = ::std::vector<std::shared_ptr<TestInfo> >;\n  // Records data received from INSTANTIATE_TEST_SUITE_P macros:\n  //  <Instantiation name, Sequence generator creation function,\n  //     Name generator function, Source file, Source line>\n  struct InstantiationInfo {\n      InstantiationInfo(const std::string &name_in,\n                        GeneratorCreationFunc* generator_in,\n                        ParamNameGeneratorFunc* name_func_in,\n                        const char* file_in,\n                        int line_in)\n          : name(name_in),\n            generator(generator_in),\n            name_func(name_func_in),\n            file(file_in),\n            line(line_in) {}\n\n      std::string name;\n      GeneratorCreationFunc* generator;\n      ParamNameGeneratorFunc* name_func;\n      const char* file;\n      int line;\n  };\n  typedef ::std::vector<InstantiationInfo> InstantiationContainer;\n\n  static bool IsValidParamName(const std::string& name) {\n    // Check for empty string\n    if (name.empty())\n      return false;\n\n    // Check for invalid characters\n    for (std::string::size_type index = 0; index < name.size(); ++index) {\n      if (!isalnum(name[index]) && name[index] != '_')\n        return false;\n    }\n\n    return true;\n  }\n\n  const std::string test_suite_name_;\n  CodeLocation code_location_;\n  TestInfoContainer tests_;\n  InstantiationContainer instantiations_;\n\n  GTEST_DISALLOW_COPY_AND_ASSIGN_(ParameterizedTestSuiteInfo);\n};  // class ParameterizedTestSuiteInfo\n\n//  Legacy API is deprecated but still available\n#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_\ntemplate <class 
TestCase>\nusing ParameterizedTestCaseInfo = ParameterizedTestSuiteInfo<TestCase>;\n#endif  //  GTEST_REMOVE_LEGACY_TEST_CASEAPI_\n\n// INTERNAL IMPLEMENTATION - DO NOT USE IN USER CODE.\n//\n// ParameterizedTestSuiteRegistry contains a map of\n// ParameterizedTestSuiteInfoBase classes accessed by test suite names. TEST_P\n// and INSTANTIATE_TEST_SUITE_P macros use it to locate their corresponding\n// ParameterizedTestSuiteInfo descriptors.\nclass ParameterizedTestSuiteRegistry {\n public:\n  ParameterizedTestSuiteRegistry() {}\n  ~ParameterizedTestSuiteRegistry() {\n    for (auto& test_suite_info : test_suite_infos_) {\n      delete test_suite_info;\n    }\n  }\n\n  // Looks up or creates and returns a structure containing information about\n  // tests and instantiations of a particular test suite.\n  template <class TestSuite>\n  ParameterizedTestSuiteInfo<TestSuite>* GetTestSuitePatternHolder(\n      const char* test_suite_name, CodeLocation code_location) {\n    ParameterizedTestSuiteInfo<TestSuite>* typed_test_info = nullptr;\n    for (auto& test_suite_info : test_suite_infos_) {\n      if (test_suite_info->GetTestSuiteName() == test_suite_name) {\n        if (test_suite_info->GetTestSuiteTypeId() != GetTypeId<TestSuite>()) {\n          // Complain about incorrect usage of Google Test facilities\n          // and terminate the program since we cannot guaranty correct\n          // test suite setup and tear-down in this case.\n          ReportInvalidTestSuiteType(test_suite_name, code_location);\n          posix::Abort();\n        } else {\n          // At this point we are sure that the object we found is of the same\n          // type we are looking for, so we downcast it to that type\n          // without further checks.\n          typed_test_info = CheckedDowncastToActualType<\n              ParameterizedTestSuiteInfo<TestSuite> >(test_suite_info);\n        }\n        break;\n      }\n    }\n    if (typed_test_info == nullptr) {\n      typed_test_info = new 
ParameterizedTestSuiteInfo<TestSuite>(\n          test_suite_name, code_location);\n      test_suite_infos_.push_back(typed_test_info);\n    }\n    return typed_test_info;\n  }\n  void RegisterTests() {\n    for (auto& test_suite_info : test_suite_infos_) {\n      test_suite_info->RegisterTests();\n    }\n  }\n//  Legacy API is deprecated but still available\n#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_\n  template <class TestCase>\n  ParameterizedTestCaseInfo<TestCase>* GetTestCasePatternHolder(\n      const char* test_case_name, CodeLocation code_location) {\n    return GetTestSuitePatternHolder<TestCase>(test_case_name, code_location);\n  }\n\n#endif  //  GTEST_REMOVE_LEGACY_TEST_CASEAPI_\n\n private:\n  using TestSuiteInfoContainer = ::std::vector<ParameterizedTestSuiteInfoBase*>;\n\n  TestSuiteInfoContainer test_suite_infos_;\n\n  GTEST_DISALLOW_COPY_AND_ASSIGN_(ParameterizedTestSuiteRegistry);\n};\n\n}  // namespace internal\n\n// Forward declarations of ValuesIn(), which is implemented in\n// include/gtest/gtest-param-test.h.\ntemplate <class Container>\ninternal::ParamGenerator<typename Container::value_type> ValuesIn(\n    const Container& container);\n\nnamespace internal {\n// Used in the Values() function to provide polymorphic capabilities.\n\ntemplate <typename... Ts>\nclass ValueArray {\n public:\n  ValueArray(Ts... v) : v_{std::move(v)...} {}\n\n  template <typename T>\n  operator ParamGenerator<T>() const {  // NOLINT\n    return ValuesIn(MakeVector<T>(MakeIndexSequence<sizeof...(Ts)>()));\n  }\n\n private:\n  template <typename T, size_t... I>\n  std::vector<T> MakeVector(IndexSequence<I...>) const {\n    return std::vector<T>{static_cast<T>(v_.template Get<I>())...};\n  }\n\n  FlatTuple<Ts...> v_;\n};\n\ntemplate <typename... 
T>\nclass CartesianProductGenerator\n    : public ParamGeneratorInterface<::std::tuple<T...>> {\n public:\n  typedef ::std::tuple<T...> ParamType;\n\n  CartesianProductGenerator(const std::tuple<ParamGenerator<T>...>& g)\n      : generators_(g) {}\n  ~CartesianProductGenerator() override {}\n\n  ParamIteratorInterface<ParamType>* Begin() const override {\n    return new Iterator(this, generators_, false);\n  }\n  ParamIteratorInterface<ParamType>* End() const override {\n    return new Iterator(this, generators_, true);\n  }\n\n private:\n  template <class I>\n  class IteratorImpl;\n  template <size_t... I>\n  class IteratorImpl<IndexSequence<I...>>\n      : public ParamIteratorInterface<ParamType> {\n   public:\n    IteratorImpl(const ParamGeneratorInterface<ParamType>* base,\n             const std::tuple<ParamGenerator<T>...>& generators, bool is_end)\n        : base_(base),\n          begin_(std::get<I>(generators).begin()...),\n          end_(std::get<I>(generators).end()...),\n          current_(is_end ? 
end_ : begin_) {\n      ComputeCurrentValue();\n    }\n    ~IteratorImpl() override {}\n\n    const ParamGeneratorInterface<ParamType>* BaseGenerator() const override {\n      return base_;\n    }\n    // Advance should not be called on beyond-of-range iterators\n    // so no component iterators must be beyond end of range, either.\n    void Advance() override {\n      assert(!AtEnd());\n      // Advance the last iterator.\n      ++std::get<sizeof...(T) - 1>(current_);\n      // if that reaches end, propagate that up.\n      AdvanceIfEnd<sizeof...(T) - 1>();\n      ComputeCurrentValue();\n    }\n    ParamIteratorInterface<ParamType>* Clone() const override {\n      return new IteratorImpl(*this);\n    }\n\n    const ParamType* Current() const override { return current_value_.get(); }\n\n    bool Equals(const ParamIteratorInterface<ParamType>& other) const override {\n      // Having the same base generator guarantees that the other\n      // iterator is of the same type and we can downcast.\n      GTEST_CHECK_(BaseGenerator() == other.BaseGenerator())\n          << \"The program attempted to compare iterators \"\n          << \"from different generators.\" << std::endl;\n      const IteratorImpl* typed_other =\n          CheckedDowncastToActualType<const IteratorImpl>(&other);\n\n      // We must report iterators equal if they both point beyond their\n      // respective ranges. That can happen in a variety of fashions,\n      // so we have to consult AtEnd().\n      if (AtEnd() && typed_other->AtEnd()) return true;\n\n      bool same = true;\n      bool dummy[] = {\n          (same = same && std::get<I>(current_) ==\n                              std::get<I>(typed_other->current_))...};\n      (void)dummy;\n      return same;\n    }\n\n   private:\n    template <size_t ThisI>\n    void AdvanceIfEnd() {\n      if (std::get<ThisI>(current_) != std::get<ThisI>(end_)) return;\n\n      bool last = ThisI == 0;\n      if (last) {\n        // We are done. 
Nothing else to propagate.\n        return;\n      }\n\n      constexpr size_t NextI = ThisI - (ThisI != 0);\n      std::get<ThisI>(current_) = std::get<ThisI>(begin_);\n      ++std::get<NextI>(current_);\n      AdvanceIfEnd<NextI>();\n    }\n\n    void ComputeCurrentValue() {\n      if (!AtEnd())\n        current_value_ = std::make_shared<ParamType>(*std::get<I>(current_)...);\n    }\n    bool AtEnd() const {\n      bool at_end = false;\n      bool dummy[] = {\n          (at_end = at_end || std::get<I>(current_) == std::get<I>(end_))...};\n      (void)dummy;\n      return at_end;\n    }\n\n    const ParamGeneratorInterface<ParamType>* const base_;\n    std::tuple<typename ParamGenerator<T>::iterator...> begin_;\n    std::tuple<typename ParamGenerator<T>::iterator...> end_;\n    std::tuple<typename ParamGenerator<T>::iterator...> current_;\n    std::shared_ptr<ParamType> current_value_;\n  };\n\n  using Iterator = IteratorImpl<typename MakeIndexSequence<sizeof...(T)>::type>;\n\n  std::tuple<ParamGenerator<T>...> generators_;\n};\n\ntemplate <class... Gen>\nclass CartesianProductHolder {\n public:\n  CartesianProductHolder(const Gen&... g) : generators_(g...) {}\n  template <typename... T>\n  operator ParamGenerator<::std::tuple<T...>>() const {\n    return ParamGenerator<::std::tuple<T...>>(\n        new CartesianProductGenerator<T...>(generators_));\n  }\n\n private:\n  std::tuple<Gen...> generators_;\n};\n\n}  // namespace internal\n}  // namespace testing\n\n#endif  // GTEST_INCLUDE_GTEST_INTERNAL_GTEST_PARAM_UTIL_H_\n"
  },
  {
    "path": "test/googletest/include/gtest/internal/gtest-port-arch.h",
    "content": "// Copyright 2015, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n//\n// The Google C++ Testing and Mocking Framework (Google Test)\n//\n// This header file defines the GTEST_OS_* macro.\n// It is separate from gtest-port.h so that custom/gtest-port.h can include it.\n\n#ifndef GTEST_INCLUDE_GTEST_INTERNAL_GTEST_PORT_ARCH_H_\n#define GTEST_INCLUDE_GTEST_INTERNAL_GTEST_PORT_ARCH_H_\n\n// Determines the platform on which Google Test is compiled.\n#ifdef __CYGWIN__\n# define GTEST_OS_CYGWIN 1\n# elif defined(__MINGW__) || defined(__MINGW32__) || defined(__MINGW64__)\n#  define GTEST_OS_WINDOWS_MINGW 1\n#  define GTEST_OS_WINDOWS 1\n#elif defined _WIN32\n# define GTEST_OS_WINDOWS 1\n# ifdef _WIN32_WCE\n#  define GTEST_OS_WINDOWS_MOBILE 1\n# elif defined(WINAPI_FAMILY)\n#  include <winapifamily.h>\n#  if WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_DESKTOP)\n#   define GTEST_OS_WINDOWS_DESKTOP 1\n#  elif WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_PHONE_APP)\n#   define GTEST_OS_WINDOWS_PHONE 1\n#  elif WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_APP)\n#   define GTEST_OS_WINDOWS_RT 1\n#  elif WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_TV_TITLE)\n#   define GTEST_OS_WINDOWS_PHONE 1\n#   define GTEST_OS_WINDOWS_TV_TITLE 1\n#  else\n    // WINAPI_FAMILY defined but no known partition matched.\n    // Default to desktop.\n#   define GTEST_OS_WINDOWS_DESKTOP 1\n#  endif\n# else\n#  define GTEST_OS_WINDOWS_DESKTOP 1\n# endif  // _WIN32_WCE\n#elif defined __OS2__\n# define GTEST_OS_OS2 1\n#elif defined __APPLE__\n# 
define GTEST_OS_MAC 1\n# if TARGET_OS_IPHONE\n#  define GTEST_OS_IOS 1\n# endif\n#elif defined __DragonFly__\n# define GTEST_OS_DRAGONFLY 1\n#elif defined __FreeBSD__\n# define GTEST_OS_FREEBSD 1\n#elif defined __Fuchsia__\n# define GTEST_OS_FUCHSIA 1\n#elif defined(__GLIBC__) && defined(__FreeBSD_kernel__)\n# define GTEST_OS_GNU_KFREEBSD 1\n#elif defined __linux__\n# define GTEST_OS_LINUX 1\n# if defined __ANDROID__\n#  define GTEST_OS_LINUX_ANDROID 1\n# endif\n#elif defined __MVS__\n# define GTEST_OS_ZOS 1\n#elif defined(__sun) && defined(__SVR4)\n# define GTEST_OS_SOLARIS 1\n#elif defined(_AIX)\n# define GTEST_OS_AIX 1\n#elif defined(__hpux)\n# define GTEST_OS_HPUX 1\n#elif defined __native_client__\n# define GTEST_OS_NACL 1\n#elif defined __NetBSD__\n# define GTEST_OS_NETBSD 1\n#elif defined __OpenBSD__\n# define GTEST_OS_OPENBSD 1\n#elif defined __QNX__\n# define GTEST_OS_QNX 1\n#elif defined(__HAIKU__)\n#define GTEST_OS_HAIKU 1\n#elif defined ESP8266\n#define GTEST_OS_ESP8266 1\n#elif defined ESP32\n#define GTEST_OS_ESP32 1\n#endif  // __CYGWIN__\n\n#endif  // GTEST_INCLUDE_GTEST_INTERNAL_GTEST_PORT_ARCH_H_\n"
  },
  {
    "path": "test/googletest/include/gtest/internal/gtest-port.h",
    "content": "// Copyright 2005, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n//\n// Low-level types and utilities for porting Google Test to various\n// platforms.  All macros ending with _ and symbols defined in an\n// internal namespace are subject to change without notice.  Code\n// outside Google Test MUST NOT USE THEM DIRECTLY.  
Macros that don't\n// end with _ are part of Google Test's public API and can be used by\n// code outside Google Test.\n//\n// This file is fundamental to Google Test.  All other Google Test source\n// files are expected to #include this.  Therefore, it cannot #include\n// any other Google Test header.\n\n// GOOGLETEST_CM0001 DO NOT DELETE\n\n#ifndef GTEST_INCLUDE_GTEST_INTERNAL_GTEST_PORT_H_\n#define GTEST_INCLUDE_GTEST_INTERNAL_GTEST_PORT_H_\n\n// Environment-describing macros\n// -----------------------------\n//\n// Google Test can be used in many different environments.  Macros in\n// this section tell Google Test what kind of environment it is being\n// used in, such that Google Test can provide environment-specific\n// features and implementations.\n//\n// Google Test tries to automatically detect the properties of its\n// environment, so users usually don't need to worry about these\n// macros.  However, the automatic detection is not perfect.\n// Sometimes it's necessary for a user to define some of the following\n// macros in the build script to override Google Test's decisions.\n//\n// If the user doesn't define a macro in the list, Google Test will\n// provide a default definition.  After this header is #included, all\n// macros in this list will be defined to either 1 or 0.\n//\n// Notes to maintainers:\n//   - Each macro here is a user-tweakable knob; do not grow the list\n//     lightly.\n//   - Use #if to key off these macros.  
Don't use #ifdef or \"#if\n//     defined(...)\", which will not work as these macros are ALWAYS\n//     defined.\n//\n//   GTEST_HAS_CLONE          - Define it to 1/0 to indicate that clone(2)\n//                              is/isn't available.\n//   GTEST_HAS_EXCEPTIONS     - Define it to 1/0 to indicate that exceptions\n//                              are enabled.\n//   GTEST_HAS_POSIX_RE       - Define it to 1/0 to indicate that POSIX regular\n//                              expressions are/aren't available.\n//   GTEST_HAS_PTHREAD        - Define it to 1/0 to indicate that <pthread.h>\n//                              is/isn't available.\n//   GTEST_HAS_RTTI           - Define it to 1/0 to indicate that RTTI is/isn't\n//                              enabled.\n//   GTEST_HAS_STD_WSTRING    - Define it to 1/0 to indicate that\n//                              std::wstring does/doesn't work (Google Test can\n//                              be used where std::wstring is unavailable).\n//   GTEST_HAS_SEH            - Define it to 1/0 to indicate whether the\n//                              compiler supports Microsoft's \"Structured\n//                              Exception Handling\".\n//   GTEST_HAS_STREAM_REDIRECTION\n//                            - Define it to 1/0 to indicate whether the\n//                              platform supports I/O stream redirection using\n//                              dup() and dup2().\n//   GTEST_LINKED_AS_SHARED_LIBRARY\n//                            - Define to 1 when compiling tests that use\n//                              Google Test as a shared library (known as\n//                              DLL on Windows).\n//   GTEST_CREATE_SHARED_LIBRARY\n//                            - Define to 1 when compiling Google Test itself\n//                              as a shared library.\n//   GTEST_DEFAULT_DEATH_TEST_STYLE\n//                            - The default value of --gtest_death_test_style.\n//                              
The legacy default has been \"fast\" in the open\n//                              source version since 2008. The recommended value\n//                              is \"threadsafe\", and can be set in\n//                              custom/gtest-port.h.\n\n// Platform-indicating macros\n// --------------------------\n//\n// Macros indicating the platform on which Google Test is being used\n// (a macro is defined to 1 if compiled on the given platform;\n// otherwise UNDEFINED -- it's never defined to 0.).  Google Test\n// defines these macros automatically.  Code outside Google Test MUST\n// NOT define them.\n//\n//   GTEST_OS_AIX      - IBM AIX\n//   GTEST_OS_CYGWIN   - Cygwin\n//   GTEST_OS_DRAGONFLY - DragonFlyBSD\n//   GTEST_OS_FREEBSD  - FreeBSD\n//   GTEST_OS_FUCHSIA  - Fuchsia\n//   GTEST_OS_GNU_KFREEBSD - GNU/kFreeBSD\n//   GTEST_OS_HAIKU    - Haiku\n//   GTEST_OS_HPUX     - HP-UX\n//   GTEST_OS_LINUX    - Linux\n//     GTEST_OS_LINUX_ANDROID - Google Android\n//   GTEST_OS_MAC      - Mac OS X\n//     GTEST_OS_IOS    - iOS\n//   GTEST_OS_NACL     - Google Native Client (NaCl)\n//   GTEST_OS_NETBSD   - NetBSD\n//   GTEST_OS_OPENBSD  - OpenBSD\n//   GTEST_OS_OS2      - OS/2\n//   GTEST_OS_QNX      - QNX\n//   GTEST_OS_SOLARIS  - Sun Solaris\n//   GTEST_OS_WINDOWS  - Windows (Desktop, MinGW, or Mobile)\n//     GTEST_OS_WINDOWS_DESKTOP  - Windows Desktop\n//     GTEST_OS_WINDOWS_MINGW    - MinGW\n//     GTEST_OS_WINDOWS_MOBILE   - Windows Mobile\n//     GTEST_OS_WINDOWS_PHONE    - Windows Phone\n//     GTEST_OS_WINDOWS_RT       - Windows Store App/WinRT\n//   GTEST_OS_ZOS      - z/OS\n//\n// Among the platforms, Cygwin, Linux, Mac OS X, and Windows have the\n// most stable support.  Since core members of the Google Test project\n// don't have access to other platforms, support for them may be less\n// stable.  
If you notice any problems on your platform, please notify\n// googletestframework@googlegroups.com (patches for fixing them are\n// even more welcome!).\n//\n// It is possible that none of the GTEST_OS_* macros are defined.\n\n// Feature-indicating macros\n// -------------------------\n//\n// Macros indicating which Google Test features are available (a macro\n// is defined to 1 if the corresponding feature is supported;\n// otherwise UNDEFINED -- it's never defined to 0.).  Google Test\n// defines these macros automatically.  Code outside Google Test MUST\n// NOT define them.\n//\n// These macros are public so that portable tests can be written.\n// Such tests typically surround code using a feature with an #if\n// which controls that code.  For example:\n//\n// #if GTEST_HAS_DEATH_TEST\n//   EXPECT_DEATH(DoSomethingDeadly());\n// #endif\n//\n//   GTEST_HAS_DEATH_TEST   - death tests\n//   GTEST_HAS_TYPED_TEST   - typed tests\n//   GTEST_HAS_TYPED_TEST_P - type-parameterized tests\n//   GTEST_IS_THREADSAFE    - Google Test is thread-safe.\n//   GOOGLETEST_CM0007 DO NOT DELETE\n//   GTEST_USES_POSIX_RE    - enhanced POSIX regex is used. Do not confuse with\n//                            GTEST_HAS_POSIX_RE (see above) which users can\n//                            define themselves.\n//   GTEST_USES_SIMPLE_RE   - our own simple regex is used;\n//                            the above RE\\b(s) are mutually exclusive.\n\n// Misc public macros\n// ------------------\n//\n//   GTEST_FLAG(flag_name)  - references the variable corresponding to\n//                            the given Google Test flag.\n\n// Internal utilities\n// ------------------\n//\n// The following macros and utilities are for Google Test's INTERNAL\n// use only.  
Code outside Google Test MUST NOT USE THEM DIRECTLY.\n//\n// Macros for basic C++ coding:\n//   GTEST_AMBIGUOUS_ELSE_BLOCKER_ - for disabling a gcc warning.\n//   GTEST_ATTRIBUTE_UNUSED_  - declares that a class' instances or a\n//                              variable don't have to be used.\n//   GTEST_DISALLOW_ASSIGN_   - disables operator=.\n//   GTEST_DISALLOW_COPY_AND_ASSIGN_ - disables copy ctor and operator=.\n//   GTEST_MUST_USE_RESULT_   - declares that a function's result must be used.\n//   GTEST_INTENTIONAL_CONST_COND_PUSH_ - start code section where MSVC C4127 is\n//                                        suppressed (constant conditional).\n//   GTEST_INTENTIONAL_CONST_COND_POP_  - finish code section where MSVC C4127\n//                                        is suppressed.\n//\n// Synchronization:\n//   Mutex, MutexLock, ThreadLocal, GetThreadCount()\n//                            - synchronization primitives.\n//\n// Regular expressions:\n//   RE             - a simple regular expression class using the POSIX\n//                    Extended Regular Expression syntax on UNIX-like platforms\n//                    GOOGLETEST_CM0008 DO NOT DELETE\n//                    or a reduced regular exception syntax on other\n//                    platforms, including Windows.\n// Logging:\n//   GTEST_LOG_()   - logs messages at the specified severity level.\n//   LogToStderr()  - directs all log messages to stderr.\n//   FlushInfoLog() - flushes informational log messages.\n//\n// Stdout and stderr capturing:\n//   CaptureStdout()     - starts capturing stdout.\n//   GetCapturedStdout() - stops capturing stdout and returns the captured\n//                         string.\n//   CaptureStderr()     - starts capturing stderr.\n//   GetCapturedStderr() - stops capturing stderr and returns the captured\n//                         string.\n//\n// Integer types:\n//   TypeWithSize   - maps an integer to a int type.\n//   Int32, UInt32, Int64, UInt64, TimeInMillis\n//   
               - integers of known sizes.\n//   BiggestInt     - the biggest signed integer type.\n//\n// Command-line utilities:\n//   GTEST_DECLARE_*()  - declares a flag.\n//   GTEST_DEFINE_*()   - defines a flag.\n//   GetInjectableArgvs() - returns the command line as a vector of strings.\n//\n// Environment variable utilities:\n//   GetEnv()             - gets the value of an environment variable.\n//   BoolFromGTestEnv()   - parses a bool environment variable.\n//   Int32FromGTestEnv()  - parses an Int32 environment variable.\n//   StringFromGTestEnv() - parses a string environment variable.\n//\n// Deprecation warnings:\n//   GTEST_INTERNAL_DEPRECATED(message) - attribute marking a function as\n//                                        deprecated; calling a marked function\n//                                        should generate a compiler warning\n\n#include <ctype.h>   // for isspace, etc\n#include <stddef.h>  // for ptrdiff_t\n#include <stdio.h>\n#include <stdlib.h>\n#include <string.h>\n#include <type_traits>\n\n#ifndef _WIN32_WCE\n# include <sys/types.h>\n# include <sys/stat.h>\n#endif  // !_WIN32_WCE\n\n#if defined __APPLE__\n# include <AvailabilityMacros.h>\n# include <TargetConditionals.h>\n#endif\n\n#include <iostream>  // NOLINT\n#include <memory>\n#include <string>  // NOLINT\n#include <tuple>\n#include <vector>  // NOLINT\n\n#include \"gtest/internal/custom/gtest-port.h\"\n#include \"gtest/internal/gtest-port-arch.h\"\n\n#if !defined(GTEST_DEV_EMAIL_)\n# define GTEST_DEV_EMAIL_ \"googletestframework@@googlegroups.com\"\n# define GTEST_FLAG_PREFIX_ \"gtest_\"\n# define GTEST_FLAG_PREFIX_DASH_ \"gtest-\"\n# define GTEST_FLAG_PREFIX_UPPER_ \"GTEST_\"\n# define GTEST_NAME_ \"Google Test\"\n# define GTEST_PROJECT_URL_ \"https://github.com/google/googletest/\"\n#endif  // !defined(GTEST_DEV_EMAIL_)\n\n#if !defined(GTEST_INIT_GOOGLE_TEST_NAME_)\n# define GTEST_INIT_GOOGLE_TEST_NAME_ \"testing::InitGoogleTest\"\n#endif  // 
!defined(GTEST_INIT_GOOGLE_TEST_NAME_)\n\n// Determines the version of gcc that is used to compile this.\n#ifdef __GNUC__\n// 40302 means version 4.3.2.\n# define GTEST_GCC_VER_ \\\n    (__GNUC__*10000 + __GNUC_MINOR__*100 + __GNUC_PATCHLEVEL__)\n#endif  // __GNUC__\n\n// Macros for disabling Microsoft Visual C++ warnings.\n//\n//   GTEST_DISABLE_MSC_WARNINGS_PUSH_(4800 4385)\n//   /* code that triggers warnings C4800 and C4385 */\n//   GTEST_DISABLE_MSC_WARNINGS_POP_()\n#if defined(_MSC_VER)\n# define GTEST_DISABLE_MSC_WARNINGS_PUSH_(warnings) \\\n    __pragma(warning(push))                        \\\n    __pragma(warning(disable: warnings))\n# define GTEST_DISABLE_MSC_WARNINGS_POP_()          \\\n    __pragma(warning(pop))\n#else\n// Not all compilers are MSVC\n# define GTEST_DISABLE_MSC_WARNINGS_PUSH_(warnings)\n# define GTEST_DISABLE_MSC_WARNINGS_POP_()\n#endif\n\n// Clang on Windows does not understand MSVC's pragma warning.\n// We need clang-specific way to disable function deprecation warning.\n#ifdef __clang__\n# define GTEST_DISABLE_MSC_DEPRECATED_PUSH_()                         \\\n    _Pragma(\"clang diagnostic push\")                                  \\\n    _Pragma(\"clang diagnostic ignored \\\"-Wdeprecated-declarations\\\"\") \\\n    _Pragma(\"clang diagnostic ignored \\\"-Wdeprecated-implementations\\\"\")\n#define GTEST_DISABLE_MSC_DEPRECATED_POP_() \\\n    _Pragma(\"clang diagnostic pop\")\n#else\n# define GTEST_DISABLE_MSC_DEPRECATED_PUSH_() \\\n    GTEST_DISABLE_MSC_WARNINGS_PUSH_(4996)\n# define GTEST_DISABLE_MSC_DEPRECATED_POP_() \\\n    GTEST_DISABLE_MSC_WARNINGS_POP_()\n#endif\n\n// Brings in definitions for functions used in the testing::internal::posix\n// namespace (read, write, close, chdir, isatty, stat). 
We do not currently\n// use them on Windows Mobile.\n#if GTEST_OS_WINDOWS\n# if !GTEST_OS_WINDOWS_MOBILE\n#  include <direct.h>\n#  include <io.h>\n# endif\n// In order to avoid having to include <windows.h>, use forward declaration\n#if GTEST_OS_WINDOWS_MINGW && !defined(__MINGW64_VERSION_MAJOR)\n// MinGW defined _CRITICAL_SECTION and _RTL_CRITICAL_SECTION as two\n// separate (equivalent) structs, instead of using typedef\ntypedef struct _CRITICAL_SECTION GTEST_CRITICAL_SECTION;\n#else\n// Assume CRITICAL_SECTION is a typedef of _RTL_CRITICAL_SECTION.\n// This assumption is verified by\n// WindowsTypesTest.CRITICAL_SECTIONIs_RTL_CRITICAL_SECTION.\ntypedef struct _RTL_CRITICAL_SECTION GTEST_CRITICAL_SECTION;\n#endif\n#else\n// This assumes that non-Windows OSes provide unistd.h. For OSes where this\n// is not the case, we need to include headers that provide the functions\n// mentioned above.\n# include <unistd.h>\n# include <strings.h>\n#endif  // GTEST_OS_WINDOWS\n\n#if GTEST_OS_LINUX_ANDROID\n// Used to define __ANDROID_API__ matching the target NDK API level.\n#  include <android/api-level.h>  // NOLINT\n#endif\n\n// Defines this to true if and only if Google Test can use POSIX regular\n// expressions.\n#ifndef GTEST_HAS_POSIX_RE\n# if GTEST_OS_LINUX_ANDROID\n// On Android, <regex.h> is only available starting with Gingerbread.\n#  define GTEST_HAS_POSIX_RE (__ANDROID_API__ >= 9)\n# else\n#  define GTEST_HAS_POSIX_RE (!GTEST_OS_WINDOWS)\n# endif\n#endif\n\n#if GTEST_USES_PCRE\n// The appropriate headers have already been included.\n\n#elif GTEST_HAS_POSIX_RE\n\n// On some platforms, <regex.h> needs someone to define size_t, and\n// won't compile otherwise.  We can #include it here as we already\n// included <stdlib.h>, which is guaranteed to define size_t through\n// <stddef.h>.\n# include <regex.h>  // NOLINT\n\n# define GTEST_USES_POSIX_RE 1\n\n#elif GTEST_OS_WINDOWS\n\n// <regex.h> is not available on Windows.  
Use our own simple regex\n// implementation instead.\n# define GTEST_USES_SIMPLE_RE 1\n\n#else\n\n// <regex.h> may not be available on this platform.  Use our own\n// simple regex implementation instead.\n# define GTEST_USES_SIMPLE_RE 1\n\n#endif  // GTEST_USES_PCRE\n\n#ifndef GTEST_HAS_EXCEPTIONS\n// The user didn't tell us whether exceptions are enabled, so we need\n// to figure it out.\n# if defined(_MSC_VER) && defined(_CPPUNWIND)\n// MSVC defines _CPPUNWIND to 1 if and only if exceptions are enabled.\n#  define GTEST_HAS_EXCEPTIONS 1\n# elif defined(__BORLANDC__)\n// C++Builder's implementation of the STL uses the _HAS_EXCEPTIONS\n// macro to enable exceptions, so we'll do the same.\n// Assumes that exceptions are enabled by default.\n#  ifndef _HAS_EXCEPTIONS\n#   define _HAS_EXCEPTIONS 1\n#  endif  // _HAS_EXCEPTIONS\n#  define GTEST_HAS_EXCEPTIONS _HAS_EXCEPTIONS\n# elif defined(__clang__)\n// clang defines __EXCEPTIONS if and only if exceptions are enabled before clang\n// 220714, but if and only if cleanups are enabled after that. In Obj-C++ files,\n// there can be cleanups for ObjC exceptions which also need cleanups, even if\n// C++ exceptions are disabled. clang has __has_feature(cxx_exceptions) which\n// checks for C++ exceptions starting at clang r206352, but which checked for\n// cleanups prior to that. To reliably check for C++ exception availability with\n// clang, check for\n// __EXCEPTIONS && __has_feature(cxx_exceptions).\n#  define GTEST_HAS_EXCEPTIONS (__EXCEPTIONS && __has_feature(cxx_exceptions))\n# elif defined(__GNUC__) && __EXCEPTIONS\n// gcc defines __EXCEPTIONS to 1 if and only if exceptions are enabled.\n#  define GTEST_HAS_EXCEPTIONS 1\n# elif defined(__SUNPRO_CC)\n// Sun Pro CC supports exceptions.  However, there is no compile-time way of\n// detecting whether they are enabled or not.  
Therefore, we assume that\n// they are enabled unless the user tells us otherwise.\n#  define GTEST_HAS_EXCEPTIONS 1\n# elif defined(__IBMCPP__) && __EXCEPTIONS\n// xlC defines __EXCEPTIONS to 1 if and only if exceptions are enabled.\n#  define GTEST_HAS_EXCEPTIONS 1\n# elif defined(__HP_aCC)\n// Exception handling is in effect by default in HP aCC compiler. It has to\n// be turned of by +noeh compiler option if desired.\n#  define GTEST_HAS_EXCEPTIONS 1\n# else\n// For other compilers, we assume exceptions are disabled to be\n// conservative.\n#  define GTEST_HAS_EXCEPTIONS 0\n# endif  // defined(_MSC_VER) || defined(__BORLANDC__)\n#endif  // GTEST_HAS_EXCEPTIONS\n\n#ifndef GTEST_HAS_STD_WSTRING\n// The user didn't tell us whether ::std::wstring is available, so we need\n// to figure it out.\n// Cygwin 1.7 and below doesn't support ::std::wstring.\n// Solaris' libc++ doesn't support it either.  Android has\n// no support for it at least as recent as Froyo (2.2).\n#define GTEST_HAS_STD_WSTRING                                         \\\n  (!(GTEST_OS_LINUX_ANDROID || GTEST_OS_CYGWIN || GTEST_OS_SOLARIS || \\\n     GTEST_OS_HAIKU || GTEST_OS_ESP32 || GTEST_OS_ESP8266))\n\n#endif  // GTEST_HAS_STD_WSTRING\n\n// Determines whether RTTI is available.\n#ifndef GTEST_HAS_RTTI\n// The user didn't tell us whether RTTI is enabled, so we need to\n// figure it out.\n\n# ifdef _MSC_VER\n\n#ifdef _CPPRTTI  // MSVC defines this macro if and only if RTTI is enabled.\n#   define GTEST_HAS_RTTI 1\n#  else\n#   define GTEST_HAS_RTTI 0\n#  endif\n\n// Starting with version 4.3.2, gcc defines __GXX_RTTI if and only if RTTI is\n// enabled.\n# elif defined(__GNUC__)\n\n#  ifdef __GXX_RTTI\n// When building against STLport with the Android NDK and with\n// -frtti -fno-exceptions, the build fails at link time with undefined\n// references to __cxa_bad_typeid. 
Note sure if STL or toolchain bug,\n// so disable RTTI when detected.\n#   if GTEST_OS_LINUX_ANDROID && defined(_STLPORT_MAJOR) && \\\n       !defined(__EXCEPTIONS)\n#    define GTEST_HAS_RTTI 0\n#   else\n#    define GTEST_HAS_RTTI 1\n#   endif  // GTEST_OS_LINUX_ANDROID && __STLPORT_MAJOR && !__EXCEPTIONS\n#  else\n#   define GTEST_HAS_RTTI 0\n#  endif  // __GXX_RTTI\n\n// Clang defines __GXX_RTTI starting with version 3.0, but its manual recommends\n// using has_feature instead. has_feature(cxx_rtti) is supported since 2.7, the\n// first version with C++ support.\n# elif defined(__clang__)\n\n#  define GTEST_HAS_RTTI __has_feature(cxx_rtti)\n\n// Starting with version 9.0 IBM Visual Age defines __RTTI_ALL__ to 1 if\n// both the typeid and dynamic_cast features are present.\n# elif defined(__IBMCPP__) && (__IBMCPP__ >= 900)\n\n#  ifdef __RTTI_ALL__\n#   define GTEST_HAS_RTTI 1\n#  else\n#   define GTEST_HAS_RTTI 0\n#  endif\n\n# else\n\n// For all other compilers, we assume RTTI is enabled.\n#  define GTEST_HAS_RTTI 1\n\n# endif  // _MSC_VER\n\n#endif  // GTEST_HAS_RTTI\n\n// It's this header's responsibility to #include <typeinfo> when RTTI\n// is enabled.\n#if GTEST_HAS_RTTI\n# include <typeinfo>\n#endif\n\n// Determines whether Google Test can use the pthreads library.\n#ifndef GTEST_HAS_PTHREAD\n// The user didn't tell us explicitly, so we make reasonable assumptions about\n// which platforms have pthreads support.\n//\n// To disable threading support in Google Test, add -DGTEST_HAS_PTHREAD=0\n// to your compiler flags.\n#define GTEST_HAS_PTHREAD                                                      \\\n  (GTEST_OS_LINUX || GTEST_OS_MAC || GTEST_OS_HPUX || GTEST_OS_QNX ||          \\\n   GTEST_OS_FREEBSD || GTEST_OS_NACL || GTEST_OS_NETBSD || GTEST_OS_FUCHSIA || \\\n   GTEST_OS_DRAGONFLY || GTEST_OS_GNU_KFREEBSD || GTEST_OS_OPENBSD ||          \\\n   GTEST_OS_HAIKU)\n#endif  // GTEST_HAS_PTHREAD\n\n#if GTEST_HAS_PTHREAD\n// gtest-port.h guarantees to #include 
<pthread.h> when GTEST_HAS_PTHREAD is\n// true.\n# include <pthread.h>  // NOLINT\n\n// For timespec and nanosleep, used below.\n# include <time.h>  // NOLINT\n#endif\n\n// Determines whether clone(2) is supported.\n// Usually it will only be available on Linux, excluding\n// Linux on the Itanium architecture.\n// Also see http://linux.die.net/man/2/clone.\n#ifndef GTEST_HAS_CLONE\n// The user didn't tell us, so we need to figure it out.\n\n# if GTEST_OS_LINUX && !defined(__ia64__)\n#  if GTEST_OS_LINUX_ANDROID\n// On Android, clone() became available at different API levels for each 32-bit\n// architecture.\n#    if defined(__LP64__) || \\\n        (defined(__arm__) && __ANDROID_API__ >= 9) || \\\n        (defined(__mips__) && __ANDROID_API__ >= 12) || \\\n        (defined(__i386__) && __ANDROID_API__ >= 17)\n#     define GTEST_HAS_CLONE 1\n#    else\n#     define GTEST_HAS_CLONE 0\n#    endif\n#  else\n#   define GTEST_HAS_CLONE 1\n#  endif\n# else\n#  define GTEST_HAS_CLONE 0\n# endif  // GTEST_OS_LINUX && !defined(__ia64__)\n\n#endif  // GTEST_HAS_CLONE\n\n// Determines whether to support stream redirection. 
This is used to test\n// output correctness and to implement death tests.\n#ifndef GTEST_HAS_STREAM_REDIRECTION\n// By default, we assume that stream redirection is supported on all\n// platforms except known mobile ones.\n#if GTEST_OS_WINDOWS_MOBILE || GTEST_OS_WINDOWS_PHONE || \\\n    GTEST_OS_WINDOWS_RT || GTEST_OS_ESP8266\n#  define GTEST_HAS_STREAM_REDIRECTION 0\n# else\n#  define GTEST_HAS_STREAM_REDIRECTION 1\n# endif  // !GTEST_OS_WINDOWS_MOBILE\n#endif  // GTEST_HAS_STREAM_REDIRECTION\n\n// Determines whether to support death tests.\n// pops up a dialog window that cannot be suppressed programmatically.\n#if (GTEST_OS_LINUX || GTEST_OS_CYGWIN || GTEST_OS_SOLARIS ||             \\\n     (GTEST_OS_MAC && !GTEST_OS_IOS) ||                                   \\\n     (GTEST_OS_WINDOWS_DESKTOP && _MSC_VER) || GTEST_OS_WINDOWS_MINGW ||  \\\n     GTEST_OS_AIX || GTEST_OS_HPUX || GTEST_OS_OPENBSD || GTEST_OS_QNX || \\\n     GTEST_OS_FREEBSD || GTEST_OS_NETBSD || GTEST_OS_FUCHSIA ||           \\\n     GTEST_OS_DRAGONFLY || GTEST_OS_GNU_KFREEBSD || GTEST_OS_HAIKU)\n# define GTEST_HAS_DEATH_TEST 1\n#endif\n\n// Determines whether to support type-driven tests.\n\n// Typed tests need <typeinfo> and variadic macros, which GCC, VC++ 8.0,\n// Sun Pro CC, IBM Visual Age, and HP aCC support.\n#if defined(__GNUC__) || defined(_MSC_VER) || defined(__SUNPRO_CC) || \\\n    defined(__IBMCPP__) || defined(__HP_aCC)\n# define GTEST_HAS_TYPED_TEST 1\n# define GTEST_HAS_TYPED_TEST_P 1\n#endif\n\n// Determines whether the system compiler uses UTF-16 for encoding wide strings.\n#define GTEST_WIDE_STRING_USES_UTF16_ \\\n  (GTEST_OS_WINDOWS || GTEST_OS_CYGWIN || GTEST_OS_AIX || GTEST_OS_OS2)\n\n// Determines whether test results can be streamed to a socket.\n#if GTEST_OS_LINUX || GTEST_OS_GNU_KFREEBSD || GTEST_OS_DRAGONFLY || \\\n    GTEST_OS_FREEBSD || GTEST_OS_NETBSD || GTEST_OS_OPENBSD\n# define GTEST_CAN_STREAM_RESULTS_ 1\n#endif\n\n// Defines some utility macros.\n\n// The GNU 
compiler emits a warning if nested \"if\" statements are followed by\n// an \"else\" statement and braces are not used to explicitly disambiguate the\n// \"else\" binding.  This leads to problems with code like:\n//\n//   if (gate)\n//     ASSERT_*(condition) << \"Some message\";\n//\n// The \"switch (0) case 0:\" idiom is used to suppress this.\n#ifdef __INTEL_COMPILER\n# define GTEST_AMBIGUOUS_ELSE_BLOCKER_\n#else\n# define GTEST_AMBIGUOUS_ELSE_BLOCKER_ switch (0) case 0: default:  // NOLINT\n#endif\n\n// Use this annotation at the end of a struct/class definition to\n// prevent the compiler from optimizing away instances that are never\n// used.  This is useful when all interesting logic happens inside the\n// c'tor and / or d'tor.  Example:\n//\n//   struct Foo {\n//     Foo() { ... }\n//   } GTEST_ATTRIBUTE_UNUSED_;\n//\n// Also use it after a variable or parameter declaration to tell the\n// compiler the variable/parameter does not have to be used.\n#if defined(__GNUC__) && !defined(COMPILER_ICC)\n# define GTEST_ATTRIBUTE_UNUSED_ __attribute__ ((unused))\n#elif defined(__clang__)\n# if __has_attribute(unused)\n#  define GTEST_ATTRIBUTE_UNUSED_ __attribute__ ((unused))\n# endif\n#endif\n#ifndef GTEST_ATTRIBUTE_UNUSED_\n# define GTEST_ATTRIBUTE_UNUSED_\n#endif\n\n// Use this annotation before a function that takes a printf format string.\n#if (defined(__GNUC__) || defined(__clang__)) && !defined(COMPILER_ICC)\n# if defined(__MINGW_PRINTF_FORMAT)\n// MinGW has two different printf implementations. Ensure the format macro\n// matches the selected implementation. 
See\n// https://sourceforge.net/p/mingw-w64/wiki2/gnu%20printf/.\n#  define GTEST_ATTRIBUTE_PRINTF_(string_index, first_to_check) \\\n       __attribute__((__format__(__MINGW_PRINTF_FORMAT, string_index, \\\n                                 first_to_check)))\n# else\n#  define GTEST_ATTRIBUTE_PRINTF_(string_index, first_to_check) \\\n       __attribute__((__format__(__printf__, string_index, first_to_check)))\n# endif\n#else\n# define GTEST_ATTRIBUTE_PRINTF_(string_index, first_to_check)\n#endif\n\n\n// A macro to disallow operator=\n// This should be used in the private: declarations for a class.\n#define GTEST_DISALLOW_ASSIGN_(type) \\\n  void operator=(type const &) = delete\n\n// A macro to disallow copy constructor and operator=\n// This should be used in the private: declarations for a class.\n#define GTEST_DISALLOW_COPY_AND_ASSIGN_(type) \\\n  type(type const &) = delete; \\\n  GTEST_DISALLOW_ASSIGN_(type)\n\n// Tell the compiler to warn about unused return values for functions declared\n// with this macro.  The macro should be used on function declarations\n// following the argument list:\n//\n//   Sprocket* AllocateSprocket() GTEST_MUST_USE_RESULT_;\n#if defined(__GNUC__) && !defined(COMPILER_ICC)\n# define GTEST_MUST_USE_RESULT_ __attribute__ ((warn_unused_result))\n#else\n# define GTEST_MUST_USE_RESULT_\n#endif  // __GNUC__ && !COMPILER_ICC\n\n// MS C++ compiler emits warning when a conditional expression is compile time\n// constant. In some contexts this warning is false positive and needs to be\n// suppressed. Use the following two macros in such cases:\n//\n// GTEST_INTENTIONAL_CONST_COND_PUSH_()\n// while (true) {\n// GTEST_INTENTIONAL_CONST_COND_POP_()\n// }\n# define GTEST_INTENTIONAL_CONST_COND_PUSH_() \\\n    GTEST_DISABLE_MSC_WARNINGS_PUSH_(4127)\n# define GTEST_INTENTIONAL_CONST_COND_POP_() \\\n    GTEST_DISABLE_MSC_WARNINGS_POP_()\n\n// Determine whether the compiler supports Microsoft's Structured Exception\n// Handling.  
This is supported by several Windows compilers but generally\n// does not exist on any other system.\n#ifndef GTEST_HAS_SEH\n// The user didn't tell us, so we need to figure it out.\n\n# if defined(_MSC_VER) || defined(__BORLANDC__)\n// These two compilers are known to support SEH.\n#  define GTEST_HAS_SEH 1\n# else\n// Assume no SEH.\n#  define GTEST_HAS_SEH 0\n# endif\n\n#endif  // GTEST_HAS_SEH\n\n#ifndef GTEST_IS_THREADSAFE\n\n#define GTEST_IS_THREADSAFE                                                 \\\n  (GTEST_HAS_MUTEX_AND_THREAD_LOCAL_ ||                                     \\\n   (GTEST_OS_WINDOWS && !GTEST_OS_WINDOWS_PHONE && !GTEST_OS_WINDOWS_RT) || \\\n   GTEST_HAS_PTHREAD)\n\n#endif  // GTEST_IS_THREADSAFE\n\n// GTEST_API_ qualifies all symbols that must be exported. The definitions below\n// are guarded by #ifndef to give embedders a chance to define GTEST_API_ in\n// gtest/internal/custom/gtest-port.h\n#ifndef GTEST_API_\n\n#ifdef _MSC_VER\n# if GTEST_LINKED_AS_SHARED_LIBRARY\n#  define GTEST_API_ __declspec(dllimport)\n# elif GTEST_CREATE_SHARED_LIBRARY\n#  define GTEST_API_ __declspec(dllexport)\n# endif\n#elif __GNUC__ >= 4 || defined(__clang__)\n# define GTEST_API_ __attribute__((visibility (\"default\")))\n#endif  // _MSC_VER\n\n#endif  // GTEST_API_\n\n#ifndef GTEST_API_\n# define GTEST_API_\n#endif  // GTEST_API_\n\n#ifndef GTEST_DEFAULT_DEATH_TEST_STYLE\n# define GTEST_DEFAULT_DEATH_TEST_STYLE  \"fast\"\n#endif  // GTEST_DEFAULT_DEATH_TEST_STYLE\n\n#ifdef __GNUC__\n// Ask the compiler to never inline a given function.\n# define GTEST_NO_INLINE_ __attribute__((noinline))\n#else\n# define GTEST_NO_INLINE_\n#endif\n\n// _LIBCPP_VERSION is defined by the libc++ library from the LLVM project.\n#if !defined(GTEST_HAS_CXXABI_H_)\n# if defined(__GLIBCXX__) || (defined(_LIBCPP_VERSION) && !defined(_MSC_VER))\n#  define GTEST_HAS_CXXABI_H_ 1\n# else\n#  define GTEST_HAS_CXXABI_H_ 0\n# endif\n#endif\n\n// A function level attribute to disable checking 
for use of uninitialized\n// memory when built with MemorySanitizer.\n#if defined(__clang__)\n# if __has_feature(memory_sanitizer)\n#  define GTEST_ATTRIBUTE_NO_SANITIZE_MEMORY_ \\\n       __attribute__((no_sanitize_memory))\n# else\n#  define GTEST_ATTRIBUTE_NO_SANITIZE_MEMORY_\n# endif  // __has_feature(memory_sanitizer)\n#else\n# define GTEST_ATTRIBUTE_NO_SANITIZE_MEMORY_\n#endif  // __clang__\n\n// A function level attribute to disable AddressSanitizer instrumentation.\n#if defined(__clang__)\n# if __has_feature(address_sanitizer)\n#  define GTEST_ATTRIBUTE_NO_SANITIZE_ADDRESS_ \\\n       __attribute__((no_sanitize_address))\n# else\n#  define GTEST_ATTRIBUTE_NO_SANITIZE_ADDRESS_\n# endif  // __has_feature(address_sanitizer)\n#else\n# define GTEST_ATTRIBUTE_NO_SANITIZE_ADDRESS_\n#endif  // __clang__\n\n// A function level attribute to disable HWAddressSanitizer instrumentation.\n#if defined(__clang__)\n# if __has_feature(hwaddress_sanitizer)\n#  define GTEST_ATTRIBUTE_NO_SANITIZE_HWADDRESS_ \\\n       __attribute__((no_sanitize(\"hwaddress\")))\n# else\n#  define GTEST_ATTRIBUTE_NO_SANITIZE_HWADDRESS_\n# endif  // __has_feature(hwaddress_sanitizer)\n#else\n# define GTEST_ATTRIBUTE_NO_SANITIZE_HWADDRESS_\n#endif  // __clang__\n\n// A function level attribute to disable ThreadSanitizer instrumentation.\n#if defined(__clang__)\n# if __has_feature(thread_sanitizer)\n#  define GTEST_ATTRIBUTE_NO_SANITIZE_THREAD_ \\\n       __attribute__((no_sanitize_thread))\n# else\n#  define GTEST_ATTRIBUTE_NO_SANITIZE_THREAD_\n# endif  // __has_feature(thread_sanitizer)\n#else\n# define GTEST_ATTRIBUTE_NO_SANITIZE_THREAD_\n#endif  // __clang__\n\nnamespace testing {\n\nclass Message;\n\n// Legacy imports for backwards compatibility.\n// New code should use std:: names directly.\nusing std::get;\nusing std::make_tuple;\nusing std::tuple;\nusing std::tuple_element;\nusing std::tuple_size;\n\nnamespace internal {\n\n// A secret type that Google Test users don't know about.  
It has no\n// definition on purpose.  Therefore it's impossible to create a\n// Secret object, which is what we want.\nclass Secret;\n\n// The GTEST_COMPILE_ASSERT_ is a legacy macro used to verify that a compile\n// time expression is true (in new code, use static_assert instead). For\n// example, you could use it to verify the size of a static array:\n//\n//   GTEST_COMPILE_ASSERT_(GTEST_ARRAY_SIZE_(names) == NUM_NAMES,\n//                         names_incorrect_size);\n//\n// The second argument to the macro must be a valid C++ identifier. If the\n// expression is false, compiler will issue an error containing this identifier.\n#define GTEST_COMPILE_ASSERT_(expr, msg) static_assert(expr, #msg)\n\n// A helper for suppressing warnings on constant condition.  It just\n// returns 'condition'.\nGTEST_API_ bool IsTrue(bool condition);\n\n// Defines RE.\n\n#if GTEST_USES_PCRE\n// if used, PCRE is injected by custom/gtest-port.h\n#elif GTEST_USES_POSIX_RE || GTEST_USES_SIMPLE_RE\n\n// A simple C++ wrapper for <regex.h>.  
It uses the POSIX Extended\n// Regular Expression syntax.\nclass GTEST_API_ RE {\n public:\n  // A copy constructor is required by the Standard to initialize object\n  // references from r-values.\n  RE(const RE& other) { Init(other.pattern()); }\n\n  // Constructs an RE from a string.\n  RE(const ::std::string& regex) { Init(regex.c_str()); }  // NOLINT\n\n  RE(const char* regex) { Init(regex); }  // NOLINT\n  ~RE();\n\n  // Returns the string representation of the regex.\n  const char* pattern() const { return pattern_; }\n\n  // FullMatch(str, re) returns true if and only if regular expression re\n  // matches the entire str.\n  // PartialMatch(str, re) returns true if and only if regular expression re\n  // matches a substring of str (including str itself).\n  static bool FullMatch(const ::std::string& str, const RE& re) {\n    return FullMatch(str.c_str(), re);\n  }\n  static bool PartialMatch(const ::std::string& str, const RE& re) {\n    return PartialMatch(str.c_str(), re);\n  }\n\n  static bool FullMatch(const char* str, const RE& re);\n  static bool PartialMatch(const char* str, const RE& re);\n\n private:\n  void Init(const char* regex);\n  const char* pattern_;\n  bool is_valid_;\n\n# if GTEST_USES_POSIX_RE\n\n  regex_t full_regex_;     // For FullMatch().\n  regex_t partial_regex_;  // For PartialMatch().\n\n# else  // GTEST_USES_SIMPLE_RE\n\n  const char* full_pattern_;  // For FullMatch();\n\n# endif\n\n  GTEST_DISALLOW_ASSIGN_(RE);\n};\n\n#endif  // GTEST_USES_PCRE\n\n// Formats a source file path and a line number as they would appear\n// in an error message from the compiler used to compile this code.\nGTEST_API_ ::std::string FormatFileLocation(const char* file, int line);\n\n// Formats a file location for compiler-independent XML output.\n// Although this function is not platform dependent, we put it next to\n// FormatFileLocation in order to contrast the two functions.\nGTEST_API_ ::std::string FormatCompilerIndependentFileLocation(const char* 
file,\n                                                               int line);\n\n// Defines logging utilities:\n//   GTEST_LOG_(severity) - logs messages at the specified severity level. The\n//                          message itself is streamed into the macro.\n//   LogToStderr()  - directs all log messages to stderr.\n//   FlushInfoLog() - flushes informational log messages.\n\nenum GTestLogSeverity {\n  GTEST_INFO,\n  GTEST_WARNING,\n  GTEST_ERROR,\n  GTEST_FATAL\n};\n\n// Formats log entry severity, provides a stream object for streaming the\n// log message, and terminates the message with a newline when going out of\n// scope.\nclass GTEST_API_ GTestLog {\n public:\n  GTestLog(GTestLogSeverity severity, const char* file, int line);\n\n  // Flushes the buffers and, if severity is GTEST_FATAL, aborts the program.\n  ~GTestLog();\n\n  ::std::ostream& GetStream() { return ::std::cerr; }\n\n private:\n  const GTestLogSeverity severity_;\n\n  GTEST_DISALLOW_COPY_AND_ASSIGN_(GTestLog);\n};\n\n#if !defined(GTEST_LOG_)\n\n# define GTEST_LOG_(severity) \\\n    ::testing::internal::GTestLog(::testing::internal::GTEST_##severity, \\\n                                  __FILE__, __LINE__).GetStream()\n\ninline void LogToStderr() {}\ninline void FlushInfoLog() { fflush(nullptr); }\n\n#endif  // !defined(GTEST_LOG_)\n\n#if !defined(GTEST_CHECK_)\n// INTERNAL IMPLEMENTATION - DO NOT USE.\n//\n// GTEST_CHECK_ is an all-mode assert. It aborts the program if the condition\n// is not satisfied.\n//  Synopsys:\n//    GTEST_CHECK_(boolean_condition);\n//     or\n//    GTEST_CHECK_(boolean_condition) << \"Additional message\";\n//\n//    This checks the condition and if the condition is not satisfied\n//    it prints message about the condition violation, including the\n//    condition itself, plus additional message streamed into it, if any,\n//    and then it aborts the program. 
It aborts the program irrespective of\n//    whether it is built in the debug mode or not.\n# define GTEST_CHECK_(condition) \\\n    GTEST_AMBIGUOUS_ELSE_BLOCKER_ \\\n    if (::testing::internal::IsTrue(condition)) \\\n      ; \\\n    else \\\n      GTEST_LOG_(FATAL) << \"Condition \" #condition \" failed. \"\n#endif  // !defined(GTEST_CHECK_)\n\n// An all-mode assert to verify that the given POSIX-style function\n// call returns 0 (indicating success).  Known limitation: this\n// doesn't expand to a balanced 'if' statement, so enclose the macro\n// in {} if you need to use it as the only statement in an 'if'\n// branch.\n#define GTEST_CHECK_POSIX_SUCCESS_(posix_call) \\\n  if (const int gtest_error = (posix_call)) \\\n    GTEST_LOG_(FATAL) << #posix_call << \"failed with error \" \\\n                      << gtest_error\n\n// Transforms \"T\" into \"const T&\" according to standard reference collapsing\n// rules (this is only needed as a backport for C++98 compilers that do not\n// support reference collapsing). Specifically, it transforms:\n//\n//   char         ==> const char&\n//   const char   ==> const char&\n//   char&        ==> char&\n//   const char&  ==> const char&\n//\n// Note that the non-const reference will not have \"const\" added. This is\n// standard, and necessary so that \"T\" can always bind to \"const T&\".\ntemplate <typename T>\nstruct ConstRef { typedef const T& type; };\ntemplate <typename T>\nstruct ConstRef<T&> { typedef T& type; };\n\n// The argument T must depend on some template parameters.\n#define GTEST_REFERENCE_TO_CONST_(T) \\\n  typename ::testing::internal::ConstRef<T>::type\n\n// INTERNAL IMPLEMENTATION - DO NOT USE IN USER CODE.\n//\n// Use ImplicitCast_ as a safe version of static_cast for upcasting in\n// the type hierarchy (e.g. casting a Foo* to a SuperclassOfFoo* or a\n// const Foo*).  When you use ImplicitCast_, the compiler checks that\n// the cast is safe.  
Such explicit ImplicitCast_s are necessary in\n// surprisingly many situations where C++ demands an exact type match\n// instead of an argument type convertable to a target type.\n//\n// The syntax for using ImplicitCast_ is the same as for static_cast:\n//\n//   ImplicitCast_<ToType>(expr)\n//\n// ImplicitCast_ would have been part of the C++ standard library,\n// but the proposal was submitted too late.  It will probably make\n// its way into the language in the future.\n//\n// This relatively ugly name is intentional. It prevents clashes with\n// similar functions users may have (e.g., implicit_cast). The internal\n// namespace alone is not enough because the function can be found by ADL.\ntemplate<typename To>\ninline To ImplicitCast_(To x) { return x; }\n\n// When you upcast (that is, cast a pointer from type Foo to type\n// SuperclassOfFoo), it's fine to use ImplicitCast_<>, since upcasts\n// always succeed.  When you downcast (that is, cast a pointer from\n// type Foo to type SubclassOfFoo), static_cast<> isn't safe, because\n// how do you know the pointer is really of type SubclassOfFoo?  It\n// could be a bare Foo, or of type DifferentSubclassOfFoo.  Thus,\n// when you downcast, you should use this macro.  In debug mode, we\n// use dynamic_cast<> to double-check the downcast is legal (we die\n// if it's not).  In normal mode, we do the efficient static_cast<>\n// instead.  Thus, it's important to test in debug mode to make sure\n// the cast is legal!\n//    This is the only place in the code we should use dynamic_cast<>.\n// In particular, you SHOULDN'T be using dynamic_cast<> in order to\n// do RTTI (eg code like this:\n//    if (dynamic_cast<Subclass1>(foo)) HandleASubclass1Object(foo);\n//    if (dynamic_cast<Subclass2>(foo)) HandleASubclass2Object(foo);\n// You should design the code some other way not to need this.\n//\n// This relatively ugly name is intentional. It prevents clashes with\n// similar functions users may have (e.g., down_cast). 
The internal\n// namespace alone is not enough because the function can be found by ADL.\ntemplate<typename To, typename From>  // use like this: DownCast_<T*>(foo);\ninline To DownCast_(From* f) {  // so we only accept pointers\n  // Ensures that To is a sub-type of From *.  This test is here only\n  // for compile-time type checking, and has no overhead in an\n  // optimized build at run-time, as it will be optimized away\n  // completely.\n  GTEST_INTENTIONAL_CONST_COND_PUSH_()\n  if (false) {\n  GTEST_INTENTIONAL_CONST_COND_POP_()\n  const To to = nullptr;\n  ::testing::internal::ImplicitCast_<From*>(to);\n  }\n\n#if GTEST_HAS_RTTI\n  // RTTI: debug mode only!\n  GTEST_CHECK_(f == nullptr || dynamic_cast<To>(f) != nullptr);\n#endif\n  return static_cast<To>(f);\n}\n\n// Downcasts the pointer of type Base to Derived.\n// Derived must be a subclass of Base. The parameter MUST\n// point to a class of type Derived, not any subclass of it.\n// When RTTI is available, the function performs a runtime\n// check to enforce this.\ntemplate <class Derived, class Base>\nDerived* CheckedDowncastToActualType(Base* base) {\n#if GTEST_HAS_RTTI\n  GTEST_CHECK_(typeid(*base) == typeid(Derived));\n#endif\n\n#if GTEST_HAS_DOWNCAST_\n  return ::down_cast<Derived*>(base);\n#elif GTEST_HAS_RTTI\n  return dynamic_cast<Derived*>(base);  // NOLINT\n#else\n  return static_cast<Derived*>(base);  // Poor man's downcast.\n#endif\n}\n\n#if GTEST_HAS_STREAM_REDIRECTION\n\n// Defines the stderr capturer:\n//   CaptureStdout     - starts capturing stdout.\n//   GetCapturedStdout - stops capturing stdout and returns the captured string.\n//   CaptureStderr     - starts capturing stderr.\n//   GetCapturedStderr - stops capturing stderr and returns the captured string.\n//\nGTEST_API_ void CaptureStdout();\nGTEST_API_ std::string GetCapturedStdout();\nGTEST_API_ void CaptureStderr();\nGTEST_API_ std::string GetCapturedStderr();\n\n#endif  // GTEST_HAS_STREAM_REDIRECTION\n// Returns the size (in 
bytes) of a file.\nGTEST_API_ size_t GetFileSize(FILE* file);\n\n// Reads the entire content of a file as a string.\nGTEST_API_ std::string ReadEntireFile(FILE* file);\n\n// All command line arguments.\nGTEST_API_ std::vector<std::string> GetArgvs();\n\n#if GTEST_HAS_DEATH_TEST\n\nstd::vector<std::string> GetInjectableArgvs();\n// Deprecated: pass the args vector by value instead.\nvoid SetInjectableArgvs(const std::vector<std::string>* new_argvs);\nvoid SetInjectableArgvs(const std::vector<std::string>& new_argvs);\nvoid ClearInjectableArgvs();\n\n#endif  // GTEST_HAS_DEATH_TEST\n\n// Defines synchronization primitives.\n#if GTEST_IS_THREADSAFE\n# if GTEST_HAS_PTHREAD\n// Sleeps for (roughly) n milliseconds.  This function is only for testing\n// Google Test's own constructs.  Don't use it in user tests, either\n// directly or indirectly.\ninline void SleepMilliseconds(int n) {\n  const timespec time = {\n    0,                  // 0 seconds.\n    n * 1000L * 1000L,  // And n ms.\n  };\n  nanosleep(&time, nullptr);\n}\n# endif  // GTEST_HAS_PTHREAD\n\n# if GTEST_HAS_NOTIFICATION_\n// Notification has already been imported into the namespace.\n// Nothing to do here.\n\n# elif GTEST_HAS_PTHREAD\n// Allows a controller thread to pause execution of newly created\n// threads until notified.  Instances of this class must be created\n// and destroyed in the controller thread.\n//\n// This class is only for testing Google Test's own constructs. Do not\n// use it in user tests, either directly or indirectly.\nclass Notification {\n public:\n  Notification() : notified_(false) {\n    GTEST_CHECK_POSIX_SUCCESS_(pthread_mutex_init(&mutex_, nullptr));\n  }\n  ~Notification() {\n    pthread_mutex_destroy(&mutex_);\n  }\n\n  // Notifies all threads created with this notification to start. 
Must\n  // be called from the controller thread.\n  void Notify() {\n    pthread_mutex_lock(&mutex_);\n    notified_ = true;\n    pthread_mutex_unlock(&mutex_);\n  }\n\n  // Blocks until the controller thread notifies. Must be called from a test\n  // thread.\n  void WaitForNotification() {\n    for (;;) {\n      pthread_mutex_lock(&mutex_);\n      const bool notified = notified_;\n      pthread_mutex_unlock(&mutex_);\n      if (notified)\n        break;\n      SleepMilliseconds(10);\n    }\n  }\n\n private:\n  pthread_mutex_t mutex_;\n  bool notified_;\n\n  GTEST_DISALLOW_COPY_AND_ASSIGN_(Notification);\n};\n\n# elif GTEST_OS_WINDOWS && !GTEST_OS_WINDOWS_PHONE && !GTEST_OS_WINDOWS_RT\n\nGTEST_API_ void SleepMilliseconds(int n);\n\n// Provides leak-safe Windows kernel handle ownership.\n// Used in death tests and in threading support.\nclass GTEST_API_ AutoHandle {\n public:\n  // Assume that Win32 HANDLE type is equivalent to void*. Doing so allows us to\n  // avoid including <windows.h> in this header file. Including <windows.h> is\n  // undesirable because it defines a lot of symbols and macros that tend to\n  // conflict with client code. This assumption is verified by\n  // WindowsTypesTest.HANDLEIsVoidStar.\n  typedef void* Handle;\n  AutoHandle();\n  explicit AutoHandle(Handle handle);\n\n  ~AutoHandle();\n\n  Handle Get() const;\n  void Reset();\n  void Reset(Handle handle);\n\n private:\n  // Returns true if and only if the handle is a valid handle object that can be\n  // closed.\n  bool IsCloseable() const;\n\n  Handle handle_;\n\n  GTEST_DISALLOW_COPY_AND_ASSIGN_(AutoHandle);\n};\n\n// Allows a controller thread to pause execution of newly created\n// threads until notified.  Instances of this class must be created\n// and destroyed in the controller thread.\n//\n// This class is only for testing Google Test's own constructs. 
Do not\n// use it in user tests, either directly or indirectly.\nclass GTEST_API_ Notification {\n public:\n  Notification();\n  void Notify();\n  void WaitForNotification();\n\n private:\n  AutoHandle event_;\n\n  GTEST_DISALLOW_COPY_AND_ASSIGN_(Notification);\n};\n# endif  // GTEST_HAS_NOTIFICATION_\n\n// On MinGW, we can have both GTEST_OS_WINDOWS and GTEST_HAS_PTHREAD\n// defined, but we don't want to use MinGW's pthreads implementation, which\n// has conformance problems with some versions of the POSIX standard.\n# if GTEST_HAS_PTHREAD && !GTEST_OS_WINDOWS_MINGW\n\n// As a C-function, ThreadFuncWithCLinkage cannot be templated itself.\n// Consequently, it cannot select a correct instantiation of ThreadWithParam\n// in order to call its Run(). Introducing ThreadWithParamBase as a\n// non-templated base class for ThreadWithParam allows us to bypass this\n// problem.\nclass ThreadWithParamBase {\n public:\n  virtual ~ThreadWithParamBase() {}\n  virtual void Run() = 0;\n};\n\n// pthread_create() accepts a pointer to a function type with the C linkage.\n// According to the Standard (7.5/1), function types with different linkages\n// are different even if they are otherwise identical.  Some compilers (for\n// example, SunStudio) treat them as different types.  
Since class methods\n// cannot be defined with C-linkage we need to define a free C-function to\n// pass into pthread_create().\nextern \"C\" inline void* ThreadFuncWithCLinkage(void* thread) {\n  static_cast<ThreadWithParamBase*>(thread)->Run();\n  return nullptr;\n}\n\n// Helper class for testing Google Test's multi-threading constructs.\n// To use it, write:\n//\n//   void ThreadFunc(int param) { /* Do things with param */ }\n//   Notification thread_can_start;\n//   ...\n//   // The thread_can_start parameter is optional; you can supply NULL.\n//   ThreadWithParam<int> thread(&ThreadFunc, 5, &thread_can_start);\n//   thread_can_start.Notify();\n//\n// These classes are only for testing Google Test's own constructs. Do\n// not use them in user tests, either directly or indirectly.\ntemplate <typename T>\nclass ThreadWithParam : public ThreadWithParamBase {\n public:\n  typedef void UserThreadFunc(T);\n\n  ThreadWithParam(UserThreadFunc* func, T param, Notification* thread_can_start)\n      : func_(func),\n        param_(param),\n        thread_can_start_(thread_can_start),\n        finished_(false) {\n    ThreadWithParamBase* const base = this;\n    // The thread can be created only after all fields except thread_\n    // have been initialized.\n    GTEST_CHECK_POSIX_SUCCESS_(\n        pthread_create(&thread_, nullptr, &ThreadFuncWithCLinkage, base));\n  }\n  ~ThreadWithParam() override { Join(); }\n\n  void Join() {\n    if (!finished_) {\n      GTEST_CHECK_POSIX_SUCCESS_(pthread_join(thread_, nullptr));\n      finished_ = true;\n    }\n  }\n\n  void Run() override {\n    if (thread_can_start_ != nullptr) thread_can_start_->WaitForNotification();\n    func_(param_);\n  }\n\n private:\n  UserThreadFunc* const func_;  // User-supplied thread function.\n  const T param_;  // User-supplied parameter to the thread function.\n  // When non-NULL, used to block execution until the controller thread\n  // notifies.\n  Notification* const thread_can_start_;\n  bool 
finished_;  // true if and only if we know that the thread function has\n                   // finished.\n  pthread_t thread_;  // The native thread object.\n\n  GTEST_DISALLOW_COPY_AND_ASSIGN_(ThreadWithParam);\n};\n# endif  // !GTEST_OS_WINDOWS && GTEST_HAS_PTHREAD ||\n         // GTEST_HAS_MUTEX_AND_THREAD_LOCAL_\n\n# if GTEST_HAS_MUTEX_AND_THREAD_LOCAL_\n// Mutex and ThreadLocal have already been imported into the namespace.\n// Nothing to do here.\n\n# elif GTEST_OS_WINDOWS && !GTEST_OS_WINDOWS_PHONE && !GTEST_OS_WINDOWS_RT\n\n// Mutex implements mutex on Windows platforms.  It is used in conjunction\n// with class MutexLock:\n//\n//   Mutex mutex;\n//   ...\n//   MutexLock lock(&mutex);  // Acquires the mutex and releases it at the\n//                            // end of the current scope.\n//\n// A static Mutex *must* be defined or declared using one of the following\n// macros:\n//   GTEST_DEFINE_STATIC_MUTEX_(g_some_mutex);\n//   GTEST_DECLARE_STATIC_MUTEX_(g_some_mutex);\n//\n// (A non-static Mutex is defined/declared in the usual way).\nclass GTEST_API_ Mutex {\n public:\n  enum MutexType { kStatic = 0, kDynamic = 1 };\n  // We rely on kStaticMutex being 0 as it is to what the linker initializes\n  // type_ in static mutexes.  critical_section_ will be initialized lazily\n  // in ThreadSafeLazyInit().\n  enum StaticConstructorSelector { kStaticMutex = 0 };\n\n  // This constructor intentionally does nothing.  It relies on type_ being\n  // statically initialized to 0 (effectively setting it to kStatic) and on\n  // ThreadSafeLazyInit() to lazily initialize the rest of the members.\n  explicit Mutex(StaticConstructorSelector /*dummy*/) {}\n\n  Mutex();\n  ~Mutex();\n\n  void Lock();\n\n  void Unlock();\n\n  // Does nothing if the current thread holds the mutex. 
Otherwise, crashes\n  // with high probability.\n  void AssertHeld();\n\n private:\n  // Initializes owner_thread_id_ and critical_section_ in static mutexes.\n  void ThreadSafeLazyInit();\n\n  // Per https://blogs.msdn.microsoft.com/oldnewthing/20040223-00/?p=40503,\n  // we assume that 0 is an invalid value for thread IDs.\n  unsigned int owner_thread_id_;\n\n  // For static mutexes, we rely on these members being initialized to zeros\n  // by the linker.\n  MutexType type_;\n  long critical_section_init_phase_;  // NOLINT\n  GTEST_CRITICAL_SECTION* critical_section_;\n\n  GTEST_DISALLOW_COPY_AND_ASSIGN_(Mutex);\n};\n\n# define GTEST_DECLARE_STATIC_MUTEX_(mutex) \\\n    extern ::testing::internal::Mutex mutex\n\n# define GTEST_DEFINE_STATIC_MUTEX_(mutex) \\\n    ::testing::internal::Mutex mutex(::testing::internal::Mutex::kStaticMutex)\n\n// We cannot name this class MutexLock because the ctor declaration would\n// conflict with a macro named MutexLock, which is defined on some\n// platforms. That macro is used as a defensive measure to prevent against\n// inadvertent misuses of MutexLock like \"MutexLock(&mu)\" rather than\n// \"MutexLock l(&mu)\".  Hence the typedef trick below.\nclass GTestMutexLock {\n public:\n  explicit GTestMutexLock(Mutex* mutex)\n      : mutex_(mutex) { mutex_->Lock(); }\n\n  ~GTestMutexLock() { mutex_->Unlock(); }\n\n private:\n  Mutex* const mutex_;\n\n  GTEST_DISALLOW_COPY_AND_ASSIGN_(GTestMutexLock);\n};\n\ntypedef GTestMutexLock MutexLock;\n\n// Base class for ValueHolder<T>.  Allows a caller to hold and delete a value\n// without knowing its type.\nclass ThreadLocalValueHolderBase {\n public:\n  virtual ~ThreadLocalValueHolderBase() {}\n};\n\n// Provides a way for a thread to send notifications to a ThreadLocal\n// regardless of its parameter type.\nclass ThreadLocalBase {\n public:\n  // Creates a new ValueHolder<T> object holding a default value passed to\n  // this ThreadLocal<T>'s constructor and returns it.  
It is the caller's\n  // responsibility not to call this when the ThreadLocal<T> instance already\n  // has a value on the current thread.\n  virtual ThreadLocalValueHolderBase* NewValueForCurrentThread() const = 0;\n\n protected:\n  ThreadLocalBase() {}\n  virtual ~ThreadLocalBase() {}\n\n private:\n  GTEST_DISALLOW_COPY_AND_ASSIGN_(ThreadLocalBase);\n};\n\n// Maps a thread to a set of ThreadLocals that have values instantiated on that\n// thread and notifies them when the thread exits.  A ThreadLocal instance is\n// expected to persist until all threads it has values on have terminated.\nclass GTEST_API_ ThreadLocalRegistry {\n public:\n  // Registers thread_local_instance as having value on the current thread.\n  // Returns a value that can be used to identify the thread from other threads.\n  static ThreadLocalValueHolderBase* GetValueOnCurrentThread(\n      const ThreadLocalBase* thread_local_instance);\n\n  // Invoked when a ThreadLocal instance is destroyed.\n  static void OnThreadLocalDestroyed(\n      const ThreadLocalBase* thread_local_instance);\n};\n\nclass GTEST_API_ ThreadWithParamBase {\n public:\n  void Join();\n\n protected:\n  class Runnable {\n   public:\n    virtual ~Runnable() {}\n    virtual void Run() = 0;\n  };\n\n  ThreadWithParamBase(Runnable *runnable, Notification* thread_can_start);\n  virtual ~ThreadWithParamBase();\n\n private:\n  AutoHandle thread_;\n};\n\n// Helper class for testing Google Test's multi-threading constructs.\ntemplate <typename T>\nclass ThreadWithParam : public ThreadWithParamBase {\n public:\n  typedef void UserThreadFunc(T);\n\n  ThreadWithParam(UserThreadFunc* func, T param, Notification* thread_can_start)\n      : ThreadWithParamBase(new RunnableImpl(func, param), thread_can_start) {\n  }\n  virtual ~ThreadWithParam() {}\n\n private:\n  class RunnableImpl : public Runnable {\n   public:\n    RunnableImpl(UserThreadFunc* func, T param)\n        : func_(func),\n          param_(param) {\n    }\n    virtual 
~RunnableImpl() {}\n    virtual void Run() {\n      func_(param_);\n    }\n\n   private:\n    UserThreadFunc* const func_;\n    const T param_;\n\n    GTEST_DISALLOW_COPY_AND_ASSIGN_(RunnableImpl);\n  };\n\n  GTEST_DISALLOW_COPY_AND_ASSIGN_(ThreadWithParam);\n};\n\n// Implements thread-local storage on Windows systems.\n//\n//   // Thread 1\n//   ThreadLocal<int> tl(100);  // 100 is the default value for each thread.\n//\n//   // Thread 2\n//   tl.set(150);  // Changes the value for thread 2 only.\n//   EXPECT_EQ(150, tl.get());\n//\n//   // Thread 1\n//   EXPECT_EQ(100, tl.get());  // In thread 1, tl has the original value.\n//   tl.set(200);\n//   EXPECT_EQ(200, tl.get());\n//\n// The template type argument T must have a public copy constructor.\n// In addition, the default ThreadLocal constructor requires T to have\n// a public default constructor.\n//\n// The users of a TheadLocal instance have to make sure that all but one\n// threads (including the main one) using that instance have exited before\n// destroying it. Otherwise, the per-thread objects managed for them by the\n// ThreadLocal instance are not guaranteed to be destroyed on all platforms.\n//\n// Google Test only uses global ThreadLocal objects.  That means they\n// will die after main() has returned.  
Therefore, no per-thread\n// object managed by Google Test will be leaked as long as all threads\n// using Google Test have exited when main() returns.\ntemplate <typename T>\nclass ThreadLocal : public ThreadLocalBase {\n public:\n  ThreadLocal() : default_factory_(new DefaultValueHolderFactory()) {}\n  explicit ThreadLocal(const T& value)\n      : default_factory_(new InstanceValueHolderFactory(value)) {}\n\n  ~ThreadLocal() { ThreadLocalRegistry::OnThreadLocalDestroyed(this); }\n\n  T* pointer() { return GetOrCreateValue(); }\n  const T* pointer() const { return GetOrCreateValue(); }\n  const T& get() const { return *pointer(); }\n  void set(const T& value) { *pointer() = value; }\n\n private:\n  // Holds a value of T.  Can be deleted via its base class without the caller\n  // knowing the type of T.\n  class ValueHolder : public ThreadLocalValueHolderBase {\n   public:\n    ValueHolder() : value_() {}\n    explicit ValueHolder(const T& value) : value_(value) {}\n\n    T* pointer() { return &value_; }\n\n   private:\n    T value_;\n    GTEST_DISALLOW_COPY_AND_ASSIGN_(ValueHolder);\n  };\n\n\n  T* GetOrCreateValue() const {\n    return static_cast<ValueHolder*>(\n        ThreadLocalRegistry::GetValueOnCurrentThread(this))->pointer();\n  }\n\n  virtual ThreadLocalValueHolderBase* NewValueForCurrentThread() const {\n    return default_factory_->MakeNewHolder();\n  }\n\n  class ValueHolderFactory {\n   public:\n    ValueHolderFactory() {}\n    virtual ~ValueHolderFactory() {}\n    virtual ValueHolder* MakeNewHolder() const = 0;\n\n   private:\n    GTEST_DISALLOW_COPY_AND_ASSIGN_(ValueHolderFactory);\n  };\n\n  class DefaultValueHolderFactory : public ValueHolderFactory {\n   public:\n    DefaultValueHolderFactory() {}\n    ValueHolder* MakeNewHolder() const override { return new ValueHolder(); }\n\n   private:\n    GTEST_DISALLOW_COPY_AND_ASSIGN_(DefaultValueHolderFactory);\n  };\n\n  class InstanceValueHolderFactory : public ValueHolderFactory {\n   public:\n    
explicit InstanceValueHolderFactory(const T& value) : value_(value) {}\n    ValueHolder* MakeNewHolder() const override {\n      return new ValueHolder(value_);\n    }\n\n   private:\n    const T value_;  // The value for each thread.\n\n    GTEST_DISALLOW_COPY_AND_ASSIGN_(InstanceValueHolderFactory);\n  };\n\n  std::unique_ptr<ValueHolderFactory> default_factory_;\n\n  GTEST_DISALLOW_COPY_AND_ASSIGN_(ThreadLocal);\n};\n\n# elif GTEST_HAS_PTHREAD\n\n// MutexBase and Mutex implement mutex on pthreads-based platforms.\nclass MutexBase {\n public:\n  // Acquires this mutex.\n  void Lock() {\n    GTEST_CHECK_POSIX_SUCCESS_(pthread_mutex_lock(&mutex_));\n    owner_ = pthread_self();\n    has_owner_ = true;\n  }\n\n  // Releases this mutex.\n  void Unlock() {\n    // Since the lock is being released the owner_ field should no longer be\n    // considered valid. We don't protect writing to has_owner_ here, as it's\n    // the caller's responsibility to ensure that the current thread holds the\n    // mutex when this is called.\n    has_owner_ = false;\n    GTEST_CHECK_POSIX_SUCCESS_(pthread_mutex_unlock(&mutex_));\n  }\n\n  // Does nothing if the current thread holds the mutex. Otherwise, crashes\n  // with high probability.\n  void AssertHeld() const {\n    GTEST_CHECK_(has_owner_ && pthread_equal(owner_, pthread_self()))\n        << \"The current thread is not holding the mutex @\" << this;\n  }\n\n  // A static mutex may be used before main() is entered.  It may even\n  // be used before the dynamic initialization stage.  Therefore we\n  // must be able to initialize a static mutex object at link time.\n  // This means MutexBase has to be a POD and its member variables\n  // have to be public.\n public:\n  pthread_mutex_t mutex_;  // The underlying pthread mutex.\n  // has_owner_ indicates whether the owner_ field below contains a valid thread\n  // ID and is therefore safe to inspect (e.g., to use in pthread_equal()). 
All\n  // accesses to the owner_ field should be protected by a check of this field.\n  // An alternative might be to memset() owner_ to all zeros, but there's no\n  // guarantee that a zero'd pthread_t is necessarily invalid or even different\n  // from pthread_self().\n  bool has_owner_;\n  pthread_t owner_;  // The thread holding the mutex.\n};\n\n// Forward-declares a static mutex.\n#  define GTEST_DECLARE_STATIC_MUTEX_(mutex) \\\n     extern ::testing::internal::MutexBase mutex\n\n// Defines and statically (i.e. at link time) initializes a static mutex.\n// The initialization list here does not explicitly initialize each field,\n// instead relying on default initialization for the unspecified fields. In\n// particular, the owner_ field (a pthread_t) is not explicitly initialized.\n// This allows initialization to work whether pthread_t is a scalar or struct.\n// The flag -Wmissing-field-initializers must not be specified for this to work.\n#define GTEST_DEFINE_STATIC_MUTEX_(mutex) \\\n  ::testing::internal::MutexBase mutex = {PTHREAD_MUTEX_INITIALIZER, false, 0}\n\n// The Mutex class can only be used for mutexes created at runtime. It\n// shares its API with MutexBase otherwise.\nclass Mutex : public MutexBase {\n public:\n  Mutex() {\n    GTEST_CHECK_POSIX_SUCCESS_(pthread_mutex_init(&mutex_, nullptr));\n    has_owner_ = false;\n  }\n  ~Mutex() {\n    GTEST_CHECK_POSIX_SUCCESS_(pthread_mutex_destroy(&mutex_));\n  }\n\n private:\n  GTEST_DISALLOW_COPY_AND_ASSIGN_(Mutex);\n};\n\n// We cannot name this class MutexLock because the ctor declaration would\n// conflict with a macro named MutexLock, which is defined on some\n// platforms. That macro is used as a defensive measure to prevent against\n// inadvertent misuses of MutexLock like \"MutexLock(&mu)\" rather than\n// \"MutexLock l(&mu)\".  
Hence the typedef trick below.\nclass GTestMutexLock {\n public:\n  explicit GTestMutexLock(MutexBase* mutex)\n      : mutex_(mutex) { mutex_->Lock(); }\n\n  ~GTestMutexLock() { mutex_->Unlock(); }\n\n private:\n  MutexBase* const mutex_;\n\n  GTEST_DISALLOW_COPY_AND_ASSIGN_(GTestMutexLock);\n};\n\ntypedef GTestMutexLock MutexLock;\n\n// Helpers for ThreadLocal.\n\n// pthread_key_create() requires DeleteThreadLocalValue() to have\n// C-linkage.  Therefore it cannot be templatized to access\n// ThreadLocal<T>.  Hence the need for class\n// ThreadLocalValueHolderBase.\nclass ThreadLocalValueHolderBase {\n public:\n  virtual ~ThreadLocalValueHolderBase() {}\n};\n\n// Called by pthread to delete thread-local data stored by\n// pthread_setspecific().\nextern \"C\" inline void DeleteThreadLocalValue(void* value_holder) {\n  delete static_cast<ThreadLocalValueHolderBase*>(value_holder);\n}\n\n// Implements thread-local storage on pthreads-based systems.\ntemplate <typename T>\nclass GTEST_API_ ThreadLocal {\n public:\n  ThreadLocal()\n      : key_(CreateKey()), default_factory_(new DefaultValueHolderFactory()) {}\n  explicit ThreadLocal(const T& value)\n      : key_(CreateKey()),\n        default_factory_(new InstanceValueHolderFactory(value)) {}\n\n  ~ThreadLocal() {\n    // Destroys the managed object for the current thread, if any.\n    DeleteThreadLocalValue(pthread_getspecific(key_));\n\n    // Releases resources associated with the key.  
This will *not*\n    // delete managed objects for other threads.\n    GTEST_CHECK_POSIX_SUCCESS_(pthread_key_delete(key_));\n  }\n\n  T* pointer() { return GetOrCreateValue(); }\n  const T* pointer() const { return GetOrCreateValue(); }\n  const T& get() const { return *pointer(); }\n  void set(const T& value) { *pointer() = value; }\n\n private:\n  // Holds a value of type T.\n  class ValueHolder : public ThreadLocalValueHolderBase {\n   public:\n    ValueHolder() : value_() {}\n    explicit ValueHolder(const T& value) : value_(value) {}\n\n    T* pointer() { return &value_; }\n\n   private:\n    T value_;\n    GTEST_DISALLOW_COPY_AND_ASSIGN_(ValueHolder);\n  };\n\n  static pthread_key_t CreateKey() {\n    pthread_key_t key;\n    // When a thread exits, DeleteThreadLocalValue() will be called on\n    // the object managed for that thread.\n    GTEST_CHECK_POSIX_SUCCESS_(\n        pthread_key_create(&key, &DeleteThreadLocalValue));\n    return key;\n  }\n\n  T* GetOrCreateValue() const {\n    ThreadLocalValueHolderBase* const holder =\n        static_cast<ThreadLocalValueHolderBase*>(pthread_getspecific(key_));\n    if (holder != nullptr) {\n      return CheckedDowncastToActualType<ValueHolder>(holder)->pointer();\n    }\n\n    ValueHolder* const new_holder = default_factory_->MakeNewHolder();\n    ThreadLocalValueHolderBase* const holder_base = new_holder;\n    GTEST_CHECK_POSIX_SUCCESS_(pthread_setspecific(key_, holder_base));\n    return new_holder->pointer();\n  }\n\n  class ValueHolderFactory {\n   public:\n    ValueHolderFactory() {}\n    virtual ~ValueHolderFactory() {}\n    virtual ValueHolder* MakeNewHolder() const = 0;\n\n   private:\n    GTEST_DISALLOW_COPY_AND_ASSIGN_(ValueHolderFactory);\n  };\n\n  class DefaultValueHolderFactory : public ValueHolderFactory {\n   public:\n    DefaultValueHolderFactory() {}\n    ValueHolder* MakeNewHolder() const override { return new ValueHolder(); }\n\n   private:\n    
GTEST_DISALLOW_COPY_AND_ASSIGN_(DefaultValueHolderFactory);\n  };\n\n  class InstanceValueHolderFactory : public ValueHolderFactory {\n   public:\n    explicit InstanceValueHolderFactory(const T& value) : value_(value) {}\n    ValueHolder* MakeNewHolder() const override {\n      return new ValueHolder(value_);\n    }\n\n   private:\n    const T value_;  // The value for each thread.\n\n    GTEST_DISALLOW_COPY_AND_ASSIGN_(InstanceValueHolderFactory);\n  };\n\n  // A key pthreads uses for looking up per-thread values.\n  const pthread_key_t key_;\n  std::unique_ptr<ValueHolderFactory> default_factory_;\n\n  GTEST_DISALLOW_COPY_AND_ASSIGN_(ThreadLocal);\n};\n\n# endif  // GTEST_HAS_MUTEX_AND_THREAD_LOCAL_\n\n#else  // GTEST_IS_THREADSAFE\n\n// A dummy implementation of synchronization primitives (mutex, lock,\n// and thread-local variable).  Necessary for compiling Google Test where\n// mutex is not supported - using Google Test in multiple threads is not\n// supported on such platforms.\n\nclass Mutex {\n public:\n  Mutex() {}\n  void Lock() {}\n  void Unlock() {}\n  void AssertHeld() const {}\n};\n\n# define GTEST_DECLARE_STATIC_MUTEX_(mutex) \\\n  extern ::testing::internal::Mutex mutex\n\n# define GTEST_DEFINE_STATIC_MUTEX_(mutex) ::testing::internal::Mutex mutex\n\n// We cannot name this class MutexLock because the ctor declaration would\n// conflict with a macro named MutexLock, which is defined on some\n// platforms. That macro is used as a defensive measure to prevent against\n// inadvertent misuses of MutexLock like \"MutexLock(&mu)\" rather than\n// \"MutexLock l(&mu)\".  
Hence the typedef trick below.\nclass GTestMutexLock {\n public:\n  explicit GTestMutexLock(Mutex*) {}  // NOLINT\n};\n\ntypedef GTestMutexLock MutexLock;\n\ntemplate <typename T>\nclass GTEST_API_ ThreadLocal {\n public:\n  ThreadLocal() : value_() {}\n  explicit ThreadLocal(const T& value) : value_(value) {}\n  T* pointer() { return &value_; }\n  const T* pointer() const { return &value_; }\n  const T& get() const { return value_; }\n  void set(const T& value) { value_ = value; }\n private:\n  T value_;\n};\n\n#endif  // GTEST_IS_THREADSAFE\n\n// Returns the number of threads running in the process, or 0 to indicate that\n// we cannot detect it.\nGTEST_API_ size_t GetThreadCount();\n\n#if GTEST_OS_WINDOWS\n# define GTEST_PATH_SEP_ \"\\\\\"\n# define GTEST_HAS_ALT_PATH_SEP_ 1\n// The biggest signed integer type the compiler supports.\ntypedef __int64 BiggestInt;\n#else\n# define GTEST_PATH_SEP_ \"/\"\n# define GTEST_HAS_ALT_PATH_SEP_ 0\ntypedef long long BiggestInt;  // NOLINT\n#endif  // GTEST_OS_WINDOWS\n\n// Utilities for char.\n\n// isspace(int ch) and friends accept an unsigned char or EOF.  
char\n// may be signed, depending on the compiler (or compiler flags).\n// Therefore we need to cast a char to unsigned char before calling\n// isspace(), etc.\n\ninline bool IsAlpha(char ch) {\n  return isalpha(static_cast<unsigned char>(ch)) != 0;\n}\ninline bool IsAlNum(char ch) {\n  return isalnum(static_cast<unsigned char>(ch)) != 0;\n}\ninline bool IsDigit(char ch) {\n  return isdigit(static_cast<unsigned char>(ch)) != 0;\n}\ninline bool IsLower(char ch) {\n  return islower(static_cast<unsigned char>(ch)) != 0;\n}\ninline bool IsSpace(char ch) {\n  return isspace(static_cast<unsigned char>(ch)) != 0;\n}\ninline bool IsUpper(char ch) {\n  return isupper(static_cast<unsigned char>(ch)) != 0;\n}\ninline bool IsXDigit(char ch) {\n  return isxdigit(static_cast<unsigned char>(ch)) != 0;\n}\ninline bool IsXDigit(wchar_t ch) {\n  const unsigned char low_byte = static_cast<unsigned char>(ch);\n  return ch == low_byte && isxdigit(low_byte) != 0;\n}\n\ninline char ToLower(char ch) {\n  return static_cast<char>(tolower(static_cast<unsigned char>(ch)));\n}\ninline char ToUpper(char ch) {\n  return static_cast<char>(toupper(static_cast<unsigned char>(ch)));\n}\n\ninline std::string StripTrailingSpaces(std::string str) {\n  std::string::iterator it = str.end();\n  while (it != str.begin() && IsSpace(*--it))\n    it = str.erase(it);\n  return str;\n}\n\n// The testing::internal::posix namespace holds wrappers for common\n// POSIX functions.  These wrappers hide the differences between\n// Windows/MSVC and POSIX systems.  
Since some compilers define these\n// standard functions as macros, the wrapper cannot have the same name\n// as the wrapped function.\n\nnamespace posix {\n\n// Functions with a different name on Windows.\n\n#if GTEST_OS_WINDOWS\n\ntypedef struct _stat StatStruct;\n\n# ifdef __BORLANDC__\ninline int IsATTY(int fd) { return isatty(fd); }\ninline int StrCaseCmp(const char* s1, const char* s2) {\n  return stricmp(s1, s2);\n}\ninline char* StrDup(const char* src) { return strdup(src); }\n# else  // !__BORLANDC__\n#  if GTEST_OS_WINDOWS_MOBILE\ninline int IsATTY(int /* fd */) { return 0; }\n#  else\ninline int IsATTY(int fd) { return _isatty(fd); }\n#  endif  // GTEST_OS_WINDOWS_MOBILE\ninline int StrCaseCmp(const char* s1, const char* s2) {\n  return _stricmp(s1, s2);\n}\ninline char* StrDup(const char* src) { return _strdup(src); }\n# endif  // __BORLANDC__\n\n# if GTEST_OS_WINDOWS_MOBILE\ninline int FileNo(FILE* file) { return reinterpret_cast<int>(_fileno(file)); }\n// Stat(), RmDir(), and IsDir() are not needed on Windows CE at this\n// time and thus not defined there.\n# else\ninline int FileNo(FILE* file) { return _fileno(file); }\ninline int Stat(const char* path, StatStruct* buf) { return _stat(path, buf); }\ninline int RmDir(const char* dir) { return _rmdir(dir); }\ninline bool IsDir(const StatStruct& st) {\n  return (_S_IFDIR & st.st_mode) != 0;\n}\n# endif  // GTEST_OS_WINDOWS_MOBILE\n\n#elif GTEST_OS_ESP8266\ntypedef struct stat StatStruct;\n\ninline int FileNo(FILE* file) { return fileno(file); }\ninline int IsATTY(int fd) { return isatty(fd); }\ninline int Stat(const char* path, StatStruct* buf) {\n  // stat function not implemented on ESP8266\n  return 0;\n}\ninline int StrCaseCmp(const char* s1, const char* s2) {\n  return strcasecmp(s1, s2);\n}\ninline char* StrDup(const char* src) { return strdup(src); }\ninline int RmDir(const char* dir) { return rmdir(dir); }\ninline bool IsDir(const StatStruct& st) { return S_ISDIR(st.st_mode); 
}\n\n#else\n\ntypedef struct stat StatStruct;\n\ninline int FileNo(FILE* file) { return fileno(file); }\ninline int IsATTY(int fd) { return isatty(fd); }\ninline int Stat(const char* path, StatStruct* buf) { return stat(path, buf); }\ninline int StrCaseCmp(const char* s1, const char* s2) {\n  return strcasecmp(s1, s2);\n}\ninline char* StrDup(const char* src) { return strdup(src); }\ninline int RmDir(const char* dir) { return rmdir(dir); }\ninline bool IsDir(const StatStruct& st) { return S_ISDIR(st.st_mode); }\n\n#endif  // GTEST_OS_WINDOWS\n\n// Functions deprecated by MSVC 8.0.\n\nGTEST_DISABLE_MSC_DEPRECATED_PUSH_()\n\n// ChDir(), FReopen(), FDOpen(), Read(), Write(), Close(), and\n// StrError() aren't needed on Windows CE at this time and thus not\n// defined there.\n\n#if !GTEST_OS_WINDOWS_MOBILE && !GTEST_OS_WINDOWS_PHONE && !GTEST_OS_WINDOWS_RT\ninline int ChDir(const char* dir) { return chdir(dir); }\n#endif\ninline FILE* FOpen(const char* path, const char* mode) {\n  return fopen(path, mode);\n}\n#if !GTEST_OS_WINDOWS_MOBILE\ninline FILE *FReopen(const char* path, const char* mode, FILE* stream) {\n  return freopen(path, mode, stream);\n}\ninline FILE* FDOpen(int fd, const char* mode) { return fdopen(fd, mode); }\n#endif\ninline int FClose(FILE* fp) { return fclose(fp); }\n#if !GTEST_OS_WINDOWS_MOBILE\ninline int Read(int fd, void* buf, unsigned int count) {\n  return static_cast<int>(read(fd, buf, count));\n}\ninline int Write(int fd, const void* buf, unsigned int count) {\n  return static_cast<int>(write(fd, buf, count));\n}\ninline int Close(int fd) { return close(fd); }\ninline const char* StrError(int errnum) { return strerror(errnum); }\n#endif\ninline const char* GetEnv(const char* name) {\n#if GTEST_OS_WINDOWS_MOBILE || GTEST_OS_WINDOWS_PHONE || \\\n    GTEST_OS_WINDOWS_RT || GTEST_OS_ESP8266\n  // We are on an embedded platform, which has no environment variables.\n  static_cast<void>(name);  // To prevent 'unused argument' warning.\n  return 
nullptr;\n#elif defined(__BORLANDC__) || defined(__SunOS_5_8) || defined(__SunOS_5_9)\n  // Environment variables which we programmatically clear will be set to the\n  // empty string rather than unset (NULL).  Handle that case.\n  const char* const env = getenv(name);\n  return (env != nullptr && env[0] != '\\0') ? env : nullptr;\n#else\n  return getenv(name);\n#endif\n}\n\nGTEST_DISABLE_MSC_DEPRECATED_POP_()\n\n#if GTEST_OS_WINDOWS_MOBILE\n// Windows CE has no C library. The abort() function is used in\n// several places in Google Test. This implementation provides a reasonable\n// imitation of standard behaviour.\n[[noreturn]] void Abort();\n#else\n[[noreturn]] inline void Abort() { abort(); }\n#endif  // GTEST_OS_WINDOWS_MOBILE\n\n}  // namespace posix\n\n// MSVC \"deprecates\" snprintf and issues warnings wherever it is used.  In\n// order to avoid these warnings, we need to use _snprintf or _snprintf_s on\n// MSVC-based platforms.  We map the GTEST_SNPRINTF_ macro to the appropriate\n// function in order to achieve that.  We use macro definition here because\n// snprintf is a variadic function.\n#if _MSC_VER && !GTEST_OS_WINDOWS_MOBILE\n// MSVC 2005 and above support variadic macros.\n# define GTEST_SNPRINTF_(buffer, size, format, ...) \\\n     _snprintf_s(buffer, size, size, format, __VA_ARGS__)\n#elif defined(_MSC_VER)\n// Windows CE does not define _snprintf_s\n# define GTEST_SNPRINTF_ _snprintf\n#else\n# define GTEST_SNPRINTF_ snprintf\n#endif\n\n// The maximum number a BiggestInt can represent.  This definition\n// works no matter BiggestInt is represented in one's complement or\n// two's complement.\n//\n// We cannot rely on numeric_limits in STL, as __int64 and long long\n// are not part of standard C++ and numeric_limits doesn't need to be\n// defined for them.\nconst BiggestInt kMaxBiggestInt =\n    ~(static_cast<BiggestInt>(1) << (8*sizeof(BiggestInt) - 1));\n\n// This template class serves as a compile-time function from size to\n// type.  
It maps a size in bytes to a primitive type with that\n// size. e.g.\n//\n//   TypeWithSize<4>::UInt\n//\n// is typedef-ed to be unsigned int (unsigned integer made up of 4\n// bytes).\n//\n// Such functionality should belong to STL, but I cannot find it\n// there.\n//\n// Google Test uses this class in the implementation of floating-point\n// comparison.\n//\n// For now it only handles UInt (unsigned int) as that's all Google Test\n// needs.  Other types can be easily added in the future if need\n// arises.\ntemplate <size_t size>\nclass TypeWithSize {\n public:\n  // This prevents the user from using TypeWithSize<N> with incorrect\n  // values of N.\n  typedef void UInt;\n};\n\n// The specialization for size 4.\ntemplate <>\nclass TypeWithSize<4> {\n public:\n  // unsigned int has size 4 in both gcc and MSVC.\n  //\n  // As base/basictypes.h doesn't compile on Windows, we cannot use\n  // uint32, uint64, and etc here.\n  typedef int Int;\n  typedef unsigned int UInt;\n};\n\n// The specialization for size 8.\ntemplate <>\nclass TypeWithSize<8> {\n public:\n#if GTEST_OS_WINDOWS\n  typedef __int64 Int;\n  typedef unsigned __int64 UInt;\n#else\n  typedef long long Int;  // NOLINT\n  typedef unsigned long long UInt;  // NOLINT\n#endif  // GTEST_OS_WINDOWS\n};\n\n// Integer types of known sizes.\ntypedef TypeWithSize<4>::Int Int32;\ntypedef TypeWithSize<4>::UInt UInt32;\ntypedef TypeWithSize<8>::Int Int64;\ntypedef TypeWithSize<8>::UInt UInt64;\ntypedef TypeWithSize<8>::Int TimeInMillis;  // Represents time in milliseconds.\n\n// Utilities for command line flags and environment variables.\n\n// Macro for referencing flags.\n#if !defined(GTEST_FLAG)\n# define GTEST_FLAG(name) FLAGS_gtest_##name\n#endif  // !defined(GTEST_FLAG)\n\n#if !defined(GTEST_USE_OWN_FLAGFILE_FLAG_)\n# define GTEST_USE_OWN_FLAGFILE_FLAG_ 1\n#endif  // !defined(GTEST_USE_OWN_FLAGFILE_FLAG_)\n\n#if !defined(GTEST_DECLARE_bool_)\n# define GTEST_FLAG_SAVER_ ::testing::internal::GTestFlagSaver\n\n// 
Macros for declaring flags.\n# define GTEST_DECLARE_bool_(name) GTEST_API_ extern bool GTEST_FLAG(name)\n# define GTEST_DECLARE_int32_(name) \\\n    GTEST_API_ extern ::testing::internal::Int32 GTEST_FLAG(name)\n# define GTEST_DECLARE_string_(name) \\\n    GTEST_API_ extern ::std::string GTEST_FLAG(name)\n\n// Macros for defining flags.\n# define GTEST_DEFINE_bool_(name, default_val, doc) \\\n    GTEST_API_ bool GTEST_FLAG(name) = (default_val)\n# define GTEST_DEFINE_int32_(name, default_val, doc) \\\n    GTEST_API_ ::testing::internal::Int32 GTEST_FLAG(name) = (default_val)\n# define GTEST_DEFINE_string_(name, default_val, doc) \\\n    GTEST_API_ ::std::string GTEST_FLAG(name) = (default_val)\n\n#endif  // !defined(GTEST_DECLARE_bool_)\n\n// Thread annotations\n#if !defined(GTEST_EXCLUSIVE_LOCK_REQUIRED_)\n# define GTEST_EXCLUSIVE_LOCK_REQUIRED_(locks)\n# define GTEST_LOCK_EXCLUDED_(locks)\n#endif  // !defined(GTEST_EXCLUSIVE_LOCK_REQUIRED_)\n\n// Parses 'str' for a 32-bit signed integer.  
If successful, writes the result\n// to *value and returns true; otherwise leaves *value unchanged and returns\n// false.\nbool ParseInt32(const Message& src_text, const char* str, Int32* value);\n\n// Parses a bool/Int32/string from the environment variable\n// corresponding to the given Google Test flag.\nbool BoolFromGTestEnv(const char* flag, bool default_val);\nGTEST_API_ Int32 Int32FromGTestEnv(const char* flag, Int32 default_val);\nstd::string OutputFlagAlsoCheckEnvVar();\nconst char* StringFromGTestEnv(const char* flag, const char* default_val);\n\n}  // namespace internal\n}  // namespace testing\n\n#if !defined(GTEST_INTERNAL_DEPRECATED)\n\n// Internal Macro to mark an API deprecated, for googletest usage only\n// Usage: class GTEST_INTERNAL_DEPRECATED(message) MyClass or\n// GTEST_INTERNAL_DEPRECATED(message) <return_type> myFunction(); Every usage of\n// a deprecated entity will trigger a warning when compiled with\n// `-Wdeprecated-declarations` option (clang, gcc, any __GNUC__ compiler).\n// For msvc /W3 option will need to be used\n// Note that for 'other' compilers this macro evaluates to nothing to prevent\n// compilations errors.\n#if defined(_MSC_VER)\n#define GTEST_INTERNAL_DEPRECATED(message) __declspec(deprecated(message))\n#elif defined(__GNUC__)\n#define GTEST_INTERNAL_DEPRECATED(message) __attribute__((deprecated(message)))\n#else\n#define GTEST_INTERNAL_DEPRECATED(message)\n#endif\n\n#endif  // !defined(GTEST_INTERNAL_DEPRECATED)\n\n#endif  // GTEST_INCLUDE_GTEST_INTERNAL_GTEST_PORT_H_\n"
  },
  {
    "path": "test/googletest/include/gtest/internal/gtest-string.h",
    "content": "// Copyright 2005, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n//\n// The Google C++ Testing and Mocking Framework (Google Test)\n//\n// This header file declares the String class and functions used internally by\n// Google Test.  They are subject to change without notice. 
They should not used\n// by code external to Google Test.\n//\n// This header file is #included by gtest-internal.h.\n// It should not be #included by other files.\n\n// GOOGLETEST_CM0001 DO NOT DELETE\n\n#ifndef GTEST_INCLUDE_GTEST_INTERNAL_GTEST_STRING_H_\n#define GTEST_INCLUDE_GTEST_INTERNAL_GTEST_STRING_H_\n\n#ifdef __BORLANDC__\n// string.h is not guaranteed to provide strcpy on C++ Builder.\n# include <mem.h>\n#endif\n\n#include <string.h>\n#include <string>\n\n#include \"gtest/internal/gtest-port.h\"\n\nnamespace testing {\nnamespace internal {\n\n// String - an abstract class holding static string utilities.\nclass GTEST_API_ String {\n public:\n  // Static utility methods\n\n  // Clones a 0-terminated C string, allocating memory using new.  The\n  // caller is responsible for deleting the return value using\n  // delete[].  Returns the cloned string, or NULL if the input is\n  // NULL.\n  //\n  // This is different from strdup() in string.h, which allocates\n  // memory using malloc().\n  static const char* CloneCString(const char* c_str);\n\n#if GTEST_OS_WINDOWS_MOBILE\n  // Windows CE does not have the 'ANSI' versions of Win32 APIs. To be\n  // able to pass strings to Win32 APIs on CE we need to convert them\n  // to 'Unicode', UTF-16.\n\n  // Creates a UTF-16 wide string from the given ANSI string, allocating\n  // memory using new. The caller is responsible for deleting the return\n  // value using delete[]. Returns the wide string, or NULL if the\n  // input is NULL.\n  //\n  // The wide string is created using the ANSI codepage (CP_ACP) to\n  // match the behaviour of the ANSI versions of Win32 calls and the\n  // C runtime.\n  static LPCWSTR AnsiToUtf16(const char* c_str);\n\n  // Creates an ANSI string from the given wide string, allocating\n  // memory using new. The caller is responsible for deleting the return\n  // value using delete[]. 
Returns the ANSI string, or NULL if the\n  // input is NULL.\n  //\n  // The returned string is created using the ANSI codepage (CP_ACP) to\n  // match the behaviour of the ANSI versions of Win32 calls and the\n  // C runtime.\n  static const char* Utf16ToAnsi(LPCWSTR utf16_str);\n#endif\n\n  // Compares two C strings.  Returns true if and only if they have the same\n  // content.\n  //\n  // Unlike strcmp(), this function can handle NULL argument(s).  A\n  // NULL C string is considered different to any non-NULL C string,\n  // including the empty string.\n  static bool CStringEquals(const char* lhs, const char* rhs);\n\n  // Converts a wide C string to a String using the UTF-8 encoding.\n  // NULL will be converted to \"(null)\".  If an error occurred during\n  // the conversion, \"(failed to convert from wide string)\" is\n  // returned.\n  static std::string ShowWideCString(const wchar_t* wide_c_str);\n\n  // Compares two wide C strings.  Returns true if and only if they have the\n  // same content.\n  //\n  // Unlike wcscmp(), this function can handle NULL argument(s).  A\n  // NULL C string is considered different to any non-NULL C string,\n  // including the empty string.\n  static bool WideCStringEquals(const wchar_t* lhs, const wchar_t* rhs);\n\n  // Compares two C strings, ignoring case.  Returns true if and only if\n  // they have the same content.\n  //\n  // Unlike strcasecmp(), this function can handle NULL argument(s).\n  // A NULL C string is considered different to any non-NULL C string,\n  // including the empty string.\n  static bool CaseInsensitiveCStringEquals(const char* lhs,\n                                           const char* rhs);\n\n  // Compares two wide C strings, ignoring case.  
Returns true if and only if\n  // they have the same content.\n  //\n  // Unlike wcscasecmp(), this function can handle NULL argument(s).\n  // A NULL C string is considered different to any non-NULL wide C string,\n  // including the empty string.\n  // NB: The implementations on different platforms slightly differ.\n  // On windows, this method uses _wcsicmp which compares according to LC_CTYPE\n  // environment variable. On GNU platform this method uses wcscasecmp\n  // which compares according to LC_CTYPE category of the current locale.\n  // On MacOS X, it uses towlower, which also uses LC_CTYPE category of the\n  // current locale.\n  static bool CaseInsensitiveWideCStringEquals(const wchar_t* lhs,\n                                               const wchar_t* rhs);\n\n  // Returns true if and only if the given string ends with the given suffix,\n  // ignoring case. Any string is considered to end with an empty suffix.\n  static bool EndsWithCaseInsensitive(\n      const std::string& str, const std::string& suffix);\n\n  // Formats an int value as \"%02d\".\n  static std::string FormatIntWidth2(int value);  // \"%02d\" for width == 2\n\n  // Formats an int value as \"%X\".\n  static std::string FormatHexInt(int value);\n\n  // Formats an int value as \"%X\".\n  static std::string FormatHexUInt32(UInt32 value);\n\n  // Formats a byte as \"%02X\".\n  static std::string FormatByte(unsigned char value);\n\n private:\n  String();  // Not meant to be instantiated.\n};  // class String\n\n// Gets the content of the stringstream's buffer as an std::string.  Each '\\0'\n// character in the buffer is replaced with \"\\\\0\".\nGTEST_API_ std::string StringStreamToString(::std::stringstream* stream);\n\n}  // namespace internal\n}  // namespace testing\n\n#endif  // GTEST_INCLUDE_GTEST_INTERNAL_GTEST_STRING_H_\n"
  },
  {
    "path": "test/googletest/include/gtest/internal/gtest-type-util.h",
    "content": "// Copyright 2008 Google Inc.\n// All Rights Reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n// Type utilities needed for implementing typed and type-parameterized\n// tests.\n\n// GOOGLETEST_CM0001 DO NOT DELETE\n\n#ifndef GTEST_INCLUDE_GTEST_INTERNAL_GTEST_TYPE_UTIL_H_\n#define GTEST_INCLUDE_GTEST_INTERNAL_GTEST_TYPE_UTIL_H_\n\n#include \"gtest/internal/gtest-port.h\"\n\n// #ifdef __GNUC__ is too general here.  
It is possible to use gcc without using\n// libstdc++ (which is where cxxabi.h comes from).\n# if GTEST_HAS_CXXABI_H_\n#  include <cxxabi.h>\n# elif defined(__HP_aCC)\n#  include <acxx_demangle.h>\n# endif  // GTEST_HASH_CXXABI_H_\n\nnamespace testing {\nnamespace internal {\n\n// Canonicalizes a given name with respect to the Standard C++ Library.\n// This handles removing the inline namespace within `std` that is\n// used by various standard libraries (e.g., `std::__1`).  Names outside\n// of namespace std are returned unmodified.\ninline std::string CanonicalizeForStdLibVersioning(std::string s) {\n  static const char prefix[] = \"std::__\";\n  if (s.compare(0, strlen(prefix), prefix) == 0) {\n    std::string::size_type end = s.find(\"::\", strlen(prefix));\n    if (end != s.npos) {\n      // Erase everything between the initial `std` and the second `::`.\n      s.erase(strlen(\"std\"), end - strlen(\"std\"));\n    }\n  }\n  return s;\n}\n\n// GetTypeName<T>() returns a human-readable name of type T.\n// NB: This function is also used in Google Mock, so don't move it inside of\n// the typed-test-only section below.\ntemplate <typename T>\nstd::string GetTypeName() {\n# if GTEST_HAS_RTTI\n\n  const char* const name = typeid(T).name();\n#  if GTEST_HAS_CXXABI_H_ || defined(__HP_aCC)\n  int status = 0;\n  // gcc's implementation of typeid(T).name() mangles the type name,\n  // so we have to demangle it.\n#   if GTEST_HAS_CXXABI_H_\n  using abi::__cxa_demangle;\n#   endif  // GTEST_HAS_CXXABI_H_\n  char* const readable_name = __cxa_demangle(name, nullptr, nullptr, &status);\n  const std::string name_str(status == 0 ? 
readable_name : name);\n  free(readable_name);\n  return CanonicalizeForStdLibVersioning(name_str);\n#  else\n  return name;\n#  endif  // GTEST_HAS_CXXABI_H_ || __HP_aCC\n\n# else\n\n  return \"<type>\";\n\n# endif  // GTEST_HAS_RTTI\n}\n\n#if GTEST_HAS_TYPED_TEST || GTEST_HAS_TYPED_TEST_P\n\n// A unique type indicating an empty node\nstruct None {};\n\n# define GTEST_TEMPLATE_ template <typename T> class\n\n// The template \"selector\" struct TemplateSel<Tmpl> is used to\n// represent Tmpl, which must be a class template with one type\n// parameter, as a type.  TemplateSel<Tmpl>::Bind<T>::type is defined\n// as the type Tmpl<T>.  This allows us to actually instantiate the\n// template \"selected\" by TemplateSel<Tmpl>.\n//\n// This trick is necessary for simulating typedef for class templates,\n// which C++ doesn't support directly.\ntemplate <GTEST_TEMPLATE_ Tmpl>\nstruct TemplateSel {\n  template <typename T>\n  struct Bind {\n    typedef Tmpl<T> type;\n  };\n};\n\n# define GTEST_BIND_(TmplSel, T) \\\n  TmplSel::template Bind<T>::type\n\ntemplate <GTEST_TEMPLATE_ Head_, GTEST_TEMPLATE_... Tail_>\nstruct Templates {\n  using Head = TemplateSel<Head_>;\n  using Tail = Templates<Tail_...>;\n};\n\ntemplate <GTEST_TEMPLATE_ Head_>\nstruct Templates<Head_> {\n  using Head = TemplateSel<Head_>;\n  using Tail = None;\n};\n\n// Tuple-like type lists\ntemplate <typename Head_, typename... Tail_>\nstruct Types {\n  using Head = Head_;\n  using Tail = Types<Tail_...>;\n};\n\ntemplate <typename Head_>\nstruct Types<Head_> {\n  using Head = Head_;\n  using Tail = None;\n};\n\n// Helper metafunctions to tell apart a single type from types\n// generated by ::testing::Types\ntemplate <typename... Ts>\nstruct ProxyTypeList {\n  using type = Types<Ts...>;\n};\n\ntemplate <typename>\nstruct is_proxy_type_list : std::false_type {};\n\ntemplate <typename... 
Ts>\nstruct is_proxy_type_list<ProxyTypeList<Ts...>> : std::true_type {};\n\n// Generator which conditionally creates type lists.\n// It recognizes if a requested type list should be created\n// and prevents creating a new type list nested within another one.\ntemplate <typename T>\nstruct GenerateTypeList {\n private:\n  using proxy = typename std::conditional<is_proxy_type_list<T>::value, T,\n                                          ProxyTypeList<T>>::type;\n\n public:\n  using type = typename proxy::type;\n};\n\n#endif  // GTEST_HAS_TYPED_TEST || GTEST_HAS_TYPED_TEST_P\n\n}  // namespace internal\n\ntemplate <typename... Ts>\nusing Types = internal::ProxyTypeList<Ts...>;\n\n}  // namespace testing\n\n#endif  // GTEST_INCLUDE_GTEST_INTERNAL_GTEST_TYPE_UTIL_H_\n"
  },
  {
    "path": "test/googletest/samples/prime_tables.h",
    "content": "// Copyright 2008 Google Inc.\n// All Rights Reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\n\n// This provides interface PrimeTable that determines whether a number is a\n// prime and determines a next prime number. 
This interface is used\n// in Google Test samples demonstrating use of parameterized tests.\n\n#ifndef GTEST_SAMPLES_PRIME_TABLES_H_\n#define GTEST_SAMPLES_PRIME_TABLES_H_\n\n#include <algorithm>\n\n// The prime table interface.\nclass PrimeTable {\n public:\n  virtual ~PrimeTable() {}\n\n  // Returns true if and only if n is a prime number.\n  virtual bool IsPrime(int n) const = 0;\n\n  // Returns the smallest prime number greater than p; or returns -1\n  // if the next prime is beyond the capacity of the table.\n  virtual int GetNextPrime(int p) const = 0;\n};\n\n// Implementation #1 calculates the primes on-the-fly.\nclass OnTheFlyPrimeTable : public PrimeTable {\n public:\n  bool IsPrime(int n) const override {\n    if (n <= 1) return false;\n\n    for (int i = 2; i*i <= n; i++) {\n      // n is divisible by an integer other than 1 and itself.\n      if ((n % i) == 0) return false;\n    }\n\n    return true;\n  }\n\n  int GetNextPrime(int p) const override {\n    for (int n = p + 1; n > 0; n++) {\n      if (IsPrime(n)) return n;\n    }\n\n    return -1;\n  }\n};\n\n// Implementation #2 pre-calculates the primes and stores the result\n// in an array.\nclass PreCalculatedPrimeTable : public PrimeTable {\n public:\n  // 'max' specifies the maximum number the prime table holds.\n  explicit PreCalculatedPrimeTable(int max)\n      : is_prime_size_(max + 1), is_prime_(new bool[max + 1]) {\n    CalculatePrimesUpTo(max);\n  }\n  ~PreCalculatedPrimeTable() override { delete[] is_prime_; }\n\n  bool IsPrime(int n) const override {\n    return 0 <= n && n < is_prime_size_ && is_prime_[n];\n  }\n\n  int GetNextPrime(int p) const override {\n    for (int n = p + 1; n < is_prime_size_; n++) {\n      if (is_prime_[n]) return n;\n    }\n\n    return -1;\n  }\n\n private:\n  void CalculatePrimesUpTo(int max) {\n    ::std::fill(is_prime_, is_prime_ + is_prime_size_, true);\n    is_prime_[0] = is_prime_[1] = false;\n\n    // Checks every candidate for prime number (we know that 2 
is the only even\n    // prime).\n    for (int i = 2; i*i <= max; i += i%2+1) {\n      if (!is_prime_[i]) continue;\n\n      // Marks all multiples of i (except i itself) as non-prime.\n      // We are starting here from i-th multiplier, because all smaller\n      // complex numbers were already marked.\n      for (int j = i*i; j <= max; j += i) {\n        is_prime_[j] = false;\n      }\n    }\n  }\n\n  const int is_prime_size_;\n  bool* const is_prime_;\n\n  // Disables compiler warning \"assignment operator could not be generated.\"\n  void operator=(const PreCalculatedPrimeTable& rhs);\n};\n\n#endif  // GTEST_SAMPLES_PRIME_TABLES_H_\n"
  },
  {
    "path": "test/googletest/samples/sample1.cc",
    "content": "// Copyright 2005, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n// A sample program demonstrating using Google C++ testing framework.\n\n#include \"sample1.h\"\n\n// Returns n! (the factorial of n).  For negative n, n! 
is defined to be 1.\nint Factorial(int n) {\n  int result = 1;\n  for (int i = 1; i <= n; i++) {\n    result *= i;\n  }\n\n  return result;\n}\n\n// Returns true if and only if n is a prime number.\nbool IsPrime(int n) {\n  // Trivial case 1: small numbers\n  if (n <= 1) return false;\n\n  // Trivial case 2: even numbers\n  if (n % 2 == 0) return n == 2;\n\n  // Now, we have that n is odd and n >= 3.\n\n  // Try to divide n by every odd number i, starting from 3\n  for (int i = 3; ; i += 2) {\n    // We only have to try i up to the square root of n\n    if (i > n/i) break;\n\n    // Now, we have i <= n/i < n.\n    // If n is divisible by i, n is not prime.\n    if (n % i == 0) return false;\n  }\n\n  // n has no integer factor in the range (1, n), and thus is prime.\n  return true;\n}\n"
  },
  {
    "path": "test/googletest/samples/sample1.h",
    "content": "// Copyright 2005, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n// A sample program demonstrating using Google C++ testing framework.\n\n#ifndef GTEST_SAMPLES_SAMPLE1_H_\n#define GTEST_SAMPLES_SAMPLE1_H_\n\n// Returns n! (the factorial of n).  For negative n, n! is defined to be 1.\nint Factorial(int n);\n\n// Returns true if and only if n is a prime number.\nbool IsPrime(int n);\n\n#endif  // GTEST_SAMPLES_SAMPLE1_H_\n"
  },
  {
    "path": "test/googletest/samples/sample10_unittest.cc",
    "content": "// Copyright 2009 Google Inc. All Rights Reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\n// This sample shows how to use Google Test listener API to implement\n// a primitive leak checker.\n\n#include <stdio.h>\n#include <stdlib.h>\n\n#include \"gtest/gtest.h\"\nusing ::testing::EmptyTestEventListener;\nusing ::testing::InitGoogleTest;\nusing ::testing::Test;\nusing ::testing::TestEventListeners;\nusing ::testing::TestInfo;\nusing ::testing::TestPartResult;\nusing ::testing::UnitTest;\n\nnamespace {\n// We will track memory used by this class.\nclass Water {\n public:\n  // Normal Water declarations go here.\n\n  // operator new and operator delete help us control water allocation.\n  void* operator new(size_t allocation_size) {\n    allocated_++;\n    return malloc(allocation_size);\n  }\n\n  void operator delete(void* block, size_t /* allocation_size */) {\n    allocated_--;\n    free(block);\n  }\n\n  static int allocated() { return allocated_; }\n\n private:\n  static int allocated_;\n};\n\nint Water::allocated_ = 0;\n\n// This event listener monitors how many Water objects are created and\n// destroyed by each test, and reports a failure if a test leaks some Water\n// objects. 
It does this by comparing the number of live Water objects at\n// the beginning of a test and at the end of a test.\nclass LeakChecker : public EmptyTestEventListener {\n private:\n  // Called before a test starts.\n  void OnTestStart(const TestInfo& /* test_info */) override {\n    initially_allocated_ = Water::allocated();\n  }\n\n  // Called after a test ends.\n  void OnTestEnd(const TestInfo& /* test_info */) override {\n    int difference = Water::allocated() - initially_allocated_;\n\n    // You can generate a failure in any event handler except\n    // OnTestPartResult. Just use an appropriate Google Test assertion to do\n    // it.\n    EXPECT_LE(difference, 0) << \"Leaked \" << difference << \" unit(s) of Water!\";\n  }\n\n  int initially_allocated_;\n};\n\nTEST(ListenersTest, DoesNotLeak) {\n  Water* water = new Water;\n  delete water;\n}\n\n// This should fail when the --check_for_leaks command line flag is\n// specified.\nTEST(ListenersTest, LeaksWater) {\n  Water* water = new Water;\n  EXPECT_TRUE(water != nullptr);\n}\n}  // namespace\n\nint main(int argc, char **argv) {\n  InitGoogleTest(&argc, argv);\n\n  bool check_for_leaks = false;\n  if (argc > 1 && strcmp(argv[1], \"--check_for_leaks\") == 0 )\n    check_for_leaks = true;\n  else\n    printf(\"%s\\n\", \"Run this program with --check_for_leaks to enable \"\n           \"custom leak checking in the tests.\");\n\n  // If we are given the --check_for_leaks command line flag, installs the\n  // leak checker.\n  if (check_for_leaks) {\n    TestEventListeners& listeners = UnitTest::GetInstance()->listeners();\n\n    // Adds the leak checker to the end of the test event listener list,\n    // after the default text output printer and the default XML report\n    // generator.\n    //\n    // The order is important - it ensures that failures generated in the\n    // leak checker's OnTestEnd() method are processed by the text and XML\n    // printers *before* their OnTestEnd() methods are called, such 
that\n    // they are attributed to the right test. Remember that a listener\n    // receives an OnXyzStart event *after* listeners preceding it in the\n    // list received that event, and receives an OnXyzEnd event *before*\n    // listeners preceding it.\n    //\n    // We don't need to worry about deleting the new listener later, as\n    // Google Test will do it.\n    listeners.Append(new LeakChecker);\n  }\n  return RUN_ALL_TESTS();\n}\n"
  },
  {
    "path": "test/googletest/samples/sample1_unittest.cc",
    "content": "// Copyright 2005, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n// A sample program demonstrating using Google C++ testing framework.\n\n// This sample shows how to write a simple unit test for a function,\n// using Google C++ testing framework.\n//\n// Writing a unit test using Google C++ testing framework is easy as 1-2-3:\n\n\n// Step 1. 
Include necessary header files such that the stuff your\n// test logic needs is declared.\n//\n// Don't forget gtest.h, which declares the testing framework.\n\n#include <limits.h>\n#include \"sample1.h\"\n#include \"gtest/gtest.h\"\nnamespace {\n\n// Step 2. Use the TEST macro to define your tests.\n//\n// TEST has two parameters: the test case name and the test name.\n// After using the macro, you should define your test logic between a\n// pair of braces.  You can use a bunch of macros to indicate the\n// success or failure of a test.  EXPECT_TRUE and EXPECT_EQ are\n// examples of such macros.  For a complete list, see gtest.h.\n//\n// <TechnicalDetails>\n//\n// In Google Test, tests are grouped into test cases.  This is how we\n// keep test code organized.  You should put logically related tests\n// into the same test case.\n//\n// The test case name and the test name should both be valid C++\n// identifiers.  And you should not use underscore (_) in the names.\n//\n// Google Test guarantees that each test you define is run exactly\n// once, but it makes no guarantee on the order the tests are\n// executed.  Therefore, you should write your tests in such a way\n// that their results don't depend on their order.\n//\n// </TechnicalDetails>\n\n\n// Tests Factorial().\n\n// Tests factorial of negative numbers.\nTEST(FactorialTest, Negative) {\n  // This test is named \"Negative\", and belongs to the \"FactorialTest\"\n  // test case.\n  EXPECT_EQ(1, Factorial(-5));\n  EXPECT_EQ(1, Factorial(-1));\n  EXPECT_GT(Factorial(-10), 0);\n\n  // <TechnicalDetails>\n  //\n  // EXPECT_EQ(expected, actual) is the same as\n  //\n  //   EXPECT_TRUE((expected) == (actual))\n  //\n  // except that it will print both the expected value and the actual\n  // value when the assertion fails.  This is very helpful for\n  // debugging.  
Therefore in this case EXPECT_EQ is preferred.\n  //\n  // On the other hand, EXPECT_TRUE accepts any Boolean expression,\n  // and is thus more general.\n  //\n  // </TechnicalDetails>\n}\n\n// Tests factorial of 0.\nTEST(FactorialTest, Zero) {\n  EXPECT_EQ(1, Factorial(0));\n}\n\n// Tests factorial of positive numbers.\nTEST(FactorialTest, Positive) {\n  EXPECT_EQ(1, Factorial(1));\n  EXPECT_EQ(2, Factorial(2));\n  EXPECT_EQ(6, Factorial(3));\n  EXPECT_EQ(40320, Factorial(8));\n}\n\n\n// Tests IsPrime()\n\n// Tests negative input.\nTEST(IsPrimeTest, Negative) {\n  // This test belongs to the IsPrimeTest test case.\n\n  EXPECT_FALSE(IsPrime(-1));\n  EXPECT_FALSE(IsPrime(-2));\n  EXPECT_FALSE(IsPrime(INT_MIN));\n}\n\n// Tests some trivial cases.\nTEST(IsPrimeTest, Trivial) {\n  EXPECT_FALSE(IsPrime(0));\n  EXPECT_FALSE(IsPrime(1));\n  EXPECT_TRUE(IsPrime(2));\n  EXPECT_TRUE(IsPrime(3));\n}\n\n// Tests positive input.\nTEST(IsPrimeTest, Positive) {\n  EXPECT_FALSE(IsPrime(4));\n  EXPECT_TRUE(IsPrime(5));\n  EXPECT_FALSE(IsPrime(6));\n  EXPECT_TRUE(IsPrime(23));\n}\n}  // namespace\n\n// Step 3. Call RUN_ALL_TESTS() in main().\n//\n// We do this by linking in src/gtest_main.cc file, which consists of\n// a main() function which calls RUN_ALL_TESTS() for us.\n//\n// This runs all the tests you've defined, prints the result, and\n// returns 0 if successful, or 1 otherwise.\n//\n// Did you notice that we didn't register the tests?  The\n// RUN_ALL_TESTS() macro magically knows about all the tests we\n// defined.  Isn't this convenient?\n"
  },
  {
    "path": "test/googletest/samples/sample2.cc",
    "content": "// Copyright 2005, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n// A sample program demonstrating using Google C++ testing framework.\n\n#include \"sample2.h\"\n\n#include <string.h>\n\n// Clones a 0-terminated C string, allocating memory using new.\nconst char* MyString::CloneCString(const char* a_c_string) {\n  if (a_c_string == nullptr) return nullptr;\n\n  const size_t len = strlen(a_c_string);\n  char* const clone = new char[ len + 1 ];\n  memcpy(clone, a_c_string, len + 1);\n\n  return clone;\n}\n\n// Sets the 0-terminated C string this MyString object\n// represents.\nvoid MyString::Set(const char* a_c_string) {\n  // Makes sure this works when c_string == c_string_\n  const char* const temp = MyString::CloneCString(a_c_string);\n  delete[] c_string_;\n  c_string_ = temp;\n}\n"
  },
  {
    "path": "test/googletest/samples/sample2.h",
    "content": "// Copyright 2005, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n// A sample program demonstrating using Google C++ testing framework.\n\n#ifndef GTEST_SAMPLES_SAMPLE2_H_\n#define GTEST_SAMPLES_SAMPLE2_H_\n\n#include <string.h>\n\n\n// A simple string class.\nclass MyString {\n private:\n  const char* c_string_;\n  const MyString& operator=(const MyString& rhs);\n\n public:\n  // Clones a 0-terminated C string, allocating memory using new.\n  static const char* CloneCString(const char* a_c_string);\n\n  ////////////////////////////////////////////////////////////\n  //\n  // C'tors\n\n  // The default c'tor constructs a NULL string.\n  MyString() : c_string_(nullptr) {}\n\n  // Constructs a MyString by cloning a 0-terminated C string.\n  explicit MyString(const char* a_c_string) : c_string_(nullptr) {\n    Set(a_c_string);\n  }\n\n  // Copy c'tor\n  MyString(const MyString& string) : c_string_(nullptr) {\n    Set(string.c_string_);\n  }\n\n  ////////////////////////////////////////////////////////////\n  //\n  // D'tor.  MyString is intended to be a final class, so the d'tor\n  // doesn't need to be virtual.\n  ~MyString() { delete[] c_string_; }\n\n  // Gets the 0-terminated C string this MyString object represents.\n  const char* c_string() const { return c_string_; }\n\n  size_t Length() const { return c_string_ == nullptr ? 
0 : strlen(c_string_); }\n\n  // Sets the 0-terminated C string this MyString object represents.\n  void Set(const char* c_string);\n};\n\n\n#endif  // GTEST_SAMPLES_SAMPLE2_H_\n"
  },
  {
    "path": "test/googletest/samples/sample2_unittest.cc",
    "content": "// Copyright 2005, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n// A sample program demonstrating using Google C++ testing framework.\n\n// This sample shows how to write a more complex unit test for a class\n// that has multiple member functions.\n//\n// Usually, it's a good idea to have one test for each method in your\n// class.  You don't have to do that exactly, but it helps to keep\n// your tests organized.  
You may also throw in additional tests as\n// needed.\n\n#include \"sample2.h\"\n#include \"gtest/gtest.h\"\nnamespace {\n// In this example, we test the MyString class (a simple string).\n\n// Tests the default c'tor.\nTEST(MyString, DefaultConstructor) {\n  const MyString s;\n\n  // Asserts that s.c_string() returns NULL.\n  //\n  // <TechnicalDetails>\n  //\n  // If we write NULL instead of\n  //\n  //   static_cast<const char *>(NULL)\n  //\n  // in this assertion, it will generate a warning on gcc 3.4.  The\n  // reason is that EXPECT_EQ needs to know the types of its\n  // arguments in order to print them when it fails.  Since NULL is\n  // #defined as 0, the compiler will use the formatter function for\n  // int to print it.  However, gcc thinks that NULL should be used as\n  // a pointer, not an int, and therefore complains.\n  //\n  // The root of the problem is C++'s lack of distinction between the\n  // integer number 0 and the null pointer constant.  Unfortunately,\n  // we have to live with this fact.\n  //\n  // </TechnicalDetails>\n  EXPECT_STREQ(nullptr, s.c_string());\n\n  EXPECT_EQ(0u, s.Length());\n}\n\nconst char kHelloString[] = \"Hello, world!\";\n\n// Tests the c'tor that accepts a C string.\nTEST(MyString, ConstructorFromCString) {\n  const MyString s(kHelloString);\n  EXPECT_EQ(0, strcmp(s.c_string(), kHelloString));\n  EXPECT_EQ(sizeof(kHelloString)/sizeof(kHelloString[0]) - 1,\n            s.Length());\n}\n\n// Tests the copy c'tor.\nTEST(MyString, CopyConstructor) {\n  const MyString s1(kHelloString);\n  const MyString s2 = s1;\n  EXPECT_EQ(0, strcmp(s2.c_string(), kHelloString));\n}\n\n// Tests the Set method.\nTEST(MyString, Set) {\n  MyString s;\n\n  s.Set(kHelloString);\n  EXPECT_EQ(0, strcmp(s.c_string(), kHelloString));\n\n  // Set should work when the input pointer is the same as the one\n  // already in the MyString object.\n  s.Set(s.c_string());\n  EXPECT_EQ(0, strcmp(s.c_string(), kHelloString));\n\n  // Can we set the 
MyString to NULL?\n  s.Set(nullptr);\n  EXPECT_STREQ(nullptr, s.c_string());\n}\n}  // namespace\n"
  },
  {
    "path": "test/googletest/samples/sample3-inl.h",
    "content": "// Copyright 2005, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n// A sample program demonstrating using Google C++ testing framework.\n\n#ifndef GTEST_SAMPLES_SAMPLE3_INL_H_\n#define GTEST_SAMPLES_SAMPLE3_INL_H_\n\n#include <stddef.h>\n\n\n// Queue is a simple queue implemented as a singled-linked list.\n//\n// The element type must support copy constructor.\ntemplate <typename E>  // E is the element type\nclass Queue;\n\n// QueueNode is a node in a Queue, which consists of an element of\n// type E and a pointer to the next node.\ntemplate <typename E>  // E is the element type\nclass QueueNode {\n  friend class Queue<E>;\n\n public:\n  // Gets the element in this node.\n  const E& element() const { return element_; }\n\n  // Gets the next node in the queue.\n  QueueNode* next() { return next_; }\n  const QueueNode* next() const { return next_; }\n\n private:\n  // Creates a node with a given element value.  The next pointer is\n  // set to NULL.\n  explicit QueueNode(const E& an_element)\n      : element_(an_element), next_(nullptr) {}\n\n  // We disable the default assignment operator and copy c'tor.\n  const QueueNode& operator = (const QueueNode&);\n  QueueNode(const QueueNode&);\n\n  E element_;\n  QueueNode* next_;\n};\n\ntemplate <typename E>  // E is the element type.\nclass Queue {\n public:\n  // Creates an empty queue.\n  Queue() : head_(nullptr), last_(nullptr), size_(0) {}\n\n  // D'tor.  Clears the queue.\n  ~Queue() { Clear(); }\n\n  // Clears the queue.\n  void Clear() {\n    if (size_ > 0) {\n      // 1. Deletes every node.\n      QueueNode<E>* node = head_;\n      QueueNode<E>* next = node->next();\n      for (; ;) {\n        delete node;\n        node = next;\n        if (node == nullptr) break;\n        next = node->next();\n      }\n\n      // 2. Resets the member variables.\n      head_ = last_ = nullptr;\n      size_ = 0;\n    }\n  }\n\n  // Gets the number of elements.\n  size_t Size() const { return size_; }\n\n  // Gets the first element of the queue, or NULL if the queue is empty.\n  QueueNode<E>* Head() { return head_; }\n  const QueueNode<E>* Head() const { return head_; }\n\n  // Gets the last element of the queue, or NULL if the queue is empty.\n  QueueNode<E>* Last() { return last_; }\n  const QueueNode<E>* Last() const { return last_; }\n\n  // Adds an element to the end of the queue.  A copy of the element is\n  // created using the copy constructor, and then stored in the queue.\n  // Changes made to the element in the queue doesn't affect the source\n  // object, and vice versa.\n  void Enqueue(const E& element) {\n    QueueNode<E>* new_node = new QueueNode<E>(element);\n\n    if (size_ == 0) {\n      head_ = last_ = new_node;\n      size_ = 1;\n    } else {\n      last_->next_ = new_node;\n      last_ = new_node;\n      size_++;\n    }\n  }\n\n  // Removes the head of the queue and returns it.  Returns NULL if\n  // the queue is empty.\n  E* Dequeue() {\n    if (size_ == 0) {\n      return nullptr;\n    }\n\n    const QueueNode<E>* const old_head = head_;\n    head_ = head_->next_;\n    size_--;\n    if (size_ == 0) {\n      last_ = nullptr;\n    }\n\n    E* element = new E(old_head->element());\n    delete old_head;\n\n    return element;\n  }\n\n  // Applies a function/functor on each element of the queue, and\n  // returns the result in a new queue.  The original queue is not\n  // affected.\n  template <typename F>\n  Queue* Map(F function) const {\n    Queue* new_queue = new Queue();\n    for (const QueueNode<E>* node = head_; node != nullptr;\n         node = node->next_) {\n      new_queue->Enqueue(function(node->element()));\n    }\n\n    return new_queue;\n  }\n\n private:\n  QueueNode<E>* head_;  // The first node of the queue.\n  QueueNode<E>* last_;  // The last node of the queue.\n  size_t size_;  // The number of elements in the queue.\n\n  // We disallow copying a queue.\n  Queue(const Queue&);\n  const Queue& operator = (const Queue&);\n};\n\n#endif  // GTEST_SAMPLES_SAMPLE3_INL_H_\n"
  },
  {
    "path": "test/googletest/samples/sample3_unittest.cc",
    "content": "// Copyright 2005, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n// A sample program demonstrating using Google C++ testing framework.\n\n// In this example, we use a more advanced feature of Google Test called\n// test fixture.\n//\n// A test fixture is a place to hold objects and functions shared by\n// all tests in a test case.  
Using a test fixture avoids duplicating\n// the test code necessary to initialize and cleanup those common\n// objects for each test.  It is also useful for defining sub-routines\n// that your tests need to invoke a lot.\n//\n// <TechnicalDetails>\n//\n// The tests share the test fixture in the sense of code sharing, not\n// data sharing.  Each test is given its own fresh copy of the\n// fixture.  You cannot expect the data modified by one test to be\n// passed on to another test, which is a bad idea.\n//\n// The reason for this design is that tests should be independent and\n// repeatable.  In particular, a test should not fail as the result of\n// another test's failure.  If one test depends on info produced by\n// another test, then the two tests should really be one big test.\n//\n// The macros for indicating the success/failure of a test\n// (EXPECT_TRUE, FAIL, etc) need to know what the current test is\n// (when Google Test prints the test result, it tells you which test\n// each failure belongs to).  Technically, these macros invoke a\n// member function of the Test class.  Therefore, you cannot use them\n// in a global function.  That's why you should put test sub-routines\n// in a test fixture.\n//\n// </TechnicalDetails>\n\n#include \"sample3-inl.h\"\n#include \"gtest/gtest.h\"\nnamespace {\n// To use a test fixture, derive a class from testing::Test.\nclass QueueTestSmpl3 : public testing::Test {\n protected:  // You should make the members protected s.t. they can be\n             // accessed from sub-classes.\n\n  // virtual void SetUp() will be called before each test is run.  You\n  // should define it if you need to initialize the variables.\n  // Otherwise, this can be skipped.\n  void SetUp() override {\n    q1_.Enqueue(1);\n    q2_.Enqueue(2);\n    q2_.Enqueue(3);\n  }\n\n  // virtual void TearDown() will be called after each test is run.\n  // You should define it if there is cleanup work to do.  
Otherwise,\n  // you don't have to provide it.\n  //\n  // virtual void TearDown() {\n  // }\n\n  // A helper function that some test uses.\n  static int Double(int n) {\n    return 2*n;\n  }\n\n  // A helper function for testing Queue::Map().\n  void MapTester(const Queue<int> * q) {\n    // Creates a new queue, where each element is twice as big as the\n    // corresponding one in q.\n    const Queue<int> * const new_q = q->Map(Double);\n\n    // Verifies that the new queue has the same size as q.\n    ASSERT_EQ(q->Size(), new_q->Size());\n\n    // Verifies the relationship between the elements of the two queues.\n    for (const QueueNode<int>*n1 = q->Head(), *n2 = new_q->Head();\n         n1 != nullptr; n1 = n1->next(), n2 = n2->next()) {\n      EXPECT_EQ(2 * n1->element(), n2->element());\n    }\n\n    delete new_q;\n  }\n\n  // Declares the variables your tests want to use.\n  Queue<int> q0_;\n  Queue<int> q1_;\n  Queue<int> q2_;\n};\n\n// When you have a test fixture, you define a test using TEST_F\n// instead of TEST.\n\n// Tests the default c'tor.\nTEST_F(QueueTestSmpl3, DefaultConstructor) {\n  // You can access data in the test fixture here.\n  EXPECT_EQ(0u, q0_.Size());\n}\n\n// Tests Dequeue().\nTEST_F(QueueTestSmpl3, Dequeue) {\n  int * n = q0_.Dequeue();\n  EXPECT_TRUE(n == nullptr);\n\n  n = q1_.Dequeue();\n  ASSERT_TRUE(n != nullptr);\n  EXPECT_EQ(1, *n);\n  EXPECT_EQ(0u, q1_.Size());\n  delete n;\n\n  n = q2_.Dequeue();\n  ASSERT_TRUE(n != nullptr);\n  EXPECT_EQ(2, *n);\n  EXPECT_EQ(1u, q2_.Size());\n  delete n;\n}\n\n// Tests the Queue::Map() function.\nTEST_F(QueueTestSmpl3, Map) {\n  MapTester(&q0_);\n  MapTester(&q1_);\n  MapTester(&q2_);\n}\n}  // namespace\n"
  },
  {
    "path": "test/googletest/samples/sample4.cc",
    "content": "// Copyright 2005, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n// A sample program demonstrating using Google C++ testing framework.\n\n#include <stdio.h>\n\n#include \"sample4.h\"\n\n// Returns the current counter value, and increments it.\nint Counter::Increment() {\n  return counter_++;\n}\n\n// Returns the current counter value, and decrements it.\n// counter can not be less than 0, return 0 in this case\nint Counter::Decrement() {\n  if (counter_ == 0) {\n    return counter_;\n  } else  {\n    return counter_--;\n  }\n}\n\n// Prints the current counter value to STDOUT.\nvoid Counter::Print() const {\n  printf(\"%d\", counter_);\n}\n"
  },
  {
    "path": "test/googletest/samples/sample4.h",
    "content": "// Copyright 2005, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n// A sample program demonstrating using Google C++ testing framework.\n#ifndef GTEST_SAMPLES_SAMPLE4_H_\n#define GTEST_SAMPLES_SAMPLE4_H_\n\n// A simple monotonic counter.\nclass Counter {\n private:\n  int counter_;\n\n public:\n  // Creates a counter that starts at 0.\n  Counter() : counter_(0) {}\n\n  // Returns the current counter value, and increments it.\n  int Increment();\n\n  // Returns the current counter value, and decrements it.\n  int Decrement();\n\n  // Prints the current counter value to STDOUT.\n  void Print() const;\n};\n\n#endif  // GTEST_SAMPLES_SAMPLE4_H_\n"
  },
  {
    "path": "test/googletest/samples/sample4_unittest.cc",
    "content": "// Copyright 2005, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\n#include \"sample4.h\"\n#include \"gtest/gtest.h\"\n\nnamespace {\n// Tests the Increment() method.\n\nTEST(Counter, Increment) {\n  Counter c;\n\n  // Test that counter 0 returns 0\n  EXPECT_EQ(0, c.Decrement());\n\n  // EXPECT_EQ() evaluates its arguments exactly once, so they\n  // can have side effects.\n\n  EXPECT_EQ(0, c.Increment());\n  EXPECT_EQ(1, c.Increment());\n  EXPECT_EQ(2, c.Increment());\n\n  EXPECT_EQ(3, c.Decrement());\n}\n\n}  // namespace\n"
  },
  {
    "path": "test/googletest/samples/sample5_unittest.cc",
    "content": "// Copyright 2005, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\n// This sample teaches how to reuse a test fixture in multiple test\n// cases by deriving sub-fixtures from it.\n//\n// When you define a test fixture, you specify the name of the test\n// case that will use this fixture.  
Therefore, a test fixture can\n// be used by only one test case.\n//\n// Sometimes, more than one test cases may want to use the same or\n// slightly different test fixtures.  For example, you may want to\n// make sure that all tests for a GUI library don't leak important\n// system resources like fonts and brushes.  In Google Test, you do\n// this by putting the shared logic in a super (as in \"super class\")\n// test fixture, and then have each test case use a fixture derived\n// from this super fixture.\n\n#include <limits.h>\n#include <time.h>\n#include \"gtest/gtest.h\"\n#include \"sample1.h\"\n#include \"sample3-inl.h\"\nnamespace {\n// In this sample, we want to ensure that every test finishes within\n// ~5 seconds.  If a test takes longer to run, we consider it a\n// failure.\n//\n// We put the code for timing a test in a test fixture called\n// \"QuickTest\".  QuickTest is intended to be the super fixture that\n// other fixtures derive from, therefore there is no test case with\n// the name \"QuickTest\".  This is OK.\n//\n// Later, we will derive multiple test fixtures from QuickTest.\nclass QuickTest : public testing::Test {\n protected:\n  // Remember that SetUp() is run immediately before a test starts.\n  // This is a good place to record the start time.\n  void SetUp() override { start_time_ = time(nullptr); }\n\n  // TearDown() is invoked immediately after a test finishes.  Here we\n  // check if the test was too slow.\n  void TearDown() override {\n    // Gets the time when the test finishes\n    const time_t end_time = time(nullptr);\n\n    // Asserts that the test took no more than ~5 seconds.  Did you\n    // know that you can use assertions in SetUp() and TearDown() as\n    // well?\n    EXPECT_TRUE(end_time - start_time_ <= 5) << \"The test took too long.\";\n  }\n\n  // The UTC time (in seconds) when the test starts\n  time_t start_time_;\n};\n\n\n// We derive a fixture named IntegerFunctionTest from the QuickTest\n// fixture.  
All tests using this fixture will be automatically\n// required to be quick.\nclass IntegerFunctionTest : public QuickTest {\n  // We don't need any more logic than already in the QuickTest fixture.\n  // Therefore the body is empty.\n};\n\n\n// Now we can write tests in the IntegerFunctionTest test case.\n\n// Tests Factorial()\nTEST_F(IntegerFunctionTest, Factorial) {\n  // Tests factorial of negative numbers.\n  EXPECT_EQ(1, Factorial(-5));\n  EXPECT_EQ(1, Factorial(-1));\n  EXPECT_GT(Factorial(-10), 0);\n\n  // Tests factorial of 0.\n  EXPECT_EQ(1, Factorial(0));\n\n  // Tests factorial of positive numbers.\n  EXPECT_EQ(1, Factorial(1));\n  EXPECT_EQ(2, Factorial(2));\n  EXPECT_EQ(6, Factorial(3));\n  EXPECT_EQ(40320, Factorial(8));\n}\n\n\n// Tests IsPrime()\nTEST_F(IntegerFunctionTest, IsPrime) {\n  // Tests negative input.\n  EXPECT_FALSE(IsPrime(-1));\n  EXPECT_FALSE(IsPrime(-2));\n  EXPECT_FALSE(IsPrime(INT_MIN));\n\n  // Tests some trivial cases.\n  EXPECT_FALSE(IsPrime(0));\n  EXPECT_FALSE(IsPrime(1));\n  EXPECT_TRUE(IsPrime(2));\n  EXPECT_TRUE(IsPrime(3));\n\n  // Tests positive input.\n  EXPECT_FALSE(IsPrime(4));\n  EXPECT_TRUE(IsPrime(5));\n  EXPECT_FALSE(IsPrime(6));\n  EXPECT_TRUE(IsPrime(23));\n}\n\n\n// The next test case (named \"QueueTest\") also needs to be quick, so\n// we derive another fixture from QuickTest.\n//\n// The QueueTest test fixture has some logic and shared objects in\n// addition to what's in QuickTest already.  We define the additional\n// stuff inside the body of the test fixture, as usual.\nclass QueueTest : public QuickTest {\n protected:\n  void SetUp() override {\n    // First, we need to set up the super fixture (QuickTest).\n    QuickTest::SetUp();\n\n    // Second, some additional setup for this fixture.\n    q1_.Enqueue(1);\n    q2_.Enqueue(2);\n    q2_.Enqueue(3);\n  }\n\n  // By default, TearDown() inherits the behavior of\n  // QuickTest::TearDown().  
As we have no additional cleaning work\n  // for QueueTest, we omit it here.\n  //\n  // virtual void TearDown() {\n  //   QuickTest::TearDown();\n  // }\n\n  Queue<int> q0_;\n  Queue<int> q1_;\n  Queue<int> q2_;\n};\n\n\n// Now, let's write tests using the QueueTest fixture.\n\n// Tests the default constructor.\nTEST_F(QueueTest, DefaultConstructor) {\n  EXPECT_EQ(0u, q0_.Size());\n}\n\n// Tests Dequeue().\nTEST_F(QueueTest, Dequeue) {\n  int* n = q0_.Dequeue();\n  EXPECT_TRUE(n == nullptr);\n\n  n = q1_.Dequeue();\n  EXPECT_TRUE(n != nullptr);\n  EXPECT_EQ(1, *n);\n  EXPECT_EQ(0u, q1_.Size());\n  delete n;\n\n  n = q2_.Dequeue();\n  EXPECT_TRUE(n != nullptr);\n  EXPECT_EQ(2, *n);\n  EXPECT_EQ(1u, q2_.Size());\n  delete n;\n}\n}  // namespace\n// If necessary, you can derive further test fixtures from a derived\n// fixture itself.  For example, you can derive another fixture from\n// QueueTest.  Google Test imposes no limit on how deep the hierarchy\n// can be.  In practice, however, you probably don't want it to be too\n// deep as to be confusing.\n"
  },
  {
    "path": "test/googletest/samples/sample6_unittest.cc",
    "content": "// Copyright 2008 Google Inc.\n// All Rights Reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\n// This sample shows how to test common properties of multiple\n// implementations of the same interface (aka interface tests).\n\n// The interface and its implementations are in this header.\n#include \"prime_tables.h\"\n\n#include \"gtest/gtest.h\"\nnamespace {\n// First, we define some factory functions for creating instances of\n// the implementations.  
You may be able to skip this step if all your\n// implementations can be constructed the same way.\n\ntemplate <class T>\nPrimeTable* CreatePrimeTable();\n\ntemplate <>\nPrimeTable* CreatePrimeTable<OnTheFlyPrimeTable>() {\n  return new OnTheFlyPrimeTable;\n}\n\ntemplate <>\nPrimeTable* CreatePrimeTable<PreCalculatedPrimeTable>() {\n  return new PreCalculatedPrimeTable(10000);\n}\n\n// Then we define a test fixture class template.\ntemplate <class T>\nclass PrimeTableTest : public testing::Test {\n protected:\n  // The ctor calls the factory function to create a prime table\n  // implemented by T.\n  PrimeTableTest() : table_(CreatePrimeTable<T>()) {}\n\n  ~PrimeTableTest() override { delete table_; }\n\n  // Note that we test an implementation via the base interface\n  // instead of the actual implementation class.  This is important\n  // for keeping the tests close to the real world scenario, where the\n  // implementation is invoked via the base interface.  It avoids\n  // got-yas where the implementation class has a method that shadows\n  // a method with the same name (but slightly different argument\n  // types) in the base interface, for example.\n  PrimeTable* const table_;\n};\n\n#if GTEST_HAS_TYPED_TEST\n\nusing testing::Types;\n\n// Google Test offers two ways for reusing tests for different types.\n// The first is called \"typed tests\".  You should use it if you\n// already know *all* the types you are gonna exercise when you write\n// the tests.\n\n// To write a typed test case, first use\n//\n//   TYPED_TEST_SUITE(TestCaseName, TypeList);\n//\n// to declare it and specify the type parameters.  
As with TEST_F,\n// TestCaseName must match the test fixture name.\n\n// The list of types we want to test.\ntypedef Types<OnTheFlyPrimeTable, PreCalculatedPrimeTable> Implementations;\n\nTYPED_TEST_SUITE(PrimeTableTest, Implementations);\n\n// Then use TYPED_TEST(TestCaseName, TestName) to define a typed test,\n// similar to TEST_F.\nTYPED_TEST(PrimeTableTest, ReturnsFalseForNonPrimes) {\n  // Inside the test body, you can refer to the type parameter by\n  // TypeParam, and refer to the fixture class by TestFixture.  We\n  // don't need them in this example.\n\n  // Since we are in the template world, C++ requires explicitly\n  // writing 'this->' when referring to members of the fixture class.\n  // This is something you have to learn to live with.\n  EXPECT_FALSE(this->table_->IsPrime(-5));\n  EXPECT_FALSE(this->table_->IsPrime(0));\n  EXPECT_FALSE(this->table_->IsPrime(1));\n  EXPECT_FALSE(this->table_->IsPrime(4));\n  EXPECT_FALSE(this->table_->IsPrime(6));\n  EXPECT_FALSE(this->table_->IsPrime(100));\n}\n\nTYPED_TEST(PrimeTableTest, ReturnsTrueForPrimes) {\n  EXPECT_TRUE(this->table_->IsPrime(2));\n  EXPECT_TRUE(this->table_->IsPrime(3));\n  EXPECT_TRUE(this->table_->IsPrime(5));\n  EXPECT_TRUE(this->table_->IsPrime(7));\n  EXPECT_TRUE(this->table_->IsPrime(11));\n  EXPECT_TRUE(this->table_->IsPrime(131));\n}\n\nTYPED_TEST(PrimeTableTest, CanGetNextPrime) {\n  EXPECT_EQ(2, this->table_->GetNextPrime(0));\n  EXPECT_EQ(3, this->table_->GetNextPrime(2));\n  EXPECT_EQ(5, this->table_->GetNextPrime(3));\n  EXPECT_EQ(7, this->table_->GetNextPrime(5));\n  EXPECT_EQ(11, this->table_->GetNextPrime(7));\n  EXPECT_EQ(131, this->table_->GetNextPrime(128));\n}\n\n// That's it!  Google Test will repeat each TYPED_TEST for each type\n// in the type list specified in TYPED_TEST_SUITE.  
Sit back and be\n// happy that you don't have to define them multiple times.\n\n#endif  // GTEST_HAS_TYPED_TEST\n\n#if GTEST_HAS_TYPED_TEST_P\n\nusing testing::Types;\n\n// Sometimes, however, you don't yet know all the types that you want\n// to test when you write the tests.  For example, if you are the\n// author of an interface and expect other people to implement it, you\n// might want to write a set of tests to make sure each implementation\n// conforms to some basic requirements, but you don't know what\n// implementations will be written in the future.\n//\n// How can you write the tests without committing to the type\n// parameters?  That's what \"type-parameterized tests\" can do for you.\n// It is a bit more involved than typed tests, but in return you get a\n// test pattern that can be reused in many contexts, which is a big\n// win.  Here's how you do it:\n\n// First, define a test fixture class template.  Here we just reuse\n// the PrimeTableTest fixture defined earlier:\n\ntemplate <class T>\nclass PrimeTableTest2 : public PrimeTableTest<T> {\n};\n\n// Then, declare the test case.  The argument is the name of the test\n// fixture, and also the name of the test case (as usual).  
The _P\n// suffix is for \"parameterized\" or \"pattern\".\nTYPED_TEST_SUITE_P(PrimeTableTest2);\n\n// Next, use TYPED_TEST_P(TestCaseName, TestName) to define a test,\n// similar to what you do with TEST_F.\nTYPED_TEST_P(PrimeTableTest2, ReturnsFalseForNonPrimes) {\n  EXPECT_FALSE(this->table_->IsPrime(-5));\n  EXPECT_FALSE(this->table_->IsPrime(0));\n  EXPECT_FALSE(this->table_->IsPrime(1));\n  EXPECT_FALSE(this->table_->IsPrime(4));\n  EXPECT_FALSE(this->table_->IsPrime(6));\n  EXPECT_FALSE(this->table_->IsPrime(100));\n}\n\nTYPED_TEST_P(PrimeTableTest2, ReturnsTrueForPrimes) {\n  EXPECT_TRUE(this->table_->IsPrime(2));\n  EXPECT_TRUE(this->table_->IsPrime(3));\n  EXPECT_TRUE(this->table_->IsPrime(5));\n  EXPECT_TRUE(this->table_->IsPrime(7));\n  EXPECT_TRUE(this->table_->IsPrime(11));\n  EXPECT_TRUE(this->table_->IsPrime(131));\n}\n\nTYPED_TEST_P(PrimeTableTest2, CanGetNextPrime) {\n  EXPECT_EQ(2, this->table_->GetNextPrime(0));\n  EXPECT_EQ(3, this->table_->GetNextPrime(2));\n  EXPECT_EQ(5, this->table_->GetNextPrime(3));\n  EXPECT_EQ(7, this->table_->GetNextPrime(5));\n  EXPECT_EQ(11, this->table_->GetNextPrime(7));\n  EXPECT_EQ(131, this->table_->GetNextPrime(128));\n}\n\n// Type-parameterized tests involve one extra step: you have to\n// enumerate the tests you defined:\nREGISTER_TYPED_TEST_SUITE_P(\n    PrimeTableTest2,  // The first argument is the test case name.\n    // The rest of the arguments are the test names.\n    ReturnsFalseForNonPrimes, ReturnsTrueForPrimes, CanGetNextPrime);\n\n// At this point the test pattern is done.  However, you don't have\n// any real test yet as you haven't said which types you want to run\n// the tests with.\n\n// To turn the abstract test pattern into real tests, you instantiate\n// it with a list of types.  Usually the test pattern will be defined\n// in a .h file, and anyone can #include and instantiate it.  You can\n// even instantiate it more than once in the same program.  
To tell\n// different instances apart, you give each of them a name, which will\n// become part of the test case name and can be used in test filters.\n\n// The list of types we want to test.  Note that it doesn't have to be\n// defined at the time we write the TYPED_TEST_P()s.\ntypedef Types<OnTheFlyPrimeTable, PreCalculatedPrimeTable>\n    PrimeTableImplementations;\nINSTANTIATE_TYPED_TEST_SUITE_P(OnTheFlyAndPreCalculated,    // Instance name\n                               PrimeTableTest2,             // Test case name\n                               PrimeTableImplementations);  // Type list\n\n#endif  // GTEST_HAS_TYPED_TEST_P\n}  // namespace\n"
  },
  {
    "path": "test/googletest/samples/sample7_unittest.cc",
    "content": "// Copyright 2008 Google Inc.\n// All Rights Reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\n// This sample shows how to test common properties of multiple\n// implementations of an interface (aka interface tests) using\n// value-parameterized tests. 
Each test in the test case has\n// a parameter that is an interface pointer to an implementation\n// tested.\n\n// The interface and its implementations are in this header.\n#include \"prime_tables.h\"\n\n#include \"gtest/gtest.h\"\nnamespace {\n\nusing ::testing::TestWithParam;\nusing ::testing::Values;\n\n// As a general rule, to prevent a test from affecting the tests that come\n// after it, you should create and destroy the tested objects for each test\n// instead of reusing them.  In this sample we will define a simple factory\n// function for PrimeTable objects.  We will instantiate objects in test's\n// SetUp() method and delete them in TearDown() method.\ntypedef PrimeTable* CreatePrimeTableFunc();\n\nPrimeTable* CreateOnTheFlyPrimeTable() {\n  return new OnTheFlyPrimeTable();\n}\n\ntemplate <size_t max_precalculated>\nPrimeTable* CreatePreCalculatedPrimeTable() {\n  return new PreCalculatedPrimeTable(max_precalculated);\n}\n\n// Inside the test body, fixture constructor, SetUp(), and TearDown() you\n// can refer to the test parameter by GetParam().  
In this case, the test\n// parameter is a factory function which we call in fixture's SetUp() to\n// create and store an instance of PrimeTable.\nclass PrimeTableTestSmpl7 : public TestWithParam<CreatePrimeTableFunc*> {\n public:\n  ~PrimeTableTestSmpl7() override { delete table_; }\n  void SetUp() override { table_ = (*GetParam())(); }\n  void TearDown() override {\n    delete table_;\n    table_ = nullptr;\n  }\n\n protected:\n  PrimeTable* table_;\n};\n\nTEST_P(PrimeTableTestSmpl7, ReturnsFalseForNonPrimes) {\n  EXPECT_FALSE(table_->IsPrime(-5));\n  EXPECT_FALSE(table_->IsPrime(0));\n  EXPECT_FALSE(table_->IsPrime(1));\n  EXPECT_FALSE(table_->IsPrime(4));\n  EXPECT_FALSE(table_->IsPrime(6));\n  EXPECT_FALSE(table_->IsPrime(100));\n}\n\nTEST_P(PrimeTableTestSmpl7, ReturnsTrueForPrimes) {\n  EXPECT_TRUE(table_->IsPrime(2));\n  EXPECT_TRUE(table_->IsPrime(3));\n  EXPECT_TRUE(table_->IsPrime(5));\n  EXPECT_TRUE(table_->IsPrime(7));\n  EXPECT_TRUE(table_->IsPrime(11));\n  EXPECT_TRUE(table_->IsPrime(131));\n}\n\nTEST_P(PrimeTableTestSmpl7, CanGetNextPrime) {\n  EXPECT_EQ(2, table_->GetNextPrime(0));\n  EXPECT_EQ(3, table_->GetNextPrime(2));\n  EXPECT_EQ(5, table_->GetNextPrime(3));\n  EXPECT_EQ(7, table_->GetNextPrime(5));\n  EXPECT_EQ(11, table_->GetNextPrime(7));\n  EXPECT_EQ(131, table_->GetNextPrime(128));\n}\n\n// In order to run value-parameterized tests, you need to instantiate them,\n// or bind them to a list of values which will be used as test parameters.\n// You can instantiate them in a different translation module, or even\n// instantiate them several times.\n//\n// Here, we instantiate our tests with a list of two PrimeTable object\n// factory functions:\nINSTANTIATE_TEST_SUITE_P(OnTheFlyAndPreCalculated, PrimeTableTestSmpl7,\n                         Values(&CreateOnTheFlyPrimeTable,\n                                &CreatePreCalculatedPrimeTable<1000>));\n\n}  // namespace\n"
  },
  {
    "path": "test/googletest/samples/sample8_unittest.cc",
    "content": "// Copyright 2008 Google Inc.\n// All Rights Reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\n// This sample shows how to test code relying on some global flag variables.\n// Combine() helps with generating all possible combinations of such flags,\n// and each test is given one combination as a parameter.\n\n// Use class definitions to test from this header.\n#include \"prime_tables.h\"\n\n#include \"gtest/gtest.h\"\nnamespace {\n\n// Suppose we want to introduce a new, improved implementation of PrimeTable\n// which combines speed of PrecalcPrimeTable and versatility of\n// OnTheFlyPrimeTable (see prime_tables.h). Inside it instantiates both\n// PrecalcPrimeTable and OnTheFlyPrimeTable and uses the one that is more\n// appropriate under the circumstances. But in low memory conditions, it can be\n// told to instantiate without PrecalcPrimeTable instance at all and use only\n// OnTheFlyPrimeTable.\nclass HybridPrimeTable : public PrimeTable {\n public:\n  HybridPrimeTable(bool force_on_the_fly, int max_precalculated)\n      : on_the_fly_impl_(new OnTheFlyPrimeTable),\n        precalc_impl_(force_on_the_fly\n                          ? 
nullptr\n                          : new PreCalculatedPrimeTable(max_precalculated)),\n        max_precalculated_(max_precalculated) {}\n  ~HybridPrimeTable() override {\n    delete on_the_fly_impl_;\n    delete precalc_impl_;\n  }\n\n  bool IsPrime(int n) const override {\n    if (precalc_impl_ != nullptr && n < max_precalculated_)\n      return precalc_impl_->IsPrime(n);\n    else\n      return on_the_fly_impl_->IsPrime(n);\n  }\n\n  int GetNextPrime(int p) const override {\n    int next_prime = -1;\n    if (precalc_impl_ != nullptr && p < max_precalculated_)\n      next_prime = precalc_impl_->GetNextPrime(p);\n\n    return next_prime != -1 ? next_prime : on_the_fly_impl_->GetNextPrime(p);\n  }\n\n private:\n  OnTheFlyPrimeTable* on_the_fly_impl_;\n  PreCalculatedPrimeTable* precalc_impl_;\n  int max_precalculated_;\n};\n\nusing ::testing::TestWithParam;\nusing ::testing::Bool;\nusing ::testing::Values;\nusing ::testing::Combine;\n\n// To test all code paths for HybridPrimeTable we must test it with numbers\n// both within and outside PreCalculatedPrimeTable's capacity and also with\n// PreCalculatedPrimeTable disabled. 
We do this by defining fixture which will\n// accept different combinations of parameters for instantiating a\n// HybridPrimeTable instance.\nclass PrimeTableTest : public TestWithParam< ::std::tuple<bool, int> > {\n protected:\n  void SetUp() override {\n    bool force_on_the_fly;\n    int max_precalculated;\n    std::tie(force_on_the_fly, max_precalculated) = GetParam();\n    table_ = new HybridPrimeTable(force_on_the_fly, max_precalculated);\n  }\n  void TearDown() override {\n    delete table_;\n    table_ = nullptr;\n  }\n  HybridPrimeTable* table_;\n};\n\nTEST_P(PrimeTableTest, ReturnsFalseForNonPrimes) {\n  // Inside the test body, you can refer to the test parameter by GetParam().\n  // In this case, the test parameter is a PrimeTable interface pointer which\n  // we can use directly.\n  // Please note that you can also save it in the fixture's SetUp() method\n  // or constructor and use saved copy in the tests.\n\n  EXPECT_FALSE(table_->IsPrime(-5));\n  EXPECT_FALSE(table_->IsPrime(0));\n  EXPECT_FALSE(table_->IsPrime(1));\n  EXPECT_FALSE(table_->IsPrime(4));\n  EXPECT_FALSE(table_->IsPrime(6));\n  EXPECT_FALSE(table_->IsPrime(100));\n}\n\nTEST_P(PrimeTableTest, ReturnsTrueForPrimes) {\n  EXPECT_TRUE(table_->IsPrime(2));\n  EXPECT_TRUE(table_->IsPrime(3));\n  EXPECT_TRUE(table_->IsPrime(5));\n  EXPECT_TRUE(table_->IsPrime(7));\n  EXPECT_TRUE(table_->IsPrime(11));\n  EXPECT_TRUE(table_->IsPrime(131));\n}\n\nTEST_P(PrimeTableTest, CanGetNextPrime) {\n  EXPECT_EQ(2, table_->GetNextPrime(0));\n  EXPECT_EQ(3, table_->GetNextPrime(2));\n  EXPECT_EQ(5, table_->GetNextPrime(3));\n  EXPECT_EQ(7, table_->GetNextPrime(5));\n  EXPECT_EQ(11, table_->GetNextPrime(7));\n  EXPECT_EQ(131, table_->GetNextPrime(128));\n}\n\n// In order to run value-parameterized tests, you need to instantiate them,\n// or bind them to a list of values which will be used as test parameters.\n// You can instantiate them in a different translation module, or even\n// instantiate them several 
times.\n//\n// Here, we instantiate our tests with a list of parameters. We must combine\n// all variations of the boolean flag suppressing PrecalcPrimeTable and some\n// meaningful values for tests. We choose a small value (1), and a value that\n// will put some of the tested numbers beyond the capability of the\n// PrecalcPrimeTable instance and some inside it (10). Combine will produce all\n// possible combinations.\nINSTANTIATE_TEST_SUITE_P(MeaningfulTestParameters, PrimeTableTest,\n                         Combine(Bool(), Values(1, 10)));\n\n}  // namespace\n"
  },
  {
    "path": "test/googletest/samples/sample9_unittest.cc",
    "content": "// Copyright 2009 Google Inc. All Rights Reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\n// This sample shows how to use Google Test listener API to implement\n// an alternative console output and how to use the UnitTest reflection API\n// to enumerate test cases and tests and to inspect their results.\n\n#include <stdio.h>\n\n#include \"gtest/gtest.h\"\n\nusing ::testing::EmptyTestEventListener;\nusing ::testing::InitGoogleTest;\nusing ::testing::Test;\nusing ::testing::TestCase;\nusing ::testing::TestEventListeners;\nusing ::testing::TestInfo;\nusing ::testing::TestPartResult;\nusing ::testing::UnitTest;\nnamespace {\n// Provides alternative output mode which produces minimal amount of\n// information about tests.\nclass TersePrinter : public EmptyTestEventListener {\n private:\n  // Called before any test activity starts.\n  void OnTestProgramStart(const UnitTest& /* unit_test */) override {}\n\n  // Called after all test activities have ended.\n  void OnTestProgramEnd(const UnitTest& unit_test) override {\n    fprintf(stdout, \"TEST %s\\n\", unit_test.Passed() ? 
\"PASSED\" : \"FAILED\");\n    fflush(stdout);\n  }\n\n  // Called before a test starts.\n  void OnTestStart(const TestInfo& test_info) override {\n    fprintf(stdout,\n            \"*** Test %s.%s starting.\\n\",\n            test_info.test_case_name(),\n            test_info.name());\n    fflush(stdout);\n  }\n\n  // Called after a failed assertion or a SUCCEED() invocation.\n  void OnTestPartResult(const TestPartResult& test_part_result) override {\n    fprintf(stdout,\n            \"%s in %s:%d\\n%s\\n\",\n            test_part_result.failed() ? \"*** Failure\" : \"Success\",\n            test_part_result.file_name(),\n            test_part_result.line_number(),\n            test_part_result.summary());\n    fflush(stdout);\n  }\n\n  // Called after a test ends.\n  void OnTestEnd(const TestInfo& test_info) override {\n    fprintf(stdout,\n            \"*** Test %s.%s ending.\\n\",\n            test_info.test_case_name(),\n            test_info.name());\n    fflush(stdout);\n  }\n};  // class TersePrinter\n\nTEST(CustomOutputTest, PrintsMessage) {\n  printf(\"Printing something from the test body...\\n\");\n}\n\nTEST(CustomOutputTest, Succeeds) {\n  SUCCEED() << \"SUCCEED() has been invoked from here\";\n}\n\nTEST(CustomOutputTest, Fails) {\n  EXPECT_EQ(1, 2)\n      << \"This test fails in order to demonstrate alternative failure messages\";\n}\n}  // namespace\n\nint main(int argc, char **argv) {\n  InitGoogleTest(&argc, argv);\n\n  bool terse_output = false;\n  if (argc > 1 && strcmp(argv[1], \"--terse_output\") == 0 )\n    terse_output = true;\n  else\n    printf(\"%s\\n\", \"Run this program with --terse_output to change the way \"\n           \"it prints its output.\");\n\n  UnitTest& unit_test = *UnitTest::GetInstance();\n\n  // If we are given the --terse_output command line flag, suppresses the\n  // standard output and attaches own result printer.\n  if (terse_output) {\n    TestEventListeners& listeners = unit_test.listeners();\n\n    // Removes the 
default console output listener from the list so it will\n    // not receive events from Google Test and won't print any output. Since\n    // this operation transfers ownership of the listener to the caller we\n    // have to delete it as well.\n    delete listeners.Release(listeners.default_result_printer());\n\n    // Adds the custom output listener to the list. It will now receive\n    // events from Google Test and print the alternative output. We don't\n    // have to worry about deleting it since Google Test assumes ownership\n    // over it after adding it to the list.\n    listeners.Append(new TersePrinter);\n  }\n  int ret_val = RUN_ALL_TESTS();\n\n  // This is an example of using the UnitTest reflection API to inspect test\n  // results. Here we discount failures from the tests we expected to fail.\n  int unexpectedly_failed_tests = 0;\n  for (int i = 0; i < unit_test.total_test_suite_count(); ++i) {\n    const testing::TestSuite& test_suite = *unit_test.GetTestSuite(i);\n    for (int j = 0; j < test_suite.total_test_count(); ++j) {\n      const TestInfo& test_info = *test_suite.GetTestInfo(j);\n      // Counts failed tests that were not meant to fail (those without\n      // 'Fails' in the name).\n      if (test_info.result()->Failed() &&\n          strcmp(test_info.name(), \"Fails\") != 0) {\n        unexpectedly_failed_tests++;\n      }\n    }\n  }\n\n  // Test that were meant to fail should not affect the test program outcome.\n  if (unexpectedly_failed_tests == 0)\n    ret_val = 0;\n\n  return ret_val;\n}\n"
  },
  {
    "path": "test/googletest/scripts/README.md",
    "content": "# Please Note:\n\nFiles in this directory are no longer supported by the maintainers. They\nrepresent mostly historical artifacts and are supported by the community only. There\nis no guarantee whatsoever that these scripts still work.\n"
  },
  {
    "path": "test/googletest/scripts/common.py",
    "content": "# Copyright 2013 Google Inc. All Rights Reserved.\n#\n# Redistribution and use in source and binary forms, with or without\n# modification, are permitted provided that the following conditions are\n# met:\n#\n#     * Redistributions of source code must retain the above copyright\n# notice, this list of conditions and the following disclaimer.\n#     * Redistributions in binary form must reproduce the above\n# copyright notice, this list of conditions and the following disclaimer\n# in the documentation and/or other materials provided with the\n# distribution.\n#     * Neither the name of Google Inc. nor the names of its\n# contributors may be used to endorse or promote products derived from\n# this software without specific prior written permission.\n#\n# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n# \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\"\"\"Shared utilities for writing scripts for Google Test/Mock.\"\"\"\n\n__author__ = 'wan@google.com (Zhanyong Wan)'\n\n\nimport os\nimport re\n\n\n# Matches the line from 'svn info .' output that describes what SVN\n# path the current local directory corresponds to.  
For example, in\n# a googletest SVN workspace's trunk/test directory, the output will be:\n#\n# URL: https://googletest.googlecode.com/svn/trunk/test\n_SVN_INFO_URL_RE = re.compile(r'^URL: https://(\\w+)\\.googlecode\\.com/svn(.*)')\n\n\ndef GetCommandOutput(command):\n  \"\"\"Runs the shell command and returns its stdout as a list of lines.\"\"\"\n\n  f = os.popen(command, 'r')\n  lines = [line.strip() for line in f.readlines()]\n  f.close()\n  return lines\n\n\ndef GetSvnInfo():\n  \"\"\"Returns the project name and the current SVN workspace's root path.\"\"\"\n\n  for line in GetCommandOutput('svn info .'):\n    m = _SVN_INFO_URL_RE.match(line)\n    if m:\n      project = m.group(1)  # googletest or googlemock\n      rel_path = m.group(2)\n      root = os.path.realpath(rel_path.count('/') * '../')\n      return project, root\n\n  return None, None\n\n\ndef GetSvnTrunk():\n  \"\"\"Returns the current SVN workspace's trunk root path.\"\"\"\n\n  _, root = GetSvnInfo()\n  return root + '/trunk' if root else None\n\n\ndef IsInGTestSvn():\n  project, _ = GetSvnInfo()\n  return project == 'googletest'\n\n\ndef IsInGMockSvn():\n  project, _ = GetSvnInfo()\n  return project == 'googlemock'\n"
  },
  {
    "path": "test/googletest/scripts/fuse_gtest_files.py",
    "content": "#!/usr/bin/env python\n#\n# Copyright 2009, Google Inc.\n# All rights reserved.\n#\n# Redistribution and use in source and binary forms, with or without\n# modification, are permitted provided that the following conditions are\n# met:\n#\n#     * Redistributions of source code must retain the above copyright\n# notice, this list of conditions and the following disclaimer.\n#     * Redistributions in binary form must reproduce the above\n# copyright notice, this list of conditions and the following disclaimer\n# in the documentation and/or other materials provided with the\n# distribution.\n#     * Neither the name of Google Inc. nor the names of its\n# contributors may be used to endorse or promote products derived from\n# this software without specific prior written permission.\n#\n# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n# \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\"\"\"fuse_gtest_files.py v0.2.0\nFuses Google Test source code into a .h file and a .cc file.\n\nSYNOPSIS\n       fuse_gtest_files.py [GTEST_ROOT_DIR] OUTPUT_DIR\n\n       Scans GTEST_ROOT_DIR for Google Test source code, and generates\n       two files: OUTPUT_DIR/gtest/gtest.h and OUTPUT_DIR/gtest/gtest-all.cc.\n       Then you can build your tests by adding OUTPUT_DIR to the include\n       search path and linking with OUTPUT_DIR/gtest/gtest-all.cc.  These\n       two files contain everything you need to use Google Test.  Hence\n       you can \"install\" Google Test by copying them to wherever you want.\n\n       GTEST_ROOT_DIR can be omitted and defaults to the parent\n       directory of the directory holding this script.\n\nEXAMPLES\n       ./fuse_gtest_files.py fused_gtest\n       ./fuse_gtest_files.py path/to/unpacked/gtest fused_gtest\n\nThis tool is experimental.  In particular, it assumes that there is no\nconditional inclusion of Google Test headers.  Please report any\nproblems to googletestframework@googlegroups.com.  
You can read\nhttps://github.com/google/googletest/blob/master/googletest/docs/advanced.md for\nmore information.\n\"\"\"\n\n__author__ = 'wan@google.com (Zhanyong Wan)'\n\nimport os\nimport re\ntry:\n  from sets import Set as set  # For Python 2.3 compatibility\nexcept ImportError:\n  pass\nimport sys\n\n# We assume that this file is in the scripts/ directory in the Google\n# Test root directory.\nDEFAULT_GTEST_ROOT_DIR = os.path.join(os.path.dirname(__file__), '..')\n\n# Regex for matching '#include \"gtest/...\"'.\nINCLUDE_GTEST_FILE_REGEX = re.compile(r'^\\s*#\\s*include\\s*\"(gtest/.+)\"')\n\n# Regex for matching '#include \"src/...\"'.\nINCLUDE_SRC_FILE_REGEX = re.compile(r'^\\s*#\\s*include\\s*\"(src/.+)\"')\n\n# Where to find the source seed files.\nGTEST_H_SEED = 'include/gtest/gtest.h'\nGTEST_SPI_H_SEED = 'include/gtest/gtest-spi.h'\nGTEST_ALL_CC_SEED = 'src/gtest-all.cc'\n\n# Where to put the generated files.\nGTEST_H_OUTPUT = 'gtest/gtest.h'\nGTEST_ALL_CC_OUTPUT = 'gtest/gtest-all.cc'\n\n\ndef VerifyFileExists(directory, relative_path):\n  \"\"\"Verifies that the given file exists; aborts on failure.\n\n  relative_path is the file path relative to the given directory.\n  \"\"\"\n\n  if not os.path.isfile(os.path.join(directory, relative_path)):\n    print('ERROR: Cannot find %s in directory %s.' 
% (relative_path,\n                                                      directory))\n    print('Please either specify a valid project root directory '\n          'or omit it on the command line.')\n    sys.exit(1)\n\n\ndef ValidateGTestRootDir(gtest_root):\n  \"\"\"Makes sure gtest_root points to a valid gtest root directory.\n\n  The function aborts the program on failure.\n  \"\"\"\n\n  VerifyFileExists(gtest_root, GTEST_H_SEED)\n  VerifyFileExists(gtest_root, GTEST_ALL_CC_SEED)\n\n\ndef VerifyOutputFile(output_dir, relative_path):\n  \"\"\"Verifies that the given output file path is valid.\n\n  relative_path is relative to the output_dir directory.\n  \"\"\"\n\n  # Makes sure the output file either doesn't exist or can be overwritten.\n  output_file = os.path.join(output_dir, relative_path)\n  if os.path.exists(output_file):\n    # TODO(wan@google.com): The following user-interaction doesn't\n    # work with automated processes.  We should provide a way for the\n    # Makefile to force overwriting the files.\n    print('%s already exists in directory %s - overwrite it? 
(y/N) ' %\n          (relative_path, output_dir))\n    answer = sys.stdin.readline().strip()\n    if answer not in ['y', 'Y']:\n      print('ABORTED.')\n      sys.exit(1)\n\n  # Makes sure the directory holding the output file exists; creates\n  # it and all its ancestors if necessary.\n  parent_directory = os.path.dirname(output_file)\n  if not os.path.isdir(parent_directory):\n    os.makedirs(parent_directory)\n\n\ndef ValidateOutputDir(output_dir):\n  \"\"\"Makes sure output_dir points to a valid output directory.\n\n  The function aborts the program on failure.\n  \"\"\"\n\n  VerifyOutputFile(output_dir, GTEST_H_OUTPUT)\n  VerifyOutputFile(output_dir, GTEST_ALL_CC_OUTPUT)\n\n\ndef FuseGTestH(gtest_root, output_dir):\n  \"\"\"Scans folder gtest_root to generate gtest/gtest.h in output_dir.\"\"\"\n\n  output_file = open(os.path.join(output_dir, GTEST_H_OUTPUT), 'w')\n  processed_files = set()  # Holds all gtest headers we've processed.\n\n  def ProcessFile(gtest_header_path):\n    \"\"\"Processes the given gtest header file.\"\"\"\n\n    # We don't process the same header twice.\n    if gtest_header_path in processed_files:\n      return\n\n    processed_files.add(gtest_header_path)\n\n    # Reads each line in the given gtest header.\n    for line in open(os.path.join(gtest_root, gtest_header_path), 'r'):\n      m = INCLUDE_GTEST_FILE_REGEX.match(line)\n      if m:\n        # It's '#include \"gtest/...\"' - let's process it recursively.\n        ProcessFile('include/' + m.group(1))\n      else:\n        # Otherwise we copy the line unchanged to the output file.\n        output_file.write(line)\n\n  ProcessFile(GTEST_H_SEED)\n  output_file.close()\n\n\ndef FuseGTestAllCcToFile(gtest_root, output_file):\n  \"\"\"Scans folder gtest_root to generate gtest/gtest-all.cc in output_file.\"\"\"\n\n  processed_files = set()\n\n  def ProcessFile(gtest_source_file):\n    \"\"\"Processes the given gtest source file.\"\"\"\n\n    # We don't process the same #included file 
twice.\n    if gtest_source_file in processed_files:\n      return\n\n    processed_files.add(gtest_source_file)\n\n    # Reads each line in the given gtest source file.\n    for line in open(os.path.join(gtest_root, gtest_source_file), 'r'):\n      m = INCLUDE_GTEST_FILE_REGEX.match(line)\n      if m:\n        if 'include/' + m.group(1) == GTEST_SPI_H_SEED:\n          # It's '#include \"gtest/gtest-spi.h\"'.  This file is not\n          # #included by \"gtest/gtest.h\", so we need to process it.\n          ProcessFile(GTEST_SPI_H_SEED)\n        else:\n          # It's '#include \"gtest/foo.h\"' where foo is not gtest-spi.\n          # We treat it as '#include \"gtest/gtest.h\"', as all other\n          # gtest headers are being fused into gtest.h and cannot be\n          # #included directly.\n\n          # There is no need to #include \"gtest/gtest.h\" more than once.\n          if not GTEST_H_SEED in processed_files:\n            processed_files.add(GTEST_H_SEED)\n            output_file.write('#include \"%s\"\\n' % (GTEST_H_OUTPUT,))\n      else:\n        m = INCLUDE_SRC_FILE_REGEX.match(line)\n        if m:\n          # It's '#include \"src/foo\"' - let's process it recursively.\n          ProcessFile(m.group(1))\n        else:\n          output_file.write(line)\n\n  ProcessFile(GTEST_ALL_CC_SEED)\n\n\ndef FuseGTestAllCc(gtest_root, output_dir):\n  \"\"\"Scans folder gtest_root to generate gtest/gtest-all.cc in output_dir.\"\"\"\n\n  output_file = open(os.path.join(output_dir, GTEST_ALL_CC_OUTPUT), 'w')\n  FuseGTestAllCcToFile(gtest_root, output_file)\n  output_file.close()\n\n\ndef FuseGTest(gtest_root, output_dir):\n  \"\"\"Fuses gtest.h and gtest-all.cc.\"\"\"\n\n  ValidateGTestRootDir(gtest_root)\n  ValidateOutputDir(output_dir)\n\n  FuseGTestH(gtest_root, output_dir)\n  FuseGTestAllCc(gtest_root, output_dir)\n\n\ndef main():\n  argc = len(sys.argv)\n  if argc == 2:\n    # fuse_gtest_files.py OUTPUT_DIR\n    FuseGTest(DEFAULT_GTEST_ROOT_DIR, sys.argv[1])\n 
 elif argc == 3:\n    # fuse_gtest_files.py GTEST_ROOT_DIR OUTPUT_DIR\n    FuseGTest(sys.argv[1], sys.argv[2])\n  else:\n    print(__doc__)\n    sys.exit(1)\n\n\nif __name__ == '__main__':\n  main()\n"
  },
  {
    "path": "test/googletest/scripts/gen_gtest_pred_impl.py",
    "content": "#!/usr/bin/env python\n#\n# Copyright 2006, Google Inc.\n# All rights reserved.\n#\n# Redistribution and use in source and binary forms, with or without\n# modification, are permitted provided that the following conditions are\n# met:\n#\n#     * Redistributions of source code must retain the above copyright\n# notice, this list of conditions and the following disclaimer.\n#     * Redistributions in binary form must reproduce the above\n# copyright notice, this list of conditions and the following disclaimer\n# in the documentation and/or other materials provided with the\n# distribution.\n#     * Neither the name of Google Inc. nor the names of its\n# contributors may be used to endorse or promote products derived from\n# this software without specific prior written permission.\n#\n# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n# \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\"\"\"gen_gtest_pred_impl.py v0.1\n\nGenerates the implementation of Google Test predicate assertions and\naccompanying tests.\n\nUsage:\n\n  gen_gtest_pred_impl.py MAX_ARITY\n\nwhere MAX_ARITY is a positive integer.\n\nThe command generates the implementation of up-to MAX_ARITY-ary\npredicate assertions, and writes it to file gtest_pred_impl.h in the\ndirectory where the script is.  
It also generates the accompanying\nunit test in file gtest_pred_impl_unittest.cc.\n\"\"\"\n\n__author__ = 'wan@google.com (Zhanyong Wan)'\n\nimport os\nimport sys\nimport time\n\n# Where this script is.\nSCRIPT_DIR = os.path.dirname(sys.argv[0])\n\n# Where to store the generated header.\nHEADER = os.path.join(SCRIPT_DIR, '../include/gtest/gtest_pred_impl.h')\n\n# Where to store the generated unit test.\nUNIT_TEST = os.path.join(SCRIPT_DIR, '../test/gtest_pred_impl_unittest.cc')\n\n\ndef HeaderPreamble(n):\n  \"\"\"Returns the preamble for the header file.\n\n  Args:\n    n:  the maximum arity of the predicate macros to be generated.\n  \"\"\"\n\n  # A map that defines the values used in the preamble template.\n  DEFS = {\n    'today' : time.strftime('%m/%d/%Y'),\n    'year' : time.strftime('%Y'),\n    'command' : '%s %s' % (os.path.basename(sys.argv[0]), n),\n    'n' : n\n    }\n\n  return (\n  \"\"\"// Copyright 2006, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n// This file is AUTOMATICALLY GENERATED on %(today)s by command\n// '%(command)s'.  DO NOT EDIT BY HAND!\n//\n// Implements a family of generic predicate assertion macros.\n// GOOGLETEST_CM0001 DO NOT DELETE\n\n\n#ifndef GTEST_INCLUDE_GTEST_GTEST_PRED_IMPL_H_\n#define GTEST_INCLUDE_GTEST_GTEST_PRED_IMPL_H_\n\n#include \"gtest/gtest.h\"\n\nnamespace testing {\n\n// This header implements a family of generic predicate assertion\n// macros:\n//\n//   ASSERT_PRED_FORMAT1(pred_format, v1)\n//   ASSERT_PRED_FORMAT2(pred_format, v1, v2)\n//   ...\n//\n// where pred_format is a function or functor that takes n (in the\n// case of ASSERT_PRED_FORMATn) values and their source expression\n// text, and returns a testing::AssertionResult.  See the definition\n// of ASSERT_EQ in gtest.h for an example.\n//\n// If you don't care about formatting, you can use the more\n// restrictive version:\n//\n//   ASSERT_PRED1(pred, v1)\n//   ASSERT_PRED2(pred, v1, v2)\n//   ...\n//\n// where pred is an n-ary function or functor that returns bool,\n// and the values v1, v2, ..., must support the << operator for\n// streaming to std::ostream.\n//\n// We also define the EXPECT_* variations.\n//\n// For now we only support predicates whose arity is at most %(n)s.\n// Please email googletestframework@googlegroups.com if you need\n// support for higher arities.\n\n// GTEST_ASSERT_ is the basic statement to which all of the assertions\n// in this file reduce.  
Don't use this in your code.\n\n#define GTEST_ASSERT_(expression, on_failure) \\\\\n  GTEST_AMBIGUOUS_ELSE_BLOCKER_ \\\\\n  if (const ::testing::AssertionResult gtest_ar = (expression)) \\\\\n    ; \\\\\n  else \\\\\n    on_failure(gtest_ar.failure_message())\n\"\"\" % DEFS)\n\n\ndef Arity(n):\n  \"\"\"Returns the English name of the given arity.\"\"\"\n\n  if n < 0:\n    return None\n  elif n <= 3:\n    return ['nullary', 'unary', 'binary', 'ternary'][n]\n  else:\n    return '%s-ary' % n\n\n\ndef Title(word):\n  \"\"\"Returns the given word in title case.  The difference between\n  this and string's title() method is that Title('4-ary') is '4-ary'\n  while '4-ary'.title() is '4-Ary'.\"\"\"\n\n  return word[0].upper() + word[1:]\n\n\ndef OneTo(n):\n  \"\"\"Returns the list [1, 2, 3, ..., n].\"\"\"\n\n  return range(1, n + 1)\n\n\ndef Iter(n, format, sep=''):\n  \"\"\"Given a positive integer n, a format string that contains 0 or\n  more '%s' format specs, and optionally a separator string, returns\n  the join of n strings, each formatted with the format string on an\n  iterator ranged from 1 to n.\n\n  Example:\n\n  Iter(3, 'v%s', sep=', ') returns 'v1, v2, v3'.\n  \"\"\"\n\n  # How many '%s' specs are in format?\n  spec_count = len(format.split('%s')) - 1\n  return sep.join([format % (spec_count * (i,)) for i in OneTo(n)])\n\n\ndef ImplementationForArity(n):\n  \"\"\"Returns the implementation of n-ary predicate assertions.\"\"\"\n\n  # A map the defines the values used in the implementation template.\n  DEFS = {\n    'n' : str(n),\n    'vs' : Iter(n, 'v%s', sep=', '),\n    'vts' : Iter(n, '#v%s', sep=', '),\n    'arity' : Arity(n),\n    'Arity' : Title(Arity(n))\n    }\n\n  impl = \"\"\"\n\n// Helper function for implementing {EXPECT|ASSERT}_PRED%(n)s.  
Don't use\n// this in your code.\ntemplate <typename Pred\"\"\" % DEFS\n\n  impl += Iter(n, \"\"\",\n          typename T%s\"\"\")\n\n  impl += \"\"\">\nAssertionResult AssertPred%(n)sHelper(const char* pred_text\"\"\" % DEFS\n\n  impl += Iter(n, \"\"\",\n                                  const char* e%s\"\"\")\n\n  impl += \"\"\",\n                                  Pred pred\"\"\"\n\n  impl += Iter(n, \"\"\",\n                                  const T%s& v%s\"\"\")\n\n  impl += \"\"\") {\n  if (pred(%(vs)s)) return AssertionSuccess();\n\n\"\"\" % DEFS\n\n  impl += '  return AssertionFailure() << pred_text << \"(\"'\n\n  impl += Iter(n, \"\"\"\n                            << e%s\"\"\", sep=' << \", \"')\n\n  impl += ' << \") evaluates to false, where\"'\n\n  impl += Iter(\n      n, \"\"\"\n      << \"\\\\n\" << e%s << \" evaluates to \" << ::testing::PrintToString(v%s)\"\"\"\n  )\n\n  impl += \"\"\";\n}\n\n// Internal macro for implementing {EXPECT|ASSERT}_PRED_FORMAT%(n)s.\n// Don't use this in your code.\n#define GTEST_PRED_FORMAT%(n)s_(pred_format, %(vs)s, on_failure)\\\\\n  GTEST_ASSERT_(pred_format(%(vts)s, %(vs)s), \\\\\n                on_failure)\n\n// Internal macro for implementing {EXPECT|ASSERT}_PRED%(n)s.  
Don't use\n// this in your code.\n#define GTEST_PRED%(n)s_(pred, %(vs)s, on_failure)\\\\\n  GTEST_ASSERT_(::testing::AssertPred%(n)sHelper(#pred\"\"\" % DEFS\n\n  impl += Iter(n, \"\"\", \\\\\n                                             #v%s\"\"\")\n\n  impl += \"\"\", \\\\\n                                             pred\"\"\"\n\n  impl += Iter(n, \"\"\", \\\\\n                                             v%s\"\"\")\n\n  impl += \"\"\"), on_failure)\n\n// %(Arity)s predicate assertion macros.\n#define EXPECT_PRED_FORMAT%(n)s(pred_format, %(vs)s) \\\\\n  GTEST_PRED_FORMAT%(n)s_(pred_format, %(vs)s, GTEST_NONFATAL_FAILURE_)\n#define EXPECT_PRED%(n)s(pred, %(vs)s) \\\\\n  GTEST_PRED%(n)s_(pred, %(vs)s, GTEST_NONFATAL_FAILURE_)\n#define ASSERT_PRED_FORMAT%(n)s(pred_format, %(vs)s) \\\\\n  GTEST_PRED_FORMAT%(n)s_(pred_format, %(vs)s, GTEST_FATAL_FAILURE_)\n#define ASSERT_PRED%(n)s(pred, %(vs)s) \\\\\n  GTEST_PRED%(n)s_(pred, %(vs)s, GTEST_FATAL_FAILURE_)\n\n\"\"\" % DEFS\n\n  return impl\n\n\ndef HeaderPostamble():\n  \"\"\"Returns the postamble for the header file.\"\"\"\n\n  return \"\"\"\n\n}  // namespace testing\n\n#endif  // GTEST_INCLUDE_GTEST_GTEST_PRED_IMPL_H_\n\"\"\"\n\n\ndef GenerateFile(path, content):\n  \"\"\"Given a file path and a content string\n     overwrites it with the given content.\n  \"\"\"\n  print 'Updating file %s . . .' % path\n  f = file(path, 'w+')\n  print >>f, content,\n  f.close()\n\n  print 'File %s has been updated.' 
% path\n\n\ndef GenerateHeader(n):\n  \"\"\"Given the maximum arity n, updates the header file that implements\n  the predicate assertions.\n  \"\"\"\n  GenerateFile(HEADER,\n               HeaderPreamble(n)\n               + ''.join([ImplementationForArity(i) for i in OneTo(n)])\n               + HeaderPostamble())\n\n\ndef UnitTestPreamble():\n  \"\"\"Returns the preamble for the unit test file.\"\"\"\n\n  # A map that defines the values used in the preamble template.\n  DEFS = {\n    'today' : time.strftime('%m/%d/%Y'),\n    'year' : time.strftime('%Y'),\n    'command' : '%s %s' % (os.path.basename(sys.argv[0]), sys.argv[1]),\n    }\n\n  return (\n  \"\"\"// Copyright 2006, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n// This file is AUTOMATICALLY GENERATED on %(today)s by command\n// '%(command)s'.  DO NOT EDIT BY HAND!\n\n// Regression test for gtest_pred_impl.h\n//\n// This file is generated by a script and quite long.  If you intend to\n// learn how Google Test works by reading its unit tests, read\n// gtest_unittest.cc instead.\n//\n// This is intended as a regression test for the Google Test predicate\n// assertions.  We compile it as part of the gtest_unittest target\n// only to keep the implementation tidy and compact, as it is quite\n// involved to set up the stage for testing Google Test using Google\n// Test itself.\n//\n// Currently, gtest_unittest takes ~11 seconds to run in the testing\n// daemon.  In the future, if it grows too large and needs much more\n// time to finish, we should consider separating this file into a\n// stand-alone regression test.\n\n#include <iostream>\n\n#include \"gtest/gtest.h\"\n#include \"gtest/gtest-spi.h\"\n\n// A user-defined data type.\nstruct Bool {\n  explicit Bool(int val) : value(val != 0) {}\n\n  bool operator>(int n) const { return value > Bool(n).value; }\n\n  Bool operator+(const Bool& rhs) const { return Bool(value + rhs.value); }\n\n  bool operator==(const Bool& rhs) const { return value == rhs.value; }\n\n  bool value;\n};\n\n// Enables Bool to be used in assertions.\nstd::ostream& operator<<(std::ostream& os, const Bool& x) {\n  return os << (x.value ? 
\"true\" : \"false\");\n}\n\n\"\"\" % DEFS)\n\n\ndef TestsForArity(n):\n  \"\"\"Returns the tests for n-ary predicate assertions.\"\"\"\n\n  # A map that defines the values used in the template for the tests.\n  DEFS = {\n    'n' : n,\n    'es' : Iter(n, 'e%s', sep=', '),\n    'vs' : Iter(n, 'v%s', sep=', '),\n    'vts' : Iter(n, '#v%s', sep=', '),\n    'tvs' : Iter(n, 'T%s v%s', sep=', '),\n    'int_vs' : Iter(n, 'int v%s', sep=', '),\n    'Bool_vs' : Iter(n, 'Bool v%s', sep=', '),\n    'types' : Iter(n, 'typename T%s', sep=', '),\n    'v_sum' : Iter(n, 'v%s', sep=' + '),\n    'arity' : Arity(n),\n    'Arity' : Title(Arity(n)),\n    }\n\n  tests = (\n  \"\"\"// Sample functions/functors for testing %(arity)s predicate assertions.\n\n// A %(arity)s predicate function.\ntemplate <%(types)s>\nbool PredFunction%(n)s(%(tvs)s) {\n  return %(v_sum)s > 0;\n}\n\n// The following two functions are needed to circumvent a bug in\n// gcc 2.95.3, which sometimes has problem with the above template\n// function.\nbool PredFunction%(n)sInt(%(int_vs)s) {\n  return %(v_sum)s > 0;\n}\nbool PredFunction%(n)sBool(%(Bool_vs)s) {\n  return %(v_sum)s > 0;\n}\n\"\"\" % DEFS)\n\n  tests += \"\"\"\n// A %(arity)s predicate functor.\nstruct PredFunctor%(n)s {\n  template <%(types)s>\n  bool operator()(\"\"\" % DEFS\n\n  tests += Iter(n, 'const T%s& v%s', sep=\"\"\",\n                  \"\"\")\n\n  tests += \"\"\") {\n    return %(v_sum)s > 0;\n  }\n};\n\"\"\" % DEFS\n\n  tests += \"\"\"\n// A %(arity)s predicate-formatter function.\ntemplate <%(types)s>\ntesting::AssertionResult PredFormatFunction%(n)s(\"\"\" % DEFS\n\n  tests += Iter(n, 'const char* e%s', sep=\"\"\",\n                                             \"\"\")\n\n  tests += Iter(n, \"\"\",\n                                             const T%s& v%s\"\"\")\n\n  tests += \"\"\") {\n  if (PredFunction%(n)s(%(vs)s))\n    return testing::AssertionSuccess();\n\n  return testing::AssertionFailure()\n      << \"\"\" % DEFS\n\n  tests += 
Iter(n, 'e%s', sep=' << \" + \" << ')\n\n  tests += \"\"\"\n      << \" is expected to be positive, but evaluates to \"\n      << %(v_sum)s << \".\";\n}\n\"\"\" % DEFS\n\n  tests += \"\"\"\n// A %(arity)s predicate-formatter functor.\nstruct PredFormatFunctor%(n)s {\n  template <%(types)s>\n  testing::AssertionResult operator()(\"\"\" % DEFS\n\n  tests += Iter(n, 'const char* e%s', sep=\"\"\",\n                                      \"\"\")\n\n  tests += Iter(n, \"\"\",\n                                      const T%s& v%s\"\"\")\n\n  tests += \"\"\") const {\n    return PredFormatFunction%(n)s(%(es)s, %(vs)s);\n  }\n};\n\"\"\" % DEFS\n\n  tests += \"\"\"\n// Tests for {EXPECT|ASSERT}_PRED_FORMAT%(n)s.\n\nclass Predicate%(n)sTest : public testing::Test {\n protected:\n  void SetUp() override {\n    expected_to_finish_ = true;\n    finished_ = false;\"\"\" % DEFS\n\n  tests += \"\"\"\n    \"\"\" + Iter(n, 'n%s_ = ') + \"\"\"0;\n  }\n\"\"\"\n\n  tests += \"\"\"\n  void TearDown() override {\n    // Verifies that each of the predicate's arguments was evaluated\n    // exactly once.\"\"\"\n\n  tests += ''.join([\"\"\"\n    EXPECT_EQ(1, n%s_) <<\n        \"The predicate assertion didn't evaluate argument %s \"\n        \"exactly once.\";\"\"\" % (i, i + 1) for i in OneTo(n)])\n\n  tests += \"\"\"\n\n    // Verifies that the control flow in the test function is expected.\n    if (expected_to_finish_ && !finished_) {\n      FAIL() << \"The predicate assertion unexpactedly aborted the test.\";\n    } else if (!expected_to_finish_ && finished_) {\n      FAIL() << \"The failed predicate assertion didn't abort the test \"\n                \"as expected.\";\n    }\n  }\n\n  // true if and only if the test function is expected to run to finish.\n  static bool expected_to_finish_;\n\n  // true if and only if the test function did run to finish.\n  static bool finished_;\n\"\"\" % DEFS\n\n  tests += Iter(n, \"\"\"\n  static int n%s_;\"\"\")\n\n  tests += \"\"\"\n};\n\nbool 
Predicate%(n)sTest::expected_to_finish_;\nbool Predicate%(n)sTest::finished_;\n\"\"\" % DEFS\n\n  tests += Iter(n, \"\"\"int Predicate%%(n)sTest::n%s_;\n\"\"\") % DEFS\n\n  tests += \"\"\"\ntypedef Predicate%(n)sTest EXPECT_PRED_FORMAT%(n)sTest;\ntypedef Predicate%(n)sTest ASSERT_PRED_FORMAT%(n)sTest;\ntypedef Predicate%(n)sTest EXPECT_PRED%(n)sTest;\ntypedef Predicate%(n)sTest ASSERT_PRED%(n)sTest;\n\"\"\" % DEFS\n\n  def GenTest(use_format, use_assert, expect_failure,\n              use_functor, use_user_type):\n    \"\"\"Returns the test for a predicate assertion macro.\n\n    Args:\n      use_format:     true if and only if the assertion is a *_PRED_FORMAT*.\n      use_assert:     true if and only if the assertion is a ASSERT_*.\n      expect_failure: true if and only if the assertion is expected to fail.\n      use_functor:    true if and only if the first argument of the assertion is\n                      a functor (as opposed to a function)\n      use_user_type:  true if and only if the predicate functor/function takes\n                      argument(s) of a user-defined type.\n\n    Example:\n\n      GenTest(1, 0, 0, 1, 0) returns a test that tests the behavior\n      of a successful EXPECT_PRED_FORMATn() that takes a functor\n      whose arguments have built-in types.\"\"\"\n\n    if use_assert:\n      assrt = 'ASSERT'  # 'assert' is reserved, so we cannot use\n      # that identifier here.\n    else:\n      assrt = 'EXPECT'\n\n    assertion = assrt + '_PRED'\n\n    if use_format:\n      pred_format = 'PredFormat'\n      assertion += '_FORMAT'\n    else:\n      pred_format = 'Pred'\n\n    assertion += '%(n)s' % DEFS\n\n    if use_functor:\n      pred_format_type = 'functor'\n      pred_format += 'Functor%(n)s()'\n    else:\n      pred_format_type = 'function'\n      pred_format += 'Function%(n)s'\n      if not use_format:\n        if use_user_type:\n          pred_format += 'Bool'\n        else:\n          pred_format += 'Int'\n\n    test_name = 
pred_format_type.title()\n\n    if use_user_type:\n      arg_type = 'user-defined type (Bool)'\n      test_name += 'OnUserType'\n      if expect_failure:\n        arg = 'Bool(n%s_++)'\n      else:\n        arg = 'Bool(++n%s_)'\n    else:\n      arg_type = 'built-in type (int)'\n      test_name += 'OnBuiltInType'\n      if expect_failure:\n        arg = 'n%s_++'\n      else:\n        arg = '++n%s_'\n\n    if expect_failure:\n      successful_or_failed = 'failed'\n      expected_or_not = 'expected.'\n      test_name +=  'Failure'\n    else:\n      successful_or_failed = 'successful'\n      expected_or_not = 'UNEXPECTED!'\n      test_name +=  'Success'\n\n    # A map that defines the values used in the test template.\n    defs = DEFS.copy()\n    defs.update({\n      'assert' : assrt,\n      'assertion' : assertion,\n      'test_name' : test_name,\n      'pf_type' : pred_format_type,\n      'pf' : pred_format,\n      'arg_type' : arg_type,\n      'arg' : arg,\n      'successful' : successful_or_failed,\n      'expected' : expected_or_not,\n      })\n\n    test = \"\"\"\n// Tests a %(successful)s %(assertion)s where the\n// predicate-formatter is a %(pf_type)s on a %(arg_type)s.\nTEST_F(%(assertion)sTest, %(test_name)s) {\"\"\" % defs\n\n    indent = (len(assertion) + 3)*' '\n    extra_indent = ''\n\n    if expect_failure:\n      extra_indent = '  '\n      if use_assert:\n        test += \"\"\"\n  expected_to_finish_ = false;\n  EXPECT_FATAL_FAILURE({  // NOLINT\"\"\"\n      else:\n        test += \"\"\"\n  EXPECT_NONFATAL_FAILURE({  // NOLINT\"\"\"\n\n    test += '\\n' + extra_indent + \"\"\"  %(assertion)s(%(pf)s\"\"\" % defs\n\n    test = test % defs\n    test += Iter(n, ',\\n' + indent + extra_indent + '%(arg)s' % defs)\n    test += ');\\n' + extra_indent + '  finished_ = true;\\n'\n\n    if expect_failure:\n      test += '  }, \"\");\\n'\n\n    test += '}\\n'\n    return test\n\n  # Generates tests for all 2**6 = 64 combinations.\n  tests += 
''.join([GenTest(use_format, use_assert, expect_failure,\n                            use_functor, use_user_type)\n                    for use_format in [0, 1]\n                    for use_assert in [0, 1]\n                    for expect_failure in [0, 1]\n                    for use_functor in [0, 1]\n                    for use_user_type in [0, 1]\n                    ])\n\n  return tests\n\n\ndef UnitTestPostamble():\n  \"\"\"Returns the postamble for the tests.\"\"\"\n\n  return ''\n\n\ndef GenerateUnitTest(n):\n  \"\"\"Returns the tests for up-to n-ary predicate assertions.\"\"\"\n\n  GenerateFile(UNIT_TEST,\n               UnitTestPreamble()\n               + ''.join([TestsForArity(i) for i in OneTo(n)])\n               + UnitTestPostamble())\n\n\ndef _Main():\n  \"\"\"The entry point of the script.  Generates the header file and its\n  unit test.\"\"\"\n\n  if len(sys.argv) != 2:\n    print __doc__\n    print 'Author: ' + __author__\n    sys.exit(1)\n\n  n = int(sys.argv[1])\n  GenerateHeader(n)\n  GenerateUnitTest(n)\n\n\nif __name__ == '__main__':\n  _Main()\n"
  },
  {
    "path": "test/googletest/scripts/gtest-config.in",
    "content": "#!/bin/sh\n\n# These variables are automatically filled in by the configure script.\nname=\"@PACKAGE_TARNAME@\"\nversion=\"@PACKAGE_VERSION@\"\n\nshow_usage()\n{\n  echo \"Usage: gtest-config [OPTIONS...]\"\n}\n\nshow_help()\n{\n  show_usage\n  cat <<\\EOF\n\nThe `gtest-config' script provides access to the necessary compile and linking\nflags to connect with Google C++ Testing Framework, both in a build prior to\ninstallation, and on the system proper after installation. The installation\noverrides may be issued in combination with any other queries, but will only\naffect installation queries if called on a built but not installed gtest. The\ninstallation queries may not be issued with any other types of queries, and\nonly one installation query may be made at a time. The version queries and\ncompiler flag queries may be combined as desired but not mixed. Different\nversion queries are always combined with logical \"and\" semantics, and only the\nlast of any particular query is used while all previous ones ignored. All\nversions must be specified as a sequence of numbers separated by periods.\nCompiler flag queries output the union of the sets of flags when combined.\n\n Examples:\n  gtest-config --min-version=1.0 || echo \"Insufficient Google Test version.\"\n\n  g++ $(gtest-config --cppflags --cxxflags) -o foo.o -c foo.cpp\n  g++ $(gtest-config --ldflags --libs) -o foo foo.o\n\n  # When using a built but not installed Google Test:\n  g++ $(../../my_gtest_build/scripts/gtest-config ...) ...\n\n  # When using an installed Google Test, but with installation overrides:\n  export GTEST_PREFIX=\"/opt\"\n  g++ $(gtest-config --libdir=\"/opt/lib64\" ...) 
...\n\n Help:\n  --usage                    brief usage information\n  --help                     display this help message\n\n Installation Overrides:\n  --prefix=<dir>             overrides the installation prefix\n  --exec-prefix=<dir>        overrides the executable installation prefix\n  --libdir=<dir>             overrides the library installation prefix\n  --includedir=<dir>         overrides the header file installation prefix\n\n Installation Queries:\n  --prefix                   installation prefix\n  --exec-prefix              executable installation prefix\n  --libdir                   library installation directory\n  --includedir               header file installation directory\n  --version                  the version of the Google Test installation\n\n Version Queries:\n  --min-version=VERSION      return 0 if the version is at least VERSION\n  --exact-version=VERSION    return 0 if the version is exactly VERSION\n  --max-version=VERSION      return 0 if the version is at most VERSION\n\n Compilation Flag Queries:\n  --cppflags                 compile flags specific to the C-like preprocessors\n  --cxxflags                 compile flags appropriate for C++ programs\n  --ldflags                  linker flags\n  --libs                     libraries for linking\n\nEOF\n}\n\n# This function bounds our version with a min and a max. It uses some clever\n# POSIX-compliant variable expansion to portably do all the work in the shell\n# and avoid any dependency on a particular \"sed\" or \"awk\" implementation.\n# Notable is that it will only ever compare the first 3 components of versions.\n# Further components will be cleanly stripped off. All versions must be\n# unadorned, so \"v1.0\" will *not* work. The minimum version must be in $1, and\n# the max in $2. 
TODO(chandlerc@google.com): If this ever breaks, we should\n# investigate expanding this via autom4te from AS_VERSION_COMPARE rather than\n# continuing to maintain our own shell version.\ncheck_versions()\n{\n  major_version=${version%%.*}\n  minor_version=\"0\"\n  point_version=\"0\"\n  if test \"${version#*.}\" != \"${version}\"; then\n    minor_version=${version#*.}\n    minor_version=${minor_version%%.*}\n  fi\n  if test \"${version#*.*.}\" != \"${version}\"; then\n    point_version=${version#*.*.}\n    point_version=${point_version%%.*}\n  fi\n\n  min_version=\"$1\"\n  min_major_version=${min_version%%.*}\n  min_minor_version=\"0\"\n  min_point_version=\"0\"\n  if test \"${min_version#*.}\" != \"${min_version}\"; then\n    min_minor_version=${min_version#*.}\n    min_minor_version=${min_minor_version%%.*}\n  fi\n  if test \"${min_version#*.*.}\" != \"${min_version}\"; then\n    min_point_version=${min_version#*.*.}\n    min_point_version=${min_point_version%%.*}\n  fi\n\n  max_version=\"$2\"\n  max_major_version=${max_version%%.*}\n  max_minor_version=\"0\"\n  max_point_version=\"0\"\n  if test \"${max_version#*.}\" != \"${max_version}\"; then\n    max_minor_version=${max_version#*.}\n    max_minor_version=${max_minor_version%%.*}\n  fi\n  if test \"${max_version#*.*.}\" != \"${max_version}\"; then\n    max_point_version=${max_version#*.*.}\n    max_point_version=${max_point_version%%.*}\n  fi\n\n  test $(($major_version)) -lt $(($min_major_version)) && exit 1\n  if test $(($major_version)) -eq $(($min_major_version)); then\n    test $(($minor_version)) -lt $(($min_minor_version)) && exit 1\n    if test $(($minor_version)) -eq $(($min_minor_version)); then\n      test $(($point_version)) -lt $(($min_point_version)) && exit 1\n    fi\n  fi\n\n  test $(($major_version)) -gt $(($max_major_version)) && exit 1\n  if test $(($major_version)) -eq $(($max_major_version)); then\n    test $(($minor_version)) -gt $(($max_minor_version)) && exit 1\n    if test 
$(($minor_version)) -eq $(($max_minor_version)); then\n      test $(($point_version)) -gt $(($max_point_version)) && exit 1\n    fi\n  fi\n\n  exit 0\n}\n\n# Show the usage line when no arguments are specified.\nif test $# -eq 0; then\n  show_usage\n  exit 1\nfi\n\nwhile test $# -gt 0; do\n  case $1 in\n    --usage)          show_usage;         exit 0;;\n    --help)           show_help;          exit 0;;\n\n    # Installation overrides\n    --prefix=*)       GTEST_PREFIX=${1#--prefix=};;\n    --exec-prefix=*)  GTEST_EXEC_PREFIX=${1#--exec-prefix=};;\n    --libdir=*)       GTEST_LIBDIR=${1#--libdir=};;\n    --includedir=*)   GTEST_INCLUDEDIR=${1#--includedir=};;\n\n    # Installation queries\n    --prefix|--exec-prefix|--libdir|--includedir|--version)\n      if test -n \"${do_query}\"; then\n        show_usage\n        exit 1\n      fi\n      do_query=${1#--}\n      ;;\n\n    # Version checking\n    --min-version=*)\n      do_check_versions=yes\n      min_version=${1#--min-version=}\n      ;;\n    --max-version=*)\n      do_check_versions=yes\n      max_version=${1#--max-version=}\n      ;;\n    --exact-version=*)\n      do_check_versions=yes\n      exact_version=${1#--exact-version=}\n      ;;\n\n    # Compiler flag output\n    --cppflags)       echo_cppflags=yes;;\n    --cxxflags)       echo_cxxflags=yes;;\n    --ldflags)        echo_ldflags=yes;;\n    --libs)           echo_libs=yes;;\n\n    # Everything else is an error\n    *)                show_usage;         exit 1;;\n  esac\n  shift\ndone\n\n# These have defaults filled in by the configure script but can also be\n# overridden by environment variables or command line parameters.\nprefix=\"${GTEST_PREFIX:-@prefix@}\"\nexec_prefix=\"${GTEST_EXEC_PREFIX:-@exec_prefix@}\"\nlibdir=\"${GTEST_LIBDIR:-@libdir@}\"\nincludedir=\"${GTEST_INCLUDEDIR:-@includedir@}\"\n\n# We try and detect if our binary is not located at its installed location. 
If\n# it's not, we provide variables pointing to the source and build tree rather\n# than to the install tree. This allows building against a just-built gtest\n# rather than an installed gtest.\nbindir=\"@bindir@\"\nthis_relative_bindir=`dirname $0`\nthis_bindir=`cd ${this_relative_bindir}; pwd -P`\nif test \"${this_bindir}\" = \"${this_bindir%${bindir}}\"; then\n  # The path to the script doesn't end in the bindir sequence from Autoconf,\n  # assume that we are in a build tree.\n  build_dir=`dirname ${this_bindir}`\n  src_dir=`cd ${this_bindir}; cd @top_srcdir@; pwd -P`\n\n  # TODO(chandlerc@google.com): This is a dangerous dependency on libtool, we\n  # should work to remove it, and/or remove libtool altogether, replacing it\n  # with direct references to the library and a link path.\n  gtest_libs=\"${build_dir}/lib/libgtest.la @PTHREAD_CFLAGS@ @PTHREAD_LIBS@\"\n  gtest_ldflags=\"\"\n\n  # We provide hooks to include from either the source or build dir, where the\n  # build dir is always preferred. This will potentially allow us to write\n  # build rules for generated headers and have them automatically be preferred\n  # over provided versions.\n  gtest_cppflags=\"-I${build_dir}/include -I${src_dir}/include\"\n  gtest_cxxflags=\"@PTHREAD_CFLAGS@\"\nelse\n  # We're using an installed gtest, although it may be staged under some\n  # prefix. 
Assume (as our own libraries do) that we can resolve the prefix,\n  # and are present in the dynamic link paths.\n  gtest_ldflags=\"-L${libdir}\"\n  gtest_libs=\"-l${name} @PTHREAD_CFLAGS@ @PTHREAD_LIBS@\"\n  gtest_cppflags=\"-I${includedir}\"\n  gtest_cxxflags=\"@PTHREAD_CFLAGS@\"\nfi\n\n# Do an installation query if requested.\nif test -n \"$do_query\"; then\n  case $do_query in\n    prefix)           echo $prefix;       exit 0;;\n    exec-prefix)      echo $exec_prefix;  exit 0;;\n    libdir)           echo $libdir;       exit 0;;\n    includedir)       echo $includedir;   exit 0;;\n    version)          echo $version;      exit 0;;\n    *)                show_usage;         exit 1;;\n  esac\nfi\n\n# Do a version check if requested.\nif test \"$do_check_versions\" = \"yes\"; then\n  # Make sure we didn't receive a bad combination of parameters.\n  test \"$echo_cppflags\" = \"yes\" && show_usage && exit 1\n  test \"$echo_cxxflags\" = \"yes\" && show_usage && exit 1\n  test \"$echo_ldflags\" = \"yes\"  && show_usage && exit 1\n  test \"$echo_libs\" = \"yes\"     && show_usage && exit 1\n\n  if test \"$exact_version\" != \"\"; then\n    check_versions $exact_version $exact_version\n    # unreachable\n  else\n    check_versions ${min_version:-0.0.0} ${max_version:-9999.9999.9999}\n    # unreachable\n  fi\nfi\n\n# Do the output in the correct order so that these can be used in-line of\n# a compiler invocation.\noutput=\"\"\ntest \"$echo_cppflags\" = \"yes\" && output=\"$output $gtest_cppflags\"\ntest \"$echo_cxxflags\" = \"yes\" && output=\"$output $gtest_cxxflags\"\ntest \"$echo_ldflags\" = \"yes\"  && output=\"$output $gtest_ldflags\"\ntest \"$echo_libs\" = \"yes\"     && output=\"$output $gtest_libs\"\necho $output\n\nexit 0\n"
  },
  {
    "path": "test/googletest/scripts/release_docs.py",
    "content": "#!/usr/bin/env python\n#\n# Copyright 2013 Google Inc. All Rights Reserved.\n#\n# Redistribution and use in source and binary forms, with or without\n# modification, are permitted provided that the following conditions are\n# met:\n#\n#     * Redistributions of source code must retain the above copyright\n# notice, this list of conditions and the following disclaimer.\n#     * Redistributions in binary form must reproduce the above\n# copyright notice, this list of conditions and the following disclaimer\n# in the documentation and/or other materials provided with the\n# distribution.\n#     * Neither the name of Google Inc. nor the names of its\n# contributors may be used to endorse or promote products derived from\n# this software without specific prior written permission.\n#\n# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n# \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\"\"\"Script for branching Google Test/Mock wiki pages for a new version.\n\nSYNOPSIS\n       release_docs.py NEW_RELEASE_VERSION\n\n       Google Test and Google Mock's external user documentation is in\n       interlinked wiki files.  
When we release a new version of\n       Google Test or Google Mock, we need to branch the wiki files\n       such that users of a specific version of Google Test/Mock can\n       look up documenation relevant for that version.  This script\n       automates that process by:\n\n         - branching the current wiki pages (which document the\n           behavior of the SVN trunk head) to pages for the specified\n           version (e.g. branching FAQ.wiki to V2_6_FAQ.wiki when\n           NEW_RELEASE_VERSION is 2.6);\n         - updating the links in the branched files to point to the branched\n           version (e.g. a link in V2_6_FAQ.wiki that pointed to\n           Primer.wiki#Anchor will now point to V2_6_Primer.wiki#Anchor).\n\n       NOTE: NEW_RELEASE_VERSION must be a NEW version number for\n       which the wiki pages don't yet exist; otherwise you'll get SVN\n       errors like \"svn: Path 'V1_7_PumpManual.wiki' is not a\n       directory\" when running the script.\n\nEXAMPLE\n       $ cd PATH/TO/GTEST_SVN_WORKSPACE/trunk\n       $ scripts/release_docs.py 2.6  # create wiki pages for v2.6\n       $ svn status                   # verify the file list\n       $ svn diff                     # verify the file contents\n       $ svn commit -m \"release wiki pages for v2.6\"\n\"\"\"\n\n__author__ = 'wan@google.com (Zhanyong Wan)'\n\nimport os\nimport re\nimport sys\n\nimport common\n\n\n# Wiki pages that shouldn't be branched for every gtest/gmock release.\nGTEST_UNVERSIONED_WIKIS = ['DevGuide.wiki']\nGMOCK_UNVERSIONED_WIKIS = [\n    'DesignDoc.wiki',\n    'DevGuide.wiki',\n    'KnownIssues.wiki'\n    ]\n\n\ndef DropWikiSuffix(wiki_filename):\n  \"\"\"Removes the .wiki suffix (if any) from the given filename.\"\"\"\n\n  return (wiki_filename[:-len('.wiki')] if wiki_filename.endswith('.wiki')\n          else wiki_filename)\n\n\nclass WikiBrancher(object):\n  \"\"\"Branches ...\"\"\"\n\n  def __init__(self, dot_version):\n    self.project, svn_root_path = 
common.GetSvnInfo()\n    if self.project not in ('googletest', 'googlemock'):\n      sys.exit('This script must be run in a gtest or gmock SVN workspace.')\n    self.wiki_dir = svn_root_path + '/wiki'\n    # Turn '2.6' to 'V2_6_'.\n    self.version_prefix = 'V' + dot_version.replace('.', '_') + '_'\n    self.files_to_branch = self.GetFilesToBranch()\n    page_names = [DropWikiSuffix(f) for f in self.files_to_branch]\n    # A link to Foo.wiki is in one of the following forms:\n    #   [Foo words]\n    #   [Foo#Anchor words]\n    #   [http://code.google.com/.../wiki/Foo words]\n    #   [http://code.google.com/.../wiki/Foo#Anchor words]\n    # We want to replace 'Foo' with 'V2_6_Foo' in the above cases.\n    self.search_for_re = re.compile(\n        # This regex matches either\n        #   [Foo\n        # or\n        #   /wiki/Foo\n        # followed by a space or a #, where Foo is the name of an\n        # unversioned wiki page.\n        r'(\\[|/wiki/)(%s)([ #])' % '|'.join(page_names))\n    self.replace_with = r'\\1%s\\2\\3' % (self.version_prefix,)\n\n  def GetFilesToBranch(self):\n    \"\"\"Returns a list of .wiki file names that need to be branched.\"\"\"\n\n    unversioned_wikis = (GTEST_UNVERSIONED_WIKIS if self.project == 'googletest'\n                         else GMOCK_UNVERSIONED_WIKIS)\n    return [f for f in os.listdir(self.wiki_dir)\n            if (f.endswith('.wiki') and\n                not re.match(r'^V\\d', f) and  # Excluded versioned .wiki files.\n                f not in unversioned_wikis)]\n\n  def BranchFiles(self):\n    \"\"\"Branches the .wiki files needed to be branched.\"\"\"\n\n    print 'Branching %d .wiki files:' % (len(self.files_to_branch),)\n    os.chdir(self.wiki_dir)\n    for f in self.files_to_branch:\n      command = 'svn cp %s %s%s' % (f, self.version_prefix, f)\n      print command\n      os.system(command)\n\n  def UpdateLinksInBranchedFiles(self):\n\n    for f in self.files_to_branch:\n      source_file = 
os.path.join(self.wiki_dir, f)\n      versioned_file = os.path.join(self.wiki_dir, self.version_prefix + f)\n      print 'Updating links in %s.' % (versioned_file,)\n      text = file(source_file, 'r').read()\n      new_text = self.search_for_re.sub(self.replace_with, text)\n      file(versioned_file, 'w').write(new_text)\n\n\ndef main():\n  if len(sys.argv) != 2:\n    sys.exit(__doc__)\n\n  brancher = WikiBrancher(sys.argv[1])\n  brancher.BranchFiles()\n  brancher.UpdateLinksInBranchedFiles()\n\n\nif __name__ == '__main__':\n  main()\n"
  },
  {
    "path": "test/googletest/scripts/run_with_path.py",
    "content": "#!/usr/bin/env python\n#\n# Copyright 2010 Google Inc. All Rights Reserved.\n\n\"\"\"Runs program specified in the command line with the substituted PATH.\n\n   This script is needed for to support building under Pulse which is unable\n   to override the existing PATH variable.\n\"\"\"\n\nimport os\nimport subprocess\nimport sys\n\nSUBST_PATH_ENV_VAR_NAME = \"SUBST_PATH\"\n\ndef main():\n  if SUBST_PATH_ENV_VAR_NAME in os.environ:\n    os.environ[\"PATH\"] = os.environ[SUBST_PATH_ENV_VAR_NAME]\n\n  exit_code = subprocess.Popen(sys.argv[1:]).wait()\n\n  # exit_code is negative (-signal) if the process has been terminated by\n  # a signal. Returning negative exit code is not portable and so we return\n  # 100 instead.\n  if exit_code < 0:\n    exit_code = 100\n\n  sys.exit(exit_code)\n\nif __name__ == \"__main__\":\n  main()\n"
  },
  {
    "path": "test/googletest/scripts/test/Makefile",
    "content": "# A Makefile for fusing Google Test and building a sample test against it.\n#\n# SYNOPSIS:\n#\n#   make [all]  - makes everything.\n#   make TARGET - makes the given target.\n#   make check  - makes everything and runs the built sample test.\n#   make clean  - removes all files generated by make.\n\n# Points to the root of fused Google Test, relative to where this file is.\nFUSED_GTEST_DIR = output\n\n# Paths to the fused gtest files.\nFUSED_GTEST_H = $(FUSED_GTEST_DIR)/gtest/gtest.h\nFUSED_GTEST_ALL_CC = $(FUSED_GTEST_DIR)/gtest/gtest-all.cc\n\n# Where to find the sample test.\nSAMPLE_DIR = ../../samples\n\n# Where to find gtest_main.cc.\nGTEST_MAIN_CC = ../../src/gtest_main.cc\n\n# Flags passed to the preprocessor.\n# We have no idea here whether pthreads is available in the system, so\n# disable its use.\nCPPFLAGS += -I$(FUSED_GTEST_DIR) -DGTEST_HAS_PTHREAD=0\n\n# Flags passed to the C++ compiler.\nCXXFLAGS += -g\n\nall : sample1_unittest\n\ncheck : all\n\t./sample1_unittest\n\nclean :\n\trm -rf $(FUSED_GTEST_DIR) sample1_unittest *.o\n\n$(FUSED_GTEST_H) :\n\t../fuse_gtest_files.py $(FUSED_GTEST_DIR)\n\n$(FUSED_GTEST_ALL_CC) :\n\t../fuse_gtest_files.py $(FUSED_GTEST_DIR)\n\ngtest-all.o : $(FUSED_GTEST_H) $(FUSED_GTEST_ALL_CC)\n\t$(CXX) $(CPPFLAGS) $(CXXFLAGS) -c $(FUSED_GTEST_DIR)/gtest/gtest-all.cc\n\ngtest_main.o : $(FUSED_GTEST_H) $(GTEST_MAIN_CC)\n\t$(CXX) $(CPPFLAGS) $(CXXFLAGS) -c $(GTEST_MAIN_CC)\n\nsample1.o : $(SAMPLE_DIR)/sample1.cc $(SAMPLE_DIR)/sample1.h\n\t$(CXX) $(CPPFLAGS) $(CXXFLAGS) -c $(SAMPLE_DIR)/sample1.cc\n\nsample1_unittest.o : $(SAMPLE_DIR)/sample1_unittest.cc \\\n                     $(SAMPLE_DIR)/sample1.h $(FUSED_GTEST_H)\n\t$(CXX) $(CPPFLAGS) $(CXXFLAGS) -c $(SAMPLE_DIR)/sample1_unittest.cc\n\nsample1_unittest : sample1.o sample1_unittest.o gtest-all.o gtest_main.o\n\t$(CXX) $(CPPFLAGS) $(CXXFLAGS) $^ -o $@\n"
  },
  {
    "path": "test/googletest/scripts/upload.py",
    "content": "#!/usr/bin/env python\n#\n# Copyright 2007, Google Inc.\n# All rights reserved.\n#\n# Redistribution and use in source and binary forms, with or without\n# modification, are permitted provided that the following conditions are\n# met:\n#\n#     * Redistributions of source code must retain the above copyright\n# notice, this list of conditions and the following disclaimer.\n#     * Redistributions in binary form must reproduce the above\n# copyright notice, this list of conditions and the following disclaimer\n# in the documentation and/or other materials provided with the\n# distribution.\n#     * Neither the name of Google Inc. nor the names of its\n# contributors may be used to endorse or promote products derived from\n# this software without specific prior written permission.\n#\n# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n# \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\"\"\"Tool for uploading diffs from a version control system to the codereview app.\n\nUsage summary: upload.py [options] [-- diff_options]\n\nDiff options are passed to the diff command of the underlying system.\n\nSupported version control systems:\n  Git\n  Mercurial\n  Subversion\n\nIt is important for Git/Mercurial users to specify a tree/node/branch to diff\nagainst by using the '--rev' option.\n\"\"\"\n# This code is derived from appcfg.py in the App Engine SDK (open source),\n# and from ASPN recipe #146306.\n\nimport cookielib\nimport getpass\nimport logging\nimport md5\nimport mimetypes\nimport optparse\nimport os\nimport re\nimport socket\nimport subprocess\nimport sys\nimport urllib\nimport urllib2\nimport urlparse\n\ntry:\n  import readline\nexcept ImportError:\n  pass\n\n# The logging verbosity:\n#  0: Errors only.\n#  1: Status messages.\n#  2: Info logs.\n#  3: Debug logs.\nverbosity = 1\n\n# Max size of patch or base file.\nMAX_UPLOAD_SIZE = 900 * 1024\n\n\ndef GetEmail(prompt):\n  \"\"\"Prompts the user for their email address and returns it.\n\n  The last used email address is saved to a file and offered up as a suggestion\n  to the user. If the user presses enter without typing in anything the last\n  used email address is used. 
If the user enters a new address, it is saved\n  for next time we prompt.\n\n  \"\"\"\n  last_email_file_name = os.path.expanduser(\"~/.last_codereview_email_address\")\n  last_email = \"\"\n  if os.path.exists(last_email_file_name):\n    try:\n      last_email_file = open(last_email_file_name, \"r\")\n      last_email = last_email_file.readline().strip(\"\\n\")\n      last_email_file.close()\n      prompt += \" [%s]\" % last_email\n    except IOError, e:\n      pass\n  email = raw_input(prompt + \": \").strip()\n  if email:\n    try:\n      last_email_file = open(last_email_file_name, \"w\")\n      last_email_file.write(email)\n      last_email_file.close()\n    except IOError, e:\n      pass\n  else:\n    email = last_email\n  return email\n\n\ndef StatusUpdate(msg):\n  \"\"\"Print a status message to stdout.\n\n  If 'verbosity' is greater than 0, print the message.\n\n  Args:\n    msg: The string to print.\n  \"\"\"\n  if verbosity > 0:\n    print msg\n\n\ndef ErrorExit(msg):\n  \"\"\"Print an error message to stderr and exit.\"\"\"\n  print >>sys.stderr, msg\n  sys.exit(1)\n\n\nclass ClientLoginError(urllib2.HTTPError):\n  \"\"\"Raised to indicate there was an error authenticating with ClientLogin.\"\"\"\n\n  def __init__(self, url, code, msg, headers, args):\n    urllib2.HTTPError.__init__(self, url, code, msg, headers, None)\n    self.args = args\n    self.reason = args[\"Error\"]\n\n\nclass AbstractRpcServer(object):\n  \"\"\"Provides a common interface for a simple RPC server.\"\"\"\n\n  def __init__(self, host, auth_function, host_override=None, extra_headers={},\n               save_cookies=False):\n    \"\"\"Creates a new HttpRpcServer.\n\n    Args:\n      host: The host to send requests to.\n      auth_function: A function that takes no arguments and returns an\n        (email, password) tuple when called. 
Will be called if authentication\n        is required.\n      host_override: The host header to send to the server (defaults to host).\n      extra_headers: A dict of extra headers to append to every request.\n      save_cookies: If True, save the authentication cookies to local disk.\n        If False, use an in-memory cookiejar instead.  Subclasses must\n        implement this functionality.  Defaults to False.\n    \"\"\"\n    self.host = host\n    self.host_override = host_override\n    self.auth_function = auth_function\n    self.authenticated = False\n    self.extra_headers = extra_headers\n    self.save_cookies = save_cookies\n    self.opener = self._GetOpener()\n    if self.host_override:\n      logging.info(\"Server: %s; Host: %s\", self.host, self.host_override)\n    else:\n      logging.info(\"Server: %s\", self.host)\n\n  def _GetOpener(self):\n    \"\"\"Returns an OpenerDirector for making HTTP requests.\n\n    Returns:\n      A urllib2.OpenerDirector object.\n    \"\"\"\n    raise NotImplementedError()\n\n  def _CreateRequest(self, url, data=None):\n    \"\"\"Creates a new urllib request.\"\"\"\n    logging.debug(\"Creating request for: '%s' with payload:\\n%s\", url, data)\n    req = urllib2.Request(url, data=data)\n    if self.host_override:\n      req.add_header(\"Host\", self.host_override)\n    for key, value in self.extra_headers.iteritems():\n      req.add_header(key, value)\n    return req\n\n  def _GetAuthToken(self, email, password):\n    \"\"\"Uses ClientLogin to authenticate the user, returning an auth token.\n\n    Args:\n      email:    The user's email address\n      password: The user's password\n\n    Raises:\n      ClientLoginError: If there was an error authenticating with ClientLogin.\n      HTTPError: If there was some other form of HTTP error.\n\n    Returns:\n      The authentication token returned by ClientLogin.\n    \"\"\"\n    account_type = \"GOOGLE\"\n    if self.host.endswith(\".google.com\"):\n      # Needed for use 
inside Google.\n      account_type = \"HOSTED\"\n    req = self._CreateRequest(\n        url=\"https://www.google.com/accounts/ClientLogin\",\n        data=urllib.urlencode({\n            \"Email\": email,\n            \"Passwd\": password,\n            \"service\": \"ah\",\n            \"source\": \"rietveld-codereview-upload\",\n            \"accountType\": account_type,\n        }),\n    )\n    try:\n      response = self.opener.open(req)\n      response_body = response.read()\n      response_dict = dict(x.split(\"=\")\n                           for x in response_body.split(\"\\n\") if x)\n      return response_dict[\"Auth\"]\n    except urllib2.HTTPError, e:\n      if e.code == 403:\n        body = e.read()\n        response_dict = dict(x.split(\"=\", 1) for x in body.split(\"\\n\") if x)\n        raise ClientLoginError(req.get_full_url(), e.code, e.msg,\n                               e.headers, response_dict)\n      else:\n        raise\n\n  def _GetAuthCookie(self, auth_token):\n    \"\"\"Fetches authentication cookies for an authentication token.\n\n    Args:\n      auth_token: The authentication token returned by ClientLogin.\n\n    Raises:\n      HTTPError: If there was an error fetching the authentication cookies.\n    \"\"\"\n    # This is a dummy value to allow us to identify when we're successful.\n    continue_location = \"http://localhost/\"\n    args = {\"continue\": continue_location, \"auth\": auth_token}\n    req = self._CreateRequest(\"http://%s/_ah/login?%s\" %\n                              (self.host, urllib.urlencode(args)))\n    try:\n      response = self.opener.open(req)\n    except urllib2.HTTPError, e:\n      response = e\n    if (response.code != 302 or\n        response.info()[\"location\"] != continue_location):\n      raise urllib2.HTTPError(req.get_full_url(), response.code, response.msg,\n                              response.headers, response.fp)\n    self.authenticated = True\n\n  def _Authenticate(self):\n    
\"\"\"Authenticates the user.\n\n    The authentication process works as follows:\n     1) We get a username and password from the user\n     2) We use ClientLogin to obtain an AUTH token for the user\n        (see https://developers.google.com/identity/protocols/AuthForInstalledApps).\n     3) We pass the auth token to /_ah/login on the server to obtain an\n        authentication cookie. If login was successful, it tries to redirect\n        us to the URL we provided.\n\n    If we attempt to access the upload API without first obtaining an\n    authentication cookie, it returns a 401 response and directs us to\n    authenticate ourselves with ClientLogin.\n    \"\"\"\n    for i in range(3):\n      credentials = self.auth_function()\n      try:\n        auth_token = self._GetAuthToken(credentials[0], credentials[1])\n      except ClientLoginError, e:\n        if e.reason == \"BadAuthentication\":\n          print >>sys.stderr, \"Invalid username or password.\"\n          continue\n        if e.reason == \"CaptchaRequired\":\n          print >>sys.stderr, (\n              \"Please go to\\n\"\n              \"https://www.google.com/accounts/DisplayUnlockCaptcha\\n\"\n              \"and verify you are a human.  
Then try again.\")\n          break\n        if e.reason == \"NotVerified\":\n          print >>sys.stderr, \"Account not verified.\"\n          break\n        if e.reason == \"TermsNotAgreed\":\n          print >>sys.stderr, \"User has not agreed to TOS.\"\n          break\n        if e.reason == \"AccountDeleted\":\n          print >>sys.stderr, \"The user account has been deleted.\"\n          break\n        if e.reason == \"AccountDisabled\":\n          print >>sys.stderr, \"The user account has been disabled.\"\n          break\n        if e.reason == \"ServiceDisabled\":\n          print >>sys.stderr, (\"The user's access to the service has been \"\n                               \"disabled.\")\n          break\n        if e.reason == \"ServiceUnavailable\":\n          print >>sys.stderr, \"The service is not available; try again later.\"\n          break\n        raise\n      self._GetAuthCookie(auth_token)\n      return\n\n  def Send(self, request_path, payload=None,\n           content_type=\"application/octet-stream\",\n           timeout=None,\n           **kwargs):\n    \"\"\"Sends an RPC and returns the response.\n\n    Args:\n      request_path: The path to send the request to, eg /api/appversion/create.\n      payload: The body of the request, or None to send an empty request.\n      content_type: The Content-Type header to use.\n      timeout: timeout in seconds; default None i.e. no timeout.\n        (Note: for large requests on OS X, the timeout doesn't work right.)\n      kwargs: Any keyword arguments are converted into query string parameters.\n\n    Returns:\n      The response body, as a string.\n    \"\"\"\n    # TODO: Don't require authentication.  
Let the server say\n    # whether it is necessary.\n    if not self.authenticated:\n      self._Authenticate()\n\n    old_timeout = socket.getdefaulttimeout()\n    socket.setdefaulttimeout(timeout)\n    try:\n      tries = 0\n      while True:\n        tries += 1\n        args = dict(kwargs)\n        url = \"http://%s%s\" % (self.host, request_path)\n        if args:\n          url += \"?\" + urllib.urlencode(args)\n        req = self._CreateRequest(url=url, data=payload)\n        req.add_header(\"Content-Type\", content_type)\n        try:\n          f = self.opener.open(req)\n          response = f.read()\n          f.close()\n          return response\n        except urllib2.HTTPError, e:\n          if tries > 3:\n            raise\n          elif e.code == 401:\n            self._Authenticate()\n##           elif e.code >= 500 and e.code < 600:\n##             # Server Error - try again.\n##             continue\n          else:\n            raise\n    finally:\n      socket.setdefaulttimeout(old_timeout)\n\n\nclass HttpRpcServer(AbstractRpcServer):\n  \"\"\"Provides a simplified RPC-style interface for HTTP requests.\"\"\"\n\n  def _Authenticate(self):\n    \"\"\"Save the cookie jar after authentication.\"\"\"\n    super(HttpRpcServer, self)._Authenticate()\n    if self.save_cookies:\n      StatusUpdate(\"Saving authentication cookies to %s\" % self.cookie_file)\n      self.cookie_jar.save()\n\n  def _GetOpener(self):\n    \"\"\"Returns an OpenerDirector that supports cookies and ignores redirects.\n\n    Returns:\n      A urllib2.OpenerDirector object.\n    \"\"\"\n    opener = urllib2.OpenerDirector()\n    opener.add_handler(urllib2.ProxyHandler())\n    opener.add_handler(urllib2.UnknownHandler())\n    opener.add_handler(urllib2.HTTPHandler())\n    opener.add_handler(urllib2.HTTPDefaultErrorHandler())\n    opener.add_handler(urllib2.HTTPSHandler())\n    opener.add_handler(urllib2.HTTPErrorProcessor())\n    if self.save_cookies:\n      self.cookie_file = 
os.path.expanduser(\"~/.codereview_upload_cookies\")\n      self.cookie_jar = cookielib.MozillaCookieJar(self.cookie_file)\n      if os.path.exists(self.cookie_file):\n        try:\n          self.cookie_jar.load()\n          self.authenticated = True\n          StatusUpdate(\"Loaded authentication cookies from %s\" %\n                       self.cookie_file)\n        except (cookielib.LoadError, IOError):\n          # Failed to load cookies - just ignore them.\n          pass\n      else:\n        # Create an empty cookie file with mode 600\n        fd = os.open(self.cookie_file, os.O_CREAT, 0600)\n        os.close(fd)\n      # Always chmod the cookie file\n      os.chmod(self.cookie_file, 0600)\n    else:\n      # Don't save cookies across runs of update.py.\n      self.cookie_jar = cookielib.CookieJar()\n    opener.add_handler(urllib2.HTTPCookieProcessor(self.cookie_jar))\n    return opener\n\n\nparser = optparse.OptionParser(usage=\"%prog [options] [-- diff_options]\")\nparser.add_option(\"-y\", \"--assume_yes\", action=\"store_true\",\n                  dest=\"assume_yes\", default=False,\n                  help=\"Assume that the answer to yes/no questions is 'yes'.\")\n# Logging\ngroup = parser.add_option_group(\"Logging options\")\ngroup.add_option(\"-q\", \"--quiet\", action=\"store_const\", const=0,\n                 dest=\"verbose\", help=\"Print errors only.\")\ngroup.add_option(\"-v\", \"--verbose\", action=\"store_const\", const=2,\n                 dest=\"verbose\", default=1,\n                 help=\"Print info level logs (default).\")\ngroup.add_option(\"--noisy\", action=\"store_const\", const=3,\n                 dest=\"verbose\", help=\"Print all logs.\")\n# Review server\ngroup = parser.add_option_group(\"Review server options\")\ngroup.add_option(\"-s\", \"--server\", action=\"store\", dest=\"server\",\n                 default=\"codereview.appspot.com\",\n                 metavar=\"SERVER\",\n                 help=(\"The server to upload to. 
The format is host[:port]. \"\n                       \"Defaults to 'codereview.appspot.com'.\"))\ngroup.add_option(\"-e\", \"--email\", action=\"store\", dest=\"email\",\n                 metavar=\"EMAIL\", default=None,\n                 help=\"The username to use. Will prompt if omitted.\")\ngroup.add_option(\"-H\", \"--host\", action=\"store\", dest=\"host\",\n                 metavar=\"HOST\", default=None,\n                 help=\"Overrides the Host header sent with all RPCs.\")\ngroup.add_option(\"--no_cookies\", action=\"store_false\",\n                 dest=\"save_cookies\", default=True,\n                 help=\"Do not save authentication cookies to local disk.\")\n# Issue\ngroup = parser.add_option_group(\"Issue options\")\ngroup.add_option(\"-d\", \"--description\", action=\"store\", dest=\"description\",\n                 metavar=\"DESCRIPTION\", default=None,\n                 help=\"Optional description when creating an issue.\")\ngroup.add_option(\"-f\", \"--description_file\", action=\"store\",\n                 dest=\"description_file\", metavar=\"DESCRIPTION_FILE\",\n                 default=None,\n                 help=\"Optional path of a file that contains \"\n                      \"the description when creating an issue.\")\ngroup.add_option(\"-r\", \"--reviewers\", action=\"store\", dest=\"reviewers\",\n                 metavar=\"REVIEWERS\", default=None,\n                 help=\"Add reviewers (comma separated email addresses).\")\ngroup.add_option(\"--cc\", action=\"store\", dest=\"cc\",\n                 metavar=\"CC\", default=None,\n                 help=\"Add CC (comma separated email addresses).\")\n# Upload options\ngroup = parser.add_option_group(\"Patch options\")\ngroup.add_option(\"-m\", \"--message\", action=\"store\", dest=\"message\",\n                 metavar=\"MESSAGE\", default=None,\n                 help=\"A message to identify the patch. 
\"\n                      \"Will prompt if omitted.\")\ngroup.add_option(\"-i\", \"--issue\", type=\"int\", action=\"store\",\n                 metavar=\"ISSUE\", default=None,\n                 help=\"Issue number to which to add. Defaults to new issue.\")\ngroup.add_option(\"--download_base\", action=\"store_true\",\n                 dest=\"download_base\", default=False,\n                 help=\"Base files will be downloaded by the server \"\n                 \"(side-by-side diffs may not work on files with CRs).\")\ngroup.add_option(\"--rev\", action=\"store\", dest=\"revision\",\n                 metavar=\"REV\", default=None,\n                 help=\"Branch/tree/revision to diff against (used by DVCS).\")\ngroup.add_option(\"--send_mail\", action=\"store_true\",\n                 dest=\"send_mail\", default=False,\n                 help=\"Send notification email to reviewers.\")\n\n\ndef GetRpcServer(options):\n  \"\"\"Returns an instance of an AbstractRpcServer.\n\n  Returns:\n    A new AbstractRpcServer, on which RPC calls can be made.\n  \"\"\"\n\n  rpc_server_class = HttpRpcServer\n\n  def GetUserCredentials():\n    \"\"\"Prompts the user for a username and password.\"\"\"\n    email = options.email\n    if email is None:\n      email = GetEmail(\"Email (login for uploading to %s)\" % options.server)\n    password = getpass.getpass(\"Password for %s: \" % email)\n    return (email, password)\n\n  # If this is the dev_appserver, use fake authentication.\n  host = (options.host or options.server).lower()\n  if host == \"localhost\" or host.startswith(\"localhost:\"):\n    email = options.email\n    if email is None:\n      email = \"test@example.com\"\n      logging.info(\"Using debug user %s.  
Override with --email\" % email)\n    server = rpc_server_class(\n        options.server,\n        lambda: (email, \"password\"),\n        host_override=options.host,\n        extra_headers={\"Cookie\":\n                       'dev_appserver_login=\"%s:False\"' % email},\n        save_cookies=options.save_cookies)\n    # Don't try to talk to ClientLogin.\n    server.authenticated = True\n    return server\n\n  return rpc_server_class(options.server, GetUserCredentials,\n                          host_override=options.host,\n                          save_cookies=options.save_cookies)\n\n\ndef EncodeMultipartFormData(fields, files):\n  \"\"\"Encode form fields for multipart/form-data.\n\n  Args:\n    fields: A sequence of (name, value) elements for regular form fields.\n    files: A sequence of (name, filename, value) elements for data to be\n           uploaded as files.\n  Returns:\n    (content_type, body) ready for httplib.HTTP instance.\n\n  Source:\n    https://web.archive.org/web/20160116052001/code.activestate.com/recipes/146306\n  \"\"\"\n  BOUNDARY = '-M-A-G-I-C---B-O-U-N-D-A-R-Y-'\n  CRLF = '\\r\\n'\n  lines = []\n  for (key, value) in fields:\n    lines.append('--' + BOUNDARY)\n    lines.append('Content-Disposition: form-data; name=\"%s\"' % key)\n    lines.append('')\n    lines.append(value)\n  for (key, filename, value) in files:\n    lines.append('--' + BOUNDARY)\n    lines.append('Content-Disposition: form-data; name=\"%s\"; filename=\"%s\"' %\n             (key, filename))\n    lines.append('Content-Type: %s' % GetContentType(filename))\n    lines.append('')\n    lines.append(value)\n  lines.append('--' + BOUNDARY + '--')\n  lines.append('')\n  body = CRLF.join(lines)\n  content_type = 'multipart/form-data; boundary=%s' % BOUNDARY\n  return content_type, body\n\n\ndef GetContentType(filename):\n  \"\"\"Helper to guess the content-type from the filename.\"\"\"\n  return mimetypes.guess_type(filename)[0] or 'application/octet-stream'\n\n\n# Use a 
shell for subcommands on Windows to get a PATH search.\nuse_shell = sys.platform.startswith(\"win\")\n\ndef RunShellWithReturnCode(command, print_output=False,\n                           universal_newlines=True):\n  \"\"\"Executes a command and returns the output from stdout and the return code.\n\n  Args:\n    command: Command to execute.\n    print_output: If True, the output is printed to stdout.\n                  If False, both stdout and stderr are ignored.\n    universal_newlines: Use universal_newlines flag (default: True).\n\n  Returns:\n    Tuple (output, return code)\n  \"\"\"\n  logging.info(\"Running %s\", command)\n  p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE,\n                       shell=use_shell, universal_newlines=universal_newlines)\n  if print_output:\n    output_array = []\n    while True:\n      line = p.stdout.readline()\n      if not line:\n        break\n      print line.strip(\"\\n\")\n      output_array.append(line)\n    output = \"\".join(output_array)\n  else:\n    output = p.stdout.read()\n  p.wait()\n  errout = p.stderr.read()\n  if print_output and errout:\n    print >>sys.stderr, errout\n  p.stdout.close()\n  p.stderr.close()\n  return output, p.returncode\n\n\ndef RunShell(command, silent_ok=False, universal_newlines=True,\n             print_output=False):\n  data, retcode = RunShellWithReturnCode(command, print_output,\n                                         universal_newlines)\n  if retcode:\n    ErrorExit(\"Got error status from %s:\\n%s\" % (command, data))\n  if not silent_ok and not data:\n    ErrorExit(\"No output from %s\" % command)\n  return data\n\n\nclass VersionControlSystem(object):\n  \"\"\"Abstract base class providing an interface to the VCS.\"\"\"\n\n  def __init__(self, options):\n    \"\"\"Constructor.\n\n    Args:\n      options: Command line options.\n    \"\"\"\n    self.options = options\n\n  def GenerateDiff(self, args):\n    \"\"\"Return the current diff as a 
string.\n\n    Args:\n      args: Extra arguments to pass to the diff command.\n    \"\"\"\n    raise NotImplementedError(\n        \"abstract method -- subclass %s must override\" % self.__class__)\n\n  def GetUnknownFiles(self):\n    \"\"\"Return a list of files unknown to the VCS.\"\"\"\n    raise NotImplementedError(\n        \"abstract method -- subclass %s must override\" % self.__class__)\n\n  def CheckForUnknownFiles(self):\n    \"\"\"Show an \"are you sure?\" prompt if there are unknown files.\"\"\"\n    unknown_files = self.GetUnknownFiles()\n    if unknown_files:\n      print \"The following files are not added to version control:\"\n      for line in unknown_files:\n        print line\n      prompt = \"Are you sure to continue?(y/N) \"\n      answer = raw_input(prompt).strip()\n      if answer != \"y\":\n        ErrorExit(\"User aborted\")\n\n  def GetBaseFile(self, filename):\n    \"\"\"Get the content of the upstream version of a file.\n\n    Returns:\n      A tuple (base_content, new_content, is_binary, status)\n        base_content: The contents of the base file.\n        new_content: For text files, this is empty.  For binary files, this is\n          the contents of the new file, since the diff output won't contain\n          information to reconstruct the current file.\n        is_binary: True iff the file is binary.\n        status: The status of the file.\n    \"\"\"\n\n    raise NotImplementedError(\n        \"abstract method -- subclass %s must override\" % self.__class__)\n\n\n  def GetBaseFiles(self, diff):\n    \"\"\"Helper that calls GetBase file for each file in the patch.\n\n    Returns:\n      A dictionary that maps from filename to GetBaseFile's tuple.  
Filenames\n      are retrieved based on lines that start with \"Index:\" or\n      \"Property changes on:\".\n    \"\"\"\n    files = {}\n    for line in diff.splitlines(True):\n      if line.startswith('Index:') or line.startswith('Property changes on:'):\n        unused, filename = line.split(':', 1)\n        # On Windows if a file has property changes its filename uses '\\'\n        # instead of '/'.\n        filename = filename.strip().replace('\\\\', '/')\n        files[filename] = self.GetBaseFile(filename)\n    return files\n\n\n  def UploadBaseFiles(self, issue, rpc_server, patch_list, patchset, options,\n                      files):\n    \"\"\"Uploads the base files (and if necessary, the current ones as well).\"\"\"\n\n    def UploadFile(filename, file_id, content, is_binary, status, is_base):\n      \"\"\"Uploads a file to the server.\"\"\"\n      file_too_large = False\n      if is_base:\n        type = \"base\"\n      else:\n        type = \"current\"\n      if len(content) > MAX_UPLOAD_SIZE:\n        print (\"Not uploading the %s file for %s because it's too large.\" %\n               (type, filename))\n        file_too_large = True\n        content = \"\"\n      checksum = md5.new(content).hexdigest()\n      if options.verbose > 0 and not file_too_large:\n        print \"Uploading %s file for %s\" % (type, filename)\n      url = \"/%d/upload_content/%d/%d\" % (int(issue), int(patchset), file_id)\n      form_fields = [(\"filename\", filename),\n                     (\"status\", status),\n                     (\"checksum\", checksum),\n                     (\"is_binary\", str(is_binary)),\n                     (\"is_current\", str(not is_base)),\n                    ]\n      if file_too_large:\n        form_fields.append((\"file_too_large\", \"1\"))\n      if options.email:\n        form_fields.append((\"user\", options.email))\n      ctype, body = EncodeMultipartFormData(form_fields,\n                                            [(\"data\", filename, 
content)])\n      response_body = rpc_server.Send(url, body,\n                                      content_type=ctype)\n      if not response_body.startswith(\"OK\"):\n        StatusUpdate(\"  --> %s\" % response_body)\n        sys.exit(1)\n\n    patches = dict()\n    [patches.setdefault(v, k) for k, v in patch_list]\n    for filename in patches.keys():\n      base_content, new_content, is_binary, status = files[filename]\n      file_id_str = patches.get(filename)\n      if file_id_str.find(\"nobase\") != -1:\n        base_content = None\n        file_id_str = file_id_str[file_id_str.rfind(\"_\") + 1:]\n      file_id = int(file_id_str)\n      if base_content != None:\n        UploadFile(filename, file_id, base_content, is_binary, status, True)\n      if new_content != None:\n        UploadFile(filename, file_id, new_content, is_binary, status, False)\n\n  def IsImage(self, filename):\n    \"\"\"Returns true if the filename has an image extension.\"\"\"\n    mimetype =  mimetypes.guess_type(filename)[0]\n    if not mimetype:\n      return False\n    return mimetype.startswith(\"image/\")\n\n\nclass SubversionVCS(VersionControlSystem):\n  \"\"\"Implementation of the VersionControlSystem interface for Subversion.\"\"\"\n\n  def __init__(self, options):\n    super(SubversionVCS, self).__init__(options)\n    if self.options.revision:\n      match = re.match(r\"(\\d+)(:(\\d+))?\", self.options.revision)\n      if not match:\n        ErrorExit(\"Invalid Subversion revision %s.\" % self.options.revision)\n      self.rev_start = match.group(1)\n      self.rev_end = match.group(3)\n    else:\n      self.rev_start = self.rev_end = None\n    # Cache output from \"svn list -r REVNO dirname\".\n    # Keys: dirname, Values: 2-tuple (output for start rev and end rev).\n    self.svnls_cache = {}\n    # SVN base URL is required to fetch files deleted in an older revision.\n    # Result is cached to not guess it over and over again in GetBaseFile().\n    required = 
self.options.download_base or self.options.revision is not None\n    self.svn_base = self._GuessBase(required)\n\n  def GuessBase(self, required):\n    \"\"\"Wrapper for _GuessBase.\"\"\"\n    return self.svn_base\n\n  def _GuessBase(self, required):\n    \"\"\"Returns the SVN base URL.\n\n    Args:\n      required: If true, exits if the url can't be guessed, otherwise None is\n        returned.\n    \"\"\"\n    info = RunShell([\"svn\", \"info\"])\n    for line in info.splitlines():\n      words = line.split()\n      if len(words) == 2 and words[0] == \"URL:\":\n        url = words[1]\n        scheme, netloc, path, params, query, fragment = urlparse.urlparse(url)\n        username, netloc = urllib.splituser(netloc)\n        if username:\n          logging.info(\"Removed username from base URL\")\n        if netloc.endswith(\"svn.python.org\"):\n          if netloc == \"svn.python.org\":\n            if path.startswith(\"/projects/\"):\n              path = path[9:]\n          elif netloc != \"pythondev@svn.python.org\":\n            ErrorExit(\"Unrecognized Python URL: %s\" % url)\n          base = \"http://svn.python.org/view/*checkout*%s/\" % path\n          logging.info(\"Guessed Python base = %s\", base)\n        elif netloc.endswith(\"svn.collab.net\"):\n          if path.startswith(\"/repos/\"):\n            path = path[6:]\n          base = \"http://svn.collab.net/viewvc/*checkout*%s/\" % path\n          logging.info(\"Guessed CollabNet base = %s\", base)\n        elif netloc.endswith(\".googlecode.com\"):\n          path = path + \"/\"\n          base = urlparse.urlunparse((\"http\", netloc, path, params,\n                                      query, fragment))\n          logging.info(\"Guessed Google Code base = %s\", base)\n        else:\n          path = path + \"/\"\n          base = urlparse.urlunparse((scheme, netloc, path, params,\n                                      query, fragment))\n          logging.info(\"Guessed base = %s\", base)\n        
return base\n    if required:\n      ErrorExit(\"Can't find URL in output from svn info\")\n    return None\n\n  def GenerateDiff(self, args):\n    cmd = [\"svn\", \"diff\"]\n    if self.options.revision:\n      cmd += [\"-r\", self.options.revision]\n    cmd.extend(args)\n    data = RunShell(cmd)\n    count = 0\n    for line in data.splitlines():\n      if line.startswith(\"Index:\") or line.startswith(\"Property changes on:\"):\n        count += 1\n        logging.info(line)\n    if not count:\n      ErrorExit(\"No valid patches found in output from svn diff\")\n    return data\n\n  def _CollapseKeywords(self, content, keyword_str):\n    \"\"\"Collapses SVN keywords.\"\"\"\n    # svn cat translates keywords but svn diff doesn't. As a result of this\n    # behavior patching.PatchChunks() fails with a chunk mismatch error.\n    # This part was originally written by the Review Board development team\n    # who had the same problem (https://reviews.reviewboard.org/r/276/).\n    # Mapping of keywords to known aliases\n    svn_keywords = {\n      # Standard keywords\n      'Date':                ['Date', 'LastChangedDate'],\n      'Revision':            ['Revision', 'LastChangedRevision', 'Rev'],\n      'Author':              ['Author', 'LastChangedBy'],\n      'HeadURL':             ['HeadURL', 'URL'],\n      'Id':                  ['Id'],\n\n      # Aliases\n      'LastChangedDate':     ['LastChangedDate', 'Date'],\n      'LastChangedRevision': ['LastChangedRevision', 'Rev', 'Revision'],\n      'LastChangedBy':       ['LastChangedBy', 'Author'],\n      'URL':                 ['URL', 'HeadURL'],\n    }\n\n    def repl(m):\n       if m.group(2):\n         return \"$%s::%s$\" % (m.group(1), \" \" * len(m.group(3)))\n       return \"$%s$\" % m.group(1)\n    keywords = [keyword\n                for name in keyword_str.split(\" \")\n                for keyword in svn_keywords.get(name, [])]\n    return re.sub(r\"\\$(%s):(:?)([^\\$]+)\\$\" % '|'.join(keywords), repl, 
content)\n\n  def GetUnknownFiles(self):\n    status = RunShell([\"svn\", \"status\", \"--ignore-externals\"], silent_ok=True)\n    unknown_files = []\n    for line in status.split(\"\\n\"):\n      if line and line[0] == \"?\":\n        unknown_files.append(line)\n    return unknown_files\n\n  def ReadFile(self, filename):\n    \"\"\"Returns the contents of a file.\"\"\"\n    file = open(filename, 'rb')\n    result = \"\"\n    try:\n      result = file.read()\n    finally:\n      file.close()\n    return result\n\n  def GetStatus(self, filename):\n    \"\"\"Returns the status of a file.\"\"\"\n    if not self.options.revision:\n      status = RunShell([\"svn\", \"status\", \"--ignore-externals\", filename])\n      if not status:\n        ErrorExit(\"svn status returned no output for %s\" % filename)\n      status_lines = status.splitlines()\n      # If file is in a cl, the output will begin with\n      # \"\\n--- Changelist 'cl_name':\\n\".  See\n      # https://web.archive.org/web/20090918234815/svn.collab.net/repos/svn/trunk/notes/changelist-design.txt\n      if (len(status_lines) == 3 and\n          not status_lines[0] and\n          status_lines[1].startswith(\"--- Changelist\")):\n        status = status_lines[2]\n      else:\n        status = status_lines[0]\n    # If we have a revision to diff against we need to run \"svn list\"\n    # for the old and the new revision and compare the results to get\n    # the correct status for a file.\n    else:\n      dirname, relfilename = os.path.split(filename)\n      if dirname not in self.svnls_cache:\n        cmd = [\"svn\", \"list\", \"-r\", self.rev_start, dirname or \".\"]\n        out, returncode = RunShellWithReturnCode(cmd)\n        if returncode:\n          ErrorExit(\"Failed to get status for %s.\" % filename)\n        old_files = out.splitlines()\n        args = [\"svn\", \"list\"]\n        if self.rev_end:\n          args += [\"-r\", self.rev_end]\n        cmd = args + [dirname or \".\"]\n        out, 
returncode = RunShellWithReturnCode(cmd)\n        if returncode:\n          ErrorExit(\"Failed to run command %s\" % cmd)\n        self.svnls_cache[dirname] = (old_files, out.splitlines())\n      old_files, new_files = self.svnls_cache[dirname]\n      if relfilename in old_files and relfilename not in new_files:\n        status = \"D   \"\n      elif relfilename in old_files and relfilename in new_files:\n        status = \"M   \"\n      else:\n        status = \"A   \"\n    return status\n\n  def GetBaseFile(self, filename):\n    status = self.GetStatus(filename)\n    base_content = None\n    new_content = None\n\n    # If a file is copied its status will be \"A  +\", which signifies\n    # \"addition-with-history\".  See \"svn st\" for more information.  We need to\n    # upload the original file or else diff parsing will fail if the file was\n    # edited.\n    if status[0] == \"A\" and status[3] != \"+\":\n      # We'll need to upload the new content if we're adding a binary file\n      # since diff's output won't contain it.\n      mimetype = RunShell([\"svn\", \"propget\", \"svn:mime-type\", filename],\n                          silent_ok=True)\n      base_content = \"\"\n      is_binary = mimetype and not mimetype.startswith(\"text/\")\n      if is_binary and self.IsImage(filename):\n        new_content = self.ReadFile(filename)\n    elif (status[0] in (\"M\", \"D\", \"R\") or\n          (status[0] == \"A\" and status[3] == \"+\") or  # Copied file.\n          (status[0] == \" \" and status[1] == \"M\")):  # Property change.\n      args = []\n      if self.options.revision:\n        url = \"%s/%s@%s\" % (self.svn_base, filename, self.rev_start)\n      else:\n        # Don't change filename, it's needed later.\n        url = filename\n        args += [\"-r\", \"BASE\"]\n      cmd = [\"svn\"] + args + [\"propget\", \"svn:mime-type\", url]\n      mimetype, returncode = RunShellWithReturnCode(cmd)\n      if returncode:\n        # File does not exist in the 
requested revision.\n        # Reset mimetype, it contains an error message.\n        mimetype = \"\"\n      get_base = False\n      is_binary = mimetype and not mimetype.startswith(\"text/\")\n      if status[0] == \" \":\n        # Empty base content just to force an upload.\n        base_content = \"\"\n      elif is_binary:\n        if self.IsImage(filename):\n          get_base = True\n          if status[0] == \"M\":\n            if not self.rev_end:\n              new_content = self.ReadFile(filename)\n            else:\n              url = \"%s/%s@%s\" % (self.svn_base, filename, self.rev_end)\n              new_content = RunShell([\"svn\", \"cat\", url],\n                                     universal_newlines=True, silent_ok=True)\n        else:\n          base_content = \"\"\n      else:\n        get_base = True\n\n      if get_base:\n        if is_binary:\n          universal_newlines = False\n        else:\n          universal_newlines = True\n        if self.rev_start:\n          # \"svn cat -r REV delete_file.txt\" doesn't work. 
cat requires\n          # the full URL with \"@REV\" appended instead of using \"-r\" option.\n          url = \"%s/%s@%s\" % (self.svn_base, filename, self.rev_start)\n          base_content = RunShell([\"svn\", \"cat\", url],\n                                  universal_newlines=universal_newlines,\n                                  silent_ok=True)\n        else:\n          base_content = RunShell([\"svn\", \"cat\", filename],\n                                  universal_newlines=universal_newlines,\n                                  silent_ok=True)\n        if not is_binary:\n          args = []\n          if self.rev_start:\n            url = \"%s/%s@%s\" % (self.svn_base, filename, self.rev_start)\n          else:\n            url = filename\n            args += [\"-r\", \"BASE\"]\n          cmd = [\"svn\"] + args + [\"propget\", \"svn:keywords\", url]\n          keywords, returncode = RunShellWithReturnCode(cmd)\n          if keywords and not returncode:\n            base_content = self._CollapseKeywords(base_content, keywords)\n    else:\n      StatusUpdate(\"svn status returned unexpected output: %s\" % status)\n      sys.exit(1)\n    return base_content, new_content, is_binary, status[0:5]\n\n\nclass GitVCS(VersionControlSystem):\n  \"\"\"Implementation of the VersionControlSystem interface for Git.\"\"\"\n\n  def __init__(self, options):\n    super(GitVCS, self).__init__(options)\n    # Map of filename -> hash of base file.\n    self.base_hashes = {}\n\n  def GenerateDiff(self, extra_args):\n    # This is more complicated than svn's GenerateDiff because we must convert\n    # the diff output to include an svn-style \"Index:\" line as well as record\n    # the hashes of the base files, so we can upload them along with our diff.\n    if self.options.revision:\n      extra_args = [self.options.revision] + extra_args\n    gitdiff = RunShell([\"git\", \"diff\", \"--full-index\"] + extra_args)\n    svndiff = []\n    filecount = 0\n    filename = None\n    for 
line in gitdiff.splitlines():\n      match = re.match(r\"diff --git a/(.*) b/.*$\", line)\n      if match:\n        filecount += 1\n        filename = match.group(1)\n        svndiff.append(\"Index: %s\\n\" % filename)\n      else:\n        # The \"index\" line in a git diff looks like this (long hashes elided):\n        #   index 82c0d44..b2cee3f 100755\n        # We want to save the left hash, as that identifies the base file.\n        match = re.match(r\"index (\\w+)\\.\\.\", line)\n        if match:\n          self.base_hashes[filename] = match.group(1)\n      svndiff.append(line + \"\\n\")\n    if not filecount:\n      ErrorExit(\"No valid patches found in output from git diff\")\n    return \"\".join(svndiff)\n\n  def GetUnknownFiles(self):\n    status = RunShell([\"git\", \"ls-files\", \"--exclude-standard\", \"--others\"],\n                      silent_ok=True)\n    return status.splitlines()\n\n  def GetBaseFile(self, filename):\n    hash = self.base_hashes[filename]\n    base_content = None\n    new_content = None\n    is_binary = False\n    if hash == \"0\" * 40:  # All-zero hash indicates no base file.\n      status = \"A\"\n      base_content = \"\"\n    else:\n      status = \"M\"\n      base_content, returncode = RunShellWithReturnCode([\"git\", \"show\", hash])\n      if returncode:\n        ErrorExit(\"Got error status from 'git show %s'\" % hash)\n    return (base_content, new_content, is_binary, status)\n\n\nclass MercurialVCS(VersionControlSystem):\n  \"\"\"Implementation of the VersionControlSystem interface for Mercurial.\"\"\"\n\n  def __init__(self, options, repo_dir):\n    super(MercurialVCS, self).__init__(options)\n    # Absolute path to repository (we can be in a subdir)\n    self.repo_dir = os.path.normpath(repo_dir)\n    # Compute the subdir\n    cwd = os.path.normpath(os.getcwd())\n    assert cwd.startswith(self.repo_dir)\n    self.subdir = cwd[len(self.repo_dir):].lstrip(r\"\\/\")\n    if self.options.revision:\n      self.base_rev = 
self.options.revision\n    else:\n      self.base_rev = RunShell([\"hg\", \"parent\", \"-q\"]).split(':')[1].strip()\n\n  def _GetRelPath(self, filename):\n    \"\"\"Get relative path of a file according to the current directory,\n    given its logical path in the repo.\"\"\"\n    assert filename.startswith(self.subdir), filename\n    return filename[len(self.subdir):].lstrip(r\"\\/\")\n\n  def GenerateDiff(self, extra_args):\n    # If no file specified, restrict to the current subdir\n    extra_args = extra_args or [\".\"]\n    cmd = [\"hg\", \"diff\", \"--git\", \"-r\", self.base_rev] + extra_args\n    data = RunShell(cmd, silent_ok=True)\n    svndiff = []\n    filecount = 0\n    for line in data.splitlines():\n      m = re.match(\"diff --git a/(\\S+) b/(\\S+)\", line)\n      if m:\n        # Modify line to make it look like as it comes from svn diff.\n        # With this modification no changes on the server side are required\n        # to make upload.py work with Mercurial repos.\n        # NOTE: for proper handling of moved/copied files, we have to use\n        # the second filename.\n        filename = m.group(2)\n        svndiff.append(\"Index: %s\" % filename)\n        svndiff.append(\"=\" * 67)\n        filecount += 1\n        logging.info(line)\n      else:\n        svndiff.append(line)\n    if not filecount:\n      ErrorExit(\"No valid patches found in output from hg diff\")\n    return \"\\n\".join(svndiff) + \"\\n\"\n\n  def GetUnknownFiles(self):\n    \"\"\"Return a list of files unknown to the VCS.\"\"\"\n    args = []\n    status = RunShell([\"hg\", \"status\", \"--rev\", self.base_rev, \"-u\", \".\"],\n        silent_ok=True)\n    unknown_files = []\n    for line in status.splitlines():\n      st, fn = line.split(\" \", 1)\n      if st == \"?\":\n        unknown_files.append(fn)\n    return unknown_files\n\n  def GetBaseFile(self, filename):\n    # \"hg status\" and \"hg cat\" both take a path relative to the current subdir\n    # rather than to 
the repo root, but \"hg diff\" has given us the full path\n    # to the repo root.\n    base_content = \"\"\n    new_content = None\n    is_binary = False\n    oldrelpath = relpath = self._GetRelPath(filename)\n    # \"hg status -C\" returns two lines for moved/copied files, one otherwise\n    out = RunShell([\"hg\", \"status\", \"-C\", \"--rev\", self.base_rev, relpath])\n    out = out.splitlines()\n    # HACK: strip error message about missing file/directory if it isn't in\n    # the working copy\n    if out[0].startswith('%s: ' % relpath):\n      out = out[1:]\n    if len(out) > 1:\n      # Moved/copied => considered as modified, use old filename to\n      # retrieve base contents\n      oldrelpath = out[1].strip()\n      status = \"M\"\n    else:\n      status, _ = out[0].split(' ', 1)\n    if status != \"A\":\n      base_content = RunShell([\"hg\", \"cat\", \"-r\", self.base_rev, oldrelpath],\n        silent_ok=True)\n      is_binary = \"\\0\" in base_content  # Mercurial's heuristic\n    if status != \"R\":\n      new_content = open(relpath, \"rb\").read()\n      is_binary = is_binary or \"\\0\" in new_content\n    if is_binary and base_content:\n      # Fetch again without converting newlines\n      base_content = RunShell([\"hg\", \"cat\", \"-r\", self.base_rev, oldrelpath],\n        silent_ok=True, universal_newlines=False)\n    if not is_binary or not self.IsImage(relpath):\n      new_content = None\n    return base_content, new_content, is_binary, status\n\n\n# NOTE: The SplitPatch function is duplicated in engine.py, keep them in sync.\ndef SplitPatch(data):\n  \"\"\"Splits a patch into separate pieces for each file.\n\n  Args:\n    data: A string containing the output of svn diff.\n\n  Returns:\n    A list of 2-tuple (filename, text) where text is the svn diff output\n      pertaining to filename.\n  \"\"\"\n  patches = []\n  filename = None\n  diff = []\n  for line in data.splitlines(True):\n    new_filename = None\n    if line.startswith('Index:'):\n 
     unused, new_filename = line.split(':', 1)\n      new_filename = new_filename.strip()\n    elif line.startswith('Property changes on:'):\n      unused, temp_filename = line.split(':', 1)\n      # When a file is modified, paths use '/' between directories, however\n      # when a property is modified '\\' is used on Windows.  Make them the same\n      # otherwise the file shows up twice.\n      temp_filename = temp_filename.strip().replace('\\\\', '/')\n      if temp_filename != filename:\n        # File has property changes but no modifications, create a new diff.\n        new_filename = temp_filename\n    if new_filename:\n      if filename and diff:\n        patches.append((filename, ''.join(diff)))\n      filename = new_filename\n      diff = [line]\n      continue\n    if diff is not None:\n      diff.append(line)\n  if filename and diff:\n    patches.append((filename, ''.join(diff)))\n  return patches\n\n\ndef UploadSeparatePatches(issue, rpc_server, patchset, data, options):\n  \"\"\"Uploads a separate patch for each file in the diff output.\n\n  Returns a list of [patch_key, filename] for each file.\n  \"\"\"\n  patches = SplitPatch(data)\n  rv = []\n  for patch in patches:\n    if len(patch[1]) > MAX_UPLOAD_SIZE:\n      print (\"Not uploading the patch for \" + patch[0] +\n             \" because the file is too large.\")\n      continue\n    form_fields = [(\"filename\", patch[0])]\n    if not options.download_base:\n      form_fields.append((\"content_upload\", \"1\"))\n    files = [(\"data\", \"data.diff\", patch[1])]\n    ctype, body = EncodeMultipartFormData(form_fields, files)\n    url = \"/%d/upload_patch/%d\" % (int(issue), int(patchset))\n    print \"Uploading patch for \" + patch[0]\n    response_body = rpc_server.Send(url, body, content_type=ctype)\n    lines = response_body.splitlines()\n    if not lines or lines[0] != \"OK\":\n      StatusUpdate(\"  --> %s\" % response_body)\n      sys.exit(1)\n    rv.append([lines[1], patch[0]])\n  return 
rv\n\n\ndef GuessVCS(options):\n  \"\"\"Helper to guess the version control system.\n\n  This examines the current directory, guesses which VersionControlSystem\n  we're using, and returns an instance of the appropriate class.  Exit with an\n  error if we can't figure it out.\n\n  Returns:\n    A VersionControlSystem instance. Exits if the VCS can't be guessed.\n  \"\"\"\n  # Mercurial has a command to get the base directory of a repository\n  # Try running it, but don't die if we don't have hg installed.\n  # NOTE: we try Mercurial first as it can sit on top of an SVN working copy.\n  try:\n    out, returncode = RunShellWithReturnCode([\"hg\", \"root\"])\n    if returncode == 0:\n      return MercurialVCS(options, out.strip())\n  except OSError, (errno, message):\n    if errno != 2:  # ENOENT -- they don't have hg installed.\n      raise\n\n  # Subversion has a .svn in all working directories.\n  if os.path.isdir('.svn'):\n    logging.info(\"Guessed VCS = Subversion\")\n    return SubversionVCS(options)\n\n  # Git has a command to test if you're in a git tree.\n  # Try running it, but don't die if we don't have git installed.\n  try:\n    out, returncode = RunShellWithReturnCode([\"git\", \"rev-parse\",\n                                              \"--is-inside-work-tree\"])\n    if returncode == 0:\n      return GitVCS(options)\n  except OSError, (errno, message):\n    if errno != 2:  # ENOENT -- they don't have git installed.\n      raise\n\n  ErrorExit((\"Could not guess version control system. \"\n             \"Are you in a working copy directory?\"))\n\n\ndef RealMain(argv, data=None):\n  \"\"\"The real main function.\n\n  Args:\n    argv: Command line arguments.\n    data: Diff contents. 
If None (default) the diff is generated by\n      the VersionControlSystem implementation returned by GuessVCS().\n\n  Returns:\n    A 2-tuple (issue id, patchset id).\n    The patchset id is None if the base files are not uploaded by this\n    script (applies only to SVN checkouts).\n  \"\"\"\n  logging.basicConfig(format=(\"%(asctime).19s %(levelname)s %(filename)s:\"\n                              \"%(lineno)s %(message)s \"))\n  os.environ['LC_ALL'] = 'C'\n  options, args = parser.parse_args(argv[1:])\n  global verbosity\n  verbosity = options.verbose\n  if verbosity >= 3:\n    logging.getLogger().setLevel(logging.DEBUG)\n  elif verbosity >= 2:\n    logging.getLogger().setLevel(logging.INFO)\n  vcs = GuessVCS(options)\n  if isinstance(vcs, SubversionVCS):\n    # base field is only allowed for Subversion.\n    # Note: Fetching base files may become deprecated in future releases.\n    base = vcs.GuessBase(options.download_base)\n  else:\n    base = None\n  if not base and options.download_base:\n    options.download_base = True\n    logging.info(\"Enabled upload of base file\")\n  if not options.assume_yes:\n    vcs.CheckForUnknownFiles()\n  if data is None:\n    data = vcs.GenerateDiff(args)\n  files = vcs.GetBaseFiles(data)\n  if verbosity >= 1:\n    print \"Upload server:\", options.server, \"(change with -s/--server)\"\n  if options.issue:\n    prompt = \"Message describing this patch set: \"\n  else:\n    prompt = \"New issue subject: \"\n  message = options.message or raw_input(prompt).strip()\n  if not message:\n    ErrorExit(\"A non-empty message is required\")\n  rpc_server = GetRpcServer(options)\n  form_fields = [(\"subject\", message)]\n  if base:\n    form_fields.append((\"base\", base))\n  if options.issue:\n    form_fields.append((\"issue\", str(options.issue)))\n  if options.email:\n    form_fields.append((\"user\", options.email))\n  if options.reviewers:\n    for reviewer in options.reviewers.split(','):\n      if \"@\" in reviewer and not 
reviewer.split(\"@\")[1].count(\".\") == 1:\n        ErrorExit(\"Invalid email address: %s\" % reviewer)\n    form_fields.append((\"reviewers\", options.reviewers))\n  if options.cc:\n    for cc in options.cc.split(','):\n      if \"@\" in cc and not cc.split(\"@\")[1].count(\".\") == 1:\n        ErrorExit(\"Invalid email address: %s\" % cc)\n    form_fields.append((\"cc\", options.cc))\n  description = options.description\n  if options.description_file:\n    if options.description:\n      ErrorExit(\"Can't specify description and description_file\")\n    file = open(options.description_file, 'r')\n    description = file.read()\n    file.close()\n  if description:\n    form_fields.append((\"description\", description))\n  # Send a hash of all the base file so the server can determine if a copy\n  # already exists in an earlier patchset.\n  base_hashes = \"\"\n  for file, info in files.iteritems():\n    if not info[0] is None:\n      checksum = md5.new(info[0]).hexdigest()\n      if base_hashes:\n        base_hashes += \"|\"\n      base_hashes += checksum + \":\" + file\n  form_fields.append((\"base_hashes\", base_hashes))\n  # If we're uploading base files, don't send the email before the uploads, so\n  # that it contains the file status.\n  if options.send_mail and options.download_base:\n    form_fields.append((\"send_mail\", \"1\"))\n  if not options.download_base:\n    form_fields.append((\"content_upload\", \"1\"))\n  if len(data) > MAX_UPLOAD_SIZE:\n    print \"Patch is large, so uploading file patches separately.\"\n    uploaded_diff_file = []\n    form_fields.append((\"separate_patches\", \"1\"))\n  else:\n    uploaded_diff_file = [(\"data\", \"data.diff\", data)]\n  ctype, body = EncodeMultipartFormData(form_fields, uploaded_diff_file)\n  response_body = rpc_server.Send(\"/upload\", body, content_type=ctype)\n  patchset = None\n  if not options.download_base or not uploaded_diff_file:\n    lines = response_body.splitlines()\n    if len(lines) >= 2:\n      
msg = lines[0]\n      patchset = lines[1].strip()\n      patches = [x.split(\" \", 1) for x in lines[2:]]\n    else:\n      msg = response_body\n  else:\n    msg = response_body\n  StatusUpdate(msg)\n  if not response_body.startswith(\"Issue created.\") and \\\n  not response_body.startswith(\"Issue updated.\"):\n    sys.exit(0)\n  issue = msg[msg.rfind(\"/\")+1:]\n\n  if not uploaded_diff_file:\n    result = UploadSeparatePatches(issue, rpc_server, patchset, data, options)\n    if not options.download_base:\n      patches = result\n\n  if not options.download_base:\n    vcs.UploadBaseFiles(issue, rpc_server, patches, patchset, options, files)\n    if options.send_mail:\n      rpc_server.Send(\"/\" + issue + \"/mail\", payload=\"\")\n  return issue, patchset\n\n\ndef main():\n  try:\n    RealMain(sys.argv)\n  except KeyboardInterrupt:\n    print\n    StatusUpdate(\"Interrupted.\")\n    sys.exit(1)\n\n\nif __name__ == \"__main__\":\n  main()\n"
  },
  {
    "path": "test/googletest/scripts/upload_gtest.py",
    "content": "#!/usr/bin/env python\n#\n# Copyright 2009, Google Inc.\n# All rights reserved.\n#\n# Redistribution and use in source and binary forms, with or without\n# modification, are permitted provided that the following conditions are\n# met:\n#\n#     * Redistributions of source code must retain the above copyright\n# notice, this list of conditions and the following disclaimer.\n#     * Redistributions in binary form must reproduce the above\n# copyright notice, this list of conditions and the following disclaimer\n# in the documentation and/or other materials provided with the\n# distribution.\n#     * Neither the name of Google Inc. nor the names of its\n# contributors may be used to endorse or promote products derived from\n# this software without specific prior written permission.\n#\n# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n# \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\"\"\"upload_gtest.py v0.1.0 -- uploads a Google Test patch for review.\n\nThis simple wrapper passes all command line flags and\n--cc=googletestframework@googlegroups.com to upload.py.\n\nUSAGE: upload_gtest.py [options for upload.py]\n\"\"\"\n\n__author__ = 'wan@google.com (Zhanyong Wan)'\n\nimport os\nimport sys\n\nCC_FLAG = '--cc='\nGTEST_GROUP = 'googletestframework@googlegroups.com'\n\n\ndef main():\n  # Finds the path to upload.py, assuming it is in the same directory\n  # as this file.\n  my_dir = os.path.dirname(os.path.abspath(__file__))\n  upload_py_path = os.path.join(my_dir, 'upload.py')\n\n  # Adds Google Test discussion group to the cc line if it's not there\n  # already.\n  upload_py_argv = [upload_py_path]\n  found_cc_flag = False\n  for arg in sys.argv[1:]:\n    if arg.startswith(CC_FLAG):\n      found_cc_flag = True\n      cc_line = arg[len(CC_FLAG):]\n      cc_list = [addr for addr in cc_line.split(',') if addr]\n      if GTEST_GROUP not in cc_list:\n        cc_list.append(GTEST_GROUP)\n      upload_py_argv.append(CC_FLAG + ','.join(cc_list))\n    else:\n      upload_py_argv.append(arg)\n\n  if not found_cc_flag:\n    upload_py_argv.append(CC_FLAG + GTEST_GROUP)\n\n  # Invokes upload.py with the modified command line flags.\n  os.execv(upload_py_path, upload_py_argv)\n\n\nif __name__ == '__main__':\n  main()\n"
  },
  {
    "path": "test/googletest/src/gtest-all.cc",
    "content": "// Copyright 2008, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n//\n// Google C++ Testing and Mocking Framework (Google Test)\n//\n// Sometimes it's desirable to build Google Test by compiling a single file.\n// This file serves this purpose.\n\n// This line ensures that gtest.h can be compiled on its own, even\n// when it's fused.\n#include \"gtest/gtest.h\"\n\n// The following lines pull in the real gtest *.cc files.\n#include \"src/gtest.cc\"\n#include \"src/gtest-death-test.cc\"\n#include \"src/gtest-filepath.cc\"\n#include \"src/gtest-matchers.cc\"\n#include \"src/gtest-port.cc\"\n#include \"src/gtest-printers.cc\"\n#include \"src/gtest-test-part.cc\"\n#include \"src/gtest-typed-test.cc\"\n"
  },
  {
    "path": "test/googletest/src/gtest-death-test.cc",
    "content": "// Copyright 2005, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n//\n// This file implements death tests.\n\n#include \"gtest/gtest-death-test.h\"\n\n#include <utility>\n\n#include \"gtest/internal/gtest-port.h\"\n#include \"gtest/internal/custom/gtest.h\"\n\n#if GTEST_HAS_DEATH_TEST\n\n# if GTEST_OS_MAC\n#  include <crt_externs.h>\n# endif  // GTEST_OS_MAC\n\n# include <errno.h>\n# include <fcntl.h>\n# include <limits.h>\n\n# if GTEST_OS_LINUX\n#  include <signal.h>\n# endif  // GTEST_OS_LINUX\n\n# include <stdarg.h>\n\n# if GTEST_OS_WINDOWS\n#  include <windows.h>\n# else\n#  include <sys/mman.h>\n#  include <sys/wait.h>\n# endif  // GTEST_OS_WINDOWS\n\n# if GTEST_OS_QNX\n#  include <spawn.h>\n# endif  // GTEST_OS_QNX\n\n# if GTEST_OS_FUCHSIA\n#  include <lib/fdio/fd.h>\n#  include <lib/fdio/io.h>\n#  include <lib/fdio/spawn.h>\n#  include <lib/zx/channel.h>\n#  include <lib/zx/port.h>\n#  include <lib/zx/process.h>\n#  include <lib/zx/socket.h>\n#  include <zircon/processargs.h>\n#  include <zircon/syscalls.h>\n#  include <zircon/syscalls/policy.h>\n#  include <zircon/syscalls/port.h>\n# endif  // GTEST_OS_FUCHSIA\n\n#endif  // GTEST_HAS_DEATH_TEST\n\n#include \"gtest/gtest-message.h\"\n#include \"gtest/internal/gtest-string.h\"\n#include \"src/gtest-internal-inl.h\"\n\nnamespace testing {\n\n// Constants.\n\n// The default death test style.\n//\n// This is defined in internal/gtest-port.h as \"fast\", but can be overridden by\n// a definition in internal/custom/gtest-port.h. 
The recommended value, which is\n// used internally at Google, is \"threadsafe\".\nstatic const char kDefaultDeathTestStyle[] = GTEST_DEFAULT_DEATH_TEST_STYLE;\n\nGTEST_DEFINE_string_(\n    death_test_style,\n    internal::StringFromGTestEnv(\"death_test_style\", kDefaultDeathTestStyle),\n    \"Indicates how to run a death test in a forked child process: \"\n    \"\\\"threadsafe\\\" (child process re-executes the test binary \"\n    \"from the beginning, running only the specific death test) or \"\n    \"\\\"fast\\\" (child process runs the death test immediately \"\n    \"after forking).\");\n\nGTEST_DEFINE_bool_(\n    death_test_use_fork,\n    internal::BoolFromGTestEnv(\"death_test_use_fork\", false),\n    \"Instructs to use fork()/_exit() instead of clone() in death tests. \"\n    \"Ignored and always uses fork() on POSIX systems where clone() is not \"\n    \"implemented. Useful when running under valgrind or similar tools if \"\n    \"those do not support clone(). Valgrind 3.3.1 will just fail if \"\n    \"it sees an unsupported combination of clone() flags. \"\n    \"It is not recommended to use this flag w/o valgrind though it will \"\n    \"work in 99% of the cases. Once valgrind is fixed, this flag will \"\n    \"most likely be removed.\");\n\nnamespace internal {\nGTEST_DEFINE_string_(\n    internal_run_death_test, \"\",\n    \"Indicates the file, line number, temporal index of \"\n    \"the single death test to run, and a file descriptor to \"\n    \"which a success code may be sent, all separated by \"\n    \"the '|' characters.  This flag is specified if and only if the \"\n    \"current process is a sub-process launched for running a thread-safe \"\n    \"death test.  FOR INTERNAL USE ONLY.\");\n}  // namespace internal\n\n#if GTEST_HAS_DEATH_TEST\n\nnamespace internal {\n\n// Valid only for fast death tests. 
Indicates the code is running in the\n// child process of a fast style death test.\n# if !GTEST_OS_WINDOWS && !GTEST_OS_FUCHSIA\nstatic bool g_in_fast_death_test_child = false;\n# endif\n\n// Returns a Boolean value indicating whether the caller is currently\n// executing in the context of the death test child process.  Tools such as\n// Valgrind heap checkers may need this to modify their behavior in death\n// tests.  IMPORTANT: This is an internal utility.  Using it may break the\n// implementation of death tests.  User code MUST NOT use it.\nbool InDeathTestChild() {\n# if GTEST_OS_WINDOWS || GTEST_OS_FUCHSIA\n\n  // On Windows and Fuchsia, death tests are thread-safe regardless of the value\n  // of the death_test_style flag.\n  return !GTEST_FLAG(internal_run_death_test).empty();\n\n# else\n\n  if (GTEST_FLAG(death_test_style) == \"threadsafe\")\n    return !GTEST_FLAG(internal_run_death_test).empty();\n  else\n    return g_in_fast_death_test_child;\n#endif\n}\n\n}  // namespace internal\n\n// ExitedWithCode constructor.\nExitedWithCode::ExitedWithCode(int exit_code) : exit_code_(exit_code) {\n}\n\n// ExitedWithCode function-call operator.\nbool ExitedWithCode::operator()(int exit_status) const {\n# if GTEST_OS_WINDOWS || GTEST_OS_FUCHSIA\n\n  return exit_status == exit_code_;\n\n# else\n\n  return WIFEXITED(exit_status) && WEXITSTATUS(exit_status) == exit_code_;\n\n# endif  // GTEST_OS_WINDOWS || GTEST_OS_FUCHSIA\n}\n\n# if !GTEST_OS_WINDOWS && !GTEST_OS_FUCHSIA\n// KilledBySignal constructor.\nKilledBySignal::KilledBySignal(int signum) : signum_(signum) {\n}\n\n// KilledBySignal function-call operator.\nbool KilledBySignal::operator()(int exit_status) const {\n#  if defined(GTEST_KILLED_BY_SIGNAL_OVERRIDE_)\n  {\n    bool result;\n    if (GTEST_KILLED_BY_SIGNAL_OVERRIDE_(signum_, exit_status, &result)) {\n      return result;\n    }\n  }\n#  endif  // defined(GTEST_KILLED_BY_SIGNAL_OVERRIDE_)\n  return WIFSIGNALED(exit_status) && WTERMSIG(exit_status) == 
signum_;\n}\n# endif  // !GTEST_OS_WINDOWS && !GTEST_OS_FUCHSIA\n\nnamespace internal {\n\n// Utilities needed for death tests.\n\n// Generates a textual description of a given exit code, in the format\n// specified by wait(2).\nstatic std::string ExitSummary(int exit_code) {\n  Message m;\n\n# if GTEST_OS_WINDOWS || GTEST_OS_FUCHSIA\n\n  m << \"Exited with exit status \" << exit_code;\n\n# else\n\n  if (WIFEXITED(exit_code)) {\n    m << \"Exited with exit status \" << WEXITSTATUS(exit_code);\n  } else if (WIFSIGNALED(exit_code)) {\n    m << \"Terminated by signal \" << WTERMSIG(exit_code);\n  }\n#  ifdef WCOREDUMP\n  if (WCOREDUMP(exit_code)) {\n    m << \" (core dumped)\";\n  }\n#  endif\n# endif  // GTEST_OS_WINDOWS || GTEST_OS_FUCHSIA\n\n  return m.GetString();\n}\n\n// Returns true if exit_status describes a process that was terminated\n// by a signal, or exited normally with a nonzero exit code.\nbool ExitedUnsuccessfully(int exit_status) {\n  return !ExitedWithCode(0)(exit_status);\n}\n\n# if !GTEST_OS_WINDOWS && !GTEST_OS_FUCHSIA\n// Generates a textual failure message when a death test finds more than\n// one thread running, or cannot determine the number of threads, prior\n// to executing the given statement.  It is the responsibility of the\n// caller not to pass a thread_count of 1.\nstatic std::string DeathTestThreadWarning(size_t thread_count) {\n  Message msg;\n  msg << \"Death tests use fork(), which is unsafe particularly\"\n      << \" in a threaded context. 
For this test, \" << GTEST_NAME_ << \" \";\n  if (thread_count == 0) {\n    msg << \"couldn't detect the number of threads.\";\n  } else {\n    msg << \"detected \" << thread_count << \" threads.\";\n  }\n  msg << \" See \"\n         \"https://github.com/google/googletest/blob/master/googletest/docs/\"\n         \"advanced.md#death-tests-and-threads\"\n      << \" for more explanation and suggested solutions, especially if\"\n      << \" this is the last message you see before your test times out.\";\n  return msg.GetString();\n}\n# endif  // !GTEST_OS_WINDOWS && !GTEST_OS_FUCHSIA\n\n// Flag characters for reporting a death test that did not die.\nstatic const char kDeathTestLived = 'L';\nstatic const char kDeathTestReturned = 'R';\nstatic const char kDeathTestThrew = 'T';\nstatic const char kDeathTestInternalError = 'I';\n\n#if GTEST_OS_FUCHSIA\n\n// File descriptor used for the pipe in the child process.\nstatic const int kFuchsiaReadPipeFd = 3;\n\n#endif\n\n// An enumeration describing all of the possible ways that a death test can\n// conclude.  DIED means that the process died while executing the test\n// code; LIVED means that process lived beyond the end of the test code;\n// RETURNED means that the test statement attempted to execute a return\n// statement, which is not allowed; THREW means that the test statement\n// returned control by throwing an exception.  IN_PROGRESS means the test\n// has not yet concluded.\nenum DeathTestOutcome { IN_PROGRESS, DIED, LIVED, RETURNED, THREW };\n\n// Routine for aborting the program which is safe to call from an\n// exec-style death test child process, in which case the error\n// message is propagated back to the parent process.  Otherwise, the\n// message is simply printed to stderr.  
In either case, the program\n// then exits with status 1.\nstatic void DeathTestAbort(const std::string& message) {\n  // On a POSIX system, this function may be called from a threadsafe-style\n  // death test child process, which operates on a very small stack.  Use\n  // the heap for any additional non-minuscule memory requirements.\n  const InternalRunDeathTestFlag* const flag =\n      GetUnitTestImpl()->internal_run_death_test_flag();\n  if (flag != nullptr) {\n    FILE* parent = posix::FDOpen(flag->write_fd(), \"w\");\n    fputc(kDeathTestInternalError, parent);\n    fprintf(parent, \"%s\", message.c_str());\n    fflush(parent);\n    _exit(1);\n  } else {\n    fprintf(stderr, \"%s\", message.c_str());\n    fflush(stderr);\n    posix::Abort();\n  }\n}\n\n// A replacement for CHECK that calls DeathTestAbort if the assertion\n// fails.\n# define GTEST_DEATH_TEST_CHECK_(expression) \\\n  do { \\\n    if (!::testing::internal::IsTrue(expression)) { \\\n      DeathTestAbort( \\\n          ::std::string(\"CHECK failed: File \") + __FILE__ +  \", line \" \\\n          + ::testing::internal::StreamableToString(__LINE__) + \": \" \\\n          + #expression); \\\n    } \\\n  } while (::testing::internal::AlwaysFalse())\n\n// This macro is similar to GTEST_DEATH_TEST_CHECK_, but it is meant for\n// evaluating any system call that fulfills two conditions: it must return\n// -1 on failure, and set errno to EINTR when it is interrupted and\n// should be tried again.  The macro expands to a loop that repeatedly\n// evaluates the expression as long as it evaluates to -1 and sets\n// errno to EINTR.  
If the expression evaluates to -1 but errno is\n// something other than EINTR, DeathTestAbort is called.\n# define GTEST_DEATH_TEST_CHECK_SYSCALL_(expression) \\\n  do { \\\n    int gtest_retval; \\\n    do { \\\n      gtest_retval = (expression); \\\n    } while (gtest_retval == -1 && errno == EINTR); \\\n    if (gtest_retval == -1) { \\\n      DeathTestAbort( \\\n          ::std::string(\"CHECK failed: File \") + __FILE__ + \", line \" \\\n          + ::testing::internal::StreamableToString(__LINE__) + \": \" \\\n          + #expression + \" != -1\"); \\\n    } \\\n  } while (::testing::internal::AlwaysFalse())\n\n// Returns the message describing the last system error in errno.\nstd::string GetLastErrnoDescription() {\n    return errno == 0 ? \"\" : posix::StrError(errno);\n}\n\n// This is called from a death test parent process to read a failure\n// message from the death test child process and log it with the FATAL\n// severity. On Windows, the message is read from a pipe handle. On other\n// platforms, it is read from a file descriptor.\nstatic void FailFromInternalError(int fd) {\n  Message error;\n  char buffer[256];\n  int num_read;\n\n  do {\n    while ((num_read = posix::Read(fd, buffer, 255)) > 0) {\n      buffer[num_read] = '\\0';\n      error << buffer;\n    }\n  } while (num_read == -1 && errno == EINTR);\n\n  if (num_read == 0) {\n    GTEST_LOG_(FATAL) << error.GetString();\n  } else {\n    const int last_error = errno;\n    GTEST_LOG_(FATAL) << \"Error while reading death test internal: \"\n                      << GetLastErrnoDescription() << \" [\" << last_error << \"]\";\n  }\n}\n\n// Death test constructor.  
Increments the running death test count\n// for the current test.\nDeathTest::DeathTest() {\n  TestInfo* const info = GetUnitTestImpl()->current_test_info();\n  if (info == nullptr) {\n    DeathTestAbort(\"Cannot run a death test outside of a TEST or \"\n                   \"TEST_F construct\");\n  }\n}\n\n// Creates and returns a death test by dispatching to the current\n// death test factory.\nbool DeathTest::Create(const char* statement,\n                       Matcher<const std::string&> matcher, const char* file,\n                       int line, DeathTest** test) {\n  return GetUnitTestImpl()->death_test_factory()->Create(\n      statement, std::move(matcher), file, line, test);\n}\n\nconst char* DeathTest::LastMessage() {\n  return last_death_test_message_.c_str();\n}\n\nvoid DeathTest::set_last_death_test_message(const std::string& message) {\n  last_death_test_message_ = message;\n}\n\nstd::string DeathTest::last_death_test_message_;\n\n// Provides cross platform implementation for some death functionality.\nclass DeathTestImpl : public DeathTest {\n protected:\n  DeathTestImpl(const char* a_statement, Matcher<const std::string&> matcher)\n      : statement_(a_statement),\n        matcher_(std::move(matcher)),\n        spawned_(false),\n        status_(-1),\n        outcome_(IN_PROGRESS),\n        read_fd_(-1),\n        write_fd_(-1) {}\n\n  // read_fd_ is expected to be closed and cleared by a derived class.\n  ~DeathTestImpl() override { GTEST_DEATH_TEST_CHECK_(read_fd_ == -1); }\n\n  void Abort(AbortReason reason) override;\n  bool Passed(bool status_ok) override;\n\n  const char* statement() const { return statement_; }\n  bool spawned() const { return spawned_; }\n  void set_spawned(bool is_spawned) { spawned_ = is_spawned; }\n  int status() const { return status_; }\n  void set_status(int a_status) { status_ = a_status; }\n  DeathTestOutcome outcome() const { return outcome_; }\n  void set_outcome(DeathTestOutcome an_outcome) { outcome_ = an_outcome; 
}\n  int read_fd() const { return read_fd_; }\n  void set_read_fd(int fd) { read_fd_ = fd; }\n  int write_fd() const { return write_fd_; }\n  void set_write_fd(int fd) { write_fd_ = fd; }\n\n  // Called in the parent process only. Reads the result code of the death\n  // test child process via a pipe, interprets it to set the outcome_\n  // member, and closes read_fd_.  Outputs diagnostics and terminates in\n  // case of unexpected codes.\n  void ReadAndInterpretStatusByte();\n\n  // Returns stderr output from the child process.\n  virtual std::string GetErrorLogs();\n\n private:\n  // The textual content of the code this object is testing.  This class\n  // doesn't own this string and should not attempt to delete it.\n  const char* const statement_;\n  // A matcher that's expected to match the stderr output by the child process.\n  Matcher<const std::string&> matcher_;\n  // True if the death test child process has been successfully spawned.\n  bool spawned_;\n  // The exit status of the child process.\n  int status_;\n  // How the death test concluded.\n  DeathTestOutcome outcome_;\n  // Descriptor to the read end of the pipe to the child process.  It is\n  // always -1 in the child process.  The child keeps its write end of the\n  // pipe in write_fd_.\n  int read_fd_;\n  // Descriptor to the child's write end of the pipe to the parent process.\n  // It is always -1 in the parent process.  The parent keeps its end of the\n  // pipe in read_fd_.\n  int write_fd_;\n};\n\n// Called in the parent process only. Reads the result code of the death\n// test child process via a pipe, interprets it to set the outcome_\n// member, and closes read_fd_.  
Outputs diagnostics and terminates in\n// case of unexpected codes.\nvoid DeathTestImpl::ReadAndInterpretStatusByte() {\n  char flag;\n  int bytes_read;\n\n  // The read() here blocks until data is available (signifying the\n  // failure of the death test) or until the pipe is closed (signifying\n  // its success), so it's okay to call this in the parent before\n  // the child process has exited.\n  do {\n    bytes_read = posix::Read(read_fd(), &flag, 1);\n  } while (bytes_read == -1 && errno == EINTR);\n\n  if (bytes_read == 0) {\n    set_outcome(DIED);\n  } else if (bytes_read == 1) {\n    switch (flag) {\n      case kDeathTestReturned:\n        set_outcome(RETURNED);\n        break;\n      case kDeathTestThrew:\n        set_outcome(THREW);\n        break;\n      case kDeathTestLived:\n        set_outcome(LIVED);\n        break;\n      case kDeathTestInternalError:\n        FailFromInternalError(read_fd());  // Does not return.\n        break;\n      default:\n        GTEST_LOG_(FATAL) << \"Death test child process reported \"\n                          << \"unexpected status byte (\"\n                          << static_cast<unsigned int>(flag) << \")\";\n    }\n  } else {\n    GTEST_LOG_(FATAL) << \"Read from death test child process failed: \"\n                      << GetLastErrnoDescription();\n  }\n  GTEST_DEATH_TEST_CHECK_SYSCALL_(posix::Close(read_fd()));\n  set_read_fd(-1);\n}\n\nstd::string DeathTestImpl::GetErrorLogs() {\n  return GetCapturedStderr();\n}\n\n// Signals that the death test code which should have exited, didn't.\n// Should be called only in a death test child process.\n// Writes a status byte to the child's status file descriptor, then\n// calls _exit(1).\nvoid DeathTestImpl::Abort(AbortReason reason) {\n  // The parent process considers the death test to be a failure if\n  // it finds any data in our pipe.  So, here we write a single flag byte\n  // to the pipe, then exit.\n  const char status_ch =\n      reason == TEST_DID_NOT_DIE ? 
kDeathTestLived :\n      reason == TEST_THREW_EXCEPTION ? kDeathTestThrew : kDeathTestReturned;\n\n  GTEST_DEATH_TEST_CHECK_SYSCALL_(posix::Write(write_fd(), &status_ch, 1));\n  // We are leaking the descriptor here because on some platforms (i.e.,\n  // when built as Windows DLL), destructors of global objects will still\n  // run after calling _exit(). On such systems, write_fd_ will be\n  // indirectly closed from the destructor of UnitTestImpl, causing double\n  // close if it is also closed here. On debug configurations, double close\n  // may assert. As there are no in-process buffers to flush here, we are\n  // relying on the OS to close the descriptor after the process terminates\n  // when the destructors are not run.\n  _exit(1);  // Exits w/o any normal exit hooks (we were supposed to crash)\n}\n\n// Returns an indented copy of stderr output for a death test.\n// This makes distinguishing death test output lines from regular log lines\n// much easier.\nstatic ::std::string FormatDeathTestOutput(const ::std::string& output) {\n  ::std::string ret;\n  for (size_t at = 0; ; ) {\n    const size_t line_end = output.find('\\n', at);\n    ret += \"[  DEATH   ] \";\n    if (line_end == ::std::string::npos) {\n      ret += output.substr(at);\n      break;\n    }\n    ret += output.substr(at, line_end + 1 - at);\n    at = line_end + 1;\n  }\n  return ret;\n}\n\n// Assesses the success or failure of a death test, using both private\n// members which have previously been set, and one argument:\n//\n// Private data members:\n//   outcome:  An enumeration describing how the death test\n//             concluded: DIED, LIVED, THREW, or RETURNED.  The death test\n//             fails in the latter three cases.\n//   status:   The exit status of the child process. On *nix, it is in the\n//             in the format specified by wait(2). 
On Windows, this is the\n//             value supplied to the ExitProcess() API or a numeric code\n//             of the exception that terminated the program.\n//   matcher_: A matcher that's expected to match the stderr output by the child\n//             process.\n//\n// Argument:\n//   status_ok: true if exit_status is acceptable in the context of\n//              this particular death test, which fails if it is false\n//\n// Returns true if and only if all of the above conditions are met.  Otherwise,\n// the first failing condition, in the order given above, is the one that is\n// reported. Also sets the last death test message string.\nbool DeathTestImpl::Passed(bool status_ok) {\n  if (!spawned())\n    return false;\n\n  const std::string error_message = GetErrorLogs();\n\n  bool success = false;\n  Message buffer;\n\n  buffer << \"Death test: \" << statement() << \"\\n\";\n  switch (outcome()) {\n    case LIVED:\n      buffer << \"    Result: failed to die.\\n\"\n             << \" Error msg:\\n\" << FormatDeathTestOutput(error_message);\n      break;\n    case THREW:\n      buffer << \"    Result: threw an exception.\\n\"\n             << \" Error msg:\\n\" << FormatDeathTestOutput(error_message);\n      break;\n    case RETURNED:\n      buffer << \"    Result: illegal return in test statement.\\n\"\n             << \" Error msg:\\n\" << FormatDeathTestOutput(error_message);\n      break;\n    case DIED:\n      if (status_ok) {\n        if (matcher_.Matches(error_message)) {\n          success = true;\n        } else {\n          std::ostringstream stream;\n          matcher_.DescribeTo(&stream);\n          buffer << \"    Result: died but not with expected error.\\n\"\n                 << \"  Expected: \" << stream.str() << \"\\n\"\n                 << \"Actual msg:\\n\"\n                 << FormatDeathTestOutput(error_message);\n        }\n      } else {\n        buffer << \"    Result: died but not with expected exit code:\\n\"\n               << \"     
       \" << ExitSummary(status()) << \"\\n\"\n               << \"Actual msg:\\n\" << FormatDeathTestOutput(error_message);\n      }\n      break;\n    case IN_PROGRESS:\n    default:\n      GTEST_LOG_(FATAL)\n          << \"DeathTest::Passed somehow called before conclusion of test\";\n  }\n\n  DeathTest::set_last_death_test_message(buffer.GetString());\n  return success;\n}\n\n# if GTEST_OS_WINDOWS\n// WindowsDeathTest implements death tests on Windows. Due to the\n// specifics of starting new processes on Windows, death tests there are\n// always threadsafe, and Google Test considers the\n// --gtest_death_test_style=fast setting to be equivalent to\n// --gtest_death_test_style=threadsafe there.\n//\n// A few implementation notes:  Like the Linux version, the Windows\n// implementation uses pipes for child-to-parent communication. But due to\n// the specifics of pipes on Windows, some extra steps are required:\n//\n// 1. The parent creates a communication pipe and stores handles to both\n//    ends of it.\n// 2. The parent starts the child and provides it with the information\n//    necessary to acquire the handle to the write end of the pipe.\n// 3. The child acquires the write end of the pipe and signals the parent\n//    using a Windows event.\n// 4. Now the parent can release the write end of the pipe on its side. If\n//    this is done before step 3, the object's reference count goes down to\n//    0 and it is destroyed, preventing the child from acquiring it. The\n//    parent now has to release it, or read operations on the read end of\n//    the pipe will not return when the child terminates.\n// 5. 
The parent reads child's output through the pipe (outcome code and\n//    any possible error messages) from the pipe, and its stderr and then\n//    determines whether to fail the test.\n//\n// Note: to distinguish Win32 API calls from the local method and function\n// calls, the former are explicitly resolved in the global namespace.\n//\nclass WindowsDeathTest : public DeathTestImpl {\n public:\n  WindowsDeathTest(const char* a_statement, Matcher<const std::string&> matcher,\n                   const char* file, int line)\n      : DeathTestImpl(a_statement, std::move(matcher)),\n        file_(file),\n        line_(line) {}\n\n  // All of these virtual functions are inherited from DeathTest.\n  virtual int Wait();\n  virtual TestRole AssumeRole();\n\n private:\n  // The name of the file in which the death test is located.\n  const char* const file_;\n  // The line number on which the death test is located.\n  const int line_;\n  // Handle to the write end of the pipe to the child process.\n  AutoHandle write_handle_;\n  // Child process handle.\n  AutoHandle child_handle_;\n  // Event the child process uses to signal the parent that it has\n  // acquired the handle to the write end of the pipe. After seeing this\n  // event the parent can release its own handles to make sure its\n  // ReadFile() calls return when the child terminates.\n  AutoHandle event_handle_;\n};\n\n// Waits for the child in a death test to exit, returning its exit\n// status, or 0 if no child process exists.  
As a side effect, sets the\n// outcome data member.\nint WindowsDeathTest::Wait() {\n  if (!spawned())\n    return 0;\n\n  // Wait until the child either signals that it has acquired the write end\n  // of the pipe or it dies.\n  const HANDLE wait_handles[2] = { child_handle_.Get(), event_handle_.Get() };\n  switch (::WaitForMultipleObjects(2,\n                                   wait_handles,\n                                   FALSE,  // Waits for any of the handles.\n                                   INFINITE)) {\n    case WAIT_OBJECT_0:\n    case WAIT_OBJECT_0 + 1:\n      break;\n    default:\n      GTEST_DEATH_TEST_CHECK_(false);  // Should not get here.\n  }\n\n  // The child has acquired the write end of the pipe or exited.\n  // We release the handle on our side and continue.\n  write_handle_.Reset();\n  event_handle_.Reset();\n\n  ReadAndInterpretStatusByte();\n\n  // Waits for the child process to exit if it haven't already. This\n  // returns immediately if the child has already exited, regardless of\n  // whether previous calls to WaitForMultipleObjects synchronized on this\n  // handle or not.\n  GTEST_DEATH_TEST_CHECK_(\n      WAIT_OBJECT_0 == ::WaitForSingleObject(child_handle_.Get(),\n                                             INFINITE));\n  DWORD status_code;\n  GTEST_DEATH_TEST_CHECK_(\n      ::GetExitCodeProcess(child_handle_.Get(), &status_code) != FALSE);\n  child_handle_.Reset();\n  set_status(static_cast<int>(status_code));\n  return status();\n}\n\n// The AssumeRole process for a Windows death test.  It creates a child\n// process with the same executable as the current process to run the\n// death test.  
The child process is given the --gtest_filter and\n// --gtest_internal_run_death_test flags such that it knows to run the\n// current death test only.\nDeathTest::TestRole WindowsDeathTest::AssumeRole() {\n  const UnitTestImpl* const impl = GetUnitTestImpl();\n  const InternalRunDeathTestFlag* const flag =\n      impl->internal_run_death_test_flag();\n  const TestInfo* const info = impl->current_test_info();\n  const int death_test_index = info->result()->death_test_count();\n\n  if (flag != nullptr) {\n    // ParseInternalRunDeathTestFlag() has performed all the necessary\n    // processing.\n    set_write_fd(flag->write_fd());\n    return EXECUTE_TEST;\n  }\n\n  // WindowsDeathTest uses an anonymous pipe to communicate results of\n  // a death test.\n  SECURITY_ATTRIBUTES handles_are_inheritable = {sizeof(SECURITY_ATTRIBUTES),\n                                                 nullptr, TRUE};\n  HANDLE read_handle, write_handle;\n  GTEST_DEATH_TEST_CHECK_(\n      ::CreatePipe(&read_handle, &write_handle, &handles_are_inheritable,\n                   0)  // Default buffer size.\n      != FALSE);\n  set_read_fd(::_open_osfhandle(reinterpret_cast<intptr_t>(read_handle),\n                                O_RDONLY));\n  write_handle_.Reset(write_handle);\n  event_handle_.Reset(::CreateEvent(\n      &handles_are_inheritable,\n      TRUE,       // The event will automatically reset to non-signaled state.\n      FALSE,      // The initial state is non-signalled.\n      nullptr));  // The even is unnamed.\n  GTEST_DEATH_TEST_CHECK_(event_handle_.Get() != nullptr);\n  const std::string filter_flag = std::string(\"--\") + GTEST_FLAG_PREFIX_ +\n                                  kFilterFlag + \"=\" + info->test_suite_name() +\n                                  \".\" + info->name();\n  const std::string internal_flag =\n      std::string(\"--\") + GTEST_FLAG_PREFIX_ + kInternalRunDeathTestFlag +\n      \"=\" + file_ + \"|\" + StreamableToString(line_) + \"|\" +\n      
StreamableToString(death_test_index) + \"|\" +\n      StreamableToString(static_cast<unsigned int>(::GetCurrentProcessId())) +\n      // size_t has the same width as pointers on both 32-bit and 64-bit\n      // Windows platforms.\n      // See http://msdn.microsoft.com/en-us/library/tcxf1dw6.aspx.\n      \"|\" + StreamableToString(reinterpret_cast<size_t>(write_handle)) +\n      \"|\" + StreamableToString(reinterpret_cast<size_t>(event_handle_.Get()));\n\n  char executable_path[_MAX_PATH + 1];  // NOLINT\n  GTEST_DEATH_TEST_CHECK_(_MAX_PATH + 1 != ::GetModuleFileNameA(nullptr,\n                                                                executable_path,\n                                                                _MAX_PATH));\n\n  std::string command_line =\n      std::string(::GetCommandLineA()) + \" \" + filter_flag + \" \\\"\" +\n      internal_flag + \"\\\"\";\n\n  DeathTest::set_last_death_test_message(\"\");\n\n  CaptureStderr();\n  // Flush the log buffers since the log streams are shared with the child.\n  FlushInfoLog();\n\n  // The child process will share the standard handles with the parent.\n  STARTUPINFOA startup_info;\n  memset(&startup_info, 0, sizeof(STARTUPINFO));\n  startup_info.dwFlags = STARTF_USESTDHANDLES;\n  startup_info.hStdInput = ::GetStdHandle(STD_INPUT_HANDLE);\n  startup_info.hStdOutput = ::GetStdHandle(STD_OUTPUT_HANDLE);\n  startup_info.hStdError = ::GetStdHandle(STD_ERROR_HANDLE);\n\n  PROCESS_INFORMATION process_info;\n  GTEST_DEATH_TEST_CHECK_(\n      ::CreateProcessA(\n          executable_path, const_cast<char*>(command_line.c_str()),\n          nullptr,  // Retuned process handle is not inheritable.\n          nullptr,  // Retuned thread handle is not inheritable.\n          TRUE,  // Child inherits all inheritable handles (for write_handle_).\n          0x0,   // Default creation flags.\n          nullptr,  // Inherit the parent's environment.\n          UnitTest::GetInstance()->original_working_dir(), &startup_info,\n 
         &process_info) != FALSE);\n  child_handle_.Reset(process_info.hProcess);\n  ::CloseHandle(process_info.hThread);\n  set_spawned(true);\n  return OVERSEE_TEST;\n}\n\n# elif GTEST_OS_FUCHSIA\n\nclass FuchsiaDeathTest : public DeathTestImpl {\n public:\n  FuchsiaDeathTest(const char* a_statement, Matcher<const std::string&> matcher,\n                   const char* file, int line)\n      : DeathTestImpl(a_statement, std::move(matcher)),\n        file_(file),\n        line_(line) {}\n\n  // All of these virtual functions are inherited from DeathTest.\n  int Wait() override;\n  TestRole AssumeRole() override;\n  std::string GetErrorLogs() override;\n\n private:\n  // The name of the file in which the death test is located.\n  const char* const file_;\n  // The line number on which the death test is located.\n  const int line_;\n  // The stderr data captured by the child process.\n  std::string captured_stderr_;\n\n  zx::process child_process_;\n  zx::channel exception_channel_;\n  zx::socket stderr_socket_;\n};\n\n// Utility class for accumulating command-line arguments.\nclass Arguments {\n public:\n  Arguments() { args_.push_back(nullptr); }\n\n  ~Arguments() {\n    for (std::vector<char*>::iterator i = args_.begin(); i != args_.end();\n         ++i) {\n      free(*i);\n    }\n  }\n  void AddArgument(const char* argument) {\n    args_.insert(args_.end() - 1, posix::StrDup(argument));\n  }\n\n  template <typename Str>\n  void AddArguments(const ::std::vector<Str>& arguments) {\n    for (typename ::std::vector<Str>::const_iterator i = arguments.begin();\n         i != arguments.end();\n         ++i) {\n      args_.insert(args_.end() - 1, posix::StrDup(i->c_str()));\n    }\n  }\n  char* const* Argv() {\n    return &args_[0];\n  }\n\n  int size() {\n    return args_.size() - 1;\n  }\n\n private:\n  std::vector<char*> args_;\n};\n\n// Waits for the child in a death test to exit, returning its exit\n// status, or 0 if no child process exists.  
As a side effect, sets the\n// outcome data member.\nint FuchsiaDeathTest::Wait() {\n  const int kProcessKey = 0;\n  const int kSocketKey = 1;\n  const int kExceptionKey = 2;\n\n  if (!spawned())\n    return 0;\n\n  // Create a port to wait for socket/task/exception events.\n  zx_status_t status_zx;\n  zx::port port;\n  status_zx = zx::port::create(0, &port);\n  GTEST_DEATH_TEST_CHECK_(status_zx == ZX_OK);\n\n  // Register to wait for the child process to terminate.\n  status_zx = child_process_.wait_async(\n      port, kProcessKey, ZX_PROCESS_TERMINATED, ZX_WAIT_ASYNC_ONCE);\n  GTEST_DEATH_TEST_CHECK_(status_zx == ZX_OK);\n\n  // Register to wait for the socket to be readable or closed.\n  status_zx = stderr_socket_.wait_async(\n      port, kSocketKey, ZX_SOCKET_READABLE | ZX_SOCKET_PEER_CLOSED,\n      ZX_WAIT_ASYNC_ONCE);\n  GTEST_DEATH_TEST_CHECK_(status_zx == ZX_OK);\n\n  // Register to wait for an exception.\n  status_zx = exception_channel_.wait_async(\n      port, kExceptionKey, ZX_CHANNEL_READABLE, ZX_WAIT_ASYNC_ONCE);\n  GTEST_DEATH_TEST_CHECK_(status_zx == ZX_OK);\n\n  bool process_terminated = false;\n  bool socket_closed = false;\n  do {\n    zx_port_packet_t packet = {};\n    status_zx = port.wait(zx::time::infinite(), &packet);\n    GTEST_DEATH_TEST_CHECK_(status_zx == ZX_OK);\n\n    if (packet.key == kExceptionKey) {\n      // Process encountered an exception. Kill it directly rather than\n      // letting other handlers process the event. 
We will get a kProcessKey\n      // event when the process actually terminates.\n      status_zx = child_process_.kill();\n      GTEST_DEATH_TEST_CHECK_(status_zx == ZX_OK);\n    } else if (packet.key == kProcessKey) {\n      // Process terminated.\n      GTEST_DEATH_TEST_CHECK_(ZX_PKT_IS_SIGNAL_ONE(packet.type));\n      GTEST_DEATH_TEST_CHECK_(packet.signal.observed & ZX_PROCESS_TERMINATED);\n      process_terminated = true;\n    } else if (packet.key == kSocketKey) {\n      GTEST_DEATH_TEST_CHECK_(ZX_PKT_IS_SIGNAL_ONE(packet.type));\n      if (packet.signal.observed & ZX_SOCKET_READABLE) {\n        // Read data from the socket.\n        constexpr size_t kBufferSize = 1024;\n        do {\n          size_t old_length = captured_stderr_.length();\n          size_t bytes_read = 0;\n          captured_stderr_.resize(old_length + kBufferSize);\n          status_zx = stderr_socket_.read(\n              0, &captured_stderr_.front() + old_length, kBufferSize,\n              &bytes_read);\n          captured_stderr_.resize(old_length + bytes_read);\n        } while (status_zx == ZX_OK);\n        if (status_zx == ZX_ERR_PEER_CLOSED) {\n          socket_closed = true;\n        } else {\n          GTEST_DEATH_TEST_CHECK_(status_zx == ZX_ERR_SHOULD_WAIT);\n          status_zx = stderr_socket_.wait_async(\n              port, kSocketKey, ZX_SOCKET_READABLE | ZX_SOCKET_PEER_CLOSED,\n              ZX_WAIT_ASYNC_ONCE);\n          GTEST_DEATH_TEST_CHECK_(status_zx == ZX_OK);\n        }\n      } else {\n        GTEST_DEATH_TEST_CHECK_(packet.signal.observed & ZX_SOCKET_PEER_CLOSED);\n        socket_closed = true;\n      }\n    }\n  } while (!process_terminated && !socket_closed);\n\n  ReadAndInterpretStatusByte();\n\n  zx_info_process_t buffer;\n  status_zx = child_process_.get_info(\n      ZX_INFO_PROCESS, &buffer, sizeof(buffer), nullptr, nullptr);\n  GTEST_DEATH_TEST_CHECK_(status_zx == ZX_OK);\n\n  GTEST_DEATH_TEST_CHECK_(buffer.exited);\n  set_status(buffer.return_code);\n  
return status();\n}\n\n// The AssumeRole process for a Fuchsia death test.  It creates a child\n// process with the same executable as the current process to run the\n// death test.  The child process is given the --gtest_filter and\n// --gtest_internal_run_death_test flags such that it knows to run the\n// current death test only.\nDeathTest::TestRole FuchsiaDeathTest::AssumeRole() {\n  const UnitTestImpl* const impl = GetUnitTestImpl();\n  const InternalRunDeathTestFlag* const flag =\n      impl->internal_run_death_test_flag();\n  const TestInfo* const info = impl->current_test_info();\n  const int death_test_index = info->result()->death_test_count();\n\n  if (flag != nullptr) {\n    // ParseInternalRunDeathTestFlag() has performed all the necessary\n    // processing.\n    set_write_fd(kFuchsiaReadPipeFd);\n    return EXECUTE_TEST;\n  }\n\n  // Flush the log buffers since the log streams are shared with the child.\n  FlushInfoLog();\n\n  // Build the child process command line.\n  const std::string filter_flag = std::string(\"--\") + GTEST_FLAG_PREFIX_ +\n                                  kFilterFlag + \"=\" + info->test_suite_name() +\n                                  \".\" + info->name();\n  const std::string internal_flag =\n      std::string(\"--\") + GTEST_FLAG_PREFIX_ + kInternalRunDeathTestFlag + \"=\"\n      + file_ + \"|\"\n      + StreamableToString(line_) + \"|\"\n      + StreamableToString(death_test_index);\n  Arguments args;\n  args.AddArguments(GetInjectableArgvs());\n  args.AddArgument(filter_flag.c_str());\n  args.AddArgument(internal_flag.c_str());\n\n  // Build the pipe for communication with the child.\n  zx_status_t status;\n  zx_handle_t child_pipe_handle;\n  int child_pipe_fd;\n  status = fdio_pipe_half(&child_pipe_fd, &child_pipe_handle);\n  GTEST_DEATH_TEST_CHECK_(status == ZX_OK);\n  set_read_fd(child_pipe_fd);\n\n  // Set the pipe handle for the child.\n  fdio_spawn_action_t spawn_actions[2] = {};\n  fdio_spawn_action_t* 
add_handle_action = &spawn_actions[0];\n  add_handle_action->action = FDIO_SPAWN_ACTION_ADD_HANDLE;\n  add_handle_action->h.id = PA_HND(PA_FD, kFuchsiaReadPipeFd);\n  add_handle_action->h.handle = child_pipe_handle;\n\n  // Create a socket pair will be used to receive the child process' stderr.\n  zx::socket stderr_producer_socket;\n  status =\n      zx::socket::create(0, &stderr_producer_socket, &stderr_socket_);\n  GTEST_DEATH_TEST_CHECK_(status >= 0);\n  int stderr_producer_fd = -1;\n  status =\n      fdio_fd_create(stderr_producer_socket.release(), &stderr_producer_fd);\n  GTEST_DEATH_TEST_CHECK_(status >= 0);\n\n  // Make the stderr socket nonblocking.\n  GTEST_DEATH_TEST_CHECK_(fcntl(stderr_producer_fd, F_SETFL, 0) == 0);\n\n  fdio_spawn_action_t* add_stderr_action = &spawn_actions[1];\n  add_stderr_action->action = FDIO_SPAWN_ACTION_CLONE_FD;\n  add_stderr_action->fd.local_fd = stderr_producer_fd;\n  add_stderr_action->fd.target_fd = STDERR_FILENO;\n\n  // Create a child job.\n  zx_handle_t child_job = ZX_HANDLE_INVALID;\n  status = zx_job_create(zx_job_default(), 0, & child_job);\n  GTEST_DEATH_TEST_CHECK_(status == ZX_OK);\n  zx_policy_basic_t policy;\n  policy.condition = ZX_POL_NEW_ANY;\n  policy.policy = ZX_POL_ACTION_ALLOW;\n  status = zx_job_set_policy(\n      child_job, ZX_JOB_POL_RELATIVE, ZX_JOB_POL_BASIC, &policy, 1);\n  GTEST_DEATH_TEST_CHECK_(status == ZX_OK);\n\n  // Create an exception channel attached to the |child_job|, to allow\n  // us to suppress the system default exception handler from firing.\n  status =\n      zx_task_create_exception_channel(\n          child_job, 0, exception_channel_.reset_and_get_address());\n  GTEST_DEATH_TEST_CHECK_(status == ZX_OK);\n\n  // Spawn the child process.\n  status = fdio_spawn_etc(\n      child_job, FDIO_SPAWN_CLONE_ALL, args.Argv()[0], args.Argv(), nullptr,\n      2, spawn_actions, child_process_.reset_and_get_address(), nullptr);\n  GTEST_DEATH_TEST_CHECK_(status == ZX_OK);\n\n  
set_spawned(true);\n  return OVERSEE_TEST;\n}\n\nstd::string FuchsiaDeathTest::GetErrorLogs() {\n  return captured_stderr_;\n}\n\n#else  // We are neither on Windows, nor on Fuchsia.\n\n// ForkingDeathTest provides implementations for most of the abstract\n// methods of the DeathTest interface.  Only the AssumeRole method is\n// left undefined.\nclass ForkingDeathTest : public DeathTestImpl {\n public:\n  ForkingDeathTest(const char* statement, Matcher<const std::string&> matcher);\n\n  // All of these virtual functions are inherited from DeathTest.\n  int Wait() override;\n\n protected:\n  void set_child_pid(pid_t child_pid) { child_pid_ = child_pid; }\n\n private:\n  // PID of child process during death test; 0 in the child process itself.\n  pid_t child_pid_;\n};\n\n// Constructs a ForkingDeathTest.\nForkingDeathTest::ForkingDeathTest(const char* a_statement,\n                                   Matcher<const std::string&> matcher)\n    : DeathTestImpl(a_statement, std::move(matcher)), child_pid_(-1) {}\n\n// Waits for the child in a death test to exit, returning its exit\n// status, or 0 if no child process exists.  As a side effect, sets the\n// outcome data member.\nint ForkingDeathTest::Wait() {\n  if (!spawned())\n    return 0;\n\n  ReadAndInterpretStatusByte();\n\n  int status_value;\n  GTEST_DEATH_TEST_CHECK_SYSCALL_(waitpid(child_pid_, &status_value, 0));\n  set_status(status_value);\n  return status_value;\n}\n\n// A concrete death test class that forks, then immediately runs the test\n// in the child process.\nclass NoExecDeathTest : public ForkingDeathTest {\n public:\n  NoExecDeathTest(const char* a_statement, Matcher<const std::string&> matcher)\n      : ForkingDeathTest(a_statement, std::move(matcher)) {}\n  TestRole AssumeRole() override;\n};\n\n// The AssumeRole process for a fork-and-run death test.  
It implements a\n// straightforward fork, with a simple pipe to transmit the status byte.\nDeathTest::TestRole NoExecDeathTest::AssumeRole() {\n  const size_t thread_count = GetThreadCount();\n  if (thread_count != 1) {\n    GTEST_LOG_(WARNING) << DeathTestThreadWarning(thread_count);\n  }\n\n  int pipe_fd[2];\n  GTEST_DEATH_TEST_CHECK_(pipe(pipe_fd) != -1);\n\n  DeathTest::set_last_death_test_message(\"\");\n  CaptureStderr();\n  // When we fork the process below, the log file buffers are copied, but the\n  // file descriptors are shared.  We flush all log files here so that closing\n  // the file descriptors in the child process doesn't throw off the\n  // synchronization between descriptors and buffers in the parent process.\n  // This is as close to the fork as possible to avoid a race condition in case\n  // there are multiple threads running before the death test, and another\n  // thread writes to the log file.\n  FlushInfoLog();\n\n  const pid_t child_pid = fork();\n  GTEST_DEATH_TEST_CHECK_(child_pid != -1);\n  set_child_pid(child_pid);\n  if (child_pid == 0) {\n    GTEST_DEATH_TEST_CHECK_SYSCALL_(close(pipe_fd[0]));\n    set_write_fd(pipe_fd[1]);\n    // Redirects all logging to stderr in the child process to prevent\n    // concurrent writes to the log files.  
We capture stderr in the parent\n    // process and append the child process' output to a log.\n    LogToStderr();\n    // Event forwarding to the listeners of event listener API mush be shut\n    // down in death test subprocesses.\n    GetUnitTestImpl()->listeners()->SuppressEventForwarding();\n    g_in_fast_death_test_child = true;\n    return EXECUTE_TEST;\n  } else {\n    GTEST_DEATH_TEST_CHECK_SYSCALL_(close(pipe_fd[1]));\n    set_read_fd(pipe_fd[0]);\n    set_spawned(true);\n    return OVERSEE_TEST;\n  }\n}\n\n// A concrete death test class that forks and re-executes the main\n// program from the beginning, with command-line flags set that cause\n// only this specific death test to be run.\nclass ExecDeathTest : public ForkingDeathTest {\n public:\n  ExecDeathTest(const char* a_statement, Matcher<const std::string&> matcher,\n                const char* file, int line)\n      : ForkingDeathTest(a_statement, std::move(matcher)),\n        file_(file),\n        line_(line) {}\n  TestRole AssumeRole() override;\n\n private:\n  static ::std::vector<std::string> GetArgvsForDeathTestChildProcess() {\n    ::std::vector<std::string> args = GetInjectableArgvs();\n#  if defined(GTEST_EXTRA_DEATH_TEST_COMMAND_LINE_ARGS_)\n    ::std::vector<std::string> extra_args =\n        GTEST_EXTRA_DEATH_TEST_COMMAND_LINE_ARGS_();\n    args.insert(args.end(), extra_args.begin(), extra_args.end());\n#  endif  // defined(GTEST_EXTRA_DEATH_TEST_COMMAND_LINE_ARGS_)\n    return args;\n  }\n  // The name of the file in which the death test is located.\n  const char* const file_;\n  // The line number on which the death test is located.\n  const int line_;\n};\n\n// Utility class for accumulating command-line arguments.\nclass Arguments {\n public:\n  Arguments() { args_.push_back(nullptr); }\n\n  ~Arguments() {\n    for (std::vector<char*>::iterator i = args_.begin(); i != args_.end();\n         ++i) {\n      free(*i);\n    }\n  }\n  void AddArgument(const char* argument) {\n    
args_.insert(args_.end() - 1, posix::StrDup(argument));\n  }\n\n  template <typename Str>\n  void AddArguments(const ::std::vector<Str>& arguments) {\n    for (typename ::std::vector<Str>::const_iterator i = arguments.begin();\n         i != arguments.end();\n         ++i) {\n      args_.insert(args_.end() - 1, posix::StrDup(i->c_str()));\n    }\n  }\n  char* const* Argv() {\n    return &args_[0];\n  }\n\n private:\n  std::vector<char*> args_;\n};\n\n// A struct that encompasses the arguments to the child process of a\n// threadsafe-style death test process.\nstruct ExecDeathTestArgs {\n  char* const* argv;  // Command-line arguments for the child's call to exec\n  int close_fd;       // File descriptor to close; the read end of a pipe\n};\n\n#  if GTEST_OS_MAC\ninline char** GetEnviron() {\n  // When Google Test is built as a framework on MacOS X, the environ variable\n  // is unavailable. Apple's documentation (man environ) recommends using\n  // _NSGetEnviron() instead.\n  return *_NSGetEnviron();\n}\n#  else\n// Some POSIX platforms expect you to declare environ. extern \"C\" makes\n// it reside in the global namespace.\nextern \"C\" char** environ;\ninline char** GetEnviron() { return environ; }\n#  endif  // GTEST_OS_MAC\n\n#  if !GTEST_OS_QNX\n// The main function for a threadsafe-style death test child process.\n// This function is called in a clone()-ed process and thus must avoid\n// any potentially unsafe operations like malloc or libc functions.\nstatic int ExecDeathTestChildMain(void* child_arg) {\n  ExecDeathTestArgs* const args = static_cast<ExecDeathTestArgs*>(child_arg);\n  GTEST_DEATH_TEST_CHECK_SYSCALL_(close(args->close_fd));\n\n  // We need to execute the test program in the same environment where\n  // it was originally invoked.  
Therefore we change to the original\n  // working directory first.\n  const char* const original_dir =\n      UnitTest::GetInstance()->original_working_dir();\n  // We can safely call chdir() as it's a direct system call.\n  if (chdir(original_dir) != 0) {\n    DeathTestAbort(std::string(\"chdir(\\\"\") + original_dir + \"\\\") failed: \" +\n                   GetLastErrnoDescription());\n    return EXIT_FAILURE;\n  }\n\n  // We can safely call execve() as it's a direct system call.  We\n  // cannot use execvp() as it's a libc function and thus potentially\n  // unsafe.  Since execve() doesn't search the PATH, the user must\n  // invoke the test program via a valid path that contains at least\n  // one path separator.\n  execve(args->argv[0], args->argv, GetEnviron());\n  DeathTestAbort(std::string(\"execve(\") + args->argv[0] + \", ...) in \" +\n                 original_dir + \" failed: \" +\n                 GetLastErrnoDescription());\n  return EXIT_FAILURE;\n}\n#  endif  // !GTEST_OS_QNX\n\n#  if GTEST_HAS_CLONE\n// Two utility routines that together determine the direction the stack\n// grows.\n// This could be accomplished more elegantly by a single recursive\n// function, but we want to guard against the unlikely possibility of\n// a smart compiler optimizing the recursion away.\n//\n// GTEST_NO_INLINE_ is required to prevent GCC 4.6 from inlining\n// StackLowerThanAddress into StackGrowsDown, which then doesn't give\n// correct answer.\nstatic void StackLowerThanAddress(const void* ptr,\n                                  bool* result) GTEST_NO_INLINE_;\n// HWAddressSanitizer add a random tag to the MSB of the local variable address,\n// making comparison result unpredictable.\nGTEST_ATTRIBUTE_NO_SANITIZE_HWADDRESS_\nstatic void StackLowerThanAddress(const void* ptr, bool* result) {\n  int dummy;\n  *result = (&dummy < ptr);\n}\n\n// Make sure AddressSanitizer does not tamper with the stack 
here.\nGTEST_ATTRIBUTE_NO_SANITIZE_ADDRESS_\nGTEST_ATTRIBUTE_NO_SANITIZE_HWADDRESS_\nstatic bool StackGrowsDown() {\n  int dummy;\n  bool result;\n  StackLowerThanAddress(&dummy, &result);\n  return result;\n}\n#  endif  // GTEST_HAS_CLONE\n\n// Spawns a child process with the same executable as the current process in\n// a thread-safe manner and instructs it to run the death test.  The\n// implementation uses fork(2) + exec.  On systems where clone(2) is\n// available, it is used instead, being slightly more thread-safe.  On QNX,\n// fork supports only single-threaded environments, so this function uses\n// spawn(2) there instead.  The function dies with an error message if\n// anything goes wrong.\nstatic pid_t ExecDeathTestSpawnChild(char* const* argv, int close_fd) {\n  ExecDeathTestArgs args = { argv, close_fd };\n  pid_t child_pid = -1;\n\n#  if GTEST_OS_QNX\n  // Obtains the current directory and sets it to be closed in the child\n  // process.\n  const int cwd_fd = open(\".\", O_RDONLY);\n  GTEST_DEATH_TEST_CHECK_(cwd_fd != -1);\n  GTEST_DEATH_TEST_CHECK_SYSCALL_(fcntl(cwd_fd, F_SETFD, FD_CLOEXEC));\n  // We need to execute the test program in the same environment where\n  // it was originally invoked.  
Therefore we change to the original\n  // working directory first.\n  const char* const original_dir =\n      UnitTest::GetInstance()->original_working_dir();\n  // We can safely call chdir() as it's a direct system call.\n  if (chdir(original_dir) != 0) {\n    DeathTestAbort(std::string(\"chdir(\\\"\") + original_dir + \"\\\") failed: \" +\n                   GetLastErrnoDescription());\n    return EXIT_FAILURE;\n  }\n\n  int fd_flags;\n  // Set close_fd to be closed after spawn.\n  GTEST_DEATH_TEST_CHECK_SYSCALL_(fd_flags = fcntl(close_fd, F_GETFD));\n  GTEST_DEATH_TEST_CHECK_SYSCALL_(fcntl(close_fd, F_SETFD,\n                                        fd_flags | FD_CLOEXEC));\n  struct inheritance inherit = {0};\n  // spawn is a system call.\n  child_pid =\n      spawn(args.argv[0], 0, nullptr, &inherit, args.argv, GetEnviron());\n  // Restores the current working directory.\n  GTEST_DEATH_TEST_CHECK_(fchdir(cwd_fd) != -1);\n  GTEST_DEATH_TEST_CHECK_SYSCALL_(close(cwd_fd));\n\n#  else   // GTEST_OS_QNX\n#   if GTEST_OS_LINUX\n  // When a SIGPROF signal is received while fork() or clone() are executing,\n  // the process may hang. 
To avoid this, we ignore SIGPROF here and re-enable\n  // it after the call to fork()/clone() is complete.\n  struct sigaction saved_sigprof_action;\n  struct sigaction ignore_sigprof_action;\n  memset(&ignore_sigprof_action, 0, sizeof(ignore_sigprof_action));\n  sigemptyset(&ignore_sigprof_action.sa_mask);\n  ignore_sigprof_action.sa_handler = SIG_IGN;\n  GTEST_DEATH_TEST_CHECK_SYSCALL_(sigaction(\n      SIGPROF, &ignore_sigprof_action, &saved_sigprof_action));\n#   endif  // GTEST_OS_LINUX\n\n#   if GTEST_HAS_CLONE\n  const bool use_fork = GTEST_FLAG(death_test_use_fork);\n\n  if (!use_fork) {\n    static const bool stack_grows_down = StackGrowsDown();\n    const auto stack_size = static_cast<size_t>(getpagesize() * 2);\n    // MMAP_ANONYMOUS is not defined on Mac, so we use MAP_ANON instead.\n    void* const stack = mmap(nullptr, stack_size, PROT_READ | PROT_WRITE,\n                             MAP_ANON | MAP_PRIVATE, -1, 0);\n    GTEST_DEATH_TEST_CHECK_(stack != MAP_FAILED);\n\n    // Maximum stack alignment in bytes:  For a downward-growing stack, this\n    // amount is subtracted from size of the stack space to get an address\n    // that is within the stack space and is aligned on all systems we care\n    // about.  As far as I know there is no ABI with stack alignment greater\n    // than 64.  We assume stack and stack_size already have alignment of\n    // kMaxStackAlignment.\n    const size_t kMaxStackAlignment = 64;\n    void* const stack_top =\n        static_cast<char*>(stack) +\n            (stack_grows_down ? 
stack_size - kMaxStackAlignment : 0);\n    GTEST_DEATH_TEST_CHECK_(\n        static_cast<size_t>(stack_size) > kMaxStackAlignment &&\n        reinterpret_cast<uintptr_t>(stack_top) % kMaxStackAlignment == 0);\n\n    child_pid = clone(&ExecDeathTestChildMain, stack_top, SIGCHLD, &args);\n\n    GTEST_DEATH_TEST_CHECK_(munmap(stack, stack_size) != -1);\n  }\n#   else\n  const bool use_fork = true;\n#   endif  // GTEST_HAS_CLONE\n\n  if (use_fork && (child_pid = fork()) == 0) {\n      ExecDeathTestChildMain(&args);\n      _exit(0);\n  }\n#  endif  // GTEST_OS_QNX\n#  if GTEST_OS_LINUX\n  GTEST_DEATH_TEST_CHECK_SYSCALL_(\n      sigaction(SIGPROF, &saved_sigprof_action, nullptr));\n#  endif  // GTEST_OS_LINUX\n\n  GTEST_DEATH_TEST_CHECK_(child_pid != -1);\n  return child_pid;\n}\n\n// The AssumeRole process for a fork-and-exec death test.  It re-executes the\n// main program from the beginning, setting the --gtest_filter\n// and --gtest_internal_run_death_test flags to cause only the current\n// death test to be re-run.\nDeathTest::TestRole ExecDeathTest::AssumeRole() {\n  const UnitTestImpl* const impl = GetUnitTestImpl();\n  const InternalRunDeathTestFlag* const flag =\n      impl->internal_run_death_test_flag();\n  const TestInfo* const info = impl->current_test_info();\n  const int death_test_index = info->result()->death_test_count();\n\n  if (flag != nullptr) {\n    set_write_fd(flag->write_fd());\n    return EXECUTE_TEST;\n  }\n\n  int pipe_fd[2];\n  GTEST_DEATH_TEST_CHECK_(pipe(pipe_fd) != -1);\n  // Clear the close-on-exec flag on the write end of the pipe, lest\n  // it be closed when the child process does an exec:\n  GTEST_DEATH_TEST_CHECK_(fcntl(pipe_fd[1], F_SETFD, 0) != -1);\n\n  const std::string filter_flag = std::string(\"--\") + GTEST_FLAG_PREFIX_ +\n                                  kFilterFlag + \"=\" + info->test_suite_name() +\n                                  \".\" + info->name();\n  const std::string internal_flag =\n      std::string(\"--\") + 
GTEST_FLAG_PREFIX_ + kInternalRunDeathTestFlag + \"=\"\n      + file_ + \"|\" + StreamableToString(line_) + \"|\"\n      + StreamableToString(death_test_index) + \"|\"\n      + StreamableToString(pipe_fd[1]);\n  Arguments args;\n  args.AddArguments(GetArgvsForDeathTestChildProcess());\n  args.AddArgument(filter_flag.c_str());\n  args.AddArgument(internal_flag.c_str());\n\n  DeathTest::set_last_death_test_message(\"\");\n\n  CaptureStderr();\n  // See the comment in NoExecDeathTest::AssumeRole for why the next line\n  // is necessary.\n  FlushInfoLog();\n\n  const pid_t child_pid = ExecDeathTestSpawnChild(args.Argv(), pipe_fd[0]);\n  GTEST_DEATH_TEST_CHECK_SYSCALL_(close(pipe_fd[1]));\n  set_child_pid(child_pid);\n  set_read_fd(pipe_fd[0]);\n  set_spawned(true);\n  return OVERSEE_TEST;\n}\n\n# endif  // !GTEST_OS_WINDOWS\n\n// Creates a concrete DeathTest-derived class that depends on the\n// --gtest_death_test_style flag, and sets the pointer pointed to\n// by the \"test\" argument to its address.  If the test should be\n// skipped, sets that pointer to NULL.  
Returns true, unless the\n// flag is set to an invalid value.\nbool DefaultDeathTestFactory::Create(const char* statement,\n                                     Matcher<const std::string&> matcher,\n                                     const char* file, int line,\n                                     DeathTest** test) {\n  UnitTestImpl* const impl = GetUnitTestImpl();\n  const InternalRunDeathTestFlag* const flag =\n      impl->internal_run_death_test_flag();\n  const int death_test_index = impl->current_test_info()\n      ->increment_death_test_count();\n\n  if (flag != nullptr) {\n    if (death_test_index > flag->index()) {\n      DeathTest::set_last_death_test_message(\n          \"Death test count (\" + StreamableToString(death_test_index)\n          + \") somehow exceeded expected maximum (\"\n          + StreamableToString(flag->index()) + \")\");\n      return false;\n    }\n\n    if (!(flag->file() == file && flag->line() == line &&\n          flag->index() == death_test_index)) {\n      *test = nullptr;\n      return true;\n    }\n  }\n\n# if GTEST_OS_WINDOWS\n\n  if (GTEST_FLAG(death_test_style) == \"threadsafe\" ||\n      GTEST_FLAG(death_test_style) == \"fast\") {\n    *test = new WindowsDeathTest(statement, std::move(matcher), file, line);\n  }\n\n# elif GTEST_OS_FUCHSIA\n\n  if (GTEST_FLAG(death_test_style) == \"threadsafe\" ||\n      GTEST_FLAG(death_test_style) == \"fast\") {\n    *test = new FuchsiaDeathTest(statement, std::move(matcher), file, line);\n  }\n\n# else\n\n  if (GTEST_FLAG(death_test_style) == \"threadsafe\") {\n    *test = new ExecDeathTest(statement, std::move(matcher), file, line);\n  } else if (GTEST_FLAG(death_test_style) == \"fast\") {\n    *test = new NoExecDeathTest(statement, std::move(matcher));\n  }\n\n# endif  // GTEST_OS_WINDOWS\n\n  else {  // NOLINT - this is more readable than unbalanced brackets inside #if.\n    DeathTest::set_last_death_test_message(\n        \"Unknown death test style \\\"\" + 
GTEST_FLAG(death_test_style)\n        + \"\\\" encountered\");\n    return false;\n  }\n\n  return true;\n}\n\n# if GTEST_OS_WINDOWS\n// Recreates the pipe and event handles from the provided parameters,\n// signals the event, and returns a file descriptor wrapped around the pipe\n// handle. This function is called in the child process only.\nstatic int GetStatusFileDescriptor(unsigned int parent_process_id,\n                            size_t write_handle_as_size_t,\n                            size_t event_handle_as_size_t) {\n  AutoHandle parent_process_handle(::OpenProcess(PROCESS_DUP_HANDLE,\n                                                   FALSE,  // Non-inheritable.\n                                                   parent_process_id));\n  if (parent_process_handle.Get() == INVALID_HANDLE_VALUE) {\n    DeathTestAbort(\"Unable to open parent process \" +\n                   StreamableToString(parent_process_id));\n  }\n\n  GTEST_CHECK_(sizeof(HANDLE) <= sizeof(size_t));\n\n  const HANDLE write_handle =\n      reinterpret_cast<HANDLE>(write_handle_as_size_t);\n  HANDLE dup_write_handle;\n\n  // The newly initialized handle is accessible only in the parent\n  // process. 
To obtain one accessible within the child, we need to use\n  // DuplicateHandle.\n  if (!::DuplicateHandle(parent_process_handle.Get(), write_handle,\n                         ::GetCurrentProcess(), &dup_write_handle,\n                         0x0,    // Requested privileges ignored since\n                                 // DUPLICATE_SAME_ACCESS is used.\n                         FALSE,  // Request non-inheritable handler.\n                         DUPLICATE_SAME_ACCESS)) {\n    DeathTestAbort(\"Unable to duplicate the pipe handle \" +\n                   StreamableToString(write_handle_as_size_t) +\n                   \" from the parent process \" +\n                   StreamableToString(parent_process_id));\n  }\n\n  const HANDLE event_handle = reinterpret_cast<HANDLE>(event_handle_as_size_t);\n  HANDLE dup_event_handle;\n\n  if (!::DuplicateHandle(parent_process_handle.Get(), event_handle,\n                         ::GetCurrentProcess(), &dup_event_handle,\n                         0x0,\n                         FALSE,\n                         DUPLICATE_SAME_ACCESS)) {\n    DeathTestAbort(\"Unable to duplicate the event handle \" +\n                   StreamableToString(event_handle_as_size_t) +\n                   \" from the parent process \" +\n                   StreamableToString(parent_process_id));\n  }\n\n  const int write_fd =\n      ::_open_osfhandle(reinterpret_cast<intptr_t>(dup_write_handle), O_APPEND);\n  if (write_fd == -1) {\n    DeathTestAbort(\"Unable to convert pipe handle \" +\n                   StreamableToString(write_handle_as_size_t) +\n                   \" to a file descriptor\");\n  }\n\n  // Signals the parent that the write end of the pipe has been acquired\n  // so the parent can release its own write end.\n  ::SetEvent(dup_event_handle);\n\n  return write_fd;\n}\n# endif  // GTEST_OS_WINDOWS\n\n// Returns a newly created InternalRunDeathTestFlag object with fields\n// initialized from the GTEST_FLAG(internal_run_death_test) flag 
if\n// the flag is specified; otherwise returns NULL.\nInternalRunDeathTestFlag* ParseInternalRunDeathTestFlag() {\n  if (GTEST_FLAG(internal_run_death_test) == \"\") return nullptr;\n\n  // GTEST_HAS_DEATH_TEST implies that we have ::std::string, so we\n  // can use it here.\n  int line = -1;\n  int index = -1;\n  ::std::vector< ::std::string> fields;\n  SplitString(GTEST_FLAG(internal_run_death_test).c_str(), '|', &fields);\n  int write_fd = -1;\n\n# if GTEST_OS_WINDOWS\n\n  unsigned int parent_process_id = 0;\n  size_t write_handle_as_size_t = 0;\n  size_t event_handle_as_size_t = 0;\n\n  if (fields.size() != 6\n      || !ParseNaturalNumber(fields[1], &line)\n      || !ParseNaturalNumber(fields[2], &index)\n      || !ParseNaturalNumber(fields[3], &parent_process_id)\n      || !ParseNaturalNumber(fields[4], &write_handle_as_size_t)\n      || !ParseNaturalNumber(fields[5], &event_handle_as_size_t)) {\n    DeathTestAbort(\"Bad --gtest_internal_run_death_test flag: \" +\n                   GTEST_FLAG(internal_run_death_test));\n  }\n  write_fd = GetStatusFileDescriptor(parent_process_id,\n                                     write_handle_as_size_t,\n                                     event_handle_as_size_t);\n\n# elif GTEST_OS_FUCHSIA\n\n  if (fields.size() != 3\n      || !ParseNaturalNumber(fields[1], &line)\n      || !ParseNaturalNumber(fields[2], &index)) {\n    DeathTestAbort(\"Bad --gtest_internal_run_death_test flag: \"\n        + GTEST_FLAG(internal_run_death_test));\n  }\n\n# else\n\n  if (fields.size() != 4\n      || !ParseNaturalNumber(fields[1], &line)\n      || !ParseNaturalNumber(fields[2], &index)\n      || !ParseNaturalNumber(fields[3], &write_fd)) {\n    DeathTestAbort(\"Bad --gtest_internal_run_death_test flag: \"\n        + GTEST_FLAG(internal_run_death_test));\n  }\n\n# endif  // GTEST_OS_WINDOWS\n\n  return new InternalRunDeathTestFlag(fields[0], line, index, write_fd);\n}\n\n}  // namespace internal\n\n#endif  // GTEST_HAS_DEATH_TEST\n\n}  // 
namespace testing\n"
  },
  {
    "path": "test/googletest/src/gtest-filepath.cc",
    "content": "// Copyright 2008, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n#include \"gtest/internal/gtest-filepath.h\"\n\n#include <stdlib.h>\n#include \"gtest/internal/gtest-port.h\"\n#include \"gtest/gtest-message.h\"\n\n#if GTEST_OS_WINDOWS_MOBILE\n# include <windows.h>\n#elif GTEST_OS_WINDOWS\n# include <direct.h>\n# include <io.h>\n#else\n# include <limits.h>\n# include <climits>  // Some Linux distributions define PATH_MAX here.\n#endif  // GTEST_OS_WINDOWS_MOBILE\n\n#include \"gtest/internal/gtest-string.h\"\n\n#if GTEST_OS_WINDOWS\n# define GTEST_PATH_MAX_ _MAX_PATH\n#elif defined(PATH_MAX)\n# define GTEST_PATH_MAX_ PATH_MAX\n#elif defined(_XOPEN_PATH_MAX)\n# define GTEST_PATH_MAX_ _XOPEN_PATH_MAX\n#else\n# define GTEST_PATH_MAX_ _POSIX_PATH_MAX\n#endif  // GTEST_OS_WINDOWS\n\nnamespace testing {\nnamespace internal {\n\n#if GTEST_OS_WINDOWS\n// On Windows, '\\\\' is the standard path separator, but many tools and the\n// Windows API also accept '/' as an alternate path separator. Unless otherwise\n// noted, a file path can contain either kind of path separators, or a mixture\n// of them.\nconst char kPathSeparator = '\\\\';\nconst char kAlternatePathSeparator = '/';\nconst char kAlternatePathSeparatorString[] = \"/\";\n# if GTEST_OS_WINDOWS_MOBILE\n// Windows CE doesn't have a current directory. 
You should not use\n// the current directory in tests on Windows CE, but this at least\n// provides a reasonable fallback.\nconst char kCurrentDirectoryString[] = \"\\\\\";\n// Windows CE doesn't define INVALID_FILE_ATTRIBUTES\nconst DWORD kInvalidFileAttributes = 0xffffffff;\n# else\nconst char kCurrentDirectoryString[] = \".\\\\\";\n# endif  // GTEST_OS_WINDOWS_MOBILE\n#else\nconst char kPathSeparator = '/';\nconst char kCurrentDirectoryString[] = \"./\";\n#endif  // GTEST_OS_WINDOWS\n\n// Returns whether the given character is a valid path separator.\nstatic bool IsPathSeparator(char c) {\n#if GTEST_HAS_ALT_PATH_SEP_\n  return (c == kPathSeparator) || (c == kAlternatePathSeparator);\n#else\n  return c == kPathSeparator;\n#endif\n}\n\n// Returns the current working directory, or \"\" if unsuccessful.\nFilePath FilePath::GetCurrentDir() {\n#if GTEST_OS_WINDOWS_MOBILE || GTEST_OS_WINDOWS_PHONE || \\\n    GTEST_OS_WINDOWS_RT || GTEST_OS_ESP8266 || GTEST_OS_ESP32\n  // These platforms do not have a current directory, so we just return\n  // something reasonable.\n  return FilePath(kCurrentDirectoryString);\n#elif GTEST_OS_WINDOWS\n  char cwd[GTEST_PATH_MAX_ + 1] = { '\\0' };\n  return FilePath(_getcwd(cwd, sizeof(cwd)) == nullptr ? \"\" : cwd);\n#else\n  char cwd[GTEST_PATH_MAX_ + 1] = { '\\0' };\n  char* result = getcwd(cwd, sizeof(cwd));\n# if GTEST_OS_NACL\n  // getcwd will likely fail in NaCl due to the sandbox, so return something\n  // reasonable. The user may have provided a shim implementation for getcwd,\n  // however, so fallback only when failure is detected.\n  return FilePath(result == nullptr ? kCurrentDirectoryString : cwd);\n# endif  // GTEST_OS_NACL\n  return FilePath(result == nullptr ? \"\" : cwd);\n#endif  // GTEST_OS_WINDOWS_MOBILE\n}\n\n// Returns a copy of the FilePath with the case-insensitive extension removed.\n// Example: FilePath(\"dir/file.exe\").RemoveExtension(\"EXE\") returns\n// FilePath(\"dir/file\"). 
If a case-insensitive extension is not\n// found, returns a copy of the original FilePath.\nFilePath FilePath::RemoveExtension(const char* extension) const {\n  const std::string dot_extension = std::string(\".\") + extension;\n  if (String::EndsWithCaseInsensitive(pathname_, dot_extension)) {\n    return FilePath(pathname_.substr(\n        0, pathname_.length() - dot_extension.length()));\n  }\n  return *this;\n}\n\n// Returns a pointer to the last occurrence of a valid path separator in\n// the FilePath. On Windows, for example, both '/' and '\\' are valid path\n// separators. Returns NULL if no path separator was found.\nconst char* FilePath::FindLastPathSeparator() const {\n  const char* const last_sep = strrchr(c_str(), kPathSeparator);\n#if GTEST_HAS_ALT_PATH_SEP_\n  const char* const last_alt_sep = strrchr(c_str(), kAlternatePathSeparator);\n  // Comparing two pointers of which only one is NULL is undefined.\n  if (last_alt_sep != nullptr &&\n      (last_sep == nullptr || last_alt_sep > last_sep)) {\n    return last_alt_sep;\n  }\n#endif\n  return last_sep;\n}\n\n// Returns a copy of the FilePath with the directory part removed.\n// Example: FilePath(\"path/to/file\").RemoveDirectoryName() returns\n// FilePath(\"file\"). If there is no directory part (\"just_a_file\"), it returns\n// the FilePath unmodified. If there is no file part (\"just_a_dir/\") it\n// returns an empty FilePath (\"\").\n// On Windows platform, '\\' is the path separator, otherwise it is '/'.\nFilePath FilePath::RemoveDirectoryName() const {\n  const char* const last_sep = FindLastPathSeparator();\n  return last_sep ? FilePath(last_sep + 1) : *this;\n}\n\n// RemoveFileName returns the directory path with the filename removed.\n// Example: FilePath(\"path/to/file\").RemoveFileName() returns \"path/to/\".\n// If the FilePath is \"a_file\" or \"/a_file\", RemoveFileName returns\n// FilePath(\"./\") or, on Windows, FilePath(\".\\\\\"). 
If the filepath does\n// not have a file, like \"just/a/dir/\", it returns the FilePath unmodified.\n// On Windows platform, '\\' is the path separator, otherwise it is '/'.\nFilePath FilePath::RemoveFileName() const {\n  const char* const last_sep = FindLastPathSeparator();\n  std::string dir;\n  if (last_sep) {\n    dir = std::string(c_str(), static_cast<size_t>(last_sep + 1 - c_str()));\n  } else {\n    dir = kCurrentDirectoryString;\n  }\n  return FilePath(dir);\n}\n\n// Helper functions for naming files in a directory for xml output.\n\n// Given directory = \"dir\", base_name = \"test\", number = 0,\n// extension = \"xml\", returns \"dir/test.xml\". If number is greater\n// than zero (e.g., 12), returns \"dir/test_12.xml\".\n// On Windows platform, uses \\ as the separator rather than /.\nFilePath FilePath::MakeFileName(const FilePath& directory,\n                                const FilePath& base_name,\n                                int number,\n                                const char* extension) {\n  std::string file;\n  if (number == 0) {\n    file = base_name.string() + \".\" + extension;\n  } else {\n    file = base_name.string() + \"_\" + StreamableToString(number)\n        + \".\" + extension;\n  }\n  return ConcatPaths(directory, FilePath(file));\n}\n\n// Given directory = \"dir\", relative_path = \"test.xml\", returns \"dir/test.xml\".\n// On Windows, uses \\ as the separator rather than /.\nFilePath FilePath::ConcatPaths(const FilePath& directory,\n                               const FilePath& relative_path) {\n  if (directory.IsEmpty())\n    return relative_path;\n  const FilePath dir(directory.RemoveTrailingPathSeparator());\n  return FilePath(dir.string() + kPathSeparator + relative_path.string());\n}\n\n// Returns true if pathname describes something findable in the file-system,\n// either a file, directory, or whatever.\nbool FilePath::FileOrDirectoryExists() const {\n#if GTEST_OS_WINDOWS_MOBILE\n  LPCWSTR unicode = 
String::AnsiToUtf16(pathname_.c_str());\n  const DWORD attributes = GetFileAttributes(unicode);\n  delete [] unicode;\n  return attributes != kInvalidFileAttributes;\n#else\n  posix::StatStruct file_stat;\n  return posix::Stat(pathname_.c_str(), &file_stat) == 0;\n#endif  // GTEST_OS_WINDOWS_MOBILE\n}\n\n// Returns true if pathname describes a directory in the file-system\n// that exists.\nbool FilePath::DirectoryExists() const {\n  bool result = false;\n#if GTEST_OS_WINDOWS\n  // Don't strip off trailing separator if path is a root directory on\n  // Windows (like \"C:\\\\\").\n  const FilePath& path(IsRootDirectory() ? *this :\n                                           RemoveTrailingPathSeparator());\n#else\n  const FilePath& path(*this);\n#endif\n\n#if GTEST_OS_WINDOWS_MOBILE\n  LPCWSTR unicode = String::AnsiToUtf16(path.c_str());\n  const DWORD attributes = GetFileAttributes(unicode);\n  delete [] unicode;\n  if ((attributes != kInvalidFileAttributes) &&\n      (attributes & FILE_ATTRIBUTE_DIRECTORY)) {\n    result = true;\n  }\n#else\n  posix::StatStruct file_stat;\n  result = posix::Stat(path.c_str(), &file_stat) == 0 &&\n      posix::IsDir(file_stat);\n#endif  // GTEST_OS_WINDOWS_MOBILE\n\n  return result;\n}\n\n// Returns true if pathname describes a root directory. 
(Windows has one\n// root directory per disk drive.)\nbool FilePath::IsRootDirectory() const {\n#if GTEST_OS_WINDOWS\n  return pathname_.length() == 3 && IsAbsolutePath();\n#else\n  return pathname_.length() == 1 && IsPathSeparator(pathname_.c_str()[0]);\n#endif\n}\n\n// Returns true if pathname describes an absolute path.\nbool FilePath::IsAbsolutePath() const {\n  const char* const name = pathname_.c_str();\n#if GTEST_OS_WINDOWS\n  return pathname_.length() >= 3 &&\n     ((name[0] >= 'a' && name[0] <= 'z') ||\n      (name[0] >= 'A' && name[0] <= 'Z')) &&\n     name[1] == ':' &&\n     IsPathSeparator(name[2]);\n#else\n  return IsPathSeparator(name[0]);\n#endif\n}\n\n// Returns a pathname for a file that does not currently exist. The pathname\n// will be directory/base_name.extension or\n// directory/base_name_<number>.extension if directory/base_name.extension\n// already exists. The number will be incremented until a pathname is found\n// that does not already exist.\n// Examples: 'dir/foo_test.xml' or 'dir/foo_test_1.xml'.\n// There could be a race condition if two or more processes are calling this\n// function at the same time -- they could both pick the same filename.\nFilePath FilePath::GenerateUniqueFileName(const FilePath& directory,\n                                          const FilePath& base_name,\n                                          const char* extension) {\n  FilePath full_pathname;\n  int number = 0;\n  do {\n    full_pathname.Set(MakeFileName(directory, base_name, number++, extension));\n  } while (full_pathname.FileOrDirectoryExists());\n  return full_pathname;\n}\n\n// Returns true if FilePath ends with a path separator, which indicates that\n// it is intended to represent a directory. 
Returns false otherwise.\n// This does NOT check that a directory (or file) actually exists.\nbool FilePath::IsDirectory() const {\n  return !pathname_.empty() &&\n         IsPathSeparator(pathname_.c_str()[pathname_.length() - 1]);\n}\n\n// Create directories so that path exists. Returns true if successful or if\n// the directories already exist; returns false if unable to create directories\n// for any reason.\nbool FilePath::CreateDirectoriesRecursively() const {\n  if (!this->IsDirectory()) {\n    return false;\n  }\n\n  if (pathname_.length() == 0 || this->DirectoryExists()) {\n    return true;\n  }\n\n  const FilePath parent(this->RemoveTrailingPathSeparator().RemoveFileName());\n  return parent.CreateDirectoriesRecursively() && this->CreateFolder();\n}\n\n// Create the directory so that path exists. Returns true if successful or\n// if the directory already exists; returns false if unable to create the\n// directory for any reason, including if the parent directory does not\n// exist. Not named \"CreateDirectory\" because that's a macro on Windows.\nbool FilePath::CreateFolder() const {\n#if GTEST_OS_WINDOWS_MOBILE\n  FilePath removed_sep(this->RemoveTrailingPathSeparator());\n  LPCWSTR unicode = String::AnsiToUtf16(removed_sep.c_str());\n  int result = CreateDirectory(unicode, nullptr) ? 
0 : -1;\n  delete [] unicode;\n#elif GTEST_OS_WINDOWS\n  int result = _mkdir(pathname_.c_str());\n#elif GTEST_OS_ESP8266\n  // do nothing\n  int result = 0;\n#else\n  int result = mkdir(pathname_.c_str(), 0777);\n#endif  // GTEST_OS_WINDOWS_MOBILE\n\n  if (result == -1) {\n    return this->DirectoryExists();  // An error is OK if the directory exists.\n  }\n  return true;  // No error.\n}\n\n// If input name has a trailing separator character, remove it and return the\n// name, otherwise return the name string unmodified.\n// On Windows platform, uses \\ as the separator, other platforms use /.\nFilePath FilePath::RemoveTrailingPathSeparator() const {\n  return IsDirectory()\n      ? FilePath(pathname_.substr(0, pathname_.length() - 1))\n      : *this;\n}\n\n// Removes any redundant separators that might be in the pathname.\n// For example, \"bar///foo\" becomes \"bar/foo\". Does not eliminate other\n// redundancies that might be in a pathname involving \".\" or \"..\".\nvoid FilePath::Normalize() {\n  if (pathname_.c_str() == nullptr) {\n    pathname_ = \"\";\n    return;\n  }\n  const char* src = pathname_.c_str();\n  char* const dest = new char[pathname_.length() + 1];\n  char* dest_ptr = dest;\n  memset(dest_ptr, 0, pathname_.length() + 1);\n\n  while (*src != '\\0') {\n    *dest_ptr = *src;\n    if (!IsPathSeparator(*src)) {\n      src++;\n    } else {\n#if GTEST_HAS_ALT_PATH_SEP_\n      if (*dest_ptr == kAlternatePathSeparator) {\n        *dest_ptr = kPathSeparator;\n      }\n#endif\n      while (IsPathSeparator(*src))\n        src++;\n    }\n    dest_ptr++;\n  }\n  *dest_ptr = '\\0';\n  pathname_ = dest;\n  delete[] dest;\n}\n\n}  // namespace internal\n}  // namespace testing\n"
  },
  {
    "path": "test/googletest/src/gtest-internal-inl.h",
    "content": "// Copyright 2005, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n// Utility functions and classes used by the Google C++ testing framework.//\n// This file contains purely Google Test's internal implementation.  
Please\n// DO NOT #INCLUDE IT IN A USER PROGRAM.\n\n#ifndef GTEST_SRC_GTEST_INTERNAL_INL_H_\n#define GTEST_SRC_GTEST_INTERNAL_INL_H_\n\n#ifndef _WIN32_WCE\n# include <errno.h>\n#endif  // !_WIN32_WCE\n#include <stddef.h>\n#include <stdlib.h>  // For strtoll/_strtoul64/malloc/free.\n#include <string.h>  // For memmove.\n\n#include <algorithm>\n#include <memory>\n#include <string>\n#include <vector>\n\n#include \"gtest/internal/gtest-port.h\"\n\n#if GTEST_CAN_STREAM_RESULTS_\n# include <arpa/inet.h>  // NOLINT\n# include <netdb.h>  // NOLINT\n#endif\n\n#if GTEST_OS_WINDOWS\n# include <windows.h>  // NOLINT\n#endif  // GTEST_OS_WINDOWS\n\n#include \"gtest/gtest.h\"\n#include \"gtest/gtest-spi.h\"\n\nGTEST_DISABLE_MSC_WARNINGS_PUSH_(4251 \\\n/* class A needs to have dll-interface to be used by clients of class B */)\n\nnamespace testing {\n\n// Declares the flags.\n//\n// We don't want the users to modify this flag in the code, but want\n// Google Test's own unit tests to be able to access it. Therefore we\n// declare it here as opposed to in gtest.h.\nGTEST_DECLARE_bool_(death_test_use_fork);\n\nnamespace internal {\n\n// The value of GetTestTypeId() as seen from within the Google Test\n// library.  
This is solely for testing GetTestTypeId().\nGTEST_API_ extern const TypeId kTestTypeIdInGoogleTest;\n\n// Names of the flags (needed for parsing Google Test flags).\nconst char kAlsoRunDisabledTestsFlag[] = \"also_run_disabled_tests\";\nconst char kBreakOnFailureFlag[] = \"break_on_failure\";\nconst char kCatchExceptionsFlag[] = \"catch_exceptions\";\nconst char kColorFlag[] = \"color\";\nconst char kFilterFlag[] = \"filter\";\nconst char kListTestsFlag[] = \"list_tests\";\nconst char kOutputFlag[] = \"output\";\nconst char kPrintTimeFlag[] = \"print_time\";\nconst char kPrintUTF8Flag[] = \"print_utf8\";\nconst char kRandomSeedFlag[] = \"random_seed\";\nconst char kRepeatFlag[] = \"repeat\";\nconst char kShuffleFlag[] = \"shuffle\";\nconst char kStackTraceDepthFlag[] = \"stack_trace_depth\";\nconst char kStreamResultToFlag[] = \"stream_result_to\";\nconst char kThrowOnFailureFlag[] = \"throw_on_failure\";\nconst char kFlagfileFlag[] = \"flagfile\";\n\n// A valid random seed must be in [1, kMaxRandomSeed].\nconst int kMaxRandomSeed = 99999;\n\n// g_help_flag is true if and only if the --help flag or an equivalent form\n// is specified on the command line.\nGTEST_API_ extern bool g_help_flag;\n\n// Returns the current time in milliseconds.\nGTEST_API_ TimeInMillis GetTimeInMillis();\n\n// Returns true if and only if Google Test should use colors in the output.\nGTEST_API_ bool ShouldUseColor(bool stdout_is_tty);\n\n// Formats the given time in milliseconds as seconds.\nGTEST_API_ std::string FormatTimeInMillisAsSeconds(TimeInMillis ms);\n\n// Converts the given time in milliseconds to a date string in the ISO 8601\n// format, without the timezone information.  N.B.: due to the use the\n// non-reentrant localtime() function, this function is not thread safe.  
Do\n// not use it in any code that can be called from multiple threads.\nGTEST_API_ std::string FormatEpochTimeInMillisAsIso8601(TimeInMillis ms);\n\n// Parses a string for an Int32 flag, in the form of \"--flag=value\".\n//\n// On success, stores the value of the flag in *value, and returns\n// true.  On failure, returns false without changing *value.\nGTEST_API_ bool ParseInt32Flag(\n    const char* str, const char* flag, Int32* value);\n\n// Returns a random seed in range [1, kMaxRandomSeed] based on the\n// given --gtest_random_seed flag value.\ninline int GetRandomSeedFromFlag(Int32 random_seed_flag) {\n  const unsigned int raw_seed = (random_seed_flag == 0) ?\n      static_cast<unsigned int>(GetTimeInMillis()) :\n      static_cast<unsigned int>(random_seed_flag);\n\n  // Normalizes the actual seed to range [1, kMaxRandomSeed] such that\n  // it's easy to type.\n  const int normalized_seed =\n      static_cast<int>((raw_seed - 1U) %\n                       static_cast<unsigned int>(kMaxRandomSeed)) + 1;\n  return normalized_seed;\n}\n\n// Returns the first valid random seed after 'seed'.  The behavior is\n// undefined if 'seed' is invalid.  The seed after kMaxRandomSeed is\n// considered to be 1.\ninline int GetNextRandomSeed(int seed) {\n  GTEST_CHECK_(1 <= seed && seed <= kMaxRandomSeed)\n      << \"Invalid random seed \" << seed << \" - must be in [1, \"\n      << kMaxRandomSeed << \"].\";\n  const int next_seed = seed + 1;\n  return (next_seed > kMaxRandomSeed) ? 
1 : next_seed;\n}\n\n// This class saves the values of all Google Test flags in its c'tor, and\n// restores them in its d'tor.\nclass GTestFlagSaver {\n public:\n  // The c'tor.\n  GTestFlagSaver() {\n    also_run_disabled_tests_ = GTEST_FLAG(also_run_disabled_tests);\n    break_on_failure_ = GTEST_FLAG(break_on_failure);\n    catch_exceptions_ = GTEST_FLAG(catch_exceptions);\n    color_ = GTEST_FLAG(color);\n    death_test_style_ = GTEST_FLAG(death_test_style);\n    death_test_use_fork_ = GTEST_FLAG(death_test_use_fork);\n    filter_ = GTEST_FLAG(filter);\n    internal_run_death_test_ = GTEST_FLAG(internal_run_death_test);\n    list_tests_ = GTEST_FLAG(list_tests);\n    output_ = GTEST_FLAG(output);\n    print_time_ = GTEST_FLAG(print_time);\n    print_utf8_ = GTEST_FLAG(print_utf8);\n    random_seed_ = GTEST_FLAG(random_seed);\n    repeat_ = GTEST_FLAG(repeat);\n    shuffle_ = GTEST_FLAG(shuffle);\n    stack_trace_depth_ = GTEST_FLAG(stack_trace_depth);\n    stream_result_to_ = GTEST_FLAG(stream_result_to);\n    throw_on_failure_ = GTEST_FLAG(throw_on_failure);\n  }\n\n  // The d'tor is not virtual.  
DO NOT INHERIT FROM THIS CLASS.\n  ~GTestFlagSaver() {\n    GTEST_FLAG(also_run_disabled_tests) = also_run_disabled_tests_;\n    GTEST_FLAG(break_on_failure) = break_on_failure_;\n    GTEST_FLAG(catch_exceptions) = catch_exceptions_;\n    GTEST_FLAG(color) = color_;\n    GTEST_FLAG(death_test_style) = death_test_style_;\n    GTEST_FLAG(death_test_use_fork) = death_test_use_fork_;\n    GTEST_FLAG(filter) = filter_;\n    GTEST_FLAG(internal_run_death_test) = internal_run_death_test_;\n    GTEST_FLAG(list_tests) = list_tests_;\n    GTEST_FLAG(output) = output_;\n    GTEST_FLAG(print_time) = print_time_;\n    GTEST_FLAG(print_utf8) = print_utf8_;\n    GTEST_FLAG(random_seed) = random_seed_;\n    GTEST_FLAG(repeat) = repeat_;\n    GTEST_FLAG(shuffle) = shuffle_;\n    GTEST_FLAG(stack_trace_depth) = stack_trace_depth_;\n    GTEST_FLAG(stream_result_to) = stream_result_to_;\n    GTEST_FLAG(throw_on_failure) = throw_on_failure_;\n  }\n\n private:\n  // Fields for saving the original values of flags.\n  bool also_run_disabled_tests_;\n  bool break_on_failure_;\n  bool catch_exceptions_;\n  std::string color_;\n  std::string death_test_style_;\n  bool death_test_use_fork_;\n  std::string filter_;\n  std::string internal_run_death_test_;\n  bool list_tests_;\n  std::string output_;\n  bool print_time_;\n  bool print_utf8_;\n  internal::Int32 random_seed_;\n  internal::Int32 repeat_;\n  bool shuffle_;\n  internal::Int32 stack_trace_depth_;\n  std::string stream_result_to_;\n  bool throw_on_failure_;\n} GTEST_ATTRIBUTE_UNUSED_;\n\n// Converts a Unicode code point to a narrow string in UTF-8 encoding.\n// code_point parameter is of type UInt32 because wchar_t may not be\n// wide enough to contain a code point.\n// If the code_point is not a valid Unicode code point\n// (i.e. 
outside of Unicode range U+0 to U+10FFFF) it will be converted\n// to \"(Invalid Unicode 0xXXXXXXXX)\".\nGTEST_API_ std::string CodePointToUtf8(UInt32 code_point);\n\n// Converts a wide string to a narrow string in UTF-8 encoding.\n// The wide string is assumed to have the following encoding:\n//   UTF-16 if sizeof(wchar_t) == 2 (on Windows, Cygwin)\n//   UTF-32 if sizeof(wchar_t) == 4 (on Linux)\n// Parameter str points to a null-terminated wide string.\n// Parameter num_chars may additionally limit the number\n// of wchar_t characters processed. -1 is used when the entire string\n// should be processed.\n// If the string contains code points that are not valid Unicode code points\n// (i.e. outside of Unicode range U+0 to U+10FFFF) they will be output\n// as '(Invalid Unicode 0xXXXXXXXX)'. If the string is in UTF16 encoding\n// and contains invalid UTF-16 surrogate pairs, values in those pairs\n// will be encoded as individual Unicode characters from Basic Normal Plane.\nGTEST_API_ std::string WideStringToUtf8(const wchar_t* str, int num_chars);\n\n// Reads the GTEST_SHARD_STATUS_FILE environment variable, and creates the file\n// if the variable is present. If a file already exists at this location, this\n// function will write over it. If the variable is present, but the file cannot\n// be created, prints an error and exits.\nvoid WriteToShardStatusFileIfNeeded();\n\n// Checks whether sharding is enabled by examining the relevant\n// environment variable values. If the variables are present,\n// but inconsistent (e.g., shard_index >= total_shards), prints\n// an error and exits. If in_subprocess_for_death_test, sharding is\n// disabled because it must only be applied to the original test\n// process. 
Otherwise, we could filter out death tests we intended to execute.\nGTEST_API_ bool ShouldShard(const char* total_shards_str,\n                            const char* shard_index_str,\n                            bool in_subprocess_for_death_test);\n\n// Parses the environment variable var as an Int32. If it is unset,\n// returns default_val. If it is not an Int32, prints an error and\n// and aborts.\nGTEST_API_ Int32 Int32FromEnvOrDie(const char* env_var, Int32 default_val);\n\n// Given the total number of shards, the shard index, and the test id,\n// returns true if and only if the test should be run on this shard. The test id\n// is some arbitrary but unique non-negative integer assigned to each test\n// method. Assumes that 0 <= shard_index < total_shards.\nGTEST_API_ bool ShouldRunTestOnShard(\n    int total_shards, int shard_index, int test_id);\n\n// STL container utilities.\n\n// Returns the number of elements in the given container that satisfy\n// the given predicate.\ntemplate <class Container, typename Predicate>\ninline int CountIf(const Container& c, Predicate predicate) {\n  // Implemented as an explicit loop since std::count_if() in libCstd on\n  // Solaris has a non-standard signature.\n  int count = 0;\n  for (typename Container::const_iterator it = c.begin(); it != c.end(); ++it) {\n    if (predicate(*it))\n      ++count;\n  }\n  return count;\n}\n\n// Applies a function/functor to each element in the container.\ntemplate <class Container, typename Functor>\nvoid ForEach(const Container& c, Functor functor) {\n  std::for_each(c.begin(), c.end(), functor);\n}\n\n// Returns the i-th element of the vector, or default_value if i is not\n// in range [0, v.size()).\ntemplate <typename E>\ninline E GetElementOr(const std::vector<E>& v, int i, E default_value) {\n  return (i < 0 || i >= static_cast<int>(v.size())) ? 
default_value\n                                                    : v[static_cast<size_t>(i)];\n}\n\n// Performs an in-place shuffle of a range of the vector's elements.\n// 'begin' and 'end' are element indices as an STL-style range;\n// i.e. [begin, end) are shuffled, where 'end' == size() means to\n// shuffle to the end of the vector.\ntemplate <typename E>\nvoid ShuffleRange(internal::Random* random, int begin, int end,\n                  std::vector<E>* v) {\n  const int size = static_cast<int>(v->size());\n  GTEST_CHECK_(0 <= begin && begin <= size)\n      << \"Invalid shuffle range start \" << begin << \": must be in range [0, \"\n      << size << \"].\";\n  GTEST_CHECK_(begin <= end && end <= size)\n      << \"Invalid shuffle range finish \" << end << \": must be in range [\"\n      << begin << \", \" << size << \"].\";\n\n  // Fisher-Yates shuffle, from\n  // http://en.wikipedia.org/wiki/Fisher-Yates_shuffle\n  for (int range_width = end - begin; range_width >= 2; range_width--) {\n    const int last_in_range = begin + range_width - 1;\n    const int selected =\n        begin +\n        static_cast<int>(random->Generate(static_cast<UInt32>(range_width)));\n    std::swap((*v)[static_cast<size_t>(selected)],\n              (*v)[static_cast<size_t>(last_in_range)]);\n  }\n}\n\n// Performs an in-place shuffle of the vector's elements.\ntemplate <typename E>\ninline void Shuffle(internal::Random* random, std::vector<E>* v) {\n  ShuffleRange(random, 0, static_cast<int>(v->size()), v);\n}\n\n// A function for deleting an object.  
Handy for being used as a\n// functor.\ntemplate <typename T>\nstatic void Delete(T* x) {\n  delete x;\n}\n\n// A predicate that checks the key of a TestProperty against a known key.\n//\n// TestPropertyKeyIs is copyable.\nclass TestPropertyKeyIs {\n public:\n  // Constructor.\n  //\n  // TestPropertyKeyIs has NO default constructor.\n  explicit TestPropertyKeyIs(const std::string& key) : key_(key) {}\n\n  // Returns true if and only if the test name of test property matches on key_.\n  bool operator()(const TestProperty& test_property) const {\n    return test_property.key() == key_;\n  }\n\n private:\n  std::string key_;\n};\n\n// Class UnitTestOptions.\n//\n// This class contains functions for processing options the user\n// specifies when running the tests.  It has only static members.\n//\n// In most cases, the user can specify an option using either an\n// environment variable or a command line flag.  E.g. you can set the\n// test filter using either GTEST_FILTER or --gtest_filter.  
If both\n// the variable and the flag are present, the latter overrides the\n// former.\nclass GTEST_API_ UnitTestOptions {\n public:\n  // Functions for processing the gtest_output flag.\n\n  // Returns the output format, or \"\" for normal printed output.\n  static std::string GetOutputFormat();\n\n  // Returns the absolute path of the requested output file, or the\n  // default (test_detail.xml in the original working directory) if\n  // none was explicitly specified.\n  static std::string GetAbsolutePathToOutputFile();\n\n  // Functions for processing the gtest_filter flag.\n\n  // Returns true if and only if the wildcard pattern matches the string.\n  // The first ':' or '\\0' character in pattern marks the end of it.\n  //\n  // This recursive algorithm isn't very efficient, but is clear and\n  // works well enough for matching test names, which are short.\n  static bool PatternMatchesString(const char *pattern, const char *str);\n\n  // Returns true if and only if the user-specified filter matches the test\n  // suite name and the test name.\n  static bool FilterMatchesTest(const std::string& test_suite_name,\n                                const std::string& test_name);\n\n#if GTEST_OS_WINDOWS\n  // Function for supporting the gtest_catch_exception flag.\n\n  // Returns EXCEPTION_EXECUTE_HANDLER if Google Test should handle the\n  // given SEH exception, or EXCEPTION_CONTINUE_SEARCH otherwise.\n  // This function is useful as an __except condition.\n  static int GTestShouldProcessSEH(DWORD exception_code);\n#endif  // GTEST_OS_WINDOWS\n\n  // Returns true if \"name\" matches the ':' separated list of glob-style\n  // filters in \"filter\".\n  static bool MatchesFilter(const std::string& name, const char* filter);\n};\n\n// Returns the current application's name, removing directory path if that\n// is present.  
Used by UnitTestOptions::GetOutputFile.\nGTEST_API_ FilePath GetCurrentExecutableName();\n\n// The role interface for getting the OS stack trace as a string.\nclass OsStackTraceGetterInterface {\n public:\n  OsStackTraceGetterInterface() {}\n  virtual ~OsStackTraceGetterInterface() {}\n\n  // Returns the current OS stack trace as an std::string.  Parameters:\n  //\n  //   max_depth  - the maximum number of stack frames to be included\n  //                in the trace.\n  //   skip_count - the number of top frames to be skipped; doesn't count\n  //                against max_depth.\n  virtual std::string CurrentStackTrace(int max_depth, int skip_count) = 0;\n\n  // UponLeavingGTest() should be called immediately before Google Test calls\n  // user code. It saves some information about the current stack that\n  // CurrentStackTrace() will use to find and hide Google Test stack frames.\n  virtual void UponLeavingGTest() = 0;\n\n  // This string is inserted in place of stack frames that are part of\n  // Google Test's implementation.\n  static const char* const kElidedFramesMarker;\n\n private:\n  GTEST_DISALLOW_COPY_AND_ASSIGN_(OsStackTraceGetterInterface);\n};\n\n// A working implementation of the OsStackTraceGetterInterface interface.\nclass OsStackTraceGetter : public OsStackTraceGetterInterface {\n public:\n  OsStackTraceGetter() {}\n\n  std::string CurrentStackTrace(int max_depth, int skip_count) override;\n  void UponLeavingGTest() override;\n\n private:\n#if GTEST_HAS_ABSL\n  Mutex mutex_;  // Protects all internal state.\n\n  // We save the stack frame below the frame that calls user code.\n  // We do this because the address of the frame immediately below\n  // the user code changes between the call to UponLeavingGTest()\n  // and any calls to the stack trace code from within the user code.\n  void* caller_frame_ = nullptr;\n#endif  // GTEST_HAS_ABSL\n\n  GTEST_DISALLOW_COPY_AND_ASSIGN_(OsStackTraceGetter);\n};\n\n// Information about a Google Test trace 
point.\nstruct TraceInfo {\n  const char* file;\n  int line;\n  std::string message;\n};\n\n// This is the default global test part result reporter used in UnitTestImpl.\n// This class should only be used by UnitTestImpl.\nclass DefaultGlobalTestPartResultReporter\n  : public TestPartResultReporterInterface {\n public:\n  explicit DefaultGlobalTestPartResultReporter(UnitTestImpl* unit_test);\n  // Implements the TestPartResultReporterInterface. Reports the test part\n  // result in the current test.\n  void ReportTestPartResult(const TestPartResult& result) override;\n\n private:\n  UnitTestImpl* const unit_test_;\n\n  GTEST_DISALLOW_COPY_AND_ASSIGN_(DefaultGlobalTestPartResultReporter);\n};\n\n// This is the default per thread test part result reporter used in\n// UnitTestImpl. This class should only be used by UnitTestImpl.\nclass DefaultPerThreadTestPartResultReporter\n    : public TestPartResultReporterInterface {\n public:\n  explicit DefaultPerThreadTestPartResultReporter(UnitTestImpl* unit_test);\n  // Implements the TestPartResultReporterInterface. The implementation just\n  // delegates to the current global test part result reporter of *unit_test_.\n  void ReportTestPartResult(const TestPartResult& result) override;\n\n private:\n  UnitTestImpl* const unit_test_;\n\n  GTEST_DISALLOW_COPY_AND_ASSIGN_(DefaultPerThreadTestPartResultReporter);\n};\n\n// The private implementation of the UnitTest class.  
We don't protect\n// the methods under a mutex, as this class is not accessible by a\n// user and the UnitTest class that delegates work to this class does\n// proper locking.\nclass GTEST_API_ UnitTestImpl {\n public:\n  explicit UnitTestImpl(UnitTest* parent);\n  virtual ~UnitTestImpl();\n\n  // There are two different ways to register your own TestPartResultReporter.\n  // You can register your own repoter to listen either only for test results\n  // from the current thread or for results from all threads.\n  // By default, each per-thread test result repoter just passes a new\n  // TestPartResult to the global test result reporter, which registers the\n  // test part result for the currently running test.\n\n  // Returns the global test part result reporter.\n  TestPartResultReporterInterface* GetGlobalTestPartResultReporter();\n\n  // Sets the global test part result reporter.\n  void SetGlobalTestPartResultReporter(\n      TestPartResultReporterInterface* reporter);\n\n  // Returns the test part result reporter for the current thread.\n  TestPartResultReporterInterface* GetTestPartResultReporterForCurrentThread();\n\n  // Sets the test part result reporter for the current thread.\n  void SetTestPartResultReporterForCurrentThread(\n      TestPartResultReporterInterface* reporter);\n\n  // Gets the number of successful test suites.\n  int successful_test_suite_count() const;\n\n  // Gets the number of failed test suites.\n  int failed_test_suite_count() const;\n\n  // Gets the number of all test suites.\n  int total_test_suite_count() const;\n\n  // Gets the number of all test suites that contain at least one test\n  // that should run.\n  int test_suite_to_run_count() const;\n\n  // Gets the number of successful tests.\n  int successful_test_count() const;\n\n  // Gets the number of skipped tests.\n  int skipped_test_count() const;\n\n  // Gets the number of failed tests.\n  int failed_test_count() const;\n\n  // Gets the number of disabled tests that will be 
reported in the XML report.\n  int reportable_disabled_test_count() const;\n\n  // Gets the number of disabled tests.\n  int disabled_test_count() const;\n\n  // Gets the number of tests to be printed in the XML report.\n  int reportable_test_count() const;\n\n  // Gets the number of all tests.\n  int total_test_count() const;\n\n  // Gets the number of tests that should run.\n  int test_to_run_count() const;\n\n  // Gets the time of the test program start, in ms from the start of the\n  // UNIX epoch.\n  TimeInMillis start_timestamp() const { return start_timestamp_; }\n\n  // Gets the elapsed time, in milliseconds.\n  TimeInMillis elapsed_time() const { return elapsed_time_; }\n\n  // Returns true if and only if the unit test passed (i.e. all test suites\n  // passed).\n  bool Passed() const { return !Failed(); }\n\n  // Returns true if and only if the unit test failed (i.e. some test suite\n  // failed or something outside of all tests failed).\n  bool Failed() const {\n    return failed_test_suite_count() > 0 || ad_hoc_test_result()->Failed();\n  }\n\n  // Gets the i-th test suite among all the test suites. i can range from 0 to\n  // total_test_suite_count() - 1. If i is not in that range, returns NULL.\n  const TestSuite* GetTestSuite(int i) const {\n    const int index = GetElementOr(test_suite_indices_, i, -1);\n    return index < 0 ? nullptr : test_suites_[static_cast<size_t>(i)];\n  }\n\n  //  Legacy API is deprecated but still available\n#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_\n  const TestCase* GetTestCase(int i) const { return GetTestSuite(i); }\n#endif  //  GTEST_REMOVE_LEGACY_TEST_CASEAPI_\n\n  // Gets the i-th test suite among all the test suites. i can range from 0 to\n  // total_test_suite_count() - 1. If i is not in that range, returns NULL.\n  TestSuite* GetMutableSuiteCase(int i) {\n    const int index = GetElementOr(test_suite_indices_, i, -1);\n    return index < 0 ? 
nullptr : test_suites_[static_cast<size_t>(index)];\n  }\n\n  // Provides access to the event listener list.\n  TestEventListeners* listeners() { return &listeners_; }\n\n  // Returns the TestResult for the test that's currently running, or\n  // the TestResult for the ad hoc test if no test is running.\n  TestResult* current_test_result();\n\n  // Returns the TestResult for the ad hoc test.\n  const TestResult* ad_hoc_test_result() const { return &ad_hoc_test_result_; }\n\n  // Sets the OS stack trace getter.\n  //\n  // Does nothing if the input and the current OS stack trace getter\n  // are the same; otherwise, deletes the old getter and makes the\n  // input the current getter.\n  void set_os_stack_trace_getter(OsStackTraceGetterInterface* getter);\n\n  // Returns the current OS stack trace getter if it is not NULL;\n  // otherwise, creates an OsStackTraceGetter, makes it the current\n  // getter, and returns it.\n  OsStackTraceGetterInterface* os_stack_trace_getter();\n\n  // Returns the current OS stack trace as an std::string.\n  //\n  // The maximum number of stack frames to be included is specified by\n  // the gtest_stack_trace_depth flag.  The skip_count parameter\n  // specifies the number of top frames to be skipped, which doesn't\n  // count against the number of frames to be included.\n  //\n  // For example, if Foo() calls Bar(), which in turn calls\n  // CurrentOsStackTraceExceptTop(1), Foo() will be included in the\n  // trace but Bar() and CurrentOsStackTraceExceptTop() won't.\n  std::string CurrentOsStackTraceExceptTop(int skip_count) GTEST_NO_INLINE_;\n\n  // Finds and returns a TestSuite with the given name.  
If one doesn't\n  // exist, creates one and returns it.\n  //\n  // Arguments:\n  //\n  //   test_suite_name: name of the test suite\n  //   type_param:     the name of the test's type parameter, or NULL if\n  //                   this is not a typed or a type-parameterized test.\n  //   set_up_tc:      pointer to the function that sets up the test suite\n  //   tear_down_tc:   pointer to the function that tears down the test suite\n  TestSuite* GetTestSuite(const char* test_suite_name, const char* type_param,\n                          internal::SetUpTestSuiteFunc set_up_tc,\n                          internal::TearDownTestSuiteFunc tear_down_tc);\n\n//  Legacy API is deprecated but still available\n#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_\n  TestCase* GetTestCase(const char* test_case_name, const char* type_param,\n                        internal::SetUpTestSuiteFunc set_up_tc,\n                        internal::TearDownTestSuiteFunc tear_down_tc) {\n    return GetTestSuite(test_case_name, type_param, set_up_tc, tear_down_tc);\n  }\n#endif  //  GTEST_REMOVE_LEGACY_TEST_CASEAPI_\n\n  // Adds a TestInfo to the unit test.\n  //\n  // Arguments:\n  //\n  //   set_up_tc:    pointer to the function that sets up the test suite\n  //   tear_down_tc: pointer to the function that tears down the test suite\n  //   test_info:    the TestInfo object\n  void AddTestInfo(internal::SetUpTestSuiteFunc set_up_tc,\n                   internal::TearDownTestSuiteFunc tear_down_tc,\n                   TestInfo* test_info) {\n    // In order to support thread-safe death tests, we need to\n    // remember the original working directory when the test program\n    // was first invoked.  We cannot do this in RUN_ALL_TESTS(), as\n    // the user may have changed the current directory before calling\n    // RUN_ALL_TESTS().  
Therefore we capture the current directory in\n    // AddTestInfo(), which is called to register a TEST or TEST_F\n    // before main() is reached.\n    if (original_working_dir_.IsEmpty()) {\n      original_working_dir_.Set(FilePath::GetCurrentDir());\n      GTEST_CHECK_(!original_working_dir_.IsEmpty())\n          << \"Failed to get the current working directory.\";\n    }\n\n    GetTestSuite(test_info->test_suite_name(), test_info->type_param(),\n                 set_up_tc, tear_down_tc)\n        ->AddTestInfo(test_info);\n  }\n\n  // Returns ParameterizedTestSuiteRegistry object used to keep track of\n  // value-parameterized tests and instantiate and register them.\n  internal::ParameterizedTestSuiteRegistry& parameterized_test_registry() {\n    return parameterized_test_registry_;\n  }\n\n  // Sets the TestSuite object for the test that's currently running.\n  void set_current_test_suite(TestSuite* a_current_test_suite) {\n    current_test_suite_ = a_current_test_suite;\n  }\n\n  // Sets the TestInfo object for the test that's currently running.  If\n  // current_test_info is NULL, the assertion results will be stored in\n  // ad_hoc_test_result_.\n  void set_current_test_info(TestInfo* a_current_test_info) {\n    current_test_info_ = a_current_test_info;\n  }\n\n  // Registers all parameterized tests defined using TEST_P and\n  // INSTANTIATE_TEST_SUITE_P, creating regular tests for each test/parameter\n  // combination. This method can be called more then once; it has guards\n  // protecting from registering the tests more then once.  If\n  // value-parameterized tests are disabled, RegisterParameterizedTests is\n  // present but does nothing.\n  void RegisterParameterizedTests();\n\n  // Runs all tests in this UnitTest object, prints the result, and\n  // returns true if all tests are successful.  
If any exception is\n  // thrown during a test, this test is considered to be failed, but\n  // the rest of the tests will still be run.\n  bool RunAllTests();\n\n  // Clears the results of all tests, except the ad hoc tests.\n  void ClearNonAdHocTestResult() {\n    ForEach(test_suites_, TestSuite::ClearTestSuiteResult);\n  }\n\n  // Clears the results of ad-hoc test assertions.\n  void ClearAdHocTestResult() {\n    ad_hoc_test_result_.Clear();\n  }\n\n  // Adds a TestProperty to the current TestResult object when invoked in a\n  // context of a test or a test suite, or to the global property set. If the\n  // result already contains a property with the same key, the value will be\n  // updated.\n  void RecordProperty(const TestProperty& test_property);\n\n  enum ReactionToSharding {\n    HONOR_SHARDING_PROTOCOL,\n    IGNORE_SHARDING_PROTOCOL\n  };\n\n  // Matches the full name of each test against the user-specified\n  // filter to decide whether the test should run, then records the\n  // result in each TestSuite and TestInfo object.\n  // If shard_tests == HONOR_SHARDING_PROTOCOL, further filters tests\n  // based on sharding variables in the environment.\n  // Returns the number of tests that should run.\n  int FilterTests(ReactionToSharding shard_tests);\n\n  // Prints the names of the tests matching the user-specified filter flag.\n  void ListTestsMatchingFilter();\n\n  const TestSuite* current_test_suite() const { return current_test_suite_; }\n  TestInfo* current_test_info() { return current_test_info_; }\n  const TestInfo* current_test_info() const { return current_test_info_; }\n\n  // Returns the vector of environments that need to be set-up/torn-down\n  // before/after the tests are run.\n  std::vector<Environment*>& environments() { return environments_; }\n\n  // Getters for the per-thread Google Test trace stack.\n  std::vector<TraceInfo>& gtest_trace_stack() {\n    return *(gtest_trace_stack_.pointer());\n  }\n  const std::vector<TraceInfo>& 
gtest_trace_stack() const {\n    return gtest_trace_stack_.get();\n  }\n\n#if GTEST_HAS_DEATH_TEST\n  void InitDeathTestSubprocessControlInfo() {\n    internal_run_death_test_flag_.reset(ParseInternalRunDeathTestFlag());\n  }\n  // Returns a pointer to the parsed --gtest_internal_run_death_test\n  // flag, or NULL if that flag was not specified.\n  // This information is useful only in a death test child process.\n  // Must not be called before a call to InitGoogleTest.\n  const InternalRunDeathTestFlag* internal_run_death_test_flag() const {\n    return internal_run_death_test_flag_.get();\n  }\n\n  // Returns a pointer to the current death test factory.\n  internal::DeathTestFactory* death_test_factory() {\n    return death_test_factory_.get();\n  }\n\n  void SuppressTestEventsIfInSubprocess();\n\n  friend class ReplaceDeathTestFactory;\n#endif  // GTEST_HAS_DEATH_TEST\n\n  // Initializes the event listener performing XML output as specified by\n  // UnitTestOptions. Must not be called before InitGoogleTest.\n  void ConfigureXmlOutput();\n\n#if GTEST_CAN_STREAM_RESULTS_\n  // Initializes the event listener for streaming test results to a socket.\n  // Must not be called before InitGoogleTest.\n  void ConfigureStreamingOutput();\n#endif\n\n  // Performs initialization dependent upon flag values obtained in\n  // ParseGoogleTestFlagsOnly.  Is called from InitGoogleTest after the call to\n  // ParseGoogleTestFlagsOnly.  In case a user neglects to call InitGoogleTest\n  // this function is also called from RunAllTests.  
Since this function can be\n  // called more than once, it has to be idempotent.\n  void PostFlagParsingInit();\n\n  // Gets the random seed used at the start of the current test iteration.\n  int random_seed() const { return random_seed_; }\n\n  // Gets the random number generator.\n  internal::Random* random() { return &random_; }\n\n  // Shuffles all test suites, and the tests within each test suite,\n  // making sure that death tests are still run first.\n  void ShuffleTests();\n\n  // Restores the test suites and tests to their order before the first shuffle.\n  void UnshuffleTests();\n\n  // Returns the value of GTEST_FLAG(catch_exceptions) at the moment\n  // UnitTest::Run() starts.\n  bool catch_exceptions() const { return catch_exceptions_; }\n\n private:\n  friend class ::testing::UnitTest;\n\n  // Used by UnitTest::Run() to capture the state of\n  // GTEST_FLAG(catch_exceptions) at the moment it starts.\n  void set_catch_exceptions(bool value) { catch_exceptions_ = value; }\n\n  // The UnitTest object that owns this implementation object.\n  UnitTest* const parent_;\n\n  // The working directory when the first TEST() or TEST_F() was\n  // executed.\n  internal::FilePath original_working_dir_;\n\n  // The default test part result reporters.\n  DefaultGlobalTestPartResultReporter default_global_test_part_result_reporter_;\n  DefaultPerThreadTestPartResultReporter\n      default_per_thread_test_part_result_reporter_;\n\n  // Points to (but doesn't own) the global test part result reporter.\n  TestPartResultReporterInterface* global_test_part_result_repoter_;\n\n  // Protects read and write access to global_test_part_result_reporter_.\n  internal::Mutex global_test_part_result_reporter_mutex_;\n\n  // Points to (but doesn't own) the per-thread test part result reporter.\n  internal::ThreadLocal<TestPartResultReporterInterface*>\n      per_thread_test_part_result_reporter_;\n\n  // The vector of environments that need to be set-up/torn-down\n  // before/after 
the tests are run.\n  std::vector<Environment*> environments_;\n\n  // The vector of TestSuites in their original order.  It owns the\n  // elements in the vector.\n  std::vector<TestSuite*> test_suites_;\n\n  // Provides a level of indirection for the test suite list to allow\n  // easy shuffling and restoring the test suite order.  The i-th\n  // element of this vector is the index of the i-th test suite in the\n  // shuffled order.\n  std::vector<int> test_suite_indices_;\n\n  // ParameterizedTestRegistry object used to register value-parameterized\n  // tests.\n  internal::ParameterizedTestSuiteRegistry parameterized_test_registry_;\n\n  // Indicates whether RegisterParameterizedTests() has been called already.\n  bool parameterized_tests_registered_;\n\n  // Index of the last death test suite registered.  Initially -1.\n  int last_death_test_suite_;\n\n  // This points to the TestSuite for the currently running test.  It\n  // changes as Google Test goes through one test suite after another.\n  // When no test is running, this is set to NULL and Google Test\n  // stores assertion results in ad_hoc_test_result_.  Initially NULL.\n  TestSuite* current_test_suite_;\n\n  // This points to the TestInfo for the currently running test.  It\n  // changes as Google Test goes through one test after another.  When\n  // no test is running, this is set to NULL and Google Test stores\n  // assertion results in ad_hoc_test_result_.  Initially NULL.\n  TestInfo* current_test_info_;\n\n  // Normally, a user only writes assertions inside a TEST or TEST_F,\n  // or inside a function called by a TEST or TEST_F.  
Since Google\n  // Test keeps track of which test is current running, it can\n  // associate such an assertion with the test it belongs to.\n  //\n  // If an assertion is encountered when no TEST or TEST_F is running,\n  // Google Test attributes the assertion result to an imaginary \"ad hoc\"\n  // test, and records the result in ad_hoc_test_result_.\n  TestResult ad_hoc_test_result_;\n\n  // The list of event listeners that can be used to track events inside\n  // Google Test.\n  TestEventListeners listeners_;\n\n  // The OS stack trace getter.  Will be deleted when the UnitTest\n  // object is destructed.  By default, an OsStackTraceGetter is used,\n  // but the user can set this field to use a custom getter if that is\n  // desired.\n  OsStackTraceGetterInterface* os_stack_trace_getter_;\n\n  // True if and only if PostFlagParsingInit() has been called.\n  bool post_flag_parse_init_performed_;\n\n  // The random number seed used at the beginning of the test run.\n  int random_seed_;\n\n  // Our random number generator.\n  internal::Random random_;\n\n  // The time of the test program start, in ms from the start of the\n  // UNIX epoch.\n  TimeInMillis start_timestamp_;\n\n  // How long the test took to run, in milliseconds.\n  TimeInMillis elapsed_time_;\n\n#if GTEST_HAS_DEATH_TEST\n  // The decomposed components of the gtest_internal_run_death_test flag,\n  // parsed when RUN_ALL_TESTS is called.\n  std::unique_ptr<InternalRunDeathTestFlag> internal_run_death_test_flag_;\n  std::unique_ptr<internal::DeathTestFactory> death_test_factory_;\n#endif  // GTEST_HAS_DEATH_TEST\n\n  // A per-thread stack of traces created by the SCOPED_TRACE() macro.\n  internal::ThreadLocal<std::vector<TraceInfo> > gtest_trace_stack_;\n\n  // The value of GTEST_FLAG(catch_exceptions) at the moment RunAllTests()\n  // starts.\n  bool catch_exceptions_;\n\n  GTEST_DISALLOW_COPY_AND_ASSIGN_(UnitTestImpl);\n};  // class UnitTestImpl\n\n// Convenience function for accessing the global 
UnitTest\n// implementation object.\ninline UnitTestImpl* GetUnitTestImpl() {\n  return UnitTest::GetInstance()->impl();\n}\n\n#if GTEST_USES_SIMPLE_RE\n\n// Internal helper functions for implementing the simple regular\n// expression matcher.\nGTEST_API_ bool IsInSet(char ch, const char* str);\nGTEST_API_ bool IsAsciiDigit(char ch);\nGTEST_API_ bool IsAsciiPunct(char ch);\nGTEST_API_ bool IsRepeat(char ch);\nGTEST_API_ bool IsAsciiWhiteSpace(char ch);\nGTEST_API_ bool IsAsciiWordChar(char ch);\nGTEST_API_ bool IsValidEscape(char ch);\nGTEST_API_ bool AtomMatchesChar(bool escaped, char pattern, char ch);\nGTEST_API_ bool ValidateRegex(const char* regex);\nGTEST_API_ bool MatchRegexAtHead(const char* regex, const char* str);\nGTEST_API_ bool MatchRepetitionAndRegexAtHead(\n    bool escaped, char ch, char repeat, const char* regex, const char* str);\nGTEST_API_ bool MatchRegexAnywhere(const char* regex, const char* str);\n\n#endif  // GTEST_USES_SIMPLE_RE\n\n// Parses the command line for Google Test flags, without initializing\n// other parts of Google Test.\nGTEST_API_ void ParseGoogleTestFlagsOnly(int* argc, char** argv);\nGTEST_API_ void ParseGoogleTestFlagsOnly(int* argc, wchar_t** argv);\n\n#if GTEST_HAS_DEATH_TEST\n\n// Returns the message describing the last system error, regardless of the\n// platform.\nGTEST_API_ std::string GetLastErrnoDescription();\n\n// Attempts to parse a string into a positive integer pointed to by the\n// number parameter.  
Returns true if that is possible.\n// GTEST_HAS_DEATH_TEST implies that we have ::std::string, so we can use\n// it here.\ntemplate <typename Integer>\nbool ParseNaturalNumber(const ::std::string& str, Integer* number) {\n  // Fail fast if the given string does not begin with a digit;\n  // this bypasses strtoXXX's \"optional leading whitespace and plus\n  // or minus sign\" semantics, which are undesirable here.\n  if (str.empty() || !IsDigit(str[0])) {\n    return false;\n  }\n  errno = 0;\n\n  char* end;\n  // BiggestConvertible is the largest integer type that system-provided\n  // string-to-number conversion routines can return.\n\n# if GTEST_OS_WINDOWS && !defined(__GNUC__)\n\n  // MSVC and C++ Builder define __int64 instead of the standard long long.\n  typedef unsigned __int64 BiggestConvertible;\n  const BiggestConvertible parsed = _strtoui64(str.c_str(), &end, 10);\n\n# else\n\n  typedef unsigned long long BiggestConvertible;  // NOLINT\n  const BiggestConvertible parsed = strtoull(str.c_str(), &end, 10);\n\n# endif  // GTEST_OS_WINDOWS && !defined(__GNUC__)\n\n  const bool parse_success = *end == '\\0' && errno == 0;\n\n  GTEST_CHECK_(sizeof(Integer) <= sizeof(parsed));\n\n  const Integer result = static_cast<Integer>(parsed);\n  if (parse_success && static_cast<BiggestConvertible>(result) == parsed) {\n    *number = result;\n    return true;\n  }\n  return false;\n}\n#endif  // GTEST_HAS_DEATH_TEST\n\n// TestResult contains some private methods that should be hidden from\n// Google Test user but are required for testing. This class allow our tests\n// to access them.\n//\n// This class is supplied only for the purpose of testing Google Test's own\n// constructs. 
Do not use it in user tests, either directly or indirectly.\nclass TestResultAccessor {\n public:\n  static void RecordProperty(TestResult* test_result,\n                             const std::string& xml_element,\n                             const TestProperty& property) {\n    test_result->RecordProperty(xml_element, property);\n  }\n\n  static void ClearTestPartResults(TestResult* test_result) {\n    test_result->ClearTestPartResults();\n  }\n\n  static const std::vector<testing::TestPartResult>& test_part_results(\n      const TestResult& test_result) {\n    return test_result.test_part_results();\n  }\n};\n\n#if GTEST_CAN_STREAM_RESULTS_\n\n// Streams test results to the given port on the given host machine.\nclass StreamingListener : public EmptyTestEventListener {\n public:\n  // Abstract base class for writing strings to a socket.\n  class AbstractSocketWriter {\n   public:\n    virtual ~AbstractSocketWriter() {}\n\n    // Sends a string to the socket.\n    virtual void Send(const std::string& message) = 0;\n\n    // Closes the socket.\n    virtual void CloseConnection() {}\n\n    // Sends a string and a newline to the socket.\n    void SendLn(const std::string& message) { Send(message + \"\\n\"); }\n  };\n\n  // Concrete class for actually writing strings to a socket.\n  class SocketWriter : public AbstractSocketWriter {\n   public:\n    SocketWriter(const std::string& host, const std::string& port)\n        : sockfd_(-1), host_name_(host), port_num_(port) {\n      MakeConnection();\n    }\n\n    ~SocketWriter() override {\n      if (sockfd_ != -1)\n        CloseConnection();\n    }\n\n    // Sends a string to the socket.\n    void Send(const std::string& message) override {\n      GTEST_CHECK_(sockfd_ != -1)\n          << \"Send() can be called only when there is a connection.\";\n\n      const auto len = static_cast<size_t>(message.length());\n      if (write(sockfd_, message.c_str(), len) != static_cast<ssize_t>(len)) {\n        GTEST_LOG_(WARNING)\n  
          << \"stream_result_to: failed to stream to \"\n            << host_name_ << \":\" << port_num_;\n      }\n    }\n\n   private:\n    // Creates a client socket and connects to the server.\n    void MakeConnection();\n\n    // Closes the socket.\n    void CloseConnection() override {\n      GTEST_CHECK_(sockfd_ != -1)\n          << \"CloseConnection() can be called only when there is a connection.\";\n\n      close(sockfd_);\n      sockfd_ = -1;\n    }\n\n    int sockfd_;  // socket file descriptor\n    const std::string host_name_;\n    const std::string port_num_;\n\n    GTEST_DISALLOW_COPY_AND_ASSIGN_(SocketWriter);\n  };  // class SocketWriter\n\n  // Escapes '=', '&', '%', and '\\n' characters in str as \"%xx\".\n  static std::string UrlEncode(const char* str);\n\n  StreamingListener(const std::string& host, const std::string& port)\n      : socket_writer_(new SocketWriter(host, port)) {\n    Start();\n  }\n\n  explicit StreamingListener(AbstractSocketWriter* socket_writer)\n      : socket_writer_(socket_writer) { Start(); }\n\n  void OnTestProgramStart(const UnitTest& /* unit_test */) override {\n    SendLn(\"event=TestProgramStart\");\n  }\n\n  void OnTestProgramEnd(const UnitTest& unit_test) override {\n    // Note that Google Test current only report elapsed time for each\n    // test iteration, not for the entire test program.\n    SendLn(\"event=TestProgramEnd&passed=\" + FormatBool(unit_test.Passed()));\n\n    // Notify the streaming server to stop.\n    socket_writer_->CloseConnection();\n  }\n\n  void OnTestIterationStart(const UnitTest& /* unit_test */,\n                            int iteration) override {\n    SendLn(\"event=TestIterationStart&iteration=\" +\n           StreamableToString(iteration));\n  }\n\n  void OnTestIterationEnd(const UnitTest& unit_test,\n                          int /* iteration */) override {\n    SendLn(\"event=TestIterationEnd&passed=\" +\n           FormatBool(unit_test.Passed()) + \"&elapsed_time=\" +\n        
   StreamableToString(unit_test.elapsed_time()) + \"ms\");\n  }\n\n  // Note that \"event=TestCaseStart\" is a wire format and has to remain\n  // \"case\" for compatibilty\n  void OnTestCaseStart(const TestCase& test_case) override {\n    SendLn(std::string(\"event=TestCaseStart&name=\") + test_case.name());\n  }\n\n  // Note that \"event=TestCaseEnd\" is a wire format and has to remain\n  // \"case\" for compatibilty\n  void OnTestCaseEnd(const TestCase& test_case) override {\n    SendLn(\"event=TestCaseEnd&passed=\" + FormatBool(test_case.Passed()) +\n           \"&elapsed_time=\" + StreamableToString(test_case.elapsed_time()) +\n           \"ms\");\n  }\n\n  void OnTestStart(const TestInfo& test_info) override {\n    SendLn(std::string(\"event=TestStart&name=\") + test_info.name());\n  }\n\n  void OnTestEnd(const TestInfo& test_info) override {\n    SendLn(\"event=TestEnd&passed=\" +\n           FormatBool((test_info.result())->Passed()) +\n           \"&elapsed_time=\" +\n           StreamableToString((test_info.result())->elapsed_time()) + \"ms\");\n  }\n\n  void OnTestPartResult(const TestPartResult& test_part_result) override {\n    const char* file_name = test_part_result.file_name();\n    if (file_name == nullptr) file_name = \"\";\n    SendLn(\"event=TestPartResult&file=\" + UrlEncode(file_name) +\n           \"&line=\" + StreamableToString(test_part_result.line_number()) +\n           \"&message=\" + UrlEncode(test_part_result.message()));\n  }\n\n private:\n  // Sends the given message and a newline to the socket.\n  void SendLn(const std::string& message) { socket_writer_->SendLn(message); }\n\n  // Called at the start of streaming to notify the receiver what\n  // protocol we are using.\n  void Start() { SendLn(\"gtest_streaming_protocol_version=1.0\"); }\n\n  std::string FormatBool(bool value) { return value ? 
\"1\" : \"0\"; }\n\n  const std::unique_ptr<AbstractSocketWriter> socket_writer_;\n\n  GTEST_DISALLOW_COPY_AND_ASSIGN_(StreamingListener);\n};  // class StreamingListener\n\n#endif  // GTEST_CAN_STREAM_RESULTS_\n\n}  // namespace internal\n}  // namespace testing\n\nGTEST_DISABLE_MSC_WARNINGS_POP_()  //  4251\n\n#endif  // GTEST_SRC_GTEST_INTERNAL_INL_H_\n"
  },
  {
    "path": "test/googletest/src/gtest-matchers.cc",
    "content": "// Copyright 2007, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n// The Google C++ Testing and Mocking Framework (Google Test)\n//\n// This file implements just enough of the matcher interface to allow\n// EXPECT_DEATH and friends to accept a matcher argument.\n\n#include \"gtest/internal/gtest-internal.h\"\n#include \"gtest/internal/gtest-port.h\"\n#include \"gtest/gtest-matchers.h\"\n\n#include <string>\n\nnamespace testing {\n\n// Constructs a matcher that matches a const std::string& whose value is\n// equal to s.\nMatcher<const std::string&>::Matcher(const std::string& s) { *this = Eq(s); }\n\n// Constructs a matcher that matches a const std::string& whose value is\n// equal to s.\nMatcher<const std::string&>::Matcher(const char* s) {\n  *this = Eq(std::string(s));\n}\n\n// Constructs a matcher that matches a std::string whose value is equal to\n// s.\nMatcher<std::string>::Matcher(const std::string& s) { *this = Eq(s); }\n\n// Constructs a matcher that matches a std::string whose value is equal to\n// s.\nMatcher<std::string>::Matcher(const char* s) { *this = Eq(std::string(s)); }\n\n#if GTEST_HAS_ABSL\n// Constructs a matcher that matches a const absl::string_view& whose value is\n// equal to s.\nMatcher<const absl::string_view&>::Matcher(const std::string& s) {\n  *this = Eq(s);\n}\n\n// Constructs a matcher that matches a const absl::string_view& whose value is\n// equal to s.\nMatcher<const absl::string_view&>::Matcher(const char* s) {\n  *this = Eq(std::string(s));\n}\n\n// 
Constructs a matcher that matches a const absl::string_view& whose value is\n// equal to s.\nMatcher<const absl::string_view&>::Matcher(absl::string_view s) {\n  *this = Eq(std::string(s));\n}\n\n// Constructs a matcher that matches a absl::string_view whose value is equal to\n// s.\nMatcher<absl::string_view>::Matcher(const std::string& s) { *this = Eq(s); }\n\n// Constructs a matcher that matches a absl::string_view whose value is equal to\n// s.\nMatcher<absl::string_view>::Matcher(const char* s) {\n  *this = Eq(std::string(s));\n}\n\n// Constructs a matcher that matches a absl::string_view whose value is equal to\n// s.\nMatcher<absl::string_view>::Matcher(absl::string_view s) {\n  *this = Eq(std::string(s));\n}\n#endif  // GTEST_HAS_ABSL\n\n}  // namespace testing\n"
  },
  {
    "path": "test/googletest/src/gtest-port.cc",
    "content": "// Copyright 2008, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\n#include \"gtest/internal/gtest-port.h\"\n\n#include <limits.h>\n#include <stdio.h>\n#include <stdlib.h>\n#include <string.h>\n#include <fstream>\n#include <memory>\n\n#if GTEST_OS_WINDOWS\n# include <windows.h>\n# include <io.h>\n# include <sys/stat.h>\n# include <map>  // Used in ThreadLocal.\n# ifdef _MSC_VER\n#  include <crtdbg.h>\n# endif  // _MSC_VER\n#else\n# include <unistd.h>\n#endif  // GTEST_OS_WINDOWS\n\n#if GTEST_OS_MAC\n# include <mach/mach_init.h>\n# include <mach/task.h>\n# include <mach/vm_map.h>\n#endif  // GTEST_OS_MAC\n\n#if GTEST_OS_DRAGONFLY || GTEST_OS_FREEBSD || GTEST_OS_GNU_KFREEBSD || \\\n    GTEST_OS_NETBSD || GTEST_OS_OPENBSD\n# include <sys/sysctl.h>\n# if GTEST_OS_DRAGONFLY || GTEST_OS_FREEBSD || GTEST_OS_GNU_KFREEBSD\n#  include <sys/user.h>\n# endif\n#endif\n\n#if GTEST_OS_QNX\n# include <devctl.h>\n# include <fcntl.h>\n# include <sys/procfs.h>\n#endif  // GTEST_OS_QNX\n\n#if GTEST_OS_AIX\n# include <procinfo.h>\n# include <sys/types.h>\n#endif  // GTEST_OS_AIX\n\n#if GTEST_OS_FUCHSIA\n# include <zircon/process.h>\n# include <zircon/syscalls.h>\n#endif  // GTEST_OS_FUCHSIA\n\n#include \"gtest/gtest-spi.h\"\n#include \"gtest/gtest-message.h\"\n#include \"gtest/internal/gtest-internal.h\"\n#include \"gtest/internal/gtest-string.h\"\n#include \"src/gtest-internal-inl.h\"\n\nnamespace testing {\nnamespace internal {\n\n#if defined(_MSC_VER) || defined(__BORLANDC__)\n// MSVC and C++Builder do not 
provide a definition of STDERR_FILENO.\nconst int kStdOutFileno = 1;\nconst int kStdErrFileno = 2;\n#else\nconst int kStdOutFileno = STDOUT_FILENO;\nconst int kStdErrFileno = STDERR_FILENO;\n#endif  // _MSC_VER\n\n#if GTEST_OS_LINUX\n\nnamespace {\ntemplate <typename T>\nT ReadProcFileField(const std::string& filename, int field) {\n  std::string dummy;\n  std::ifstream file(filename.c_str());\n  while (field-- > 0) {\n    file >> dummy;\n  }\n  T output = 0;\n  file >> output;\n  return output;\n}\n}  // namespace\n\n// Returns the number of active threads, or 0 when there is an error.\nsize_t GetThreadCount() {\n  const std::string filename =\n      (Message() << \"/proc/\" << getpid() << \"/stat\").GetString();\n  return ReadProcFileField<size_t>(filename, 19);\n}\n\n#elif GTEST_OS_MAC\n\nsize_t GetThreadCount() {\n  const task_t task = mach_task_self();\n  mach_msg_type_number_t thread_count;\n  thread_act_array_t thread_list;\n  const kern_return_t status = task_threads(task, &thread_list, &thread_count);\n  if (status == KERN_SUCCESS) {\n    // task_threads allocates resources in thread_list and we need to free them\n    // to avoid leaks.\n    vm_deallocate(task,\n                  reinterpret_cast<vm_address_t>(thread_list),\n                  sizeof(thread_t) * thread_count);\n    return static_cast<size_t>(thread_count);\n  } else {\n    return 0;\n  }\n}\n\n#elif GTEST_OS_DRAGONFLY || GTEST_OS_FREEBSD || GTEST_OS_GNU_KFREEBSD || \\\n      GTEST_OS_NETBSD\n\n#if GTEST_OS_NETBSD\n#undef KERN_PROC\n#define KERN_PROC KERN_PROC2\n#define kinfo_proc kinfo_proc2\n#endif\n\n#if GTEST_OS_DRAGONFLY\n#define KP_NLWP(kp) (kp.kp_nthreads)\n#elif GTEST_OS_FREEBSD || GTEST_OS_GNU_KFREEBSD\n#define KP_NLWP(kp) (kp.ki_numthreads)\n#elif GTEST_OS_NETBSD\n#define KP_NLWP(kp) (kp.p_nlwps)\n#endif\n\n// Returns the number of threads running in the process, or 0 to indicate that\n// we cannot detect it.\nsize_t GetThreadCount() {\n  int mib[] = {\n    CTL_KERN,\n    
KERN_PROC,\n    KERN_PROC_PID,\n    getpid(),\n#if GTEST_OS_NETBSD\n    sizeof(struct kinfo_proc),\n    1,\n#endif\n  };\n  u_int miblen = sizeof(mib) / sizeof(mib[0]);\n  struct kinfo_proc info;\n  size_t size = sizeof(info);\n  if (sysctl(mib, miblen, &info, &size, NULL, 0)) {\n    return 0;\n  }\n  return static_cast<size_t>(KP_NLWP(info));\n}\n#elif GTEST_OS_OPENBSD\n\n// Returns the number of threads running in the process, or 0 to indicate that\n// we cannot detect it.\nsize_t GetThreadCount() {\n  int mib[] = {\n    CTL_KERN,\n    KERN_PROC,\n    KERN_PROC_PID | KERN_PROC_SHOW_THREADS,\n    getpid(),\n    sizeof(struct kinfo_proc),\n    0,\n  };\n  u_int miblen = sizeof(mib) / sizeof(mib[0]);\n\n  // get number of structs\n  size_t size;\n  if (sysctl(mib, miblen, NULL, &size, NULL, 0)) {\n    return 0;\n  }\n  mib[5] = size / mib[4];\n\n  // populate array of structs\n  struct kinfo_proc info[mib[5]];\n  if (sysctl(mib, miblen, &info, &size, NULL, 0)) {\n    return 0;\n  }\n\n  // exclude empty members\n  int nthreads = 0;\n  for (int i = 0; i < size / mib[4]; i++) {\n    if (info[i].p_tid != -1)\n      nthreads++;\n  }\n  return nthreads;\n}\n\n#elif GTEST_OS_QNX\n\n// Returns the number of threads running in the process, or 0 to indicate that\n// we cannot detect it.\nsize_t GetThreadCount() {\n  const int fd = open(\"/proc/self/as\", O_RDONLY);\n  if (fd < 0) {\n    return 0;\n  }\n  procfs_info process_info;\n  const int status =\n      devctl(fd, DCMD_PROC_INFO, &process_info, sizeof(process_info), nullptr);\n  close(fd);\n  if (status == EOK) {\n    return static_cast<size_t>(process_info.num_threads);\n  } else {\n    return 0;\n  }\n}\n\n#elif GTEST_OS_AIX\n\nsize_t GetThreadCount() {\n  struct procentry64 entry;\n  pid_t pid = getpid();\n  int status = getprocs64(&entry, sizeof(entry), nullptr, 0, &pid, 1);\n  if (status == 1) {\n    return entry.pi_thcount;\n  } else {\n    return 0;\n  }\n}\n\n#elif GTEST_OS_FUCHSIA\n\nsize_t GetThreadCount() {\n 
 int dummy_buffer;\n  size_t avail;\n  zx_status_t status = zx_object_get_info(\n      zx_process_self(),\n      ZX_INFO_PROCESS_THREADS,\n      &dummy_buffer,\n      0,\n      nullptr,\n      &avail);\n  if (status == ZX_OK) {\n    return avail;\n  } else {\n    return 0;\n  }\n}\n\n#else\n\nsize_t GetThreadCount() {\n  // There's no portable way to detect the number of threads, so we just\n  // return 0 to indicate that we cannot detect it.\n  return 0;\n}\n\n#endif  // GTEST_OS_LINUX\n\n#if GTEST_IS_THREADSAFE && GTEST_OS_WINDOWS\n\nvoid SleepMilliseconds(int n) {\n  ::Sleep(static_cast<DWORD>(n));\n}\n\nAutoHandle::AutoHandle()\n    : handle_(INVALID_HANDLE_VALUE) {}\n\nAutoHandle::AutoHandle(Handle handle)\n    : handle_(handle) {}\n\nAutoHandle::~AutoHandle() {\n  Reset();\n}\n\nAutoHandle::Handle AutoHandle::Get() const {\n  return handle_;\n}\n\nvoid AutoHandle::Reset() {\n  Reset(INVALID_HANDLE_VALUE);\n}\n\nvoid AutoHandle::Reset(HANDLE handle) {\n  // Resetting with the same handle we already own is invalid.\n  if (handle_ != handle) {\n    if (IsCloseable()) {\n      ::CloseHandle(handle_);\n    }\n    handle_ = handle;\n  } else {\n    GTEST_CHECK_(!IsCloseable())\n        << \"Resetting a valid handle to itself is likely a programmer error \"\n            \"and thus not allowed.\";\n  }\n}\n\nbool AutoHandle::IsCloseable() const {\n  // Different Windows APIs may use either of these values to represent an\n  // invalid handle.\n  return handle_ != nullptr && handle_ != INVALID_HANDLE_VALUE;\n}\n\nNotification::Notification()\n    : event_(::CreateEvent(nullptr,     // Default security attributes.\n                           TRUE,        // Do not reset automatically.\n                           FALSE,       // Initially unset.\n                           nullptr)) {  // Anonymous event.\n  GTEST_CHECK_(event_.Get() != nullptr);\n}\n\nvoid Notification::Notify() {\n  GTEST_CHECK_(::SetEvent(event_.Get()) != FALSE);\n}\n\nvoid 
Notification::WaitForNotification() {\n  GTEST_CHECK_(\n      ::WaitForSingleObject(event_.Get(), INFINITE) == WAIT_OBJECT_0);\n}\n\nMutex::Mutex()\n    : owner_thread_id_(0),\n      type_(kDynamic),\n      critical_section_init_phase_(0),\n      critical_section_(new CRITICAL_SECTION) {\n  ::InitializeCriticalSection(critical_section_);\n}\n\nMutex::~Mutex() {\n  // Static mutexes are leaked intentionally. It is not thread-safe to try\n  // to clean them up.\n  if (type_ == kDynamic) {\n    ::DeleteCriticalSection(critical_section_);\n    delete critical_section_;\n    critical_section_ = nullptr;\n  }\n}\n\nvoid Mutex::Lock() {\n  ThreadSafeLazyInit();\n  ::EnterCriticalSection(critical_section_);\n  owner_thread_id_ = ::GetCurrentThreadId();\n}\n\nvoid Mutex::Unlock() {\n  ThreadSafeLazyInit();\n  // We don't protect writing to owner_thread_id_ here, as it's the\n  // caller's responsibility to ensure that the current thread holds the\n  // mutex when this is called.\n  owner_thread_id_ = 0;\n  ::LeaveCriticalSection(critical_section_);\n}\n\n// Does nothing if the current thread holds the mutex. Otherwise, crashes\n// with high probability.\nvoid Mutex::AssertHeld() {\n  ThreadSafeLazyInit();\n  GTEST_CHECK_(owner_thread_id_ == ::GetCurrentThreadId())\n      << \"The current thread is not holding the mutex @\" << this;\n}\n\nnamespace {\n\n#ifdef _MSC_VER\n// Use the RAII idiom to flag mem allocs that are intentionally never\n// deallocated. 
The motivation is to silence the false positive mem leaks\n// that are reported by the debug version of MS's CRT which can only detect\n// if an alloc is missing a matching deallocation.\n// Example:\n//    MemoryIsNotDeallocated memory_is_not_deallocated;\n//    critical_section_ = new CRITICAL_SECTION;\n//\nclass MemoryIsNotDeallocated\n{\n public:\n  MemoryIsNotDeallocated() : old_crtdbg_flag_(0) {\n    old_crtdbg_flag_ = _CrtSetDbgFlag(_CRTDBG_REPORT_FLAG);\n    // Set heap allocation block type to _IGNORE_BLOCK so that MS debug CRT\n    // doesn't report mem leak if there's no matching deallocation.\n    _CrtSetDbgFlag(old_crtdbg_flag_ & ~_CRTDBG_ALLOC_MEM_DF);\n  }\n\n  ~MemoryIsNotDeallocated() {\n    // Restore the original _CRTDBG_ALLOC_MEM_DF flag\n    _CrtSetDbgFlag(old_crtdbg_flag_);\n  }\n\n private:\n  int old_crtdbg_flag_;\n\n  GTEST_DISALLOW_COPY_AND_ASSIGN_(MemoryIsNotDeallocated);\n};\n#endif  // _MSC_VER\n\n}  // namespace\n\n// Initializes owner_thread_id_ and critical_section_ in static mutexes.\nvoid Mutex::ThreadSafeLazyInit() {\n  // Dynamic mutexes are initialized in the constructor.\n  if (type_ == kStatic) {\n    switch (\n        ::InterlockedCompareExchange(&critical_section_init_phase_, 1L, 0L)) {\n      case 0:\n        // If critical_section_init_phase_ was 0 before the exchange, we\n        // are the first to test it and need to perform the initialization.\n        owner_thread_id_ = 0;\n        {\n          // Use RAII to flag that following mem alloc is never deallocated.\n#ifdef _MSC_VER\n          MemoryIsNotDeallocated memory_is_not_deallocated;\n#endif  // _MSC_VER\n          critical_section_ = new CRITICAL_SECTION;\n        }\n        ::InitializeCriticalSection(critical_section_);\n        // Updates the critical_section_init_phase_ to 2 to signal\n        // initialization complete.\n        GTEST_CHECK_(::InterlockedCompareExchange(\n                          &critical_section_init_phase_, 2L, 1L) ==\n                    
  1L);\n        break;\n      case 1:\n        // Somebody else is already initializing the mutex; spin until they\n        // are done.\n        while (::InterlockedCompareExchange(&critical_section_init_phase_,\n                                            2L,\n                                            2L) != 2L) {\n          // Possibly yields the rest of the thread's time slice to other\n          // threads.\n          ::Sleep(0);\n        }\n        break;\n\n      case 2:\n        break;  // The mutex is already initialized and ready for use.\n\n      default:\n        GTEST_CHECK_(false)\n            << \"Unexpected value of critical_section_init_phase_ \"\n            << \"while initializing a static mutex.\";\n    }\n  }\n}\n\nnamespace {\n\nclass ThreadWithParamSupport : public ThreadWithParamBase {\n public:\n  static HANDLE CreateThread(Runnable* runnable,\n                             Notification* thread_can_start) {\n    ThreadMainParam* param = new ThreadMainParam(runnable, thread_can_start);\n    DWORD thread_id;\n    HANDLE thread_handle = ::CreateThread(\n        nullptr,  // Default security.\n        0,        // Default stack size.\n        &ThreadWithParamSupport::ThreadMain,\n        param,        // Parameter to ThreadMainStatic\n        0x0,          // Default creation flags.\n        &thread_id);  // Need a valid pointer for the call to work under Win98.\n    GTEST_CHECK_(thread_handle != nullptr)\n        << \"CreateThread failed with error \" << ::GetLastError() << \".\";\n    if (thread_handle == nullptr) {\n      delete param;\n    }\n    return thread_handle;\n  }\n\n private:\n  struct ThreadMainParam {\n    ThreadMainParam(Runnable* runnable, Notification* thread_can_start)\n        : runnable_(runnable),\n          thread_can_start_(thread_can_start) {\n    }\n    std::unique_ptr<Runnable> runnable_;\n    // Does not own.\n    Notification* thread_can_start_;\n  };\n\n  static DWORD WINAPI ThreadMain(void* ptr) {\n    // 
Transfers ownership.\n    std::unique_ptr<ThreadMainParam> param(static_cast<ThreadMainParam*>(ptr));\n    if (param->thread_can_start_ != nullptr)\n      param->thread_can_start_->WaitForNotification();\n    param->runnable_->Run();\n    return 0;\n  }\n\n  // Prohibit instantiation.\n  ThreadWithParamSupport();\n\n  GTEST_DISALLOW_COPY_AND_ASSIGN_(ThreadWithParamSupport);\n};\n\n}  // namespace\n\nThreadWithParamBase::ThreadWithParamBase(Runnable *runnable,\n                                         Notification* thread_can_start)\n      : thread_(ThreadWithParamSupport::CreateThread(runnable,\n                                                     thread_can_start)) {\n}\n\nThreadWithParamBase::~ThreadWithParamBase() {\n  Join();\n}\n\nvoid ThreadWithParamBase::Join() {\n  GTEST_CHECK_(::WaitForSingleObject(thread_.Get(), INFINITE) == WAIT_OBJECT_0)\n      << \"Failed to join the thread with error \" << ::GetLastError() << \".\";\n}\n\n// Maps a thread to a set of ThreadIdToThreadLocals that have values\n// instantiated on that thread and notifies them when the thread exits.  
A\n// ThreadLocal instance is expected to persist until all threads it has\n// values on have terminated.\nclass ThreadLocalRegistryImpl {\n public:\n  // Registers thread_local_instance as having value on the current thread.\n  // Returns a value that can be used to identify the thread from other threads.\n  static ThreadLocalValueHolderBase* GetValueOnCurrentThread(\n      const ThreadLocalBase* thread_local_instance) {\n    DWORD current_thread = ::GetCurrentThreadId();\n    MutexLock lock(&mutex_);\n    ThreadIdToThreadLocals* const thread_to_thread_locals =\n        GetThreadLocalsMapLocked();\n    ThreadIdToThreadLocals::iterator thread_local_pos =\n        thread_to_thread_locals->find(current_thread);\n    if (thread_local_pos == thread_to_thread_locals->end()) {\n      thread_local_pos = thread_to_thread_locals->insert(\n          std::make_pair(current_thread, ThreadLocalValues())).first;\n      StartWatcherThreadFor(current_thread);\n    }\n    ThreadLocalValues& thread_local_values = thread_local_pos->second;\n    ThreadLocalValues::iterator value_pos =\n        thread_local_values.find(thread_local_instance);\n    if (value_pos == thread_local_values.end()) {\n      value_pos =\n          thread_local_values\n              .insert(std::make_pair(\n                  thread_local_instance,\n                  std::shared_ptr<ThreadLocalValueHolderBase>(\n                      thread_local_instance->NewValueForCurrentThread())))\n              .first;\n    }\n    return value_pos->second.get();\n  }\n\n  static void OnThreadLocalDestroyed(\n      const ThreadLocalBase* thread_local_instance) {\n    std::vector<std::shared_ptr<ThreadLocalValueHolderBase> > value_holders;\n    // Clean up the ThreadLocalValues data structure while holding the lock, but\n    // defer the destruction of the ThreadLocalValueHolderBases.\n    {\n      MutexLock lock(&mutex_);\n      ThreadIdToThreadLocals* const thread_to_thread_locals =\n          GetThreadLocalsMapLocked();\n  
    for (ThreadIdToThreadLocals::iterator it =\n          thread_to_thread_locals->begin();\n          it != thread_to_thread_locals->end();\n          ++it) {\n        ThreadLocalValues& thread_local_values = it->second;\n        ThreadLocalValues::iterator value_pos =\n            thread_local_values.find(thread_local_instance);\n        if (value_pos != thread_local_values.end()) {\n          value_holders.push_back(value_pos->second);\n          thread_local_values.erase(value_pos);\n          // This 'if' can only be successful at most once, so theoretically we\n          // could break out of the loop here, but we don't bother doing so.\n        }\n      }\n    }\n    // Outside the lock, let the destructor for 'value_holders' deallocate the\n    // ThreadLocalValueHolderBases.\n  }\n\n  static void OnThreadExit(DWORD thread_id) {\n    GTEST_CHECK_(thread_id != 0) << ::GetLastError();\n    std::vector<std::shared_ptr<ThreadLocalValueHolderBase> > value_holders;\n    // Clean up the ThreadIdToThreadLocals data structure while holding the\n    // lock, but defer the destruction of the ThreadLocalValueHolderBases.\n    {\n      MutexLock lock(&mutex_);\n      ThreadIdToThreadLocals* const thread_to_thread_locals =\n          GetThreadLocalsMapLocked();\n      ThreadIdToThreadLocals::iterator thread_local_pos =\n          thread_to_thread_locals->find(thread_id);\n      if (thread_local_pos != thread_to_thread_locals->end()) {\n        ThreadLocalValues& thread_local_values = thread_local_pos->second;\n        for (ThreadLocalValues::iterator value_pos =\n            thread_local_values.begin();\n            value_pos != thread_local_values.end();\n            ++value_pos) {\n          value_holders.push_back(value_pos->second);\n        }\n        thread_to_thread_locals->erase(thread_local_pos);\n      }\n    }\n    // Outside the lock, let the destructor for 'value_holders' deallocate the\n    // ThreadLocalValueHolderBases.\n  }\n\n private:\n  // In a 
particular thread, maps a ThreadLocal object to its value.\n  typedef std::map<const ThreadLocalBase*,\n                   std::shared_ptr<ThreadLocalValueHolderBase> >\n      ThreadLocalValues;\n  // Stores all ThreadIdToThreadLocals having values in a thread, indexed by\n  // thread's ID.\n  typedef std::map<DWORD, ThreadLocalValues> ThreadIdToThreadLocals;\n\n  // Holds the thread id and thread handle that we pass from\n  // StartWatcherThreadFor to WatcherThreadFunc.\n  typedef std::pair<DWORD, HANDLE> ThreadIdAndHandle;\n\n  static void StartWatcherThreadFor(DWORD thread_id) {\n    // The returned handle will be kept in thread_map and closed by\n    // watcher_thread in WatcherThreadFunc.\n    HANDLE thread = ::OpenThread(SYNCHRONIZE | THREAD_QUERY_INFORMATION,\n                                 FALSE,\n                                 thread_id);\n    GTEST_CHECK_(thread != nullptr);\n    // We need to pass a valid thread ID pointer into CreateThread for it\n    // to work correctly under Win98.\n    DWORD watcher_thread_id;\n    HANDLE watcher_thread = ::CreateThread(\n        nullptr,  // Default security.\n        0,        // Default stack size\n        &ThreadLocalRegistryImpl::WatcherThreadFunc,\n        reinterpret_cast<LPVOID>(new ThreadIdAndHandle(thread_id, thread)),\n        CREATE_SUSPENDED, &watcher_thread_id);\n    GTEST_CHECK_(watcher_thread != nullptr);\n    // Give the watcher thread the same priority as ours to avoid being\n    // blocked by it.\n    ::SetThreadPriority(watcher_thread,\n                        ::GetThreadPriority(::GetCurrentThread()));\n    ::ResumeThread(watcher_thread);\n    ::CloseHandle(watcher_thread);\n  }\n\n  // Monitors exit from a given thread and notifies those\n  // ThreadIdToThreadLocals about thread termination.\n  static DWORD WINAPI WatcherThreadFunc(LPVOID param) {\n    const ThreadIdAndHandle* tah =\n        reinterpret_cast<const ThreadIdAndHandle*>(param);\n    GTEST_CHECK_(\n        
::WaitForSingleObject(tah->second, INFINITE) == WAIT_OBJECT_0);\n    OnThreadExit(tah->first);\n    ::CloseHandle(tah->second);\n    delete tah;\n    return 0;\n  }\n\n  // Returns map of thread local instances.\n  static ThreadIdToThreadLocals* GetThreadLocalsMapLocked() {\n    mutex_.AssertHeld();\n#ifdef _MSC_VER\n    MemoryIsNotDeallocated memory_is_not_deallocated;\n#endif  // _MSC_VER\n    static ThreadIdToThreadLocals* map = new ThreadIdToThreadLocals();\n    return map;\n  }\n\n  // Protects access to GetThreadLocalsMapLocked() and its return value.\n  static Mutex mutex_;\n  // Protects access to GetThreadMapLocked() and its return value.\n  static Mutex thread_map_mutex_;\n};\n\nMutex ThreadLocalRegistryImpl::mutex_(Mutex::kStaticMutex);\nMutex ThreadLocalRegistryImpl::thread_map_mutex_(Mutex::kStaticMutex);\n\nThreadLocalValueHolderBase* ThreadLocalRegistry::GetValueOnCurrentThread(\n      const ThreadLocalBase* thread_local_instance) {\n  return ThreadLocalRegistryImpl::GetValueOnCurrentThread(\n      thread_local_instance);\n}\n\nvoid ThreadLocalRegistry::OnThreadLocalDestroyed(\n      const ThreadLocalBase* thread_local_instance) {\n  ThreadLocalRegistryImpl::OnThreadLocalDestroyed(thread_local_instance);\n}\n\n#endif  // GTEST_IS_THREADSAFE && GTEST_OS_WINDOWS\n\n#if GTEST_USES_POSIX_RE\n\n// Implements RE.  Currently only needed for death tests.\n\nRE::~RE() {\n  if (is_valid_) {\n    // regfree'ing an invalid regex might crash because the content\n    // of the regex is undefined. 
Since the regex's are essentially\n    // the same, one cannot be valid (or invalid) without the other\n    // being so too.\n    regfree(&partial_regex_);\n    regfree(&full_regex_);\n  }\n  free(const_cast<char*>(pattern_));\n}\n\n// Returns true if and only if regular expression re matches the entire str.\nbool RE::FullMatch(const char* str, const RE& re) {\n  if (!re.is_valid_) return false;\n\n  regmatch_t match;\n  return regexec(&re.full_regex_, str, 1, &match, 0) == 0;\n}\n\n// Returns true if and only if regular expression re matches a substring of\n// str (including str itself).\nbool RE::PartialMatch(const char* str, const RE& re) {\n  if (!re.is_valid_) return false;\n\n  regmatch_t match;\n  return regexec(&re.partial_regex_, str, 1, &match, 0) == 0;\n}\n\n// Initializes an RE from its string representation.\nvoid RE::Init(const char* regex) {\n  pattern_ = posix::StrDup(regex);\n\n  // Reserves enough bytes to hold the regular expression used for a\n  // full match.\n  const size_t full_regex_len = strlen(regex) + 10;\n  char* const full_pattern = new char[full_regex_len];\n\n  snprintf(full_pattern, full_regex_len, \"^(%s)$\", regex);\n  is_valid_ = regcomp(&full_regex_, full_pattern, REG_EXTENDED) == 0;\n  // We want to call regcomp(&partial_regex_, ...) even if the\n  // previous expression returns false.  Otherwise partial_regex_ may\n  // not be properly initialized can may cause trouble when it's\n  // freed.\n  //\n  // Some implementation of POSIX regex (e.g. on at least some\n  // versions of Cygwin) doesn't accept the empty string as a valid\n  // regex.  We change it to an equivalent form \"()\" to be safe.\n  if (is_valid_) {\n    const char* const partial_regex = (*regex == '\\0') ? 
\"()\" : regex;\n    is_valid_ = regcomp(&partial_regex_, partial_regex, REG_EXTENDED) == 0;\n  }\n  EXPECT_TRUE(is_valid_)\n      << \"Regular expression \\\"\" << regex\n      << \"\\\" is not a valid POSIX Extended regular expression.\";\n\n  delete[] full_pattern;\n}\n\n#elif GTEST_USES_SIMPLE_RE\n\n// Returns true if and only if ch appears anywhere in str (excluding the\n// terminating '\\0' character).\nbool IsInSet(char ch, const char* str) {\n  return ch != '\\0' && strchr(str, ch) != nullptr;\n}\n\n// Returns true if and only if ch belongs to the given classification.\n// Unlike similar functions in <ctype.h>, these aren't affected by the\n// current locale.\nbool IsAsciiDigit(char ch) { return '0' <= ch && ch <= '9'; }\nbool IsAsciiPunct(char ch) {\n  return IsInSet(ch, \"^-!\\\"#$%&'()*+,./:;<=>?@[\\\\]_`{|}~\");\n}\nbool IsRepeat(char ch) { return IsInSet(ch, \"?*+\"); }\nbool IsAsciiWhiteSpace(char ch) { return IsInSet(ch, \" \\f\\n\\r\\t\\v\"); }\nbool IsAsciiWordChar(char ch) {\n  return ('a' <= ch && ch <= 'z') || ('A' <= ch && ch <= 'Z') ||\n      ('0' <= ch && ch <= '9') || ch == '_';\n}\n\n// Returns true if and only if \"\\\\c\" is a supported escape sequence.\nbool IsValidEscape(char c) {\n  return (IsAsciiPunct(c) || IsInSet(c, \"dDfnrsStvwW\"));\n}\n\n// Returns true if and only if the given atom (specified by escaped and\n// pattern) matches ch.  
The result is undefined if the atom is invalid.\nbool AtomMatchesChar(bool escaped, char pattern_char, char ch) {\n  if (escaped) {  // \"\\\\p\" where p is pattern_char.\n    switch (pattern_char) {\n      case 'd': return IsAsciiDigit(ch);\n      case 'D': return !IsAsciiDigit(ch);\n      case 'f': return ch == '\\f';\n      case 'n': return ch == '\\n';\n      case 'r': return ch == '\\r';\n      case 's': return IsAsciiWhiteSpace(ch);\n      case 'S': return !IsAsciiWhiteSpace(ch);\n      case 't': return ch == '\\t';\n      case 'v': return ch == '\\v';\n      case 'w': return IsAsciiWordChar(ch);\n      case 'W': return !IsAsciiWordChar(ch);\n    }\n    return IsAsciiPunct(pattern_char) && pattern_char == ch;\n  }\n\n  return (pattern_char == '.' && ch != '\\n') || pattern_char == ch;\n}\n\n// Helper function used by ValidateRegex() to format error messages.\nstatic std::string FormatRegexSyntaxError(const char* regex, int index) {\n  return (Message() << \"Syntax error at index \" << index\n          << \" in simple regular expression \\\"\" << regex << \"\\\": \").GetString();\n}\n\n// Generates non-fatal failures and returns false if regex is invalid;\n// otherwise returns true.\nbool ValidateRegex(const char* regex) {\n  if (regex == nullptr) {\n    ADD_FAILURE() << \"NULL is not a valid simple regular expression.\";\n    return false;\n  }\n\n  bool is_valid = true;\n\n  // True if and only if ?, *, or + can follow the previous atom.\n  bool prev_repeatable = false;\n  for (int i = 0; regex[i]; i++) {\n    if (regex[i] == '\\\\') {  // An escape sequence\n      i++;\n      if (regex[i] == '\\0') {\n        ADD_FAILURE() << FormatRegexSyntaxError(regex, i - 1)\n                      << \"'\\\\' cannot appear at the end.\";\n        return false;\n      }\n\n      if (!IsValidEscape(regex[i])) {\n        ADD_FAILURE() << FormatRegexSyntaxError(regex, i - 1)\n                      << \"invalid escape sequence \\\"\\\\\" << regex[i] << \"\\\".\";\n        
is_valid = false;\n      }\n      prev_repeatable = true;\n    } else {  // Not an escape sequence.\n      const char ch = regex[i];\n\n      if (ch == '^' && i > 0) {\n        ADD_FAILURE() << FormatRegexSyntaxError(regex, i)\n                      << \"'^' can only appear at the beginning.\";\n        is_valid = false;\n      } else if (ch == '$' && regex[i + 1] != '\\0') {\n        ADD_FAILURE() << FormatRegexSyntaxError(regex, i)\n                      << \"'$' can only appear at the end.\";\n        is_valid = false;\n      } else if (IsInSet(ch, \"()[]{}|\")) {\n        ADD_FAILURE() << FormatRegexSyntaxError(regex, i)\n                      << \"'\" << ch << \"' is unsupported.\";\n        is_valid = false;\n      } else if (IsRepeat(ch) && !prev_repeatable) {\n        ADD_FAILURE() << FormatRegexSyntaxError(regex, i)\n                      << \"'\" << ch << \"' can only follow a repeatable token.\";\n        is_valid = false;\n      }\n\n      prev_repeatable = !IsInSet(ch, \"^$?*+\");\n    }\n  }\n\n  return is_valid;\n}\n\n// Matches a repeated regex atom followed by a valid simple regular\n// expression.  The regex atom is defined as c if escaped is false,\n// or \\c otherwise.  repeat is the repetition meta character (?, *,\n// or +).  The behavior is undefined if str contains too many\n// characters to be indexable by size_t, in which case the test will\n// probably time out anyway.  We are fine with this limitation as\n// std::string has it too.\nbool MatchRepetitionAndRegexAtHead(\n    bool escaped, char c, char repeat, const char* regex,\n    const char* str) {\n  const size_t min_count = (repeat == '+') ? 1 : 0;\n  const size_t max_count = (repeat == '?') ? 
1 :\n      static_cast<size_t>(-1) - 1;\n  // We cannot call numeric_limits::max() as it conflicts with the\n  // max() macro on Windows.\n\n  for (size_t i = 0; i <= max_count; ++i) {\n    // We know that the atom matches each of the first i characters in str.\n    if (i >= min_count && MatchRegexAtHead(regex, str + i)) {\n      // We have enough matches at the head, and the tail matches too.\n      // Since we only care about *whether* the pattern matches str\n      // (as opposed to *how* it matches), there is no need to find a\n      // greedy match.\n      return true;\n    }\n    if (str[i] == '\\0' || !AtomMatchesChar(escaped, c, str[i]))\n      return false;\n  }\n  return false;\n}\n\n// Returns true if and only if regex matches a prefix of str. regex must\n// be a valid simple regular expression and not start with \"^\", or the\n// result is undefined.\nbool MatchRegexAtHead(const char* regex, const char* str) {\n  if (*regex == '\\0')  // An empty regex matches a prefix of anything.\n    return true;\n\n  // \"$\" only matches the end of a string.  Note that regex being\n  // valid guarantees that there's nothing after \"$\" in it.\n  if (*regex == '$')\n    return *str == '\\0';\n\n  // Is the first thing in regex an escape sequence?\n  const bool escaped = *regex == '\\\\';\n  if (escaped)\n    ++regex;\n  if (IsRepeat(regex[1])) {\n    // MatchRepetitionAndRegexAtHead() calls MatchRegexAtHead(), so\n    // here's an indirect recursion.  It terminates as the regex gets\n    // shorter in each recursion.\n    return MatchRepetitionAndRegexAtHead(\n        escaped, regex[0], regex[1], regex + 2, str);\n  } else {\n    // regex isn't empty, isn't \"$\", and doesn't start with a\n    // repetition.  
We match the first atom of regex with the first\n    // character of str and recurse.\n    return (*str != '\\0') && AtomMatchesChar(escaped, *regex, *str) &&\n        MatchRegexAtHead(regex + 1, str + 1);\n  }\n}\n\n// Returns true if and only if regex matches any substring of str.  regex must\n// be a valid simple regular expression, or the result is undefined.\n//\n// The algorithm is recursive, but the recursion depth doesn't exceed\n// the regex length, so we won't need to worry about running out of\n// stack space normally.  In rare cases the time complexity can be\n// exponential with respect to the regex length + the string length,\n// but usually it's must faster (often close to linear).\nbool MatchRegexAnywhere(const char* regex, const char* str) {\n  if (regex == nullptr || str == nullptr) return false;\n\n  if (*regex == '^')\n    return MatchRegexAtHead(regex + 1, str);\n\n  // A successful match can be anywhere in str.\n  do {\n    if (MatchRegexAtHead(regex, str))\n      return true;\n  } while (*str++ != '\\0');\n  return false;\n}\n\n// Implements the RE class.\n\nRE::~RE() {\n  free(const_cast<char*>(pattern_));\n  free(const_cast<char*>(full_pattern_));\n}\n\n// Returns true if and only if regular expression re matches the entire str.\nbool RE::FullMatch(const char* str, const RE& re) {\n  return re.is_valid_ && MatchRegexAnywhere(re.full_pattern_, str);\n}\n\n// Returns true if and only if regular expression re matches a substring of\n// str (including str itself).\nbool RE::PartialMatch(const char* str, const RE& re) {\n  return re.is_valid_ && MatchRegexAnywhere(re.pattern_, str);\n}\n\n// Initializes an RE from its string representation.\nvoid RE::Init(const char* regex) {\n  pattern_ = full_pattern_ = nullptr;\n  if (regex != nullptr) {\n    pattern_ = posix::StrDup(regex);\n  }\n\n  is_valid_ = ValidateRegex(regex);\n  if (!is_valid_) {\n    // No need to calculate the full pattern when the regex is invalid.\n    return;\n  }\n\n  const 
size_t len = strlen(regex);\n  // Reserves enough bytes to hold the regular expression used for a\n  // full match: we need space to prepend a '^', append a '$', and\n  // terminate the string with '\\0'.\n  char* buffer = static_cast<char*>(malloc(len + 3));\n  full_pattern_ = buffer;\n\n  if (*regex != '^')\n    *buffer++ = '^';  // Makes sure full_pattern_ starts with '^'.\n\n  // We don't use snprintf or strncpy, as they trigger a warning when\n  // compiled with VC++ 8.0.\n  memcpy(buffer, regex, len);\n  buffer += len;\n\n  if (len == 0 || regex[len - 1] != '$')\n    *buffer++ = '$';  // Makes sure full_pattern_ ends with '$'.\n\n  *buffer = '\\0';\n}\n\n#endif  // GTEST_USES_POSIX_RE\n\nconst char kUnknownFile[] = \"unknown file\";\n\n// Formats a source file path and a line number as they would appear\n// in an error message from the compiler used to compile this code.\nGTEST_API_ ::std::string FormatFileLocation(const char* file, int line) {\n  const std::string file_name(file == nullptr ? kUnknownFile : file);\n\n  if (line < 0) {\n    return file_name + \":\";\n  }\n#ifdef _MSC_VER\n  return file_name + \"(\" + StreamableToString(line) + \"):\";\n#else\n  return file_name + \":\" + StreamableToString(line) + \":\";\n#endif  // _MSC_VER\n}\n\n// Formats a file location for compiler-independent XML output.\n// Although this function is not platform dependent, we put it next to\n// FormatFileLocation in order to contrast the two functions.\n// Note that FormatCompilerIndependentFileLocation() does NOT append colon\n// to the file location it produces, unlike FormatFileLocation().\nGTEST_API_ ::std::string FormatCompilerIndependentFileLocation(\n    const char* file, int line) {\n  const std::string file_name(file == nullptr ? 
kUnknownFile : file);\n\n  if (line < 0)\n    return file_name;\n  else\n    return file_name + \":\" + StreamableToString(line);\n}\n\nGTestLog::GTestLog(GTestLogSeverity severity, const char* file, int line)\n    : severity_(severity) {\n  const char* const marker =\n      severity == GTEST_INFO ?    \"[  INFO ]\" :\n      severity == GTEST_WARNING ? \"[WARNING]\" :\n      severity == GTEST_ERROR ?   \"[ ERROR ]\" : \"[ FATAL ]\";\n  GetStream() << ::std::endl << marker << \" \"\n              << FormatFileLocation(file, line).c_str() << \": \";\n}\n\n// Flushes the buffers and, if severity is GTEST_FATAL, aborts the program.\nGTestLog::~GTestLog() {\n  GetStream() << ::std::endl;\n  if (severity_ == GTEST_FATAL) {\n    fflush(stderr);\n    posix::Abort();\n  }\n}\n\n// Disable Microsoft deprecation warnings for POSIX functions called from\n// this class (creat, dup, dup2, and close)\nGTEST_DISABLE_MSC_DEPRECATED_PUSH_()\n\n#if GTEST_HAS_STREAM_REDIRECTION\n\n// Object that captures an output stream (stdout/stderr).\nclass CapturedStream {\n public:\n  // The ctor redirects the stream to a temporary file.\n  explicit CapturedStream(int fd) : fd_(fd), uncaptured_fd_(dup(fd)) {\n# if GTEST_OS_WINDOWS\n    char temp_dir_path[MAX_PATH + 1] = { '\\0' };  // NOLINT\n    char temp_file_path[MAX_PATH + 1] = { '\\0' };  // NOLINT\n\n    ::GetTempPathA(sizeof(temp_dir_path), temp_dir_path);\n    const UINT success = ::GetTempFileNameA(temp_dir_path,\n                                            \"gtest_redir\",\n                                            0,  // Generate unique file name.\n                                            temp_file_path);\n    GTEST_CHECK_(success != 0)\n        << \"Unable to create a temporary file in \" << temp_dir_path;\n    const int captured_fd = creat(temp_file_path, _S_IREAD | _S_IWRITE);\n    GTEST_CHECK_(captured_fd != -1) << \"Unable to open temporary file \"\n                                    << temp_file_path;\n    filename_ = 
temp_file_path;\n# else\n    // There's no guarantee that a test has write access to the current\n    // directory, so we create the temporary file in the /tmp directory\n    // instead. We use /tmp on most systems, and /sdcard on Android.\n    // That's because Android doesn't have /tmp.\n#  if GTEST_OS_LINUX_ANDROID\n    // Note: Android applications are expected to call the framework's\n    // Context.getExternalStorageDirectory() method through JNI to get\n    // the location of the world-writable SD Card directory. However,\n    // this requires a Context handle, which cannot be retrieved\n    // globally from native code. Doing so also precludes running the\n    // code as part of a regular standalone executable, which doesn't\n    // run in a Dalvik process (e.g. when running it through 'adb shell').\n    //\n    // The location /data/local/tmp is directly accessible from native code.\n    // '/sdcard' and other variants cannot be relied on, as they are not\n    // guaranteed to be mounted, or may have a delay in mounting.\n    char name_template[] = \"/data/local/tmp/gtest_captured_stream.XXXXXX\";\n#  else\n    char name_template[] = \"/tmp/captured_stream.XXXXXX\";\n#  endif  // GTEST_OS_LINUX_ANDROID\n    const int captured_fd = mkstemp(name_template);\n    if (captured_fd == -1) {\n      GTEST_LOG_(WARNING)\n          << \"Failed to create tmp file \" << name_template\n          << \" for test; does the test have access to the /tmp directory?\";\n    }\n    filename_ = name_template;\n# endif  // GTEST_OS_WINDOWS\n    fflush(nullptr);\n    dup2(captured_fd, fd_);\n    close(captured_fd);\n  }\n\n  ~CapturedStream() {\n    remove(filename_.c_str());\n  }\n\n  std::string GetCapturedString() {\n    if (uncaptured_fd_ != -1) {\n      // Restores the original stream.\n      fflush(nullptr);\n      dup2(uncaptured_fd_, fd_);\n      close(uncaptured_fd_);\n      uncaptured_fd_ = -1;\n    }\n\n    FILE* const file = posix::FOpen(filename_.c_str(), \"r\");\n    
if (file == nullptr) {\n      GTEST_LOG_(FATAL) << \"Failed to open tmp file \" << filename_\n                        << \" for capturing stream.\";\n    }\n    const std::string content = ReadEntireFile(file);\n    posix::FClose(file);\n    return content;\n  }\n\n private:\n  const int fd_;  // A stream to capture.\n  int uncaptured_fd_;\n  // Name of the temporary file holding the stderr output.\n  ::std::string filename_;\n\n  GTEST_DISALLOW_COPY_AND_ASSIGN_(CapturedStream);\n};\n\nGTEST_DISABLE_MSC_DEPRECATED_POP_()\n\nstatic CapturedStream* g_captured_stderr = nullptr;\nstatic CapturedStream* g_captured_stdout = nullptr;\n\n// Starts capturing an output stream (stdout/stderr).\nstatic void CaptureStream(int fd, const char* stream_name,\n                          CapturedStream** stream) {\n  if (*stream != nullptr) {\n    GTEST_LOG_(FATAL) << \"Only one \" << stream_name\n                      << \" capturer can exist at a time.\";\n  }\n  *stream = new CapturedStream(fd);\n}\n\n// Stops capturing the output stream and returns the captured string.\nstatic std::string GetCapturedStream(CapturedStream** captured_stream) {\n  const std::string content = (*captured_stream)->GetCapturedString();\n\n  delete *captured_stream;\n  *captured_stream = nullptr;\n\n  return content;\n}\n\n// Starts capturing stdout.\nvoid CaptureStdout() {\n  CaptureStream(kStdOutFileno, \"stdout\", &g_captured_stdout);\n}\n\n// Starts capturing stderr.\nvoid CaptureStderr() {\n  CaptureStream(kStdErrFileno, \"stderr\", &g_captured_stderr);\n}\n\n// Stops capturing stdout and returns the captured string.\nstd::string GetCapturedStdout() {\n  return GetCapturedStream(&g_captured_stdout);\n}\n\n// Stops capturing stderr and returns the captured string.\nstd::string GetCapturedStderr() {\n  return GetCapturedStream(&g_captured_stderr);\n}\n\n#endif  // GTEST_HAS_STREAM_REDIRECTION\n\n\n\n\n\nsize_t GetFileSize(FILE* file) {\n  fseek(file, 0, SEEK_END);\n  return 
static_cast<size_t>(ftell(file));\n}\n\nstd::string ReadEntireFile(FILE* file) {\n  const size_t file_size = GetFileSize(file);\n  char* const buffer = new char[file_size];\n\n  size_t bytes_last_read = 0;  // # of bytes read in the last fread()\n  size_t bytes_read = 0;       // # of bytes read so far\n\n  fseek(file, 0, SEEK_SET);\n\n  // Keeps reading the file until we cannot read further or the\n  // pre-determined file size is reached.\n  do {\n    bytes_last_read = fread(buffer+bytes_read, 1, file_size-bytes_read, file);\n    bytes_read += bytes_last_read;\n  } while (bytes_last_read > 0 && bytes_read < file_size);\n\n  const std::string content(buffer, bytes_read);\n  delete[] buffer;\n\n  return content;\n}\n\n#if GTEST_HAS_DEATH_TEST\nstatic const std::vector<std::string>* g_injected_test_argvs =\n    nullptr;  // Owned.\n\nstd::vector<std::string> GetInjectableArgvs() {\n  if (g_injected_test_argvs != nullptr) {\n    return *g_injected_test_argvs;\n  }\n  return GetArgvs();\n}\n\nvoid SetInjectableArgvs(const std::vector<std::string>* new_argvs) {\n  if (g_injected_test_argvs != new_argvs) delete g_injected_test_argvs;\n  g_injected_test_argvs = new_argvs;\n}\n\nvoid SetInjectableArgvs(const std::vector<std::string>& new_argvs) {\n  SetInjectableArgvs(\n      new std::vector<std::string>(new_argvs.begin(), new_argvs.end()));\n}\n\nvoid ClearInjectableArgvs() {\n  delete g_injected_test_argvs;\n  g_injected_test_argvs = nullptr;\n}\n#endif  // GTEST_HAS_DEATH_TEST\n\n#if GTEST_OS_WINDOWS_MOBILE\nnamespace posix {\nvoid Abort() {\n  DebugBreak();\n  TerminateProcess(GetCurrentProcess(), 1);\n}\n}  // namespace posix\n#endif  // GTEST_OS_WINDOWS_MOBILE\n\n// Returns the name of the environment variable corresponding to the\n// given flag.  
For example, FlagToEnvVar(\"foo\") will return\n// \"GTEST_FOO\" in the open-source version.\nstatic std::string FlagToEnvVar(const char* flag) {\n  const std::string full_flag =\n      (Message() << GTEST_FLAG_PREFIX_ << flag).GetString();\n\n  Message env_var;\n  for (size_t i = 0; i != full_flag.length(); i++) {\n    env_var << ToUpper(full_flag.c_str()[i]);\n  }\n\n  return env_var.GetString();\n}\n\n// Parses 'str' for a 32-bit signed integer.  If successful, writes\n// the result to *value and returns true; otherwise leaves *value\n// unchanged and returns false.\nbool ParseInt32(const Message& src_text, const char* str, Int32* value) {\n  // Parses the environment variable as a decimal integer.\n  char* end = nullptr;\n  const long long_value = strtol(str, &end, 10);  // NOLINT\n\n  // Has strtol() consumed all characters in the string?\n  if (*end != '\\0') {\n    // No - an invalid character was encountered.\n    Message msg;\n    msg << \"WARNING: \" << src_text\n        << \" is expected to be a 32-bit integer, but actually\"\n        << \" has value \\\"\" << str << \"\\\".\\n\";\n    printf(\"%s\", msg.GetString().c_str());\n    fflush(stdout);\n    return false;\n  }\n\n  // Is the parsed value in the range of an Int32?\n  const Int32 result = static_cast<Int32>(long_value);\n  if (long_value == LONG_MAX || long_value == LONG_MIN ||\n      // The parsed value overflows as a long.  
(strtol() returns\n      // LONG_MAX or LONG_MIN when the input overflows.)\n      result != long_value\n      // The parsed value overflows as an Int32.\n      ) {\n    Message msg;\n    msg << \"WARNING: \" << src_text\n        << \" is expected to be a 32-bit integer, but actually\"\n        << \" has value \" << str << \", which overflows.\\n\";\n    printf(\"%s\", msg.GetString().c_str());\n    fflush(stdout);\n    return false;\n  }\n\n  *value = result;\n  return true;\n}\n\n// Reads and returns the Boolean environment variable corresponding to\n// the given flag; if it's not set, returns default_value.\n//\n// The value is considered true if and only if it's not \"0\".\nbool BoolFromGTestEnv(const char* flag, bool default_value) {\n#if defined(GTEST_GET_BOOL_FROM_ENV_)\n  return GTEST_GET_BOOL_FROM_ENV_(flag, default_value);\n#else\n  const std::string env_var = FlagToEnvVar(flag);\n  const char* const string_value = posix::GetEnv(env_var.c_str());\n  return string_value == nullptr ? 
default_value\n                                 : strcmp(string_value, \"0\") != 0;\n#endif  // defined(GTEST_GET_BOOL_FROM_ENV_)\n}\n\n// Reads and returns a 32-bit integer stored in the environment\n// variable corresponding to the given flag; if it isn't set or\n// doesn't represent a valid 32-bit integer, returns default_value.\nInt32 Int32FromGTestEnv(const char* flag, Int32 default_value) {\n#if defined(GTEST_GET_INT32_FROM_ENV_)\n  return GTEST_GET_INT32_FROM_ENV_(flag, default_value);\n#else\n  const std::string env_var = FlagToEnvVar(flag);\n  const char* const string_value = posix::GetEnv(env_var.c_str());\n  if (string_value == nullptr) {\n    // The environment variable is not set.\n    return default_value;\n  }\n\n  Int32 result = default_value;\n  if (!ParseInt32(Message() << \"Environment variable \" << env_var,\n                  string_value, &result)) {\n    printf(\"The default value %s is used.\\n\",\n           (Message() << default_value).GetString().c_str());\n    fflush(stdout);\n    return default_value;\n  }\n\n  return result;\n#endif  // defined(GTEST_GET_INT32_FROM_ENV_)\n}\n\n// As a special case for the 'output' flag, if GTEST_OUTPUT is not\n// set, we look for XML_OUTPUT_FILE, which is set by the Bazel build\n// system.  
The value of XML_OUTPUT_FILE is a filename without the\n// \"xml:\" prefix of GTEST_OUTPUT.\n// Note that this is meant to be called at the call site so it does\n// not check that the flag is 'output'\n// In essence this checks an env variable called XML_OUTPUT_FILE\n// and if it is set we prepend \"xml:\" to its value, if it not set we return \"\"\nstd::string OutputFlagAlsoCheckEnvVar(){\n  std::string default_value_for_output_flag = \"\";\n  const char* xml_output_file_env = posix::GetEnv(\"XML_OUTPUT_FILE\");\n  if (nullptr != xml_output_file_env) {\n    default_value_for_output_flag = std::string(\"xml:\") + xml_output_file_env;\n  }\n  return default_value_for_output_flag;\n}\n\n// Reads and returns the string environment variable corresponding to\n// the given flag; if it's not set, returns default_value.\nconst char* StringFromGTestEnv(const char* flag, const char* default_value) {\n#if defined(GTEST_GET_STRING_FROM_ENV_)\n  return GTEST_GET_STRING_FROM_ENV_(flag, default_value);\n#else\n  const std::string env_var = FlagToEnvVar(flag);\n  const char* const value = posix::GetEnv(env_var.c_str());\n  return value == nullptr ? default_value : value;\n#endif  // defined(GTEST_GET_STRING_FROM_ENV_)\n}\n\n}  // namespace internal\n}  // namespace testing\n"
  },
  {
    "path": "test/googletest/src/gtest-printers.cc",
    "content": "// Copyright 2007, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\n// Google Test - The Google C++ Testing and Mocking Framework\n//\n// This file implements a universal value printer that can print a\n// value of any type T:\n//\n//   void ::testing::internal::UniversalPrinter<T>::Print(value, ostream_ptr);\n//\n// It uses the << operator when possible, and prints the bytes in the\n// object otherwise.  
A user can override its behavior for a class\n// type Foo by defining either operator<<(::std::ostream&, const Foo&)\n// or void PrintTo(const Foo&, ::std::ostream*) in the namespace that\n// defines Foo.\n\n#include \"gtest/gtest-printers.h\"\n#include <stdio.h>\n#include <cctype>\n#include <cwchar>\n#include <ostream>  // NOLINT\n#include <string>\n#include \"gtest/internal/gtest-port.h\"\n#include \"src/gtest-internal-inl.h\"\n\nnamespace testing {\n\nnamespace {\n\nusing ::std::ostream;\n\n// Prints a segment of bytes in the given object.\nGTEST_ATTRIBUTE_NO_SANITIZE_MEMORY_\nGTEST_ATTRIBUTE_NO_SANITIZE_ADDRESS_\nGTEST_ATTRIBUTE_NO_SANITIZE_HWADDRESS_\nGTEST_ATTRIBUTE_NO_SANITIZE_THREAD_\nvoid PrintByteSegmentInObjectTo(const unsigned char* obj_bytes, size_t start,\n                                size_t count, ostream* os) {\n  char text[5] = \"\";\n  for (size_t i = 0; i != count; i++) {\n    const size_t j = start + i;\n    if (i != 0) {\n      // Organizes the bytes into groups of 2 for easy parsing by\n      // human.\n      if ((j % 2) == 0)\n        *os << ' ';\n      else\n        *os << '-';\n    }\n    GTEST_SNPRINTF_(text, sizeof(text), \"%02X\", obj_bytes[j]);\n    *os << text;\n  }\n}\n\n// Prints the bytes in the given value to the given ostream.\nvoid PrintBytesInObjectToImpl(const unsigned char* obj_bytes, size_t count,\n                              ostream* os) {\n  // Tells the user how big the object is.\n  *os << count << \"-byte object <\";\n\n  const size_t kThreshold = 132;\n  const size_t kChunkSize = 64;\n  // If the object size is bigger than kThreshold, we'll have to omit\n  // some details by printing only the first and the last kChunkSize\n  // bytes.\n  if (count < kThreshold) {\n    PrintByteSegmentInObjectTo(obj_bytes, 0, count, os);\n  } else {\n    PrintByteSegmentInObjectTo(obj_bytes, 0, kChunkSize, os);\n    *os << \" ... 
\";\n    // Rounds up to 2-byte boundary.\n    const size_t resume_pos = (count - kChunkSize + 1)/2*2;\n    PrintByteSegmentInObjectTo(obj_bytes, resume_pos, count - resume_pos, os);\n  }\n  *os << \">\";\n}\n\n}  // namespace\n\nnamespace internal2 {\n\n// Delegates to PrintBytesInObjectToImpl() to print the bytes in the\n// given object.  The delegation simplifies the implementation, which\n// uses the << operator and thus is easier done outside of the\n// ::testing::internal namespace, which contains a << operator that\n// sometimes conflicts with the one in STL.\nvoid PrintBytesInObjectTo(const unsigned char* obj_bytes, size_t count,\n                          ostream* os) {\n  PrintBytesInObjectToImpl(obj_bytes, count, os);\n}\n\n}  // namespace internal2\n\nnamespace internal {\n\n// Depending on the value of a char (or wchar_t), we print it in one\n// of three formats:\n//   - as is if it's a printable ASCII (e.g. 'a', '2', ' '),\n//   - as a hexadecimal escape sequence (e.g. '\\x7F'), or\n//   - as a special escape sequence (e.g. '\\r', '\\n').\nenum CharFormat {\n  kAsIs,\n  kHexEscape,\n  kSpecialEscape\n};\n\n// Returns true if c is a printable ASCII character.  
We test the\n// value of c directly instead of calling isprint(), which is buggy on\n// Windows Mobile.\ninline bool IsPrintableAscii(wchar_t c) {\n  return 0x20 <= c && c <= 0x7E;\n}\n\n// Prints a wide or narrow char c as a character literal without the\n// quotes, escaping it when necessary; returns how c was formatted.\n// The template argument UnsignedChar is the unsigned version of Char,\n// which is the type of c.\ntemplate <typename UnsignedChar, typename Char>\nstatic CharFormat PrintAsCharLiteralTo(Char c, ostream* os) {\n  wchar_t w_c = static_cast<wchar_t>(c);\n  switch (w_c) {\n    case L'\\0':\n      *os << \"\\\\0\";\n      break;\n    case L'\\'':\n      *os << \"\\\\'\";\n      break;\n    case L'\\\\':\n      *os << \"\\\\\\\\\";\n      break;\n    case L'\\a':\n      *os << \"\\\\a\";\n      break;\n    case L'\\b':\n      *os << \"\\\\b\";\n      break;\n    case L'\\f':\n      *os << \"\\\\f\";\n      break;\n    case L'\\n':\n      *os << \"\\\\n\";\n      break;\n    case L'\\r':\n      *os << \"\\\\r\";\n      break;\n    case L'\\t':\n      *os << \"\\\\t\";\n      break;\n    case L'\\v':\n      *os << \"\\\\v\";\n      break;\n    default:\n      if (IsPrintableAscii(w_c)) {\n        *os << static_cast<char>(c);\n        return kAsIs;\n      } else {\n        ostream::fmtflags flags = os->flags();\n        *os << \"\\\\x\" << std::hex << std::uppercase\n            << static_cast<int>(static_cast<UnsignedChar>(c));\n        os->flags(flags);\n        return kHexEscape;\n      }\n  }\n  return kSpecialEscape;\n}\n\n// Prints a wchar_t c as if it's part of a string literal, escaping it when\n// necessary; returns how c was formatted.\nstatic CharFormat PrintAsStringLiteralTo(wchar_t c, ostream* os) {\n  switch (c) {\n    case L'\\'':\n      *os << \"'\";\n      return kAsIs;\n    case L'\"':\n      *os << \"\\\\\\\"\";\n      return kSpecialEscape;\n    default:\n      return PrintAsCharLiteralTo<wchar_t>(c, os);\n  }\n}\n\n// Prints a char 
c as if it's part of a string literal, escaping it when\n// necessary; returns how c was formatted.\nstatic CharFormat PrintAsStringLiteralTo(char c, ostream* os) {\n  return PrintAsStringLiteralTo(\n      static_cast<wchar_t>(static_cast<unsigned char>(c)), os);\n}\n\n// Prints a wide or narrow character c and its code.  '\\0' is printed\n// as \"'\\\\0'\", other unprintable characters are also properly escaped\n// using the standard C++ escape sequence.  The template argument\n// UnsignedChar is the unsigned version of Char, which is the type of c.\ntemplate <typename UnsignedChar, typename Char>\nvoid PrintCharAndCodeTo(Char c, ostream* os) {\n  // First, print c as a literal in the most readable form we can find.\n  *os << ((sizeof(c) > 1) ? \"L'\" : \"'\");\n  const CharFormat format = PrintAsCharLiteralTo<UnsignedChar>(c, os);\n  *os << \"'\";\n\n  // To aid user debugging, we also print c's code in decimal, unless\n  // it's 0 (in which case c was printed as '\\\\0', making the code\n  // obvious).\n  if (c == 0)\n    return;\n  *os << \" (\" << static_cast<int>(c);\n\n  // For more convenience, we print c's code again in hexadecimal,\n  // unless c was already printed in the form '\\x##' or the code is in\n  // [1, 9].\n  if (format == kHexEscape || (1 <= c && c <= 9)) {\n    // Do nothing.\n  } else {\n    *os << \", 0x\" << String::FormatHexInt(static_cast<int>(c));\n  }\n  *os << \")\";\n}\n\nvoid PrintTo(unsigned char c, ::std::ostream* os) {\n  PrintCharAndCodeTo<unsigned char>(c, os);\n}\nvoid PrintTo(signed char c, ::std::ostream* os) {\n  PrintCharAndCodeTo<unsigned char>(c, os);\n}\n\n// Prints a wchar_t as a symbol if it is printable or as its internal\n// code otherwise and also as its code.  L'\\0' is printed as \"L'\\\\0'\".\nvoid PrintTo(wchar_t wc, ostream* os) {\n  PrintCharAndCodeTo<wchar_t>(wc, os);\n}\n\n// Prints the given array of characters to the ostream.  
CharType must be either\n// char or wchar_t.\n// The array starts at begin, the length is len, it may include '\\0' characters\n// and may not be NUL-terminated.\ntemplate <typename CharType>\nGTEST_ATTRIBUTE_NO_SANITIZE_MEMORY_\nGTEST_ATTRIBUTE_NO_SANITIZE_ADDRESS_\nGTEST_ATTRIBUTE_NO_SANITIZE_HWADDRESS_\nGTEST_ATTRIBUTE_NO_SANITIZE_THREAD_\nstatic CharFormat PrintCharsAsStringTo(\n    const CharType* begin, size_t len, ostream* os) {\n  const char* const kQuoteBegin = sizeof(CharType) == 1 ? \"\\\"\" : \"L\\\"\";\n  *os << kQuoteBegin;\n  bool is_previous_hex = false;\n  CharFormat print_format = kAsIs;\n  for (size_t index = 0; index < len; ++index) {\n    const CharType cur = begin[index];\n    if (is_previous_hex && IsXDigit(cur)) {\n      // Previous character is of '\\x..' form and this character can be\n      // interpreted as another hexadecimal digit in its number. Break string to\n      // disambiguate.\n      *os << \"\\\" \" << kQuoteBegin;\n    }\n    is_previous_hex = PrintAsStringLiteralTo(cur, os) == kHexEscape;\n    // Remember if any characters required hex escaping.\n    if (is_previous_hex) {\n      print_format = kHexEscape;\n    }\n  }\n  *os << \"\\\"\";\n  return print_format;\n}\n\n// Prints a (const) char/wchar_t array of 'len' elements, starting at address\n// 'begin'.  
CharType must be either char or wchar_t.\ntemplate <typename CharType>\nGTEST_ATTRIBUTE_NO_SANITIZE_MEMORY_\nGTEST_ATTRIBUTE_NO_SANITIZE_ADDRESS_\nGTEST_ATTRIBUTE_NO_SANITIZE_HWADDRESS_\nGTEST_ATTRIBUTE_NO_SANITIZE_THREAD_\nstatic void UniversalPrintCharArray(\n    const CharType* begin, size_t len, ostream* os) {\n  // The code\n  //   const char kFoo[] = \"foo\";\n  // generates an array of 4, not 3, elements, with the last one being '\\0'.\n  //\n  // Therefore when printing a char array, we don't print the last element if\n  // it's '\\0', such that the output matches the string literal as it's\n  // written in the source code.\n  if (len > 0 && begin[len - 1] == '\\0') {\n    PrintCharsAsStringTo(begin, len - 1, os);\n    return;\n  }\n\n  // If, however, the last element in the array is not '\\0', e.g.\n  //    const char kFoo[] = { 'f', 'o', 'o' };\n  // we must print the entire array.  We also print a message to indicate\n  // that the array is not NUL-terminated.\n  PrintCharsAsStringTo(begin, len, os);\n  *os << \" (no terminating NUL)\";\n}\n\n// Prints a (const) char array of 'len' elements, starting at address 'begin'.\nvoid UniversalPrintArray(const char* begin, size_t len, ostream* os) {\n  UniversalPrintCharArray(begin, len, os);\n}\n\n// Prints a (const) wchar_t array of 'len' elements, starting at address\n// 'begin'.\nvoid UniversalPrintArray(const wchar_t* begin, size_t len, ostream* os) {\n  UniversalPrintCharArray(begin, len, os);\n}\n\n// Prints the given C string to the ostream.\nvoid PrintTo(const char* s, ostream* os) {\n  if (s == nullptr) {\n    *os << \"NULL\";\n  } else {\n    *os << ImplicitCast_<const void*>(s) << \" pointing to \";\n    PrintCharsAsStringTo(s, strlen(s), os);\n  }\n}\n\n// MSVC compiler can be configured to define whar_t as a typedef\n// of unsigned short. 
Defining an overload for const wchar_t* in that case\n// would cause pointers to unsigned shorts be printed as wide strings,\n// possibly accessing more memory than intended and causing invalid\n// memory accesses. MSVC defines _NATIVE_WCHAR_T_DEFINED symbol when\n// wchar_t is implemented as a native type.\n#if !defined(_MSC_VER) || defined(_NATIVE_WCHAR_T_DEFINED)\n// Prints the given wide C string to the ostream.\nvoid PrintTo(const wchar_t* s, ostream* os) {\n  if (s == nullptr) {\n    *os << \"NULL\";\n  } else {\n    *os << ImplicitCast_<const void*>(s) << \" pointing to \";\n    PrintCharsAsStringTo(s, wcslen(s), os);\n  }\n}\n#endif  // wchar_t is native\n\nnamespace {\n\nbool ContainsUnprintableControlCodes(const char* str, size_t length) {\n  const unsigned char *s = reinterpret_cast<const unsigned char *>(str);\n\n  for (size_t i = 0; i < length; i++) {\n    unsigned char ch = *s++;\n    if (std::iscntrl(ch)) {\n        switch (ch) {\n        case '\\t':\n        case '\\n':\n        case '\\r':\n          break;\n        default:\n          return true;\n        }\n      }\n  }\n  return false;\n}\n\nbool IsUTF8TrailByte(unsigned char t) { return 0x80 <= t && t<= 0xbf; }\n\nbool IsValidUTF8(const char* str, size_t length) {\n  const unsigned char *s = reinterpret_cast<const unsigned char *>(str);\n\n  for (size_t i = 0; i < length;) {\n    unsigned char lead = s[i++];\n\n    if (lead <= 0x7f) {\n      continue;  // single-byte character (ASCII) 0..7F\n    }\n    if (lead < 0xc2) {\n      return false;  // trail byte or non-shortest form\n    } else if (lead <= 0xdf && (i + 1) <= length && IsUTF8TrailByte(s[i])) {\n      ++i;  // 2-byte character\n    } else if (0xe0 <= lead && lead <= 0xef && (i + 2) <= length &&\n               IsUTF8TrailByte(s[i]) &&\n               IsUTF8TrailByte(s[i + 1]) &&\n               // check for non-shortest form and surrogate\n               (lead != 0xe0 || s[i] >= 0xa0) &&\n               (lead != 0xed || s[i] < 0xa0)) 
{\n      i += 2;  // 3-byte character\n    } else if (0xf0 <= lead && lead <= 0xf4 && (i + 3) <= length &&\n               IsUTF8TrailByte(s[i]) &&\n               IsUTF8TrailByte(s[i + 1]) &&\n               IsUTF8TrailByte(s[i + 2]) &&\n               // check for non-shortest form\n               (lead != 0xf0 || s[i] >= 0x90) &&\n               (lead != 0xf4 || s[i] < 0x90)) {\n      i += 3;  // 4-byte character\n    } else {\n      return false;\n    }\n  }\n  return true;\n}\n\nvoid ConditionalPrintAsText(const char* str, size_t length, ostream* os) {\n  if (!ContainsUnprintableControlCodes(str, length) &&\n      IsValidUTF8(str, length)) {\n    *os << \"\\n    As Text: \\\"\" << str << \"\\\"\";\n  }\n}\n\n}  // anonymous namespace\n\nvoid PrintStringTo(const ::std::string& s, ostream* os) {\n  if (PrintCharsAsStringTo(s.data(), s.size(), os) == kHexEscape) {\n    if (GTEST_FLAG(print_utf8)) {\n      ConditionalPrintAsText(s.data(), s.size(), os);\n    }\n  }\n}\n\n#if GTEST_HAS_STD_WSTRING\nvoid PrintWideStringTo(const ::std::wstring& s, ostream* os) {\n  PrintCharsAsStringTo(s.data(), s.size(), os);\n}\n#endif  // GTEST_HAS_STD_WSTRING\n\n}  // namespace internal\n\n}  // namespace testing\n"
  },
  {
    "path": "test/googletest/src/gtest-test-part.cc",
    "content": "// Copyright 2008, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n//\n// The Google C++ Testing and Mocking Framework (Google Test)\n\n#include \"gtest/gtest-test-part.h\"\n\n#include \"gtest/internal/gtest-port.h\"\n#include \"src/gtest-internal-inl.h\"\n\nnamespace testing {\n\nusing internal::GetUnitTestImpl;\n\n// Gets the summary of the failure message by omitting the stack trace\n// in it.\nstd::string TestPartResult::ExtractSummary(const char* message) {\n  const char* const stack_trace = strstr(message, internal::kStackTraceMarker);\n  return stack_trace == nullptr ? message : std::string(message, stack_trace);\n}\n\n// Prints a TestPartResult object.\nstd::ostream& operator<<(std::ostream& os, const TestPartResult& result) {\n  return os << internal::FormatFileLocation(result.file_name(),\n                                            result.line_number())\n            << \" \"\n            << (result.type() == TestPartResult::kSuccess\n                    ? \"Success\"\n                    : result.type() == TestPartResult::kSkip\n                          ? \"Skipped\"\n                          : result.type() == TestPartResult::kFatalFailure\n                                ? 
\"Fatal failure\"\n                                : \"Non-fatal failure\")\n            << \":\\n\"\n            << result.message() << std::endl;\n}\n\n// Appends a TestPartResult to the array.\nvoid TestPartResultArray::Append(const TestPartResult& result) {\n  array_.push_back(result);\n}\n\n// Returns the TestPartResult at the given index (0-based).\nconst TestPartResult& TestPartResultArray::GetTestPartResult(int index) const {\n  if (index < 0 || index >= size()) {\n    printf(\"\\nInvalid index (%d) into TestPartResultArray.\\n\", index);\n    internal::posix::Abort();\n  }\n\n  return array_[static_cast<size_t>(index)];\n}\n\n// Returns the number of TestPartResult objects in the array.\nint TestPartResultArray::size() const {\n  return static_cast<int>(array_.size());\n}\n\nnamespace internal {\n\nHasNewFatalFailureHelper::HasNewFatalFailureHelper()\n    : has_new_fatal_failure_(false),\n      original_reporter_(GetUnitTestImpl()->\n                         GetTestPartResultReporterForCurrentThread()) {\n  GetUnitTestImpl()->SetTestPartResultReporterForCurrentThread(this);\n}\n\nHasNewFatalFailureHelper::~HasNewFatalFailureHelper() {\n  GetUnitTestImpl()->SetTestPartResultReporterForCurrentThread(\n      original_reporter_);\n}\n\nvoid HasNewFatalFailureHelper::ReportTestPartResult(\n    const TestPartResult& result) {\n  if (result.fatally_failed())\n    has_new_fatal_failure_ = true;\n  original_reporter_->ReportTestPartResult(result);\n}\n\n}  // namespace internal\n\n}  // namespace testing\n"
  },
  {
    "path": "test/googletest/src/gtest-typed-test.cc",
    "content": "// Copyright 2008 Google Inc.\n// All Rights Reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\n#include \"gtest/gtest-typed-test.h\"\n\n#include \"gtest/gtest.h\"\n\nnamespace testing {\nnamespace internal {\n\n#if GTEST_HAS_TYPED_TEST_P\n\n// Skips to the first non-space char in str. 
Returns an empty string if str\n// contains only whitespace characters.\nstatic const char* SkipSpaces(const char* str) {\n  while (IsSpace(*str))\n    str++;\n  return str;\n}\n\nstatic std::vector<std::string> SplitIntoTestNames(const char* src) {\n  std::vector<std::string> name_vec;\n  src = SkipSpaces(src);\n  for (; src != nullptr; src = SkipComma(src)) {\n    name_vec.push_back(StripTrailingSpaces(GetPrefixUntilComma(src)));\n  }\n  return name_vec;\n}\n\n// Verifies that registered_tests match the test names in\n// registered_tests_; returns registered_tests if successful, or\n// aborts the program otherwise.\nconst char* TypedTestSuitePState::VerifyRegisteredTestNames(\n    const char* file, int line, const char* registered_tests) {\n  typedef RegisteredTestsMap::const_iterator RegisteredTestIter;\n  registered_ = true;\n\n  std::vector<std::string> name_vec = SplitIntoTestNames(registered_tests);\n\n  Message errors;\n\n  std::set<std::string> tests;\n  for (std::vector<std::string>::const_iterator name_it = name_vec.begin();\n       name_it != name_vec.end(); ++name_it) {\n    const std::string& name = *name_it;\n    if (tests.count(name) != 0) {\n      errors << \"Test \" << name << \" is listed more than once.\\n\";\n      continue;\n    }\n\n    bool found = false;\n    for (RegisteredTestIter it = registered_tests_.begin();\n         it != registered_tests_.end();\n         ++it) {\n      if (name == it->first) {\n        found = true;\n        break;\n      }\n    }\n\n    if (found) {\n      tests.insert(name);\n    } else {\n      errors << \"No test named \" << name\n             << \" can be found in this test suite.\\n\";\n    }\n  }\n\n  for (RegisteredTestIter it = registered_tests_.begin();\n       it != registered_tests_.end();\n       ++it) {\n    if (tests.count(it->first) == 0) {\n      errors << \"You forgot to list test \" << it->first << \".\\n\";\n    }\n  }\n\n  const std::string& errors_str = errors.GetString();\n  if (errors_str 
!= \"\") {\n    fprintf(stderr, \"%s %s\", FormatFileLocation(file, line).c_str(),\n            errors_str.c_str());\n    fflush(stderr);\n    posix::Abort();\n  }\n\n  return registered_tests;\n}\n\n#endif  // GTEST_HAS_TYPED_TEST_P\n\n}  // namespace internal\n}  // namespace testing\n"
  },
  {
    "path": "test/googletest/src/gtest.cc",
    "content": "// Copyright 2005, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n//\n// The Google C++ Testing and Mocking Framework (Google Test)\n\n#include \"gtest/gtest.h\"\n#include \"gtest/internal/custom/gtest.h\"\n#include \"gtest/gtest-spi.h\"\n\n#include <ctype.h>\n#include <math.h>\n#include <stdarg.h>\n#include <stdio.h>\n#include <stdlib.h>\n#include <time.h>\n#include <wchar.h>\n#include <wctype.h>\n\n#include <algorithm>\n#include <iomanip>\n#include <limits>\n#include <list>\n#include <map>\n#include <ostream>  // NOLINT\n#include <sstream>\n#include <vector>\n\n#if GTEST_OS_LINUX\n\n# define GTEST_HAS_GETTIMEOFDAY_ 1\n\n# include <fcntl.h>  // NOLINT\n# include <limits.h>  // NOLINT\n# include <sched.h>  // NOLINT\n// Declares vsnprintf().  
This header is not available on Windows.\n# include <strings.h>  // NOLINT\n# include <sys/mman.h>  // NOLINT\n# include <sys/time.h>  // NOLINT\n# include <unistd.h>  // NOLINT\n# include <string>\n\n#elif GTEST_OS_ZOS\n# define GTEST_HAS_GETTIMEOFDAY_ 1\n# include <sys/time.h>  // NOLINT\n\n// On z/OS we additionally need strings.h for strcasecmp.\n# include <strings.h>  // NOLINT\n\n#elif GTEST_OS_WINDOWS_MOBILE  // We are on Windows CE.\n\n# include <windows.h>  // NOLINT\n# undef min\n\n#elif GTEST_OS_WINDOWS  // We are on Windows proper.\n\n# include <windows.h>  // NOLINT\n# undef min\n\n#ifdef _MSC_VER\n# include <crtdbg.h>  // NOLINT\n# include <debugapi.h>  // NOLINT\n#endif\n\n# include <io.h>  // NOLINT\n# include <sys/timeb.h>  // NOLINT\n# include <sys/types.h>  // NOLINT\n# include <sys/stat.h>  // NOLINT\n\n# if GTEST_OS_WINDOWS_MINGW\n// MinGW has gettimeofday() but not _ftime64().\n#  define GTEST_HAS_GETTIMEOFDAY_ 1\n#  include <sys/time.h>  // NOLINT\n# endif  // GTEST_OS_WINDOWS_MINGW\n\n#else\n\n// Assume other platforms have gettimeofday().\n# define GTEST_HAS_GETTIMEOFDAY_ 1\n\n// cpplint thinks that the header is already included, so we want to\n// silence it.\n# include <sys/time.h>  // NOLINT\n# include <unistd.h>  // NOLINT\n\n#endif  // GTEST_OS_LINUX\n\n#if GTEST_HAS_EXCEPTIONS\n# include <stdexcept>\n#endif\n\n#if GTEST_CAN_STREAM_RESULTS_\n# include <arpa/inet.h>  // NOLINT\n# include <netdb.h>  // NOLINT\n# include <sys/socket.h>  // NOLINT\n# include <sys/types.h>  // NOLINT\n#endif\n\n#include \"src/gtest-internal-inl.h\"\n\n#if GTEST_OS_WINDOWS\n# define vsnprintf _vsnprintf\n#endif  // GTEST_OS_WINDOWS\n\n#if GTEST_OS_MAC\n#ifndef GTEST_OS_IOS\n#include <crt_externs.h>\n#endif\n#endif\n\n#if GTEST_HAS_ABSL\n#include \"absl/debugging/failure_signal_handler.h\"\n#include \"absl/debugging/stacktrace.h\"\n#include \"absl/debugging/symbolize.h\"\n#include \"absl/strings/str_cat.h\"\n#endif  // GTEST_HAS_ABSL\n\nnamespace testing 
{\n\nusing internal::CountIf;\nusing internal::ForEach;\nusing internal::GetElementOr;\nusing internal::Shuffle;\n\n// Constants.\n\n// A test whose test suite name or test name matches this filter is\n// disabled and not run.\nstatic const char kDisableTestFilter[] = \"DISABLED_*:*/DISABLED_*\";\n\n// A test suite whose name matches this filter is considered a death\n// test suite and will be run before test suites whose name doesn't\n// match this filter.\nstatic const char kDeathTestSuiteFilter[] = \"*DeathTest:*DeathTest/*\";\n\n// A test filter that matches everything.\nstatic const char kUniversalFilter[] = \"*\";\n\n// The default output format.\nstatic const char kDefaultOutputFormat[] = \"xml\";\n// The default output file.\nstatic const char kDefaultOutputFile[] = \"test_detail\";\n\n// The environment variable name for the test shard index.\nstatic const char kTestShardIndex[] = \"GTEST_SHARD_INDEX\";\n// The environment variable name for the total number of test shards.\nstatic const char kTestTotalShards[] = \"GTEST_TOTAL_SHARDS\";\n// The environment variable name for the test shard status file.\nstatic const char kTestShardStatusFile[] = \"GTEST_SHARD_STATUS_FILE\";\n\nnamespace internal {\n\n// The text used in failure messages to indicate the start of the\n// stack trace.\nconst char kStackTraceMarker[] = \"\\nStack trace:\\n\";\n\n// g_help_flag is true if and only if the --help flag or an equivalent form\n// is specified on the command line.\nbool g_help_flag = false;\n\n// Utilty function to Open File for Writing\nstatic FILE* OpenFileForWriting(const std::string& output_file) {\n  FILE* fileout = nullptr;\n  FilePath output_file_path(output_file);\n  FilePath output_dir(output_file_path.RemoveFileName());\n\n  if (output_dir.CreateDirectoriesRecursively()) {\n    fileout = posix::FOpen(output_file.c_str(), \"w\");\n  }\n  if (fileout == nullptr) {\n    GTEST_LOG_(FATAL) << \"Unable to open file \\\"\" << output_file << \"\\\"\";\n  }\n  return 
fileout;\n}\n\n}  // namespace internal\n\n// Bazel passes in the argument to '--test_filter' via the TESTBRIDGE_TEST_ONLY\n// environment variable.\nstatic const char* GetDefaultFilter() {\n  const char* const testbridge_test_only =\n      internal::posix::GetEnv(\"TESTBRIDGE_TEST_ONLY\");\n  if (testbridge_test_only != nullptr) {\n    return testbridge_test_only;\n  }\n  return kUniversalFilter;\n}\n\nGTEST_DEFINE_bool_(\n    also_run_disabled_tests,\n    internal::BoolFromGTestEnv(\"also_run_disabled_tests\", false),\n    \"Run disabled tests too, in addition to the tests normally being run.\");\n\nGTEST_DEFINE_bool_(\n    break_on_failure, internal::BoolFromGTestEnv(\"break_on_failure\", false),\n    \"True if and only if a failed assertion should be a debugger \"\n    \"break-point.\");\n\nGTEST_DEFINE_bool_(catch_exceptions,\n                   internal::BoolFromGTestEnv(\"catch_exceptions\", true),\n                   \"True if and only if \" GTEST_NAME_\n                   \" should catch exceptions and treat them as test failures.\");\n\nGTEST_DEFINE_string_(\n    color,\n    internal::StringFromGTestEnv(\"color\", \"auto\"),\n    \"Whether to use colors in the output.  Valid values: yes, no, \"\n    \"and auto.  'auto' means to use colors if the output is \"\n    \"being sent to a terminal and the TERM environment variable \"\n    \"is set to a terminal type that supports colors.\");\n\nGTEST_DEFINE_string_(\n    filter,\n    internal::StringFromGTestEnv(\"filter\", GetDefaultFilter()),\n    \"A colon-separated list of glob (not regex) patterns \"\n    \"for filtering the tests to run, optionally followed by a \"\n    \"'-' and a : separated list of negative patterns (tests to \"\n    \"exclude).  
A test is run if it matches one of the positive \"\n    \"patterns and does not match any of the negative patterns.\");\n\nGTEST_DEFINE_bool_(\n    install_failure_signal_handler,\n    internal::BoolFromGTestEnv(\"install_failure_signal_handler\", false),\n    \"If true and supported on the current platform, \" GTEST_NAME_ \" should \"\n    \"install a signal handler that dumps debugging information when fatal \"\n    \"signals are raised.\");\n\nGTEST_DEFINE_bool_(list_tests, false,\n                   \"List all tests without running them.\");\n\n// The net priority order after flag processing is thus:\n//   --gtest_output command line flag\n//   GTEST_OUTPUT environment variable\n//   XML_OUTPUT_FILE environment variable\n//   ''\nGTEST_DEFINE_string_(\n    output,\n    internal::StringFromGTestEnv(\"output\",\n      internal::OutputFlagAlsoCheckEnvVar().c_str()),\n    \"A format (defaults to \\\"xml\\\" but can be specified to be \\\"json\\\"), \"\n    \"optionally followed by a colon and an output file name or directory. \"\n    \"A directory is indicated by a trailing pathname separator. \"\n    \"Examples: \\\"xml:filename.xml\\\", \\\"xml::directoryname/\\\". \"\n    \"If a directory is specified, output files will be created \"\n    \"within that directory, with file-names based on the test \"\n    \"executable's name and, if necessary, made unique by adding \"\n    \"digits.\");\n\nGTEST_DEFINE_bool_(print_time, internal::BoolFromGTestEnv(\"print_time\", true),\n                   \"True if and only if \" GTEST_NAME_\n                   \" should display elapsed time in text output.\");\n\nGTEST_DEFINE_bool_(print_utf8, internal::BoolFromGTestEnv(\"print_utf8\", true),\n                   \"True if and only if \" GTEST_NAME_\n                   \" prints UTF8 characters as text.\");\n\nGTEST_DEFINE_int32_(\n    random_seed,\n    internal::Int32FromGTestEnv(\"random_seed\", 0),\n    \"Random number seed to use when shuffling test orders.  
Must be in range \"\n    \"[1, 99999], or 0 to use a seed based on the current time.\");\n\nGTEST_DEFINE_int32_(\n    repeat,\n    internal::Int32FromGTestEnv(\"repeat\", 1),\n    \"How many times to repeat each test.  Specify a negative number \"\n    \"for repeating forever.  Useful for shaking out flaky tests.\");\n\nGTEST_DEFINE_bool_(show_internal_stack_frames, false,\n                   \"True if and only if \" GTEST_NAME_\n                   \" should include internal stack frames when \"\n                   \"printing test failure stack traces.\");\n\nGTEST_DEFINE_bool_(shuffle, internal::BoolFromGTestEnv(\"shuffle\", false),\n                   \"True if and only if \" GTEST_NAME_\n                   \" should randomize tests' order on every run.\");\n\nGTEST_DEFINE_int32_(\n    stack_trace_depth,\n    internal::Int32FromGTestEnv(\"stack_trace_depth\", kMaxStackTraceDepth),\n    \"The maximum number of stack frames to print when an \"\n    \"assertion fails.  The valid range is 0 through 100, inclusive.\");\n\nGTEST_DEFINE_string_(\n    stream_result_to,\n    internal::StringFromGTestEnv(\"stream_result_to\", \"\"),\n    \"This flag specifies the host name and the port number on which to stream \"\n    \"test results. Example: \\\"localhost:555\\\". The flag is effective only on \"\n    \"Linux.\");\n\nGTEST_DEFINE_bool_(\n    throw_on_failure,\n    internal::BoolFromGTestEnv(\"throw_on_failure\", false),\n    \"When this flag is specified, a failed assertion will throw an exception \"\n    \"if exceptions are enabled or exit the program with a non-zero code \"\n    \"otherwise. 
For use with an external test framework.\");\n\n#if GTEST_USE_OWN_FLAGFILE_FLAG_\nGTEST_DEFINE_string_(\n    flagfile,\n    internal::StringFromGTestEnv(\"flagfile\", \"\"),\n    \"This flag specifies the flagfile to read command-line flags from.\");\n#endif  // GTEST_USE_OWN_FLAGFILE_FLAG_\n\nnamespace internal {\n\n// Generates a random number from [0, range), using a Linear\n// Congruential Generator (LCG).  Crashes if 'range' is 0 or greater\n// than kMaxRange.\nUInt32 Random::Generate(UInt32 range) {\n  // These constants are the same as are used in glibc's rand(3).\n  // Use wider types than necessary to prevent unsigned overflow diagnostics.\n  state_ = static_cast<UInt32>(1103515245ULL*state_ + 12345U) % kMaxRange;\n\n  GTEST_CHECK_(range > 0)\n      << \"Cannot generate a number in the range [0, 0).\";\n  GTEST_CHECK_(range <= kMaxRange)\n      << \"Generation of a number in [0, \" << range << \") was requested, \"\n      << \"but this can only generate numbers in [0, \" << kMaxRange << \").\";\n\n  // Converting via modulus introduces a bit of downward bias, but\n  // it's simple, and a linear congruential generator isn't too good\n  // to begin with.\n  return state_ % range;\n}\n\n// GTestIsInitialized() returns true if and only if the user has initialized\n// Google Test.  
Useful for catching the user mistake of not initializing\n// Google Test before calling RUN_ALL_TESTS().\nstatic bool GTestIsInitialized() { return GetArgvs().size() > 0; }\n\n// Iterates over a vector of TestSuites, keeping a running sum of the\n// results of calling a given int-returning method on each.\n// Returns the sum.\nstatic int SumOverTestSuiteList(const std::vector<TestSuite*>& case_list,\n                                int (TestSuite::*method)() const) {\n  int sum = 0;\n  for (size_t i = 0; i < case_list.size(); i++) {\n    sum += (case_list[i]->*method)();\n  }\n  return sum;\n}\n\n// Returns true if and only if the test suite passed.\nstatic bool TestSuitePassed(const TestSuite* test_suite) {\n  return test_suite->should_run() && test_suite->Passed();\n}\n\n// Returns true if and only if the test suite failed.\nstatic bool TestSuiteFailed(const TestSuite* test_suite) {\n  return test_suite->should_run() && test_suite->Failed();\n}\n\n// Returns true if and only if test_suite contains at least one test that\n// should run.\nstatic bool ShouldRunTestSuite(const TestSuite* test_suite) {\n  return test_suite->should_run();\n}\n\n// AssertHelper constructor.\nAssertHelper::AssertHelper(TestPartResult::Type type,\n                           const char* file,\n                           int line,\n                           const char* message)\n    : data_(new AssertHelperData(type, file, line, message)) {\n}\n\nAssertHelper::~AssertHelper() {\n  delete data_;\n}\n\n// Message assignment, for assertion streaming support.\nvoid AssertHelper::operator=(const Message& message) const {\n  UnitTest::GetInstance()->\n    AddTestPartResult(data_->type, data_->file, data_->line,\n                      AppendUserMessage(data_->message, message),\n                      UnitTest::GetInstance()->impl()\n                      ->CurrentOsStackTraceExceptTop(1)\n                      // Skips the stack frame for this function itself.\n                      );  // 
NOLINT\n}\n\n// A copy of all command line arguments.  Set by InitGoogleTest().\nstatic ::std::vector<std::string> g_argvs;\n\n::std::vector<std::string> GetArgvs() {\n#if defined(GTEST_CUSTOM_GET_ARGVS_)\n  // GTEST_CUSTOM_GET_ARGVS_() may return a container of std::string or\n  // ::string. This code converts it to the appropriate type.\n  const auto& custom = GTEST_CUSTOM_GET_ARGVS_();\n  return ::std::vector<std::string>(custom.begin(), custom.end());\n#else   // defined(GTEST_CUSTOM_GET_ARGVS_)\n  return g_argvs;\n#endif  // defined(GTEST_CUSTOM_GET_ARGVS_)\n}\n\n// Returns the current application's name, removing directory path if that\n// is present.\nFilePath GetCurrentExecutableName() {\n  FilePath result;\n\n#if GTEST_OS_WINDOWS || GTEST_OS_OS2\n  result.Set(FilePath(GetArgvs()[0]).RemoveExtension(\"exe\"));\n#else\n  result.Set(FilePath(GetArgvs()[0]));\n#endif  // GTEST_OS_WINDOWS\n\n  return result.RemoveDirectoryName();\n}\n\n// Functions for processing the gtest_output flag.\n\n// Returns the output format, or \"\" for normal printed output.\nstd::string UnitTestOptions::GetOutputFormat() {\n  const char* const gtest_output_flag = GTEST_FLAG(output).c_str();\n  const char* const colon = strchr(gtest_output_flag, ':');\n  return (colon == nullptr)\n             ? 
std::string(gtest_output_flag)\n             : std::string(gtest_output_flag,\n                           static_cast<size_t>(colon - gtest_output_flag));\n}\n\n// Returns the name of the requested output file, or the default if none\n// was explicitly specified.\nstd::string UnitTestOptions::GetAbsolutePathToOutputFile() {\n  const char* const gtest_output_flag = GTEST_FLAG(output).c_str();\n\n  std::string format = GetOutputFormat();\n  if (format.empty())\n    format = std::string(kDefaultOutputFormat);\n\n  const char* const colon = strchr(gtest_output_flag, ':');\n  if (colon == nullptr)\n    return internal::FilePath::MakeFileName(\n        internal::FilePath(\n            UnitTest::GetInstance()->original_working_dir()),\n        internal::FilePath(kDefaultOutputFile), 0,\n        format.c_str()).string();\n\n  internal::FilePath output_name(colon + 1);\n  if (!output_name.IsAbsolutePath())\n    output_name = internal::FilePath::ConcatPaths(\n        internal::FilePath(UnitTest::GetInstance()->original_working_dir()),\n        internal::FilePath(colon + 1));\n\n  if (!output_name.IsDirectory())\n    return output_name.string();\n\n  internal::FilePath result(internal::FilePath::GenerateUniqueFileName(\n      output_name, internal::GetCurrentExecutableName(),\n      GetOutputFormat().c_str()));\n  return result.string();\n}\n\n// Returns true if and only if the wildcard pattern matches the string.\n// The first ':' or '\\0' character in pattern marks the end of it.\n//\n// This recursive algorithm isn't very efficient, but is clear and\n// works well enough for matching test names, which are short.\nbool UnitTestOptions::PatternMatchesString(const char *pattern,\n                                           const char *str) {\n  switch (*pattern) {\n    case '\\0':\n    case ':':  // Either ':' or '\\0' marks the end of the pattern.\n      return *str == '\\0';\n    case '?':  // Matches any single character.\n      return *str != '\\0' && 
PatternMatchesString(pattern + 1, str + 1);\n    case '*':  // Matches any string (possibly empty) of characters.\n      return (*str != '\\0' && PatternMatchesString(pattern, str + 1)) ||\n          PatternMatchesString(pattern + 1, str);\n    default:  // Non-special character.  Matches itself.\n      return *pattern == *str &&\n          PatternMatchesString(pattern + 1, str + 1);\n  }\n}\n\nbool UnitTestOptions::MatchesFilter(\n    const std::string& name, const char* filter) {\n  const char *cur_pattern = filter;\n  for (;;) {\n    if (PatternMatchesString(cur_pattern, name.c_str())) {\n      return true;\n    }\n\n    // Finds the next pattern in the filter.\n    cur_pattern = strchr(cur_pattern, ':');\n\n    // Returns if no more pattern can be found.\n    if (cur_pattern == nullptr) {\n      return false;\n    }\n\n    // Skips the pattern separater (the ':' character).\n    cur_pattern++;\n  }\n}\n\n// Returns true if and only if the user-specified filter matches the test\n// suite name and the test name.\nbool UnitTestOptions::FilterMatchesTest(const std::string& test_suite_name,\n                                        const std::string& test_name) {\n  const std::string& full_name = test_suite_name + \".\" + test_name.c_str();\n\n  // Split --gtest_filter at '-', if there is one, to separate into\n  // positive filter and negative filter portions\n  const char* const p = GTEST_FLAG(filter).c_str();\n  const char* const dash = strchr(p, '-');\n  std::string positive;\n  std::string negative;\n  if (dash == nullptr) {\n    positive = GTEST_FLAG(filter).c_str();  // Whole string is a positive filter\n    negative = \"\";\n  } else {\n    positive = std::string(p, dash);   // Everything up to the dash\n    negative = std::string(dash + 1);  // Everything after the dash\n    if (positive.empty()) {\n      // Treat '-test1' as the same as '*-test1'\n      positive = kUniversalFilter;\n    }\n  }\n\n  // A filter is a colon-separated list of patterns.  
It matches a\n  // test if any pattern in it matches the test.\n  return (MatchesFilter(full_name, positive.c_str()) &&\n          !MatchesFilter(full_name, negative.c_str()));\n}\n\n#if GTEST_HAS_SEH\n// Returns EXCEPTION_EXECUTE_HANDLER if Google Test should handle the\n// given SEH exception, or EXCEPTION_CONTINUE_SEARCH otherwise.\n// This function is useful as an __except condition.\nint UnitTestOptions::GTestShouldProcessSEH(DWORD exception_code) {\n  // Google Test should handle a SEH exception if:\n  //   1. the user wants it to, AND\n  //   2. this is not a breakpoint exception, AND\n  //   3. this is not a C++ exception (VC++ implements them via SEH,\n  //      apparently).\n  //\n  // SEH exception code for C++ exceptions.\n  // (see http://support.microsoft.com/kb/185294 for more information).\n  const DWORD kCxxExceptionCode = 0xe06d7363;\n\n  bool should_handle = true;\n\n  if (!GTEST_FLAG(catch_exceptions))\n    should_handle = false;\n  else if (exception_code == EXCEPTION_BREAKPOINT)\n    should_handle = false;\n  else if (exception_code == kCxxExceptionCode)\n    should_handle = false;\n\n  return should_handle ? EXCEPTION_EXECUTE_HANDLER : EXCEPTION_CONTINUE_SEARCH;\n}\n#endif  // GTEST_HAS_SEH\n\n}  // namespace internal\n\n// The c'tor sets this object as the test part result reporter used by\n// Google Test.  The 'result' parameter specifies where to report the\n// results. Intercepts only failures from the current thread.\nScopedFakeTestPartResultReporter::ScopedFakeTestPartResultReporter(\n    TestPartResultArray* result)\n    : intercept_mode_(INTERCEPT_ONLY_CURRENT_THREAD),\n      result_(result) {\n  Init();\n}\n\n// The c'tor sets this object as the test part result reporter used by\n// Google Test.  
The 'result' parameter specifies where to report the\n// results.\nScopedFakeTestPartResultReporter::ScopedFakeTestPartResultReporter(\n    InterceptMode intercept_mode, TestPartResultArray* result)\n    : intercept_mode_(intercept_mode),\n      result_(result) {\n  Init();\n}\n\nvoid ScopedFakeTestPartResultReporter::Init() {\n  internal::UnitTestImpl* const impl = internal::GetUnitTestImpl();\n  if (intercept_mode_ == INTERCEPT_ALL_THREADS) {\n    old_reporter_ = impl->GetGlobalTestPartResultReporter();\n    impl->SetGlobalTestPartResultReporter(this);\n  } else {\n    old_reporter_ = impl->GetTestPartResultReporterForCurrentThread();\n    impl->SetTestPartResultReporterForCurrentThread(this);\n  }\n}\n\n// The d'tor restores the test part result reporter used by Google Test\n// before.\nScopedFakeTestPartResultReporter::~ScopedFakeTestPartResultReporter() {\n  internal::UnitTestImpl* const impl = internal::GetUnitTestImpl();\n  if (intercept_mode_ == INTERCEPT_ALL_THREADS) {\n    impl->SetGlobalTestPartResultReporter(old_reporter_);\n  } else {\n    impl->SetTestPartResultReporterForCurrentThread(old_reporter_);\n  }\n}\n\n// Increments the test part result count and remembers the result.\n// This method is from the TestPartResultReporterInterface interface.\nvoid ScopedFakeTestPartResultReporter::ReportTestPartResult(\n    const TestPartResult& result) {\n  result_->Append(result);\n}\n\nnamespace internal {\n\n// Returns the type ID of ::testing::Test.  We should always call this\n// instead of GetTypeId< ::testing::Test>() to get the type ID of\n// testing::Test.  This is to work around a suspected linker bug when\n// using Google Test as a framework on Mac OS X.  The bug causes\n// GetTypeId< ::testing::Test>() to return different values depending\n// on whether the call is from the Google Test framework itself or\n// from user test code.  
GetTestTypeId() is guaranteed to always\n// return the same value, as it always calls GetTypeId<>() from the\n// gtest.cc, which is within the Google Test framework.\nTypeId GetTestTypeId() {\n  return GetTypeId<Test>();\n}\n\n// The value of GetTestTypeId() as seen from within the Google Test\n// library.  This is solely for testing GetTestTypeId().\nextern const TypeId kTestTypeIdInGoogleTest = GetTestTypeId();\n\n// This predicate-formatter checks that 'results' contains a test part\n// failure of the given type and that the failure message contains the\n// given substring.\nstatic AssertionResult HasOneFailure(const char* /* results_expr */,\n                                     const char* /* type_expr */,\n                                     const char* /* substr_expr */,\n                                     const TestPartResultArray& results,\n                                     TestPartResult::Type type,\n                                     const std::string& substr) {\n  const std::string expected(type == TestPartResult::kFatalFailure ?\n                        \"1 fatal failure\" :\n                        \"1 non-fatal failure\");\n  Message msg;\n  if (results.size() != 1) {\n    msg << \"Expected: \" << expected << \"\\n\"\n        << \"  Actual: \" << results.size() << \" failures\";\n    for (int i = 0; i < results.size(); i++) {\n      msg << \"\\n\" << results.GetTestPartResult(i);\n    }\n    return AssertionFailure() << msg;\n  }\n\n  const TestPartResult& r = results.GetTestPartResult(0);\n  if (r.type() != type) {\n    return AssertionFailure() << \"Expected: \" << expected << \"\\n\"\n                              << \"  Actual:\\n\"\n                              << r;\n  }\n\n  if (strstr(r.message(), substr.c_str()) == nullptr) {\n    return AssertionFailure() << \"Expected: \" << expected << \" containing \\\"\"\n                              << substr << \"\\\"\\n\"\n                              << \"  Actual:\\n\"\n                  
            << r;\n  }\n\n  return AssertionSuccess();\n}\n\n// The constructor of SingleFailureChecker remembers where to look up\n// test part results, what type of failure we expect, and what\n// substring the failure message should contain.\nSingleFailureChecker::SingleFailureChecker(const TestPartResultArray* results,\n                                           TestPartResult::Type type,\n                                           const std::string& substr)\n    : results_(results), type_(type), substr_(substr) {}\n\n// The destructor of SingleFailureChecker verifies that the given\n// TestPartResultArray contains exactly one failure that has the given\n// type and contains the given substring.  If that's not the case, a\n// non-fatal failure will be generated.\nSingleFailureChecker::~SingleFailureChecker() {\n  EXPECT_PRED_FORMAT3(HasOneFailure, *results_, type_, substr_);\n}\n\nDefaultGlobalTestPartResultReporter::DefaultGlobalTestPartResultReporter(\n    UnitTestImpl* unit_test) : unit_test_(unit_test) {}\n\nvoid DefaultGlobalTestPartResultReporter::ReportTestPartResult(\n    const TestPartResult& result) {\n  unit_test_->current_test_result()->AddTestPartResult(result);\n  unit_test_->listeners()->repeater()->OnTestPartResult(result);\n}\n\nDefaultPerThreadTestPartResultReporter::DefaultPerThreadTestPartResultReporter(\n    UnitTestImpl* unit_test) : unit_test_(unit_test) {}\n\nvoid DefaultPerThreadTestPartResultReporter::ReportTestPartResult(\n    const TestPartResult& result) {\n  unit_test_->GetGlobalTestPartResultReporter()->ReportTestPartResult(result);\n}\n\n// Returns the global test part result reporter.\nTestPartResultReporterInterface*\nUnitTestImpl::GetGlobalTestPartResultReporter() {\n  internal::MutexLock lock(&global_test_part_result_reporter_mutex_);\n  return global_test_part_result_repoter_;\n}\n\n// Sets the global test part result reporter.\nvoid UnitTestImpl::SetGlobalTestPartResultReporter(\n    TestPartResultReporterInterface* 
reporter) {\n  internal::MutexLock lock(&global_test_part_result_reporter_mutex_);\n  global_test_part_result_repoter_ = reporter;\n}\n\n// Returns the test part result reporter for the current thread.\nTestPartResultReporterInterface*\nUnitTestImpl::GetTestPartResultReporterForCurrentThread() {\n  return per_thread_test_part_result_reporter_.get();\n}\n\n// Sets the test part result reporter for the current thread.\nvoid UnitTestImpl::SetTestPartResultReporterForCurrentThread(\n    TestPartResultReporterInterface* reporter) {\n  per_thread_test_part_result_reporter_.set(reporter);\n}\n\n// Gets the number of successful test suites.\nint UnitTestImpl::successful_test_suite_count() const {\n  return CountIf(test_suites_, TestSuitePassed);\n}\n\n// Gets the number of failed test suites.\nint UnitTestImpl::failed_test_suite_count() const {\n  return CountIf(test_suites_, TestSuiteFailed);\n}\n\n// Gets the number of all test suites.\nint UnitTestImpl::total_test_suite_count() const {\n  return static_cast<int>(test_suites_.size());\n}\n\n// Gets the number of all test suites that contain at least one test\n// that should run.\nint UnitTestImpl::test_suite_to_run_count() const {\n  return CountIf(test_suites_, ShouldRunTestSuite);\n}\n\n// Gets the number of successful tests.\nint UnitTestImpl::successful_test_count() const {\n  return SumOverTestSuiteList(test_suites_, &TestSuite::successful_test_count);\n}\n\n// Gets the number of skipped tests.\nint UnitTestImpl::skipped_test_count() const {\n  return SumOverTestSuiteList(test_suites_, &TestSuite::skipped_test_count);\n}\n\n// Gets the number of failed tests.\nint UnitTestImpl::failed_test_count() const {\n  return SumOverTestSuiteList(test_suites_, &TestSuite::failed_test_count);\n}\n\n// Gets the number of disabled tests that will be reported in the XML report.\nint UnitTestImpl::reportable_disabled_test_count() const {\n  return SumOverTestSuiteList(test_suites_,\n                              
&TestSuite::reportable_disabled_test_count);\n}\n\n// Gets the number of disabled tests.\nint UnitTestImpl::disabled_test_count() const {\n  return SumOverTestSuiteList(test_suites_, &TestSuite::disabled_test_count);\n}\n\n// Gets the number of tests to be printed in the XML report.\nint UnitTestImpl::reportable_test_count() const {\n  return SumOverTestSuiteList(test_suites_, &TestSuite::reportable_test_count);\n}\n\n// Gets the number of all tests.\nint UnitTestImpl::total_test_count() const {\n  return SumOverTestSuiteList(test_suites_, &TestSuite::total_test_count);\n}\n\n// Gets the number of tests that should run.\nint UnitTestImpl::test_to_run_count() const {\n  return SumOverTestSuiteList(test_suites_, &TestSuite::test_to_run_count);\n}\n\n// Returns the current OS stack trace as an std::string.\n//\n// The maximum number of stack frames to be included is specified by\n// the gtest_stack_trace_depth flag.  The skip_count parameter\n// specifies the number of top frames to be skipped, which doesn't\n// count against the number of frames to be included.\n//\n// For example, if Foo() calls Bar(), which in turn calls\n// CurrentOsStackTraceExceptTop(1), Foo() will be included in the\n// trace but Bar() and CurrentOsStackTraceExceptTop() won't.\nstd::string UnitTestImpl::CurrentOsStackTraceExceptTop(int skip_count) {\n  return os_stack_trace_getter()->CurrentStackTrace(\n      static_cast<int>(GTEST_FLAG(stack_trace_depth)),\n      skip_count + 1\n      // Skips the user-specified number of frames plus this function\n      // itself.\n      );  // NOLINT\n}\n\n// Returns the current time in milliseconds.\nTimeInMillis GetTimeInMillis() {\n#if GTEST_OS_WINDOWS_MOBILE || defined(__BORLANDC__)\n  // Difference between 1970-01-01 and 1601-01-01 in milliseconds.\n  // http://analogous.blogspot.com/2005/04/epoch.html\n  const TimeInMillis kJavaEpochToWinFileTimeDelta =\n    static_cast<TimeInMillis>(116444736UL) * 100000UL;\n  const DWORD kTenthMicrosInMilliSecond = 
10000;\n\n  SYSTEMTIME now_systime;\n  FILETIME now_filetime;\n  ULARGE_INTEGER now_int64;\n  GetSystemTime(&now_systime);\n  if (SystemTimeToFileTime(&now_systime, &now_filetime)) {\n    now_int64.LowPart = now_filetime.dwLowDateTime;\n    now_int64.HighPart = now_filetime.dwHighDateTime;\n    now_int64.QuadPart = (now_int64.QuadPart / kTenthMicrosInMilliSecond) -\n      kJavaEpochToWinFileTimeDelta;\n    return now_int64.QuadPart;\n  }\n  return 0;\n#elif GTEST_OS_WINDOWS && !GTEST_HAS_GETTIMEOFDAY_\n  __timeb64 now;\n\n  // MSVC 8 deprecates _ftime64(), so we want to suppress warning 4996\n  // (deprecated function) there.\n  GTEST_DISABLE_MSC_DEPRECATED_PUSH_()\n  _ftime64(&now);\n  GTEST_DISABLE_MSC_DEPRECATED_POP_()\n\n  return static_cast<TimeInMillis>(now.time) * 1000 + now.millitm;\n#elif GTEST_HAS_GETTIMEOFDAY_\n  struct timeval now;\n  gettimeofday(&now, nullptr);\n  return static_cast<TimeInMillis>(now.tv_sec) * 1000 + now.tv_usec / 1000;\n#else\n# error \"Don't know how to get the current time on your system.\"\n#endif\n}\n\n// Utilities\n\n// class String.\n\n#if GTEST_OS_WINDOWS_MOBILE\n// Creates a UTF-16 wide string from the given ANSI string, allocating\n// memory using new. The caller is responsible for deleting the return\n// value using delete[]. Returns the wide string, or NULL if the\n// input is NULL.\nLPCWSTR String::AnsiToUtf16(const char* ansi) {\n  if (!ansi) return nullptr;\n  const int length = strlen(ansi);\n  const int unicode_length =\n      MultiByteToWideChar(CP_ACP, 0, ansi, length, nullptr, 0);\n  WCHAR* unicode = new WCHAR[unicode_length + 1];\n  MultiByteToWideChar(CP_ACP, 0, ansi, length,\n                      unicode, unicode_length);\n  unicode[unicode_length] = 0;\n  return unicode;\n}\n\n// Creates an ANSI string from the given wide string, allocating\n// memory using new. The caller is responsible for deleting the return\n// value using delete[]. 
Returns the ANSI string, or NULL if the\n// input is NULL.\nconst char* String::Utf16ToAnsi(LPCWSTR utf16_str)  {\n  if (!utf16_str) return nullptr;\n  const int ansi_length = WideCharToMultiByte(CP_ACP, 0, utf16_str, -1, nullptr,\n                                              0, nullptr, nullptr);\n  char* ansi = new char[ansi_length + 1];\n  WideCharToMultiByte(CP_ACP, 0, utf16_str, -1, ansi, ansi_length, nullptr,\n                      nullptr);\n  ansi[ansi_length] = 0;\n  return ansi;\n}\n\n#endif  // GTEST_OS_WINDOWS_MOBILE\n\n// Compares two C strings.  Returns true if and only if they have the same\n// content.\n//\n// Unlike strcmp(), this function can handle NULL argument(s).  A NULL\n// C string is considered different to any non-NULL C string,\n// including the empty string.\nbool String::CStringEquals(const char * lhs, const char * rhs) {\n  if (lhs == nullptr) return rhs == nullptr;\n\n  if (rhs == nullptr) return false;\n\n  return strcmp(lhs, rhs) == 0;\n}\n\n#if GTEST_HAS_STD_WSTRING\n\n// Converts an array of wide chars to a narrow string using the UTF-8\n// encoding, and streams the result to the given Message object.\nstatic void StreamWideCharsToMessage(const wchar_t* wstr, size_t length,\n                                     Message* msg) {\n  for (size_t i = 0; i != length; ) {  // NOLINT\n    if (wstr[i] != L'\\0') {\n      *msg << WideStringToUtf8(wstr + i, static_cast<int>(length - i));\n      while (i != length && wstr[i] != L'\\0')\n        i++;\n    } else {\n      *msg << '\\0';\n      i++;\n    }\n  }\n}\n\n#endif  // GTEST_HAS_STD_WSTRING\n\nvoid SplitString(const ::std::string& str, char delimiter,\n                 ::std::vector< ::std::string>* dest) {\n  ::std::vector< ::std::string> parsed;\n  ::std::string::size_type pos = 0;\n  while (::testing::internal::AlwaysTrue()) {\n    const ::std::string::size_type colon = str.find(delimiter, pos);\n    if (colon == ::std::string::npos) {\n      parsed.push_back(str.substr(pos));\n     
 break;\n    } else {\n      parsed.push_back(str.substr(pos, colon - pos));\n      pos = colon + 1;\n    }\n  }\n  dest->swap(parsed);\n}\n\n}  // namespace internal\n\n// Constructs an empty Message.\n// We allocate the stringstream separately because otherwise each use of\n// ASSERT/EXPECT in a procedure adds over 200 bytes to the procedure's\n// stack frame leading to huge stack frames in some cases; gcc does not reuse\n// the stack space.\nMessage::Message() : ss_(new ::std::stringstream) {\n  // By default, we want there to be enough precision when printing\n  // a double to a Message.\n  *ss_ << std::setprecision(std::numeric_limits<double>::digits10 + 2);\n}\n\n// These two overloads allow streaming a wide C string to a Message\n// using the UTF-8 encoding.\nMessage& Message::operator <<(const wchar_t* wide_c_str) {\n  return *this << internal::String::ShowWideCString(wide_c_str);\n}\nMessage& Message::operator <<(wchar_t* wide_c_str) {\n  return *this << internal::String::ShowWideCString(wide_c_str);\n}\n\n#if GTEST_HAS_STD_WSTRING\n// Converts the given wide string to a narrow string using the UTF-8\n// encoding, and streams the result to this Message object.\nMessage& Message::operator <<(const ::std::wstring& wstr) {\n  internal::StreamWideCharsToMessage(wstr.c_str(), wstr.length(), this);\n  return *this;\n}\n#endif  // GTEST_HAS_STD_WSTRING\n\n// Gets the text streamed to this object so far as an std::string.\n// Each '\\0' character in the buffer is replaced with \"\\\\0\".\nstd::string Message::GetString() const {\n  return internal::StringStreamToString(ss_.get());\n}\n\n// AssertionResult constructors.\n// Used in EXPECT_TRUE/FALSE(assertion_result).\nAssertionResult::AssertionResult(const AssertionResult& other)\n    : success_(other.success_),\n      message_(other.message_.get() != nullptr\n                   ? 
new ::std::string(*other.message_)\n                   : static_cast< ::std::string*>(nullptr)) {}\n\n// Swaps two AssertionResults.\nvoid AssertionResult::swap(AssertionResult& other) {\n  using std::swap;\n  swap(success_, other.success_);\n  swap(message_, other.message_);\n}\n\n// Returns the assertion's negation. Used with EXPECT/ASSERT_FALSE.\nAssertionResult AssertionResult::operator!() const {\n  AssertionResult negation(!success_);\n  if (message_.get() != nullptr) negation << *message_;\n  return negation;\n}\n\n// Makes a successful assertion result.\nAssertionResult AssertionSuccess() {\n  return AssertionResult(true);\n}\n\n// Makes a failed assertion result.\nAssertionResult AssertionFailure() {\n  return AssertionResult(false);\n}\n\n// Makes a failed assertion result with the given failure message.\n// Deprecated; use AssertionFailure() << message.\nAssertionResult AssertionFailure(const Message& message) {\n  return AssertionFailure() << message;\n}\n\nnamespace internal {\n\nnamespace edit_distance {\nstd::vector<EditType> CalculateOptimalEdits(const std::vector<size_t>& left,\n                                            const std::vector<size_t>& right) {\n  std::vector<std::vector<double> > costs(\n      left.size() + 1, std::vector<double>(right.size() + 1));\n  std::vector<std::vector<EditType> > best_move(\n      left.size() + 1, std::vector<EditType>(right.size() + 1));\n\n  // Populate for empty right.\n  for (size_t l_i = 0; l_i < costs.size(); ++l_i) {\n    costs[l_i][0] = static_cast<double>(l_i);\n    best_move[l_i][0] = kRemove;\n  }\n  // Populate for empty left.\n  for (size_t r_i = 1; r_i < costs[0].size(); ++r_i) {\n    costs[0][r_i] = static_cast<double>(r_i);\n    best_move[0][r_i] = kAdd;\n  }\n\n  for (size_t l_i = 0; l_i < left.size(); ++l_i) {\n    for (size_t r_i = 0; r_i < right.size(); ++r_i) {\n      if (left[l_i] == right[r_i]) {\n        // Found a match. 
Consume it.\n        costs[l_i + 1][r_i + 1] = costs[l_i][r_i];\n        best_move[l_i + 1][r_i + 1] = kMatch;\n        continue;\n      }\n\n      const double add = costs[l_i + 1][r_i];\n      const double remove = costs[l_i][r_i + 1];\n      const double replace = costs[l_i][r_i];\n      if (add < remove && add < replace) {\n        costs[l_i + 1][r_i + 1] = add + 1;\n        best_move[l_i + 1][r_i + 1] = kAdd;\n      } else if (remove < add && remove < replace) {\n        costs[l_i + 1][r_i + 1] = remove + 1;\n        best_move[l_i + 1][r_i + 1] = kRemove;\n      } else {\n        // We make replace a little more expensive than add/remove to lower\n        // their priority.\n        costs[l_i + 1][r_i + 1] = replace + 1.00001;\n        best_move[l_i + 1][r_i + 1] = kReplace;\n      }\n    }\n  }\n\n  // Reconstruct the best path. We do it in reverse order.\n  std::vector<EditType> best_path;\n  for (size_t l_i = left.size(), r_i = right.size(); l_i > 0 || r_i > 0;) {\n    EditType move = best_move[l_i][r_i];\n    best_path.push_back(move);\n    l_i -= move != kAdd;\n    r_i -= move != kRemove;\n  }\n  std::reverse(best_path.begin(), best_path.end());\n  return best_path;\n}\n\nnamespace {\n\n// Helper class to convert string into ids with deduplication.\nclass InternalStrings {\n public:\n  size_t GetId(const std::string& str) {\n    IdMap::iterator it = ids_.find(str);\n    if (it != ids_.end()) return it->second;\n    size_t id = ids_.size();\n    return ids_[str] = id;\n  }\n\n private:\n  typedef std::map<std::string, size_t> IdMap;\n  IdMap ids_;\n};\n\n}  // namespace\n\nstd::vector<EditType> CalculateOptimalEdits(\n    const std::vector<std::string>& left,\n    const std::vector<std::string>& right) {\n  std::vector<size_t> left_ids, right_ids;\n  {\n    InternalStrings intern_table;\n    for (size_t i = 0; i < left.size(); ++i) {\n      left_ids.push_back(intern_table.GetId(left[i]));\n    }\n    for (size_t i = 0; i < right.size(); ++i) {\n      
right_ids.push_back(intern_table.GetId(right[i]));\n    }\n  }\n  return CalculateOptimalEdits(left_ids, right_ids);\n}\n\nnamespace {\n\n// Helper class that holds the state for one hunk and prints it out to the\n// stream.\n// It reorders adds/removes when possible to group all removes before all\n// adds. It also adds the hunk header before printint into the stream.\nclass Hunk {\n public:\n  Hunk(size_t left_start, size_t right_start)\n      : left_start_(left_start),\n        right_start_(right_start),\n        adds_(),\n        removes_(),\n        common_() {}\n\n  void PushLine(char edit, const char* line) {\n    switch (edit) {\n      case ' ':\n        ++common_;\n        FlushEdits();\n        hunk_.push_back(std::make_pair(' ', line));\n        break;\n      case '-':\n        ++removes_;\n        hunk_removes_.push_back(std::make_pair('-', line));\n        break;\n      case '+':\n        ++adds_;\n        hunk_adds_.push_back(std::make_pair('+', line));\n        break;\n    }\n  }\n\n  void PrintTo(std::ostream* os) {\n    PrintHeader(os);\n    FlushEdits();\n    for (std::list<std::pair<char, const char*> >::const_iterator it =\n             hunk_.begin();\n         it != hunk_.end(); ++it) {\n      *os << it->first << it->second << \"\\n\";\n    }\n  }\n\n  bool has_edits() const { return adds_ || removes_; }\n\n private:\n  void FlushEdits() {\n    hunk_.splice(hunk_.end(), hunk_removes_);\n    hunk_.splice(hunk_.end(), hunk_adds_);\n  }\n\n  // Print a unified diff header for one hunk.\n  // The format is\n  //   \"@@ -<left_start>,<left_length> +<right_start>,<right_length> @@\"\n  // where the left/right parts are omitted if unnecessary.\n  void PrintHeader(std::ostream* ss) const {\n    *ss << \"@@ \";\n    if (removes_) {\n      *ss << \"-\" << left_start_ << \",\" << (removes_ + common_);\n    }\n    if (removes_ && adds_) {\n      *ss << \" \";\n    }\n    if (adds_) {\n      *ss << \"+\" << right_start_ << \",\" << (adds_ + common_);\n    
}\n    *ss << \" @@\\n\";\n  }\n\n  size_t left_start_, right_start_;\n  size_t adds_, removes_, common_;\n  std::list<std::pair<char, const char*> > hunk_, hunk_adds_, hunk_removes_;\n};\n\n}  // namespace\n\n// Create a list of diff hunks in Unified diff format.\n// Each hunk has a header generated by PrintHeader above plus a body with\n// lines prefixed with ' ' for no change, '-' for deletion and '+' for\n// addition.\n// 'context' represents the desired unchanged prefix/suffix around the diff.\n// If two hunks are close enough that their contexts overlap, then they are\n// joined into one hunk.\nstd::string CreateUnifiedDiff(const std::vector<std::string>& left,\n                              const std::vector<std::string>& right,\n                              size_t context) {\n  const std::vector<EditType> edits = CalculateOptimalEdits(left, right);\n\n  size_t l_i = 0, r_i = 0, edit_i = 0;\n  std::stringstream ss;\n  while (edit_i < edits.size()) {\n    // Find first edit.\n    while (edit_i < edits.size() && edits[edit_i] == kMatch) {\n      ++l_i;\n      ++r_i;\n      ++edit_i;\n    }\n\n    // Find the first line to include in the hunk.\n    const size_t prefix_context = std::min(l_i, context);\n    Hunk hunk(l_i - prefix_context + 1, r_i - prefix_context + 1);\n    for (size_t i = prefix_context; i > 0; --i) {\n      hunk.PushLine(' ', left[l_i - i].c_str());\n    }\n\n    // Iterate the edits until we found enough suffix for the hunk or the input\n    // is over.\n    size_t n_suffix = 0;\n    for (; edit_i < edits.size(); ++edit_i) {\n      if (n_suffix >= context) {\n        // Continue only if the next hunk is very close.\n        auto it = edits.begin() + static_cast<int>(edit_i);\n        while (it != edits.end() && *it == kMatch) ++it;\n        if (it == edits.end() ||\n            static_cast<size_t>(it - edits.begin()) - edit_i >= context) {\n          // There is no next edit or it is too far away.\n          break;\n        }\n      }\n\n    
  EditType edit = edits[edit_i];\n      // Reset count when a non match is found.\n      n_suffix = edit == kMatch ? n_suffix + 1 : 0;\n\n      if (edit == kMatch || edit == kRemove || edit == kReplace) {\n        hunk.PushLine(edit == kMatch ? ' ' : '-', left[l_i].c_str());\n      }\n      if (edit == kAdd || edit == kReplace) {\n        hunk.PushLine('+', right[r_i].c_str());\n      }\n\n      // Advance indices, depending on edit type.\n      l_i += edit != kAdd;\n      r_i += edit != kRemove;\n    }\n\n    if (!hunk.has_edits()) {\n      // We are done. We don't want this hunk.\n      break;\n    }\n\n    hunk.PrintTo(&ss);\n  }\n  return ss.str();\n}\n\n}  // namespace edit_distance\n\nnamespace {\n\n// The string representation of the values received in EqFailure() are already\n// escaped. Split them on escaped '\\n' boundaries. Leave all other escaped\n// characters the same.\nstd::vector<std::string> SplitEscapedString(const std::string& str) {\n  std::vector<std::string> lines;\n  size_t start = 0, end = str.size();\n  if (end > 2 && str[0] == '\"' && str[end - 1] == '\"') {\n    ++start;\n    --end;\n  }\n  bool escaped = false;\n  for (size_t i = start; i + 1 < end; ++i) {\n    if (escaped) {\n      escaped = false;\n      if (str[i] == 'n') {\n        lines.push_back(str.substr(start, i - start - 1));\n        start = i + 1;\n      }\n    } else {\n      escaped = str[i] == '\\\\';\n    }\n  }\n  lines.push_back(str.substr(start, end - start));\n  return lines;\n}\n\n}  // namespace\n\n// Constructs and returns the message for an equality assertion\n// (e.g. ASSERT_EQ, EXPECT_STREQ, etc) failure.\n//\n// The first four parameters are the expressions used in the assertion\n// and their values, as strings.  
For example, for ASSERT_EQ(foo, bar)\n// where foo is 5 and bar is 6, we have:\n//\n//   lhs_expression: \"foo\"\n//   rhs_expression: \"bar\"\n//   lhs_value:      \"5\"\n//   rhs_value:      \"6\"\n//\n// The ignoring_case parameter is true if and only if the assertion is a\n// *_STRCASEEQ*.  When it's true, the string \"Ignoring case\" will\n// be inserted into the message.\nAssertionResult EqFailure(const char* lhs_expression,\n                          const char* rhs_expression,\n                          const std::string& lhs_value,\n                          const std::string& rhs_value,\n                          bool ignoring_case) {\n  Message msg;\n  msg << \"Expected equality of these values:\";\n  msg << \"\\n  \" << lhs_expression;\n  if (lhs_value != lhs_expression) {\n    msg << \"\\n    Which is: \" << lhs_value;\n  }\n  msg << \"\\n  \" << rhs_expression;\n  if (rhs_value != rhs_expression) {\n    msg << \"\\n    Which is: \" << rhs_value;\n  }\n\n  if (ignoring_case) {\n    msg << \"\\nIgnoring case\";\n  }\n\n  if (!lhs_value.empty() && !rhs_value.empty()) {\n    const std::vector<std::string> lhs_lines =\n        SplitEscapedString(lhs_value);\n    const std::vector<std::string> rhs_lines =\n        SplitEscapedString(rhs_value);\n    if (lhs_lines.size() > 1 || rhs_lines.size() > 1) {\n      msg << \"\\nWith diff:\\n\"\n          << edit_distance::CreateUnifiedDiff(lhs_lines, rhs_lines);\n    }\n  }\n\n  return AssertionFailure() << msg;\n}\n\n// Constructs a failure message for Boolean assertions such as EXPECT_TRUE.\nstd::string GetBoolAssertionFailureMessage(\n    const AssertionResult& assertion_result,\n    const char* expression_text,\n    const char* actual_predicate_value,\n    const char* expected_predicate_value) {\n  const char* actual_message = assertion_result.message();\n  Message msg;\n  msg << \"Value of: \" << expression_text\n      << \"\\n  Actual: \" << actual_predicate_value;\n  if (actual_message[0] != '\\0')\n    msg 
<< \" (\" << actual_message << \")\";\n  msg << \"\\nExpected: \" << expected_predicate_value;\n  return msg.GetString();\n}\n\n// Helper function for implementing ASSERT_NEAR.\nAssertionResult DoubleNearPredFormat(const char* expr1,\n                                     const char* expr2,\n                                     const char* abs_error_expr,\n                                     double val1,\n                                     double val2,\n                                     double abs_error) {\n  const double diff = fabs(val1 - val2);\n  if (diff <= abs_error) return AssertionSuccess();\n\n  return AssertionFailure()\n      << \"The difference between \" << expr1 << \" and \" << expr2\n      << \" is \" << diff << \", which exceeds \" << abs_error_expr << \", where\\n\"\n      << expr1 << \" evaluates to \" << val1 << \",\\n\"\n      << expr2 << \" evaluates to \" << val2 << \", and\\n\"\n      << abs_error_expr << \" evaluates to \" << abs_error << \".\";\n}\n\n\n// Helper template for implementing FloatLE() and DoubleLE().\ntemplate <typename RawType>\nAssertionResult FloatingPointLE(const char* expr1,\n                                const char* expr2,\n                                RawType val1,\n                                RawType val2) {\n  // Returns success if val1 is less than val2,\n  if (val1 < val2) {\n    return AssertionSuccess();\n  }\n\n  // or if val1 is almost equal to val2.\n  const FloatingPoint<RawType> lhs(val1), rhs(val2);\n  if (lhs.AlmostEquals(rhs)) {\n    return AssertionSuccess();\n  }\n\n  // Note that the above two checks will both fail if either val1 or\n  // val2 is NaN, as the IEEE floating-point standard requires that\n  // any predicate involving a NaN must return false.\n\n  ::std::stringstream val1_ss;\n  val1_ss << std::setprecision(std::numeric_limits<RawType>::digits10 + 2)\n          << val1;\n\n  ::std::stringstream val2_ss;\n  val2_ss << std::setprecision(std::numeric_limits<RawType>::digits10 + 
2)\n          << val2;\n\n  return AssertionFailure()\n      << \"Expected: (\" << expr1 << \") <= (\" << expr2 << \")\\n\"\n      << \"  Actual: \" << StringStreamToString(&val1_ss) << \" vs \"\n      << StringStreamToString(&val2_ss);\n}\n\n}  // namespace internal\n\n// Asserts that val1 is less than, or almost equal to, val2.  Fails\n// otherwise.  In particular, it fails if either val1 or val2 is NaN.\nAssertionResult FloatLE(const char* expr1, const char* expr2,\n                        float val1, float val2) {\n  return internal::FloatingPointLE<float>(expr1, expr2, val1, val2);\n}\n\n// Asserts that val1 is less than, or almost equal to, val2.  Fails\n// otherwise.  In particular, it fails if either val1 or val2 is NaN.\nAssertionResult DoubleLE(const char* expr1, const char* expr2,\n                         double val1, double val2) {\n  return internal::FloatingPointLE<double>(expr1, expr2, val1, val2);\n}\n\nnamespace internal {\n\n// The helper function for {ASSERT|EXPECT}_EQ with int or enum\n// arguments.\nAssertionResult CmpHelperEQ(const char* lhs_expression,\n                            const char* rhs_expression,\n                            BiggestInt lhs,\n                            BiggestInt rhs) {\n  if (lhs == rhs) {\n    return AssertionSuccess();\n  }\n\n  return EqFailure(lhs_expression,\n                   rhs_expression,\n                   FormatForComparisonFailureMessage(lhs, rhs),\n                   FormatForComparisonFailureMessage(rhs, lhs),\n                   false);\n}\n\n// A macro for implementing the helper functions needed to implement\n// ASSERT_?? and EXPECT_?? with integer or enum arguments.  
It is here\n// just to avoid copy-and-paste of similar code.\n#define GTEST_IMPL_CMP_HELPER_(op_name, op)\\\nAssertionResult CmpHelper##op_name(const char* expr1, const char* expr2, \\\n                                   BiggestInt val1, BiggestInt val2) {\\\n  if (val1 op val2) {\\\n    return AssertionSuccess();\\\n  } else {\\\n    return AssertionFailure() \\\n        << \"Expected: (\" << expr1 << \") \" #op \" (\" << expr2\\\n        << \"), actual: \" << FormatForComparisonFailureMessage(val1, val2)\\\n        << \" vs \" << FormatForComparisonFailureMessage(val2, val1);\\\n  }\\\n}\n\n// Implements the helper function for {ASSERT|EXPECT}_NE with int or\n// enum arguments.\nGTEST_IMPL_CMP_HELPER_(NE, !=)\n// Implements the helper function for {ASSERT|EXPECT}_LE with int or\n// enum arguments.\nGTEST_IMPL_CMP_HELPER_(LE, <=)\n// Implements the helper function for {ASSERT|EXPECT}_LT with int or\n// enum arguments.\nGTEST_IMPL_CMP_HELPER_(LT, < )\n// Implements the helper function for {ASSERT|EXPECT}_GE with int or\n// enum arguments.\nGTEST_IMPL_CMP_HELPER_(GE, >=)\n// Implements the helper function for {ASSERT|EXPECT}_GT with int or\n// enum arguments.\nGTEST_IMPL_CMP_HELPER_(GT, > )\n\n#undef GTEST_IMPL_CMP_HELPER_\n\n// The helper function for {ASSERT|EXPECT}_STREQ.\nAssertionResult CmpHelperSTREQ(const char* lhs_expression,\n                               const char* rhs_expression,\n                               const char* lhs,\n                               const char* rhs) {\n  if (String::CStringEquals(lhs, rhs)) {\n    return AssertionSuccess();\n  }\n\n  return EqFailure(lhs_expression,\n                   rhs_expression,\n                   PrintToString(lhs),\n                   PrintToString(rhs),\n                   false);\n}\n\n// The helper function for {ASSERT|EXPECT}_STRCASEEQ.\nAssertionResult CmpHelperSTRCASEEQ(const char* lhs_expression,\n                                   const char* rhs_expression,\n                                   
const char* lhs,\n                                   const char* rhs) {\n  if (String::CaseInsensitiveCStringEquals(lhs, rhs)) {\n    return AssertionSuccess();\n  }\n\n  return EqFailure(lhs_expression,\n                   rhs_expression,\n                   PrintToString(lhs),\n                   PrintToString(rhs),\n                   true);\n}\n\n// The helper function for {ASSERT|EXPECT}_STRNE.\nAssertionResult CmpHelperSTRNE(const char* s1_expression,\n                               const char* s2_expression,\n                               const char* s1,\n                               const char* s2) {\n  if (!String::CStringEquals(s1, s2)) {\n    return AssertionSuccess();\n  } else {\n    return AssertionFailure() << \"Expected: (\" << s1_expression << \") != (\"\n                              << s2_expression << \"), actual: \\\"\"\n                              << s1 << \"\\\" vs \\\"\" << s2 << \"\\\"\";\n  }\n}\n\n// The helper function for {ASSERT|EXPECT}_STRCASENE.\nAssertionResult CmpHelperSTRCASENE(const char* s1_expression,\n                                   const char* s2_expression,\n                                   const char* s1,\n                                   const char* s2) {\n  if (!String::CaseInsensitiveCStringEquals(s1, s2)) {\n    return AssertionSuccess();\n  } else {\n    return AssertionFailure()\n        << \"Expected: (\" << s1_expression << \") != (\"\n        << s2_expression << \") (ignoring case), actual: \\\"\"\n        << s1 << \"\\\" vs \\\"\" << s2 << \"\\\"\";\n  }\n}\n\n}  // namespace internal\n\nnamespace {\n\n// Helper functions for implementing IsSubString() and IsNotSubstring().\n\n// This group of overloaded functions return true if and only if needle\n// is a substring of haystack.  
NULL is considered a substring of\n// itself only.\n\nbool IsSubstringPred(const char* needle, const char* haystack) {\n  if (needle == nullptr || haystack == nullptr) return needle == haystack;\n\n  return strstr(haystack, needle) != nullptr;\n}\n\nbool IsSubstringPred(const wchar_t* needle, const wchar_t* haystack) {\n  if (needle == nullptr || haystack == nullptr) return needle == haystack;\n\n  return wcsstr(haystack, needle) != nullptr;\n}\n\n// StringType here can be either ::std::string or ::std::wstring.\ntemplate <typename StringType>\nbool IsSubstringPred(const StringType& needle,\n                     const StringType& haystack) {\n  return haystack.find(needle) != StringType::npos;\n}\n\n// This function implements either IsSubstring() or IsNotSubstring(),\n// depending on the value of the expected_to_be_substring parameter.\n// StringType here can be const char*, const wchar_t*, ::std::string,\n// or ::std::wstring.\ntemplate <typename StringType>\nAssertionResult IsSubstringImpl(\n    bool expected_to_be_substring,\n    const char* needle_expr, const char* haystack_expr,\n    const StringType& needle, const StringType& haystack) {\n  if (IsSubstringPred(needle, haystack) == expected_to_be_substring)\n    return AssertionSuccess();\n\n  const bool is_wide_string = sizeof(needle[0]) > 1;\n  const char* const begin_string_quote = is_wide_string ? \"L\\\"\" : \"\\\"\";\n  return AssertionFailure()\n      << \"Value of: \" << needle_expr << \"\\n\"\n      << \"  Actual: \" << begin_string_quote << needle << \"\\\"\\n\"\n      << \"Expected: \" << (expected_to_be_substring ? 
\"\" : \"not \")\n      << \"a substring of \" << haystack_expr << \"\\n\"\n      << \"Which is: \" << begin_string_quote << haystack << \"\\\"\";\n}\n\n}  // namespace\n\n// IsSubstring() and IsNotSubstring() check whether needle is a\n// substring of haystack (NULL is considered a substring of itself\n// only), and return an appropriate error message when they fail.\n\nAssertionResult IsSubstring(\n    const char* needle_expr, const char* haystack_expr,\n    const char* needle, const char* haystack) {\n  return IsSubstringImpl(true, needle_expr, haystack_expr, needle, haystack);\n}\n\nAssertionResult IsSubstring(\n    const char* needle_expr, const char* haystack_expr,\n    const wchar_t* needle, const wchar_t* haystack) {\n  return IsSubstringImpl(true, needle_expr, haystack_expr, needle, haystack);\n}\n\nAssertionResult IsNotSubstring(\n    const char* needle_expr, const char* haystack_expr,\n    const char* needle, const char* haystack) {\n  return IsSubstringImpl(false, needle_expr, haystack_expr, needle, haystack);\n}\n\nAssertionResult IsNotSubstring(\n    const char* needle_expr, const char* haystack_expr,\n    const wchar_t* needle, const wchar_t* haystack) {\n  return IsSubstringImpl(false, needle_expr, haystack_expr, needle, haystack);\n}\n\nAssertionResult IsSubstring(\n    const char* needle_expr, const char* haystack_expr,\n    const ::std::string& needle, const ::std::string& haystack) {\n  return IsSubstringImpl(true, needle_expr, haystack_expr, needle, haystack);\n}\n\nAssertionResult IsNotSubstring(\n    const char* needle_expr, const char* haystack_expr,\n    const ::std::string& needle, const ::std::string& haystack) {\n  return IsSubstringImpl(false, needle_expr, haystack_expr, needle, haystack);\n}\n\n#if GTEST_HAS_STD_WSTRING\nAssertionResult IsSubstring(\n    const char* needle_expr, const char* haystack_expr,\n    const ::std::wstring& needle, const ::std::wstring& haystack) {\n  return IsSubstringImpl(true, needle_expr, haystack_expr, 
needle, haystack);\n}\n\nAssertionResult IsNotSubstring(\n    const char* needle_expr, const char* haystack_expr,\n    const ::std::wstring& needle, const ::std::wstring& haystack) {\n  return IsSubstringImpl(false, needle_expr, haystack_expr, needle, haystack);\n}\n#endif  // GTEST_HAS_STD_WSTRING\n\nnamespace internal {\n\n#if GTEST_OS_WINDOWS\n\nnamespace {\n\n// Helper function for IsHRESULT{SuccessFailure} predicates\nAssertionResult HRESULTFailureHelper(const char* expr,\n                                     const char* expected,\n                                     long hr) {  // NOLINT\n# if GTEST_OS_WINDOWS_MOBILE || GTEST_OS_WINDOWS_TV_TITLE\n\n  // Windows CE doesn't support FormatMessage.\n  const char error_text[] = \"\";\n\n# else\n\n  // Looks up the human-readable system message for the HRESULT code\n  // and since we're not passing any params to FormatMessage, we don't\n  // want inserts expanded.\n  const DWORD kFlags = FORMAT_MESSAGE_FROM_SYSTEM |\n                       FORMAT_MESSAGE_IGNORE_INSERTS;\n  const DWORD kBufSize = 4096;\n  // Gets the system's human readable message string for this HRESULT.\n  char error_text[kBufSize] = { '\\0' };\n  DWORD message_length = ::FormatMessageA(kFlags,\n                                          0,   // no source, we're asking system\n                                          static_cast<DWORD>(hr),  // the error\n                                          0,   // no line width restrictions\n                                          error_text,  // output buffer\n                                          kBufSize,    // buf size\n                                          nullptr);  // no arguments for inserts\n  // Trims tailing white space (FormatMessage leaves a trailing CR-LF)\n  for (; message_length && IsSpace(error_text[message_length - 1]);\n          --message_length) {\n    error_text[message_length - 1] = '\\0';\n  }\n\n# endif  // GTEST_OS_WINDOWS_MOBILE\n\n  const std::string error_hex(\"0x\" 
+ String::FormatHexInt(hr));\n  return ::testing::AssertionFailure()\n      << \"Expected: \" << expr << \" \" << expected << \".\\n\"\n      << \"  Actual: \" << error_hex << \" \" << error_text << \"\\n\";\n}\n\n}  // namespace\n\nAssertionResult IsHRESULTSuccess(const char* expr, long hr) {  // NOLINT\n  if (SUCCEEDED(hr)) {\n    return AssertionSuccess();\n  }\n  return HRESULTFailureHelper(expr, \"succeeds\", hr);\n}\n\nAssertionResult IsHRESULTFailure(const char* expr, long hr) {  // NOLINT\n  if (FAILED(hr)) {\n    return AssertionSuccess();\n  }\n  return HRESULTFailureHelper(expr, \"fails\", hr);\n}\n\n#endif  // GTEST_OS_WINDOWS\n\n// Utility functions for encoding Unicode text (wide strings) in\n// UTF-8.\n\n// A Unicode code-point can have up to 21 bits, and is encoded in UTF-8\n// like this:\n//\n// Code-point length   Encoding\n//   0 -  7 bits       0xxxxxxx\n//   8 - 11 bits       110xxxxx 10xxxxxx\n//  12 - 16 bits       1110xxxx 10xxxxxx 10xxxxxx\n//  17 - 21 bits       11110xxx 10xxxxxx 10xxxxxx 10xxxxxx\n\n// The maximum code-point a one-byte UTF-8 sequence can represent.\nconst UInt32 kMaxCodePoint1 = (static_cast<UInt32>(1) <<  7) - 1;\n\n// The maximum code-point a two-byte UTF-8 sequence can represent.\nconst UInt32 kMaxCodePoint2 = (static_cast<UInt32>(1) << (5 + 6)) - 1;\n\n// The maximum code-point a three-byte UTF-8 sequence can represent.\nconst UInt32 kMaxCodePoint3 = (static_cast<UInt32>(1) << (4 + 2*6)) - 1;\n\n// The maximum code-point a four-byte UTF-8 sequence can represent.\nconst UInt32 kMaxCodePoint4 = (static_cast<UInt32>(1) << (3 + 3*6)) - 1;\n\n// Chops off the n lowest bits from a bit pattern.  Returns the n\n// lowest bits.  
As a side effect, the original bit pattern will be\n// shifted to the right by n bits.\ninline UInt32 ChopLowBits(UInt32* bits, int n) {\n  const UInt32 low_bits = *bits & ((static_cast<UInt32>(1) << n) - 1);\n  *bits >>= n;\n  return low_bits;\n}\n\n// Converts a Unicode code point to a narrow string in UTF-8 encoding.\n// code_point parameter is of type UInt32 because wchar_t may not be\n// wide enough to contain a code point.\n// If the code_point is not a valid Unicode code point\n// (i.e. outside of Unicode range U+0 to U+10FFFF) it will be converted\n// to \"(Invalid Unicode 0xXXXXXXXX)\".\nstd::string CodePointToUtf8(UInt32 code_point) {\n  if (code_point > kMaxCodePoint4) {\n    return \"(Invalid Unicode 0x\" + String::FormatHexUInt32(code_point) + \")\";\n  }\n\n  char str[5];  // Big enough for the largest valid code point.\n  if (code_point <= kMaxCodePoint1) {\n    str[1] = '\\0';\n    str[0] = static_cast<char>(code_point);                          // 0xxxxxxx\n  } else if (code_point <= kMaxCodePoint2) {\n    str[2] = '\\0';\n    str[1] = static_cast<char>(0x80 | ChopLowBits(&code_point, 6));  // 10xxxxxx\n    str[0] = static_cast<char>(0xC0 | code_point);                   // 110xxxxx\n  } else if (code_point <= kMaxCodePoint3) {\n    str[3] = '\\0';\n    str[2] = static_cast<char>(0x80 | ChopLowBits(&code_point, 6));  // 10xxxxxx\n    str[1] = static_cast<char>(0x80 | ChopLowBits(&code_point, 6));  // 10xxxxxx\n    str[0] = static_cast<char>(0xE0 | code_point);                   // 1110xxxx\n  } else {  // code_point <= kMaxCodePoint4\n    str[4] = '\\0';\n    str[3] = static_cast<char>(0x80 | ChopLowBits(&code_point, 6));  // 10xxxxxx\n    str[2] = static_cast<char>(0x80 | ChopLowBits(&code_point, 6));  // 10xxxxxx\n    str[1] = static_cast<char>(0x80 | ChopLowBits(&code_point, 6));  // 10xxxxxx\n    str[0] = static_cast<char>(0xF0 | code_point);                   // 11110xxx\n  }\n  return str;\n}\n\n// The following two functions only make sense 
if the system\n// uses UTF-16 for wide string encoding. All supported systems\n// with 16 bit wchar_t (Windows, Cygwin) do use UTF-16.\n\n// Determines if the arguments constitute UTF-16 surrogate pair\n// and thus should be combined into a single Unicode code point\n// using CreateCodePointFromUtf16SurrogatePair.\ninline bool IsUtf16SurrogatePair(wchar_t first, wchar_t second) {\n  return sizeof(wchar_t) == 2 &&\n      (first & 0xFC00) == 0xD800 && (second & 0xFC00) == 0xDC00;\n}\n\n// Creates a Unicode code point from UTF16 surrogate pair.\ninline UInt32 CreateCodePointFromUtf16SurrogatePair(wchar_t first,\n                                                    wchar_t second) {\n  const auto first_u = static_cast<UInt32>(first);\n  const auto second_u = static_cast<UInt32>(second);\n  const UInt32 mask = (1 << 10) - 1;\n  return (sizeof(wchar_t) == 2)\n             ? (((first_u & mask) << 10) | (second_u & mask)) + 0x10000\n             :\n             // This function should not be called when the condition is\n             // false, but we provide a sensible default in case it is.\n             first_u;\n}\n\n// Converts a wide string to a narrow string in UTF-8 encoding.\n// The wide string is assumed to have the following encoding:\n//   UTF-16 if sizeof(wchar_t) == 2 (on Windows, Cygwin)\n//   UTF-32 if sizeof(wchar_t) == 4 (on Linux)\n// Parameter str points to a null-terminated wide string.\n// Parameter num_chars may additionally limit the number\n// of wchar_t characters processed. -1 is used when the entire string\n// should be processed.\n// If the string contains code points that are not valid Unicode code points\n// (i.e. outside of Unicode range U+0 to U+10FFFF) they will be output\n// as '(Invalid Unicode 0xXXXXXXXX)'. 
If the string is in UTF16 encoding\n// and contains invalid UTF-16 surrogate pairs, values in those pairs\n// will be encoded as individual Unicode characters from Basic Normal Plane.\nstd::string WideStringToUtf8(const wchar_t* str, int num_chars) {\n  if (num_chars == -1)\n    num_chars = static_cast<int>(wcslen(str));\n\n  ::std::stringstream stream;\n  for (int i = 0; i < num_chars; ++i) {\n    UInt32 unicode_code_point;\n\n    if (str[i] == L'\\0') {\n      break;\n    } else if (i + 1 < num_chars && IsUtf16SurrogatePair(str[i], str[i + 1])) {\n      unicode_code_point = CreateCodePointFromUtf16SurrogatePair(str[i],\n                                                                 str[i + 1]);\n      i++;\n    } else {\n      unicode_code_point = static_cast<UInt32>(str[i]);\n    }\n\n    stream << CodePointToUtf8(unicode_code_point);\n  }\n  return StringStreamToString(&stream);\n}\n\n// Converts a wide C string to an std::string using the UTF-8 encoding.\n// NULL will be converted to \"(null)\".\nstd::string String::ShowWideCString(const wchar_t * wide_c_str) {\n  if (wide_c_str == nullptr) return \"(null)\";\n\n  return internal::WideStringToUtf8(wide_c_str, -1);\n}\n\n// Compares two wide C strings.  Returns true if and only if they have the\n// same content.\n//\n// Unlike wcscmp(), this function can handle NULL argument(s).  
A NULL\n// C string is considered different to any non-NULL C string,\n// including the empty string.\nbool String::WideCStringEquals(const wchar_t * lhs, const wchar_t * rhs) {\n  if (lhs == nullptr) return rhs == nullptr;\n\n  if (rhs == nullptr) return false;\n\n  return wcscmp(lhs, rhs) == 0;\n}\n\n// Helper function for *_STREQ on wide strings.\nAssertionResult CmpHelperSTREQ(const char* lhs_expression,\n                               const char* rhs_expression,\n                               const wchar_t* lhs,\n                               const wchar_t* rhs) {\n  if (String::WideCStringEquals(lhs, rhs)) {\n    return AssertionSuccess();\n  }\n\n  return EqFailure(lhs_expression,\n                   rhs_expression,\n                   PrintToString(lhs),\n                   PrintToString(rhs),\n                   false);\n}\n\n// Helper function for *_STRNE on wide strings.\nAssertionResult CmpHelperSTRNE(const char* s1_expression,\n                               const char* s2_expression,\n                               const wchar_t* s1,\n                               const wchar_t* s2) {\n  if (!String::WideCStringEquals(s1, s2)) {\n    return AssertionSuccess();\n  }\n\n  return AssertionFailure() << \"Expected: (\" << s1_expression << \") != (\"\n                            << s2_expression << \"), actual: \"\n                            << PrintToString(s1)\n                            << \" vs \" << PrintToString(s2);\n}\n\n// Compares two C strings, ignoring case.  Returns true if and only if they have\n// the same content.\n//\n// Unlike strcasecmp(), this function can handle NULL argument(s).  
A\n// NULL C string is considered different to any non-NULL C string,\n// including the empty string.\nbool String::CaseInsensitiveCStringEquals(const char * lhs, const char * rhs) {\n  if (lhs == nullptr) return rhs == nullptr;\n  if (rhs == nullptr) return false;\n  return posix::StrCaseCmp(lhs, rhs) == 0;\n}\n\n// Compares two wide C strings, ignoring case.  Returns true if and only if they\n// have the same content.\n//\n// Unlike wcscasecmp(), this function can handle NULL argument(s).\n// A NULL C string is considered different to any non-NULL wide C string,\n// including the empty string.\n// NB: The implementations on different platforms slightly differ.\n// On windows, this method uses _wcsicmp which compares according to LC_CTYPE\n// environment variable. On GNU platform this method uses wcscasecmp\n// which compares according to LC_CTYPE category of the current locale.\n// On MacOS X, it uses towlower, which also uses LC_CTYPE category of the\n// current locale.\nbool String::CaseInsensitiveWideCStringEquals(const wchar_t* lhs,\n                                              const wchar_t* rhs) {\n  if (lhs == nullptr) return rhs == nullptr;\n\n  if (rhs == nullptr) return false;\n\n#if GTEST_OS_WINDOWS\n  return _wcsicmp(lhs, rhs) == 0;\n#elif GTEST_OS_LINUX && !GTEST_OS_LINUX_ANDROID\n  return wcscasecmp(lhs, rhs) == 0;\n#else\n  // Android, Mac OS X and Cygwin don't define wcscasecmp.\n  // Other unknown OSes may not define it either.\n  wint_t left, right;\n  do {\n    left = towlower(static_cast<wint_t>(*lhs++));\n    right = towlower(static_cast<wint_t>(*rhs++));\n  } while (left && left == right);\n  return left == right;\n#endif  // OS selector\n}\n\n// Returns true if and only if str ends with the given suffix, ignoring case.\n// Any string is considered to end with an empty suffix.\nbool String::EndsWithCaseInsensitive(\n    const std::string& str, const std::string& suffix) {\n  const size_t str_len = str.length();\n  const size_t suffix_len = 
suffix.length();\n  return (str_len >= suffix_len) &&\n         CaseInsensitiveCStringEquals(str.c_str() + str_len - suffix_len,\n                                      suffix.c_str());\n}\n\n// Formats an int value as \"%02d\".\nstd::string String::FormatIntWidth2(int value) {\n  std::stringstream ss;\n  ss << std::setfill('0') << std::setw(2) << value;\n  return ss.str();\n}\n\n// Formats an int value as \"%X\".\nstd::string String::FormatHexUInt32(UInt32 value) {\n  std::stringstream ss;\n  ss << std::hex << std::uppercase << value;\n  return ss.str();\n}\n\n// Formats an int value as \"%X\".\nstd::string String::FormatHexInt(int value) {\n  return FormatHexUInt32(static_cast<UInt32>(value));\n}\n\n// Formats a byte as \"%02X\".\nstd::string String::FormatByte(unsigned char value) {\n  std::stringstream ss;\n  ss << std::setfill('0') << std::setw(2) << std::hex << std::uppercase\n     << static_cast<unsigned int>(value);\n  return ss.str();\n}\n\n// Converts the buffer in a stringstream to an std::string, converting NUL\n// bytes to \"\\\\0\" along the way.\nstd::string StringStreamToString(::std::stringstream* ss) {\n  const ::std::string& str = ss->str();\n  const char* const start = str.c_str();\n  const char* const end = start + str.length();\n\n  std::string result;\n  result.reserve(static_cast<size_t>(2 * (end - start)));\n  for (const char* ch = start; ch != end; ++ch) {\n    if (*ch == '\\0') {\n      result += \"\\\\0\";  // Replaces NUL with \"\\\\0\";\n    } else {\n      result += *ch;\n    }\n  }\n\n  return result;\n}\n\n// Appends the user-supplied message to the Google-Test-generated message.\nstd::string AppendUserMessage(const std::string& gtest_msg,\n                              const Message& user_msg) {\n  // Appends the user message if it's non-empty.\n  const std::string user_msg_string = user_msg.GetString();\n  if (user_msg_string.empty()) {\n    return gtest_msg;\n  }\n\n  return gtest_msg + \"\\n\" + user_msg_string;\n}\n\n}  // 
namespace internal\n\n// class TestResult\n\n// Creates an empty TestResult.\nTestResult::TestResult()\n    : death_test_count_(0), start_timestamp_(0), elapsed_time_(0) {}\n\n// D'tor.\nTestResult::~TestResult() {\n}\n\n// Returns the i-th test part result among all the results. i can\n// range from 0 to total_part_count() - 1. If i is not in that range,\n// aborts the program.\nconst TestPartResult& TestResult::GetTestPartResult(int i) const {\n  if (i < 0 || i >= total_part_count())\n    internal::posix::Abort();\n  return test_part_results_.at(static_cast<size_t>(i));\n}\n\n// Returns the i-th test property. i can range from 0 to\n// test_property_count() - 1. If i is not in that range, aborts the\n// program.\nconst TestProperty& TestResult::GetTestProperty(int i) const {\n  if (i < 0 || i >= test_property_count())\n    internal::posix::Abort();\n  return test_properties_.at(static_cast<size_t>(i));\n}\n\n// Clears the test part results.\nvoid TestResult::ClearTestPartResults() {\n  test_part_results_.clear();\n}\n\n// Adds a test part result to the list.\nvoid TestResult::AddTestPartResult(const TestPartResult& test_part_result) {\n  test_part_results_.push_back(test_part_result);\n}\n\n// Adds a test property to the list. 
If a property with the same key as the\n// supplied property is already represented, the value of this test_property\n// replaces the old value for that key.\nvoid TestResult::RecordProperty(const std::string& xml_element,\n                                const TestProperty& test_property) {\n  if (!ValidateTestProperty(xml_element, test_property)) {\n    return;\n  }\n  internal::MutexLock lock(&test_properites_mutex_);\n  const std::vector<TestProperty>::iterator property_with_matching_key =\n      std::find_if(test_properties_.begin(), test_properties_.end(),\n                   internal::TestPropertyKeyIs(test_property.key()));\n  if (property_with_matching_key == test_properties_.end()) {\n    test_properties_.push_back(test_property);\n    return;\n  }\n  property_with_matching_key->SetValue(test_property.value());\n}\n\n// The list of reserved attributes used in the <testsuites> element of XML\n// output.\nstatic const char* const kReservedTestSuitesAttributes[] = {\n  \"disabled\",\n  \"errors\",\n  \"failures\",\n  \"name\",\n  \"random_seed\",\n  \"tests\",\n  \"time\",\n  \"timestamp\"\n};\n\n// The list of reserved attributes used in the <testsuite> element of XML\n// output.\nstatic const char* const kReservedTestSuiteAttributes[] = {\n    \"disabled\", \"errors\", \"failures\", \"name\", \"tests\", \"time\", \"timestamp\"};\n\n// The list of reserved attributes used in the <testcase> element of XML output.\nstatic const char* const kReservedTestCaseAttributes[] = {\n    \"classname\",   \"name\", \"status\", \"time\",  \"type_param\",\n    \"value_param\", \"file\", \"line\"};\n\n// Use a slightly different set for allowed output to ensure existing tests can\n// still RecordProperty(\"result\") or \"RecordProperty(timestamp\")\nstatic const char* const kReservedOutputTestCaseAttributes[] = {\n    \"classname\",   \"name\", \"status\", \"time\",   \"type_param\",\n    \"value_param\", \"file\", \"line\",   \"result\", \"timestamp\"};\n\ntemplate <int 
kSize>\nstd::vector<std::string> ArrayAsVector(const char* const (&array)[kSize]) {\n  return std::vector<std::string>(array, array + kSize);\n}\n\nstatic std::vector<std::string> GetReservedAttributesForElement(\n    const std::string& xml_element) {\n  if (xml_element == \"testsuites\") {\n    return ArrayAsVector(kReservedTestSuitesAttributes);\n  } else if (xml_element == \"testsuite\") {\n    return ArrayAsVector(kReservedTestSuiteAttributes);\n  } else if (xml_element == \"testcase\") {\n    return ArrayAsVector(kReservedTestCaseAttributes);\n  } else {\n    GTEST_CHECK_(false) << \"Unrecognized xml_element provided: \" << xml_element;\n  }\n  // This code is unreachable but some compilers may not realizes that.\n  return std::vector<std::string>();\n}\n\n// TODO(jdesprez): Merge the two getReserved attributes once skip is improved\nstatic std::vector<std::string> GetReservedOutputAttributesForElement(\n    const std::string& xml_element) {\n  if (xml_element == \"testsuites\") {\n    return ArrayAsVector(kReservedTestSuitesAttributes);\n  } else if (xml_element == \"testsuite\") {\n    return ArrayAsVector(kReservedTestSuiteAttributes);\n  } else if (xml_element == \"testcase\") {\n    return ArrayAsVector(kReservedOutputTestCaseAttributes);\n  } else {\n    GTEST_CHECK_(false) << \"Unrecognized xml_element provided: \" << xml_element;\n  }\n  // This code is unreachable but some compilers may not realizes that.\n  return std::vector<std::string>();\n}\n\nstatic std::string FormatWordList(const std::vector<std::string>& words) {\n  Message word_list;\n  for (size_t i = 0; i < words.size(); ++i) {\n    if (i > 0 && words.size() > 2) {\n      word_list << \", \";\n    }\n    if (i == words.size() - 1) {\n      word_list << \"and \";\n    }\n    word_list << \"'\" << words[i] << \"'\";\n  }\n  return word_list.GetString();\n}\n\nstatic bool ValidateTestPropertyName(\n    const std::string& property_name,\n    const std::vector<std::string>& reserved_names) {\n  
if (std::find(reserved_names.begin(), reserved_names.end(), property_name) !=\n          reserved_names.end()) {\n    ADD_FAILURE() << \"Reserved key used in RecordProperty(): \" << property_name\n                  << \" (\" << FormatWordList(reserved_names)\n                  << \" are reserved by \" << GTEST_NAME_ << \")\";\n    return false;\n  }\n  return true;\n}\n\n// Adds a failure if the key is a reserved attribute of the element named\n// xml_element.  Returns true if the property is valid.\nbool TestResult::ValidateTestProperty(const std::string& xml_element,\n                                      const TestProperty& test_property) {\n  return ValidateTestPropertyName(test_property.key(),\n                                  GetReservedAttributesForElement(xml_element));\n}\n\n// Clears the object.\nvoid TestResult::Clear() {\n  test_part_results_.clear();\n  test_properties_.clear();\n  death_test_count_ = 0;\n  elapsed_time_ = 0;\n}\n\n// Returns true off the test part was skipped.\nstatic bool TestPartSkipped(const TestPartResult& result) {\n  return result.skipped();\n}\n\n// Returns true if and only if the test was skipped.\nbool TestResult::Skipped() const {\n  return !Failed() && CountIf(test_part_results_, TestPartSkipped) > 0;\n}\n\n// Returns true if and only if the test failed.\nbool TestResult::Failed() const {\n  for (int i = 0; i < total_part_count(); ++i) {\n    if (GetTestPartResult(i).failed())\n      return true;\n  }\n  return false;\n}\n\n// Returns true if and only if the test part fatally failed.\nstatic bool TestPartFatallyFailed(const TestPartResult& result) {\n  return result.fatally_failed();\n}\n\n// Returns true if and only if the test fatally failed.\nbool TestResult::HasFatalFailure() const {\n  return CountIf(test_part_results_, TestPartFatallyFailed) > 0;\n}\n\n// Returns true if and only if the test part non-fatally failed.\nstatic bool TestPartNonfatallyFailed(const TestPartResult& result) {\n  return 
result.nonfatally_failed();\n}\n\n// Returns true if and only if the test has a non-fatal failure.\nbool TestResult::HasNonfatalFailure() const {\n  return CountIf(test_part_results_, TestPartNonfatallyFailed) > 0;\n}\n\n// Gets the number of all test parts.  This is the sum of the number\n// of successful test parts and the number of failed test parts.\nint TestResult::total_part_count() const {\n  return static_cast<int>(test_part_results_.size());\n}\n\n// Returns the number of the test properties.\nint TestResult::test_property_count() const {\n  return static_cast<int>(test_properties_.size());\n}\n\n// class Test\n\n// Creates a Test object.\n\n// The c'tor saves the states of all flags.\nTest::Test()\n    : gtest_flag_saver_(new GTEST_FLAG_SAVER_) {\n}\n\n// The d'tor restores the states of all flags.  The actual work is\n// done by the d'tor of the gtest_flag_saver_ field, and thus not\n// visible here.\nTest::~Test() {\n}\n\n// Sets up the test fixture.\n//\n// A sub-class may override this.\nvoid Test::SetUp() {\n}\n\n// Tears down the test fixture.\n//\n// A sub-class may override this.\nvoid Test::TearDown() {\n}\n\n// Allows user supplied key value pairs to be recorded for later output.\nvoid Test::RecordProperty(const std::string& key, const std::string& value) {\n  UnitTest::GetInstance()->RecordProperty(key, value);\n}\n\n// Allows user supplied key value pairs to be recorded for later output.\nvoid Test::RecordProperty(const std::string& key, int value) {\n  Message value_message;\n  value_message << value;\n  RecordProperty(key, value_message.GetString().c_str());\n}\n\nnamespace internal {\n\nvoid ReportFailureInUnknownLocation(TestPartResult::Type result_type,\n                                    const std::string& message) {\n  // This function is a friend of UnitTest and as such has access to\n  // AddTestPartResult.\n  UnitTest::GetInstance()->AddTestPartResult(\n      result_type,\n      nullptr,  // No info about the source file where the 
exception occurred.\n      -1,       // We have no info on which line caused the exception.\n      message,\n      \"\");  // No stack trace, either.\n}\n\n}  // namespace internal\n\n// Google Test requires all tests in the same test suite to use the same test\n// fixture class.  This function checks if the current test has the\n// same fixture class as the first test in the current test suite.  If\n// yes, it returns true; otherwise it generates a Google Test failure and\n// returns false.\nbool Test::HasSameFixtureClass() {\n  internal::UnitTestImpl* const impl = internal::GetUnitTestImpl();\n  const TestSuite* const test_suite = impl->current_test_suite();\n\n  // Info about the first test in the current test suite.\n  const TestInfo* const first_test_info = test_suite->test_info_list()[0];\n  const internal::TypeId first_fixture_id = first_test_info->fixture_class_id_;\n  const char* const first_test_name = first_test_info->name();\n\n  // Info about the current test.\n  const TestInfo* const this_test_info = impl->current_test_info();\n  const internal::TypeId this_fixture_id = this_test_info->fixture_class_id_;\n  const char* const this_test_name = this_test_info->name();\n\n  if (this_fixture_id != first_fixture_id) {\n    // Is the first test defined using TEST?\n    const bool first_is_TEST = first_fixture_id == internal::GetTestTypeId();\n    // Is this test defined using TEST?\n    const bool this_is_TEST = this_fixture_id == internal::GetTestTypeId();\n\n    if (first_is_TEST || this_is_TEST) {\n      // Both TEST and TEST_F appear in same test suite, which is incorrect.\n      // Tell the user how to fix this.\n\n      // Gets the name of the TEST and the name of the TEST_F.  Note\n      // that first_is_TEST and this_is_TEST cannot both be true, as\n      // the fixture IDs are different for the two tests.\n      const char* const TEST_name =\n          first_is_TEST ? 
first_test_name : this_test_name;\n      const char* const TEST_F_name =\n          first_is_TEST ? this_test_name : first_test_name;\n\n      ADD_FAILURE()\n          << \"All tests in the same test suite must use the same test fixture\\n\"\n          << \"class, so mixing TEST_F and TEST in the same test suite is\\n\"\n          << \"illegal.  In test suite \" << this_test_info->test_suite_name()\n          << \",\\n\"\n          << \"test \" << TEST_F_name << \" is defined using TEST_F but\\n\"\n          << \"test \" << TEST_name << \" is defined using TEST.  You probably\\n\"\n          << \"want to change the TEST to TEST_F or move it to another test\\n\"\n          << \"case.\";\n    } else {\n      // Two fixture classes with the same name appear in two different\n      // namespaces, which is not allowed. Tell the user how to fix this.\n      ADD_FAILURE()\n          << \"All tests in the same test suite must use the same test fixture\\n\"\n          << \"class.  However, in test suite \"\n          << this_test_info->test_suite_name() << \",\\n\"\n          << \"you defined test \" << first_test_name << \" and test \"\n          << this_test_name << \"\\n\"\n          << \"using two different test fixture classes.  This can happen if\\n\"\n          << \"the two classes are from different namespaces or translation\\n\"\n          << \"units and have the same name.  You should probably rename one\\n\"\n          << \"of the classes to put the tests into different test suites.\";\n    }\n    return false;\n  }\n\n  return true;\n}\n\n#if GTEST_HAS_SEH\n\n// Adds an \"exception thrown\" fatal failure to the current test.  
This\n// function returns its result via an output parameter pointer because VC++\n// prohibits creation of objects with destructors on stack in functions\n// using __try (see error C2712).\nstatic std::string* FormatSehExceptionMessage(DWORD exception_code,\n                                              const char* location) {\n  Message message;\n  message << \"SEH exception with code 0x\" << std::setbase(16) <<\n    exception_code << std::setbase(10) << \" thrown in \" << location << \".\";\n\n  return new std::string(message.GetString());\n}\n\n#endif  // GTEST_HAS_SEH\n\nnamespace internal {\n\n#if GTEST_HAS_EXCEPTIONS\n\n// Adds an \"exception thrown\" fatal failure to the current test.\nstatic std::string FormatCxxExceptionMessage(const char* description,\n                                             const char* location) {\n  Message message;\n  if (description != nullptr) {\n    message << \"C++ exception with description \\\"\" << description << \"\\\"\";\n  } else {\n    message << \"Unknown C++ exception\";\n  }\n  message << \" thrown in \" << location << \".\";\n\n  return message.GetString();\n}\n\nstatic std::string PrintTestPartResultToString(\n    const TestPartResult& test_part_result);\n\nGoogleTestFailureException::GoogleTestFailureException(\n    const TestPartResult& failure)\n    : ::std::runtime_error(PrintTestPartResultToString(failure).c_str()) {}\n\n#endif  // GTEST_HAS_EXCEPTIONS\n\n// We put these helper functions in the internal namespace as IBM's xlC\n// compiler rejects the code if they were declared static.\n\n// Runs the given method and handles SEH exceptions it throws, when\n// SEH is supported; returns the 0-value for type Result in case of an\n// SEH exception.  (Microsoft compilers cannot handle SEH and C++\n// exceptions in the same function.  
Therefore, we provide a separate\n// wrapper function for handling SEH exceptions.)\ntemplate <class T, typename Result>\nResult HandleSehExceptionsInMethodIfSupported(\n    T* object, Result (T::*method)(), const char* location) {\n#if GTEST_HAS_SEH\n  __try {\n    return (object->*method)();\n  } __except (internal::UnitTestOptions::GTestShouldProcessSEH(  // NOLINT\n      GetExceptionCode())) {\n    // We create the exception message on the heap because VC++ prohibits\n    // creation of objects with destructors on stack in functions using __try\n    // (see error C2712).\n    std::string* exception_message = FormatSehExceptionMessage(\n        GetExceptionCode(), location);\n    internal::ReportFailureInUnknownLocation(TestPartResult::kFatalFailure,\n                                             *exception_message);\n    delete exception_message;\n    return static_cast<Result>(0);\n  }\n#else\n  (void)location;\n  return (object->*method)();\n#endif  // GTEST_HAS_SEH\n}\n\n// Runs the given method and catches and reports C++ and/or SEH-style\n// exceptions, if they are supported; returns the 0-value for type\n// Result in case of an SEH exception.\ntemplate <class T, typename Result>\nResult HandleExceptionsInMethodIfSupported(\n    T* object, Result (T::*method)(), const char* location) {\n  // NOTE: The user code can affect the way in which Google Test handles\n  // exceptions by setting GTEST_FLAG(catch_exceptions), but only before\n  // RUN_ALL_TESTS() starts. It is technically possible to check the flag\n  // after the exception is caught and either report or re-throw the\n  // exception based on the flag's value:\n  //\n  // try {\n  //   // Perform the test method.\n  // } catch (...) 
{\n  //   if (GTEST_FLAG(catch_exceptions))\n  //     // Report the exception as failure.\n  //   else\n  //     throw;  // Re-throws the original exception.\n  // }\n  //\n  // However, the purpose of this flag is to allow the program to drop into\n  // the debugger when the exception is thrown. On most platforms, once the\n  // control enters the catch block, the exception origin information is\n  // lost and the debugger will stop the program at the point of the\n  // re-throw in this function -- instead of at the point of the original\n  // throw statement in the code under test.  For this reason, we perform\n  // the check early, sacrificing the ability to affect Google Test's\n  // exception handling in the method where the exception is thrown.\n  if (internal::GetUnitTestImpl()->catch_exceptions()) {\n#if GTEST_HAS_EXCEPTIONS\n    try {\n      return HandleSehExceptionsInMethodIfSupported(object, method, location);\n    } catch (const AssertionException&) {  // NOLINT\n      // This failure was reported already.\n    } catch (const internal::GoogleTestFailureException&) {  // NOLINT\n      // This exception type can only be thrown by a failed Google\n      // Test assertion with the intention of letting another testing\n      // framework catch it.  Therefore we just re-throw it.\n      throw;\n    } catch (const std::exception& e) {  // NOLINT\n      internal::ReportFailureInUnknownLocation(\n          TestPartResult::kFatalFailure,\n          FormatCxxExceptionMessage(e.what(), location));\n    } catch (...) 
{  // NOLINT\n      internal::ReportFailureInUnknownLocation(\n          TestPartResult::kFatalFailure,\n          FormatCxxExceptionMessage(nullptr, location));\n    }\n    return static_cast<Result>(0);\n#else\n    return HandleSehExceptionsInMethodIfSupported(object, method, location);\n#endif  // GTEST_HAS_EXCEPTIONS\n  } else {\n    return (object->*method)();\n  }\n}\n\n}  // namespace internal\n\n// Runs the test and updates the test result.\nvoid Test::Run() {\n  if (!HasSameFixtureClass()) return;\n\n  internal::UnitTestImpl* const impl = internal::GetUnitTestImpl();\n  impl->os_stack_trace_getter()->UponLeavingGTest();\n  internal::HandleExceptionsInMethodIfSupported(this, &Test::SetUp, \"SetUp()\");\n  // We will run the test only if SetUp() was successful and didn't call\n  // GTEST_SKIP().\n  if (!HasFatalFailure() && !IsSkipped()) {\n    impl->os_stack_trace_getter()->UponLeavingGTest();\n    internal::HandleExceptionsInMethodIfSupported(\n        this, &Test::TestBody, \"the test body\");\n  }\n\n  // However, we want to clean up as much as possible.  Hence we will\n  // always call TearDown(), even if SetUp() or the test body has\n  // failed.\n  impl->os_stack_trace_getter()->UponLeavingGTest();\n  internal::HandleExceptionsInMethodIfSupported(\n      this, &Test::TearDown, \"TearDown()\");\n}\n\n// Returns true if and only if the current test has a fatal failure.\nbool Test::HasFatalFailure() {\n  return internal::GetUnitTestImpl()->current_test_result()->HasFatalFailure();\n}\n\n// Returns true if and only if the current test has a non-fatal failure.\nbool Test::HasNonfatalFailure() {\n  return internal::GetUnitTestImpl()->current_test_result()->\n      HasNonfatalFailure();\n}\n\n// Returns true if and only if the current test was skipped.\nbool Test::IsSkipped() {\n  return internal::GetUnitTestImpl()->current_test_result()->Skipped();\n}\n\n// class TestInfo\n\n// Constructs a TestInfo object. 
It assumes ownership of the test factory\n// object.\nTestInfo::TestInfo(const std::string& a_test_suite_name,\n                   const std::string& a_name, const char* a_type_param,\n                   const char* a_value_param,\n                   internal::CodeLocation a_code_location,\n                   internal::TypeId fixture_class_id,\n                   internal::TestFactoryBase* factory)\n    : test_suite_name_(a_test_suite_name),\n      name_(a_name),\n      type_param_(a_type_param ? new std::string(a_type_param) : nullptr),\n      value_param_(a_value_param ? new std::string(a_value_param) : nullptr),\n      location_(a_code_location),\n      fixture_class_id_(fixture_class_id),\n      should_run_(false),\n      is_disabled_(false),\n      matches_filter_(false),\n      factory_(factory),\n      result_() {}\n\n// Destructs a TestInfo object.\nTestInfo::~TestInfo() { delete factory_; }\n\nnamespace internal {\n\n// Creates a new TestInfo object and registers it with Google Test;\n// returns the created object.\n//\n// Arguments:\n//\n//   test_suite_name:   name of the test suite\n//   name:             name of the test\n//   type_param:       the name of the test's type parameter, or NULL if\n//                     this is not a typed or a type-parameterized test.\n//   value_param:      text representation of the test's value parameter,\n//                     or NULL if this is not a value-parameterized test.\n//   code_location:    code location where the test is defined\n//   fixture_class_id: ID of the test fixture class\n//   set_up_tc:        pointer to the function that sets up the test suite\n//   tear_down_tc:     pointer to the function that tears down the test suite\n//   factory:          pointer to the factory that creates a test object.\n//                     The newly created TestInfo instance will assume\n//                     ownership of the factory object.\nTestInfo* MakeAndRegisterTestInfo(\n    const char* test_suite_name, 
const char* name, const char* type_param,\n    const char* value_param, CodeLocation code_location,\n    TypeId fixture_class_id, SetUpTestSuiteFunc set_up_tc,\n    TearDownTestSuiteFunc tear_down_tc, TestFactoryBase* factory) {\n  TestInfo* const test_info =\n      new TestInfo(test_suite_name, name, type_param, value_param,\n                   code_location, fixture_class_id, factory);\n  GetUnitTestImpl()->AddTestInfo(set_up_tc, tear_down_tc, test_info);\n  return test_info;\n}\n\nvoid ReportInvalidTestSuiteType(const char* test_suite_name,\n                                CodeLocation code_location) {\n  Message errors;\n  errors\n      << \"Attempted redefinition of test suite \" << test_suite_name << \".\\n\"\n      << \"All tests in the same test suite must use the same test fixture\\n\"\n      << \"class.  However, in test suite \" << test_suite_name << \", you tried\\n\"\n      << \"to define a test using a fixture class different from the one\\n\"\n      << \"used earlier. This can happen if the two fixture classes are\\n\"\n      << \"from different namespaces and have the same name. You should\\n\"\n      << \"probably rename one of the classes to put the tests into different\\n\"\n      << \"test suites.\";\n\n  GTEST_LOG_(ERROR) << FormatFileLocation(code_location.file.c_str(),\n                                          code_location.line)\n                    << \" \" << errors.GetString();\n}\n}  // namespace internal\n\nnamespace {\n\n// A predicate that checks the test name of a TestInfo against a known\n// value.\n//\n// This is used for implementation of the TestSuite class only.  
We put\n// it in the anonymous namespace to prevent polluting the outer\n// namespace.\n//\n// TestNameIs is copyable.\nclass TestNameIs {\n public:\n  // Constructor.\n  //\n  // TestNameIs has NO default constructor.\n  explicit TestNameIs(const char* name)\n      : name_(name) {}\n\n  // Returns true if and only if the test name of test_info matches name_.\n  bool operator()(const TestInfo * test_info) const {\n    return test_info && test_info->name() == name_;\n  }\n\n private:\n  std::string name_;\n};\n\n}  // namespace\n\nnamespace internal {\n\n// This method expands all parameterized tests registered with macros TEST_P\n// and INSTANTIATE_TEST_SUITE_P into regular tests and registers those.\n// This will be done just once during the program runtime.\nvoid UnitTestImpl::RegisterParameterizedTests() {\n  if (!parameterized_tests_registered_) {\n    parameterized_test_registry_.RegisterTests();\n    parameterized_tests_registered_ = true;\n  }\n}\n\n}  // namespace internal\n\n// Creates the test object, runs it, records its result, and then\n// deletes it.\nvoid TestInfo::Run() {\n  if (!should_run_) return;\n\n  // Tells UnitTest where to store test result.\n  internal::UnitTestImpl* const impl = internal::GetUnitTestImpl();\n  impl->set_current_test_info(this);\n\n  TestEventListener* repeater = UnitTest::GetInstance()->listeners().repeater();\n\n  // Notifies the unit test event listeners that a test is about to start.\n  repeater->OnTestStart(*this);\n\n  const TimeInMillis start = internal::GetTimeInMillis();\n\n  impl->os_stack_trace_getter()->UponLeavingGTest();\n\n  // Creates the test object.\n  Test* const test = internal::HandleExceptionsInMethodIfSupported(\n      factory_, &internal::TestFactoryBase::CreateTest,\n      \"the test fixture's constructor\");\n\n  // Runs the test if the constructor didn't generate a fatal failure or invoke\n  // GTEST_SKIP().\n  // Note that the object will not be null\n  if (!Test::HasFatalFailure() && 
!Test::IsSkipped()) {\n    // This doesn't throw as all user code that can throw are wrapped into\n    // exception handling code.\n    test->Run();\n  }\n\n  if (test != nullptr) {\n    // Deletes the test object.\n    impl->os_stack_trace_getter()->UponLeavingGTest();\n    internal::HandleExceptionsInMethodIfSupported(\n        test, &Test::DeleteSelf_, \"the test fixture's destructor\");\n  }\n\n  result_.set_start_timestamp(start);\n  result_.set_elapsed_time(internal::GetTimeInMillis() - start);\n\n  // Notifies the unit test event listener that a test has just finished.\n  repeater->OnTestEnd(*this);\n\n  // Tells UnitTest to stop associating assertion results to this\n  // test.\n  impl->set_current_test_info(nullptr);\n}\n\n// class TestSuite\n\n// Gets the number of successful tests in this test suite.\nint TestSuite::successful_test_count() const {\n  return CountIf(test_info_list_, TestPassed);\n}\n\n// Gets the number of successful tests in this test suite.\nint TestSuite::skipped_test_count() const {\n  return CountIf(test_info_list_, TestSkipped);\n}\n\n// Gets the number of failed tests in this test suite.\nint TestSuite::failed_test_count() const {\n  return CountIf(test_info_list_, TestFailed);\n}\n\n// Gets the number of disabled tests that will be reported in the XML report.\nint TestSuite::reportable_disabled_test_count() const {\n  return CountIf(test_info_list_, TestReportableDisabled);\n}\n\n// Gets the number of disabled tests in this test suite.\nint TestSuite::disabled_test_count() const {\n  return CountIf(test_info_list_, TestDisabled);\n}\n\n// Gets the number of tests to be printed in the XML report.\nint TestSuite::reportable_test_count() const {\n  return CountIf(test_info_list_, TestReportable);\n}\n\n// Get the number of tests in this test suite that should run.\nint TestSuite::test_to_run_count() const {\n  return CountIf(test_info_list_, ShouldRunTest);\n}\n\n// Gets the number of all tests.\nint TestSuite::total_test_count() 
const {\n  return static_cast<int>(test_info_list_.size());\n}\n\n// Creates a TestSuite with the given name.\n//\n// Arguments:\n//\n//   name:         name of the test suite\n//   a_type_param: the name of the test suite's type parameter, or NULL if\n//                 this is not a typed or a type-parameterized test suite.\n//   set_up_tc:    pointer to the function that sets up the test suite\n//   tear_down_tc: pointer to the function that tears down the test suite\nTestSuite::TestSuite(const char* a_name, const char* a_type_param,\n                     internal::SetUpTestSuiteFunc set_up_tc,\n                     internal::TearDownTestSuiteFunc tear_down_tc)\n    : name_(a_name),\n      type_param_(a_type_param ? new std::string(a_type_param) : nullptr),\n      set_up_tc_(set_up_tc),\n      tear_down_tc_(tear_down_tc),\n      should_run_(false),\n      start_timestamp_(0),\n      elapsed_time_(0) {}\n\n// Destructor of TestSuite.\nTestSuite::~TestSuite() {\n  // Deletes every Test in the collection.\n  ForEach(test_info_list_, internal::Delete<TestInfo>);\n}\n\n// Returns the i-th test among all the tests. i can range from 0 to\n// total_test_count() - 1. If i is not in that range, returns NULL.\nconst TestInfo* TestSuite::GetTestInfo(int i) const {\n  const int index = GetElementOr(test_indices_, i, -1);\n  return index < 0 ? nullptr : test_info_list_[static_cast<size_t>(index)];\n}\n\n// Returns the i-th test among all the tests. i can range from 0 to\n// total_test_count() - 1. If i is not in that range, returns NULL.\nTestInfo* TestSuite::GetMutableTestInfo(int i) {\n  const int index = GetElementOr(test_indices_, i, -1);\n  return index < 0 ? nullptr : test_info_list_[static_cast<size_t>(index)];\n}\n\n// Adds a test to this test suite.  
Will delete the test upon\n// destruction of the TestSuite object.\nvoid TestSuite::AddTestInfo(TestInfo* test_info) {\n  test_info_list_.push_back(test_info);\n  test_indices_.push_back(static_cast<int>(test_indices_.size()));\n}\n\n// Runs every test in this TestSuite.\nvoid TestSuite::Run() {\n  if (!should_run_) return;\n\n  internal::UnitTestImpl* const impl = internal::GetUnitTestImpl();\n  impl->set_current_test_suite(this);\n\n  TestEventListener* repeater = UnitTest::GetInstance()->listeners().repeater();\n\n  // Call both legacy and the new API\n  repeater->OnTestSuiteStart(*this);\n//  Legacy API is deprecated but still available\n#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI\n  repeater->OnTestCaseStart(*this);\n#endif  //  GTEST_REMOVE_LEGACY_TEST_CASEAPI\n\n  impl->os_stack_trace_getter()->UponLeavingGTest();\n  internal::HandleExceptionsInMethodIfSupported(\n      this, &TestSuite::RunSetUpTestSuite, \"SetUpTestSuite()\");\n\n  start_timestamp_ = internal::GetTimeInMillis();\n  for (int i = 0; i < total_test_count(); i++) {\n    GetMutableTestInfo(i)->Run();\n  }\n  elapsed_time_ = internal::GetTimeInMillis() - start_timestamp_;\n\n  impl->os_stack_trace_getter()->UponLeavingGTest();\n  internal::HandleExceptionsInMethodIfSupported(\n      this, &TestSuite::RunTearDownTestSuite, \"TearDownTestSuite()\");\n\n  // Call both legacy and the new API\n  repeater->OnTestSuiteEnd(*this);\n//  Legacy API is deprecated but still available\n#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI\n  repeater->OnTestCaseEnd(*this);\n#endif  //  GTEST_REMOVE_LEGACY_TEST_CASEAPI\n\n  impl->set_current_test_suite(nullptr);\n}\n\n// Clears the results of all tests in this test suite.\nvoid TestSuite::ClearResult() {\n  ad_hoc_test_result_.Clear();\n  ForEach(test_info_list_, TestInfo::ClearTestResult);\n}\n\n// Shuffles the tests in this test suite.\nvoid TestSuite::ShuffleTests(internal::Random* random) {\n  Shuffle(random, &test_indices_);\n}\n\n// Restores the test order to before 
the first shuffle.\nvoid TestSuite::UnshuffleTests() {\n  for (size_t i = 0; i < test_indices_.size(); i++) {\n    test_indices_[i] = static_cast<int>(i);\n  }\n}\n\n// Formats a countable noun.  Depending on its quantity, either the\n// singular form or the plural form is used. e.g.\n//\n// FormatCountableNoun(1, \"formula\", \"formuli\") returns \"1 formula\".\n// FormatCountableNoun(5, \"book\", \"books\") returns \"5 books\".\nstatic std::string FormatCountableNoun(int count,\n                                       const char * singular_form,\n                                       const char * plural_form) {\n  return internal::StreamableToString(count) + \" \" +\n      (count == 1 ? singular_form : plural_form);\n}\n\n// Formats the count of tests.\nstatic std::string FormatTestCount(int test_count) {\n  return FormatCountableNoun(test_count, \"test\", \"tests\");\n}\n\n// Formats the count of test suites.\nstatic std::string FormatTestSuiteCount(int test_suite_count) {\n  return FormatCountableNoun(test_suite_count, \"test suite\", \"test suites\");\n}\n\n// Converts a TestPartResult::Type enum to human-friendly string\n// representation.  
Both kNonFatalFailure and kFatalFailure are translated\n// to \"Failure\", as the user usually doesn't care about the difference\n// between the two when viewing the test result.\nstatic const char * TestPartResultTypeToString(TestPartResult::Type type) {\n  switch (type) {\n    case TestPartResult::kSkip:\n      return \"Skipped\";\n    case TestPartResult::kSuccess:\n      return \"Success\";\n\n    case TestPartResult::kNonFatalFailure:\n    case TestPartResult::kFatalFailure:\n#ifdef _MSC_VER\n      return \"error: \";\n#else\n      return \"Failure\\n\";\n#endif\n    default:\n      return \"Unknown result type\";\n  }\n}\n\nnamespace internal {\n\n// Prints a TestPartResult to an std::string.\nstatic std::string PrintTestPartResultToString(\n    const TestPartResult& test_part_result) {\n  return (Message()\n          << internal::FormatFileLocation(test_part_result.file_name(),\n                                          test_part_result.line_number())\n          << \" \" << TestPartResultTypeToString(test_part_result.type())\n          << test_part_result.message()).GetString();\n}\n\n// Prints a TestPartResult.\nstatic void PrintTestPartResult(const TestPartResult& test_part_result) {\n  const std::string& result =\n      PrintTestPartResultToString(test_part_result);\n  printf(\"%s\\n\", result.c_str());\n  fflush(stdout);\n  // If the test program runs in Visual Studio or a debugger, the\n  // following statements add the test part result message to the Output\n  // window such that the user can double-click on it to jump to the\n  // corresponding source code location; otherwise they do nothing.\n#if GTEST_OS_WINDOWS && !GTEST_OS_WINDOWS_MOBILE\n  // We don't call OutputDebugString*() on Windows Mobile, as printing\n  // to stdout is done by OutputDebugString() there already - we don't\n  // want the same message printed twice.\n  ::OutputDebugStringA(result.c_str());\n  ::OutputDebugStringA(\"\\n\");\n#endif\n}\n\n// class 
PrettyUnitTestResultPrinter\n#if GTEST_OS_WINDOWS && !GTEST_OS_WINDOWS_MOBILE && \\\n    !GTEST_OS_WINDOWS_PHONE && !GTEST_OS_WINDOWS_RT && !GTEST_OS_WINDOWS_MINGW\n\n// Returns the character attribute for the given color.\nstatic WORD GetColorAttribute(GTestColor color) {\n  switch (color) {\n    case COLOR_RED:    return FOREGROUND_RED;\n    case COLOR_GREEN:  return FOREGROUND_GREEN;\n    case COLOR_YELLOW: return FOREGROUND_RED | FOREGROUND_GREEN;\n    default:           return 0;\n  }\n}\n\nstatic int GetBitOffset(WORD color_mask) {\n  if (color_mask == 0) return 0;\n\n  int bitOffset = 0;\n  while ((color_mask & 1) == 0) {\n    color_mask >>= 1;\n    ++bitOffset;\n  }\n  return bitOffset;\n}\n\nstatic WORD GetNewColor(GTestColor color, WORD old_color_attrs) {\n  // Let's reuse the BG\n  static const WORD background_mask = BACKGROUND_BLUE | BACKGROUND_GREEN |\n                                      BACKGROUND_RED | BACKGROUND_INTENSITY;\n  static const WORD foreground_mask = FOREGROUND_BLUE | FOREGROUND_GREEN |\n                                      FOREGROUND_RED | FOREGROUND_INTENSITY;\n  const WORD existing_bg = old_color_attrs & background_mask;\n\n  WORD new_color =\n      GetColorAttribute(color) | existing_bg | FOREGROUND_INTENSITY;\n  static const int bg_bitOffset = GetBitOffset(background_mask);\n  static const int fg_bitOffset = GetBitOffset(foreground_mask);\n\n  if (((new_color & background_mask) >> bg_bitOffset) ==\n      ((new_color & foreground_mask) >> fg_bitOffset)) {\n    new_color ^= FOREGROUND_INTENSITY;  // invert intensity\n  }\n  return new_color;\n}\n\n#else\n\n// Returns the ANSI color code for the given color.  
COLOR_DEFAULT is\n// an invalid input.\nstatic const char* GetAnsiColorCode(GTestColor color) {\n  switch (color) {\n    case COLOR_RED:     return \"1\";\n    case COLOR_GREEN:   return \"2\";\n    case COLOR_YELLOW:  return \"3\";\n    default:\n      return nullptr;\n  }\n}\n\n#endif  // GTEST_OS_WINDOWS && !GTEST_OS_WINDOWS_MOBILE\n\n// Returns true if and only if Google Test should use colors in the output.\nbool ShouldUseColor(bool stdout_is_tty) {\n  const char* const gtest_color = GTEST_FLAG(color).c_str();\n\n  if (String::CaseInsensitiveCStringEquals(gtest_color, \"auto\")) {\n#if GTEST_OS_WINDOWS && !GTEST_OS_WINDOWS_MINGW\n    // On Windows the TERM variable is usually not set, but the\n    // console there does support colors.\n    return stdout_is_tty;\n#else\n    // On non-Windows platforms, we rely on the TERM variable.\n    const char* const term = posix::GetEnv(\"TERM\");\n    const bool term_supports_color =\n        String::CStringEquals(term, \"xterm\") ||\n        String::CStringEquals(term, \"xterm-color\") ||\n        String::CStringEquals(term, \"xterm-256color\") ||\n        String::CStringEquals(term, \"screen\") ||\n        String::CStringEquals(term, \"screen-256color\") ||\n        String::CStringEquals(term, \"tmux\") ||\n        String::CStringEquals(term, \"tmux-256color\") ||\n        String::CStringEquals(term, \"rxvt-unicode\") ||\n        String::CStringEquals(term, \"rxvt-unicode-256color\") ||\n        String::CStringEquals(term, \"linux\") ||\n        String::CStringEquals(term, \"cygwin\");\n    return stdout_is_tty && term_supports_color;\n#endif  // GTEST_OS_WINDOWS\n  }\n\n  return String::CaseInsensitiveCStringEquals(gtest_color, \"yes\") ||\n      String::CaseInsensitiveCStringEquals(gtest_color, \"true\") ||\n      String::CaseInsensitiveCStringEquals(gtest_color, \"t\") ||\n      String::CStringEquals(gtest_color, \"1\");\n  // We take \"yes\", \"true\", \"t\", and \"1\" as meaning \"yes\".  
If the\n  // value is neither one of these nor \"auto\", we treat it as \"no\" to\n  // be conservative.\n}\n\n// Helpers for printing colored strings to stdout. Note that on Windows, we\n// cannot simply emit special characters and have the terminal change colors.\n// This routine must actually emit the characters rather than return a string\n// that would be colored when printed, as can be done on Linux.\nvoid ColoredPrintf(GTestColor color, const char* fmt, ...) {\n  va_list args;\n  va_start(args, fmt);\n\n#if GTEST_OS_WINDOWS_MOBILE || GTEST_OS_ZOS || GTEST_OS_IOS || \\\n    GTEST_OS_WINDOWS_PHONE || GTEST_OS_WINDOWS_RT || defined(ESP_PLATFORM)\n  const bool use_color = AlwaysFalse();\n#else\n  static const bool in_color_mode =\n      ShouldUseColor(posix::IsATTY(posix::FileNo(stdout)) != 0);\n  const bool use_color = in_color_mode && (color != COLOR_DEFAULT);\n#endif  // GTEST_OS_WINDOWS_MOBILE || GTEST_OS_ZOS\n\n  if (!use_color) {\n    vprintf(fmt, args);\n    va_end(args);\n    return;\n  }\n\n#if GTEST_OS_WINDOWS && !GTEST_OS_WINDOWS_MOBILE && \\\n    !GTEST_OS_WINDOWS_PHONE && !GTEST_OS_WINDOWS_RT && !GTEST_OS_WINDOWS_MINGW\n  const HANDLE stdout_handle = GetStdHandle(STD_OUTPUT_HANDLE);\n\n  // Gets the current text color.\n  CONSOLE_SCREEN_BUFFER_INFO buffer_info;\n  GetConsoleScreenBufferInfo(stdout_handle, &buffer_info);\n  const WORD old_color_attrs = buffer_info.wAttributes;\n  const WORD new_color = GetNewColor(color, old_color_attrs);\n\n  // We need to flush the stream buffers into the console before each\n  // SetConsoleTextAttribute call lest it affect the text that is already\n  // printed but has not yet reached the console.\n  fflush(stdout);\n  SetConsoleTextAttribute(stdout_handle, new_color);\n\n  vprintf(fmt, args);\n\n  fflush(stdout);\n  // Restores the text color.\n  SetConsoleTextAttribute(stdout_handle, old_color_attrs);\n#else\n  printf(\"\\033[0;3%sm\", GetAnsiColorCode(color));\n  vprintf(fmt, args);\n  printf(\"\\033[m\");  // 
Resets the terminal to default.\n#endif  // GTEST_OS_WINDOWS && !GTEST_OS_WINDOWS_MOBILE\n  va_end(args);\n}\n\n// Text printed in Google Test's text output and --gtest_list_tests\n// output to label the type parameter and value parameter for a test.\nstatic const char kTypeParamLabel[] = \"TypeParam\";\nstatic const char kValueParamLabel[] = \"GetParam()\";\n\nstatic void PrintFullTestCommentIfPresent(const TestInfo& test_info) {\n  const char* const type_param = test_info.type_param();\n  const char* const value_param = test_info.value_param();\n\n  if (type_param != nullptr || value_param != nullptr) {\n    printf(\", where \");\n    if (type_param != nullptr) {\n      printf(\"%s = %s\", kTypeParamLabel, type_param);\n      if (value_param != nullptr) printf(\" and \");\n    }\n    if (value_param != nullptr) {\n      printf(\"%s = %s\", kValueParamLabel, value_param);\n    }\n  }\n}\n\n// This class implements the TestEventListener interface.\n//\n// Class PrettyUnitTestResultPrinter is copyable.\nclass PrettyUnitTestResultPrinter : public TestEventListener {\n public:\n  PrettyUnitTestResultPrinter() {}\n  static void PrintTestName(const char* test_suite, const char* test) {\n    printf(\"%s.%s\", test_suite, test);\n  }\n\n  // The following methods override what's in the TestEventListener class.\n  void OnTestProgramStart(const UnitTest& /*unit_test*/) override {}\n  void OnTestIterationStart(const UnitTest& unit_test, int iteration) override;\n  void OnEnvironmentsSetUpStart(const UnitTest& unit_test) override;\n  void OnEnvironmentsSetUpEnd(const UnitTest& /*unit_test*/) override {}\n#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_\n  void OnTestCaseStart(const TestCase& test_case) override;\n#else\n  void OnTestSuiteStart(const TestSuite& test_suite) override;\n#endif  // OnTestCaseStart\n\n  void OnTestStart(const TestInfo& test_info) override;\n\n  void OnTestPartResult(const TestPartResult& result) override;\n  void OnTestEnd(const TestInfo& test_info) 
override;\n#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_\n  void OnTestCaseEnd(const TestCase& test_case) override;\n#else\n  void OnTestSuiteEnd(const TestSuite& test_suite) override;\n#endif  // GTEST_REMOVE_LEGACY_TEST_CASEAPI_\n\n  void OnEnvironmentsTearDownStart(const UnitTest& unit_test) override;\n  void OnEnvironmentsTearDownEnd(const UnitTest& /*unit_test*/) override {}\n  void OnTestIterationEnd(const UnitTest& unit_test, int iteration) override;\n  void OnTestProgramEnd(const UnitTest& /*unit_test*/) override {}\n\n private:\n  static void PrintFailedTests(const UnitTest& unit_test);\n  static void PrintSkippedTests(const UnitTest& unit_test);\n};\n\n  // Fired before each iteration of tests starts.\nvoid PrettyUnitTestResultPrinter::OnTestIterationStart(\n    const UnitTest& unit_test, int iteration) {\n  if (GTEST_FLAG(repeat) != 1)\n    printf(\"\\nRepeating all tests (iteration %d) . . .\\n\\n\", iteration + 1);\n\n  const char* const filter = GTEST_FLAG(filter).c_str();\n\n  // Prints the filter if it's not *.  
This reminds the user that some\n  // tests may be skipped.\n  if (!String::CStringEquals(filter, kUniversalFilter)) {\n    ColoredPrintf(COLOR_YELLOW,\n                  \"Note: %s filter = %s\\n\", GTEST_NAME_, filter);\n  }\n\n  if (internal::ShouldShard(kTestTotalShards, kTestShardIndex, false)) {\n    const Int32 shard_index = Int32FromEnvOrDie(kTestShardIndex, -1);\n    ColoredPrintf(COLOR_YELLOW,\n                  \"Note: This is test shard %d of %s.\\n\",\n                  static_cast<int>(shard_index) + 1,\n                  internal::posix::GetEnv(kTestTotalShards));\n  }\n\n  if (GTEST_FLAG(shuffle)) {\n    ColoredPrintf(COLOR_YELLOW,\n                  \"Note: Randomizing tests' orders with a seed of %d .\\n\",\n                  unit_test.random_seed());\n  }\n\n  ColoredPrintf(COLOR_GREEN,  \"[==========] \");\n  printf(\"Running %s from %s.\\n\",\n         FormatTestCount(unit_test.test_to_run_count()).c_str(),\n         FormatTestSuiteCount(unit_test.test_suite_to_run_count()).c_str());\n  fflush(stdout);\n}\n\nvoid PrettyUnitTestResultPrinter::OnEnvironmentsSetUpStart(\n    const UnitTest& /*unit_test*/) {\n  ColoredPrintf(COLOR_GREEN,  \"[----------] \");\n  printf(\"Global test environment set-up.\\n\");\n  fflush(stdout);\n}\n\n#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_\nvoid PrettyUnitTestResultPrinter::OnTestCaseStart(const TestCase& test_case) {\n  const std::string counts =\n      FormatCountableNoun(test_case.test_to_run_count(), \"test\", \"tests\");\n  ColoredPrintf(COLOR_GREEN, \"[----------] \");\n  printf(\"%s from %s\", counts.c_str(), test_case.name());\n  if (test_case.type_param() == nullptr) {\n    printf(\"\\n\");\n  } else {\n    printf(\", where %s = %s\\n\", kTypeParamLabel, test_case.type_param());\n  }\n  fflush(stdout);\n}\n#else\nvoid PrettyUnitTestResultPrinter::OnTestSuiteStart(\n    const TestSuite& test_suite) {\n  const std::string counts =\n      FormatCountableNoun(test_suite.test_to_run_count(), \"test\", 
\"tests\");\n  ColoredPrintf(COLOR_GREEN, \"[----------] \");\n  printf(\"%s from %s\", counts.c_str(), test_suite.name());\n  if (test_suite.type_param() == nullptr) {\n    printf(\"\\n\");\n  } else {\n    printf(\", where %s = %s\\n\", kTypeParamLabel, test_suite.type_param());\n  }\n  fflush(stdout);\n}\n#endif  // GTEST_REMOVE_LEGACY_TEST_CASEAPI_\n\nvoid PrettyUnitTestResultPrinter::OnTestStart(const TestInfo& test_info) {\n  ColoredPrintf(COLOR_GREEN,  \"[ RUN      ] \");\n  PrintTestName(test_info.test_suite_name(), test_info.name());\n  printf(\"\\n\");\n  fflush(stdout);\n}\n\n// Called after an assertion failure.\nvoid PrettyUnitTestResultPrinter::OnTestPartResult(\n    const TestPartResult& result) {\n  switch (result.type()) {\n    // If the test part succeeded, we don't need to do anything.\n    case TestPartResult::kSuccess:\n      return;\n    default:\n      // Print failure message from the assertion\n      // (e.g. expected this and got that).\n      PrintTestPartResult(result);\n      fflush(stdout);\n  }\n}\n\nvoid PrettyUnitTestResultPrinter::OnTestEnd(const TestInfo& test_info) {\n  if (test_info.result()->Passed()) {\n    ColoredPrintf(COLOR_GREEN, \"[       OK ] \");\n  } else if (test_info.result()->Skipped()) {\n    ColoredPrintf(COLOR_GREEN, \"[  SKIPPED ] \");\n  } else {\n    ColoredPrintf(COLOR_RED, \"[  FAILED  ] \");\n  }\n  PrintTestName(test_info.test_suite_name(), test_info.name());\n  if (test_info.result()->Failed())\n    PrintFullTestCommentIfPresent(test_info);\n\n  if (GTEST_FLAG(print_time)) {\n    printf(\" (%s ms)\\n\", internal::StreamableToString(\n           test_info.result()->elapsed_time()).c_str());\n  } else {\n    printf(\"\\n\");\n  }\n  fflush(stdout);\n}\n\n#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_\nvoid PrettyUnitTestResultPrinter::OnTestCaseEnd(const TestCase& test_case) {\n  if (!GTEST_FLAG(print_time)) return;\n\n  const std::string counts =\n      FormatCountableNoun(test_case.test_to_run_count(), 
\"test\", \"tests\");\n  ColoredPrintf(COLOR_GREEN, \"[----------] \");\n  printf(\"%s from %s (%s ms total)\\n\\n\", counts.c_str(), test_case.name(),\n         internal::StreamableToString(test_case.elapsed_time()).c_str());\n  fflush(stdout);\n}\n#else\nvoid PrettyUnitTestResultPrinter::OnTestSuiteEnd(const TestSuite& test_suite) {\n  if (!GTEST_FLAG(print_time)) return;\n\n  const std::string counts =\n      FormatCountableNoun(test_suite.test_to_run_count(), \"test\", \"tests\");\n  ColoredPrintf(COLOR_GREEN, \"[----------] \");\n  printf(\"%s from %s (%s ms total)\\n\\n\", counts.c_str(), test_suite.name(),\n         internal::StreamableToString(test_suite.elapsed_time()).c_str());\n  fflush(stdout);\n}\n#endif  // GTEST_REMOVE_LEGACY_TEST_CASEAPI_\n\nvoid PrettyUnitTestResultPrinter::OnEnvironmentsTearDownStart(\n    const UnitTest& /*unit_test*/) {\n  ColoredPrintf(COLOR_GREEN,  \"[----------] \");\n  printf(\"Global test environment tear-down\\n\");\n  fflush(stdout);\n}\n\n// Internal helper for printing the list of failed tests.\nvoid PrettyUnitTestResultPrinter::PrintFailedTests(const UnitTest& unit_test) {\n  const int failed_test_count = unit_test.failed_test_count();\n  if (failed_test_count == 0) {\n    return;\n  }\n\n  for (int i = 0; i < unit_test.total_test_suite_count(); ++i) {\n    const TestSuite& test_suite = *unit_test.GetTestSuite(i);\n    if (!test_suite.should_run() || (test_suite.failed_test_count() == 0)) {\n      continue;\n    }\n    for (int j = 0; j < test_suite.total_test_count(); ++j) {\n      const TestInfo& test_info = *test_suite.GetTestInfo(j);\n      if (!test_info.should_run() || !test_info.result()->Failed()) {\n        continue;\n      }\n      ColoredPrintf(COLOR_RED, \"[  FAILED  ] \");\n      printf(\"%s.%s\", test_suite.name(), test_info.name());\n      PrintFullTestCommentIfPresent(test_info);\n      printf(\"\\n\");\n    }\n  }\n}\n\n// Internal helper for printing the list of skipped tests.\nvoid 
PrettyUnitTestResultPrinter::PrintSkippedTests(const UnitTest& unit_test) {\n  const int skipped_test_count = unit_test.skipped_test_count();\n  if (skipped_test_count == 0) {\n    return;\n  }\n\n  for (int i = 0; i < unit_test.total_test_suite_count(); ++i) {\n    const TestSuite& test_suite = *unit_test.GetTestSuite(i);\n    if (!test_suite.should_run() || (test_suite.skipped_test_count() == 0)) {\n      continue;\n    }\n    for (int j = 0; j < test_suite.total_test_count(); ++j) {\n      const TestInfo& test_info = *test_suite.GetTestInfo(j);\n      if (!test_info.should_run() || !test_info.result()->Skipped()) {\n        continue;\n      }\n      ColoredPrintf(COLOR_GREEN, \"[  SKIPPED ] \");\n      printf(\"%s.%s\", test_suite.name(), test_info.name());\n      printf(\"\\n\");\n    }\n  }\n}\n\nvoid PrettyUnitTestResultPrinter::OnTestIterationEnd(const UnitTest& unit_test,\n                                                     int /*iteration*/) {\n  ColoredPrintf(COLOR_GREEN,  \"[==========] \");\n  printf(\"%s from %s ran.\",\n         FormatTestCount(unit_test.test_to_run_count()).c_str(),\n         FormatTestSuiteCount(unit_test.test_suite_to_run_count()).c_str());\n  if (GTEST_FLAG(print_time)) {\n    printf(\" (%s ms total)\",\n           internal::StreamableToString(unit_test.elapsed_time()).c_str());\n  }\n  printf(\"\\n\");\n  ColoredPrintf(COLOR_GREEN,  \"[  PASSED  ] \");\n  printf(\"%s.\\n\", FormatTestCount(unit_test.successful_test_count()).c_str());\n\n  const int skipped_test_count = unit_test.skipped_test_count();\n  if (skipped_test_count > 0) {\n    ColoredPrintf(COLOR_GREEN, \"[  SKIPPED ] \");\n    printf(\"%s, listed below:\\n\", FormatTestCount(skipped_test_count).c_str());\n    PrintSkippedTests(unit_test);\n  }\n\n  int num_failures = unit_test.failed_test_count();\n  if (!unit_test.Passed()) {\n    const int failed_test_count = unit_test.failed_test_count();\n    ColoredPrintf(COLOR_RED,  \"[  FAILED  ] \");\n    printf(\"%s, listed 
below:\\n\", FormatTestCount(failed_test_count).c_str());\n    PrintFailedTests(unit_test);\n    printf(\"\\n%2d FAILED %s\\n\", num_failures,\n                        num_failures == 1 ? \"TEST\" : \"TESTS\");\n  }\n\n  int num_disabled = unit_test.reportable_disabled_test_count();\n  if (num_disabled && !GTEST_FLAG(also_run_disabled_tests)) {\n    if (!num_failures) {\n      printf(\"\\n\");  // Add a spacer if no FAILURE banner is displayed.\n    }\n    ColoredPrintf(COLOR_YELLOW,\n                  \"  YOU HAVE %d DISABLED %s\\n\\n\",\n                  num_disabled,\n                  num_disabled == 1 ? \"TEST\" : \"TESTS\");\n  }\n  // Ensure that Google Test output is printed before, e.g., heapchecker output.\n  fflush(stdout);\n}\n\n// End PrettyUnitTestResultPrinter\n\n// class TestEventRepeater\n//\n// This class forwards events to other event listeners.\nclass TestEventRepeater : public TestEventListener {\n public:\n  TestEventRepeater() : forwarding_enabled_(true) {}\n  ~TestEventRepeater() override;\n  void Append(TestEventListener *listener);\n  TestEventListener* Release(TestEventListener* listener);\n\n  // Controls whether events will be forwarded to listeners_. 
Set to false\n  // in death test child processes.\n  bool forwarding_enabled() const { return forwarding_enabled_; }\n  void set_forwarding_enabled(bool enable) { forwarding_enabled_ = enable; }\n\n  void OnTestProgramStart(const UnitTest& unit_test) override;\n  void OnTestIterationStart(const UnitTest& unit_test, int iteration) override;\n  void OnEnvironmentsSetUpStart(const UnitTest& unit_test) override;\n  void OnEnvironmentsSetUpEnd(const UnitTest& unit_test) override;\n//  Legacy API is deprecated but still available\n#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_\n  void OnTestCaseStart(const TestSuite& parameter) override;\n#endif  //  GTEST_REMOVE_LEGACY_TEST_CASEAPI_\n  void OnTestSuiteStart(const TestSuite& parameter) override;\n  void OnTestStart(const TestInfo& test_info) override;\n  void OnTestPartResult(const TestPartResult& result) override;\n  void OnTestEnd(const TestInfo& test_info) override;\n//  Legacy API is deprecated but still available\n#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_\n  void OnTestCaseEnd(const TestCase& parameter) override;\n#endif  //  GTEST_REMOVE_LEGACY_TEST_CASEAPI_\n  void OnTestSuiteEnd(const TestSuite& parameter) override;\n  void OnEnvironmentsTearDownStart(const UnitTest& unit_test) override;\n  void OnEnvironmentsTearDownEnd(const UnitTest& unit_test) override;\n  void OnTestIterationEnd(const UnitTest& unit_test, int iteration) override;\n  void OnTestProgramEnd(const UnitTest& unit_test) override;\n\n private:\n  // Controls whether events will be forwarded to listeners_. 
Set to false\n  // in death test child processes.\n  bool forwarding_enabled_;\n  // The list of listeners that receive events.\n  std::vector<TestEventListener*> listeners_;\n\n  GTEST_DISALLOW_COPY_AND_ASSIGN_(TestEventRepeater);\n};\n\nTestEventRepeater::~TestEventRepeater() {\n  ForEach(listeners_, Delete<TestEventListener>);\n}\n\nvoid TestEventRepeater::Append(TestEventListener *listener) {\n  listeners_.push_back(listener);\n}\n\nTestEventListener* TestEventRepeater::Release(TestEventListener *listener) {\n  for (size_t i = 0; i < listeners_.size(); ++i) {\n    if (listeners_[i] == listener) {\n      listeners_.erase(listeners_.begin() + static_cast<int>(i));\n      return listener;\n    }\n  }\n\n  return nullptr;\n}\n\n// Since most methods are very similar, use macros to reduce boilerplate.\n// This defines a member that forwards the call to all listeners.\n#define GTEST_REPEATER_METHOD_(Name, Type) \\\nvoid TestEventRepeater::Name(const Type& parameter) { \\\n  if (forwarding_enabled_) { \\\n    for (size_t i = 0; i < listeners_.size(); i++) { \\\n      listeners_[i]->Name(parameter); \\\n    } \\\n  } \\\n}\n// This defines a member that forwards the call to all listeners in reverse\n// order.\n#define GTEST_REVERSE_REPEATER_METHOD_(Name, Type)      \\\n  void TestEventRepeater::Name(const Type& parameter) { \\\n    if (forwarding_enabled_) {                          \\\n      for (size_t i = listeners_.size(); i != 0; i--) { \\\n        listeners_[i - 1]->Name(parameter);             \\\n      }                                                 \\\n    }                                                   \\\n  }\n\nGTEST_REPEATER_METHOD_(OnTestProgramStart, UnitTest)\nGTEST_REPEATER_METHOD_(OnEnvironmentsSetUpStart, UnitTest)\n//  Legacy API is deprecated but still available\n#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_\nGTEST_REPEATER_METHOD_(OnTestCaseStart, TestSuite)\n#endif  //  GTEST_REMOVE_LEGACY_TEST_CASEAPI_\nGTEST_REPEATER_METHOD_(OnTestSuiteStart, 
TestSuite)\nGTEST_REPEATER_METHOD_(OnTestStart, TestInfo)\nGTEST_REPEATER_METHOD_(OnTestPartResult, TestPartResult)\nGTEST_REPEATER_METHOD_(OnEnvironmentsTearDownStart, UnitTest)\nGTEST_REVERSE_REPEATER_METHOD_(OnEnvironmentsSetUpEnd, UnitTest)\nGTEST_REVERSE_REPEATER_METHOD_(OnEnvironmentsTearDownEnd, UnitTest)\nGTEST_REVERSE_REPEATER_METHOD_(OnTestEnd, TestInfo)\n//  Legacy API is deprecated but still available\n#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_\nGTEST_REVERSE_REPEATER_METHOD_(OnTestCaseEnd, TestSuite)\n#endif  //  GTEST_REMOVE_LEGACY_TEST_CASEAPI_\nGTEST_REVERSE_REPEATER_METHOD_(OnTestSuiteEnd, TestSuite)\nGTEST_REVERSE_REPEATER_METHOD_(OnTestProgramEnd, UnitTest)\n\n#undef GTEST_REPEATER_METHOD_\n#undef GTEST_REVERSE_REPEATER_METHOD_\n\nvoid TestEventRepeater::OnTestIterationStart(const UnitTest& unit_test,\n                                             int iteration) {\n  if (forwarding_enabled_) {\n    for (size_t i = 0; i < listeners_.size(); i++) {\n      listeners_[i]->OnTestIterationStart(unit_test, iteration);\n    }\n  }\n}\n\nvoid TestEventRepeater::OnTestIterationEnd(const UnitTest& unit_test,\n                                           int iteration) {\n  if (forwarding_enabled_) {\n    for (size_t i = listeners_.size(); i > 0; i--) {\n      listeners_[i - 1]->OnTestIterationEnd(unit_test, iteration);\n    }\n  }\n}\n\n// End TestEventRepeater\n\n// This class generates an XML output file.\nclass XmlUnitTestResultPrinter : public EmptyTestEventListener {\n public:\n  explicit XmlUnitTestResultPrinter(const char* output_file);\n\n  void OnTestIterationEnd(const UnitTest& unit_test, int iteration) override;\n  void ListTestsMatchingFilter(const std::vector<TestSuite*>& test_suites);\n\n  // Prints an XML summary of all unit tests.\n  static void PrintXmlTestsList(std::ostream* stream,\n                                const std::vector<TestSuite*>& test_suites);\n\n private:\n  // Is c a whitespace character that is normalized to a space 
character\n  // when it appears in an XML attribute value?\n  static bool IsNormalizableWhitespace(char c) {\n    return c == 0x9 || c == 0xA || c == 0xD;\n  }\n\n  // May c appear in a well-formed XML document?\n  static bool IsValidXmlCharacter(char c) {\n    return IsNormalizableWhitespace(c) || c >= 0x20;\n  }\n\n  // Returns an XML-escaped copy of the input string str.  If\n  // is_attribute is true, the text is meant to appear as an attribute\n  // value, and normalizable whitespace is preserved by replacing it\n  // with character references.\n  static std::string EscapeXml(const std::string& str, bool is_attribute);\n\n  // Returns the given string with all characters invalid in XML removed.\n  static std::string RemoveInvalidXmlCharacters(const std::string& str);\n\n  // Convenience wrapper around EscapeXml when str is an attribute value.\n  static std::string EscapeXmlAttribute(const std::string& str) {\n    return EscapeXml(str, true);\n  }\n\n  // Convenience wrapper around EscapeXml when str is not an attribute value.\n  static std::string EscapeXmlText(const char* str) {\n    return EscapeXml(str, false);\n  }\n\n  // Verifies that the given attribute belongs to the given element and\n  // streams the attribute as XML.\n  static void OutputXmlAttribute(std::ostream* stream,\n                                 const std::string& element_name,\n                                 const std::string& name,\n                                 const std::string& value);\n\n  // Streams an XML CDATA section, escaping invalid CDATA sequences as needed.\n  static void OutputXmlCDataSection(::std::ostream* stream, const char* data);\n\n  // Streams an XML representation of a TestInfo object.\n  static void OutputXmlTestInfo(::std::ostream* stream,\n                                const char* test_suite_name,\n                                const TestInfo& test_info);\n\n  // Prints an XML representation of a TestSuite object\n  static void 
PrintXmlTestSuite(::std::ostream* stream,\n                                const TestSuite& test_suite);\n\n  // Prints an XML summary of unit_test to output stream out.\n  static void PrintXmlUnitTest(::std::ostream* stream,\n                               const UnitTest& unit_test);\n\n  // Produces a string representing the test properties in a result as space\n  // delimited XML attributes based on the property key=\"value\" pairs.\n  // When the std::string is not empty, it includes a space at the beginning,\n  // to delimit this attribute from prior attributes.\n  static std::string TestPropertiesAsXmlAttributes(const TestResult& result);\n\n  // Streams an XML representation of the test properties of a TestResult\n  // object.\n  static void OutputXmlTestProperties(std::ostream* stream,\n                                      const TestResult& result);\n\n  // The output file.\n  const std::string output_file_;\n\n  GTEST_DISALLOW_COPY_AND_ASSIGN_(XmlUnitTestResultPrinter);\n};\n\n// Creates a new XmlUnitTestResultPrinter.\nXmlUnitTestResultPrinter::XmlUnitTestResultPrinter(const char* output_file)\n    : output_file_(output_file) {\n  if (output_file_.empty()) {\n    GTEST_LOG_(FATAL) << \"XML output file may not be null\";\n  }\n}\n\n// Called after the unit test ends.\nvoid XmlUnitTestResultPrinter::OnTestIterationEnd(const UnitTest& unit_test,\n                                                  int /*iteration*/) {\n  FILE* xmlout = OpenFileForWriting(output_file_);\n  std::stringstream stream;\n  PrintXmlUnitTest(&stream, unit_test);\n  fprintf(xmlout, \"%s\", StringStreamToString(&stream).c_str());\n  fclose(xmlout);\n}\n\nvoid XmlUnitTestResultPrinter::ListTestsMatchingFilter(\n    const std::vector<TestSuite*>& test_suites) {\n  FILE* xmlout = OpenFileForWriting(output_file_);\n  std::stringstream stream;\n  PrintXmlTestsList(&stream, test_suites);\n  fprintf(xmlout, \"%s\", StringStreamToString(&stream).c_str());\n  fclose(xmlout);\n}\n\n// Returns an 
XML-escaped copy of the input string str.  If is_attribute\n// is true, the text is meant to appear as an attribute value, and\n// normalizable whitespace is preserved by replacing it with character\n// references.\n//\n// Invalid XML characters in str, if any, are stripped from the output.\n// It is expected that most, if not all, of the text processed by this\n// module will consist of ordinary English text.\n// If this module is ever modified to produce version 1.1 XML output,\n// most invalid characters can be retained using character references.\nstd::string XmlUnitTestResultPrinter::EscapeXml(\n    const std::string& str, bool is_attribute) {\n  Message m;\n\n  for (size_t i = 0; i < str.size(); ++i) {\n    const char ch = str[i];\n    switch (ch) {\n      case '<':\n        m << \"&lt;\";\n        break;\n      case '>':\n        m << \"&gt;\";\n        break;\n      case '&':\n        m << \"&amp;\";\n        break;\n      case '\\'':\n        if (is_attribute)\n          m << \"&apos;\";\n        else\n          m << '\\'';\n        break;\n      case '\"':\n        if (is_attribute)\n          m << \"&quot;\";\n        else\n          m << '\"';\n        break;\n      default:\n        if (IsValidXmlCharacter(ch)) {\n          if (is_attribute && IsNormalizableWhitespace(ch))\n            m << \"&#x\" << String::FormatByte(static_cast<unsigned char>(ch))\n              << \";\";\n          else\n            m << ch;\n        }\n        break;\n    }\n  }\n\n  return m.GetString();\n}\n\n// Returns the given string with all characters invalid in XML removed.\n// Currently invalid characters are dropped from the string. An\n// alternative is to replace them with certain characters such as . 
or ?.\nstd::string XmlUnitTestResultPrinter::RemoveInvalidXmlCharacters(\n    const std::string& str) {\n  std::string output;\n  output.reserve(str.size());\n  for (std::string::const_iterator it = str.begin(); it != str.end(); ++it)\n    if (IsValidXmlCharacter(*it))\n      output.push_back(*it);\n\n  return output;\n}\n\n// The following routines generate an XML representation of a UnitTest\n// object.\n// GOOGLETEST_CM0009 DO NOT DELETE\n//\n// This is how Google Test concepts map to the DTD:\n//\n// <testsuites name=\"AllTests\">        <-- corresponds to a UnitTest object\n//   <testsuite name=\"testcase-name\">  <-- corresponds to a TestSuite object\n//     <testcase name=\"test-name\">     <-- corresponds to a TestInfo object\n//       <failure message=\"...\">...</failure>\n//       <failure message=\"...\">...</failure>\n//       <failure message=\"...\">...</failure>\n//                                     <-- individual assertion failures\n//     </testcase>\n//   </testsuite>\n// </testsuites>\n\n// Formats the given time in milliseconds as seconds.\nstd::string FormatTimeInMillisAsSeconds(TimeInMillis ms) {\n  ::std::stringstream ss;\n  ss << (static_cast<double>(ms) * 1e-3);\n  return ss.str();\n}\n\nstatic bool PortableLocaltime(time_t seconds, struct tm* out) {\n#if defined(_MSC_VER)\n  return localtime_s(out, &seconds) == 0;\n#elif defined(__MINGW32__) || defined(__MINGW64__)\n  // MINGW <time.h> provides neither localtime_r nor localtime_s, but uses\n  // Windows' localtime(), which has a thread-local tm buffer.\n  struct tm* tm_ptr = localtime(&seconds);  // NOLINT\n  if (tm_ptr == nullptr) return false;\n  *out = *tm_ptr;\n  return true;\n#else\n  return localtime_r(&seconds, out) != nullptr;\n#endif\n}\n\n// Converts the given epoch time in milliseconds to a date string in the ISO\n// 8601 format, without the timezone information.\nstd::string FormatEpochTimeInMillisAsIso8601(TimeInMillis ms) {\n  struct tm time_struct;\n  if 
(!PortableLocaltime(static_cast<time_t>(ms / 1000), &time_struct))\n    return \"\";\n  // YYYY-MM-DDThh:mm:ss\n  return StreamableToString(time_struct.tm_year + 1900) + \"-\" +\n      String::FormatIntWidth2(time_struct.tm_mon + 1) + \"-\" +\n      String::FormatIntWidth2(time_struct.tm_mday) + \"T\" +\n      String::FormatIntWidth2(time_struct.tm_hour) + \":\" +\n      String::FormatIntWidth2(time_struct.tm_min) + \":\" +\n      String::FormatIntWidth2(time_struct.tm_sec);\n}\n\n// Streams an XML CDATA section, escaping invalid CDATA sequences as needed.\nvoid XmlUnitTestResultPrinter::OutputXmlCDataSection(::std::ostream* stream,\n                                                     const char* data) {\n  const char* segment = data;\n  *stream << \"<![CDATA[\";\n  for (;;) {\n    const char* const next_segment = strstr(segment, \"]]>\");\n    if (next_segment != nullptr) {\n      stream->write(\n          segment, static_cast<std::streamsize>(next_segment - segment));\n      *stream << \"]]>]]&gt;<![CDATA[\";\n      segment = next_segment + strlen(\"]]>\");\n    } else {\n      *stream << segment;\n      break;\n    }\n  }\n  *stream << \"]]>\";\n}\n\nvoid XmlUnitTestResultPrinter::OutputXmlAttribute(\n    std::ostream* stream,\n    const std::string& element_name,\n    const std::string& name,\n    const std::string& value) {\n  const std::vector<std::string>& allowed_names =\n      GetReservedOutputAttributesForElement(element_name);\n\n  GTEST_CHECK_(std::find(allowed_names.begin(), allowed_names.end(), name) !=\n                   allowed_names.end())\n      << \"Attribute \" << name << \" is not allowed for element <\" << element_name\n      << \">.\";\n\n  *stream << \" \" << name << \"=\\\"\" << EscapeXmlAttribute(value) << \"\\\"\";\n}\n\n// Prints an XML representation of a TestInfo object.\nvoid XmlUnitTestResultPrinter::OutputXmlTestInfo(::std::ostream* stream,\n                                                 const char* test_suite_name,\n            
                                     const TestInfo& test_info) {\n  const TestResult& result = *test_info.result();\n  const std::string kTestsuite = \"testcase\";\n\n  if (test_info.is_in_another_shard()) {\n    return;\n  }\n\n  *stream << \"    <testcase\";\n  OutputXmlAttribute(stream, kTestsuite, \"name\", test_info.name());\n\n  if (test_info.value_param() != nullptr) {\n    OutputXmlAttribute(stream, kTestsuite, \"value_param\",\n                       test_info.value_param());\n  }\n  if (test_info.type_param() != nullptr) {\n    OutputXmlAttribute(stream, kTestsuite, \"type_param\",\n                       test_info.type_param());\n  }\n  if (GTEST_FLAG(list_tests)) {\n    OutputXmlAttribute(stream, kTestsuite, \"file\", test_info.file());\n    OutputXmlAttribute(stream, kTestsuite, \"line\",\n                       StreamableToString(test_info.line()));\n    *stream << \" />\\n\";\n    return;\n  }\n\n  OutputXmlAttribute(stream, kTestsuite, \"status\",\n                     test_info.should_run() ? \"run\" : \"notrun\");\n  OutputXmlAttribute(stream, kTestsuite, \"result\",\n                     test_info.should_run()\n                         ? (result.Skipped() ? 
\"skipped\" : \"completed\")\n                         : \"suppressed\");\n  OutputXmlAttribute(stream, kTestsuite, \"time\",\n                     FormatTimeInMillisAsSeconds(result.elapsed_time()));\n  OutputXmlAttribute(\n      stream, kTestsuite, \"timestamp\",\n      FormatEpochTimeInMillisAsIso8601(result.start_timestamp()));\n  OutputXmlAttribute(stream, kTestsuite, \"classname\", test_suite_name);\n\n  int failures = 0;\n  for (int i = 0; i < result.total_part_count(); ++i) {\n    const TestPartResult& part = result.GetTestPartResult(i);\n    if (part.failed()) {\n      if (++failures == 1) {\n        *stream << \">\\n\";\n      }\n      const std::string location =\n          internal::FormatCompilerIndependentFileLocation(part.file_name(),\n                                                          part.line_number());\n      const std::string summary = location + \"\\n\" + part.summary();\n      *stream << \"      <failure message=\\\"\"\n              << EscapeXmlAttribute(summary.c_str())\n              << \"\\\" type=\\\"\\\">\";\n      const std::string detail = location + \"\\n\" + part.message();\n      OutputXmlCDataSection(stream, RemoveInvalidXmlCharacters(detail).c_str());\n      *stream << \"</failure>\\n\";\n    }\n  }\n\n  if (failures == 0 && result.test_property_count() == 0) {\n    *stream << \" />\\n\";\n  } else {\n    if (failures == 0) {\n      *stream << \">\\n\";\n    }\n    OutputXmlTestProperties(stream, result);\n    *stream << \"    </testcase>\\n\";\n  }\n}\n\n// Prints an XML representation of a TestSuite object\nvoid XmlUnitTestResultPrinter::PrintXmlTestSuite(std::ostream* stream,\n                                                 const TestSuite& test_suite) {\n  const std::string kTestsuite = \"testsuite\";\n  *stream << \"  <\" << kTestsuite;\n  OutputXmlAttribute(stream, kTestsuite, \"name\", test_suite.name());\n  OutputXmlAttribute(stream, kTestsuite, \"tests\",\n                     
StreamableToString(test_suite.reportable_test_count()));\n  if (!GTEST_FLAG(list_tests)) {\n    OutputXmlAttribute(stream, kTestsuite, \"failures\",\n                       StreamableToString(test_suite.failed_test_count()));\n    OutputXmlAttribute(\n        stream, kTestsuite, \"disabled\",\n        StreamableToString(test_suite.reportable_disabled_test_count()));\n    OutputXmlAttribute(stream, kTestsuite, \"errors\", \"0\");\n    OutputXmlAttribute(stream, kTestsuite, \"time\",\n                       FormatTimeInMillisAsSeconds(test_suite.elapsed_time()));\n    OutputXmlAttribute(\n        stream, kTestsuite, \"timestamp\",\n        FormatEpochTimeInMillisAsIso8601(test_suite.start_timestamp()));\n    *stream << TestPropertiesAsXmlAttributes(test_suite.ad_hoc_test_result());\n  }\n  *stream << \">\\n\";\n  for (int i = 0; i < test_suite.total_test_count(); ++i) {\n    if (test_suite.GetTestInfo(i)->is_reportable())\n      OutputXmlTestInfo(stream, test_suite.name(), *test_suite.GetTestInfo(i));\n  }\n  *stream << \"  </\" << kTestsuite << \">\\n\";\n}\n\n// Prints an XML summary of unit_test to output stream out.\nvoid XmlUnitTestResultPrinter::PrintXmlUnitTest(std::ostream* stream,\n                                                const UnitTest& unit_test) {\n  const std::string kTestsuites = \"testsuites\";\n\n  *stream << \"<?xml version=\\\"1.0\\\" encoding=\\\"UTF-8\\\"?>\\n\";\n  *stream << \"<\" << kTestsuites;\n\n  OutputXmlAttribute(stream, kTestsuites, \"tests\",\n                     StreamableToString(unit_test.reportable_test_count()));\n  OutputXmlAttribute(stream, kTestsuites, \"failures\",\n                     StreamableToString(unit_test.failed_test_count()));\n  OutputXmlAttribute(\n      stream, kTestsuites, \"disabled\",\n      StreamableToString(unit_test.reportable_disabled_test_count()));\n  OutputXmlAttribute(stream, kTestsuites, \"errors\", \"0\");\n  OutputXmlAttribute(stream, kTestsuites, \"time\",\n                     
FormatTimeInMillisAsSeconds(unit_test.elapsed_time()));\n  OutputXmlAttribute(\n      stream, kTestsuites, \"timestamp\",\n      FormatEpochTimeInMillisAsIso8601(unit_test.start_timestamp()));\n\n  if (GTEST_FLAG(shuffle)) {\n    OutputXmlAttribute(stream, kTestsuites, \"random_seed\",\n                       StreamableToString(unit_test.random_seed()));\n  }\n  *stream << TestPropertiesAsXmlAttributes(unit_test.ad_hoc_test_result());\n\n  OutputXmlAttribute(stream, kTestsuites, \"name\", \"AllTests\");\n  *stream << \">\\n\";\n\n  for (int i = 0; i < unit_test.total_test_suite_count(); ++i) {\n    if (unit_test.GetTestSuite(i)->reportable_test_count() > 0)\n      PrintXmlTestSuite(stream, *unit_test.GetTestSuite(i));\n  }\n  *stream << \"</\" << kTestsuites << \">\\n\";\n}\n\nvoid XmlUnitTestResultPrinter::PrintXmlTestsList(\n    std::ostream* stream, const std::vector<TestSuite*>& test_suites) {\n  const std::string kTestsuites = \"testsuites\";\n\n  *stream << \"<?xml version=\\\"1.0\\\" encoding=\\\"UTF-8\\\"?>\\n\";\n  *stream << \"<\" << kTestsuites;\n\n  int total_tests = 0;\n  for (auto test_suite : test_suites) {\n    total_tests += test_suite->total_test_count();\n  }\n  OutputXmlAttribute(stream, kTestsuites, \"tests\",\n                     StreamableToString(total_tests));\n  OutputXmlAttribute(stream, kTestsuites, \"name\", \"AllTests\");\n  *stream << \">\\n\";\n\n  for (auto test_suite : test_suites) {\n    PrintXmlTestSuite(stream, *test_suite);\n  }\n  *stream << \"</\" << kTestsuites << \">\\n\";\n}\n\n// Produces a string representing the test properties in a result as space\n// delimited XML attributes based on the property key=\"value\" pairs.\nstd::string XmlUnitTestResultPrinter::TestPropertiesAsXmlAttributes(\n    const TestResult& result) {\n  Message attributes;\n  for (int i = 0; i < result.test_property_count(); ++i) {\n    const TestProperty& property = result.GetTestProperty(i);\n    attributes << \" \" << property.key() << \"=\"\n    
    << \"\\\"\" << EscapeXmlAttribute(property.value()) << \"\\\"\";\n  }\n  return attributes.GetString();\n}\n\nvoid XmlUnitTestResultPrinter::OutputXmlTestProperties(\n    std::ostream* stream, const TestResult& result) {\n  const std::string kProperties = \"properties\";\n  const std::string kProperty = \"property\";\n\n  if (result.test_property_count() <= 0) {\n    return;\n  }\n\n  *stream << \"<\" << kProperties << \">\\n\";\n  for (int i = 0; i < result.test_property_count(); ++i) {\n    const TestProperty& property = result.GetTestProperty(i);\n    *stream << \"<\" << kProperty;\n    *stream << \" name=\\\"\" << EscapeXmlAttribute(property.key()) << \"\\\"\";\n    *stream << \" value=\\\"\" << EscapeXmlAttribute(property.value()) << \"\\\"\";\n    *stream << \"/>\\n\";\n  }\n  *stream << \"</\" << kProperties << \">\\n\";\n}\n\n// End XmlUnitTestResultPrinter\n\n// This class generates an JSON output file.\nclass JsonUnitTestResultPrinter : public EmptyTestEventListener {\n public:\n  explicit JsonUnitTestResultPrinter(const char* output_file);\n\n  void OnTestIterationEnd(const UnitTest& unit_test, int iteration) override;\n\n  // Prints an JSON summary of all unit tests.\n  static void PrintJsonTestList(::std::ostream* stream,\n                                const std::vector<TestSuite*>& test_suites);\n\n private:\n  // Returns an JSON-escaped copy of the input string str.\n  static std::string EscapeJson(const std::string& str);\n\n  //// Verifies that the given attribute belongs to the given element and\n  //// streams the attribute as JSON.\n  static void OutputJsonKey(std::ostream* stream,\n                            const std::string& element_name,\n                            const std::string& name,\n                            const std::string& value,\n                            const std::string& indent,\n                            bool comma = true);\n  static void OutputJsonKey(std::ostream* stream,\n                            const 
std::string& element_name,\n                            const std::string& name,\n                            int value,\n                            const std::string& indent,\n                            bool comma = true);\n\n  // Streams a JSON representation of a TestInfo object.\n  static void OutputJsonTestInfo(::std::ostream* stream,\n                                 const char* test_suite_name,\n                                 const TestInfo& test_info);\n\n  // Prints a JSON representation of a TestSuite object\n  static void PrintJsonTestSuite(::std::ostream* stream,\n                                 const TestSuite& test_suite);\n\n  // Prints a JSON summary of unit_test to output stream out.\n  static void PrintJsonUnitTest(::std::ostream* stream,\n                                const UnitTest& unit_test);\n\n  // Produces a string representing the test properties in a result as\n  // a JSON dictionary.\n  static std::string TestPropertiesAsJson(const TestResult& result,\n                                          const std::string& indent);\n\n  // The output file.\n  const std::string output_file_;\n\n  GTEST_DISALLOW_COPY_AND_ASSIGN_(JsonUnitTestResultPrinter);\n};\n\n// Creates a new JsonUnitTestResultPrinter.\nJsonUnitTestResultPrinter::JsonUnitTestResultPrinter(const char* output_file)\n    : output_file_(output_file) {\n  if (output_file_.empty()) {\n    GTEST_LOG_(FATAL) << \"JSON output file may not be null\";\n  }\n}\n\nvoid JsonUnitTestResultPrinter::OnTestIterationEnd(const UnitTest& unit_test,\n                                                  int /*iteration*/) {\n  FILE* jsonout = OpenFileForWriting(output_file_);\n  std::stringstream stream;\n  PrintJsonUnitTest(&stream, unit_test);\n  fprintf(jsonout, \"%s\", StringStreamToString(&stream).c_str());\n  fclose(jsonout);\n}\n\n// Returns an JSON-escaped copy of the input string str.\nstd::string JsonUnitTestResultPrinter::EscapeJson(const std::string& str) {\n  Message m;\n\n  for 
(size_t i = 0; i < str.size(); ++i) {\n    const char ch = str[i];\n    switch (ch) {\n      case '\\\\':\n      case '\"':\n      case '/':\n        m << '\\\\' << ch;\n        break;\n      case '\\b':\n        m << \"\\\\b\";\n        break;\n      case '\\t':\n        m << \"\\\\t\";\n        break;\n      case '\\n':\n        m << \"\\\\n\";\n        break;\n      case '\\f':\n        m << \"\\\\f\";\n        break;\n      case '\\r':\n        m << \"\\\\r\";\n        break;\n      default:\n        if (ch < ' ') {\n          m << \"\\\\u00\" << String::FormatByte(static_cast<unsigned char>(ch));\n        } else {\n          m << ch;\n        }\n        break;\n    }\n  }\n\n  return m.GetString();\n}\n\n// The following routines generate an JSON representation of a UnitTest\n// object.\n\n// Formats the given time in milliseconds as seconds.\nstatic std::string FormatTimeInMillisAsDuration(TimeInMillis ms) {\n  ::std::stringstream ss;\n  ss << (static_cast<double>(ms) * 1e-3) << \"s\";\n  return ss.str();\n}\n\n// Converts the given epoch time in milliseconds to a date string in the\n// RFC3339 format, without the timezone information.\nstatic std::string FormatEpochTimeInMillisAsRFC3339(TimeInMillis ms) {\n  struct tm time_struct;\n  if (!PortableLocaltime(static_cast<time_t>(ms / 1000), &time_struct))\n    return \"\";\n  // YYYY-MM-DDThh:mm:ss\n  return StreamableToString(time_struct.tm_year + 1900) + \"-\" +\n      String::FormatIntWidth2(time_struct.tm_mon + 1) + \"-\" +\n      String::FormatIntWidth2(time_struct.tm_mday) + \"T\" +\n      String::FormatIntWidth2(time_struct.tm_hour) + \":\" +\n      String::FormatIntWidth2(time_struct.tm_min) + \":\" +\n      String::FormatIntWidth2(time_struct.tm_sec) + \"Z\";\n}\n\nstatic inline std::string Indent(size_t width) {\n  return std::string(width, ' ');\n}\n\nvoid JsonUnitTestResultPrinter::OutputJsonKey(\n    std::ostream* stream,\n    const std::string& element_name,\n    const std::string& name,\n    
const std::string& value,\n    const std::string& indent,\n    bool comma) {\n  const std::vector<std::string>& allowed_names =\n      GetReservedOutputAttributesForElement(element_name);\n\n  GTEST_CHECK_(std::find(allowed_names.begin(), allowed_names.end(), name) !=\n                   allowed_names.end())\n      << \"Key \\\"\" << name << \"\\\" is not allowed for value \\\"\" << element_name\n      << \"\\\".\";\n\n  *stream << indent << \"\\\"\" << name << \"\\\": \\\"\" << EscapeJson(value) << \"\\\"\";\n  if (comma)\n    *stream << \",\\n\";\n}\n\nvoid JsonUnitTestResultPrinter::OutputJsonKey(\n    std::ostream* stream,\n    const std::string& element_name,\n    const std::string& name,\n    int value,\n    const std::string& indent,\n    bool comma) {\n  const std::vector<std::string>& allowed_names =\n      GetReservedOutputAttributesForElement(element_name);\n\n  GTEST_CHECK_(std::find(allowed_names.begin(), allowed_names.end(), name) !=\n                   allowed_names.end())\n      << \"Key \\\"\" << name << \"\\\" is not allowed for value \\\"\" << element_name\n      << \"\\\".\";\n\n  *stream << indent << \"\\\"\" << name << \"\\\": \" << StreamableToString(value);\n  if (comma)\n    *stream << \",\\n\";\n}\n\n// Prints a JSON representation of a TestInfo object.\nvoid JsonUnitTestResultPrinter::OutputJsonTestInfo(::std::ostream* stream,\n                                                   const char* test_suite_name,\n                                                   const TestInfo& test_info) {\n  const TestResult& result = *test_info.result();\n  const std::string kTestsuite = \"testcase\";\n  const std::string kIndent = Indent(10);\n\n  *stream << Indent(8) << \"{\\n\";\n  OutputJsonKey(stream, kTestsuite, \"name\", test_info.name(), kIndent);\n\n  if (test_info.value_param() != nullptr) {\n    OutputJsonKey(stream, kTestsuite, \"value_param\", test_info.value_param(),\n                  kIndent);\n  }\n  if (test_info.type_param() != nullptr) 
{\n    OutputJsonKey(stream, kTestsuite, \"type_param\", test_info.type_param(),\n                  kIndent);\n  }\n  if (GTEST_FLAG(list_tests)) {\n    OutputJsonKey(stream, kTestsuite, \"file\", test_info.file(), kIndent);\n    OutputJsonKey(stream, kTestsuite, \"line\", test_info.line(), kIndent, false);\n    *stream << \"\\n\" << Indent(8) << \"}\";\n    return;\n  }\n\n  OutputJsonKey(stream, kTestsuite, \"status\",\n                test_info.should_run() ? \"RUN\" : \"NOTRUN\", kIndent);\n  OutputJsonKey(stream, kTestsuite, \"result\",\n                test_info.should_run()\n                    ? (result.Skipped() ? \"SKIPPED\" : \"COMPLETED\")\n                    : \"SUPPRESSED\",\n                kIndent);\n  OutputJsonKey(stream, kTestsuite, \"timestamp\",\n                FormatEpochTimeInMillisAsRFC3339(result.start_timestamp()),\n                kIndent);\n  OutputJsonKey(stream, kTestsuite, \"time\",\n                FormatTimeInMillisAsDuration(result.elapsed_time()), kIndent);\n  OutputJsonKey(stream, kTestsuite, \"classname\", test_suite_name, kIndent,\n                false);\n  *stream << TestPropertiesAsJson(result, kIndent);\n\n  int failures = 0;\n  for (int i = 0; i < result.total_part_count(); ++i) {\n    const TestPartResult& part = result.GetTestPartResult(i);\n    if (part.failed()) {\n      *stream << \",\\n\";\n      if (++failures == 1) {\n        *stream << kIndent << \"\\\"\" << \"failures\" << \"\\\": [\\n\";\n      }\n      const std::string location =\n          internal::FormatCompilerIndependentFileLocation(part.file_name(),\n                                                          part.line_number());\n      const std::string message = EscapeJson(location + \"\\n\" + part.message());\n      *stream << kIndent << \"  {\\n\"\n              << kIndent << \"    \\\"failure\\\": \\\"\" << message << \"\\\",\\n\"\n              << kIndent << \"    \\\"type\\\": \\\"\\\"\\n\"\n              << kIndent << \"  }\";\n    }\n  }\n\n  if 
(failures > 0)\n    *stream << \"\\n\" << kIndent << \"]\";\n  *stream << \"\\n\" << Indent(8) << \"}\";\n}\n\n// Prints an JSON representation of a TestSuite object\nvoid JsonUnitTestResultPrinter::PrintJsonTestSuite(\n    std::ostream* stream, const TestSuite& test_suite) {\n  const std::string kTestsuite = \"testsuite\";\n  const std::string kIndent = Indent(6);\n\n  *stream << Indent(4) << \"{\\n\";\n  OutputJsonKey(stream, kTestsuite, \"name\", test_suite.name(), kIndent);\n  OutputJsonKey(stream, kTestsuite, \"tests\", test_suite.reportable_test_count(),\n                kIndent);\n  if (!GTEST_FLAG(list_tests)) {\n    OutputJsonKey(stream, kTestsuite, \"failures\",\n                  test_suite.failed_test_count(), kIndent);\n    OutputJsonKey(stream, kTestsuite, \"disabled\",\n                  test_suite.reportable_disabled_test_count(), kIndent);\n    OutputJsonKey(stream, kTestsuite, \"errors\", 0, kIndent);\n    OutputJsonKey(\n        stream, kTestsuite, \"timestamp\",\n        FormatEpochTimeInMillisAsRFC3339(test_suite.start_timestamp()),\n        kIndent);\n    OutputJsonKey(stream, kTestsuite, \"time\",\n                  FormatTimeInMillisAsDuration(test_suite.elapsed_time()),\n                  kIndent, false);\n    *stream << TestPropertiesAsJson(test_suite.ad_hoc_test_result(), kIndent)\n            << \",\\n\";\n  }\n\n  *stream << kIndent << \"\\\"\" << kTestsuite << \"\\\": [\\n\";\n\n  bool comma = false;\n  for (int i = 0; i < test_suite.total_test_count(); ++i) {\n    if (test_suite.GetTestInfo(i)->is_reportable()) {\n      if (comma) {\n        *stream << \",\\n\";\n      } else {\n        comma = true;\n      }\n      OutputJsonTestInfo(stream, test_suite.name(), *test_suite.GetTestInfo(i));\n    }\n  }\n  *stream << \"\\n\" << kIndent << \"]\\n\" << Indent(4) << \"}\";\n}\n\n// Prints a JSON summary of unit_test to output stream out.\nvoid JsonUnitTestResultPrinter::PrintJsonUnitTest(std::ostream* stream,\n                              
                    const UnitTest& unit_test) {\n  const std::string kTestsuites = \"testsuites\";\n  const std::string kIndent = Indent(2);\n  *stream << \"{\\n\";\n\n  OutputJsonKey(stream, kTestsuites, \"tests\", unit_test.reportable_test_count(),\n                kIndent);\n  OutputJsonKey(stream, kTestsuites, \"failures\", unit_test.failed_test_count(),\n                kIndent);\n  OutputJsonKey(stream, kTestsuites, \"disabled\",\n                unit_test.reportable_disabled_test_count(), kIndent);\n  OutputJsonKey(stream, kTestsuites, \"errors\", 0, kIndent);\n  if (GTEST_FLAG(shuffle)) {\n    OutputJsonKey(stream, kTestsuites, \"random_seed\", unit_test.random_seed(),\n                  kIndent);\n  }\n  OutputJsonKey(stream, kTestsuites, \"timestamp\",\n                FormatEpochTimeInMillisAsRFC3339(unit_test.start_timestamp()),\n                kIndent);\n  OutputJsonKey(stream, kTestsuites, \"time\",\n                FormatTimeInMillisAsDuration(unit_test.elapsed_time()), kIndent,\n                false);\n\n  *stream << TestPropertiesAsJson(unit_test.ad_hoc_test_result(), kIndent)\n          << \",\\n\";\n\n  OutputJsonKey(stream, kTestsuites, \"name\", \"AllTests\", kIndent);\n  *stream << kIndent << \"\\\"\" << kTestsuites << \"\\\": [\\n\";\n\n  bool comma = false;\n  for (int i = 0; i < unit_test.total_test_suite_count(); ++i) {\n    if (unit_test.GetTestSuite(i)->reportable_test_count() > 0) {\n      if (comma) {\n        *stream << \",\\n\";\n      } else {\n        comma = true;\n      }\n      PrintJsonTestSuite(stream, *unit_test.GetTestSuite(i));\n    }\n  }\n\n  *stream << \"\\n\" << kIndent << \"]\\n\" << \"}\\n\";\n}\n\nvoid JsonUnitTestResultPrinter::PrintJsonTestList(\n    std::ostream* stream, const std::vector<TestSuite*>& test_suites) {\n  const std::string kTestsuites = \"testsuites\";\n  const std::string kIndent = Indent(2);\n  *stream << \"{\\n\";\n  int total_tests = 0;\n  for (auto test_suite : test_suites) {\n    total_tests 
+= test_suite->total_test_count();\n  }\n  OutputJsonKey(stream, kTestsuites, \"tests\", total_tests, kIndent);\n\n  OutputJsonKey(stream, kTestsuites, \"name\", \"AllTests\", kIndent);\n  *stream << kIndent << \"\\\"\" << kTestsuites << \"\\\": [\\n\";\n\n  for (size_t i = 0; i < test_suites.size(); ++i) {\n    if (i != 0) {\n      *stream << \",\\n\";\n    }\n    PrintJsonTestSuite(stream, *test_suites[i]);\n  }\n\n  *stream << \"\\n\"\n          << kIndent << \"]\\n\"\n          << \"}\\n\";\n}\n// Produces a string representing the test properties in a result as\n// a JSON dictionary.\nstd::string JsonUnitTestResultPrinter::TestPropertiesAsJson(\n    const TestResult& result, const std::string& indent) {\n  Message attributes;\n  for (int i = 0; i < result.test_property_count(); ++i) {\n    const TestProperty& property = result.GetTestProperty(i);\n    attributes << \",\\n\" << indent << \"\\\"\" << property.key() << \"\\\": \"\n               << \"\\\"\" << EscapeJson(property.value()) << \"\\\"\";\n  }\n  return attributes.GetString();\n}\n\n// End JsonUnitTestResultPrinter\n\n#if GTEST_CAN_STREAM_RESULTS_\n\n// Checks if str contains '=', '&', '%' or '\\n' characters. If yes,\n// replaces them by \"%xx\" where xx is their hexadecimal value. For\n// example, replaces \"=\" with \"%3D\".  
This algorithm is O(strlen(str))\n// in both time and space -- important as the input str may contain an\n// arbitrarily long test failure message and stack trace.\nstd::string StreamingListener::UrlEncode(const char* str) {\n  std::string result;\n  result.reserve(strlen(str) + 1);\n  for (char ch = *str; ch != '\\0'; ch = *++str) {\n    switch (ch) {\n      case '%':\n      case '=':\n      case '&':\n      case '\\n':\n        result.append(\"%\" + String::FormatByte(static_cast<unsigned char>(ch)));\n        break;\n      default:\n        result.push_back(ch);\n        break;\n    }\n  }\n  return result;\n}\n\nvoid StreamingListener::SocketWriter::MakeConnection() {\n  GTEST_CHECK_(sockfd_ == -1)\n      << \"MakeConnection() can't be called when there is already a connection.\";\n\n  addrinfo hints;\n  memset(&hints, 0, sizeof(hints));\n  hints.ai_family = AF_UNSPEC;    // To allow both IPv4 and IPv6 addresses.\n  hints.ai_socktype = SOCK_STREAM;\n  addrinfo* servinfo = nullptr;\n\n  // Use the getaddrinfo() to get a linked list of IP addresses for\n  // the given host name.\n  const int error_num = getaddrinfo(\n      host_name_.c_str(), port_num_.c_str(), &hints, &servinfo);\n  if (error_num != 0) {\n    GTEST_LOG_(WARNING) << \"stream_result_to: getaddrinfo() failed: \"\n                        << gai_strerror(error_num);\n  }\n\n  // Loop through all the results and connect to the first we can.\n  for (addrinfo* cur_addr = servinfo; sockfd_ == -1 && cur_addr != nullptr;\n       cur_addr = cur_addr->ai_next) {\n    sockfd_ = socket(\n        cur_addr->ai_family, cur_addr->ai_socktype, cur_addr->ai_protocol);\n    if (sockfd_ != -1) {\n      // Connect the client socket to the server socket.\n      if (connect(sockfd_, cur_addr->ai_addr, cur_addr->ai_addrlen) == -1) {\n        close(sockfd_);\n        sockfd_ = -1;\n      }\n    }\n  }\n\n  freeaddrinfo(servinfo);  // all done with this structure\n\n  if (sockfd_ == -1) {\n    GTEST_LOG_(WARNING) << 
\"stream_result_to: failed to connect to \"\n                        << host_name_ << \":\" << port_num_;\n  }\n}\n\n// End of class Streaming Listener\n#endif  // GTEST_CAN_STREAM_RESULTS__\n\n// class OsStackTraceGetter\n\nconst char* const OsStackTraceGetterInterface::kElidedFramesMarker =\n    \"... \" GTEST_NAME_ \" internal frames ...\";\n\nstd::string OsStackTraceGetter::CurrentStackTrace(int max_depth, int skip_count)\n    GTEST_LOCK_EXCLUDED_(mutex_) {\n#if GTEST_HAS_ABSL\n  std::string result;\n\n  if (max_depth <= 0) {\n    return result;\n  }\n\n  max_depth = std::min(max_depth, kMaxStackTraceDepth);\n\n  std::vector<void*> raw_stack(max_depth);\n  // Skips the frames requested by the caller, plus this function.\n  const int raw_stack_size =\n      absl::GetStackTrace(&raw_stack[0], max_depth, skip_count + 1);\n\n  void* caller_frame = nullptr;\n  {\n    MutexLock lock(&mutex_);\n    caller_frame = caller_frame_;\n  }\n\n  for (int i = 0; i < raw_stack_size; ++i) {\n    if (raw_stack[i] == caller_frame &&\n        !GTEST_FLAG(show_internal_stack_frames)) {\n      // Add a marker to the trace and stop adding frames.\n      absl::StrAppend(&result, kElidedFramesMarker, \"\\n\");\n      break;\n    }\n\n    char tmp[1024];\n    const char* symbol = \"(unknown)\";\n    if (absl::Symbolize(raw_stack[i], tmp, sizeof(tmp))) {\n      symbol = tmp;\n    }\n\n    char line[1024];\n    snprintf(line, sizeof(line), \"  %p: %s\\n\", raw_stack[i], symbol);\n    result += line;\n  }\n\n  return result;\n\n#else  // !GTEST_HAS_ABSL\n  static_cast<void>(max_depth);\n  static_cast<void>(skip_count);\n  return \"\";\n#endif  // GTEST_HAS_ABSL\n}\n\nvoid OsStackTraceGetter::UponLeavingGTest() GTEST_LOCK_EXCLUDED_(mutex_) {\n#if GTEST_HAS_ABSL\n  void* caller_frame = nullptr;\n  if (absl::GetStackTrace(&caller_frame, 1, 3) <= 0) {\n    caller_frame = nullptr;\n  }\n\n  MutexLock lock(&mutex_);\n  caller_frame_ = caller_frame;\n#endif  // GTEST_HAS_ABSL\n}\n\n// A helper 
class that creates the premature-exit file in its\n// constructor and deletes the file in its destructor.\nclass ScopedPrematureExitFile {\n public:\n  explicit ScopedPrematureExitFile(const char* premature_exit_filepath)\n      : premature_exit_filepath_(premature_exit_filepath ?\n                                 premature_exit_filepath : \"\") {\n    // If a path to the premature-exit file is specified...\n    if (!premature_exit_filepath_.empty()) {\n      // create the file with a single \"0\" character in it.  I/O\n      // errors are ignored as there's nothing better we can do and we\n      // don't want to fail the test because of this.\n      FILE* pfile = posix::FOpen(premature_exit_filepath, \"w\");\n      fwrite(\"0\", 1, 1, pfile);\n      fclose(pfile);\n    }\n  }\n\n  ~ScopedPrematureExitFile() {\n#if !defined GTEST_OS_ESP8266\n    if (!premature_exit_filepath_.empty()) {\n      int retval = remove(premature_exit_filepath_.c_str());\n      if (retval) {\n        GTEST_LOG_(ERROR) << \"Failed to remove premature exit filepath \\\"\"\n                          << premature_exit_filepath_ << \"\\\" with error \"\n                          << retval;\n      }\n    }\n#endif\n  }\n\n private:\n  const std::string premature_exit_filepath_;\n\n  GTEST_DISALLOW_COPY_AND_ASSIGN_(ScopedPrematureExitFile);\n};\n\n}  // namespace internal\n\n// class TestEventListeners\n\nTestEventListeners::TestEventListeners()\n    : repeater_(new internal::TestEventRepeater()),\n      default_result_printer_(nullptr),\n      default_xml_generator_(nullptr) {}\n\nTestEventListeners::~TestEventListeners() { delete repeater_; }\n\n// Returns the standard listener responsible for the default console\n// output.  Can be removed from the listeners list to shut down default\n// console output.  
Note that removing this object from the listener list\n// with Release transfers its ownership to the user.\nvoid TestEventListeners::Append(TestEventListener* listener) {\n  repeater_->Append(listener);\n}\n\n// Removes the given event listener from the list and returns it.  It then\n// becomes the caller's responsibility to delete the listener. Returns\n// NULL if the listener is not found in the list.\nTestEventListener* TestEventListeners::Release(TestEventListener* listener) {\n  if (listener == default_result_printer_)\n    default_result_printer_ = nullptr;\n  else if (listener == default_xml_generator_)\n    default_xml_generator_ = nullptr;\n  return repeater_->Release(listener);\n}\n\n// Returns repeater that broadcasts the TestEventListener events to all\n// subscribers.\nTestEventListener* TestEventListeners::repeater() { return repeater_; }\n\n// Sets the default_result_printer attribute to the provided listener.\n// The listener is also added to the listener list and previous\n// default_result_printer is removed from it and deleted. The listener can\n// also be NULL in which case it will not be added to the list. Does\n// nothing if the previous and the current listener objects are the same.\nvoid TestEventListeners::SetDefaultResultPrinter(TestEventListener* listener) {\n  if (default_result_printer_ != listener) {\n    // It is an error to pass this method a listener that is already in the\n    // list.\n    delete Release(default_result_printer_);\n    default_result_printer_ = listener;\n    if (listener != nullptr) Append(listener);\n  }\n}\n\n// Sets the default_xml_generator attribute to the provided listener.  The\n// listener is also added to the listener list and previous\n// default_xml_generator is removed from it and deleted. The listener can\n// also be NULL in which case it will not be added to the list. 
Does\n// nothing if the previous and the current listener objects are the same.\nvoid TestEventListeners::SetDefaultXmlGenerator(TestEventListener* listener) {\n  if (default_xml_generator_ != listener) {\n    // It is an error to pass this method a listener that is already in the\n    // list.\n    delete Release(default_xml_generator_);\n    default_xml_generator_ = listener;\n    if (listener != nullptr) Append(listener);\n  }\n}\n\n// Controls whether events will be forwarded by the repeater to the\n// listeners in the list.\nbool TestEventListeners::EventForwardingEnabled() const {\n  return repeater_->forwarding_enabled();\n}\n\nvoid TestEventListeners::SuppressEventForwarding() {\n  repeater_->set_forwarding_enabled(false);\n}\n\n// class UnitTest\n\n// Gets the singleton UnitTest object.  The first time this method is\n// called, a UnitTest object is constructed and returned.  Consecutive\n// calls will return the same object.\n//\n// We don't protect this under mutex_ as a user is not supposed to\n// call this before main() starts, from which point on the return\n// value will never change.\nUnitTest* UnitTest::GetInstance() {\n  // CodeGear C++Builder insists on a public destructor for the\n  // default implementation.  
Use this implementation to keep good OO\n  // design with private destructor.\n\n#if defined(__BORLANDC__)\n  static UnitTest* const instance = new UnitTest;\n  return instance;\n#else\n  static UnitTest instance;\n  return &instance;\n#endif  // defined(__BORLANDC__)\n}\n\n// Gets the number of successful test suites.\nint UnitTest::successful_test_suite_count() const {\n  return impl()->successful_test_suite_count();\n}\n\n// Gets the number of failed test suites.\nint UnitTest::failed_test_suite_count() const {\n  return impl()->failed_test_suite_count();\n}\n\n// Gets the number of all test suites.\nint UnitTest::total_test_suite_count() const {\n  return impl()->total_test_suite_count();\n}\n\n// Gets the number of all test suites that contain at least one test\n// that should run.\nint UnitTest::test_suite_to_run_count() const {\n  return impl()->test_suite_to_run_count();\n}\n\n//  Legacy API is deprecated but still available\n#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_\nint UnitTest::successful_test_case_count() const {\n  return impl()->successful_test_suite_count();\n}\nint UnitTest::failed_test_case_count() const {\n  return impl()->failed_test_suite_count();\n}\nint UnitTest::total_test_case_count() const {\n  return impl()->total_test_suite_count();\n}\nint UnitTest::test_case_to_run_count() const {\n  return impl()->test_suite_to_run_count();\n}\n#endif  //  GTEST_REMOVE_LEGACY_TEST_CASEAPI_\n\n// Gets the number of successful tests.\nint UnitTest::successful_test_count() const {\n  return impl()->successful_test_count();\n}\n\n// Gets the number of skipped tests.\nint UnitTest::skipped_test_count() const {\n  return impl()->skipped_test_count();\n}\n\n// Gets the number of failed tests.\nint UnitTest::failed_test_count() const { return impl()->failed_test_count(); }\n\n// Gets the number of disabled tests that will be reported in the XML report.\nint UnitTest::reportable_disabled_test_count() const {\n  return 
impl()->reportable_disabled_test_count();\n}\n\n// Gets the number of disabled tests.\nint UnitTest::disabled_test_count() const {\n  return impl()->disabled_test_count();\n}\n\n// Gets the number of tests to be printed in the XML report.\nint UnitTest::reportable_test_count() const {\n  return impl()->reportable_test_count();\n}\n\n// Gets the number of all tests.\nint UnitTest::total_test_count() const { return impl()->total_test_count(); }\n\n// Gets the number of tests that should run.\nint UnitTest::test_to_run_count() const { return impl()->test_to_run_count(); }\n\n// Gets the time of the test program start, in ms from the start of the\n// UNIX epoch.\ninternal::TimeInMillis UnitTest::start_timestamp() const {\n    return impl()->start_timestamp();\n}\n\n// Gets the elapsed time, in milliseconds.\ninternal::TimeInMillis UnitTest::elapsed_time() const {\n  return impl()->elapsed_time();\n}\n\n// Returns true if and only if the unit test passed (i.e. all test suites\n// passed).\nbool UnitTest::Passed() const { return impl()->Passed(); }\n\n// Returns true if and only if the unit test failed (i.e. some test suite\n// failed or something outside of all tests failed).\nbool UnitTest::Failed() const { return impl()->Failed(); }\n\n// Gets the i-th test suite among all the test suites. i can range from 0 to\n// total_test_suite_count() - 1. 
If i is not in that range, returns NULL.\nconst TestSuite* UnitTest::GetTestSuite(int i) const {\n  return impl()->GetTestSuite(i);\n}\n\n//  Legacy API is deprecated but still available\n#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_\nconst TestCase* UnitTest::GetTestCase(int i) const {\n  return impl()->GetTestCase(i);\n}\n#endif  //  GTEST_REMOVE_LEGACY_TEST_CASEAPI_\n\n// Returns the TestResult containing information on test failures and\n// properties logged outside of individual test suites.\nconst TestResult& UnitTest::ad_hoc_test_result() const {\n  return *impl()->ad_hoc_test_result();\n}\n\n// Gets the i-th test suite among all the test suites. i can range from 0 to\n// total_test_suite_count() - 1. If i is not in that range, returns NULL.\nTestSuite* UnitTest::GetMutableTestSuite(int i) {\n  return impl()->GetMutableSuiteCase(i);\n}\n\n// Returns the list of event listeners that can be used to track events\n// inside Google Test.\nTestEventListeners& UnitTest::listeners() {\n  return *impl()->listeners();\n}\n\n// Registers and returns a global test environment.  When a test\n// program is run, all global test environments will be set-up in the\n// order they were registered.  After all tests in the program have\n// finished, all global test environments will be torn-down in the\n// *reverse* order they were registered.\n//\n// The UnitTest object takes ownership of the given environment.\n//\n// We don't protect this under mutex_, as we only support calling it\n// from the main thread.\nEnvironment* UnitTest::AddEnvironment(Environment* env) {\n  if (env == nullptr) {\n    return nullptr;\n  }\n\n  impl_->environments().push_back(env);\n  return env;\n}\n\n// Adds a TestPartResult to the current TestResult object.  All Google Test\n// assertion macros (e.g. ASSERT_TRUE, EXPECT_EQ, etc) eventually call\n// this to report their results.  
The user code should use the\n// assertion macros instead of calling this directly.\nvoid UnitTest::AddTestPartResult(\n    TestPartResult::Type result_type,\n    const char* file_name,\n    int line_number,\n    const std::string& message,\n    const std::string& os_stack_trace) GTEST_LOCK_EXCLUDED_(mutex_) {\n  Message msg;\n  msg << message;\n\n  internal::MutexLock lock(&mutex_);\n  if (impl_->gtest_trace_stack().size() > 0) {\n    msg << \"\\n\" << GTEST_NAME_ << \" trace:\";\n\n    for (size_t i = impl_->gtest_trace_stack().size(); i > 0; --i) {\n      const internal::TraceInfo& trace = impl_->gtest_trace_stack()[i - 1];\n      msg << \"\\n\" << internal::FormatFileLocation(trace.file, trace.line)\n          << \" \" << trace.message;\n    }\n  }\n\n  if (os_stack_trace.c_str() != nullptr && !os_stack_trace.empty()) {\n    msg << internal::kStackTraceMarker << os_stack_trace;\n  }\n\n  const TestPartResult result = TestPartResult(\n      result_type, file_name, line_number, msg.GetString().c_str());\n  impl_->GetTestPartResultReporterForCurrentThread()->\n      ReportTestPartResult(result);\n\n  if (result_type != TestPartResult::kSuccess &&\n      result_type != TestPartResult::kSkip) {\n    // gtest_break_on_failure takes precedence over\n    // gtest_throw_on_failure.  
This allows a user to set the latter\n    // in the code (perhaps in order to use Google Test assertions\n    // with another testing framework) and specify the former on the\n    // command line for debugging.\n    if (GTEST_FLAG(break_on_failure)) {\n#if GTEST_OS_WINDOWS && !GTEST_OS_WINDOWS_PHONE && !GTEST_OS_WINDOWS_RT\n      // Using DebugBreak on Windows allows gtest to still break into a debugger\n      // when a failure happens and both the --gtest_break_on_failure and\n      // the --gtest_catch_exceptions flags are specified.\n      DebugBreak();\n#elif (!defined(__native_client__)) &&            \\\n    ((defined(__clang__) || defined(__GNUC__)) && \\\n     (defined(__x86_64__) || defined(__i386__)))\n      // with clang/gcc we can achieve the same effect on x86 by invoking int3\n      asm(\"int3\");\n#else\n      // Dereference nullptr through a volatile pointer to prevent the compiler\n      // from removing. We use this rather than abort() or __builtin_trap() for\n      // portability: some debuggers don't correctly trap abort().\n      *static_cast<volatile int*>(nullptr) = 1;\n#endif  // GTEST_OS_WINDOWS\n    } else if (GTEST_FLAG(throw_on_failure)) {\n#if GTEST_HAS_EXCEPTIONS\n      throw internal::GoogleTestFailureException(result);\n#else\n      // We cannot call abort() as it generates a pop-up in debug mode\n      // that cannot be suppressed in VC 7.1 or below.\n      exit(1);\n#endif\n    }\n  }\n}\n\n// Adds a TestProperty to the current TestResult object when invoked from\n// inside a test, to current TestSuite's ad_hoc_test_result_ when invoked\n// from SetUpTestSuite or TearDownTestSuite, or to the global property set\n// when invoked elsewhere.  
If the result already contains a property with\n// the same key, the value will be updated.\nvoid UnitTest::RecordProperty(const std::string& key,\n                              const std::string& value) {\n  impl_->RecordProperty(TestProperty(key, value));\n}\n\n// Runs all tests in this UnitTest object and prints the result.\n// Returns 0 if successful, or 1 otherwise.\n//\n// We don't protect this under mutex_, as we only support calling it\n// from the main thread.\nint UnitTest::Run() {\n  const bool in_death_test_child_process =\n      internal::GTEST_FLAG(internal_run_death_test).length() > 0;\n\n  // Google Test implements this protocol for catching that a test\n  // program exits before returning control to Google Test:\n  //\n  //   1. Upon start, Google Test creates a file whose absolute path\n  //      is specified by the environment variable\n  //      TEST_PREMATURE_EXIT_FILE.\n  //   2. When Google Test has finished its work, it deletes the file.\n  //\n  // This allows a test runner to set TEST_PREMATURE_EXIT_FILE before\n  // running a Google-Test-based test program and check the existence\n  // of the file at the end of the test execution to see if it has\n  // exited prematurely.\n\n  // If we are in the child process of a death test, don't\n  // create/delete the premature exit file, as doing so is unnecessary\n  // and will confuse the parent process.  Otherwise, create/delete\n  // the file upon entering/leaving this function.  If the program\n  // somehow exits before this function has a chance to return, the\n  // premature-exit file will be left undeleted, causing a test runner\n  // that understands the premature-exit-file protocol to report the\n  // test as having failed.\n  const internal::ScopedPrematureExitFile premature_exit_file(\n      in_death_test_child_process\n          ? nullptr\n          : internal::posix::GetEnv(\"TEST_PREMATURE_EXIT_FILE\"));\n\n  // Captures the value of GTEST_FLAG(catch_exceptions).  
This value will be\n  // used for the duration of the program.\n  impl()->set_catch_exceptions(GTEST_FLAG(catch_exceptions));\n\n#if GTEST_OS_WINDOWS\n  // Either the user wants Google Test to catch exceptions thrown by the\n  // tests or this is executing in the context of death test child\n  // process. In either case the user does not want to see pop-up dialogs\n  // about crashes - they are expected.\n  if (impl()->catch_exceptions() || in_death_test_child_process) {\n# if !GTEST_OS_WINDOWS_MOBILE && !GTEST_OS_WINDOWS_PHONE && !GTEST_OS_WINDOWS_RT\n    // SetErrorMode doesn't exist on CE.\n    SetErrorMode(SEM_FAILCRITICALERRORS | SEM_NOALIGNMENTFAULTEXCEPT |\n                 SEM_NOGPFAULTERRORBOX | SEM_NOOPENFILEERRORBOX);\n# endif  // !GTEST_OS_WINDOWS_MOBILE\n\n# if (defined(_MSC_VER) || GTEST_OS_WINDOWS_MINGW) && !GTEST_OS_WINDOWS_MOBILE\n    // Death test children can be terminated with _abort().  On Windows,\n    // _abort() can show a dialog with a warning message.  This forces the\n    // abort message to go to stderr instead.\n    _set_error_mode(_OUT_TO_STDERR);\n# endif\n\n# if defined(_MSC_VER) && !GTEST_OS_WINDOWS_MOBILE\n    // In the debug version, Visual Studio pops up a separate dialog\n    // offering a choice to debug the aborted program. We need to suppress\n    // this dialog or it will pop up for every EXPECT/ASSERT_DEATH statement\n    // executed. Google Test will notify the user of any unexpected\n    // failure via stderr.\n    if (!GTEST_FLAG(break_on_failure))\n      _set_abort_behavior(\n          0x0,                                    // Clear the following flags:\n          _WRITE_ABORT_MSG | _CALL_REPORTFAULT);  // pop-up window, core dump.\n\n    // In debug mode, the Windows CRT can crash with an assertion over invalid\n    // input (e.g. passing an invalid file descriptor).  
The default handling\n    // for these assertions is to pop up a dialog and wait for user input.\n    // Instead ask the CRT to dump such assertions to stderr non-interactively.\n    if (!IsDebuggerPresent()) {\n      (void)_CrtSetReportMode(_CRT_ASSERT,\n                              _CRTDBG_MODE_FILE | _CRTDBG_MODE_DEBUG);\n      (void)_CrtSetReportFile(_CRT_ASSERT, _CRTDBG_FILE_STDERR);\n    }\n# endif\n  }\n#endif  // GTEST_OS_WINDOWS\n\n  return internal::HandleExceptionsInMethodIfSupported(\n      impl(),\n      &internal::UnitTestImpl::RunAllTests,\n      \"auxiliary test code (environments or event listeners)\") ? 0 : 1;\n}\n\n// Returns the working directory when the first TEST() or TEST_F() was\n// executed.\nconst char* UnitTest::original_working_dir() const {\n  return impl_->original_working_dir_.c_str();\n}\n\n// Returns the TestSuite object for the test that's currently running,\n// or NULL if no test is running.\nconst TestSuite* UnitTest::current_test_suite() const\n    GTEST_LOCK_EXCLUDED_(mutex_) {\n  internal::MutexLock lock(&mutex_);\n  return impl_->current_test_suite();\n}\n\n// Legacy API is still available but deprecated\n#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_\nconst TestCase* UnitTest::current_test_case() const\n    GTEST_LOCK_EXCLUDED_(mutex_) {\n  internal::MutexLock lock(&mutex_);\n  return impl_->current_test_suite();\n}\n#endif\n\n// Returns the TestInfo object for the test that's currently running,\n// or NULL if no test is running.\nconst TestInfo* UnitTest::current_test_info() const\n    GTEST_LOCK_EXCLUDED_(mutex_) {\n  internal::MutexLock lock(&mutex_);\n  return impl_->current_test_info();\n}\n\n// Returns the random seed used at the start of the current test run.\nint UnitTest::random_seed() const { return impl_->random_seed(); }\n\n// Returns ParameterizedTestSuiteRegistry object used to keep track of\n// value-parameterized tests and instantiate and register 
them.\ninternal::ParameterizedTestSuiteRegistry&\nUnitTest::parameterized_test_registry() GTEST_LOCK_EXCLUDED_(mutex_) {\n  return impl_->parameterized_test_registry();\n}\n\n// Creates an empty UnitTest.\nUnitTest::UnitTest() {\n  impl_ = new internal::UnitTestImpl(this);\n}\n\n// Destructor of UnitTest.\nUnitTest::~UnitTest() {\n  delete impl_;\n}\n\n// Pushes a trace defined by SCOPED_TRACE() on to the per-thread\n// Google Test trace stack.\nvoid UnitTest::PushGTestTrace(const internal::TraceInfo& trace)\n    GTEST_LOCK_EXCLUDED_(mutex_) {\n  internal::MutexLock lock(&mutex_);\n  impl_->gtest_trace_stack().push_back(trace);\n}\n\n// Pops a trace from the per-thread Google Test trace stack.\nvoid UnitTest::PopGTestTrace()\n    GTEST_LOCK_EXCLUDED_(mutex_) {\n  internal::MutexLock lock(&mutex_);\n  impl_->gtest_trace_stack().pop_back();\n}\n\nnamespace internal {\n\nUnitTestImpl::UnitTestImpl(UnitTest* parent)\n    : parent_(parent),\n      GTEST_DISABLE_MSC_WARNINGS_PUSH_(4355 /* using this in initializer */)\n          default_global_test_part_result_reporter_(this),\n      default_per_thread_test_part_result_reporter_(this),\n      GTEST_DISABLE_MSC_WARNINGS_POP_() global_test_part_result_repoter_(\n          &default_global_test_part_result_reporter_),\n      per_thread_test_part_result_reporter_(\n          &default_per_thread_test_part_result_reporter_),\n      parameterized_test_registry_(),\n      parameterized_tests_registered_(false),\n      last_death_test_suite_(-1),\n      current_test_suite_(nullptr),\n      current_test_info_(nullptr),\n      ad_hoc_test_result_(),\n      os_stack_trace_getter_(nullptr),\n      post_flag_parse_init_performed_(false),\n      random_seed_(0),  // Will be overridden by the flag before first use.\n      random_(0),       // Will be reseeded before first use.\n      start_timestamp_(0),\n      elapsed_time_(0),\n#if GTEST_HAS_DEATH_TEST\n      death_test_factory_(new DefaultDeathTestFactory),\n#endif\n      // Will be 
overridden by the flag before first use.\n      catch_exceptions_(false) {\n  listeners()->SetDefaultResultPrinter(new PrettyUnitTestResultPrinter);\n}\n\nUnitTestImpl::~UnitTestImpl() {\n  // Deletes every TestSuite.\n  ForEach(test_suites_, internal::Delete<TestSuite>);\n\n  // Deletes every Environment.\n  ForEach(environments_, internal::Delete<Environment>);\n\n  delete os_stack_trace_getter_;\n}\n\n// Adds a TestProperty to the current TestResult object when invoked in a\n// context of a test, to current test suite's ad_hoc_test_result when invoke\n// from SetUpTestSuite/TearDownTestSuite, or to the global property set\n// otherwise.  If the result already contains a property with the same key,\n// the value will be updated.\nvoid UnitTestImpl::RecordProperty(const TestProperty& test_property) {\n  std::string xml_element;\n  TestResult* test_result;  // TestResult appropriate for property recording.\n\n  if (current_test_info_ != nullptr) {\n    xml_element = \"testcase\";\n    test_result = &(current_test_info_->result_);\n  } else if (current_test_suite_ != nullptr) {\n    xml_element = \"testsuite\";\n    test_result = &(current_test_suite_->ad_hoc_test_result_);\n  } else {\n    xml_element = \"testsuites\";\n    test_result = &ad_hoc_test_result_;\n  }\n  test_result->RecordProperty(xml_element, test_property);\n}\n\n#if GTEST_HAS_DEATH_TEST\n// Disables event forwarding if the control is currently in a death test\n// subprocess. Must not be called before InitGoogleTest.\nvoid UnitTestImpl::SuppressTestEventsIfInSubprocess() {\n  if (internal_run_death_test_flag_.get() != nullptr)\n    listeners()->SuppressEventForwarding();\n}\n#endif  // GTEST_HAS_DEATH_TEST\n\n// Initializes event listeners performing XML output as specified by\n// UnitTestOptions. 
Must not be called before InitGoogleTest.\nvoid UnitTestImpl::ConfigureXmlOutput() {\n  const std::string& output_format = UnitTestOptions::GetOutputFormat();\n  if (output_format == \"xml\") {\n    listeners()->SetDefaultXmlGenerator(new XmlUnitTestResultPrinter(\n        UnitTestOptions::GetAbsolutePathToOutputFile().c_str()));\n  } else if (output_format == \"json\") {\n    listeners()->SetDefaultXmlGenerator(new JsonUnitTestResultPrinter(\n        UnitTestOptions::GetAbsolutePathToOutputFile().c_str()));\n  } else if (output_format != \"\") {\n    GTEST_LOG_(WARNING) << \"WARNING: unrecognized output format \\\"\"\n                        << output_format << \"\\\" ignored.\";\n  }\n}\n\n#if GTEST_CAN_STREAM_RESULTS_\n// Initializes event listeners for streaming test results in string form.\n// Must not be called before InitGoogleTest.\nvoid UnitTestImpl::ConfigureStreamingOutput() {\n  const std::string& target = GTEST_FLAG(stream_result_to);\n  if (!target.empty()) {\n    const size_t pos = target.find(':');\n    if (pos != std::string::npos) {\n      listeners()->Append(new StreamingListener(target.substr(0, pos),\n                                                target.substr(pos+1)));\n    } else {\n      GTEST_LOG_(WARNING) << \"unrecognized streaming target \\\"\" << target\n                          << \"\\\" ignored.\";\n    }\n  }\n}\n#endif  // GTEST_CAN_STREAM_RESULTS_\n\n// Performs initialization dependent upon flag values obtained in\n// ParseGoogleTestFlagsOnly.  Is called from InitGoogleTest after the call to\n// ParseGoogleTestFlagsOnly.  In case a user neglects to call InitGoogleTest\n// this function is also called from RunAllTests.  
Since this function can be\n// called more than once, it has to be idempotent.\nvoid UnitTestImpl::PostFlagParsingInit() {\n  // Ensures that this function does not execute more than once.\n  if (!post_flag_parse_init_performed_) {\n    post_flag_parse_init_performed_ = true;\n\n#if defined(GTEST_CUSTOM_TEST_EVENT_LISTENER_)\n    // Register to send notifications about key process state changes.\n    listeners()->Append(new GTEST_CUSTOM_TEST_EVENT_LISTENER_());\n#endif  // defined(GTEST_CUSTOM_TEST_EVENT_LISTENER_)\n\n#if GTEST_HAS_DEATH_TEST\n    InitDeathTestSubprocessControlInfo();\n    SuppressTestEventsIfInSubprocess();\n#endif  // GTEST_HAS_DEATH_TEST\n\n    // Registers parameterized tests. This makes parameterized tests\n    // available to the UnitTest reflection API without running\n    // RUN_ALL_TESTS.\n    RegisterParameterizedTests();\n\n    // Configures listeners for XML output. This makes it possible for users\n    // to shut down the default XML output before invoking RUN_ALL_TESTS.\n    ConfigureXmlOutput();\n\n#if GTEST_CAN_STREAM_RESULTS_\n    // Configures listeners for streaming test results to the specified server.\n    ConfigureStreamingOutput();\n#endif  // GTEST_CAN_STREAM_RESULTS_\n\n#if GTEST_HAS_ABSL\n    if (GTEST_FLAG(install_failure_signal_handler)) {\n      absl::FailureSignalHandlerOptions options;\n      absl::InstallFailureSignalHandler(options);\n    }\n#endif  // GTEST_HAS_ABSL\n  }\n}\n\n// A predicate that checks the name of a TestSuite against a known\n// value.\n//\n// This is used for implementation of the UnitTest class only.  
We put\n// it in the anonymous namespace to prevent polluting the outer\n// namespace.\n//\n// TestSuiteNameIs is copyable.\nclass TestSuiteNameIs {\n public:\n  // Constructor.\n  explicit TestSuiteNameIs(const std::string& name) : name_(name) {}\n\n  // Returns true if and only if the name of test_suite matches name_.\n  bool operator()(const TestSuite* test_suite) const {\n    return test_suite != nullptr &&\n           strcmp(test_suite->name(), name_.c_str()) == 0;\n  }\n\n private:\n  std::string name_;\n};\n\n// Finds and returns a TestSuite with the given name.  If one doesn't\n// exist, creates one and returns it.  It's the CALLER'S\n// RESPONSIBILITY to ensure that this function is only called WHEN THE\n// TESTS ARE NOT SHUFFLED.\n//\n// Arguments:\n//\n//   test_suite_name: name of the test suite\n//   type_param:     the name of the test suite's type parameter, or NULL if\n//                   this is not a typed or a type-parameterized test suite.\n//   set_up_tc:      pointer to the function that sets up the test suite\n//   tear_down_tc:   pointer to the function that tears down the test suite\nTestSuite* UnitTestImpl::GetTestSuite(\n    const char* test_suite_name, const char* type_param,\n    internal::SetUpTestSuiteFunc set_up_tc,\n    internal::TearDownTestSuiteFunc tear_down_tc) {\n  // Can we find a TestSuite with the given name?\n  const auto test_suite =\n      std::find_if(test_suites_.rbegin(), test_suites_.rend(),\n                   TestSuiteNameIs(test_suite_name));\n\n  if (test_suite != test_suites_.rend()) return *test_suite;\n\n  // No.  Let's create one.\n  auto* const new_test_suite =\n      new TestSuite(test_suite_name, type_param, set_up_tc, tear_down_tc);\n\n  // Is this a death test suite?\n  if (internal::UnitTestOptions::MatchesFilter(test_suite_name,\n                                               kDeathTestSuiteFilter)) {\n    // Yes.  Inserts the test suite after the last death test suite\n    // defined so far.  
This only works when the test suites haven't\n    // been shuffled.  Otherwise we may end up running a death test\n    // after a non-death test.\n    ++last_death_test_suite_;\n    test_suites_.insert(test_suites_.begin() + last_death_test_suite_,\n                        new_test_suite);\n  } else {\n    // No.  Appends to the end of the list.\n    test_suites_.push_back(new_test_suite);\n  }\n\n  test_suite_indices_.push_back(static_cast<int>(test_suite_indices_.size()));\n  return new_test_suite;\n}\n\n// Helpers for setting up / tearing down the given environment.  They\n// are for use in the ForEach() function.\nstatic void SetUpEnvironment(Environment* env) { env->SetUp(); }\nstatic void TearDownEnvironment(Environment* env) { env->TearDown(); }\n\n// Runs all tests in this UnitTest object, prints the result, and\n// returns true if all tests are successful.  If any exception is\n// thrown during a test, the test is considered to be failed, but the\n// rest of the tests will still be run.\n//\n// When parameterized tests are enabled, it expands and registers\n// parameterized tests first in RegisterParameterizedTests().\n// All other functions called from RunAllTests() may safely assume that\n// parameterized tests are ready to be counted and run.\nbool UnitTestImpl::RunAllTests() {\n  // True if and only if Google Test is initialized before RUN_ALL_TESTS() is\n  // called.\n  const bool gtest_is_initialized_before_run_all_tests = GTestIsInitialized();\n\n  // Do not run any test if the --help flag was specified.\n  if (g_help_flag)\n    return true;\n\n  // Repeats the call to the post-flag parsing initialization in case the\n  // user didn't call InitGoogleTest.\n  PostFlagParsingInit();\n\n  // Even if sharding is not on, test runners may want to use the\n  // GTEST_SHARD_STATUS_FILE to query whether the test supports the sharding\n  // protocol.\n  internal::WriteToShardStatusFileIfNeeded();\n\n  // True if and only if we are in a subprocess for running 
a thread-safe-style\n  // death test.\n  bool in_subprocess_for_death_test = false;\n\n#if GTEST_HAS_DEATH_TEST\n  in_subprocess_for_death_test =\n      (internal_run_death_test_flag_.get() != nullptr);\n# if defined(GTEST_EXTRA_DEATH_TEST_CHILD_SETUP_)\n  if (in_subprocess_for_death_test) {\n    GTEST_EXTRA_DEATH_TEST_CHILD_SETUP_();\n  }\n# endif  // defined(GTEST_EXTRA_DEATH_TEST_CHILD_SETUP_)\n#endif  // GTEST_HAS_DEATH_TEST\n\n  const bool should_shard = ShouldShard(kTestTotalShards, kTestShardIndex,\n                                        in_subprocess_for_death_test);\n\n  // Compares the full test names with the filter to decide which\n  // tests to run.\n  const bool has_tests_to_run = FilterTests(should_shard\n                                              ? HONOR_SHARDING_PROTOCOL\n                                              : IGNORE_SHARDING_PROTOCOL) > 0;\n\n  // Lists the tests and exits if the --gtest_list_tests flag was specified.\n  if (GTEST_FLAG(list_tests)) {\n    // This must be called *after* FilterTests() has been called.\n    ListTestsMatchingFilter();\n    return true;\n  }\n\n  random_seed_ = GTEST_FLAG(shuffle) ?\n      GetRandomSeedFromFlag(GTEST_FLAG(random_seed)) : 0;\n\n  // True if and only if at least one test has failed.\n  bool failed = false;\n\n  TestEventListener* repeater = listeners()->repeater();\n\n  start_timestamp_ = GetTimeInMillis();\n  repeater->OnTestProgramStart(*parent_);\n\n  // How many times to repeat the tests?  We don't want to repeat them\n  // when we are inside the subprocess of a death test.\n  const int repeat = in_subprocess_for_death_test ? 
1 : GTEST_FLAG(repeat);\n  // Repeats forever if the repeat count is negative.\n  const bool gtest_repeat_forever = repeat < 0;\n  for (int i = 0; gtest_repeat_forever || i != repeat; i++) {\n    // We want to preserve failures generated by ad-hoc test\n    // assertions executed before RUN_ALL_TESTS().\n    ClearNonAdHocTestResult();\n\n    const TimeInMillis start = GetTimeInMillis();\n\n    // Shuffles test suites and tests if requested.\n    if (has_tests_to_run && GTEST_FLAG(shuffle)) {\n      random()->Reseed(static_cast<UInt32>(random_seed_));\n      // This should be done before calling OnTestIterationStart(),\n      // such that a test event listener can see the actual test order\n      // in the event.\n      ShuffleTests();\n    }\n\n    // Tells the unit test event listeners that the tests are about to start.\n    repeater->OnTestIterationStart(*parent_, i);\n\n    // Runs each test suite if there is at least one test to run.\n    if (has_tests_to_run) {\n      // Sets up all environments beforehand.\n      repeater->OnEnvironmentsSetUpStart(*parent_);\n      ForEach(environments_, SetUpEnvironment);\n      repeater->OnEnvironmentsSetUpEnd(*parent_);\n\n      // Runs the tests only if there was no fatal failure or skip triggered\n      // during global set-up.\n      if (Test::IsSkipped()) {\n        // Emit diagnostics when global set-up calls skip, as it will not be\n        // emitted by default.\n        TestResult& test_result =\n            *internal::GetUnitTestImpl()->current_test_result();\n        for (int j = 0; j < test_result.total_part_count(); ++j) {\n          const TestPartResult& test_part_result =\n              test_result.GetTestPartResult(j);\n          if (test_part_result.type() == TestPartResult::kSkip) {\n            const std::string& result = test_part_result.message();\n            printf(\"%s\\n\", result.c_str());\n          }\n        }\n        fflush(stdout);\n      } else if (!Test::HasFatalFailure()) {\n        for 
(int test_index = 0; test_index < total_test_suite_count();\n             test_index++) {\n          GetMutableSuiteCase(test_index)->Run();\n        }\n      }\n\n      // Tears down all environments in reverse order afterwards.\n      repeater->OnEnvironmentsTearDownStart(*parent_);\n      std::for_each(environments_.rbegin(), environments_.rend(),\n                    TearDownEnvironment);\n      repeater->OnEnvironmentsTearDownEnd(*parent_);\n    }\n\n    elapsed_time_ = GetTimeInMillis() - start;\n\n    // Tells the unit test event listener that the tests have just finished.\n    repeater->OnTestIterationEnd(*parent_, i);\n\n    // Gets the result and clears it.\n    if (!Passed()) {\n      failed = true;\n    }\n\n    // Restores the original test order after the iteration.  This\n    // allows the user to quickly repro a failure that happens in the\n    // N-th iteration without repeating the first (N - 1) iterations.\n    // This is not enclosed in \"if (GTEST_FLAG(shuffle)) { ... }\", in\n    // case the user somehow changes the value of the flag somewhere\n    // (it's always safe to unshuffle the tests).\n    UnshuffleTests();\n\n    if (GTEST_FLAG(shuffle)) {\n      // Picks a new random seed for each iteration.\n      random_seed_ = GetNextRandomSeed(random_seed_);\n    }\n  }\n\n  repeater->OnTestProgramEnd(*parent_);\n\n  if (!gtest_is_initialized_before_run_all_tests) {\n    ColoredPrintf(\n        COLOR_RED,\n        \"\\nIMPORTANT NOTICE - DO NOT IGNORE:\\n\"\n        \"This test program did NOT call \" GTEST_INIT_GOOGLE_TEST_NAME_\n        \"() before calling RUN_ALL_TESTS(). This is INVALID. Soon \" GTEST_NAME_\n        \" will start to enforce the valid usage. 
\"\n        \"Please fix it ASAP, or IT WILL START TO FAIL.\\n\");  // NOLINT\n#if GTEST_FOR_GOOGLE_\n    ColoredPrintf(COLOR_RED,\n                  \"For more details, see http://wiki/Main/ValidGUnitMain.\\n\");\n#endif  // GTEST_FOR_GOOGLE_\n  }\n\n  return !failed;\n}\n\n// Reads the GTEST_SHARD_STATUS_FILE environment variable, and creates the file\n// if the variable is present. If a file already exists at this location, this\n// function will write over it. If the variable is present, but the file cannot\n// be created, prints an error and exits.\nvoid WriteToShardStatusFileIfNeeded() {\n  const char* const test_shard_file = posix::GetEnv(kTestShardStatusFile);\n  if (test_shard_file != nullptr) {\n    FILE* const file = posix::FOpen(test_shard_file, \"w\");\n    if (file == nullptr) {\n      ColoredPrintf(COLOR_RED,\n                    \"Could not write to the test shard status file \\\"%s\\\" \"\n                    \"specified by the %s environment variable.\\n\",\n                    test_shard_file, kTestShardStatusFile);\n      fflush(stdout);\n      exit(EXIT_FAILURE);\n    }\n    fclose(file);\n  }\n}\n\n// Checks whether sharding is enabled by examining the relevant\n// environment variable values. If the variables are present,\n// but inconsistent (i.e., shard_index >= total_shards), prints\n// an error and exits. If in_subprocess_for_death_test, sharding is\n// disabled because it must only be applied to the original test\n// process. 
Otherwise, we could filter out death tests we intended to execute.\nbool ShouldShard(const char* total_shards_env,\n                 const char* shard_index_env,\n                 bool in_subprocess_for_death_test) {\n  if (in_subprocess_for_death_test) {\n    return false;\n  }\n\n  const Int32 total_shards = Int32FromEnvOrDie(total_shards_env, -1);\n  const Int32 shard_index = Int32FromEnvOrDie(shard_index_env, -1);\n\n  if (total_shards == -1 && shard_index == -1) {\n    return false;\n  } else if (total_shards == -1 && shard_index != -1) {\n    const Message msg = Message()\n      << \"Invalid environment variables: you have \"\n      << kTestShardIndex << \" = \" << shard_index\n      << \", but have left \" << kTestTotalShards << \" unset.\\n\";\n    ColoredPrintf(COLOR_RED, \"%s\", msg.GetString().c_str());\n    fflush(stdout);\n    exit(EXIT_FAILURE);\n  } else if (total_shards != -1 && shard_index == -1) {\n    const Message msg = Message()\n      << \"Invalid environment variables: you have \"\n      << kTestTotalShards << \" = \" << total_shards\n      << \", but have left \" << kTestShardIndex << \" unset.\\n\";\n    ColoredPrintf(COLOR_RED, \"%s\", msg.GetString().c_str());\n    fflush(stdout);\n    exit(EXIT_FAILURE);\n  } else if (shard_index < 0 || shard_index >= total_shards) {\n    const Message msg = Message()\n      << \"Invalid environment variables: we require 0 <= \"\n      << kTestShardIndex << \" < \" << kTestTotalShards\n      << \", but you have \" << kTestShardIndex << \"=\" << shard_index\n      << \", \" << kTestTotalShards << \"=\" << total_shards << \".\\n\";\n    ColoredPrintf(COLOR_RED, \"%s\", msg.GetString().c_str());\n    fflush(stdout);\n    exit(EXIT_FAILURE);\n  }\n\n  return total_shards > 1;\n}\n\n// Parses the environment variable var as an Int32. If it is unset,\n// returns default_val. 
If it is not an Int32, prints an error\n// and aborts.\nInt32 Int32FromEnvOrDie(const char* var, Int32 default_val) {\n  const char* str_val = posix::GetEnv(var);\n  if (str_val == nullptr) {\n    return default_val;\n  }\n\n  Int32 result;\n  if (!ParseInt32(Message() << \"The value of environment variable \" << var,\n                  str_val, &result)) {\n    exit(EXIT_FAILURE);\n  }\n  return result;\n}\n\n// Given the total number of shards, the shard index, and the test id,\n// returns true if and only if the test should be run on this shard. The test id\n// is some arbitrary but unique non-negative integer assigned to each test\n// method. Assumes that 0 <= shard_index < total_shards.\nbool ShouldRunTestOnShard(int total_shards, int shard_index, int test_id) {\n  return (test_id % total_shards) == shard_index;\n}\n\n// Compares the name of each test with the user-specified filter to\n// decide whether the test should be run, then records the result in\n// each TestSuite and TestInfo object.\n// If shard_tests == true, further filters tests based on sharding\n// variables in the environment - see\n// https://github.com/google/googletest/blob/master/googletest/docs/advanced.md\n// . 
Returns the number of tests that should run.\nint UnitTestImpl::FilterTests(ReactionToSharding shard_tests) {\n  const Int32 total_shards = shard_tests == HONOR_SHARDING_PROTOCOL ?\n      Int32FromEnvOrDie(kTestTotalShards, -1) : -1;\n  const Int32 shard_index = shard_tests == HONOR_SHARDING_PROTOCOL ?\n      Int32FromEnvOrDie(kTestShardIndex, -1) : -1;\n\n  // num_runnable_tests are the number of tests that will\n  // run across all shards (i.e., match filter and are not disabled).\n  // num_selected_tests are the number of tests to be run on\n  // this shard.\n  int num_runnable_tests = 0;\n  int num_selected_tests = 0;\n  for (auto* test_suite : test_suites_) {\n    const std::string& test_suite_name = test_suite->name();\n    test_suite->set_should_run(false);\n\n    for (size_t j = 0; j < test_suite->test_info_list().size(); j++) {\n      TestInfo* const test_info = test_suite->test_info_list()[j];\n      const std::string test_name(test_info->name());\n      // A test is disabled if test suite name or test name matches\n      // kDisableTestFilter.\n      const bool is_disabled = internal::UnitTestOptions::MatchesFilter(\n                                   test_suite_name, kDisableTestFilter) ||\n                               internal::UnitTestOptions::MatchesFilter(\n                                   test_name, kDisableTestFilter);\n      test_info->is_disabled_ = is_disabled;\n\n      const bool matches_filter = internal::UnitTestOptions::FilterMatchesTest(\n          test_suite_name, test_name);\n      test_info->matches_filter_ = matches_filter;\n\n      const bool is_runnable =\n          (GTEST_FLAG(also_run_disabled_tests) || !is_disabled) &&\n          matches_filter;\n\n      const bool is_in_another_shard =\n          shard_tests != IGNORE_SHARDING_PROTOCOL &&\n          !ShouldRunTestOnShard(total_shards, shard_index, num_runnable_tests);\n      test_info->is_in_another_shard_ = is_in_another_shard;\n      const bool is_selected = is_runnable && 
!is_in_another_shard;\n\n      num_runnable_tests += is_runnable;\n      num_selected_tests += is_selected;\n\n      test_info->should_run_ = is_selected;\n      test_suite->set_should_run(test_suite->should_run() || is_selected);\n    }\n  }\n  return num_selected_tests;\n}\n\n// Prints the given C-string on a single line by replacing all '\\n'\n// characters with string \"\\\\n\".  If the output takes more than\n// max_length characters, only prints the first max_length characters\n// and \"...\".\nstatic void PrintOnOneLine(const char* str, int max_length) {\n  if (str != nullptr) {\n    for (int i = 0; *str != '\\0'; ++str) {\n      if (i >= max_length) {\n        printf(\"...\");\n        break;\n      }\n      if (*str == '\\n') {\n        printf(\"\\\\n\");\n        i += 2;\n      } else {\n        printf(\"%c\", *str);\n        ++i;\n      }\n    }\n  }\n}\n\n// Prints the names of the tests matching the user-specified filter flag.\nvoid UnitTestImpl::ListTestsMatchingFilter() {\n  // Print at most this many characters for each type/value parameter.\n  const int kMaxParamLength = 250;\n\n  for (auto* test_suite : test_suites_) {\n    bool printed_test_suite_name = false;\n\n    for (size_t j = 0; j < test_suite->test_info_list().size(); j++) {\n      const TestInfo* const test_info = test_suite->test_info_list()[j];\n      if (test_info->matches_filter_) {\n        if (!printed_test_suite_name) {\n          printed_test_suite_name = true;\n          printf(\"%s.\", test_suite->name());\n          if (test_suite->type_param() != nullptr) {\n            printf(\"  # %s = \", kTypeParamLabel);\n            // We print the type parameter on a single line to make\n            // the output easy to parse by a program.\n            PrintOnOneLine(test_suite->type_param(), kMaxParamLength);\n          }\n          printf(\"\\n\");\n        }\n        printf(\"  %s\", test_info->name());\n        if (test_info->value_param() != nullptr) {\n          printf(\"  # %s 
= \", kValueParamLabel);\n          // We print the value parameter on a single line to make the\n          // output easy to parse by a program.\n          PrintOnOneLine(test_info->value_param(), kMaxParamLength);\n        }\n        printf(\"\\n\");\n      }\n    }\n  }\n  fflush(stdout);\n  const std::string& output_format = UnitTestOptions::GetOutputFormat();\n  if (output_format == \"xml\" || output_format == \"json\") {\n    FILE* fileout = OpenFileForWriting(\n        UnitTestOptions::GetAbsolutePathToOutputFile().c_str());\n    std::stringstream stream;\n    if (output_format == \"xml\") {\n      XmlUnitTestResultPrinter(\n          UnitTestOptions::GetAbsolutePathToOutputFile().c_str())\n          .PrintXmlTestsList(&stream, test_suites_);\n    } else if (output_format == \"json\") {\n      JsonUnitTestResultPrinter(\n          UnitTestOptions::GetAbsolutePathToOutputFile().c_str())\n          .PrintJsonTestList(&stream, test_suites_);\n    }\n    fprintf(fileout, \"%s\", StringStreamToString(&stream).c_str());\n    fclose(fileout);\n  }\n}\n\n// Sets the OS stack trace getter.\n//\n// Does nothing if the input and the current OS stack trace getter are\n// the same; otherwise, deletes the old getter and makes the input the\n// current getter.\nvoid UnitTestImpl::set_os_stack_trace_getter(\n    OsStackTraceGetterInterface* getter) {\n  if (os_stack_trace_getter_ != getter) {\n    delete os_stack_trace_getter_;\n    os_stack_trace_getter_ = getter;\n  }\n}\n\n// Returns the current OS stack trace getter if it is not NULL;\n// otherwise, creates an OsStackTraceGetter, makes it the current\n// getter, and returns it.\nOsStackTraceGetterInterface* UnitTestImpl::os_stack_trace_getter() {\n  if (os_stack_trace_getter_ == nullptr) {\n#ifdef GTEST_OS_STACK_TRACE_GETTER_\n    os_stack_trace_getter_ = new GTEST_OS_STACK_TRACE_GETTER_;\n#else\n    os_stack_trace_getter_ = new OsStackTraceGetter;\n#endif  // GTEST_OS_STACK_TRACE_GETTER_\n  }\n\n  return 
os_stack_trace_getter_;\n}\n\n// Returns the most specific TestResult currently running.\nTestResult* UnitTestImpl::current_test_result() {\n  if (current_test_info_ != nullptr) {\n    return &current_test_info_->result_;\n  }\n  if (current_test_suite_ != nullptr) {\n    return &current_test_suite_->ad_hoc_test_result_;\n  }\n  return &ad_hoc_test_result_;\n}\n\n// Shuffles all test suites, and the tests within each test suite,\n// making sure that death tests are still run first.\nvoid UnitTestImpl::ShuffleTests() {\n  // Shuffles the death test suites.\n  ShuffleRange(random(), 0, last_death_test_suite_ + 1, &test_suite_indices_);\n\n  // Shuffles the non-death test suites.\n  ShuffleRange(random(), last_death_test_suite_ + 1,\n               static_cast<int>(test_suites_.size()), &test_suite_indices_);\n\n  // Shuffles the tests inside each test suite.\n  for (auto& test_suite : test_suites_) {\n    test_suite->ShuffleTests(random());\n  }\n}\n\n// Restores the test suites and tests to their order before the first shuffle.\nvoid UnitTestImpl::UnshuffleTests() {\n  for (size_t i = 0; i < test_suites_.size(); i++) {\n    // Unshuffles the tests in each test suite.\n    test_suites_[i]->UnshuffleTests();\n    // Resets the index of each test suite.\n    test_suite_indices_[i] = static_cast<int>(i);\n  }\n}\n\n// Returns the current OS stack trace as an std::string.\n//\n// The maximum number of stack frames to be included is specified by\n// the gtest_stack_trace_depth flag.  
The skip_count parameter\n// specifies the number of top frames to be skipped, which doesn't\n// count against the number of frames to be included.\n//\n// For example, if Foo() calls Bar(), which in turn calls\n// GetCurrentOsStackTraceExceptTop(..., 1), Foo() will be included in\n// the trace but Bar() and GetCurrentOsStackTraceExceptTop() won't.\nstd::string GetCurrentOsStackTraceExceptTop(UnitTest* /*unit_test*/,\n                                            int skip_count) {\n  // We pass skip_count + 1 to skip this wrapper function in addition\n  // to what the user really wants to skip.\n  return GetUnitTestImpl()->CurrentOsStackTraceExceptTop(skip_count + 1);\n}\n\n// Used by the GTEST_SUPPRESS_UNREACHABLE_CODE_WARNING_BELOW_ macro to\n// suppress unreachable code warnings.\nnamespace {\nclass ClassUniqueToAlwaysTrue {};\n}\n\nbool IsTrue(bool condition) { return condition; }\n\nbool AlwaysTrue() {\n#if GTEST_HAS_EXCEPTIONS\n  // This condition is always false so AlwaysTrue() never actually throws,\n  // but it makes the compiler think that it may throw.\n  if (IsTrue(false))\n    throw ClassUniqueToAlwaysTrue();\n#endif  // GTEST_HAS_EXCEPTIONS\n  return true;\n}\n\n// If *pstr starts with the given prefix, modifies *pstr to be right\n// past the prefix and returns true; otherwise leaves *pstr unchanged\n// and returns false.  None of pstr, *pstr, and prefix can be NULL.\nbool SkipPrefix(const char* prefix, const char** pstr) {\n  const size_t prefix_len = strlen(prefix);\n  if (strncmp(*pstr, prefix, prefix_len) == 0) {\n    *pstr += prefix_len;\n    return true;\n  }\n  return false;\n}\n\n// Parses a string as a command line flag.  The string should have\n// the format \"--flag=value\".  
When def_optional is true, the \"=value\"\n// part can be omitted.\n//\n// Returns the value of the flag, or NULL if the parsing failed.\nstatic const char* ParseFlagValue(const char* str, const char* flag,\n                                  bool def_optional) {\n  // str and flag must not be NULL.\n  if (str == nullptr || flag == nullptr) return nullptr;\n\n  // The flag must start with \"--\" followed by GTEST_FLAG_PREFIX_.\n  const std::string flag_str = std::string(\"--\") + GTEST_FLAG_PREFIX_ + flag;\n  const size_t flag_len = flag_str.length();\n  if (strncmp(str, flag_str.c_str(), flag_len) != 0) return nullptr;\n\n  // Skips the flag name.\n  const char* flag_end = str + flag_len;\n\n  // When def_optional is true, it's OK to not have a \"=value\" part.\n  if (def_optional && (flag_end[0] == '\\0')) {\n    return flag_end;\n  }\n\n  // If def_optional is true and there are more characters after the\n  // flag name, or if def_optional is false, there must be a '=' after\n  // the flag name.\n  if (flag_end[0] != '=') return nullptr;\n\n  // Returns the string after \"=\".\n  return flag_end + 1;\n}\n\n// Parses a string for a bool flag, in the form of either\n// \"--flag=value\" or \"--flag\".\n//\n// In the former case, the value is taken as true as long as it does\n// not start with '0', 'f', or 'F'.\n//\n// In the latter case, the value is taken as true.\n//\n// On success, stores the value of the flag in *value, and returns\n// true.  
On failure, returns false without changing *value.\nstatic bool ParseBoolFlag(const char* str, const char* flag, bool* value) {\n  // Gets the value of the flag as a string.\n  const char* const value_str = ParseFlagValue(str, flag, true);\n\n  // Aborts if the parsing failed.\n  if (value_str == nullptr) return false;\n\n  // Converts the string value to a bool.\n  *value = !(*value_str == '0' || *value_str == 'f' || *value_str == 'F');\n  return true;\n}\n\n// Parses a string for an Int32 flag, in the form of\n// \"--flag=value\".\n//\n// On success, stores the value of the flag in *value, and returns\n// true.  On failure, returns false without changing *value.\nbool ParseInt32Flag(const char* str, const char* flag, Int32* value) {\n  // Gets the value of the flag as a string.\n  const char* const value_str = ParseFlagValue(str, flag, false);\n\n  // Aborts if the parsing failed.\n  if (value_str == nullptr) return false;\n\n  // Sets *value to the value of the flag.\n  return ParseInt32(Message() << \"The value of flag --\" << flag,\n                    value_str, value);\n}\n\n// Parses a string for a string flag, in the form of\n// \"--flag=value\".\n//\n// On success, stores the value of the flag in *value, and returns\n// true.  On failure, returns false without changing *value.\ntemplate <typename String>\nstatic bool ParseStringFlag(const char* str, const char* flag, String* value) {\n  // Gets the value of the flag as a string.\n  const char* const value_str = ParseFlagValue(str, flag, false);\n\n  // Aborts if the parsing failed.\n  if (value_str == nullptr) return false;\n\n  // Sets *value to the value of the flag.\n  *value = value_str;\n  return true;\n}\n\n// Determines whether a string has a prefix that Google Test uses for its\n// flags, i.e., starts with GTEST_FLAG_PREFIX_ or GTEST_FLAG_PREFIX_DASH_.\n// If Google Test detects that a command line flag has its prefix but is not\n// recognized, it will print its help message. 
Flags starting with\n// GTEST_INTERNAL_PREFIX_ followed by \"internal_\" are considered Google Test\n// internal flags and do not trigger the help message.\nstatic bool HasGoogleTestFlagPrefix(const char* str) {\n  return (SkipPrefix(\"--\", &str) ||\n          SkipPrefix(\"-\", &str) ||\n          SkipPrefix(\"/\", &str)) &&\n         !SkipPrefix(GTEST_FLAG_PREFIX_ \"internal_\", &str) &&\n         (SkipPrefix(GTEST_FLAG_PREFIX_, &str) ||\n          SkipPrefix(GTEST_FLAG_PREFIX_DASH_, &str));\n}\n\n// Prints a string containing code-encoded text.  The following escape\n// sequences can be used in the string to control the text color:\n//\n//   @@    prints a single '@' character.\n//   @R    changes the color to red.\n//   @G    changes the color to green.\n//   @Y    changes the color to yellow.\n//   @D    changes to the default terminal text color.\n//\nstatic void PrintColorEncoded(const char* str) {\n  GTestColor color = COLOR_DEFAULT;  // The current color.\n\n  // Conceptually, we split the string into segments divided by escape\n  // sequences.  Then we print one segment at a time.  At the end of\n  // each iteration, the str pointer advances to the beginning of the\n  // next segment.\n  for (;;) {\n    const char* p = strchr(str, '@');\n    if (p == nullptr) {\n      ColoredPrintf(color, \"%s\", str);\n      return;\n    }\n\n    ColoredPrintf(color, \"%s\", std::string(str, p).c_str());\n\n    const char ch = p[1];\n    str = p + 2;\n    if (ch == '@') {\n      ColoredPrintf(color, \"@\");\n    } else if (ch == 'D') {\n      color = COLOR_DEFAULT;\n    } else if (ch == 'R') {\n      color = COLOR_RED;\n    } else if (ch == 'G') {\n      color = COLOR_GREEN;\n    } else if (ch == 'Y') {\n      color = COLOR_YELLOW;\n    } else {\n      --str;\n    }\n  }\n}\n\nstatic const char kColorEncodedHelpMessage[] =\n\"This program contains tests written using \" GTEST_NAME_ \". 
You can use the\\n\"\n\"following command line flags to control its behavior:\\n\"\n\"\\n\"\n\"Test Selection:\\n\"\n\"  @G--\" GTEST_FLAG_PREFIX_ \"list_tests@D\\n\"\n\"      List the names of all tests instead of running them. The name of\\n\"\n\"      TEST(Foo, Bar) is \\\"Foo.Bar\\\".\\n\"\n\"  @G--\" GTEST_FLAG_PREFIX_ \"filter=@YPOSTIVE_PATTERNS\"\n    \"[@G-@YNEGATIVE_PATTERNS]@D\\n\"\n\"      Run only the tests whose name matches one of the positive patterns but\\n\"\n\"      none of the negative patterns. '?' matches any single character; '*'\\n\"\n\"      matches any substring; ':' separates two patterns.\\n\"\n\"  @G--\" GTEST_FLAG_PREFIX_ \"also_run_disabled_tests@D\\n\"\n\"      Run all disabled tests too.\\n\"\n\"\\n\"\n\"Test Execution:\\n\"\n\"  @G--\" GTEST_FLAG_PREFIX_ \"repeat=@Y[COUNT]@D\\n\"\n\"      Run the tests repeatedly; use a negative count to repeat forever.\\n\"\n\"  @G--\" GTEST_FLAG_PREFIX_ \"shuffle@D\\n\"\n\"      Randomize tests' orders on every iteration.\\n\"\n\"  @G--\" GTEST_FLAG_PREFIX_ \"random_seed=@Y[NUMBER]@D\\n\"\n\"      Random number seed to use for shuffling test orders (between 1 and\\n\"\n\"      99999, or 0 to use a seed based on the current time).\\n\"\n\"\\n\"\n\"Test Output:\\n\"\n\"  @G--\" GTEST_FLAG_PREFIX_ \"color=@Y(@Gyes@Y|@Gno@Y|@Gauto@Y)@D\\n\"\n\"      Enable/disable colored output. The default is @Gauto@D.\\n\"\n\"  -@G-\" GTEST_FLAG_PREFIX_ \"print_time=0@D\\n\"\n\"      Don't print the elapsed time of each test.\\n\"\n\"  @G--\" GTEST_FLAG_PREFIX_ \"output=@Y(@Gjson@Y|@Gxml@Y)[@G:@YDIRECTORY_PATH@G\"\n    GTEST_PATH_SEP_ \"@Y|@G:@YFILE_PATH]@D\\n\"\n\"      Generate a JSON or XML report in the given directory or with the given\\n\"\n\"      file name. 
@YFILE_PATH@D defaults to @Gtest_detail.xml@D.\\n\"\n# if GTEST_CAN_STREAM_RESULTS_\n\"  @G--\" GTEST_FLAG_PREFIX_ \"stream_result_to=@YHOST@G:@YPORT@D\\n\"\n\"      Stream test results to the given server.\\n\"\n# endif  // GTEST_CAN_STREAM_RESULTS_\n\"\\n\"\n\"Assertion Behavior:\\n\"\n# if GTEST_HAS_DEATH_TEST && !GTEST_OS_WINDOWS\n\"  @G--\" GTEST_FLAG_PREFIX_ \"death_test_style=@Y(@Gfast@Y|@Gthreadsafe@Y)@D\\n\"\n\"      Set the default death test style.\\n\"\n# endif  // GTEST_HAS_DEATH_TEST && !GTEST_OS_WINDOWS\n\"  @G--\" GTEST_FLAG_PREFIX_ \"break_on_failure@D\\n\"\n\"      Turn assertion failures into debugger break-points.\\n\"\n\"  @G--\" GTEST_FLAG_PREFIX_ \"throw_on_failure@D\\n\"\n\"      Turn assertion failures into C++ exceptions for use by an external\\n\"\n\"      test framework.\\n\"\n\"  @G--\" GTEST_FLAG_PREFIX_ \"catch_exceptions=0@D\\n\"\n\"      Do not report exceptions as test failures. Instead, allow them\\n\"\n\"      to crash the program or throw a pop-up (on Windows).\\n\"\n\"\\n\"\n\"Except for @G--\" GTEST_FLAG_PREFIX_ \"list_tests@D, you can alternatively set \"\n    \"the corresponding\\n\"\n\"environment variable of a flag (all letters in upper-case). For example, to\\n\"\n\"disable colored text output, you can either specify @G--\" GTEST_FLAG_PREFIX_\n    \"color=no@D or set\\n\"\n\"the @G\" GTEST_FLAG_PREFIX_UPPER_ \"COLOR@D environment variable to @Gno@D.\\n\"\n\"\\n\"\n\"For more information, please read the \" GTEST_NAME_ \" documentation at\\n\"\n\"@G\" GTEST_PROJECT_URL_ \"@D. 
If you find a bug in \" GTEST_NAME_ \"\\n\"\n\"(not one in your own code or tests), please report it to\\n\"\n\"@G<\" GTEST_DEV_EMAIL_ \">@D.\\n\";\n\nstatic bool ParseGoogleTestFlag(const char* const arg) {\n  return ParseBoolFlag(arg, kAlsoRunDisabledTestsFlag,\n                       &GTEST_FLAG(also_run_disabled_tests)) ||\n      ParseBoolFlag(arg, kBreakOnFailureFlag,\n                    &GTEST_FLAG(break_on_failure)) ||\n      ParseBoolFlag(arg, kCatchExceptionsFlag,\n                    &GTEST_FLAG(catch_exceptions)) ||\n      ParseStringFlag(arg, kColorFlag, &GTEST_FLAG(color)) ||\n      ParseStringFlag(arg, kDeathTestStyleFlag,\n                      &GTEST_FLAG(death_test_style)) ||\n      ParseBoolFlag(arg, kDeathTestUseFork,\n                    &GTEST_FLAG(death_test_use_fork)) ||\n      ParseStringFlag(arg, kFilterFlag, &GTEST_FLAG(filter)) ||\n      ParseStringFlag(arg, kInternalRunDeathTestFlag,\n                      &GTEST_FLAG(internal_run_death_test)) ||\n      ParseBoolFlag(arg, kListTestsFlag, &GTEST_FLAG(list_tests)) ||\n      ParseStringFlag(arg, kOutputFlag, &GTEST_FLAG(output)) ||\n      ParseBoolFlag(arg, kPrintTimeFlag, &GTEST_FLAG(print_time)) ||\n      ParseBoolFlag(arg, kPrintUTF8Flag, &GTEST_FLAG(print_utf8)) ||\n      ParseInt32Flag(arg, kRandomSeedFlag, &GTEST_FLAG(random_seed)) ||\n      ParseInt32Flag(arg, kRepeatFlag, &GTEST_FLAG(repeat)) ||\n      ParseBoolFlag(arg, kShuffleFlag, &GTEST_FLAG(shuffle)) ||\n      ParseInt32Flag(arg, kStackTraceDepthFlag,\n                     &GTEST_FLAG(stack_trace_depth)) ||\n      ParseStringFlag(arg, kStreamResultToFlag,\n                      &GTEST_FLAG(stream_result_to)) ||\n      ParseBoolFlag(arg, kThrowOnFailureFlag,\n                    &GTEST_FLAG(throw_on_failure));\n}\n\n#if GTEST_USE_OWN_FLAGFILE_FLAG_\nstatic void LoadFlagsFromFile(const std::string& path) {\n  FILE* flagfile = posix::FOpen(path.c_str(), \"r\");\n  if (!flagfile) {\n    GTEST_LOG_(FATAL) << \"Unable to open file 
\\\"\" << GTEST_FLAG(flagfile)\n                      << \"\\\"\";\n  }\n  std::string contents(ReadEntireFile(flagfile));\n  posix::FClose(flagfile);\n  std::vector<std::string> lines;\n  SplitString(contents, '\\n', &lines);\n  for (size_t i = 0; i < lines.size(); ++i) {\n    if (lines[i].empty())\n      continue;\n    if (!ParseGoogleTestFlag(lines[i].c_str()))\n      g_help_flag = true;\n  }\n}\n#endif  // GTEST_USE_OWN_FLAGFILE_FLAG_\n\n// Parses the command line for Google Test flags, without initializing\n// other parts of Google Test.  The type parameter CharType can be\n// instantiated to either char or wchar_t.\ntemplate <typename CharType>\nvoid ParseGoogleTestFlagsOnlyImpl(int* argc, CharType** argv) {\n  for (int i = 1; i < *argc; i++) {\n    const std::string arg_string = StreamableToString(argv[i]);\n    const char* const arg = arg_string.c_str();\n\n    using internal::ParseBoolFlag;\n    using internal::ParseInt32Flag;\n    using internal::ParseStringFlag;\n\n    bool remove_flag = false;\n    if (ParseGoogleTestFlag(arg)) {\n      remove_flag = true;\n#if GTEST_USE_OWN_FLAGFILE_FLAG_\n    } else if (ParseStringFlag(arg, kFlagfileFlag, &GTEST_FLAG(flagfile))) {\n      LoadFlagsFromFile(GTEST_FLAG(flagfile));\n      remove_flag = true;\n#endif  // GTEST_USE_OWN_FLAGFILE_FLAG_\n    } else if (arg_string == \"--help\" || arg_string == \"-h\" ||\n               arg_string == \"-?\" || arg_string == \"/?\" ||\n               HasGoogleTestFlagPrefix(arg)) {\n      // Both help flag and unrecognized Google Test flags (excluding\n      // internal ones) trigger help display.\n      g_help_flag = true;\n    }\n\n    if (remove_flag) {\n      // Shift the remainder of the argv list left by one.  Note\n      // that argv has (*argc + 1) elements, the last one always being\n      // NULL.  
The following loop moves the trailing NULL element as\n      // well.\n      for (int j = i; j != *argc; j++) {\n        argv[j] = argv[j + 1];\n      }\n\n      // Decrements the argument count.\n      (*argc)--;\n\n      // We also need to decrement the iterator as we just removed\n      // an element.\n      i--;\n    }\n  }\n\n  if (g_help_flag) {\n    // We print the help here instead of in RUN_ALL_TESTS(), as the\n    // latter may not be called at all if the user is using Google\n    // Test with another testing framework.\n    PrintColorEncoded(kColorEncodedHelpMessage);\n  }\n}\n\n// Parses the command line for Google Test flags, without initializing\n// other parts of Google Test.\nvoid ParseGoogleTestFlagsOnly(int* argc, char** argv) {\n  ParseGoogleTestFlagsOnlyImpl(argc, argv);\n\n  // Fix the value of *_NSGetArgc() on macOS, but if and only if\n  // *_NSGetArgv() == argv\n  // Only applicable to char** version of argv\n#if GTEST_OS_MAC\n#ifndef GTEST_OS_IOS\n  if (*_NSGetArgv() == argv) {\n    *_NSGetArgc() = *argc;\n  }\n#endif\n#endif\n}\nvoid ParseGoogleTestFlagsOnly(int* argc, wchar_t** argv) {\n  ParseGoogleTestFlagsOnlyImpl(argc, argv);\n}\n\n// The internal implementation of InitGoogleTest().\n//\n// The type parameter CharType can be instantiated to either char or\n// wchar_t.\ntemplate <typename CharType>\nvoid InitGoogleTestImpl(int* argc, CharType** argv) {\n  // We don't want to run the initialization code twice.\n  if (GTestIsInitialized()) return;\n\n  if (*argc <= 0) return;\n\n  g_argvs.clear();\n  for (int i = 0; i != *argc; i++) {\n    g_argvs.push_back(StreamableToString(argv[i]));\n  }\n\n#if GTEST_HAS_ABSL\n  absl::InitializeSymbolizer(g_argvs[0].c_str());\n#endif  // GTEST_HAS_ABSL\n\n  ParseGoogleTestFlagsOnly(argc, argv);\n  GetUnitTestImpl()->PostFlagParsingInit();\n}\n\n}  // namespace internal\n\n// Initializes Google Test.  This must be called before calling\n// RUN_ALL_TESTS().  
In particular, it parses a command line for the\n// flags that Google Test recognizes.  Whenever a Google Test flag is\n// seen, it is removed from argv, and *argc is decremented.\n//\n// No value is returned.  Instead, the Google Test flag variables are\n// updated.\n//\n// Calling the function for the second time has no user-visible effect.\nvoid InitGoogleTest(int* argc, char** argv) {\n#if defined(GTEST_CUSTOM_INIT_GOOGLE_TEST_FUNCTION_)\n  GTEST_CUSTOM_INIT_GOOGLE_TEST_FUNCTION_(argc, argv);\n#else  // defined(GTEST_CUSTOM_INIT_GOOGLE_TEST_FUNCTION_)\n  internal::InitGoogleTestImpl(argc, argv);\n#endif  // defined(GTEST_CUSTOM_INIT_GOOGLE_TEST_FUNCTION_)\n}\n\n// This overloaded version can be used in Windows programs compiled in\n// UNICODE mode.\nvoid InitGoogleTest(int* argc, wchar_t** argv) {\n#if defined(GTEST_CUSTOM_INIT_GOOGLE_TEST_FUNCTION_)\n  GTEST_CUSTOM_INIT_GOOGLE_TEST_FUNCTION_(argc, argv);\n#else  // defined(GTEST_CUSTOM_INIT_GOOGLE_TEST_FUNCTION_)\n  internal::InitGoogleTestImpl(argc, argv);\n#endif  // defined(GTEST_CUSTOM_INIT_GOOGLE_TEST_FUNCTION_)\n}\n\n// This overloaded version can be used on Arduino/embedded platforms where\n// there is no argc/argv.\nvoid InitGoogleTest() {\n  // Since Arduino doesn't have a command line, fake out the argc/argv arguments\n  int argc = 1;\n  const auto arg0 = \"dummy\";\n  char* argv0 = const_cast<char*>(arg0);\n  char** argv = &argv0;\n\n#if defined(GTEST_CUSTOM_INIT_GOOGLE_TEST_FUNCTION_)\n  GTEST_CUSTOM_INIT_GOOGLE_TEST_FUNCTION_(&argc, argv);\n#else  // defined(GTEST_CUSTOM_INIT_GOOGLE_TEST_FUNCTION_)\n  internal::InitGoogleTestImpl(&argc, argv);\n#endif  // defined(GTEST_CUSTOM_INIT_GOOGLE_TEST_FUNCTION_)\n}\n\nstd::string TempDir() {\n#if defined(GTEST_CUSTOM_TEMPDIR_FUNCTION_)\n  return GTEST_CUSTOM_TEMPDIR_FUNCTION_();\n#endif\n\n#if GTEST_OS_WINDOWS_MOBILE\n  return \"\\\\temp\\\\\";\n#elif GTEST_OS_WINDOWS\n  const char* temp_dir = internal::posix::GetEnv(\"TEMP\");\n  if (temp_dir == nullptr 
|| temp_dir[0] == '\\0')\n    return \"\\\\temp\\\\\";\n  else if (temp_dir[strlen(temp_dir) - 1] == '\\\\')\n    return temp_dir;\n  else\n    return std::string(temp_dir) + \"\\\\\";\n#elif GTEST_OS_LINUX_ANDROID\n  return \"/sdcard/\";\n#else\n  return \"/tmp/\";\n#endif  // GTEST_OS_WINDOWS_MOBILE\n}\n\n// Class ScopedTrace\n\n// Pushes the given source file location and message onto a per-thread\n// trace stack maintained by Google Test.\nvoid ScopedTrace::PushTrace(const char* file, int line, std::string message) {\n  internal::TraceInfo trace;\n  trace.file = file;\n  trace.line = line;\n  trace.message.swap(message);\n\n  UnitTest::GetInstance()->PushGTestTrace(trace);\n}\n\n// Pops the info pushed by the c'tor.\nScopedTrace::~ScopedTrace()\n    GTEST_LOCK_EXCLUDED_(&UnitTest::mutex_) {\n  UnitTest::GetInstance()->PopGTestTrace();\n}\n\n}  // namespace testing\n"
  },
  {
    "path": "test/googletest/src/gtest_main.cc",
    "content": "// Copyright 2006, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n#include <cstdio>\n#include \"gtest/gtest.h\"\n\n#if GTEST_OS_ESP8266 || GTEST_OS_ESP32\n#if GTEST_OS_ESP8266\nextern \"C\" {\n#endif\nvoid setup() {\n  testing::InitGoogleTest();\n}\n\nvoid loop() { RUN_ALL_TESTS(); }\n\n#if GTEST_OS_ESP8266\n}\n#endif\n\n#else\n\nGTEST_API_ int main(int argc, char **argv) {\n  printf(\"Running main() from %s\\n\", __FILE__);\n  testing::InitGoogleTest(&argc, argv);\n  return RUN_ALL_TESTS();\n}\n#endif\n"
  },
  {
    "path": "test/googletest/test/BUILD.bazel",
    "content": "# Copyright 2017 Google Inc.\n# All Rights Reserved.\n#\n#\n# Redistribution and use in source and binary forms, with or without\n# modification, are permitted provided that the following conditions are\n# met:\n#\n#     * Redistributions of source code must retain the above copyright\n# notice, this list of conditions and the following disclaimer.\n#     * Redistributions in binary form must reproduce the above\n# copyright notice, this list of conditions and the following disclaimer\n# in the documentation and/or other materials provided with the\n# distribution.\n#     * Neither the name of Google Inc. nor the names of its\n# contributors may be used to endorse or promote products derived from\n# this software without specific prior written permission.\n#\n# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n# \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n#\n# Author: misterg@google.com (Gennadiy Civil)\n#\n# Bazel BUILD for The Google C++ Testing Framework (Google Test)\n\nload(\"@rules_cc//cc:defs.bzl\", \"cc_binary\", \"cc_test\")\nload(\"@rules_python//python:defs.bzl\", \"py_library\", \"py_test\")\n\nlicenses([\"notice\"])\n\n#on windows exclude gtest-tuple.h\ncc_test(\n    name = \"gtest_all_test\",\n    size = \"small\",\n    srcs = glob(\n        include = [\n            \"gtest-*.cc\",\n            \"googletest-*.cc\",\n            \"*.h\",\n            \"googletest/include/gtest/**/*.h\",\n        ],\n        exclude = [\n            \"gtest-unittest-api_test.cc\",\n            \"googletest/src/gtest-all.cc\",\n            \"gtest_all_test.cc\",\n            \"gtest-death-test_ex_test.cc\",\n            \"gtest-listener_test.cc\",\n            \"gtest-unittest-api_test.cc\",\n            \"googletest-param-test-test.cc\",\n            \"googletest-catch-exceptions-test_.cc\",\n            \"googletest-color-test_.cc\",\n            \"googletest-env-var-test_.cc\",\n            \"googletest-filter-unittest_.cc\",\n            \"googletest-break-on-failure-unittest_.cc\",\n            \"googletest-listener-test.cc\",\n            \"googletest-output-test_.cc\",\n            \"googletest-list-tests-unittest_.cc\",\n            \"googletest-shuffle-test_.cc\",\n            \"googletest-uninitialized-test_.cc\",\n            \"googletest-death-test_ex_test.cc\",\n            
\"googletest-param-test-test\",\n            \"googletest-throw-on-failure-test_.cc\",\n            \"googletest-param-test-invalid-name1-test_.cc\",\n            \"googletest-param-test-invalid-name2-test_.cc\",\n        ],\n    ) + select({\n        \"//:windows\": [],\n        \"//conditions:default\": [],\n    }),\n    copts = select({\n        \"//:windows\": [\"-DGTEST_USE_OWN_TR1_TUPLE=0\"],\n        \"//conditions:default\": [\"-DGTEST_USE_OWN_TR1_TUPLE=1\"],\n    }),\n    includes = [\n        \"googletest\",\n        \"googletest/include\",\n        \"googletest/include/internal\",\n        \"googletest/test\",\n    ],\n    linkopts = select({\n        \"//:windows\": [],\n        \"//conditions:default\": [\"-pthread\"],\n    }),\n    deps = [\"//:gtest_main\"],\n)\n\n# Tests death tests.\ncc_test(\n    name = \"googletest-death-test-test\",\n    size = \"medium\",\n    srcs = [\"googletest-death-test-test.cc\"],\n    deps = [\"//:gtest_main\"],\n)\n\ncc_test(\n    name = \"gtest_test_macro_stack_footprint_test\",\n    size = \"small\",\n    srcs = [\"gtest_test_macro_stack_footprint_test.cc\"],\n    deps = [\"//:gtest\"],\n)\n\n#These googletest tests have their own main()\ncc_test(\n    name = \"googletest-listener-test\",\n    size = \"small\",\n    srcs = [\"googletest-listener-test.cc\"],\n    deps = [\"//:gtest_main\"],\n)\n\ncc_test(\n    name = \"gtest-unittest-api_test\",\n    size = \"small\",\n    srcs = [\n        \"gtest-unittest-api_test.cc\",\n    ],\n    deps = [\n        \"//:gtest\",\n    ],\n)\n\ncc_test(\n    name = \"googletest-param-test-test\",\n    size = \"small\",\n    srcs = [\n        \"googletest-param-test-test.cc\",\n        \"googletest-param-test-test.h\",\n        \"googletest-param-test2-test.cc\",\n    ],\n    deps = [\"//:gtest\"],\n)\n\ncc_test(\n    name = \"gtest_unittest\",\n    size = \"small\",\n    srcs = [\"gtest_unittest.cc\"],\n    args = [\"--heap_check=strict\"],\n    shard_count = 2,\n    deps = 
[\"//:gtest_main\"],\n)\n\n#  Py tests\n\npy_library(\n    name = \"gtest_test_utils\",\n    testonly = 1,\n    srcs = [\"gtest_test_utils.py\"],\n)\n\ncc_binary(\n    name = \"gtest_help_test_\",\n    testonly = 1,\n    srcs = [\"gtest_help_test_.cc\"],\n    deps = [\"//:gtest_main\"],\n)\n\npy_test(\n    name = \"gtest_help_test\",\n    size = \"small\",\n    srcs = [\"gtest_help_test.py\"],\n    data = [\":gtest_help_test_\"],\n    deps = [\":gtest_test_utils\"],\n)\n\ncc_binary(\n    name = \"googletest-output-test_\",\n    testonly = 1,\n    srcs = [\"googletest-output-test_.cc\"],\n    deps = [\"//:gtest\"],\n)\n\npy_test(\n    name = \"googletest-output-test\",\n    size = \"small\",\n    srcs = [\"googletest-output-test.py\"],\n    args = select({\n        \"//:has_absl\": [],\n        \"//conditions:default\": [\"--no_stacktrace_support\"],\n    }),\n    data = [\n        \"googletest-output-test-golden-lin.txt\",\n        \":googletest-output-test_\",\n    ],\n    deps = [\":gtest_test_utils\"],\n)\n\ncc_binary(\n    name = \"googletest-color-test_\",\n    testonly = 1,\n    srcs = [\"googletest-color-test_.cc\"],\n    deps = [\"//:gtest\"],\n)\n\npy_test(\n    name = \"googletest-color-test\",\n    size = \"small\",\n    srcs = [\"googletest-color-test.py\"],\n    data = [\":googletest-color-test_\"],\n    deps = [\":gtest_test_utils\"],\n)\n\ncc_binary(\n    name = \"googletest-env-var-test_\",\n    testonly = 1,\n    srcs = [\"googletest-env-var-test_.cc\"],\n    deps = [\"//:gtest\"],\n)\n\npy_test(\n    name = \"googletest-env-var-test\",\n    size = \"medium\",\n    srcs = [\"googletest-env-var-test.py\"],\n    data = [\":googletest-env-var-test_\"],\n    deps = [\":gtest_test_utils\"],\n)\n\ncc_binary(\n    name = \"googletest-filter-unittest_\",\n    testonly = 1,\n    srcs = [\"googletest-filter-unittest_.cc\"],\n    deps = [\"//:gtest\"],\n)\n\npy_test(\n    name = \"googletest-filter-unittest\",\n    size = \"medium\",\n    srcs = 
[\"googletest-filter-unittest.py\"],\n    data = [\":googletest-filter-unittest_\"],\n    deps = [\":gtest_test_utils\"],\n)\n\ncc_binary(\n    name = \"googletest-break-on-failure-unittest_\",\n    testonly = 1,\n    srcs = [\"googletest-break-on-failure-unittest_.cc\"],\n    deps = [\"//:gtest\"],\n)\n\npy_test(\n    name = \"googletest-break-on-failure-unittest\",\n    size = \"small\",\n    srcs = [\"googletest-break-on-failure-unittest.py\"],\n    data = [\":googletest-break-on-failure-unittest_\"],\n    deps = [\":gtest_test_utils\"],\n)\n\ncc_test(\n    name = \"gtest_assert_by_exception_test\",\n    size = \"small\",\n    srcs = [\"gtest_assert_by_exception_test.cc\"],\n    deps = [\"//:gtest\"],\n)\n\ncc_binary(\n    name = \"googletest-throw-on-failure-test_\",\n    testonly = 1,\n    srcs = [\"googletest-throw-on-failure-test_.cc\"],\n    deps = [\"//:gtest\"],\n)\n\npy_test(\n    name = \"googletest-throw-on-failure-test\",\n    size = \"small\",\n    srcs = [\"googletest-throw-on-failure-test.py\"],\n    data = [\":googletest-throw-on-failure-test_\"],\n    deps = [\":gtest_test_utils\"],\n)\n\ncc_binary(\n    name = \"googletest-list-tests-unittest_\",\n    testonly = 1,\n    srcs = [\"googletest-list-tests-unittest_.cc\"],\n    deps = [\"//:gtest\"],\n)\n\ncc_test(\n    name = \"gtest_skip_test\",\n    size = \"small\",\n    srcs = [\"gtest_skip_test.cc\"],\n    deps = [\"//:gtest_main\"],\n)\n\ncc_test(\n    name = \"gtest_skip_in_environment_setup_test\",\n    size = \"small\",\n    srcs = [\"gtest_skip_in_environment_setup_test.cc\"],\n    deps = [\"//:gtest_main\"],\n)\n\npy_test(\n    name = \"gtest_skip_check_output_test\",\n    size = \"small\",\n    srcs = [\"gtest_skip_check_output_test.py\"],\n    data = [\":gtest_skip_test\"],\n    deps = [\":gtest_test_utils\"],\n)\n\npy_test(\n    name = \"gtest_skip_environment_check_output_test\",\n    size = \"small\",\n    srcs = [\"gtest_skip_environment_check_output_test.py\"],\n    data = [\n      
  \":gtest_skip_in_environment_setup_test\",\n    ],\n    deps = [\":gtest_test_utils\"],\n)\n\npy_test(\n    name = \"googletest-list-tests-unittest\",\n    size = \"small\",\n    srcs = [\"googletest-list-tests-unittest.py\"],\n    data = [\":googletest-list-tests-unittest_\"],\n    deps = [\":gtest_test_utils\"],\n)\n\ncc_binary(\n    name = \"googletest-shuffle-test_\",\n    srcs = [\"googletest-shuffle-test_.cc\"],\n    deps = [\"//:gtest\"],\n)\n\npy_test(\n    name = \"googletest-shuffle-test\",\n    size = \"small\",\n    srcs = [\"googletest-shuffle-test.py\"],\n    data = [\":googletest-shuffle-test_\"],\n    deps = [\":gtest_test_utils\"],\n)\n\ncc_binary(\n    name = \"googletest-catch-exceptions-no-ex-test_\",\n    testonly = 1,\n    srcs = [\"googletest-catch-exceptions-test_.cc\"],\n    deps = [\"//:gtest_main\"],\n)\n\ncc_binary(\n    name = \"googletest-catch-exceptions-ex-test_\",\n    testonly = 1,\n    srcs = [\"googletest-catch-exceptions-test_.cc\"],\n    copts = [\"-fexceptions\"],\n    deps = [\"//:gtest_main\"],\n)\n\npy_test(\n    name = \"googletest-catch-exceptions-test\",\n    size = \"small\",\n    srcs = [\"googletest-catch-exceptions-test.py\"],\n    data = [\n        \":googletest-catch-exceptions-ex-test_\",\n        \":googletest-catch-exceptions-no-ex-test_\",\n    ],\n    deps = [\":gtest_test_utils\"],\n)\n\ncc_binary(\n    name = \"gtest_xml_output_unittest_\",\n    testonly = 1,\n    srcs = [\"gtest_xml_output_unittest_.cc\"],\n    deps = [\"//:gtest\"],\n)\n\ncc_test(\n    name = \"gtest_no_test_unittest\",\n    size = \"small\",\n    srcs = [\"gtest_no_test_unittest.cc\"],\n    deps = [\"//:gtest\"],\n)\n\npy_test(\n    name = \"gtest_xml_output_unittest\",\n    size = \"small\",\n    srcs = [\n        \"gtest_xml_output_unittest.py\",\n        \"gtest_xml_test_utils.py\",\n    ],\n    args = select({\n        \"//:has_absl\": [],\n        \"//conditions:default\": [\"--no_stacktrace_support\"],\n    }),\n    data = [\n     
   # We invoke gtest_no_test_unittest to verify the XML output\n        # when the test program contains no test definition.\n        \":gtest_no_test_unittest\",\n        \":gtest_xml_output_unittest_\",\n    ],\n    deps = [\":gtest_test_utils\"],\n)\n\ncc_binary(\n    name = \"gtest_xml_outfile1_test_\",\n    testonly = 1,\n    srcs = [\"gtest_xml_outfile1_test_.cc\"],\n    deps = [\"//:gtest_main\"],\n)\n\ncc_binary(\n    name = \"gtest_xml_outfile2_test_\",\n    testonly = 1,\n    srcs = [\"gtest_xml_outfile2_test_.cc\"],\n    deps = [\"//:gtest_main\"],\n)\n\npy_test(\n    name = \"gtest_xml_outfiles_test\",\n    size = \"small\",\n    srcs = [\n        \"gtest_xml_outfiles_test.py\",\n        \"gtest_xml_test_utils.py\",\n    ],\n    data = [\n        \":gtest_xml_outfile1_test_\",\n        \":gtest_xml_outfile2_test_\",\n    ],\n    deps = [\":gtest_test_utils\"],\n)\n\ncc_binary(\n    name = \"googletest-uninitialized-test_\",\n    testonly = 1,\n    srcs = [\"googletest-uninitialized-test_.cc\"],\n    deps = [\"//:gtest\"],\n)\n\npy_test(\n    name = \"googletest-uninitialized-test\",\n    size = \"medium\",\n    srcs = [\"googletest-uninitialized-test.py\"],\n    data = [\"googletest-uninitialized-test_\"],\n    deps = [\":gtest_test_utils\"],\n)\n\ncc_binary(\n    name = \"gtest_testbridge_test_\",\n    testonly = 1,\n    srcs = [\"gtest_testbridge_test_.cc\"],\n    deps = [\"//:gtest_main\"],\n)\n\n# Tests that filtering via testbridge works\npy_test(\n    name = \"gtest_testbridge_test\",\n    size = \"small\",\n    srcs = [\"gtest_testbridge_test.py\"],\n    data = [\":gtest_testbridge_test_\"],\n    deps = [\":gtest_test_utils\"],\n)\n\npy_test(\n    name = \"googletest-json-outfiles-test\",\n    size = \"small\",\n    srcs = [\n        \"googletest-json-outfiles-test.py\",\n        \"gtest_json_test_utils.py\",\n    ],\n    data = [\n        \":gtest_xml_outfile1_test_\",\n        \":gtest_xml_outfile2_test_\",\n    ],\n    deps = 
[\":gtest_test_utils\"],\n)\n\npy_test(\n    name = \"googletest-json-output-unittest\",\n    size = \"medium\",\n    srcs = [\n        \"googletest-json-output-unittest.py\",\n        \"gtest_json_test_utils.py\",\n    ],\n    args = select({\n        \"//:has_absl\": [],\n        \"//conditions:default\": [\"--no_stacktrace_support\"],\n    }),\n    data = [\n        # We invoke gtest_no_test_unittest to verify the JSON output\n        # when the test program contains no test definition.\n        \":gtest_no_test_unittest\",\n        \":gtest_xml_output_unittest_\",\n    ],\n    deps = [\":gtest_test_utils\"],\n)\n\n# Verifies interaction of death tests and exceptions.\ncc_test(\n    name = \"googletest-death-test_ex_catch_test\",\n    size = \"medium\",\n    srcs = [\"googletest-death-test_ex_test.cc\"],\n    copts = [\"-fexceptions\"],\n    defines = [\"GTEST_ENABLE_CATCH_EXCEPTIONS_=1\"],\n    deps = [\"//:gtest\"],\n)\n\ncc_binary(\n    name = \"googletest-param-test-invalid-name1-test_\",\n    testonly = 1,\n    srcs = [\"googletest-param-test-invalid-name1-test_.cc\"],\n    deps = [\"//:gtest\"],\n)\n\ncc_binary(\n    name = \"googletest-param-test-invalid-name2-test_\",\n    testonly = 1,\n    srcs = [\"googletest-param-test-invalid-name2-test_.cc\"],\n    deps = [\"//:gtest\"],\n)\n\npy_test(\n    name = \"googletest-param-test-invalid-name1-test\",\n    size = \"small\",\n    srcs = [\"googletest-param-test-invalid-name1-test.py\"],\n    data = [\":googletest-param-test-invalid-name1-test_\"],\n    deps = [\":gtest_test_utils\"],\n)\n\npy_test(\n    name = \"googletest-param-test-invalid-name2-test\",\n    size = \"small\",\n    srcs = [\"googletest-param-test-invalid-name2-test.py\"],\n    data = [\":googletest-param-test-invalid-name2-test_\"],\n    deps = [\":gtest_test_utils\"],\n)\n"
  },
  {
    "path": "test/googletest/test/googletest-break-on-failure-unittest.py",
    "content": "#!/usr/bin/env python\n#\n# Copyright 2006, Google Inc.\n# All rights reserved.\n#\n# Redistribution and use in source and binary forms, with or without\n# modification, are permitted provided that the following conditions are\n# met:\n#\n#     * Redistributions of source code must retain the above copyright\n# notice, this list of conditions and the following disclaimer.\n#     * Redistributions in binary form must reproduce the above\n# copyright notice, this list of conditions and the following disclaimer\n# in the documentation and/or other materials provided with the\n# distribution.\n#     * Neither the name of Google Inc. nor the names of its\n# contributors may be used to endorse or promote products derived from\n# this software without specific prior written permission.\n#\n# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n# \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\"\"\"Unit test for Google Test's break-on-failure mode.\n\nA user can ask Google Test to seg-fault when an assertion fails, using\neither the GTEST_BREAK_ON_FAILURE environment variable or the\n--gtest_break_on_failure flag.  This script tests such functionality\nby invoking googletest-break-on-failure-unittest_ (a program written with\nGoogle Test) with different environments and command line flags.\n\"\"\"\n\nimport os\nimport gtest_test_utils\n\n# Constants.\n\nIS_WINDOWS = os.name == 'nt'\n\n# The environment variable for enabling/disabling the break-on-failure mode.\nBREAK_ON_FAILURE_ENV_VAR = 'GTEST_BREAK_ON_FAILURE'\n\n# The command line flag for enabling/disabling the break-on-failure mode.\nBREAK_ON_FAILURE_FLAG = 'gtest_break_on_failure'\n\n# The environment variable for enabling/disabling the throw-on-failure mode.\nTHROW_ON_FAILURE_ENV_VAR = 'GTEST_THROW_ON_FAILURE'\n\n# The environment variable for enabling/disabling the catch-exceptions mode.\nCATCH_EXCEPTIONS_ENV_VAR = 'GTEST_CATCH_EXCEPTIONS'\n\n# Path to the googletest-break-on-failure-unittest_ program.\nEXE_PATH = gtest_test_utils.GetTestExecutablePath(\n    'googletest-break-on-failure-unittest_')\n\n\nenviron = gtest_test_utils.environ\nSetEnvVar = gtest_test_utils.SetEnvVar\n\n# Tests in this file run a Google-Test-based test program and expect it\n# to terminate prematurely.  Therefore they are incompatible with\n# the premature-exit-file protocol by design.  Unset the\n# premature-exit filepath to prevent Google Test from creating\n# the file.\nSetEnvVar(gtest_test_utils.PREMATURE_EXIT_FILE_ENV_VAR, None)\n\n\ndef Run(command):\n  \"\"\"Runs a command; returns 1 if it was killed by a signal, or 0 otherwise.\"\"\"\n\n  p = gtest_test_utils.Subprocess(command, env=environ)\n  if p.terminated_by_signal:\n    return 1\n  else:\n    return 0\n\n\n# The tests.\n\n\nclass GTestBreakOnFailureUnitTest(gtest_test_utils.TestCase):\n  \"\"\"Tests using the GTEST_BREAK_ON_FAILURE environment variable or\n  the --gtest_break_on_failure flag to turn assertion failures into\n  segmentation faults.\n  \"\"\"\n\n  def RunAndVerify(self, env_var_value, flag_value, expect_seg_fault):\n    \"\"\"Runs googletest-break-on-failure-unittest_ and verifies that it does\n    (or does not) have a seg-fault.\n\n    Args:\n      env_var_value:    value of the GTEST_BREAK_ON_FAILURE environment\n                        variable; None if the variable should be unset.\n      flag_value:       value of the --gtest_break_on_failure flag;\n                        None if the flag should not be present.\n      expect_seg_fault: 1 if the program is expected to generate a seg-fault;\n                        0 otherwise.\n    \"\"\"\n\n    SetEnvVar(BREAK_ON_FAILURE_ENV_VAR, env_var_value)\n\n    if env_var_value is None:\n      env_var_value_msg = ' is not set'\n    else:\n      env_var_value_msg = '=' + env_var_value\n\n    if flag_value is None:\n      flag = ''\n    elif flag_value == '0':\n      flag = '--%s=0' % BREAK_ON_FAILURE_FLAG\n    else:\n      flag = '--%s' % BREAK_ON_FAILURE_FLAG\n\n    command = [EXE_PATH]\n    if flag:\n      command.append(flag)\n\n    if expect_seg_fault:\n      should_or_not = 'should'\n    else:\n      should_or_not = 'should not'\n\n    has_seg_fault = Run(command)\n\n    SetEnvVar(BREAK_ON_FAILURE_ENV_VAR, None)\n\n    msg = ('when %s%s, an assertion failure in \"%s\" %s cause a seg-fault.' %\n           (BREAK_ON_FAILURE_ENV_VAR, env_var_value_msg, ' '.join(command),\n            should_or_not))\n    self.assert_(has_seg_fault == expect_seg_fault, msg)\n\n  def testDefaultBehavior(self):\n    \"\"\"Tests the behavior of the default mode.\"\"\"\n\n    self.RunAndVerify(env_var_value=None,\n                      flag_value=None,\n                      expect_seg_fault=0)\n\n  def testEnvVar(self):\n    \"\"\"Tests using the GTEST_BREAK_ON_FAILURE environment variable.\"\"\"\n\n    self.RunAndVerify(env_var_value='0',\n                      flag_value=None,\n                      expect_seg_fault=0)\n    self.RunAndVerify(env_var_value='1',\n                      flag_value=None,\n                      expect_seg_fault=1)\n\n  def testFlag(self):\n    \"\"\"Tests using the --gtest_break_on_failure flag.\"\"\"\n\n    self.RunAndVerify(env_var_value=None,\n                      flag_value='0',\n                      expect_seg_fault=0)\n    self.RunAndVerify(env_var_value=None,\n                      flag_value='1',\n                      expect_seg_fault=1)\n\n  def testFlagOverridesEnvVar(self):\n    \"\"\"Tests that the flag overrides the environment variable.\"\"\"\n\n    self.RunAndVerify(env_var_value='0',\n                      flag_value='0',\n                      expect_seg_fault=0)\n    self.RunAndVerify(env_var_value='0',\n                      flag_value='1',\n                      expect_seg_fault=1)\n    self.RunAndVerify(env_var_value='1',\n                      flag_value='0',\n                      expect_seg_fault=0)\n    self.RunAndVerify(env_var_value='1',\n                      flag_value='1',\n                      expect_seg_fault=1)\n\n  def testBreakOnFailureOverridesThrowOnFailure(self):\n    \"\"\"Tests that gtest_break_on_failure overrides gtest_throw_on_failure.\"\"\"\n\n    SetEnvVar(THROW_ON_FAILURE_ENV_VAR, '1')\n    try:\n      self.RunAndVerify(env_var_value=None,\n                        flag_value='1',\n                        expect_seg_fault=1)\n    finally:\n      SetEnvVar(THROW_ON_FAILURE_ENV_VAR, None)\n\n  if IS_WINDOWS:\n    def testCatchExceptionsDoesNotInterfere(self):\n      \"\"\"Tests that gtest_catch_exceptions doesn't interfere.\"\"\"\n\n      SetEnvVar(CATCH_EXCEPTIONS_ENV_VAR, '1')\n      try:\n        self.RunAndVerify(env_var_value='1',\n                          flag_value='1',\n                          expect_seg_fault=1)\n      finally:\n        SetEnvVar(CATCH_EXCEPTIONS_ENV_VAR, None)\n\n\nif __name__ == '__main__':\n  gtest_test_utils.Main()\n"
  },
  {
    "path": "test/googletest/test/googletest-break-on-failure-unittest_.cc",
    "content": "// Copyright 2006, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\n// Unit test for Google Test's break-on-failure mode.\n//\n// A user can ask Google Test to seg-fault when an assertion fails, using\n// either the GTEST_BREAK_ON_FAILURE environment variable or the\n// --gtest_break_on_failure flag.  This file is used for testing such\n// functionality.\n//\n// This program will be invoked from a Python unit test.  It is\n// expected to fail.  Don't run it directly.\n\n#include \"gtest/gtest.h\"\n\n#if GTEST_OS_WINDOWS\n# include <windows.h>\n# include <stdlib.h>\n#endif\n\nnamespace {\n\n// A test that's expected to fail.\nTEST(Foo, Bar) {\n  EXPECT_EQ(2, 3);\n}\n\n#if GTEST_HAS_SEH && !GTEST_OS_WINDOWS_MOBILE\n// On Windows Mobile global exception handlers are not supported.\nLONG WINAPI ExitWithExceptionCode(\n    struct _EXCEPTION_POINTERS* exception_pointers) {\n  exit(exception_pointers->ExceptionRecord->ExceptionCode);\n}\n#endif\n\n}  // namespace\n\nint main(int argc, char **argv) {\n#if GTEST_OS_WINDOWS\n  // Suppresses display of the Windows error dialog upon encountering\n  // a general protection fault (segment violation).\n  SetErrorMode(SEM_NOGPFAULTERRORBOX | SEM_FAILCRITICALERRORS);\n\n# if GTEST_HAS_SEH && !GTEST_OS_WINDOWS_MOBILE\n\n  // The default unhandled exception filter does not always exit\n  // with the exception code as exit code - for example it exits with\n  // 0 for EXCEPTION_ACCESS_VIOLATION and 1 for EXCEPTION_BREAKPOINT\n  // if the application is compiled in debug mode. Thus we use our own\n  // filter which always exits with the exception code for unhandled\n  // exceptions.\n  SetUnhandledExceptionFilter(ExitWithExceptionCode);\n\n# endif\n#endif  // GTEST_OS_WINDOWS\n  testing::InitGoogleTest(&argc, argv);\n\n  return RUN_ALL_TESTS();\n}\n"
  },
  {
    "path": "test/googletest/test/googletest-catch-exceptions-test.py",
    "content": "#!/usr/bin/env python\n#\n# Copyright 2010 Google Inc.  All Rights Reserved.\n#\n# Redistribution and use in source and binary forms, with or without\n# modification, are permitted provided that the following conditions are\n# met:\n#\n#     * Redistributions of source code must retain the above copyright\n# notice, this list of conditions and the following disclaimer.\n#     * Redistributions in binary form must reproduce the above\n# copyright notice, this list of conditions and the following disclaimer\n# in the documentation and/or other materials provided with the\n# distribution.\n#     * Neither the name of Google Inc. nor the names of its\n# contributors may be used to endorse or promote products derived from\n# this software without specific prior written permission.\n#\n# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n# \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\"\"\"Tests Google Test's exception catching behavior.\n\nThis script invokes googletest-catch-exceptions-test_ and\ngoogletest-catch-exceptions-ex-test_ (programs written with\nGoogle Test) and verifies their output.\n\"\"\"\n\nimport gtest_test_utils\n\n# Constants.\nFLAG_PREFIX = '--gtest_'\nLIST_TESTS_FLAG = FLAG_PREFIX + 'list_tests'\nNO_CATCH_EXCEPTIONS_FLAG = FLAG_PREFIX + 'catch_exceptions=0'\nFILTER_FLAG = FLAG_PREFIX + 'filter'\n\n# Path to the googletest-catch-exceptions-ex-test_ binary, compiled with\n# exceptions enabled.\nEX_EXE_PATH = gtest_test_utils.GetTestExecutablePath(\n    'googletest-catch-exceptions-ex-test_')\n\n# Path to the googletest-catch-exceptions-test_ binary, compiled with\n# exceptions disabled.\nEXE_PATH = gtest_test_utils.GetTestExecutablePath(\n    'googletest-catch-exceptions-no-ex-test_')\n\nenviron = gtest_test_utils.environ\nSetEnvVar = gtest_test_utils.SetEnvVar\n\n# Tests in this file run a Google-Test-based test program and expect it\n# to terminate prematurely.  Therefore they are incompatible with\n# the premature-exit-file protocol by design.  Unset the\n# premature-exit filepath to prevent Google Test from creating\n# the file.\nSetEnvVar(gtest_test_utils.PREMATURE_EXIT_FILE_ENV_VAR, None)\n\nTEST_LIST = gtest_test_utils.Subprocess(\n    [EXE_PATH, LIST_TESTS_FLAG], env=environ).output\n\nSUPPORTS_SEH_EXCEPTIONS = 'ThrowsSehException' in TEST_LIST\n\nif SUPPORTS_SEH_EXCEPTIONS:\n  BINARY_OUTPUT = gtest_test_utils.Subprocess([EXE_PATH], env=environ).output\n\nEX_BINARY_OUTPUT = gtest_test_utils.Subprocess(\n    [EX_EXE_PATH], env=environ).output\n\n\n# The tests.\nif SUPPORTS_SEH_EXCEPTIONS:\n  # pylint:disable-msg=C6302\n  class CatchSehExceptionsTest(gtest_test_utils.TestCase):\n    \"\"\"Tests exception-catching behavior.\"\"\"\n\n\n    def TestSehExceptions(self, test_output):\n      self.assert_('SEH exception with code 0x2a thrown '\n                   'in the test fixture\\'s constructor'\n                   in test_output)\n      self.assert_('SEH exception with code 0x2a thrown '\n                   'in the test fixture\\'s destructor'\n                   in test_output)\n      self.assert_('SEH exception with code 0x2a thrown in SetUpTestSuite()'\n                   in test_output)\n      self.assert_('SEH exception with code 0x2a thrown in TearDownTestSuite()'\n                   in test_output)\n      self.assert_('SEH exception with code 0x2a thrown in SetUp()'\n                   in test_output)\n      self.assert_('SEH exception with code 0x2a thrown in TearDown()'\n                   in test_output)\n      self.assert_('SEH exception with code 0x2a thrown in the test body'\n                   in test_output)\n\n    def testCatchesSehExceptionsWithCxxExceptionsEnabled(self):\n      self.TestSehExceptions(EX_BINARY_OUTPUT)\n\n    def testCatchesSehExceptionsWithCxxExceptionsDisabled(self):\n      self.TestSehExceptions(BINARY_OUTPUT)\n\n\nclass CatchCxxExceptionsTest(gtest_test_utils.TestCase):\n  \"\"\"Tests C++ exception-catching behavior.\n\n     Tests in this test case verify that:\n     * C++ exceptions are caught and logged as C++ (not SEH) exceptions\n     * Exception thrown affect the remainder of the test work flow in the\n       expected manner.\n  \"\"\"\n\n  def testCatchesCxxExceptionsInFixtureConstructor(self):\n    self.assertTrue(\n        'C++ exception with description '\n        '\"Standard C++ exception\" thrown '\n        'in the test fixture\\'s constructor' in EX_BINARY_OUTPUT,\n        EX_BINARY_OUTPUT)\n    self.assert_('unexpected' not in EX_BINARY_OUTPUT,\n                 'This failure belongs in this test only if '\n                 '\"CxxExceptionInConstructorTest\" (no quotes) '\n                 'appears on the same line as words \"called unexpectedly\"')\n\n  if ('CxxExceptionInDestructorTest.ThrowsExceptionInDestructor' in\n      EX_BINARY_OUTPUT):\n\n    def testCatchesCxxExceptionsInFixtureDestructor(self):\n      self.assertTrue(\n          'C++ exception with description '\n          '\"Standard C++ exception\" thrown '\n          'in the test fixture\\'s destructor' in EX_BINARY_OUTPUT,\n          EX_BINARY_OUTPUT)\n      self.assertTrue(\n          'CxxExceptionInDestructorTest::TearDownTestSuite() '\n          'called as expected.' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT)\n\n  def testCatchesCxxExceptionsInSetUpTestCase(self):\n    self.assertTrue(\n        'C++ exception with description \"Standard C++ exception\"'\n        ' thrown in SetUpTestSuite()' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT)\n    self.assertTrue(\n        'CxxExceptionInConstructorTest::TearDownTestSuite() '\n        'called as expected.' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT)\n    self.assertTrue(\n        'CxxExceptionInSetUpTestSuiteTest constructor '\n        'called as expected.' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT)\n    self.assertTrue(\n        'CxxExceptionInSetUpTestSuiteTest destructor '\n        'called as expected.' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT)\n    self.assertTrue(\n        'CxxExceptionInSetUpTestSuiteTest::SetUp() '\n        'called as expected.' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT)\n    self.assertTrue(\n        'CxxExceptionInSetUpTestSuiteTest::TearDown() '\n        'called as expected.' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT)\n    self.assertTrue(\n        'CxxExceptionInSetUpTestSuiteTest test body '\n        'called as expected.' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT)\n\n  def testCatchesCxxExceptionsInTearDownTestCase(self):\n    self.assertTrue(\n        'C++ exception with description \"Standard C++ exception\"'\n        ' thrown in TearDownTestSuite()' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT)\n\n  def testCatchesCxxExceptionsInSetUp(self):\n    self.assertTrue(\n        'C++ exception with description \"Standard C++ exception\"'\n        ' thrown in SetUp()' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT)\n    self.assertTrue(\n        'CxxExceptionInSetUpTest::TearDownTestSuite() '\n        'called as expected.' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT)\n    self.assertTrue(\n        'CxxExceptionInSetUpTest destructor '\n        'called as expected.' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT)\n    self.assertTrue(\n        'CxxExceptionInSetUpTest::TearDown() '\n        'called as expected.' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT)\n    self.assert_('unexpected' not in EX_BINARY_OUTPUT,\n                 'This failure belongs in this test only if '\n                 '\"CxxExceptionInSetUpTest\" (no quotes) '\n                 'appears on the same line as words \"called unexpectedly\"')\n\n  def testCatchesCxxExceptionsInTearDown(self):\n    self.assertTrue(\n        'C++ exception with description \"Standard C++ exception\"'\n        ' thrown in TearDown()' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT)\n    self.assertTrue(\n        'CxxExceptionInTearDownTest::TearDownTestSuite() '\n        'called as expected.' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT)\n    self.assertTrue(\n        'CxxExceptionInTearDownTest destructor '\n        'called as expected.' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT)\n\n  def testCatchesCxxExceptionsInTestBody(self):\n    self.assertTrue(\n        'C++ exception with description \"Standard C++ exception\"'\n        ' thrown in the test body' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT)\n    self.assertTrue(\n        'CxxExceptionInTestBodyTest::TearDownTestSuite() '\n        'called as expected.' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT)\n    self.assertTrue(\n        'CxxExceptionInTestBodyTest destructor '\n        'called as expected.' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT)\n    self.assertTrue(\n        'CxxExceptionInTestBodyTest::TearDown() '\n        'called as expected.' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT)\n\n  def testCatchesNonStdCxxExceptions(self):\n    self.assertTrue(\n        'Unknown C++ exception thrown in the test body' in EX_BINARY_OUTPUT,\n        EX_BINARY_OUTPUT)\n\n  def testUnhandledCxxExceptionsAbortTheProgram(self):\n    # Filters out SEH exception tests on Windows. Unhandled SEH exceptions\n    # cause tests to show pop-up windows there.\n    FITLER_OUT_SEH_TESTS_FLAG = FILTER_FLAG + '=-*Seh*'\n    # By default, Google Test doesn't catch the exceptions.\n    uncaught_exceptions_ex_binary_output = gtest_test_utils.Subprocess(\n        [EX_EXE_PATH,\n         NO_CATCH_EXCEPTIONS_FLAG,\n         FITLER_OUT_SEH_TESTS_FLAG],\n        env=environ).output\n\n    self.assert_('Unhandled C++ exception terminating the program'\n                 in uncaught_exceptions_ex_binary_output)\n    self.assert_('unexpected' not in uncaught_exceptions_ex_binary_output)\n\n\nif __name__ == '__main__':\n  gtest_test_utils.Main()\n"
  },
  {
    "path": "test/googletest/test/googletest-catch-exceptions-test_.cc",
    "content": "// Copyright 2010, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n//\n// Tests for Google Test itself. Tests in this file throw C++ or SEH\n// exceptions, and the output is verified by\n// googletest-catch-exceptions-test.py.\n\n#include <stdio.h>  // NOLINT\n#include <stdlib.h>  // For exit().\n\n#include \"gtest/gtest.h\"\n\n#if GTEST_HAS_SEH\n# include <windows.h>\n#endif\n\n#if GTEST_HAS_EXCEPTIONS\n# include <exception>  // For set_terminate().\n# include <stdexcept>\n#endif\n\nusing testing::Test;\n\n#if GTEST_HAS_SEH\n\nclass SehExceptionInConstructorTest : public Test {\n public:\n  SehExceptionInConstructorTest() { RaiseException(42, 0, 0, NULL); }\n};\n\nTEST_F(SehExceptionInConstructorTest, ThrowsExceptionInConstructor) {}\n\nclass SehExceptionInDestructorTest : public Test {\n public:\n  ~SehExceptionInDestructorTest() { RaiseException(42, 0, 0, NULL); }\n};\n\nTEST_F(SehExceptionInDestructorTest, ThrowsExceptionInDestructor) {}\n\nclass SehExceptionInSetUpTestSuiteTest : public Test {\n public:\n  static void SetUpTestSuite() { RaiseException(42, 0, 0, NULL); }\n};\n\nTEST_F(SehExceptionInSetUpTestSuiteTest, ThrowsExceptionInSetUpTestSuite) {}\n\nclass SehExceptionInTearDownTestSuiteTest : public Test {\n public:\n  static void TearDownTestSuite() { RaiseException(42, 0, 0, NULL); }\n};\n\nTEST_F(SehExceptionInTearDownTestSuiteTest,\n       ThrowsExceptionInTearDownTestSuite) {}\n\nclass SehExceptionInSetUpTest : public Test {\n protected:\n  virtual void SetUp() { RaiseException(42, 0, 0, NULL); }\n};\n\nTEST_F(SehExceptionInSetUpTest, ThrowsExceptionInSetUp) {}\n\nclass SehExceptionInTearDownTest : public Test {\n protected:\n  virtual void TearDown() { RaiseException(42, 0, 0, NULL); }\n};\n\nTEST_F(SehExceptionInTearDownTest, ThrowsExceptionInTearDown) {}\n\nTEST(SehExceptionTest, ThrowsSehException) {\n  RaiseException(42, 0, 0, NULL);\n}\n\n#endif  // GTEST_HAS_SEH\n\n#if GTEST_HAS_EXCEPTIONS\n\nclass CxxExceptionInConstructorTest : public Test {\n public:\n  CxxExceptionInConstructorTest() {\n    // Without this macro VC++ complains about unreachable code at the end of\n    // the constructor.\n    GTEST_SUPPRESS_UNREACHABLE_CODE_WARNING_BELOW_(\n        throw std::runtime_error(\"Standard C++ exception\"));\n  }\n\n  static void TearDownTestSuite() {\n    printf(\"%s\",\n           \"CxxExceptionInConstructorTest::TearDownTestSuite() \"\n           \"called as expected.\\n\");\n  }\n\n protected:\n  ~CxxExceptionInConstructorTest() override {\n    ADD_FAILURE() << \"CxxExceptionInConstructorTest destructor \"\n                  << \"called unexpectedly.\";\n  }\n\n  void SetUp() override {\n    ADD_FAILURE() << \"CxxExceptionInConstructorTest::SetUp() \"\n                  << \"called unexpectedly.\";\n  }\n\n  void TearDown() override {\n    ADD_FAILURE() << \"CxxExceptionInConstructorTest::TearDown() \"\n                  << \"called unexpectedly.\";\n  }\n};\n\nTEST_F(CxxExceptionInConstructorTest, ThrowsExceptionInConstructor) {\n  ADD_FAILURE() << \"CxxExceptionInConstructorTest test body \"\n                << \"called unexpectedly.\";\n}\n\nclass CxxExceptionInSetUpTestSuiteTest : public Test {\n public:\n  CxxExceptionInSetUpTestSuiteTest() {\n    printf(\"%s\",\n           \"CxxExceptionInSetUpTestSuiteTest constructor \"\n           \"called as expected.\\n\");\n  }\n\n  static void SetUpTestSuite() {\n    throw std::runtime_error(\"Standard C++ exception\");\n  }\n\n  static void TearDownTestSuite() {\n    printf(\"%s\",\n           \"CxxExceptionInSetUpTestSuiteTest::TearDownTestSuite() \"\n           \"called as expected.\\n\");\n  }\n\n protected:\n  ~CxxExceptionInSetUpTestSuiteTest() override {\n    printf(\"%s\",\n           \"CxxExceptionInSetUpTestSuiteTest destructor \"\n           \"called as expected.\\n\");\n  }\n\n  void SetUp() override {\n    printf(\"%s\",\n           \"CxxExceptionInSetUpTestSuiteTest::SetUp() \"\n           \"called as expected.\\n\");\n  }\n\n  void TearDown() override {\n    printf(\"%s\",\n           \"CxxExceptionInSetUpTestSuiteTest::TearDown() \"\n           \"called as expected.\\n\");\n  }\n};\n\nTEST_F(CxxExceptionInSetUpTestSuiteTest, ThrowsExceptionInSetUpTestSuite) {\n  printf(\"%s\",\n         \"CxxExceptionInSetUpTestSuiteTest test body \"\n         \"called as expected.\\n\");\n}\n\nclass CxxExceptionInTearDownTestSuiteTest : public Test {\n public:\n  static void TearDownTestSuite() {\n    throw std::runtime_error(\"Standard C++ exception\");\n  }\n};\n\nTEST_F(CxxExceptionInTearDownTestSuiteTest,\n       ThrowsExceptionInTearDownTestSuite) {}\n\nclass CxxExceptionInSetUpTest : public Test {\n public:\n  static void TearDownTestSuite() {\n    printf(\"%s\",\n           \"CxxExceptionInSetUpTest::TearDownTestSuite() \"\n           \"called as expected.\\n\");\n  }\n\n protected:\n  ~CxxExceptionInSetUpTest() override {\n    printf(\"%s\",\n           \"CxxExceptionInSetUpTest destructor \"\n           \"called as expected.\\n\");\n  }\n\n  void SetUp() override { throw std::runtime_error(\"Standard C++ exception\"); }\n\n  void TearDown() override {\n    printf(\"%s\",\n           \"CxxExceptionInSetUpTest::TearDown() \"\n           \"called as expected.\\n\");\n  }\n};\n\nTEST_F(CxxExceptionInSetUpTest, ThrowsExceptionInSetUp) {\n  ADD_FAILURE() << \"CxxExceptionInSetUpTest test body \"\n                << \"called unexpectedly.\";\n}\n\nclass CxxExceptionInTearDownTest : public Test {\n public:\n  static void TearDownTestSuite() {\n    printf(\"%s\",\n           \"CxxExceptionInTearDownTest::TearDownTestSuite() \"\n           \"called as expected.\\n\");\n  }\n\n protected:\n  ~CxxExceptionInTearDownTest() override {\n    printf(\"%s\",\n           \"CxxExceptionInTearDownTest destructor \"\n           \"called as expected.\\n\");\n  }\n\n  void TearDown() override {\n    throw std::runtime_error(\"Standard C++ exception\");\n  }\n};\n\nTEST_F(CxxExceptionInTearDownTest, ThrowsExceptionInTearDown) {}\n\nclass CxxExceptionInTestBodyTest : public Test {\n public:\n  static void TearDownTestSuite() {\n    printf(\"%s\",\n           \"CxxExceptionInTestBodyTest::TearDownTestSuite() \"\n           \"called as expected.\\n\");\n  }\n\n protected:\n  ~CxxExceptionInTestBodyTest() override {\n    printf(\"%s\",\n           \"CxxExceptionInTestBodyTest destructor \"\n           \"called as expected.\\n\");\n  }\n\n  void TearDown() override {\n    printf(\"%s\",\n           \"CxxExceptionInTestBodyTest::TearDown() \"\n           \"called as expected.\\n\");\n  }\n};\n\nTEST_F(CxxExceptionInTestBodyTest, ThrowsStdCxxException) {\n  throw std::runtime_error(\"Standard C++ exception\");\n}\n\nTEST(CxxExceptionTest, ThrowsNonStdCxxException) {\n  throw \"C-string\";\n}\n\n// This terminate handler aborts the program using exit() rather than abort().\n// This avoids showing pop-ups on Windows systems and core dumps on Unix-like\n// ones.\nvoid TerminateHandler() {\n  fprintf(stderr, \"%s\\n\", \"Unhandled C++ exception terminating the program.\");\n  fflush(nullptr);\n  exit(3);\n}\n\n#endif  // GTEST_HAS_EXCEPTIONS\n\nint main(int argc, char** argv) {\n#if GTEST_HAS_EXCEPTIONS\n  std::set_terminate(&TerminateHandler);\n#endif\n  testing::InitGoogleTest(&argc, argv);\n  return RUN_ALL_TESTS();\n}\n"
  },
  {
    "path": "test/googletest/test/googletest-color-test.py",
    "content": "#!/usr/bin/env python\n#\n# Copyright 2008, Google Inc.\n# All rights reserved.\n#\n# Redistribution and use in source and binary forms, with or without\n# modification, are permitted provided that the following conditions are\n# met:\n#\n#     * Redistributions of source code must retain the above copyright\n# notice, this list of conditions and the following disclaimer.\n#     * Redistributions in binary form must reproduce the above\n# copyright notice, this list of conditions and the following disclaimer\n# in the documentation and/or other materials provided with the\n# distribution.\n#     * Neither the name of Google Inc. nor the names of its\n# contributors may be used to endorse or promote products derived from\n# this software without specific prior written permission.\n#\n# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n# \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\"\"\"Verifies that Google Test correctly determines whether to use colors.\"\"\"\n\nimport os\nimport gtest_test_utils\n\nIS_WINDOWS = os.name == 'nt'\n\nCOLOR_ENV_VAR = 'GTEST_COLOR'\nCOLOR_FLAG = 'gtest_color'\nCOMMAND = gtest_test_utils.GetTestExecutablePath('googletest-color-test_')\n\n\ndef SetEnvVar(env_var, value):\n  \"\"\"Sets the env variable to 'value'; unsets it when 'value' is None.\"\"\"\n\n  if value is not None:\n    os.environ[env_var] = value\n  elif env_var in os.environ:\n    del os.environ[env_var]\n\n\ndef UsesColor(term, color_env_var, color_flag):\n  \"\"\"Runs googletest-color-test_ and returns its exit code.\"\"\"\n\n  SetEnvVar('TERM', term)\n  SetEnvVar(COLOR_ENV_VAR, color_env_var)\n\n  if color_flag is None:\n    args = []\n  else:\n    args = ['--%s=%s' % (COLOR_FLAG, color_flag)]\n  p = gtest_test_utils.Subprocess([COMMAND] + args)\n  return not p.exited or p.exit_code\n\n\nclass GTestColorTest(gtest_test_utils.TestCase):\n  def testNoEnvVarNoFlag(self):\n    \"\"\"Tests the case when there's neither GTEST_COLOR nor --gtest_color.\"\"\"\n\n    if not IS_WINDOWS:\n      self.assert_(not UsesColor('dumb', None, None))\n      self.assert_(not UsesColor('emacs', None, None))\n      self.assert_(not UsesColor('xterm-mono', None, None))\n      self.assert_(not UsesColor('unknown', None, None))\n      self.assert_(not UsesColor(None, None, None))\n    self.assert_(UsesColor('linux', None, None))\n    self.assert_(UsesColor('cygwin', None, None))\n    self.assert_(UsesColor('xterm', None, None))\n    self.assert_(UsesColor('xterm-color', None, None))\n    self.assert_(UsesColor('xterm-256color', None, None))\n\n  def testFlagOnly(self):\n    \"\"\"Tests the case when there's --gtest_color but not GTEST_COLOR.\"\"\"\n\n    self.assert_(not UsesColor('dumb', None, 'no'))\n    self.assert_(not UsesColor('xterm-color', None, 'no'))\n    if not IS_WINDOWS:\n      self.assert_(not UsesColor('emacs', None, 'auto'))\n    self.assert_(UsesColor('xterm', None, 'auto'))\n    self.assert_(UsesColor('dumb', None, 'yes'))\n    self.assert_(UsesColor('xterm', None, 'yes'))\n\n  def testEnvVarOnly(self):\n    \"\"\"Tests the case when there's GTEST_COLOR but not --gtest_color.\"\"\"\n\n    self.assert_(not UsesColor('dumb', 'no', None))\n    self.assert_(not UsesColor('xterm-color', 'no', None))\n    if not IS_WINDOWS:\n      self.assert_(not UsesColor('dumb', 'auto', None))\n    self.assert_(UsesColor('xterm-color', 'auto', None))\n    self.assert_(UsesColor('dumb', 'yes', None))\n    self.assert_(UsesColor('xterm-color', 'yes', None))\n\n  def testEnvVarAndFlag(self):\n    \"\"\"Tests the case when there are both GTEST_COLOR and --gtest_color.\"\"\"\n\n    self.assert_(not UsesColor('xterm-color', 'no', 'no'))\n    self.assert_(UsesColor('dumb', 'no', 'yes'))\n    self.assert_(UsesColor('xterm-color', 'no', 'auto'))\n\n  def testAliasesOfYesAndNo(self):\n    \"\"\"Tests using aliases in specifying --gtest_color.\"\"\"\n\n    self.assert_(UsesColor('dumb', None, 'true'))\n    self.assert_(UsesColor('dumb', None, 'YES'))\n    self.assert_(UsesColor('dumb', None, 'T'))\n    self.assert_(UsesColor('dumb', None, '1'))\n\n    self.assert_(not UsesColor('xterm', None, 'f'))\n    self.assert_(not UsesColor('xterm', None, 'false'))\n    self.assert_(not UsesColor('xterm', None, '0'))\n    self.assert_(not UsesColor('xterm', None, 'unknown'))\n\n\nif __name__ == '__main__':\n  gtest_test_utils.Main()\n"
  },
  {
    "path": "test/googletest/test/googletest-color-test_.cc",
    "content": "// Copyright 2008, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\n// A helper program for testing how Google Test determines whether to use\n// colors in the output.  
It prints \"YES\" and returns 1 if Google Test\n// decides to use colors, and prints \"NO\" and returns 0 otherwise.\n\n#include <stdio.h>\n\n#include \"gtest/gtest.h\"\n#include \"src/gtest-internal-inl.h\"\n\nusing testing::internal::ShouldUseColor;\n\n// The purpose of this is to ensure that the UnitTest singleton is\n// created before main() is entered, and thus that ShouldUseColor()\n// works the same way as in a real Google-Test-based test.  We don't actual\n// run the TEST itself.\nTEST(GTestColorTest, Dummy) {\n}\n\nint main(int argc, char** argv) {\n  testing::InitGoogleTest(&argc, argv);\n\n  if (ShouldUseColor(true)) {\n    // Google Test decides to use colors in the output (assuming it\n    // goes to a TTY).\n    printf(\"YES\\n\");\n    return 1;\n  } else {\n    // Google Test decides not to use colors in the output.\n    printf(\"NO\\n\");\n    return 0;\n  }\n}\n"
  },
  {
    "path": "test/googletest/test/googletest-death-test-test.cc",
    "content": "// Copyright 2005, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n//\n// Tests for death tests.\n\n#include \"gtest/gtest-death-test.h\"\n\n#include \"gtest/gtest.h\"\n#include \"gtest/internal/gtest-filepath.h\"\n\nusing testing::internal::AlwaysFalse;\nusing testing::internal::AlwaysTrue;\n\n#if GTEST_HAS_DEATH_TEST\n\n# if GTEST_OS_WINDOWS\n#  include <fcntl.h>           // For O_BINARY\n#  include <direct.h>          // For chdir().\n#  include <io.h>\n# else\n#  include <unistd.h>\n#  include <sys/wait.h>        // For waitpid.\n# endif  // GTEST_OS_WINDOWS\n\n# include <limits.h>\n# include <signal.h>\n# include <stdio.h>\n\n# if GTEST_OS_LINUX\n#  include <sys/time.h>\n# endif  // GTEST_OS_LINUX\n\n# include \"gtest/gtest-spi.h\"\n# include \"src/gtest-internal-inl.h\"\n\nnamespace posix = ::testing::internal::posix;\n\nusing testing::ContainsRegex;\nusing testing::Matcher;\nusing testing::Message;\nusing testing::internal::DeathTest;\nusing testing::internal::DeathTestFactory;\nusing testing::internal::FilePath;\nusing testing::internal::GetLastErrnoDescription;\nusing testing::internal::GetUnitTestImpl;\nusing testing::internal::InDeathTestChild;\nusing testing::internal::ParseNaturalNumber;\n\nnamespace testing {\nnamespace internal {\n\n// A helper class whose objects replace the death test factory for a\n// single UnitTest object during their lifetimes.\nclass ReplaceDeathTestFactory {\n public:\n  explicit ReplaceDeathTestFactory(DeathTestFactory* new_factory)\n      : 
unit_test_impl_(GetUnitTestImpl()) {\n    old_factory_ = unit_test_impl_->death_test_factory_.release();\n    unit_test_impl_->death_test_factory_.reset(new_factory);\n  }\n\n  ~ReplaceDeathTestFactory() {\n    unit_test_impl_->death_test_factory_.release();\n    unit_test_impl_->death_test_factory_.reset(old_factory_);\n  }\n private:\n  // Prevents copying ReplaceDeathTestFactory objects.\n  ReplaceDeathTestFactory(const ReplaceDeathTestFactory&);\n  void operator=(const ReplaceDeathTestFactory&);\n\n  UnitTestImpl* unit_test_impl_;\n  DeathTestFactory* old_factory_;\n};\n\n}  // namespace internal\n}  // namespace testing\n\nnamespace {\n\nvoid DieWithMessage(const ::std::string& message) {\n  fprintf(stderr, \"%s\", message.c_str());\n  fflush(stderr);  // Make sure the text is printed before the process exits.\n\n  // We call _exit() instead of exit(), as the former is a direct\n  // system call and thus safer in the presence of threads.  exit()\n  // will invoke user-defined exit-hooks, which may do dangerous\n  // things that conflict with death tests.\n  //\n  // Some compilers can recognize that _exit() never returns and issue the\n  // 'unreachable code' warning for code following this function, unless\n  // fooled by a fake condition.\n  if (AlwaysTrue())\n    _exit(1);\n}\n\nvoid DieInside(const ::std::string& function) {\n  DieWithMessage(\"death inside \" + function + \"().\");\n}\n\n// Tests that death tests work.\n\nclass TestForDeathTest : public testing::Test {\n protected:\n  TestForDeathTest() : original_dir_(FilePath::GetCurrentDir()) {}\n\n  ~TestForDeathTest() override { posix::ChDir(original_dir_.c_str()); }\n\n  // A static member function that's expected to die.\n  static void StaticMemberFunction() { DieInside(\"StaticMemberFunction\"); }\n\n  // A method of the test fixture that may die.\n  void MemberFunction() {\n    if (should_die_)\n      DieInside(\"MemberFunction\");\n  }\n\n  // True if and only if MemberFunction() should die.\n  
bool should_die_;\n  const FilePath original_dir_;\n};\n\n// A class with a member function that may die.\nclass MayDie {\n public:\n  explicit MayDie(bool should_die) : should_die_(should_die) {}\n\n  // A member function that may die.\n  void MemberFunction() const {\n    if (should_die_)\n      DieInside(\"MayDie::MemberFunction\");\n  }\n\n private:\n  // True if and only if MemberFunction() should die.\n  bool should_die_;\n};\n\n// A global function that's expected to die.\nvoid GlobalFunction() { DieInside(\"GlobalFunction\"); }\n\n// A non-void function that's expected to die.\nint NonVoidFunction() {\n  DieInside(\"NonVoidFunction\");\n  return 1;\n}\n\n// A unary function that may die.\nvoid DieIf(bool should_die) {\n  if (should_die)\n    DieInside(\"DieIf\");\n}\n\n// A binary function that may die.\nbool DieIfLessThan(int x, int y) {\n  if (x < y) {\n    DieInside(\"DieIfLessThan\");\n  }\n  return true;\n}\n\n// Tests that ASSERT_DEATH can be used outside a TEST, TEST_F, or test fixture.\nvoid DeathTestSubroutine() {\n  EXPECT_DEATH(GlobalFunction(), \"death.*GlobalFunction\");\n  ASSERT_DEATH(GlobalFunction(), \"death.*GlobalFunction\");\n}\n\n// Death in dbg, not opt.\nint DieInDebugElse12(int* sideeffect) {\n  if (sideeffect) *sideeffect = 12;\n\n# ifndef NDEBUG\n\n  DieInside(\"DieInDebugElse12\");\n\n# endif  // NDEBUG\n\n  return 12;\n}\n\n# if GTEST_OS_WINDOWS\n\n// Death in dbg due to Windows CRT assertion failure, not opt.\nint DieInCRTDebugElse12(int* sideeffect) {\n  if (sideeffect) *sideeffect = 12;\n\n  // Create an invalid fd by closing a valid one\n  int fdpipe[2];\n  EXPECT_EQ(_pipe(fdpipe, 256, O_BINARY), 0);\n  EXPECT_EQ(_close(fdpipe[0]), 0);\n  EXPECT_EQ(_close(fdpipe[1]), 0);\n\n  // _dup() should crash in debug mode\n  EXPECT_EQ(_dup(fdpipe[0]), -1);\n\n  return 12;\n}\n\n#endif  // GTEST_OS_WINDOWS\n\n# if GTEST_OS_WINDOWS || GTEST_OS_FUCHSIA\n\n// Tests the ExitedWithCode predicate.\nTEST(ExitStatusPredicateTest, 
ExitedWithCode) {\n  // On Windows, the process's exit code is the same as its exit status,\n  // so the predicate just compares the its input with its parameter.\n  EXPECT_TRUE(testing::ExitedWithCode(0)(0));\n  EXPECT_TRUE(testing::ExitedWithCode(1)(1));\n  EXPECT_TRUE(testing::ExitedWithCode(42)(42));\n  EXPECT_FALSE(testing::ExitedWithCode(0)(1));\n  EXPECT_FALSE(testing::ExitedWithCode(1)(0));\n}\n\n# else\n\n// Returns the exit status of a process that calls _exit(2) with a\n// given exit code.  This is a helper function for the\n// ExitStatusPredicateTest test suite.\nstatic int NormalExitStatus(int exit_code) {\n  pid_t child_pid = fork();\n  if (child_pid == 0) {\n    _exit(exit_code);\n  }\n  int status;\n  waitpid(child_pid, &status, 0);\n  return status;\n}\n\n// Returns the exit status of a process that raises a given signal.\n// If the signal does not cause the process to die, then it returns\n// instead the exit status of a process that exits normally with exit\n// code 1.  
This is a helper function for the ExitStatusPredicateTest\n// test suite.\nstatic int KilledExitStatus(int signum) {\n  pid_t child_pid = fork();\n  if (child_pid == 0) {\n    raise(signum);\n    _exit(1);\n  }\n  int status;\n  waitpid(child_pid, &status, 0);\n  return status;\n}\n\n// Tests the ExitedWithCode predicate.\nTEST(ExitStatusPredicateTest, ExitedWithCode) {\n  const int status0  = NormalExitStatus(0);\n  const int status1  = NormalExitStatus(1);\n  const int status42 = NormalExitStatus(42);\n  const testing::ExitedWithCode pred0(0);\n  const testing::ExitedWithCode pred1(1);\n  const testing::ExitedWithCode pred42(42);\n  EXPECT_PRED1(pred0,  status0);\n  EXPECT_PRED1(pred1,  status1);\n  EXPECT_PRED1(pred42, status42);\n  EXPECT_FALSE(pred0(status1));\n  EXPECT_FALSE(pred42(status0));\n  EXPECT_FALSE(pred1(status42));\n}\n\n// Tests the KilledBySignal predicate.\nTEST(ExitStatusPredicateTest, KilledBySignal) {\n  const int status_segv = KilledExitStatus(SIGSEGV);\n  const int status_kill = KilledExitStatus(SIGKILL);\n  const testing::KilledBySignal pred_segv(SIGSEGV);\n  const testing::KilledBySignal pred_kill(SIGKILL);\n  EXPECT_PRED1(pred_segv, status_segv);\n  EXPECT_PRED1(pred_kill, status_kill);\n  EXPECT_FALSE(pred_segv(status_kill));\n  EXPECT_FALSE(pred_kill(status_segv));\n}\n\n# endif  // GTEST_OS_WINDOWS || GTEST_OS_FUCHSIA\n\n// Tests that the death test macros expand to code which may or may not\n// be followed by operator<<, and that in either case the complete text\n// comprises only a single C++ statement.\nTEST_F(TestForDeathTest, SingleStatement) {\n  if (AlwaysFalse())\n    // This would fail if executed; this is a compilation test only\n    ASSERT_DEATH(return, \"\");\n\n  if (AlwaysTrue())\n    EXPECT_DEATH(_exit(1), \"\");\n  else\n    // This empty \"else\" branch is meant to ensure that EXPECT_DEATH\n    // doesn't expand into an \"if\" statement without an \"else\"\n    ;\n\n  if (AlwaysFalse())\n    ASSERT_DEATH(return, \"\") 
<< \"did not die\";\n\n  if (AlwaysFalse())\n    ;\n  else\n    EXPECT_DEATH(_exit(1), \"\") << 1 << 2 << 3;\n}\n\n# if GTEST_USES_PCRE\n\nvoid DieWithEmbeddedNul() {\n  fprintf(stderr, \"Hello%cmy null world.\\n\", '\\0');\n  fflush(stderr);\n  _exit(1);\n}\n\n// Tests that EXPECT_DEATH and ASSERT_DEATH work when the error\n// message has a NUL character in it.\nTEST_F(TestForDeathTest, EmbeddedNulInMessage) {\n  EXPECT_DEATH(DieWithEmbeddedNul(), \"my null world\");\n  ASSERT_DEATH(DieWithEmbeddedNul(), \"my null world\");\n}\n\n# endif  // GTEST_USES_PCRE\n\n// Tests that death test macros expand to code which interacts well with switch\n// statements.\nTEST_F(TestForDeathTest, SwitchStatement) {\n  // Microsoft compiler usually complains about switch statements without\n  // case labels. We suppress that warning for this test.\n  GTEST_DISABLE_MSC_WARNINGS_PUSH_(4065)\n\n  switch (0)\n    default:\n      ASSERT_DEATH(_exit(1), \"\") << \"exit in default switch handler\";\n\n  switch (0)\n    case 0:\n      EXPECT_DEATH(_exit(1), \"\") << \"exit in switch case\";\n\n  GTEST_DISABLE_MSC_WARNINGS_POP_()\n}\n\n// Tests that a static member function can be used in a \"fast\" style\n// death test.\nTEST_F(TestForDeathTest, StaticMemberFunctionFastStyle) {\n  testing::GTEST_FLAG(death_test_style) = \"fast\";\n  ASSERT_DEATH(StaticMemberFunction(), \"death.*StaticMember\");\n}\n\n// Tests that a method of the test fixture can be used in a \"fast\"\n// style death test.\nTEST_F(TestForDeathTest, MemberFunctionFastStyle) {\n  testing::GTEST_FLAG(death_test_style) = \"fast\";\n  should_die_ = true;\n  EXPECT_DEATH(MemberFunction(), \"inside.*MemberFunction\");\n}\n\nvoid ChangeToRootDir() { posix::ChDir(GTEST_PATH_SEP_); }\n\n// Tests that death tests work even if the current directory has been\n// changed.\nTEST_F(TestForDeathTest, FastDeathTestInChangedDir) {\n  testing::GTEST_FLAG(death_test_style) = \"fast\";\n\n  ChangeToRootDir();\n  EXPECT_EXIT(_exit(1), 
testing::ExitedWithCode(1), \"\");\n\n  ChangeToRootDir();\n  ASSERT_DEATH(_exit(1), \"\");\n}\n\n# if GTEST_OS_LINUX\nvoid SigprofAction(int, siginfo_t*, void*) { /* no op */ }\n\n// Sets SIGPROF action and ITIMER_PROF timer (interval: 1ms).\nvoid SetSigprofActionAndTimer() {\n  struct itimerval timer;\n  timer.it_interval.tv_sec = 0;\n  timer.it_interval.tv_usec = 1;\n  timer.it_value = timer.it_interval;\n  ASSERT_EQ(0, setitimer(ITIMER_PROF, &timer, nullptr));\n  struct sigaction signal_action;\n  memset(&signal_action, 0, sizeof(signal_action));\n  sigemptyset(&signal_action.sa_mask);\n  signal_action.sa_sigaction = SigprofAction;\n  signal_action.sa_flags = SA_RESTART | SA_SIGINFO;\n  ASSERT_EQ(0, sigaction(SIGPROF, &signal_action, nullptr));\n}\n\n// Disables ITIMER_PROF timer and ignores SIGPROF signal.\nvoid DisableSigprofActionAndTimer(struct sigaction* old_signal_action) {\n  struct itimerval timer;\n  timer.it_interval.tv_sec = 0;\n  timer.it_interval.tv_usec = 0;\n  timer.it_value = timer.it_interval;\n  ASSERT_EQ(0, setitimer(ITIMER_PROF, &timer, nullptr));\n  struct sigaction signal_action;\n  memset(&signal_action, 0, sizeof(signal_action));\n  sigemptyset(&signal_action.sa_mask);\n  signal_action.sa_handler = SIG_IGN;\n  ASSERT_EQ(0, sigaction(SIGPROF, &signal_action, old_signal_action));\n}\n\n// Tests that death tests work when SIGPROF handler and timer are set.\nTEST_F(TestForDeathTest, FastSigprofActionSet) {\n  testing::GTEST_FLAG(death_test_style) = \"fast\";\n  SetSigprofActionAndTimer();\n  EXPECT_DEATH(_exit(1), \"\");\n  struct sigaction old_signal_action;\n  DisableSigprofActionAndTimer(&old_signal_action);\n  EXPECT_TRUE(old_signal_action.sa_sigaction == SigprofAction);\n}\n\nTEST_F(TestForDeathTest, ThreadSafeSigprofActionSet) {\n  testing::GTEST_FLAG(death_test_style) = \"threadsafe\";\n  SetSigprofActionAndTimer();\n  EXPECT_DEATH(_exit(1), \"\");\n  struct sigaction old_signal_action;\n  
DisableSigprofActionAndTimer(&old_signal_action);\n  EXPECT_TRUE(old_signal_action.sa_sigaction == SigprofAction);\n}\n# endif  // GTEST_OS_LINUX\n\n// Repeats a representative sample of death tests in the \"threadsafe\" style:\n\nTEST_F(TestForDeathTest, StaticMemberFunctionThreadsafeStyle) {\n  testing::GTEST_FLAG(death_test_style) = \"threadsafe\";\n  ASSERT_DEATH(StaticMemberFunction(), \"death.*StaticMember\");\n}\n\nTEST_F(TestForDeathTest, MemberFunctionThreadsafeStyle) {\n  testing::GTEST_FLAG(death_test_style) = \"threadsafe\";\n  should_die_ = true;\n  EXPECT_DEATH(MemberFunction(), \"inside.*MemberFunction\");\n}\n\nTEST_F(TestForDeathTest, ThreadsafeDeathTestInLoop) {\n  testing::GTEST_FLAG(death_test_style) = \"threadsafe\";\n\n  for (int i = 0; i < 3; ++i)\n    EXPECT_EXIT(_exit(i), testing::ExitedWithCode(i), \"\") << \": i = \" << i;\n}\n\nTEST_F(TestForDeathTest, ThreadsafeDeathTestInChangedDir) {\n  testing::GTEST_FLAG(death_test_style) = \"threadsafe\";\n\n  ChangeToRootDir();\n  EXPECT_EXIT(_exit(1), testing::ExitedWithCode(1), \"\");\n\n  ChangeToRootDir();\n  ASSERT_DEATH(_exit(1), \"\");\n}\n\nTEST_F(TestForDeathTest, MixedStyles) {\n  testing::GTEST_FLAG(death_test_style) = \"threadsafe\";\n  EXPECT_DEATH(_exit(1), \"\");\n  testing::GTEST_FLAG(death_test_style) = \"fast\";\n  EXPECT_DEATH(_exit(1), \"\");\n}\n\n# if GTEST_HAS_CLONE && GTEST_HAS_PTHREAD\n\nbool pthread_flag;\n\nvoid SetPthreadFlag() {\n  pthread_flag = true;\n}\n\nTEST_F(TestForDeathTest, DoesNotExecuteAtforkHooks) {\n  if (!testing::GTEST_FLAG(death_test_use_fork)) {\n    testing::GTEST_FLAG(death_test_style) = \"threadsafe\";\n    pthread_flag = false;\n    ASSERT_EQ(0, pthread_atfork(&SetPthreadFlag, nullptr, nullptr));\n    ASSERT_DEATH(_exit(1), \"\");\n    ASSERT_FALSE(pthread_flag);\n  }\n}\n\n# endif  // GTEST_HAS_CLONE && GTEST_HAS_PTHREAD\n\n// Tests that a method of another class can be used in a death test.\nTEST_F(TestForDeathTest, MethodOfAnotherClass) {\n  
const MayDie x(true);\n  ASSERT_DEATH(x.MemberFunction(), \"MayDie\\\\:\\\\:MemberFunction\");\n}\n\n// Tests that a global function can be used in a death test.\nTEST_F(TestForDeathTest, GlobalFunction) {\n  EXPECT_DEATH(GlobalFunction(), \"GlobalFunction\");\n}\n\n// Tests that any value convertible to an RE works as a second\n// argument to EXPECT_DEATH.\nTEST_F(TestForDeathTest, AcceptsAnythingConvertibleToRE) {\n  static const char regex_c_str[] = \"GlobalFunction\";\n  EXPECT_DEATH(GlobalFunction(), regex_c_str);\n\n  const testing::internal::RE regex(regex_c_str);\n  EXPECT_DEATH(GlobalFunction(), regex);\n\n# if !GTEST_USES_PCRE\n\n  const ::std::string regex_std_str(regex_c_str);\n  EXPECT_DEATH(GlobalFunction(), regex_std_str);\n\n  // This one is tricky; a temporary pointer into another temporary.  Reference\n  // lifetime extension of the pointer is not sufficient.\n  EXPECT_DEATH(GlobalFunction(), ::std::string(regex_c_str).c_str());\n\n# endif  // !GTEST_USES_PCRE\n}\n\n// Tests that a non-void function can be used in a death test.\nTEST_F(TestForDeathTest, NonVoidFunction) {\n  ASSERT_DEATH(NonVoidFunction(), \"NonVoidFunction\");\n}\n\n// Tests that functions that take parameter(s) can be used in a death test.\nTEST_F(TestForDeathTest, FunctionWithParameter) {\n  EXPECT_DEATH(DieIf(true), \"DieIf\\\\(\\\\)\");\n  EXPECT_DEATH(DieIfLessThan(2, 3), \"DieIfLessThan\");\n}\n\n// Tests that ASSERT_DEATH can be used outside a TEST, TEST_F, or test fixture.\nTEST_F(TestForDeathTest, OutsideFixture) {\n  DeathTestSubroutine();\n}\n\n// Tests that death tests can be done inside a loop.\nTEST_F(TestForDeathTest, InsideLoop) {\n  for (int i = 0; i < 5; i++) {\n    EXPECT_DEATH(DieIfLessThan(-1, i), \"DieIfLessThan\") << \"where i == \" << i;\n  }\n}\n\n// Tests that a compound statement can be used in a death test.\nTEST_F(TestForDeathTest, CompoundStatement) {\n  EXPECT_DEATH({  // NOLINT\n    const int x = 2;\n    const int y = x + 1;\n    DieIfLessThan(x, 
y);\n  },\n  \"DieIfLessThan\");\n}\n\n// Tests that code that doesn't die causes a death test to fail.\nTEST_F(TestForDeathTest, DoesNotDie) {\n  EXPECT_NONFATAL_FAILURE(EXPECT_DEATH(DieIf(false), \"DieIf\"),\n                          \"failed to die\");\n}\n\n// Tests that a death test fails when the error message isn't expected.\nTEST_F(TestForDeathTest, ErrorMessageMismatch) {\n  EXPECT_NONFATAL_FAILURE({  // NOLINT\n    EXPECT_DEATH(DieIf(true), \"DieIfLessThan\") << \"End of death test message.\";\n  }, \"died but not with expected error\");\n}\n\n// On exit, *aborted will be true if and only if the EXPECT_DEATH()\n// statement aborted the function.\nvoid ExpectDeathTestHelper(bool* aborted) {\n  *aborted = true;\n  EXPECT_DEATH(DieIf(false), \"DieIf\");  // This assertion should fail.\n  *aborted = false;\n}\n\n// Tests that EXPECT_DEATH doesn't abort the test on failure.\nTEST_F(TestForDeathTest, EXPECT_DEATH) {\n  bool aborted = true;\n  EXPECT_NONFATAL_FAILURE(ExpectDeathTestHelper(&aborted),\n                          \"failed to die\");\n  EXPECT_FALSE(aborted);\n}\n\n// Tests that ASSERT_DEATH does abort the test on failure.\nTEST_F(TestForDeathTest, ASSERT_DEATH) {\n  static bool aborted;\n  EXPECT_FATAL_FAILURE({  // NOLINT\n    aborted = true;\n    ASSERT_DEATH(DieIf(false), \"DieIf\");  // This assertion should fail.\n    aborted = false;\n  }, \"failed to die\");\n  EXPECT_TRUE(aborted);\n}\n\n// Tests that EXPECT_DEATH evaluates the arguments exactly once.\nTEST_F(TestForDeathTest, SingleEvaluation) {\n  int x = 3;\n  EXPECT_DEATH(DieIf((++x) == 4), \"DieIf\");\n\n  const char* regex = \"DieIf\";\n  const char* regex_save = regex;\n  EXPECT_DEATH(DieIfLessThan(3, 4), regex++);\n  EXPECT_EQ(regex_save + 1, regex);\n}\n\n// Tests that run-away death tests are reported as failures.\nTEST_F(TestForDeathTest, RunawayIsFailure) {\n  EXPECT_NONFATAL_FAILURE(EXPECT_DEATH(static_cast<void>(0), \"Foo\"),\n                          \"failed to 
die.\");\n}\n\n// Tests that death tests report executing 'return' in the statement as\n// failure.\nTEST_F(TestForDeathTest, ReturnIsFailure) {\n  EXPECT_FATAL_FAILURE(ASSERT_DEATH(return, \"Bar\"),\n                       \"illegal return in test statement.\");\n}\n\n// Tests that EXPECT_DEBUG_DEATH works as expected, that is, you can stream a\n// message to it, and in debug mode it:\n// 1. Asserts on death.\n// 2. Has no side effect.\n//\n// And in opt mode, it:\n// 1.  Has side effects but does not assert.\nTEST_F(TestForDeathTest, TestExpectDebugDeath) {\n  int sideeffect = 0;\n\n  // Put the regex in a local variable to make sure we don't get an \"unused\"\n  // warning in opt mode.\n  const char* regex = \"death.*DieInDebugElse12\";\n\n  EXPECT_DEBUG_DEATH(DieInDebugElse12(&sideeffect), regex)\n      << \"Must accept a streamed message\";\n\n# ifdef NDEBUG\n\n  // Checks that the assignment occurs in opt mode (sideeffect).\n  EXPECT_EQ(12, sideeffect);\n\n# else\n\n  // Checks that the assignment does not occur in dbg mode (no sideeffect).\n  EXPECT_EQ(0, sideeffect);\n\n# endif\n}\n\n# if GTEST_OS_WINDOWS\n\n// Tests that EXPECT_DEBUG_DEATH works as expected when in debug mode\n// the Windows CRT crashes the process with an assertion failure.\n// 1. Asserts on death.\n// 2. Has no side effect (doesn't pop up a window or wait for user input).\n//\n// And in opt mode, it:\n// 1.  
Has side effects but does not assert.\nTEST_F(TestForDeathTest, CRTDebugDeath) {\n  int sideeffect = 0;\n\n  // Put the regex in a local variable to make sure we don't get an \"unused\"\n  // warning in opt mode.\n  const char* regex = \"dup.* : Assertion failed\";\n\n  EXPECT_DEBUG_DEATH(DieInCRTDebugElse12(&sideeffect), regex)\n      << \"Must accept a streamed message\";\n\n# ifdef NDEBUG\n\n  // Checks that the assignment occurs in opt mode (sideeffect).\n  EXPECT_EQ(12, sideeffect);\n\n# else\n\n  // Checks that the assignment does not occur in dbg mode (no sideeffect).\n  EXPECT_EQ(0, sideeffect);\n\n# endif\n}\n\n# endif  // GTEST_OS_WINDOWS\n\n// Tests that ASSERT_DEBUG_DEATH works as expected, that is, you can stream a\n// message to it, and in debug mode it:\n// 1. Asserts on death.\n// 2. Has no side effect.\n//\n// And in opt mode, it:\n// 1.  Has side effects but does not assert.\nTEST_F(TestForDeathTest, TestAssertDebugDeath) {\n  int sideeffect = 0;\n\n  ASSERT_DEBUG_DEATH(DieInDebugElse12(&sideeffect), \"death.*DieInDebugElse12\")\n      << \"Must accept a streamed message\";\n\n# ifdef NDEBUG\n\n  // Checks that the assignment occurs in opt mode (sideeffect).\n  EXPECT_EQ(12, sideeffect);\n\n# else\n\n  // Checks that the assignment does not occur in dbg mode (no sideeffect).\n  EXPECT_EQ(0, sideeffect);\n\n# endif\n}\n\n# ifndef NDEBUG\n\nvoid ExpectDebugDeathHelper(bool* aborted) {\n  *aborted = true;\n  EXPECT_DEBUG_DEATH(return, \"\") << \"This is expected to fail.\";\n  *aborted = false;\n}\n\n#  if GTEST_OS_WINDOWS\nTEST(PopUpDeathTest, DoesNotShowPopUpOnAbort) {\n  printf(\"This test should be considered failing if it shows \"\n         \"any pop-up dialogs.\\n\");\n  fflush(stdout);\n\n  EXPECT_DEATH({\n    testing::GTEST_FLAG(catch_exceptions) = false;\n    abort();\n  }, \"\");\n}\n#  endif  // GTEST_OS_WINDOWS\n\n// Tests that EXPECT_DEBUG_DEATH in debug mode does not abort\n// the function.\nTEST_F(TestForDeathTest, 
ExpectDebugDeathDoesNotAbort) {\n  bool aborted = true;\n  EXPECT_NONFATAL_FAILURE(ExpectDebugDeathHelper(&aborted), \"\");\n  EXPECT_FALSE(aborted);\n}\n\nvoid AssertDebugDeathHelper(bool* aborted) {\n  *aborted = true;\n  GTEST_LOG_(INFO) << \"Before ASSERT_DEBUG_DEATH\";\n  ASSERT_DEBUG_DEATH(GTEST_LOG_(INFO) << \"In ASSERT_DEBUG_DEATH\"; return, \"\")\n      << \"This is expected to fail.\";\n  GTEST_LOG_(INFO) << \"After ASSERT_DEBUG_DEATH\";\n  *aborted = false;\n}\n\n// Tests that ASSERT_DEBUG_DEATH in debug mode aborts the function on\n// failure.\nTEST_F(TestForDeathTest, AssertDebugDeathAborts) {\n  static bool aborted;\n  aborted = false;\n  EXPECT_FATAL_FAILURE(AssertDebugDeathHelper(&aborted), \"\");\n  EXPECT_TRUE(aborted);\n}\n\nTEST_F(TestForDeathTest, AssertDebugDeathAborts2) {\n  static bool aborted;\n  aborted = false;\n  EXPECT_FATAL_FAILURE(AssertDebugDeathHelper(&aborted), \"\");\n  EXPECT_TRUE(aborted);\n}\n\nTEST_F(TestForDeathTest, AssertDebugDeathAborts3) {\n  static bool aborted;\n  aborted = false;\n  EXPECT_FATAL_FAILURE(AssertDebugDeathHelper(&aborted), \"\");\n  EXPECT_TRUE(aborted);\n}\n\nTEST_F(TestForDeathTest, AssertDebugDeathAborts4) {\n  static bool aborted;\n  aborted = false;\n  EXPECT_FATAL_FAILURE(AssertDebugDeathHelper(&aborted), \"\");\n  EXPECT_TRUE(aborted);\n}\n\nTEST_F(TestForDeathTest, AssertDebugDeathAborts5) {\n  static bool aborted;\n  aborted = false;\n  EXPECT_FATAL_FAILURE(AssertDebugDeathHelper(&aborted), \"\");\n  EXPECT_TRUE(aborted);\n}\n\nTEST_F(TestForDeathTest, AssertDebugDeathAborts6) {\n  static bool aborted;\n  aborted = false;\n  EXPECT_FATAL_FAILURE(AssertDebugDeathHelper(&aborted), \"\");\n  EXPECT_TRUE(aborted);\n}\n\nTEST_F(TestForDeathTest, AssertDebugDeathAborts7) {\n  static bool aborted;\n  aborted = false;\n  EXPECT_FATAL_FAILURE(AssertDebugDeathHelper(&aborted), \"\");\n  EXPECT_TRUE(aborted);\n}\n\nTEST_F(TestForDeathTest, AssertDebugDeathAborts8) {\n  static bool aborted;\n  aborted = 
false;\n  EXPECT_FATAL_FAILURE(AssertDebugDeathHelper(&aborted), \"\");\n  EXPECT_TRUE(aborted);\n}\n\nTEST_F(TestForDeathTest, AssertDebugDeathAborts9) {\n  static bool aborted;\n  aborted = false;\n  EXPECT_FATAL_FAILURE(AssertDebugDeathHelper(&aborted), \"\");\n  EXPECT_TRUE(aborted);\n}\n\nTEST_F(TestForDeathTest, AssertDebugDeathAborts10) {\n  static bool aborted;\n  aborted = false;\n  EXPECT_FATAL_FAILURE(AssertDebugDeathHelper(&aborted), \"\");\n  EXPECT_TRUE(aborted);\n}\n\n# endif  // _NDEBUG\n\n// Tests the *_EXIT family of macros, using a variety of predicates.\nstatic void TestExitMacros() {\n  EXPECT_EXIT(_exit(1),  testing::ExitedWithCode(1),  \"\");\n  ASSERT_EXIT(_exit(42), testing::ExitedWithCode(42), \"\");\n\n# if GTEST_OS_WINDOWS\n\n  // Of all signals effects on the process exit code, only those of SIGABRT\n  // are documented on Windows.\n  // See https://msdn.microsoft.com/en-us/query-bi/m/dwwzkt4c.\n  EXPECT_EXIT(raise(SIGABRT), testing::ExitedWithCode(3), \"\") << \"b_ar\";\n\n# elif !GTEST_OS_FUCHSIA\n\n  // Fuchsia has no unix signals.\n  EXPECT_EXIT(raise(SIGKILL), testing::KilledBySignal(SIGKILL), \"\") << \"foo\";\n  ASSERT_EXIT(raise(SIGUSR2), testing::KilledBySignal(SIGUSR2), \"\") << \"bar\";\n\n  EXPECT_FATAL_FAILURE({  // NOLINT\n    ASSERT_EXIT(_exit(0), testing::KilledBySignal(SIGSEGV), \"\")\n      << \"This failure is expected, too.\";\n  }, \"This failure is expected, too.\");\n\n# endif  // GTEST_OS_WINDOWS\n\n  EXPECT_NONFATAL_FAILURE({  // NOLINT\n    EXPECT_EXIT(raise(SIGSEGV), testing::ExitedWithCode(0), \"\")\n      << \"This failure is expected.\";\n  }, \"This failure is expected.\");\n}\n\nTEST_F(TestForDeathTest, ExitMacros) {\n  TestExitMacros();\n}\n\nTEST_F(TestForDeathTest, ExitMacrosUsingFork) {\n  testing::GTEST_FLAG(death_test_use_fork) = true;\n  TestExitMacros();\n}\n\nTEST_F(TestForDeathTest, InvalidStyle) {\n  testing::GTEST_FLAG(death_test_style) = \"rococo\";\n  EXPECT_NONFATAL_FAILURE({  // NOLINT\n   
 EXPECT_DEATH(_exit(0), \"\") << \"This failure is expected.\";\n  }, \"This failure is expected.\");\n}\n\nTEST_F(TestForDeathTest, DeathTestFailedOutput) {\n  testing::GTEST_FLAG(death_test_style) = \"fast\";\n  EXPECT_NONFATAL_FAILURE(\n      EXPECT_DEATH(DieWithMessage(\"death\\n\"),\n                   \"expected message\"),\n      \"Actual msg:\\n\"\n      \"[  DEATH   ] death\\n\");\n}\n\nTEST_F(TestForDeathTest, DeathTestUnexpectedReturnOutput) {\n  testing::GTEST_FLAG(death_test_style) = \"fast\";\n  EXPECT_NONFATAL_FAILURE(\n      EXPECT_DEATH({\n          fprintf(stderr, \"returning\\n\");\n          fflush(stderr);\n          return;\n        }, \"\"),\n      \"    Result: illegal return in test statement.\\n\"\n      \" Error msg:\\n\"\n      \"[  DEATH   ] returning\\n\");\n}\n\nTEST_F(TestForDeathTest, DeathTestBadExitCodeOutput) {\n  testing::GTEST_FLAG(death_test_style) = \"fast\";\n  EXPECT_NONFATAL_FAILURE(\n      EXPECT_EXIT(DieWithMessage(\"exiting with rc 1\\n\"),\n                  testing::ExitedWithCode(3),\n                  \"expected message\"),\n      \"    Result: died but not with expected exit code:\\n\"\n      \"            Exited with exit status 1\\n\"\n      \"Actual msg:\\n\"\n      \"[  DEATH   ] exiting with rc 1\\n\");\n}\n\nTEST_F(TestForDeathTest, DeathTestMultiLineMatchFail) {\n  testing::GTEST_FLAG(death_test_style) = \"fast\";\n  EXPECT_NONFATAL_FAILURE(\n      EXPECT_DEATH(DieWithMessage(\"line 1\\nline 2\\nline 3\\n\"),\n                   \"line 1\\nxyz\\nline 3\\n\"),\n      \"Actual msg:\\n\"\n      \"[  DEATH   ] line 1\\n\"\n      \"[  DEATH   ] line 2\\n\"\n      \"[  DEATH   ] line 3\\n\");\n}\n\nTEST_F(TestForDeathTest, DeathTestMultiLineMatchPass) {\n  testing::GTEST_FLAG(death_test_style) = \"fast\";\n  EXPECT_DEATH(DieWithMessage(\"line 1\\nline 2\\nline 3\\n\"),\n               \"line 1\\nline 2\\nline 3\\n\");\n}\n\n// A DeathTestFactory that returns MockDeathTests.\nclass MockDeathTestFactory : public 
DeathTestFactory {\n public:\n  MockDeathTestFactory();\n  bool Create(const char* statement,\n              testing::Matcher<const std::string&> matcher, const char* file,\n              int line, DeathTest** test) override;\n\n  // Sets the parameters for subsequent calls to Create.\n  void SetParameters(bool create, DeathTest::TestRole role,\n                     int status, bool passed);\n\n  // Accessors.\n  int AssumeRoleCalls() const { return assume_role_calls_; }\n  int WaitCalls() const { return wait_calls_; }\n  size_t PassedCalls() const { return passed_args_.size(); }\n  bool PassedArgument(int n) const {\n    return passed_args_[static_cast<size_t>(n)];\n  }\n  size_t AbortCalls() const { return abort_args_.size(); }\n  DeathTest::AbortReason AbortArgument(int n) const {\n    return abort_args_[static_cast<size_t>(n)];\n  }\n  bool TestDeleted() const { return test_deleted_; }\n\n private:\n  friend class MockDeathTest;\n  // If true, Create will return a MockDeathTest; otherwise it returns\n  // NULL.\n  bool create_;\n  // The value a MockDeathTest will return from its AssumeRole method.\n  DeathTest::TestRole role_;\n  // The value a MockDeathTest will return from its Wait method.\n  int status_;\n  // The value a MockDeathTest will return from its Passed method.\n  bool passed_;\n\n  // Number of times AssumeRole was called.\n  int assume_role_calls_;\n  // Number of times Wait was called.\n  int wait_calls_;\n  // The arguments to the calls to Passed since the last call to\n  // SetParameters.\n  std::vector<bool> passed_args_;\n  // The arguments to the calls to Abort since the last call to\n  // SetParameters.\n  std::vector<DeathTest::AbortReason> abort_args_;\n  // True if the last MockDeathTest returned by Create has been\n  // deleted.\n  bool test_deleted_;\n};\n\n\n// A DeathTest implementation useful in testing.  
It returns values set\n// at its creation from its various inherited DeathTest methods, and\n// reports calls to those methods to its parent MockDeathTestFactory\n// object.\nclass MockDeathTest : public DeathTest {\n public:\n  MockDeathTest(MockDeathTestFactory *parent,\n                TestRole role, int status, bool passed) :\n      parent_(parent), role_(role), status_(status), passed_(passed) {\n  }\n  ~MockDeathTest() override { parent_->test_deleted_ = true; }\n  TestRole AssumeRole() override {\n    ++parent_->assume_role_calls_;\n    return role_;\n  }\n  int Wait() override {\n    ++parent_->wait_calls_;\n    return status_;\n  }\n  bool Passed(bool exit_status_ok) override {\n    parent_->passed_args_.push_back(exit_status_ok);\n    return passed_;\n  }\n  void Abort(AbortReason reason) override {\n    parent_->abort_args_.push_back(reason);\n  }\n\n private:\n  MockDeathTestFactory* const parent_;\n  const TestRole role_;\n  const int status_;\n  const bool passed_;\n};\n\n\n// MockDeathTestFactory constructor.\nMockDeathTestFactory::MockDeathTestFactory()\n    : create_(true),\n      role_(DeathTest::OVERSEE_TEST),\n      status_(0),\n      passed_(true),\n      assume_role_calls_(0),\n      wait_calls_(0),\n      passed_args_(),\n      abort_args_() {\n}\n\n\n// Sets the parameters for subsequent calls to Create.\nvoid MockDeathTestFactory::SetParameters(bool create,\n                                         DeathTest::TestRole role,\n                                         int status, bool passed) {\n  create_ = create;\n  role_ = role;\n  status_ = status;\n  passed_ = passed;\n\n  assume_role_calls_ = 0;\n  wait_calls_ = 0;\n  passed_args_.clear();\n  abort_args_.clear();\n}\n\n\n// Sets test to NULL (if create_ is false) or to the address of a new\n// MockDeathTest object with parameters taken from the last call\n// to SetParameters (if create_ is true).  
Always returns true.\nbool MockDeathTestFactory::Create(\n    const char* /*statement*/, testing::Matcher<const std::string&> /*matcher*/,\n    const char* /*file*/, int /*line*/, DeathTest** test) {\n  test_deleted_ = false;\n  if (create_) {\n    *test = new MockDeathTest(this, role_, status_, passed_);\n  } else {\n    *test = nullptr;\n  }\n  return true;\n}\n\n// A test fixture for testing the logic of the GTEST_DEATH_TEST_ macro.\n// It installs a MockDeathTestFactory that is used for the duration\n// of the test case.\nclass MacroLogicDeathTest : public testing::Test {\n protected:\n  static testing::internal::ReplaceDeathTestFactory* replacer_;\n  static MockDeathTestFactory* factory_;\n\n  static void SetUpTestSuite() {\n    factory_ = new MockDeathTestFactory;\n    replacer_ = new testing::internal::ReplaceDeathTestFactory(factory_);\n  }\n\n  static void TearDownTestSuite() {\n    delete replacer_;\n    replacer_ = nullptr;\n    delete factory_;\n    factory_ = nullptr;\n  }\n\n  // Runs a death test that breaks the rules by returning.  
Such a death\n  // test cannot be run directly from a test routine that uses a\n  // MockDeathTest, or the remainder of the routine will not be executed.\n  static void RunReturningDeathTest(bool* flag) {\n    ASSERT_DEATH({  // NOLINT\n      *flag = true;\n      return;\n    }, \"\");\n  }\n};\n\ntesting::internal::ReplaceDeathTestFactory* MacroLogicDeathTest::replacer_ =\n    nullptr;\nMockDeathTestFactory* MacroLogicDeathTest::factory_ = nullptr;\n\n// Test that nothing happens when the factory doesn't return a DeathTest:\nTEST_F(MacroLogicDeathTest, NothingHappens) {\n  bool flag = false;\n  factory_->SetParameters(false, DeathTest::OVERSEE_TEST, 0, true);\n  EXPECT_DEATH(flag = true, \"\");\n  EXPECT_FALSE(flag);\n  EXPECT_EQ(0, factory_->AssumeRoleCalls());\n  EXPECT_EQ(0, factory_->WaitCalls());\n  EXPECT_EQ(0U, factory_->PassedCalls());\n  EXPECT_EQ(0U, factory_->AbortCalls());\n  EXPECT_FALSE(factory_->TestDeleted());\n}\n\n// Test that the parent process doesn't run the death test code,\n// and that the Passed method returns false when the (simulated)\n// child process exits with status 0:\nTEST_F(MacroLogicDeathTest, ChildExitsSuccessfully) {\n  bool flag = false;\n  factory_->SetParameters(true, DeathTest::OVERSEE_TEST, 0, true);\n  EXPECT_DEATH(flag = true, \"\");\n  EXPECT_FALSE(flag);\n  EXPECT_EQ(1, factory_->AssumeRoleCalls());\n  EXPECT_EQ(1, factory_->WaitCalls());\n  ASSERT_EQ(1U, factory_->PassedCalls());\n  EXPECT_FALSE(factory_->PassedArgument(0));\n  EXPECT_EQ(0U, factory_->AbortCalls());\n  EXPECT_TRUE(factory_->TestDeleted());\n}\n\n// Tests that the Passed method was given the argument \"true\" when\n// the (simulated) child process exits with status 1:\nTEST_F(MacroLogicDeathTest, ChildExitsUnsuccessfully) {\n  bool flag = false;\n  factory_->SetParameters(true, DeathTest::OVERSEE_TEST, 1, true);\n  EXPECT_DEATH(flag = true, \"\");\n  EXPECT_FALSE(flag);\n  EXPECT_EQ(1, factory_->AssumeRoleCalls());\n  EXPECT_EQ(1, 
factory_->WaitCalls());\n  ASSERT_EQ(1U, factory_->PassedCalls());\n  EXPECT_TRUE(factory_->PassedArgument(0));\n  EXPECT_EQ(0U, factory_->AbortCalls());\n  EXPECT_TRUE(factory_->TestDeleted());\n}\n\n// Tests that the (simulated) child process executes the death test\n// code, and is aborted with the correct AbortReason if it\n// executes a return statement.\nTEST_F(MacroLogicDeathTest, ChildPerformsReturn) {\n  bool flag = false;\n  factory_->SetParameters(true, DeathTest::EXECUTE_TEST, 0, true);\n  RunReturningDeathTest(&flag);\n  EXPECT_TRUE(flag);\n  EXPECT_EQ(1, factory_->AssumeRoleCalls());\n  EXPECT_EQ(0, factory_->WaitCalls());\n  EXPECT_EQ(0U, factory_->PassedCalls());\n  EXPECT_EQ(1U, factory_->AbortCalls());\n  EXPECT_EQ(DeathTest::TEST_ENCOUNTERED_RETURN_STATEMENT,\n            factory_->AbortArgument(0));\n  EXPECT_TRUE(factory_->TestDeleted());\n}\n\n// Tests that the (simulated) child process is aborted with the\n// correct AbortReason if it does not die.\nTEST_F(MacroLogicDeathTest, ChildDoesNotDie) {\n  bool flag = false;\n  factory_->SetParameters(true, DeathTest::EXECUTE_TEST, 0, true);\n  EXPECT_DEATH(flag = true, \"\");\n  EXPECT_TRUE(flag);\n  EXPECT_EQ(1, factory_->AssumeRoleCalls());\n  EXPECT_EQ(0, factory_->WaitCalls());\n  EXPECT_EQ(0U, factory_->PassedCalls());\n  // This time there are two calls to Abort: one since the test didn't\n  // die, and another from the ReturnSentinel when it's destroyed.  
The\n  // sentinel normally isn't destroyed if a test doesn't die, since\n  // _exit(2) is called in that case by ForkingDeathTest, but not by\n  // our MockDeathTest.\n  ASSERT_EQ(2U, factory_->AbortCalls());\n  EXPECT_EQ(DeathTest::TEST_DID_NOT_DIE,\n            factory_->AbortArgument(0));\n  EXPECT_EQ(DeathTest::TEST_ENCOUNTERED_RETURN_STATEMENT,\n            factory_->AbortArgument(1));\n  EXPECT_TRUE(factory_->TestDeleted());\n}\n\n// Tests that a successful death test does not register a successful\n// test part.\nTEST(SuccessRegistrationDeathTest, NoSuccessPart) {\n  EXPECT_DEATH(_exit(1), \"\");\n  EXPECT_EQ(0, GetUnitTestImpl()->current_test_result()->total_part_count());\n}\n\nTEST(StreamingAssertionsDeathTest, DeathTest) {\n  EXPECT_DEATH(_exit(1), \"\") << \"unexpected failure\";\n  ASSERT_DEATH(_exit(1), \"\") << \"unexpected failure\";\n  EXPECT_NONFATAL_FAILURE({  // NOLINT\n    EXPECT_DEATH(_exit(0), \"\") << \"expected failure\";\n  }, \"expected failure\");\n  EXPECT_FATAL_FAILURE({  // NOLINT\n    ASSERT_DEATH(_exit(0), \"\") << \"expected failure\";\n  }, \"expected failure\");\n}\n\n// Tests that GetLastErrnoDescription returns an empty string when the\n// last error is 0 and non-empty string when it is non-zero.\nTEST(GetLastErrnoDescription, GetLastErrnoDescriptionWorks) {\n  errno = ENOENT;\n  EXPECT_STRNE(\"\", GetLastErrnoDescription().c_str());\n  errno = 0;\n  EXPECT_STREQ(\"\", GetLastErrnoDescription().c_str());\n}\n\n# if GTEST_OS_WINDOWS\nTEST(AutoHandleTest, AutoHandleWorks) {\n  HANDLE handle = ::CreateEvent(NULL, FALSE, FALSE, NULL);\n  ASSERT_NE(INVALID_HANDLE_VALUE, handle);\n\n  // Tests that the AutoHandle is correctly initialized with a handle.\n  testing::internal::AutoHandle auto_handle(handle);\n  EXPECT_EQ(handle, auto_handle.Get());\n\n  // Tests that Reset assigns INVALID_HANDLE_VALUE.\n  // Note that this cannot verify whether the original handle is closed.\n  auto_handle.Reset();\n  EXPECT_EQ(INVALID_HANDLE_VALUE, 
auto_handle.Get());\n\n  // Tests that Reset assigns the new handle.\n  // Note that this cannot verify whether the original handle is closed.\n  handle = ::CreateEvent(NULL, FALSE, FALSE, NULL);\n  ASSERT_NE(INVALID_HANDLE_VALUE, handle);\n  auto_handle.Reset(handle);\n  EXPECT_EQ(handle, auto_handle.Get());\n\n  // Tests that AutoHandle contains INVALID_HANDLE_VALUE by default.\n  testing::internal::AutoHandle auto_handle2;\n  EXPECT_EQ(INVALID_HANDLE_VALUE, auto_handle2.Get());\n}\n# endif  // GTEST_OS_WINDOWS\n\n# if GTEST_OS_WINDOWS\ntypedef unsigned __int64 BiggestParsable;\ntypedef signed __int64 BiggestSignedParsable;\n# else\ntypedef unsigned long long BiggestParsable;\ntypedef signed long long BiggestSignedParsable;\n# endif  // GTEST_OS_WINDOWS\n\n// We cannot use std::numeric_limits<T>::max() as it clashes with the\n// max() macro defined by <windows.h>.\nconst BiggestParsable kBiggestParsableMax = ULLONG_MAX;\nconst BiggestSignedParsable kBiggestSignedParsableMax = LLONG_MAX;\n\nTEST(ParseNaturalNumberTest, RejectsInvalidFormat) {\n  BiggestParsable result = 0;\n\n  // Rejects non-numbers.\n  EXPECT_FALSE(ParseNaturalNumber(\"non-number string\", &result));\n\n  // Rejects numbers with whitespace prefix.\n  EXPECT_FALSE(ParseNaturalNumber(\" 123\", &result));\n\n  // Rejects negative numbers.\n  EXPECT_FALSE(ParseNaturalNumber(\"-123\", &result));\n\n  // Rejects numbers starting with a plus sign.\n  EXPECT_FALSE(ParseNaturalNumber(\"+123\", &result));\n  errno = 0;\n}\n\nTEST(ParseNaturalNumberTest, RejectsOverflownNumbers) {\n  BiggestParsable result = 0;\n\n  EXPECT_FALSE(ParseNaturalNumber(\"99999999999999999999999\", &result));\n\n  signed char char_result = 0;\n  EXPECT_FALSE(ParseNaturalNumber(\"200\", &char_result));\n  errno = 0;\n}\n\nTEST(ParseNaturalNumberTest, AcceptsValidNumbers) {\n  BiggestParsable result = 0;\n\n  result = 0;\n  ASSERT_TRUE(ParseNaturalNumber(\"123\", &result));\n  EXPECT_EQ(123U, result);\n\n  // Check 0 as an edge 
case.\n  result = 1;\n  ASSERT_TRUE(ParseNaturalNumber(\"0\", &result));\n  EXPECT_EQ(0U, result);\n\n  result = 1;\n  ASSERT_TRUE(ParseNaturalNumber(\"00000\", &result));\n  EXPECT_EQ(0U, result);\n}\n\nTEST(ParseNaturalNumberTest, AcceptsTypeLimits) {\n  Message msg;\n  msg << kBiggestParsableMax;\n\n  BiggestParsable result = 0;\n  EXPECT_TRUE(ParseNaturalNumber(msg.GetString(), &result));\n  EXPECT_EQ(kBiggestParsableMax, result);\n\n  Message msg2;\n  msg2 << kBiggestSignedParsableMax;\n\n  BiggestSignedParsable signed_result = 0;\n  EXPECT_TRUE(ParseNaturalNumber(msg2.GetString(), &signed_result));\n  EXPECT_EQ(kBiggestSignedParsableMax, signed_result);\n\n  Message msg3;\n  msg3 << INT_MAX;\n\n  int int_result = 0;\n  EXPECT_TRUE(ParseNaturalNumber(msg3.GetString(), &int_result));\n  EXPECT_EQ(INT_MAX, int_result);\n\n  Message msg4;\n  msg4 << UINT_MAX;\n\n  unsigned int uint_result = 0;\n  EXPECT_TRUE(ParseNaturalNumber(msg4.GetString(), &uint_result));\n  EXPECT_EQ(UINT_MAX, uint_result);\n}\n\nTEST(ParseNaturalNumberTest, WorksForShorterIntegers) {\n  short short_result = 0;\n  ASSERT_TRUE(ParseNaturalNumber(\"123\", &short_result));\n  EXPECT_EQ(123, short_result);\n\n  signed char char_result = 0;\n  ASSERT_TRUE(ParseNaturalNumber(\"123\", &char_result));\n  EXPECT_EQ(123, char_result);\n}\n\n# if GTEST_OS_WINDOWS\nTEST(EnvironmentTest, HandleFitsIntoSizeT) {\n  ASSERT_TRUE(sizeof(HANDLE) <= sizeof(size_t));\n}\n# endif  // GTEST_OS_WINDOWS\n\n// Tests that EXPECT_DEATH_IF_SUPPORTED/ASSERT_DEATH_IF_SUPPORTED trigger\n// failures when death tests are available on the system.\nTEST(ConditionalDeathMacrosDeathTest, ExpectsDeathWhenDeathTestsAvailable) {\n  EXPECT_DEATH_IF_SUPPORTED(DieInside(\"CondDeathTestExpectMacro\"),\n                            \"death inside CondDeathTestExpectMacro\");\n  ASSERT_DEATH_IF_SUPPORTED(DieInside(\"CondDeathTestAssertMacro\"),\n                            \"death inside CondDeathTestAssertMacro\");\n\n  // Empty 
statement will not crash, which must trigger a failure.\n  EXPECT_NONFATAL_FAILURE(EXPECT_DEATH_IF_SUPPORTED(;, \"\"), \"\");\n  EXPECT_FATAL_FAILURE(ASSERT_DEATH_IF_SUPPORTED(;, \"\"), \"\");\n}\n\nTEST(InDeathTestChildDeathTest, ReportsDeathTestCorrectlyInFastStyle) {\n  testing::GTEST_FLAG(death_test_style) = \"fast\";\n  EXPECT_FALSE(InDeathTestChild());\n  EXPECT_DEATH({\n    fprintf(stderr, InDeathTestChild() ? \"Inside\" : \"Outside\");\n    fflush(stderr);\n    _exit(1);\n  }, \"Inside\");\n}\n\nTEST(InDeathTestChildDeathTest, ReportsDeathTestCorrectlyInThreadSafeStyle) {\n  testing::GTEST_FLAG(death_test_style) = \"threadsafe\";\n  EXPECT_FALSE(InDeathTestChild());\n  EXPECT_DEATH({\n    fprintf(stderr, InDeathTestChild() ? \"Inside\" : \"Outside\");\n    fflush(stderr);\n    _exit(1);\n  }, \"Inside\");\n}\n\nvoid DieWithMessage(const char* message) {\n  fputs(message, stderr);\n  fflush(stderr);  // Make sure the text is printed before the process exits.\n  _exit(1);\n}\n\nTEST(MatcherDeathTest, DoesNotBreakBareRegexMatching) {\n  // googletest tests this, of course; here we ensure that including googlemock\n  // has not broken it.\n  EXPECT_DEATH(DieWithMessage(\"O, I die, Horatio.\"), \"I d[aeiou]e\");\n}\n\nTEST(MatcherDeathTest, MonomorphicMatcherMatches) {\n  EXPECT_DEATH(DieWithMessage(\"Behind O, I am slain!\"),\n               Matcher<const std::string&>(ContainsRegex(\"I am slain\")));\n}\n\nTEST(MatcherDeathTest, MonomorphicMatcherDoesNotMatch) {\n  EXPECT_NONFATAL_FAILURE(\n      EXPECT_DEATH(\n          DieWithMessage(\"Behind O, I am slain!\"),\n          Matcher<const std::string&>(ContainsRegex(\"Ow, I am slain\"))),\n      \"Expected: contains regular expression \\\"Ow, I am slain\\\"\");\n}\n\nTEST(MatcherDeathTest, PolymorphicMatcherMatches) {\n  EXPECT_DEATH(DieWithMessage(\"The rest is silence.\"),\n               ContainsRegex(\"rest is silence\"));\n}\n\nTEST(MatcherDeathTest, PolymorphicMatcherDoesNotMatch) {\n  
EXPECT_NONFATAL_FAILURE(\n      EXPECT_DEATH(DieWithMessage(\"The rest is silence.\"),\n                   ContainsRegex(\"rest is science\")),\n      \"Expected: contains regular expression \\\"rest is science\\\"\");\n}\n\n}  // namespace\n\n#else  // !GTEST_HAS_DEATH_TEST follows\n\nnamespace {\n\nusing testing::internal::CaptureStderr;\nusing testing::internal::GetCapturedStderr;\n\n// Tests that EXPECT_DEATH_IF_SUPPORTED/ASSERT_DEATH_IF_SUPPORTED are still\n// defined but do not trigger failures when death tests are not available on\n// the system.\nTEST(ConditionalDeathMacrosTest, WarnsWhenDeathTestsNotAvailable) {\n  // Empty statement will not crash, but that should not trigger a failure\n  // when death tests are not supported.\n  CaptureStderr();\n  EXPECT_DEATH_IF_SUPPORTED(;, \"\");\n  std::string output = GetCapturedStderr();\n  ASSERT_TRUE(NULL != strstr(output.c_str(),\n                             \"Death tests are not supported on this platform\"));\n  ASSERT_TRUE(NULL != strstr(output.c_str(), \";\"));\n\n  // The streamed message should not be printed as there is no test failure.\n  CaptureStderr();\n  EXPECT_DEATH_IF_SUPPORTED(;, \"\") << \"streamed message\";\n  output = GetCapturedStderr();\n  ASSERT_TRUE(NULL == strstr(output.c_str(), \"streamed message\"));\n\n  CaptureStderr();\n  ASSERT_DEATH_IF_SUPPORTED(;, \"\");  // NOLINT\n  output = GetCapturedStderr();\n  ASSERT_TRUE(NULL != strstr(output.c_str(),\n                             \"Death tests are not supported on this platform\"));\n  ASSERT_TRUE(NULL != strstr(output.c_str(), \";\"));\n\n  CaptureStderr();\n  ASSERT_DEATH_IF_SUPPORTED(;, \"\") << \"streamed message\";  // NOLINT\n  output = GetCapturedStderr();\n  ASSERT_TRUE(NULL == strstr(output.c_str(), \"streamed message\"));\n}\n\nvoid FuncWithAssert(int* n) {\n  ASSERT_DEATH_IF_SUPPORTED(return;, \"\");\n  (*n)++;\n}\n\n// Tests that ASSERT_DEATH_IF_SUPPORTED does not return from the current\n// function (as ASSERT_DEATH does) 
if death tests are not supported.\nTEST(ConditionalDeathMacrosTest, AssertDeatDoesNotReturnhIfUnsupported) {\n  int n = 0;\n  FuncWithAssert(&n);\n  EXPECT_EQ(1, n);\n}\n\n}  // namespace\n\n#endif  // !GTEST_HAS_DEATH_TEST\n\nnamespace {\n\n// Tests that the death test macros expand to code which may or may not\n// be followed by operator<<, and that in either case the complete text\n// comprises only a single C++ statement.\n//\n// The syntax should work whether death tests are available or not.\nTEST(ConditionalDeathMacrosSyntaxDeathTest, SingleStatement) {\n  if (AlwaysFalse())\n    // This would fail if executed; this is a compilation test only\n    ASSERT_DEATH_IF_SUPPORTED(return, \"\");\n\n  if (AlwaysTrue())\n    EXPECT_DEATH_IF_SUPPORTED(_exit(1), \"\");\n  else\n    // This empty \"else\" branch is meant to ensure that EXPECT_DEATH\n    // doesn't expand into an \"if\" statement without an \"else\"\n    ;  // NOLINT\n\n  if (AlwaysFalse())\n    ASSERT_DEATH_IF_SUPPORTED(return, \"\") << \"did not die\";\n\n  if (AlwaysFalse())\n    ;  // NOLINT\n  else\n    EXPECT_DEATH_IF_SUPPORTED(_exit(1), \"\") << 1 << 2 << 3;\n}\n\n// Tests that conditional death test macros expand to code which interacts\n// well with switch statements.\nTEST(ConditionalDeathMacrosSyntaxDeathTest, SwitchStatement) {\n  // Microsoft compiler usually complains about switch statements without\n  // case labels. We suppress that warning for this test.\n  GTEST_DISABLE_MSC_WARNINGS_PUSH_(4065)\n\n  switch (0)\n    default:\n      ASSERT_DEATH_IF_SUPPORTED(_exit(1), \"\")\n          << \"exit in default switch handler\";\n\n  switch (0)\n    case 0:\n      EXPECT_DEATH_IF_SUPPORTED(_exit(1), \"\") << \"exit in switch case\";\n\n  GTEST_DISABLE_MSC_WARNINGS_POP_()\n}\n\n// Tests that a test case whose name ends with \"DeathTest\" works fine\n// on Windows.\nTEST(NotADeathTest, Test) {\n  SUCCEED();\n}\n\n}  // namespace\n"
  },
  {
    "path": "test/googletest/test/googletest-death-test_ex_test.cc",
    "content": "// Copyright 2010, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n//\n// Tests that verify interaction of exceptions and death tests.\n\n#include \"gtest/gtest-death-test.h\"\n#include \"gtest/gtest.h\"\n\n#if GTEST_HAS_DEATH_TEST\n\n# if GTEST_HAS_SEH\n#  include <windows.h>          // For RaiseException().\n# endif\n\n# include \"gtest/gtest-spi.h\"\n\n# if GTEST_HAS_EXCEPTIONS\n\n#  include <exception>  // For std::exception.\n\n// Tests that death tests report thrown exceptions as failures and that the\n// exceptions do not escape death test macros.\nTEST(CxxExceptionDeathTest, ExceptionIsFailure) {\n  try {\n    EXPECT_NONFATAL_FAILURE(EXPECT_DEATH(throw 1, \"\"), \"threw an exception\");\n  } catch (...) {  // NOLINT\n    FAIL() << \"An exception escaped a death test macro invocation \"\n           << \"with catch_exceptions \"\n           << (testing::GTEST_FLAG(catch_exceptions) ? 
\"enabled\" : \"disabled\");\n  }\n}\n\nclass TestException : public std::exception {\n public:\n  const char* what() const throw() override { return \"exceptional message\"; }\n};\n\nTEST(CxxExceptionDeathTest, PrintsMessageForStdExceptions) {\n  // Verifies that the exception message is quoted in the failure text.\n  EXPECT_NONFATAL_FAILURE(EXPECT_DEATH(throw TestException(), \"\"),\n                          \"exceptional message\");\n  // Verifies that the location is mentioned in the failure text.\n  EXPECT_NONFATAL_FAILURE(EXPECT_DEATH(throw TestException(), \"\"),\n                          __FILE__);\n}\n# endif  // GTEST_HAS_EXCEPTIONS\n\n# if GTEST_HAS_SEH\n// Tests that enabling interception of SEH exceptions with the\n// catch_exceptions flag does not interfere with SEH exceptions being\n// treated as death by death tests.\nTEST(SehExceptionDeasTest, CatchExceptionsDoesNotInterfere) {\n  EXPECT_DEATH(RaiseException(42, 0x0, 0, NULL), \"\")\n      << \"with catch_exceptions \"\n      << (testing::GTEST_FLAG(catch_exceptions) ? \"enabled\" : \"disabled\");\n}\n# endif\n\n#endif  // GTEST_HAS_DEATH_TEST\n\nint main(int argc, char** argv) {\n  testing::InitGoogleTest(&argc, argv);\n  testing::GTEST_FLAG(catch_exceptions) = GTEST_ENABLE_CATCH_EXCEPTIONS_ != 0;\n  return RUN_ALL_TESTS();\n}\n"
  },
  {
    "path": "test/googletest/test/googletest-env-var-test.py",
    "content": "#!/usr/bin/env python\n#\n# Copyright 2008, Google Inc.\n# All rights reserved.\n#\n# Redistribution and use in source and binary forms, with or without\n# modification, are permitted provided that the following conditions are\n# met:\n#\n#     * Redistributions of source code must retain the above copyright\n# notice, this list of conditions and the following disclaimer.\n#     * Redistributions in binary form must reproduce the above\n# copyright notice, this list of conditions and the following disclaimer\n# in the documentation and/or other materials provided with the\n# distribution.\n#     * Neither the name of Google Inc. nor the names of its\n# contributors may be used to endorse or promote products derived from\n# this software without specific prior written permission.\n#\n# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n# \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\"\"\"Verifies that Google Test correctly parses environment variables.\"\"\"\n\nimport os\nimport gtest_test_utils\n\n\nIS_WINDOWS = os.name == 'nt'\nIS_LINUX = os.name == 'posix' and os.uname()[0] == 'Linux'\n\nCOMMAND = gtest_test_utils.GetTestExecutablePath('googletest-env-var-test_')\n\nenviron = os.environ.copy()\n\n\ndef AssertEq(expected, actual):\n  if expected != actual:\n    print('Expected: %s' % (expected,))\n    print('  Actual: %s' % (actual,))\n    raise AssertionError\n\n\ndef SetEnvVar(env_var, value):\n  \"\"\"Sets the env variable to 'value'; unsets it when 'value' is None.\"\"\"\n\n  if value is not None:\n    environ[env_var] = value\n  elif env_var in environ:\n    del environ[env_var]\n\n\ndef GetFlag(flag):\n  \"\"\"Runs googletest-env-var-test_ and returns its output.\"\"\"\n\n  args = [COMMAND]\n  if flag is not None:\n    args += [flag]\n  return gtest_test_utils.Subprocess(args, env=environ).output\n\n\ndef TestFlag(flag, test_val, default_val):\n  \"\"\"Verifies that the given flag is affected by the corresponding env var.\"\"\"\n\n  env_var = 'GTEST_' + flag.upper()\n  SetEnvVar(env_var, test_val)\n  AssertEq(test_val, GetFlag(flag))\n  SetEnvVar(env_var, None)\n  AssertEq(default_val, GetFlag(flag))\n\n\nclass GTestEnvVarTest(gtest_test_utils.TestCase):\n\n  def testEnvVarAffectsFlag(self):\n    \"\"\"Tests that environment variable should affect the corresponding flag.\"\"\"\n\n    
TestFlag('break_on_failure', '1', '0')\n    TestFlag('color', 'yes', 'auto')\n    TestFlag('filter', 'FooTest.Bar', '*')\n    SetEnvVar('XML_OUTPUT_FILE', None)  # For 'output' test\n    TestFlag('output', 'xml:tmp/foo.xml', '')\n    TestFlag('print_time', '0', '1')\n    TestFlag('repeat', '999', '1')\n    TestFlag('throw_on_failure', '1', '0')\n    TestFlag('death_test_style', 'threadsafe', 'fast')\n    TestFlag('catch_exceptions', '0', '1')\n\n    if IS_LINUX:\n      TestFlag('death_test_use_fork', '1', '0')\n      TestFlag('stack_trace_depth', '0', '100')\n\n\n  def testXmlOutputFile(self):\n    \"\"\"Tests that $XML_OUTPUT_FILE affects the output flag.\"\"\"\n\n    SetEnvVar('GTEST_OUTPUT', None)\n    SetEnvVar('XML_OUTPUT_FILE', 'tmp/bar.xml')\n    AssertEq('xml:tmp/bar.xml', GetFlag('output'))\n\n  def testXmlOutputFileOverride(self):\n    \"\"\"Tests that $XML_OUTPUT_FILE is overridden by $GTEST_OUTPUT.\"\"\"\n\n    SetEnvVar('GTEST_OUTPUT', 'xml:tmp/foo.xml')\n    SetEnvVar('XML_OUTPUT_FILE', 'tmp/bar.xml')\n    AssertEq('xml:tmp/foo.xml', GetFlag('output'))\n\nif __name__ == '__main__':\n  gtest_test_utils.Main()\n"
  },
  {
    "path": "test/googletest/test/googletest-env-var-test_.cc",
    "content": "// Copyright 2008, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\n// A helper program for testing that Google Test parses the environment\n// variables correctly.\n\n#include <iostream>\n\n#include \"gtest/gtest.h\"\n#include \"src/gtest-internal-inl.h\"\n\nusing ::std::cout;\n\nnamespace testing {\n\n// The purpose of this is to make the test more realistic by ensuring\n// that the UnitTest singleton is created before main() is entered.\n// We don't actual run the TEST itself.\nTEST(GTestEnvVarTest, Dummy) {\n}\n\nvoid PrintFlag(const char* flag) {\n  if (strcmp(flag, \"break_on_failure\") == 0) {\n    cout << GTEST_FLAG(break_on_failure);\n    return;\n  }\n\n  if (strcmp(flag, \"catch_exceptions\") == 0) {\n    cout << GTEST_FLAG(catch_exceptions);\n    return;\n  }\n\n  if (strcmp(flag, \"color\") == 0) {\n    cout << GTEST_FLAG(color);\n    return;\n  }\n\n  if (strcmp(flag, \"death_test_style\") == 0) {\n    cout << GTEST_FLAG(death_test_style);\n    return;\n  }\n\n  if (strcmp(flag, \"death_test_use_fork\") == 0) {\n    cout << GTEST_FLAG(death_test_use_fork);\n    return;\n  }\n\n  if (strcmp(flag, \"filter\") == 0) {\n    cout << GTEST_FLAG(filter);\n    return;\n  }\n\n  if (strcmp(flag, \"output\") == 0) {\n    cout << GTEST_FLAG(output);\n    return;\n  }\n\n  if (strcmp(flag, \"print_time\") == 0) {\n    cout << GTEST_FLAG(print_time);\n    return;\n  }\n\n  if (strcmp(flag, \"repeat\") == 0) {\n    cout << GTEST_FLAG(repeat);\n    return;\n  }\n\n  if (strcmp(flag, 
\"stack_trace_depth\") == 0) {\n    cout << GTEST_FLAG(stack_trace_depth);\n    return;\n  }\n\n  if (strcmp(flag, \"throw_on_failure\") == 0) {\n    cout << GTEST_FLAG(throw_on_failure);\n    return;\n  }\n\n  cout << \"Invalid flag name \" << flag\n       << \".  Valid names are break_on_failure, color, filter, etc.\\n\";\n  exit(1);\n}\n\n}  // namespace testing\n\nint main(int argc, char** argv) {\n  testing::InitGoogleTest(&argc, argv);\n\n  if (argc != 2) {\n    cout << \"Usage: googletest-env-var-test_ NAME_OF_FLAG\\n\";\n    return 1;\n  }\n\n  testing::PrintFlag(argv[1]);\n  return 0;\n}\n"
  },
  {
    "path": "test/googletest/test/googletest-filepath-test.cc",
    "content": "// Copyright 2008, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n//\n// Google Test filepath utilities\n//\n// This file tests classes and functions used internally by\n// Google Test.  
They are subject to change without notice.\n//\n// This file is #included from gtest-internal.h.\n// Do not #include this file anywhere else!\n\n#include \"gtest/internal/gtest-filepath.h\"\n#include \"gtest/gtest.h\"\n#include \"src/gtest-internal-inl.h\"\n\n#if GTEST_OS_WINDOWS_MOBILE\n# include <windows.h>  // NOLINT\n#elif GTEST_OS_WINDOWS\n# include <direct.h>  // NOLINT\n#endif  // GTEST_OS_WINDOWS_MOBILE\n\nnamespace testing {\nnamespace internal {\nnamespace {\n\n#if GTEST_OS_WINDOWS_MOBILE\n\n// Windows CE doesn't have the remove C function.\nint remove(const char* path) {\n  LPCWSTR wpath = String::AnsiToUtf16(path);\n  int ret = DeleteFile(wpath) ? 0 : -1;\n  delete [] wpath;\n  return ret;\n}\n// Windows CE doesn't have the _rmdir C function.\nint _rmdir(const char* path) {\n  FilePath filepath(path);\n  LPCWSTR wpath = String::AnsiToUtf16(\n      filepath.RemoveTrailingPathSeparator().c_str());\n  int ret = RemoveDirectory(wpath) ? 0 : -1;\n  delete [] wpath;\n  return ret;\n}\n\n#else\n\nTEST(GetCurrentDirTest, ReturnsCurrentDir) {\n  const FilePath original_dir = FilePath::GetCurrentDir();\n  EXPECT_FALSE(original_dir.IsEmpty());\n\n  posix::ChDir(GTEST_PATH_SEP_);\n  const FilePath cwd = FilePath::GetCurrentDir();\n  posix::ChDir(original_dir.c_str());\n\n# if GTEST_OS_WINDOWS || GTEST_OS_OS2\n\n  // Skips the \":\".\n  const char* const cwd_without_drive = strchr(cwd.c_str(), ':');\n  ASSERT_TRUE(cwd_without_drive != NULL);\n  EXPECT_STREQ(GTEST_PATH_SEP_, cwd_without_drive + 1);\n\n# else\n\n  EXPECT_EQ(GTEST_PATH_SEP_, cwd.string());\n\n# endif\n}\n\n#endif  // GTEST_OS_WINDOWS_MOBILE\n\nTEST(IsEmptyTest, ReturnsTrueForEmptyPath) {\n  EXPECT_TRUE(FilePath(\"\").IsEmpty());\n}\n\nTEST(IsEmptyTest, ReturnsFalseForNonEmptyPath) {\n  EXPECT_FALSE(FilePath(\"a\").IsEmpty());\n  EXPECT_FALSE(FilePath(\".\").IsEmpty());\n  EXPECT_FALSE(FilePath(\"a/b\").IsEmpty());\n  EXPECT_FALSE(FilePath(\"a\\\\b\\\\\").IsEmpty());\n}\n\n// RemoveDirectoryName \"\" -> 
\"\"\nTEST(RemoveDirectoryNameTest, WhenEmptyName) {\n  EXPECT_EQ(\"\", FilePath(\"\").RemoveDirectoryName().string());\n}\n\n// RemoveDirectoryName \"afile\" -> \"afile\"\nTEST(RemoveDirectoryNameTest, ButNoDirectory) {\n  EXPECT_EQ(\"afile\",\n      FilePath(\"afile\").RemoveDirectoryName().string());\n}\n\n// RemoveDirectoryName \"/afile\" -> \"afile\"\nTEST(RemoveDirectoryNameTest, RootFileShouldGiveFileName) {\n  EXPECT_EQ(\"afile\",\n      FilePath(GTEST_PATH_SEP_ \"afile\").RemoveDirectoryName().string());\n}\n\n// RemoveDirectoryName \"adir/\" -> \"\"\nTEST(RemoveDirectoryNameTest, WhereThereIsNoFileName) {\n  EXPECT_EQ(\"\",\n      FilePath(\"adir\" GTEST_PATH_SEP_).RemoveDirectoryName().string());\n}\n\n// RemoveDirectoryName \"adir/afile\" -> \"afile\"\nTEST(RemoveDirectoryNameTest, ShouldGiveFileName) {\n  EXPECT_EQ(\"afile\",\n      FilePath(\"adir\" GTEST_PATH_SEP_ \"afile\").RemoveDirectoryName().string());\n}\n\n// RemoveDirectoryName \"adir/subdir/afile\" -> \"afile\"\nTEST(RemoveDirectoryNameTest, ShouldAlsoGiveFileName) {\n  EXPECT_EQ(\"afile\",\n      FilePath(\"adir\" GTEST_PATH_SEP_ \"subdir\" GTEST_PATH_SEP_ \"afile\")\n      .RemoveDirectoryName().string());\n}\n\n#if GTEST_HAS_ALT_PATH_SEP_\n\n// Tests that RemoveDirectoryName() works with the alternate separator\n// on Windows.\n\n// RemoveDirectoryName(\"/afile\") -> \"afile\"\nTEST(RemoveDirectoryNameTest, RootFileShouldGiveFileNameForAlternateSeparator) {\n  EXPECT_EQ(\"afile\", FilePath(\"/afile\").RemoveDirectoryName().string());\n}\n\n// RemoveDirectoryName(\"adir/\") -> \"\"\nTEST(RemoveDirectoryNameTest, WhereThereIsNoFileNameForAlternateSeparator) {\n  EXPECT_EQ(\"\", FilePath(\"adir/\").RemoveDirectoryName().string());\n}\n\n// RemoveDirectoryName(\"adir/afile\") -> \"afile\"\nTEST(RemoveDirectoryNameTest, ShouldGiveFileNameForAlternateSeparator) {\n  EXPECT_EQ(\"afile\", FilePath(\"adir/afile\").RemoveDirectoryName().string());\n}\n\n// RemoveDirectoryName(\"adir/subdir/afile\") 
-> \"afile\"\nTEST(RemoveDirectoryNameTest, ShouldAlsoGiveFileNameForAlternateSeparator) {\n  EXPECT_EQ(\"afile\",\n            FilePath(\"adir/subdir/afile\").RemoveDirectoryName().string());\n}\n\n#endif\n\n// RemoveFileName \"\" -> \"./\"\nTEST(RemoveFileNameTest, EmptyName) {\n#if GTEST_OS_WINDOWS_MOBILE\n  // On Windows CE, we use the root as the current directory.\n  EXPECT_EQ(GTEST_PATH_SEP_, FilePath(\"\").RemoveFileName().string());\n#else\n  EXPECT_EQ(\".\" GTEST_PATH_SEP_, FilePath(\"\").RemoveFileName().string());\n#endif\n}\n\n// RemoveFileName \"adir/\" -> \"adir/\"\nTEST(RemoveFileNameTest, ButNoFile) {\n  EXPECT_EQ(\"adir\" GTEST_PATH_SEP_,\n      FilePath(\"adir\" GTEST_PATH_SEP_).RemoveFileName().string());\n}\n\n// RemoveFileName \"adir/afile\" -> \"adir/\"\nTEST(RemoveFileNameTest, GivesDirName) {\n  EXPECT_EQ(\"adir\" GTEST_PATH_SEP_,\n            FilePath(\"adir\" GTEST_PATH_SEP_ \"afile\").RemoveFileName().string());\n}\n\n// RemoveFileName \"adir/subdir/afile\" -> \"adir/subdir/\"\nTEST(RemoveFileNameTest, GivesDirAndSubDirName) {\n  EXPECT_EQ(\"adir\" GTEST_PATH_SEP_ \"subdir\" GTEST_PATH_SEP_,\n      FilePath(\"adir\" GTEST_PATH_SEP_ \"subdir\" GTEST_PATH_SEP_ \"afile\")\n      .RemoveFileName().string());\n}\n\n// RemoveFileName \"/afile\" -> \"/\"\nTEST(RemoveFileNameTest, GivesRootDir) {\n  EXPECT_EQ(GTEST_PATH_SEP_,\n      FilePath(GTEST_PATH_SEP_ \"afile\").RemoveFileName().string());\n}\n\n#if GTEST_HAS_ALT_PATH_SEP_\n\n// Tests that RemoveFileName() works with the alternate separator on\n// Windows.\n\n// RemoveFileName(\"adir/\") -> \"adir/\"\nTEST(RemoveFileNameTest, ButNoFileForAlternateSeparator) {\n  EXPECT_EQ(\"adir\" GTEST_PATH_SEP_,\n            FilePath(\"adir/\").RemoveFileName().string());\n}\n\n// RemoveFileName(\"adir/afile\") -> \"adir/\"\nTEST(RemoveFileNameTest, GivesDirNameForAlternateSeparator) {\n  EXPECT_EQ(\"adir\" GTEST_PATH_SEP_,\n            FilePath(\"adir/afile\").RemoveFileName().string());\n}\n\n// 
RemoveFileName(\"adir/subdir/afile\") -> \"adir/subdir/\"\nTEST(RemoveFileNameTest, GivesDirAndSubDirNameForAlternateSeparator) {\n  EXPECT_EQ(\"adir\" GTEST_PATH_SEP_ \"subdir\" GTEST_PATH_SEP_,\n            FilePath(\"adir/subdir/afile\").RemoveFileName().string());\n}\n\n// RemoveFileName(\"/afile\") -> \"\\\"\nTEST(RemoveFileNameTest, GivesRootDirForAlternateSeparator) {\n  EXPECT_EQ(GTEST_PATH_SEP_, FilePath(\"/afile\").RemoveFileName().string());\n}\n\n#endif\n\nTEST(MakeFileNameTest, GenerateWhenNumberIsZero) {\n  FilePath actual = FilePath::MakeFileName(FilePath(\"foo\"), FilePath(\"bar\"),\n      0, \"xml\");\n  EXPECT_EQ(\"foo\" GTEST_PATH_SEP_ \"bar.xml\", actual.string());\n}\n\nTEST(MakeFileNameTest, GenerateFileNameNumberGtZero) {\n  FilePath actual = FilePath::MakeFileName(FilePath(\"foo\"), FilePath(\"bar\"),\n      12, \"xml\");\n  EXPECT_EQ(\"foo\" GTEST_PATH_SEP_ \"bar_12.xml\", actual.string());\n}\n\nTEST(MakeFileNameTest, GenerateFileNameWithSlashNumberIsZero) {\n  FilePath actual = FilePath::MakeFileName(FilePath(\"foo\" GTEST_PATH_SEP_),\n      FilePath(\"bar\"), 0, \"xml\");\n  EXPECT_EQ(\"foo\" GTEST_PATH_SEP_ \"bar.xml\", actual.string());\n}\n\nTEST(MakeFileNameTest, GenerateFileNameWithSlashNumberGtZero) {\n  FilePath actual = FilePath::MakeFileName(FilePath(\"foo\" GTEST_PATH_SEP_),\n      FilePath(\"bar\"), 12, \"xml\");\n  EXPECT_EQ(\"foo\" GTEST_PATH_SEP_ \"bar_12.xml\", actual.string());\n}\n\nTEST(MakeFileNameTest, GenerateWhenNumberIsZeroAndDirIsEmpty) {\n  FilePath actual = FilePath::MakeFileName(FilePath(\"\"), FilePath(\"bar\"),\n      0, \"xml\");\n  EXPECT_EQ(\"bar.xml\", actual.string());\n}\n\nTEST(MakeFileNameTest, GenerateWhenNumberIsNotZeroAndDirIsEmpty) {\n  FilePath actual = FilePath::MakeFileName(FilePath(\"\"), FilePath(\"bar\"),\n      14, \"xml\");\n  EXPECT_EQ(\"bar_14.xml\", actual.string());\n}\n\nTEST(ConcatPathsTest, WorksWhenDirDoesNotEndWithPathSep) {\n  FilePath actual = 
FilePath::ConcatPaths(FilePath(\"foo\"),\n                                          FilePath(\"bar.xml\"));\n  EXPECT_EQ(\"foo\" GTEST_PATH_SEP_ \"bar.xml\", actual.string());\n}\n\nTEST(ConcatPathsTest, WorksWhenPath1EndsWithPathSep) {\n  FilePath actual = FilePath::ConcatPaths(FilePath(\"foo\" GTEST_PATH_SEP_),\n                                          FilePath(\"bar.xml\"));\n  EXPECT_EQ(\"foo\" GTEST_PATH_SEP_ \"bar.xml\", actual.string());\n}\n\nTEST(ConcatPathsTest, Path1BeingEmpty) {\n  FilePath actual = FilePath::ConcatPaths(FilePath(\"\"),\n                                          FilePath(\"bar.xml\"));\n  EXPECT_EQ(\"bar.xml\", actual.string());\n}\n\nTEST(ConcatPathsTest, Path2BeingEmpty) {\n  FilePath actual = FilePath::ConcatPaths(FilePath(\"foo\"), FilePath(\"\"));\n  EXPECT_EQ(\"foo\" GTEST_PATH_SEP_, actual.string());\n}\n\nTEST(ConcatPathsTest, BothPathBeingEmpty) {\n  FilePath actual = FilePath::ConcatPaths(FilePath(\"\"),\n                                          FilePath(\"\"));\n  EXPECT_EQ(\"\", actual.string());\n}\n\nTEST(ConcatPathsTest, Path1ContainsPathSep) {\n  FilePath actual = FilePath::ConcatPaths(FilePath(\"foo\" GTEST_PATH_SEP_ \"bar\"),\n                                          FilePath(\"foobar.xml\"));\n  EXPECT_EQ(\"foo\" GTEST_PATH_SEP_ \"bar\" GTEST_PATH_SEP_ \"foobar.xml\",\n            actual.string());\n}\n\nTEST(ConcatPathsTest, Path2ContainsPathSep) {\n  FilePath actual = FilePath::ConcatPaths(\n      FilePath(\"foo\" GTEST_PATH_SEP_),\n      FilePath(\"bar\" GTEST_PATH_SEP_ \"bar.xml\"));\n  EXPECT_EQ(\"foo\" GTEST_PATH_SEP_ \"bar\" GTEST_PATH_SEP_ \"bar.xml\",\n            actual.string());\n}\n\nTEST(ConcatPathsTest, Path2EndsWithPathSep) {\n  FilePath actual = FilePath::ConcatPaths(FilePath(\"foo\"),\n                                          FilePath(\"bar\" GTEST_PATH_SEP_));\n  EXPECT_EQ(\"foo\" GTEST_PATH_SEP_ \"bar\" GTEST_PATH_SEP_, actual.string());\n}\n\n// RemoveTrailingPathSeparator \"\" -> 
\"\"\nTEST(RemoveTrailingPathSeparatorTest, EmptyString) {\n  EXPECT_EQ(\"\", FilePath(\"\").RemoveTrailingPathSeparator().string());\n}\n\n// RemoveTrailingPathSeparator \"foo\" -> \"foo\"\nTEST(RemoveTrailingPathSeparatorTest, FileNoSlashString) {\n  EXPECT_EQ(\"foo\", FilePath(\"foo\").RemoveTrailingPathSeparator().string());\n}\n\n// RemoveTrailingPathSeparator \"foo/\" -> \"foo\"\nTEST(RemoveTrailingPathSeparatorTest, ShouldRemoveTrailingSeparator) {\n  EXPECT_EQ(\"foo\",\n      FilePath(\"foo\" GTEST_PATH_SEP_).RemoveTrailingPathSeparator().string());\n#if GTEST_HAS_ALT_PATH_SEP_\n  EXPECT_EQ(\"foo\", FilePath(\"foo/\").RemoveTrailingPathSeparator().string());\n#endif\n}\n\n// RemoveTrailingPathSeparator \"foo/bar/\" -> \"foo/bar/\"\nTEST(RemoveTrailingPathSeparatorTest, ShouldRemoveLastSeparator) {\n  EXPECT_EQ(\"foo\" GTEST_PATH_SEP_ \"bar\",\n            FilePath(\"foo\" GTEST_PATH_SEP_ \"bar\" GTEST_PATH_SEP_)\n                .RemoveTrailingPathSeparator().string());\n}\n\n// RemoveTrailingPathSeparator \"foo/bar\" -> \"foo/bar\"\nTEST(RemoveTrailingPathSeparatorTest, ShouldReturnUnmodified) {\n  EXPECT_EQ(\"foo\" GTEST_PATH_SEP_ \"bar\",\n            FilePath(\"foo\" GTEST_PATH_SEP_ \"bar\")\n                .RemoveTrailingPathSeparator().string());\n}\n\nTEST(DirectoryTest, RootDirectoryExists) {\n#if GTEST_OS_WINDOWS  // We are on Windows.\n  char current_drive[_MAX_PATH];  // NOLINT\n  current_drive[0] = static_cast<char>(_getdrive() + 'A' - 1);\n  current_drive[1] = ':';\n  current_drive[2] = '\\\\';\n  current_drive[3] = '\\0';\n  EXPECT_TRUE(FilePath(current_drive).DirectoryExists());\n#else\n  EXPECT_TRUE(FilePath(\"/\").DirectoryExists());\n#endif  // GTEST_OS_WINDOWS\n}\n\n#if GTEST_OS_WINDOWS\nTEST(DirectoryTest, RootOfWrongDriveDoesNotExists) {\n  const int saved_drive_ = _getdrive();\n  // Find a drive that doesn't exist. 
Start with 'Z' to avoid common ones.\n  for (char drive = 'Z'; drive >= 'A'; drive--)\n    if (_chdrive(drive - 'A' + 1) == -1) {\n      char non_drive[_MAX_PATH];  // NOLINT\n      non_drive[0] = drive;\n      non_drive[1] = ':';\n      non_drive[2] = '\\\\';\n      non_drive[3] = '\\0';\n      EXPECT_FALSE(FilePath(non_drive).DirectoryExists());\n      break;\n    }\n  _chdrive(saved_drive_);\n}\n#endif  // GTEST_OS_WINDOWS\n\n#if !GTEST_OS_WINDOWS_MOBILE\n// Windows CE _does_ consider an empty directory to exist.\nTEST(DirectoryTest, EmptyPathDirectoryDoesNotExist) {\n  EXPECT_FALSE(FilePath(\"\").DirectoryExists());\n}\n#endif  // !GTEST_OS_WINDOWS_MOBILE\n\nTEST(DirectoryTest, CurrentDirectoryExists) {\n#if GTEST_OS_WINDOWS  // We are on Windows.\n# ifndef _WIN32_CE  // Windows CE doesn't have a current directory.\n\n  EXPECT_TRUE(FilePath(\".\").DirectoryExists());\n  EXPECT_TRUE(FilePath(\".\\\\\").DirectoryExists());\n\n# endif  // _WIN32_CE\n#else\n  EXPECT_TRUE(FilePath(\".\").DirectoryExists());\n  EXPECT_TRUE(FilePath(\"./\").DirectoryExists());\n#endif  // GTEST_OS_WINDOWS\n}\n\n// \"foo/bar\" == foo//bar\" == \"foo///bar\"\nTEST(NormalizeTest, MultipleConsecutiveSepaparatorsInMidstring) {\n  EXPECT_EQ(\"foo\" GTEST_PATH_SEP_ \"bar\",\n            FilePath(\"foo\" GTEST_PATH_SEP_ \"bar\").string());\n  EXPECT_EQ(\"foo\" GTEST_PATH_SEP_ \"bar\",\n            FilePath(\"foo\" GTEST_PATH_SEP_ GTEST_PATH_SEP_ \"bar\").string());\n  EXPECT_EQ(\"foo\" GTEST_PATH_SEP_ \"bar\",\n            FilePath(\"foo\" GTEST_PATH_SEP_ GTEST_PATH_SEP_\n                     GTEST_PATH_SEP_ \"bar\").string());\n}\n\n// \"/bar\" == //bar\" == \"///bar\"\nTEST(NormalizeTest, MultipleConsecutiveSepaparatorsAtStringStart) {\n  EXPECT_EQ(GTEST_PATH_SEP_ \"bar\",\n    FilePath(GTEST_PATH_SEP_ \"bar\").string());\n  EXPECT_EQ(GTEST_PATH_SEP_ \"bar\",\n    FilePath(GTEST_PATH_SEP_ GTEST_PATH_SEP_ \"bar\").string());\n  EXPECT_EQ(GTEST_PATH_SEP_ \"bar\",\n    FilePath(GTEST_PATH_SEP_ 
GTEST_PATH_SEP_ GTEST_PATH_SEP_ \"bar\").string());\n}\n\n// \"foo/\" == foo//\" == \"foo///\"\nTEST(NormalizeTest, MultipleConsecutiveSepaparatorsAtStringEnd) {\n  EXPECT_EQ(\"foo\" GTEST_PATH_SEP_,\n    FilePath(\"foo\" GTEST_PATH_SEP_).string());\n  EXPECT_EQ(\"foo\" GTEST_PATH_SEP_,\n    FilePath(\"foo\" GTEST_PATH_SEP_ GTEST_PATH_SEP_).string());\n  EXPECT_EQ(\"foo\" GTEST_PATH_SEP_,\n    FilePath(\"foo\" GTEST_PATH_SEP_ GTEST_PATH_SEP_ GTEST_PATH_SEP_).string());\n}\n\n#if GTEST_HAS_ALT_PATH_SEP_\n\n// Tests that separators at the end of the string are normalized\n// regardless of their combination (e.g. \"foo\\\" ==\"foo/\\\" ==\n// \"foo\\\\/\").\nTEST(NormalizeTest, MixAlternateSeparatorAtStringEnd) {\n  EXPECT_EQ(\"foo\" GTEST_PATH_SEP_,\n            FilePath(\"foo/\").string());\n  EXPECT_EQ(\"foo\" GTEST_PATH_SEP_,\n            FilePath(\"foo\" GTEST_PATH_SEP_ \"/\").string());\n  EXPECT_EQ(\"foo\" GTEST_PATH_SEP_,\n            FilePath(\"foo//\" GTEST_PATH_SEP_).string());\n}\n\n#endif\n\nTEST(AssignmentOperatorTest, DefaultAssignedToNonDefault) {\n  FilePath default_path;\n  FilePath non_default_path(\"path\");\n  non_default_path = default_path;\n  EXPECT_EQ(\"\", non_default_path.string());\n  EXPECT_EQ(\"\", default_path.string());  // RHS var is unchanged.\n}\n\nTEST(AssignmentOperatorTest, NonDefaultAssignedToDefault) {\n  FilePath non_default_path(\"path\");\n  FilePath default_path;\n  default_path = non_default_path;\n  EXPECT_EQ(\"path\", default_path.string());\n  EXPECT_EQ(\"path\", non_default_path.string());  // RHS var is unchanged.\n}\n\nTEST(AssignmentOperatorTest, ConstAssignedToNonConst) {\n  const FilePath const_default_path(\"const_path\");\n  FilePath non_default_path(\"path\");\n  non_default_path = const_default_path;\n  EXPECT_EQ(\"const_path\", non_default_path.string());\n}\n\nclass DirectoryCreationTest : public Test {\n protected:\n  void SetUp() override {\n    testdata_path_.Set(FilePath(\n        TempDir() + 
GetCurrentExecutableName().string() +\n        \"_directory_creation\" GTEST_PATH_SEP_ \"test\" GTEST_PATH_SEP_));\n    testdata_file_.Set(testdata_path_.RemoveTrailingPathSeparator());\n\n    unique_file0_.Set(FilePath::MakeFileName(testdata_path_, FilePath(\"unique\"),\n        0, \"txt\"));\n    unique_file1_.Set(FilePath::MakeFileName(testdata_path_, FilePath(\"unique\"),\n        1, \"txt\"));\n\n    remove(testdata_file_.c_str());\n    remove(unique_file0_.c_str());\n    remove(unique_file1_.c_str());\n    posix::RmDir(testdata_path_.c_str());\n  }\n\n  void TearDown() override {\n    remove(testdata_file_.c_str());\n    remove(unique_file0_.c_str());\n    remove(unique_file1_.c_str());\n    posix::RmDir(testdata_path_.c_str());\n  }\n\n  void CreateTextFile(const char* filename) {\n    FILE* f = posix::FOpen(filename, \"w\");\n    fprintf(f, \"text\\n\");\n    fclose(f);\n  }\n\n  // Strings representing a directory and a file, with identical paths\n  // except for the trailing separator character that distinquishes\n  // a directory named 'test' from a file named 'test'. Example names:\n  FilePath testdata_path_;  // \"/tmp/directory_creation/test/\"\n  FilePath testdata_file_;  // \"/tmp/directory_creation/test\"\n  FilePath unique_file0_;  // \"/tmp/directory_creation/test/unique.txt\"\n  FilePath unique_file1_;  // \"/tmp/directory_creation/test/unique_1.txt\"\n};\n\nTEST_F(DirectoryCreationTest, CreateDirectoriesRecursively) {\n  EXPECT_FALSE(testdata_path_.DirectoryExists()) << testdata_path_.string();\n  EXPECT_TRUE(testdata_path_.CreateDirectoriesRecursively());\n  EXPECT_TRUE(testdata_path_.DirectoryExists());\n}\n\nTEST_F(DirectoryCreationTest, CreateDirectoriesForAlreadyExistingPath) {\n  EXPECT_FALSE(testdata_path_.DirectoryExists()) << testdata_path_.string();\n  EXPECT_TRUE(testdata_path_.CreateDirectoriesRecursively());\n  // Call 'create' again... 
should still succeed.\n  EXPECT_TRUE(testdata_path_.CreateDirectoriesRecursively());\n}\n\nTEST_F(DirectoryCreationTest, CreateDirectoriesAndUniqueFilename) {\n  FilePath file_path(FilePath::GenerateUniqueFileName(testdata_path_,\n      FilePath(\"unique\"), \"txt\"));\n  EXPECT_EQ(unique_file0_.string(), file_path.string());\n  EXPECT_FALSE(file_path.FileOrDirectoryExists());  // file not there\n\n  testdata_path_.CreateDirectoriesRecursively();\n  EXPECT_FALSE(file_path.FileOrDirectoryExists());  // file still not there\n  CreateTextFile(file_path.c_str());\n  EXPECT_TRUE(file_path.FileOrDirectoryExists());\n\n  FilePath file_path2(FilePath::GenerateUniqueFileName(testdata_path_,\n      FilePath(\"unique\"), \"txt\"));\n  EXPECT_EQ(unique_file1_.string(), file_path2.string());\n  EXPECT_FALSE(file_path2.FileOrDirectoryExists());  // file not there\n  CreateTextFile(file_path2.c_str());\n  EXPECT_TRUE(file_path2.FileOrDirectoryExists());\n}\n\nTEST_F(DirectoryCreationTest, CreateDirectoriesFail) {\n  // force a failure by putting a file where we will try to create a directory.\n  CreateTextFile(testdata_file_.c_str());\n  EXPECT_TRUE(testdata_file_.FileOrDirectoryExists());\n  EXPECT_FALSE(testdata_file_.DirectoryExists());\n  EXPECT_FALSE(testdata_file_.CreateDirectoriesRecursively());\n}\n\nTEST(NoDirectoryCreationTest, CreateNoDirectoriesForDefaultXmlFile) {\n  const FilePath test_detail_xml(\"test_detail.xml\");\n  EXPECT_FALSE(test_detail_xml.CreateDirectoriesRecursively());\n}\n\nTEST(FilePathTest, DefaultConstructor) {\n  FilePath fp;\n  EXPECT_EQ(\"\", fp.string());\n}\n\nTEST(FilePathTest, CharAndCopyConstructors) {\n  const FilePath fp(\"spicy\");\n  EXPECT_EQ(\"spicy\", fp.string());\n\n  const FilePath fp_copy(fp);\n  EXPECT_EQ(\"spicy\", fp_copy.string());\n}\n\nTEST(FilePathTest, StringConstructor) {\n  const FilePath fp(std::string(\"cider\"));\n  EXPECT_EQ(\"cider\", fp.string());\n}\n\nTEST(FilePathTest, Set) {\n  const FilePath 
apple(\"apple\");\n  FilePath mac(\"mac\");\n  mac.Set(apple);  // Implement Set() since overloading operator= is forbidden.\n  EXPECT_EQ(\"apple\", mac.string());\n  EXPECT_EQ(\"apple\", apple.string());\n}\n\nTEST(FilePathTest, ToString) {\n  const FilePath file(\"drink\");\n  EXPECT_EQ(\"drink\", file.string());\n}\n\nTEST(FilePathTest, RemoveExtension) {\n  EXPECT_EQ(\"app\", FilePath(\"app.cc\").RemoveExtension(\"cc\").string());\n  EXPECT_EQ(\"app\", FilePath(\"app.exe\").RemoveExtension(\"exe\").string());\n  EXPECT_EQ(\"APP\", FilePath(\"APP.EXE\").RemoveExtension(\"exe\").string());\n}\n\nTEST(FilePathTest, RemoveExtensionWhenThereIsNoExtension) {\n  EXPECT_EQ(\"app\", FilePath(\"app\").RemoveExtension(\"exe\").string());\n}\n\nTEST(FilePathTest, IsDirectory) {\n  EXPECT_FALSE(FilePath(\"cola\").IsDirectory());\n  EXPECT_TRUE(FilePath(\"koala\" GTEST_PATH_SEP_).IsDirectory());\n#if GTEST_HAS_ALT_PATH_SEP_\n  EXPECT_TRUE(FilePath(\"koala/\").IsDirectory());\n#endif\n}\n\nTEST(FilePathTest, IsAbsolutePath) {\n  EXPECT_FALSE(FilePath(\"is\" GTEST_PATH_SEP_ \"relative\").IsAbsolutePath());\n  EXPECT_FALSE(FilePath(\"\").IsAbsolutePath());\n#if GTEST_OS_WINDOWS\n  EXPECT_TRUE(FilePath(\"c:\\\\\" GTEST_PATH_SEP_ \"is_not\"\n                       GTEST_PATH_SEP_ \"relative\").IsAbsolutePath());\n  EXPECT_FALSE(FilePath(\"c:foo\" GTEST_PATH_SEP_ \"bar\").IsAbsolutePath());\n  EXPECT_TRUE(FilePath(\"c:/\" GTEST_PATH_SEP_ \"is_not\"\n                       GTEST_PATH_SEP_ \"relative\").IsAbsolutePath());\n#else\n  EXPECT_TRUE(FilePath(GTEST_PATH_SEP_ \"is_not\" GTEST_PATH_SEP_ \"relative\")\n              .IsAbsolutePath());\n#endif  // GTEST_OS_WINDOWS\n}\n\nTEST(FilePathTest, IsRootDirectory) {\n#if GTEST_OS_WINDOWS\n  EXPECT_TRUE(FilePath(\"a:\\\\\").IsRootDirectory());\n  EXPECT_TRUE(FilePath(\"Z:/\").IsRootDirectory());\n  EXPECT_TRUE(FilePath(\"e://\").IsRootDirectory());\n  EXPECT_FALSE(FilePath(\"\").IsRootDirectory());\n  
EXPECT_FALSE(FilePath(\"b:\").IsRootDirectory());\n  EXPECT_FALSE(FilePath(\"b:a\").IsRootDirectory());\n  EXPECT_FALSE(FilePath(\"8:/\").IsRootDirectory());\n  EXPECT_FALSE(FilePath(\"c|/\").IsRootDirectory());\n#else\n  EXPECT_TRUE(FilePath(\"/\").IsRootDirectory());\n  EXPECT_TRUE(FilePath(\"//\").IsRootDirectory());\n  EXPECT_FALSE(FilePath(\"\").IsRootDirectory());\n  EXPECT_FALSE(FilePath(\"\\\\\").IsRootDirectory());\n  EXPECT_FALSE(FilePath(\"/x\").IsRootDirectory());\n#endif\n}\n\n}  // namespace\n}  // namespace internal\n}  // namespace testing\n"
  },
  {
    "path": "test/googletest/test/googletest-filter-unittest.py",
    "content": "#!/usr/bin/env python\n#\n# Copyright 2005 Google Inc. All Rights Reserved.\n#\n# Redistribution and use in source and binary forms, with or without\n# modification, are permitted provided that the following conditions are\n# met:\n#\n#     * Redistributions of source code must retain the above copyright\n# notice, this list of conditions and the following disclaimer.\n#     * Redistributions in binary form must reproduce the above\n# copyright notice, this list of conditions and the following disclaimer\n# in the documentation and/or other materials provided with the\n# distribution.\n#     * Neither the name of Google Inc. nor the names of its\n# contributors may be used to endorse or promote products derived from\n# this software without specific prior written permission.\n#\n# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n# \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\"\"\"Unit test for Google Test test filters.\n\nA user can specify which test(s) in a Google Test program to run via either\nthe GTEST_FILTER environment variable or the --gtest_filter flag.\nThis script tests such functionality by invoking\ngoogletest-filter-unittest_ (a program written with Google Test) with different\nenvironments and command line flags.\n\nNote that test sharding may also influence which tests are filtered. Therefore,\nwe test that here also.\n\"\"\"\n\nimport os\nimport re\ntry:\n  from sets import Set as set  # For Python 2.3 compatibility\nexcept ImportError:\n  pass\nimport sys\nimport gtest_test_utils\n\n# Constants.\n\n# Checks if this platform can pass empty environment variables to child\n# processes.  We set an env variable to an empty string and invoke a python\n# script in a subprocess to print whether the variable is STILL in\n# os.environ.  
We then use 'eval' to parse the child's output so that an\n# exception is thrown if the input is anything other than 'True' nor 'False'.\nCAN_PASS_EMPTY_ENV = False\nif sys.executable:\n  os.environ['EMPTY_VAR'] = ''\n  child = gtest_test_utils.Subprocess(\n      [sys.executable, '-c', 'import os; print(\\'EMPTY_VAR\\' in os.environ)'])\n  CAN_PASS_EMPTY_ENV = eval(child.output)\n\n\n# Check if this platform can unset environment variables in child processes.\n# We set an env variable to a non-empty string, unset it, and invoke\n# a python script in a subprocess to print whether the variable\n# is NO LONGER in os.environ.\n# We use 'eval' to parse the child's output so that an exception\n# is thrown if the input is neither 'True' nor 'False'.\nCAN_UNSET_ENV = False\nif sys.executable:\n  os.environ['UNSET_VAR'] = 'X'\n  del os.environ['UNSET_VAR']\n  child = gtest_test_utils.Subprocess(\n      [sys.executable, '-c', 'import os; print(\\'UNSET_VAR\\' not in os.environ)'\n      ])\n  CAN_UNSET_ENV = eval(child.output)\n\n\n# Checks if we should test with an empty filter. 
This doesn't\n# make sense on platforms that cannot pass empty env variables (Win32)\n# and on platforms that cannot unset variables (since we cannot tell\n# the difference between \"\" and NULL -- Borland and Solaris < 5.10)\nCAN_TEST_EMPTY_FILTER = (CAN_PASS_EMPTY_ENV and CAN_UNSET_ENV)\n\n\n# The environment variable for specifying the test filters.\nFILTER_ENV_VAR = 'GTEST_FILTER'\n\n# The environment variables for test sharding.\nTOTAL_SHARDS_ENV_VAR = 'GTEST_TOTAL_SHARDS'\nSHARD_INDEX_ENV_VAR = 'GTEST_SHARD_INDEX'\nSHARD_STATUS_FILE_ENV_VAR = 'GTEST_SHARD_STATUS_FILE'\n\n# The command line flag for specifying the test filters.\nFILTER_FLAG = 'gtest_filter'\n\n# The command line flag for including disabled tests.\nALSO_RUN_DISABLED_TESTS_FLAG = 'gtest_also_run_disabled_tests'\n\n# Command to run the googletest-filter-unittest_ program.\nCOMMAND = gtest_test_utils.GetTestExecutablePath('googletest-filter-unittest_')\n\n# Regex for determining whether parameterized tests are enabled in the binary.\nPARAM_TEST_REGEX = re.compile(r'/ParamTest')\n\n# Regex for parsing test case names from Google Test's output.\nTEST_CASE_REGEX = re.compile(r'^\\[\\-+\\] \\d+ tests? 
from (\\w+(/\\w+)?)')\n\n# Regex for parsing test names from Google Test's output.\nTEST_REGEX = re.compile(r'^\\[\\s*RUN\\s*\\].*\\.(\\w+(/\\w+)?)')\n\n# The command line flag to tell Google Test to output the list of tests it\n# will run.\nLIST_TESTS_FLAG = '--gtest_list_tests'\n\n# Indicates whether Google Test supports death tests.\nSUPPORTS_DEATH_TESTS = 'HasDeathTest' in gtest_test_utils.Subprocess(\n    [COMMAND, LIST_TESTS_FLAG]).output\n\n# Full names of all tests in googletest-filter-unittests_.\nPARAM_TESTS = [\n    'SeqP/ParamTest.TestX/0',\n    'SeqP/ParamTest.TestX/1',\n    'SeqP/ParamTest.TestY/0',\n    'SeqP/ParamTest.TestY/1',\n    'SeqQ/ParamTest.TestX/0',\n    'SeqQ/ParamTest.TestX/1',\n    'SeqQ/ParamTest.TestY/0',\n    'SeqQ/ParamTest.TestY/1',\n    ]\n\nDISABLED_TESTS = [\n    'BarTest.DISABLED_TestFour',\n    'BarTest.DISABLED_TestFive',\n    'BazTest.DISABLED_TestC',\n    'DISABLED_FoobarTest.Test1',\n    'DISABLED_FoobarTest.DISABLED_Test2',\n    'DISABLED_FoobarbazTest.TestA',\n    ]\n\nif SUPPORTS_DEATH_TESTS:\n  DEATH_TESTS = [\n    'HasDeathTest.Test1',\n    'HasDeathTest.Test2',\n    ]\nelse:\n  DEATH_TESTS = []\n\n# All the non-disabled tests.\nACTIVE_TESTS = [\n    'FooTest.Abc',\n    'FooTest.Xyz',\n\n    'BarTest.TestOne',\n    'BarTest.TestTwo',\n    'BarTest.TestThree',\n\n    'BazTest.TestOne',\n    'BazTest.TestA',\n    'BazTest.TestB',\n    ] + DEATH_TESTS + PARAM_TESTS\n\nparam_tests_present = None\n\n# Utilities.\n\nenviron = os.environ.copy()\n\n\ndef SetEnvVar(env_var, value):\n  \"\"\"Sets the env variable to 'value'; unsets it when 'value' is None.\"\"\"\n\n  if value is not None:\n    environ[env_var] = value\n  elif env_var in environ:\n    del environ[env_var]\n\n\ndef RunAndReturnOutput(args = None):\n  \"\"\"Runs the test program and returns its output.\"\"\"\n\n  return gtest_test_utils.Subprocess([COMMAND] + (args or []),\n                                     env=environ).output\n\n\ndef RunAndExtractTestList(args 
= None):\n  \"\"\"Runs the test program and returns its exit code and a list of tests run.\"\"\"\n\n  p = gtest_test_utils.Subprocess([COMMAND] + (args or []), env=environ)\n  tests_run = []\n  test_case = ''\n  test = ''\n  for line in p.output.split('\\n'):\n    match = TEST_CASE_REGEX.match(line)\n    if match is not None:\n      test_case = match.group(1)\n    else:\n      match = TEST_REGEX.match(line)\n      if match is not None:\n        test = match.group(1)\n        tests_run.append(test_case + '.' + test)\n  return (tests_run, p.exit_code)\n\n\ndef InvokeWithModifiedEnv(extra_env, function, *args, **kwargs):\n  \"\"\"Runs the given function and arguments in a modified environment.\"\"\"\n  try:\n    original_env = environ.copy()\n    environ.update(extra_env)\n    return function(*args, **kwargs)\n  finally:\n    environ.clear()\n    environ.update(original_env)\n\n\ndef RunWithSharding(total_shards, shard_index, command):\n  \"\"\"Runs a test program shard and returns exit code and a list of tests run.\"\"\"\n\n  extra_env = {SHARD_INDEX_ENV_VAR: str(shard_index),\n               TOTAL_SHARDS_ENV_VAR: str(total_shards)}\n  return InvokeWithModifiedEnv(extra_env, RunAndExtractTestList, command)\n\n# The unit test.\n\n\nclass GTestFilterUnitTest(gtest_test_utils.TestCase):\n  \"\"\"Tests the env variable or the command line flag to filter tests.\"\"\"\n\n  # Utilities.\n\n  def AssertSetEqual(self, lhs, rhs):\n    \"\"\"Asserts that two sets are equal.\"\"\"\n\n    for elem in lhs:\n      self.assert_(elem in rhs, '%s in %s' % (elem, rhs))\n\n    for elem in rhs:\n      self.assert_(elem in lhs, '%s in %s' % (elem, lhs))\n\n  def AssertPartitionIsValid(self, set_var, list_of_sets):\n    \"\"\"Asserts that list_of_sets is a valid partition of set_var.\"\"\"\n\n    full_partition = []\n    for slice_var in list_of_sets:\n      full_partition.extend(slice_var)\n    self.assertEqual(len(set_var), len(full_partition))\n    self.assertEqual(set(set_var), 
set(full_partition))\n\n  def AdjustForParameterizedTests(self, tests_to_run):\n    \"\"\"Adjust tests_to_run in case value parameterized tests are disabled.\"\"\"\n\n    global param_tests_present\n    if not param_tests_present:\n      return list(set(tests_to_run) - set(PARAM_TESTS))\n    else:\n      return tests_to_run\n\n  def RunAndVerify(self, gtest_filter, tests_to_run):\n    \"\"\"Checks that the binary runs correct set of tests for a given filter.\"\"\"\n\n    tests_to_run = self.AdjustForParameterizedTests(tests_to_run)\n\n    # First, tests using the environment variable.\n\n    # Windows removes empty variables from the environment when passing it\n    # to a new process.  This means it is impossible to pass an empty filter\n    # into a process using the environment variable.  However, we can still\n    # test the case when the variable is not supplied (i.e., gtest_filter is\n    # None).\n    # pylint: disable-msg=C6403\n    if CAN_TEST_EMPTY_FILTER or gtest_filter != '':\n      SetEnvVar(FILTER_ENV_VAR, gtest_filter)\n      tests_run = RunAndExtractTestList()[0]\n      SetEnvVar(FILTER_ENV_VAR, None)\n      self.AssertSetEqual(tests_run, tests_to_run)\n    # pylint: enable-msg=C6403\n\n    # Next, tests using the command line flag.\n\n    if gtest_filter is None:\n      args = []\n    else:\n      args = ['--%s=%s' % (FILTER_FLAG, gtest_filter)]\n\n    tests_run = RunAndExtractTestList(args)[0]\n    self.AssertSetEqual(tests_run, tests_to_run)\n\n  def RunAndVerifyWithSharding(self, gtest_filter, total_shards, tests_to_run,\n                               args=None, check_exit_0=False):\n    \"\"\"Checks that binary runs correct tests for the given filter and shard.\n\n    Runs all shards of googletest-filter-unittest_ with the given filter, and\n    verifies that the right set of tests were run. 
The union of tests run\n    on each shard should be identical to tests_to_run, without duplicates.\n    If check_exit_0, .\n\n    Args:\n      gtest_filter: A filter to apply to the tests.\n      total_shards: A total number of shards to split test run into.\n      tests_to_run: A set of tests expected to run.\n      args   :      Arguments to pass to the to the test binary.\n      check_exit_0: When set to a true value, make sure that all shards\n                    return 0.\n    \"\"\"\n\n    tests_to_run = self.AdjustForParameterizedTests(tests_to_run)\n\n    # Windows removes empty variables from the environment when passing it\n    # to a new process.  This means it is impossible to pass an empty filter\n    # into a process using the environment variable.  However, we can still\n    # test the case when the variable is not supplied (i.e., gtest_filter is\n    # None).\n    # pylint: disable-msg=C6403\n    if CAN_TEST_EMPTY_FILTER or gtest_filter != '':\n      SetEnvVar(FILTER_ENV_VAR, gtest_filter)\n      partition = []\n      for i in range(0, total_shards):\n        (tests_run, exit_code) = RunWithSharding(total_shards, i, args)\n        if check_exit_0:\n          self.assertEqual(0, exit_code)\n        partition.append(tests_run)\n\n      self.AssertPartitionIsValid(tests_to_run, partition)\n      SetEnvVar(FILTER_ENV_VAR, None)\n    # pylint: enable-msg=C6403\n\n  def RunAndVerifyAllowingDisabled(self, gtest_filter, tests_to_run):\n    \"\"\"Checks that the binary runs correct set of tests for the given filter.\n\n    Runs googletest-filter-unittest_ with the given filter, and enables\n    disabled tests. 
Verifies that the right set of tests were run.\n\n    Args:\n      gtest_filter: A filter to apply to the tests.\n      tests_to_run: A set of tests expected to run.\n    \"\"\"\n\n    tests_to_run = self.AdjustForParameterizedTests(tests_to_run)\n\n    # Construct the command line.\n    args = ['--%s' % ALSO_RUN_DISABLED_TESTS_FLAG]\n    if gtest_filter is not None:\n      args.append('--%s=%s' % (FILTER_FLAG, gtest_filter))\n\n    tests_run = RunAndExtractTestList(args)[0]\n    self.AssertSetEqual(tests_run, tests_to_run)\n\n  def setUp(self):\n    \"\"\"Sets up test case.\n\n    Determines whether value-parameterized tests are enabled in the binary and\n    sets the flags accordingly.\n    \"\"\"\n\n    global param_tests_present\n    if param_tests_present is None:\n      param_tests_present = PARAM_TEST_REGEX.search(\n          RunAndReturnOutput()) is not None\n\n  def testDefaultBehavior(self):\n    \"\"\"Tests the behavior of not specifying the filter.\"\"\"\n\n    self.RunAndVerify(None, ACTIVE_TESTS)\n\n  def testDefaultBehaviorWithShards(self):\n    \"\"\"Tests the behavior without the filter, with sharding enabled.\"\"\"\n\n    self.RunAndVerifyWithSharding(None, 1, ACTIVE_TESTS)\n    self.RunAndVerifyWithSharding(None, 2, ACTIVE_TESTS)\n    self.RunAndVerifyWithSharding(None, len(ACTIVE_TESTS) - 1, ACTIVE_TESTS)\n    self.RunAndVerifyWithSharding(None, len(ACTIVE_TESTS), ACTIVE_TESTS)\n    self.RunAndVerifyWithSharding(None, len(ACTIVE_TESTS) + 1, ACTIVE_TESTS)\n\n  def testEmptyFilter(self):\n    \"\"\"Tests an empty filter.\"\"\"\n\n    self.RunAndVerify('', [])\n    self.RunAndVerifyWithSharding('', 1, [])\n    self.RunAndVerifyWithSharding('', 2, [])\n\n  def testBadFilter(self):\n    \"\"\"Tests a filter that matches nothing.\"\"\"\n\n    self.RunAndVerify('BadFilter', [])\n    self.RunAndVerifyAllowingDisabled('BadFilter', [])\n\n  def testFullName(self):\n    \"\"\"Tests filtering by full name.\"\"\"\n\n    self.RunAndVerify('FooTest.Xyz', 
['FooTest.Xyz'])\n    self.RunAndVerifyAllowingDisabled('FooTest.Xyz', ['FooTest.Xyz'])\n    self.RunAndVerifyWithSharding('FooTest.Xyz', 5, ['FooTest.Xyz'])\n\n  def testUniversalFilters(self):\n    \"\"\"Tests filters that match everything.\"\"\"\n\n    self.RunAndVerify('*', ACTIVE_TESTS)\n    self.RunAndVerify('*.*', ACTIVE_TESTS)\n    self.RunAndVerifyWithSharding('*.*', len(ACTIVE_TESTS) - 3, ACTIVE_TESTS)\n    self.RunAndVerifyAllowingDisabled('*', ACTIVE_TESTS + DISABLED_TESTS)\n    self.RunAndVerifyAllowingDisabled('*.*', ACTIVE_TESTS + DISABLED_TESTS)\n\n  def testFilterByTestCase(self):\n    \"\"\"Tests filtering by test case name.\"\"\"\n\n    self.RunAndVerify('FooTest.*', ['FooTest.Abc', 'FooTest.Xyz'])\n\n    BAZ_TESTS = ['BazTest.TestOne', 'BazTest.TestA', 'BazTest.TestB']\n    self.RunAndVerify('BazTest.*', BAZ_TESTS)\n    self.RunAndVerifyAllowingDisabled('BazTest.*',\n                                      BAZ_TESTS + ['BazTest.DISABLED_TestC'])\n\n  def testFilterByTest(self):\n    \"\"\"Tests filtering by test name.\"\"\"\n\n    self.RunAndVerify('*.TestOne', ['BarTest.TestOne', 'BazTest.TestOne'])\n\n  def testFilterDisabledTests(self):\n    \"\"\"Select only the disabled tests to run.\"\"\"\n\n    self.RunAndVerify('DISABLED_FoobarTest.Test1', [])\n    self.RunAndVerifyAllowingDisabled('DISABLED_FoobarTest.Test1',\n                                      ['DISABLED_FoobarTest.Test1'])\n\n    self.RunAndVerify('*DISABLED_*', [])\n    self.RunAndVerifyAllowingDisabled('*DISABLED_*', DISABLED_TESTS)\n\n    self.RunAndVerify('*.DISABLED_*', [])\n    self.RunAndVerifyAllowingDisabled('*.DISABLED_*', [\n        'BarTest.DISABLED_TestFour',\n        'BarTest.DISABLED_TestFive',\n        'BazTest.DISABLED_TestC',\n        'DISABLED_FoobarTest.DISABLED_Test2',\n        ])\n\n    self.RunAndVerify('DISABLED_*', [])\n    self.RunAndVerifyAllowingDisabled('DISABLED_*', [\n        'DISABLED_FoobarTest.Test1',\n        'DISABLED_FoobarTest.DISABLED_Test2',\n  
      'DISABLED_FoobarbazTest.TestA',\n        ])\n\n  def testWildcardInTestCaseName(self):\n    \"\"\"Tests using wildcard in the test case name.\"\"\"\n\n    self.RunAndVerify('*a*.*', [\n        'BarTest.TestOne',\n        'BarTest.TestTwo',\n        'BarTest.TestThree',\n\n        'BazTest.TestOne',\n        'BazTest.TestA',\n        'BazTest.TestB', ] + DEATH_TESTS + PARAM_TESTS)\n\n  def testWildcardInTestName(self):\n    \"\"\"Tests using wildcard in the test name.\"\"\"\n\n    self.RunAndVerify('*.*A*', ['FooTest.Abc', 'BazTest.TestA'])\n\n  def testFilterWithoutDot(self):\n    \"\"\"Tests a filter that has no '.' in it.\"\"\"\n\n    self.RunAndVerify('*z*', [\n        'FooTest.Xyz',\n\n        'BazTest.TestOne',\n        'BazTest.TestA',\n        'BazTest.TestB',\n        ])\n\n  def testTwoPatterns(self):\n    \"\"\"Tests filters that consist of two patterns.\"\"\"\n\n    self.RunAndVerify('Foo*.*:*A*', [\n        'FooTest.Abc',\n        'FooTest.Xyz',\n\n        'BazTest.TestA',\n        ])\n\n    # An empty pattern + a non-empty one\n    self.RunAndVerify(':*A*', ['FooTest.Abc', 'BazTest.TestA'])\n\n  def testThreePatterns(self):\n    \"\"\"Tests filters that consist of three patterns.\"\"\"\n\n    self.RunAndVerify('*oo*:*A*:*One', [\n        'FooTest.Abc',\n        'FooTest.Xyz',\n\n        'BarTest.TestOne',\n\n        'BazTest.TestOne',\n        'BazTest.TestA',\n        ])\n\n    # The 2nd pattern is empty.\n    self.RunAndVerify('*oo*::*One', [\n        'FooTest.Abc',\n        'FooTest.Xyz',\n\n        'BarTest.TestOne',\n\n        'BazTest.TestOne',\n        ])\n\n    # The last 2 patterns are empty.\n    self.RunAndVerify('*oo*::', [\n        'FooTest.Abc',\n        'FooTest.Xyz',\n        ])\n\n  def testNegativeFilters(self):\n    self.RunAndVerify('*-BazTest.TestOne', [\n        'FooTest.Abc',\n        'FooTest.Xyz',\n\n        'BarTest.TestOne',\n        'BarTest.TestTwo',\n        'BarTest.TestThree',\n\n        'BazTest.TestA',\n        
'BazTest.TestB',\n        ] + DEATH_TESTS + PARAM_TESTS)\n\n    self.RunAndVerify('*-FooTest.Abc:BazTest.*', [\n        'FooTest.Xyz',\n\n        'BarTest.TestOne',\n        'BarTest.TestTwo',\n        'BarTest.TestThree',\n        ] + DEATH_TESTS + PARAM_TESTS)\n\n    self.RunAndVerify('BarTest.*-BarTest.TestOne', [\n        'BarTest.TestTwo',\n        'BarTest.TestThree',\n        ])\n\n    # Tests without leading '*'.\n    self.RunAndVerify('-FooTest.Abc:FooTest.Xyz:BazTest.*', [\n        'BarTest.TestOne',\n        'BarTest.TestTwo',\n        'BarTest.TestThree',\n        ] + DEATH_TESTS + PARAM_TESTS)\n\n    # Value parameterized tests.\n    self.RunAndVerify('*/*', PARAM_TESTS)\n\n    # Value parameterized tests filtering by the sequence name.\n    self.RunAndVerify('SeqP/*', [\n        'SeqP/ParamTest.TestX/0',\n        'SeqP/ParamTest.TestX/1',\n        'SeqP/ParamTest.TestY/0',\n        'SeqP/ParamTest.TestY/1',\n        ])\n\n    # Value parameterized tests filtering by the test name.\n    self.RunAndVerify('*/0', [\n        'SeqP/ParamTest.TestX/0',\n        'SeqP/ParamTest.TestY/0',\n        'SeqQ/ParamTest.TestX/0',\n        'SeqQ/ParamTest.TestY/0',\n        ])\n\n  def testFlagOverridesEnvVar(self):\n    \"\"\"Tests that the filter flag overrides the filtering env. 
variable.\"\"\"\n\n    SetEnvVar(FILTER_ENV_VAR, 'Foo*')\n    args = ['--%s=%s' % (FILTER_FLAG, '*One')]\n    tests_run = RunAndExtractTestList(args)[0]\n    SetEnvVar(FILTER_ENV_VAR, None)\n\n    self.AssertSetEqual(tests_run, ['BarTest.TestOne', 'BazTest.TestOne'])\n\n  def testShardStatusFileIsCreated(self):\n    \"\"\"Tests that the shard file is created if specified in the environment.\"\"\"\n\n    shard_status_file = os.path.join(gtest_test_utils.GetTempDir(),\n                                     'shard_status_file')\n    self.assert_(not os.path.exists(shard_status_file))\n\n    extra_env = {SHARD_STATUS_FILE_ENV_VAR: shard_status_file}\n    try:\n      InvokeWithModifiedEnv(extra_env, RunAndReturnOutput)\n    finally:\n      self.assert_(os.path.exists(shard_status_file))\n      os.remove(shard_status_file)\n\n  def testShardStatusFileIsCreatedWithListTests(self):\n    \"\"\"Tests that the shard file is created with the \"list_tests\" flag.\"\"\"\n\n    shard_status_file = os.path.join(gtest_test_utils.GetTempDir(),\n                                     'shard_status_file2')\n    self.assert_(not os.path.exists(shard_status_file))\n\n    extra_env = {SHARD_STATUS_FILE_ENV_VAR: shard_status_file}\n    try:\n      output = InvokeWithModifiedEnv(extra_env,\n                                     RunAndReturnOutput,\n                                     [LIST_TESTS_FLAG])\n    finally:\n      # This assertion ensures that Google Test enumerated the tests as\n      # opposed to running them.\n      self.assert_('[==========]' not in output,\n                   'Unexpected output during test enumeration.\\n'\n                   'Please ensure that LIST_TESTS_FLAG is assigned the\\n'\n                   'correct flag value for listing Google Test tests.')\n\n      self.assert_(os.path.exists(shard_status_file))\n      os.remove(shard_status_file)\n\n  if SUPPORTS_DEATH_TESTS:\n    def testShardingWorksWithDeathTests(self):\n      \"\"\"Tests integration with death 
tests and sharding.\"\"\"\n\n      gtest_filter = 'HasDeathTest.*:SeqP/*'\n      expected_tests = [\n          'HasDeathTest.Test1',\n          'HasDeathTest.Test2',\n\n          'SeqP/ParamTest.TestX/0',\n          'SeqP/ParamTest.TestX/1',\n          'SeqP/ParamTest.TestY/0',\n          'SeqP/ParamTest.TestY/1',\n          ]\n\n      for flag in ['--gtest_death_test_style=threadsafe',\n                   '--gtest_death_test_style=fast']:\n        self.RunAndVerifyWithSharding(gtest_filter, 3, expected_tests,\n                                      check_exit_0=True, args=[flag])\n        self.RunAndVerifyWithSharding(gtest_filter, 5, expected_tests,\n                                      check_exit_0=True, args=[flag])\n\nif __name__ == '__main__':\n  gtest_test_utils.Main()\n"
  },
  {
    "path": "test/googletest/test/googletest-filter-unittest_.cc",
    "content": "// Copyright 2005, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\n// Unit test for Google Test test filters.\n//\n// A user can specify which test(s) in a Google Test program to run via\n// either the GTEST_FILTER environment variable or the --gtest_filter\n// flag.  This is used for testing such functionality.\n//\n// The program will be invoked from a Python unit test.  
Don't run it\n// directly.\n\n#include \"gtest/gtest.h\"\n\nnamespace {\n\n// Test case FooTest.\n\nclass FooTest : public testing::Test {\n};\n\nTEST_F(FooTest, Abc) {\n}\n\nTEST_F(FooTest, Xyz) {\n  FAIL() << \"Expected failure.\";\n}\n\n// Test case BarTest.\n\nTEST(BarTest, TestOne) {\n}\n\nTEST(BarTest, TestTwo) {\n}\n\nTEST(BarTest, TestThree) {\n}\n\nTEST(BarTest, DISABLED_TestFour) {\n  FAIL() << \"Expected failure.\";\n}\n\nTEST(BarTest, DISABLED_TestFive) {\n  FAIL() << \"Expected failure.\";\n}\n\n// Test case BazTest.\n\nTEST(BazTest, TestOne) {\n  FAIL() << \"Expected failure.\";\n}\n\nTEST(BazTest, TestA) {\n}\n\nTEST(BazTest, TestB) {\n}\n\nTEST(BazTest, DISABLED_TestC) {\n  FAIL() << \"Expected failure.\";\n}\n\n// Test case HasDeathTest\n\nTEST(HasDeathTest, Test1) {\n  EXPECT_DEATH_IF_SUPPORTED(exit(1), \".*\");\n}\n\n// We need at least two death tests to make sure that the all death tests\n// aren't on the first shard.\nTEST(HasDeathTest, Test2) {\n  EXPECT_DEATH_IF_SUPPORTED(exit(1), \".*\");\n}\n\n// Test case FoobarTest\n\nTEST(DISABLED_FoobarTest, Test1) {\n  FAIL() << \"Expected failure.\";\n}\n\nTEST(DISABLED_FoobarTest, DISABLED_Test2) {\n  FAIL() << \"Expected failure.\";\n}\n\n// Test case FoobarbazTest\n\nTEST(DISABLED_FoobarbazTest, TestA) {\n  FAIL() << \"Expected failure.\";\n}\n\nclass ParamTest : public testing::TestWithParam<int> {\n};\n\nTEST_P(ParamTest, TestX) {\n}\n\nTEST_P(ParamTest, TestY) {\n}\n\nINSTANTIATE_TEST_SUITE_P(SeqP, ParamTest, testing::Values(1, 2));\nINSTANTIATE_TEST_SUITE_P(SeqQ, ParamTest, testing::Values(5, 6));\n\n}  // namespace\n\nint main(int argc, char **argv) {\n  ::testing::InitGoogleTest(&argc, argv);\n\n  return RUN_ALL_TESTS();\n}\n"
  },
  {
    "path": "test/googletest/test/googletest-json-outfiles-test.py",
    "content": "#!/usr/bin/env python\n# Copyright 2018, Google Inc.\n# All rights reserved.\n#\n# Redistribution and use in source and binary forms, with or without\n# modification, are permitted provided that the following conditions are\n# met:\n#\n#     * Redistributions of source code must retain the above copyright\n# notice, this list of conditions and the following disclaimer.\n#     * Redistributions in binary form must reproduce the above\n# copyright notice, this list of conditions and the following disclaimer\n# in the documentation and/or other materials provided with the\n# distribution.\n#     * Neither the name of Google Inc. nor the names of its\n# contributors may be used to endorse or promote products derived from\n# this software without specific prior written permission.\n#\n# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n# \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\"\"\"Unit test for the gtest_json_output module.\"\"\"\n\nimport json\nimport os\nimport gtest_json_test_utils\nimport gtest_test_utils\n\nGTEST_OUTPUT_SUBDIR = 'json_outfiles'\nGTEST_OUTPUT_1_TEST = 'gtest_xml_outfile1_test_'\nGTEST_OUTPUT_2_TEST = 'gtest_xml_outfile2_test_'\n\nEXPECTED_1 = {\n    u'tests':\n        1,\n    u'failures':\n        0,\n    u'disabled':\n        0,\n    u'errors':\n        0,\n    u'time':\n        u'*',\n    u'timestamp':\n        u'*',\n    u'name':\n        u'AllTests',\n    u'testsuites': [{\n        u'name':\n            u'PropertyOne',\n        u'tests':\n            1,\n        u'failures':\n            0,\n        u'disabled':\n            0,\n        u'errors':\n            0,\n        u'time':\n            u'*',\n        u'timestamp':\n            u'*',\n        u'testsuite': [{\n            u'name': u'TestSomeProperties',\n            u'status': u'RUN',\n            u'result': u'COMPLETED',\n            u'time': u'*',\n            u'timestamp': u'*',\n            u'classname': u'PropertyOne',\n            u'SetUpProp': u'1',\n            u'TestSomeProperty': u'1',\n            u'TearDownProp': u'1',\n        }],\n    }],\n}\n\nEXPECTED_2 = {\n    u'tests':\n        1,\n    u'failures':\n        0,\n    u'disabled':\n        0,\n    u'errors':\n        0,\n    u'time':\n        u'*',\n    u'timestamp':\n        u'*',\n    u'name':\n        u'AllTests',\n    u'testsuites': [{\n        u'name':\n   
         u'PropertyTwo',\n        u'tests':\n            1,\n        u'failures':\n            0,\n        u'disabled':\n            0,\n        u'errors':\n            0,\n        u'time':\n            u'*',\n        u'timestamp':\n            u'*',\n        u'testsuite': [{\n            u'name': u'TestSomeProperties',\n            u'status': u'RUN',\n            u'result': u'COMPLETED',\n            u'timestamp': u'*',\n            u'time': u'*',\n            u'classname': u'PropertyTwo',\n            u'SetUpProp': u'2',\n            u'TestSomeProperty': u'2',\n            u'TearDownProp': u'2',\n        }],\n    }],\n}\n\n\nclass GTestJsonOutFilesTest(gtest_test_utils.TestCase):\n  \"\"\"Unit test for Google Test's JSON output functionality.\"\"\"\n\n  def setUp(self):\n    # We want the trailing '/' that the last \"\" provides in os.path.join, for\n    # telling Google Test to create an output directory instead of a single file\n    # for xml output.\n    self.output_dir_ = os.path.join(gtest_test_utils.GetTempDir(),\n                                    GTEST_OUTPUT_SUBDIR, '')\n    self.DeleteFilesAndDir()\n\n  def tearDown(self):\n    self.DeleteFilesAndDir()\n\n  def DeleteFilesAndDir(self):\n    try:\n      os.remove(os.path.join(self.output_dir_, GTEST_OUTPUT_1_TEST + '.json'))\n    except os.error:\n      pass\n    try:\n      os.remove(os.path.join(self.output_dir_, GTEST_OUTPUT_2_TEST + '.json'))\n    except os.error:\n      pass\n    try:\n      os.rmdir(self.output_dir_)\n    except os.error:\n      pass\n\n  def testOutfile1(self):\n    self._TestOutFile(GTEST_OUTPUT_1_TEST, EXPECTED_1)\n\n  def testOutfile2(self):\n    self._TestOutFile(GTEST_OUTPUT_2_TEST, EXPECTED_2)\n\n  def _TestOutFile(self, test_name, expected):\n    gtest_prog_path = gtest_test_utils.GetTestExecutablePath(test_name)\n    command = [gtest_prog_path, '--gtest_output=json:%s' % self.output_dir_]\n    p = gtest_test_utils.Subprocess(command,\n                                    
working_dir=gtest_test_utils.GetTempDir())\n    self.assert_(p.exited)\n    self.assertEquals(0, p.exit_code)\n\n    output_file_name1 = test_name + '.json'\n    output_file1 = os.path.join(self.output_dir_, output_file_name1)\n    output_file_name2 = 'lt-' + output_file_name1\n    output_file2 = os.path.join(self.output_dir_, output_file_name2)\n    self.assert_(os.path.isfile(output_file1) or os.path.isfile(output_file2),\n                 output_file1)\n\n    if os.path.isfile(output_file1):\n      with open(output_file1) as f:\n        actual = json.load(f)\n    else:\n      with open(output_file2) as f:\n        actual = json.load(f)\n    self.assertEqual(expected, gtest_json_test_utils.normalize(actual))\n\n\nif __name__ == '__main__':\n  os.environ['GTEST_STACK_TRACE_DEPTH'] = '0'\n  gtest_test_utils.Main()\n"
  },
  {
    "path": "test/googletest/test/googletest-json-output-unittest.py",
    "content": "#!/usr/bin/env python\n# Copyright 2018, Google Inc.\n# All rights reserved.\n#\n# Redistribution and use in source and binary forms, with or without\n# modification, are permitted provided that the following conditions are\n# met:\n#\n#     * Redistributions of source code must retain the above copyright\n# notice, this list of conditions and the following disclaimer.\n#     * Redistributions in binary form must reproduce the above\n# copyright notice, this list of conditions and the following disclaimer\n# in the documentation and/or other materials provided with the\n# distribution.\n#     * Neither the name of Google Inc. nor the names of its\n# contributors may be used to endorse or promote products derived from\n# this software without specific prior written permission.\n#\n# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n# \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\"\"\"Unit test for the gtest_json_output module.\"\"\"\n\nimport datetime\nimport errno\nimport json\nimport os\nimport re\nimport sys\n\nimport gtest_json_test_utils\nimport gtest_test_utils\n\nGTEST_FILTER_FLAG = '--gtest_filter'\nGTEST_LIST_TESTS_FLAG = '--gtest_list_tests'\nGTEST_OUTPUT_FLAG = '--gtest_output'\nGTEST_DEFAULT_OUTPUT_FILE = 'test_detail.json'\nGTEST_PROGRAM_NAME = 'gtest_xml_output_unittest_'\n\n# The flag indicating stacktraces are not supported\nNO_STACKTRACE_SUPPORT_FLAG = '--no_stacktrace_support'\n\nSUPPORTS_STACK_TRACES = NO_STACKTRACE_SUPPORT_FLAG not in sys.argv\n\nif SUPPORTS_STACK_TRACES:\n  STACK_TRACE_TEMPLATE = '\\nStack trace:\\n*'\nelse:\n  STACK_TRACE_TEMPLATE = ''\n\nEXPECTED_NON_EMPTY = {\n    u'tests':\n        24,\n    u'failures':\n        4,\n    u'disabled':\n        2,\n    u'errors':\n        0,\n    u'timestamp':\n        u'*',\n    u'time':\n        u'*',\n    u'ad_hoc_property':\n        u'42',\n    u'name':\n        u'AllTests',\n    u'testsuites': [{\n        u'name':\n            u'SuccessfulTest',\n        u'tests':\n            1,\n        u'failures':\n            0,\n        u'disabled':\n            0,\n        u'errors':\n            0,\n        u'time':\n            u'*',\n        u'timestamp':\n            u'*',\n        u'testsuite': [{\n            u'name': u'Succeeds',\n            u'status': u'RUN',\n            u'result': u'COMPLETED',\n            u'time': u'*',\n           
 u'timestamp': u'*',\n            u'classname': u'SuccessfulTest'\n        }]\n    }, {\n        u'name':\n            u'FailedTest',\n        u'tests':\n            1,\n        u'failures':\n            1,\n        u'disabled':\n            0,\n        u'errors':\n            0,\n        u'time':\n            u'*',\n        u'timestamp':\n            u'*',\n        u'testsuite': [{\n            u'name':\n                u'Fails',\n            u'status':\n                u'RUN',\n            u'result':\n                u'COMPLETED',\n            u'time':\n                u'*',\n            u'timestamp':\n                u'*',\n            u'classname':\n                u'FailedTest',\n            u'failures': [{\n                u'failure': u'gtest_xml_output_unittest_.cc:*\\n'\n                            u'Expected equality of these values:\\n'\n                            u'  1\\n  2' + STACK_TRACE_TEMPLATE,\n                u'type': u''\n            }]\n        }]\n    }, {\n        u'name':\n            u'DisabledTest',\n        u'tests':\n            1,\n        u'failures':\n            0,\n        u'disabled':\n            1,\n        u'errors':\n            0,\n        u'time':\n            u'*',\n        u'timestamp':\n            u'*',\n        u'testsuite': [{\n            u'name': u'DISABLED_test_not_run',\n            u'status': u'NOTRUN',\n            u'result': u'SUPPRESSED',\n            u'time': u'*',\n            u'timestamp': u'*',\n            u'classname': u'DisabledTest'\n        }]\n    }, {\n        u'name':\n            u'SkippedTest',\n        u'tests':\n            1,\n        u'failures':\n            0,\n        u'disabled':\n            0,\n        u'errors':\n            0,\n        u'time':\n            u'*',\n        u'timestamp':\n            u'*',\n        u'testsuite': [{\n            u'name': u'Skipped',\n            u'status': u'RUN',\n            u'result': u'SKIPPED',\n            u'time': u'*',\n            u'timestamp': 
u'*',\n            u'classname': u'SkippedTest'\n        }]\n    }, {\n        u'name':\n            u'MixedResultTest',\n        u'tests':\n            3,\n        u'failures':\n            1,\n        u'disabled':\n            1,\n        u'errors':\n            0,\n        u'time':\n            u'*',\n        u'timestamp':\n            u'*',\n        u'testsuite': [{\n            u'name': u'Succeeds',\n            u'status': u'RUN',\n            u'result': u'COMPLETED',\n            u'time': u'*',\n            u'timestamp': u'*',\n            u'classname': u'MixedResultTest'\n        }, {\n            u'name':\n                u'Fails',\n            u'status':\n                u'RUN',\n            u'result':\n                u'COMPLETED',\n            u'time':\n                u'*',\n            u'timestamp':\n                u'*',\n            u'classname':\n                u'MixedResultTest',\n            u'failures': [{\n                u'failure': u'gtest_xml_output_unittest_.cc:*\\n'\n                            u'Expected equality of these values:\\n'\n                            u'  1\\n  2' + STACK_TRACE_TEMPLATE,\n                u'type': u''\n            }, {\n                u'failure': u'gtest_xml_output_unittest_.cc:*\\n'\n                            u'Expected equality of these values:\\n'\n                            u'  2\\n  3' + STACK_TRACE_TEMPLATE,\n                u'type': u''\n            }]\n        }, {\n            u'name': u'DISABLED_test',\n            u'status': u'NOTRUN',\n            u'result': u'SUPPRESSED',\n            u'time': u'*',\n            u'timestamp': u'*',\n            u'classname': u'MixedResultTest'\n        }]\n    }, {\n        u'name':\n            u'XmlQuotingTest',\n        u'tests':\n            1,\n        u'failures':\n            1,\n        u'disabled':\n            0,\n        u'errors':\n            0,\n        u'time':\n            u'*',\n        u'timestamp':\n            u'*',\n        u'testsuite': 
[{\n            u'name':\n                u'OutputsCData',\n            u'status':\n                u'RUN',\n            u'result':\n                u'COMPLETED',\n            u'time':\n                u'*',\n            u'timestamp':\n                u'*',\n            u'classname':\n                u'XmlQuotingTest',\n            u'failures': [{\n                u'failure': u'gtest_xml_output_unittest_.cc:*\\n'\n                            u'Failed\\nXML output: <?xml encoding=\"utf-8\">'\n                            u'<top><![CDATA[cdata text]]></top>' +\n                            STACK_TRACE_TEMPLATE,\n                u'type': u''\n            }]\n        }]\n    }, {\n        u'name':\n            u'InvalidCharactersTest',\n        u'tests':\n            1,\n        u'failures':\n            1,\n        u'disabled':\n            0,\n        u'errors':\n            0,\n        u'time':\n            u'*',\n        u'timestamp':\n            u'*',\n        u'testsuite': [{\n            u'name':\n                u'InvalidCharactersInMessage',\n            u'status':\n                u'RUN',\n            u'result':\n                u'COMPLETED',\n            u'time':\n                u'*',\n            u'timestamp':\n                u'*',\n            u'classname':\n                u'InvalidCharactersTest',\n            u'failures': [{\n                u'failure': u'gtest_xml_output_unittest_.cc:*\\n'\n                            u'Failed\\nInvalid characters in brackets'\n                            u' [\\x01\\x02]' + STACK_TRACE_TEMPLATE,\n                u'type': u''\n            }]\n        }]\n    }, {\n        u'name':\n            u'PropertyRecordingTest',\n        u'tests':\n            4,\n        u'failures':\n            0,\n        u'disabled':\n            0,\n        u'errors':\n            0,\n        u'time':\n            u'*',\n        u'timestamp':\n            u'*',\n        u'SetUpTestSuite':\n            u'yes',\n        
u'TearDownTestSuite':\n            u'aye',\n        u'testsuite': [{\n            u'name': u'OneProperty',\n            u'status': u'RUN',\n            u'result': u'COMPLETED',\n            u'time': u'*',\n            u'timestamp': u'*',\n            u'classname': u'PropertyRecordingTest',\n            u'key_1': u'1'\n        }, {\n            u'name': u'IntValuedProperty',\n            u'status': u'RUN',\n            u'result': u'COMPLETED',\n            u'time': u'*',\n            u'timestamp': u'*',\n            u'classname': u'PropertyRecordingTest',\n            u'key_int': u'1'\n        }, {\n            u'name': u'ThreeProperties',\n            u'status': u'RUN',\n            u'result': u'COMPLETED',\n            u'time': u'*',\n            u'timestamp': u'*',\n            u'classname': u'PropertyRecordingTest',\n            u'key_1': u'1',\n            u'key_2': u'2',\n            u'key_3': u'3'\n        }, {\n            u'name': u'TwoValuesForOneKeyUsesLastValue',\n            u'status': u'RUN',\n            u'result': u'COMPLETED',\n            u'time': u'*',\n            u'timestamp': u'*',\n            u'classname': u'PropertyRecordingTest',\n            u'key_1': u'2'\n        }]\n    }, {\n        u'name':\n            u'NoFixtureTest',\n        u'tests':\n            3,\n        u'failures':\n            0,\n        u'disabled':\n            0,\n        u'errors':\n            0,\n        u'time':\n            u'*',\n        u'timestamp':\n            u'*',\n        u'testsuite': [{\n            u'name': u'RecordProperty',\n            u'status': u'RUN',\n            u'result': u'COMPLETED',\n            u'time': u'*',\n            u'timestamp': u'*',\n            u'classname': u'NoFixtureTest',\n            u'key': u'1'\n        }, {\n            u'name': u'ExternalUtilityThatCallsRecordIntValuedProperty',\n            u'status': u'RUN',\n            u'result': u'COMPLETED',\n            u'time': u'*',\n            u'timestamp': u'*',\n            
u'classname': u'NoFixtureTest',\n            u'key_for_utility_int': u'1'\n        }, {\n            u'name': u'ExternalUtilityThatCallsRecordStringValuedProperty',\n            u'status': u'RUN',\n            u'result': u'COMPLETED',\n            u'time': u'*',\n            u'timestamp': u'*',\n            u'classname': u'NoFixtureTest',\n            u'key_for_utility_string': u'1'\n        }]\n    }, {\n        u'name':\n            u'TypedTest/0',\n        u'tests':\n            1,\n        u'failures':\n            0,\n        u'disabled':\n            0,\n        u'errors':\n            0,\n        u'time':\n            u'*',\n        u'timestamp':\n            u'*',\n        u'testsuite': [{\n            u'name': u'HasTypeParamAttribute',\n            u'type_param': u'int',\n            u'status': u'RUN',\n            u'result': u'COMPLETED',\n            u'time': u'*',\n            u'timestamp': u'*',\n            u'classname': u'TypedTest/0'\n        }]\n    }, {\n        u'name':\n            u'TypedTest/1',\n        u'tests':\n            1,\n        u'failures':\n            0,\n        u'disabled':\n            0,\n        u'errors':\n            0,\n        u'time':\n            u'*',\n        u'timestamp':\n            u'*',\n        u'testsuite': [{\n            u'name': u'HasTypeParamAttribute',\n            u'type_param': u'long',\n            u'status': u'RUN',\n            u'result': u'COMPLETED',\n            u'time': u'*',\n            u'timestamp': u'*',\n            u'classname': u'TypedTest/1'\n        }]\n    }, {\n        u'name':\n            u'Single/TypeParameterizedTestSuite/0',\n        u'tests':\n            1,\n        u'failures':\n            0,\n        u'disabled':\n            0,\n        u'errors':\n            0,\n        u'time':\n            u'*',\n        u'timestamp':\n            u'*',\n        u'testsuite': [{\n            u'name': u'HasTypeParamAttribute',\n            u'type_param': u'int',\n            u'status': 
u'RUN',\n            u'result': u'COMPLETED',\n            u'time': u'*',\n            u'timestamp': u'*',\n            u'classname': u'Single/TypeParameterizedTestSuite/0'\n        }]\n    }, {\n        u'name':\n            u'Single/TypeParameterizedTestSuite/1',\n        u'tests':\n            1,\n        u'failures':\n            0,\n        u'disabled':\n            0,\n        u'errors':\n            0,\n        u'time':\n            u'*',\n        u'timestamp':\n            u'*',\n        u'testsuite': [{\n            u'name': u'HasTypeParamAttribute',\n            u'type_param': u'long',\n            u'status': u'RUN',\n            u'result': u'COMPLETED',\n            u'time': u'*',\n            u'timestamp': u'*',\n            u'classname': u'Single/TypeParameterizedTestSuite/1'\n        }]\n    }, {\n        u'name':\n            u'Single/ValueParamTest',\n        u'tests':\n            4,\n        u'failures':\n            0,\n        u'disabled':\n            0,\n        u'errors':\n            0,\n        u'time':\n            u'*',\n        u'timestamp':\n            u'*',\n        u'testsuite': [{\n            u'name': u'HasValueParamAttribute/0',\n            u'value_param': u'33',\n            u'status': u'RUN',\n            u'result': u'COMPLETED',\n            u'time': u'*',\n            u'timestamp': u'*',\n            u'classname': u'Single/ValueParamTest'\n        }, {\n            u'name': u'HasValueParamAttribute/1',\n            u'value_param': u'42',\n            u'status': u'RUN',\n            u'result': u'COMPLETED',\n            u'time': u'*',\n            u'timestamp': u'*',\n            u'classname': u'Single/ValueParamTest'\n        }, {\n            u'name': u'AnotherTestThatHasValueParamAttribute/0',\n            u'value_param': u'33',\n            u'status': u'RUN',\n            u'result': u'COMPLETED',\n            u'time': u'*',\n            u'timestamp': u'*',\n            u'classname': u'Single/ValueParamTest'\n        }, {\n 
           u'name': u'AnotherTestThatHasValueParamAttribute/1',\n            u'value_param': u'42',\n            u'status': u'RUN',\n            u'result': u'COMPLETED',\n            u'time': u'*',\n            u'timestamp': u'*',\n            u'classname': u'Single/ValueParamTest'\n        }]\n    }]\n}\n\nEXPECTED_FILTERED = {\n    u'tests':\n        1,\n    u'failures':\n        0,\n    u'disabled':\n        0,\n    u'errors':\n        0,\n    u'time':\n        u'*',\n    u'timestamp':\n        u'*',\n    u'name':\n        u'AllTests',\n    u'ad_hoc_property':\n        u'42',\n    u'testsuites': [{\n        u'name':\n            u'SuccessfulTest',\n        u'tests':\n            1,\n        u'failures':\n            0,\n        u'disabled':\n            0,\n        u'errors':\n            0,\n        u'time':\n            u'*',\n        u'timestamp':\n            u'*',\n        u'testsuite': [{\n            u'name': u'Succeeds',\n            u'status': u'RUN',\n            u'result': u'COMPLETED',\n            u'time': u'*',\n            u'timestamp': u'*',\n            u'classname': u'SuccessfulTest',\n        }]\n    }],\n}\n\nEXPECTED_EMPTY = {\n    u'tests': 0,\n    u'failures': 0,\n    u'disabled': 0,\n    u'errors': 0,\n    u'time': u'*',\n    u'timestamp': u'*',\n    u'name': u'AllTests',\n    u'testsuites': [],\n}\n\nGTEST_PROGRAM_PATH = gtest_test_utils.GetTestExecutablePath(GTEST_PROGRAM_NAME)\n\nSUPPORTS_TYPED_TESTS = 'TypedTest' in gtest_test_utils.Subprocess(\n    [GTEST_PROGRAM_PATH, GTEST_LIST_TESTS_FLAG], capture_stderr=False).output\n\n\nclass GTestJsonOutputUnitTest(gtest_test_utils.TestCase):\n  \"\"\"Unit test for Google Test's JSON output functionality.\n  \"\"\"\n\n  # This test currently breaks on platforms that do not support typed and\n  # type-parameterized tests, so we don't run it under them.\n  if SUPPORTS_TYPED_TESTS:\n\n    def testNonEmptyJsonOutput(self):\n      \"\"\"Verifies JSON output for a Google Test binary with non-empty 
output.\n\n      Runs a test program that generates a non-empty JSON output, and\n      tests that the JSON output is expected.\n      \"\"\"\n      self._TestJsonOutput(GTEST_PROGRAM_NAME, EXPECTED_NON_EMPTY, 1)\n\n  def testEmptyJsonOutput(self):\n    \"\"\"Verifies JSON output for a Google Test binary without actual tests.\n\n    Runs a test program that generates an empty JSON output, and\n    tests that the JSON output is expected.\n    \"\"\"\n\n    self._TestJsonOutput('gtest_no_test_unittest', EXPECTED_EMPTY, 0)\n\n  def testTimestampValue(self):\n    \"\"\"Checks whether the timestamp attribute in the JSON output is valid.\n\n    Runs a test program that generates an empty JSON output, and checks if\n    the timestamp attribute in the testsuites tag is valid.\n    \"\"\"\n    actual = self._GetJsonOutput('gtest_no_test_unittest', [], 0)\n    date_time_str = actual['timestamp']\n    # datetime.strptime() is only available in Python 2.5+ so we have to\n    # parse the expected datetime manually.\n    match = re.match(r'(\\d+)-(\\d\\d)-(\\d\\d)T(\\d\\d):(\\d\\d):(\\d\\d)', date_time_str)\n    self.assertTrue(\n        re.match,\n        'JSON datetime string %s has incorrect format' % date_time_str)\n    date_time_from_json = datetime.datetime(\n        year=int(match.group(1)), month=int(match.group(2)),\n        day=int(match.group(3)), hour=int(match.group(4)),\n        minute=int(match.group(5)), second=int(match.group(6)))\n\n    time_delta = abs(datetime.datetime.now() - date_time_from_json)\n    # timestamp value should be near the current local time\n    self.assertTrue(time_delta < datetime.timedelta(seconds=600),\n                    'time_delta is %s' % time_delta)\n\n  def testDefaultOutputFile(self):\n    \"\"\"Verifies the default output file name.\n\n    Confirms that Google Test produces a JSON output file with the expected\n    default name if no name is explicitly specified.\n    \"\"\"\n    output_file = 
os.path.join(gtest_test_utils.GetTempDir(),\n                               GTEST_DEFAULT_OUTPUT_FILE)\n    gtest_prog_path = gtest_test_utils.GetTestExecutablePath(\n        'gtest_no_test_unittest')\n    try:\n      os.remove(output_file)\n    except OSError:\n      e = sys.exc_info()[1]\n      if e.errno != errno.ENOENT:\n        raise\n\n    p = gtest_test_utils.Subprocess(\n        [gtest_prog_path, '%s=json' % GTEST_OUTPUT_FLAG],\n        working_dir=gtest_test_utils.GetTempDir())\n    self.assert_(p.exited)\n    self.assertEquals(0, p.exit_code)\n    self.assert_(os.path.isfile(output_file))\n\n  def testSuppressedJsonOutput(self):\n    \"\"\"Verifies that no JSON output is generated.\n\n    Tests that no JSON file is generated if the default JSON listener is\n    shut down before RUN_ALL_TESTS is invoked.\n    \"\"\"\n\n    json_path = os.path.join(gtest_test_utils.GetTempDir(),\n                             GTEST_PROGRAM_NAME + 'out.json')\n    if os.path.isfile(json_path):\n      os.remove(json_path)\n\n    command = [GTEST_PROGRAM_PATH,\n               '%s=json:%s' % (GTEST_OUTPUT_FLAG, json_path),\n               '--shut_down_xml']\n    p = gtest_test_utils.Subprocess(command)\n    if p.terminated_by_signal:\n      # p.signal is available only if p.terminated_by_signal is True.\n      self.assertFalse(\n          p.terminated_by_signal,\n          '%s was killed by signal %d' % (GTEST_PROGRAM_NAME, p.signal))\n    else:\n      self.assert_(p.exited)\n      self.assertEquals(1, p.exit_code,\n                        \"'%s' exited with code %s, which doesn't match \"\n                        'the expected exit code %s.'\n                        % (command, p.exit_code, 1))\n\n    self.assert_(not os.path.isfile(json_path))\n\n  def testFilteredTestJsonOutput(self):\n    \"\"\"Verifies JSON output when a filter is applied.\n\n    Runs a test program that executes only some tests and verifies that\n    non-selected tests do not show up in the JSON output.\n  
  \"\"\"\n\n    self._TestJsonOutput(GTEST_PROGRAM_NAME, EXPECTED_FILTERED, 0,\n                         extra_args=['%s=SuccessfulTest.*' % GTEST_FILTER_FLAG])\n\n  def _GetJsonOutput(self, gtest_prog_name, extra_args, expected_exit_code):\n    \"\"\"Returns the JSON output generated by running the program gtest_prog_name.\n\n    Furthermore, the program's exit code must be expected_exit_code.\n\n    Args:\n      gtest_prog_name: Google Test binary name.\n      extra_args: extra arguments to binary invocation.\n      expected_exit_code: program's exit code.\n    \"\"\"\n    json_path = os.path.join(gtest_test_utils.GetTempDir(),\n                             gtest_prog_name + 'out.json')\n    gtest_prog_path = gtest_test_utils.GetTestExecutablePath(gtest_prog_name)\n\n    command = (\n        [gtest_prog_path, '%s=json:%s' % (GTEST_OUTPUT_FLAG, json_path)] +\n        extra_args\n    )\n    p = gtest_test_utils.Subprocess(command)\n    if p.terminated_by_signal:\n      self.assert_(False,\n                   '%s was killed by signal %d' % (gtest_prog_name, p.signal))\n    else:\n      self.assert_(p.exited)\n      self.assertEquals(expected_exit_code, p.exit_code,\n                        \"'%s' exited with code %s, which doesn't match \"\n                        'the expected exit code %s.'\n                        % (command, p.exit_code, expected_exit_code))\n    with open(json_path) as f:\n      actual = json.load(f)\n    return actual\n\n  def _TestJsonOutput(self, gtest_prog_name, expected,\n                      expected_exit_code, extra_args=None):\n    \"\"\"Checks the JSON output generated by the Google Test binary.\n\n    Asserts that the JSON document generated by running the program\n    gtest_prog_name matches expected_json, a string containing another\n    JSON document.  
Furthermore, the program's exit code must be\n    expected_exit_code.\n\n    Args:\n      gtest_prog_name: Google Test binary name.\n      expected: expected output.\n      expected_exit_code: program's exit code.\n      extra_args: extra arguments to binary invocation.\n    \"\"\"\n\n    actual = self._GetJsonOutput(gtest_prog_name, extra_args or [],\n                                 expected_exit_code)\n    self.assertEqual(expected, gtest_json_test_utils.normalize(actual))\n\n\nif __name__ == '__main__':\n  if NO_STACKTRACE_SUPPORT_FLAG in sys.argv:\n    # unittest.main() can't handle unknown flags\n    sys.argv.remove(NO_STACKTRACE_SUPPORT_FLAG)\n\n  os.environ['GTEST_STACK_TRACE_DEPTH'] = '1'\n  gtest_test_utils.Main()\n"
  },
  {
    "path": "test/googletest/test/googletest-list-tests-unittest.py",
    "content": "#!/usr/bin/env python\n#\n# Copyright 2006, Google Inc.\n# All rights reserved.\n#\n# Redistribution and use in source and binary forms, with or without\n# modification, are permitted provided that the following conditions are\n# met:\n#\n#     * Redistributions of source code must retain the above copyright\n# notice, this list of conditions and the following disclaimer.\n#     * Redistributions in binary form must reproduce the above\n# copyright notice, this list of conditions and the following disclaimer\n# in the documentation and/or other materials provided with the\n# distribution.\n#     * Neither the name of Google Inc. nor the names of its\n# contributors may be used to endorse or promote products derived from\n# this software without specific prior written permission.\n#\n# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n# \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\"\"\"Unit test for Google Test's --gtest_list_tests flag.\n\nA user can ask Google Test to list all tests by specifying the\n--gtest_list_tests flag.  
This script tests such functionality\nby invoking googletest-list-tests-unittest_ (a program written with\nGoogle Test) the command line flags.\n\"\"\"\n\nimport re\nimport gtest_test_utils\n\n# Constants.\n\n# The command line flag for enabling/disabling listing all tests.\nLIST_TESTS_FLAG = 'gtest_list_tests'\n\n# Path to the googletest-list-tests-unittest_ program.\nEXE_PATH = gtest_test_utils.GetTestExecutablePath('googletest-list-tests-unittest_')\n\n# The expected output when running googletest-list-tests-unittest_ with\n# --gtest_list_tests\nEXPECTED_OUTPUT_NO_FILTER_RE = re.compile(r\"\"\"FooDeathTest\\.\n  Test1\nFoo\\.\n  Bar1\n  Bar2\n  DISABLED_Bar3\nAbc\\.\n  Xyz\n  Def\nFooBar\\.\n  Baz\nFooTest\\.\n  Test1\n  DISABLED_Test2\n  Test3\nTypedTest/0\\.  # TypeParam = (VeryLo{245}|class VeryLo{239})\\.\\.\\.\n  TestA\n  TestB\nTypedTest/1\\.  # TypeParam = int\\s*\\*( __ptr64)?\n  TestA\n  TestB\nTypedTest/2\\.  # TypeParam = .*MyArray<bool,\\s*42>\n  TestA\n  TestB\nMy/TypeParamTest/0\\.  # TypeParam = (VeryLo{245}|class VeryLo{239})\\.\\.\\.\n  TestA\n  TestB\nMy/TypeParamTest/1\\.  # TypeParam = int\\s*\\*( __ptr64)?\n  TestA\n  TestB\nMy/TypeParamTest/2\\.  
# TypeParam = .*MyArray<bool,\\s*42>\n  TestA\n  TestB\nMyInstantiation/ValueParamTest\\.\n  TestA/0  # GetParam\\(\\) = one line\n  TestA/1  # GetParam\\(\\) = two\\\\nlines\n  TestA/2  # GetParam\\(\\) = a very\\\\nlo{241}\\.\\.\\.\n  TestB/0  # GetParam\\(\\) = one line\n  TestB/1  # GetParam\\(\\) = two\\\\nlines\n  TestB/2  # GetParam\\(\\) = a very\\\\nlo{241}\\.\\.\\.\n\"\"\")\n\n# The expected output when running googletest-list-tests-unittest_ with\n# --gtest_list_tests and --gtest_filter=Foo*.\nEXPECTED_OUTPUT_FILTER_FOO_RE = re.compile(r\"\"\"FooDeathTest\\.\n  Test1\nFoo\\.\n  Bar1\n  Bar2\n  DISABLED_Bar3\nFooBar\\.\n  Baz\nFooTest\\.\n  Test1\n  DISABLED_Test2\n  Test3\n\"\"\")\n\n# Utilities.\n\n\ndef Run(args):\n  \"\"\"Runs googletest-list-tests-unittest_ and returns the list of tests printed.\"\"\"\n\n  return gtest_test_utils.Subprocess([EXE_PATH] + args,\n                                     capture_stderr=False).output\n\n\n# The unit test.\n\n\nclass GTestListTestsUnitTest(gtest_test_utils.TestCase):\n  \"\"\"Tests using the --gtest_list_tests flag to list all tests.\"\"\"\n\n  def RunAndVerify(self, flag_value, expected_output_re, other_flag):\n    \"\"\"Runs googletest-list-tests-unittest_ and verifies that it prints\n    the correct tests.\n\n    Args:\n      flag_value:         value of the --gtest_list_tests flag;\n                          None if the flag should not be present.\n      expected_output_re: regular expression that matches the expected\n                          output after running command;\n      other_flag:         a different flag to be passed to command\n                          along with gtest_list_tests;\n                          None if the flag should not be present.\n    \"\"\"\n\n    if flag_value is None:\n      flag = ''\n      flag_expression = 'not set'\n    elif flag_value == '0':\n      flag = '--%s=0' % LIST_TESTS_FLAG\n      flag_expression = '0'\n    else:\n      flag = '--%s' % LIST_TESTS_FLAG\n      
flag_expression = '1'\n\n    args = [flag]\n\n    if other_flag is not None:\n      args += [other_flag]\n\n    output = Run(args)\n\n    if expected_output_re:\n      self.assert_(\n          expected_output_re.match(output),\n          ('when %s is %s, the output of \"%s\" is \"%s\",\\n'\n           'which does not match regex \"%s\"' %\n           (LIST_TESTS_FLAG, flag_expression, ' '.join(args), output,\n            expected_output_re.pattern)))\n    else:\n      self.assert_(\n          not EXPECTED_OUTPUT_NO_FILTER_RE.match(output),\n          ('when %s is %s, the output of \"%s\" is \"%s\"'%\n           (LIST_TESTS_FLAG, flag_expression, ' '.join(args), output)))\n\n  def testDefaultBehavior(self):\n    \"\"\"Tests the behavior of the default mode.\"\"\"\n\n    self.RunAndVerify(flag_value=None,\n                      expected_output_re=None,\n                      other_flag=None)\n\n  def testFlag(self):\n    \"\"\"Tests using the --gtest_list_tests flag.\"\"\"\n\n    self.RunAndVerify(flag_value='0',\n                      expected_output_re=None,\n                      other_flag=None)\n    self.RunAndVerify(flag_value='1',\n                      expected_output_re=EXPECTED_OUTPUT_NO_FILTER_RE,\n                      other_flag=None)\n\n  def testOverrideNonFilterFlags(self):\n    \"\"\"Tests that --gtest_list_tests overrides the non-filter flags.\"\"\"\n\n    self.RunAndVerify(flag_value='1',\n                      expected_output_re=EXPECTED_OUTPUT_NO_FILTER_RE,\n                      other_flag='--gtest_break_on_failure')\n\n  def testWithFilterFlags(self):\n    \"\"\"Tests that --gtest_list_tests takes into account the\n    --gtest_filter flag.\"\"\"\n\n    self.RunAndVerify(flag_value='1',\n                      expected_output_re=EXPECTED_OUTPUT_FILTER_FOO_RE,\n                      other_flag='--gtest_filter=Foo*')\n\n\nif __name__ == '__main__':\n  gtest_test_utils.Main()\n"
  },
  {
    "path": "test/googletest/test/googletest-list-tests-unittest_.cc",
    "content": "// Copyright 2006, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\n// Unit test for Google Test's --gtest_list_tests flag.\n//\n// A user can ask Google Test to list all tests that will run\n// so that when using a filter, a user will know what\n// tests to look for. 
The tests will not be run after listing.\n//\n// This program will be invoked from a Python unit test.\n// Don't run it directly.\n\n#include \"gtest/gtest.h\"\n\n// Several different test cases and tests that will be listed.\nTEST(Foo, Bar1) {\n}\n\nTEST(Foo, Bar2) {\n}\n\nTEST(Foo, DISABLED_Bar3) {\n}\n\nTEST(Abc, Xyz) {\n}\n\nTEST(Abc, Def) {\n}\n\nTEST(FooBar, Baz) {\n}\n\nclass FooTest : public testing::Test {\n};\n\nTEST_F(FooTest, Test1) {\n}\n\nTEST_F(FooTest, DISABLED_Test2) {\n}\n\nTEST_F(FooTest, Test3) {\n}\n\nTEST(FooDeathTest, Test1) {\n}\n\n// A group of value-parameterized tests.\n\nclass MyType {\n public:\n  explicit MyType(const std::string& a_value) : value_(a_value) {}\n\n  const std::string& value() const { return value_; }\n\n private:\n  std::string value_;\n};\n\n// Teaches Google Test how to print a MyType.\nvoid PrintTo(const MyType& x, std::ostream* os) {\n  *os << x.value();\n}\n\nclass ValueParamTest : public testing::TestWithParam<MyType> {\n};\n\nTEST_P(ValueParamTest, TestA) {\n}\n\nTEST_P(ValueParamTest, TestB) {\n}\n\nINSTANTIATE_TEST_SUITE_P(\n    MyInstantiation, ValueParamTest,\n    testing::Values(MyType(\"one line\"),\n                    MyType(\"two\\nlines\"),\n                    MyType(\"a very\\nloooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong line\")));  // NOLINT\n\n// A group of typed tests.\n\n// A deliberately long type name for testing the line-truncating\n// behavior when printing a type parameter.\nclass VeryLoooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooogName {  // NOLINT\n};\n\ntemplate <typename T>\nclass 
TypedTest : public testing::Test {\n};\n\ntemplate <typename T, int kSize>\nclass MyArray {\n};\n\ntypedef testing::Types<VeryLoooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooogName,  // NOLINT\n                       int*, MyArray<bool, 42> > MyTypes;\n\nTYPED_TEST_SUITE(TypedTest, MyTypes);\n\nTYPED_TEST(TypedTest, TestA) {\n}\n\nTYPED_TEST(TypedTest, TestB) {\n}\n\n// A group of type-parameterized tests.\n\ntemplate <typename T>\nclass TypeParamTest : public testing::Test {\n};\n\nTYPED_TEST_SUITE_P(TypeParamTest);\n\nTYPED_TEST_P(TypeParamTest, TestA) {\n}\n\nTYPED_TEST_P(TypeParamTest, TestB) {\n}\n\nREGISTER_TYPED_TEST_SUITE_P(TypeParamTest, TestA, TestB);\n\nINSTANTIATE_TYPED_TEST_SUITE_P(My, TypeParamTest, MyTypes);\n\nint main(int argc, char **argv) {\n  ::testing::InitGoogleTest(&argc, argv);\n\n  return RUN_ALL_TESTS();\n}\n"
  },
  {
    "path": "test/googletest/test/googletest-listener-test.cc",
    "content": "// Copyright 2009 Google Inc. All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n//\n// The Google C++ Testing and Mocking Framework (Google Test)\n//\n// This file verifies Google Test event listeners receive events at the\n// right times.\n\n#include <vector>\n\n#include \"gtest/gtest.h\"\n#include \"gtest/internal/custom/gtest.h\"\n\nusing ::testing::AddGlobalTestEnvironment;\nusing ::testing::Environment;\nusing ::testing::InitGoogleTest;\nusing ::testing::Test;\nusing ::testing::TestSuite;\nusing ::testing::TestEventListener;\nusing ::testing::TestInfo;\nusing ::testing::TestPartResult;\nusing ::testing::UnitTest;\n\n// Used by tests to register their events.\nstd::vector<std::string>* g_events = nullptr;\n\nnamespace testing {\nnamespace internal {\n\nclass EventRecordingListener : public TestEventListener {\n public:\n  explicit EventRecordingListener(const char* name) : name_(name) {}\n\n protected:\n  void OnTestProgramStart(const UnitTest& /*unit_test*/) override {\n    g_events->push_back(GetFullMethodName(\"OnTestProgramStart\"));\n  }\n\n  void OnTestIterationStart(const UnitTest& /*unit_test*/,\n                            int iteration) override {\n    Message message;\n    message << GetFullMethodName(\"OnTestIterationStart\")\n            << \"(\" << iteration << \")\";\n    g_events->push_back(message.GetString());\n  }\n\n  void OnEnvironmentsSetUpStart(const UnitTest& /*unit_test*/) override {\n    g_events->push_back(GetFullMethodName(\"OnEnvironmentsSetUpStart\"));\n  }\n\n  void 
OnEnvironmentsSetUpEnd(const UnitTest& /*unit_test*/) override {\n    g_events->push_back(GetFullMethodName(\"OnEnvironmentsSetUpEnd\"));\n  }\n#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_\n  void OnTestCaseStart(const TestCase& /*test_case*/) override {\n    g_events->push_back(GetFullMethodName(\"OnTestCaseStart\"));\n  }\n#endif  // GTEST_REMOVE_LEGACY_TEST_CASEAPI_\n\n  void OnTestStart(const TestInfo& /*test_info*/) override {\n    g_events->push_back(GetFullMethodName(\"OnTestStart\"));\n  }\n\n  void OnTestPartResult(const TestPartResult& /*test_part_result*/) override {\n    g_events->push_back(GetFullMethodName(\"OnTestPartResult\"));\n  }\n\n  void OnTestEnd(const TestInfo& /*test_info*/) override {\n    g_events->push_back(GetFullMethodName(\"OnTestEnd\"));\n  }\n\n#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_\n  void OnTestCaseEnd(const TestCase& /*test_case*/) override {\n    g_events->push_back(GetFullMethodName(\"OnTestCaseEnd\"));\n  }\n#endif  // GTEST_REMOVE_LEGACY_TEST_CASEAPI_\n\n  void OnEnvironmentsTearDownStart(const UnitTest& /*unit_test*/) override {\n    g_events->push_back(GetFullMethodName(\"OnEnvironmentsTearDownStart\"));\n  }\n\n  void OnEnvironmentsTearDownEnd(const UnitTest& /*unit_test*/) override {\n    g_events->push_back(GetFullMethodName(\"OnEnvironmentsTearDownEnd\"));\n  }\n\n  void OnTestIterationEnd(const UnitTest& /*unit_test*/,\n                          int iteration) override {\n    Message message;\n    message << GetFullMethodName(\"OnTestIterationEnd\")\n            << \"(\"  << iteration << \")\";\n    g_events->push_back(message.GetString());\n  }\n\n  void OnTestProgramEnd(const UnitTest& /*unit_test*/) override {\n    g_events->push_back(GetFullMethodName(\"OnTestProgramEnd\"));\n  }\n\n private:\n  std::string GetFullMethodName(const char* name) {\n    return name_ + \".\" + name;\n  }\n\n  std::string name_;\n};\n\n// This listener is using OnTestSuiteStart, OnTestSuiteEnd API\nclass EventRecordingListener2 : public 
TestEventListener {\n public:\n  explicit EventRecordingListener2(const char* name) : name_(name) {}\n\n protected:\n  void OnTestProgramStart(const UnitTest& /*unit_test*/) override {\n    g_events->push_back(GetFullMethodName(\"OnTestProgramStart\"));\n  }\n\n  void OnTestIterationStart(const UnitTest& /*unit_test*/,\n                            int iteration) override {\n    Message message;\n    message << GetFullMethodName(\"OnTestIterationStart\") << \"(\" << iteration\n            << \")\";\n    g_events->push_back(message.GetString());\n  }\n\n  void OnEnvironmentsSetUpStart(const UnitTest& /*unit_test*/) override {\n    g_events->push_back(GetFullMethodName(\"OnEnvironmentsSetUpStart\"));\n  }\n\n  void OnEnvironmentsSetUpEnd(const UnitTest& /*unit_test*/) override {\n    g_events->push_back(GetFullMethodName(\"OnEnvironmentsSetUpEnd\"));\n  }\n\n  void OnTestSuiteStart(const TestSuite& /*test_suite*/) override {\n    g_events->push_back(GetFullMethodName(\"OnTestSuiteStart\"));\n  }\n\n  void OnTestStart(const TestInfo& /*test_info*/) override {\n    g_events->push_back(GetFullMethodName(\"OnTestStart\"));\n  }\n\n  void OnTestPartResult(const TestPartResult& /*test_part_result*/) override {\n    g_events->push_back(GetFullMethodName(\"OnTestPartResult\"));\n  }\n\n  void OnTestEnd(const TestInfo& /*test_info*/) override {\n    g_events->push_back(GetFullMethodName(\"OnTestEnd\"));\n  }\n\n  void OnTestSuiteEnd(const TestSuite& /*test_suite*/) override {\n    g_events->push_back(GetFullMethodName(\"OnTestSuiteEnd\"));\n  }\n\n  void OnEnvironmentsTearDownStart(const UnitTest& /*unit_test*/) override {\n    g_events->push_back(GetFullMethodName(\"OnEnvironmentsTearDownStart\"));\n  }\n\n  void OnEnvironmentsTearDownEnd(const UnitTest& /*unit_test*/) override {\n    g_events->push_back(GetFullMethodName(\"OnEnvironmentsTearDownEnd\"));\n  }\n\n  void OnTestIterationEnd(const UnitTest& /*unit_test*/,\n                          int iteration) override {\n    
Message message;\n    message << GetFullMethodName(\"OnTestIterationEnd\") << \"(\" << iteration\n            << \")\";\n    g_events->push_back(message.GetString());\n  }\n\n  void OnTestProgramEnd(const UnitTest& /*unit_test*/) override {\n    g_events->push_back(GetFullMethodName(\"OnTestProgramEnd\"));\n  }\n\n private:\n  std::string GetFullMethodName(const char* name) { return name_ + \".\" + name; }\n\n  std::string name_;\n};\n\nclass EnvironmentInvocationCatcher : public Environment {\n protected:\n  void SetUp() override { g_events->push_back(\"Environment::SetUp\"); }\n\n  void TearDown() override { g_events->push_back(\"Environment::TearDown\"); }\n};\n\nclass ListenerTest : public Test {\n protected:\n  static void SetUpTestSuite() {\n    g_events->push_back(\"ListenerTest::SetUpTestSuite\");\n  }\n\n  static void TearDownTestSuite() {\n    g_events->push_back(\"ListenerTest::TearDownTestSuite\");\n  }\n\n  void SetUp() override { g_events->push_back(\"ListenerTest::SetUp\"); }\n\n  void TearDown() override { g_events->push_back(\"ListenerTest::TearDown\"); }\n};\n\nTEST_F(ListenerTest, DoesFoo) {\n  // Test execution order within a test case is not guaranteed so we are not\n  // recording the test name.\n  g_events->push_back(\"ListenerTest::* Test Body\");\n  SUCCEED();  // Triggers OnTestPartResult.\n}\n\nTEST_F(ListenerTest, DoesBar) {\n  g_events->push_back(\"ListenerTest::* Test Body\");\n  SUCCEED();  // Triggers OnTestPartResult.\n}\n\n}  // namespace internal\n\n}  // namespace testing\n\nusing ::testing::internal::EnvironmentInvocationCatcher;\nusing ::testing::internal::EventRecordingListener;\nusing ::testing::internal::EventRecordingListener2;\n\nvoid VerifyResults(const std::vector<std::string>& data,\n                   const char* const* expected_data,\n                   size_t expected_data_size) {\n  const size_t actual_size = data.size();\n  // If the following assertion fails, a new entry will be appended to\n  // data.  
Hence we save data.size() first.\n  EXPECT_EQ(expected_data_size, actual_size);\n\n  // Compares the common prefix.\n  const size_t shorter_size = expected_data_size <= actual_size ?\n      expected_data_size : actual_size;\n  size_t i = 0;\n  for (; i < shorter_size; ++i) {\n    ASSERT_STREQ(expected_data[i], data[i].c_str())\n        << \"at position \" << i;\n  }\n\n  // Prints extra elements in the actual data.\n  for (; i < actual_size; ++i) {\n    printf(\"  Actual event #%lu: %s\\n\",\n        static_cast<unsigned long>(i), data[i].c_str());\n  }\n}\n\nint main(int argc, char **argv) {\n  std::vector<std::string> events;\n  g_events = &events;\n  InitGoogleTest(&argc, argv);\n\n  UnitTest::GetInstance()->listeners().Append(\n      new EventRecordingListener(\"1st\"));\n  UnitTest::GetInstance()->listeners().Append(\n      new EventRecordingListener(\"2nd\"));\n  UnitTest::GetInstance()->listeners().Append(\n      new EventRecordingListener2(\"3rd\"));\n\n  AddGlobalTestEnvironment(new EnvironmentInvocationCatcher);\n\n  GTEST_CHECK_(events.size() == 0)\n      << \"AddGlobalTestEnvironment should not generate any events itself.\";\n\n  ::testing::GTEST_FLAG(repeat) = 2;\n  int ret_val = RUN_ALL_TESTS();\n\n#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_\n\n  // The deprecated OnTestSuiteStart/OnTestCaseStart events are included\n  const char* const expected_events[] = {\"1st.OnTestProgramStart\",\n                                         \"2nd.OnTestProgramStart\",\n                                         \"3rd.OnTestProgramStart\",\n                                         \"1st.OnTestIterationStart(0)\",\n                                         \"2nd.OnTestIterationStart(0)\",\n                                         \"3rd.OnTestIterationStart(0)\",\n                                         \"1st.OnEnvironmentsSetUpStart\",\n                                         \"2nd.OnEnvironmentsSetUpStart\",\n                                         
\"3rd.OnEnvironmentsSetUpStart\",\n                                         \"Environment::SetUp\",\n                                         \"3rd.OnEnvironmentsSetUpEnd\",\n                                         \"2nd.OnEnvironmentsSetUpEnd\",\n                                         \"1st.OnEnvironmentsSetUpEnd\",\n                                         \"3rd.OnTestSuiteStart\",\n                                         \"1st.OnTestCaseStart\",\n                                         \"2nd.OnTestCaseStart\",\n                                         \"ListenerTest::SetUpTestSuite\",\n                                         \"1st.OnTestStart\",\n                                         \"2nd.OnTestStart\",\n                                         \"3rd.OnTestStart\",\n                                         \"ListenerTest::SetUp\",\n                                         \"ListenerTest::* Test Body\",\n                                         \"1st.OnTestPartResult\",\n                                         \"2nd.OnTestPartResult\",\n                                         \"3rd.OnTestPartResult\",\n                                         \"ListenerTest::TearDown\",\n                                         \"3rd.OnTestEnd\",\n                                         \"2nd.OnTestEnd\",\n                                         \"1st.OnTestEnd\",\n                                         \"1st.OnTestStart\",\n                                         \"2nd.OnTestStart\",\n                                         \"3rd.OnTestStart\",\n                                         \"ListenerTest::SetUp\",\n                                         \"ListenerTest::* Test Body\",\n                                         \"1st.OnTestPartResult\",\n                                         \"2nd.OnTestPartResult\",\n                                         \"3rd.OnTestPartResult\",\n                                         \"ListenerTest::TearDown\",\n          
                               \"3rd.OnTestEnd\",\n                                         \"2nd.OnTestEnd\",\n                                         \"1st.OnTestEnd\",\n                                         \"ListenerTest::TearDownTestSuite\",\n                                         \"3rd.OnTestSuiteEnd\",\n                                         \"2nd.OnTestCaseEnd\",\n                                         \"1st.OnTestCaseEnd\",\n                                         \"1st.OnEnvironmentsTearDownStart\",\n                                         \"2nd.OnEnvironmentsTearDownStart\",\n                                         \"3rd.OnEnvironmentsTearDownStart\",\n                                         \"Environment::TearDown\",\n                                         \"3rd.OnEnvironmentsTearDownEnd\",\n                                         \"2nd.OnEnvironmentsTearDownEnd\",\n                                         \"1st.OnEnvironmentsTearDownEnd\",\n                                         \"3rd.OnTestIterationEnd(0)\",\n                                         \"2nd.OnTestIterationEnd(0)\",\n                                         \"1st.OnTestIterationEnd(0)\",\n                                         \"1st.OnTestIterationStart(1)\",\n                                         \"2nd.OnTestIterationStart(1)\",\n                                         \"3rd.OnTestIterationStart(1)\",\n                                         \"1st.OnEnvironmentsSetUpStart\",\n                                         \"2nd.OnEnvironmentsSetUpStart\",\n                                         \"3rd.OnEnvironmentsSetUpStart\",\n                                         \"Environment::SetUp\",\n                                         \"3rd.OnEnvironmentsSetUpEnd\",\n                                         \"2nd.OnEnvironmentsSetUpEnd\",\n                                         \"1st.OnEnvironmentsSetUpEnd\",\n                                         
\"3rd.OnTestSuiteStart\",\n                                         \"1st.OnTestCaseStart\",\n                                         \"2nd.OnTestCaseStart\",\n                                         \"ListenerTest::SetUpTestSuite\",\n                                         \"1st.OnTestStart\",\n                                         \"2nd.OnTestStart\",\n                                         \"3rd.OnTestStart\",\n                                         \"ListenerTest::SetUp\",\n                                         \"ListenerTest::* Test Body\",\n                                         \"1st.OnTestPartResult\",\n                                         \"2nd.OnTestPartResult\",\n                                         \"3rd.OnTestPartResult\",\n                                         \"ListenerTest::TearDown\",\n                                         \"3rd.OnTestEnd\",\n                                         \"2nd.OnTestEnd\",\n                                         \"1st.OnTestEnd\",\n                                         \"1st.OnTestStart\",\n                                         \"2nd.OnTestStart\",\n                                         \"3rd.OnTestStart\",\n                                         \"ListenerTest::SetUp\",\n                                         \"ListenerTest::* Test Body\",\n                                         \"1st.OnTestPartResult\",\n                                         \"2nd.OnTestPartResult\",\n                                         \"3rd.OnTestPartResult\",\n                                         \"ListenerTest::TearDown\",\n                                         \"3rd.OnTestEnd\",\n                                         \"2nd.OnTestEnd\",\n                                         \"1st.OnTestEnd\",\n                                         \"ListenerTest::TearDownTestSuite\",\n                                         \"3rd.OnTestSuiteEnd\",\n                                         
\"2nd.OnTestCaseEnd\",\n                                         \"1st.OnTestCaseEnd\",\n                                         \"1st.OnEnvironmentsTearDownStart\",\n                                         \"2nd.OnEnvironmentsTearDownStart\",\n                                         \"3rd.OnEnvironmentsTearDownStart\",\n                                         \"Environment::TearDown\",\n                                         \"3rd.OnEnvironmentsTearDownEnd\",\n                                         \"2nd.OnEnvironmentsTearDownEnd\",\n                                         \"1st.OnEnvironmentsTearDownEnd\",\n                                         \"3rd.OnTestIterationEnd(1)\",\n                                         \"2nd.OnTestIterationEnd(1)\",\n                                         \"1st.OnTestIterationEnd(1)\",\n                                         \"3rd.OnTestProgramEnd\",\n                                         \"2nd.OnTestProgramEnd\",\n                                         \"1st.OnTestProgramEnd\"};\n#else\n  const char* const expected_events[] = {\"1st.OnTestProgramStart\",\n                                         \"2nd.OnTestProgramStart\",\n                                         \"3rd.OnTestProgramStart\",\n                                         \"1st.OnTestIterationStart(0)\",\n                                         \"2nd.OnTestIterationStart(0)\",\n                                         \"3rd.OnTestIterationStart(0)\",\n                                         \"1st.OnEnvironmentsSetUpStart\",\n                                         \"2nd.OnEnvironmentsSetUpStart\",\n                                         \"3rd.OnEnvironmentsSetUpStart\",\n                                         \"Environment::SetUp\",\n                                         \"3rd.OnEnvironmentsSetUpEnd\",\n                                         \"2nd.OnEnvironmentsSetUpEnd\",\n                                         
\"1st.OnEnvironmentsSetUpEnd\",\n                                         \"3rd.OnTestSuiteStart\",\n                                         \"ListenerTest::SetUpTestSuite\",\n                                         \"1st.OnTestStart\",\n                                         \"2nd.OnTestStart\",\n                                         \"3rd.OnTestStart\",\n                                         \"ListenerTest::SetUp\",\n                                         \"ListenerTest::* Test Body\",\n                                         \"1st.OnTestPartResult\",\n                                         \"2nd.OnTestPartResult\",\n                                         \"3rd.OnTestPartResult\",\n                                         \"ListenerTest::TearDown\",\n                                         \"3rd.OnTestEnd\",\n                                         \"2nd.OnTestEnd\",\n                                         \"1st.OnTestEnd\",\n                                         \"1st.OnTestStart\",\n                                         \"2nd.OnTestStart\",\n                                         \"3rd.OnTestStart\",\n                                         \"ListenerTest::SetUp\",\n                                         \"ListenerTest::* Test Body\",\n                                         \"1st.OnTestPartResult\",\n                                         \"2nd.OnTestPartResult\",\n                                         \"3rd.OnTestPartResult\",\n                                         \"ListenerTest::TearDown\",\n                                         \"3rd.OnTestEnd\",\n                                         \"2nd.OnTestEnd\",\n                                         \"1st.OnTestEnd\",\n                                         \"ListenerTest::TearDownTestSuite\",\n                                         \"3rd.OnTestSuiteEnd\",\n                                         \"1st.OnEnvironmentsTearDownStart\",\n                           
              \"2nd.OnEnvironmentsTearDownStart\",\n                                         \"3rd.OnEnvironmentsTearDownStart\",\n                                         \"Environment::TearDown\",\n                                         \"3rd.OnEnvironmentsTearDownEnd\",\n                                         \"2nd.OnEnvironmentsTearDownEnd\",\n                                         \"1st.OnEnvironmentsTearDownEnd\",\n                                         \"3rd.OnTestIterationEnd(0)\",\n                                         \"2nd.OnTestIterationEnd(0)\",\n                                         \"1st.OnTestIterationEnd(0)\",\n                                         \"1st.OnTestIterationStart(1)\",\n                                         \"2nd.OnTestIterationStart(1)\",\n                                         \"3rd.OnTestIterationStart(1)\",\n                                         \"1st.OnEnvironmentsSetUpStart\",\n                                         \"2nd.OnEnvironmentsSetUpStart\",\n                                         \"3rd.OnEnvironmentsSetUpStart\",\n                                         \"Environment::SetUp\",\n                                         \"3rd.OnEnvironmentsSetUpEnd\",\n                                         \"2nd.OnEnvironmentsSetUpEnd\",\n                                         \"1st.OnEnvironmentsSetUpEnd\",\n                                         \"3rd.OnTestSuiteStart\",\n                                         \"ListenerTest::SetUpTestSuite\",\n                                         \"1st.OnTestStart\",\n                                         \"2nd.OnTestStart\",\n                                         \"3rd.OnTestStart\",\n                                         \"ListenerTest::SetUp\",\n                                         \"ListenerTest::* Test Body\",\n                                         \"1st.OnTestPartResult\",\n                                         
\"2nd.OnTestPartResult\",\n                                         \"3rd.OnTestPartResult\",\n                                         \"ListenerTest::TearDown\",\n                                         \"3rd.OnTestEnd\",\n                                         \"2nd.OnTestEnd\",\n                                         \"1st.OnTestEnd\",\n                                         \"1st.OnTestStart\",\n                                         \"2nd.OnTestStart\",\n                                         \"3rd.OnTestStart\",\n                                         \"ListenerTest::SetUp\",\n                                         \"ListenerTest::* Test Body\",\n                                         \"1st.OnTestPartResult\",\n                                         \"2nd.OnTestPartResult\",\n                                         \"3rd.OnTestPartResult\",\n                                         \"ListenerTest::TearDown\",\n                                         \"3rd.OnTestEnd\",\n                                         \"2nd.OnTestEnd\",\n                                         \"1st.OnTestEnd\",\n                                         \"ListenerTest::TearDownTestSuite\",\n                                         \"3rd.OnTestSuiteEnd\",\n                                         \"1st.OnEnvironmentsTearDownStart\",\n                                         \"2nd.OnEnvironmentsTearDownStart\",\n                                         \"3rd.OnEnvironmentsTearDownStart\",\n                                         \"Environment::TearDown\",\n                                         \"3rd.OnEnvironmentsTearDownEnd\",\n                                         \"2nd.OnEnvironmentsTearDownEnd\",\n                                         \"1st.OnEnvironmentsTearDownEnd\",\n                                         \"3rd.OnTestIterationEnd(1)\",\n                                         \"2nd.OnTestIterationEnd(1)\",\n                                      
   \"1st.OnTestIterationEnd(1)\",\n                                         \"3rd.OnTestProgramEnd\",\n                                         \"2nd.OnTestProgramEnd\",\n                                         \"1st.OnTestProgramEnd\"};\n#endif  // GTEST_REMOVE_LEGACY_TEST_CASEAPI_\n\n  VerifyResults(events,\n                expected_events,\n                sizeof(expected_events)/sizeof(expected_events[0]));\n\n  // We need to check manually for ad hoc test failures that happen after\n  // RUN_ALL_TESTS finishes.\n  if (UnitTest::GetInstance()->Failed())\n    ret_val = 1;\n\n  return ret_val;\n}\n"
  },
  {
    "path": "test/googletest/test/googletest-message-test.cc",
    "content": "// Copyright 2005, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n//\n// Tests for the Message class.\n\n#include \"gtest/gtest-message.h\"\n\n#include \"gtest/gtest.h\"\n\nnamespace {\n\nusing ::testing::Message;\n\n// Tests the testing::Message class\n\n// Tests the default constructor.\nTEST(MessageTest, DefaultConstructor) {\n  const Message msg;\n  EXPECT_EQ(\"\", msg.GetString());\n}\n\n// Tests the copy constructor.\nTEST(MessageTest, CopyConstructor) {\n  const Message msg1(\"Hello\");\n  const Message msg2(msg1);\n  EXPECT_EQ(\"Hello\", msg2.GetString());\n}\n\n// Tests constructing a Message from a C-string.\nTEST(MessageTest, ConstructsFromCString) {\n  Message msg(\"Hello\");\n  EXPECT_EQ(\"Hello\", msg.GetString());\n}\n\n// Tests streaming a float.\nTEST(MessageTest, StreamsFloat) {\n  const std::string s = (Message() << 1.23456F << \" \" << 2.34567F).GetString();\n  // Both numbers should be printed with enough precision.\n  EXPECT_PRED_FORMAT2(testing::IsSubstring, \"1.234560\", s.c_str());\n  EXPECT_PRED_FORMAT2(testing::IsSubstring, \" 2.345669\", s.c_str());\n}\n\n// Tests streaming a double.\nTEST(MessageTest, StreamsDouble) {\n  const std::string s = (Message() << 1260570880.4555497 << \" \"\n                                  << 1260572265.1954534).GetString();\n  // Both numbers should be printed with enough precision.\n  EXPECT_PRED_FORMAT2(testing::IsSubstring, \"1260570880.45\", s.c_str());\n  EXPECT_PRED_FORMAT2(testing::IsSubstring, \" 1260572265.19\", 
s.c_str());\n}\n\n// Tests streaming a non-char pointer.\nTEST(MessageTest, StreamsPointer) {\n  int n = 0;\n  int* p = &n;\n  EXPECT_NE(\"(null)\", (Message() << p).GetString());\n}\n\n// Tests streaming a NULL non-char pointer.\nTEST(MessageTest, StreamsNullPointer) {\n  int* p = nullptr;\n  EXPECT_EQ(\"(null)\", (Message() << p).GetString());\n}\n\n// Tests streaming a C string.\nTEST(MessageTest, StreamsCString) {\n  EXPECT_EQ(\"Foo\", (Message() << \"Foo\").GetString());\n}\n\n// Tests streaming a NULL C string.\nTEST(MessageTest, StreamsNullCString) {\n  char* p = nullptr;\n  EXPECT_EQ(\"(null)\", (Message() << p).GetString());\n}\n\n// Tests streaming std::string.\nTEST(MessageTest, StreamsString) {\n  const ::std::string str(\"Hello\");\n  EXPECT_EQ(\"Hello\", (Message() << str).GetString());\n}\n\n// Tests that we can output strings containing embedded NULs.\nTEST(MessageTest, StreamsStringWithEmbeddedNUL) {\n  const char char_array_with_nul[] =\n      \"Here's a NUL\\0 and some more string\";\n  const ::std::string string_with_nul(char_array_with_nul,\n                                      sizeof(char_array_with_nul) - 1);\n  EXPECT_EQ(\"Here's a NUL\\\\0 and some more string\",\n            (Message() << string_with_nul).GetString());\n}\n\n// Tests streaming a NUL char.\nTEST(MessageTest, StreamsNULChar) {\n  EXPECT_EQ(\"\\\\0\", (Message() << '\\0').GetString());\n}\n\n// Tests streaming int.\nTEST(MessageTest, StreamsInt) {\n  EXPECT_EQ(\"123\", (Message() << 123).GetString());\n}\n\n// Tests that basic IO manipulators (endl, ends, and flush) can be\n// streamed to Message.\nTEST(MessageTest, StreamsBasicIoManip) {\n  EXPECT_EQ(\"Line 1.\\nA NUL char \\\\0 in line 2.\",\n               (Message() << \"Line 1.\" << std::endl\n                         << \"A NUL char \" << std::ends << std::flush\n                         << \" in line 2.\").GetString());\n}\n\n// Tests Message::GetString()\nTEST(MessageTest, GetString) {\n  Message msg;\n  msg << 1 << 
\" lamb\";\n  EXPECT_EQ(\"1 lamb\", msg.GetString());\n}\n\n// Tests streaming a Message object to an ostream.\nTEST(MessageTest, StreamsToOStream) {\n  Message msg(\"Hello\");\n  ::std::stringstream ss;\n  ss << msg;\n  EXPECT_EQ(\"Hello\", testing::internal::StringStreamToString(&ss));\n}\n\n// Tests that a Message object doesn't take up too much stack space.\nTEST(MessageTest, DoesNotTakeUpMuchStackSpace) {\n  EXPECT_LE(sizeof(Message), 16U);\n}\n\n}  // namespace\n"
  },
  {
    "path": "test/googletest/test/googletest-options-test.cc",
    "content": "// Copyright 2008, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n//\n// Google Test UnitTestOptions tests\n//\n// This file tests classes and functions used internally by\n// Google Test.  They are subject to change without notice.\n//\n// This file is #included from gtest.cc, to avoid changing build or\n// make-files on Windows and other platforms. 
Do not #include this file\n// anywhere else!\n\n#include \"gtest/gtest.h\"\n\n#if GTEST_OS_WINDOWS_MOBILE\n# include <windows.h>\n#elif GTEST_OS_WINDOWS\n# include <direct.h>\n#endif  // GTEST_OS_WINDOWS_MOBILE\n\n#include \"src/gtest-internal-inl.h\"\n\nnamespace testing {\nnamespace internal {\nnamespace {\n\n// Turns the given relative path into an absolute path.\nFilePath GetAbsolutePathOf(const FilePath& relative_path) {\n  return FilePath::ConcatPaths(FilePath::GetCurrentDir(), relative_path);\n}\n\n// Testing UnitTestOptions::GetOutputFormat/GetOutputFile.\n\nTEST(XmlOutputTest, GetOutputFormatDefault) {\n  GTEST_FLAG(output) = \"\";\n  EXPECT_STREQ(\"\", UnitTestOptions::GetOutputFormat().c_str());\n}\n\nTEST(XmlOutputTest, GetOutputFormat) {\n  GTEST_FLAG(output) = \"xml:filename\";\n  EXPECT_STREQ(\"xml\", UnitTestOptions::GetOutputFormat().c_str());\n}\n\nTEST(XmlOutputTest, GetOutputFileDefault) {\n  GTEST_FLAG(output) = \"\";\n  EXPECT_EQ(GetAbsolutePathOf(FilePath(\"test_detail.xml\")).string(),\n            UnitTestOptions::GetAbsolutePathToOutputFile());\n}\n\nTEST(XmlOutputTest, GetOutputFileSingleFile) {\n  GTEST_FLAG(output) = \"xml:filename.abc\";\n  EXPECT_EQ(GetAbsolutePathOf(FilePath(\"filename.abc\")).string(),\n            UnitTestOptions::GetAbsolutePathToOutputFile());\n}\n\nTEST(XmlOutputTest, GetOutputFileFromDirectoryPath) {\n  GTEST_FLAG(output) = \"xml:path\" GTEST_PATH_SEP_;\n  const std::string expected_output_file =\n      GetAbsolutePathOf(\n          FilePath(std::string(\"path\") + GTEST_PATH_SEP_ +\n                   GetCurrentExecutableName().string() + \".xml\")).string();\n  const std::string& output_file =\n      UnitTestOptions::GetAbsolutePathToOutputFile();\n#if GTEST_OS_WINDOWS\n  EXPECT_STRCASEEQ(expected_output_file.c_str(), output_file.c_str());\n#else\n  EXPECT_EQ(expected_output_file, output_file.c_str());\n#endif\n}\n\nTEST(OutputFileHelpersTest, GetCurrentExecutableName) {\n  const std::string exe_str = 
GetCurrentExecutableName().string();\n#if GTEST_OS_WINDOWS\n  const bool success =\n      _strcmpi(\"googletest-options-test\", exe_str.c_str()) == 0 ||\n      _strcmpi(\"gtest-options-ex_test\", exe_str.c_str()) == 0 ||\n      _strcmpi(\"gtest_all_test\", exe_str.c_str()) == 0 ||\n      _strcmpi(\"gtest_dll_test\", exe_str.c_str()) == 0;\n#elif GTEST_OS_OS2\n  const bool success =\n      strcasecmp(\"googletest-options-test\", exe_str.c_str()) == 0 ||\n      strcasecmp(\"gtest-options-ex_test\", exe_str.c_str()) == 0 ||\n      strcasecmp(\"gtest_all_test\", exe_str.c_str()) == 0 ||\n      strcasecmp(\"gtest_dll_test\", exe_str.c_str()) == 0;\n#elif GTEST_OS_FUCHSIA\n  const bool success = exe_str == \"app\";\n#else\n  const bool success =\n      exe_str == \"googletest-options-test\" ||\n      exe_str == \"gtest_all_test\" ||\n      exe_str == \"lt-gtest_all_test\" ||\n      exe_str == \"gtest_dll_test\";\n#endif  // GTEST_OS_WINDOWS\n  if (!success)\n    FAIL() << \"GetCurrentExecutableName() returns \" << exe_str;\n}\n\n#if !GTEST_OS_FUCHSIA\n\nclass XmlOutputChangeDirTest : public Test {\n protected:\n  void SetUp() override {\n    original_working_dir_ = FilePath::GetCurrentDir();\n    posix::ChDir(\"..\");\n    // This will make the test fail if run from the root directory.\n    EXPECT_NE(original_working_dir_.string(),\n              FilePath::GetCurrentDir().string());\n  }\n\n  void TearDown() override {\n    posix::ChDir(original_working_dir_.string().c_str());\n  }\n\n  FilePath original_working_dir_;\n};\n\nTEST_F(XmlOutputChangeDirTest, PreserveOriginalWorkingDirWithDefault) {\n  GTEST_FLAG(output) = \"\";\n  EXPECT_EQ(FilePath::ConcatPaths(original_working_dir_,\n                                  FilePath(\"test_detail.xml\")).string(),\n            UnitTestOptions::GetAbsolutePathToOutputFile());\n}\n\nTEST_F(XmlOutputChangeDirTest, PreserveOriginalWorkingDirWithDefaultXML) {\n  GTEST_FLAG(output) = \"xml\";\n  
EXPECT_EQ(FilePath::ConcatPaths(original_working_dir_,\n                                  FilePath(\"test_detail.xml\")).string(),\n            UnitTestOptions::GetAbsolutePathToOutputFile());\n}\n\nTEST_F(XmlOutputChangeDirTest, PreserveOriginalWorkingDirWithRelativeFile) {\n  GTEST_FLAG(output) = \"xml:filename.abc\";\n  EXPECT_EQ(FilePath::ConcatPaths(original_working_dir_,\n                                  FilePath(\"filename.abc\")).string(),\n            UnitTestOptions::GetAbsolutePathToOutputFile());\n}\n\nTEST_F(XmlOutputChangeDirTest, PreserveOriginalWorkingDirWithRelativePath) {\n  GTEST_FLAG(output) = \"xml:path\" GTEST_PATH_SEP_;\n  const std::string expected_output_file =\n      FilePath::ConcatPaths(\n          original_working_dir_,\n          FilePath(std::string(\"path\") + GTEST_PATH_SEP_ +\n                   GetCurrentExecutableName().string() + \".xml\")).string();\n  const std::string& output_file =\n      UnitTestOptions::GetAbsolutePathToOutputFile();\n#if GTEST_OS_WINDOWS\n  EXPECT_STRCASEEQ(expected_output_file.c_str(), output_file.c_str());\n#else\n  EXPECT_EQ(expected_output_file, output_file.c_str());\n#endif\n}\n\nTEST_F(XmlOutputChangeDirTest, PreserveOriginalWorkingDirWithAbsoluteFile) {\n#if GTEST_OS_WINDOWS\n  GTEST_FLAG(output) = \"xml:c:\\\\tmp\\\\filename.abc\";\n  EXPECT_EQ(FilePath(\"c:\\\\tmp\\\\filename.abc\").string(),\n            UnitTestOptions::GetAbsolutePathToOutputFile());\n#else\n  GTEST_FLAG(output) =\"xml:/tmp/filename.abc\";\n  EXPECT_EQ(FilePath(\"/tmp/filename.abc\").string(),\n            UnitTestOptions::GetAbsolutePathToOutputFile());\n#endif\n}\n\nTEST_F(XmlOutputChangeDirTest, PreserveOriginalWorkingDirWithAbsolutePath) {\n#if GTEST_OS_WINDOWS\n  const std::string path = \"c:\\\\tmp\\\\\";\n#else\n  const std::string path = \"/tmp/\";\n#endif\n\n  GTEST_FLAG(output) = \"xml:\" + path;\n  const std::string expected_output_file =\n      path + GetCurrentExecutableName().string() + \".xml\";\n  const 
std::string& output_file =\n      UnitTestOptions::GetAbsolutePathToOutputFile();\n\n#if GTEST_OS_WINDOWS\n  EXPECT_STRCASEEQ(expected_output_file.c_str(), output_file.c_str());\n#else\n  EXPECT_EQ(expected_output_file, output_file.c_str());\n#endif\n}\n\n#endif  // !GTEST_OS_FUCHSIA\n\n}  // namespace\n}  // namespace internal\n}  // namespace testing\n"
  },
  {
    "path": "test/googletest/test/googletest-output-test-golden-lin.txt",
    "content": "The non-test part of the code is expected to have 2 failures.\n\ngoogletest-output-test_.cc:#: Failure\nValue of: false\n  Actual: false\nExpected: true\nStack trace: (omitted)\n\ngoogletest-output-test_.cc:#: Failure\nExpected equality of these values:\n  2\n  3\nStack trace: (omitted)\n\n\u001b[0;32m[==========] \u001b[mRunning 84 tests from 39 test suites.\n\u001b[0;32m[----------] \u001b[mGlobal test environment set-up.\nFooEnvironment::SetUp() called.\nBarEnvironment::SetUp() called.\n\u001b[0;32m[----------] \u001b[m1 test from ADeathTest\n\u001b[0;32m[ RUN      ] \u001b[mADeathTest.ShouldRunFirst\n\u001b[0;32m[       OK ] \u001b[mADeathTest.ShouldRunFirst\n\u001b[0;32m[----------] \u001b[m1 test from ATypedDeathTest/0, where TypeParam = int\n\u001b[0;32m[ RUN      ] \u001b[mATypedDeathTest/0.ShouldRunFirst\n\u001b[0;32m[       OK ] \u001b[mATypedDeathTest/0.ShouldRunFirst\n\u001b[0;32m[----------] \u001b[m1 test from ATypedDeathTest/1, where TypeParam = double\n\u001b[0;32m[ RUN      ] \u001b[mATypedDeathTest/1.ShouldRunFirst\n\u001b[0;32m[       OK ] \u001b[mATypedDeathTest/1.ShouldRunFirst\n\u001b[0;32m[----------] \u001b[m1 test from My/ATypeParamDeathTest/0, where TypeParam = int\n\u001b[0;32m[ RUN      ] \u001b[mMy/ATypeParamDeathTest/0.ShouldRunFirst\n\u001b[0;32m[       OK ] \u001b[mMy/ATypeParamDeathTest/0.ShouldRunFirst\n\u001b[0;32m[----------] \u001b[m1 test from My/ATypeParamDeathTest/1, where TypeParam = double\n\u001b[0;32m[ RUN      ] \u001b[mMy/ATypeParamDeathTest/1.ShouldRunFirst\n\u001b[0;32m[       OK ] \u001b[mMy/ATypeParamDeathTest/1.ShouldRunFirst\n\u001b[0;32m[----------] \u001b[m2 tests from PassingTest\n\u001b[0;32m[ RUN      ] \u001b[mPassingTest.PassingTest1\n\u001b[0;32m[       OK ] \u001b[mPassingTest.PassingTest1\n\u001b[0;32m[ RUN      ] \u001b[mPassingTest.PassingTest2\n\u001b[0;32m[       OK ] \u001b[mPassingTest.PassingTest2\n\u001b[0;32m[----------] \u001b[m2 tests from NonfatalFailureTest\n\u001b[0;32m[ RUN 
     ] \u001b[mNonfatalFailureTest.EscapesStringOperands\ngoogletest-output-test_.cc:#: Failure\nExpected equality of these values:\n  kGoldenString\n    Which is: \"\\\"Line\"\n  actual\n    Which is: \"actual \\\"string\\\"\"\nStack trace: (omitted)\n\ngoogletest-output-test_.cc:#: Failure\nExpected equality of these values:\n  golden\n    Which is: \"\\\"Line\"\n  actual\n    Which is: \"actual \\\"string\\\"\"\nStack trace: (omitted)\n\n\u001b[0;31m[  FAILED  ] \u001b[mNonfatalFailureTest.EscapesStringOperands\n\u001b[0;32m[ RUN      ] \u001b[mNonfatalFailureTest.DiffForLongStrings\ngoogletest-output-test_.cc:#: Failure\nExpected equality of these values:\n  golden_str\n    Which is: \"\\\"Line\\0 1\\\"\\nLine 2\"\n  \"Line 2\"\nWith diff:\n@@ -1,2 @@\n-\\\"Line\\0 1\\\"\n Line 2\n\nStack trace: (omitted)\n\n\u001b[0;31m[  FAILED  ] \u001b[mNonfatalFailureTest.DiffForLongStrings\n\u001b[0;32m[----------] \u001b[m3 tests from FatalFailureTest\n\u001b[0;32m[ RUN      ] \u001b[mFatalFailureTest.FatalFailureInSubroutine\n(expecting a failure that x should be 1)\ngoogletest-output-test_.cc:#: Failure\nExpected equality of these values:\n  1\n  x\n    Which is: 2\nStack trace: (omitted)\n\n\u001b[0;31m[  FAILED  ] \u001b[mFatalFailureTest.FatalFailureInSubroutine\n\u001b[0;32m[ RUN      ] \u001b[mFatalFailureTest.FatalFailureInNestedSubroutine\n(expecting a failure that x should be 1)\ngoogletest-output-test_.cc:#: Failure\nExpected equality of these values:\n  1\n  x\n    Which is: 2\nStack trace: (omitted)\n\n\u001b[0;31m[  FAILED  ] \u001b[mFatalFailureTest.FatalFailureInNestedSubroutine\n\u001b[0;32m[ RUN      ] \u001b[mFatalFailureTest.NonfatalFailureInSubroutine\n(expecting a failure on false)\ngoogletest-output-test_.cc:#: Failure\nValue of: false\n  Actual: false\nExpected: true\nStack trace: (omitted)\n\n\u001b[0;31m[  FAILED  ] \u001b[mFatalFailureTest.NonfatalFailureInSubroutine\n\u001b[0;32m[----------] \u001b[m1 test from LoggingTest\n\u001b[0;32m[ RUN   
   ] \u001b[mLoggingTest.InterleavingLoggingAndAssertions\n(expecting 2 failures on (3) >= (a[i]))\ni == 0\ni == 1\ngoogletest-output-test_.cc:#: Failure\nExpected: (3) >= (a[i]), actual: 3 vs 9\nStack trace: (omitted)\n\ni == 2\ni == 3\ngoogletest-output-test_.cc:#: Failure\nExpected: (3) >= (a[i]), actual: 3 vs 6\nStack trace: (omitted)\n\n\u001b[0;31m[  FAILED  ] \u001b[mLoggingTest.InterleavingLoggingAndAssertions\n\u001b[0;32m[----------] \u001b[m7 tests from SCOPED_TRACETest\n\u001b[0;32m[ RUN      ] \u001b[mSCOPED_TRACETest.AcceptedValues\ngoogletest-output-test_.cc:#: Failure\nFailed\nJust checking that all these values work fine.\nGoogle Test trace:\ngoogletest-output-test_.cc:#: (null)\ngoogletest-output-test_.cc:#: 1337\ngoogletest-output-test_.cc:#: std::string\ngoogletest-output-test_.cc:#: literal string\nStack trace: (omitted)\n\n\u001b[0;31m[  FAILED  ] \u001b[mSCOPED_TRACETest.AcceptedValues\n\u001b[0;32m[ RUN      ] \u001b[mSCOPED_TRACETest.ObeysScopes\n(expected to fail)\ngoogletest-output-test_.cc:#: Failure\nFailed\nThis failure is expected, and shouldn't have a trace.\nStack trace: (omitted)\n\ngoogletest-output-test_.cc:#: Failure\nFailed\nThis failure is expected, and should have a trace.\nGoogle Test trace:\ngoogletest-output-test_.cc:#: Expected trace\nStack trace: (omitted)\n\ngoogletest-output-test_.cc:#: Failure\nFailed\nThis failure is expected, and shouldn't have a trace.\nStack trace: (omitted)\n\n\u001b[0;31m[  FAILED  ] \u001b[mSCOPED_TRACETest.ObeysScopes\n\u001b[0;32m[ RUN      ] \u001b[mSCOPED_TRACETest.WorksInLoop\n(expected to fail)\ngoogletest-output-test_.cc:#: Failure\nExpected equality of these values:\n  2\n  n\n    Which is: 1\nGoogle Test trace:\ngoogletest-output-test_.cc:#: i = 1\nStack trace: (omitted)\n\ngoogletest-output-test_.cc:#: Failure\nExpected equality of these values:\n  1\n  n\n    Which is: 2\nGoogle Test trace:\ngoogletest-output-test_.cc:#: i = 2\nStack trace: (omitted)\n\n\u001b[0;31m[  FAILED  ] 
\u001b[mSCOPED_TRACETest.WorksInLoop\n\u001b[0;32m[ RUN      ] \u001b[mSCOPED_TRACETest.WorksInSubroutine\n(expected to fail)\ngoogletest-output-test_.cc:#: Failure\nExpected equality of these values:\n  2\n  n\n    Which is: 1\nGoogle Test trace:\ngoogletest-output-test_.cc:#: n = 1\nStack trace: (omitted)\n\ngoogletest-output-test_.cc:#: Failure\nExpected equality of these values:\n  1\n  n\n    Which is: 2\nGoogle Test trace:\ngoogletest-output-test_.cc:#: n = 2\nStack trace: (omitted)\n\n\u001b[0;31m[  FAILED  ] \u001b[mSCOPED_TRACETest.WorksInSubroutine\n\u001b[0;32m[ RUN      ] \u001b[mSCOPED_TRACETest.CanBeNested\n(expected to fail)\ngoogletest-output-test_.cc:#: Failure\nExpected equality of these values:\n  1\n  n\n    Which is: 2\nGoogle Test trace:\ngoogletest-output-test_.cc:#: n = 2\ngoogletest-output-test_.cc:#: \nStack trace: (omitted)\n\n\u001b[0;31m[  FAILED  ] \u001b[mSCOPED_TRACETest.CanBeNested\n\u001b[0;32m[ RUN      ] \u001b[mSCOPED_TRACETest.CanBeRepeated\n(expected to fail)\ngoogletest-output-test_.cc:#: Failure\nFailed\nThis failure is expected, and should contain trace point A.\nGoogle Test trace:\ngoogletest-output-test_.cc:#: A\nStack trace: (omitted)\n\ngoogletest-output-test_.cc:#: Failure\nFailed\nThis failure is expected, and should contain trace point A and B.\nGoogle Test trace:\ngoogletest-output-test_.cc:#: B\ngoogletest-output-test_.cc:#: A\nStack trace: (omitted)\n\ngoogletest-output-test_.cc:#: Failure\nFailed\nThis failure is expected, and should contain trace point A, B, and C.\nGoogle Test trace:\ngoogletest-output-test_.cc:#: C\ngoogletest-output-test_.cc:#: B\ngoogletest-output-test_.cc:#: A\nStack trace: (omitted)\n\ngoogletest-output-test_.cc:#: Failure\nFailed\nThis failure is expected, and should contain trace point A, B, and D.\nGoogle Test trace:\ngoogletest-output-test_.cc:#: D\ngoogletest-output-test_.cc:#: B\ngoogletest-output-test_.cc:#: A\nStack trace: (omitted)\n\n\u001b[0;31m[  FAILED  ] 
\u001b[mSCOPED_TRACETest.CanBeRepeated\n\u001b[0;32m[ RUN      ] \u001b[mSCOPED_TRACETest.WorksConcurrently\n(expecting 6 failures)\ngoogletest-output-test_.cc:#: Failure\nFailed\nExpected failure #1 (in thread B, only trace B alive).\nGoogle Test trace:\ngoogletest-output-test_.cc:#: Trace B\nStack trace: (omitted)\n\ngoogletest-output-test_.cc:#: Failure\nFailed\nExpected failure #2 (in thread A, trace A & B both alive).\nGoogle Test trace:\ngoogletest-output-test_.cc:#: Trace A\nStack trace: (omitted)\n\ngoogletest-output-test_.cc:#: Failure\nFailed\nExpected failure #3 (in thread B, trace A & B both alive).\nGoogle Test trace:\ngoogletest-output-test_.cc:#: Trace B\nStack trace: (omitted)\n\ngoogletest-output-test_.cc:#: Failure\nFailed\nExpected failure #4 (in thread B, only trace A alive).\nStack trace: (omitted)\n\ngoogletest-output-test_.cc:#: Failure\nFailed\nExpected failure #5 (in thread A, only trace A alive).\nGoogle Test trace:\ngoogletest-output-test_.cc:#: Trace A\nStack trace: (omitted)\n\ngoogletest-output-test_.cc:#: Failure\nFailed\nExpected failure #6 (in thread A, no trace alive).\nStack trace: (omitted)\n\n\u001b[0;31m[  FAILED  ] \u001b[mSCOPED_TRACETest.WorksConcurrently\n\u001b[0;32m[----------] \u001b[m1 test from ScopedTraceTest\n\u001b[0;32m[ RUN      ] \u001b[mScopedTraceTest.WithExplicitFileAndLine\ngoogletest-output-test_.cc:#: Failure\nFailed\nCheck that the trace is attached to a particular location.\nGoogle Test trace:\nexplicit_file.cc:123: expected trace message\nStack trace: (omitted)\n\n\u001b[0;31m[  FAILED  ] \u001b[mScopedTraceTest.WithExplicitFileAndLine\n\u001b[0;32m[----------] \u001b[m1 test from NonFatalFailureInFixtureConstructorTest\n\u001b[0;32m[ RUN      ] \u001b[mNonFatalFailureInFixtureConstructorTest.FailureInConstructor\n(expecting 5 failures)\ngoogletest-output-test_.cc:#: Failure\nFailed\nExpected failure #1, in the test fixture c'tor.\nStack trace: (omitted)\n\ngoogletest-output-test_.cc:#: 
Failure\nFailed\nExpected failure #2, in SetUp().\nStack trace: (omitted)\n\ngoogletest-output-test_.cc:#: Failure\nFailed\nExpected failure #3, in the test body.\nStack trace: (omitted)\n\ngoogletest-output-test_.cc:#: Failure\nFailed\nExpected failure #4, in TearDown.\nStack trace: (omitted)\n\ngoogletest-output-test_.cc:#: Failure\nFailed\nExpected failure #5, in the test fixture d'tor.\nStack trace: (omitted)\n\n\u001b[0;31m[  FAILED  ] \u001b[mNonFatalFailureInFixtureConstructorTest.FailureInConstructor\n\u001b[0;32m[----------] \u001b[m1 test from FatalFailureInFixtureConstructorTest\n\u001b[0;32m[ RUN      ] \u001b[mFatalFailureInFixtureConstructorTest.FailureInConstructor\n(expecting 2 failures)\ngoogletest-output-test_.cc:#: Failure\nFailed\nExpected failure #1, in the test fixture c'tor.\nStack trace: (omitted)\n\ngoogletest-output-test_.cc:#: Failure\nFailed\nExpected failure #2, in the test fixture d'tor.\nStack trace: (omitted)\n\n\u001b[0;31m[  FAILED  ] \u001b[mFatalFailureInFixtureConstructorTest.FailureInConstructor\n\u001b[0;32m[----------] \u001b[m1 test from NonFatalFailureInSetUpTest\n\u001b[0;32m[ RUN      ] \u001b[mNonFatalFailureInSetUpTest.FailureInSetUp\n(expecting 4 failures)\ngoogletest-output-test_.cc:#: Failure\nFailed\nExpected failure #1, in SetUp().\nStack trace: (omitted)\n\ngoogletest-output-test_.cc:#: Failure\nFailed\nExpected failure #2, in the test function.\nStack trace: (omitted)\n\ngoogletest-output-test_.cc:#: Failure\nFailed\nExpected failure #3, in TearDown().\nStack trace: (omitted)\n\ngoogletest-output-test_.cc:#: Failure\nFailed\nExpected failure #4, in the test fixture d'tor.\nStack trace: (omitted)\n\n\u001b[0;31m[  FAILED  ] \u001b[mNonFatalFailureInSetUpTest.FailureInSetUp\n\u001b[0;32m[----------] \u001b[m1 test from FatalFailureInSetUpTest\n\u001b[0;32m[ RUN      ] \u001b[mFatalFailureInSetUpTest.FailureInSetUp\n(expecting 3 failures)\ngoogletest-output-test_.cc:#: Failure\nFailed\nExpected failure #1, in 
SetUp().\nStack trace: (omitted)\n\ngoogletest-output-test_.cc:#: Failure\nFailed\nExpected failure #2, in TearDown().\nStack trace: (omitted)\n\ngoogletest-output-test_.cc:#: Failure\nFailed\nExpected failure #3, in the test fixture d'tor.\nStack trace: (omitted)\n\n\u001b[0;31m[  FAILED  ] \u001b[mFatalFailureInSetUpTest.FailureInSetUp\n\u001b[0;32m[----------] \u001b[m1 test from AddFailureAtTest\n\u001b[0;32m[ RUN      ] \u001b[mAddFailureAtTest.MessageContainsSpecifiedFileAndLineNumber\nfoo.cc:42: Failure\nFailed\nExpected nonfatal failure in foo.cc\nStack trace: (omitted)\n\n\u001b[0;31m[  FAILED  ] \u001b[mAddFailureAtTest.MessageContainsSpecifiedFileAndLineNumber\n\u001b[0;32m[----------] \u001b[m1 test from GtestFailAtTest\n\u001b[0;32m[ RUN      ] \u001b[mGtestFailAtTest.MessageContainsSpecifiedFileAndLineNumber\nfoo.cc:42: Failure\nFailed\nExpected fatal failure in foo.cc\nStack trace: (omitted)\n\n\u001b[0;31m[  FAILED  ] \u001b[mGtestFailAtTest.MessageContainsSpecifiedFileAndLineNumber\n\u001b[0;32m[----------] \u001b[m4 tests from MixedUpTestSuiteTest\n\u001b[0;32m[ RUN      ] \u001b[mMixedUpTestSuiteTest.FirstTestFromNamespaceFoo\n\u001b[0;32m[       OK ] \u001b[mMixedUpTestSuiteTest.FirstTestFromNamespaceFoo\n\u001b[0;32m[ RUN      ] \u001b[mMixedUpTestSuiteTest.SecondTestFromNamespaceFoo\n\u001b[0;32m[       OK ] \u001b[mMixedUpTestSuiteTest.SecondTestFromNamespaceFoo\n\u001b[0;32m[ RUN      ] \u001b[mMixedUpTestSuiteTest.ThisShouldFail\ngtest.cc:#: Failure\nFailed\nAll tests in the same test suite must use the same test fixture\nclass.  However, in test suite MixedUpTestSuiteTest,\nyou defined test FirstTestFromNamespaceFoo and test ThisShouldFail\nusing two different test fixture classes.  This can happen if\nthe two classes are from different namespaces or translation\nunits and have the same name.  
You should probably rename one\nof the classes to put the tests into different test suites.\nStack trace: (omitted)\n\n\u001b[0;31m[  FAILED  ] \u001b[mMixedUpTestSuiteTest.ThisShouldFail\n\u001b[0;32m[ RUN      ] \u001b[mMixedUpTestSuiteTest.ThisShouldFailToo\ngtest.cc:#: Failure\nFailed\nAll tests in the same test suite must use the same test fixture\nclass.  However, in test suite MixedUpTestSuiteTest,\nyou defined test FirstTestFromNamespaceFoo and test ThisShouldFailToo\nusing two different test fixture classes.  This can happen if\nthe two classes are from different namespaces or translation\nunits and have the same name.  You should probably rename one\nof the classes to put the tests into different test suites.\nStack trace: (omitted)\n\n\u001b[0;31m[  FAILED  ] \u001b[mMixedUpTestSuiteTest.ThisShouldFailToo\n\u001b[0;32m[----------] \u001b[m2 tests from MixedUpTestSuiteWithSameTestNameTest\n\u001b[0;32m[ RUN      ] \u001b[mMixedUpTestSuiteWithSameTestNameTest.TheSecondTestWithThisNameShouldFail\n\u001b[0;32m[       OK ] \u001b[mMixedUpTestSuiteWithSameTestNameTest.TheSecondTestWithThisNameShouldFail\n\u001b[0;32m[ RUN      ] \u001b[mMixedUpTestSuiteWithSameTestNameTest.TheSecondTestWithThisNameShouldFail\ngtest.cc:#: Failure\nFailed\nAll tests in the same test suite must use the same test fixture\nclass.  However, in test suite MixedUpTestSuiteWithSameTestNameTest,\nyou defined test TheSecondTestWithThisNameShouldFail and test TheSecondTestWithThisNameShouldFail\nusing two different test fixture classes.  This can happen if\nthe two classes are from different namespaces or translation\nunits and have the same name.  
You should probably rename one\nof the classes to put the tests into different test suites.\nStack trace: (omitted)\n\n\u001b[0;31m[  FAILED  ] \u001b[mMixedUpTestSuiteWithSameTestNameTest.TheSecondTestWithThisNameShouldFail\n\u001b[0;32m[----------] \u001b[m2 tests from TEST_F_before_TEST_in_same_test_case\n\u001b[0;32m[ RUN      ] \u001b[mTEST_F_before_TEST_in_same_test_case.DefinedUsingTEST_F\n\u001b[0;32m[       OK ] \u001b[mTEST_F_before_TEST_in_same_test_case.DefinedUsingTEST_F\n\u001b[0;32m[ RUN      ] \u001b[mTEST_F_before_TEST_in_same_test_case.DefinedUsingTESTAndShouldFail\ngtest.cc:#: Failure\nFailed\nAll tests in the same test suite must use the same test fixture\nclass, so mixing TEST_F and TEST in the same test suite is\nillegal.  In test suite TEST_F_before_TEST_in_same_test_case,\ntest DefinedUsingTEST_F is defined using TEST_F but\ntest DefinedUsingTESTAndShouldFail is defined using TEST.  You probably\nwant to change the TEST to TEST_F or move it to another test\ncase.\nStack trace: (omitted)\n\n\u001b[0;31m[  FAILED  ] \u001b[mTEST_F_before_TEST_in_same_test_case.DefinedUsingTESTAndShouldFail\n\u001b[0;32m[----------] \u001b[m2 tests from TEST_before_TEST_F_in_same_test_case\n\u001b[0;32m[ RUN      ] \u001b[mTEST_before_TEST_F_in_same_test_case.DefinedUsingTEST\n\u001b[0;32m[       OK ] \u001b[mTEST_before_TEST_F_in_same_test_case.DefinedUsingTEST\n\u001b[0;32m[ RUN      ] \u001b[mTEST_before_TEST_F_in_same_test_case.DefinedUsingTEST_FAndShouldFail\ngtest.cc:#: Failure\nFailed\nAll tests in the same test suite must use the same test fixture\nclass, so mixing TEST_F and TEST in the same test suite is\nillegal.  In test suite TEST_before_TEST_F_in_same_test_case,\ntest DefinedUsingTEST_FAndShouldFail is defined using TEST_F but\ntest DefinedUsingTEST is defined using TEST.  
You probably\nwant to change the TEST to TEST_F or move it to another test\ncase.\nStack trace: (omitted)\n\n\u001b[0;31m[  FAILED  ] \u001b[mTEST_before_TEST_F_in_same_test_case.DefinedUsingTEST_FAndShouldFail\n\u001b[0;32m[----------] \u001b[m8 tests from ExpectNonfatalFailureTest\n\u001b[0;32m[ RUN      ] \u001b[mExpectNonfatalFailureTest.CanReferenceGlobalVariables\n\u001b[0;32m[       OK ] \u001b[mExpectNonfatalFailureTest.CanReferenceGlobalVariables\n\u001b[0;32m[ RUN      ] \u001b[mExpectNonfatalFailureTest.CanReferenceLocalVariables\n\u001b[0;32m[       OK ] \u001b[mExpectNonfatalFailureTest.CanReferenceLocalVariables\n\u001b[0;32m[ RUN      ] \u001b[mExpectNonfatalFailureTest.SucceedsWhenThereIsOneNonfatalFailure\n\u001b[0;32m[       OK ] \u001b[mExpectNonfatalFailureTest.SucceedsWhenThereIsOneNonfatalFailure\n\u001b[0;32m[ RUN      ] \u001b[mExpectNonfatalFailureTest.FailsWhenThereIsNoNonfatalFailure\n(expecting a failure)\ngtest.cc:#: Failure\nExpected: 1 non-fatal failure\n  Actual: 0 failures\nStack trace: (omitted)\n\n\u001b[0;31m[  FAILED  ] \u001b[mExpectNonfatalFailureTest.FailsWhenThereIsNoNonfatalFailure\n\u001b[0;32m[ RUN      ] \u001b[mExpectNonfatalFailureTest.FailsWhenThereAreTwoNonfatalFailures\n(expecting a failure)\ngtest.cc:#: Failure\nExpected: 1 non-fatal failure\n  Actual: 2 failures\ngoogletest-output-test_.cc:#: Non-fatal failure:\nFailed\nExpected non-fatal failure 1.\nStack trace: (omitted)\n\n\ngoogletest-output-test_.cc:#: Non-fatal failure:\nFailed\nExpected non-fatal failure 2.\nStack trace: (omitted)\n\n\nStack trace: (omitted)\n\n\u001b[0;31m[  FAILED  ] \u001b[mExpectNonfatalFailureTest.FailsWhenThereAreTwoNonfatalFailures\n\u001b[0;32m[ RUN      ] \u001b[mExpectNonfatalFailureTest.FailsWhenThereIsOneFatalFailure\n(expecting a failure)\ngtest.cc:#: Failure\nExpected: 1 non-fatal failure\n  Actual:\ngoogletest-output-test_.cc:#: Fatal failure:\nFailed\nExpected fatal failure.\nStack trace: (omitted)\n\n\nStack trace: 
(omitted)\n\n\u001b[0;31m[  FAILED  ] \u001b[mExpectNonfatalFailureTest.FailsWhenThereIsOneFatalFailure\n\u001b[0;32m[ RUN      ] \u001b[mExpectNonfatalFailureTest.FailsWhenStatementReturns\n(expecting a failure)\ngtest.cc:#: Failure\nExpected: 1 non-fatal failure\n  Actual: 0 failures\nStack trace: (omitted)\n\n\u001b[0;31m[  FAILED  ] \u001b[mExpectNonfatalFailureTest.FailsWhenStatementReturns\n\u001b[0;32m[ RUN      ] \u001b[mExpectNonfatalFailureTest.FailsWhenStatementThrows\n(expecting a failure)\ngtest.cc:#: Failure\nExpected: 1 non-fatal failure\n  Actual: 0 failures\nStack trace: (omitted)\n\n\u001b[0;31m[  FAILED  ] \u001b[mExpectNonfatalFailureTest.FailsWhenStatementThrows\n\u001b[0;32m[----------] \u001b[m8 tests from ExpectFatalFailureTest\n\u001b[0;32m[ RUN      ] \u001b[mExpectFatalFailureTest.CanReferenceGlobalVariables\n\u001b[0;32m[       OK ] \u001b[mExpectFatalFailureTest.CanReferenceGlobalVariables\n\u001b[0;32m[ RUN      ] \u001b[mExpectFatalFailureTest.CanReferenceLocalStaticVariables\n\u001b[0;32m[       OK ] \u001b[mExpectFatalFailureTest.CanReferenceLocalStaticVariables\n\u001b[0;32m[ RUN      ] \u001b[mExpectFatalFailureTest.SucceedsWhenThereIsOneFatalFailure\n\u001b[0;32m[       OK ] \u001b[mExpectFatalFailureTest.SucceedsWhenThereIsOneFatalFailure\n\u001b[0;32m[ RUN      ] \u001b[mExpectFatalFailureTest.FailsWhenThereIsNoFatalFailure\n(expecting a failure)\ngtest.cc:#: Failure\nExpected: 1 fatal failure\n  Actual: 0 failures\nStack trace: (omitted)\n\n\u001b[0;31m[  FAILED  ] \u001b[mExpectFatalFailureTest.FailsWhenThereIsNoFatalFailure\n\u001b[0;32m[ RUN      ] \u001b[mExpectFatalFailureTest.FailsWhenThereAreTwoFatalFailures\n(expecting a failure)\ngtest.cc:#: Failure\nExpected: 1 fatal failure\n  Actual: 2 failures\ngoogletest-output-test_.cc:#: Fatal failure:\nFailed\nExpected fatal failure.\nStack trace: (omitted)\n\n\ngoogletest-output-test_.cc:#: Fatal failure:\nFailed\nExpected fatal failure.\nStack trace: (omitted)\n\n\nStack 
trace: (omitted)\n\n\u001b[0;31m[  FAILED  ] \u001b[mExpectFatalFailureTest.FailsWhenThereAreTwoFatalFailures\n\u001b[0;32m[ RUN      ] \u001b[mExpectFatalFailureTest.FailsWhenThereIsOneNonfatalFailure\n(expecting a failure)\ngtest.cc:#: Failure\nExpected: 1 fatal failure\n  Actual:\ngoogletest-output-test_.cc:#: Non-fatal failure:\nFailed\nExpected non-fatal failure.\nStack trace: (omitted)\n\n\nStack trace: (omitted)\n\n\u001b[0;31m[  FAILED  ] \u001b[mExpectFatalFailureTest.FailsWhenThereIsOneNonfatalFailure\n\u001b[0;32m[ RUN      ] \u001b[mExpectFatalFailureTest.FailsWhenStatementReturns\n(expecting a failure)\ngtest.cc:#: Failure\nExpected: 1 fatal failure\n  Actual: 0 failures\nStack trace: (omitted)\n\n\u001b[0;31m[  FAILED  ] \u001b[mExpectFatalFailureTest.FailsWhenStatementReturns\n\u001b[0;32m[ RUN      ] \u001b[mExpectFatalFailureTest.FailsWhenStatementThrows\n(expecting a failure)\ngtest.cc:#: Failure\nExpected: 1 fatal failure\n  Actual: 0 failures\nStack trace: (omitted)\n\n\u001b[0;31m[  FAILED  ] \u001b[mExpectFatalFailureTest.FailsWhenStatementThrows\n\u001b[0;32m[----------] \u001b[m2 tests from TypedTest/0, where TypeParam = int\n\u001b[0;32m[ RUN      ] \u001b[mTypedTest/0.Success\n\u001b[0;32m[       OK ] \u001b[mTypedTest/0.Success\n\u001b[0;32m[ RUN      ] \u001b[mTypedTest/0.Failure\ngoogletest-output-test_.cc:#: Failure\nExpected equality of these values:\n  1\n  TypeParam()\n    Which is: 0\nExpected failure\nStack trace: (omitted)\n\n\u001b[0;31m[  FAILED  ] \u001b[mTypedTest/0.Failure, where TypeParam = int\n\u001b[0;32m[----------] \u001b[m2 tests from TypedTestWithNames/char0, where TypeParam = char\n\u001b[0;32m[ RUN      ] \u001b[mTypedTestWithNames/char0.Success\n\u001b[0;32m[       OK ] \u001b[mTypedTestWithNames/char0.Success\n\u001b[0;32m[ RUN      ] \u001b[mTypedTestWithNames/char0.Failure\ngoogletest-output-test_.cc:#: Failure\nFailed\nStack trace: (omitted)\n\n\u001b[0;31m[  FAILED  ] \u001b[mTypedTestWithNames/char0.Failure, 
where TypeParam = char\n\u001b[0;32m[----------] \u001b[m2 tests from TypedTestWithNames/int1, where TypeParam = int\n\u001b[0;32m[ RUN      ] \u001b[mTypedTestWithNames/int1.Success\n\u001b[0;32m[       OK ] \u001b[mTypedTestWithNames/int1.Success\n\u001b[0;32m[ RUN      ] \u001b[mTypedTestWithNames/int1.Failure\ngoogletest-output-test_.cc:#: Failure\nFailed\nStack trace: (omitted)\n\n\u001b[0;31m[  FAILED  ] \u001b[mTypedTestWithNames/int1.Failure, where TypeParam = int\n\u001b[0;32m[----------] \u001b[m2 tests from Unsigned/TypedTestP/0, where TypeParam = unsigned char\n\u001b[0;32m[ RUN      ] \u001b[mUnsigned/TypedTestP/0.Success\n\u001b[0;32m[       OK ] \u001b[mUnsigned/TypedTestP/0.Success\n\u001b[0;32m[ RUN      ] \u001b[mUnsigned/TypedTestP/0.Failure\ngoogletest-output-test_.cc:#: Failure\nExpected equality of these values:\n  1U\n    Which is: 1\n  TypeParam()\n    Which is: '\\0'\nExpected failure\nStack trace: (omitted)\n\n\u001b[0;31m[  FAILED  ] \u001b[mUnsigned/TypedTestP/0.Failure, where TypeParam = unsigned char\n\u001b[0;32m[----------] \u001b[m2 tests from Unsigned/TypedTestP/1, where TypeParam = unsigned int\n\u001b[0;32m[ RUN      ] \u001b[mUnsigned/TypedTestP/1.Success\n\u001b[0;32m[       OK ] \u001b[mUnsigned/TypedTestP/1.Success\n\u001b[0;32m[ RUN      ] \u001b[mUnsigned/TypedTestP/1.Failure\ngoogletest-output-test_.cc:#: Failure\nExpected equality of these values:\n  1U\n    Which is: 1\n  TypeParam()\n    Which is: 0\nExpected failure\nStack trace: (omitted)\n\n\u001b[0;31m[  FAILED  ] \u001b[mUnsigned/TypedTestP/1.Failure, where TypeParam = unsigned int\n\u001b[0;32m[----------] \u001b[m2 tests from UnsignedCustomName/TypedTestP/unsignedChar0, where TypeParam = unsigned char\n\u001b[0;32m[ RUN      ] \u001b[mUnsignedCustomName/TypedTestP/unsignedChar0.Success\n\u001b[0;32m[       OK ] \u001b[mUnsignedCustomName/TypedTestP/unsignedChar0.Success\n\u001b[0;32m[ RUN      ] 
\u001b[mUnsignedCustomName/TypedTestP/unsignedChar0.Failure\ngoogletest-output-test_.cc:#: Failure\nExpected equality of these values:\n  1U\n    Which is: 1\n  TypeParam()\n    Which is: '\\0'\nExpected failure\nStack trace: (omitted)\n\n\u001b[0;31m[  FAILED  ] \u001b[mUnsignedCustomName/TypedTestP/unsignedChar0.Failure, where TypeParam = unsigned char\n\u001b[0;32m[----------] \u001b[m2 tests from UnsignedCustomName/TypedTestP/unsignedInt1, where TypeParam = unsigned int\n\u001b[0;32m[ RUN      ] \u001b[mUnsignedCustomName/TypedTestP/unsignedInt1.Success\n\u001b[0;32m[       OK ] \u001b[mUnsignedCustomName/TypedTestP/unsignedInt1.Success\n\u001b[0;32m[ RUN      ] \u001b[mUnsignedCustomName/TypedTestP/unsignedInt1.Failure\ngoogletest-output-test_.cc:#: Failure\nExpected equality of these values:\n  1U\n    Which is: 1\n  TypeParam()\n    Which is: 0\nExpected failure\nStack trace: (omitted)\n\n\u001b[0;31m[  FAILED  ] \u001b[mUnsignedCustomName/TypedTestP/unsignedInt1.Failure, where TypeParam = unsigned int\n\u001b[0;32m[----------] \u001b[m4 tests from ExpectFailureTest\n\u001b[0;32m[ RUN      ] \u001b[mExpectFailureTest.ExpectFatalFailure\n(expecting 1 failure)\ngtest.cc:#: Failure\nExpected: 1 fatal failure\n  Actual:\ngoogletest-output-test_.cc:#: Success:\nSucceeded\nStack trace: (omitted)\n\n\nStack trace: (omitted)\n\n(expecting 1 failure)\ngtest.cc:#: Failure\nExpected: 1 fatal failure\n  Actual:\ngoogletest-output-test_.cc:#: Non-fatal failure:\nFailed\nExpected non-fatal failure.\nStack trace: (omitted)\n\n\nStack trace: (omitted)\n\n(expecting 1 failure)\ngtest.cc:#: Failure\nExpected: 1 fatal failure containing \"Some other fatal failure expected.\"\n  Actual:\ngoogletest-output-test_.cc:#: Fatal failure:\nFailed\nExpected fatal failure.\nStack trace: (omitted)\n\n\nStack trace: (omitted)\n\n\u001b[0;31m[  FAILED  ] \u001b[mExpectFailureTest.ExpectFatalFailure\n\u001b[0;32m[ RUN      ] \u001b[mExpectFailureTest.ExpectNonFatalFailure\n(expecting 1 
failure)\ngtest.cc:#: Failure\nExpected: 1 non-fatal failure\n  Actual:\ngoogletest-output-test_.cc:#: Success:\nSucceeded\nStack trace: (omitted)\n\n\nStack trace: (omitted)\n\n(expecting 1 failure)\ngtest.cc:#: Failure\nExpected: 1 non-fatal failure\n  Actual:\ngoogletest-output-test_.cc:#: Fatal failure:\nFailed\nExpected fatal failure.\nStack trace: (omitted)\n\n\nStack trace: (omitted)\n\n(expecting 1 failure)\ngtest.cc:#: Failure\nExpected: 1 non-fatal failure containing \"Some other non-fatal failure.\"\n  Actual:\ngoogletest-output-test_.cc:#: Non-fatal failure:\nFailed\nExpected non-fatal failure.\nStack trace: (omitted)\n\n\nStack trace: (omitted)\n\n\u001b[0;31m[  FAILED  ] \u001b[mExpectFailureTest.ExpectNonFatalFailure\n\u001b[0;32m[ RUN      ] \u001b[mExpectFailureTest.ExpectFatalFailureOnAllThreads\n(expecting 1 failure)\ngtest.cc:#: Failure\nExpected: 1 fatal failure\n  Actual:\ngoogletest-output-test_.cc:#: Success:\nSucceeded\nStack trace: (omitted)\n\n\nStack trace: (omitted)\n\n(expecting 1 failure)\ngtest.cc:#: Failure\nExpected: 1 fatal failure\n  Actual:\ngoogletest-output-test_.cc:#: Non-fatal failure:\nFailed\nExpected non-fatal failure.\nStack trace: (omitted)\n\n\nStack trace: (omitted)\n\n(expecting 1 failure)\ngtest.cc:#: Failure\nExpected: 1 fatal failure containing \"Some other fatal failure expected.\"\n  Actual:\ngoogletest-output-test_.cc:#: Fatal failure:\nFailed\nExpected fatal failure.\nStack trace: (omitted)\n\n\nStack trace: (omitted)\n\n\u001b[0;31m[  FAILED  ] \u001b[mExpectFailureTest.ExpectFatalFailureOnAllThreads\n\u001b[0;32m[ RUN      ] \u001b[mExpectFailureTest.ExpectNonFatalFailureOnAllThreads\n(expecting 1 failure)\ngtest.cc:#: Failure\nExpected: 1 non-fatal failure\n  Actual:\ngoogletest-output-test_.cc:#: Success:\nSucceeded\nStack trace: (omitted)\n\n\nStack trace: (omitted)\n\n(expecting 1 failure)\ngtest.cc:#: Failure\nExpected: 1 non-fatal failure\n  Actual:\ngoogletest-output-test_.cc:#: Fatal 
failure:\nFailed\nExpected fatal failure.\nStack trace: (omitted)\n\n\nStack trace: (omitted)\n\n(expecting 1 failure)\ngtest.cc:#: Failure\nExpected: 1 non-fatal failure containing \"Some other non-fatal failure.\"\n  Actual:\ngoogletest-output-test_.cc:#: Non-fatal failure:\nFailed\nExpected non-fatal failure.\nStack trace: (omitted)\n\n\nStack trace: (omitted)\n\n\u001b[0;31m[  FAILED  ] \u001b[mExpectFailureTest.ExpectNonFatalFailureOnAllThreads\n\u001b[0;32m[----------] \u001b[m2 tests from ExpectFailureWithThreadsTest\n\u001b[0;32m[ RUN      ] \u001b[mExpectFailureWithThreadsTest.ExpectFatalFailure\n(expecting 2 failures)\ngoogletest-output-test_.cc:#: Failure\nFailed\nExpected fatal failure.\nStack trace: (omitted)\n\ngtest.cc:#: Failure\nExpected: 1 fatal failure\n  Actual: 0 failures\nStack trace: (omitted)\n\n\u001b[0;31m[  FAILED  ] \u001b[mExpectFailureWithThreadsTest.ExpectFatalFailure\n\u001b[0;32m[ RUN      ] \u001b[mExpectFailureWithThreadsTest.ExpectNonFatalFailure\n(expecting 2 failures)\ngoogletest-output-test_.cc:#: Failure\nFailed\nExpected non-fatal failure.\nStack trace: (omitted)\n\ngtest.cc:#: Failure\nExpected: 1 non-fatal failure\n  Actual: 0 failures\nStack trace: (omitted)\n\n\u001b[0;31m[  FAILED  ] \u001b[mExpectFailureWithThreadsTest.ExpectNonFatalFailure\n\u001b[0;32m[----------] \u001b[m1 test from ScopedFakeTestPartResultReporterTest\n\u001b[0;32m[ RUN      ] \u001b[mScopedFakeTestPartResultReporterTest.InterceptOnlyCurrentThread\n(expecting 2 failures)\ngoogletest-output-test_.cc:#: Failure\nFailed\nExpected fatal failure.\nStack trace: (omitted)\n\ngoogletest-output-test_.cc:#: Failure\nFailed\nExpected non-fatal failure.\nStack trace: (omitted)\n\n\u001b[0;31m[  FAILED  ] \u001b[mScopedFakeTestPartResultReporterTest.InterceptOnlyCurrentThread\n\u001b[0;32m[----------] \u001b[m2 tests from DynamicFixture\nDynamicFixture::SetUpTestSuite\n\u001b[0;32m[ RUN      ] 
\u001b[mDynamicFixture.DynamicTestPass\nDynamicFixture()\nDynamicFixture::SetUp\nDynamicFixture::TearDown\n~DynamicFixture()\n\u001b[0;32m[       OK ] \u001b[mDynamicFixture.DynamicTestPass\n\u001b[0;32m[ RUN      ] \u001b[mDynamicFixture.DynamicTestFail\nDynamicFixture()\nDynamicFixture::SetUp\ngoogletest-output-test_.cc:#: Failure\nValue of: Pass\n  Actual: false\nExpected: true\nStack trace: (omitted)\n\nDynamicFixture::TearDown\n~DynamicFixture()\n\u001b[0;31m[  FAILED  ] \u001b[mDynamicFixture.DynamicTestFail\nDynamicFixture::TearDownTestSuite\n\u001b[0;32m[----------] \u001b[m1 test from DynamicFixtureAnotherName\nDynamicFixture::SetUpTestSuite\n\u001b[0;32m[ RUN      ] \u001b[mDynamicFixtureAnotherName.DynamicTestPass\nDynamicFixture()\nDynamicFixture::SetUp\nDynamicFixture::TearDown\n~DynamicFixture()\n\u001b[0;32m[       OK ] \u001b[mDynamicFixtureAnotherName.DynamicTestPass\nDynamicFixture::TearDownTestSuite\n\u001b[0;32m[----------] \u001b[m2 tests from BadDynamicFixture1\nDynamicFixture::SetUpTestSuite\n\u001b[0;32m[ RUN      ] \u001b[mBadDynamicFixture1.FixtureBase\nDynamicFixture()\nDynamicFixture::SetUp\nDynamicFixture::TearDown\n~DynamicFixture()\n\u001b[0;32m[       OK ] \u001b[mBadDynamicFixture1.FixtureBase\n\u001b[0;32m[ RUN      ] \u001b[mBadDynamicFixture1.TestBase\nDynamicFixture()\ngtest.cc:#: Failure\nFailed\nAll tests in the same test suite must use the same test fixture\nclass, so mixing TEST_F and TEST in the same test suite is\nillegal.  In test suite BadDynamicFixture1,\ntest FixtureBase is defined using TEST_F but\ntest TestBase is defined using TEST.  
You probably\nwant to change the TEST to TEST_F or move it to another test\ncase.\nStack trace: (omitted)\n\n~DynamicFixture()\n\u001b[0;31m[  FAILED  ] \u001b[mBadDynamicFixture1.TestBase\nDynamicFixture::TearDownTestSuite\n\u001b[0;32m[----------] \u001b[m2 tests from BadDynamicFixture2\nDynamicFixture::SetUpTestSuite\n\u001b[0;32m[ RUN      ] \u001b[mBadDynamicFixture2.FixtureBase\nDynamicFixture()\nDynamicFixture::SetUp\nDynamicFixture::TearDown\n~DynamicFixture()\n\u001b[0;32m[       OK ] \u001b[mBadDynamicFixture2.FixtureBase\n\u001b[0;32m[ RUN      ] \u001b[mBadDynamicFixture2.Derived\nDynamicFixture()\ngtest.cc:#: Failure\nFailed\nAll tests in the same test suite must use the same test fixture\nclass.  However, in test suite BadDynamicFixture2,\nyou defined test FixtureBase and test Derived\nusing two different test fixture classes.  This can happen if\nthe two classes are from different namespaces or translation\nunits and have the same name.  You should probably rename one\nof the classes to put the tests into different test suites.\nStack trace: (omitted)\n\n~DynamicFixture()\n\u001b[0;31m[  FAILED  ] \u001b[mBadDynamicFixture2.Derived\nDynamicFixture::TearDownTestSuite\n\u001b[0;32m[----------] \u001b[m1 test from PrintingFailingParams/FailingParamTest\n\u001b[0;32m[ RUN      ] \u001b[mPrintingFailingParams/FailingParamTest.Fails/0\ngoogletest-output-test_.cc:#: Failure\nExpected equality of these values:\n  1\n  GetParam()\n    Which is: 2\nStack trace: (omitted)\n\n\u001b[0;31m[  FAILED  ] \u001b[mPrintingFailingParams/FailingParamTest.Fails/0, where GetParam() = 2\n\u001b[0;32m[----------] \u001b[m2 tests from PrintingStrings/ParamTest\n\u001b[0;32m[ RUN      ] \u001b[mPrintingStrings/ParamTest.Success/a\n\u001b[0;32m[       OK ] \u001b[mPrintingStrings/ParamTest.Success/a\n\u001b[0;32m[ RUN      ] \u001b[mPrintingStrings/ParamTest.Failure/a\ngoogletest-output-test_.cc:#: Failure\nExpected equality of these values:\n  \"b\"\n  GetParam()\n    Which 
is: \"a\"\nExpected failure\nStack trace: (omitted)\n\n\u001b[0;31m[  FAILED  ] \u001b[mPrintingStrings/ParamTest.Failure/a, where GetParam() = \"a\"\n\u001b[0;32m[----------] \u001b[mGlobal test environment tear-down\nBarEnvironment::TearDown() called.\ngoogletest-output-test_.cc:#: Failure\nFailed\nExpected non-fatal failure.\nStack trace: (omitted)\n\nFooEnvironment::TearDown() called.\ngoogletest-output-test_.cc:#: Failure\nFailed\nExpected fatal failure.\nStack trace: (omitted)\n\n\u001b[0;32m[==========] \u001b[m84 tests from 39 test suites ran.\n\u001b[0;32m[  PASSED  ] \u001b[m30 tests.\n\u001b[0;31m[  FAILED  ] \u001b[m54 tests, listed below:\n\u001b[0;31m[  FAILED  ] \u001b[mNonfatalFailureTest.EscapesStringOperands\n\u001b[0;31m[  FAILED  ] \u001b[mNonfatalFailureTest.DiffForLongStrings\n\u001b[0;31m[  FAILED  ] \u001b[mFatalFailureTest.FatalFailureInSubroutine\n\u001b[0;31m[  FAILED  ] \u001b[mFatalFailureTest.FatalFailureInNestedSubroutine\n\u001b[0;31m[  FAILED  ] \u001b[mFatalFailureTest.NonfatalFailureInSubroutine\n\u001b[0;31m[  FAILED  ] \u001b[mLoggingTest.InterleavingLoggingAndAssertions\n\u001b[0;31m[  FAILED  ] \u001b[mSCOPED_TRACETest.AcceptedValues\n\u001b[0;31m[  FAILED  ] \u001b[mSCOPED_TRACETest.ObeysScopes\n\u001b[0;31m[  FAILED  ] \u001b[mSCOPED_TRACETest.WorksInLoop\n\u001b[0;31m[  FAILED  ] \u001b[mSCOPED_TRACETest.WorksInSubroutine\n\u001b[0;31m[  FAILED  ] \u001b[mSCOPED_TRACETest.CanBeNested\n\u001b[0;31m[  FAILED  ] \u001b[mSCOPED_TRACETest.CanBeRepeated\n\u001b[0;31m[  FAILED  ] \u001b[mSCOPED_TRACETest.WorksConcurrently\n\u001b[0;31m[  FAILED  ] \u001b[mScopedTraceTest.WithExplicitFileAndLine\n\u001b[0;31m[  FAILED  ] \u001b[mNonFatalFailureInFixtureConstructorTest.FailureInConstructor\n\u001b[0;31m[  FAILED  ] \u001b[mFatalFailureInFixtureConstructorTest.FailureInConstructor\n\u001b[0;31m[  FAILED  ] \u001b[mNonFatalFailureInSetUpTest.FailureInSetUp\n\u001b[0;31m[  FAILED  ] 
\u001b[mFatalFailureInSetUpTest.FailureInSetUp\n\u001b[0;31m[  FAILED  ] \u001b[mAddFailureAtTest.MessageContainsSpecifiedFileAndLineNumber\n\u001b[0;31m[  FAILED  ] \u001b[mGtestFailAtTest.MessageContainsSpecifiedFileAndLineNumber\n\u001b[0;31m[  FAILED  ] \u001b[mMixedUpTestSuiteTest.ThisShouldFail\n\u001b[0;31m[  FAILED  ] \u001b[mMixedUpTestSuiteTest.ThisShouldFailToo\n\u001b[0;31m[  FAILED  ] \u001b[mMixedUpTestSuiteWithSameTestNameTest.TheSecondTestWithThisNameShouldFail\n\u001b[0;31m[  FAILED  ] \u001b[mTEST_F_before_TEST_in_same_test_case.DefinedUsingTESTAndShouldFail\n\u001b[0;31m[  FAILED  ] \u001b[mTEST_before_TEST_F_in_same_test_case.DefinedUsingTEST_FAndShouldFail\n\u001b[0;31m[  FAILED  ] \u001b[mExpectNonfatalFailureTest.FailsWhenThereIsNoNonfatalFailure\n\u001b[0;31m[  FAILED  ] \u001b[mExpectNonfatalFailureTest.FailsWhenThereAreTwoNonfatalFailures\n\u001b[0;31m[  FAILED  ] \u001b[mExpectNonfatalFailureTest.FailsWhenThereIsOneFatalFailure\n\u001b[0;31m[  FAILED  ] \u001b[mExpectNonfatalFailureTest.FailsWhenStatementReturns\n\u001b[0;31m[  FAILED  ] \u001b[mExpectNonfatalFailureTest.FailsWhenStatementThrows\n\u001b[0;31m[  FAILED  ] \u001b[mExpectFatalFailureTest.FailsWhenThereIsNoFatalFailure\n\u001b[0;31m[  FAILED  ] \u001b[mExpectFatalFailureTest.FailsWhenThereAreTwoFatalFailures\n\u001b[0;31m[  FAILED  ] \u001b[mExpectFatalFailureTest.FailsWhenThereIsOneNonfatalFailure\n\u001b[0;31m[  FAILED  ] \u001b[mExpectFatalFailureTest.FailsWhenStatementReturns\n\u001b[0;31m[  FAILED  ] \u001b[mExpectFatalFailureTest.FailsWhenStatementThrows\n\u001b[0;31m[  FAILED  ] \u001b[mTypedTest/0.Failure, where TypeParam = int\n\u001b[0;31m[  FAILED  ] \u001b[mTypedTestWithNames/char0.Failure, where TypeParam = char\n\u001b[0;31m[  FAILED  ] \u001b[mTypedTestWithNames/int1.Failure, where TypeParam = int\n\u001b[0;31m[  FAILED  ] \u001b[mUnsigned/TypedTestP/0.Failure, where TypeParam = unsigned char\n\u001b[0;31m[  FAILED  ] \u001b[mUnsigned/TypedTestP/1.Failure, 
where TypeParam = unsigned int\n\u001b[0;31m[  FAILED  ] \u001b[mUnsignedCustomName/TypedTestP/unsignedChar0.Failure, where TypeParam = unsigned char\n\u001b[0;31m[  FAILED  ] \u001b[mUnsignedCustomName/TypedTestP/unsignedInt1.Failure, where TypeParam = unsigned int\n\u001b[0;31m[  FAILED  ] \u001b[mExpectFailureTest.ExpectFatalFailure\n\u001b[0;31m[  FAILED  ] \u001b[mExpectFailureTest.ExpectNonFatalFailure\n\u001b[0;31m[  FAILED  ] \u001b[mExpectFailureTest.ExpectFatalFailureOnAllThreads\n\u001b[0;31m[  FAILED  ] \u001b[mExpectFailureTest.ExpectNonFatalFailureOnAllThreads\n\u001b[0;31m[  FAILED  ] \u001b[mExpectFailureWithThreadsTest.ExpectFatalFailure\n\u001b[0;31m[  FAILED  ] \u001b[mExpectFailureWithThreadsTest.ExpectNonFatalFailure\n\u001b[0;31m[  FAILED  ] \u001b[mScopedFakeTestPartResultReporterTest.InterceptOnlyCurrentThread\n\u001b[0;31m[  FAILED  ] \u001b[mDynamicFixture.DynamicTestFail\n\u001b[0;31m[  FAILED  ] \u001b[mBadDynamicFixture1.TestBase\n\u001b[0;31m[  FAILED  ] \u001b[mBadDynamicFixture2.Derived\n\u001b[0;31m[  FAILED  ] \u001b[mPrintingFailingParams/FailingParamTest.Fails/0, where GetParam() = 2\n\u001b[0;31m[  FAILED  ] \u001b[mPrintingStrings/ParamTest.Failure/a, where GetParam() = \"a\"\n\n54 FAILED TESTS\n\u001b[0;33m  YOU HAVE 1 DISABLED TEST\n\n\u001b[mNote: Google Test filter = FatalFailureTest.*:LoggingTest.*\n[==========] Running 4 tests from 2 test suites.\n[----------] Global test environment set-up.\n[----------] 3 tests from FatalFailureTest\n[ RUN      ] FatalFailureTest.FatalFailureInSubroutine\n(expecting a failure that x should be 1)\ngoogletest-output-test_.cc:#: Failure\nExpected equality of these values:\n  1\n  x\n    Which is: 2\nStack trace: (omitted)\n\n[  FAILED  ] FatalFailureTest.FatalFailureInSubroutine (? 
ms)\n[ RUN      ] FatalFailureTest.FatalFailureInNestedSubroutine\n(expecting a failure that x should be 1)\ngoogletest-output-test_.cc:#: Failure\nExpected equality of these values:\n  1\n  x\n    Which is: 2\nStack trace: (omitted)\n\n[  FAILED  ] FatalFailureTest.FatalFailureInNestedSubroutine (? ms)\n[ RUN      ] FatalFailureTest.NonfatalFailureInSubroutine\n(expecting a failure on false)\ngoogletest-output-test_.cc:#: Failure\nValue of: false\n  Actual: false\nExpected: true\nStack trace: (omitted)\n\n[  FAILED  ] FatalFailureTest.NonfatalFailureInSubroutine (? ms)\n[----------] 3 tests from FatalFailureTest (? ms total)\n\n[----------] 1 test from LoggingTest\n[ RUN      ] LoggingTest.InterleavingLoggingAndAssertions\n(expecting 2 failures on (3) >= (a[i]))\ni == 0\ni == 1\ngoogletest-output-test_.cc:#: Failure\nExpected: (3) >= (a[i]), actual: 3 vs 9\nStack trace: (omitted)\n\ni == 2\ni == 3\ngoogletest-output-test_.cc:#: Failure\nExpected: (3) >= (a[i]), actual: 3 vs 6\nStack trace: (omitted)\n\n[  FAILED  ] LoggingTest.InterleavingLoggingAndAssertions (? ms)\n[----------] 1 test from LoggingTest (? ms total)\n\n[----------] Global test environment tear-down\n[==========] 4 tests from 2 test suites ran. (? 
ms total)\n[  PASSED  ] 0 tests.\n[  FAILED  ] 4 tests, listed below:\n[  FAILED  ] FatalFailureTest.FatalFailureInSubroutine\n[  FAILED  ] FatalFailureTest.FatalFailureInNestedSubroutine\n[  FAILED  ] FatalFailureTest.NonfatalFailureInSubroutine\n[  FAILED  ] LoggingTest.InterleavingLoggingAndAssertions\n\n 4 FAILED TESTS\nNote: Google Test filter = *DISABLED_*\n[==========] Running 1 test from 1 test suite.\n[----------] Global test environment set-up.\n[----------] 1 test from DisabledTestsWarningTest\n[ RUN      ] DisabledTestsWarningTest.DISABLED_AlsoRunDisabledTestsFlagSuppressesWarning\n[       OK ] DisabledTestsWarningTest.DISABLED_AlsoRunDisabledTestsFlagSuppressesWarning\n[----------] Global test environment tear-down\n[==========] 1 test from 1 test suite ran.\n[  PASSED  ] 1 test.\nNote: Google Test filter = PassingTest.*\nNote: This is test shard 2 of 2.\n[==========] Running 1 test from 1 test suite.\n[----------] Global test environment set-up.\n[----------] 1 test from PassingTest\n[ RUN      ] PassingTest.PassingTest2\n[       OK ] PassingTest.PassingTest2\n[----------] Global test environment tear-down\n[==========] 1 test from 1 test suite ran.\n[  PASSED  ] 1 test.\n"
  },
  {
    "path": "test/googletest/test/googletest-output-test.py",
    "content": "#!/usr/bin/env python\n#\n# Copyright 2008, Google Inc.\n# All rights reserved.\n#\n# Redistribution and use in source and binary forms, with or without\n# modification, are permitted provided that the following conditions are\n# met:\n#\n#     * Redistributions of source code must retain the above copyright\n# notice, this list of conditions and the following disclaimer.\n#     * Redistributions in binary form must reproduce the above\n# copyright notice, this list of conditions and the following disclaimer\n# in the documentation and/or other materials provided with the\n# distribution.\n#     * Neither the name of Google Inc. nor the names of its\n# contributors may be used to endorse or promote products derived from\n# this software without specific prior written permission.\n#\n# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n# \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\"\"\"Tests the text output of Google C++ Testing and Mocking Framework.\n\nTo update the golden file:\ngoogletest_output_test.py --build_dir=BUILD/DIR --gengolden\nwhere BUILD/DIR contains the built googletest-output-test_ file.\ngoogletest_output_test.py --gengolden\ngoogletest_output_test.py\n\"\"\"\n\nimport difflib\nimport os\nimport re\nimport sys\nimport gtest_test_utils\n\n\n# The flag for generating the golden file\nGENGOLDEN_FLAG = '--gengolden'\nCATCH_EXCEPTIONS_ENV_VAR_NAME = 'GTEST_CATCH_EXCEPTIONS'\n\n# The flag indicating stacktraces are not supported\nNO_STACKTRACE_SUPPORT_FLAG = '--no_stacktrace_support'\n\nIS_LINUX = os.name == 'posix' and os.uname()[0] == 'Linux'\nIS_WINDOWS = os.name == 'nt'\n\nGOLDEN_NAME = 'googletest-output-test-golden-lin.txt'\n\nPROGRAM_PATH = gtest_test_utils.GetTestExecutablePath('googletest-output-test_')\n\n# At least one command we exercise must not have the\n# 'internal_skip_environment_and_ad_hoc_tests' argument.\nCOMMAND_LIST_TESTS = ({}, [PROGRAM_PATH, '--gtest_list_tests'])\nCOMMAND_WITH_COLOR = ({}, [PROGRAM_PATH, '--gtest_color=yes'])\nCOMMAND_WITH_TIME = ({}, [PROGRAM_PATH,\n                          '--gtest_print_time',\n                          'internal_skip_environment_and_ad_hoc_tests',\n                          '--gtest_filter=FatalFailureTest.*:LoggingTest.*'])\nCOMMAND_WITH_DISABLED = (\n    {}, [PROGRAM_PATH,\n         '--gtest_also_run_disabled_tests',\n         
'internal_skip_environment_and_ad_hoc_tests',\n         '--gtest_filter=*DISABLED_*'])\nCOMMAND_WITH_SHARDING = (\n    {'GTEST_SHARD_INDEX': '1', 'GTEST_TOTAL_SHARDS': '2'},\n    [PROGRAM_PATH,\n     'internal_skip_environment_and_ad_hoc_tests',\n     '--gtest_filter=PassingTest.*'])\n\nGOLDEN_PATH = os.path.join(gtest_test_utils.GetSourceDir(), GOLDEN_NAME)\n\n\ndef ToUnixLineEnding(s):\n  \"\"\"Changes all Windows/Mac line endings in s to UNIX line endings.\"\"\"\n\n  return s.replace('\\r\\n', '\\n').replace('\\r', '\\n')\n\n\ndef RemoveLocations(test_output):\n  \"\"\"Removes all file location info from a Google Test program's output.\n\n  Args:\n       test_output:  the output of a Google Test program.\n\n  Returns:\n       output with all file location info (in the form of\n       'DIRECTORY/FILE_NAME:LINE_NUMBER: 'or\n       'DIRECTORY\\\\FILE_NAME(LINE_NUMBER): ') replaced by\n       'FILE_NAME:#: '.\n  \"\"\"\n\n  return re.sub(r'.*[/\\\\]((googletest-output-test_|gtest).cc)(\\:\\d+|\\(\\d+\\))\\: ',\n                r'\\1:#: ', test_output)\n\n\ndef RemoveStackTraceDetails(output):\n  \"\"\"Removes all stack traces from a Google Test program's output.\"\"\"\n\n  # *? means \"find the shortest string that matches\".\n  return re.sub(r'Stack trace:(.|\\n)*?\\n\\n',\n                'Stack trace: (omitted)\\n\\n', output)\n\n\ndef RemoveStackTraces(output):\n  \"\"\"Removes all traces of stack traces from a Google Test program's output.\"\"\"\n\n  # *? means \"find the shortest string that matches\".\n  return re.sub(r'Stack trace:(.|\\n)*?\\n\\n', '', output)\n\n\ndef RemoveTime(output):\n  \"\"\"Removes all time information from a Google Test program's output.\"\"\"\n\n  return re.sub(r'\\(\\d+ ms', '(? 
ms', output)\n\n\ndef RemoveTypeInfoDetails(test_output):\n  \"\"\"Removes compiler-specific type info from Google Test program's output.\n\n  Args:\n       test_output:  the output of a Google Test program.\n\n  Returns:\n       output with type information normalized to canonical form.\n  \"\"\"\n\n  # some compilers output the name of type 'unsigned int' as 'unsigned'\n  return re.sub(r'unsigned int', 'unsigned', test_output)\n\n\ndef NormalizeToCurrentPlatform(test_output):\n  \"\"\"Normalizes platform specific output details for easier comparison.\"\"\"\n\n  if IS_WINDOWS:\n    # Removes the color information that is not present on Windows.\n    test_output = re.sub('\\x1b\\\\[(0;3\\d)?m', '', test_output)\n    # Changes failure message headers into the Windows format.\n    test_output = re.sub(r': Failure\\n', r': error: ', test_output)\n    # Changes file(line_number) to file:line_number.\n    test_output = re.sub(r'((\\w|\\.)+)\\((\\d+)\\):', r'\\1:\\3:', test_output)\n\n  return test_output\n\n\ndef RemoveTestCounts(output):\n  \"\"\"Removes test counts from a Google Test program's output.\"\"\"\n\n  output = re.sub(r'\\d+ tests?, listed below',\n                  '? tests, listed below', output)\n  output = re.sub(r'\\d+ FAILED TESTS',\n                  '? FAILED TESTS', output)\n  output = re.sub(r'\\d+ tests? from \\d+ test cases?',\n                  '? tests from ? test cases', output)\n  output = re.sub(r'\\d+ tests? from ([a-zA-Z_])',\n                  r'? tests from \\1', output)\n  return re.sub(r'\\d+ tests?\\.', '? 
tests.', output)\n\n\ndef RemoveMatchingTests(test_output, pattern):\n  \"\"\"Removes output of specified tests from a Google Test program's output.\n\n  This function strips not only the beginning and the end of a test but also\n  all output in between.\n\n  Args:\n    test_output:       A string containing the test output.\n    pattern:           A regex string that matches names of test cases or\n                       tests to remove.\n\n  Returns:\n    Contents of test_output with tests whose names match pattern removed.\n  \"\"\"\n\n  test_output = re.sub(\n      r'.*\\[ RUN      \\] .*%s(.|\\n)*?\\[(  FAILED  |       OK )\\] .*%s.*\\n' % (\n          pattern, pattern),\n      '',\n      test_output)\n  return re.sub(r'.*%s.*\\n' % pattern, '', test_output)\n\n\ndef NormalizeOutput(output):\n  \"\"\"Normalizes output (the output of googletest-output-test_.exe).\"\"\"\n\n  output = ToUnixLineEnding(output)\n  output = RemoveLocations(output)\n  output = RemoveStackTraceDetails(output)\n  output = RemoveTime(output)\n  return output\n\n\ndef GetShellCommandOutput(env_cmd):\n  \"\"\"Runs a command in a sub-process, and returns its output in a string.\n\n  Args:\n    env_cmd: The shell command. A 2-tuple where element 0 is a dict of extra\n             environment variables to set, and element 1 is a string with\n             the command and any flags.\n\n  Returns:\n    A string with the command's combined standard and diagnostic output.\n  \"\"\"\n\n  # Spawns cmd in a sub-process, and gets its standard I/O file objects.\n  # Set and save the environment properly.\n  environ = os.environ.copy()\n  environ.update(env_cmd[0])\n  p = gtest_test_utils.Subprocess(env_cmd[1], env=environ)\n\n  return p.output\n\n\ndef GetCommandOutput(env_cmd):\n  \"\"\"Runs a command and returns its output with all file location\n  info stripped off.\n\n  Args:\n    env_cmd:  The shell command. 
A 2-tuple where element 0 is a dict of extra\n              environment variables to set, and element 1 is a string with\n              the command and any flags.\n  \"\"\"\n\n  # Disables exception pop-ups on Windows.\n  environ, cmdline = env_cmd\n  environ = dict(environ)  # Ensures we are modifying a copy.\n  environ[CATCH_EXCEPTIONS_ENV_VAR_NAME] = '1'\n  return NormalizeOutput(GetShellCommandOutput((environ, cmdline)))\n\n\ndef GetOutputOfAllCommands():\n  \"\"\"Returns concatenated output from several representative commands.\"\"\"\n\n  return (GetCommandOutput(COMMAND_WITH_COLOR) +\n          GetCommandOutput(COMMAND_WITH_TIME) +\n          GetCommandOutput(COMMAND_WITH_DISABLED) +\n          GetCommandOutput(COMMAND_WITH_SHARDING))\n\n\ntest_list = GetShellCommandOutput(COMMAND_LIST_TESTS)\nSUPPORTS_DEATH_TESTS = 'DeathTest' in test_list\nSUPPORTS_TYPED_TESTS = 'TypedTest' in test_list\nSUPPORTS_THREADS = 'ExpectFailureWithThreadsTest' in test_list\nSUPPORTS_STACK_TRACES = NO_STACKTRACE_SUPPORT_FLAG not in sys.argv\n\nCAN_GENERATE_GOLDEN_FILE = (SUPPORTS_DEATH_TESTS and\n                            SUPPORTS_TYPED_TESTS and\n                            SUPPORTS_THREADS and\n                            SUPPORTS_STACK_TRACES)\n\nclass GTestOutputTest(gtest_test_utils.TestCase):\n  def RemoveUnsupportedTests(self, test_output):\n    if not SUPPORTS_DEATH_TESTS:\n      test_output = RemoveMatchingTests(test_output, 'DeathTest')\n    if not SUPPORTS_TYPED_TESTS:\n      test_output = RemoveMatchingTests(test_output, 'TypedTest')\n      test_output = RemoveMatchingTests(test_output, 'TypedDeathTest')\n      test_output = RemoveMatchingTests(test_output, 'TypeParamDeathTest')\n    if not SUPPORTS_THREADS:\n      test_output = RemoveMatchingTests(test_output,\n                                        'ExpectFailureWithThreadsTest')\n      test_output = RemoveMatchingTests(test_output,\n                                        'ScopedFakeTestPartResultReporterTest')\n  
    test_output = RemoveMatchingTests(test_output,\n                                        'WorksConcurrently')\n    if not SUPPORTS_STACK_TRACES:\n      test_output = RemoveStackTraces(test_output)\n\n    return test_output\n\n  def testOutput(self):\n    output = GetOutputOfAllCommands()\n\n    golden_file = open(GOLDEN_PATH, 'rb')\n    # A mis-configured source control system can cause \\r appear in EOL\n    # sequences when we read the golden file irrespective of an operating\n    # system used. Therefore, we need to strip those \\r's from newlines\n    # unconditionally.\n    golden = ToUnixLineEnding(golden_file.read().decode())\n    golden_file.close()\n\n    # We want the test to pass regardless of certain features being\n    # supported or not.\n\n    # We still have to remove type name specifics in all cases.\n    normalized_actual = RemoveTypeInfoDetails(output)\n    normalized_golden = RemoveTypeInfoDetails(golden)\n\n    if CAN_GENERATE_GOLDEN_FILE:\n      self.assertEqual(normalized_golden, normalized_actual,\n                       '\\n'.join(difflib.unified_diff(\n                           normalized_golden.split('\\n'),\n                           normalized_actual.split('\\n'),\n                           'golden', 'actual')))\n    else:\n      normalized_actual = NormalizeToCurrentPlatform(\n          RemoveTestCounts(normalized_actual))\n      normalized_golden = NormalizeToCurrentPlatform(\n          RemoveTestCounts(self.RemoveUnsupportedTests(normalized_golden)))\n\n      # This code is very handy when debugging golden file differences:\n      if os.getenv('DEBUG_GTEST_OUTPUT_TEST'):\n        open(os.path.join(\n            gtest_test_utils.GetSourceDir(),\n            '_googletest-output-test_normalized_actual.txt'), 'wb').write(\n                normalized_actual)\n        open(os.path.join(\n            gtest_test_utils.GetSourceDir(),\n            '_googletest-output-test_normalized_golden.txt'), 'wb').write(\n                
normalized_golden)\n\n      self.assertEqual(normalized_golden, normalized_actual)\n\n\nif __name__ == '__main__':\n  if NO_STACKTRACE_SUPPORT_FLAG in sys.argv:\n    # unittest.main() can't handle unknown flags\n    sys.argv.remove(NO_STACKTRACE_SUPPORT_FLAG)\n\n  if GENGOLDEN_FLAG in sys.argv:\n    if CAN_GENERATE_GOLDEN_FILE:\n      output = GetOutputOfAllCommands()\n      golden_file = open(GOLDEN_PATH, 'wb')\n      golden_file.write(output)\n      golden_file.close()\n    else:\n      message = (\n          \"\"\"Unable to write a golden file when compiled in an environment\nthat does not support all the required features (death tests,\ntyped tests, stack traces, and multiple threads).\nPlease build this test and generate the golden file using Blaze on Linux.\"\"\")\n\n      sys.stderr.write(message)\n      sys.exit(1)\n  else:\n    gtest_test_utils.Main()\n"
  },
  {
    "path": "test/googletest/test/googletest-output-test_.cc",
    "content": "// Copyright 2005, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n//\n// The purpose of this file is to generate Google Test output under\n// various conditions.  The output will then be verified by\n// googletest-output-test.py to ensure that Google Test generates the\n// desired messages.  
Therefore, most tests in this file are MEANT TO\n// FAIL.\n\n#include \"gtest/gtest-spi.h\"\n#include \"gtest/gtest.h\"\n#include \"src/gtest-internal-inl.h\"\n\n#include <stdlib.h>\n\n#if _MSC_VER\nGTEST_DISABLE_MSC_WARNINGS_PUSH_(4127 /* conditional expression is constant */)\n#endif  //  _MSC_VER\n\n#if GTEST_IS_THREADSAFE\nusing testing::ScopedFakeTestPartResultReporter;\nusing testing::TestPartResultArray;\n\nusing testing::internal::Notification;\nusing testing::internal::ThreadWithParam;\n#endif\n\nnamespace posix = ::testing::internal::posix;\n\n// Tests catching fatal failures.\n\n// A subroutine used by the following test.\nvoid TestEq1(int x) {\n  ASSERT_EQ(1, x);\n}\n\n// This function calls a test subroutine, catches the fatal failure it\n// generates, and then returns early.\nvoid TryTestSubroutine() {\n  // Calls a subrountine that yields a fatal failure.\n  TestEq1(2);\n\n  // Catches the fatal failure and aborts the test.\n  //\n  // The testing::Test:: prefix is necessary when calling\n  // HasFatalFailure() outside of a TEST, TEST_F, or test fixture.\n  if (testing::Test::HasFatalFailure()) return;\n\n  // If we get here, something is wrong.\n  FAIL() << \"This should never be reached.\";\n}\n\nTEST(PassingTest, PassingTest1) {\n}\n\nTEST(PassingTest, PassingTest2) {\n}\n\n// Tests that parameters of failing parameterized tests are printed in the\n// failing test summary.\nclass FailingParamTest : public testing::TestWithParam<int> {};\n\nTEST_P(FailingParamTest, Fails) {\n  EXPECT_EQ(1, GetParam());\n}\n\n// This generates a test which will fail. 
Google Test is expected to print\n// its parameter when it outputs the list of all failed tests.\nINSTANTIATE_TEST_SUITE_P(PrintingFailingParams,\n                         FailingParamTest,\n                         testing::Values(2));\n\nstatic const char kGoldenString[] = \"\\\"Line\\0 1\\\"\\nLine 2\";\n\nTEST(NonfatalFailureTest, EscapesStringOperands) {\n  std::string actual = \"actual \\\"string\\\"\";\n  EXPECT_EQ(kGoldenString, actual);\n\n  const char* golden = kGoldenString;\n  EXPECT_EQ(golden, actual);\n}\n\nTEST(NonfatalFailureTest, DiffForLongStrings) {\n  std::string golden_str(kGoldenString, sizeof(kGoldenString) - 1);\n  EXPECT_EQ(golden_str, \"Line 2\");\n}\n\n// Tests catching a fatal failure in a subroutine.\nTEST(FatalFailureTest, FatalFailureInSubroutine) {\n  printf(\"(expecting a failure that x should be 1)\\n\");\n\n  TryTestSubroutine();\n}\n\n// Tests catching a fatal failure in a nested subroutine.\nTEST(FatalFailureTest, FatalFailureInNestedSubroutine) {\n  printf(\"(expecting a failure that x should be 1)\\n\");\n\n  // Calls a subrountine that yields a fatal failure.\n  TryTestSubroutine();\n\n  // Catches the fatal failure and aborts the test.\n  //\n  // When calling HasFatalFailure() inside a TEST, TEST_F, or test\n  // fixture, the testing::Test:: prefix is not needed.\n  if (HasFatalFailure()) return;\n\n  // If we get here, something is wrong.\n  FAIL() << \"This should never be reached.\";\n}\n\n// Tests HasFatalFailure() after a failed EXPECT check.\nTEST(FatalFailureTest, NonfatalFailureInSubroutine) {\n  printf(\"(expecting a failure on false)\\n\");\n  EXPECT_TRUE(false);  // Generates a nonfatal failure\n  ASSERT_FALSE(HasFatalFailure());  // This should succeed.\n}\n\n// Tests interleaving user logging and Google Test assertions.\nTEST(LoggingTest, InterleavingLoggingAndAssertions) {\n  static const int a[4] = {\n    3, 9, 2, 6\n  };\n\n  printf(\"(expecting 2 failures on (3) >= (a[i]))\\n\");\n  for (int i = 0; i < 
static_cast<int>(sizeof(a)/sizeof(*a)); i++) {\n    printf(\"i == %d\\n\", i);\n    EXPECT_GE(3, a[i]);\n  }\n}\n\n// Tests the SCOPED_TRACE macro.\n\n// A helper function for testing SCOPED_TRACE.\nvoid SubWithoutTrace(int n) {\n  EXPECT_EQ(1, n);\n  ASSERT_EQ(2, n);\n}\n\n// Another helper function for testing SCOPED_TRACE.\nvoid SubWithTrace(int n) {\n  SCOPED_TRACE(testing::Message() << \"n = \" << n);\n\n  SubWithoutTrace(n);\n}\n\nTEST(SCOPED_TRACETest, AcceptedValues) {\n  SCOPED_TRACE(\"literal string\");\n  SCOPED_TRACE(std::string(\"std::string\"));\n  SCOPED_TRACE(1337);  // streamable type\n  const char* null_value = nullptr;\n  SCOPED_TRACE(null_value);\n\n  ADD_FAILURE() << \"Just checking that all these values work fine.\";\n}\n\n// Tests that SCOPED_TRACE() obeys lexical scopes.\nTEST(SCOPED_TRACETest, ObeysScopes) {\n  printf(\"(expected to fail)\\n\");\n\n  // There should be no trace before SCOPED_TRACE() is invoked.\n  ADD_FAILURE() << \"This failure is expected, and shouldn't have a trace.\";\n\n  {\n    SCOPED_TRACE(\"Expected trace\");\n    // After SCOPED_TRACE(), a failure in the current scope should contain\n    // the trace.\n    ADD_FAILURE() << \"This failure is expected, and should have a trace.\";\n  }\n\n  // Once the control leaves the scope of the SCOPED_TRACE(), there\n  // should be no trace again.\n  ADD_FAILURE() << \"This failure is expected, and shouldn't have a trace.\";\n}\n\n// Tests that SCOPED_TRACE works inside a loop.\nTEST(SCOPED_TRACETest, WorksInLoop) {\n  printf(\"(expected to fail)\\n\");\n\n  for (int i = 1; i <= 2; i++) {\n    SCOPED_TRACE(testing::Message() << \"i = \" << i);\n\n    SubWithoutTrace(i);\n  }\n}\n\n// Tests that SCOPED_TRACE works in a subroutine.\nTEST(SCOPED_TRACETest, WorksInSubroutine) {\n  printf(\"(expected to fail)\\n\");\n\n  SubWithTrace(1);\n  SubWithTrace(2);\n}\n\n// Tests that SCOPED_TRACE can be nested.\nTEST(SCOPED_TRACETest, CanBeNested) {\n  printf(\"(expected to fail)\\n\");\n\n 
 SCOPED_TRACE(\"\");  // A trace without a message.\n\n  SubWithTrace(2);\n}\n\n// Tests that multiple SCOPED_TRACEs can be used in the same scope.\nTEST(SCOPED_TRACETest, CanBeRepeated) {\n  printf(\"(expected to fail)\\n\");\n\n  SCOPED_TRACE(\"A\");\n  ADD_FAILURE()\n      << \"This failure is expected, and should contain trace point A.\";\n\n  SCOPED_TRACE(\"B\");\n  ADD_FAILURE()\n      << \"This failure is expected, and should contain trace point A and B.\";\n\n  {\n    SCOPED_TRACE(\"C\");\n    ADD_FAILURE() << \"This failure is expected, and should \"\n                  << \"contain trace point A, B, and C.\";\n  }\n\n  SCOPED_TRACE(\"D\");\n  ADD_FAILURE() << \"This failure is expected, and should \"\n                << \"contain trace point A, B, and D.\";\n}\n\n#if GTEST_IS_THREADSAFE\n// Tests that SCOPED_TRACE()s can be used concurrently from multiple\n// threads.  Namely, an assertion should be affected by\n// SCOPED_TRACE()s in its own thread only.\n\n// Here's the sequence of actions that happen in the test:\n//\n//   Thread A (main)                | Thread B (spawned)\n//   ===============================|================================\n//   spawns thread B                |\n//   -------------------------------+--------------------------------\n//   waits for n1                   | SCOPED_TRACE(\"Trace B\");\n//                                  | generates failure #1\n//                                  | notifies n1\n//   -------------------------------+--------------------------------\n//   SCOPED_TRACE(\"Trace A\");       | waits for n2\n//   generates failure #2           |\n//   notifies n2                    |\n//   -------------------------------|--------------------------------\n//   waits for n3                   | generates failure #3\n//                                  | trace B dies\n//                                  | generates failure #4\n//                                  | notifies n3\n//   
-------------------------------|--------------------------------\n//   generates failure #5           | finishes\n//   trace A dies                   |\n//   generates failure #6           |\n//   -------------------------------|--------------------------------\n//   waits for thread B to finish   |\n\nstruct CheckPoints {\n  Notification n1;\n  Notification n2;\n  Notification n3;\n};\n\nstatic void ThreadWithScopedTrace(CheckPoints* check_points) {\n  {\n    SCOPED_TRACE(\"Trace B\");\n    ADD_FAILURE()\n        << \"Expected failure #1 (in thread B, only trace B alive).\";\n    check_points->n1.Notify();\n    check_points->n2.WaitForNotification();\n\n    ADD_FAILURE()\n        << \"Expected failure #3 (in thread B, trace A & B both alive).\";\n  }  // Trace B dies here.\n  ADD_FAILURE()\n      << \"Expected failure #4 (in thread B, only trace A alive).\";\n  check_points->n3.Notify();\n}\n\nTEST(SCOPED_TRACETest, WorksConcurrently) {\n  printf(\"(expecting 6 failures)\\n\");\n\n  CheckPoints check_points;\n  ThreadWithParam<CheckPoints*> thread(&ThreadWithScopedTrace, &check_points,\n                                       nullptr);\n  check_points.n1.WaitForNotification();\n\n  {\n    SCOPED_TRACE(\"Trace A\");\n    ADD_FAILURE()\n        << \"Expected failure #2 (in thread A, trace A & B both alive).\";\n    check_points.n2.Notify();\n    check_points.n3.WaitForNotification();\n\n    ADD_FAILURE()\n        << \"Expected failure #5 (in thread A, only trace A alive).\";\n  }  // Trace A dies here.\n  ADD_FAILURE()\n      << \"Expected failure #6 (in thread A, no trace alive).\";\n  thread.Join();\n}\n#endif  // GTEST_IS_THREADSAFE\n\n// Tests basic functionality of the ScopedTrace utility (most of its features\n// are already tested in SCOPED_TRACETest).\nTEST(ScopedTraceTest, WithExplicitFileAndLine) {\n  testing::ScopedTrace trace(\"explicit_file.cc\", 123, \"expected trace message\");\n  ADD_FAILURE() << \"Check that the trace is attached to a particular 
location.\";\n}\n\nTEST(DisabledTestsWarningTest,\n     DISABLED_AlsoRunDisabledTestsFlagSuppressesWarning) {\n  // This test body is intentionally empty.  Its sole purpose is for\n  // verifying that the --gtest_also_run_disabled_tests flag\n  // suppresses the \"YOU HAVE 12 DISABLED TESTS\" warning at the end of\n  // the test output.\n}\n\n// Tests using assertions outside of TEST and TEST_F.\n//\n// This function creates two failures intentionally.\nvoid AdHocTest() {\n  printf(\"The non-test part of the code is expected to have 2 failures.\\n\\n\");\n  EXPECT_TRUE(false);\n  EXPECT_EQ(2, 3);\n}\n\n// Runs all TESTs, all TEST_Fs, and the ad hoc test.\nint RunAllTests() {\n  AdHocTest();\n  return RUN_ALL_TESTS();\n}\n\n// Tests non-fatal failures in the fixture constructor.\nclass NonFatalFailureInFixtureConstructorTest : public testing::Test {\n protected:\n  NonFatalFailureInFixtureConstructorTest() {\n    printf(\"(expecting 5 failures)\\n\");\n    ADD_FAILURE() << \"Expected failure #1, in the test fixture c'tor.\";\n  }\n\n  ~NonFatalFailureInFixtureConstructorTest() override {\n    ADD_FAILURE() << \"Expected failure #5, in the test fixture d'tor.\";\n  }\n\n  void SetUp() override { ADD_FAILURE() << \"Expected failure #2, in SetUp().\"; }\n\n  void TearDown() override {\n    ADD_FAILURE() << \"Expected failure #4, in TearDown.\";\n  }\n};\n\nTEST_F(NonFatalFailureInFixtureConstructorTest, FailureInConstructor) {\n  ADD_FAILURE() << \"Expected failure #3, in the test body.\";\n}\n\n// Tests fatal failures in the fixture constructor.\nclass FatalFailureInFixtureConstructorTest : public testing::Test {\n protected:\n  FatalFailureInFixtureConstructorTest() {\n    printf(\"(expecting 2 failures)\\n\");\n    Init();\n  }\n\n  ~FatalFailureInFixtureConstructorTest() override {\n    ADD_FAILURE() << \"Expected failure #2, in the test fixture d'tor.\";\n  }\n\n  void SetUp() override {\n    ADD_FAILURE() << \"UNEXPECTED failure in SetUp().  
\"\n                  << \"We should never get here, as the test fixture c'tor \"\n                  << \"had a fatal failure.\";\n  }\n\n  void TearDown() override {\n    ADD_FAILURE() << \"UNEXPECTED failure in TearDown().  \"\n                  << \"We should never get here, as the test fixture c'tor \"\n                  << \"had a fatal failure.\";\n  }\n\n private:\n  void Init() {\n    FAIL() << \"Expected failure #1, in the test fixture c'tor.\";\n  }\n};\n\nTEST_F(FatalFailureInFixtureConstructorTest, FailureInConstructor) {\n  ADD_FAILURE() << \"UNEXPECTED failure in the test body.  \"\n                << \"We should never get here, as the test fixture c'tor \"\n                << \"had a fatal failure.\";\n}\n\n// Tests non-fatal failures in SetUp().\nclass NonFatalFailureInSetUpTest : public testing::Test {\n protected:\n  ~NonFatalFailureInSetUpTest() override { Deinit(); }\n\n  void SetUp() override {\n    printf(\"(expecting 4 failures)\\n\");\n    ADD_FAILURE() << \"Expected failure #1, in SetUp().\";\n  }\n\n  void TearDown() override { FAIL() << \"Expected failure #3, in TearDown().\"; }\n\n private:\n  void Deinit() {\n    FAIL() << \"Expected failure #4, in the test fixture d'tor.\";\n  }\n};\n\nTEST_F(NonFatalFailureInSetUpTest, FailureInSetUp) {\n  FAIL() << \"Expected failure #2, in the test function.\";\n}\n\n// Tests fatal failures in SetUp().\nclass FatalFailureInSetUpTest : public testing::Test {\n protected:\n  ~FatalFailureInSetUpTest() override { Deinit(); }\n\n  void SetUp() override {\n    printf(\"(expecting 3 failures)\\n\");\n    FAIL() << \"Expected failure #1, in SetUp().\";\n  }\n\n  void TearDown() override { FAIL() << \"Expected failure #2, in TearDown().\"; }\n\n private:\n  void Deinit() {\n    FAIL() << \"Expected failure #3, in the test fixture d'tor.\";\n  }\n};\n\nTEST_F(FatalFailureInSetUpTest, FailureInSetUp) {\n  FAIL() << \"UNEXPECTED failure in the test function.  
\"\n         << \"We should never get here, as SetUp() failed.\";\n}\n\nTEST(AddFailureAtTest, MessageContainsSpecifiedFileAndLineNumber) {\n  ADD_FAILURE_AT(\"foo.cc\", 42) << \"Expected nonfatal failure in foo.cc\";\n}\n\nTEST(GtestFailAtTest, MessageContainsSpecifiedFileAndLineNumber) {\n  GTEST_FAIL_AT(\"foo.cc\", 42) << \"Expected fatal failure in foo.cc\";\n}\n\n#if GTEST_IS_THREADSAFE\n\n// A unary function that may die.\nvoid DieIf(bool should_die) {\n  GTEST_CHECK_(!should_die) << \" - death inside DieIf().\";\n}\n\n// Tests running death tests in a multi-threaded context.\n\n// Used for coordination between the main and the spawn thread.\nstruct SpawnThreadNotifications {\n  SpawnThreadNotifications() {}\n\n  Notification spawn_thread_started;\n  Notification spawn_thread_ok_to_terminate;\n\n private:\n  GTEST_DISALLOW_COPY_AND_ASSIGN_(SpawnThreadNotifications);\n};\n\n// The function to be executed in the thread spawn by the\n// MultipleThreads test (below).\nstatic void ThreadRoutine(SpawnThreadNotifications* notifications) {\n  // Signals the main thread that this thread has started.\n  notifications->spawn_thread_started.Notify();\n\n  // Waits for permission to finish from the main thread.\n  notifications->spawn_thread_ok_to_terminate.WaitForNotification();\n}\n\n// This is a death-test test, but it's not named with a DeathTest\n// suffix.  
It starts threads which might interfere with later\n// death tests, so it must run after all other death tests.\nclass DeathTestAndMultiThreadsTest : public testing::Test {\n protected:\n  // Starts a thread and waits for it to begin.\n  void SetUp() override {\n    thread_.reset(new ThreadWithParam<SpawnThreadNotifications*>(\n        &ThreadRoutine, &notifications_, nullptr));\n    notifications_.spawn_thread_started.WaitForNotification();\n  }\n  // Tells the thread to finish, and reaps it.\n  // Depending on the version of the thread library in use,\n  // a manager thread might still be left running that will interfere\n  // with later death tests.  This is unfortunate, but this class\n  // cleans up after itself as best it can.\n  void TearDown() override {\n    notifications_.spawn_thread_ok_to_terminate.Notify();\n  }\n\n private:\n  SpawnThreadNotifications notifications_;\n  std::unique_ptr<ThreadWithParam<SpawnThreadNotifications*> > thread_;\n};\n\n#endif  // GTEST_IS_THREADSAFE\n\n// The MixedUpTestSuiteTest test case verifies that Google Test will fail a\n// test if it uses a different fixture class than what other tests in\n// the same test case use.  
It deliberately contains two fixture\n// classes with the same name but defined in different namespaces.\n\n// The MixedUpTestSuiteWithSameTestNameTest test case verifies that\n// when the user defines two tests with the same test case name AND\n// same test name (but in different namespaces), the second test will\n// fail.\n\nnamespace foo {\n\nclass MixedUpTestSuiteTest : public testing::Test {\n};\n\nTEST_F(MixedUpTestSuiteTest, FirstTestFromNamespaceFoo) {}\nTEST_F(MixedUpTestSuiteTest, SecondTestFromNamespaceFoo) {}\n\nclass MixedUpTestSuiteWithSameTestNameTest : public testing::Test {\n};\n\nTEST_F(MixedUpTestSuiteWithSameTestNameTest,\n       TheSecondTestWithThisNameShouldFail) {}\n\n}  // namespace foo\n\nnamespace bar {\n\nclass MixedUpTestSuiteTest : public testing::Test {\n};\n\n// The following two tests are expected to fail.  We rely on the\n// golden file to check that Google Test generates the right error message.\nTEST_F(MixedUpTestSuiteTest, ThisShouldFail) {}\nTEST_F(MixedUpTestSuiteTest, ThisShouldFailToo) {}\n\nclass MixedUpTestSuiteWithSameTestNameTest : public testing::Test {\n};\n\n// Expected to fail.  We rely on the golden file to check that Google Test\n// generates the right error message.\nTEST_F(MixedUpTestSuiteWithSameTestNameTest,\n       TheSecondTestWithThisNameShouldFail) {}\n\n}  // namespace bar\n\n// The following two test cases verify that Google Test catches the user\n// error of mixing TEST and TEST_F in the same test case.  The first\n// test case checks the scenario where TEST_F appears before TEST, and\n// the second one checks where TEST appears before TEST_F.\n\nclass TEST_F_before_TEST_in_same_test_case : public testing::Test {\n};\n\nTEST_F(TEST_F_before_TEST_in_same_test_case, DefinedUsingTEST_F) {}\n\n// Expected to fail.  
We rely on the golden file to check that Google Test\n// generates the right error message.\nTEST(TEST_F_before_TEST_in_same_test_case, DefinedUsingTESTAndShouldFail) {}\n\nclass TEST_before_TEST_F_in_same_test_case : public testing::Test {\n};\n\nTEST(TEST_before_TEST_F_in_same_test_case, DefinedUsingTEST) {}\n\n// Expected to fail.  We rely on the golden file to check that Google Test\n// generates the right error message.\nTEST_F(TEST_before_TEST_F_in_same_test_case, DefinedUsingTEST_FAndShouldFail) {\n}\n\n// Used for testing EXPECT_NONFATAL_FAILURE() and EXPECT_FATAL_FAILURE().\nint global_integer = 0;\n\n// Tests that EXPECT_NONFATAL_FAILURE() can reference global variables.\nTEST(ExpectNonfatalFailureTest, CanReferenceGlobalVariables) {\n  global_integer = 0;\n  EXPECT_NONFATAL_FAILURE({\n    EXPECT_EQ(1, global_integer) << \"Expected non-fatal failure.\";\n  }, \"Expected non-fatal failure.\");\n}\n\n// Tests that EXPECT_NONFATAL_FAILURE() can reference local variables\n// (static or not).\nTEST(ExpectNonfatalFailureTest, CanReferenceLocalVariables) {\n  int m = 0;\n  static int n;\n  n = 1;\n  EXPECT_NONFATAL_FAILURE({\n    EXPECT_EQ(m, n) << \"Expected non-fatal failure.\";\n  }, \"Expected non-fatal failure.\");\n}\n\n// Tests that EXPECT_NONFATAL_FAILURE() succeeds when there is exactly\n// one non-fatal failure and no fatal failure.\nTEST(ExpectNonfatalFailureTest, SucceedsWhenThereIsOneNonfatalFailure) {\n  EXPECT_NONFATAL_FAILURE({\n    ADD_FAILURE() << \"Expected non-fatal failure.\";\n  }, \"Expected non-fatal failure.\");\n}\n\n// Tests that EXPECT_NONFATAL_FAILURE() fails when there is no\n// non-fatal failure.\nTEST(ExpectNonfatalFailureTest, FailsWhenThereIsNoNonfatalFailure) {\n  printf(\"(expecting a failure)\\n\");\n  EXPECT_NONFATAL_FAILURE({\n  }, \"\");\n}\n\n// Tests that EXPECT_NONFATAL_FAILURE() fails when there are two\n// non-fatal failures.\nTEST(ExpectNonfatalFailureTest, FailsWhenThereAreTwoNonfatalFailures) {\n  
printf(\"(expecting a failure)\\n\");\n  EXPECT_NONFATAL_FAILURE({\n    ADD_FAILURE() << \"Expected non-fatal failure 1.\";\n    ADD_FAILURE() << \"Expected non-fatal failure 2.\";\n  }, \"\");\n}\n\n// Tests that EXPECT_NONFATAL_FAILURE() fails when there is one fatal\n// failure.\nTEST(ExpectNonfatalFailureTest, FailsWhenThereIsOneFatalFailure) {\n  printf(\"(expecting a failure)\\n\");\n  EXPECT_NONFATAL_FAILURE({\n    FAIL() << \"Expected fatal failure.\";\n  }, \"\");\n}\n\n// Tests that EXPECT_NONFATAL_FAILURE() fails when the statement being\n// tested returns.\nTEST(ExpectNonfatalFailureTest, FailsWhenStatementReturns) {\n  printf(\"(expecting a failure)\\n\");\n  EXPECT_NONFATAL_FAILURE({\n    return;\n  }, \"\");\n}\n\n#if GTEST_HAS_EXCEPTIONS\n\n// Tests that EXPECT_NONFATAL_FAILURE() fails when the statement being\n// tested throws.\nTEST(ExpectNonfatalFailureTest, FailsWhenStatementThrows) {\n  printf(\"(expecting a failure)\\n\");\n  try {\n    EXPECT_NONFATAL_FAILURE({\n      throw 0;\n    }, \"\");\n  } catch(int) {  // NOLINT\n  }\n}\n\n#endif  // GTEST_HAS_EXCEPTIONS\n\n// Tests that EXPECT_FATAL_FAILURE() can reference global variables.\nTEST(ExpectFatalFailureTest, CanReferenceGlobalVariables) {\n  global_integer = 0;\n  EXPECT_FATAL_FAILURE({\n    ASSERT_EQ(1, global_integer) << \"Expected fatal failure.\";\n  }, \"Expected fatal failure.\");\n}\n\n// Tests that EXPECT_FATAL_FAILURE() can reference local static\n// variables.\nTEST(ExpectFatalFailureTest, CanReferenceLocalStaticVariables) {\n  static int n;\n  n = 1;\n  EXPECT_FATAL_FAILURE({\n    ASSERT_EQ(0, n) << \"Expected fatal failure.\";\n  }, \"Expected fatal failure.\");\n}\n\n// Tests that EXPECT_FATAL_FAILURE() succeeds when there is exactly\n// one fatal failure and no non-fatal failure.\nTEST(ExpectFatalFailureTest, SucceedsWhenThereIsOneFatalFailure) {\n  EXPECT_FATAL_FAILURE({\n    FAIL() << \"Expected fatal failure.\";\n  }, \"Expected fatal failure.\");\n}\n\n// Tests that 
EXPECT_FATAL_FAILURE() fails when there is no fatal\n// failure.\nTEST(ExpectFatalFailureTest, FailsWhenThereIsNoFatalFailure) {\n  printf(\"(expecting a failure)\\n\");\n  EXPECT_FATAL_FAILURE({\n  }, \"\");\n}\n\n// A helper for generating a fatal failure.\nvoid FatalFailure() {\n  FAIL() << \"Expected fatal failure.\";\n}\n\n// Tests that EXPECT_FATAL_FAILURE() fails when there are two\n// fatal failures.\nTEST(ExpectFatalFailureTest, FailsWhenThereAreTwoFatalFailures) {\n  printf(\"(expecting a failure)\\n\");\n  EXPECT_FATAL_FAILURE({\n    FatalFailure();\n    FatalFailure();\n  }, \"\");\n}\n\n// Tests that EXPECT_FATAL_FAILURE() fails when there is one non-fatal\n// failure.\nTEST(ExpectFatalFailureTest, FailsWhenThereIsOneNonfatalFailure) {\n  printf(\"(expecting a failure)\\n\");\n  EXPECT_FATAL_FAILURE({\n    ADD_FAILURE() << \"Expected non-fatal failure.\";\n  }, \"\");\n}\n\n// Tests that EXPECT_FATAL_FAILURE() fails when the statement being\n// tested returns.\nTEST(ExpectFatalFailureTest, FailsWhenStatementReturns) {\n  printf(\"(expecting a failure)\\n\");\n  EXPECT_FATAL_FAILURE({\n    return;\n  }, \"\");\n}\n\n#if GTEST_HAS_EXCEPTIONS\n\n// Tests that EXPECT_FATAL_FAILURE() fails when the statement being\n// tested throws.\nTEST(ExpectFatalFailureTest, FailsWhenStatementThrows) {\n  printf(\"(expecting a failure)\\n\");\n  try {\n    EXPECT_FATAL_FAILURE({\n      throw 0;\n    }, \"\");\n  } catch(int) {  // NOLINT\n  }\n}\n\n#endif  // GTEST_HAS_EXCEPTIONS\n\n// This #ifdef block tests the output of value-parameterized tests.\n\nstd::string ParamNameFunc(const testing::TestParamInfo<std::string>& info) {\n  return info.param;\n}\n\nclass ParamTest : public testing::TestWithParam<std::string> {\n};\n\nTEST_P(ParamTest, Success) {\n  EXPECT_EQ(\"a\", GetParam());\n}\n\nTEST_P(ParamTest, Failure) {\n  EXPECT_EQ(\"b\", GetParam()) << \"Expected failure\";\n}\n\nINSTANTIATE_TEST_SUITE_P(PrintingStrings,\n                         ParamTest,\n           
              testing::Values(std::string(\"a\")),\n                         ParamNameFunc);\n\n// This #ifdef block tests the output of typed tests.\n#if GTEST_HAS_TYPED_TEST\n\ntemplate <typename T>\nclass TypedTest : public testing::Test {\n};\n\nTYPED_TEST_SUITE(TypedTest, testing::Types<int>);\n\nTYPED_TEST(TypedTest, Success) {\n  EXPECT_EQ(0, TypeParam());\n}\n\nTYPED_TEST(TypedTest, Failure) {\n  EXPECT_EQ(1, TypeParam()) << \"Expected failure\";\n}\n\ntypedef testing::Types<char, int> TypesForTestWithNames;\n\ntemplate <typename T>\nclass TypedTestWithNames : public testing::Test {};\n\nclass TypedTestNames {\n public:\n  template <typename T>\n  static std::string GetName(int i) {\n    if (std::is_same<T, char>::value)\n      return std::string(\"char\") + ::testing::PrintToString(i);\n    if (std::is_same<T, int>::value)\n      return std::string(\"int\") + ::testing::PrintToString(i);\n  }\n};\n\nTYPED_TEST_SUITE(TypedTestWithNames, TypesForTestWithNames, TypedTestNames);\n\nTYPED_TEST(TypedTestWithNames, Success) {}\n\nTYPED_TEST(TypedTestWithNames, Failure) { FAIL(); }\n\n#endif  // GTEST_HAS_TYPED_TEST\n\n// This #ifdef block tests the output of type-parameterized tests.\n#if GTEST_HAS_TYPED_TEST_P\n\ntemplate <typename T>\nclass TypedTestP : public testing::Test {\n};\n\nTYPED_TEST_SUITE_P(TypedTestP);\n\nTYPED_TEST_P(TypedTestP, Success) {\n  EXPECT_EQ(0U, TypeParam());\n}\n\nTYPED_TEST_P(TypedTestP, Failure) {\n  EXPECT_EQ(1U, TypeParam()) << \"Expected failure\";\n}\n\nREGISTER_TYPED_TEST_SUITE_P(TypedTestP, Success, Failure);\n\ntypedef testing::Types<unsigned char, unsigned int> UnsignedTypes;\nINSTANTIATE_TYPED_TEST_SUITE_P(Unsigned, TypedTestP, UnsignedTypes);\n\nclass TypedTestPNames {\n public:\n  template <typename T>\n  static std::string GetName(int i) {\n    if (std::is_same<T, unsigned char>::value) {\n      return std::string(\"unsignedChar\") + ::testing::PrintToString(i);\n    }\n    if (std::is_same<T, unsigned int>::value) {\n     
 return std::string(\"unsignedInt\") + ::testing::PrintToString(i);\n    }\n  }\n};\n\nINSTANTIATE_TYPED_TEST_SUITE_P(UnsignedCustomName, TypedTestP, UnsignedTypes,\n                              TypedTestPNames);\n\n#endif  // GTEST_HAS_TYPED_TEST_P\n\n#if GTEST_HAS_DEATH_TEST\n\n// We rely on the golden file to verify that tests whose test case\n// name ends with DeathTest are run first.\n\nTEST(ADeathTest, ShouldRunFirst) {\n}\n\n# if GTEST_HAS_TYPED_TEST\n\n// We rely on the golden file to verify that typed tests whose test\n// case name ends with DeathTest are run first.\n\ntemplate <typename T>\nclass ATypedDeathTest : public testing::Test {\n};\n\ntypedef testing::Types<int, double> NumericTypes;\nTYPED_TEST_SUITE(ATypedDeathTest, NumericTypes);\n\nTYPED_TEST(ATypedDeathTest, ShouldRunFirst) {\n}\n\n# endif  // GTEST_HAS_TYPED_TEST\n\n# if GTEST_HAS_TYPED_TEST_P\n\n\n// We rely on the golden file to verify that type-parameterized tests\n// whose test case name ends with DeathTest are run first.\n\ntemplate <typename T>\nclass ATypeParamDeathTest : public testing::Test {\n};\n\nTYPED_TEST_SUITE_P(ATypeParamDeathTest);\n\nTYPED_TEST_P(ATypeParamDeathTest, ShouldRunFirst) {\n}\n\nREGISTER_TYPED_TEST_SUITE_P(ATypeParamDeathTest, ShouldRunFirst);\n\nINSTANTIATE_TYPED_TEST_SUITE_P(My, ATypeParamDeathTest, NumericTypes);\n\n# endif  // GTEST_HAS_TYPED_TEST_P\n\n#endif  // GTEST_HAS_DEATH_TEST\n\n// Tests various failure conditions of\n// EXPECT_{,NON}FATAL_FAILURE{,_ON_ALL_THREADS}.\nclass ExpectFailureTest : public testing::Test {\n public:  // Must be public and not protected due to a bug in g++ 3.4.2.\n  enum FailureMode {\n    FATAL_FAILURE,\n    NONFATAL_FAILURE\n  };\n  static void AddFailure(FailureMode failure) {\n    if (failure == FATAL_FAILURE) {\n      FAIL() << \"Expected fatal failure.\";\n    } else {\n      ADD_FAILURE() << \"Expected non-fatal failure.\";\n    }\n  }\n};\n\nTEST_F(ExpectFailureTest, ExpectFatalFailure) {\n  // Expected fatal 
failure, but succeeds.\n  printf(\"(expecting 1 failure)\\n\");\n  EXPECT_FATAL_FAILURE(SUCCEED(), \"Expected fatal failure.\");\n  // Expected fatal failure, but got a non-fatal failure.\n  printf(\"(expecting 1 failure)\\n\");\n  EXPECT_FATAL_FAILURE(AddFailure(NONFATAL_FAILURE), \"Expected non-fatal \"\n                       \"failure.\");\n  // Wrong message.\n  printf(\"(expecting 1 failure)\\n\");\n  EXPECT_FATAL_FAILURE(AddFailure(FATAL_FAILURE), \"Some other fatal failure \"\n                       \"expected.\");\n}\n\nTEST_F(ExpectFailureTest, ExpectNonFatalFailure) {\n  // Expected non-fatal failure, but succeeds.\n  printf(\"(expecting 1 failure)\\n\");\n  EXPECT_NONFATAL_FAILURE(SUCCEED(), \"Expected non-fatal failure.\");\n  // Expected non-fatal failure, but got a fatal failure.\n  printf(\"(expecting 1 failure)\\n\");\n  EXPECT_NONFATAL_FAILURE(AddFailure(FATAL_FAILURE), \"Expected fatal failure.\");\n  // Wrong message.\n  printf(\"(expecting 1 failure)\\n\");\n  EXPECT_NONFATAL_FAILURE(AddFailure(NONFATAL_FAILURE), \"Some other non-fatal \"\n                          \"failure.\");\n}\n\n#if GTEST_IS_THREADSAFE\n\nclass ExpectFailureWithThreadsTest : public ExpectFailureTest {\n protected:\n  static void AddFailureInOtherThread(FailureMode failure) {\n    ThreadWithParam<FailureMode> thread(&AddFailure, failure, nullptr);\n    thread.Join();\n  }\n};\n\nTEST_F(ExpectFailureWithThreadsTest, ExpectFatalFailure) {\n  // We only intercept the current thread.\n  printf(\"(expecting 2 failures)\\n\");\n  EXPECT_FATAL_FAILURE(AddFailureInOtherThread(FATAL_FAILURE),\n                       \"Expected fatal failure.\");\n}\n\nTEST_F(ExpectFailureWithThreadsTest, ExpectNonFatalFailure) {\n  // We only intercept the current thread.\n  printf(\"(expecting 2 failures)\\n\");\n  EXPECT_NONFATAL_FAILURE(AddFailureInOtherThread(NONFATAL_FAILURE),\n                          \"Expected non-fatal failure.\");\n}\n\ntypedef ExpectFailureWithThreadsTest 
ScopedFakeTestPartResultReporterTest;\n\n// Tests that the ScopedFakeTestPartResultReporter only catches failures from\n// the current thread if it is instantiated with INTERCEPT_ONLY_CURRENT_THREAD.\nTEST_F(ScopedFakeTestPartResultReporterTest, InterceptOnlyCurrentThread) {\n  printf(\"(expecting 2 failures)\\n\");\n  TestPartResultArray results;\n  {\n    ScopedFakeTestPartResultReporter reporter(\n        ScopedFakeTestPartResultReporter::INTERCEPT_ONLY_CURRENT_THREAD,\n        &results);\n    AddFailureInOtherThread(FATAL_FAILURE);\n    AddFailureInOtherThread(NONFATAL_FAILURE);\n  }\n  // The two failures should not have been intercepted.\n  EXPECT_EQ(0, results.size()) << \"This shouldn't fail.\";\n}\n\n#endif  // GTEST_IS_THREADSAFE\n\nTEST_F(ExpectFailureTest, ExpectFatalFailureOnAllThreads) {\n  // Expected fatal failure, but succeeds.\n  printf(\"(expecting 1 failure)\\n\");\n  EXPECT_FATAL_FAILURE_ON_ALL_THREADS(SUCCEED(), \"Expected fatal failure.\");\n  // Expected fatal failure, but got a non-fatal failure.\n  printf(\"(expecting 1 failure)\\n\");\n  EXPECT_FATAL_FAILURE_ON_ALL_THREADS(AddFailure(NONFATAL_FAILURE),\n                                      \"Expected non-fatal failure.\");\n  // Wrong message.\n  printf(\"(expecting 1 failure)\\n\");\n  EXPECT_FATAL_FAILURE_ON_ALL_THREADS(AddFailure(FATAL_FAILURE),\n                                      \"Some other fatal failure expected.\");\n}\n\nTEST_F(ExpectFailureTest, ExpectNonFatalFailureOnAllThreads) {\n  // Expected non-fatal failure, but succeeds.\n  printf(\"(expecting 1 failure)\\n\");\n  EXPECT_NONFATAL_FAILURE_ON_ALL_THREADS(SUCCEED(), \"Expected non-fatal \"\n                                         \"failure.\");\n  // Expected non-fatal failure, but got a fatal failure.\n  printf(\"(expecting 1 failure)\\n\");\n  EXPECT_NONFATAL_FAILURE_ON_ALL_THREADS(AddFailure(FATAL_FAILURE),\n                                         \"Expected fatal failure.\");\n  // Wrong message.\n  
printf(\"(expecting 1 failure)\\n\");\n  EXPECT_NONFATAL_FAILURE_ON_ALL_THREADS(AddFailure(NONFATAL_FAILURE),\n                                         \"Some other non-fatal failure.\");\n}\n\nclass DynamicFixture : public testing::Test {\n protected:\n  DynamicFixture() { printf(\"DynamicFixture()\\n\"); }\n  ~DynamicFixture() override { printf(\"~DynamicFixture()\\n\"); }\n  void SetUp() override { printf(\"DynamicFixture::SetUp\\n\"); }\n  void TearDown() override { printf(\"DynamicFixture::TearDown\\n\"); }\n\n  static void SetUpTestSuite() { printf(\"DynamicFixture::SetUpTestSuite\\n\"); }\n  static void TearDownTestSuite() {\n    printf(\"DynamicFixture::TearDownTestSuite\\n\");\n  }\n};\n\ntemplate <bool Pass>\nclass DynamicTest : public DynamicFixture {\n public:\n  void TestBody() override { EXPECT_TRUE(Pass); }\n};\n\nauto dynamic_test = (\n    // Register two tests with the same fixture correctly.\n    testing::RegisterTest(\n        \"DynamicFixture\", \"DynamicTestPass\", nullptr, nullptr, __FILE__,\n        __LINE__, []() -> DynamicFixture* { return new DynamicTest<true>; }),\n    testing::RegisterTest(\n        \"DynamicFixture\", \"DynamicTestFail\", nullptr, nullptr, __FILE__,\n        __LINE__, []() -> DynamicFixture* { return new DynamicTest<false>; }),\n\n    // Register the same fixture with another name. 
That's fine.\n    testing::RegisterTest(\n        \"DynamicFixtureAnotherName\", \"DynamicTestPass\", nullptr, nullptr,\n        __FILE__, __LINE__,\n        []() -> DynamicFixture* { return new DynamicTest<true>; }),\n\n    // Register two tests with the same fixture incorrectly.\n    testing::RegisterTest(\n        \"BadDynamicFixture1\", \"FixtureBase\", nullptr, nullptr, __FILE__,\n        __LINE__, []() -> DynamicFixture* { return new DynamicTest<true>; }),\n    testing::RegisterTest(\n        \"BadDynamicFixture1\", \"TestBase\", nullptr, nullptr, __FILE__, __LINE__,\n        []() -> testing::Test* { return new DynamicTest<true>; }),\n\n    // Register two tests with the same fixture incorrectly by ommiting the\n    // return type.\n    testing::RegisterTest(\n        \"BadDynamicFixture2\", \"FixtureBase\", nullptr, nullptr, __FILE__,\n        __LINE__, []() -> DynamicFixture* { return new DynamicTest<true>; }),\n    testing::RegisterTest(\"BadDynamicFixture2\", \"Derived\", nullptr, nullptr,\n                          __FILE__, __LINE__,\n                          []() { return new DynamicTest<true>; }));\n\n// Two test environments for testing testing::AddGlobalTestEnvironment().\n\nclass FooEnvironment : public testing::Environment {\n public:\n  void SetUp() override { printf(\"%s\", \"FooEnvironment::SetUp() called.\\n\"); }\n\n  void TearDown() override {\n    printf(\"%s\", \"FooEnvironment::TearDown() called.\\n\");\n    FAIL() << \"Expected fatal failure.\";\n  }\n};\n\nclass BarEnvironment : public testing::Environment {\n public:\n  void SetUp() override { printf(\"%s\", \"BarEnvironment::SetUp() called.\\n\"); }\n\n  void TearDown() override {\n    printf(\"%s\", \"BarEnvironment::TearDown() called.\\n\");\n    ADD_FAILURE() << \"Expected non-fatal failure.\";\n  }\n};\n\n// The main function.\n//\n// The idea is to use Google Test to run all the tests we have defined (some\n// of them are intended to fail), and then compare the test results\n// 
with the \"golden\" file.\nint main(int argc, char **argv) {\n  testing::GTEST_FLAG(print_time) = false;\n\n  // We just run the tests, knowing some of them are intended to fail.\n  // We will use a separate Python script to compare the output of\n  // this program with the golden file.\n\n  // It's hard to test InitGoogleTest() directly, as it has many\n  // global side effects.  The following line serves as a sanity test\n  // for it.\n  testing::InitGoogleTest(&argc, argv);\n  bool internal_skip_environment_and_ad_hoc_tests =\n      std::count(argv, argv + argc,\n                 std::string(\"internal_skip_environment_and_ad_hoc_tests\")) > 0;\n\n#if GTEST_HAS_DEATH_TEST\n  if (testing::internal::GTEST_FLAG(internal_run_death_test) != \"\") {\n    // Skip the usual output capturing if we're running as the child\n    // process of an threadsafe-style death test.\n# if GTEST_OS_WINDOWS\n    posix::FReopen(\"nul:\", \"w\", stdout);\n# else\n    posix::FReopen(\"/dev/null\", \"w\", stdout);\n# endif  // GTEST_OS_WINDOWS\n    return RUN_ALL_TESTS();\n  }\n#endif  // GTEST_HAS_DEATH_TEST\n\n  if (internal_skip_environment_and_ad_hoc_tests)\n    return RUN_ALL_TESTS();\n\n  // Registers two global test environments.\n  // The golden file verifies that they are set up in the order they\n  // are registered, and torn down in the reverse order.\n  testing::AddGlobalTestEnvironment(new FooEnvironment);\n  testing::AddGlobalTestEnvironment(new BarEnvironment);\n#if _MSC_VER\nGTEST_DISABLE_MSC_WARNINGS_POP_()  //  4127\n#endif  //  _MSC_VER\n  return RunAllTests();\n}\n"
  },
  {
    "path": "test/googletest/test/googletest-param-test-invalid-name1-test.py",
    "content": "#!/usr/bin/env python\n#\n# Copyright 2015 Google Inc. All rights reserved.\n#\n# Redistribution and use in source and binary forms, with or without\n# modification, are permitted provided that the following conditions are\n# met:\n#\n#     * Redistributions of source code must retain the above copyright\n# notice, this list of conditions and the following disclaimer.\n#     * Redistributions in binary form must reproduce the above\n# copyright notice, this list of conditions and the following disclaimer\n# in the documentation and/or other materials provided with the\n# distribution.\n#     * Neither the name of Google Inc. nor the names of its\n# contributors may be used to endorse or promote products derived from\n# this software without specific prior written permission.\n#\n# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n# \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\"\"\"Verifies that Google Test warns the user when not initialized properly.\"\"\"\n\nimport gtest_test_utils\n\nbinary_name = 'googletest-param-test-invalid-name1-test_'\nCOMMAND = gtest_test_utils.GetTestExecutablePath(binary_name)\n\n\ndef Assert(condition):\n  if not condition:\n    raise AssertionError\n\n\ndef TestExitCodeAndOutput(command):\n  \"\"\"Runs the given command and verifies its exit code and output.\"\"\"\n\n  err = ('Parameterized test name \\'\"InvalidWithQuotes\"\\' is invalid')\n\n  p = gtest_test_utils.Subprocess(command)\n  Assert(p.terminated_by_signal)\n\n  # Verify the output message contains appropriate output\n  Assert(err in p.output)\n\n\nclass GTestParamTestInvalidName1Test(gtest_test_utils.TestCase):\n\n  def testExitCodeAndOutput(self):\n    TestExitCodeAndOutput(COMMAND)\n\n\nif __name__ == '__main__':\n  gtest_test_utils.Main()\n"
  },
  {
    "path": "test/googletest/test/googletest-param-test-invalid-name1-test_.cc",
    "content": "// Copyright 2015, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\n#include \"gtest/gtest.h\"\n\nnamespace {\nclass DummyTest : public ::testing::TestWithParam<const char *> {};\n\nTEST_P(DummyTest, Dummy) {\n}\n\nINSTANTIATE_TEST_SUITE_P(InvalidTestName,\n                         DummyTest,\n                         ::testing::Values(\"InvalidWithQuotes\"),\n                         ::testing::PrintToStringParamName());\n\n}  // namespace\n\nint main(int argc, char *argv[]) {\n  testing::InitGoogleTest(&argc, argv);\n  return RUN_ALL_TESTS();\n}\n\n"
  },
  {
    "path": "test/googletest/test/googletest-param-test-invalid-name2-test.py",
    "content": "#!/usr/bin/env python\n#\n# Copyright 2015 Google Inc. All rights reserved.\n#\n# Redistribution and use in source and binary forms, with or without\n# modification, are permitted provided that the following conditions are\n# met:\n#\n#     * Redistributions of source code must retain the above copyright\n# notice, this list of conditions and the following disclaimer.\n#     * Redistributions in binary form must reproduce the above\n# copyright notice, this list of conditions and the following disclaimer\n# in the documentation and/or other materials provided with the\n# distribution.\n#     * Neither the name of Google Inc. nor the names of its\n# contributors may be used to endorse or promote products derived from\n# this software without specific prior written permission.\n#\n# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n# \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\"\"\"Verifies that Google Test warns the user when not initialized properly.\"\"\"\n\nimport gtest_test_utils\n\nbinary_name = 'googletest-param-test-invalid-name2-test_'\nCOMMAND = gtest_test_utils.GetTestExecutablePath(binary_name)\n\n\ndef Assert(condition):\n  if not condition:\n    raise AssertionError\n\n\ndef TestExitCodeAndOutput(command):\n  \"\"\"Runs the given command and verifies its exit code and output.\"\"\"\n\n  err = ('Duplicate parameterized test name \\'a\\'')\n\n  p = gtest_test_utils.Subprocess(command)\n  Assert(p.terminated_by_signal)\n\n  # Check for appropriate output\n  Assert(err in p.output)\n\n\nclass GTestParamTestInvalidName2Test(gtest_test_utils.TestCase):\n\n  def testExitCodeAndOutput(self):\n    TestExitCodeAndOutput(COMMAND)\n\nif __name__ == '__main__':\n  gtest_test_utils.Main()\n"
  },
  {
    "path": "test/googletest/test/googletest-param-test-invalid-name2-test_.cc",
    "content": "// Copyright 2015, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\n#include \"gtest/gtest.h\"\n\nnamespace {\nclass DummyTest : public ::testing::TestWithParam<const char *> {};\n\nstd::string StringParamTestSuffix(\n    const testing::TestParamInfo<const char*>& info) {\n  return std::string(info.param);\n}\n\nTEST_P(DummyTest, Dummy) {\n}\n\nINSTANTIATE_TEST_SUITE_P(DuplicateTestNames,\n                         DummyTest,\n                         ::testing::Values(\"a\", \"b\", \"a\", \"c\"),\n                         StringParamTestSuffix);\n}  // namespace\n\nint main(int argc, char *argv[]) {\n  testing::InitGoogleTest(&argc, argv);\n  return RUN_ALL_TESTS();\n}\n\n\n"
  },
  {
    "path": "test/googletest/test/googletest-param-test-test.cc",
    "content": "// Copyright 2008, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n//\n// Tests for Google Test itself. 
This file verifies that the parameter\n// generators objects produce correct parameter sequences and that\n// Google Test runtime instantiates correct tests from those sequences.\n\n#include \"gtest/gtest.h\"\n\n# include <algorithm>\n# include <iostream>\n# include <list>\n# include <set>\n# include <sstream>\n# include <string>\n# include <vector>\n\n# include \"src/gtest-internal-inl.h\"  // for UnitTestOptions\n# include \"test/googletest-param-test-test.h\"\n\nusing ::std::vector;\nusing ::std::sort;\n\nusing ::testing::AddGlobalTestEnvironment;\nusing ::testing::Bool;\nusing ::testing::Combine;\nusing ::testing::Message;\nusing ::testing::Range;\nusing ::testing::TestWithParam;\nusing ::testing::Values;\nusing ::testing::ValuesIn;\n\nusing ::testing::internal::ParamGenerator;\nusing ::testing::internal::UnitTestOptions;\n\n// Prints a value to a string.\n//\n// FIXME: remove PrintValue() when we move matchers and\n// EXPECT_THAT() from Google Mock to Google Test.  At that time, we\n// can write EXPECT_THAT(x, Eq(y)) to compare two tuples x and y, as\n// EXPECT_THAT() and the matchers know how to print tuples.\ntemplate <typename T>\n::std::string PrintValue(const T& value) {\n  return testing::PrintToString(value);\n}\n\n// Verifies that a sequence generated by the generator and accessed\n// via the iterator object matches the expected one using Google Test\n// assertions.\ntemplate <typename T, size_t N>\nvoid VerifyGenerator(const ParamGenerator<T>& generator,\n                     const T (&expected_values)[N]) {\n  typename ParamGenerator<T>::iterator it = generator.begin();\n  for (size_t i = 0; i < N; ++i) {\n    ASSERT_FALSE(it == generator.end())\n        << \"At element \" << i << \" when accessing via an iterator \"\n        << \"created with the copy constructor.\\n\";\n    // We cannot use EXPECT_EQ() here as the values may be tuples,\n    // which don't support <<.\n    EXPECT_TRUE(expected_values[i] == *it)\n        << \"where i is \" << i\n     
   << \", expected_values[i] is \" << PrintValue(expected_values[i])\n        << \", *it is \" << PrintValue(*it)\n        << \", and 'it' is an iterator created with the copy constructor.\\n\";\n    ++it;\n  }\n  EXPECT_TRUE(it == generator.end())\n        << \"At the presumed end of sequence when accessing via an iterator \"\n        << \"created with the copy constructor.\\n\";\n\n  // Test the iterator assignment. The following lines verify that\n  // the sequence accessed via an iterator initialized via the\n  // assignment operator (as opposed to a copy constructor) matches\n  // just the same.\n  it = generator.begin();\n  for (size_t i = 0; i < N; ++i) {\n    ASSERT_FALSE(it == generator.end())\n        << \"At element \" << i << \" when accessing via an iterator \"\n        << \"created with the assignment operator.\\n\";\n    EXPECT_TRUE(expected_values[i] == *it)\n        << \"where i is \" << i\n        << \", expected_values[i] is \" << PrintValue(expected_values[i])\n        << \", *it is \" << PrintValue(*it)\n        << \", and 'it' is an iterator created with the copy constructor.\\n\";\n    ++it;\n  }\n  EXPECT_TRUE(it == generator.end())\n        << \"At the presumed end of sequence when accessing via an iterator \"\n        << \"created with the assignment operator.\\n\";\n}\n\ntemplate <typename T>\nvoid VerifyGeneratorIsEmpty(const ParamGenerator<T>& generator) {\n  typename ParamGenerator<T>::iterator it = generator.begin();\n  EXPECT_TRUE(it == generator.end());\n\n  it = generator.begin();\n  EXPECT_TRUE(it == generator.end());\n}\n\n// Generator tests. They test that each of the provided generator functions\n// generates an expected sequence of values. 
The general test pattern\n// instantiates a generator using one of the generator functions,\n// checks the sequence produced by the generator using its iterator API,\n// and then resets the iterator back to the beginning of the sequence\n// and checks the sequence again.\n\n// Tests that iterators produced by generator functions conform to the\n// ForwardIterator concept.\nTEST(IteratorTest, ParamIteratorConformsToForwardIteratorConcept) {\n  const ParamGenerator<int> gen = Range(0, 10);\n  ParamGenerator<int>::iterator it = gen.begin();\n\n  // Verifies that iterator initialization works as expected.\n  ParamGenerator<int>::iterator it2 = it;\n  EXPECT_TRUE(*it == *it2) << \"Initialized iterators must point to the \"\n                           << \"element same as its source points to\";\n\n  // Verifies that iterator assignment works as expected.\n  ++it;\n  EXPECT_FALSE(*it == *it2);\n  it2 = it;\n  EXPECT_TRUE(*it == *it2) << \"Assigned iterators must point to the \"\n                           << \"element same as its source points to\";\n\n  // Verifies that prefix operator++() returns *this.\n  EXPECT_EQ(&it, &(++it)) << \"Result of the prefix operator++ must be \"\n                          << \"refer to the original object\";\n\n  // Verifies that the result of the postfix operator++ points to the value\n  // pointed to by the original iterator.\n  int original_value = *it;  // Have to compute it outside of macro call to be\n                             // unaffected by the parameter evaluation order.\n  EXPECT_EQ(original_value, *(it++));\n\n  // Verifies that prefix and postfix operator++() advance an iterator\n  // all the same.\n  it2 = it;\n  ++it;\n  ++it2;\n  EXPECT_TRUE(*it == *it2);\n}\n\n// Tests that Range() generates the expected sequence.\nTEST(RangeTest, IntRangeWithDefaultStep) {\n  const ParamGenerator<int> gen = Range(0, 3);\n  const int expected_values[] = {0, 1, 2};\n  VerifyGenerator(gen, expected_values);\n}\n\n// Edge case. 
Tests that Range() generates the single element sequence\n// as expected when provided with range limits that are equal.\nTEST(RangeTest, IntRangeSingleValue) {\n  const ParamGenerator<int> gen = Range(0, 1);\n  const int expected_values[] = {0};\n  VerifyGenerator(gen, expected_values);\n}\n\n// Edge case. Tests that Range() with generates empty sequence when\n// supplied with an empty range.\nTEST(RangeTest, IntRangeEmpty) {\n  const ParamGenerator<int> gen = Range(0, 0);\n  VerifyGeneratorIsEmpty(gen);\n}\n\n// Tests that Range() with custom step (greater then one) generates\n// the expected sequence.\nTEST(RangeTest, IntRangeWithCustomStep) {\n  const ParamGenerator<int> gen = Range(0, 9, 3);\n  const int expected_values[] = {0, 3, 6};\n  VerifyGenerator(gen, expected_values);\n}\n\n// Tests that Range() with custom step (greater then one) generates\n// the expected sequence when the last element does not fall on the\n// upper range limit. Sequences generated by Range() must not have\n// elements beyond the range limits.\nTEST(RangeTest, IntRangeWithCustomStepOverUpperBound) {\n  const ParamGenerator<int> gen = Range(0, 4, 3);\n  const int expected_values[] = {0, 3};\n  VerifyGenerator(gen, expected_values);\n}\n\n// Verifies that Range works with user-defined types that define\n// copy constructor, operator=(), operator+(), and operator<().\nclass DogAdder {\n public:\n  explicit DogAdder(const char* a_value) : value_(a_value) {}\n  DogAdder(const DogAdder& other) : value_(other.value_.c_str()) {}\n\n  DogAdder operator=(const DogAdder& other) {\n    if (this != &other)\n      value_ = other.value_;\n    return *this;\n  }\n  DogAdder operator+(const DogAdder& other) const {\n    Message msg;\n    msg << value_.c_str() << other.value_.c_str();\n    return DogAdder(msg.GetString().c_str());\n  }\n  bool operator<(const DogAdder& other) const {\n    return value_ < other.value_;\n  }\n  const std::string& value() const { return value_; }\n\n private:\n  
std::string value_;\n};\n\nTEST(RangeTest, WorksWithACustomType) {\n  const ParamGenerator<DogAdder> gen =\n      Range(DogAdder(\"cat\"), DogAdder(\"catdogdog\"), DogAdder(\"dog\"));\n  ParamGenerator<DogAdder>::iterator it = gen.begin();\n\n  ASSERT_FALSE(it == gen.end());\n  EXPECT_STREQ(\"cat\", it->value().c_str());\n\n  ASSERT_FALSE(++it == gen.end());\n  EXPECT_STREQ(\"catdog\", it->value().c_str());\n\n  EXPECT_TRUE(++it == gen.end());\n}\n\nclass IntWrapper {\n public:\n  explicit IntWrapper(int a_value) : value_(a_value) {}\n  IntWrapper(const IntWrapper& other) : value_(other.value_) {}\n\n  IntWrapper operator=(const IntWrapper& other) {\n    value_ = other.value_;\n    return *this;\n  }\n  // operator+() adds a different type.\n  IntWrapper operator+(int other) const { return IntWrapper(value_ + other); }\n  bool operator<(const IntWrapper& other) const {\n    return value_ < other.value_;\n  }\n  int value() const { return value_; }\n\n private:\n  int value_;\n};\n\nTEST(RangeTest, WorksWithACustomTypeWithDifferentIncrementType) {\n  const ParamGenerator<IntWrapper> gen = Range(IntWrapper(0), IntWrapper(2));\n  ParamGenerator<IntWrapper>::iterator it = gen.begin();\n\n  ASSERT_FALSE(it == gen.end());\n  EXPECT_EQ(0, it->value());\n\n  ASSERT_FALSE(++it == gen.end());\n  EXPECT_EQ(1, it->value());\n\n  EXPECT_TRUE(++it == gen.end());\n}\n\n// Tests that ValuesIn() with an array parameter generates\n// the expected sequence.\nTEST(ValuesInTest, ValuesInArray) {\n  int array[] = {3, 5, 8};\n  const ParamGenerator<int> gen = ValuesIn(array);\n  VerifyGenerator(gen, array);\n}\n\n// Tests that ValuesIn() with a const array parameter generates\n// the expected sequence.\nTEST(ValuesInTest, ValuesInConstArray) {\n  const int array[] = {3, 5, 8};\n  const ParamGenerator<int> gen = ValuesIn(array);\n  VerifyGenerator(gen, array);\n}\n\n// Edge case. 
Tests that ValuesIn() with an array parameter containing a\n// single element generates the single element sequence.\nTEST(ValuesInTest, ValuesInSingleElementArray) {\n  int array[] = {42};\n  const ParamGenerator<int> gen = ValuesIn(array);\n  VerifyGenerator(gen, array);\n}\n\n// Tests that ValuesIn() generates the expected sequence for an STL\n// container (vector).\nTEST(ValuesInTest, ValuesInVector) {\n  typedef ::std::vector<int> ContainerType;\n  ContainerType values;\n  values.push_back(3);\n  values.push_back(5);\n  values.push_back(8);\n  const ParamGenerator<int> gen = ValuesIn(values);\n\n  const int expected_values[] = {3, 5, 8};\n  VerifyGenerator(gen, expected_values);\n}\n\n// Tests that ValuesIn() generates the expected sequence.\nTEST(ValuesInTest, ValuesInIteratorRange) {\n  typedef ::std::vector<int> ContainerType;\n  ContainerType values;\n  values.push_back(3);\n  values.push_back(5);\n  values.push_back(8);\n  const ParamGenerator<int> gen = ValuesIn(values.begin(), values.end());\n\n  const int expected_values[] = {3, 5, 8};\n  VerifyGenerator(gen, expected_values);\n}\n\n// Edge case. Tests that ValuesIn() provided with an iterator range specifying a\n// single value generates a single-element sequence.\nTEST(ValuesInTest, ValuesInSingleElementIteratorRange) {\n  typedef ::std::vector<int> ContainerType;\n  ContainerType values;\n  values.push_back(42);\n  const ParamGenerator<int> gen = ValuesIn(values.begin(), values.end());\n\n  const int expected_values[] = {42};\n  VerifyGenerator(gen, expected_values);\n}\n\n// Edge case. 
Tests that ValuesIn() provided with an empty iterator range\n// generates an empty sequence.\nTEST(ValuesInTest, ValuesInEmptyIteratorRange) {\n  typedef ::std::vector<int> ContainerType;\n  ContainerType values;\n  const ParamGenerator<int> gen = ValuesIn(values.begin(), values.end());\n\n  VerifyGeneratorIsEmpty(gen);\n}\n\n// Tests that the Values() generates the expected sequence.\nTEST(ValuesTest, ValuesWorks) {\n  const ParamGenerator<int> gen = Values(3, 5, 8);\n\n  const int expected_values[] = {3, 5, 8};\n  VerifyGenerator(gen, expected_values);\n}\n\n// Tests that Values() generates the expected sequences from elements of\n// different types convertible to ParamGenerator's parameter type.\nTEST(ValuesTest, ValuesWorksForValuesOfCompatibleTypes) {\n  const ParamGenerator<double> gen = Values(3, 5.0f, 8.0);\n\n  const double expected_values[] = {3.0, 5.0, 8.0};\n  VerifyGenerator(gen, expected_values);\n}\n\nTEST(ValuesTest, ValuesWorksForMaxLengthList) {\n  const ParamGenerator<int> gen = Values(\n      10, 20, 30, 40, 50, 60, 70, 80, 90, 100,\n      110, 120, 130, 140, 150, 160, 170, 180, 190, 200,\n      210, 220, 230, 240, 250, 260, 270, 280, 290, 300,\n      310, 320, 330, 340, 350, 360, 370, 380, 390, 400,\n      410, 420, 430, 440, 450, 460, 470, 480, 490, 500);\n\n  const int expected_values[] = {\n      10, 20, 30, 40, 50, 60, 70, 80, 90, 100,\n      110, 120, 130, 140, 150, 160, 170, 180, 190, 200,\n      210, 220, 230, 240, 250, 260, 270, 280, 290, 300,\n      310, 320, 330, 340, 350, 360, 370, 380, 390, 400,\n      410, 420, 430, 440, 450, 460, 470, 480, 490, 500};\n  VerifyGenerator(gen, expected_values);\n}\n\n// Edge case test. 
Tests that single-parameter Values() generates the sequence\n// with the single value.\nTEST(ValuesTest, ValuesWithSingleParameter) {\n  const ParamGenerator<int> gen = Values(42);\n\n  const int expected_values[] = {42};\n  VerifyGenerator(gen, expected_values);\n}\n\n// Tests that Bool() generates sequence (false, true).\nTEST(BoolTest, BoolWorks) {\n  const ParamGenerator<bool> gen = Bool();\n\n  const bool expected_values[] = {false, true};\n  VerifyGenerator(gen, expected_values);\n}\n\n// Tests that Combine() with two parameters generates the expected sequence.\nTEST(CombineTest, CombineWithTwoParameters) {\n  const char* foo = \"foo\";\n  const char* bar = \"bar\";\n  const ParamGenerator<std::tuple<const char*, int> > gen =\n      Combine(Values(foo, bar), Values(3, 4));\n\n  std::tuple<const char*, int> expected_values[] = {\n      std::make_tuple(foo, 3), std::make_tuple(foo, 4), std::make_tuple(bar, 3),\n      std::make_tuple(bar, 4)};\n  VerifyGenerator(gen, expected_values);\n}\n\n// Tests that Combine() with three parameters generates the expected sequence.\nTEST(CombineTest, CombineWithThreeParameters) {\n  const ParamGenerator<std::tuple<int, int, int> > gen =\n      Combine(Values(0, 1), Values(3, 4), Values(5, 6));\n  std::tuple<int, int, int> expected_values[] = {\n      std::make_tuple(0, 3, 5), std::make_tuple(0, 3, 6),\n      std::make_tuple(0, 4, 5), std::make_tuple(0, 4, 6),\n      std::make_tuple(1, 3, 5), std::make_tuple(1, 3, 6),\n      std::make_tuple(1, 4, 5), std::make_tuple(1, 4, 6)};\n  VerifyGenerator(gen, expected_values);\n}\n\n// Tests that the Combine() with the first parameter generating a single value\n// sequence generates a sequence with the number of elements equal to the\n// number of elements in the sequence generated by the second parameter.\nTEST(CombineTest, CombineWithFirstParameterSingleValue) {\n  const ParamGenerator<std::tuple<int, int> > gen =\n      Combine(Values(42), Values(0, 1));\n\n  std::tuple<int, int> 
expected_values[] = {std::make_tuple(42, 0),\n                                            std::make_tuple(42, 1)};\n  VerifyGenerator(gen, expected_values);\n}\n\n// Tests that the Combine() with the second parameter generating a single value\n// sequence generates a sequence with the number of elements equal to the\n// number of elements in the sequence generated by the first parameter.\nTEST(CombineTest, CombineWithSecondParameterSingleValue) {\n  const ParamGenerator<std::tuple<int, int> > gen =\n      Combine(Values(0, 1), Values(42));\n\n  std::tuple<int, int> expected_values[] = {std::make_tuple(0, 42),\n                                            std::make_tuple(1, 42)};\n  VerifyGenerator(gen, expected_values);\n}\n\n// Tests that when the first parameter produces an empty sequence,\n// Combine() produces an empty sequence, too.\nTEST(CombineTest, CombineWithFirstParameterEmptyRange) {\n  const ParamGenerator<std::tuple<int, int> > gen =\n      Combine(Range(0, 0), Values(0, 1));\n  VerifyGeneratorIsEmpty(gen);\n}\n\n// Tests that when the second parameter produces an empty sequence,\n// Combine() produces an empty sequence, too.\nTEST(CombineTest, CombineWithSecondParameterEmptyRange) {\n  const ParamGenerator<std::tuple<int, int> > gen =\n      Combine(Values(0, 1), Range(1, 1));\n  VerifyGeneratorIsEmpty(gen);\n}\n\n// Edge case. 
Tests that combine works with the maximum number\n// of parameters supported by Google Test (currently 10).\nTEST(CombineTest, CombineWithMaxNumberOfParameters) {\n  const char* foo = \"foo\";\n  const char* bar = \"bar\";\n  const ParamGenerator<\n      std::tuple<const char*, int, int, int, int, int, int, int, int, int> >\n      gen =\n          Combine(Values(foo, bar), Values(1), Values(2), Values(3), Values(4),\n                  Values(5), Values(6), Values(7), Values(8), Values(9));\n\n  std::tuple<const char*, int, int, int, int, int, int, int, int, int>\n      expected_values[] = {std::make_tuple(foo, 1, 2, 3, 4, 5, 6, 7, 8, 9),\n                           std::make_tuple(bar, 1, 2, 3, 4, 5, 6, 7, 8, 9)};\n  VerifyGenerator(gen, expected_values);\n}\n\nclass NonDefaultConstructAssignString {\n public:\n  NonDefaultConstructAssignString(const std::string& s) : str_(s) {}\n\n  const std::string& str() const { return str_; }\n\n private:\n  std::string str_;\n\n  // Not default constructible\n  NonDefaultConstructAssignString();\n  // Not assignable\n  void operator=(const NonDefaultConstructAssignString&);\n};\n\nTEST(CombineTest, NonDefaultConstructAssign) {\n  const ParamGenerator<std::tuple<int, NonDefaultConstructAssignString> > gen =\n      Combine(Values(0, 1), Values(NonDefaultConstructAssignString(\"A\"),\n                                   NonDefaultConstructAssignString(\"B\")));\n\n  ParamGenerator<std::tuple<int, NonDefaultConstructAssignString> >::iterator\n      it = gen.begin();\n\n  EXPECT_EQ(0, std::get<0>(*it));\n  EXPECT_EQ(\"A\", std::get<1>(*it).str());\n  ++it;\n\n  EXPECT_EQ(0, std::get<0>(*it));\n  EXPECT_EQ(\"B\", std::get<1>(*it).str());\n  ++it;\n\n  EXPECT_EQ(1, std::get<0>(*it));\n  EXPECT_EQ(\"A\", std::get<1>(*it).str());\n  ++it;\n\n  EXPECT_EQ(1, std::get<0>(*it));\n  EXPECT_EQ(\"B\", std::get<1>(*it).str());\n  ++it;\n\n  EXPECT_TRUE(it == gen.end());\n}\n\n\n// Tests that an generator produces correct sequence after 
being\n// assigned from another generator.\nTEST(ParamGeneratorTest, AssignmentWorks) {\n  ParamGenerator<int> gen = Values(1, 2);\n  const ParamGenerator<int> gen2 = Values(3, 4);\n  gen = gen2;\n\n  const int expected_values[] = {3, 4};\n  VerifyGenerator(gen, expected_values);\n}\n\n// This test verifies that the tests are expanded and run as specified:\n// one test per element from the sequence produced by the generator\n// specified in INSTANTIATE_TEST_SUITE_P. It also verifies that the test's\n// fixture constructor, SetUp(), and TearDown() have run and have been\n// supplied with the correct parameters.\n\n// The use of environment object allows detection of the case where no test\n// case functionality is run at all. In this case TearDownTestSuite will not\n// be able to detect missing tests, naturally.\ntemplate <int kExpectedCalls>\nclass TestGenerationEnvironment : public ::testing::Environment {\n public:\n  static TestGenerationEnvironment* Instance() {\n    static TestGenerationEnvironment* instance = new TestGenerationEnvironment;\n    return instance;\n  }\n\n  void FixtureConstructorExecuted() { fixture_constructor_count_++; }\n  void SetUpExecuted() { set_up_count_++; }\n  void TearDownExecuted() { tear_down_count_++; }\n  void TestBodyExecuted() { test_body_count_++; }\n\n  void TearDown() override {\n    // If all MultipleTestGenerationTest tests have been de-selected\n    // by the filter flag, the following checks make no sense.\n    bool perform_check = false;\n\n    for (int i = 0; i < kExpectedCalls; ++i) {\n      Message msg;\n      msg << \"TestsExpandedAndRun/\" << i;\n      if (UnitTestOptions::FilterMatchesTest(\n             \"TestExpansionModule/MultipleTestGenerationTest\",\n              msg.GetString().c_str())) {\n        perform_check = true;\n      }\n    }\n    if (perform_check) {\n      EXPECT_EQ(kExpectedCalls, fixture_constructor_count_)\n          << \"Fixture constructor of ParamTestGenerationTest test case \"\n          
<< \"has not been run as expected.\";\n      EXPECT_EQ(kExpectedCalls, set_up_count_)\n          << \"Fixture SetUp method of ParamTestGenerationTest test case \"\n          << \"has not been run as expected.\";\n      EXPECT_EQ(kExpectedCalls, tear_down_count_)\n          << \"Fixture TearDown method of ParamTestGenerationTest test case \"\n          << \"has not been run as expected.\";\n      EXPECT_EQ(kExpectedCalls, test_body_count_)\n          << \"Test in ParamTestGenerationTest test case \"\n          << \"has not been run as expected.\";\n    }\n  }\n\n private:\n  TestGenerationEnvironment() : fixture_constructor_count_(0), set_up_count_(0),\n                                tear_down_count_(0), test_body_count_(0) {}\n\n  int fixture_constructor_count_;\n  int set_up_count_;\n  int tear_down_count_;\n  int test_body_count_;\n\n  GTEST_DISALLOW_COPY_AND_ASSIGN_(TestGenerationEnvironment);\n};\n\nconst int test_generation_params[] = {36, 42, 72};\n\nclass TestGenerationTest : public TestWithParam<int> {\n public:\n  enum {\n    PARAMETER_COUNT =\n        sizeof(test_generation_params)/sizeof(test_generation_params[0])\n  };\n\n  typedef TestGenerationEnvironment<PARAMETER_COUNT> Environment;\n\n  TestGenerationTest() {\n    Environment::Instance()->FixtureConstructorExecuted();\n    current_parameter_ = GetParam();\n  }\n  void SetUp() override {\n    Environment::Instance()->SetUpExecuted();\n    EXPECT_EQ(current_parameter_, GetParam());\n  }\n  void TearDown() override {\n    Environment::Instance()->TearDownExecuted();\n    EXPECT_EQ(current_parameter_, GetParam());\n  }\n\n  static void SetUpTestSuite() {\n    bool all_tests_in_test_case_selected = true;\n\n    for (int i = 0; i < PARAMETER_COUNT; ++i) {\n      Message test_name;\n      test_name << \"TestsExpandedAndRun/\" << i;\n      if ( !UnitTestOptions::FilterMatchesTest(\n                \"TestExpansionModule/MultipleTestGenerationTest\",\n                test_name.GetString())) {\n        
all_tests_in_test_case_selected = false;\n      }\n    }\n    EXPECT_TRUE(all_tests_in_test_case_selected)\n        << \"When running the TestGenerationTest test case all of its tests\\n\"\n        << \"must be selected by the filter flag for the test case to pass.\\n\"\n        << \"If not all of them are enabled, we can't reliably conclude\\n\"\n        << \"that the correct number of tests have been generated.\";\n\n    collected_parameters_.clear();\n  }\n\n  static void TearDownTestSuite() {\n    vector<int> expected_values(test_generation_params,\n                                test_generation_params + PARAMETER_COUNT);\n    // Test execution order is not guaranteed by Google Test,\n    // so the order of values in collected_parameters_ can be\n    // different and we have to sort to compare.\n    sort(expected_values.begin(), expected_values.end());\n    sort(collected_parameters_.begin(), collected_parameters_.end());\n\n    EXPECT_TRUE(collected_parameters_ == expected_values);\n  }\n\n protected:\n  int current_parameter_;\n  static vector<int> collected_parameters_;\n\n private:\n  GTEST_DISALLOW_COPY_AND_ASSIGN_(TestGenerationTest);\n};\nvector<int> TestGenerationTest::collected_parameters_;\n\nTEST_P(TestGenerationTest, TestsExpandedAndRun) {\n  Environment::Instance()->TestBodyExecuted();\n  EXPECT_EQ(current_parameter_, GetParam());\n  collected_parameters_.push_back(GetParam());\n}\nINSTANTIATE_TEST_SUITE_P(TestExpansionModule, TestGenerationTest,\n                         ValuesIn(test_generation_params));\n\n// This test verifies that the element sequence (third parameter of\n// INSTANTIATE_TEST_SUITE_P) is evaluated in InitGoogleTest() and neither at\n// the call site of INSTANTIATE_TEST_SUITE_P nor in RUN_ALL_TESTS().  For\n// that, we declare param_value_ to be a static member of\n// GeneratorEvaluationTest and initialize it to 0.  We set it to 1 in\n// main(), just before invocation of InitGoogleTest().  
After calling\n// InitGoogleTest(), we set the value to 2.  If the sequence is evaluated\n// before or after InitGoogleTest, INSTANTIATE_TEST_SUITE_P will create a\n// test with parameter other than 1, and the test body will fail the\n// assertion.\nclass GeneratorEvaluationTest : public TestWithParam<int> {\n public:\n  static int param_value() { return param_value_; }\n  static void set_param_value(int param_value) { param_value_ = param_value; }\n\n private:\n  static int param_value_;\n};\nint GeneratorEvaluationTest::param_value_ = 0;\n\nTEST_P(GeneratorEvaluationTest, GeneratorsEvaluatedInMain) {\n  EXPECT_EQ(1, GetParam());\n}\nINSTANTIATE_TEST_SUITE_P(GenEvalModule, GeneratorEvaluationTest,\n                         Values(GeneratorEvaluationTest::param_value()));\n\n// Tests that generators defined in a different translation unit are\n// functional. Generator extern_gen is defined in gtest-param-test_test2.cc.\nextern ParamGenerator<int> extern_gen;\nclass ExternalGeneratorTest : public TestWithParam<int> {};\nTEST_P(ExternalGeneratorTest, ExternalGenerator) {\n  // Sequence produced by extern_gen contains only a single value\n  // which we verify here.\n  EXPECT_EQ(GetParam(), 33);\n}\nINSTANTIATE_TEST_SUITE_P(ExternalGeneratorModule, ExternalGeneratorTest,\n                         extern_gen);\n\n// Tests that a parameterized test case can be defined in one translation\n// unit and instantiated in another. This test will be instantiated in\n// gtest-param-test_test2.cc. 
ExternalInstantiationTest fixture class is\n// defined in gtest-param-test_test.h.\nTEST_P(ExternalInstantiationTest, IsMultipleOf33) {\n  EXPECT_EQ(0, GetParam() % 33);\n}\n\n// Tests that a parameterized test case can be instantiated with multiple\n// generators.\nclass MultipleInstantiationTest : public TestWithParam<int> {};\nTEST_P(MultipleInstantiationTest, AllowsMultipleInstances) {\n}\nINSTANTIATE_TEST_SUITE_P(Sequence1, MultipleInstantiationTest, Values(1, 2));\nINSTANTIATE_TEST_SUITE_P(Sequence2, MultipleInstantiationTest, Range(3, 5));\n\n// Tests that a parameterized test case can be instantiated\n// in multiple translation units. This test will be instantiated\n// here and in gtest-param-test_test2.cc.\n// InstantiationInMultipleTranslationUnitsTest fixture class\n// is defined in gtest-param-test_test.h.\nTEST_P(InstantiationInMultipleTranslationUnitsTest, IsMultipleOf42) {\n  EXPECT_EQ(0, GetParam() % 42);\n}\nINSTANTIATE_TEST_SUITE_P(Sequence1, InstantiationInMultipleTranslationUnitsTest,\n                         Values(42, 42 * 2));\n\n// Tests that each iteration of parameterized test runs in a separate test\n// object.\nclass SeparateInstanceTest : public TestWithParam<int> {\n public:\n  SeparateInstanceTest() : count_(0) {}\n\n  static void TearDownTestSuite() {\n    EXPECT_GE(global_count_, 2)\n        << \"If some (but not all) SeparateInstanceTest tests have been \"\n        << \"filtered out this test will fail. Make sure that all \"\n        << \"GeneratorEvaluationTest are selected or de-selected together \"\n        << \"by the test filter.\";\n  }\n\n protected:\n  int count_;\n  static int global_count_;\n};\nint SeparateInstanceTest::global_count_ = 0;\n\nTEST_P(SeparateInstanceTest, TestsRunInSeparateInstances) {\n  EXPECT_EQ(0, count_++);\n  global_count_++;\n}\nINSTANTIATE_TEST_SUITE_P(FourElemSequence, SeparateInstanceTest, Range(1, 4));\n\n// Tests that all instantiations of a test have named appropriately. 
Test\n// defined with TEST_P(TestSuiteName, TestName) and instantiated with\n// INSTANTIATE_TEST_SUITE_P(SequenceName, TestSuiteName, generator) must be\n// named SequenceName/TestSuiteName.TestName/i, where i is the 0-based index of\n// the sequence element used to instantiate the test.\nclass NamingTest : public TestWithParam<int> {};\n\nTEST_P(NamingTest, TestsReportCorrectNamesAndParameters) {\n  const ::testing::TestInfo* const test_info =\n     ::testing::UnitTest::GetInstance()->current_test_info();\n\n  EXPECT_STREQ(\"ZeroToFiveSequence/NamingTest\", test_info->test_suite_name());\n\n  Message index_stream;\n  index_stream << \"TestsReportCorrectNamesAndParameters/\" << GetParam();\n  EXPECT_STREQ(index_stream.GetString().c_str(), test_info->name());\n\n  EXPECT_EQ(::testing::PrintToString(GetParam()), test_info->value_param());\n}\n\nINSTANTIATE_TEST_SUITE_P(ZeroToFiveSequence, NamingTest, Range(0, 5));\n\n// Tests that macros in test names are expanded correctly.\nclass MacroNamingTest : public TestWithParam<int> {};\n\n#define PREFIX_WITH_FOO(test_name) Foo##test_name\n#define PREFIX_WITH_MACRO(test_name) Macro##test_name\n\nTEST_P(PREFIX_WITH_MACRO(NamingTest), PREFIX_WITH_FOO(SomeTestName)) {\n  const ::testing::TestInfo* const test_info =\n     ::testing::UnitTest::GetInstance()->current_test_info();\n\n  EXPECT_STREQ(\"FortyTwo/MacroNamingTest\", test_info->test_suite_name());\n  EXPECT_STREQ(\"FooSomeTestName/0\", test_info->name());\n}\n\nINSTANTIATE_TEST_SUITE_P(FortyTwo, MacroNamingTest, Values(42));\n\n// Tests the same thing for non-parametrized tests.\nclass MacroNamingTestNonParametrized : public ::testing::Test {};\n\nTEST_F(PREFIX_WITH_MACRO(NamingTestNonParametrized),\n       PREFIX_WITH_FOO(SomeTestName)) {\n  const ::testing::TestInfo* const test_info =\n     ::testing::UnitTest::GetInstance()->current_test_info();\n\n  EXPECT_STREQ(\"MacroNamingTestNonParametrized\", test_info->test_suite_name());\n  EXPECT_STREQ(\"FooSomeTestName\", 
test_info->name());\n}\n\nTEST(MacroNameing, LookupNames) {\n  std::set<std::string> know_suite_names, know_test_names;\n\n  auto ins = testing::UnitTest::GetInstance();\n  int ts = 0;\n  while (const testing::TestSuite* suite = ins->GetTestSuite(ts++)) {\n    know_suite_names.insert(suite->name());\n\n    int ti = 0;\n    while (const testing::TestInfo* info = suite->GetTestInfo(ti++)) {\n      know_test_names.insert(std::string(suite->name()) + \".\" + info->name());\n    }\n  }\n\n  // Check that the expected form of the test suit name actualy exists.\n  EXPECT_NE(  //\n      know_suite_names.find(\"FortyTwo/MacroNamingTest\"),\n      know_suite_names.end());\n  EXPECT_NE(\n      know_suite_names.find(\"MacroNamingTestNonParametrized\"),\n      know_suite_names.end());\n  // Check that the expected form of the test name actualy exists.\n  EXPECT_NE(  //\n      know_test_names.find(\"FortyTwo/MacroNamingTest.FooSomeTestName/0\"),\n      know_test_names.end());\n  EXPECT_NE(\n      know_test_names.find(\"MacroNamingTestNonParametrized.FooSomeTestName\"),\n      know_test_names.end());\n}\n\n// Tests that user supplied custom parameter names are working correctly.\n// Runs the test with a builtin helper method which uses PrintToString,\n// as well as a custom function and custom functor to ensure all possible\n// uses work correctly.\nclass CustomFunctorNamingTest : public TestWithParam<std::string> {};\nTEST_P(CustomFunctorNamingTest, CustomTestNames) {}\n\nstruct CustomParamNameFunctor {\n  std::string operator()(const ::testing::TestParamInfo<std::string>& inf) {\n    return inf.param;\n  }\n};\n\nINSTANTIATE_TEST_SUITE_P(CustomParamNameFunctor, CustomFunctorNamingTest,\n                         Values(std::string(\"FunctorName\")),\n                         CustomParamNameFunctor());\n\nINSTANTIATE_TEST_SUITE_P(AllAllowedCharacters, CustomFunctorNamingTest,\n                         Values(\"abcdefghijklmnopqrstuvwxyz\",\n                                
\"ABCDEFGHIJKLMNOPQRSTUVWXYZ\", \"01234567890_\"),\n                         CustomParamNameFunctor());\n\ninline std::string CustomParamNameFunction(\n    const ::testing::TestParamInfo<std::string>& inf) {\n  return inf.param;\n}\n\nclass CustomFunctionNamingTest : public TestWithParam<std::string> {};\nTEST_P(CustomFunctionNamingTest, CustomTestNames) {}\n\nINSTANTIATE_TEST_SUITE_P(CustomParamNameFunction, CustomFunctionNamingTest,\n                         Values(std::string(\"FunctionName\")),\n                         CustomParamNameFunction);\n\nINSTANTIATE_TEST_SUITE_P(CustomParamNameFunctionP, CustomFunctionNamingTest,\n                         Values(std::string(\"FunctionNameP\")),\n                         &CustomParamNameFunction);\n\n// Test custom naming with a lambda\n\nclass CustomLambdaNamingTest : public TestWithParam<std::string> {};\nTEST_P(CustomLambdaNamingTest, CustomTestNames) {}\n\nINSTANTIATE_TEST_SUITE_P(CustomParamNameLambda, CustomLambdaNamingTest,\n                         Values(std::string(\"LambdaName\")),\n                         [](const ::testing::TestParamInfo<std::string>& inf) {\n                           return inf.param;\n                         });\n\nTEST(CustomNamingTest, CheckNameRegistry) {\n  ::testing::UnitTest* unit_test = ::testing::UnitTest::GetInstance();\n  std::set<std::string> test_names;\n  for (int suite_num = 0; suite_num < unit_test->total_test_suite_count();\n       ++suite_num) {\n    const ::testing::TestSuite* test_suite = unit_test->GetTestSuite(suite_num);\n    for (int test_num = 0; test_num < test_suite->total_test_count();\n         ++test_num) {\n      const ::testing::TestInfo* test_info = test_suite->GetTestInfo(test_num);\n      test_names.insert(std::string(test_info->name()));\n    }\n  }\n  EXPECT_EQ(1u, test_names.count(\"CustomTestNames/FunctorName\"));\n  EXPECT_EQ(1u, test_names.count(\"CustomTestNames/FunctionName\"));\n  EXPECT_EQ(1u, 
test_names.count(\"CustomTestNames/FunctionNameP\"));\n  EXPECT_EQ(1u, test_names.count(\"CustomTestNames/LambdaName\"));\n}\n\n// Test a numeric name to ensure PrintToStringParamName works correctly.\n\nclass CustomIntegerNamingTest : public TestWithParam<int> {};\n\nTEST_P(CustomIntegerNamingTest, TestsReportCorrectNames) {\n  const ::testing::TestInfo* const test_info =\n     ::testing::UnitTest::GetInstance()->current_test_info();\n  Message test_name_stream;\n  test_name_stream << \"TestsReportCorrectNames/\" << GetParam();\n  EXPECT_STREQ(test_name_stream.GetString().c_str(), test_info->name());\n}\n\nINSTANTIATE_TEST_SUITE_P(PrintToString, CustomIntegerNamingTest, Range(0, 5),\n                         ::testing::PrintToStringParamName());\n\n// Test a custom struct with PrintToString.\n\nstruct CustomStruct {\n  explicit CustomStruct(int value) : x(value) {}\n  int x;\n};\n\nstd::ostream& operator<<(std::ostream& stream, const CustomStruct& val) {\n  stream << val.x;\n  return stream;\n}\n\nclass CustomStructNamingTest : public TestWithParam<CustomStruct> {};\n\nTEST_P(CustomStructNamingTest, TestsReportCorrectNames) {\n  const ::testing::TestInfo* const test_info =\n     ::testing::UnitTest::GetInstance()->current_test_info();\n  Message test_name_stream;\n  test_name_stream << \"TestsReportCorrectNames/\" << GetParam();\n  EXPECT_STREQ(test_name_stream.GetString().c_str(), test_info->name());\n}\n\nINSTANTIATE_TEST_SUITE_P(PrintToString, CustomStructNamingTest,\n                         Values(CustomStruct(0), CustomStruct(1)),\n                         ::testing::PrintToStringParamName());\n\n// Test that using a stateful parameter naming function works as expected.\n\nstruct StatefulNamingFunctor {\n  StatefulNamingFunctor() : sum(0) {}\n  std::string operator()(const ::testing::TestParamInfo<int>& info) {\n    int value = info.param + sum;\n    sum += info.param;\n    return ::testing::PrintToString(value);\n  }\n  int sum;\n};\n\nclass 
StatefulNamingTest : public ::testing::TestWithParam<int> {\n protected:\n  StatefulNamingTest() : sum_(0) {}\n  int sum_;\n};\n\nTEST_P(StatefulNamingTest, TestsReportCorrectNames) {\n  const ::testing::TestInfo* const test_info =\n     ::testing::UnitTest::GetInstance()->current_test_info();\n  sum_ += GetParam();\n  Message test_name_stream;\n  test_name_stream << \"TestsReportCorrectNames/\" << sum_;\n  EXPECT_STREQ(test_name_stream.GetString().c_str(), test_info->name());\n}\n\nINSTANTIATE_TEST_SUITE_P(StatefulNamingFunctor, StatefulNamingTest, Range(0, 5),\n                         StatefulNamingFunctor());\n\n// Class that cannot be streamed into an ostream.  It needs to be copyable\n// (and, in case of MSVC, also assignable) in order to be a test parameter\n// type.  Its default copy constructor and assignment operator do exactly\n// what we need.\nclass Unstreamable {\n public:\n  explicit Unstreamable(int value) : value_(value) {}\n  // -Wunused-private-field: dummy accessor for `value_`.\n  const int& dummy_value() const { return value_; }\n\n private:\n  int value_;\n};\n\nclass CommentTest : public TestWithParam<Unstreamable> {};\n\nTEST_P(CommentTest, TestsCorrectlyReportUnstreamableParams) {\n  const ::testing::TestInfo* const test_info =\n     ::testing::UnitTest::GetInstance()->current_test_info();\n\n  EXPECT_EQ(::testing::PrintToString(GetParam()), test_info->value_param());\n}\n\nINSTANTIATE_TEST_SUITE_P(InstantiationWithComments, CommentTest,\n                         Values(Unstreamable(1)));\n\n// Verify that we can create a hierarchy of test fixtures, where the base\n// class fixture is not parameterized and the derived class is. In this case\n// ParameterizedDerivedTest inherits from NonParameterizedBaseTest.  
We\n// perform simple tests on both.\nclass NonParameterizedBaseTest : public ::testing::Test {\n public:\n  NonParameterizedBaseTest() : n_(17) { }\n protected:\n  int n_;\n};\n\nclass ParameterizedDerivedTest : public NonParameterizedBaseTest,\n                                 public ::testing::WithParamInterface<int> {\n protected:\n  ParameterizedDerivedTest() : count_(0) { }\n  int count_;\n  static int global_count_;\n};\n\nint ParameterizedDerivedTest::global_count_ = 0;\n\nTEST_F(NonParameterizedBaseTest, FixtureIsInitialized) {\n  EXPECT_EQ(17, n_);\n}\n\nTEST_P(ParameterizedDerivedTest, SeesSequence) {\n  EXPECT_EQ(17, n_);\n  EXPECT_EQ(0, count_++);\n  EXPECT_EQ(GetParam(), global_count_++);\n}\n\nclass ParameterizedDeathTest : public ::testing::TestWithParam<int> { };\n\nTEST_F(ParameterizedDeathTest, GetParamDiesFromTestF) {\n  EXPECT_DEATH_IF_SUPPORTED(GetParam(),\n                            \".* value-parameterized test .*\");\n}\n\nINSTANTIATE_TEST_SUITE_P(RangeZeroToFive, ParameterizedDerivedTest,\n                         Range(0, 5));\n\n// Tests param generator working with Enums\nenum MyEnums {\n  ENUM1 = 1,\n  ENUM2 = 3,\n  ENUM3 = 8,\n};\n\nclass MyEnumTest : public testing::TestWithParam<MyEnums> {};\n\nTEST_P(MyEnumTest, ChecksParamMoreThanZero) { EXPECT_GE(10, GetParam()); }\nINSTANTIATE_TEST_SUITE_P(MyEnumTests, MyEnumTest,\n                         ::testing::Values(ENUM1, ENUM2, 0));\n\nint main(int argc, char **argv) {\n  // Used in TestGenerationTest test suite.\n  AddGlobalTestEnvironment(TestGenerationTest::Environment::Instance());\n  // Used in GeneratorEvaluationTest test suite. Tests that the updated value\n  // will be picked up for instantiating tests in GeneratorEvaluationTest.\n  GeneratorEvaluationTest::set_param_value(1);\n\n  ::testing::InitGoogleTest(&argc, argv);\n\n  // Used in GeneratorEvaluationTest test suite. 
Tests that value updated\n  // here will NOT be used for instantiating tests in\n  // GeneratorEvaluationTest.\n  GeneratorEvaluationTest::set_param_value(2);\n\n  return RUN_ALL_TESTS();\n}\n"
  },
  {
    "path": "test/googletest/test/googletest-param-test-test.h",
    "content": "// Copyright 2008, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n//\n// The Google C++ Testing and Mocking Framework (Google Test)\n//\n// This header file provides classes and functions used internally\n// for testing Google Test itself.\n\n#ifndef GTEST_TEST_GTEST_PARAM_TEST_TEST_H_\n#define GTEST_TEST_GTEST_PARAM_TEST_TEST_H_\n\n#include \"gtest/gtest.h\"\n\n// Test fixture for testing definition and instantiation of a test\n// in separate translation units.\nclass ExternalInstantiationTest : public ::testing::TestWithParam<int> {\n};\n\n// Test fixture for testing instantiation of a test in multiple\n// translation units.\nclass InstantiationInMultipleTranslationUnitsTest\n    : public ::testing::TestWithParam<int> {\n};\n\n#endif  // GTEST_TEST_GTEST_PARAM_TEST_TEST_H_\n"
  },
  {
    "path": "test/googletest/test/googletest-param-test2-test.cc",
    "content": "// Copyright 2008, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n//\n// Tests for Google Test itself.  This verifies that the basic constructs of\n// Google Test work.\n\n#include \"gtest/gtest.h\"\n#include \"test/googletest-param-test-test.h\"\n\nusing ::testing::Values;\nusing ::testing::internal::ParamGenerator;\n\n// Tests that generators defined in a different translation unit\n// are functional. 
The test using extern_gen is defined\n// in googletest-param-test-test.cc.\nParamGenerator<int> extern_gen = Values(33);\n\n// Tests that a parameterized test case can be defined in one translation unit\n// and instantiated in another. The test is defined in\n// googletest-param-test-test.cc and ExternalInstantiationTest fixture class is\n// defined in gtest-param-test_test.h.\nINSTANTIATE_TEST_SUITE_P(MultiplesOf33,\n                         ExternalInstantiationTest,\n                         Values(33, 66));\n\n// Tests that a parameterized test case can be instantiated\n// in multiple translation units. Another instantiation is defined\n// in googletest-param-test-test.cc and\n// InstantiationInMultipleTranslationUnitsTest fixture is defined in\n// gtest-param-test_test.h\nINSTANTIATE_TEST_SUITE_P(Sequence2,\n                         InstantiationInMultipleTranslationUnitsTest,\n                         Values(42*3, 42*4, 42*5));\n\n"
  },
  {
    "path": "test/googletest/test/googletest-port-test.cc",
    "content": "// Copyright 2008, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n//\n// This file tests the internal cross-platform support utilities.\n#include <stdio.h>\n\n#include \"gtest/internal/gtest-port.h\"\n\n#if GTEST_OS_MAC\n# include <time.h>\n#endif  // GTEST_OS_MAC\n\n#include <list>\n#include <memory>\n#include <utility>  // For std::pair and std::make_pair.\n#include <vector>\n\n#include \"gtest/gtest.h\"\n#include \"gtest/gtest-spi.h\"\n#include \"src/gtest-internal-inl.h\"\n\nusing std::make_pair;\nusing std::pair;\n\nnamespace testing {\nnamespace internal {\n\nTEST(IsXDigitTest, WorksForNarrowAscii) {\n  EXPECT_TRUE(IsXDigit('0'));\n  EXPECT_TRUE(IsXDigit('9'));\n  EXPECT_TRUE(IsXDigit('A'));\n  EXPECT_TRUE(IsXDigit('F'));\n  EXPECT_TRUE(IsXDigit('a'));\n  EXPECT_TRUE(IsXDigit('f'));\n\n  EXPECT_FALSE(IsXDigit('-'));\n  EXPECT_FALSE(IsXDigit('g'));\n  EXPECT_FALSE(IsXDigit('G'));\n}\n\nTEST(IsXDigitTest, ReturnsFalseForNarrowNonAscii) {\n  EXPECT_FALSE(IsXDigit(static_cast<char>('\\x80')));\n  EXPECT_FALSE(IsXDigit(static_cast<char>('0' | '\\x80')));\n}\n\nTEST(IsXDigitTest, WorksForWideAscii) {\n  EXPECT_TRUE(IsXDigit(L'0'));\n  EXPECT_TRUE(IsXDigit(L'9'));\n  EXPECT_TRUE(IsXDigit(L'A'));\n  EXPECT_TRUE(IsXDigit(L'F'));\n  EXPECT_TRUE(IsXDigit(L'a'));\n  EXPECT_TRUE(IsXDigit(L'f'));\n\n  EXPECT_FALSE(IsXDigit(L'-'));\n  EXPECT_FALSE(IsXDigit(L'g'));\n  EXPECT_FALSE(IsXDigit(L'G'));\n}\n\nTEST(IsXDigitTest, ReturnsFalseForWideNonAscii) {\n  
EXPECT_FALSE(IsXDigit(static_cast<wchar_t>(0x80)));\n  EXPECT_FALSE(IsXDigit(static_cast<wchar_t>(L'0' | 0x80)));\n  EXPECT_FALSE(IsXDigit(static_cast<wchar_t>(L'0' | 0x100)));\n}\n\nclass Base {\n public:\n  // Copy constructor and assignment operator do exactly what we need, so we\n  // use them.\n  Base() : member_(0) {}\n  explicit Base(int n) : member_(n) {}\n  virtual ~Base() {}\n  int member() { return member_; }\n\n private:\n  int member_;\n};\n\nclass Derived : public Base {\n public:\n  explicit Derived(int n) : Base(n) {}\n};\n\nTEST(ImplicitCastTest, ConvertsPointers) {\n  Derived derived(0);\n  EXPECT_TRUE(&derived == ::testing::internal::ImplicitCast_<Base*>(&derived));\n}\n\nTEST(ImplicitCastTest, CanUseInheritance) {\n  Derived derived(1);\n  Base base = ::testing::internal::ImplicitCast_<Base>(derived);\n  EXPECT_EQ(derived.member(), base.member());\n}\n\nclass Castable {\n public:\n  explicit Castable(bool* converted) : converted_(converted) {}\n  operator Base() {\n    *converted_ = true;\n    return Base();\n  }\n\n private:\n  bool* converted_;\n};\n\nTEST(ImplicitCastTest, CanUseNonConstCastOperator) {\n  bool converted = false;\n  Castable castable(&converted);\n  Base base = ::testing::internal::ImplicitCast_<Base>(castable);\n  EXPECT_TRUE(converted);\n}\n\nclass ConstCastable {\n public:\n  explicit ConstCastable(bool* converted) : converted_(converted) {}\n  operator Base() const {\n    *converted_ = true;\n    return Base();\n  }\n\n private:\n  bool* converted_;\n};\n\nTEST(ImplicitCastTest, CanUseConstCastOperatorOnConstValues) {\n  bool converted = false;\n  const ConstCastable const_castable(&converted);\n  Base base = ::testing::internal::ImplicitCast_<Base>(const_castable);\n  EXPECT_TRUE(converted);\n}\n\nclass ConstAndNonConstCastable {\n public:\n  ConstAndNonConstCastable(bool* converted, bool* const_converted)\n      : converted_(converted), const_converted_(const_converted) {}\n  operator Base() {\n    *converted_ = true;\n  
  return Base();\n  }\n  operator Base() const {\n    *const_converted_ = true;\n    return Base();\n  }\n\n private:\n  bool* converted_;\n  bool* const_converted_;\n};\n\nTEST(ImplicitCastTest, CanSelectBetweenConstAndNonConstCasrAppropriately) {\n  bool converted = false;\n  bool const_converted = false;\n  ConstAndNonConstCastable castable(&converted, &const_converted);\n  Base base = ::testing::internal::ImplicitCast_<Base>(castable);\n  EXPECT_TRUE(converted);\n  EXPECT_FALSE(const_converted);\n\n  converted = false;\n  const_converted = false;\n  const ConstAndNonConstCastable const_castable(&converted, &const_converted);\n  base = ::testing::internal::ImplicitCast_<Base>(const_castable);\n  EXPECT_FALSE(converted);\n  EXPECT_TRUE(const_converted);\n}\n\nclass To {\n public:\n  To(bool* converted) { *converted = true; }  // NOLINT\n};\n\nTEST(ImplicitCastTest, CanUseImplicitConstructor) {\n  bool converted = false;\n  To to = ::testing::internal::ImplicitCast_<To>(&converted);\n  (void)to;\n  EXPECT_TRUE(converted);\n}\n\nTEST(GtestCheckSyntaxTest, BehavesLikeASingleStatement) {\n  if (AlwaysFalse())\n    GTEST_CHECK_(false) << \"This should never be executed; \"\n                           \"It's a compilation test only.\";\n\n  if (AlwaysTrue())\n    GTEST_CHECK_(true);\n  else\n    ;  // NOLINT\n\n  if (AlwaysFalse())\n    ;  // NOLINT\n  else\n    GTEST_CHECK_(true) << \"\";\n}\n\nTEST(GtestCheckSyntaxTest, WorksWithSwitch) {\n  switch (0) {\n    case 1:\n      break;\n    default:\n      GTEST_CHECK_(true);\n  }\n\n  switch (0)\n    case 0:\n      GTEST_CHECK_(true) << \"Check failed in switch case\";\n}\n\n// Verifies behavior of FormatFileLocation.\nTEST(FormatFileLocationTest, FormatsFileLocation) {\n  EXPECT_PRED_FORMAT2(IsSubstring, \"foo.cc\", FormatFileLocation(\"foo.cc\", 42));\n  EXPECT_PRED_FORMAT2(IsSubstring, \"42\", FormatFileLocation(\"foo.cc\", 42));\n}\n\nTEST(FormatFileLocationTest, FormatsUnknownFile) {\n  
EXPECT_PRED_FORMAT2(IsSubstring, \"unknown file\",\n                      FormatFileLocation(nullptr, 42));\n  EXPECT_PRED_FORMAT2(IsSubstring, \"42\", FormatFileLocation(nullptr, 42));\n}\n\nTEST(FormatFileLocationTest, FormatsUknownLine) {\n  EXPECT_EQ(\"foo.cc:\", FormatFileLocation(\"foo.cc\", -1));\n}\n\nTEST(FormatFileLocationTest, FormatsUknownFileAndLine) {\n  EXPECT_EQ(\"unknown file:\", FormatFileLocation(nullptr, -1));\n}\n\n// Verifies behavior of FormatCompilerIndependentFileLocation.\nTEST(FormatCompilerIndependentFileLocationTest, FormatsFileLocation) {\n  EXPECT_EQ(\"foo.cc:42\", FormatCompilerIndependentFileLocation(\"foo.cc\", 42));\n}\n\nTEST(FormatCompilerIndependentFileLocationTest, FormatsUknownFile) {\n  EXPECT_EQ(\"unknown file:42\",\n            FormatCompilerIndependentFileLocation(nullptr, 42));\n}\n\nTEST(FormatCompilerIndependentFileLocationTest, FormatsUknownLine) {\n  EXPECT_EQ(\"foo.cc\", FormatCompilerIndependentFileLocation(\"foo.cc\", -1));\n}\n\nTEST(FormatCompilerIndependentFileLocationTest, FormatsUknownFileAndLine) {\n  EXPECT_EQ(\"unknown file\", FormatCompilerIndependentFileLocation(nullptr, -1));\n}\n\n#if GTEST_OS_LINUX || GTEST_OS_MAC || GTEST_OS_QNX || GTEST_OS_FUCHSIA || \\\n    GTEST_OS_DRAGONFLY || GTEST_OS_FREEBSD || GTEST_OS_GNU_KFREEBSD || \\\n    GTEST_OS_NETBSD || GTEST_OS_OPENBSD\nvoid* ThreadFunc(void* data) {\n  internal::Mutex* mutex = static_cast<internal::Mutex*>(data);\n  mutex->Lock();\n  mutex->Unlock();\n  return nullptr;\n}\n\nTEST(GetThreadCountTest, ReturnsCorrectValue) {\n  const size_t starting_count = GetThreadCount();\n  pthread_t       thread_id;\n\n  internal::Mutex mutex;\n  {\n    internal::MutexLock lock(&mutex);\n    pthread_attr_t  attr;\n    ASSERT_EQ(0, pthread_attr_init(&attr));\n    ASSERT_EQ(0, pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE));\n\n    const int status = pthread_create(&thread_id, &attr, &ThreadFunc, &mutex);\n    ASSERT_EQ(0, pthread_attr_destroy(&attr));\n 
   ASSERT_EQ(0, status);\n    EXPECT_EQ(starting_count + 1, GetThreadCount());\n  }\n\n  void* dummy;\n  ASSERT_EQ(0, pthread_join(thread_id, &dummy));\n\n  // The OS may not immediately report the updated thread count after\n  // joining a thread, causing flakiness in this test. To counter that, we\n  // wait for up to .5 seconds for the OS to report the correct value.\n  for (int i = 0; i < 5; ++i) {\n    if (GetThreadCount() == starting_count)\n      break;\n\n    SleepMilliseconds(100);\n  }\n\n  EXPECT_EQ(starting_count, GetThreadCount());\n}\n#else\nTEST(GetThreadCountTest, ReturnsZeroWhenUnableToCountThreads) {\n  EXPECT_EQ(0U, GetThreadCount());\n}\n#endif  // GTEST_OS_LINUX || GTEST_OS_MAC || GTEST_OS_QNX || GTEST_OS_FUCHSIA\n\nTEST(GtestCheckDeathTest, DiesWithCorrectOutputOnFailure) {\n  const bool a_false_condition = false;\n  const char regex[] =\n#ifdef _MSC_VER\n     \"googletest-port-test\\\\.cc\\\\(\\\\d+\\\\):\"\n#elif GTEST_USES_POSIX_RE\n     \"googletest-port-test\\\\.cc:[0-9]+\"\n#else\n     \"googletest-port-test\\\\.cc:\\\\d+\"\n#endif  // _MSC_VER\n     \".*a_false_condition.*Extra info.*\";\n\n  EXPECT_DEATH_IF_SUPPORTED(GTEST_CHECK_(a_false_condition) << \"Extra info\",\n                            regex);\n}\n\n#if GTEST_HAS_DEATH_TEST\n\nTEST(GtestCheckDeathTest, LivesSilentlyOnSuccess) {\n  EXPECT_EXIT({\n      GTEST_CHECK_(true) << \"Extra info\";\n      ::std::cerr << \"Success\\n\";\n      exit(0); },\n      ::testing::ExitedWithCode(0), \"Success\");\n}\n\n#endif  // GTEST_HAS_DEATH_TEST\n\n// Verifies that Google Test choose regular expression engine appropriate to\n// the platform. 
The test will produce compiler errors in case of failure.\n// For simplicity, we only cover the most important platforms here.\nTEST(RegexEngineSelectionTest, SelectsCorrectRegexEngine) {\n#if !GTEST_USES_PCRE\n# if GTEST_HAS_POSIX_RE\n\n  EXPECT_TRUE(GTEST_USES_POSIX_RE);\n\n# else\n\n  EXPECT_TRUE(GTEST_USES_SIMPLE_RE);\n\n# endif\n#endif  // !GTEST_USES_PCRE\n}\n\n#if GTEST_USES_POSIX_RE\n\n# if GTEST_HAS_TYPED_TEST\n\ntemplate <typename Str>\nclass RETest : public ::testing::Test {};\n\n// Defines StringTypes as the list of all string types that class RE\n// supports.\ntypedef testing::Types< ::std::string, const char*> StringTypes;\n\nTYPED_TEST_SUITE(RETest, StringTypes);\n\n// Tests RE's implicit constructors.\nTYPED_TEST(RETest, ImplicitConstructorWorks) {\n  const RE empty(TypeParam(\"\"));\n  EXPECT_STREQ(\"\", empty.pattern());\n\n  const RE simple(TypeParam(\"hello\"));\n  EXPECT_STREQ(\"hello\", simple.pattern());\n\n  const RE normal(TypeParam(\".*(\\\\w+)\"));\n  EXPECT_STREQ(\".*(\\\\w+)\", normal.pattern());\n}\n\n// Tests that RE's constructors reject invalid regular expressions.\nTYPED_TEST(RETest, RejectsInvalidRegex) {\n  EXPECT_NONFATAL_FAILURE({\n    const RE invalid(TypeParam(\"?\"));\n  }, \"\\\"?\\\" is not a valid POSIX Extended regular expression.\");\n}\n\n// Tests RE::FullMatch().\nTYPED_TEST(RETest, FullMatchWorks) {\n  const RE empty(TypeParam(\"\"));\n  EXPECT_TRUE(RE::FullMatch(TypeParam(\"\"), empty));\n  EXPECT_FALSE(RE::FullMatch(TypeParam(\"a\"), empty));\n\n  const RE re(TypeParam(\"a.*z\"));\n  EXPECT_TRUE(RE::FullMatch(TypeParam(\"az\"), re));\n  EXPECT_TRUE(RE::FullMatch(TypeParam(\"axyz\"), re));\n  EXPECT_FALSE(RE::FullMatch(TypeParam(\"baz\"), re));\n  EXPECT_FALSE(RE::FullMatch(TypeParam(\"azy\"), re));\n}\n\n// Tests RE::PartialMatch().\nTYPED_TEST(RETest, PartialMatchWorks) {\n  const RE empty(TypeParam(\"\"));\n  EXPECT_TRUE(RE::PartialMatch(TypeParam(\"\"), empty));\n  EXPECT_TRUE(RE::PartialMatch(TypeParam(\"a\"), 
empty));\n\n  const RE re(TypeParam(\"a.*z\"));\n  EXPECT_TRUE(RE::PartialMatch(TypeParam(\"az\"), re));\n  EXPECT_TRUE(RE::PartialMatch(TypeParam(\"axyz\"), re));\n  EXPECT_TRUE(RE::PartialMatch(TypeParam(\"baz\"), re));\n  EXPECT_TRUE(RE::PartialMatch(TypeParam(\"azy\"), re));\n  EXPECT_FALSE(RE::PartialMatch(TypeParam(\"zza\"), re));\n}\n\n# endif  // GTEST_HAS_TYPED_TEST\n\n#elif GTEST_USES_SIMPLE_RE\n\nTEST(IsInSetTest, NulCharIsNotInAnySet) {\n  EXPECT_FALSE(IsInSet('\\0', \"\"));\n  EXPECT_FALSE(IsInSet('\\0', \"\\0\"));\n  EXPECT_FALSE(IsInSet('\\0', \"a\"));\n}\n\nTEST(IsInSetTest, WorksForNonNulChars) {\n  EXPECT_FALSE(IsInSet('a', \"Ab\"));\n  EXPECT_FALSE(IsInSet('c', \"\"));\n\n  EXPECT_TRUE(IsInSet('b', \"bcd\"));\n  EXPECT_TRUE(IsInSet('b', \"ab\"));\n}\n\nTEST(IsAsciiDigitTest, IsFalseForNonDigit) {\n  EXPECT_FALSE(IsAsciiDigit('\\0'));\n  EXPECT_FALSE(IsAsciiDigit(' '));\n  EXPECT_FALSE(IsAsciiDigit('+'));\n  EXPECT_FALSE(IsAsciiDigit('-'));\n  EXPECT_FALSE(IsAsciiDigit('.'));\n  EXPECT_FALSE(IsAsciiDigit('a'));\n}\n\nTEST(IsAsciiDigitTest, IsTrueForDigit) {\n  EXPECT_TRUE(IsAsciiDigit('0'));\n  EXPECT_TRUE(IsAsciiDigit('1'));\n  EXPECT_TRUE(IsAsciiDigit('5'));\n  EXPECT_TRUE(IsAsciiDigit('9'));\n}\n\nTEST(IsAsciiPunctTest, IsFalseForNonPunct) {\n  EXPECT_FALSE(IsAsciiPunct('\\0'));\n  EXPECT_FALSE(IsAsciiPunct(' '));\n  EXPECT_FALSE(IsAsciiPunct('\\n'));\n  EXPECT_FALSE(IsAsciiPunct('a'));\n  EXPECT_FALSE(IsAsciiPunct('0'));\n}\n\nTEST(IsAsciiPunctTest, IsTrueForPunct) {\n  for (const char* p = \"^-!\\\"#$%&'()*+,./:;<=>?@[\\\\]_`{|}~\"; *p; p++) {\n    EXPECT_PRED1(IsAsciiPunct, *p);\n  }\n}\n\nTEST(IsRepeatTest, IsFalseForNonRepeatChar) {\n  EXPECT_FALSE(IsRepeat('\\0'));\n  EXPECT_FALSE(IsRepeat(' '));\n  EXPECT_FALSE(IsRepeat('a'));\n  EXPECT_FALSE(IsRepeat('1'));\n  EXPECT_FALSE(IsRepeat('-'));\n}\n\nTEST(IsRepeatTest, IsTrueForRepeatChar) {\n  EXPECT_TRUE(IsRepeat('?'));\n  EXPECT_TRUE(IsRepeat('*'));\n  
EXPECT_TRUE(IsRepeat('+'));\n}\n\nTEST(IsAsciiWhiteSpaceTest, IsFalseForNonWhiteSpace) {\n  EXPECT_FALSE(IsAsciiWhiteSpace('\\0'));\n  EXPECT_FALSE(IsAsciiWhiteSpace('a'));\n  EXPECT_FALSE(IsAsciiWhiteSpace('1'));\n  EXPECT_FALSE(IsAsciiWhiteSpace('+'));\n  EXPECT_FALSE(IsAsciiWhiteSpace('_'));\n}\n\nTEST(IsAsciiWhiteSpaceTest, IsTrueForWhiteSpace) {\n  EXPECT_TRUE(IsAsciiWhiteSpace(' '));\n  EXPECT_TRUE(IsAsciiWhiteSpace('\\n'));\n  EXPECT_TRUE(IsAsciiWhiteSpace('\\r'));\n  EXPECT_TRUE(IsAsciiWhiteSpace('\\t'));\n  EXPECT_TRUE(IsAsciiWhiteSpace('\\v'));\n  EXPECT_TRUE(IsAsciiWhiteSpace('\\f'));\n}\n\nTEST(IsAsciiWordCharTest, IsFalseForNonWordChar) {\n  EXPECT_FALSE(IsAsciiWordChar('\\0'));\n  EXPECT_FALSE(IsAsciiWordChar('+'));\n  EXPECT_FALSE(IsAsciiWordChar('.'));\n  EXPECT_FALSE(IsAsciiWordChar(' '));\n  EXPECT_FALSE(IsAsciiWordChar('\\n'));\n}\n\nTEST(IsAsciiWordCharTest, IsTrueForLetter) {\n  EXPECT_TRUE(IsAsciiWordChar('a'));\n  EXPECT_TRUE(IsAsciiWordChar('b'));\n  EXPECT_TRUE(IsAsciiWordChar('A'));\n  EXPECT_TRUE(IsAsciiWordChar('Z'));\n}\n\nTEST(IsAsciiWordCharTest, IsTrueForDigit) {\n  EXPECT_TRUE(IsAsciiWordChar('0'));\n  EXPECT_TRUE(IsAsciiWordChar('1'));\n  EXPECT_TRUE(IsAsciiWordChar('7'));\n  EXPECT_TRUE(IsAsciiWordChar('9'));\n}\n\nTEST(IsAsciiWordCharTest, IsTrueForUnderscore) {\n  EXPECT_TRUE(IsAsciiWordChar('_'));\n}\n\nTEST(IsValidEscapeTest, IsFalseForNonPrintable) {\n  EXPECT_FALSE(IsValidEscape('\\0'));\n  EXPECT_FALSE(IsValidEscape('\\007'));\n}\n\nTEST(IsValidEscapeTest, IsFalseForDigit) {\n  EXPECT_FALSE(IsValidEscape('0'));\n  EXPECT_FALSE(IsValidEscape('9'));\n}\n\nTEST(IsValidEscapeTest, IsFalseForWhiteSpace) {\n  EXPECT_FALSE(IsValidEscape(' '));\n  EXPECT_FALSE(IsValidEscape('\\n'));\n}\n\nTEST(IsValidEscapeTest, IsFalseForSomeLetter) {\n  EXPECT_FALSE(IsValidEscape('a'));\n  EXPECT_FALSE(IsValidEscape('Z'));\n}\n\nTEST(IsValidEscapeTest, IsTrueForPunct) {\n  EXPECT_TRUE(IsValidEscape('.'));\n  EXPECT_TRUE(IsValidEscape('-'));\n  
EXPECT_TRUE(IsValidEscape('^'));\n  EXPECT_TRUE(IsValidEscape('$'));\n  EXPECT_TRUE(IsValidEscape('('));\n  EXPECT_TRUE(IsValidEscape(']'));\n  EXPECT_TRUE(IsValidEscape('{'));\n  EXPECT_TRUE(IsValidEscape('|'));\n}\n\nTEST(IsValidEscapeTest, IsTrueForSomeLetter) {\n  EXPECT_TRUE(IsValidEscape('d'));\n  EXPECT_TRUE(IsValidEscape('D'));\n  EXPECT_TRUE(IsValidEscape('s'));\n  EXPECT_TRUE(IsValidEscape('S'));\n  EXPECT_TRUE(IsValidEscape('w'));\n  EXPECT_TRUE(IsValidEscape('W'));\n}\n\nTEST(AtomMatchesCharTest, EscapedPunct) {\n  EXPECT_FALSE(AtomMatchesChar(true, '\\\\', '\\0'));\n  EXPECT_FALSE(AtomMatchesChar(true, '\\\\', ' '));\n  EXPECT_FALSE(AtomMatchesChar(true, '_', '.'));\n  EXPECT_FALSE(AtomMatchesChar(true, '.', 'a'));\n\n  EXPECT_TRUE(AtomMatchesChar(true, '\\\\', '\\\\'));\n  EXPECT_TRUE(AtomMatchesChar(true, '_', '_'));\n  EXPECT_TRUE(AtomMatchesChar(true, '+', '+'));\n  EXPECT_TRUE(AtomMatchesChar(true, '.', '.'));\n}\n\nTEST(AtomMatchesCharTest, Escaped_d) {\n  EXPECT_FALSE(AtomMatchesChar(true, 'd', '\\0'));\n  EXPECT_FALSE(AtomMatchesChar(true, 'd', 'a'));\n  EXPECT_FALSE(AtomMatchesChar(true, 'd', '.'));\n\n  EXPECT_TRUE(AtomMatchesChar(true, 'd', '0'));\n  EXPECT_TRUE(AtomMatchesChar(true, 'd', '9'));\n}\n\nTEST(AtomMatchesCharTest, Escaped_D) {\n  EXPECT_FALSE(AtomMatchesChar(true, 'D', '0'));\n  EXPECT_FALSE(AtomMatchesChar(true, 'D', '9'));\n\n  EXPECT_TRUE(AtomMatchesChar(true, 'D', '\\0'));\n  EXPECT_TRUE(AtomMatchesChar(true, 'D', 'a'));\n  EXPECT_TRUE(AtomMatchesChar(true, 'D', '-'));\n}\n\nTEST(AtomMatchesCharTest, Escaped_s) {\n  EXPECT_FALSE(AtomMatchesChar(true, 's', '\\0'));\n  EXPECT_FALSE(AtomMatchesChar(true, 's', 'a'));\n  EXPECT_FALSE(AtomMatchesChar(true, 's', '.'));\n  EXPECT_FALSE(AtomMatchesChar(true, 's', '9'));\n\n  EXPECT_TRUE(AtomMatchesChar(true, 's', ' '));\n  EXPECT_TRUE(AtomMatchesChar(true, 's', '\\n'));\n  EXPECT_TRUE(AtomMatchesChar(true, 's', '\\t'));\n}\n\nTEST(AtomMatchesCharTest, Escaped_S) {\n  
EXPECT_FALSE(AtomMatchesChar(true, 'S', ' '));\n  EXPECT_FALSE(AtomMatchesChar(true, 'S', '\\r'));\n\n  EXPECT_TRUE(AtomMatchesChar(true, 'S', '\\0'));\n  EXPECT_TRUE(AtomMatchesChar(true, 'S', 'a'));\n  EXPECT_TRUE(AtomMatchesChar(true, 'S', '9'));\n}\n\nTEST(AtomMatchesCharTest, Escaped_w) {\n  EXPECT_FALSE(AtomMatchesChar(true, 'w', '\\0'));\n  EXPECT_FALSE(AtomMatchesChar(true, 'w', '+'));\n  EXPECT_FALSE(AtomMatchesChar(true, 'w', ' '));\n  EXPECT_FALSE(AtomMatchesChar(true, 'w', '\\n'));\n\n  EXPECT_TRUE(AtomMatchesChar(true, 'w', '0'));\n  EXPECT_TRUE(AtomMatchesChar(true, 'w', 'b'));\n  EXPECT_TRUE(AtomMatchesChar(true, 'w', 'C'));\n  EXPECT_TRUE(AtomMatchesChar(true, 'w', '_'));\n}\n\nTEST(AtomMatchesCharTest, Escaped_W) {\n  EXPECT_FALSE(AtomMatchesChar(true, 'W', 'A'));\n  EXPECT_FALSE(AtomMatchesChar(true, 'W', 'b'));\n  EXPECT_FALSE(AtomMatchesChar(true, 'W', '9'));\n  EXPECT_FALSE(AtomMatchesChar(true, 'W', '_'));\n\n  EXPECT_TRUE(AtomMatchesChar(true, 'W', '\\0'));\n  EXPECT_TRUE(AtomMatchesChar(true, 'W', '*'));\n  EXPECT_TRUE(AtomMatchesChar(true, 'W', '\\n'));\n}\n\nTEST(AtomMatchesCharTest, EscapedWhiteSpace) {\n  EXPECT_FALSE(AtomMatchesChar(true, 'f', '\\0'));\n  EXPECT_FALSE(AtomMatchesChar(true, 'f', '\\n'));\n  EXPECT_FALSE(AtomMatchesChar(true, 'n', '\\0'));\n  EXPECT_FALSE(AtomMatchesChar(true, 'n', '\\r'));\n  EXPECT_FALSE(AtomMatchesChar(true, 'r', '\\0'));\n  EXPECT_FALSE(AtomMatchesChar(true, 'r', 'a'));\n  EXPECT_FALSE(AtomMatchesChar(true, 't', '\\0'));\n  EXPECT_FALSE(AtomMatchesChar(true, 't', 't'));\n  EXPECT_FALSE(AtomMatchesChar(true, 'v', '\\0'));\n  EXPECT_FALSE(AtomMatchesChar(true, 'v', '\\f'));\n\n  EXPECT_TRUE(AtomMatchesChar(true, 'f', '\\f'));\n  EXPECT_TRUE(AtomMatchesChar(true, 'n', '\\n'));\n  EXPECT_TRUE(AtomMatchesChar(true, 'r', '\\r'));\n  EXPECT_TRUE(AtomMatchesChar(true, 't', '\\t'));\n  EXPECT_TRUE(AtomMatchesChar(true, 'v', '\\v'));\n}\n\nTEST(AtomMatchesCharTest, UnescapedDot) {\n  
EXPECT_FALSE(AtomMatchesChar(false, '.', '\\n'));\n\n  EXPECT_TRUE(AtomMatchesChar(false, '.', '\\0'));\n  EXPECT_TRUE(AtomMatchesChar(false, '.', '.'));\n  EXPECT_TRUE(AtomMatchesChar(false, '.', 'a'));\n  EXPECT_TRUE(AtomMatchesChar(false, '.', ' '));\n}\n\nTEST(AtomMatchesCharTest, UnescapedChar) {\n  EXPECT_FALSE(AtomMatchesChar(false, 'a', '\\0'));\n  EXPECT_FALSE(AtomMatchesChar(false, 'a', 'b'));\n  EXPECT_FALSE(AtomMatchesChar(false, '$', 'a'));\n\n  EXPECT_TRUE(AtomMatchesChar(false, '$', '$'));\n  EXPECT_TRUE(AtomMatchesChar(false, '5', '5'));\n  EXPECT_TRUE(AtomMatchesChar(false, 'Z', 'Z'));\n}\n\nTEST(ValidateRegexTest, GeneratesFailureAndReturnsFalseForInvalid) {\n  EXPECT_NONFATAL_FAILURE(ASSERT_FALSE(ValidateRegex(NULL)),\n                          \"NULL is not a valid simple regular expression\");\n  EXPECT_NONFATAL_FAILURE(\n      ASSERT_FALSE(ValidateRegex(\"a\\\\\")),\n      \"Syntax error at index 1 in simple regular expression \\\"a\\\\\\\": \");\n  EXPECT_NONFATAL_FAILURE(ASSERT_FALSE(ValidateRegex(\"a\\\\\")),\n                          \"'\\\\' cannot appear at the end\");\n  EXPECT_NONFATAL_FAILURE(ASSERT_FALSE(ValidateRegex(\"\\\\n\\\\\")),\n                          \"'\\\\' cannot appear at the end\");\n  EXPECT_NONFATAL_FAILURE(ASSERT_FALSE(ValidateRegex(\"\\\\s\\\\hb\")),\n                          \"invalid escape sequence \\\"\\\\h\\\"\");\n  EXPECT_NONFATAL_FAILURE(ASSERT_FALSE(ValidateRegex(\"^^\")),\n                          \"'^' can only appear at the beginning\");\n  EXPECT_NONFATAL_FAILURE(ASSERT_FALSE(ValidateRegex(\".*^b\")),\n                          \"'^' can only appear at the beginning\");\n  EXPECT_NONFATAL_FAILURE(ASSERT_FALSE(ValidateRegex(\"$$\")),\n                          \"'$' can only appear at the end\");\n  EXPECT_NONFATAL_FAILURE(ASSERT_FALSE(ValidateRegex(\"^$a\")),\n                          \"'$' can only appear at the end\");\n  EXPECT_NONFATAL_FAILURE(ASSERT_FALSE(ValidateRegex(\"a(b\")),\n            
              \"'(' is unsupported\");\n  EXPECT_NONFATAL_FAILURE(ASSERT_FALSE(ValidateRegex(\"ab)\")),\n                          \"')' is unsupported\");\n  EXPECT_NONFATAL_FAILURE(ASSERT_FALSE(ValidateRegex(\"[ab\")),\n                          \"'[' is unsupported\");\n  EXPECT_NONFATAL_FAILURE(ASSERT_FALSE(ValidateRegex(\"a{2\")),\n                          \"'{' is unsupported\");\n  EXPECT_NONFATAL_FAILURE(ASSERT_FALSE(ValidateRegex(\"?\")),\n                          \"'?' can only follow a repeatable token\");\n  EXPECT_NONFATAL_FAILURE(ASSERT_FALSE(ValidateRegex(\"^*\")),\n                          \"'*' can only follow a repeatable token\");\n  EXPECT_NONFATAL_FAILURE(ASSERT_FALSE(ValidateRegex(\"5*+\")),\n                          \"'+' can only follow a repeatable token\");\n}\n\nTEST(ValidateRegexTest, ReturnsTrueForValid) {\n  EXPECT_TRUE(ValidateRegex(\"\"));\n  EXPECT_TRUE(ValidateRegex(\"a\"));\n  EXPECT_TRUE(ValidateRegex(\".*\"));\n  EXPECT_TRUE(ValidateRegex(\"^a_+\"));\n  EXPECT_TRUE(ValidateRegex(\"^a\\\\t\\\\&?\"));\n  EXPECT_TRUE(ValidateRegex(\"09*$\"));\n  EXPECT_TRUE(ValidateRegex(\"^Z$\"));\n  EXPECT_TRUE(ValidateRegex(\"a\\\\^Z\\\\$\\\\(\\\\)\\\\|\\\\[\\\\]\\\\{\\\\}\"));\n}\n\nTEST(MatchRepetitionAndRegexAtHeadTest, WorksForZeroOrOne) {\n  EXPECT_FALSE(MatchRepetitionAndRegexAtHead(false, 'a', '?', \"a\", \"ba\"));\n  // Repeating more than once.\n  EXPECT_FALSE(MatchRepetitionAndRegexAtHead(false, 'a', '?', \"b\", \"aab\"));\n\n  // Repeating zero times.\n  EXPECT_TRUE(MatchRepetitionAndRegexAtHead(false, 'a', '?', \"b\", \"ba\"));\n  // Repeating once.\n  EXPECT_TRUE(MatchRepetitionAndRegexAtHead(false, 'a', '?', \"b\", \"ab\"));\n  EXPECT_TRUE(MatchRepetitionAndRegexAtHead(false, '#', '?', \".\", \"##\"));\n}\n\nTEST(MatchRepetitionAndRegexAtHeadTest, WorksForZeroOrMany) {\n  EXPECT_FALSE(MatchRepetitionAndRegexAtHead(false, '.', '*', \"a$\", \"baab\"));\n\n  // Repeating zero times.\n  
EXPECT_TRUE(MatchRepetitionAndRegexAtHead(false, '.', '*', \"b\", \"bc\"));\n  // Repeating once.\n  EXPECT_TRUE(MatchRepetitionAndRegexAtHead(false, '.', '*', \"b\", \"abc\"));\n  // Repeating more than once.\n  EXPECT_TRUE(MatchRepetitionAndRegexAtHead(true, 'w', '*', \"-\", \"ab_1-g\"));\n}\n\nTEST(MatchRepetitionAndRegexAtHeadTest, WorksForOneOrMany) {\n  EXPECT_FALSE(MatchRepetitionAndRegexAtHead(false, '.', '+', \"a$\", \"baab\"));\n  // Repeating zero times.\n  EXPECT_FALSE(MatchRepetitionAndRegexAtHead(false, '.', '+', \"b\", \"bc\"));\n\n  // Repeating once.\n  EXPECT_TRUE(MatchRepetitionAndRegexAtHead(false, '.', '+', \"b\", \"abc\"));\n  // Repeating more than once.\n  EXPECT_TRUE(MatchRepetitionAndRegexAtHead(true, 'w', '+', \"-\", \"ab_1-g\"));\n}\n\nTEST(MatchRegexAtHeadTest, ReturnsTrueForEmptyRegex) {\n  EXPECT_TRUE(MatchRegexAtHead(\"\", \"\"));\n  EXPECT_TRUE(MatchRegexAtHead(\"\", \"ab\"));\n}\n\nTEST(MatchRegexAtHeadTest, WorksWhenDollarIsInRegex) {\n  EXPECT_FALSE(MatchRegexAtHead(\"$\", \"a\"));\n\n  EXPECT_TRUE(MatchRegexAtHead(\"$\", \"\"));\n  EXPECT_TRUE(MatchRegexAtHead(\"a$\", \"a\"));\n}\n\nTEST(MatchRegexAtHeadTest, WorksWhenRegexStartsWithEscapeSequence) {\n  EXPECT_FALSE(MatchRegexAtHead(\"\\\\w\", \"+\"));\n  EXPECT_FALSE(MatchRegexAtHead(\"\\\\W\", \"ab\"));\n\n  EXPECT_TRUE(MatchRegexAtHead(\"\\\\sa\", \"\\nab\"));\n  EXPECT_TRUE(MatchRegexAtHead(\"\\\\d\", \"1a\"));\n}\n\nTEST(MatchRegexAtHeadTest, WorksWhenRegexStartsWithRepetition) {\n  EXPECT_FALSE(MatchRegexAtHead(\".+a\", \"abc\"));\n  EXPECT_FALSE(MatchRegexAtHead(\"a?b\", \"aab\"));\n\n  EXPECT_TRUE(MatchRegexAtHead(\".*a\", \"bc12-ab\"));\n  EXPECT_TRUE(MatchRegexAtHead(\"a?b\", \"b\"));\n  EXPECT_TRUE(MatchRegexAtHead(\"a?b\", \"ab\"));\n}\n\nTEST(MatchRegexAtHeadTest,\n     WorksWhenRegexStartsWithRepetionOfEscapeSequence) {\n  EXPECT_FALSE(MatchRegexAtHead(\"\\\\.+a\", \"abc\"));\n  EXPECT_FALSE(MatchRegexAtHead(\"\\\\s?b\", \"  b\"));\n\n  
EXPECT_TRUE(MatchRegexAtHead(\"\\\\(*a\", \"((((ab\"));\n  EXPECT_TRUE(MatchRegexAtHead(\"\\\\^?b\", \"^b\"));\n  EXPECT_TRUE(MatchRegexAtHead(\"\\\\\\\\?b\", \"b\"));\n  EXPECT_TRUE(MatchRegexAtHead(\"\\\\\\\\?b\", \"\\\\b\"));\n}\n\nTEST(MatchRegexAtHeadTest, MatchesSequentially) {\n  EXPECT_FALSE(MatchRegexAtHead(\"ab.*c\", \"acabc\"));\n\n  EXPECT_TRUE(MatchRegexAtHead(\"ab.*c\", \"ab-fsc\"));\n}\n\nTEST(MatchRegexAnywhereTest, ReturnsFalseWhenStringIsNull) {\n  EXPECT_FALSE(MatchRegexAnywhere(\"\", NULL));\n}\n\nTEST(MatchRegexAnywhereTest, WorksWhenRegexStartsWithCaret) {\n  EXPECT_FALSE(MatchRegexAnywhere(\"^a\", \"ba\"));\n  EXPECT_FALSE(MatchRegexAnywhere(\"^$\", \"a\"));\n\n  EXPECT_TRUE(MatchRegexAnywhere(\"^a\", \"ab\"));\n  EXPECT_TRUE(MatchRegexAnywhere(\"^\", \"ab\"));\n  EXPECT_TRUE(MatchRegexAnywhere(\"^$\", \"\"));\n}\n\nTEST(MatchRegexAnywhereTest, ReturnsFalseWhenNoMatch) {\n  EXPECT_FALSE(MatchRegexAnywhere(\"a\", \"bcde123\"));\n  EXPECT_FALSE(MatchRegexAnywhere(\"a.+a\", \"--aa88888888\"));\n}\n\nTEST(MatchRegexAnywhereTest, ReturnsTrueWhenMatchingPrefix) {\n  EXPECT_TRUE(MatchRegexAnywhere(\"\\\\w+\", \"ab1_ - 5\"));\n  EXPECT_TRUE(MatchRegexAnywhere(\".*=\", \"=\"));\n  EXPECT_TRUE(MatchRegexAnywhere(\"x.*ab?.*bc\", \"xaaabc\"));\n}\n\nTEST(MatchRegexAnywhereTest, ReturnsTrueWhenMatchingNonPrefix) {\n  EXPECT_TRUE(MatchRegexAnywhere(\"\\\\w+\", \"$$$ ab1_ - 5\"));\n  EXPECT_TRUE(MatchRegexAnywhere(\"\\\\.+=\", \"=  ...=\"));\n}\n\n// Tests RE's implicit constructors.\nTEST(RETest, ImplicitConstructorWorks) {\n  const RE empty(\"\");\n  EXPECT_STREQ(\"\", empty.pattern());\n\n  const RE simple(\"hello\");\n  EXPECT_STREQ(\"hello\", simple.pattern());\n}\n\n// Tests that RE's constructors reject invalid regular expressions.\nTEST(RETest, RejectsInvalidRegex) {\n  EXPECT_NONFATAL_FAILURE({\n    const RE normal(NULL);\n  }, \"NULL is not a valid simple regular expression\");\n\n  EXPECT_NONFATAL_FAILURE({\n    const RE normal(\".*(\\\\w+\");\n  
}, \"'(' is unsupported\");\n\n  EXPECT_NONFATAL_FAILURE({\n    const RE invalid(\"^?\");\n  }, \"'?' can only follow a repeatable token\");\n}\n\n// Tests RE::FullMatch().\nTEST(RETest, FullMatchWorks) {\n  const RE empty(\"\");\n  EXPECT_TRUE(RE::FullMatch(\"\", empty));\n  EXPECT_FALSE(RE::FullMatch(\"a\", empty));\n\n  const RE re1(\"a\");\n  EXPECT_TRUE(RE::FullMatch(\"a\", re1));\n\n  const RE re(\"a.*z\");\n  EXPECT_TRUE(RE::FullMatch(\"az\", re));\n  EXPECT_TRUE(RE::FullMatch(\"axyz\", re));\n  EXPECT_FALSE(RE::FullMatch(\"baz\", re));\n  EXPECT_FALSE(RE::FullMatch(\"azy\", re));\n}\n\n// Tests RE::PartialMatch().\nTEST(RETest, PartialMatchWorks) {\n  const RE empty(\"\");\n  EXPECT_TRUE(RE::PartialMatch(\"\", empty));\n  EXPECT_TRUE(RE::PartialMatch(\"a\", empty));\n\n  const RE re(\"a.*z\");\n  EXPECT_TRUE(RE::PartialMatch(\"az\", re));\n  EXPECT_TRUE(RE::PartialMatch(\"axyz\", re));\n  EXPECT_TRUE(RE::PartialMatch(\"baz\", re));\n  EXPECT_TRUE(RE::PartialMatch(\"azy\", re));\n  EXPECT_FALSE(RE::PartialMatch(\"zza\", re));\n}\n\n#endif  // GTEST_USES_POSIX_RE\n\n#if !GTEST_OS_WINDOWS_MOBILE\n\nTEST(CaptureTest, CapturesStdout) {\n  CaptureStdout();\n  fprintf(stdout, \"abc\");\n  EXPECT_STREQ(\"abc\", GetCapturedStdout().c_str());\n\n  CaptureStdout();\n  fprintf(stdout, \"def%cghi\", '\\0');\n  EXPECT_EQ(::std::string(\"def\\0ghi\", 7), ::std::string(GetCapturedStdout()));\n}\n\nTEST(CaptureTest, CapturesStderr) {\n  CaptureStderr();\n  fprintf(stderr, \"jkl\");\n  EXPECT_STREQ(\"jkl\", GetCapturedStderr().c_str());\n\n  CaptureStderr();\n  fprintf(stderr, \"jkl%cmno\", '\\0');\n  EXPECT_EQ(::std::string(\"jkl\\0mno\", 7), ::std::string(GetCapturedStderr()));\n}\n\n// Tests that stdout and stderr capture don't interfere with each other.\nTEST(CaptureTest, CapturesStdoutAndStderr) {\n  CaptureStdout();\n  CaptureStderr();\n  fprintf(stdout, \"pqr\");\n  fprintf(stderr, \"stu\");\n  EXPECT_STREQ(\"pqr\", GetCapturedStdout().c_str());\n  
EXPECT_STREQ(\"stu\", GetCapturedStderr().c_str());\n}\n\nTEST(CaptureDeathTest, CannotReenterStdoutCapture) {\n  CaptureStdout();\n  EXPECT_DEATH_IF_SUPPORTED(CaptureStdout(),\n                            \"Only one stdout capturer can exist at a time\");\n  GetCapturedStdout();\n\n  // We cannot test stderr capturing using death tests as they use it\n  // themselves.\n}\n\n#endif  // !GTEST_OS_WINDOWS_MOBILE\n\nTEST(ThreadLocalTest, DefaultConstructorInitializesToDefaultValues) {\n  ThreadLocal<int> t1;\n  EXPECT_EQ(0, t1.get());\n\n  ThreadLocal<void*> t2;\n  EXPECT_TRUE(t2.get() == nullptr);\n}\n\nTEST(ThreadLocalTest, SingleParamConstructorInitializesToParam) {\n  ThreadLocal<int> t1(123);\n  EXPECT_EQ(123, t1.get());\n\n  int i = 0;\n  ThreadLocal<int*> t2(&i);\n  EXPECT_EQ(&i, t2.get());\n}\n\nclass NoDefaultContructor {\n public:\n  explicit NoDefaultContructor(const char*) {}\n  NoDefaultContructor(const NoDefaultContructor&) {}\n};\n\nTEST(ThreadLocalTest, ValueDefaultContructorIsNotRequiredForParamVersion) {\n  ThreadLocal<NoDefaultContructor> bar(NoDefaultContructor(\"foo\"));\n  bar.pointer();\n}\n\nTEST(ThreadLocalTest, GetAndPointerReturnSameValue) {\n  ThreadLocal<std::string> thread_local_string;\n\n  EXPECT_EQ(thread_local_string.pointer(), &(thread_local_string.get()));\n\n  // Verifies the condition still holds after calling set.\n  thread_local_string.set(\"foo\");\n  EXPECT_EQ(thread_local_string.pointer(), &(thread_local_string.get()));\n}\n\nTEST(ThreadLocalTest, PointerAndConstPointerReturnSameValue) {\n  ThreadLocal<std::string> thread_local_string;\n  const ThreadLocal<std::string>& const_thread_local_string =\n      thread_local_string;\n\n  EXPECT_EQ(thread_local_string.pointer(), const_thread_local_string.pointer());\n\n  thread_local_string.set(\"foo\");\n  EXPECT_EQ(thread_local_string.pointer(), const_thread_local_string.pointer());\n}\n\n#if GTEST_IS_THREADSAFE\n\nvoid AddTwo(int* param) { *param += 2; 
}\n\nTEST(ThreadWithParamTest, ConstructorExecutesThreadFunc) {\n  int i = 40;\n  ThreadWithParam<int*> thread(&AddTwo, &i, nullptr);\n  thread.Join();\n  EXPECT_EQ(42, i);\n}\n\nTEST(MutexDeathTest, AssertHeldShouldAssertWhenNotLocked) {\n  // AssertHeld() is flaky only in the presence of multiple threads accessing\n  // the lock. In this case, the test is robust.\n  EXPECT_DEATH_IF_SUPPORTED({\n    Mutex m;\n    { MutexLock lock(&m); }\n    m.AssertHeld();\n  },\n  \"thread .*hold\");\n}\n\nTEST(MutexTest, AssertHeldShouldNotAssertWhenLocked) {\n  Mutex m;\n  MutexLock lock(&m);\n  m.AssertHeld();\n}\n\nclass AtomicCounterWithMutex {\n public:\n  explicit AtomicCounterWithMutex(Mutex* mutex) :\n    value_(0), mutex_(mutex), random_(42) {}\n\n  void Increment() {\n    MutexLock lock(mutex_);\n    int temp = value_;\n    {\n      // We need to put up a memory barrier to prevent reads and writes to\n      // value_ rearranged with the call to SleepMilliseconds when observed\n      // from other threads.\n#if GTEST_HAS_PTHREAD\n      // On POSIX, locking a mutex puts up a memory barrier.  
We cannot use\n      // Mutex and MutexLock here or rely on their memory barrier\n      // functionality as we are testing them here.\n      pthread_mutex_t memory_barrier_mutex;\n      GTEST_CHECK_POSIX_SUCCESS_(\n          pthread_mutex_init(&memory_barrier_mutex, nullptr));\n      GTEST_CHECK_POSIX_SUCCESS_(pthread_mutex_lock(&memory_barrier_mutex));\n\n      SleepMilliseconds(static_cast<int>(random_.Generate(30)));\n\n      GTEST_CHECK_POSIX_SUCCESS_(pthread_mutex_unlock(&memory_barrier_mutex));\n      GTEST_CHECK_POSIX_SUCCESS_(pthread_mutex_destroy(&memory_barrier_mutex));\n#elif GTEST_OS_WINDOWS\n      // On Windows, performing an interlocked access puts up a memory barrier.\n      volatile LONG dummy = 0;\n      ::InterlockedIncrement(&dummy);\n      SleepMilliseconds(static_cast<int>(random_.Generate(30)));\n      ::InterlockedIncrement(&dummy);\n#else\n# error \"Memory barrier not implemented on this platform.\"\n#endif  // GTEST_HAS_PTHREAD\n    }\n    value_ = temp + 1;\n  }\n  int value() const { return value_; }\n\n private:\n  volatile int value_;\n  Mutex* const mutex_;  // Protects value_.\n  Random       random_;\n};\n\nvoid CountingThreadFunc(pair<AtomicCounterWithMutex*, int> param) {\n  for (int i = 0; i < param.second; ++i)\n      param.first->Increment();\n}\n\n// Tests that the mutex only lets one thread at a time to lock it.\nTEST(MutexTest, OnlyOneThreadCanLockAtATime) {\n  Mutex mutex;\n  AtomicCounterWithMutex locked_counter(&mutex);\n\n  typedef ThreadWithParam<pair<AtomicCounterWithMutex*, int> > ThreadType;\n  const int kCycleCount = 20;\n  const int kThreadCount = 7;\n  std::unique_ptr<ThreadType> counting_threads[kThreadCount];\n  Notification threads_can_start;\n  // Creates and runs kThreadCount threads that increment locked_counter\n  // kCycleCount times each.\n  for (int i = 0; i < kThreadCount; ++i) {\n    counting_threads[i].reset(new ThreadType(&CountingThreadFunc,\n                                             
make_pair(&locked_counter,\n                                                       kCycleCount),\n                                             &threads_can_start));\n  }\n  threads_can_start.Notify();\n  for (int i = 0; i < kThreadCount; ++i)\n    counting_threads[i]->Join();\n\n  // If the mutex lets more than one thread to increment the counter at a\n  // time, they are likely to encounter a race condition and have some\n  // increments overwritten, resulting in the lower then expected counter\n  // value.\n  EXPECT_EQ(kCycleCount * kThreadCount, locked_counter.value());\n}\n\ntemplate <typename T>\nvoid RunFromThread(void (func)(T), T param) {\n  ThreadWithParam<T> thread(func, param, nullptr);\n  thread.Join();\n}\n\nvoid RetrieveThreadLocalValue(\n    pair<ThreadLocal<std::string>*, std::string*> param) {\n  *param.second = param.first->get();\n}\n\nTEST(ThreadLocalTest, ParameterizedConstructorSetsDefault) {\n  ThreadLocal<std::string> thread_local_string(\"foo\");\n  EXPECT_STREQ(\"foo\", thread_local_string.get().c_str());\n\n  thread_local_string.set(\"bar\");\n  EXPECT_STREQ(\"bar\", thread_local_string.get().c_str());\n\n  std::string result;\n  RunFromThread(&RetrieveThreadLocalValue,\n                make_pair(&thread_local_string, &result));\n  EXPECT_STREQ(\"foo\", result.c_str());\n}\n\n// Keeps track of whether of destructors being called on instances of\n// DestructorTracker.  
On Windows, waits for the destructor call reports.\nclass DestructorCall {\n public:\n  DestructorCall() {\n    invoked_ = false;\n#if GTEST_OS_WINDOWS\n    wait_event_.Reset(::CreateEvent(NULL, TRUE, FALSE, NULL));\n    GTEST_CHECK_(wait_event_.Get() != NULL);\n#endif\n  }\n\n  bool CheckDestroyed() const {\n#if GTEST_OS_WINDOWS\n    if (::WaitForSingleObject(wait_event_.Get(), 1000) != WAIT_OBJECT_0)\n      return false;\n#endif\n    return invoked_;\n  }\n\n  void ReportDestroyed() {\n    invoked_ = true;\n#if GTEST_OS_WINDOWS\n    ::SetEvent(wait_event_.Get());\n#endif\n  }\n\n  static std::vector<DestructorCall*>& List() { return *list_; }\n\n  static void ResetList() {\n    for (size_t i = 0; i < list_->size(); ++i) {\n      delete list_->at(i);\n    }\n    list_->clear();\n  }\n\n private:\n  bool invoked_;\n#if GTEST_OS_WINDOWS\n  AutoHandle wait_event_;\n#endif\n  static std::vector<DestructorCall*>* const list_;\n\n  GTEST_DISALLOW_COPY_AND_ASSIGN_(DestructorCall);\n};\n\nstd::vector<DestructorCall*>* const DestructorCall::list_ =\n    new std::vector<DestructorCall*>;\n\n// DestructorTracker keeps track of whether its instances have been\n// destroyed.\nclass DestructorTracker {\n public:\n  DestructorTracker() : index_(GetNewIndex()) {}\n  DestructorTracker(const DestructorTracker& /* rhs */)\n      : index_(GetNewIndex()) {}\n  ~DestructorTracker() {\n    // We never access DestructorCall::List() concurrently, so we don't need\n    // to protect this access with a mutex.\n    DestructorCall::List()[index_]->ReportDestroyed();\n  }\n\n private:\n  static size_t GetNewIndex() {\n    DestructorCall::List().push_back(new DestructorCall);\n    return DestructorCall::List().size() - 1;\n  }\n  const size_t index_;\n\n  GTEST_DISALLOW_ASSIGN_(DestructorTracker);\n};\n\ntypedef ThreadLocal<DestructorTracker>* ThreadParam;\n\nvoid CallThreadLocalGet(ThreadParam thread_local_param) {\n  thread_local_param->get();\n}\n\n// Tests that when a ThreadLocal object 
dies in a thread, it destroys\n// the managed object for that thread.\nTEST(ThreadLocalTest, DestroysManagedObjectForOwnThreadWhenDying) {\n  DestructorCall::ResetList();\n\n  {\n    ThreadLocal<DestructorTracker> thread_local_tracker;\n    ASSERT_EQ(0U, DestructorCall::List().size());\n\n    // This creates another DestructorTracker object for the main thread.\n    thread_local_tracker.get();\n    ASSERT_EQ(1U, DestructorCall::List().size());\n    ASSERT_FALSE(DestructorCall::List()[0]->CheckDestroyed());\n  }\n\n  // Now thread_local_tracker has died.\n  ASSERT_EQ(1U, DestructorCall::List().size());\n  EXPECT_TRUE(DestructorCall::List()[0]->CheckDestroyed());\n\n  DestructorCall::ResetList();\n}\n\n// Tests that when a thread exits, the thread-local object for that\n// thread is destroyed.\nTEST(ThreadLocalTest, DestroysManagedObjectAtThreadExit) {\n  DestructorCall::ResetList();\n\n  {\n    ThreadLocal<DestructorTracker> thread_local_tracker;\n    ASSERT_EQ(0U, DestructorCall::List().size());\n\n    // This creates another DestructorTracker object in the new thread.\n    ThreadWithParam<ThreadParam> thread(&CallThreadLocalGet,\n                                        &thread_local_tracker, nullptr);\n    thread.Join();\n\n    // The thread has exited, and we should have a DestroyedTracker\n    // instance created for it. 
But it may not have been destroyed yet.\n    ASSERT_EQ(1U, DestructorCall::List().size());\n  }\n\n  // The thread has exited and thread_local_tracker has died.\n  ASSERT_EQ(1U, DestructorCall::List().size());\n  EXPECT_TRUE(DestructorCall::List()[0]->CheckDestroyed());\n\n  DestructorCall::ResetList();\n}\n\nTEST(ThreadLocalTest, ThreadLocalMutationsAffectOnlyCurrentThread) {\n  ThreadLocal<std::string> thread_local_string;\n  thread_local_string.set(\"Foo\");\n  EXPECT_STREQ(\"Foo\", thread_local_string.get().c_str());\n\n  std::string result;\n  RunFromThread(&RetrieveThreadLocalValue,\n                make_pair(&thread_local_string, &result));\n  EXPECT_TRUE(result.empty());\n}\n\n#endif  // GTEST_IS_THREADSAFE\n\n#if GTEST_OS_WINDOWS\nTEST(WindowsTypesTest, HANDLEIsVoidStar) {\n  StaticAssertTypeEq<HANDLE, void*>();\n}\n\n#if GTEST_OS_WINDOWS_MINGW && !defined(__MINGW64_VERSION_MAJOR)\nTEST(WindowsTypesTest, _CRITICAL_SECTIONIs_CRITICAL_SECTION) {\n  StaticAssertTypeEq<CRITICAL_SECTION, _CRITICAL_SECTION>();\n}\n#else\nTEST(WindowsTypesTest, CRITICAL_SECTIONIs_RTL_CRITICAL_SECTION) {\n  StaticAssertTypeEq<CRITICAL_SECTION, _RTL_CRITICAL_SECTION>();\n}\n#endif\n\n#endif  // GTEST_OS_WINDOWS\n\n}  // namespace internal\n}  // namespace testing\n"
  },
  {
    "path": "test/googletest/test/googletest-printers-test.cc",
    "content": "// Copyright 2007, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\n// Google Test - The Google C++ Testing and Mocking Framework\n//\n// This file tests the universal value printer.\n\n#include <ctype.h>\n#include <limits.h>\n#include <string.h>\n#include <algorithm>\n#include <deque>\n#include <forward_list>\n#include <list>\n#include <map>\n#include <set>\n#include <sstream>\n#include <string>\n#include <unordered_map>\n#include <unordered_set>\n#include <utility>\n#include <vector>\n\n#include \"gtest/gtest-printers.h\"\n#include \"gtest/gtest.h\"\n\n// Some user-defined types for testing the universal value printer.\n\n// An anonymous enum type.\nenum AnonymousEnum {\n  kAE1 = -1,\n  kAE2 = 1\n};\n\n// An enum without a user-defined printer.\nenum EnumWithoutPrinter {\n  kEWP1 = -2,\n  kEWP2 = 42\n};\n\n// An enum with a << operator.\nenum EnumWithStreaming {\n  kEWS1 = 10\n};\n\nstd::ostream& operator<<(std::ostream& os, EnumWithStreaming e) {\n  return os << (e == kEWS1 ? \"kEWS1\" : \"invalid\");\n}\n\n// An enum with a PrintTo() function.\nenum EnumWithPrintTo {\n  kEWPT1 = 1\n};\n\nvoid PrintTo(EnumWithPrintTo e, std::ostream* os) {\n  *os << (e == kEWPT1 ? 
\"kEWPT1\" : \"invalid\");\n}\n\n// A class implicitly convertible to BiggestInt.\nclass BiggestIntConvertible {\n public:\n  operator ::testing::internal::BiggestInt() const { return 42; }\n};\n\n// A user-defined unprintable class template in the global namespace.\ntemplate <typename T>\nclass UnprintableTemplateInGlobal {\n public:\n  UnprintableTemplateInGlobal() : value_() {}\n private:\n  T value_;\n};\n\n// A user-defined streamable type in the global namespace.\nclass StreamableInGlobal {\n public:\n  virtual ~StreamableInGlobal() {}\n};\n\ninline void operator<<(::std::ostream& os, const StreamableInGlobal& /* x */) {\n  os << \"StreamableInGlobal\";\n}\n\nvoid operator<<(::std::ostream& os, const StreamableInGlobal* /* x */) {\n  os << \"StreamableInGlobal*\";\n}\n\nnamespace foo {\n\n// A user-defined unprintable type in a user namespace.\nclass UnprintableInFoo {\n public:\n  UnprintableInFoo() : z_(0) { memcpy(xy_, \"\\xEF\\x12\\x0\\x0\\x34\\xAB\\x0\\x0\", 8); }\n  double z() const { return z_; }\n private:\n  char xy_[8];\n  double z_;\n};\n\n// A user-defined printable type in a user-chosen namespace.\nstruct PrintableViaPrintTo {\n  PrintableViaPrintTo() : value() {}\n  int value;\n};\n\nvoid PrintTo(const PrintableViaPrintTo& x, ::std::ostream* os) {\n  *os << \"PrintableViaPrintTo: \" << x.value;\n}\n\n// A type with a user-defined << for printing its pointer.\nstruct PointerPrintable {\n};\n\n::std::ostream& operator<<(::std::ostream& os,\n                           const PointerPrintable* /* x */) {\n  return os << \"PointerPrintable*\";\n}\n\n// A user-defined printable class template in a user-chosen namespace.\ntemplate <typename T>\nclass PrintableViaPrintToTemplate {\n public:\n  explicit PrintableViaPrintToTemplate(const T& a_value) : value_(a_value) {}\n\n  const T& value() const { return value_; }\n private:\n  T value_;\n};\n\ntemplate <typename T>\nvoid PrintTo(const PrintableViaPrintToTemplate<T>& x, ::std::ostream* os) {\n  *os << 
\"PrintableViaPrintToTemplate: \" << x.value();\n}\n\n// A user-defined streamable class template in a user namespace.\ntemplate <typename T>\nclass StreamableTemplateInFoo {\n public:\n  StreamableTemplateInFoo() : value_() {}\n\n  const T& value() const { return value_; }\n private:\n  T value_;\n};\n\ntemplate <typename T>\ninline ::std::ostream& operator<<(::std::ostream& os,\n                                  const StreamableTemplateInFoo<T>& x) {\n  return os << \"StreamableTemplateInFoo: \" << x.value();\n}\n\n// A user-defined streamable but recursivly-defined container type in\n// a user namespace, it mimics therefore std::filesystem::path or\n// boost::filesystem::path.\nclass PathLike {\n public:\n  struct iterator {\n    typedef PathLike value_type;\n\n    iterator& operator++();\n    PathLike& operator*();\n  };\n\n  using value_type = char;\n  using const_iterator = iterator;\n\n  PathLike() {}\n\n  iterator begin() const { return iterator(); }\n  iterator end() const { return iterator(); }\n\n  friend ::std::ostream& operator<<(::std::ostream& os, const PathLike&) {\n    return os << \"Streamable-PathLike\";\n  }\n};\n\n}  // namespace foo\n\nnamespace testing {\nnamespace gtest_printers_test {\n\nusing ::std::deque;\nusing ::std::list;\nusing ::std::make_pair;\nusing ::std::map;\nusing ::std::multimap;\nusing ::std::multiset;\nusing ::std::pair;\nusing ::std::set;\nusing ::std::vector;\nusing ::testing::PrintToString;\nusing ::testing::internal::FormatForComparisonFailureMessage;\nusing ::testing::internal::ImplicitCast_;\nusing ::testing::internal::NativeArray;\nusing ::testing::internal::RelationToSourceReference;\nusing ::testing::internal::Strings;\nusing ::testing::internal::UniversalPrint;\nusing ::testing::internal::UniversalPrinter;\nusing ::testing::internal::UniversalTersePrint;\nusing ::testing::internal::UniversalTersePrintTupleFieldsToStrings;\n\n// Prints a value to a string using the universal value printer.  
This\n// is a helper for testing UniversalPrinter<T>::Print() for various types.\ntemplate <typename T>\nstd::string Print(const T& value) {\n  ::std::stringstream ss;\n  UniversalPrinter<T>::Print(value, &ss);\n  return ss.str();\n}\n\n// Prints a value passed by reference to a string, using the universal\n// value printer.  This is a helper for testing\n// UniversalPrinter<T&>::Print() for various types.\ntemplate <typename T>\nstd::string PrintByRef(const T& value) {\n  ::std::stringstream ss;\n  UniversalPrinter<T&>::Print(value, &ss);\n  return ss.str();\n}\n\n// Tests printing various enum types.\n\nTEST(PrintEnumTest, AnonymousEnum) {\n  EXPECT_EQ(\"-1\", Print(kAE1));\n  EXPECT_EQ(\"1\", Print(kAE2));\n}\n\nTEST(PrintEnumTest, EnumWithoutPrinter) {\n  EXPECT_EQ(\"-2\", Print(kEWP1));\n  EXPECT_EQ(\"42\", Print(kEWP2));\n}\n\nTEST(PrintEnumTest, EnumWithStreaming) {\n  EXPECT_EQ(\"kEWS1\", Print(kEWS1));\n  EXPECT_EQ(\"invalid\", Print(static_cast<EnumWithStreaming>(0)));\n}\n\nTEST(PrintEnumTest, EnumWithPrintTo) {\n  EXPECT_EQ(\"kEWPT1\", Print(kEWPT1));\n  EXPECT_EQ(\"invalid\", Print(static_cast<EnumWithPrintTo>(0)));\n}\n\n// Tests printing a class implicitly convertible to BiggestInt.\n\nTEST(PrintClassTest, BiggestIntConvertible) {\n  EXPECT_EQ(\"42\", Print(BiggestIntConvertible()));\n}\n\n// Tests printing various char types.\n\n// char.\nTEST(PrintCharTest, PlainChar) {\n  EXPECT_EQ(\"'\\\\0'\", Print('\\0'));\n  EXPECT_EQ(\"'\\\\'' (39, 0x27)\", Print('\\''));\n  EXPECT_EQ(\"'\\\"' (34, 0x22)\", Print('\"'));\n  EXPECT_EQ(\"'?' 
(63, 0x3F)\", Print('?'));\n  EXPECT_EQ(\"'\\\\\\\\' (92, 0x5C)\", Print('\\\\'));\n  EXPECT_EQ(\"'\\\\a' (7)\", Print('\\a'));\n  EXPECT_EQ(\"'\\\\b' (8)\", Print('\\b'));\n  EXPECT_EQ(\"'\\\\f' (12, 0xC)\", Print('\\f'));\n  EXPECT_EQ(\"'\\\\n' (10, 0xA)\", Print('\\n'));\n  EXPECT_EQ(\"'\\\\r' (13, 0xD)\", Print('\\r'));\n  EXPECT_EQ(\"'\\\\t' (9)\", Print('\\t'));\n  EXPECT_EQ(\"'\\\\v' (11, 0xB)\", Print('\\v'));\n  EXPECT_EQ(\"'\\\\x7F' (127)\", Print('\\x7F'));\n  EXPECT_EQ(\"'\\\\xFF' (255)\", Print('\\xFF'));\n  EXPECT_EQ(\"' ' (32, 0x20)\", Print(' '));\n  EXPECT_EQ(\"'a' (97, 0x61)\", Print('a'));\n}\n\n// signed char.\nTEST(PrintCharTest, SignedChar) {\n  EXPECT_EQ(\"'\\\\0'\", Print(static_cast<signed char>('\\0')));\n  EXPECT_EQ(\"'\\\\xCE' (-50)\",\n            Print(static_cast<signed char>(-50)));\n}\n\n// unsigned char.\nTEST(PrintCharTest, UnsignedChar) {\n  EXPECT_EQ(\"'\\\\0'\", Print(static_cast<unsigned char>('\\0')));\n  EXPECT_EQ(\"'b' (98, 0x62)\",\n            Print(static_cast<unsigned char>('b')));\n}\n\n// Tests printing other simple, built-in types.\n\n// bool.\nTEST(PrintBuiltInTypeTest, Bool) {\n  EXPECT_EQ(\"false\", Print(false));\n  EXPECT_EQ(\"true\", Print(true));\n}\n\n// wchar_t.\nTEST(PrintBuiltInTypeTest, Wchar_t) {\n  EXPECT_EQ(\"L'\\\\0'\", Print(L'\\0'));\n  EXPECT_EQ(\"L'\\\\'' (39, 0x27)\", Print(L'\\''));\n  EXPECT_EQ(\"L'\\\"' (34, 0x22)\", Print(L'\"'));\n  EXPECT_EQ(\"L'?' 
(63, 0x3F)\", Print(L'?'));\n  EXPECT_EQ(\"L'\\\\\\\\' (92, 0x5C)\", Print(L'\\\\'));\n  EXPECT_EQ(\"L'\\\\a' (7)\", Print(L'\\a'));\n  EXPECT_EQ(\"L'\\\\b' (8)\", Print(L'\\b'));\n  EXPECT_EQ(\"L'\\\\f' (12, 0xC)\", Print(L'\\f'));\n  EXPECT_EQ(\"L'\\\\n' (10, 0xA)\", Print(L'\\n'));\n  EXPECT_EQ(\"L'\\\\r' (13, 0xD)\", Print(L'\\r'));\n  EXPECT_EQ(\"L'\\\\t' (9)\", Print(L'\\t'));\n  EXPECT_EQ(\"L'\\\\v' (11, 0xB)\", Print(L'\\v'));\n  EXPECT_EQ(\"L'\\\\x7F' (127)\", Print(L'\\x7F'));\n  EXPECT_EQ(\"L'\\\\xFF' (255)\", Print(L'\\xFF'));\n  EXPECT_EQ(\"L' ' (32, 0x20)\", Print(L' '));\n  EXPECT_EQ(\"L'a' (97, 0x61)\", Print(L'a'));\n  EXPECT_EQ(\"L'\\\\x576' (1398)\", Print(static_cast<wchar_t>(0x576)));\n  EXPECT_EQ(\"L'\\\\xC74D' (51021)\", Print(static_cast<wchar_t>(0xC74D)));\n}\n\n// Test that Int64 provides more storage than wchar_t.\nTEST(PrintTypeSizeTest, Wchar_t) {\n  EXPECT_LT(sizeof(wchar_t), sizeof(testing::internal::Int64));\n}\n\n// Various integer types.\nTEST(PrintBuiltInTypeTest, Integer) {\n  EXPECT_EQ(\"'\\\\xFF' (255)\", Print(static_cast<unsigned char>(255)));  // uint8\n  EXPECT_EQ(\"'\\\\x80' (-128)\", Print(static_cast<signed char>(-128)));  // int8\n  EXPECT_EQ(\"65535\", Print(USHRT_MAX));  // uint16\n  EXPECT_EQ(\"-32768\", Print(SHRT_MIN));  // int16\n  EXPECT_EQ(\"4294967295\", Print(UINT_MAX));  // uint32\n  EXPECT_EQ(\"-2147483648\", Print(INT_MIN));  // int32\n  EXPECT_EQ(\"18446744073709551615\",\n            Print(static_cast<testing::internal::UInt64>(-1)));  // uint64\n  EXPECT_EQ(\"-9223372036854775808\",\n            Print(static_cast<testing::internal::Int64>(1) << 63));  // int64\n}\n\n// Size types.\nTEST(PrintBuiltInTypeTest, Size_t) {\n  EXPECT_EQ(\"1\", Print(sizeof('a')));  // size_t.\n#if !GTEST_OS_WINDOWS\n  // Windows has no ssize_t type.\n  EXPECT_EQ(\"-2\", Print(static_cast<ssize_t>(-2)));  // ssize_t.\n#endif  // !GTEST_OS_WINDOWS\n}\n\n// Floating-points.\nTEST(PrintBuiltInTypeTest, FloatingPoints) {\n  
EXPECT_EQ(\"1.5\", Print(1.5f));   // float\n  EXPECT_EQ(\"-2.5\", Print(-2.5));  // double\n}\n\n// Since ::std::stringstream::operator<<(const void *) formats the pointer\n// output differently with different compilers, we have to create the expected\n// output first and use it as our expectation.\nstatic std::string PrintPointer(const void* p) {\n  ::std::stringstream expected_result_stream;\n  expected_result_stream << p;\n  return expected_result_stream.str();\n}\n\n// Tests printing C strings.\n\n// const char*.\nTEST(PrintCStringTest, Const) {\n  const char* p = \"World\";\n  EXPECT_EQ(PrintPointer(p) + \" pointing to \\\"World\\\"\", Print(p));\n}\n\n// char*.\nTEST(PrintCStringTest, NonConst) {\n  char p[] = \"Hi\";\n  EXPECT_EQ(PrintPointer(p) + \" pointing to \\\"Hi\\\"\",\n            Print(static_cast<char*>(p)));\n}\n\n// NULL C string.\nTEST(PrintCStringTest, Null) {\n  const char* p = nullptr;\n  EXPECT_EQ(\"NULL\", Print(p));\n}\n\n// Tests that C strings are escaped properly.\nTEST(PrintCStringTest, EscapesProperly) {\n  const char* p = \"'\\\"?\\\\\\a\\b\\f\\n\\r\\t\\v\\x7F\\xFF a\";\n  EXPECT_EQ(PrintPointer(p) + \" pointing to \\\"'\\\\\\\"?\\\\\\\\\\\\a\\\\b\\\\f\"\n            \"\\\\n\\\\r\\\\t\\\\v\\\\x7F\\\\xFF a\\\"\",\n            Print(p));\n}\n\n// MSVC compiler can be configured to define whar_t as a typedef\n// of unsigned short. Defining an overload for const wchar_t* in that case\n// would cause pointers to unsigned shorts be printed as wide strings,\n// possibly accessing more memory than intended and causing invalid\n// memory accesses. 
MSVC defines _NATIVE_WCHAR_T_DEFINED symbol when\n// wchar_t is implemented as a native type.\n#if !defined(_MSC_VER) || defined(_NATIVE_WCHAR_T_DEFINED)\n\n// const wchar_t*.\nTEST(PrintWideCStringTest, Const) {\n  const wchar_t* p = L\"World\";\n  EXPECT_EQ(PrintPointer(p) + \" pointing to L\\\"World\\\"\", Print(p));\n}\n\n// wchar_t*.\nTEST(PrintWideCStringTest, NonConst) {\n  wchar_t p[] = L\"Hi\";\n  EXPECT_EQ(PrintPointer(p) + \" pointing to L\\\"Hi\\\"\",\n            Print(static_cast<wchar_t*>(p)));\n}\n\n// NULL wide C string.\nTEST(PrintWideCStringTest, Null) {\n  const wchar_t* p = nullptr;\n  EXPECT_EQ(\"NULL\", Print(p));\n}\n\n// Tests that wide C strings are escaped properly.\nTEST(PrintWideCStringTest, EscapesProperly) {\n  const wchar_t s[] = {'\\'', '\"', '?', '\\\\', '\\a', '\\b', '\\f', '\\n', '\\r',\n                       '\\t', '\\v', 0xD3, 0x576, 0x8D3, 0xC74D, ' ', 'a', '\\0'};\n  EXPECT_EQ(PrintPointer(s) + \" pointing to L\\\"'\\\\\\\"?\\\\\\\\\\\\a\\\\b\\\\f\"\n            \"\\\\n\\\\r\\\\t\\\\v\\\\xD3\\\\x576\\\\x8D3\\\\xC74D a\\\"\",\n            Print(static_cast<const wchar_t*>(s)));\n}\n#endif  // native wchar_t\n\n// Tests printing pointers to other char types.\n\n// signed char*.\nTEST(PrintCharPointerTest, SignedChar) {\n  signed char* p = reinterpret_cast<signed char*>(0x1234);\n  EXPECT_EQ(PrintPointer(p), Print(p));\n  p = nullptr;\n  EXPECT_EQ(\"NULL\", Print(p));\n}\n\n// const signed char*.\nTEST(PrintCharPointerTest, ConstSignedChar) {\n  signed char* p = reinterpret_cast<signed char*>(0x1234);\n  EXPECT_EQ(PrintPointer(p), Print(p));\n  p = nullptr;\n  EXPECT_EQ(\"NULL\", Print(p));\n}\n\n// unsigned char*.\nTEST(PrintCharPointerTest, UnsignedChar) {\n  unsigned char* p = reinterpret_cast<unsigned char*>(0x1234);\n  EXPECT_EQ(PrintPointer(p), Print(p));\n  p = nullptr;\n  EXPECT_EQ(\"NULL\", Print(p));\n}\n\n// const unsigned char*.\nTEST(PrintCharPointerTest, ConstUnsignedChar) {\n  const unsigned char* p = 
reinterpret_cast<const unsigned char*>(0x1234);\n  EXPECT_EQ(PrintPointer(p), Print(p));\n  p = nullptr;\n  EXPECT_EQ(\"NULL\", Print(p));\n}\n\n// Tests printing pointers to simple, built-in types.\n\n// bool*.\nTEST(PrintPointerToBuiltInTypeTest, Bool) {\n  bool* p = reinterpret_cast<bool*>(0xABCD);\n  EXPECT_EQ(PrintPointer(p), Print(p));\n  p = nullptr;\n  EXPECT_EQ(\"NULL\", Print(p));\n}\n\n// void*.\nTEST(PrintPointerToBuiltInTypeTest, Void) {\n  void* p = reinterpret_cast<void*>(0xABCD);\n  EXPECT_EQ(PrintPointer(p), Print(p));\n  p = nullptr;\n  EXPECT_EQ(\"NULL\", Print(p));\n}\n\n// const void*.\nTEST(PrintPointerToBuiltInTypeTest, ConstVoid) {\n  const void* p = reinterpret_cast<const void*>(0xABCD);\n  EXPECT_EQ(PrintPointer(p), Print(p));\n  p = nullptr;\n  EXPECT_EQ(\"NULL\", Print(p));\n}\n\n// Tests printing pointers to pointers.\nTEST(PrintPointerToPointerTest, IntPointerPointer) {\n  int** p = reinterpret_cast<int**>(0xABCD);\n  EXPECT_EQ(PrintPointer(p), Print(p));\n  p = nullptr;\n  EXPECT_EQ(\"NULL\", Print(p));\n}\n\n// Tests printing (non-member) function pointers.\n\nvoid MyFunction(int /* n */) {}\n\nTEST(PrintPointerTest, NonMemberFunctionPointer) {\n  // We cannot directly cast &MyFunction to const void* because the\n  // standard disallows casting between pointers to functions and\n  // pointers to objects, and some compilers (e.g. 
GCC 3.4) enforce\n  // this limitation.\n  EXPECT_EQ(\n      PrintPointer(reinterpret_cast<const void*>(\n          reinterpret_cast<internal::BiggestInt>(&MyFunction))),\n      Print(&MyFunction));\n  int (*p)(bool) = NULL;  // NOLINT\n  EXPECT_EQ(\"NULL\", Print(p));\n}\n\n// An assertion predicate determining whether a one string is a prefix for\n// another.\ntemplate <typename StringType>\nAssertionResult HasPrefix(const StringType& str, const StringType& prefix) {\n  if (str.find(prefix, 0) == 0)\n    return AssertionSuccess();\n\n  const bool is_wide_string = sizeof(prefix[0]) > 1;\n  const char* const begin_string_quote = is_wide_string ? \"L\\\"\" : \"\\\"\";\n  return AssertionFailure()\n      << begin_string_quote << prefix << \"\\\" is not a prefix of \"\n      << begin_string_quote << str << \"\\\"\\n\";\n}\n\n// Tests printing member variable pointers.  Although they are called\n// pointers, they don't point to a location in the address space.\n// Their representation is implementation-defined.  Thus they will be\n// printed as raw bytes.\n\nstruct Foo {\n public:\n  virtual ~Foo() {}\n  int MyMethod(char x) { return x + 1; }\n  virtual char MyVirtualMethod(int /* n */) { return 'a'; }\n\n  int value;\n};\n\nTEST(PrintPointerTest, MemberVariablePointer) {\n  EXPECT_TRUE(HasPrefix(Print(&Foo::value),\n                        Print(sizeof(&Foo::value)) + \"-byte object \"));\n  int Foo::*p = NULL;  // NOLINT\n  EXPECT_TRUE(HasPrefix(Print(p),\n                        Print(sizeof(p)) + \"-byte object \"));\n}\n\n// Tests printing member function pointers.  Although they are called\n// pointers, they don't point to a location in the address space.\n// Their representation is implementation-defined.  
Thus they will be\n// printed as raw bytes.\nTEST(PrintPointerTest, MemberFunctionPointer) {\n  EXPECT_TRUE(HasPrefix(Print(&Foo::MyMethod),\n                        Print(sizeof(&Foo::MyMethod)) + \"-byte object \"));\n  EXPECT_TRUE(\n      HasPrefix(Print(&Foo::MyVirtualMethod),\n                Print(sizeof((&Foo::MyVirtualMethod))) + \"-byte object \"));\n  int (Foo::*p)(char) = NULL;  // NOLINT\n  EXPECT_TRUE(HasPrefix(Print(p),\n                        Print(sizeof(p)) + \"-byte object \"));\n}\n\n// Tests printing C arrays.\n\n// The difference between this and Print() is that it ensures that the\n// argument is a reference to an array.\ntemplate <typename T, size_t N>\nstd::string PrintArrayHelper(T (&a)[N]) {\n  return Print(a);\n}\n\n// One-dimensional array.\nTEST(PrintArrayTest, OneDimensionalArray) {\n  int a[5] = { 1, 2, 3, 4, 5 };\n  EXPECT_EQ(\"{ 1, 2, 3, 4, 5 }\", PrintArrayHelper(a));\n}\n\n// Two-dimensional array.\nTEST(PrintArrayTest, TwoDimensionalArray) {\n  int a[2][5] = {\n    { 1, 2, 3, 4, 5 },\n    { 6, 7, 8, 9, 0 }\n  };\n  EXPECT_EQ(\"{ { 1, 2, 3, 4, 5 }, { 6, 7, 8, 9, 0 } }\", PrintArrayHelper(a));\n}\n\n// Array of const elements.\nTEST(PrintArrayTest, ConstArray) {\n  const bool a[1] = { false };\n  EXPECT_EQ(\"{ false }\", PrintArrayHelper(a));\n}\n\n// char array without terminating NUL.\nTEST(PrintArrayTest, CharArrayWithNoTerminatingNul) {\n  // Array a contains '\\0' in the middle and doesn't end with '\\0'.\n  char a[] = { 'H', '\\0', 'i' };\n  EXPECT_EQ(\"\\\"H\\\\0i\\\" (no terminating NUL)\", PrintArrayHelper(a));\n}\n\n// const char array with terminating NUL.\nTEST(PrintArrayTest, ConstCharArrayWithTerminatingNul) {\n  const char a[] = \"\\0Hi\";\n  EXPECT_EQ(\"\\\"\\\\0Hi\\\"\", PrintArrayHelper(a));\n}\n\n// const wchar_t array without terminating NUL.\nTEST(PrintArrayTest, WCharArrayWithNoTerminatingNul) {\n  // Array a contains '\\0' in the middle and doesn't end with '\\0'.\n  const wchar_t a[] = { L'H', L'\\0', L'i' 
};\n  EXPECT_EQ(\"L\\\"H\\\\0i\\\" (no terminating NUL)\", PrintArrayHelper(a));\n}\n\n// wchar_t array with terminating NUL.\nTEST(PrintArrayTest, WConstCharArrayWithTerminatingNul) {\n  const wchar_t a[] = L\"\\0Hi\";\n  EXPECT_EQ(\"L\\\"\\\\0Hi\\\"\", PrintArrayHelper(a));\n}\n\n// Array of objects.\nTEST(PrintArrayTest, ObjectArray) {\n  std::string a[3] = {\"Hi\", \"Hello\", \"Ni hao\"};\n  EXPECT_EQ(\"{ \\\"Hi\\\", \\\"Hello\\\", \\\"Ni hao\\\" }\", PrintArrayHelper(a));\n}\n\n// Array with many elements.\nTEST(PrintArrayTest, BigArray) {\n  int a[100] = { 1, 2, 3 };\n  EXPECT_EQ(\"{ 1, 2, 3, 0, 0, 0, 0, 0, ..., 0, 0, 0, 0, 0, 0, 0, 0 }\",\n            PrintArrayHelper(a));\n}\n\n// Tests printing ::string and ::std::string.\n\n// ::std::string.\nTEST(PrintStringTest, StringInStdNamespace) {\n  const char s[] = \"'\\\"?\\\\\\a\\b\\f\\n\\0\\r\\t\\v\\x7F\\xFF a\";\n  const ::std::string str(s, sizeof(s));\n  EXPECT_EQ(\"\\\"'\\\\\\\"?\\\\\\\\\\\\a\\\\b\\\\f\\\\n\\\\0\\\\r\\\\t\\\\v\\\\x7F\\\\xFF a\\\\0\\\"\",\n            Print(str));\n}\n\nTEST(PrintStringTest, StringAmbiguousHex) {\n  // \"\\x6BANANA\" is ambiguous, it can be interpreted as starting with either of:\n  // '\\x6', '\\x6B', or '\\x6BA'.\n\n  // a hex escaping sequence following by a decimal digit\n  EXPECT_EQ(\"\\\"0\\\\x12\\\" \\\"3\\\"\", Print(::std::string(\"0\\x12\" \"3\")));\n  // a hex escaping sequence following by a hex digit (lower-case)\n  EXPECT_EQ(\"\\\"mm\\\\x6\\\" \\\"bananas\\\"\", Print(::std::string(\"mm\\x6\" \"bananas\")));\n  // a hex escaping sequence following by a hex digit (upper-case)\n  EXPECT_EQ(\"\\\"NOM\\\\x6\\\" \\\"BANANA\\\"\", Print(::std::string(\"NOM\\x6\" \"BANANA\")));\n  // a hex escaping sequence following by a non-xdigit\n  EXPECT_EQ(\"\\\"!\\\\x5-!\\\"\", Print(::std::string(\"!\\x5-!\")));\n}\n\n// Tests printing ::std::wstring.\n#if GTEST_HAS_STD_WSTRING\n// ::std::wstring.\nTEST(PrintWideStringTest, StringInStdNamespace) {\n  const wchar_t s[] = 
L\"'\\\"?\\\\\\a\\b\\f\\n\\0\\r\\t\\v\\xD3\\x576\\x8D3\\xC74D a\";\n  const ::std::wstring str(s, sizeof(s)/sizeof(wchar_t));\n  EXPECT_EQ(\"L\\\"'\\\\\\\"?\\\\\\\\\\\\a\\\\b\\\\f\\\\n\\\\0\\\\r\\\\t\\\\v\"\n            \"\\\\xD3\\\\x576\\\\x8D3\\\\xC74D a\\\\0\\\"\",\n            Print(str));\n}\n\nTEST(PrintWideStringTest, StringAmbiguousHex) {\n  // same for wide strings.\n  EXPECT_EQ(\"L\\\"0\\\\x12\\\" L\\\"3\\\"\", Print(::std::wstring(L\"0\\x12\" L\"3\")));\n  EXPECT_EQ(\"L\\\"mm\\\\x6\\\" L\\\"bananas\\\"\",\n            Print(::std::wstring(L\"mm\\x6\" L\"bananas\")));\n  EXPECT_EQ(\"L\\\"NOM\\\\x6\\\" L\\\"BANANA\\\"\",\n            Print(::std::wstring(L\"NOM\\x6\" L\"BANANA\")));\n  EXPECT_EQ(\"L\\\"!\\\\x5-!\\\"\", Print(::std::wstring(L\"!\\x5-!\")));\n}\n#endif  // GTEST_HAS_STD_WSTRING\n\n// Tests printing types that support generic streaming (i.e. streaming\n// to std::basic_ostream<Char, CharTraits> for any valid Char and\n// CharTraits types).\n\n// Tests printing a non-template type that supports generic streaming.\n\nclass AllowsGenericStreaming {};\n\ntemplate <typename Char, typename CharTraits>\nstd::basic_ostream<Char, CharTraits>& operator<<(\n    std::basic_ostream<Char, CharTraits>& os,\n    const AllowsGenericStreaming& /* a */) {\n  return os << \"AllowsGenericStreaming\";\n}\n\nTEST(PrintTypeWithGenericStreamingTest, NonTemplateType) {\n  AllowsGenericStreaming a;\n  EXPECT_EQ(\"AllowsGenericStreaming\", Print(a));\n}\n\n// Tests printing a template type that supports generic streaming.\n\ntemplate <typename T>\nclass AllowsGenericStreamingTemplate {};\n\ntemplate <typename Char, typename CharTraits, typename T>\nstd::basic_ostream<Char, CharTraits>& operator<<(\n    std::basic_ostream<Char, CharTraits>& os,\n    const AllowsGenericStreamingTemplate<T>& /* a */) {\n  return os << \"AllowsGenericStreamingTemplate\";\n}\n\nTEST(PrintTypeWithGenericStreamingTest, TemplateType) {\n  AllowsGenericStreamingTemplate<int> a;\n  
EXPECT_EQ(\"AllowsGenericStreamingTemplate\", Print(a));\n}\n\n// Tests printing a type that supports generic streaming and can be\n// implicitly converted to another printable type.\n\ntemplate <typename T>\nclass AllowsGenericStreamingAndImplicitConversionTemplate {\n public:\n  operator bool() const { return false; }\n};\n\ntemplate <typename Char, typename CharTraits, typename T>\nstd::basic_ostream<Char, CharTraits>& operator<<(\n    std::basic_ostream<Char, CharTraits>& os,\n    const AllowsGenericStreamingAndImplicitConversionTemplate<T>& /* a */) {\n  return os << \"AllowsGenericStreamingAndImplicitConversionTemplate\";\n}\n\nTEST(PrintTypeWithGenericStreamingTest, TypeImplicitlyConvertible) {\n  AllowsGenericStreamingAndImplicitConversionTemplate<int> a;\n  EXPECT_EQ(\"AllowsGenericStreamingAndImplicitConversionTemplate\", Print(a));\n}\n\n#if GTEST_HAS_ABSL\n\n// Tests printing ::absl::string_view.\n\nTEST(PrintStringViewTest, SimpleStringView) {\n  const ::absl::string_view sp = \"Hello\";\n  EXPECT_EQ(\"\\\"Hello\\\"\", Print(sp));\n}\n\nTEST(PrintStringViewTest, UnprintableCharacters) {\n  const char str[] = \"NUL (\\0) and \\r\\t\";\n  const ::absl::string_view sp(str, sizeof(str) - 1);\n  EXPECT_EQ(\"\\\"NUL (\\\\0) and \\\\r\\\\t\\\"\", Print(sp));\n}\n\n#endif  // GTEST_HAS_ABSL\n\n// Tests printing STL containers.\n\nTEST(PrintStlContainerTest, EmptyDeque) {\n  deque<char> empty;\n  EXPECT_EQ(\"{}\", Print(empty));\n}\n\nTEST(PrintStlContainerTest, NonEmptyDeque) {\n  deque<int> non_empty;\n  non_empty.push_back(1);\n  non_empty.push_back(3);\n  EXPECT_EQ(\"{ 1, 3 }\", Print(non_empty));\n}\n\n\nTEST(PrintStlContainerTest, OneElementHashMap) {\n  ::std::unordered_map<int, char> map1;\n  map1[1] = 'a';\n  EXPECT_EQ(\"{ (1, 'a' (97, 0x61)) }\", Print(map1));\n}\n\nTEST(PrintStlContainerTest, HashMultiMap) {\n  ::std::unordered_multimap<int, bool> map1;\n  map1.insert(make_pair(5, true));\n  map1.insert(make_pair(5, false));\n\n  // Elements of 
hash_multimap can be printed in any order.\n  const std::string result = Print(map1);\n  EXPECT_TRUE(result == \"{ (5, true), (5, false) }\" ||\n              result == \"{ (5, false), (5, true) }\")\n                  << \" where Print(map1) returns \\\"\" << result << \"\\\".\";\n}\n\n\n\nTEST(PrintStlContainerTest, HashSet) {\n  ::std::unordered_set<int> set1;\n  set1.insert(1);\n  EXPECT_EQ(\"{ 1 }\", Print(set1));\n}\n\nTEST(PrintStlContainerTest, HashMultiSet) {\n  const int kSize = 5;\n  int a[kSize] = { 1, 1, 2, 5, 1 };\n  ::std::unordered_multiset<int> set1(a, a + kSize);\n\n  // Elements of hash_multiset can be printed in any order.\n  const std::string result = Print(set1);\n  const std::string expected_pattern = \"{ d, d, d, d, d }\";  // d means a digit.\n\n  // Verifies the result matches the expected pattern; also extracts\n  // the numbers in the result.\n  ASSERT_EQ(expected_pattern.length(), result.length());\n  std::vector<int> numbers;\n  for (size_t i = 0; i != result.length(); i++) {\n    if (expected_pattern[i] == 'd') {\n      ASSERT_NE(isdigit(static_cast<unsigned char>(result[i])), 0);\n      numbers.push_back(result[i] - '0');\n    } else {\n      EXPECT_EQ(expected_pattern[i], result[i]) << \" where result is \"\n                                                << result;\n    }\n  }\n\n  // Makes sure the result contains the right numbers.\n  std::sort(numbers.begin(), numbers.end());\n  std::sort(a, a + kSize);\n  EXPECT_TRUE(std::equal(a, a + kSize, numbers.begin()));\n}\n\n\nTEST(PrintStlContainerTest, List) {\n  const std::string a[] = {\"hello\", \"world\"};\n  const list<std::string> strings(a, a + 2);\n  EXPECT_EQ(\"{ \\\"hello\\\", \\\"world\\\" }\", Print(strings));\n}\n\nTEST(PrintStlContainerTest, Map) {\n  map<int, bool> map1;\n  map1[1] = true;\n  map1[5] = false;\n  map1[3] = true;\n  EXPECT_EQ(\"{ (1, true), (3, true), (5, false) }\", Print(map1));\n}\n\nTEST(PrintStlContainerTest, MultiMap) {\n  multimap<bool, int> 
map1;\n  // The make_pair template function would deduce the type as\n  // pair<bool, int> here, and since the key part in a multimap has to\n  // be constant, without a templated ctor in the pair class (as in\n  // libCstd on Solaris), make_pair call would fail to compile as no\n  // implicit conversion is found.  Thus explicit typename is used\n  // here instead.\n  map1.insert(pair<const bool, int>(true, 0));\n  map1.insert(pair<const bool, int>(true, 1));\n  map1.insert(pair<const bool, int>(false, 2));\n  EXPECT_EQ(\"{ (false, 2), (true, 0), (true, 1) }\", Print(map1));\n}\n\nTEST(PrintStlContainerTest, Set) {\n  const unsigned int a[] = { 3, 0, 5 };\n  set<unsigned int> set1(a, a + 3);\n  EXPECT_EQ(\"{ 0, 3, 5 }\", Print(set1));\n}\n\nTEST(PrintStlContainerTest, MultiSet) {\n  const int a[] = { 1, 1, 2, 5, 1 };\n  multiset<int> set1(a, a + 5);\n  EXPECT_EQ(\"{ 1, 1, 1, 2, 5 }\", Print(set1));\n}\n\n\nTEST(PrintStlContainerTest, SinglyLinkedList) {\n  int a[] = { 9, 2, 8 };\n  const std::forward_list<int> ints(a, a + 3);\n  EXPECT_EQ(\"{ 9, 2, 8 }\", Print(ints));\n}\n\nTEST(PrintStlContainerTest, Pair) {\n  pair<const bool, int> p(true, 5);\n  EXPECT_EQ(\"(true, 5)\", Print(p));\n}\n\nTEST(PrintStlContainerTest, Vector) {\n  vector<int> v;\n  v.push_back(1);\n  v.push_back(2);\n  EXPECT_EQ(\"{ 1, 2 }\", Print(v));\n}\n\nTEST(PrintStlContainerTest, LongSequence) {\n  const int a[100] = { 1, 2, 3 };\n  const vector<int> v(a, a + 100);\n  EXPECT_EQ(\"{ 1, 2, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, \"\n            \"0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ... 
}\", Print(v));\n}\n\nTEST(PrintStlContainerTest, NestedContainer) {\n  const int a1[] = { 1, 2 };\n  const int a2[] = { 3, 4, 5 };\n  const list<int> l1(a1, a1 + 2);\n  const list<int> l2(a2, a2 + 3);\n\n  vector<list<int> > v;\n  v.push_back(l1);\n  v.push_back(l2);\n  EXPECT_EQ(\"{ { 1, 2 }, { 3, 4, 5 } }\", Print(v));\n}\n\nTEST(PrintStlContainerTest, OneDimensionalNativeArray) {\n  const int a[3] = { 1, 2, 3 };\n  NativeArray<int> b(a, 3, RelationToSourceReference());\n  EXPECT_EQ(\"{ 1, 2, 3 }\", Print(b));\n}\n\nTEST(PrintStlContainerTest, TwoDimensionalNativeArray) {\n  const int a[2][3] = { { 1, 2, 3 }, { 4, 5, 6 } };\n  NativeArray<int[3]> b(a, 2, RelationToSourceReference());\n  EXPECT_EQ(\"{ { 1, 2, 3 }, { 4, 5, 6 } }\", Print(b));\n}\n\n// Tests that a class named iterator isn't treated as a container.\n\nstruct iterator {\n  char x;\n};\n\nTEST(PrintStlContainerTest, Iterator) {\n  iterator it = {};\n  EXPECT_EQ(\"1-byte object <00>\", Print(it));\n}\n\n// Tests that a class named const_iterator isn't treated as a container.\n\nstruct const_iterator {\n  char x;\n};\n\nTEST(PrintStlContainerTest, ConstIterator) {\n  const_iterator it = {};\n  EXPECT_EQ(\"1-byte object <00>\", Print(it));\n}\n\n// Tests printing ::std::tuples.\n\n// Tuples of various arities.\nTEST(PrintStdTupleTest, VariousSizes) {\n  ::std::tuple<> t0;\n  EXPECT_EQ(\"()\", Print(t0));\n\n  ::std::tuple<int> t1(5);\n  EXPECT_EQ(\"(5)\", Print(t1));\n\n  ::std::tuple<char, bool> t2('a', true);\n  EXPECT_EQ(\"('a' (97, 0x61), true)\", Print(t2));\n\n  ::std::tuple<bool, int, int> t3(false, 2, 3);\n  EXPECT_EQ(\"(false, 2, 3)\", Print(t3));\n\n  ::std::tuple<bool, int, int, int> t4(false, 2, 3, 4);\n  EXPECT_EQ(\"(false, 2, 3, 4)\", Print(t4));\n\n  const char* const str = \"8\";\n  ::std::tuple<bool, char, short, testing::internal::Int32,  // NOLINT\n               testing::internal::Int64, float, double, const char*, void*,\n               std::string>\n      t10(false, 'a', 
static_cast<short>(3), 4, 5, 1.5F, -2.5, str,  // NOLINT\n          nullptr, \"10\");\n  EXPECT_EQ(\"(false, 'a' (97, 0x61), 3, 4, 5, 1.5, -2.5, \" + PrintPointer(str) +\n            \" pointing to \\\"8\\\", NULL, \\\"10\\\")\",\n            Print(t10));\n}\n\n// Nested tuples.\nTEST(PrintStdTupleTest, NestedTuple) {\n  ::std::tuple< ::std::tuple<int, bool>, char> nested(\n      ::std::make_tuple(5, true), 'a');\n  EXPECT_EQ(\"((5, true), 'a' (97, 0x61))\", Print(nested));\n}\n\nTEST(PrintNullptrT, Basic) {\n  EXPECT_EQ(\"(nullptr)\", Print(nullptr));\n}\n\nTEST(PrintReferenceWrapper, Printable) {\n  int x = 5;\n  EXPECT_EQ(\"@\" + PrintPointer(&x) + \" 5\", Print(std::ref(x)));\n  EXPECT_EQ(\"@\" + PrintPointer(&x) + \" 5\", Print(std::cref(x)));\n}\n\nTEST(PrintReferenceWrapper, Unprintable) {\n  ::foo::UnprintableInFoo up;\n  EXPECT_EQ(\n      \"@\" + PrintPointer(&up) +\n          \" 16-byte object <EF-12 00-00 34-AB 00-00 00-00 00-00 00-00 00-00>\",\n      Print(std::ref(up)));\n  EXPECT_EQ(\n      \"@\" + PrintPointer(&up) +\n          \" 16-byte object <EF-12 00-00 34-AB 00-00 00-00 00-00 00-00 00-00>\",\n      Print(std::cref(up)));\n}\n\n// Tests printing user-defined unprintable types.\n\n// Unprintable types in the global namespace.\nTEST(PrintUnprintableTypeTest, InGlobalNamespace) {\n  EXPECT_EQ(\"1-byte object <00>\",\n            Print(UnprintableTemplateInGlobal<char>()));\n}\n\n// Unprintable types in a user namespace.\nTEST(PrintUnprintableTypeTest, InUserNamespace) {\n  EXPECT_EQ(\"16-byte object <EF-12 00-00 34-AB 00-00 00-00 00-00 00-00 00-00>\",\n            Print(::foo::UnprintableInFoo()));\n}\n\n// Unprintable types are that too big to be printed completely.\n\nstruct Big {\n  Big() { memset(array, 0, sizeof(array)); }\n  char array[257];\n};\n\nTEST(PrintUnpritableTypeTest, BigObject) {\n  EXPECT_EQ(\"257-byte object <00-00 00-00 00-00 00-00 00-00 00-00 \"\n            \"00-00 00-00 00-00 00-00 00-00 00-00 00-00 00-00 00-00 00-00 \"\n     
       \"00-00 00-00 00-00 00-00 00-00 00-00 00-00 00-00 00-00 00-00 \"\n            \"00-00 00-00 00-00 00-00 00-00 00-00 ... 00-00 00-00 00-00 \"\n            \"00-00 00-00 00-00 00-00 00-00 00-00 00-00 00-00 00-00 00-00 \"\n            \"00-00 00-00 00-00 00-00 00-00 00-00 00-00 00-00 00-00 00-00 \"\n            \"00-00 00-00 00-00 00-00 00-00 00-00 00-00 00-00 00>\",\n            Print(Big()));\n}\n\n// Tests printing user-defined streamable types.\n\n// Streamable types in the global namespace.\nTEST(PrintStreamableTypeTest, InGlobalNamespace) {\n  StreamableInGlobal x;\n  EXPECT_EQ(\"StreamableInGlobal\", Print(x));\n  EXPECT_EQ(\"StreamableInGlobal*\", Print(&x));\n}\n\n// Printable template types in a user namespace.\nTEST(PrintStreamableTypeTest, TemplateTypeInUserNamespace) {\n  EXPECT_EQ(\"StreamableTemplateInFoo: 0\",\n            Print(::foo::StreamableTemplateInFoo<int>()));\n}\n\n// Tests printing a user-defined recursive container type that has a <<\n// operator.\nTEST(PrintStreamableTypeTest, PathLikeInUserNamespace) {\n  ::foo::PathLike x;\n  EXPECT_EQ(\"Streamable-PathLike\", Print(x));\n  const ::foo::PathLike cx;\n  EXPECT_EQ(\"Streamable-PathLike\", Print(cx));\n}\n\n// Tests printing user-defined types that have a PrintTo() function.\nTEST(PrintPrintableTypeTest, InUserNamespace) {\n  EXPECT_EQ(\"PrintableViaPrintTo: 0\",\n            Print(::foo::PrintableViaPrintTo()));\n}\n\n// Tests printing a pointer to a user-defined type that has a <<\n// operator for its pointer.\nTEST(PrintPrintableTypeTest, PointerInUserNamespace) {\n  ::foo::PointerPrintable x;\n  EXPECT_EQ(\"PointerPrintable*\", Print(&x));\n}\n\n// Tests printing user-defined class template that have a PrintTo() function.\nTEST(PrintPrintableTypeTest, TemplateInUserNamespace) {\n  EXPECT_EQ(\"PrintableViaPrintToTemplate: 5\",\n            Print(::foo::PrintableViaPrintToTemplate<int>(5)));\n}\n\n// Tests that the universal printer prints both the address and the\n// value of a 
reference.\nTEST(PrintReferenceTest, PrintsAddressAndValue) {\n  int n = 5;\n  EXPECT_EQ(\"@\" + PrintPointer(&n) + \" 5\", PrintByRef(n));\n\n  int a[2][3] = {\n    { 0, 1, 2 },\n    { 3, 4, 5 }\n  };\n  EXPECT_EQ(\"@\" + PrintPointer(a) + \" { { 0, 1, 2 }, { 3, 4, 5 } }\",\n            PrintByRef(a));\n\n  const ::foo::UnprintableInFoo x;\n  EXPECT_EQ(\"@\" + PrintPointer(&x) + \" 16-byte object \"\n            \"<EF-12 00-00 34-AB 00-00 00-00 00-00 00-00 00-00>\",\n            PrintByRef(x));\n}\n\n// Tests that the universal printer prints a function pointer passed by\n// reference.\nTEST(PrintReferenceTest, HandlesFunctionPointer) {\n  void (*fp)(int n) = &MyFunction;\n  const std::string fp_pointer_string =\n      PrintPointer(reinterpret_cast<const void*>(&fp));\n  // We cannot directly cast &MyFunction to const void* because the\n  // standard disallows casting between pointers to functions and\n  // pointers to objects, and some compilers (e.g. GCC 3.4) enforce\n  // this limitation.\n  const std::string fp_string = PrintPointer(reinterpret_cast<const void*>(\n      reinterpret_cast<internal::BiggestInt>(fp)));\n  EXPECT_EQ(\"@\" + fp_pointer_string + \" \" + fp_string,\n            PrintByRef(fp));\n}\n\n// Tests that the universal printer prints a member function pointer\n// passed by reference.\nTEST(PrintReferenceTest, HandlesMemberFunctionPointer) {\n  int (Foo::*p)(char ch) = &Foo::MyMethod;\n  EXPECT_TRUE(HasPrefix(\n      PrintByRef(p),\n      \"@\" + PrintPointer(reinterpret_cast<const void*>(&p)) + \" \" +\n          Print(sizeof(p)) + \"-byte object \"));\n\n  char (Foo::*p2)(int n) = &Foo::MyVirtualMethod;\n  EXPECT_TRUE(HasPrefix(\n      PrintByRef(p2),\n      \"@\" + PrintPointer(reinterpret_cast<const void*>(&p2)) + \" \" +\n          Print(sizeof(p2)) + \"-byte object \"));\n}\n\n// Tests that the universal printer prints a member variable pointer\n// passed by reference.\nTEST(PrintReferenceTest, HandlesMemberVariablePointer) {\n  int 
Foo::*p = &Foo::value;  // NOLINT\n  EXPECT_TRUE(HasPrefix(\n      PrintByRef(p),\n      \"@\" + PrintPointer(&p) + \" \" + Print(sizeof(p)) + \"-byte object \"));\n}\n\n// Tests that FormatForComparisonFailureMessage(), which is used to print\n// an operand in a comparison assertion (e.g. ASSERT_EQ) when the assertion\n// fails, formats the operand in the desired way.\n\n// scalar\nTEST(FormatForComparisonFailureMessageTest, WorksForScalar) {\n  EXPECT_STREQ(\"123\",\n               FormatForComparisonFailureMessage(123, 124).c_str());\n}\n\n// non-char pointer\nTEST(FormatForComparisonFailureMessageTest, WorksForNonCharPointer) {\n  int n = 0;\n  EXPECT_EQ(PrintPointer(&n),\n            FormatForComparisonFailureMessage(&n, &n).c_str());\n}\n\n// non-char array\nTEST(FormatForComparisonFailureMessageTest, FormatsNonCharArrayAsPointer) {\n  // In expression 'array == x', 'array' is compared by pointer.\n  // Therefore we want to print an array operand as a pointer.\n  int n[] = { 1, 2, 3 };\n  EXPECT_EQ(PrintPointer(n),\n            FormatForComparisonFailureMessage(n, n).c_str());\n}\n\n// Tests formatting a char pointer when it's compared with another pointer.\n// In this case we want to print it as a raw pointer, as the comparison is by\n// pointer.\n\n// char pointer vs pointer\nTEST(FormatForComparisonFailureMessageTest, WorksForCharPointerVsPointer) {\n  // In expression 'p == x', where 'p' and 'x' are (const or not) char\n  // pointers, the operands are compared by pointer.  
Therefore we\n  // want to print 'p' as a pointer instead of a C string (we don't\n  // even know if it's supposed to point to a valid C string).\n\n  // const char*\n  const char* s = \"hello\";\n  EXPECT_EQ(PrintPointer(s),\n            FormatForComparisonFailureMessage(s, s).c_str());\n\n  // char*\n  char ch = 'a';\n  EXPECT_EQ(PrintPointer(&ch),\n            FormatForComparisonFailureMessage(&ch, &ch).c_str());\n}\n\n// wchar_t pointer vs pointer\nTEST(FormatForComparisonFailureMessageTest, WorksForWCharPointerVsPointer) {\n  // In expression 'p == x', where 'p' and 'x' are (const or not) char\n  // pointers, the operands are compared by pointer.  Therefore we\n  // want to print 'p' as a pointer instead of a wide C string (we don't\n  // even know if it's supposed to point to a valid wide C string).\n\n  // const wchar_t*\n  const wchar_t* s = L\"hello\";\n  EXPECT_EQ(PrintPointer(s),\n            FormatForComparisonFailureMessage(s, s).c_str());\n\n  // wchar_t*\n  wchar_t ch = L'a';\n  EXPECT_EQ(PrintPointer(&ch),\n            FormatForComparisonFailureMessage(&ch, &ch).c_str());\n}\n\n// Tests formatting a char pointer when it's compared to a string object.\n// In this case we want to print the char pointer as a C string.\n\n// char pointer vs std::string\nTEST(FormatForComparisonFailureMessageTest, WorksForCharPointerVsStdString) {\n  const char* s = \"hello \\\"world\";\n  EXPECT_STREQ(\"\\\"hello \\\\\\\"world\\\"\",  // The string content should be escaped.\n               FormatForComparisonFailureMessage(s, ::std::string()).c_str());\n\n  // char*\n  char str[] = \"hi\\1\";\n  char* p = str;\n  EXPECT_STREQ(\"\\\"hi\\\\x1\\\"\",  // The string content should be escaped.\n               FormatForComparisonFailureMessage(p, ::std::string()).c_str());\n}\n\n#if GTEST_HAS_STD_WSTRING\n// wchar_t pointer vs std::wstring\nTEST(FormatForComparisonFailureMessageTest, WorksForWCharPointerVsStdWString) {\n  const wchar_t* s = L\"hi \\\"world\";\n  
EXPECT_STREQ(\"L\\\"hi \\\\\\\"world\\\"\",  // The string content should be escaped.\n               FormatForComparisonFailureMessage(s, ::std::wstring()).c_str());\n\n  // wchar_t*\n  wchar_t str[] = L\"hi\\1\";\n  wchar_t* p = str;\n  EXPECT_STREQ(\"L\\\"hi\\\\x1\\\"\",  // The string content should be escaped.\n               FormatForComparisonFailureMessage(p, ::std::wstring()).c_str());\n}\n#endif\n\n// Tests formatting a char array when it's compared with a pointer or array.\n// In this case we want to print the array as a row pointer, as the comparison\n// is by pointer.\n\n// char array vs pointer\nTEST(FormatForComparisonFailureMessageTest, WorksForCharArrayVsPointer) {\n  char str[] = \"hi \\\"world\\\"\";\n  char* p = nullptr;\n  EXPECT_EQ(PrintPointer(str),\n            FormatForComparisonFailureMessage(str, p).c_str());\n}\n\n// char array vs char array\nTEST(FormatForComparisonFailureMessageTest, WorksForCharArrayVsCharArray) {\n  const char str[] = \"hi \\\"world\\\"\";\n  EXPECT_EQ(PrintPointer(str),\n            FormatForComparisonFailureMessage(str, str).c_str());\n}\n\n// wchar_t array vs pointer\nTEST(FormatForComparisonFailureMessageTest, WorksForWCharArrayVsPointer) {\n  wchar_t str[] = L\"hi \\\"world\\\"\";\n  wchar_t* p = nullptr;\n  EXPECT_EQ(PrintPointer(str),\n            FormatForComparisonFailureMessage(str, p).c_str());\n}\n\n// wchar_t array vs wchar_t array\nTEST(FormatForComparisonFailureMessageTest, WorksForWCharArrayVsWCharArray) {\n  const wchar_t str[] = L\"hi \\\"world\\\"\";\n  EXPECT_EQ(PrintPointer(str),\n            FormatForComparisonFailureMessage(str, str).c_str());\n}\n\n// Tests formatting a char array when it's compared with a string object.\n// In this case we want to print the array as a C string.\n\n// char array vs std::string\nTEST(FormatForComparisonFailureMessageTest, WorksForCharArrayVsStdString) {\n  const char str[] = \"hi \\\"world\\\"\";\n  EXPECT_STREQ(\"\\\"hi \\\\\\\"world\\\\\\\"\\\"\",  // The 
content should be escaped.\n               FormatForComparisonFailureMessage(str, ::std::string()).c_str());\n}\n\n#if GTEST_HAS_STD_WSTRING\n// wchar_t array vs std::wstring\nTEST(FormatForComparisonFailureMessageTest, WorksForWCharArrayVsStdWString) {\n  const wchar_t str[] = L\"hi \\\"w\\0rld\\\"\";\n  EXPECT_STREQ(\n      \"L\\\"hi \\\\\\\"w\\\"\",  // The content should be escaped.\n                        // Embedded NUL terminates the string.\n      FormatForComparisonFailureMessage(str, ::std::wstring()).c_str());\n}\n#endif\n\n// Useful for testing PrintToString().  We cannot use EXPECT_EQ()\n// there as its implementation uses PrintToString().  The caller must\n// ensure that 'value' has no side effect.\n#define EXPECT_PRINT_TO_STRING_(value, expected_string)         \\\n  EXPECT_TRUE(PrintToString(value) == (expected_string))        \\\n      << \" where \" #value \" prints as \" << (PrintToString(value))\n\nTEST(PrintToStringTest, WorksForScalar) {\n  EXPECT_PRINT_TO_STRING_(123, \"123\");\n}\n\nTEST(PrintToStringTest, WorksForPointerToConstChar) {\n  const char* p = \"hello\";\n  EXPECT_PRINT_TO_STRING_(p, \"\\\"hello\\\"\");\n}\n\nTEST(PrintToStringTest, WorksForPointerToNonConstChar) {\n  char s[] = \"hello\";\n  char* p = s;\n  EXPECT_PRINT_TO_STRING_(p, \"\\\"hello\\\"\");\n}\n\nTEST(PrintToStringTest, EscapesForPointerToConstChar) {\n  const char* p = \"hello\\n\";\n  EXPECT_PRINT_TO_STRING_(p, \"\\\"hello\\\\n\\\"\");\n}\n\nTEST(PrintToStringTest, EscapesForPointerToNonConstChar) {\n  char s[] = \"hello\\1\";\n  char* p = s;\n  EXPECT_PRINT_TO_STRING_(p, \"\\\"hello\\\\x1\\\"\");\n}\n\nTEST(PrintToStringTest, WorksForArray) {\n  int n[3] = { 1, 2, 3 };\n  EXPECT_PRINT_TO_STRING_(n, \"{ 1, 2, 3 }\");\n}\n\nTEST(PrintToStringTest, WorksForCharArray) {\n  char s[] = \"hello\";\n  EXPECT_PRINT_TO_STRING_(s, \"\\\"hello\\\"\");\n}\n\nTEST(PrintToStringTest, WorksForCharArrayWithEmbeddedNul) {\n  const char str_with_nul[] = \"hello\\0 world\";\n  
EXPECT_PRINT_TO_STRING_(str_with_nul, \"\\\"hello\\\\0 world\\\"\");\n\n  char mutable_str_with_nul[] = \"hello\\0 world\";\n  EXPECT_PRINT_TO_STRING_(mutable_str_with_nul, \"\\\"hello\\\\0 world\\\"\");\n}\n\n  TEST(PrintToStringTest, ContainsNonLatin) {\n  // Sanity test with valid UTF-8. Prints both in hex and as text.\n  std::string non_ascii_str = ::std::string(\"오전 4:30\");\n  EXPECT_PRINT_TO_STRING_(non_ascii_str,\n                          \"\\\"\\\\xEC\\\\x98\\\\xA4\\\\xEC\\\\xA0\\\\x84 4:30\\\"\\n\"\n                          \"    As Text: \\\"오전 4:30\\\"\");\n  non_ascii_str = ::std::string(\"From ä — ẑ\");\n  EXPECT_PRINT_TO_STRING_(non_ascii_str,\n                          \"\\\"From \\\\xC3\\\\xA4 \\\\xE2\\\\x80\\\\x94 \\\\xE1\\\\xBA\\\\x91\\\"\"\n                          \"\\n    As Text: \\\"From ä — ẑ\\\"\");\n}\n\nTEST(IsValidUTF8Test, IllFormedUTF8) {\n  // The following test strings are ill-formed UTF-8 and are printed\n  // as hex only (or ASCII, in case of ASCII bytes) because IsValidUTF8() is\n  // expected to fail, thus output does not contain \"As Text:\".\n\n  static const char *const kTestdata[][2] = {\n    // 2-byte lead byte followed by a single-byte character.\n    {\"\\xC3\\x74\", \"\\\"\\\\xC3t\\\"\"},\n    // Valid 2-byte character followed by an orphan trail byte.\n    {\"\\xC3\\x84\\xA4\", \"\\\"\\\\xC3\\\\x84\\\\xA4\\\"\"},\n    // Lead byte without trail byte.\n    {\"abc\\xC3\", \"\\\"abc\\\\xC3\\\"\"},\n    // 3-byte lead byte, single-byte character, orphan trail byte.\n    {\"x\\xE2\\x70\\x94\", \"\\\"x\\\\xE2p\\\\x94\\\"\"},\n    // Truncated 3-byte character.\n    {\"\\xE2\\x80\", \"\\\"\\\\xE2\\\\x80\\\"\"},\n    // Truncated 3-byte character followed by valid 2-byte char.\n    {\"\\xE2\\x80\\xC3\\x84\", \"\\\"\\\\xE2\\\\x80\\\\xC3\\\\x84\\\"\"},\n    // Truncated 3-byte character followed by a single-byte character.\n    {\"\\xE2\\x80\\x7A\", \"\\\"\\\\xE2\\\\x80z\\\"\"},\n    // 3-byte lead byte followed by valid 
3-byte character.\n    {\"\\xE2\\xE2\\x80\\x94\", \"\\\"\\\\xE2\\\\xE2\\\\x80\\\\x94\\\"\"},\n    // 4-byte lead byte followed by valid 3-byte character.\n    {\"\\xF0\\xE2\\x80\\x94\", \"\\\"\\\\xF0\\\\xE2\\\\x80\\\\x94\\\"\"},\n    // Truncated 4-byte character.\n    {\"\\xF0\\xE2\\x80\", \"\\\"\\\\xF0\\\\xE2\\\\x80\\\"\"},\n     // Invalid UTF-8 byte sequences embedded in other chars.\n    {\"abc\\xE2\\x80\\x94\\xC3\\x74xyc\", \"\\\"abc\\\\xE2\\\\x80\\\\x94\\\\xC3txyc\\\"\"},\n    {\"abc\\xC3\\x84\\xE2\\x80\\xC3\\x84xyz\",\n     \"\\\"abc\\\\xC3\\\\x84\\\\xE2\\\\x80\\\\xC3\\\\x84xyz\\\"\"},\n    // Non-shortest UTF-8 byte sequences are also ill-formed.\n    // The classics: xC0, xC1 lead byte.\n    {\"\\xC0\\x80\", \"\\\"\\\\xC0\\\\x80\\\"\"},\n    {\"\\xC1\\x81\", \"\\\"\\\\xC1\\\\x81\\\"\"},\n    // Non-shortest sequences.\n    {\"\\xE0\\x80\\x80\", \"\\\"\\\\xE0\\\\x80\\\\x80\\\"\"},\n    {\"\\xf0\\x80\\x80\\x80\", \"\\\"\\\\xF0\\\\x80\\\\x80\\\\x80\\\"\"},\n    // Last valid code point before surrogate range, should be printed as text,\n    // too.\n    {\"\\xED\\x9F\\xBF\", \"\\\"\\\\xED\\\\x9F\\\\xBF\\\"\\n    As Text: \\\"퟿\\\"\"},\n    // Start of surrogate lead. 
Surrogates are not printed as text.\n    {\"\\xED\\xA0\\x80\", \"\\\"\\\\xED\\\\xA0\\\\x80\\\"\"},\n    // Last non-private surrogate lead.\n    {\"\\xED\\xAD\\xBF\", \"\\\"\\\\xED\\\\xAD\\\\xBF\\\"\"},\n    // First private-use surrogate lead.\n    {\"\\xED\\xAE\\x80\", \"\\\"\\\\xED\\\\xAE\\\\x80\\\"\"},\n    // Last private-use surrogate lead.\n    {\"\\xED\\xAF\\xBF\", \"\\\"\\\\xED\\\\xAF\\\\xBF\\\"\"},\n    // Mid-point of surrogate trail.\n    {\"\\xED\\xB3\\xBF\", \"\\\"\\\\xED\\\\xB3\\\\xBF\\\"\"},\n    // First valid code point after surrogate range, should be printed as text,\n    // too.\n    {\"\\xEE\\x80\\x80\", \"\\\"\\\\xEE\\\\x80\\\\x80\\\"\\n    As Text: \\\"\\\"\"}\n  };\n\n  for (int i = 0; i < int(sizeof(kTestdata)/sizeof(kTestdata[0])); ++i) {\n    EXPECT_PRINT_TO_STRING_(kTestdata[i][0], kTestdata[i][1]);\n  }\n}\n\n#undef EXPECT_PRINT_TO_STRING_\n\nTEST(UniversalTersePrintTest, WorksForNonReference) {\n  ::std::stringstream ss;\n  UniversalTersePrint(123, &ss);\n  EXPECT_EQ(\"123\", ss.str());\n}\n\nTEST(UniversalTersePrintTest, WorksForReference) {\n  const int& n = 123;\n  ::std::stringstream ss;\n  UniversalTersePrint(n, &ss);\n  EXPECT_EQ(\"123\", ss.str());\n}\n\nTEST(UniversalTersePrintTest, WorksForCString) {\n  const char* s1 = \"abc\";\n  ::std::stringstream ss1;\n  UniversalTersePrint(s1, &ss1);\n  EXPECT_EQ(\"\\\"abc\\\"\", ss1.str());\n\n  char* s2 = const_cast<char*>(s1);\n  ::std::stringstream ss2;\n  UniversalTersePrint(s2, &ss2);\n  EXPECT_EQ(\"\\\"abc\\\"\", ss2.str());\n\n  const char* s3 = nullptr;\n  ::std::stringstream ss3;\n  UniversalTersePrint(s3, &ss3);\n  EXPECT_EQ(\"NULL\", ss3.str());\n}\n\nTEST(UniversalPrintTest, WorksForNonReference) {\n  ::std::stringstream ss;\n  UniversalPrint(123, &ss);\n  EXPECT_EQ(\"123\", ss.str());\n}\n\nTEST(UniversalPrintTest, WorksForReference) {\n  const int& n = 123;\n  ::std::stringstream ss;\n  UniversalPrint(n, &ss);\n  EXPECT_EQ(\"123\", 
ss.str());\n}\n\nTEST(UniversalPrintTest, WorksForCString) {\n  const char* s1 = \"abc\";\n  ::std::stringstream ss1;\n  UniversalPrint(s1, &ss1);\n  EXPECT_EQ(PrintPointer(s1) + \" pointing to \\\"abc\\\"\", std::string(ss1.str()));\n\n  char* s2 = const_cast<char*>(s1);\n  ::std::stringstream ss2;\n  UniversalPrint(s2, &ss2);\n  EXPECT_EQ(PrintPointer(s2) + \" pointing to \\\"abc\\\"\", std::string(ss2.str()));\n\n  const char* s3 = nullptr;\n  ::std::stringstream ss3;\n  UniversalPrint(s3, &ss3);\n  EXPECT_EQ(\"NULL\", ss3.str());\n}\n\nTEST(UniversalPrintTest, WorksForCharArray) {\n  const char str[] = \"\\\"Line\\0 1\\\"\\nLine 2\";\n  ::std::stringstream ss1;\n  UniversalPrint(str, &ss1);\n  EXPECT_EQ(\"\\\"\\\\\\\"Line\\\\0 1\\\\\\\"\\\\nLine 2\\\"\", ss1.str());\n\n  const char mutable_str[] = \"\\\"Line\\0 1\\\"\\nLine 2\";\n  ::std::stringstream ss2;\n  UniversalPrint(mutable_str, &ss2);\n  EXPECT_EQ(\"\\\"\\\\\\\"Line\\\\0 1\\\\\\\"\\\\nLine 2\\\"\", ss2.str());\n}\n\nTEST(UniversalTersePrintTupleFieldsToStringsTestWithStd, PrintsEmptyTuple) {\n  Strings result = UniversalTersePrintTupleFieldsToStrings(::std::make_tuple());\n  EXPECT_EQ(0u, result.size());\n}\n\nTEST(UniversalTersePrintTupleFieldsToStringsTestWithStd, PrintsOneTuple) {\n  Strings result = UniversalTersePrintTupleFieldsToStrings(\n      ::std::make_tuple(1));\n  ASSERT_EQ(1u, result.size());\n  EXPECT_EQ(\"1\", result[0]);\n}\n\nTEST(UniversalTersePrintTupleFieldsToStringsTestWithStd, PrintsTwoTuple) {\n  Strings result = UniversalTersePrintTupleFieldsToStrings(\n      ::std::make_tuple(1, 'a'));\n  ASSERT_EQ(2u, result.size());\n  EXPECT_EQ(\"1\", result[0]);\n  EXPECT_EQ(\"'a' (97, 0x61)\", result[1]);\n}\n\nTEST(UniversalTersePrintTupleFieldsToStringsTestWithStd, PrintsTersely) {\n  const int n = 1;\n  Strings result = UniversalTersePrintTupleFieldsToStrings(\n      ::std::tuple<const int&, const char*>(n, \"a\"));\n  ASSERT_EQ(2u, result.size());\n  EXPECT_EQ(\"1\", result[0]);\n  
EXPECT_EQ(\"\\\"a\\\"\", result[1]);\n}\n\n#if GTEST_HAS_ABSL\n\nTEST(PrintOptionalTest, Basic) {\n  absl::optional<int> value;\n  EXPECT_EQ(\"(nullopt)\", PrintToString(value));\n  value = {7};\n  EXPECT_EQ(\"(7)\", PrintToString(value));\n  EXPECT_EQ(\"(1.1)\", PrintToString(absl::optional<double>{1.1}));\n  EXPECT_EQ(\"(\\\"A\\\")\", PrintToString(absl::optional<std::string>{\"A\"}));\n}\n\nstruct NonPrintable {\n  unsigned char contents = 17;\n};\n\nTEST(PrintOneofTest, Basic) {\n  using Type = absl::variant<int, StreamableInGlobal, NonPrintable>;\n  EXPECT_EQ(\"('int' with value 7)\", PrintToString(Type(7)));\n  EXPECT_EQ(\"('StreamableInGlobal' with value StreamableInGlobal)\",\n            PrintToString(Type(StreamableInGlobal{})));\n  EXPECT_EQ(\n      \"('testing::gtest_printers_test::NonPrintable' with value 1-byte object \"\n      \"<11>)\",\n      PrintToString(Type(NonPrintable{})));\n}\n#endif  // GTEST_HAS_ABSL\nnamespace {\nclass string_ref;\n\n/**\n * This is a synthetic pointer to a fixed size string.\n */\nclass string_ptr {\n public:\n  string_ptr(const char* data, size_t size) : data_(data), size_(size) {}\n\n  string_ptr& operator++() noexcept {\n    data_ += size_;\n    return *this;\n  }\n\n  string_ref operator*() const noexcept;\n\n private:\n  const char* data_;\n  size_t size_;\n};\n\n/**\n * This is a synthetic reference of a fixed size string.\n */\nclass string_ref {\n public:\n  string_ref(const char* data, size_t size) : data_(data), size_(size) {}\n\n  string_ptr operator&() const noexcept { return {data_, size_}; }  // NOLINT\n\n  bool operator==(const char* s) const noexcept {\n    if (size_ > 0 && data_[size_ - 1] != 0) {\n      return std::string(data_, size_) == std::string(s);\n    } else {\n      return std::string(data_) == std::string(s);\n    }\n  }\n\n private:\n  const char* data_;\n  size_t size_;\n};\n\nstring_ref string_ptr::operator*() const noexcept { return {data_, size_}; }\n\nTEST(string_ref, compare) {\n  const 
char* s = \"alex\\0davidjohn\\0\";\n  string_ptr ptr(s, 5);\n  EXPECT_EQ(*ptr, \"alex\");\n  EXPECT_TRUE(*ptr == \"alex\");\n  ++ptr;\n  EXPECT_EQ(*ptr, \"david\");\n  EXPECT_TRUE(*ptr == \"david\");\n  ++ptr;\n  EXPECT_EQ(*ptr, \"john\");\n}\n\n}  // namespace\n\n}  // namespace gtest_printers_test\n}  // namespace testing\n"
  },
  {
    "path": "test/googletest/test/googletest-shuffle-test.py",
    "content": "#!/usr/bin/env python\n#\n# Copyright 2009 Google Inc. All Rights Reserved.\n#\n# Redistribution and use in source and binary forms, with or without\n# modification, are permitted provided that the following conditions are\n# met:\n#\n#     * Redistributions of source code must retain the above copyright\n# notice, this list of conditions and the following disclaimer.\n#     * Redistributions in binary form must reproduce the above\n# copyright notice, this list of conditions and the following disclaimer\n# in the documentation and/or other materials provided with the\n# distribution.\n#     * Neither the name of Google Inc. nor the names of its\n# contributors may be used to endorse or promote products derived from\n# this software without specific prior written permission.\n#\n# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n# \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\"\"\"Verifies that test shuffling works.\"\"\"\n\nimport os\nimport gtest_test_utils\n\n# Command to run the googletest-shuffle-test_ program.\nCOMMAND = gtest_test_utils.GetTestExecutablePath('googletest-shuffle-test_')\n\n# The environment variables for test sharding.\nTOTAL_SHARDS_ENV_VAR = 'GTEST_TOTAL_SHARDS'\nSHARD_INDEX_ENV_VAR = 'GTEST_SHARD_INDEX'\n\nTEST_FILTER = 'A*.A:A*.B:C*'\n\nALL_TESTS = []\nACTIVE_TESTS = []\nFILTERED_TESTS = []\nSHARDED_TESTS = []\n\nSHUFFLED_ALL_TESTS = []\nSHUFFLED_ACTIVE_TESTS = []\nSHUFFLED_FILTERED_TESTS = []\nSHUFFLED_SHARDED_TESTS = []\n\n\ndef AlsoRunDisabledTestsFlag():\n  return '--gtest_also_run_disabled_tests'\n\n\ndef FilterFlag(test_filter):\n  return '--gtest_filter=%s' % (test_filter,)\n\n\ndef RepeatFlag(n):\n  return '--gtest_repeat=%s' % (n,)\n\n\ndef ShuffleFlag():\n  return '--gtest_shuffle'\n\n\ndef RandomSeedFlag(n):\n  return '--gtest_random_seed=%s' % (n,)\n\n\ndef RunAndReturnOutput(extra_env, args):\n  \"\"\"Runs the test program and returns its output.\"\"\"\n\n  environ_copy = os.environ.copy()\n  environ_copy.update(extra_env)\n\n  return gtest_test_utils.Subprocess([COMMAND] + args, env=environ_copy).output\n\n\ndef GetTestsForAllIterations(extra_env, args):\n  \"\"\"Runs the test program and returns a list of test lists.\n\n  Args:\n    extra_env: a map from environment variables to their values\n    args: command line flags to pass to googletest-shuffle-test_\n\n  
Returns:\n    A list where the i-th element is the list of tests run in the i-th\n    test iteration.\n  \"\"\"\n\n  test_iterations = []\n  for line in RunAndReturnOutput(extra_env, args).split('\\n'):\n    if line.startswith('----'):\n      tests = []\n      test_iterations.append(tests)\n    elif line.strip():\n      tests.append(line.strip())  # 'TestCaseName.TestName'\n\n  return test_iterations\n\n\ndef GetTestCases(tests):\n  \"\"\"Returns a list of test cases in the given full test names.\n\n  Args:\n    tests: a list of full test names\n\n  Returns:\n    A list of test cases from 'tests', in their original order.\n    Consecutive duplicates are removed.\n  \"\"\"\n\n  test_cases = []\n  for test in tests:\n    test_case = test.split('.')[0]\n    if not test_case in test_cases:\n      test_cases.append(test_case)\n\n  return test_cases\n\n\ndef CalculateTestLists():\n  \"\"\"Calculates the list of tests run under different flags.\"\"\"\n\n  if not ALL_TESTS:\n    ALL_TESTS.extend(\n        GetTestsForAllIterations({}, [AlsoRunDisabledTestsFlag()])[0])\n\n  if not ACTIVE_TESTS:\n    ACTIVE_TESTS.extend(GetTestsForAllIterations({}, [])[0])\n\n  if not FILTERED_TESTS:\n    FILTERED_TESTS.extend(\n        GetTestsForAllIterations({}, [FilterFlag(TEST_FILTER)])[0])\n\n  if not SHARDED_TESTS:\n    SHARDED_TESTS.extend(\n        GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',\n                                  SHARD_INDEX_ENV_VAR: '1'},\n                                 [])[0])\n\n  if not SHUFFLED_ALL_TESTS:\n    SHUFFLED_ALL_TESTS.extend(GetTestsForAllIterations(\n        {}, [AlsoRunDisabledTestsFlag(), ShuffleFlag(), RandomSeedFlag(1)])[0])\n\n  if not SHUFFLED_ACTIVE_TESTS:\n    SHUFFLED_ACTIVE_TESTS.extend(GetTestsForAllIterations(\n        {}, [ShuffleFlag(), RandomSeedFlag(1)])[0])\n\n  if not SHUFFLED_FILTERED_TESTS:\n    SHUFFLED_FILTERED_TESTS.extend(GetTestsForAllIterations(\n        {}, [ShuffleFlag(), RandomSeedFlag(1), 
FilterFlag(TEST_FILTER)])[0])\n\n  if not SHUFFLED_SHARDED_TESTS:\n    SHUFFLED_SHARDED_TESTS.extend(\n        GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',\n                                  SHARD_INDEX_ENV_VAR: '1'},\n                                 [ShuffleFlag(), RandomSeedFlag(1)])[0])\n\n\nclass GTestShuffleUnitTest(gtest_test_utils.TestCase):\n  \"\"\"Tests test shuffling.\"\"\"\n\n  def setUp(self):\n    CalculateTestLists()\n\n  def testShufflePreservesNumberOfTests(self):\n    self.assertEqual(len(ALL_TESTS), len(SHUFFLED_ALL_TESTS))\n    self.assertEqual(len(ACTIVE_TESTS), len(SHUFFLED_ACTIVE_TESTS))\n    self.assertEqual(len(FILTERED_TESTS), len(SHUFFLED_FILTERED_TESTS))\n    self.assertEqual(len(SHARDED_TESTS), len(SHUFFLED_SHARDED_TESTS))\n\n  def testShuffleChangesTestOrder(self):\n    self.assert_(SHUFFLED_ALL_TESTS != ALL_TESTS, SHUFFLED_ALL_TESTS)\n    self.assert_(SHUFFLED_ACTIVE_TESTS != ACTIVE_TESTS, SHUFFLED_ACTIVE_TESTS)\n    self.assert_(SHUFFLED_FILTERED_TESTS != FILTERED_TESTS,\n                 SHUFFLED_FILTERED_TESTS)\n    self.assert_(SHUFFLED_SHARDED_TESTS != SHARDED_TESTS,\n                 SHUFFLED_SHARDED_TESTS)\n\n  def testShuffleChangesTestCaseOrder(self):\n    self.assert_(GetTestCases(SHUFFLED_ALL_TESTS) != GetTestCases(ALL_TESTS),\n                 GetTestCases(SHUFFLED_ALL_TESTS))\n    self.assert_(\n        GetTestCases(SHUFFLED_ACTIVE_TESTS) != GetTestCases(ACTIVE_TESTS),\n        GetTestCases(SHUFFLED_ACTIVE_TESTS))\n    self.assert_(\n        GetTestCases(SHUFFLED_FILTERED_TESTS) != GetTestCases(FILTERED_TESTS),\n        GetTestCases(SHUFFLED_FILTERED_TESTS))\n    self.assert_(\n        GetTestCases(SHUFFLED_SHARDED_TESTS) != GetTestCases(SHARDED_TESTS),\n        GetTestCases(SHUFFLED_SHARDED_TESTS))\n\n  def testShuffleDoesNotRepeatTest(self):\n    for test in SHUFFLED_ALL_TESTS:\n      self.assertEqual(1, SHUFFLED_ALL_TESTS.count(test),\n                       '%s appears more than once' % (test,))\n    for test 
in SHUFFLED_ACTIVE_TESTS:\n      self.assertEqual(1, SHUFFLED_ACTIVE_TESTS.count(test),\n                       '%s appears more than once' % (test,))\n    for test in SHUFFLED_FILTERED_TESTS:\n      self.assertEqual(1, SHUFFLED_FILTERED_TESTS.count(test),\n                       '%s appears more than once' % (test,))\n    for test in SHUFFLED_SHARDED_TESTS:\n      self.assertEqual(1, SHUFFLED_SHARDED_TESTS.count(test),\n                       '%s appears more than once' % (test,))\n\n  def testShuffleDoesNotCreateNewTest(self):\n    for test in SHUFFLED_ALL_TESTS:\n      self.assert_(test in ALL_TESTS, '%s is an invalid test' % (test,))\n    for test in SHUFFLED_ACTIVE_TESTS:\n      self.assert_(test in ACTIVE_TESTS, '%s is an invalid test' % (test,))\n    for test in SHUFFLED_FILTERED_TESTS:\n      self.assert_(test in FILTERED_TESTS, '%s is an invalid test' % (test,))\n    for test in SHUFFLED_SHARDED_TESTS:\n      self.assert_(test in SHARDED_TESTS, '%s is an invalid test' % (test,))\n\n  def testShuffleIncludesAllTests(self):\n    for test in ALL_TESTS:\n      self.assert_(test in SHUFFLED_ALL_TESTS, '%s is missing' % (test,))\n    for test in ACTIVE_TESTS:\n      self.assert_(test in SHUFFLED_ACTIVE_TESTS, '%s is missing' % (test,))\n    for test in FILTERED_TESTS:\n      self.assert_(test in SHUFFLED_FILTERED_TESTS, '%s is missing' % (test,))\n    for test in SHARDED_TESTS:\n      self.assert_(test in SHUFFLED_SHARDED_TESTS, '%s is missing' % (test,))\n\n  def testShuffleLeavesDeathTestsAtFront(self):\n    non_death_test_found = False\n    for test in SHUFFLED_ACTIVE_TESTS:\n      if 'DeathTest.' 
in test:\n        self.assert_(not non_death_test_found,\n                     '%s appears after a non-death test' % (test,))\n      else:\n        non_death_test_found = True\n\n  def _VerifyTestCasesDoNotInterleave(self, tests):\n    test_cases = []\n    for test in tests:\n      [test_case, _] = test.split('.')\n      if test_cases and test_cases[-1] != test_case:\n        test_cases.append(test_case)\n        self.assertEqual(1, test_cases.count(test_case),\n                         'Test case %s is not grouped together in %s' %\n                         (test_case, tests))\n\n  def testShuffleDoesNotInterleaveTestCases(self):\n    self._VerifyTestCasesDoNotInterleave(SHUFFLED_ALL_TESTS)\n    self._VerifyTestCasesDoNotInterleave(SHUFFLED_ACTIVE_TESTS)\n    self._VerifyTestCasesDoNotInterleave(SHUFFLED_FILTERED_TESTS)\n    self._VerifyTestCasesDoNotInterleave(SHUFFLED_SHARDED_TESTS)\n\n  def testShuffleRestoresOrderAfterEachIteration(self):\n    # Get the test lists in all 3 iterations, using random seed 1, 2,\n    # and 3 respectively.  Google Test picks a different seed in each\n    # iteration, and this test depends on the current implementation\n    # picking successive numbers.  This dependency is not ideal, but\n    # makes the test much easier to write.\n    [tests_in_iteration1, tests_in_iteration2, tests_in_iteration3] = (\n        GetTestsForAllIterations(\n            {}, [ShuffleFlag(), RandomSeedFlag(1), RepeatFlag(3)]))\n\n    # Make sure running the tests with random seed 1 gets the same\n    # order as in iteration 1 above.\n    [tests_with_seed1] = GetTestsForAllIterations(\n        {}, [ShuffleFlag(), RandomSeedFlag(1)])\n    self.assertEqual(tests_in_iteration1, tests_with_seed1)\n\n    # Make sure running the tests with random seed 2 gets the same\n    # order as in iteration 2 above.  
Success means that Google Test\n    # correctly restores the test order before re-shuffling at the\n    # beginning of iteration 2.\n    [tests_with_seed2] = GetTestsForAllIterations(\n        {}, [ShuffleFlag(), RandomSeedFlag(2)])\n    self.assertEqual(tests_in_iteration2, tests_with_seed2)\n\n    # Make sure running the tests with random seed 3 gets the same\n    # order as in iteration 3 above.  Success means that Google Test\n    # correctly restores the test order before re-shuffling at the\n    # beginning of iteration 3.\n    [tests_with_seed3] = GetTestsForAllIterations(\n        {}, [ShuffleFlag(), RandomSeedFlag(3)])\n    self.assertEqual(tests_in_iteration3, tests_with_seed3)\n\n  def testShuffleGeneratesNewOrderInEachIteration(self):\n    [tests_in_iteration1, tests_in_iteration2, tests_in_iteration3] = (\n        GetTestsForAllIterations(\n            {}, [ShuffleFlag(), RandomSeedFlag(1), RepeatFlag(3)]))\n\n    self.assert_(tests_in_iteration1 != tests_in_iteration2,\n                 tests_in_iteration1)\n    self.assert_(tests_in_iteration1 != tests_in_iteration3,\n                 tests_in_iteration1)\n    self.assert_(tests_in_iteration2 != tests_in_iteration3,\n                 tests_in_iteration2)\n\n  def testShuffleShardedTestsPreservesPartition(self):\n    # If we run M tests on N shards, the same M tests should be run in\n    # total, regardless of the random seeds used by the shards.\n    [tests1] = GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',\n                                         SHARD_INDEX_ENV_VAR: '0'},\n                                        [ShuffleFlag(), RandomSeedFlag(1)])\n    [tests2] = GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',\n                                         SHARD_INDEX_ENV_VAR: '1'},\n                                        [ShuffleFlag(), RandomSeedFlag(20)])\n    [tests3] = GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3',\n                                         SHARD_INDEX_ENV_VAR: 
'2'},\n                                        [ShuffleFlag(), RandomSeedFlag(25)])\n    sorted_sharded_tests = tests1 + tests2 + tests3\n    sorted_sharded_tests.sort()\n    sorted_active_tests = []\n    sorted_active_tests.extend(ACTIVE_TESTS)\n    sorted_active_tests.sort()\n    self.assertEqual(sorted_active_tests, sorted_sharded_tests)\n\nif __name__ == '__main__':\n  gtest_test_utils.Main()\n"
  },
  {
    "path": "test/googletest/test/googletest-shuffle-test_.cc",
    "content": "// Copyright 2009, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\n// Verifies that test shuffling works.\n\n#include \"gtest/gtest.h\"\n\nnamespace {\n\nusing ::testing::EmptyTestEventListener;\nusing ::testing::InitGoogleTest;\nusing ::testing::Message;\nusing ::testing::Test;\nusing ::testing::TestEventListeners;\nusing ::testing::TestInfo;\nusing ::testing::UnitTest;\n\n// The test methods are empty, as the sole purpose of this program is\n// to print the test names before/after shuffling.\n\nclass A : public Test {};\nTEST_F(A, A) {}\nTEST_F(A, B) {}\n\nTEST(ADeathTest, A) {}\nTEST(ADeathTest, B) {}\nTEST(ADeathTest, C) {}\n\nTEST(B, A) {}\nTEST(B, B) {}\nTEST(B, C) {}\nTEST(B, DISABLED_D) {}\nTEST(B, DISABLED_E) {}\n\nTEST(BDeathTest, A) {}\nTEST(BDeathTest, B) {}\n\nTEST(C, A) {}\nTEST(C, B) {}\nTEST(C, C) {}\nTEST(C, DISABLED_D) {}\n\nTEST(CDeathTest, A) {}\n\nTEST(DISABLED_D, A) {}\nTEST(DISABLED_D, DISABLED_B) {}\n\n// This printer prints the full test names only, starting each test\n// iteration with a \"----\" marker.\nclass TestNamePrinter : public EmptyTestEventListener {\n public:\n  void OnTestIterationStart(const UnitTest& /* unit_test */,\n                            int /* iteration */) override {\n    printf(\"----\\n\");\n  }\n\n  void OnTestStart(const TestInfo& test_info) override {\n    printf(\"%s.%s\\n\", test_info.test_case_name(), test_info.name());\n  }\n};\n\n}  // namespace\n\nint main(int argc, char **argv) {\n  InitGoogleTest(&argc, argv);\n\n  // Replaces the 
default printer with TestNamePrinter, which prints\n  // the test name only.\n  TestEventListeners& listeners = UnitTest::GetInstance()->listeners();\n  delete listeners.Release(listeners.default_result_printer());\n  listeners.Append(new TestNamePrinter);\n\n  return RUN_ALL_TESTS();\n}\n"
  },
  {
    "path": "test/googletest/test/googletest-test-part-test.cc",
    "content": "// Copyright 2008 Google Inc.\n// All Rights Reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n#include \"gtest/gtest-test-part.h\"\n\n#include \"gtest/gtest.h\"\n\nusing testing::Message;\nusing testing::Test;\nusing testing::TestPartResult;\nusing testing::TestPartResultArray;\n\nnamespace {\n\n// Tests the TestPartResult class.\n\n// The test fixture for testing TestPartResult.\nclass TestPartResultTest : public Test {\n protected:\n  TestPartResultTest()\n      : r1_(TestPartResult::kSuccess, \"foo/bar.cc\", 10, \"Success!\"),\n        r2_(TestPartResult::kNonFatalFailure, \"foo/bar.cc\", -1, \"Failure!\"),\n        r3_(TestPartResult::kFatalFailure, nullptr, -1, \"Failure!\"),\n        r4_(TestPartResult::kSkip, \"foo/bar.cc\", 2, \"Skipped!\") {}\n\n  TestPartResult r1_, r2_, r3_, r4_;\n};\n\n\nTEST_F(TestPartResultTest, ConstructorWorks) {\n  Message message;\n  message << \"something is terribly wrong\";\n  message << static_cast<const char*>(testing::internal::kStackTraceMarker);\n  message << \"some unimportant stack trace\";\n\n  const TestPartResult result(TestPartResult::kNonFatalFailure,\n                              \"some_file.cc\",\n                              42,\n                              message.GetString().c_str());\n\n  EXPECT_EQ(TestPartResult::kNonFatalFailure, result.type());\n  EXPECT_STREQ(\"some_file.cc\", result.file_name());\n  EXPECT_EQ(42, result.line_number());\n  EXPECT_STREQ(message.GetString().c_str(), result.message());\n  EXPECT_STREQ(\"something is terribly wrong\", 
result.summary());\n}\n\nTEST_F(TestPartResultTest, ResultAccessorsWork) {\n  const TestPartResult success(TestPartResult::kSuccess,\n                               \"file.cc\",\n                               42,\n                               \"message\");\n  EXPECT_TRUE(success.passed());\n  EXPECT_FALSE(success.failed());\n  EXPECT_FALSE(success.nonfatally_failed());\n  EXPECT_FALSE(success.fatally_failed());\n  EXPECT_FALSE(success.skipped());\n\n  const TestPartResult nonfatal_failure(TestPartResult::kNonFatalFailure,\n                                        \"file.cc\",\n                                        42,\n                                        \"message\");\n  EXPECT_FALSE(nonfatal_failure.passed());\n  EXPECT_TRUE(nonfatal_failure.failed());\n  EXPECT_TRUE(nonfatal_failure.nonfatally_failed());\n  EXPECT_FALSE(nonfatal_failure.fatally_failed());\n  EXPECT_FALSE(nonfatal_failure.skipped());\n\n  const TestPartResult fatal_failure(TestPartResult::kFatalFailure,\n                                     \"file.cc\",\n                                     42,\n                                     \"message\");\n  EXPECT_FALSE(fatal_failure.passed());\n  EXPECT_TRUE(fatal_failure.failed());\n  EXPECT_FALSE(fatal_failure.nonfatally_failed());\n  EXPECT_TRUE(fatal_failure.fatally_failed());\n  EXPECT_FALSE(fatal_failure.skipped());\n\n  const TestPartResult skip(TestPartResult::kSkip, \"file.cc\", 42, \"message\");\n  EXPECT_FALSE(skip.passed());\n  EXPECT_FALSE(skip.failed());\n  EXPECT_FALSE(skip.nonfatally_failed());\n  EXPECT_FALSE(skip.fatally_failed());\n  EXPECT_TRUE(skip.skipped());\n}\n\n// Tests TestPartResult::type().\nTEST_F(TestPartResultTest, type) {\n  EXPECT_EQ(TestPartResult::kSuccess, r1_.type());\n  EXPECT_EQ(TestPartResult::kNonFatalFailure, r2_.type());\n  EXPECT_EQ(TestPartResult::kFatalFailure, r3_.type());\n  EXPECT_EQ(TestPartResult::kSkip, r4_.type());\n}\n\n// Tests TestPartResult::file_name().\nTEST_F(TestPartResultTest, 
file_name) {\n  EXPECT_STREQ(\"foo/bar.cc\", r1_.file_name());\n  EXPECT_STREQ(nullptr, r3_.file_name());\n  EXPECT_STREQ(\"foo/bar.cc\", r4_.file_name());\n}\n\n// Tests TestPartResult::line_number().\nTEST_F(TestPartResultTest, line_number) {\n  EXPECT_EQ(10, r1_.line_number());\n  EXPECT_EQ(-1, r2_.line_number());\n  EXPECT_EQ(2, r4_.line_number());\n}\n\n// Tests TestPartResult::message().\nTEST_F(TestPartResultTest, message) {\n  EXPECT_STREQ(\"Success!\", r1_.message());\n  EXPECT_STREQ(\"Skipped!\", r4_.message());\n}\n\n// Tests TestPartResult::passed().\nTEST_F(TestPartResultTest, Passed) {\n  EXPECT_TRUE(r1_.passed());\n  EXPECT_FALSE(r2_.passed());\n  EXPECT_FALSE(r3_.passed());\n  EXPECT_FALSE(r4_.passed());\n}\n\n// Tests TestPartResult::failed().\nTEST_F(TestPartResultTest, Failed) {\n  EXPECT_FALSE(r1_.failed());\n  EXPECT_TRUE(r2_.failed());\n  EXPECT_TRUE(r3_.failed());\n  EXPECT_FALSE(r4_.failed());\n}\n\n// Tests TestPartResult::failed().\nTEST_F(TestPartResultTest, Skipped) {\n  EXPECT_FALSE(r1_.skipped());\n  EXPECT_FALSE(r2_.skipped());\n  EXPECT_FALSE(r3_.skipped());\n  EXPECT_TRUE(r4_.skipped());\n}\n\n// Tests TestPartResult::fatally_failed().\nTEST_F(TestPartResultTest, FatallyFailed) {\n  EXPECT_FALSE(r1_.fatally_failed());\n  EXPECT_FALSE(r2_.fatally_failed());\n  EXPECT_TRUE(r3_.fatally_failed());\n  EXPECT_FALSE(r4_.fatally_failed());\n}\n\n// Tests TestPartResult::nonfatally_failed().\nTEST_F(TestPartResultTest, NonfatallyFailed) {\n  EXPECT_FALSE(r1_.nonfatally_failed());\n  EXPECT_TRUE(r2_.nonfatally_failed());\n  EXPECT_FALSE(r3_.nonfatally_failed());\n  EXPECT_FALSE(r4_.nonfatally_failed());\n}\n\n// Tests the TestPartResultArray class.\n\nclass TestPartResultArrayTest : public Test {\n protected:\n  TestPartResultArrayTest()\n      : r1_(TestPartResult::kNonFatalFailure, \"foo/bar.cc\", -1, \"Failure 1\"),\n        r2_(TestPartResult::kFatalFailure, \"foo/bar.cc\", -1, \"Failure 2\") {}\n\n  const TestPartResult r1_, 
r2_;\n};\n\n// Tests that TestPartResultArray initially has size 0.\nTEST_F(TestPartResultArrayTest, InitialSizeIsZero) {\n  TestPartResultArray results;\n  EXPECT_EQ(0, results.size());\n}\n\n// Tests that TestPartResultArray contains the given TestPartResult\n// after one Append() operation.\nTEST_F(TestPartResultArrayTest, ContainsGivenResultAfterAppend) {\n  TestPartResultArray results;\n  results.Append(r1_);\n  EXPECT_EQ(1, results.size());\n  EXPECT_STREQ(\"Failure 1\", results.GetTestPartResult(0).message());\n}\n\n// Tests that TestPartResultArray contains the given TestPartResults\n// after two Append() operations.\nTEST_F(TestPartResultArrayTest, ContainsGivenResultsAfterTwoAppends) {\n  TestPartResultArray results;\n  results.Append(r1_);\n  results.Append(r2_);\n  EXPECT_EQ(2, results.size());\n  EXPECT_STREQ(\"Failure 1\", results.GetTestPartResult(0).message());\n  EXPECT_STREQ(\"Failure 2\", results.GetTestPartResult(1).message());\n}\n\ntypedef TestPartResultArrayTest TestPartResultArrayDeathTest;\n\n// Tests that the program dies when GetTestPartResult() is called with\n// an invalid index.\nTEST_F(TestPartResultArrayDeathTest, DiesWhenIndexIsOutOfBound) {\n  TestPartResultArray results;\n  results.Append(r1_);\n\n  EXPECT_DEATH_IF_SUPPORTED(results.GetTestPartResult(-1), \"\");\n  EXPECT_DEATH_IF_SUPPORTED(results.GetTestPartResult(1), \"\");\n}\n\n}  // namespace\n"
  },
  {
    "path": "test/googletest/test/googletest-test2_test.cc",
    "content": "// Copyright 2008, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n//\n// Tests for Google Test itself.  This verifies that the basic constructs of\n// Google Test work.\n\n#include \"gtest/gtest.h\"\n#include \"googletest-param-test-test.h\"\n\nusing ::testing::Values;\nusing ::testing::internal::ParamGenerator;\n\n// Tests that generators defined in a different translation unit\n// are functional. 
The test using extern_gen_2 is defined\n// in googletest-param-test-test.cc.\nParamGenerator<int> extern_gen_2 = Values(33);\n\n// Tests that a parameterized test case can be defined in one translation unit\n// and instantiated in another. The test is defined in\n// googletest-param-test-test.cc and ExternalInstantiationTest fixture class is\n// defined in gtest-param-test_test.h.\nINSTANTIATE_TEST_SUITE_P(MultiplesOf33,\n                         ExternalInstantiationTest,\n                         Values(33, 66));\n\n// Tests that a parameterized test case can be instantiated\n// in multiple translation units. Another instantiation is defined\n// in googletest-param-test-test.cc and\n// InstantiationInMultipleTranslationUnitsTest fixture is defined in\n// gtest-param-test_test.h\nINSTANTIATE_TEST_SUITE_P(Sequence2,\n                         InstantiationInMultipleTranslationUnitsTest,\n                         Values(42*3, 42*4, 42*5));\n\n"
  },
  {
    "path": "test/googletest/test/googletest-throw-on-failure-test.py",
    "content": "#!/usr/bin/env python\n#\n# Copyright 2009, Google Inc.\n# All rights reserved.\n#\n# Redistribution and use in source and binary forms, with or without\n# modification, are permitted provided that the following conditions are\n# met:\n#\n#     * Redistributions of source code must retain the above copyright\n# notice, this list of conditions and the following disclaimer.\n#     * Redistributions in binary form must reproduce the above\n# copyright notice, this list of conditions and the following disclaimer\n# in the documentation and/or other materials provided with the\n# distribution.\n#     * Neither the name of Google Inc. nor the names of its\n# contributors may be used to endorse or promote products derived from\n# this software without specific prior written permission.\n#\n# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n# \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\"\"\"Tests Google Test's throw-on-failure mode with exceptions disabled.\n\nThis script invokes googletest-throw-on-failure-test_ (a program written with\nGoogle Test) with different environments and command line flags.\n\"\"\"\n\nimport os\nimport gtest_test_utils\n\n\n# Constants.\n\n# The command line flag for enabling/disabling the throw-on-failure mode.\nTHROW_ON_FAILURE = 'gtest_throw_on_failure'\n\n# Path to the googletest-throw-on-failure-test_ program, compiled with\n# exceptions disabled.\nEXE_PATH = gtest_test_utils.GetTestExecutablePath(\n    'googletest-throw-on-failure-test_')\n\n\n# Utilities.\n\n\ndef SetEnvVar(env_var, value):\n  \"\"\"Sets an environment variable to a given value; unsets it when the\n  given value is None.\n  \"\"\"\n\n  env_var = env_var.upper()\n  if value is not None:\n    os.environ[env_var] = value\n  elif env_var in os.environ:\n    del os.environ[env_var]\n\n\ndef Run(command):\n  \"\"\"Runs a command; returns True/False if its exit code is/isn't 0.\"\"\"\n\n  print('Running \"%s\". . .' 
% ' '.join(command))\n  p = gtest_test_utils.Subprocess(command)\n  return p.exited and p.exit_code == 0\n\n\n# The tests.\nclass ThrowOnFailureTest(gtest_test_utils.TestCase):\n  \"\"\"Tests the throw-on-failure mode.\"\"\"\n\n  def RunAndVerify(self, env_var_value, flag_value, should_fail):\n    \"\"\"Runs googletest-throw-on-failure-test_ and verifies that it does\n    (or does not) exit with a non-zero code.\n\n    Args:\n      env_var_value:    value of the GTEST_BREAK_ON_FAILURE environment\n                        variable; None if the variable should be unset.\n      flag_value:       value of the --gtest_break_on_failure flag;\n                        None if the flag should not be present.\n      should_fail:      True if and only if the program is expected to fail.\n    \"\"\"\n\n    SetEnvVar(THROW_ON_FAILURE, env_var_value)\n\n    if env_var_value is None:\n      env_var_value_msg = ' is not set'\n    else:\n      env_var_value_msg = '=' + env_var_value\n\n    if flag_value is None:\n      flag = ''\n    elif flag_value == '0':\n      flag = '--%s=0' % THROW_ON_FAILURE\n    else:\n      flag = '--%s' % THROW_ON_FAILURE\n\n    command = [EXE_PATH]\n    if flag:\n      command.append(flag)\n\n    if should_fail:\n      should_or_not = 'should'\n    else:\n      should_or_not = 'should not'\n\n    failed = not Run(command)\n\n    SetEnvVar(THROW_ON_FAILURE, None)\n\n    msg = ('when %s%s, an assertion failure in \"%s\" %s cause a non-zero '\n           'exit code.' 
%\n           (THROW_ON_FAILURE, env_var_value_msg, ' '.join(command),\n            should_or_not))\n    self.assert_(failed == should_fail, msg)\n\n  def testDefaultBehavior(self):\n    \"\"\"Tests the behavior of the default mode.\"\"\"\n\n    self.RunAndVerify(env_var_value=None, flag_value=None, should_fail=False)\n\n  def testThrowOnFailureEnvVar(self):\n    \"\"\"Tests using the GTEST_THROW_ON_FAILURE environment variable.\"\"\"\n\n    self.RunAndVerify(env_var_value='0',\n                      flag_value=None,\n                      should_fail=False)\n    self.RunAndVerify(env_var_value='1',\n                      flag_value=None,\n                      should_fail=True)\n\n  def testThrowOnFailureFlag(self):\n    \"\"\"Tests using the --gtest_throw_on_failure flag.\"\"\"\n\n    self.RunAndVerify(env_var_value=None,\n                      flag_value='0',\n                      should_fail=False)\n    self.RunAndVerify(env_var_value=None,\n                      flag_value='1',\n                      should_fail=True)\n\n  def testThrowOnFailureFlagOverridesEnvVar(self):\n    \"\"\"Tests that --gtest_throw_on_failure overrides GTEST_THROW_ON_FAILURE.\"\"\"\n\n    self.RunAndVerify(env_var_value='0',\n                      flag_value='0',\n                      should_fail=False)\n    self.RunAndVerify(env_var_value='0',\n                      flag_value='1',\n                      should_fail=True)\n    self.RunAndVerify(env_var_value='1',\n                      flag_value='0',\n                      should_fail=False)\n    self.RunAndVerify(env_var_value='1',\n                      flag_value='1',\n                      should_fail=True)\n\n\nif __name__ == '__main__':\n  gtest_test_utils.Main()\n"
  },
  {
    "path": "test/googletest/test/googletest-throw-on-failure-test_.cc",
    "content": "// Copyright 2009, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\n// Tests Google Test's throw-on-failure mode with exceptions disabled.\n//\n// This program must be compiled with exceptions disabled.  
It will be\n// invoked by googletest-throw-on-failure-test.py, and is expected to exit\n// with non-zero in the throw-on-failure mode or 0 otherwise.\n\n#include \"gtest/gtest.h\"\n\n#include <stdio.h>                      // for fflush, fprintf, NULL, etc.\n#include <stdlib.h>                     // for exit\n#include <exception>                    // for set_terminate\n\n// This terminate handler aborts the program using exit() rather than abort().\n// This avoids showing pop-ups on Windows systems and core dumps on Unix-like\n// ones.\nvoid TerminateHandler() {\n  fprintf(stderr, \"%s\\n\", \"Unhandled C++ exception terminating the program.\");\n  fflush(nullptr);\n  exit(1);\n}\n\nint main(int argc, char** argv) {\n#if GTEST_HAS_EXCEPTIONS\n  std::set_terminate(&TerminateHandler);\n#endif\n  testing::InitGoogleTest(&argc, argv);\n\n  // We want to ensure that people can use Google Test assertions in\n  // other testing frameworks, as long as they initialize Google Test\n  // properly and set the throw-on-failure mode.  Therefore, we don't\n  // use Google Test's constructs for defining and running tests\n  // (e.g. TEST and RUN_ALL_TESTS) here.\n\n  // In the throw-on-failure mode with exceptions disabled, this\n  // assertion will cause the program to exit with a non-zero code.\n  EXPECT_EQ(2, 3);\n\n  // When not in the throw-on-failure mode, the control will reach\n  // here.\n  return 0;\n}\n"
  },
  {
    "path": "test/googletest/test/googletest-uninitialized-test.py",
    "content": "#!/usr/bin/env python\n#\n# Copyright 2008, Google Inc.\n# All rights reserved.\n#\n# Redistribution and use in source and binary forms, with or without\n# modification, are permitted provided that the following conditions are\n# met:\n#\n#     * Redistributions of source code must retain the above copyright\n# notice, this list of conditions and the following disclaimer.\n#     * Redistributions in binary form must reproduce the above\n# copyright notice, this list of conditions and the following disclaimer\n# in the documentation and/or other materials provided with the\n# distribution.\n#     * Neither the name of Google Inc. nor the names of its\n# contributors may be used to endorse or promote products derived from\n# this software without specific prior written permission.\n#\n# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n# \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\"\"\"Verifies that Google Test warns the user when not initialized properly.\"\"\"\n\nimport gtest_test_utils\n\nCOMMAND = gtest_test_utils.GetTestExecutablePath('googletest-uninitialized-test_')\n\n\ndef Assert(condition):\n  if not condition:\n    raise AssertionError\n\n\ndef AssertEq(expected, actual):\n  if expected != actual:\n    print('Expected: %s' % (expected,))\n    print('  Actual: %s' % (actual,))\n    raise AssertionError\n\n\ndef TestExitCodeAndOutput(command):\n  \"\"\"Runs the given command and verifies its exit code and output.\"\"\"\n\n  # Verifies that 'command' exits with code 1.\n  p = gtest_test_utils.Subprocess(command)\n  if p.exited and p.exit_code == 0:\n    Assert('IMPORTANT NOTICE' in p.output);\n  Assert('InitGoogleTest' in p.output)\n\n\nclass GTestUninitializedTest(gtest_test_utils.TestCase):\n  def testExitCodeAndOutput(self):\n    TestExitCodeAndOutput(COMMAND)\n\n\nif __name__ == '__main__':\n  gtest_test_utils.Main()\n"
  },
  {
    "path": "test/googletest/test/googletest-uninitialized-test_.cc",
    "content": "// Copyright 2008, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\n#include \"gtest/gtest.h\"\n\nTEST(DummyTest, Dummy) {\n  // This test doesn't verify anything.  We just need it to create a\n  // realistic stage for testing the behavior of Google Test when\n  // RUN_ALL_TESTS() is called without\n  // testing::InitGoogleTest() being called first.\n}\n\nint main() {\n  return RUN_ALL_TESTS();\n}\n"
  },
  {
    "path": "test/googletest/test/gtest-typed-test2_test.cc",
    "content": "// Copyright 2008 Google Inc.\n// All Rights Reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\n#include <vector>\n\n#include \"test/gtest-typed-test_test.h\"\n#include \"gtest/gtest.h\"\n\n#if GTEST_HAS_TYPED_TEST_P\n\n// Tests that the same type-parameterized test case can be\n// instantiated in different translation units linked together.\n// (ContainerTest is also instantiated in gtest-typed-test_test.cc.)\nINSTANTIATE_TYPED_TEST_SUITE_P(Vector, ContainerTest,\n                               testing::Types<std::vector<int> >);\n\n#endif  // GTEST_HAS_TYPED_TEST_P\n"
  },
  {
    "path": "test/googletest/test/gtest-typed-test_test.cc",
    "content": "// Copyright 2008 Google Inc.\n// All Rights Reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\n#include \"test/gtest-typed-test_test.h\"\n\n#include <set>\n#include <type_traits>\n#include <vector>\n\n#include \"gtest/gtest.h\"\n\n#if _MSC_VER\nGTEST_DISABLE_MSC_WARNINGS_PUSH_(4127 /* conditional expression is constant */)\n#endif  //  _MSC_VER\n\nusing testing::Test;\n\n// Used for testing that SetUpTestSuite()/TearDownTestSuite(), fixture\n// ctor/dtor, and SetUp()/TearDown() work correctly in typed tests and\n// type-parameterized test.\ntemplate <typename T>\nclass CommonTest : public Test {\n  // For some technical reason, SetUpTestSuite() and TearDownTestSuite()\n  // must be public.\n public:\n  static void SetUpTestSuite() {\n    shared_ = new T(5);\n  }\n\n  static void TearDownTestSuite() {\n    delete shared_;\n    shared_ = nullptr;\n  }\n\n  // This 'protected:' is optional.  There's no harm in making all\n  // members of this fixture class template public.\n protected:\n  // We used to use std::list here, but switched to std::vector since\n  // MSVC's <list> doesn't compile cleanly with /W4.\n  typedef std::vector<T> Vector;\n  typedef std::set<int> IntSet;\n\n  CommonTest() : value_(1) {}\n\n  ~CommonTest() override { EXPECT_EQ(3, value_); }\n\n  void SetUp() override {\n    EXPECT_EQ(1, value_);\n    value_++;\n  }\n\n  void TearDown() override {\n    EXPECT_EQ(2, value_);\n    value_++;\n  }\n\n  T value_;\n  static T* shared_;\n};\n\ntemplate <typename T>\nT* CommonTest<T>::shared_ = nullptr;\n\n// This #ifdef block tests typed tests.\n#if GTEST_HAS_TYPED_TEST\n\nusing testing::Types;\n\n// Tests that SetUpTestSuite()/TearDownTestSuite(), fixture ctor/dtor,\n// and SetUp()/TearDown() work correctly in typed tests\n\ntypedef Types<char, int> TwoTypes;\nTYPED_TEST_SUITE(CommonTest, TwoTypes);\n\nTYPED_TEST(CommonTest, ValuesAreCorrect) {\n  // Static members of the fixture class template can be visited via\n  // the TestFixture:: prefix.\n  EXPECT_EQ(5, *TestFixture::shared_);\n\n  // Typedefs in the fixture class template can be visited via the\n  // \"typename TestFixture::\" prefix.\n  typename TestFixture::Vector empty;\n  EXPECT_EQ(0U, empty.size());\n\n  typename TestFixture::IntSet empty2;\n  EXPECT_EQ(0U, empty2.size());\n\n  // Non-static members of the fixture class must be visited via\n  // 'this', as required by C++ for class templates.\n  EXPECT_EQ(2, this->value_);\n}\n\n// The second test makes sure shared_ is not deleted after the first\n// test.\nTYPED_TEST(CommonTest, ValuesAreStillCorrect) {\n  // Static members of the fixture class template can also be visited\n  // via 'this'.\n  ASSERT_TRUE(this->shared_ != nullptr);\n  EXPECT_EQ(5, *this->shared_);\n\n  // TypeParam can be used to refer to the type parameter.\n  EXPECT_EQ(static_cast<TypeParam>(2), this->value_);\n}\n\n// Tests that multiple TYPED_TEST_SUITE's can be defined in the same\n// translation unit.\n\ntemplate <typename T>\nclass TypedTest1 : public Test {\n};\n\n// Verifies that the second argument of TYPED_TEST_SUITE can be a\n// single type.\nTYPED_TEST_SUITE(TypedTest1, int);\nTYPED_TEST(TypedTest1, A) {}\n\ntemplate <typename T>\nclass TypedTest2 : public Test {\n};\n\n// Verifies that the second argument of TYPED_TEST_SUITE can be a\n// Types<...> type list.\nTYPED_TEST_SUITE(TypedTest2, Types<int>);\n\n// This also verifies that tests from different typed test cases can\n// share the same name.\nTYPED_TEST(TypedTest2, A) {}\n\n// Tests that a typed test case can be defined in a namespace.\n\nnamespace library1 {\n\ntemplate <typename T>\nclass NumericTest : public Test {\n};\n\ntypedef Types<int, long> NumericTypes;\nTYPED_TEST_SUITE(NumericTest, NumericTypes);\n\nTYPED_TEST(NumericTest, DefaultIsZero) {\n  EXPECT_EQ(0, TypeParam());\n}\n\n}  // namespace library1\n\n// Tests that custom names work.\ntemplate <typename T>\nclass TypedTestWithNames : public Test {};\n\nclass TypedTestNames {\n public:\n  template <typename T>\n  static std::string GetName(int i) {\n    if (std::is_same<T, char>::value) {\n      return std::string(\"char\") + ::testing::PrintToString(i);\n    }\n    if (std::is_same<T, int>::value) {\n      return std::string(\"int\") + ::testing::PrintToString(i);\n    }\n  }\n};\n\nTYPED_TEST_SUITE(TypedTestWithNames, TwoTypes, TypedTestNames);\n\nTYPED_TEST(TypedTestWithNames, TestSuiteName) {\n  if (std::is_same<TypeParam, char>::value) {\n    EXPECT_STREQ(::testing::UnitTest::GetInstance()\n                     ->current_test_info()\n                     ->test_case_name(),\n                 \"TypedTestWithNames/char0\");\n  }\n  if (std::is_same<TypeParam, int>::value) {\n    EXPECT_STREQ(::testing::UnitTest::GetInstance()\n                     ->current_test_info()\n                     ->test_case_name(),\n                 \"TypedTestWithNames/int1\");\n  }\n}\n\n#endif  // GTEST_HAS_TYPED_TEST\n\n// This #ifdef block tests type-parameterized tests.\n#if GTEST_HAS_TYPED_TEST_P\n\nusing testing::Types;\nusing testing::internal::TypedTestSuitePState;\n\n// Tests TypedTestSuitePState.\n\nclass TypedTestSuitePStateTest : public Test {\n protected:\n  void SetUp() override {\n    state_.AddTestName(\"foo.cc\", 0, \"FooTest\", \"A\");\n    state_.AddTestName(\"foo.cc\", 0, \"FooTest\", \"B\");\n    state_.AddTestName(\"foo.cc\", 0, \"FooTest\", \"C\");\n  }\n\n  TypedTestSuitePState state_;\n};\n\nTEST_F(TypedTestSuitePStateTest, SucceedsForMatchingList) {\n  const char* tests = \"A, B, C\";\n  EXPECT_EQ(tests,\n            state_.VerifyRegisteredTestNames(\"foo.cc\", 1, tests));\n}\n\n// Makes sure that the order of the tests and spaces around the names\n// don't matter.\nTEST_F(TypedTestSuitePStateTest, IgnoresOrderAndSpaces) {\n  const char* tests = \"A,C,   B\";\n  EXPECT_EQ(tests,\n            state_.VerifyRegisteredTestNames(\"foo.cc\", 1, tests));\n}\n\nusing TypedTestSuitePStateDeathTest = TypedTestSuitePStateTest;\n\nTEST_F(TypedTestSuitePStateDeathTest, DetectsDuplicates) {\n  EXPECT_DEATH_IF_SUPPORTED(\n      state_.VerifyRegisteredTestNames(\"foo.cc\", 1, \"A, B, A, C\"),\n      \"foo\\\\.cc.1.?: Test A is listed more than once\\\\.\");\n}\n\nTEST_F(TypedTestSuitePStateDeathTest, DetectsExtraTest) {\n  EXPECT_DEATH_IF_SUPPORTED(\n      state_.VerifyRegisteredTestNames(\"foo.cc\", 1, \"A, B, C, D\"),\n      \"foo\\\\.cc.1.?: No test named D can be found in this test suite\\\\.\");\n}\n\nTEST_F(TypedTestSuitePStateDeathTest, DetectsMissedTest) {\n  EXPECT_DEATH_IF_SUPPORTED(\n      state_.VerifyRegisteredTestNames(\"foo.cc\", 1, \"A, C\"),\n      \"foo\\\\.cc.1.?: You forgot to list test B\\\\.\");\n}\n\n// Tests that defining a test for a parameterized test case generates\n// a run-time error if the test case has been registered.\nTEST_F(TypedTestSuitePStateDeathTest, DetectsTestAfterRegistration) {\n  state_.VerifyRegisteredTestNames(\"foo.cc\", 1, \"A, B, C\");\n  EXPECT_DEATH_IF_SUPPORTED(\n      state_.AddTestName(\"foo.cc\", 2, \"FooTest\", \"D\"),\n      \"foo\\\\.cc.2.?: Test D must be defined before REGISTER_TYPED_TEST_SUITE_P\"\n      \"\\\\(FooTest, \\\\.\\\\.\\\\.\\\\)\\\\.\");\n}\n\n// Tests that SetUpTestSuite()/TearDownTestSuite(), fixture ctor/dtor,\n// and SetUp()/TearDown() work correctly in type-parameterized tests.\n\ntemplate <typename T>\nclass DerivedTest : public CommonTest<T> {\n};\n\nTYPED_TEST_SUITE_P(DerivedTest);\n\nTYPED_TEST_P(DerivedTest, ValuesAreCorrect) {\n  // Static members of the fixture class template can be visited via\n  // the TestFixture:: prefix.\n  EXPECT_EQ(5, *TestFixture::shared_);\n\n  // Non-static members of the fixture class must be visited via\n  // 'this', as required by C++ for class templates.\n  EXPECT_EQ(2, this->value_);\n}\n\n// The second test makes sure shared_ is not deleted after the first\n// test.\nTYPED_TEST_P(DerivedTest, ValuesAreStillCorrect) {\n  // Static members of the fixture class template can also be visited\n  // via 'this'.\n  ASSERT_TRUE(this->shared_ != nullptr);\n  EXPECT_EQ(5, *this->shared_);\n  EXPECT_EQ(2, this->value_);\n}\n\nREGISTER_TYPED_TEST_SUITE_P(DerivedTest,\n                           ValuesAreCorrect, ValuesAreStillCorrect);\n\ntypedef Types<short, long> MyTwoTypes;\nINSTANTIATE_TYPED_TEST_SUITE_P(My, DerivedTest, MyTwoTypes);\n\n// Tests that custom names work with type parametrized tests. We reuse the\n// TwoTypes from above here.\ntemplate <typename T>\nclass TypeParametrizedTestWithNames : public Test {};\n\nTYPED_TEST_SUITE_P(TypeParametrizedTestWithNames);\n\nTYPED_TEST_P(TypeParametrizedTestWithNames, TestSuiteName) {\n  if (std::is_same<TypeParam, char>::value) {\n    EXPECT_STREQ(::testing::UnitTest::GetInstance()\n                     ->current_test_info()\n                     ->test_case_name(),\n                 \"CustomName/TypeParametrizedTestWithNames/parChar0\");\n  }\n  if (std::is_same<TypeParam, int>::value) {\n    EXPECT_STREQ(::testing::UnitTest::GetInstance()\n                     ->current_test_info()\n                     ->test_case_name(),\n                 \"CustomName/TypeParametrizedTestWithNames/parInt1\");\n  }\n}\n\nREGISTER_TYPED_TEST_SUITE_P(TypeParametrizedTestWithNames, TestSuiteName);\n\nclass TypeParametrizedTestNames {\n public:\n  template <typename T>\n  static std::string GetName(int i) {\n    if (std::is_same<T, char>::value) {\n      return std::string(\"parChar\") + ::testing::PrintToString(i);\n    }\n    if (std::is_same<T, int>::value) {\n      return std::string(\"parInt\") + ::testing::PrintToString(i);\n    }\n  }\n};\n\nINSTANTIATE_TYPED_TEST_SUITE_P(CustomName, TypeParametrizedTestWithNames,\n                              TwoTypes, TypeParametrizedTestNames);\n\n// Tests that multiple TYPED_TEST_SUITE_P's can be defined in the same\n// translation unit.\n\ntemplate <typename T>\nclass TypedTestP1 : public Test {\n};\n\nTYPED_TEST_SUITE_P(TypedTestP1);\n\n// For testing that the code between TYPED_TEST_SUITE_P() and\n// TYPED_TEST_P() is not enclosed in a namespace.\nusing IntAfterTypedTestSuiteP = int;\n\nTYPED_TEST_P(TypedTestP1, A) {}\nTYPED_TEST_P(TypedTestP1, B) {}\n\n// For testing that the code between TYPED_TEST_P() and\n// REGISTER_TYPED_TEST_SUITE_P() is not enclosed in a namespace.\nusing IntBeforeRegisterTypedTestSuiteP = int;\n\nREGISTER_TYPED_TEST_SUITE_P(TypedTestP1, A, B);\n\ntemplate <typename T>\nclass TypedTestP2 : public Test {\n};\n\nTYPED_TEST_SUITE_P(TypedTestP2);\n\n// This also verifies that tests from different type-parameterized\n// test cases can share the same name.\nTYPED_TEST_P(TypedTestP2, A) {}\n\nREGISTER_TYPED_TEST_SUITE_P(TypedTestP2, A);\n\n// Verifies that the code between TYPED_TEST_SUITE_P() and\n// REGISTER_TYPED_TEST_SUITE_P() is not enclosed in a namespace.\nIntAfterTypedTestSuiteP after = 0;\nIntBeforeRegisterTypedTestSuiteP before = 0;\n\n// Verifies that the last argument of INSTANTIATE_TYPED_TEST_SUITE_P()\n// can be either a single type or a Types<...> type list.\nINSTANTIATE_TYPED_TEST_SUITE_P(Int, TypedTestP1, int);\nINSTANTIATE_TYPED_TEST_SUITE_P(Int, TypedTestP2, Types<int>);\n\n// Tests that the same type-parameterized test case can be\n// instantiated more than once in the same translation unit.\nINSTANTIATE_TYPED_TEST_SUITE_P(Double, TypedTestP2, Types<double>);\n\n// Tests that the same type-parameterized test case can be\n// instantiated in different translation units linked together.\n// (ContainerTest is also instantiated in gtest-typed-test_test.cc.)\ntypedef Types<std::vector<double>, std::set<char> > MyContainers;\nINSTANTIATE_TYPED_TEST_SUITE_P(My, ContainerTest, MyContainers);\n\n// Tests that a type-parameterized test case can be defined and\n// instantiated in a namespace.\n\nnamespace library2 {\n\ntemplate <typename T>\nclass NumericTest : public Test {\n};\n\nTYPED_TEST_SUITE_P(NumericTest);\n\nTYPED_TEST_P(NumericTest, DefaultIsZero) {\n  EXPECT_EQ(0, TypeParam());\n}\n\nTYPED_TEST_P(NumericTest, ZeroIsLessThanOne) {\n  EXPECT_LT(TypeParam(0), TypeParam(1));\n}\n\nREGISTER_TYPED_TEST_SUITE_P(NumericTest,\n                           DefaultIsZero, ZeroIsLessThanOne);\ntypedef Types<int, double> NumericTypes;\nINSTANTIATE_TYPED_TEST_SUITE_P(My, NumericTest, NumericTypes);\n\nstatic const char* GetTestName() {\n  return testing::UnitTest::GetInstance()->current_test_info()->name();\n}\n// Test the stripping of space from test names\ntemplate <typename T> class TrimmedTest : public Test { };\nTYPED_TEST_SUITE_P(TrimmedTest);\nTYPED_TEST_P(TrimmedTest, Test1) { EXPECT_STREQ(\"Test1\", GetTestName()); }\nTYPED_TEST_P(TrimmedTest, Test2) { EXPECT_STREQ(\"Test2\", GetTestName()); }\nTYPED_TEST_P(TrimmedTest, Test3) { EXPECT_STREQ(\"Test3\", GetTestName()); }\nTYPED_TEST_P(TrimmedTest, Test4) { EXPECT_STREQ(\"Test4\", GetTestName()); }\nTYPED_TEST_P(TrimmedTest, Test5) { EXPECT_STREQ(\"Test5\", GetTestName()); }\nREGISTER_TYPED_TEST_SUITE_P(\n    TrimmedTest,\n    Test1, Test2,Test3 , Test4 ,Test5 );  // NOLINT\ntemplate <typename T1, typename T2> struct MyPair {};\n// Be sure to try a type with a comma in its name just in case it matters.\ntypedef Types<int, double, MyPair<int, int> > TrimTypes;\nINSTANTIATE_TYPED_TEST_SUITE_P(My, TrimmedTest, TrimTypes);\n\n}  // namespace library2\n\n#endif  // GTEST_HAS_TYPED_TEST_P\n\n#if !defined(GTEST_HAS_TYPED_TEST) && !defined(GTEST_HAS_TYPED_TEST_P)\n\n// Google Test may not support type-parameterized tests with some\n// compilers. If we use conditional compilation to compile out all\n// code referring to the gtest_main library, MSVC linker will not link\n// that library at all and consequently complain about missing entry\n// point defined in that library (fatal error LNK1561: entry point\n// must be defined). This dummy test keeps gtest_main linked in.\nTEST(DummyTest, TypedTestsAreNotSupportedOnThisPlatform) {}\n\n#if _MSC_VER\nGTEST_DISABLE_MSC_WARNINGS_POP_()  //  4127\n#endif                             //  _MSC_VER\n\n#endif  // #if !defined(GTEST_HAS_TYPED_TEST) && !defined(GTEST_HAS_TYPED_TEST_P)\n"
  },
  {
    "path": "test/googletest/test/gtest-typed-test_test.h",
    "content": "// Copyright 2008 Google Inc.\n// All Rights Reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\n#ifndef GTEST_TEST_GTEST_TYPED_TEST_TEST_H_\n#define GTEST_TEST_GTEST_TYPED_TEST_TEST_H_\n\n#include \"gtest/gtest.h\"\n\n#if GTEST_HAS_TYPED_TEST_P\n\nusing testing::Test;\n\n// For testing that the same type-parameterized test case can be\n// instantiated in different translation units linked together.\n// ContainerTest will be instantiated in both gtest-typed-test_test.cc\n// and gtest-typed-test2_test.cc.\n\ntemplate <typename T>\nclass ContainerTest : public Test {\n};\n\nTYPED_TEST_SUITE_P(ContainerTest);\n\nTYPED_TEST_P(ContainerTest, CanBeDefaultConstructed) {\n  TypeParam container;\n}\n\nTYPED_TEST_P(ContainerTest, InitialSizeIsZero) {\n  TypeParam container;\n  EXPECT_EQ(0U, container.size());\n}\n\nREGISTER_TYPED_TEST_SUITE_P(ContainerTest,\n                            CanBeDefaultConstructed, InitialSizeIsZero);\n\n#endif  // GTEST_HAS_TYPED_TEST_P\n\n#endif  // GTEST_TEST_GTEST_TYPED_TEST_TEST_H_\n"
  },
  {
    "path": "test/googletest/test/gtest-unittest-api_test.cc",
    "content": "// Copyright 2009 Google Inc.  All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n//\n// The Google C++ Testing and Mocking Framework (Google Test)\n//\n// This file contains tests verifying correctness of data provided via\n// UnitTest's public methods.\n\n#include \"gtest/gtest.h\"\n\n#include <string.h>  // For strcmp.\n#include <algorithm>\n\nusing ::testing::InitGoogleTest;\n\nnamespace testing {\nnamespace internal {\n\ntemplate <typename T>\nstruct LessByName {\n  bool operator()(const T* a, const T* b) {\n    return strcmp(a->name(), b->name()) < 0;\n  }\n};\n\nclass UnitTestHelper {\n public:\n  // Returns the array of pointers to all test suites sorted by the test suite\n  // name.  The caller is responsible for deleting the array.\n  static TestSuite const** GetSortedTestSuites() {\n    UnitTest& unit_test = *UnitTest::GetInstance();\n    auto const** const test_suites =\n        new const TestSuite*[unit_test.total_test_suite_count()];\n\n    for (int i = 0; i < unit_test.total_test_suite_count(); ++i)\n      test_suites[i] = unit_test.GetTestSuite(i);\n\n    std::sort(test_suites,\n              test_suites + unit_test.total_test_suite_count(),\n              LessByName<TestSuite>());\n    return test_suites;\n  }\n\n  // Returns the test suite by its name.  The caller doesn't own the returned\n  // pointer.\n  static const TestSuite* FindTestSuite(const char* name) {\n    UnitTest& unit_test = *UnitTest::GetInstance();\n    for (int i = 0; i < unit_test.total_test_suite_count(); ++i) {\n      const TestSuite* test_suite = unit_test.GetTestSuite(i);\n      if (0 == strcmp(test_suite->name(), name))\n        return test_suite;\n    }\n    return nullptr;\n  }\n\n  // Returns the array of pointers to all tests in a particular test suite\n  // sorted by the test name.  The caller is responsible for deleting the\n  // array.\n  static TestInfo const** GetSortedTests(const TestSuite* test_suite) {\n    TestInfo const** const tests =\n        new const TestInfo*[test_suite->total_test_count()];\n\n    for (int i = 0; i < test_suite->total_test_count(); ++i)\n      tests[i] = test_suite->GetTestInfo(i);\n\n    std::sort(tests, tests + test_suite->total_test_count(),\n              LessByName<TestInfo>());\n    return tests;\n  }\n};\n\n#if GTEST_HAS_TYPED_TEST\ntemplate <typename T> class TestSuiteWithCommentTest : public Test {};\nTYPED_TEST_SUITE(TestSuiteWithCommentTest, Types<int>);\nTYPED_TEST(TestSuiteWithCommentTest, Dummy) {}\n\nconst int kTypedTestSuites = 1;\nconst int kTypedTests = 1;\n#else\nconst int kTypedTestSuites = 0;\nconst int kTypedTests = 0;\n#endif  // GTEST_HAS_TYPED_TEST\n\n// We can only test the accessors that do not change value while tests run.\n// Since tests can be run in any order, the values the accessors that track\n// test execution (such as failed_test_count) can not be predicted.\nTEST(ApiTest, UnitTestImmutableAccessorsWork) {\n  UnitTest* unit_test = UnitTest::GetInstance();\n\n  ASSERT_EQ(2 + kTypedTestSuites, unit_test->total_test_suite_count());\n  EXPECT_EQ(1 + kTypedTestSuites, unit_test->test_suite_to_run_count());\n  EXPECT_EQ(2, unit_test->disabled_test_count());\n  EXPECT_EQ(5 + kTypedTests, unit_test->total_test_count());\n  EXPECT_EQ(3 + kTypedTests, unit_test->test_to_run_count());\n\n  const TestSuite** const test_suites = UnitTestHelper::GetSortedTestSuites();\n\n  EXPECT_STREQ(\"ApiTest\", test_suites[0]->name());\n  EXPECT_STREQ(\"DISABLED_Test\", test_suites[1]->name());\n#if GTEST_HAS_TYPED_TEST\n  EXPECT_STREQ(\"TestSuiteWithCommentTest/0\", test_suites[2]->name());\n#endif  // GTEST_HAS_TYPED_TEST\n\n  delete[] test_suites;\n\n  // The following lines initiate actions to verify certain methods in\n  // FinalSuccessChecker::TearDown.\n\n  // Records a test property to verify TestResult::GetTestProperty().\n  RecordProperty(\"key\", \"value\");\n}\n\nAssertionResult IsNull(const char* str) {\n  if (str != nullptr) {\n    return testing::AssertionFailure() << \"argument is \" << str;\n  }\n  return AssertionSuccess();\n}\n\nTEST(ApiTest, TestSuiteImmutableAccessorsWork) {\n  const TestSuite* test_suite = UnitTestHelper::FindTestSuite(\"ApiTest\");\n  ASSERT_TRUE(test_suite != nullptr);\n\n  EXPECT_STREQ(\"ApiTest\", test_suite->name());\n  EXPECT_TRUE(IsNull(test_suite->type_param()));\n  EXPECT_TRUE(test_suite->should_run());\n  EXPECT_EQ(1, test_suite->disabled_test_count());\n  EXPECT_EQ(3, test_suite->test_to_run_count());\n  ASSERT_EQ(4, test_suite->total_test_count());\n\n  const TestInfo** tests = UnitTestHelper::GetSortedTests(test_suite);\n\n  EXPECT_STREQ(\"DISABLED_Dummy1\", tests[0]->name());\n  EXPECT_STREQ(\"ApiTest\", tests[0]->test_suite_name());\n  EXPECT_TRUE(IsNull(tests[0]->value_param()));\n  EXPECT_TRUE(IsNull(tests[0]->type_param()));\n  EXPECT_FALSE(tests[0]->should_run());\n\n  EXPECT_STREQ(\"TestSuiteDisabledAccessorsWork\", tests[1]->name());\n  EXPECT_STREQ(\"ApiTest\", tests[1]->test_suite_name());\n  EXPECT_TRUE(IsNull(tests[1]->value_param()));\n  EXPECT_TRUE(IsNull(tests[1]->type_param()));\n  EXPECT_TRUE(tests[1]->should_run());\n\n  EXPECT_STREQ(\"TestSuiteImmutableAccessorsWork\", tests[2]->name());\n  EXPECT_STREQ(\"ApiTest\", tests[2]->test_suite_name());\n  EXPECT_TRUE(IsNull(tests[2]->value_param()));\n  EXPECT_TRUE(IsNull(tests[2]->type_param()));\n  EXPECT_TRUE(tests[2]->should_run());\n\n  EXPECT_STREQ(\"UnitTestImmutableAccessorsWork\", tests[3]->name());\n  EXPECT_STREQ(\"ApiTest\", tests[3]->test_suite_name());\n  EXPECT_TRUE(IsNull(tests[3]->value_param()));\n  EXPECT_TRUE(IsNull(tests[3]->type_param()));\n  EXPECT_TRUE(tests[3]->should_run());\n\n  delete[] tests;\n  tests = nullptr;\n\n#if GTEST_HAS_TYPED_TEST\n  test_suite = UnitTestHelper::FindTestSuite(\"TestSuiteWithCommentTest/0\");\n  ASSERT_TRUE(test_suite != nullptr);\n\n  EXPECT_STREQ(\"TestSuiteWithCommentTest/0\", test_suite->name());\n  EXPECT_STREQ(GetTypeName<Types<int>>().c_str(), test_suite->type_param());\n  EXPECT_TRUE(test_suite->should_run());\n  EXPECT_EQ(0, test_suite->disabled_test_count());\n  EXPECT_EQ(1, test_suite->test_to_run_count());\n  ASSERT_EQ(1, test_suite->total_test_count());\n\n  tests = UnitTestHelper::GetSortedTests(test_suite);\n\n  EXPECT_STREQ(\"Dummy\", tests[0]->name());\n  EXPECT_STREQ(\"TestSuiteWithCommentTest/0\", tests[0]->test_suite_name());\n  EXPECT_TRUE(IsNull(tests[0]->value_param()));\n  EXPECT_STREQ(GetTypeName<Types<int>>().c_str(), tests[0]->type_param());\n  EXPECT_TRUE(tests[0]->should_run());\n\n  delete[] tests;\n#endif  // GTEST_HAS_TYPED_TEST\n}\n\nTEST(ApiTest, TestSuiteDisabledAccessorsWork) {\n  const TestSuite* test_suite = UnitTestHelper::FindTestSuite(\"DISABLED_Test\");\n  ASSERT_TRUE(test_suite != nullptr);\n\n  EXPECT_STREQ(\"DISABLED_Test\", test_suite->name());\n  EXPECT_TRUE(IsNull(test_suite->type_param()));\n  EXPECT_FALSE(test_suite->should_run());\n  EXPECT_EQ(1, test_suite->disabled_test_count());\n  EXPECT_EQ(0, test_suite->test_to_run_count());\n  ASSERT_EQ(1, test_suite->total_test_count());\n\n  const TestInfo* const test_info = test_suite->GetTestInfo(0);\n  EXPECT_STREQ(\"Dummy2\", test_info->name());\n  EXPECT_STREQ(\"DISABLED_Test\", test_info->test_suite_name());\n  EXPECT_TRUE(IsNull(test_info->value_param()));\n  EXPECT_TRUE(IsNull(test_info->type_param()));\n  EXPECT_FALSE(test_info->should_run());\n}\n\n// These two tests are here to provide support for testing\n// test_suite_to_run_count, disabled_test_count, and test_to_run_count.\nTEST(ApiTest, DISABLED_Dummy1) {}\nTEST(DISABLED_Test, Dummy2) {}\n\nclass FinalSuccessChecker : public Environment {\n protected:\n  void TearDown() override {\n    UnitTest* unit_test = UnitTest::GetInstance();\n\n    EXPECT_EQ(1 + kTypedTestSuites, unit_test->successful_test_suite_count());\n    EXPECT_EQ(3 + kTypedTests, unit_test->successful_test_count());\n    EXPECT_EQ(0, unit_test->failed_test_suite_count());\n    EXPECT_EQ(0, unit_test->failed_test_count());\n    EXPECT_TRUE(unit_test->Passed());\n    EXPECT_FALSE(unit_test->Failed());\n    ASSERT_EQ(2 + kTypedTestSuites, unit_test->total_test_suite_count());\n\n    const TestSuite** const test_suites = UnitTestHelper::GetSortedTestSuites();\n\n    EXPECT_STREQ(\"ApiTest\", test_suites[0]->name());\n    EXPECT_TRUE(IsNull(test_suites[0]->type_param()));\n    EXPECT_TRUE(test_suites[0]->should_run());\n    EXPECT_EQ(1, test_suites[0]->disabled_test_count());\n    ASSERT_EQ(4, test_suites[0]->total_test_count());\n    EXPECT_EQ(3, test_suites[0]->successful_test_count());\n    EXPECT_EQ(0, test_suites[0]->failed_test_count());\n    EXPECT_TRUE(test_suites[0]->Passed());\n    EXPECT_FALSE(test_suites[0]->Failed());\n\n    EXPECT_STREQ(\"DISABLED_Test\", test_suites[1]->name());\n    EXPECT_TRUE(IsNull(test_suites[1]->type_param()));\n    EXPECT_FALSE(test_suites[1]->should_run());\n    EXPECT_EQ(1, test_suites[1]->disabled_test_count());\n    ASSERT_EQ(1, test_suites[1]->total_test_count());\n    EXPECT_EQ(0, test_suites[1]->successful_test_count());\n    EXPECT_EQ(0, test_suites[1]->failed_test_count());\n\n#if GTEST_HAS_TYPED_TEST\n    EXPECT_STREQ(\"TestSuiteWithCommentTest/0\", test_suites[2]->name());\n    EXPECT_STREQ(GetTypeName<Types<int>>().c_str(),\n                 test_suites[2]->type_param());\n    EXPECT_TRUE(test_suites[2]->should_run());\n    EXPECT_EQ(0, test_suites[2]->disabled_test_count());\n    ASSERT_EQ(1, test_suites[2]->total_test_count());\n    EXPECT_EQ(1, test_suites[2]->successful_test_count());\n    EXPECT_EQ(0, test_suites[2]->failed_test_count());\n    EXPECT_TRUE(test_suites[2]->Passed());\n    EXPECT_FALSE(test_suites[2]->Failed());\n#endif  // GTEST_HAS_TYPED_TEST\n\n    const TestSuite* test_suite = UnitTestHelper::FindTestSuite(\"ApiTest\");\n    const TestInfo** tests = UnitTestHelper::GetSortedTests(test_suite);\n    EXPECT_STREQ(\"DISABLED_Dummy1\", tests[0]->name());\n    EXPECT_STREQ(\"ApiTest\", tests[0]->test_suite_name());\n    EXPECT_FALSE(tests[0]->should_run());\n\n    EXPECT_STREQ(\"TestSuiteDisabledAccessorsWork\", tests[1]->name());\n    EXPECT_STREQ(\"ApiTest\", tests[1]->test_suite_name());\n    EXPECT_TRUE(IsNull(tests[1]->value_param()));\n    EXPECT_TRUE(IsNull(tests[1]->type_param()));\n    EXPECT_TRUE(tests[1]->should_run());\n    EXPECT_TRUE(tests[1]->result()->Passed());\n    EXPECT_EQ(0, tests[1]->result()->test_property_count());\n\n    EXPECT_STREQ(\"TestSuiteImmutableAccessorsWork\", tests[2]->name());\n    EXPECT_STREQ(\"ApiTest\", tests[2]->test_suite_name());\n    EXPECT_TRUE(IsNull(tests[2]->value_param()));\n    EXPECT_TRUE(IsNull(tests[2]->type_param()));\n    EXPECT_TRUE(tests[2]->should_run());\n    EXPECT_TRUE(tests[2]->result()->Passed());\n    EXPECT_EQ(0, tests[2]->result()->test_property_count());\n\n    EXPECT_STREQ(\"UnitTestImmutableAccessorsWork\", tests[3]->name());\n    EXPECT_STREQ(\"ApiTest\", tests[3]->test_suite_name());\n    EXPECT_TRUE(IsNull(tests[3]->value_param()));\n    EXPECT_TRUE(IsNull(tests[3]->type_param()));\n    EXPECT_TRUE(tests[3]->should_run());\n    EXPECT_TRUE(tests[3]->result()->Passed());\n    EXPECT_EQ(1, tests[3]->result()->test_property_count());\n    const TestProperty& property = tests[3]->result()->GetTestProperty(0);\n    EXPECT_STREQ(\"key\", property.key());\n    EXPECT_STREQ(\"value\", property.value());\n\n    delete[] tests;\n\n#if GTEST_HAS_TYPED_TEST\n    test_suite = UnitTestHelper::FindTestSuite(\"TestSuiteWithCommentTest/0\");\n    tests = UnitTestHelper::GetSortedTests(test_suite);\n\n    EXPECT_STREQ(\"Dummy\", tests[0]->name());\n    EXPECT_STREQ(\"TestSuiteWithCommentTest/0\", tests[0]->test_suite_name());\n    EXPECT_TRUE(IsNull(tests[0]->value_param()));\n    EXPECT_STREQ(GetTypeName<Types<int>>().c_str(), tests[0]->type_param());\n    EXPECT_TRUE(tests[0]->should_run());\n    EXPECT_TRUE(tests[0]->result()->Passed());\n    EXPECT_EQ(0, tests[0]->result()->test_property_count());\n\n    delete[] tests;\n#endif  // GTEST_HAS_TYPED_TEST\n    delete[] test_suites;\n  }\n};\n\n}  // namespace internal\n}  // namespace testing\n\nint main(int argc, char **argv) {\n  InitGoogleTest(&argc, argv);\n\n  AddGlobalTestEnvironment(new testing::internal::FinalSuccessChecker());\n\n  return RUN_ALL_TESTS();\n}\n"
  },
  {
    "path": "test/googletest/test/gtest_all_test.cc",
    "content": "// Copyright 2009, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n//\n// Tests for Google C++ Testing and Mocking Framework (Google Test)\n//\n// Sometimes it's desirable to build most of Google Test's own tests\n// by compiling a single file.  This file serves this purpose.\n#include \"test/googletest-filepath-test.cc\"\n#include \"test/googletest-message-test.cc\"\n#include \"test/googletest-options-test.cc\"\n#include \"test/googletest-port-test.cc\"\n#include \"test/googletest-test-part-test.cc\"\n#include \"test/gtest-typed-test2_test.cc\"\n#include \"test/gtest-typed-test_test.cc\"\n#include \"test/gtest_pred_impl_unittest.cc\"\n#include \"test/gtest_prod_test.cc\"\n#include \"test/gtest_skip_test.cc\"\n#include \"test/gtest_unittest.cc\"\n#include \"test/production.cc\"\n"
  },
  {
    "path": "test/googletest/test/gtest_assert_by_exception_test.cc",
    "content": "// Copyright 2009, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\n// Tests Google Test's assert-by-exception mode with exceptions enabled.\n\n#include \"gtest/gtest.h\"\n\n#include <stdlib.h>\n#include <stdio.h>\n#include <string.h>\n#include <stdexcept>\n\nclass ThrowListener : public testing::EmptyTestEventListener {\n  void OnTestPartResult(const testing::TestPartResult& result) override {\n    if (result.type() == testing::TestPartResult::kFatalFailure) {\n      throw testing::AssertionException(result);\n    }\n  }\n};\n\n// Prints the given failure message and exits the program with\n// non-zero.  We use this instead of a Google Test assertion to\n// indicate a failure, as the latter is been tested and cannot be\n// relied on.\nvoid Fail(const char* msg) {\n  printf(\"FAILURE: %s\\n\", msg);\n  fflush(stdout);\n  exit(1);\n}\n\nstatic void AssertFalse() {\n  ASSERT_EQ(2, 3) << \"Expected failure\";\n}\n\n// Tests that an assertion failure throws a subclass of\n// std::runtime_error.\nTEST(Test, Test) {\n  // A successful assertion shouldn't throw.\n  try {\n    EXPECT_EQ(3, 3);\n  } catch(...) {\n    Fail(\"A successful assertion wrongfully threw.\");\n  }\n\n  // A successful assertion shouldn't throw.\n  try {\n    EXPECT_EQ(3, 4);\n  } catch(...) 
{\n    Fail(\"A failed non-fatal assertion wrongfully threw.\");\n  }\n\n  // A failed assertion should throw.\n  try {\n    AssertFalse();\n  } catch(const testing::AssertionException& e) {\n    if (strstr(e.what(), \"Expected failure\") != nullptr) throw;\n\n    printf(\"%s\",\n           \"A failed assertion did throw an exception of the right type, \"\n           \"but the message is incorrect.  Instead of containing \\\"Expected \"\n           \"failure\\\", it is:\\n\");\n    Fail(e.what());\n  } catch(...) {\n    Fail(\"A failed assertion threw the wrong type of exception.\");\n  }\n  Fail(\"A failed assertion should've thrown but didn't.\");\n}\n\nint kTestForContinuingTest = 0;\n\nTEST(Test, Test2) {\n  kTestForContinuingTest = 1;\n}\n\nint main(int argc, char** argv) {\n  testing::InitGoogleTest(&argc, argv);\n  testing::UnitTest::GetInstance()->listeners().Append(new ThrowListener);\n\n  int result = RUN_ALL_TESTS();\n  if (result == 0) {\n    printf(\"RUN_ALL_TESTS returned %d\\n\", result);\n    Fail(\"Expected failure instead.\");\n  }\n\n  if (kTestForContinuingTest == 0) {\n    Fail(\"Should have continued with other tests, but did not.\");\n  }\n  return 0;\n}\n"
  },
  {
    "path": "test/googletest/test/gtest_environment_test.cc",
    "content": "// Copyright 2007, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n//\n// Tests using global test environments.\n\n#include <stdlib.h>\n#include <stdio.h>\n#include \"gtest/gtest.h\"\n#include \"src/gtest-internal-inl.h\"\n\nnamespace testing {\nGTEST_DECLARE_string_(filter);\n}\n\nnamespace {\n\nenum FailureType {\n  NO_FAILURE, NON_FATAL_FAILURE, FATAL_FAILURE\n};\n\n// For testing using global test environments.\nclass MyEnvironment : public testing::Environment {\n public:\n  MyEnvironment() { Reset(); }\n\n  // Depending on the value of failure_in_set_up_, SetUp() will\n  // generate a non-fatal failure, generate a fatal failure, or\n  // succeed.\n  void SetUp() override {\n    set_up_was_run_ = true;\n\n    switch (failure_in_set_up_) {\n      case NON_FATAL_FAILURE:\n        ADD_FAILURE() << \"Expected non-fatal failure in global set-up.\";\n        break;\n      case FATAL_FAILURE:\n        FAIL() << \"Expected fatal failure in global set-up.\";\n        break;\n      default:\n        break;\n    }\n  }\n\n  // Generates a non-fatal failure.\n  void TearDown() override {\n    tear_down_was_run_ = true;\n    ADD_FAILURE() << \"Expected non-fatal failure in global tear-down.\";\n  }\n\n  // Resets the state of the environment s.t. 
it can be reused.\n  void Reset() {\n    failure_in_set_up_ = NO_FAILURE;\n    set_up_was_run_ = false;\n    tear_down_was_run_ = false;\n  }\n\n  // We call this function to set the type of failure SetUp() should\n  // generate.\n  void set_failure_in_set_up(FailureType type) {\n    failure_in_set_up_ = type;\n  }\n\n  // Was SetUp() run?\n  bool set_up_was_run() const { return set_up_was_run_; }\n\n  // Was TearDown() run?\n  bool tear_down_was_run() const { return tear_down_was_run_; }\n\n private:\n  FailureType failure_in_set_up_;\n  bool set_up_was_run_;\n  bool tear_down_was_run_;\n};\n\n// Was the TEST run?\nbool test_was_run;\n\n// The sole purpose of this TEST is to enable us to check whether it\n// was run.\nTEST(FooTest, Bar) {\n  test_was_run = true;\n}\n\n// Prints the message and aborts the program if condition is false.\nvoid Check(bool condition, const char* msg) {\n  if (!condition) {\n    printf(\"FAILED: %s\\n\", msg);\n    testing::internal::posix::Abort();\n  }\n}\n\n// Runs the tests.  
Return true if and only if successful.\n//\n// The 'failure' parameter specifies the type of failure that should\n// be generated by the global set-up.\nint RunAllTests(MyEnvironment* env, FailureType failure) {\n  env->Reset();\n  env->set_failure_in_set_up(failure);\n  test_was_run = false;\n  testing::internal::GetUnitTestImpl()->ClearAdHocTestResult();\n  return RUN_ALL_TESTS();\n}\n\n}  // namespace\n\nint main(int argc, char **argv) {\n  testing::InitGoogleTest(&argc, argv);\n\n  // Registers a global test environment, and verifies that the\n  // registration function returns its argument.\n  MyEnvironment* const env = new MyEnvironment;\n  Check(testing::AddGlobalTestEnvironment(env) == env,\n        \"AddGlobalTestEnvironment() should return its argument.\");\n\n  // Verifies that RUN_ALL_TESTS() runs the tests when the global\n  // set-up is successful.\n  Check(RunAllTests(env, NO_FAILURE) != 0,\n        \"RUN_ALL_TESTS() should return non-zero, as the global tear-down \"\n        \"should generate a failure.\");\n  Check(test_was_run,\n        \"The tests should run, as the global set-up should generate no \"\n        \"failure\");\n  Check(env->tear_down_was_run(),\n        \"The global tear-down should run, as the global set-up was run.\");\n\n  // Verifies that RUN_ALL_TESTS() runs the tests when the global\n  // set-up generates no fatal failure.\n  Check(RunAllTests(env, NON_FATAL_FAILURE) != 0,\n        \"RUN_ALL_TESTS() should return non-zero, as both the global set-up \"\n        \"and the global tear-down should generate a non-fatal failure.\");\n  Check(test_was_run,\n        \"The tests should run, as the global set-up should generate no \"\n        \"fatal failure.\");\n  Check(env->tear_down_was_run(),\n        \"The global tear-down should run, as the global set-up was run.\");\n\n  // Verifies that RUN_ALL_TESTS() runs no test when the global set-up\n  // generates a fatal failure.\n  Check(RunAllTests(env, FATAL_FAILURE) != 0,\n        
\"RUN_ALL_TESTS() should return non-zero, as the global set-up \"\n        \"should generate a fatal failure.\");\n  Check(!test_was_run,\n        \"The tests should not run, as the global set-up should generate \"\n        \"a fatal failure.\");\n  Check(env->tear_down_was_run(),\n        \"The global tear-down should run, as the global set-up was run.\");\n\n  // Verifies that RUN_ALL_TESTS() doesn't do global set-up or\n  // tear-down when there is no test to run.\n  testing::GTEST_FLAG(filter) = \"-*\";\n  Check(RunAllTests(env, NO_FAILURE) == 0,\n        \"RUN_ALL_TESTS() should return zero, as there is no test to run.\");\n  Check(!env->set_up_was_run(),\n        \"The global set-up should not run, as there is no test to run.\");\n  Check(!env->tear_down_was_run(),\n        \"The global tear-down should not run, \"\n        \"as the global set-up was not run.\");\n\n  printf(\"PASS\\n\");\n  return 0;\n}\n"
  },
  {
    "path": "test/googletest/test/gtest_help_test.py",
    "content": "#!/usr/bin/env python\n#\n# Copyright 2009, Google Inc.\n# All rights reserved.\n#\n# Redistribution and use in source and binary forms, with or without\n# modification, are permitted provided that the following conditions are\n# met:\n#\n#     * Redistributions of source code must retain the above copyright\n# notice, this list of conditions and the following disclaimer.\n#     * Redistributions in binary form must reproduce the above\n# copyright notice, this list of conditions and the following disclaimer\n# in the documentation and/or other materials provided with the\n# distribution.\n#     * Neither the name of Google Inc. nor the names of its\n# contributors may be used to endorse or promote products derived from\n# this software without specific prior written permission.\n#\n# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n# \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\"\"\"Tests the --help flag of Google C++ Testing and Mocking Framework.\n\nSYNOPSIS\n       gtest_help_test.py --build_dir=BUILD/DIR\n         # where BUILD/DIR contains the built gtest_help_test_ file.\n       gtest_help_test.py\n\"\"\"\n\nimport os\nimport re\nimport gtest_test_utils\n\n\nIS_LINUX = os.name == 'posix' and os.uname()[0] == 'Linux'\nIS_WINDOWS = os.name == 'nt'\n\nPROGRAM_PATH = gtest_test_utils.GetTestExecutablePath('gtest_help_test_')\nFLAG_PREFIX = '--gtest_'\nDEATH_TEST_STYLE_FLAG = FLAG_PREFIX + 'death_test_style'\nSTREAM_RESULT_TO_FLAG = FLAG_PREFIX + 'stream_result_to'\nUNKNOWN_FLAG = FLAG_PREFIX + 'unknown_flag_for_testing'\nLIST_TESTS_FLAG = FLAG_PREFIX + 'list_tests'\nINCORRECT_FLAG_VARIANTS = [re.sub('^--', '-', LIST_TESTS_FLAG),\n                           re.sub('^--', '/', LIST_TESTS_FLAG),\n                           re.sub('_', '-', LIST_TESTS_FLAG)]\nINTERNAL_FLAG_FOR_TESTING = FLAG_PREFIX + 'internal_flag_for_testing'\n\nSUPPORTS_DEATH_TESTS = \"DeathTest\" in gtest_test_utils.Subprocess(\n    [PROGRAM_PATH, LIST_TESTS_FLAG]).output\n\n# The help message must match this regex.\nHELP_REGEX = re.compile(\n    FLAG_PREFIX + r'list_tests.*' +\n    FLAG_PREFIX + r'filter=.*' +\n    FLAG_PREFIX + r'also_run_disabled_tests.*' +\n    FLAG_PREFIX + r'repeat=.*' +\n    FLAG_PREFIX + r'shuffle.*' +\n    FLAG_PREFIX + r'random_seed=.*' +\n    FLAG_PREFIX + r'color=.*' +\n    FLAG_PREFIX + r'print_time.*' +\n    
FLAG_PREFIX + r'output=.*' +\n    FLAG_PREFIX + r'break_on_failure.*' +\n    FLAG_PREFIX + r'throw_on_failure.*' +\n    FLAG_PREFIX + r'catch_exceptions=0.*',\n    re.DOTALL)\n\n\ndef RunWithFlag(flag):\n  \"\"\"Runs gtest_help_test_ with the given flag.\n\n  Returns:\n    the exit code and the text output as a tuple.\n  Args:\n    flag: the command-line flag to pass to gtest_help_test_, or None.\n  \"\"\"\n\n  if flag is None:\n    command = [PROGRAM_PATH]\n  else:\n    command = [PROGRAM_PATH, flag]\n  child = gtest_test_utils.Subprocess(command)\n  return child.exit_code, child.output\n\n\nclass GTestHelpTest(gtest_test_utils.TestCase):\n  \"\"\"Tests the --help flag and its equivalent forms.\"\"\"\n\n  def TestHelpFlag(self, flag):\n    \"\"\"Verifies correct behavior when help flag is specified.\n\n    The right message must be printed and the tests must\n    skipped when the given flag is specified.\n\n    Args:\n      flag:  A flag to pass to the binary or None.\n    \"\"\"\n\n    exit_code, output = RunWithFlag(flag)\n    self.assertEquals(0, exit_code)\n    self.assert_(HELP_REGEX.search(output), output)\n\n    if IS_LINUX:\n      self.assert_(STREAM_RESULT_TO_FLAG in output, output)\n    else:\n      self.assert_(STREAM_RESULT_TO_FLAG not in output, output)\n\n    if SUPPORTS_DEATH_TESTS and not IS_WINDOWS:\n      self.assert_(DEATH_TEST_STYLE_FLAG in output, output)\n    else:\n      self.assert_(DEATH_TEST_STYLE_FLAG not in output, output)\n\n  def TestNonHelpFlag(self, flag):\n    \"\"\"Verifies correct behavior when no help flag is specified.\n\n    Verifies that when no help flag is specified, the tests are run\n    and the help message is not printed.\n\n    Args:\n      flag:  A flag to pass to the binary or None.\n    \"\"\"\n\n    exit_code, output = RunWithFlag(flag)\n    self.assert_(exit_code != 0)\n    self.assert_(not HELP_REGEX.search(output), output)\n\n  def testPrintsHelpWithFullFlag(self):\n    self.TestHelpFlag('--help')\n\n  def 
testPrintsHelpWithShortFlag(self):\n    self.TestHelpFlag('-h')\n\n  def testPrintsHelpWithQuestionFlag(self):\n    self.TestHelpFlag('-?')\n\n  def testPrintsHelpWithWindowsStyleQuestionFlag(self):\n    self.TestHelpFlag('/?')\n\n  def testPrintsHelpWithUnrecognizedGoogleTestFlag(self):\n    self.TestHelpFlag(UNKNOWN_FLAG)\n\n  def testPrintsHelpWithIncorrectFlagStyle(self):\n    for incorrect_flag in INCORRECT_FLAG_VARIANTS:\n      self.TestHelpFlag(incorrect_flag)\n\n  def testRunsTestsWithoutHelpFlag(self):\n    \"\"\"Verifies that when no help flag is specified, the tests are run\n    and the help message is not printed.\"\"\"\n\n    self.TestNonHelpFlag(None)\n\n  def testRunsTestsWithGtestInternalFlag(self):\n    \"\"\"Verifies that the tests are run and no help message is printed when\n    a flag starting with Google Test prefix and 'internal_' is supplied.\"\"\"\n\n    self.TestNonHelpFlag(INTERNAL_FLAG_FOR_TESTING)\n\n\nif __name__ == '__main__':\n  gtest_test_utils.Main()\n"
  },
  {
    "path": "test/googletest/test/gtest_help_test_.cc",
    "content": "// Copyright 2009, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\n// This program is meant to be run by gtest_help_test.py.  
Do not run\n// it directly.\n\n#include \"gtest/gtest.h\"\n\n// When a help flag is specified, this program should skip the tests\n// and exit with 0; otherwise the following test will be executed,\n// causing this program to exit with a non-zero code.\nTEST(HelpFlagTest, ShouldNotBeRun) {\n  ASSERT_TRUE(false) << \"Tests shouldn't be run when --help is specified.\";\n}\n\n#if GTEST_HAS_DEATH_TEST\nTEST(DeathTest, UsedByPythonScriptToDetectSupportForDeathTestsInThisBinary) {}\n#endif\n"
  },
  {
    "path": "test/googletest/test/gtest_json_test_utils.py",
    "content": "# Copyright 2018, Google Inc.\n# All rights reserved.\n#\n# Redistribution and use in source and binary forms, with or without\n# modification, are permitted provided that the following conditions are\n# met:\n#\n#     * Redistributions of source code must retain the above copyright\n# notice, this list of conditions and the following disclaimer.\n#     * Redistributions in binary form must reproduce the above\n# copyright notice, this list of conditions and the following disclaimer\n# in the documentation and/or other materials provided with the\n# distribution.\n#     * Neither the name of Google Inc. nor the names of its\n# contributors may be used to endorse or promote products derived from\n# this software without specific prior written permission.\n#\n# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n# \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\"\"\"Unit test utilities for gtest_json_output.\"\"\"\n\nimport re\n\n\ndef normalize(obj):\n  \"\"\"Normalize output object.\n\n  Args:\n     obj: Google Test's JSON output object to normalize.\n\n  Returns:\n     Normalized output without any references to transient information that may\n     change from run to run.\n  \"\"\"\n  def _normalize(key, value):\n    if key == 'time':\n      return re.sub(r'^\\d+(\\.\\d+)?s$', '*', value)\n    elif key == 'timestamp':\n      return re.sub(r'^\\d{4}-\\d\\d-\\d\\dT\\d\\d:\\d\\d:\\d\\dZ$', '*', value)\n    elif key == 'failure':\n      value = re.sub(r'^.*[/\\\\](.*:)\\d+\\n', '\\\\1*\\n', value)\n      return re.sub(r'Stack trace:\\n(.|\\n)*', 'Stack trace:\\n*', value)\n    else:\n      return normalize(value)\n  if isinstance(obj, dict):\n    return {k: _normalize(k, v) for k, v in obj.items()}\n  if isinstance(obj, list):\n    return [normalize(x) for x in obj]\n  else:\n    return obj\n"
  },
  {
    "path": "test/googletest/test/gtest_list_output_unittest.py",
    "content": "#!/usr/bin/env python\n#\n# Copyright 2006, Google Inc.\n# All rights reserved.\n#\n# Redistribution and use in source and binary forms, with or without\n# modification, are permitted provided that the following conditions are\n# met:\n#\n#     * Redistributions of source code must retain the above copyright\n# notice, this list of conditions and the following disclaimer.\n#     * Redistributions in binary form must reproduce the above\n# copyright notice, this list of conditions and the following disclaimer\n# in the documentation and/or other materials provided with the\n# distribution.\n#     * Neither the name of Google Inc. nor the names of its\n# contributors may be used to endorse or promote products derived from\n# this software without specific prior written permission.\n#\n# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n# \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\"\"\"Unit test for Google Test's --gtest_list_tests flag.\n\nA user can ask Google Test to list all tests by specifying the\n--gtest_list_tests flag. 
If output is requested, via --gtest_output=xml\nor --gtest_output=json, the tests are listed, with extra information in the\noutput file.\nThis script tests such functionality by invoking gtest_list_output_unittest_\n (a program written with Google Test) the command line flags.\n\"\"\"\n\nimport os\nimport re\nimport gtest_test_utils\n\nGTEST_LIST_TESTS_FLAG = '--gtest_list_tests'\nGTEST_OUTPUT_FLAG = '--gtest_output'\n\nEXPECTED_XML = \"\"\"<\\?xml version=\"1.0\" encoding=\"UTF-8\"\\?>\n<testsuites tests=\"2\" name=\"AllTests\">\n  <testsuite name=\"FooTest\" tests=\"2\">\n    <testcase name=\"Test1\" file=\".*gtest_list_output_unittest_.cc\" line=\"43\" />\n    <testcase name=\"Test2\" file=\".*gtest_list_output_unittest_.cc\" line=\"45\" />\n  </testsuite>\n</testsuites>\n\"\"\"\n\nEXPECTED_JSON = \"\"\"{\n  \"tests\": 2,\n  \"name\": \"AllTests\",\n  \"testsuites\": \\[\n    {\n      \"name\": \"FooTest\",\n      \"tests\": 2,\n      \"testsuite\": \\[\n        {\n          \"name\": \"Test1\",\n          \"file\": \".*gtest_list_output_unittest_.cc\",\n          \"line\": 43\n        },\n        {\n          \"name\": \"Test2\",\n          \"file\": \".*gtest_list_output_unittest_.cc\",\n          \"line\": 45\n        }\n      \\]\n    }\n  \\]\n}\n\"\"\"\n\n\nclass GTestListTestsOutputUnitTest(gtest_test_utils.TestCase):\n  \"\"\"Unit test for Google Test's list tests with output to file functionality.\n  \"\"\"\n\n  def testXml(self):\n    \"\"\"Verifies XML output for listing tests in a Google Test binary.\n\n    Runs a test program that generates an empty XML output, and\n    tests that the XML output is expected.\n    \"\"\"\n    self._TestOutput('xml', EXPECTED_XML)\n\n  def testJSON(self):\n    \"\"\"Verifies XML output for listing tests in a Google Test binary.\n\n    Runs a test program that generates an empty XML output, and\n    tests that the XML output is expected.\n    \"\"\"\n    self._TestOutput('json', EXPECTED_JSON)\n\n  def 
_GetOutput(self, out_format):\n    file_path = os.path.join(gtest_test_utils.GetTempDir(),\n                             'test_out.' + out_format)\n    gtest_prog_path = gtest_test_utils.GetTestExecutablePath(\n        'gtest_list_output_unittest_')\n\n    command = ([\n        gtest_prog_path,\n        '%s=%s:%s' % (GTEST_OUTPUT_FLAG, out_format, file_path),\n        '--gtest_list_tests'\n    ])\n    environ_copy = os.environ.copy()\n    p = gtest_test_utils.Subprocess(\n        command, env=environ_copy, working_dir=gtest_test_utils.GetTempDir())\n\n    self.assert_(p.exited)\n    self.assertEquals(0, p.exit_code)\n    with open(file_path) as f:\n      result = f.read()\n    return result\n\n  def _TestOutput(self, test_format, expected_output):\n    actual = self._GetOutput(test_format)\n    actual_lines = actual.splitlines()\n    expected_lines = expected_output.splitlines()\n    line_count = 0\n    for actual_line in actual_lines:\n      expected_line = expected_lines[line_count]\n      expected_line_re = re.compile(expected_line.strip())\n      self.assert_(\n          expected_line_re.match(actual_line.strip()),\n          ('actual output of \"%s\",\\n'\n           'which does not match expected regex of \"%s\"\\n'\n           'on line %d' % (actual, expected_output, line_count)))\n      line_count = line_count + 1\n\n\nif __name__ == '__main__':\n  os.environ['GTEST_STACK_TRACE_DEPTH'] = '1'\n  gtest_test_utils.Main()\n"
  },
  {
    "path": "test/googletest/test/gtest_list_output_unittest_.cc",
    "content": "// Copyright 2018, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n//\n// Author: david.schuldenfrei@gmail.com (David Schuldenfrei)\n\n// Unit test for Google Test's --gtest_list_tests and --gtest_output flag.\n//\n// A user can ask Google Test to list all tests that will run,\n// and have the output saved in a Json/Xml file.\n// The tests will not be run after listing.\n//\n// This program will be invoked from a Python unit test.\n// Don't run it directly.\n\n#include \"gtest/gtest.h\"\n\nTEST(FooTest, Test1) {}\n\nTEST(FooTest, Test2) {}\n\nint main(int argc, char **argv) {\n  ::testing::InitGoogleTest(&argc, argv);\n\n  return RUN_ALL_TESTS();\n}\n"
  },
  {
    "path": "test/googletest/test/gtest_main_unittest.cc",
    "content": "// Copyright 2006, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\n#include \"gtest/gtest.h\"\n\n// Tests that we don't have to define main() when we link to\n// gtest_main instead of gtest.\n\nnamespace {\n\nTEST(GTestMainTest, ShouldSucceed) {\n}\n\n}  // namespace\n\n// We are using the main() function defined in gtest_main.cc, so we\n// don't define it here.\n"
  },
  {
    "path": "test/googletest/test/gtest_no_test_unittest.cc",
    "content": "// Copyright 2006, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n// Tests that a Google Test program that has no test defined can run\n// successfully.\n\n#include \"gtest/gtest.h\"\n\nint main(int argc, char **argv) {\n  testing::InitGoogleTest(&argc, argv);\n\n  // An ad-hoc assertion outside of all tests.\n  //\n  // This serves three purposes:\n  //\n  // 1. 
It verifies that an ad-hoc assertion can be executed even if\n  //    no test is defined.\n  // 2. It verifies that a failed ad-hoc assertion causes the test\n  //    program to fail.\n  // 3. We had a bug where the XML output won't be generated if an\n  //    assertion is executed before RUN_ALL_TESTS() is called, even\n  //    though --gtest_output=xml is specified.  This makes sure the\n  //    bug is fixed and doesn't regress.\n  EXPECT_EQ(1, 2);\n\n  // The above EXPECT_EQ() should cause RUN_ALL_TESTS() to return non-zero.\n  return RUN_ALL_TESTS() ? 0 : 1;\n}\n"
  },
  {
    "path": "test/googletest/test/gtest_pred_impl_unittest.cc",
    "content": "// Copyright 2006, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n// This file is AUTOMATICALLY GENERATED on 01/02/2019 by command\n// 'gen_gtest_pred_impl.py 5'.  DO NOT EDIT BY HAND!\n\n// Regression test for gtest_pred_impl.h\n//\n// This file is generated by a script and quite long.  
If you intend to\n// learn how Google Test works by reading its unit tests, read\n// gtest_unittest.cc instead.\n//\n// This is intended as a regression test for the Google Test predicate\n// assertions.  We compile it as part of the gtest_unittest target\n// only to keep the implementation tidy and compact, as it is quite\n// involved to set up the stage for testing Google Test using Google\n// Test itself.\n//\n// Currently, gtest_unittest takes ~11 seconds to run in the testing\n// daemon.  In the future, if it grows too large and needs much more\n// time to finish, we should consider separating this file into a\n// stand-alone regression test.\n\n#include <iostream>\n\n#include \"gtest/gtest.h\"\n#include \"gtest/gtest-spi.h\"\n\n// A user-defined data type.\nstruct Bool {\n  explicit Bool(int val) : value(val != 0) {}\n\n  bool operator>(int n) const { return value > Bool(n).value; }\n\n  Bool operator+(const Bool& rhs) const { return Bool(value + rhs.value); }\n\n  bool operator==(const Bool& rhs) const { return value == rhs.value; }\n\n  bool value;\n};\n\n// Enables Bool to be used in assertions.\nstd::ostream& operator<<(std::ostream& os, const Bool& x) {\n  return os << (x.value ? 
\"true\" : \"false\");\n}\n\n// Sample functions/functors for testing unary predicate assertions.\n\n// A unary predicate function.\ntemplate <typename T1>\nbool PredFunction1(T1 v1) {\n  return v1 > 0;\n}\n\n// The following two functions are needed to circumvent a bug in\n// gcc 2.95.3, which sometimes has problem with the above template\n// function.\nbool PredFunction1Int(int v1) {\n  return v1 > 0;\n}\nbool PredFunction1Bool(Bool v1) {\n  return v1 > 0;\n}\n\n// A unary predicate functor.\nstruct PredFunctor1 {\n  template <typename T1>\n  bool operator()(const T1& v1) {\n    return v1 > 0;\n  }\n};\n\n// A unary predicate-formatter function.\ntemplate <typename T1>\ntesting::AssertionResult PredFormatFunction1(const char* e1,\n                                             const T1& v1) {\n  if (PredFunction1(v1))\n    return testing::AssertionSuccess();\n\n  return testing::AssertionFailure()\n      << e1\n      << \" is expected to be positive, but evaluates to \"\n      << v1 << \".\";\n}\n\n// A unary predicate-formatter functor.\nstruct PredFormatFunctor1 {\n  template <typename T1>\n  testing::AssertionResult operator()(const char* e1,\n                                      const T1& v1) const {\n    return PredFormatFunction1(e1, v1);\n  }\n};\n\n// Tests for {EXPECT|ASSERT}_PRED_FORMAT1.\n\nclass Predicate1Test : public testing::Test {\n protected:\n  void SetUp() override {\n    expected_to_finish_ = true;\n    finished_ = false;\n    n1_ = 0;\n  }\n\n  void TearDown() override {\n    // Verifies that each of the predicate's arguments was evaluated\n    // exactly once.\n    EXPECT_EQ(1, n1_) <<\n        \"The predicate assertion didn't evaluate argument 2 \"\n        \"exactly once.\";\n\n    // Verifies that the control flow in the test function is expected.\n    if (expected_to_finish_ && !finished_) {\n      FAIL() << \"The predicate assertion unexpactedly aborted the test.\";\n    } else if (!expected_to_finish_ && finished_) {\n      FAIL() << 
\"The failed predicate assertion didn't abort the test \"\n                \"as expected.\";\n    }\n  }\n\n  // true if and only if the test function is expected to run to finish.\n  static bool expected_to_finish_;\n\n  // true if and only if the test function did run to finish.\n  static bool finished_;\n\n  static int n1_;\n};\n\nbool Predicate1Test::expected_to_finish_;\nbool Predicate1Test::finished_;\nint Predicate1Test::n1_;\n\ntypedef Predicate1Test EXPECT_PRED_FORMAT1Test;\ntypedef Predicate1Test ASSERT_PRED_FORMAT1Test;\ntypedef Predicate1Test EXPECT_PRED1Test;\ntypedef Predicate1Test ASSERT_PRED1Test;\n\n// Tests a successful EXPECT_PRED1 where the\n// predicate-formatter is a function on a built-in type (int).\nTEST_F(EXPECT_PRED1Test, FunctionOnBuiltInTypeSuccess) {\n  EXPECT_PRED1(PredFunction1Int,\n               ++n1_);\n  finished_ = true;\n}\n\n// Tests a successful EXPECT_PRED1 where the\n// predicate-formatter is a function on a user-defined type (Bool).\nTEST_F(EXPECT_PRED1Test, FunctionOnUserTypeSuccess) {\n  EXPECT_PRED1(PredFunction1Bool,\n               Bool(++n1_));\n  finished_ = true;\n}\n\n// Tests a successful EXPECT_PRED1 where the\n// predicate-formatter is a functor on a built-in type (int).\nTEST_F(EXPECT_PRED1Test, FunctorOnBuiltInTypeSuccess) {\n  EXPECT_PRED1(PredFunctor1(),\n               ++n1_);\n  finished_ = true;\n}\n\n// Tests a successful EXPECT_PRED1 where the\n// predicate-formatter is a functor on a user-defined type (Bool).\nTEST_F(EXPECT_PRED1Test, FunctorOnUserTypeSuccess) {\n  EXPECT_PRED1(PredFunctor1(),\n               Bool(++n1_));\n  finished_ = true;\n}\n\n// Tests a failed EXPECT_PRED1 where the\n// predicate-formatter is a function on a built-in type (int).\nTEST_F(EXPECT_PRED1Test, FunctionOnBuiltInTypeFailure) {\n  EXPECT_NONFATAL_FAILURE({  // NOLINT\n    EXPECT_PRED1(PredFunction1Int,\n                 n1_++);\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a failed EXPECT_PRED1 where the\n// 
predicate-formatter is a function on a user-defined type (Bool).\nTEST_F(EXPECT_PRED1Test, FunctionOnUserTypeFailure) {\n  EXPECT_NONFATAL_FAILURE({  // NOLINT\n    EXPECT_PRED1(PredFunction1Bool,\n                 Bool(n1_++));\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a failed EXPECT_PRED1 where the\n// predicate-formatter is a functor on a built-in type (int).\nTEST_F(EXPECT_PRED1Test, FunctorOnBuiltInTypeFailure) {\n  EXPECT_NONFATAL_FAILURE({  // NOLINT\n    EXPECT_PRED1(PredFunctor1(),\n                 n1_++);\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a failed EXPECT_PRED1 where the\n// predicate-formatter is a functor on a user-defined type (Bool).\nTEST_F(EXPECT_PRED1Test, FunctorOnUserTypeFailure) {\n  EXPECT_NONFATAL_FAILURE({  // NOLINT\n    EXPECT_PRED1(PredFunctor1(),\n                 Bool(n1_++));\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a successful ASSERT_PRED1 where the\n// predicate-formatter is a function on a built-in type (int).\nTEST_F(ASSERT_PRED1Test, FunctionOnBuiltInTypeSuccess) {\n  ASSERT_PRED1(PredFunction1Int,\n               ++n1_);\n  finished_ = true;\n}\n\n// Tests a successful ASSERT_PRED1 where the\n// predicate-formatter is a function on a user-defined type (Bool).\nTEST_F(ASSERT_PRED1Test, FunctionOnUserTypeSuccess) {\n  ASSERT_PRED1(PredFunction1Bool,\n               Bool(++n1_));\n  finished_ = true;\n}\n\n// Tests a successful ASSERT_PRED1 where the\n// predicate-formatter is a functor on a built-in type (int).\nTEST_F(ASSERT_PRED1Test, FunctorOnBuiltInTypeSuccess) {\n  ASSERT_PRED1(PredFunctor1(),\n               ++n1_);\n  finished_ = true;\n}\n\n// Tests a successful ASSERT_PRED1 where the\n// predicate-formatter is a functor on a user-defined type (Bool).\nTEST_F(ASSERT_PRED1Test, FunctorOnUserTypeSuccess) {\n  ASSERT_PRED1(PredFunctor1(),\n               Bool(++n1_));\n  finished_ = true;\n}\n\n// Tests a failed ASSERT_PRED1 where the\n// predicate-formatter is a function on a built-in type 
(int).\nTEST_F(ASSERT_PRED1Test, FunctionOnBuiltInTypeFailure) {\n  expected_to_finish_ = false;\n  EXPECT_FATAL_FAILURE({  // NOLINT\n    ASSERT_PRED1(PredFunction1Int,\n                 n1_++);\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a failed ASSERT_PRED1 where the\n// predicate-formatter is a function on a user-defined type (Bool).\nTEST_F(ASSERT_PRED1Test, FunctionOnUserTypeFailure) {\n  expected_to_finish_ = false;\n  EXPECT_FATAL_FAILURE({  // NOLINT\n    ASSERT_PRED1(PredFunction1Bool,\n                 Bool(n1_++));\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a failed ASSERT_PRED1 where the\n// predicate-formatter is a functor on a built-in type (int).\nTEST_F(ASSERT_PRED1Test, FunctorOnBuiltInTypeFailure) {\n  expected_to_finish_ = false;\n  EXPECT_FATAL_FAILURE({  // NOLINT\n    ASSERT_PRED1(PredFunctor1(),\n                 n1_++);\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a failed ASSERT_PRED1 where the\n// predicate-formatter is a functor on a user-defined type (Bool).\nTEST_F(ASSERT_PRED1Test, FunctorOnUserTypeFailure) {\n  expected_to_finish_ = false;\n  EXPECT_FATAL_FAILURE({  // NOLINT\n    ASSERT_PRED1(PredFunctor1(),\n                 Bool(n1_++));\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a successful EXPECT_PRED_FORMAT1 where the\n// predicate-formatter is a function on a built-in type (int).\nTEST_F(EXPECT_PRED_FORMAT1Test, FunctionOnBuiltInTypeSuccess) {\n  EXPECT_PRED_FORMAT1(PredFormatFunction1,\n                      ++n1_);\n  finished_ = true;\n}\n\n// Tests a successful EXPECT_PRED_FORMAT1 where the\n// predicate-formatter is a function on a user-defined type (Bool).\nTEST_F(EXPECT_PRED_FORMAT1Test, FunctionOnUserTypeSuccess) {\n  EXPECT_PRED_FORMAT1(PredFormatFunction1,\n                      Bool(++n1_));\n  finished_ = true;\n}\n\n// Tests a successful EXPECT_PRED_FORMAT1 where the\n// predicate-formatter is a functor on a built-in type (int).\nTEST_F(EXPECT_PRED_FORMAT1Test, 
FunctorOnBuiltInTypeSuccess) {\n  EXPECT_PRED_FORMAT1(PredFormatFunctor1(),\n                      ++n1_);\n  finished_ = true;\n}\n\n// Tests a successful EXPECT_PRED_FORMAT1 where the\n// predicate-formatter is a functor on a user-defined type (Bool).\nTEST_F(EXPECT_PRED_FORMAT1Test, FunctorOnUserTypeSuccess) {\n  EXPECT_PRED_FORMAT1(PredFormatFunctor1(),\n                      Bool(++n1_));\n  finished_ = true;\n}\n\n// Tests a failed EXPECT_PRED_FORMAT1 where the\n// predicate-formatter is a function on a built-in type (int).\nTEST_F(EXPECT_PRED_FORMAT1Test, FunctionOnBuiltInTypeFailure) {\n  EXPECT_NONFATAL_FAILURE({  // NOLINT\n    EXPECT_PRED_FORMAT1(PredFormatFunction1,\n                        n1_++);\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a failed EXPECT_PRED_FORMAT1 where the\n// predicate-formatter is a function on a user-defined type (Bool).\nTEST_F(EXPECT_PRED_FORMAT1Test, FunctionOnUserTypeFailure) {\n  EXPECT_NONFATAL_FAILURE({  // NOLINT\n    EXPECT_PRED_FORMAT1(PredFormatFunction1,\n                        Bool(n1_++));\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a failed EXPECT_PRED_FORMAT1 where the\n// predicate-formatter is a functor on a built-in type (int).\nTEST_F(EXPECT_PRED_FORMAT1Test, FunctorOnBuiltInTypeFailure) {\n  EXPECT_NONFATAL_FAILURE({  // NOLINT\n    EXPECT_PRED_FORMAT1(PredFormatFunctor1(),\n                        n1_++);\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a failed EXPECT_PRED_FORMAT1 where the\n// predicate-formatter is a functor on a user-defined type (Bool).\nTEST_F(EXPECT_PRED_FORMAT1Test, FunctorOnUserTypeFailure) {\n  EXPECT_NONFATAL_FAILURE({  // NOLINT\n    EXPECT_PRED_FORMAT1(PredFormatFunctor1(),\n                        Bool(n1_++));\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a successful ASSERT_PRED_FORMAT1 where the\n// predicate-formatter is a function on a built-in type (int).\nTEST_F(ASSERT_PRED_FORMAT1Test, FunctionOnBuiltInTypeSuccess) {\n  
ASSERT_PRED_FORMAT1(PredFormatFunction1,\n                      ++n1_);\n  finished_ = true;\n}\n\n// Tests a successful ASSERT_PRED_FORMAT1 where the\n// predicate-formatter is a function on a user-defined type (Bool).\nTEST_F(ASSERT_PRED_FORMAT1Test, FunctionOnUserTypeSuccess) {\n  ASSERT_PRED_FORMAT1(PredFormatFunction1,\n                      Bool(++n1_));\n  finished_ = true;\n}\n\n// Tests a successful ASSERT_PRED_FORMAT1 where the\n// predicate-formatter is a functor on a built-in type (int).\nTEST_F(ASSERT_PRED_FORMAT1Test, FunctorOnBuiltInTypeSuccess) {\n  ASSERT_PRED_FORMAT1(PredFormatFunctor1(),\n                      ++n1_);\n  finished_ = true;\n}\n\n// Tests a successful ASSERT_PRED_FORMAT1 where the\n// predicate-formatter is a functor on a user-defined type (Bool).\nTEST_F(ASSERT_PRED_FORMAT1Test, FunctorOnUserTypeSuccess) {\n  ASSERT_PRED_FORMAT1(PredFormatFunctor1(),\n                      Bool(++n1_));\n  finished_ = true;\n}\n\n// Tests a failed ASSERT_PRED_FORMAT1 where the\n// predicate-formatter is a function on a built-in type (int).\nTEST_F(ASSERT_PRED_FORMAT1Test, FunctionOnBuiltInTypeFailure) {\n  expected_to_finish_ = false;\n  EXPECT_FATAL_FAILURE({  // NOLINT\n    ASSERT_PRED_FORMAT1(PredFormatFunction1,\n                        n1_++);\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a failed ASSERT_PRED_FORMAT1 where the\n// predicate-formatter is a function on a user-defined type (Bool).\nTEST_F(ASSERT_PRED_FORMAT1Test, FunctionOnUserTypeFailure) {\n  expected_to_finish_ = false;\n  EXPECT_FATAL_FAILURE({  // NOLINT\n    ASSERT_PRED_FORMAT1(PredFormatFunction1,\n                        Bool(n1_++));\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a failed ASSERT_PRED_FORMAT1 where the\n// predicate-formatter is a functor on a built-in type (int).\nTEST_F(ASSERT_PRED_FORMAT1Test, FunctorOnBuiltInTypeFailure) {\n  expected_to_finish_ = false;\n  EXPECT_FATAL_FAILURE({  // NOLINT\n    ASSERT_PRED_FORMAT1(PredFormatFunctor1(),\n    
                    n1_++);\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a failed ASSERT_PRED_FORMAT1 where the\n// predicate-formatter is a functor on a user-defined type (Bool).\nTEST_F(ASSERT_PRED_FORMAT1Test, FunctorOnUserTypeFailure) {\n  expected_to_finish_ = false;\n  EXPECT_FATAL_FAILURE({  // NOLINT\n    ASSERT_PRED_FORMAT1(PredFormatFunctor1(),\n                        Bool(n1_++));\n    finished_ = true;\n  }, \"\");\n}\n// Sample functions/functors for testing binary predicate assertions.\n\n// A binary predicate function.\ntemplate <typename T1, typename T2>\nbool PredFunction2(T1 v1, T2 v2) {\n  return v1 + v2 > 0;\n}\n\n// The following two functions are needed to circumvent a bug in\n// gcc 2.95.3, which sometimes has problem with the above template\n// function.\nbool PredFunction2Int(int v1, int v2) {\n  return v1 + v2 > 0;\n}\nbool PredFunction2Bool(Bool v1, Bool v2) {\n  return v1 + v2 > 0;\n}\n\n// A binary predicate functor.\nstruct PredFunctor2 {\n  template <typename T1, typename T2>\n  bool operator()(const T1& v1,\n                  const T2& v2) {\n    return v1 + v2 > 0;\n  }\n};\n\n// A binary predicate-formatter function.\ntemplate <typename T1, typename T2>\ntesting::AssertionResult PredFormatFunction2(const char* e1,\n                                             const char* e2,\n                                             const T1& v1,\n                                             const T2& v2) {\n  if (PredFunction2(v1, v2))\n    return testing::AssertionSuccess();\n\n  return testing::AssertionFailure()\n      << e1 << \" + \" << e2\n      << \" is expected to be positive, but evaluates to \"\n      << v1 + v2 << \".\";\n}\n\n// A binary predicate-formatter functor.\nstruct PredFormatFunctor2 {\n  template <typename T1, typename T2>\n  testing::AssertionResult operator()(const char* e1,\n                                      const char* e2,\n                                      const T1& v1,\n                                
      const T2& v2) const {\n    return PredFormatFunction2(e1, e2, v1, v2);\n  }\n};\n\n// Tests for {EXPECT|ASSERT}_PRED_FORMAT2.\n\nclass Predicate2Test : public testing::Test {\n protected:\n  void SetUp() override {\n    expected_to_finish_ = true;\n    finished_ = false;\n    n1_ = n2_ = 0;\n  }\n\n  void TearDown() override {\n    // Verifies that each of the predicate's arguments was evaluated\n    // exactly once.\n    EXPECT_EQ(1, n1_) <<\n        \"The predicate assertion didn't evaluate argument 2 \"\n        \"exactly once.\";\n    EXPECT_EQ(1, n2_) <<\n        \"The predicate assertion didn't evaluate argument 3 \"\n        \"exactly once.\";\n\n    // Verifies that the control flow in the test function is expected.\n    if (expected_to_finish_ && !finished_) {\n      FAIL() << \"The predicate assertion unexpactedly aborted the test.\";\n    } else if (!expected_to_finish_ && finished_) {\n      FAIL() << \"The failed predicate assertion didn't abort the test \"\n                \"as expected.\";\n    }\n  }\n\n  // true if and only if the test function is expected to run to finish.\n  static bool expected_to_finish_;\n\n  // true if and only if the test function did run to finish.\n  static bool finished_;\n\n  static int n1_;\n  static int n2_;\n};\n\nbool Predicate2Test::expected_to_finish_;\nbool Predicate2Test::finished_;\nint Predicate2Test::n1_;\nint Predicate2Test::n2_;\n\ntypedef Predicate2Test EXPECT_PRED_FORMAT2Test;\ntypedef Predicate2Test ASSERT_PRED_FORMAT2Test;\ntypedef Predicate2Test EXPECT_PRED2Test;\ntypedef Predicate2Test ASSERT_PRED2Test;\n\n// Tests a successful EXPECT_PRED2 where the\n// predicate-formatter is a function on a built-in type (int).\nTEST_F(EXPECT_PRED2Test, FunctionOnBuiltInTypeSuccess) {\n  EXPECT_PRED2(PredFunction2Int,\n               ++n1_,\n               ++n2_);\n  finished_ = true;\n}\n\n// Tests a successful EXPECT_PRED2 where the\n// predicate-formatter is a function on a user-defined type 
(Bool).\nTEST_F(EXPECT_PRED2Test, FunctionOnUserTypeSuccess) {\n  EXPECT_PRED2(PredFunction2Bool,\n               Bool(++n1_),\n               Bool(++n2_));\n  finished_ = true;\n}\n\n// Tests a successful EXPECT_PRED2 where the\n// predicate-formatter is a functor on a built-in type (int).\nTEST_F(EXPECT_PRED2Test, FunctorOnBuiltInTypeSuccess) {\n  EXPECT_PRED2(PredFunctor2(),\n               ++n1_,\n               ++n2_);\n  finished_ = true;\n}\n\n// Tests a successful EXPECT_PRED2 where the\n// predicate-formatter is a functor on a user-defined type (Bool).\nTEST_F(EXPECT_PRED2Test, FunctorOnUserTypeSuccess) {\n  EXPECT_PRED2(PredFunctor2(),\n               Bool(++n1_),\n               Bool(++n2_));\n  finished_ = true;\n}\n\n// Tests a failed EXPECT_PRED2 where the\n// predicate-formatter is a function on a built-in type (int).\nTEST_F(EXPECT_PRED2Test, FunctionOnBuiltInTypeFailure) {\n  EXPECT_NONFATAL_FAILURE({  // NOLINT\n    EXPECT_PRED2(PredFunction2Int,\n                 n1_++,\n                 n2_++);\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a failed EXPECT_PRED2 where the\n// predicate-formatter is a function on a user-defined type (Bool).\nTEST_F(EXPECT_PRED2Test, FunctionOnUserTypeFailure) {\n  EXPECT_NONFATAL_FAILURE({  // NOLINT\n    EXPECT_PRED2(PredFunction2Bool,\n                 Bool(n1_++),\n                 Bool(n2_++));\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a failed EXPECT_PRED2 where the\n// predicate-formatter is a functor on a built-in type (int).\nTEST_F(EXPECT_PRED2Test, FunctorOnBuiltInTypeFailure) {\n  EXPECT_NONFATAL_FAILURE({  // NOLINT\n    EXPECT_PRED2(PredFunctor2(),\n                 n1_++,\n                 n2_++);\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a failed EXPECT_PRED2 where the\n// predicate-formatter is a functor on a user-defined type (Bool).\nTEST_F(EXPECT_PRED2Test, FunctorOnUserTypeFailure) {\n  EXPECT_NONFATAL_FAILURE({  // NOLINT\n    EXPECT_PRED2(PredFunctor2(),\n               
  Bool(n1_++),\n                 Bool(n2_++));\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a successful ASSERT_PRED2 where the\n// predicate-formatter is a function on a built-in type (int).\nTEST_F(ASSERT_PRED2Test, FunctionOnBuiltInTypeSuccess) {\n  ASSERT_PRED2(PredFunction2Int,\n               ++n1_,\n               ++n2_);\n  finished_ = true;\n}\n\n// Tests a successful ASSERT_PRED2 where the\n// predicate-formatter is a function on a user-defined type (Bool).\nTEST_F(ASSERT_PRED2Test, FunctionOnUserTypeSuccess) {\n  ASSERT_PRED2(PredFunction2Bool,\n               Bool(++n1_),\n               Bool(++n2_));\n  finished_ = true;\n}\n\n// Tests a successful ASSERT_PRED2 where the\n// predicate-formatter is a functor on a built-in type (int).\nTEST_F(ASSERT_PRED2Test, FunctorOnBuiltInTypeSuccess) {\n  ASSERT_PRED2(PredFunctor2(),\n               ++n1_,\n               ++n2_);\n  finished_ = true;\n}\n\n// Tests a successful ASSERT_PRED2 where the\n// predicate-formatter is a functor on a user-defined type (Bool).\nTEST_F(ASSERT_PRED2Test, FunctorOnUserTypeSuccess) {\n  ASSERT_PRED2(PredFunctor2(),\n               Bool(++n1_),\n               Bool(++n2_));\n  finished_ = true;\n}\n\n// Tests a failed ASSERT_PRED2 where the\n// predicate-formatter is a function on a built-in type (int).\nTEST_F(ASSERT_PRED2Test, FunctionOnBuiltInTypeFailure) {\n  expected_to_finish_ = false;\n  EXPECT_FATAL_FAILURE({  // NOLINT\n    ASSERT_PRED2(PredFunction2Int,\n                 n1_++,\n                 n2_++);\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a failed ASSERT_PRED2 where the\n// predicate-formatter is a function on a user-defined type (Bool).\nTEST_F(ASSERT_PRED2Test, FunctionOnUserTypeFailure) {\n  expected_to_finish_ = false;\n  EXPECT_FATAL_FAILURE({  // NOLINT\n    ASSERT_PRED2(PredFunction2Bool,\n                 Bool(n1_++),\n                 Bool(n2_++));\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a failed ASSERT_PRED2 where the\n// 
predicate-formatter is a functor on a built-in type (int).\nTEST_F(ASSERT_PRED2Test, FunctorOnBuiltInTypeFailure) {\n  expected_to_finish_ = false;\n  EXPECT_FATAL_FAILURE({  // NOLINT\n    ASSERT_PRED2(PredFunctor2(),\n                 n1_++,\n                 n2_++);\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a failed ASSERT_PRED2 where the\n// predicate-formatter is a functor on a user-defined type (Bool).\nTEST_F(ASSERT_PRED2Test, FunctorOnUserTypeFailure) {\n  expected_to_finish_ = false;\n  EXPECT_FATAL_FAILURE({  // NOLINT\n    ASSERT_PRED2(PredFunctor2(),\n                 Bool(n1_++),\n                 Bool(n2_++));\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a successful EXPECT_PRED_FORMAT2 where the\n// predicate-formatter is a function on a built-in type (int).\nTEST_F(EXPECT_PRED_FORMAT2Test, FunctionOnBuiltInTypeSuccess) {\n  EXPECT_PRED_FORMAT2(PredFormatFunction2,\n                      ++n1_,\n                      ++n2_);\n  finished_ = true;\n}\n\n// Tests a successful EXPECT_PRED_FORMAT2 where the\n// predicate-formatter is a function on a user-defined type (Bool).\nTEST_F(EXPECT_PRED_FORMAT2Test, FunctionOnUserTypeSuccess) {\n  EXPECT_PRED_FORMAT2(PredFormatFunction2,\n                      Bool(++n1_),\n                      Bool(++n2_));\n  finished_ = true;\n}\n\n// Tests a successful EXPECT_PRED_FORMAT2 where the\n// predicate-formatter is a functor on a built-in type (int).\nTEST_F(EXPECT_PRED_FORMAT2Test, FunctorOnBuiltInTypeSuccess) {\n  EXPECT_PRED_FORMAT2(PredFormatFunctor2(),\n                      ++n1_,\n                      ++n2_);\n  finished_ = true;\n}\n\n// Tests a successful EXPECT_PRED_FORMAT2 where the\n// predicate-formatter is a functor on a user-defined type (Bool).\nTEST_F(EXPECT_PRED_FORMAT2Test, FunctorOnUserTypeSuccess) {\n  EXPECT_PRED_FORMAT2(PredFormatFunctor2(),\n                      Bool(++n1_),\n                      Bool(++n2_));\n  finished_ = true;\n}\n\n// Tests a failed EXPECT_PRED_FORMAT2 
where the\n// predicate-formatter is a function on a built-in type (int).\nTEST_F(EXPECT_PRED_FORMAT2Test, FunctionOnBuiltInTypeFailure) {\n  EXPECT_NONFATAL_FAILURE({  // NOLINT\n    EXPECT_PRED_FORMAT2(PredFormatFunction2,\n                        n1_++,\n                        n2_++);\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a failed EXPECT_PRED_FORMAT2 where the\n// predicate-formatter is a function on a user-defined type (Bool).\nTEST_F(EXPECT_PRED_FORMAT2Test, FunctionOnUserTypeFailure) {\n  EXPECT_NONFATAL_FAILURE({  // NOLINT\n    EXPECT_PRED_FORMAT2(PredFormatFunction2,\n                        Bool(n1_++),\n                        Bool(n2_++));\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a failed EXPECT_PRED_FORMAT2 where the\n// predicate-formatter is a functor on a built-in type (int).\nTEST_F(EXPECT_PRED_FORMAT2Test, FunctorOnBuiltInTypeFailure) {\n  EXPECT_NONFATAL_FAILURE({  // NOLINT\n    EXPECT_PRED_FORMAT2(PredFormatFunctor2(),\n                        n1_++,\n                        n2_++);\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a failed EXPECT_PRED_FORMAT2 where the\n// predicate-formatter is a functor on a user-defined type (Bool).\nTEST_F(EXPECT_PRED_FORMAT2Test, FunctorOnUserTypeFailure) {\n  EXPECT_NONFATAL_FAILURE({  // NOLINT\n    EXPECT_PRED_FORMAT2(PredFormatFunctor2(),\n                        Bool(n1_++),\n                        Bool(n2_++));\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a successful ASSERT_PRED_FORMAT2 where the\n// predicate-formatter is a function on a built-in type (int).\nTEST_F(ASSERT_PRED_FORMAT2Test, FunctionOnBuiltInTypeSuccess) {\n  ASSERT_PRED_FORMAT2(PredFormatFunction2,\n                      ++n1_,\n                      ++n2_);\n  finished_ = true;\n}\n\n// Tests a successful ASSERT_PRED_FORMAT2 where the\n// predicate-formatter is a function on a user-defined type (Bool).\nTEST_F(ASSERT_PRED_FORMAT2Test, FunctionOnUserTypeSuccess) {\n  
ASSERT_PRED_FORMAT2(PredFormatFunction2,\n                      Bool(++n1_),\n                      Bool(++n2_));\n  finished_ = true;\n}\n\n// Tests a successful ASSERT_PRED_FORMAT2 where the\n// predicate-formatter is a functor on a built-in type (int).\nTEST_F(ASSERT_PRED_FORMAT2Test, FunctorOnBuiltInTypeSuccess) {\n  ASSERT_PRED_FORMAT2(PredFormatFunctor2(),\n                      ++n1_,\n                      ++n2_);\n  finished_ = true;\n}\n\n// Tests a successful ASSERT_PRED_FORMAT2 where the\n// predicate-formatter is a functor on a user-defined type (Bool).\nTEST_F(ASSERT_PRED_FORMAT2Test, FunctorOnUserTypeSuccess) {\n  ASSERT_PRED_FORMAT2(PredFormatFunctor2(),\n                      Bool(++n1_),\n                      Bool(++n2_));\n  finished_ = true;\n}\n\n// Tests a failed ASSERT_PRED_FORMAT2 where the\n// predicate-formatter is a function on a built-in type (int).\nTEST_F(ASSERT_PRED_FORMAT2Test, FunctionOnBuiltInTypeFailure) {\n  expected_to_finish_ = false;\n  EXPECT_FATAL_FAILURE({  // NOLINT\n    ASSERT_PRED_FORMAT2(PredFormatFunction2,\n                        n1_++,\n                        n2_++);\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a failed ASSERT_PRED_FORMAT2 where the\n// predicate-formatter is a function on a user-defined type (Bool).\nTEST_F(ASSERT_PRED_FORMAT2Test, FunctionOnUserTypeFailure) {\n  expected_to_finish_ = false;\n  EXPECT_FATAL_FAILURE({  // NOLINT\n    ASSERT_PRED_FORMAT2(PredFormatFunction2,\n                        Bool(n1_++),\n                        Bool(n2_++));\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a failed ASSERT_PRED_FORMAT2 where the\n// predicate-formatter is a functor on a built-in type (int).\nTEST_F(ASSERT_PRED_FORMAT2Test, FunctorOnBuiltInTypeFailure) {\n  expected_to_finish_ = false;\n  EXPECT_FATAL_FAILURE({  // NOLINT\n    ASSERT_PRED_FORMAT2(PredFormatFunctor2(),\n                        n1_++,\n                        n2_++);\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a 
failed ASSERT_PRED_FORMAT2 where the\n// predicate-formatter is a functor on a user-defined type (Bool).\nTEST_F(ASSERT_PRED_FORMAT2Test, FunctorOnUserTypeFailure) {\n  expected_to_finish_ = false;\n  EXPECT_FATAL_FAILURE({  // NOLINT\n    ASSERT_PRED_FORMAT2(PredFormatFunctor2(),\n                        Bool(n1_++),\n                        Bool(n2_++));\n    finished_ = true;\n  }, \"\");\n}\n// Sample functions/functors for testing ternary predicate assertions.\n\n// A ternary predicate function.\ntemplate <typename T1, typename T2, typename T3>\nbool PredFunction3(T1 v1, T2 v2, T3 v3) {\n  return v1 + v2 + v3 > 0;\n}\n\n// The following two functions are needed to circumvent a bug in\n// gcc 2.95.3, which sometimes has problem with the above template\n// function.\nbool PredFunction3Int(int v1, int v2, int v3) {\n  return v1 + v2 + v3 > 0;\n}\nbool PredFunction3Bool(Bool v1, Bool v2, Bool v3) {\n  return v1 + v2 + v3 > 0;\n}\n\n// A ternary predicate functor.\nstruct PredFunctor3 {\n  template <typename T1, typename T2, typename T3>\n  bool operator()(const T1& v1,\n                  const T2& v2,\n                  const T3& v3) {\n    return v1 + v2 + v3 > 0;\n  }\n};\n\n// A ternary predicate-formatter function.\ntemplate <typename T1, typename T2, typename T3>\ntesting::AssertionResult PredFormatFunction3(const char* e1,\n                                             const char* e2,\n                                             const char* e3,\n                                             const T1& v1,\n                                             const T2& v2,\n                                             const T3& v3) {\n  if (PredFunction3(v1, v2, v3))\n    return testing::AssertionSuccess();\n\n  return testing::AssertionFailure()\n      << e1 << \" + \" << e2 << \" + \" << e3\n      << \" is expected to be positive, but evaluates to \"\n      << v1 + v2 + v3 << \".\";\n}\n\n// A ternary predicate-formatter functor.\nstruct PredFormatFunctor3 {\n  
template <typename T1, typename T2, typename T3>\n  testing::AssertionResult operator()(const char* e1,\n                                      const char* e2,\n                                      const char* e3,\n                                      const T1& v1,\n                                      const T2& v2,\n                                      const T3& v3) const {\n    return PredFormatFunction3(e1, e2, e3, v1, v2, v3);\n  }\n};\n\n// Tests for {EXPECT|ASSERT}_PRED_FORMAT3.\n\nclass Predicate3Test : public testing::Test {\n protected:\n  void SetUp() override {\n    expected_to_finish_ = true;\n    finished_ = false;\n    n1_ = n2_ = n3_ = 0;\n  }\n\n  void TearDown() override {\n    // Verifies that each of the predicate's arguments was evaluated\n    // exactly once.\n    EXPECT_EQ(1, n1_) <<\n        \"The predicate assertion didn't evaluate argument 2 \"\n        \"exactly once.\";\n    EXPECT_EQ(1, n2_) <<\n        \"The predicate assertion didn't evaluate argument 3 \"\n        \"exactly once.\";\n    EXPECT_EQ(1, n3_) <<\n        \"The predicate assertion didn't evaluate argument 4 \"\n        \"exactly once.\";\n\n    // Verifies that the control flow in the test function is expected.\n    if (expected_to_finish_ && !finished_) {\n      FAIL() << \"The predicate assertion unexpactedly aborted the test.\";\n    } else if (!expected_to_finish_ && finished_) {\n      FAIL() << \"The failed predicate assertion didn't abort the test \"\n                \"as expected.\";\n    }\n  }\n\n  // true if and only if the test function is expected to run to finish.\n  static bool expected_to_finish_;\n\n  // true if and only if the test function did run to finish.\n  static bool finished_;\n\n  static int n1_;\n  static int n2_;\n  static int n3_;\n};\n\nbool Predicate3Test::expected_to_finish_;\nbool Predicate3Test::finished_;\nint Predicate3Test::n1_;\nint Predicate3Test::n2_;\nint Predicate3Test::n3_;\n\ntypedef Predicate3Test 
EXPECT_PRED_FORMAT3Test;\ntypedef Predicate3Test ASSERT_PRED_FORMAT3Test;\ntypedef Predicate3Test EXPECT_PRED3Test;\ntypedef Predicate3Test ASSERT_PRED3Test;\n\n// Tests a successful EXPECT_PRED3 where the\n// predicate-formatter is a function on a built-in type (int).\nTEST_F(EXPECT_PRED3Test, FunctionOnBuiltInTypeSuccess) {\n  EXPECT_PRED3(PredFunction3Int,\n               ++n1_,\n               ++n2_,\n               ++n3_);\n  finished_ = true;\n}\n\n// Tests a successful EXPECT_PRED3 where the\n// predicate-formatter is a function on a user-defined type (Bool).\nTEST_F(EXPECT_PRED3Test, FunctionOnUserTypeSuccess) {\n  EXPECT_PRED3(PredFunction3Bool,\n               Bool(++n1_),\n               Bool(++n2_),\n               Bool(++n3_));\n  finished_ = true;\n}\n\n// Tests a successful EXPECT_PRED3 where the\n// predicate-formatter is a functor on a built-in type (int).\nTEST_F(EXPECT_PRED3Test, FunctorOnBuiltInTypeSuccess) {\n  EXPECT_PRED3(PredFunctor3(),\n               ++n1_,\n               ++n2_,\n               ++n3_);\n  finished_ = true;\n}\n\n// Tests a successful EXPECT_PRED3 where the\n// predicate-formatter is a functor on a user-defined type (Bool).\nTEST_F(EXPECT_PRED3Test, FunctorOnUserTypeSuccess) {\n  EXPECT_PRED3(PredFunctor3(),\n               Bool(++n1_),\n               Bool(++n2_),\n               Bool(++n3_));\n  finished_ = true;\n}\n\n// Tests a failed EXPECT_PRED3 where the\n// predicate-formatter is a function on a built-in type (int).\nTEST_F(EXPECT_PRED3Test, FunctionOnBuiltInTypeFailure) {\n  EXPECT_NONFATAL_FAILURE({  // NOLINT\n    EXPECT_PRED3(PredFunction3Int,\n                 n1_++,\n                 n2_++,\n                 n3_++);\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a failed EXPECT_PRED3 where the\n// predicate-formatter is a function on a user-defined type (Bool).\nTEST_F(EXPECT_PRED3Test, FunctionOnUserTypeFailure) {\n  EXPECT_NONFATAL_FAILURE({  // NOLINT\n    EXPECT_PRED3(PredFunction3Bool,\n              
   Bool(n1_++),\n                 Bool(n2_++),\n                 Bool(n3_++));\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a failed EXPECT_PRED3 where the\n// predicate-formatter is a functor on a built-in type (int).\nTEST_F(EXPECT_PRED3Test, FunctorOnBuiltInTypeFailure) {\n  EXPECT_NONFATAL_FAILURE({  // NOLINT\n    EXPECT_PRED3(PredFunctor3(),\n                 n1_++,\n                 n2_++,\n                 n3_++);\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a failed EXPECT_PRED3 where the\n// predicate-formatter is a functor on a user-defined type (Bool).\nTEST_F(EXPECT_PRED3Test, FunctorOnUserTypeFailure) {\n  EXPECT_NONFATAL_FAILURE({  // NOLINT\n    EXPECT_PRED3(PredFunctor3(),\n                 Bool(n1_++),\n                 Bool(n2_++),\n                 Bool(n3_++));\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a successful ASSERT_PRED3 where the\n// predicate-formatter is a function on a built-in type (int).\nTEST_F(ASSERT_PRED3Test, FunctionOnBuiltInTypeSuccess) {\n  ASSERT_PRED3(PredFunction3Int,\n               ++n1_,\n               ++n2_,\n               ++n3_);\n  finished_ = true;\n}\n\n// Tests a successful ASSERT_PRED3 where the\n// predicate-formatter is a function on a user-defined type (Bool).\nTEST_F(ASSERT_PRED3Test, FunctionOnUserTypeSuccess) {\n  ASSERT_PRED3(PredFunction3Bool,\n               Bool(++n1_),\n               Bool(++n2_),\n               Bool(++n3_));\n  finished_ = true;\n}\n\n// Tests a successful ASSERT_PRED3 where the\n// predicate-formatter is a functor on a built-in type (int).\nTEST_F(ASSERT_PRED3Test, FunctorOnBuiltInTypeSuccess) {\n  ASSERT_PRED3(PredFunctor3(),\n               ++n1_,\n               ++n2_,\n               ++n3_);\n  finished_ = true;\n}\n\n// Tests a successful ASSERT_PRED3 where the\n// predicate-formatter is a functor on a user-defined type (Bool).\nTEST_F(ASSERT_PRED3Test, FunctorOnUserTypeSuccess) {\n  ASSERT_PRED3(PredFunctor3(),\n               Bool(++n1_),\n             
  Bool(++n2_),\n               Bool(++n3_));\n  finished_ = true;\n}\n\n// Tests a failed ASSERT_PRED3 where the\n// predicate-formatter is a function on a built-in type (int).\nTEST_F(ASSERT_PRED3Test, FunctionOnBuiltInTypeFailure) {\n  expected_to_finish_ = false;\n  EXPECT_FATAL_FAILURE({  // NOLINT\n    ASSERT_PRED3(PredFunction3Int,\n                 n1_++,\n                 n2_++,\n                 n3_++);\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a failed ASSERT_PRED3 where the\n// predicate-formatter is a function on a user-defined type (Bool).\nTEST_F(ASSERT_PRED3Test, FunctionOnUserTypeFailure) {\n  expected_to_finish_ = false;\n  EXPECT_FATAL_FAILURE({  // NOLINT\n    ASSERT_PRED3(PredFunction3Bool,\n                 Bool(n1_++),\n                 Bool(n2_++),\n                 Bool(n3_++));\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a failed ASSERT_PRED3 where the\n// predicate-formatter is a functor on a built-in type (int).\nTEST_F(ASSERT_PRED3Test, FunctorOnBuiltInTypeFailure) {\n  expected_to_finish_ = false;\n  EXPECT_FATAL_FAILURE({  // NOLINT\n    ASSERT_PRED3(PredFunctor3(),\n                 n1_++,\n                 n2_++,\n                 n3_++);\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a failed ASSERT_PRED3 where the\n// predicate-formatter is a functor on a user-defined type (Bool).\nTEST_F(ASSERT_PRED3Test, FunctorOnUserTypeFailure) {\n  expected_to_finish_ = false;\n  EXPECT_FATAL_FAILURE({  // NOLINT\n    ASSERT_PRED3(PredFunctor3(),\n                 Bool(n1_++),\n                 Bool(n2_++),\n                 Bool(n3_++));\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a successful EXPECT_PRED_FORMAT3 where the\n// predicate-formatter is a function on a built-in type (int).\nTEST_F(EXPECT_PRED_FORMAT3Test, FunctionOnBuiltInTypeSuccess) {\n  EXPECT_PRED_FORMAT3(PredFormatFunction3,\n                      ++n1_,\n                      ++n2_,\n                      ++n3_);\n  finished_ = true;\n}\n\n// 
Tests a successful EXPECT_PRED_FORMAT3 where the\n// predicate-formatter is a function on a user-defined type (Bool).\nTEST_F(EXPECT_PRED_FORMAT3Test, FunctionOnUserTypeSuccess) {\n  EXPECT_PRED_FORMAT3(PredFormatFunction3,\n                      Bool(++n1_),\n                      Bool(++n2_),\n                      Bool(++n3_));\n  finished_ = true;\n}\n\n// Tests a successful EXPECT_PRED_FORMAT3 where the\n// predicate-formatter is a functor on a built-in type (int).\nTEST_F(EXPECT_PRED_FORMAT3Test, FunctorOnBuiltInTypeSuccess) {\n  EXPECT_PRED_FORMAT3(PredFormatFunctor3(),\n                      ++n1_,\n                      ++n2_,\n                      ++n3_);\n  finished_ = true;\n}\n\n// Tests a successful EXPECT_PRED_FORMAT3 where the\n// predicate-formatter is a functor on a user-defined type (Bool).\nTEST_F(EXPECT_PRED_FORMAT3Test, FunctorOnUserTypeSuccess) {\n  EXPECT_PRED_FORMAT3(PredFormatFunctor3(),\n                      Bool(++n1_),\n                      Bool(++n2_),\n                      Bool(++n3_));\n  finished_ = true;\n}\n\n// Tests a failed EXPECT_PRED_FORMAT3 where the\n// predicate-formatter is a function on a built-in type (int).\nTEST_F(EXPECT_PRED_FORMAT3Test, FunctionOnBuiltInTypeFailure) {\n  EXPECT_NONFATAL_FAILURE({  // NOLINT\n    EXPECT_PRED_FORMAT3(PredFormatFunction3,\n                        n1_++,\n                        n2_++,\n                        n3_++);\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a failed EXPECT_PRED_FORMAT3 where the\n// predicate-formatter is a function on a user-defined type (Bool).\nTEST_F(EXPECT_PRED_FORMAT3Test, FunctionOnUserTypeFailure) {\n  EXPECT_NONFATAL_FAILURE({  // NOLINT\n    EXPECT_PRED_FORMAT3(PredFormatFunction3,\n                        Bool(n1_++),\n                        Bool(n2_++),\n                        Bool(n3_++));\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a failed EXPECT_PRED_FORMAT3 where the\n// predicate-formatter is a functor on a built-in type 
(int).\nTEST_F(EXPECT_PRED_FORMAT3Test, FunctorOnBuiltInTypeFailure) {\n  EXPECT_NONFATAL_FAILURE({  // NOLINT\n    EXPECT_PRED_FORMAT3(PredFormatFunctor3(),\n                        n1_++,\n                        n2_++,\n                        n3_++);\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a failed EXPECT_PRED_FORMAT3 where the\n// predicate-formatter is a functor on a user-defined type (Bool).\nTEST_F(EXPECT_PRED_FORMAT3Test, FunctorOnUserTypeFailure) {\n  EXPECT_NONFATAL_FAILURE({  // NOLINT\n    EXPECT_PRED_FORMAT3(PredFormatFunctor3(),\n                        Bool(n1_++),\n                        Bool(n2_++),\n                        Bool(n3_++));\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a successful ASSERT_PRED_FORMAT3 where the\n// predicate-formatter is a function on a built-in type (int).\nTEST_F(ASSERT_PRED_FORMAT3Test, FunctionOnBuiltInTypeSuccess) {\n  ASSERT_PRED_FORMAT3(PredFormatFunction3,\n                      ++n1_,\n                      ++n2_,\n                      ++n3_);\n  finished_ = true;\n}\n\n// Tests a successful ASSERT_PRED_FORMAT3 where the\n// predicate-formatter is a function on a user-defined type (Bool).\nTEST_F(ASSERT_PRED_FORMAT3Test, FunctionOnUserTypeSuccess) {\n  ASSERT_PRED_FORMAT3(PredFormatFunction3,\n                      Bool(++n1_),\n                      Bool(++n2_),\n                      Bool(++n3_));\n  finished_ = true;\n}\n\n// Tests a successful ASSERT_PRED_FORMAT3 where the\n// predicate-formatter is a functor on a built-in type (int).\nTEST_F(ASSERT_PRED_FORMAT3Test, FunctorOnBuiltInTypeSuccess) {\n  ASSERT_PRED_FORMAT3(PredFormatFunctor3(),\n                      ++n1_,\n                      ++n2_,\n                      ++n3_);\n  finished_ = true;\n}\n\n// Tests a successful ASSERT_PRED_FORMAT3 where the\n// predicate-formatter is a functor on a user-defined type (Bool).\nTEST_F(ASSERT_PRED_FORMAT3Test, FunctorOnUserTypeSuccess) {\n  ASSERT_PRED_FORMAT3(PredFormatFunctor3(),\n       
               Bool(++n1_),\n                      Bool(++n2_),\n                      Bool(++n3_));\n  finished_ = true;\n}\n\n// Tests a failed ASSERT_PRED_FORMAT3 where the\n// predicate-formatter is a function on a built-in type (int).\nTEST_F(ASSERT_PRED_FORMAT3Test, FunctionOnBuiltInTypeFailure) {\n  expected_to_finish_ = false;\n  EXPECT_FATAL_FAILURE({  // NOLINT\n    ASSERT_PRED_FORMAT3(PredFormatFunction3,\n                        n1_++,\n                        n2_++,\n                        n3_++);\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a failed ASSERT_PRED_FORMAT3 where the\n// predicate-formatter is a function on a user-defined type (Bool).\nTEST_F(ASSERT_PRED_FORMAT3Test, FunctionOnUserTypeFailure) {\n  expected_to_finish_ = false;\n  EXPECT_FATAL_FAILURE({  // NOLINT\n    ASSERT_PRED_FORMAT3(PredFormatFunction3,\n                        Bool(n1_++),\n                        Bool(n2_++),\n                        Bool(n3_++));\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a failed ASSERT_PRED_FORMAT3 where the\n// predicate-formatter is a functor on a built-in type (int).\nTEST_F(ASSERT_PRED_FORMAT3Test, FunctorOnBuiltInTypeFailure) {\n  expected_to_finish_ = false;\n  EXPECT_FATAL_FAILURE({  // NOLINT\n    ASSERT_PRED_FORMAT3(PredFormatFunctor3(),\n                        n1_++,\n                        n2_++,\n                        n3_++);\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a failed ASSERT_PRED_FORMAT3 where the\n// predicate-formatter is a functor on a user-defined type (Bool).\nTEST_F(ASSERT_PRED_FORMAT3Test, FunctorOnUserTypeFailure) {\n  expected_to_finish_ = false;\n  EXPECT_FATAL_FAILURE({  // NOLINT\n    ASSERT_PRED_FORMAT3(PredFormatFunctor3(),\n                        Bool(n1_++),\n                        Bool(n2_++),\n                        Bool(n3_++));\n    finished_ = true;\n  }, \"\");\n}\n// Sample functions/functors for testing 4-ary predicate assertions.\n\n// A 4-ary predicate 
function.\ntemplate <typename T1, typename T2, typename T3, typename T4>\nbool PredFunction4(T1 v1, T2 v2, T3 v3, T4 v4) {\n  return v1 + v2 + v3 + v4 > 0;\n}\n\n// The following two functions are needed to circumvent a bug in\n// gcc 2.95.3, which sometimes has problem with the above template\n// function.\nbool PredFunction4Int(int v1, int v2, int v3, int v4) {\n  return v1 + v2 + v3 + v4 > 0;\n}\nbool PredFunction4Bool(Bool v1, Bool v2, Bool v3, Bool v4) {\n  return v1 + v2 + v3 + v4 > 0;\n}\n\n// A 4-ary predicate functor.\nstruct PredFunctor4 {\n  template <typename T1, typename T2, typename T3, typename T4>\n  bool operator()(const T1& v1,\n                  const T2& v2,\n                  const T3& v3,\n                  const T4& v4) {\n    return v1 + v2 + v3 + v4 > 0;\n  }\n};\n\n// A 4-ary predicate-formatter function.\ntemplate <typename T1, typename T2, typename T3, typename T4>\ntesting::AssertionResult PredFormatFunction4(const char* e1,\n                                             const char* e2,\n                                             const char* e3,\n                                             const char* e4,\n                                             const T1& v1,\n                                             const T2& v2,\n                                             const T3& v3,\n                                             const T4& v4) {\n  if (PredFunction4(v1, v2, v3, v4))\n    return testing::AssertionSuccess();\n\n  return testing::AssertionFailure()\n      << e1 << \" + \" << e2 << \" + \" << e3 << \" + \" << e4\n      << \" is expected to be positive, but evaluates to \"\n      << v1 + v2 + v3 + v4 << \".\";\n}\n\n// A 4-ary predicate-formatter functor.\nstruct PredFormatFunctor4 {\n  template <typename T1, typename T2, typename T3, typename T4>\n  testing::AssertionResult operator()(const char* e1,\n                                      const char* e2,\n                                      const char* e3,\n                
                      const char* e4,\n                                      const T1& v1,\n                                      const T2& v2,\n                                      const T3& v3,\n                                      const T4& v4) const {\n    return PredFormatFunction4(e1, e2, e3, e4, v1, v2, v3, v4);\n  }\n};\n\n// Tests for {EXPECT|ASSERT}_PRED_FORMAT4.\n\nclass Predicate4Test : public testing::Test {\n protected:\n  void SetUp() override {\n    expected_to_finish_ = true;\n    finished_ = false;\n    n1_ = n2_ = n3_ = n4_ = 0;\n  }\n\n  void TearDown() override {\n    // Verifies that each of the predicate's arguments was evaluated\n    // exactly once.\n    EXPECT_EQ(1, n1_) <<\n        \"The predicate assertion didn't evaluate argument 2 \"\n        \"exactly once.\";\n    EXPECT_EQ(1, n2_) <<\n        \"The predicate assertion didn't evaluate argument 3 \"\n        \"exactly once.\";\n    EXPECT_EQ(1, n3_) <<\n        \"The predicate assertion didn't evaluate argument 4 \"\n        \"exactly once.\";\n    EXPECT_EQ(1, n4_) <<\n        \"The predicate assertion didn't evaluate argument 5 \"\n        \"exactly once.\";\n\n    // Verifies that the control flow in the test function is expected.\n    if (expected_to_finish_ && !finished_) {\n      FAIL() << \"The predicate assertion unexpactedly aborted the test.\";\n    } else if (!expected_to_finish_ && finished_) {\n      FAIL() << \"The failed predicate assertion didn't abort the test \"\n                \"as expected.\";\n    }\n  }\n\n  // true if and only if the test function is expected to run to finish.\n  static bool expected_to_finish_;\n\n  // true if and only if the test function did run to finish.\n  static bool finished_;\n\n  static int n1_;\n  static int n2_;\n  static int n3_;\n  static int n4_;\n};\n\nbool Predicate4Test::expected_to_finish_;\nbool Predicate4Test::finished_;\nint Predicate4Test::n1_;\nint Predicate4Test::n2_;\nint Predicate4Test::n3_;\nint 
Predicate4Test::n4_;\n\ntypedef Predicate4Test EXPECT_PRED_FORMAT4Test;\ntypedef Predicate4Test ASSERT_PRED_FORMAT4Test;\ntypedef Predicate4Test EXPECT_PRED4Test;\ntypedef Predicate4Test ASSERT_PRED4Test;\n\n// Tests a successful EXPECT_PRED4 where the\n// predicate-formatter is a function on a built-in type (int).\nTEST_F(EXPECT_PRED4Test, FunctionOnBuiltInTypeSuccess) {\n  EXPECT_PRED4(PredFunction4Int,\n               ++n1_,\n               ++n2_,\n               ++n3_,\n               ++n4_);\n  finished_ = true;\n}\n\n// Tests a successful EXPECT_PRED4 where the\n// predicate-formatter is a function on a user-defined type (Bool).\nTEST_F(EXPECT_PRED4Test, FunctionOnUserTypeSuccess) {\n  EXPECT_PRED4(PredFunction4Bool,\n               Bool(++n1_),\n               Bool(++n2_),\n               Bool(++n3_),\n               Bool(++n4_));\n  finished_ = true;\n}\n\n// Tests a successful EXPECT_PRED4 where the\n// predicate-formatter is a functor on a built-in type (int).\nTEST_F(EXPECT_PRED4Test, FunctorOnBuiltInTypeSuccess) {\n  EXPECT_PRED4(PredFunctor4(),\n               ++n1_,\n               ++n2_,\n               ++n3_,\n               ++n4_);\n  finished_ = true;\n}\n\n// Tests a successful EXPECT_PRED4 where the\n// predicate-formatter is a functor on a user-defined type (Bool).\nTEST_F(EXPECT_PRED4Test, FunctorOnUserTypeSuccess) {\n  EXPECT_PRED4(PredFunctor4(),\n               Bool(++n1_),\n               Bool(++n2_),\n               Bool(++n3_),\n               Bool(++n4_));\n  finished_ = true;\n}\n\n// Tests a failed EXPECT_PRED4 where the\n// predicate-formatter is a function on a built-in type (int).\nTEST_F(EXPECT_PRED4Test, FunctionOnBuiltInTypeFailure) {\n  EXPECT_NONFATAL_FAILURE({  // NOLINT\n    EXPECT_PRED4(PredFunction4Int,\n                 n1_++,\n                 n2_++,\n                 n3_++,\n                 n4_++);\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a failed EXPECT_PRED4 where the\n// predicate-formatter is a function 
on a user-defined type (Bool).\nTEST_F(EXPECT_PRED4Test, FunctionOnUserTypeFailure) {\n  EXPECT_NONFATAL_FAILURE({  // NOLINT\n    EXPECT_PRED4(PredFunction4Bool,\n                 Bool(n1_++),\n                 Bool(n2_++),\n                 Bool(n3_++),\n                 Bool(n4_++));\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a failed EXPECT_PRED4 where the\n// predicate-formatter is a functor on a built-in type (int).\nTEST_F(EXPECT_PRED4Test, FunctorOnBuiltInTypeFailure) {\n  EXPECT_NONFATAL_FAILURE({  // NOLINT\n    EXPECT_PRED4(PredFunctor4(),\n                 n1_++,\n                 n2_++,\n                 n3_++,\n                 n4_++);\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a failed EXPECT_PRED4 where the\n// predicate-formatter is a functor on a user-defined type (Bool).\nTEST_F(EXPECT_PRED4Test, FunctorOnUserTypeFailure) {\n  EXPECT_NONFATAL_FAILURE({  // NOLINT\n    EXPECT_PRED4(PredFunctor4(),\n                 Bool(n1_++),\n                 Bool(n2_++),\n                 Bool(n3_++),\n                 Bool(n4_++));\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a successful ASSERT_PRED4 where the\n// predicate-formatter is a function on a built-in type (int).\nTEST_F(ASSERT_PRED4Test, FunctionOnBuiltInTypeSuccess) {\n  ASSERT_PRED4(PredFunction4Int,\n               ++n1_,\n               ++n2_,\n               ++n3_,\n               ++n4_);\n  finished_ = true;\n}\n\n// Tests a successful ASSERT_PRED4 where the\n// predicate-formatter is a function on a user-defined type (Bool).\nTEST_F(ASSERT_PRED4Test, FunctionOnUserTypeSuccess) {\n  ASSERT_PRED4(PredFunction4Bool,\n               Bool(++n1_),\n               Bool(++n2_),\n               Bool(++n3_),\n               Bool(++n4_));\n  finished_ = true;\n}\n\n// Tests a successful ASSERT_PRED4 where the\n// predicate-formatter is a functor on a built-in type (int).\nTEST_F(ASSERT_PRED4Test, FunctorOnBuiltInTypeSuccess) {\n  ASSERT_PRED4(PredFunctor4(),\n               
++n1_,\n               ++n2_,\n               ++n3_,\n               ++n4_);\n  finished_ = true;\n}\n\n// Tests a successful ASSERT_PRED4 where the\n// predicate-formatter is a functor on a user-defined type (Bool).\nTEST_F(ASSERT_PRED4Test, FunctorOnUserTypeSuccess) {\n  ASSERT_PRED4(PredFunctor4(),\n               Bool(++n1_),\n               Bool(++n2_),\n               Bool(++n3_),\n               Bool(++n4_));\n  finished_ = true;\n}\n\n// Tests a failed ASSERT_PRED4 where the\n// predicate-formatter is a function on a built-in type (int).\nTEST_F(ASSERT_PRED4Test, FunctionOnBuiltInTypeFailure) {\n  expected_to_finish_ = false;\n  EXPECT_FATAL_FAILURE({  // NOLINT\n    ASSERT_PRED4(PredFunction4Int,\n                 n1_++,\n                 n2_++,\n                 n3_++,\n                 n4_++);\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a failed ASSERT_PRED4 where the\n// predicate-formatter is a function on a user-defined type (Bool).\nTEST_F(ASSERT_PRED4Test, FunctionOnUserTypeFailure) {\n  expected_to_finish_ = false;\n  EXPECT_FATAL_FAILURE({  // NOLINT\n    ASSERT_PRED4(PredFunction4Bool,\n                 Bool(n1_++),\n                 Bool(n2_++),\n                 Bool(n3_++),\n                 Bool(n4_++));\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a failed ASSERT_PRED4 where the\n// predicate-formatter is a functor on a built-in type (int).\nTEST_F(ASSERT_PRED4Test, FunctorOnBuiltInTypeFailure) {\n  expected_to_finish_ = false;\n  EXPECT_FATAL_FAILURE({  // NOLINT\n    ASSERT_PRED4(PredFunctor4(),\n                 n1_++,\n                 n2_++,\n                 n3_++,\n                 n4_++);\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a failed ASSERT_PRED4 where the\n// predicate-formatter is a functor on a user-defined type (Bool).\nTEST_F(ASSERT_PRED4Test, FunctorOnUserTypeFailure) {\n  expected_to_finish_ = false;\n  EXPECT_FATAL_FAILURE({  // NOLINT\n    ASSERT_PRED4(PredFunctor4(),\n                 
Bool(n1_++),\n                 Bool(n2_++),\n                 Bool(n3_++),\n                 Bool(n4_++));\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a successful EXPECT_PRED_FORMAT4 where the\n// predicate-formatter is a function on a built-in type (int).\nTEST_F(EXPECT_PRED_FORMAT4Test, FunctionOnBuiltInTypeSuccess) {\n  EXPECT_PRED_FORMAT4(PredFormatFunction4,\n                      ++n1_,\n                      ++n2_,\n                      ++n3_,\n                      ++n4_);\n  finished_ = true;\n}\n\n// Tests a successful EXPECT_PRED_FORMAT4 where the\n// predicate-formatter is a function on a user-defined type (Bool).\nTEST_F(EXPECT_PRED_FORMAT4Test, FunctionOnUserTypeSuccess) {\n  EXPECT_PRED_FORMAT4(PredFormatFunction4,\n                      Bool(++n1_),\n                      Bool(++n2_),\n                      Bool(++n3_),\n                      Bool(++n4_));\n  finished_ = true;\n}\n\n// Tests a successful EXPECT_PRED_FORMAT4 where the\n// predicate-formatter is a functor on a built-in type (int).\nTEST_F(EXPECT_PRED_FORMAT4Test, FunctorOnBuiltInTypeSuccess) {\n  EXPECT_PRED_FORMAT4(PredFormatFunctor4(),\n                      ++n1_,\n                      ++n2_,\n                      ++n3_,\n                      ++n4_);\n  finished_ = true;\n}\n\n// Tests a successful EXPECT_PRED_FORMAT4 where the\n// predicate-formatter is a functor on a user-defined type (Bool).\nTEST_F(EXPECT_PRED_FORMAT4Test, FunctorOnUserTypeSuccess) {\n  EXPECT_PRED_FORMAT4(PredFormatFunctor4(),\n                      Bool(++n1_),\n                      Bool(++n2_),\n                      Bool(++n3_),\n                      Bool(++n4_));\n  finished_ = true;\n}\n\n// Tests a failed EXPECT_PRED_FORMAT4 where the\n// predicate-formatter is a function on a built-in type (int).\nTEST_F(EXPECT_PRED_FORMAT4Test, FunctionOnBuiltInTypeFailure) {\n  EXPECT_NONFATAL_FAILURE({  // NOLINT\n    EXPECT_PRED_FORMAT4(PredFormatFunction4,\n                        n1_++,\n             
           n2_++,\n                        n3_++,\n                        n4_++);\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a failed EXPECT_PRED_FORMAT4 where the\n// predicate-formatter is a function on a user-defined type (Bool).\nTEST_F(EXPECT_PRED_FORMAT4Test, FunctionOnUserTypeFailure) {\n  EXPECT_NONFATAL_FAILURE({  // NOLINT\n    EXPECT_PRED_FORMAT4(PredFormatFunction4,\n                        Bool(n1_++),\n                        Bool(n2_++),\n                        Bool(n3_++),\n                        Bool(n4_++));\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a failed EXPECT_PRED_FORMAT4 where the\n// predicate-formatter is a functor on a built-in type (int).\nTEST_F(EXPECT_PRED_FORMAT4Test, FunctorOnBuiltInTypeFailure) {\n  EXPECT_NONFATAL_FAILURE({  // NOLINT\n    EXPECT_PRED_FORMAT4(PredFormatFunctor4(),\n                        n1_++,\n                        n2_++,\n                        n3_++,\n                        n4_++);\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a failed EXPECT_PRED_FORMAT4 where the\n// predicate-formatter is a functor on a user-defined type (Bool).\nTEST_F(EXPECT_PRED_FORMAT4Test, FunctorOnUserTypeFailure) {\n  EXPECT_NONFATAL_FAILURE({  // NOLINT\n    EXPECT_PRED_FORMAT4(PredFormatFunctor4(),\n                        Bool(n1_++),\n                        Bool(n2_++),\n                        Bool(n3_++),\n                        Bool(n4_++));\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a successful ASSERT_PRED_FORMAT4 where the\n// predicate-formatter is a function on a built-in type (int).\nTEST_F(ASSERT_PRED_FORMAT4Test, FunctionOnBuiltInTypeSuccess) {\n  ASSERT_PRED_FORMAT4(PredFormatFunction4,\n                      ++n1_,\n                      ++n2_,\n                      ++n3_,\n                      ++n4_);\n  finished_ = true;\n}\n\n// Tests a successful ASSERT_PRED_FORMAT4 where the\n// predicate-formatter is a function on a user-defined type 
(Bool).\nTEST_F(ASSERT_PRED_FORMAT4Test, FunctionOnUserTypeSuccess) {\n  ASSERT_PRED_FORMAT4(PredFormatFunction4,\n                      Bool(++n1_),\n                      Bool(++n2_),\n                      Bool(++n3_),\n                      Bool(++n4_));\n  finished_ = true;\n}\n\n// Tests a successful ASSERT_PRED_FORMAT4 where the\n// predicate-formatter is a functor on a built-in type (int).\nTEST_F(ASSERT_PRED_FORMAT4Test, FunctorOnBuiltInTypeSuccess) {\n  ASSERT_PRED_FORMAT4(PredFormatFunctor4(),\n                      ++n1_,\n                      ++n2_,\n                      ++n3_,\n                      ++n4_);\n  finished_ = true;\n}\n\n// Tests a successful ASSERT_PRED_FORMAT4 where the\n// predicate-formatter is a functor on a user-defined type (Bool).\nTEST_F(ASSERT_PRED_FORMAT4Test, FunctorOnUserTypeSuccess) {\n  ASSERT_PRED_FORMAT4(PredFormatFunctor4(),\n                      Bool(++n1_),\n                      Bool(++n2_),\n                      Bool(++n3_),\n                      Bool(++n4_));\n  finished_ = true;\n}\n\n// Tests a failed ASSERT_PRED_FORMAT4 where the\n// predicate-formatter is a function on a built-in type (int).\nTEST_F(ASSERT_PRED_FORMAT4Test, FunctionOnBuiltInTypeFailure) {\n  expected_to_finish_ = false;\n  EXPECT_FATAL_FAILURE({  // NOLINT\n    ASSERT_PRED_FORMAT4(PredFormatFunction4,\n                        n1_++,\n                        n2_++,\n                        n3_++,\n                        n4_++);\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a failed ASSERT_PRED_FORMAT4 where the\n// predicate-formatter is a function on a user-defined type (Bool).\nTEST_F(ASSERT_PRED_FORMAT4Test, FunctionOnUserTypeFailure) {\n  expected_to_finish_ = false;\n  EXPECT_FATAL_FAILURE({  // NOLINT\n    ASSERT_PRED_FORMAT4(PredFormatFunction4,\n                        Bool(n1_++),\n                        Bool(n2_++),\n                        Bool(n3_++),\n                        Bool(n4_++));\n    finished_ = true;\n  }, 
\"\");\n}\n\n// Tests a failed ASSERT_PRED_FORMAT4 where the\n// predicate-formatter is a functor on a built-in type (int).\nTEST_F(ASSERT_PRED_FORMAT4Test, FunctorOnBuiltInTypeFailure) {\n  expected_to_finish_ = false;\n  EXPECT_FATAL_FAILURE({  // NOLINT\n    ASSERT_PRED_FORMAT4(PredFormatFunctor4(),\n                        n1_++,\n                        n2_++,\n                        n3_++,\n                        n4_++);\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a failed ASSERT_PRED_FORMAT4 where the\n// predicate-formatter is a functor on a user-defined type (Bool).\nTEST_F(ASSERT_PRED_FORMAT4Test, FunctorOnUserTypeFailure) {\n  expected_to_finish_ = false;\n  EXPECT_FATAL_FAILURE({  // NOLINT\n    ASSERT_PRED_FORMAT4(PredFormatFunctor4(),\n                        Bool(n1_++),\n                        Bool(n2_++),\n                        Bool(n3_++),\n                        Bool(n4_++));\n    finished_ = true;\n  }, \"\");\n}\n// Sample functions/functors for testing 5-ary predicate assertions.\n\n// A 5-ary predicate function.\ntemplate <typename T1, typename T2, typename T3, typename T4, typename T5>\nbool PredFunction5(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5) {\n  return v1 + v2 + v3 + v4 + v5 > 0;\n}\n\n// The following two functions are needed to circumvent a bug in\n// gcc 2.95.3, which sometimes has problem with the above template\n// function.\nbool PredFunction5Int(int v1, int v2, int v3, int v4, int v5) {\n  return v1 + v2 + v3 + v4 + v5 > 0;\n}\nbool PredFunction5Bool(Bool v1, Bool v2, Bool v3, Bool v4, Bool v5) {\n  return v1 + v2 + v3 + v4 + v5 > 0;\n}\n\n// A 5-ary predicate functor.\nstruct PredFunctor5 {\n  template <typename T1, typename T2, typename T3, typename T4, typename T5>\n  bool operator()(const T1& v1,\n                  const T2& v2,\n                  const T3& v3,\n                  const T4& v4,\n                  const T5& v5) {\n    return v1 + v2 + v3 + v4 + v5 > 0;\n  }\n};\n\n// A 5-ary predicate-formatter 
function.\ntemplate <typename T1, typename T2, typename T3, typename T4, typename T5>\ntesting::AssertionResult PredFormatFunction5(const char* e1,\n                                             const char* e2,\n                                             const char* e3,\n                                             const char* e4,\n                                             const char* e5,\n                                             const T1& v1,\n                                             const T2& v2,\n                                             const T3& v3,\n                                             const T4& v4,\n                                             const T5& v5) {\n  if (PredFunction5(v1, v2, v3, v4, v5))\n    return testing::AssertionSuccess();\n\n  return testing::AssertionFailure()\n      << e1 << \" + \" << e2 << \" + \" << e3 << \" + \" << e4 << \" + \" << e5\n      << \" is expected to be positive, but evaluates to \"\n      << v1 + v2 + v3 + v4 + v5 << \".\";\n}\n\n// A 5-ary predicate-formatter functor.\nstruct PredFormatFunctor5 {\n  template <typename T1, typename T2, typename T3, typename T4, typename T5>\n  testing::AssertionResult operator()(const char* e1,\n                                      const char* e2,\n                                      const char* e3,\n                                      const char* e4,\n                                      const char* e5,\n                                      const T1& v1,\n                                      const T2& v2,\n                                      const T3& v3,\n                                      const T4& v4,\n                                      const T5& v5) const {\n    return PredFormatFunction5(e1, e2, e3, e4, e5, v1, v2, v3, v4, v5);\n  }\n};\n\n// Tests for {EXPECT|ASSERT}_PRED_FORMAT5.\n\nclass Predicate5Test : public testing::Test {\n protected:\n  void SetUp() override {\n    expected_to_finish_ = true;\n    finished_ = false;\n    n1_ = n2_ = 
n3_ = n4_ = n5_ = 0;\n  }\n\n  void TearDown() override {\n    // Verifies that each of the predicate's arguments was evaluated\n    // exactly once.\n    EXPECT_EQ(1, n1_) <<\n        \"The predicate assertion didn't evaluate argument 2 \"\n        \"exactly once.\";\n    EXPECT_EQ(1, n2_) <<\n        \"The predicate assertion didn't evaluate argument 3 \"\n        \"exactly once.\";\n    EXPECT_EQ(1, n3_) <<\n        \"The predicate assertion didn't evaluate argument 4 \"\n        \"exactly once.\";\n    EXPECT_EQ(1, n4_) <<\n        \"The predicate assertion didn't evaluate argument 5 \"\n        \"exactly once.\";\n    EXPECT_EQ(1, n5_) <<\n        \"The predicate assertion didn't evaluate argument 6 \"\n        \"exactly once.\";\n\n    // Verifies that the control flow in the test function is expected.\n    if (expected_to_finish_ && !finished_) {\n      FAIL() << \"The predicate assertion unexpactedly aborted the test.\";\n    } else if (!expected_to_finish_ && finished_) {\n      FAIL() << \"The failed predicate assertion didn't abort the test \"\n                \"as expected.\";\n    }\n  }\n\n  // true if and only if the test function is expected to run to finish.\n  static bool expected_to_finish_;\n\n  // true if and only if the test function did run to finish.\n  static bool finished_;\n\n  static int n1_;\n  static int n2_;\n  static int n3_;\n  static int n4_;\n  static int n5_;\n};\n\nbool Predicate5Test::expected_to_finish_;\nbool Predicate5Test::finished_;\nint Predicate5Test::n1_;\nint Predicate5Test::n2_;\nint Predicate5Test::n3_;\nint Predicate5Test::n4_;\nint Predicate5Test::n5_;\n\ntypedef Predicate5Test EXPECT_PRED_FORMAT5Test;\ntypedef Predicate5Test ASSERT_PRED_FORMAT5Test;\ntypedef Predicate5Test EXPECT_PRED5Test;\ntypedef Predicate5Test ASSERT_PRED5Test;\n\n// Tests a successful EXPECT_PRED5 where the\n// predicate-formatter is a function on a built-in type (int).\nTEST_F(EXPECT_PRED5Test, FunctionOnBuiltInTypeSuccess) {\n  
EXPECT_PRED5(PredFunction5Int,\n               ++n1_,\n               ++n2_,\n               ++n3_,\n               ++n4_,\n               ++n5_);\n  finished_ = true;\n}\n\n// Tests a successful EXPECT_PRED5 where the\n// predicate-formatter is a function on a user-defined type (Bool).\nTEST_F(EXPECT_PRED5Test, FunctionOnUserTypeSuccess) {\n  EXPECT_PRED5(PredFunction5Bool,\n               Bool(++n1_),\n               Bool(++n2_),\n               Bool(++n3_),\n               Bool(++n4_),\n               Bool(++n5_));\n  finished_ = true;\n}\n\n// Tests a successful EXPECT_PRED5 where the\n// predicate-formatter is a functor on a built-in type (int).\nTEST_F(EXPECT_PRED5Test, FunctorOnBuiltInTypeSuccess) {\n  EXPECT_PRED5(PredFunctor5(),\n               ++n1_,\n               ++n2_,\n               ++n3_,\n               ++n4_,\n               ++n5_);\n  finished_ = true;\n}\n\n// Tests a successful EXPECT_PRED5 where the\n// predicate-formatter is a functor on a user-defined type (Bool).\nTEST_F(EXPECT_PRED5Test, FunctorOnUserTypeSuccess) {\n  EXPECT_PRED5(PredFunctor5(),\n               Bool(++n1_),\n               Bool(++n2_),\n               Bool(++n3_),\n               Bool(++n4_),\n               Bool(++n5_));\n  finished_ = true;\n}\n\n// Tests a failed EXPECT_PRED5 where the\n// predicate-formatter is a function on a built-in type (int).\nTEST_F(EXPECT_PRED5Test, FunctionOnBuiltInTypeFailure) {\n  EXPECT_NONFATAL_FAILURE({  // NOLINT\n    EXPECT_PRED5(PredFunction5Int,\n                 n1_++,\n                 n2_++,\n                 n3_++,\n                 n4_++,\n                 n5_++);\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a failed EXPECT_PRED5 where the\n// predicate-formatter is a function on a user-defined type (Bool).\nTEST_F(EXPECT_PRED5Test, FunctionOnUserTypeFailure) {\n  EXPECT_NONFATAL_FAILURE({  // NOLINT\n    EXPECT_PRED5(PredFunction5Bool,\n                 Bool(n1_++),\n                 Bool(n2_++),\n                 
Bool(n3_++),\n                 Bool(n4_++),\n                 Bool(n5_++));\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a failed EXPECT_PRED5 where the\n// predicate-formatter is a functor on a built-in type (int).\nTEST_F(EXPECT_PRED5Test, FunctorOnBuiltInTypeFailure) {\n  EXPECT_NONFATAL_FAILURE({  // NOLINT\n    EXPECT_PRED5(PredFunctor5(),\n                 n1_++,\n                 n2_++,\n                 n3_++,\n                 n4_++,\n                 n5_++);\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a failed EXPECT_PRED5 where the\n// predicate-formatter is a functor on a user-defined type (Bool).\nTEST_F(EXPECT_PRED5Test, FunctorOnUserTypeFailure) {\n  EXPECT_NONFATAL_FAILURE({  // NOLINT\n    EXPECT_PRED5(PredFunctor5(),\n                 Bool(n1_++),\n                 Bool(n2_++),\n                 Bool(n3_++),\n                 Bool(n4_++),\n                 Bool(n5_++));\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a successful ASSERT_PRED5 where the\n// predicate-formatter is a function on a built-in type (int).\nTEST_F(ASSERT_PRED5Test, FunctionOnBuiltInTypeSuccess) {\n  ASSERT_PRED5(PredFunction5Int,\n               ++n1_,\n               ++n2_,\n               ++n3_,\n               ++n4_,\n               ++n5_);\n  finished_ = true;\n}\n\n// Tests a successful ASSERT_PRED5 where the\n// predicate-formatter is a function on a user-defined type (Bool).\nTEST_F(ASSERT_PRED5Test, FunctionOnUserTypeSuccess) {\n  ASSERT_PRED5(PredFunction5Bool,\n               Bool(++n1_),\n               Bool(++n2_),\n               Bool(++n3_),\n               Bool(++n4_),\n               Bool(++n5_));\n  finished_ = true;\n}\n\n// Tests a successful ASSERT_PRED5 where the\n// predicate-formatter is a functor on a built-in type (int).\nTEST_F(ASSERT_PRED5Test, FunctorOnBuiltInTypeSuccess) {\n  ASSERT_PRED5(PredFunctor5(),\n               ++n1_,\n               ++n2_,\n               ++n3_,\n               ++n4_,\n               ++n5_);\n  
finished_ = true;\n}\n\n// Tests a successful ASSERT_PRED5 where the\n// predicate-formatter is a functor on a user-defined type (Bool).\nTEST_F(ASSERT_PRED5Test, FunctorOnUserTypeSuccess) {\n  ASSERT_PRED5(PredFunctor5(),\n               Bool(++n1_),\n               Bool(++n2_),\n               Bool(++n3_),\n               Bool(++n4_),\n               Bool(++n5_));\n  finished_ = true;\n}\n\n// Tests a failed ASSERT_PRED5 where the\n// predicate-formatter is a function on a built-in type (int).\nTEST_F(ASSERT_PRED5Test, FunctionOnBuiltInTypeFailure) {\n  expected_to_finish_ = false;\n  EXPECT_FATAL_FAILURE({  // NOLINT\n    ASSERT_PRED5(PredFunction5Int,\n                 n1_++,\n                 n2_++,\n                 n3_++,\n                 n4_++,\n                 n5_++);\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a failed ASSERT_PRED5 where the\n// predicate-formatter is a function on a user-defined type (Bool).\nTEST_F(ASSERT_PRED5Test, FunctionOnUserTypeFailure) {\n  expected_to_finish_ = false;\n  EXPECT_FATAL_FAILURE({  // NOLINT\n    ASSERT_PRED5(PredFunction5Bool,\n                 Bool(n1_++),\n                 Bool(n2_++),\n                 Bool(n3_++),\n                 Bool(n4_++),\n                 Bool(n5_++));\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a failed ASSERT_PRED5 where the\n// predicate-formatter is a functor on a built-in type (int).\nTEST_F(ASSERT_PRED5Test, FunctorOnBuiltInTypeFailure) {\n  expected_to_finish_ = false;\n  EXPECT_FATAL_FAILURE({  // NOLINT\n    ASSERT_PRED5(PredFunctor5(),\n                 n1_++,\n                 n2_++,\n                 n3_++,\n                 n4_++,\n                 n5_++);\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a failed ASSERT_PRED5 where the\n// predicate-formatter is a functor on a user-defined type (Bool).\nTEST_F(ASSERT_PRED5Test, FunctorOnUserTypeFailure) {\n  expected_to_finish_ = false;\n  EXPECT_FATAL_FAILURE({  // NOLINT\n    
ASSERT_PRED5(PredFunctor5(),\n                 Bool(n1_++),\n                 Bool(n2_++),\n                 Bool(n3_++),\n                 Bool(n4_++),\n                 Bool(n5_++));\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a successful EXPECT_PRED_FORMAT5 where the\n// predicate-formatter is a function on a built-in type (int).\nTEST_F(EXPECT_PRED_FORMAT5Test, FunctionOnBuiltInTypeSuccess) {\n  EXPECT_PRED_FORMAT5(PredFormatFunction5,\n                      ++n1_,\n                      ++n2_,\n                      ++n3_,\n                      ++n4_,\n                      ++n5_);\n  finished_ = true;\n}\n\n// Tests a successful EXPECT_PRED_FORMAT5 where the\n// predicate-formatter is a function on a user-defined type (Bool).\nTEST_F(EXPECT_PRED_FORMAT5Test, FunctionOnUserTypeSuccess) {\n  EXPECT_PRED_FORMAT5(PredFormatFunction5,\n                      Bool(++n1_),\n                      Bool(++n2_),\n                      Bool(++n3_),\n                      Bool(++n4_),\n                      Bool(++n5_));\n  finished_ = true;\n}\n\n// Tests a successful EXPECT_PRED_FORMAT5 where the\n// predicate-formatter is a functor on a built-in type (int).\nTEST_F(EXPECT_PRED_FORMAT5Test, FunctorOnBuiltInTypeSuccess) {\n  EXPECT_PRED_FORMAT5(PredFormatFunctor5(),\n                      ++n1_,\n                      ++n2_,\n                      ++n3_,\n                      ++n4_,\n                      ++n5_);\n  finished_ = true;\n}\n\n// Tests a successful EXPECT_PRED_FORMAT5 where the\n// predicate-formatter is a functor on a user-defined type (Bool).\nTEST_F(EXPECT_PRED_FORMAT5Test, FunctorOnUserTypeSuccess) {\n  EXPECT_PRED_FORMAT5(PredFormatFunctor5(),\n                      Bool(++n1_),\n                      Bool(++n2_),\n                      Bool(++n3_),\n                      Bool(++n4_),\n                      Bool(++n5_));\n  finished_ = true;\n}\n\n// Tests a failed EXPECT_PRED_FORMAT5 where the\n// predicate-formatter is a function on a 
built-in type (int).\nTEST_F(EXPECT_PRED_FORMAT5Test, FunctionOnBuiltInTypeFailure) {\n  EXPECT_NONFATAL_FAILURE({  // NOLINT\n    EXPECT_PRED_FORMAT5(PredFormatFunction5,\n                        n1_++,\n                        n2_++,\n                        n3_++,\n                        n4_++,\n                        n5_++);\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a failed EXPECT_PRED_FORMAT5 where the\n// predicate-formatter is a function on a user-defined type (Bool).\nTEST_F(EXPECT_PRED_FORMAT5Test, FunctionOnUserTypeFailure) {\n  EXPECT_NONFATAL_FAILURE({  // NOLINT\n    EXPECT_PRED_FORMAT5(PredFormatFunction5,\n                        Bool(n1_++),\n                        Bool(n2_++),\n                        Bool(n3_++),\n                        Bool(n4_++),\n                        Bool(n5_++));\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a failed EXPECT_PRED_FORMAT5 where the\n// predicate-formatter is a functor on a built-in type (int).\nTEST_F(EXPECT_PRED_FORMAT5Test, FunctorOnBuiltInTypeFailure) {\n  EXPECT_NONFATAL_FAILURE({  // NOLINT\n    EXPECT_PRED_FORMAT5(PredFormatFunctor5(),\n                        n1_++,\n                        n2_++,\n                        n3_++,\n                        n4_++,\n                        n5_++);\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a failed EXPECT_PRED_FORMAT5 where the\n// predicate-formatter is a functor on a user-defined type (Bool).\nTEST_F(EXPECT_PRED_FORMAT5Test, FunctorOnUserTypeFailure) {\n  EXPECT_NONFATAL_FAILURE({  // NOLINT\n    EXPECT_PRED_FORMAT5(PredFormatFunctor5(),\n                        Bool(n1_++),\n                        Bool(n2_++),\n                        Bool(n3_++),\n                        Bool(n4_++),\n                        Bool(n5_++));\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a successful ASSERT_PRED_FORMAT5 where the\n// predicate-formatter is a function on a built-in type (int).\nTEST_F(ASSERT_PRED_FORMAT5Test, 
FunctionOnBuiltInTypeSuccess) {\n  ASSERT_PRED_FORMAT5(PredFormatFunction5,\n                      ++n1_,\n                      ++n2_,\n                      ++n3_,\n                      ++n4_,\n                      ++n5_);\n  finished_ = true;\n}\n\n// Tests a successful ASSERT_PRED_FORMAT5 where the\n// predicate-formatter is a function on a user-defined type (Bool).\nTEST_F(ASSERT_PRED_FORMAT5Test, FunctionOnUserTypeSuccess) {\n  ASSERT_PRED_FORMAT5(PredFormatFunction5,\n                      Bool(++n1_),\n                      Bool(++n2_),\n                      Bool(++n3_),\n                      Bool(++n4_),\n                      Bool(++n5_));\n  finished_ = true;\n}\n\n// Tests a successful ASSERT_PRED_FORMAT5 where the\n// predicate-formatter is a functor on a built-in type (int).\nTEST_F(ASSERT_PRED_FORMAT5Test, FunctorOnBuiltInTypeSuccess) {\n  ASSERT_PRED_FORMAT5(PredFormatFunctor5(),\n                      ++n1_,\n                      ++n2_,\n                      ++n3_,\n                      ++n4_,\n                      ++n5_);\n  finished_ = true;\n}\n\n// Tests a successful ASSERT_PRED_FORMAT5 where the\n// predicate-formatter is a functor on a user-defined type (Bool).\nTEST_F(ASSERT_PRED_FORMAT5Test, FunctorOnUserTypeSuccess) {\n  ASSERT_PRED_FORMAT5(PredFormatFunctor5(),\n                      Bool(++n1_),\n                      Bool(++n2_),\n                      Bool(++n3_),\n                      Bool(++n4_),\n                      Bool(++n5_));\n  finished_ = true;\n}\n\n// Tests a failed ASSERT_PRED_FORMAT5 where the\n// predicate-formatter is a function on a built-in type (int).\nTEST_F(ASSERT_PRED_FORMAT5Test, FunctionOnBuiltInTypeFailure) {\n  expected_to_finish_ = false;\n  EXPECT_FATAL_FAILURE({  // NOLINT\n    ASSERT_PRED_FORMAT5(PredFormatFunction5,\n                        n1_++,\n                        n2_++,\n                        n3_++,\n                        n4_++,\n                        n5_++);\n    finished_ = 
true;\n  }, \"\");\n}\n\n// Tests a failed ASSERT_PRED_FORMAT5 where the\n// predicate-formatter is a function on a user-defined type (Bool).\nTEST_F(ASSERT_PRED_FORMAT5Test, FunctionOnUserTypeFailure) {\n  expected_to_finish_ = false;\n  EXPECT_FATAL_FAILURE({  // NOLINT\n    ASSERT_PRED_FORMAT5(PredFormatFunction5,\n                        Bool(n1_++),\n                        Bool(n2_++),\n                        Bool(n3_++),\n                        Bool(n4_++),\n                        Bool(n5_++));\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a failed ASSERT_PRED_FORMAT5 where the\n// predicate-formatter is a functor on a built-in type (int).\nTEST_F(ASSERT_PRED_FORMAT5Test, FunctorOnBuiltInTypeFailure) {\n  expected_to_finish_ = false;\n  EXPECT_FATAL_FAILURE({  // NOLINT\n    ASSERT_PRED_FORMAT5(PredFormatFunctor5(),\n                        n1_++,\n                        n2_++,\n                        n3_++,\n                        n4_++,\n                        n5_++);\n    finished_ = true;\n  }, \"\");\n}\n\n// Tests a failed ASSERT_PRED_FORMAT5 where the\n// predicate-formatter is a functor on a user-defined type (Bool).\nTEST_F(ASSERT_PRED_FORMAT5Test, FunctorOnUserTypeFailure) {\n  expected_to_finish_ = false;\n  EXPECT_FATAL_FAILURE({  // NOLINT\n    ASSERT_PRED_FORMAT5(PredFormatFunctor5(),\n                        Bool(n1_++),\n                        Bool(n2_++),\n                        Bool(n3_++),\n                        Bool(n4_++),\n                        Bool(n5_++));\n    finished_ = true;\n  }, \"\");\n}\n"
  },
  {
    "path": "test/googletest/test/gtest_premature_exit_test.cc",
    "content": "// Copyright 2013, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n//\n// Tests that Google Test manipulates the premature-exit-detection\n// file correctly.\n\n#include <stdio.h>\n\n#include \"gtest/gtest.h\"\n\nusing ::testing::InitGoogleTest;\nusing ::testing::Test;\nusing ::testing::internal::posix::GetEnv;\nusing ::testing::internal::posix::Stat;\nusing ::testing::internal::posix::StatStruct;\n\nnamespace {\n\nclass PrematureExitTest : public Test {\n public:\n  // Returns true if and only if the given file exists.\n  static bool FileExists(const char* filepath) {\n    StatStruct stat;\n    return Stat(filepath, &stat) == 0;\n  }\n\n protected:\n  PrematureExitTest() {\n    premature_exit_file_path_ = GetEnv(\"TEST_PREMATURE_EXIT_FILE\");\n\n    // Normalize NULL to \"\" for ease of handling.\n    if (premature_exit_file_path_ == nullptr) {\n      premature_exit_file_path_ = \"\";\n    }\n  }\n\n  // Returns true if and only if the premature-exit file exists.\n  bool PrematureExitFileExists() const {\n    return FileExists(premature_exit_file_path_);\n  }\n\n  const char* premature_exit_file_path_;\n};\n\ntypedef PrematureExitTest PrematureExitDeathTest;\n\n// Tests that:\n//   - the premature-exit file exists during the execution of a\n//     death test (EXPECT_DEATH*), and\n//   - a death test doesn't interfere with the main test process's\n//     handling of the premature-exit file.\nTEST_F(PrematureExitDeathTest, FileExistsDuringExecutionOfDeathTest) {\n  if (*premature_exit_file_path_ 
== '\\0') {\n    return;\n  }\n\n  EXPECT_DEATH_IF_SUPPORTED({\n      // If the file exists, crash the process such that the main test\n      // process will catch the (expected) crash and report a success;\n      // otherwise don't crash, which will cause the main test process\n      // to report that the death test has failed.\n      if (PrematureExitFileExists()) {\n        exit(1);\n      }\n    }, \"\");\n}\n\n// Tests that the premature-exit file exists during the execution of a\n// normal (non-death) test.\nTEST_F(PrematureExitTest, PrematureExitFileExistsDuringTestExecution) {\n  if (*premature_exit_file_path_ == '\\0') {\n    return;\n  }\n\n  EXPECT_TRUE(PrematureExitFileExists())\n      << \" file \" << premature_exit_file_path_\n      << \" should exist during test execution, but doesn't.\";\n}\n\n}  // namespace\n\nint main(int argc, char **argv) {\n  InitGoogleTest(&argc, argv);\n  const int exit_code = RUN_ALL_TESTS();\n\n  // Test that the premature-exit file is deleted upon return from\n  // RUN_ALL_TESTS().\n  const char* const filepath = GetEnv(\"TEST_PREMATURE_EXIT_FILE\");\n  if (filepath != nullptr && *filepath != '\\0') {\n    if (PrematureExitTest::FileExists(filepath)) {\n      printf(\n          \"File %s shouldn't exist after the test program finishes, but does.\",\n          filepath);\n      return 1;\n    }\n  }\n\n  return exit_code;\n}\n"
  },
  {
    "path": "test/googletest/test/gtest_prod_test.cc",
    "content": "// Copyright 2006, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n//\n// Unit test for gtest_prod.h.\n\n#include \"production.h\"\n#include \"gtest/gtest.h\"\n\n// Tests that private members can be accessed from a TEST declared as\n// a friend of the class.\nTEST(PrivateCodeTest, CanAccessPrivateMembers) {\n  PrivateCode a;\n  EXPECT_EQ(0, a.x_);\n\n  a.set_x(1);\n  EXPECT_EQ(1, a.x_);\n}\n\ntypedef testing::Test PrivateCodeFixtureTest;\n\n// Tests that private members can be accessed from a TEST_F declared\n// as a friend of the class.\nTEST_F(PrivateCodeFixtureTest, CanAccessPrivateMembers) {\n  PrivateCode a;\n  EXPECT_EQ(0, a.x_);\n\n  a.set_x(2);\n  EXPECT_EQ(2, a.x_);\n}\n"
  },
  {
    "path": "test/googletest/test/gtest_repeat_test.cc",
    "content": "// Copyright 2008, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\n// Tests the --gtest_repeat=number flag.\n\n#include <stdlib.h>\n#include <iostream>\n#include \"gtest/gtest.h\"\n#include \"src/gtest-internal-inl.h\"\n\nnamespace testing {\n\nGTEST_DECLARE_string_(death_test_style);\nGTEST_DECLARE_string_(filter);\nGTEST_DECLARE_int32_(repeat);\n\n}  // namespace testing\n\nusing testing::GTEST_FLAG(death_test_style);\nusing testing::GTEST_FLAG(filter);\nusing testing::GTEST_FLAG(repeat);\n\nnamespace {\n\n// We need this when we are testing Google Test itself and therefore\n// cannot use Google Test assertions.\n#define GTEST_CHECK_INT_EQ_(expected, actual) \\\n  do {\\\n    const int expected_val = (expected);\\\n    const int actual_val = (actual);\\\n    if (::testing::internal::IsTrue(expected_val != actual_val)) {\\\n      ::std::cout << \"Value of: \" #actual \"\\n\"\\\n                  << \"  Actual: \" << actual_val << \"\\n\"\\\n                  << \"Expected: \" #expected \"\\n\"\\\n                  << \"Which is: \" << expected_val << \"\\n\";\\\n      ::testing::internal::posix::Abort();\\\n    }\\\n  } while (::testing::internal::AlwaysFalse())\n\n\n// Used for verifying that global environment set-up and tear-down are\n// inside the --gtest_repeat loop.\n\nint g_environment_set_up_count = 0;\nint g_environment_tear_down_count = 0;\n\nclass MyEnvironment : public testing::Environment {\n public:\n  MyEnvironment() {}\n  void SetUp() override { g_environment_set_up_count++; 
}\n  void TearDown() override { g_environment_tear_down_count++; }\n};\n\n// A test that should fail.\n\nint g_should_fail_count = 0;\n\nTEST(FooTest, ShouldFail) {\n  g_should_fail_count++;\n  EXPECT_EQ(0, 1) << \"Expected failure.\";\n}\n\n// A test that should pass.\n\nint g_should_pass_count = 0;\n\nTEST(FooTest, ShouldPass) {\n  g_should_pass_count++;\n}\n\n// A test that contains a thread-safe death test and a fast death\n// test.  It should pass.\n\nint g_death_test_count = 0;\n\nTEST(BarDeathTest, ThreadSafeAndFast) {\n  g_death_test_count++;\n\n  GTEST_FLAG(death_test_style) = \"threadsafe\";\n  EXPECT_DEATH_IF_SUPPORTED(::testing::internal::posix::Abort(), \"\");\n\n  GTEST_FLAG(death_test_style) = \"fast\";\n  EXPECT_DEATH_IF_SUPPORTED(::testing::internal::posix::Abort(), \"\");\n}\n\nint g_param_test_count = 0;\n\nconst int kNumberOfParamTests = 10;\n\nclass MyParamTest : public testing::TestWithParam<int> {};\n\nTEST_P(MyParamTest, ShouldPass) {\n  GTEST_CHECK_INT_EQ_(g_param_test_count % kNumberOfParamTests, GetParam());\n  g_param_test_count++;\n}\nINSTANTIATE_TEST_SUITE_P(MyParamSequence,\n                         MyParamTest,\n                         testing::Range(0, kNumberOfParamTests));\n\n// Resets the count for each test.\nvoid ResetCounts() {\n  g_environment_set_up_count = 0;\n  g_environment_tear_down_count = 0;\n  g_should_fail_count = 0;\n  g_should_pass_count = 0;\n  g_death_test_count = 0;\n  g_param_test_count = 0;\n}\n\n// Checks that the count for each test is expected.\nvoid CheckCounts(int expected) {\n  GTEST_CHECK_INT_EQ_(expected, g_environment_set_up_count);\n  GTEST_CHECK_INT_EQ_(expected, g_environment_tear_down_count);\n  GTEST_CHECK_INT_EQ_(expected, g_should_fail_count);\n  GTEST_CHECK_INT_EQ_(expected, g_should_pass_count);\n  GTEST_CHECK_INT_EQ_(expected, g_death_test_count);\n  GTEST_CHECK_INT_EQ_(expected * kNumberOfParamTests, g_param_test_count);\n}\n\n// Tests the behavior of Google Test when --gtest_repeat is not 
specified.\nvoid TestRepeatUnspecified() {\n  ResetCounts();\n  GTEST_CHECK_INT_EQ_(1, RUN_ALL_TESTS());\n  CheckCounts(1);\n}\n\n// Tests the behavior of Google Test when --gtest_repeat has the given value.\nvoid TestRepeat(int repeat) {\n  GTEST_FLAG(repeat) = repeat;\n\n  ResetCounts();\n  GTEST_CHECK_INT_EQ_(repeat > 0 ? 1 : 0, RUN_ALL_TESTS());\n  CheckCounts(repeat);\n}\n\n// Tests using --gtest_repeat when --gtest_filter specifies an empty\n// set of tests.\nvoid TestRepeatWithEmptyFilter(int repeat) {\n  GTEST_FLAG(repeat) = repeat;\n  GTEST_FLAG(filter) = \"None\";\n\n  ResetCounts();\n  GTEST_CHECK_INT_EQ_(0, RUN_ALL_TESTS());\n  CheckCounts(0);\n}\n\n// Tests using --gtest_repeat when --gtest_filter specifies a set of\n// successful tests.\nvoid TestRepeatWithFilterForSuccessfulTests(int repeat) {\n  GTEST_FLAG(repeat) = repeat;\n  GTEST_FLAG(filter) = \"*-*ShouldFail\";\n\n  ResetCounts();\n  GTEST_CHECK_INT_EQ_(0, RUN_ALL_TESTS());\n  GTEST_CHECK_INT_EQ_(repeat, g_environment_set_up_count);\n  GTEST_CHECK_INT_EQ_(repeat, g_environment_tear_down_count);\n  GTEST_CHECK_INT_EQ_(0, g_should_fail_count);\n  GTEST_CHECK_INT_EQ_(repeat, g_should_pass_count);\n  GTEST_CHECK_INT_EQ_(repeat, g_death_test_count);\n  GTEST_CHECK_INT_EQ_(repeat * kNumberOfParamTests, g_param_test_count);\n}\n\n// Tests using --gtest_repeat when --gtest_filter specifies a set of\n// failed tests.\nvoid TestRepeatWithFilterForFailedTests(int repeat) {\n  GTEST_FLAG(repeat) = repeat;\n  GTEST_FLAG(filter) = \"*ShouldFail\";\n\n  ResetCounts();\n  GTEST_CHECK_INT_EQ_(1, RUN_ALL_TESTS());\n  GTEST_CHECK_INT_EQ_(repeat, g_environment_set_up_count);\n  GTEST_CHECK_INT_EQ_(repeat, g_environment_tear_down_count);\n  GTEST_CHECK_INT_EQ_(repeat, g_should_fail_count);\n  GTEST_CHECK_INT_EQ_(0, g_should_pass_count);\n  GTEST_CHECK_INT_EQ_(0, g_death_test_count);\n  GTEST_CHECK_INT_EQ_(0, g_param_test_count);\n}\n\n}  // namespace\n\nint main(int argc, char **argv) {\n  
testing::InitGoogleTest(&argc, argv);\n\n  testing::AddGlobalTestEnvironment(new MyEnvironment);\n\n  TestRepeatUnspecified();\n  TestRepeat(0);\n  TestRepeat(1);\n  TestRepeat(5);\n\n  TestRepeatWithEmptyFilter(2);\n  TestRepeatWithEmptyFilter(3);\n\n  TestRepeatWithFilterForSuccessfulTests(3);\n\n  TestRepeatWithFilterForFailedTests(4);\n\n  // It would be nice to verify that the tests indeed loop forever\n  // when GTEST_FLAG(repeat) is negative, but this test will be quite\n  // complicated to write.  Since this flag is for interactive\n  // debugging only and doesn't affect the normal test result, such a\n  // test would be an overkill.\n\n  printf(\"PASS\\n\");\n  return 0;\n}\n"
  },
  {
    "path": "test/googletest/test/gtest_skip_check_output_test.py",
    "content": "#!/usr/bin/env python\n#\n# Copyright 2019 Google LLC.  All Rights Reserved.\n#\n# Redistribution and use in source and binary forms, with or without\n# modification, are permitted provided that the following conditions are\n# met:\n#\n#     * Redistributions of source code must retain the above copyright\n# notice, this list of conditions and the following disclaimer.\n#     * Redistributions in binary form must reproduce the above\n# copyright notice, this list of conditions and the following disclaimer\n# in the documentation and/or other materials provided with the\n# distribution.\n#     * Neither the name of Google Inc. nor the names of its\n# contributors may be used to endorse or promote products derived from\n# this software without specific prior written permission.\n#\n# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n# \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\"\"\"Tests Google Test's gtest skip in environment setup  behavior.\n\nThis script invokes gtest_skip_in_environment_setup_test_ and verifies its\noutput.\n\"\"\"\n\nimport re\n\nimport gtest_test_utils\n\n# Path to the gtest_skip_in_environment_setup_test binary\nEXE_PATH = gtest_test_utils.GetTestExecutablePath('gtest_skip_test')\n\nOUTPUT = gtest_test_utils.Subprocess([EXE_PATH]).output\n\n\n# Test.\nclass SkipEntireEnvironmentTest(gtest_test_utils.TestCase):\n\n  def testSkipEntireEnvironmentTest(self):\n    self.assertIn('Skipped\\nskipping single test\\n', OUTPUT)\n    skip_fixture = 'Skipped\\nskipping all tests for this fixture\\n'\n    self.assertIsNotNone(\n        re.search(skip_fixture + '.*' + skip_fixture, OUTPUT, flags=re.DOTALL),\n        repr(OUTPUT))\n    self.assertNotIn('FAILED', OUTPUT)\n\n\nif __name__ == '__main__':\n  gtest_test_utils.Main()\n"
  },
  {
    "path": "test/googletest/test/gtest_skip_environment_check_output_test.py",
    "content": "#!/usr/bin/env python\n#\n# Copyright 2019 Google LLC.  All Rights Reserved.\n#\n# Redistribution and use in source and binary forms, with or without\n# modification, are permitted provided that the following conditions are\n# met:\n#\n#     * Redistributions of source code must retain the above copyright\n# notice, this list of conditions and the following disclaimer.\n#     * Redistributions in binary form must reproduce the above\n# copyright notice, this list of conditions and the following disclaimer\n# in the documentation and/or other materials provided with the\n# distribution.\n#     * Neither the name of Google Inc. nor the names of its\n# contributors may be used to endorse or promote products derived from\n# this software without specific prior written permission.\n#\n# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n# \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\"\"\"Tests Google Test's gtest skip in environment setup  behavior.\n\nThis script invokes gtest_skip_in_environment_setup_test_ and verifies its\noutput.\n\"\"\"\n\nimport gtest_test_utils\n\n# Path to the gtest_skip_in_environment_setup_test binary\nEXE_PATH = gtest_test_utils.GetTestExecutablePath(\n    'gtest_skip_in_environment_setup_test')\n\nOUTPUT = gtest_test_utils.Subprocess([EXE_PATH]).output\n\n\n# Test.\nclass SkipEntireEnvironmentTest(gtest_test_utils.TestCase):\n\n  def testSkipEntireEnvironmentTest(self):\n    self.assertIn('Skipping the entire environment', OUTPUT)\n    self.assertNotIn('FAILED', OUTPUT)\n\n\nif __name__ == '__main__':\n  gtest_test_utils.Main()\n"
  },
  {
    "path": "test/googletest/test/gtest_skip_in_environment_setup_test.cc",
    "content": "// Copyright 2019, Google LLC.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google LLC. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n//\n// This test verifies that skipping in the environment results in the\n// testcases being skipped.\n\n#include <iostream>\n#include \"gtest/gtest.h\"\n\nclass SetupEnvironment : public testing::Environment {\n public:\n  void SetUp() override { GTEST_SKIP() << \"Skipping the entire environment\"; }\n};\n\nTEST(Test, AlwaysFails) { EXPECT_EQ(true, false); }\n\nint main(int argc, char **argv) {\n  testing::InitGoogleTest(&argc, argv);\n\n  testing::AddGlobalTestEnvironment(new SetupEnvironment());\n\n  return RUN_ALL_TESTS();\n}\n"
  },
  {
    "path": "test/googletest/test/gtest_skip_test.cc",
    "content": "// Copyright 2008 Google Inc.\n// All Rights Reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n//\n// Author: arseny.aprelev@gmail.com (Arseny Aprelev)\n//\n\n#include \"gtest/gtest.h\"\n\nusing ::testing::Test;\n\nTEST(SkipTest, DoesSkip) {\n  GTEST_SKIP() << \"skipping single test\";\n  EXPECT_EQ(0, 1);\n}\n\nclass Fixture : public Test {\n protected:\n  void SetUp() override {\n    GTEST_SKIP() << \"skipping all tests for this fixture\";\n  }\n};\n\nTEST_F(Fixture, SkipsOneTest) {\n  EXPECT_EQ(5, 7);\n}\n\nTEST_F(Fixture, SkipsAnotherTest) {\n  EXPECT_EQ(99, 100);\n}\n"
  },
  {
    "path": "test/googletest/test/gtest_sole_header_test.cc",
    "content": "// Copyright 2008, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n//\n// This test verifies that it's possible to use Google Test by including\n// the gtest.h header file alone.\n\n#include \"gtest/gtest.h\"\n\nnamespace {\n\nvoid Subroutine() {\n  EXPECT_EQ(42, 42);\n}\n\nTEST(NoFatalFailureTest, ExpectNoFatalFailure) {\n  EXPECT_NO_FATAL_FAILURE(;);\n  EXPECT_NO_FATAL_FAILURE(SUCCEED());\n  EXPECT_NO_FATAL_FAILURE(Subroutine());\n  EXPECT_NO_FATAL_FAILURE({ SUCCEED(); });\n}\n\nTEST(NoFatalFailureTest, AssertNoFatalFailure) {\n  ASSERT_NO_FATAL_FAILURE(;);\n  ASSERT_NO_FATAL_FAILURE(SUCCEED());\n  ASSERT_NO_FATAL_FAILURE(Subroutine());\n  ASSERT_NO_FATAL_FAILURE({ SUCCEED(); });\n}\n\n}  // namespace\n"
  },
  {
    "path": "test/googletest/test/gtest_stress_test.cc",
    "content": "// Copyright 2007, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\n// Tests that SCOPED_TRACE() and various Google Test assertions can be\n// used in a large number of threads concurrently.\n\n#include \"gtest/gtest.h\"\n\n#include <vector>\n\n#include \"src/gtest-internal-inl.h\"\n\n#if GTEST_IS_THREADSAFE\n\nnamespace testing {\nnamespace {\n\nusing internal::Notification;\nusing internal::TestPropertyKeyIs;\nusing internal::ThreadWithParam;\n\n// In order to run tests in this file, for platforms where Google Test is\n// thread safe, implement ThreadWithParam. 
See the description of its API\n// in gtest-port.h, where it is defined for already supported platforms.\n\n// How many threads to create?\nconst int kThreadCount = 50;\n\nstd::string IdToKey(int id, const char* suffix) {\n  Message key;\n  key << \"key_\" << id << \"_\" << suffix;\n  return key.GetString();\n}\n\nstd::string IdToString(int id) {\n  Message id_message;\n  id_message << id;\n  return id_message.GetString();\n}\n\nvoid ExpectKeyAndValueWereRecordedForId(\n    const std::vector<TestProperty>& properties,\n    int id, const char* suffix) {\n  TestPropertyKeyIs matches_key(IdToKey(id, suffix).c_str());\n  const std::vector<TestProperty>::const_iterator property =\n      std::find_if(properties.begin(), properties.end(), matches_key);\n  ASSERT_TRUE(property != properties.end())\n      << \"expecting \" << suffix << \" value for id \" << id;\n  EXPECT_STREQ(IdToString(id).c_str(), property->value());\n}\n\n// Calls a large number of Google Test assertions, where exactly one of them\n// will fail.\nvoid ManyAsserts(int id) {\n  GTEST_LOG_(INFO) << \"Thread #\" << id << \" running...\";\n\n  SCOPED_TRACE(Message() << \"Thread #\" << id);\n\n  for (int i = 0; i < kThreadCount; i++) {\n    SCOPED_TRACE(Message() << \"Iteration #\" << i);\n\n    // A bunch of assertions that should succeed.\n    EXPECT_TRUE(true);\n    ASSERT_FALSE(false) << \"This shouldn't fail.\";\n    EXPECT_STREQ(\"a\", \"a\");\n    ASSERT_LE(5, 6);\n    EXPECT_EQ(i, i) << \"This shouldn't fail.\";\n\n    // RecordProperty() should interact safely with other threads as well.\n    // The shared_key forces property updates.\n    Test::RecordProperty(IdToKey(id, \"string\").c_str(), IdToString(id).c_str());\n    Test::RecordProperty(IdToKey(id, \"int\").c_str(), id);\n    Test::RecordProperty(\"shared_key\", IdToString(id).c_str());\n\n    // This assertion should fail kThreadCount times per thread.  
It\n    // is for testing whether Google Test can handle failed assertions in a\n    // multi-threaded context.\n    EXPECT_LT(i, 0) << \"This should always fail.\";\n  }\n}\n\nvoid CheckTestFailureCount(int expected_failures) {\n  const TestInfo* const info = UnitTest::GetInstance()->current_test_info();\n  const TestResult* const result = info->result();\n  GTEST_CHECK_(expected_failures == result->total_part_count())\n      << \"Logged \" << result->total_part_count() << \" failures \"\n      << \" vs. \" << expected_failures << \" expected\";\n}\n\n// Tests using SCOPED_TRACE() and Google Test assertions in many threads\n// concurrently.\nTEST(StressTest, CanUseScopedTraceAndAssertionsInManyThreads) {\n  {\n    std::unique_ptr<ThreadWithParam<int> > threads[kThreadCount];\n    Notification threads_can_start;\n    for (int i = 0; i != kThreadCount; i++)\n      threads[i].reset(new ThreadWithParam<int>(&ManyAsserts,\n                                                i,\n                                                &threads_can_start));\n\n    threads_can_start.Notify();\n\n    // Blocks until all the threads are done.\n    for (int i = 0; i != kThreadCount; i++)\n      threads[i]->Join();\n  }\n\n  // Ensures that kThreadCount*kThreadCount failures have been reported.\n  const TestInfo* const info = UnitTest::GetInstance()->current_test_info();\n  const TestResult* const result = info->result();\n\n  std::vector<TestProperty> properties;\n  // We have no access to the TestResult's list of properties but we can\n  // copy them one by one.\n  for (int i = 0; i < result->test_property_count(); ++i)\n    properties.push_back(result->GetTestProperty(i));\n\n  EXPECT_EQ(kThreadCount * 2 + 1, result->test_property_count())\n      << \"String and int values recorded on each thread, \"\n      << \"as well as one shared_key\";\n  for (int i = 0; i < kThreadCount; ++i) {\n    ExpectKeyAndValueWereRecordedForId(properties, i, \"string\");\n    
ExpectKeyAndValueWereRecordedForId(properties, i, \"int\");\n  }\n  CheckTestFailureCount(kThreadCount*kThreadCount);\n}\n\nvoid FailingThread(bool is_fatal) {\n  if (is_fatal)\n    FAIL() << \"Fatal failure in some other thread. \"\n           << \"(This failure is expected.)\";\n  else\n    ADD_FAILURE() << \"Non-fatal failure in some other thread. \"\n                  << \"(This failure is expected.)\";\n}\n\nvoid GenerateFatalFailureInAnotherThread(bool is_fatal) {\n  ThreadWithParam<bool> thread(&FailingThread, is_fatal, nullptr);\n  thread.Join();\n}\n\nTEST(NoFatalFailureTest, ExpectNoFatalFailureIgnoresFailuresInOtherThreads) {\n  EXPECT_NO_FATAL_FAILURE(GenerateFatalFailureInAnotherThread(true));\n  // We should only have one failure (the one from\n  // GenerateFatalFailureInAnotherThread()), since the EXPECT_NO_FATAL_FAILURE\n  // should succeed.\n  CheckTestFailureCount(1);\n}\n\nvoid AssertNoFatalFailureIgnoresFailuresInOtherThreads() {\n  ASSERT_NO_FATAL_FAILURE(GenerateFatalFailureInAnotherThread(true));\n}\nTEST(NoFatalFailureTest, AssertNoFatalFailureIgnoresFailuresInOtherThreads) {\n  // Using a subroutine, to make sure, that the test continues.\n  AssertNoFatalFailureIgnoresFailuresInOtherThreads();\n  // We should only have one failure (the one from\n  // GenerateFatalFailureInAnotherThread()), since the EXPECT_NO_FATAL_FAILURE\n  // should succeed.\n  CheckTestFailureCount(1);\n}\n\nTEST(FatalFailureTest, ExpectFatalFailureIgnoresFailuresInOtherThreads) {\n  // This statement should fail, since the current thread doesn't generate a\n  // fatal failure, only another one does.\n  EXPECT_FATAL_FAILURE(GenerateFatalFailureInAnotherThread(true), \"expected\");\n  CheckTestFailureCount(2);\n}\n\nTEST(FatalFailureOnAllThreadsTest, ExpectFatalFailureOnAllThreads) {\n  // This statement should succeed, because failures in all threads are\n  // considered.\n  EXPECT_FATAL_FAILURE_ON_ALL_THREADS(\n      GenerateFatalFailureInAnotherThread(true), 
\"expected\");\n  CheckTestFailureCount(0);\n  // We need to add a failure, because main() checks that there are failures.\n  // But when only this test is run, we shouldn't have any failures.\n  ADD_FAILURE() << \"This is an expected non-fatal failure.\";\n}\n\nTEST(NonFatalFailureTest, ExpectNonFatalFailureIgnoresFailuresInOtherThreads) {\n  // This statement should fail, since the current thread doesn't generate a\n  // fatal failure, only another one does.\n  EXPECT_NONFATAL_FAILURE(GenerateFatalFailureInAnotherThread(false),\n                          \"expected\");\n  CheckTestFailureCount(2);\n}\n\nTEST(NonFatalFailureOnAllThreadsTest, ExpectNonFatalFailureOnAllThreads) {\n  // This statement should succeed, because failures in all threads are\n  // considered.\n  EXPECT_NONFATAL_FAILURE_ON_ALL_THREADS(\n      GenerateFatalFailureInAnotherThread(false), \"expected\");\n  CheckTestFailureCount(0);\n  // We need to add a failure, because main() checks that there are failures,\n  // But when only this test is run, we shouldn't have any failures.\n  ADD_FAILURE() << \"This is an expected non-fatal failure.\";\n}\n\n}  // namespace\n}  // namespace testing\n\nint main(int argc, char **argv) {\n  testing::InitGoogleTest(&argc, argv);\n\n  const int result = RUN_ALL_TESTS();  // Expected to fail.\n  GTEST_CHECK_(result == 1) << \"RUN_ALL_TESTS() did not fail as expected\";\n\n  printf(\"\\nPASS\\n\");\n  return 0;\n}\n\n#else\nTEST(StressTest,\n     DISABLED_ThreadSafetyTestsAreSkippedWhenGoogleTestIsNotThreadSafe) {\n}\n\nint main(int argc, char **argv) {\n  testing::InitGoogleTest(&argc, argv);\n  return RUN_ALL_TESTS();\n}\n#endif  // GTEST_IS_THREADSAFE\n"
  },
  {
    "path": "test/googletest/test/gtest_test_macro_stack_footprint_test.cc",
    "content": "// Copyright 2013, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n//\n// Each TEST() expands to some static registration logic.  GCC puts all\n// such static initialization logic for a translation unit in a common,\n// internal function.  Since Google's build system restricts how much\n// stack space a function can use, there's a limit on how many TEST()s\n// one can put in a single C++ test file.  
This test ensures that a large\n// number of TEST()s can be defined in the same translation unit.\n\n#include \"gtest/gtest.h\"\n\n// This macro defines 10 dummy tests.\n#define TEN_TESTS_(test_case_name) \\\n  TEST(test_case_name, T0) {} \\\n  TEST(test_case_name, T1) {} \\\n  TEST(test_case_name, T2) {} \\\n  TEST(test_case_name, T3) {} \\\n  TEST(test_case_name, T4) {} \\\n  TEST(test_case_name, T5) {} \\\n  TEST(test_case_name, T6) {} \\\n  TEST(test_case_name, T7) {} \\\n  TEST(test_case_name, T8) {} \\\n  TEST(test_case_name, T9) {}\n\n// This macro defines 100 dummy tests.\n#define HUNDRED_TESTS_(test_case_name_prefix) \\\n  TEN_TESTS_(test_case_name_prefix ## 0) \\\n  TEN_TESTS_(test_case_name_prefix ## 1) \\\n  TEN_TESTS_(test_case_name_prefix ## 2) \\\n  TEN_TESTS_(test_case_name_prefix ## 3) \\\n  TEN_TESTS_(test_case_name_prefix ## 4) \\\n  TEN_TESTS_(test_case_name_prefix ## 5) \\\n  TEN_TESTS_(test_case_name_prefix ## 6) \\\n  TEN_TESTS_(test_case_name_prefix ## 7) \\\n  TEN_TESTS_(test_case_name_prefix ## 8) \\\n  TEN_TESTS_(test_case_name_prefix ## 9)\n\n// This macro defines 1000 dummy tests.\n#define THOUSAND_TESTS_(test_case_name_prefix) \\\n  HUNDRED_TESTS_(test_case_name_prefix ## 0) \\\n  HUNDRED_TESTS_(test_case_name_prefix ## 1) \\\n  HUNDRED_TESTS_(test_case_name_prefix ## 2) \\\n  HUNDRED_TESTS_(test_case_name_prefix ## 3) \\\n  HUNDRED_TESTS_(test_case_name_prefix ## 4) \\\n  HUNDRED_TESTS_(test_case_name_prefix ## 5) \\\n  HUNDRED_TESTS_(test_case_name_prefix ## 6) \\\n  HUNDRED_TESTS_(test_case_name_prefix ## 7) \\\n  HUNDRED_TESTS_(test_case_name_prefix ## 8) \\\n  HUNDRED_TESTS_(test_case_name_prefix ## 9)\n\n// Ensures that we can define 1000 TEST()s in the same translation\n// unit.\nTHOUSAND_TESTS_(T)\n\nint main(int argc, char **argv) {\n  testing::InitGoogleTest(&argc, argv);\n\n  // We don't actually need to run the dummy tests - the purpose is to\n  // ensure that they compile.\n  return 0;\n}\n"
  },
  {
    "path": "test/googletest/test/gtest_test_utils.py",
    "content": "# Copyright 2006, Google Inc.\n# All rights reserved.\n#\n# Redistribution and use in source and binary forms, with or without\n# modification, are permitted provided that the following conditions are\n# met:\n#\n#     * Redistributions of source code must retain the above copyright\n# notice, this list of conditions and the following disclaimer.\n#     * Redistributions in binary form must reproduce the above\n# copyright notice, this list of conditions and the following disclaimer\n# in the documentation and/or other materials provided with the\n# distribution.\n#     * Neither the name of Google Inc. nor the names of its\n# contributors may be used to endorse or promote products derived from\n# this software without specific prior written permission.\n#\n# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n# \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\"\"\"Unit test utilities for Google C++ Testing and Mocking Framework.\"\"\"\n# Suppresses the 'Import not at the top of the file' lint complaint.\n# pylint: disable-msg=C6204\n\nimport os\nimport sys\n\nIS_WINDOWS = os.name == 'nt'\nIS_CYGWIN = os.name == 'posix' and 'CYGWIN' in os.uname()[0]\nIS_OS2 = os.name == 'os2'\n\nimport atexit\nimport shutil\nimport tempfile\nimport unittest as _test_module\n\ntry:\n  import subprocess\n  _SUBPROCESS_MODULE_AVAILABLE = True\nexcept:\n  import popen2\n  _SUBPROCESS_MODULE_AVAILABLE = False\n# pylint: enable-msg=C6204\n\nGTEST_OUTPUT_VAR_NAME = 'GTEST_OUTPUT'\n\n# The environment variable for specifying the path to the premature-exit file.\nPREMATURE_EXIT_FILE_ENV_VAR = 'TEST_PREMATURE_EXIT_FILE'\n\nenviron = os.environ.copy()\n\n\ndef SetEnvVar(env_var, value):\n  \"\"\"Sets/unsets an environment variable to a given value.\"\"\"\n\n  if value is not None:\n    environ[env_var] = value\n  elif env_var in environ:\n    del environ[env_var]\n\n\n# Here we expose a class from a particular module, depending on the\n# environment. The comment suppresses the 'Invalid variable name' lint\n# complaint.\nTestCase = _test_module.TestCase  # pylint: disable=C6409\n\n# Initially maps a flag to its default value. 
After\n# _ParseAndStripGTestFlags() is called, maps a flag to its actual value.\n_flag_map = {'source_dir': os.path.dirname(sys.argv[0]),\n             'build_dir': os.path.dirname(sys.argv[0])}\n_gtest_flags_are_parsed = False\n\n\ndef _ParseAndStripGTestFlags(argv):\n  \"\"\"Parses and strips Google Test flags from argv.  This is idempotent.\"\"\"\n\n  # Suppresses the lint complaint about a global variable since we need it\n  # here to maintain module-wide state.\n  global _gtest_flags_are_parsed  # pylint: disable=W0603\n  if _gtest_flags_are_parsed:\n    return\n\n  _gtest_flags_are_parsed = True\n  for flag in _flag_map:\n    # The environment variable overrides the default value.\n    if flag.upper() in os.environ:\n      _flag_map[flag] = os.environ[flag.upper()]\n\n    # The command line flag overrides the environment variable.\n    i = 1  # Skips the program name.\n    while i < len(argv):\n      prefix = '--' + flag + '='\n      if argv[i].startswith(prefix):\n        _flag_map[flag] = argv[i][len(prefix):]\n        del argv[i]\n        break\n      else:\n        # We don't increment i in case we just found a --gtest_* flag\n        # and removed it from argv.\n        i += 1\n\n\ndef GetFlag(flag):\n  \"\"\"Returns the value of the given flag.\"\"\"\n\n  # In case GetFlag() is called before Main(), we always call\n  # _ParseAndStripGTestFlags() here to make sure the --gtest_* flags\n  # are parsed.\n  _ParseAndStripGTestFlags(sys.argv)\n\n  return _flag_map[flag]\n\n\ndef GetSourceDir():\n  \"\"\"Returns the absolute path of the directory where the .py files are.\"\"\"\n\n  return os.path.abspath(GetFlag('source_dir'))\n\n\ndef GetBuildDir():\n  \"\"\"Returns the absolute path of the directory where the test binaries are.\"\"\"\n\n  return os.path.abspath(GetFlag('build_dir'))\n\n\n_temp_dir = None\n\ndef _RemoveTempDir():\n  if _temp_dir:\n    shutil.rmtree(_temp_dir, ignore_errors=True)\n\natexit.register(_RemoveTempDir)\n\n\ndef GetTempDir():\n  
global _temp_dir\n  if not _temp_dir:\n    _temp_dir = tempfile.mkdtemp()\n  return _temp_dir\n\n\ndef GetTestExecutablePath(executable_name, build_dir=None):\n  \"\"\"Returns the absolute path of the test binary given its name.\n\n  The function will print a message and abort the program if the resulting file\n  doesn't exist.\n\n  Args:\n    executable_name: name of the test binary that the test script runs.\n    build_dir:       directory where to look for executables, by default\n                     the result of GetBuildDir().\n\n  Returns:\n    The absolute path of the test binary.\n  \"\"\"\n\n  path = os.path.abspath(os.path.join(build_dir or GetBuildDir(),\n                                      executable_name))\n  if (IS_WINDOWS or IS_CYGWIN or IS_OS2) and not path.endswith('.exe'):\n    path += '.exe'\n\n  if not os.path.exists(path):\n    message = (\n        'Unable to find the test binary \"%s\". Please make sure to provide\\n'\n        'a path to the binary via the --build_dir flag or the BUILD_DIR\\n'\n        'environment variable.' 
% path)\n    print >> sys.stderr, message\n    sys.exit(1)\n\n  return path\n\n\ndef GetExitStatus(exit_code):\n  \"\"\"Returns the argument to exit(), or -1 if exit() wasn't called.\n\n  Args:\n    exit_code: the result value of os.system(command).\n  \"\"\"\n\n  if os.name == 'nt':\n    # On Windows, os.WEXITSTATUS() doesn't work and os.system() returns\n    # the argument to exit() directly.\n    return exit_code\n  else:\n    # On Unix, os.WEXITSTATUS() must be used to extract the exit status\n    # from the result of os.system().\n    if os.WIFEXITED(exit_code):\n      return os.WEXITSTATUS(exit_code)\n    else:\n      return -1\n\n\nclass Subprocess:\n  def __init__(self, command, working_dir=None, capture_stderr=True, env=None):\n    \"\"\"Changes into a specified directory, if provided, and executes a command.\n\n    Restores the old directory afterwards.\n\n    Args:\n      command:        The command to run, in the form of sys.argv.\n      working_dir:    The directory to change into.\n      capture_stderr: Determines whether to capture stderr in the output member\n                      or to discard it.\n      env:            Dictionary with environment to pass to the subprocess.\n\n    Returns:\n      An object that represents outcome of the executed process. 
It has the\n      following attributes:\n        terminated_by_signal   True if and only if the child process has been\n                               terminated by a signal.\n        signal                 Sygnal that terminated the child process.\n        exited                 True if and only if the child process exited\n                               normally.\n        exit_code              The code with which the child process exited.\n        output                 Child process's stdout and stderr output\n                               combined in a string.\n    \"\"\"\n\n    # The subprocess module is the preferrable way of running programs\n    # since it is available and behaves consistently on all platforms,\n    # including Windows. But it is only available starting in python 2.4.\n    # In earlier python versions, we revert to the popen2 module, which is\n    # available in python 2.0 and later but doesn't provide required\n    # functionality (Popen4) under Windows. This allows us to support Mac\n    # OS X 10.4 Tiger, which has python 2.3 installed.\n    if _SUBPROCESS_MODULE_AVAILABLE:\n      if capture_stderr:\n        stderr = subprocess.STDOUT\n      else:\n        stderr = subprocess.PIPE\n\n      p = subprocess.Popen(command,\n                           stdout=subprocess.PIPE, stderr=stderr,\n                           cwd=working_dir, universal_newlines=True, env=env)\n      # communicate returns a tuple with the file object for the child's\n      # output.\n      self.output = p.communicate()[0]\n      self._return_code = p.returncode\n    else:\n      old_dir = os.getcwd()\n\n      def _ReplaceEnvDict(dest, src):\n        # Changes made by os.environ.clear are not inheritable by child\n        # processes until Python 2.6. 
To produce inheritable changes we have\n        # to delete environment items with the del statement.\n        for key in dest.keys():\n          del dest[key]\n        dest.update(src)\n\n      # When 'env' is not None, backup the environment variables and replace\n      # them with the passed 'env'. When 'env' is None, we simply use the\n      # current 'os.environ' for compatibility with the subprocess.Popen\n      # semantics used above.\n      if env is not None:\n        old_environ = os.environ.copy()\n        _ReplaceEnvDict(os.environ, env)\n\n      try:\n        if working_dir is not None:\n          os.chdir(working_dir)\n        if capture_stderr:\n          p = popen2.Popen4(command)\n        else:\n          p = popen2.Popen3(command)\n        p.tochild.close()\n        self.output = p.fromchild.read()\n        ret_code = p.wait()\n      finally:\n        os.chdir(old_dir)\n\n        # Restore the old environment variables\n        # if they were replaced.\n        if env is not None:\n          _ReplaceEnvDict(os.environ, old_environ)\n\n      # Converts ret_code to match the semantics of\n      # subprocess.Popen.returncode.\n      if os.WIFSIGNALED(ret_code):\n        self._return_code = -os.WTERMSIG(ret_code)\n      else:  # os.WIFEXITED(ret_code) should return True here.\n        self._return_code = os.WEXITSTATUS(ret_code)\n\n    if self._return_code < 0:\n      self.terminated_by_signal = True\n      self.exited = False\n      self.signal = -self._return_code\n    else:\n      self.terminated_by_signal = False\n      self.exited = True\n      self.exit_code = self._return_code\n\n\ndef Main():\n  \"\"\"Runs the unit test.\"\"\"\n\n  # We must call _ParseAndStripGTestFlags() before calling\n  # unittest.main().  
Otherwise the latter will be confused by the\n  # --gtest_* flags.\n  _ParseAndStripGTestFlags(sys.argv)\n  # The tested binaries should not be writing XML output files unless the\n  # script explicitly instructs them to.\n  if GTEST_OUTPUT_VAR_NAME in os.environ:\n    del os.environ[GTEST_OUTPUT_VAR_NAME]\n\n  _test_module.main()\n"
  },
  {
    "path": "test/googletest/test/gtest_testbridge_test.py",
    "content": "#!/usr/bin/env python\n#\n# Copyright 2018 Google LLC. All rights reserved.\n#\n# Redistribution and use in source and binary forms, with or without\n# modification, are permitted provided that the following conditions are\n# met:\n#\n#     * Redistributions of source code must retain the above copyright\n# notice, this list of conditions and the following disclaimer.\n#     * Redistributions in binary form must reproduce the above\n# copyright notice, this list of conditions and the following disclaimer\n# in the documentation and/or other materials provided with the\n# distribution.\n#     * Neither the name of Google Inc. nor the names of its\n# contributors may be used to endorse or promote products derived from\n# this software without specific prior written permission.\n#\n# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n# \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\"\"\"Verifies that Google Test uses filter provided via testbridge.\"\"\"\n\nimport os\n\nimport gtest_test_utils\n\nbinary_name = 'gtest_testbridge_test_'\nCOMMAND = gtest_test_utils.GetTestExecutablePath(binary_name)\nTESTBRIDGE_NAME = 'TESTBRIDGE_TEST_ONLY'\n\n\ndef Assert(condition):\n  if not condition:\n    raise AssertionError\n\n\nclass GTestTestFilterTest(gtest_test_utils.TestCase):\n\n  def testTestExecutionIsFiltered(self):\n    \"\"\"Tests that the test filter is picked up from the testbridge env var.\"\"\"\n    subprocess_env = os.environ.copy()\n\n    subprocess_env[TESTBRIDGE_NAME] = '*.TestThatSucceeds'\n    p = gtest_test_utils.Subprocess(COMMAND, env=subprocess_env)\n\n    self.assertEquals(0, p.exit_code)\n\n    Assert('filter = *.TestThatSucceeds' in p.output)\n    Assert('[       OK ] TestFilterTest.TestThatSucceeds' in p.output)\n    Assert('[  PASSED  ] 1 test.' in p.output)\n\n\nif __name__ == '__main__':\n  gtest_test_utils.Main()\n"
  },
  {
    "path": "test/googletest/test/gtest_testbridge_test_.cc",
    "content": "// Copyright 2018, Google LLC.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\n// This program is meant to be run by gtest_test_filter_test.py.  Do not run\n// it directly.\n\n#include \"gtest/gtest.h\"\n\n// These tests are used to detect if filtering is working. Only\n// 'TestThatSucceeds' should ever run.\n\nTEST(TestFilterTest, TestThatSucceeds) {}\n\nTEST(TestFilterTest, TestThatFails) {\n  ASSERT_TRUE(false) << \"This test should never be run.\";\n}\n"
  },
  {
    "path": "test/googletest/test/gtest_throw_on_failure_ex_test.cc",
    "content": "// Copyright 2009, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\n// Tests Google Test's throw-on-failure mode with exceptions enabled.\n\n#include \"gtest/gtest.h\"\n\n#include <stdlib.h>\n#include <stdio.h>\n#include <string.h>\n#include <stdexcept>\n\n// Prints the given failure message and exits the program with\n// non-zero.  
We use this instead of a Google Test assertion to\n// indicate a failure, as the latter is been tested and cannot be\n// relied on.\nvoid Fail(const char* msg) {\n  printf(\"FAILURE: %s\\n\", msg);\n  fflush(stdout);\n  exit(1);\n}\n\n// Tests that an assertion failure throws a subclass of\n// std::runtime_error.\nvoid TestFailureThrowsRuntimeError() {\n  testing::GTEST_FLAG(throw_on_failure) = true;\n\n  // A successful assertion shouldn't throw.\n  try {\n    EXPECT_EQ(3, 3);\n  } catch(...) {\n    Fail(\"A successful assertion wrongfully threw.\");\n  }\n\n  // A failed assertion should throw a subclass of std::runtime_error.\n  try {\n    EXPECT_EQ(2, 3) << \"Expected failure\";\n  } catch(const std::runtime_error& e) {\n    if (strstr(e.what(), \"Expected failure\") != nullptr) return;\n\n    printf(\"%s\",\n           \"A failed assertion did throw an exception of the right type, \"\n           \"but the message is incorrect.  Instead of containing \\\"Expected \"\n           \"failure\\\", it is:\\n\");\n    Fail(e.what());\n  } catch(...) {\n    Fail(\"A failed assertion threw the wrong type of exception.\");\n  }\n  Fail(\"A failed assertion should've thrown but didn't.\");\n}\n\nint main(int argc, char** argv) {\n  testing::InitGoogleTest(&argc, argv);\n\n  // We want to ensure that people can use Google Test assertions in\n  // other testing frameworks, as long as they initialize Google Test\n  // properly and set the thrown-on-failure mode.  Therefore, we don't\n  // use Google Test's constructs for defining and running tests\n  // (e.g. TEST and RUN_ALL_TESTS) here.\n\n  TestFailureThrowsRuntimeError();\n  return 0;\n}\n"
  },
  {
    "path": "test/googletest/test/gtest_unittest.cc",
    "content": "// Copyright 2005, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n//\n// Tests for Google Test itself.  
This verifies that the basic constructs of\n// Google Test work.\n\n#include \"gtest/gtest.h\"\n\n// Verifies that the command line flag variables can be accessed in\n// code once \"gtest.h\" has been #included.\n// Do not move it after other gtest #includes.\nTEST(CommandLineFlagsTest, CanBeAccessedInCodeOnceGTestHIsIncluded) {\n  bool dummy = testing::GTEST_FLAG(also_run_disabled_tests)\n      || testing::GTEST_FLAG(break_on_failure)\n      || testing::GTEST_FLAG(catch_exceptions)\n      || testing::GTEST_FLAG(color) != \"unknown\"\n      || testing::GTEST_FLAG(filter) != \"unknown\"\n      || testing::GTEST_FLAG(list_tests)\n      || testing::GTEST_FLAG(output) != \"unknown\"\n      || testing::GTEST_FLAG(print_time)\n      || testing::GTEST_FLAG(random_seed)\n      || testing::GTEST_FLAG(repeat) > 0\n      || testing::GTEST_FLAG(show_internal_stack_frames)\n      || testing::GTEST_FLAG(shuffle)\n      || testing::GTEST_FLAG(stack_trace_depth) > 0\n      || testing::GTEST_FLAG(stream_result_to) != \"unknown\"\n      || testing::GTEST_FLAG(throw_on_failure);\n  EXPECT_TRUE(dummy || !dummy);  // Suppresses warning that dummy is unused.\n}\n\n#include <limits.h>  // For INT_MAX.\n#include <stdlib.h>\n#include <string.h>\n#include <time.h>\n\n#include <map>\n#include <ostream>\n#include <type_traits>\n#include <unordered_set>\n#include <vector>\n\n#include \"gtest/gtest-spi.h\"\n#include \"src/gtest-internal-inl.h\"\n\nnamespace testing {\nnamespace internal {\n\n#if GTEST_CAN_STREAM_RESULTS_\n\nclass StreamingListenerTest : public Test {\n public:\n  class FakeSocketWriter : public StreamingListener::AbstractSocketWriter {\n   public:\n    // Sends a string to the socket.\n    void Send(const std::string& message) override { output_ += message; }\n\n    std::string output_;\n  };\n\n  StreamingListenerTest()\n      : fake_sock_writer_(new FakeSocketWriter),\n        streamer_(fake_sock_writer_),\n        test_info_obj_(\"FooTest\", \"Bar\", nullptr, nullptr,\n      
                 CodeLocation(__FILE__, __LINE__), nullptr, nullptr) {}\n\n protected:\n  std::string* output() { return &(fake_sock_writer_->output_); }\n\n  FakeSocketWriter* const fake_sock_writer_;\n  StreamingListener streamer_;\n  UnitTest unit_test_;\n  TestInfo test_info_obj_;  // The name test_info_ was taken by testing::Test.\n};\n\nTEST_F(StreamingListenerTest, OnTestProgramEnd) {\n  *output() = \"\";\n  streamer_.OnTestProgramEnd(unit_test_);\n  EXPECT_EQ(\"event=TestProgramEnd&passed=1\\n\", *output());\n}\n\nTEST_F(StreamingListenerTest, OnTestIterationEnd) {\n  *output() = \"\";\n  streamer_.OnTestIterationEnd(unit_test_, 42);\n  EXPECT_EQ(\"event=TestIterationEnd&passed=1&elapsed_time=0ms\\n\", *output());\n}\n\nTEST_F(StreamingListenerTest, OnTestCaseStart) {\n  *output() = \"\";\n  streamer_.OnTestCaseStart(TestCase(\"FooTest\", \"Bar\", nullptr, nullptr));\n  EXPECT_EQ(\"event=TestCaseStart&name=FooTest\\n\", *output());\n}\n\nTEST_F(StreamingListenerTest, OnTestCaseEnd) {\n  *output() = \"\";\n  streamer_.OnTestCaseEnd(TestCase(\"FooTest\", \"Bar\", nullptr, nullptr));\n  EXPECT_EQ(\"event=TestCaseEnd&passed=1&elapsed_time=0ms\\n\", *output());\n}\n\nTEST_F(StreamingListenerTest, OnTestStart) {\n  *output() = \"\";\n  streamer_.OnTestStart(test_info_obj_);\n  EXPECT_EQ(\"event=TestStart&name=Bar\\n\", *output());\n}\n\nTEST_F(StreamingListenerTest, OnTestEnd) {\n  *output() = \"\";\n  streamer_.OnTestEnd(test_info_obj_);\n  EXPECT_EQ(\"event=TestEnd&passed=1&elapsed_time=0ms\\n\", *output());\n}\n\nTEST_F(StreamingListenerTest, OnTestPartResult) {\n  *output() = \"\";\n  streamer_.OnTestPartResult(TestPartResult(\n      TestPartResult::kFatalFailure, \"foo.cc\", 42, \"failed=\\n&%\"));\n\n  // Meta characters in the failure message should be properly escaped.\n  EXPECT_EQ(\n      \"event=TestPartResult&file=foo.cc&line=42&message=failed%3D%0A%26%25\\n\",\n      *output());\n}\n\n#endif  // GTEST_CAN_STREAM_RESULTS_\n\n// Provides access to 
otherwise private parts of the TestEventListeners class\n// that are needed to test it.\nclass TestEventListenersAccessor {\n public:\n  static TestEventListener* GetRepeater(TestEventListeners* listeners) {\n    return listeners->repeater();\n  }\n\n  static void SetDefaultResultPrinter(TestEventListeners* listeners,\n                                      TestEventListener* listener) {\n    listeners->SetDefaultResultPrinter(listener);\n  }\n  static void SetDefaultXmlGenerator(TestEventListeners* listeners,\n                                     TestEventListener* listener) {\n    listeners->SetDefaultXmlGenerator(listener);\n  }\n\n  static bool EventForwardingEnabled(const TestEventListeners& listeners) {\n    return listeners.EventForwardingEnabled();\n  }\n\n  static void SuppressEventForwarding(TestEventListeners* listeners) {\n    listeners->SuppressEventForwarding();\n  }\n};\n\nclass UnitTestRecordPropertyTestHelper : public Test {\n protected:\n  UnitTestRecordPropertyTestHelper() {}\n\n  // Forwards to UnitTest::RecordProperty() to bypass access controls.\n  void UnitTestRecordProperty(const char* key, const std::string& value) {\n    unit_test_.RecordProperty(key, value);\n  }\n\n  UnitTest unit_test_;\n};\n\n}  // namespace internal\n}  // namespace testing\n\nusing testing::AssertionFailure;\nusing testing::AssertionResult;\nusing testing::AssertionSuccess;\nusing testing::DoubleLE;\nusing testing::EmptyTestEventListener;\nusing testing::Environment;\nusing testing::FloatLE;\nusing testing::GTEST_FLAG(also_run_disabled_tests);\nusing testing::GTEST_FLAG(break_on_failure);\nusing testing::GTEST_FLAG(catch_exceptions);\nusing testing::GTEST_FLAG(color);\nusing testing::GTEST_FLAG(death_test_use_fork);\nusing testing::GTEST_FLAG(filter);\nusing testing::GTEST_FLAG(list_tests);\nusing testing::GTEST_FLAG(output);\nusing testing::GTEST_FLAG(print_time);\nusing testing::GTEST_FLAG(random_seed);\nusing testing::GTEST_FLAG(repeat);\nusing 
testing::GTEST_FLAG(show_internal_stack_frames);\nusing testing::GTEST_FLAG(shuffle);\nusing testing::GTEST_FLAG(stack_trace_depth);\nusing testing::GTEST_FLAG(stream_result_to);\nusing testing::GTEST_FLAG(throw_on_failure);\nusing testing::IsNotSubstring;\nusing testing::IsSubstring;\nusing testing::Message;\nusing testing::ScopedFakeTestPartResultReporter;\nusing testing::StaticAssertTypeEq;\nusing testing::Test;\nusing testing::TestCase;\nusing testing::TestEventListeners;\nusing testing::TestInfo;\nusing testing::TestPartResult;\nusing testing::TestPartResultArray;\nusing testing::TestProperty;\nusing testing::TestResult;\nusing testing::TimeInMillis;\nusing testing::UnitTest;\nusing testing::internal::AlwaysFalse;\nusing testing::internal::AlwaysTrue;\nusing testing::internal::AppendUserMessage;\nusing testing::internal::ArrayAwareFind;\nusing testing::internal::ArrayEq;\nusing testing::internal::CodePointToUtf8;\nusing testing::internal::CopyArray;\nusing testing::internal::CountIf;\nusing testing::internal::EqFailure;\nusing testing::internal::FloatingPoint;\nusing testing::internal::ForEach;\nusing testing::internal::FormatEpochTimeInMillisAsIso8601;\nusing testing::internal::FormatTimeInMillisAsSeconds;\nusing testing::internal::GTestFlagSaver;\nusing testing::internal::GetCurrentOsStackTraceExceptTop;\nusing testing::internal::GetElementOr;\nusing testing::internal::GetNextRandomSeed;\nusing testing::internal::GetRandomSeedFromFlag;\nusing testing::internal::GetTestTypeId;\nusing testing::internal::GetTimeInMillis;\nusing testing::internal::GetTypeId;\nusing testing::internal::GetUnitTestImpl;\nusing testing::internal::Int32;\nusing testing::internal::Int32FromEnvOrDie;\nusing testing::internal::IsAProtocolMessage;\nusing testing::internal::IsContainer;\nusing testing::internal::IsContainerTest;\nusing testing::internal::IsNotContainer;\nusing testing::internal::NativeArray;\nusing testing::internal::OsStackTraceGetter;\nusing 
testing::internal::OsStackTraceGetterInterface;\nusing testing::internal::ParseInt32Flag;\nusing testing::internal::RelationToSourceCopy;\nusing testing::internal::RelationToSourceReference;\nusing testing::internal::ShouldRunTestOnShard;\nusing testing::internal::ShouldShard;\nusing testing::internal::ShouldUseColor;\nusing testing::internal::Shuffle;\nusing testing::internal::ShuffleRange;\nusing testing::internal::SkipPrefix;\nusing testing::internal::StreamableToString;\nusing testing::internal::String;\nusing testing::internal::TestEventListenersAccessor;\nusing testing::internal::TestResultAccessor;\nusing testing::internal::UInt32;\nusing testing::internal::UnitTestImpl;\nusing testing::internal::WideStringToUtf8;\nusing testing::internal::edit_distance::CalculateOptimalEdits;\nusing testing::internal::edit_distance::CreateUnifiedDiff;\nusing testing::internal::edit_distance::EditType;\nusing testing::internal::kMaxRandomSeed;\nusing testing::internal::kTestTypeIdInGoogleTest;\nusing testing::kMaxStackTraceDepth;\n\n#if GTEST_HAS_STREAM_REDIRECTION\nusing testing::internal::CaptureStdout;\nusing testing::internal::GetCapturedStdout;\n#endif\n\n#if GTEST_IS_THREADSAFE\nusing testing::internal::ThreadWithParam;\n#endif\n\nclass TestingVector : public std::vector<int> {\n};\n\n::std::ostream& operator<<(::std::ostream& os,\n                           const TestingVector& vector) {\n  os << \"{ \";\n  for (size_t i = 0; i < vector.size(); i++) {\n    os << vector[i] << \" \";\n  }\n  os << \"}\";\n  return os;\n}\n\n// This line tests that we can define tests in an unnamed namespace.\nnamespace {\n\nTEST(GetRandomSeedFromFlagTest, HandlesZero) {\n  const int seed = GetRandomSeedFromFlag(0);\n  EXPECT_LE(1, seed);\n  EXPECT_LE(seed, static_cast<int>(kMaxRandomSeed));\n}\n\nTEST(GetRandomSeedFromFlagTest, PreservesValidSeed) {\n  EXPECT_EQ(1, GetRandomSeedFromFlag(1));\n  EXPECT_EQ(2, GetRandomSeedFromFlag(2));\n  EXPECT_EQ(kMaxRandomSeed - 1, 
GetRandomSeedFromFlag(kMaxRandomSeed - 1));\n  EXPECT_EQ(static_cast<int>(kMaxRandomSeed),\n            GetRandomSeedFromFlag(kMaxRandomSeed));\n}\n\nTEST(GetRandomSeedFromFlagTest, NormalizesInvalidSeed) {\n  const int seed1 = GetRandomSeedFromFlag(-1);\n  EXPECT_LE(1, seed1);\n  EXPECT_LE(seed1, static_cast<int>(kMaxRandomSeed));\n\n  const int seed2 = GetRandomSeedFromFlag(kMaxRandomSeed + 1);\n  EXPECT_LE(1, seed2);\n  EXPECT_LE(seed2, static_cast<int>(kMaxRandomSeed));\n}\n\nTEST(GetNextRandomSeedTest, WorksForValidInput) {\n  EXPECT_EQ(2, GetNextRandomSeed(1));\n  EXPECT_EQ(3, GetNextRandomSeed(2));\n  EXPECT_EQ(static_cast<int>(kMaxRandomSeed),\n            GetNextRandomSeed(kMaxRandomSeed - 1));\n  EXPECT_EQ(1, GetNextRandomSeed(kMaxRandomSeed));\n\n  // We deliberately don't test GetNextRandomSeed() with invalid\n  // inputs, as that requires death tests, which are expensive.  This\n  // is fine as GetNextRandomSeed() is internal and has a\n  // straightforward definition.\n}\n\nstatic void ClearCurrentTestPartResults() {\n  TestResultAccessor::ClearTestPartResults(\n      GetUnitTestImpl()->current_test_result());\n}\n\n// Tests GetTypeId.\n\nTEST(GetTypeIdTest, ReturnsSameValueForSameType) {\n  EXPECT_EQ(GetTypeId<int>(), GetTypeId<int>());\n  EXPECT_EQ(GetTypeId<Test>(), GetTypeId<Test>());\n}\n\nclass SubClassOfTest : public Test {};\nclass AnotherSubClassOfTest : public Test {};\n\nTEST(GetTypeIdTest, ReturnsDifferentValuesForDifferentTypes) {\n  EXPECT_NE(GetTypeId<int>(), GetTypeId<const int>());\n  EXPECT_NE(GetTypeId<int>(), GetTypeId<char>());\n  EXPECT_NE(GetTypeId<int>(), GetTestTypeId());\n  EXPECT_NE(GetTypeId<SubClassOfTest>(), GetTestTypeId());\n  EXPECT_NE(GetTypeId<AnotherSubClassOfTest>(), GetTestTypeId());\n  EXPECT_NE(GetTypeId<AnotherSubClassOfTest>(), GetTypeId<SubClassOfTest>());\n}\n\n// Verifies that GetTestTypeId() returns the same value, no matter it\n// is called from inside Google Test or outside of 
it.\nTEST(GetTestTypeIdTest, ReturnsTheSameValueInsideOrOutsideOfGoogleTest) {\n  EXPECT_EQ(kTestTypeIdInGoogleTest, GetTestTypeId());\n}\n\n// Tests CanonicalizeForStdLibVersioning.\n\nusing ::testing::internal::CanonicalizeForStdLibVersioning;\n\nTEST(CanonicalizeForStdLibVersioning, LeavesUnversionedNamesUnchanged) {\n  EXPECT_EQ(\"std::bind\", CanonicalizeForStdLibVersioning(\"std::bind\"));\n  EXPECT_EQ(\"std::_\", CanonicalizeForStdLibVersioning(\"std::_\"));\n  EXPECT_EQ(\"std::__foo\", CanonicalizeForStdLibVersioning(\"std::__foo\"));\n  EXPECT_EQ(\"gtl::__1::x\", CanonicalizeForStdLibVersioning(\"gtl::__1::x\"));\n  EXPECT_EQ(\"__1::x\", CanonicalizeForStdLibVersioning(\"__1::x\"));\n  EXPECT_EQ(\"::__1::x\", CanonicalizeForStdLibVersioning(\"::__1::x\"));\n}\n\nTEST(CanonicalizeForStdLibVersioning, ElidesDoubleUnderNames) {\n  EXPECT_EQ(\"std::bind\", CanonicalizeForStdLibVersioning(\"std::__1::bind\"));\n  EXPECT_EQ(\"std::_\", CanonicalizeForStdLibVersioning(\"std::__1::_\"));\n\n  EXPECT_EQ(\"std::bind\", CanonicalizeForStdLibVersioning(\"std::__g::bind\"));\n  EXPECT_EQ(\"std::_\", CanonicalizeForStdLibVersioning(\"std::__g::_\"));\n\n  EXPECT_EQ(\"std::bind\",\n            CanonicalizeForStdLibVersioning(\"std::__google::bind\"));\n  EXPECT_EQ(\"std::_\", CanonicalizeForStdLibVersioning(\"std::__google::_\"));\n}\n\n// Tests FormatTimeInMillisAsSeconds().\n\nTEST(FormatTimeInMillisAsSecondsTest, FormatsZero) {\n  EXPECT_EQ(\"0\", FormatTimeInMillisAsSeconds(0));\n}\n\nTEST(FormatTimeInMillisAsSecondsTest, FormatsPositiveNumber) {\n  EXPECT_EQ(\"0.003\", FormatTimeInMillisAsSeconds(3));\n  EXPECT_EQ(\"0.01\", FormatTimeInMillisAsSeconds(10));\n  EXPECT_EQ(\"0.2\", FormatTimeInMillisAsSeconds(200));\n  EXPECT_EQ(\"1.2\", FormatTimeInMillisAsSeconds(1200));\n  EXPECT_EQ(\"3\", FormatTimeInMillisAsSeconds(3000));\n}\n\nTEST(FormatTimeInMillisAsSecondsTest, FormatsNegativeNumber) {\n  EXPECT_EQ(\"-0.003\", FormatTimeInMillisAsSeconds(-3));\n  
EXPECT_EQ(\"-0.01\", FormatTimeInMillisAsSeconds(-10));\n  EXPECT_EQ(\"-0.2\", FormatTimeInMillisAsSeconds(-200));\n  EXPECT_EQ(\"-1.2\", FormatTimeInMillisAsSeconds(-1200));\n  EXPECT_EQ(\"-3\", FormatTimeInMillisAsSeconds(-3000));\n}\n\n// Tests FormatEpochTimeInMillisAsIso8601().  The correctness of conversion\n// for particular dates below was verified in Python using\n// datetime.datetime.fromutctimestamp(<timetamp>/1000).\n\n// FormatEpochTimeInMillisAsIso8601 depends on the current timezone, so we\n// have to set up a particular timezone to obtain predictable results.\nclass FormatEpochTimeInMillisAsIso8601Test : public Test {\n public:\n  // On Cygwin, GCC doesn't allow unqualified integer literals to exceed\n  // 32 bits, even when 64-bit integer types are available.  We have to\n  // force the constants to have a 64-bit type here.\n  static const TimeInMillis kMillisPerSec = 1000;\n\n private:\n  void SetUp() override {\n    saved_tz_ = nullptr;\n\n    GTEST_DISABLE_MSC_DEPRECATED_PUSH_(/* getenv, strdup: deprecated */)\n    if (getenv(\"TZ\"))\n      saved_tz_ = strdup(getenv(\"TZ\"));\n    GTEST_DISABLE_MSC_DEPRECATED_POP_()\n\n    // Set up the time zone for FormatEpochTimeInMillisAsIso8601 to use.  We\n    // cannot use the local time zone because the function's output depends\n    // on the time zone.\n    SetTimeZone(\"UTC+00\");\n  }\n\n  void TearDown() override {\n    SetTimeZone(saved_tz_);\n    free(const_cast<char*>(saved_tz_));\n    saved_tz_ = nullptr;\n  }\n\n  static void SetTimeZone(const char* time_zone) {\n    // tzset() distinguishes between the TZ variable being present and empty\n    // and not being present, so we have to consider the case of time_zone\n    // being NULL.\n#if _MSC_VER || GTEST_OS_WINDOWS_MINGW\n    // ...Unless it's MSVC, whose standard library's _putenv doesn't\n    // distinguish between an empty and a missing variable.\n    const std::string env_var =\n        std::string(\"TZ=\") + (time_zone ? 
time_zone : \"\");\n    _putenv(env_var.c_str());\n    GTEST_DISABLE_MSC_WARNINGS_PUSH_(4996 /* deprecated function */)\n    tzset();\n    GTEST_DISABLE_MSC_WARNINGS_POP_()\n#else\n    if (time_zone) {\n      setenv((\"TZ\"), time_zone, 1);\n    } else {\n      unsetenv(\"TZ\");\n    }\n    tzset();\n#endif\n  }\n\n  const char* saved_tz_;\n};\n\nconst TimeInMillis FormatEpochTimeInMillisAsIso8601Test::kMillisPerSec;\n\nTEST_F(FormatEpochTimeInMillisAsIso8601Test, PrintsTwoDigitSegments) {\n  EXPECT_EQ(\"2011-10-31T18:52:42\",\n            FormatEpochTimeInMillisAsIso8601(1320087162 * kMillisPerSec));\n}\n\nTEST_F(FormatEpochTimeInMillisAsIso8601Test, MillisecondsDoNotAffectResult) {\n  EXPECT_EQ(\n      \"2011-10-31T18:52:42\",\n      FormatEpochTimeInMillisAsIso8601(1320087162 * kMillisPerSec + 234));\n}\n\nTEST_F(FormatEpochTimeInMillisAsIso8601Test, PrintsLeadingZeroes) {\n  EXPECT_EQ(\"2011-09-03T05:07:02\",\n            FormatEpochTimeInMillisAsIso8601(1315026422 * kMillisPerSec));\n}\n\nTEST_F(FormatEpochTimeInMillisAsIso8601Test, Prints24HourTime) {\n  EXPECT_EQ(\"2011-09-28T17:08:22\",\n            FormatEpochTimeInMillisAsIso8601(1317229702 * kMillisPerSec));\n}\n\nTEST_F(FormatEpochTimeInMillisAsIso8601Test, PrintsEpochStart) {\n  EXPECT_EQ(\"1970-01-01T00:00:00\", FormatEpochTimeInMillisAsIso8601(0));\n}\n\n# ifdef __BORLANDC__\n// Silences warnings: \"Condition is always true\", \"Unreachable code\"\n#  pragma option push -w-ccc -w-rch\n# endif\n\n// Tests that the LHS of EXPECT_EQ or ASSERT_EQ can be used as a null literal\n// when the RHS is a pointer type.\nTEST(NullLiteralTest, LHSAllowsNullLiterals) {\n  EXPECT_EQ(0, static_cast<void*>(nullptr));     // NOLINT\n  ASSERT_EQ(0, static_cast<void*>(nullptr));     // NOLINT\n  EXPECT_EQ(NULL, static_cast<void*>(nullptr));  // NOLINT\n  ASSERT_EQ(NULL, static_cast<void*>(nullptr));  // NOLINT\n  EXPECT_EQ(nullptr, static_cast<void*>(nullptr));\n  ASSERT_EQ(nullptr, static_cast<void*>(nullptr));\n\n  
const int* const p = nullptr;\n  EXPECT_EQ(0, p);     // NOLINT\n  ASSERT_EQ(0, p);     // NOLINT\n  EXPECT_EQ(NULL, p);  // NOLINT\n  ASSERT_EQ(NULL, p);  // NOLINT\n  EXPECT_EQ(nullptr, p);\n  ASSERT_EQ(nullptr, p);\n}\n\nstruct ConvertToAll {\n  template <typename T>\n  operator T() const {  // NOLINT\n    return T();\n  }\n};\n\nstruct ConvertToPointer {\n  template <class T>\n  operator T*() const {  // NOLINT\n    return nullptr;\n  }\n};\n\nstruct ConvertToAllButNoPointers {\n  template <typename T,\n            typename std::enable_if<!std::is_pointer<T>::value, int>::type = 0>\n  operator T() const {  // NOLINT\n    return T();\n  }\n};\n\nstruct MyType {};\ninline bool operator==(MyType const&, MyType const&) { return true; }\n\nTEST(NullLiteralTest, ImplicitConversion) {\n  EXPECT_EQ(ConvertToPointer{}, static_cast<void*>(nullptr));\n#if !defined(__GNUC__) || defined(__clang__)\n  // Disabled due to GCC bug gcc.gnu.org/PR89580\n  EXPECT_EQ(ConvertToAll{}, static_cast<void*>(nullptr));\n#endif\n  EXPECT_EQ(ConvertToAll{}, MyType{});\n  EXPECT_EQ(ConvertToAllButNoPointers{}, MyType{});\n}\n\n#ifdef __clang__\n#pragma clang diagnostic push\n#if __has_warning(\"-Wzero-as-null-pointer-constant\")\n#pragma clang diagnostic error \"-Wzero-as-null-pointer-constant\"\n#endif\n#endif\n\nTEST(NullLiteralTest, NoConversionNoWarning) {\n  // Test that gtests detection and handling of null pointer constants\n  // doesn't trigger a warning when '0' isn't actually used as null.\n  EXPECT_EQ(0, 0);\n  ASSERT_EQ(0, 0);\n}\n\n#ifdef __clang__\n#pragma clang diagnostic pop\n#endif\n\n# ifdef __BORLANDC__\n// Restores warnings after previous \"#pragma option push\" suppressed them.\n#  pragma option pop\n# endif\n\n//\n// Tests CodePointToUtf8().\n\n// Tests that the NUL character L'\\0' is encoded correctly.\nTEST(CodePointToUtf8Test, CanEncodeNul) {\n  EXPECT_EQ(\"\", CodePointToUtf8(L'\\0'));\n}\n\n// Tests that ASCII characters are encoded 
correctly.\nTEST(CodePointToUtf8Test, CanEncodeAscii) {\n  EXPECT_EQ(\"a\", CodePointToUtf8(L'a'));\n  EXPECT_EQ(\"Z\", CodePointToUtf8(L'Z'));\n  EXPECT_EQ(\"&\", CodePointToUtf8(L'&'));\n  EXPECT_EQ(\"\\x7F\", CodePointToUtf8(L'\\x7F'));\n}\n\n// Tests that Unicode code-points that have 8 to 11 bits are encoded\n// as 110xxxxx 10xxxxxx.\nTEST(CodePointToUtf8Test, CanEncode8To11Bits) {\n  // 000 1101 0011 => 110-00011 10-010011\n  EXPECT_EQ(\"\\xC3\\x93\", CodePointToUtf8(L'\\xD3'));\n\n  // 101 0111 0110 => 110-10101 10-110110\n  // Some compilers (e.g., GCC on MinGW) cannot handle non-ASCII codepoints\n  // in wide strings and wide chars. In order to accommodate them, we have to\n  // introduce such character constants as integers.\n  EXPECT_EQ(\"\\xD5\\xB6\",\n            CodePointToUtf8(static_cast<wchar_t>(0x576)));\n}\n\n// Tests that Unicode code-points that have 12 to 16 bits are encoded\n// as 1110xxxx 10xxxxxx 10xxxxxx.\nTEST(CodePointToUtf8Test, CanEncode12To16Bits) {\n  // 0000 1000 1101 0011 => 1110-0000 10-100011 10-010011\n  EXPECT_EQ(\"\\xE0\\xA3\\x93\",\n            CodePointToUtf8(static_cast<wchar_t>(0x8D3)));\n\n  // 1100 0111 0100 1101 => 1110-1100 10-011101 10-001101\n  EXPECT_EQ(\"\\xEC\\x9D\\x8D\",\n            CodePointToUtf8(static_cast<wchar_t>(0xC74D)));\n}\n\n#if !GTEST_WIDE_STRING_USES_UTF16_\n// Tests in this group require a wchar_t to hold > 16 bits, and thus\n// are skipped on Windows, and Cygwin, where a wchar_t is\n// 16-bit wide. 
This code may not compile on those systems.\n\n// Tests that Unicode code-points that have 17 to 21 bits are encoded\n// as 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx.\nTEST(CodePointToUtf8Test, CanEncode17To21Bits) {\n  // 0 0001 0000 1000 1101 0011 => 11110-000 10-010000 10-100011 10-010011\n  EXPECT_EQ(\"\\xF0\\x90\\xA3\\x93\", CodePointToUtf8(L'\\x108D3'));\n\n  // 0 0001 0000 0100 0000 0000 => 11110-000 10-010000 10-010000 10-000000\n  EXPECT_EQ(\"\\xF0\\x90\\x90\\x80\", CodePointToUtf8(L'\\x10400'));\n\n  // 1 0000 1000 0110 0011 0100 => 11110-100 10-001000 10-011000 10-110100\n  EXPECT_EQ(\"\\xF4\\x88\\x98\\xB4\", CodePointToUtf8(L'\\x108634'));\n}\n\n// Tests that encoding an invalid code-point generates the expected result.\nTEST(CodePointToUtf8Test, CanEncodeInvalidCodePoint) {\n  EXPECT_EQ(\"(Invalid Unicode 0x1234ABCD)\", CodePointToUtf8(L'\\x1234ABCD'));\n}\n\n#endif  // !GTEST_WIDE_STRING_USES_UTF16_\n\n// Tests WideStringToUtf8().\n\n// Tests that the NUL character L'\\0' is encoded correctly.\nTEST(WideStringToUtf8Test, CanEncodeNul) {\n  EXPECT_STREQ(\"\", WideStringToUtf8(L\"\", 0).c_str());\n  EXPECT_STREQ(\"\", WideStringToUtf8(L\"\", -1).c_str());\n}\n\n// Tests that ASCII strings are encoded correctly.\nTEST(WideStringToUtf8Test, CanEncodeAscii) {\n  EXPECT_STREQ(\"a\", WideStringToUtf8(L\"a\", 1).c_str());\n  EXPECT_STREQ(\"ab\", WideStringToUtf8(L\"ab\", 2).c_str());\n  EXPECT_STREQ(\"a\", WideStringToUtf8(L\"a\", -1).c_str());\n  EXPECT_STREQ(\"ab\", WideStringToUtf8(L\"ab\", -1).c_str());\n}\n\n// Tests that Unicode code-points that have 8 to 11 bits are encoded\n// as 110xxxxx 10xxxxxx.\nTEST(WideStringToUtf8Test, CanEncode8To11Bits) {\n  // 000 1101 0011 => 110-00011 10-010011\n  EXPECT_STREQ(\"\\xC3\\x93\", WideStringToUtf8(L\"\\xD3\", 1).c_str());\n  EXPECT_STREQ(\"\\xC3\\x93\", WideStringToUtf8(L\"\\xD3\", -1).c_str());\n\n  // 101 0111 0110 => 110-10101 10-110110\n  const wchar_t s[] = { 0x576, '\\0' };\n  EXPECT_STREQ(\"\\xD5\\xB6\", 
WideStringToUtf8(s, 1).c_str());\n  EXPECT_STREQ(\"\\xD5\\xB6\", WideStringToUtf8(s, -1).c_str());\n}\n\n// Tests that Unicode code-points that have 12 to 16 bits are encoded\n// as 1110xxxx 10xxxxxx 10xxxxxx.\nTEST(WideStringToUtf8Test, CanEncode12To16Bits) {\n  // 0000 1000 1101 0011 => 1110-0000 10-100011 10-010011\n  const wchar_t s1[] = { 0x8D3, '\\0' };\n  EXPECT_STREQ(\"\\xE0\\xA3\\x93\", WideStringToUtf8(s1, 1).c_str());\n  EXPECT_STREQ(\"\\xE0\\xA3\\x93\", WideStringToUtf8(s1, -1).c_str());\n\n  // 1100 0111 0100 1101 => 1110-1100 10-011101 10-001101\n  const wchar_t s2[] = { 0xC74D, '\\0' };\n  EXPECT_STREQ(\"\\xEC\\x9D\\x8D\", WideStringToUtf8(s2, 1).c_str());\n  EXPECT_STREQ(\"\\xEC\\x9D\\x8D\", WideStringToUtf8(s2, -1).c_str());\n}\n\n// Tests that the conversion stops when the function encounters \\0 character.\nTEST(WideStringToUtf8Test, StopsOnNulCharacter) {\n  EXPECT_STREQ(\"ABC\", WideStringToUtf8(L\"ABC\\0XYZ\", 100).c_str());\n}\n\n// Tests that the conversion stops when the function reaches the limit\n// specified by the 'length' parameter.\nTEST(WideStringToUtf8Test, StopsWhenLengthLimitReached) {\n  EXPECT_STREQ(\"ABC\", WideStringToUtf8(L\"ABCDEF\", 3).c_str());\n}\n\n#if !GTEST_WIDE_STRING_USES_UTF16_\n// Tests that Unicode code-points that have 17 to 21 bits are encoded\n// as 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx. 
This code may not compile\n// on the systems using UTF-16 encoding.\nTEST(WideStringToUtf8Test, CanEncode17To21Bits) {\n  // 0 0001 0000 1000 1101 0011 => 11110-000 10-010000 10-100011 10-010011\n  EXPECT_STREQ(\"\\xF0\\x90\\xA3\\x93\", WideStringToUtf8(L\"\\x108D3\", 1).c_str());\n  EXPECT_STREQ(\"\\xF0\\x90\\xA3\\x93\", WideStringToUtf8(L\"\\x108D3\", -1).c_str());\n\n  // 1 0000 1000 0110 0011 0100 => 11110-100 10-001000 10-011000 10-110100\n  EXPECT_STREQ(\"\\xF4\\x88\\x98\\xB4\", WideStringToUtf8(L\"\\x108634\", 1).c_str());\n  EXPECT_STREQ(\"\\xF4\\x88\\x98\\xB4\", WideStringToUtf8(L\"\\x108634\", -1).c_str());\n}\n\n// Tests that encoding an invalid code-point generates the expected result.\nTEST(WideStringToUtf8Test, CanEncodeInvalidCodePoint) {\n  EXPECT_STREQ(\"(Invalid Unicode 0xABCDFF)\",\n               WideStringToUtf8(L\"\\xABCDFF\", -1).c_str());\n}\n#else  // !GTEST_WIDE_STRING_USES_UTF16_\n// Tests that surrogate pairs are encoded correctly on the systems using\n// UTF-16 encoding in the wide strings.\nTEST(WideStringToUtf8Test, CanEncodeValidUtf16SUrrogatePairs) {\n  const wchar_t s[] = { 0xD801, 0xDC00, '\\0' };\n  EXPECT_STREQ(\"\\xF0\\x90\\x90\\x80\", WideStringToUtf8(s, -1).c_str());\n}\n\n// Tests that encoding an invalid UTF-16 surrogate pair\n// generates the expected result.\nTEST(WideStringToUtf8Test, CanEncodeInvalidUtf16SurrogatePair) {\n  // Leading surrogate is at the end of the string.\n  const wchar_t s1[] = { 0xD800, '\\0' };\n  EXPECT_STREQ(\"\\xED\\xA0\\x80\", WideStringToUtf8(s1, -1).c_str());\n  // Leading surrogate is not followed by the trailing surrogate.\n  const wchar_t s2[] = { 0xD800, 'M', '\\0' };\n  EXPECT_STREQ(\"\\xED\\xA0\\x80M\", WideStringToUtf8(s2, -1).c_str());\n  // Trailing surrogate appearas without a leading surrogate.\n  const wchar_t s3[] = { 0xDC00, 'P', 'Q', 'R', '\\0' };\n  EXPECT_STREQ(\"\\xED\\xB0\\x80PQR\", WideStringToUtf8(s3, -1).c_str());\n}\n#endif  // !GTEST_WIDE_STRING_USES_UTF16_\n\n// Tests 
that codepoint concatenation works correctly.\n#if !GTEST_WIDE_STRING_USES_UTF16_\nTEST(WideStringToUtf8Test, ConcatenatesCodepointsCorrectly) {\n  const wchar_t s[] = { 0x108634, 0xC74D, '\\n', 0x576, 0x8D3, 0x108634, '\\0'};\n  EXPECT_STREQ(\n      \"\\xF4\\x88\\x98\\xB4\"\n          \"\\xEC\\x9D\\x8D\"\n          \"\\n\"\n          \"\\xD5\\xB6\"\n          \"\\xE0\\xA3\\x93\"\n          \"\\xF4\\x88\\x98\\xB4\",\n      WideStringToUtf8(s, -1).c_str());\n}\n#else\nTEST(WideStringToUtf8Test, ConcatenatesCodepointsCorrectly) {\n  const wchar_t s[] = { 0xC74D, '\\n', 0x576, 0x8D3, '\\0'};\n  EXPECT_STREQ(\n      \"\\xEC\\x9D\\x8D\" \"\\n\" \"\\xD5\\xB6\" \"\\xE0\\xA3\\x93\",\n      WideStringToUtf8(s, -1).c_str());\n}\n#endif  // !GTEST_WIDE_STRING_USES_UTF16_\n\n// Tests the Random class.\n\nTEST(RandomDeathTest, GeneratesCrashesOnInvalidRange) {\n  testing::internal::Random random(42);\n  EXPECT_DEATH_IF_SUPPORTED(\n      random.Generate(0),\n      \"Cannot generate a number in the range \\\\[0, 0\\\\)\");\n  EXPECT_DEATH_IF_SUPPORTED(\n      random.Generate(testing::internal::Random::kMaxRange + 1),\n      \"Generation of a number in \\\\[0, 2147483649\\\\) was requested, \"\n      \"but this can only generate numbers in \\\\[0, 2147483648\\\\)\");\n}\n\nTEST(RandomTest, GeneratesNumbersWithinRange) {\n  const UInt32 kRange = 10000;\n  testing::internal::Random random(12345);\n  for (int i = 0; i < 10; i++) {\n    EXPECT_LT(random.Generate(kRange), kRange) << \" for iteration \" << i;\n  }\n\n  testing::internal::Random random2(testing::internal::Random::kMaxRange);\n  for (int i = 0; i < 10; i++) {\n    EXPECT_LT(random2.Generate(kRange), kRange) << \" for iteration \" << i;\n  }\n}\n\nTEST(RandomTest, RepeatsWhenReseeded) {\n  const int kSeed = 123;\n  const int kArraySize = 10;\n  const UInt32 kRange = 10000;\n  UInt32 values[kArraySize];\n\n  testing::internal::Random random(kSeed);\n  for (int i = 0; i < kArraySize; i++) {\n    values[i] = 
random.Generate(kRange);\n  }\n\n  random.Reseed(kSeed);\n  for (int i = 0; i < kArraySize; i++) {\n    EXPECT_EQ(values[i], random.Generate(kRange)) << \" for iteration \" << i;\n  }\n}\n\n// Tests STL container utilities.\n\n// Tests CountIf().\n\nstatic bool IsPositive(int n) { return n > 0; }\n\nTEST(ContainerUtilityTest, CountIf) {\n  std::vector<int> v;\n  EXPECT_EQ(0, CountIf(v, IsPositive));  // Works for an empty container.\n\n  v.push_back(-1);\n  v.push_back(0);\n  EXPECT_EQ(0, CountIf(v, IsPositive));  // Works when no value satisfies.\n\n  v.push_back(2);\n  v.push_back(-10);\n  v.push_back(10);\n  EXPECT_EQ(2, CountIf(v, IsPositive));\n}\n\n// Tests ForEach().\n\nstatic int g_sum = 0;\nstatic void Accumulate(int n) { g_sum += n; }\n\nTEST(ContainerUtilityTest, ForEach) {\n  std::vector<int> v;\n  g_sum = 0;\n  ForEach(v, Accumulate);\n  EXPECT_EQ(0, g_sum);  // Works for an empty container;\n\n  g_sum = 0;\n  v.push_back(1);\n  ForEach(v, Accumulate);\n  EXPECT_EQ(1, g_sum);  // Works for a container with one element.\n\n  g_sum = 0;\n  v.push_back(20);\n  v.push_back(300);\n  ForEach(v, Accumulate);\n  EXPECT_EQ(321, g_sum);\n}\n\n// Tests GetElementOr().\nTEST(ContainerUtilityTest, GetElementOr) {\n  std::vector<char> a;\n  EXPECT_EQ('x', GetElementOr(a, 0, 'x'));\n\n  a.push_back('a');\n  a.push_back('b');\n  EXPECT_EQ('a', GetElementOr(a, 0, 'x'));\n  EXPECT_EQ('b', GetElementOr(a, 1, 'x'));\n  EXPECT_EQ('x', GetElementOr(a, -2, 'x'));\n  EXPECT_EQ('x', GetElementOr(a, 2, 'x'));\n}\n\nTEST(ContainerUtilityDeathTest, ShuffleRange) {\n  std::vector<int> a;\n  a.push_back(0);\n  a.push_back(1);\n  a.push_back(2);\n  testing::internal::Random random(1);\n\n  EXPECT_DEATH_IF_SUPPORTED(\n      ShuffleRange(&random, -1, 1, &a),\n      \"Invalid shuffle range start -1: must be in range \\\\[0, 3\\\\]\");\n  EXPECT_DEATH_IF_SUPPORTED(\n      ShuffleRange(&random, 4, 4, &a),\n      \"Invalid shuffle range start 4: must be in range \\\\[0, 3\\\\]\");\n  
EXPECT_DEATH_IF_SUPPORTED(\n      ShuffleRange(&random, 3, 2, &a),\n      \"Invalid shuffle range finish 2: must be in range \\\\[3, 3\\\\]\");\n  EXPECT_DEATH_IF_SUPPORTED(\n      ShuffleRange(&random, 3, 4, &a),\n      \"Invalid shuffle range finish 4: must be in range \\\\[3, 3\\\\]\");\n}\n\nclass VectorShuffleTest : public Test {\n protected:\n  static const size_t kVectorSize = 20;\n\n  VectorShuffleTest() : random_(1) {\n    for (int i = 0; i < static_cast<int>(kVectorSize); i++) {\n      vector_.push_back(i);\n    }\n  }\n\n  static bool VectorIsCorrupt(const TestingVector& vector) {\n    if (kVectorSize != vector.size()) {\n      return true;\n    }\n\n    bool found_in_vector[kVectorSize] = { false };\n    for (size_t i = 0; i < vector.size(); i++) {\n      const int e = vector[i];\n      if (e < 0 || e >= static_cast<int>(kVectorSize) || found_in_vector[e]) {\n        return true;\n      }\n      found_in_vector[e] = true;\n    }\n\n    // Vector size is correct, elements' range is correct, no\n    // duplicate elements.  
Therefore no corruption has occurred.\n    return false;\n  }\n\n  static bool VectorIsNotCorrupt(const TestingVector& vector) {\n    return !VectorIsCorrupt(vector);\n  }\n\n  static bool RangeIsShuffled(const TestingVector& vector, int begin, int end) {\n    for (int i = begin; i < end; i++) {\n      if (i != vector[static_cast<size_t>(i)]) {\n        return true;\n      }\n    }\n    return false;\n  }\n\n  static bool RangeIsUnshuffled(\n      const TestingVector& vector, int begin, int end) {\n    return !RangeIsShuffled(vector, begin, end);\n  }\n\n  static bool VectorIsShuffled(const TestingVector& vector) {\n    return RangeIsShuffled(vector, 0, static_cast<int>(vector.size()));\n  }\n\n  static bool VectorIsUnshuffled(const TestingVector& vector) {\n    return !VectorIsShuffled(vector);\n  }\n\n  testing::internal::Random random_;\n  TestingVector vector_;\n};  // class VectorShuffleTest\n\nconst size_t VectorShuffleTest::kVectorSize;\n\nTEST_F(VectorShuffleTest, HandlesEmptyRange) {\n  // Tests an empty range at the beginning...\n  ShuffleRange(&random_, 0, 0, &vector_);\n  ASSERT_PRED1(VectorIsNotCorrupt, vector_);\n  ASSERT_PRED1(VectorIsUnshuffled, vector_);\n\n  // ...in the middle...\n  ShuffleRange(&random_, kVectorSize/2, kVectorSize/2, &vector_);\n  ASSERT_PRED1(VectorIsNotCorrupt, vector_);\n  ASSERT_PRED1(VectorIsUnshuffled, vector_);\n\n  // ...at the end...\n  ShuffleRange(&random_, kVectorSize - 1, kVectorSize - 1, &vector_);\n  ASSERT_PRED1(VectorIsNotCorrupt, vector_);\n  ASSERT_PRED1(VectorIsUnshuffled, vector_);\n\n  // ...and past the end.\n  ShuffleRange(&random_, kVectorSize, kVectorSize, &vector_);\n  ASSERT_PRED1(VectorIsNotCorrupt, vector_);\n  ASSERT_PRED1(VectorIsUnshuffled, vector_);\n}\n\nTEST_F(VectorShuffleTest, HandlesRangeOfSizeOne) {\n  // Tests a size one range at the beginning...\n  ShuffleRange(&random_, 0, 1, &vector_);\n  ASSERT_PRED1(VectorIsNotCorrupt, vector_);\n  ASSERT_PRED1(VectorIsUnshuffled, vector_);\n\n  // 
...in the middle...\n  ShuffleRange(&random_, kVectorSize/2, kVectorSize/2 + 1, &vector_);\n  ASSERT_PRED1(VectorIsNotCorrupt, vector_);\n  ASSERT_PRED1(VectorIsUnshuffled, vector_);\n\n  // ...and at the end.\n  ShuffleRange(&random_, kVectorSize - 1, kVectorSize, &vector_);\n  ASSERT_PRED1(VectorIsNotCorrupt, vector_);\n  ASSERT_PRED1(VectorIsUnshuffled, vector_);\n}\n\n// Because we use our own random number generator and a fixed seed,\n// we can guarantee that the following \"random\" tests will succeed.\n\nTEST_F(VectorShuffleTest, ShufflesEntireVector) {\n  Shuffle(&random_, &vector_);\n  ASSERT_PRED1(VectorIsNotCorrupt, vector_);\n  EXPECT_FALSE(VectorIsUnshuffled(vector_)) << vector_;\n\n  // Tests the first and last elements in particular to ensure that\n  // there are no off-by-one problems in our shuffle algorithm.\n  EXPECT_NE(0, vector_[0]);\n  EXPECT_NE(static_cast<int>(kVectorSize - 1), vector_[kVectorSize - 1]);\n}\n\nTEST_F(VectorShuffleTest, ShufflesStartOfVector) {\n  const int kRangeSize = kVectorSize/2;\n\n  ShuffleRange(&random_, 0, kRangeSize, &vector_);\n\n  ASSERT_PRED1(VectorIsNotCorrupt, vector_);\n  EXPECT_PRED3(RangeIsShuffled, vector_, 0, kRangeSize);\n  EXPECT_PRED3(RangeIsUnshuffled, vector_, kRangeSize,\n               static_cast<int>(kVectorSize));\n}\n\nTEST_F(VectorShuffleTest, ShufflesEndOfVector) {\n  const int kRangeSize = kVectorSize / 2;\n  ShuffleRange(&random_, kRangeSize, kVectorSize, &vector_);\n\n  ASSERT_PRED1(VectorIsNotCorrupt, vector_);\n  EXPECT_PRED3(RangeIsUnshuffled, vector_, 0, kRangeSize);\n  EXPECT_PRED3(RangeIsShuffled, vector_, kRangeSize,\n               static_cast<int>(kVectorSize));\n}\n\nTEST_F(VectorShuffleTest, ShufflesMiddleOfVector) {\n  const int kRangeSize = static_cast<int>(kVectorSize) / 3;\n  ShuffleRange(&random_, kRangeSize, 2*kRangeSize, &vector_);\n\n  ASSERT_PRED1(VectorIsNotCorrupt, vector_);\n  EXPECT_PRED3(RangeIsUnshuffled, vector_, 0, kRangeSize);\n  EXPECT_PRED3(RangeIsShuffled, 
vector_, kRangeSize, 2*kRangeSize);\n  EXPECT_PRED3(RangeIsUnshuffled, vector_, 2 * kRangeSize,\n               static_cast<int>(kVectorSize));\n}\n\nTEST_F(VectorShuffleTest, ShufflesRepeatably) {\n  TestingVector vector2;\n  for (size_t i = 0; i < kVectorSize; i++) {\n    vector2.push_back(static_cast<int>(i));\n  }\n\n  random_.Reseed(1234);\n  Shuffle(&random_, &vector_);\n  random_.Reseed(1234);\n  Shuffle(&random_, &vector2);\n\n  ASSERT_PRED1(VectorIsNotCorrupt, vector_);\n  ASSERT_PRED1(VectorIsNotCorrupt, vector2);\n\n  for (size_t i = 0; i < kVectorSize; i++) {\n    EXPECT_EQ(vector_[i], vector2[i]) << \" where i is \" << i;\n  }\n}\n\n// Tests the size of the AssertHelper class.\n\nTEST(AssertHelperTest, AssertHelperIsSmall) {\n  // To avoid breaking clients that use lots of assertions in one\n  // function, we cannot grow the size of AssertHelper.\n  EXPECT_LE(sizeof(testing::internal::AssertHelper), sizeof(void*));\n}\n\n// Tests String::EndsWithCaseInsensitive().\nTEST(StringTest, EndsWithCaseInsensitive) {\n  EXPECT_TRUE(String::EndsWithCaseInsensitive(\"foobar\", \"BAR\"));\n  EXPECT_TRUE(String::EndsWithCaseInsensitive(\"foobaR\", \"bar\"));\n  EXPECT_TRUE(String::EndsWithCaseInsensitive(\"foobar\", \"\"));\n  EXPECT_TRUE(String::EndsWithCaseInsensitive(\"\", \"\"));\n\n  EXPECT_FALSE(String::EndsWithCaseInsensitive(\"Foobar\", \"foo\"));\n  EXPECT_FALSE(String::EndsWithCaseInsensitive(\"foobar\", \"Foo\"));\n  EXPECT_FALSE(String::EndsWithCaseInsensitive(\"\", \"foo\"));\n}\n\n// C++Builder's preprocessor is buggy; it fails to expand macros that\n// appear in macro parameters after wide char literals.  
Provide an alias\n// for NULL as a workaround.\nstatic const wchar_t* const kNull = nullptr;\n\n// Tests String::CaseInsensitiveWideCStringEquals\nTEST(StringTest, CaseInsensitiveWideCStringEquals) {\n  EXPECT_TRUE(String::CaseInsensitiveWideCStringEquals(nullptr, nullptr));\n  EXPECT_FALSE(String::CaseInsensitiveWideCStringEquals(kNull, L\"\"));\n  EXPECT_FALSE(String::CaseInsensitiveWideCStringEquals(L\"\", kNull));\n  EXPECT_FALSE(String::CaseInsensitiveWideCStringEquals(kNull, L\"foobar\"));\n  EXPECT_FALSE(String::CaseInsensitiveWideCStringEquals(L\"foobar\", kNull));\n  EXPECT_TRUE(String::CaseInsensitiveWideCStringEquals(L\"foobar\", L\"foobar\"));\n  EXPECT_TRUE(String::CaseInsensitiveWideCStringEquals(L\"foobar\", L\"FOOBAR\"));\n  EXPECT_TRUE(String::CaseInsensitiveWideCStringEquals(L\"FOOBAR\", L\"foobar\"));\n}\n\n#if GTEST_OS_WINDOWS\n\n// Tests String::ShowWideCString().\nTEST(StringTest, ShowWideCString) {\n  EXPECT_STREQ(\"(null)\",\n               String::ShowWideCString(NULL).c_str());\n  EXPECT_STREQ(\"\", String::ShowWideCString(L\"\").c_str());\n  EXPECT_STREQ(\"foo\", String::ShowWideCString(L\"foo\").c_str());\n}\n\n# if GTEST_OS_WINDOWS_MOBILE\nTEST(StringTest, AnsiAndUtf16Null) {\n  EXPECT_EQ(NULL, String::AnsiToUtf16(NULL));\n  EXPECT_EQ(NULL, String::Utf16ToAnsi(NULL));\n}\n\nTEST(StringTest, AnsiAndUtf16ConvertBasic) {\n  const char* ansi = String::Utf16ToAnsi(L\"str\");\n  EXPECT_STREQ(\"str\", ansi);\n  delete [] ansi;\n  const WCHAR* utf16 = String::AnsiToUtf16(\"str\");\n  EXPECT_EQ(0, wcsncmp(L\"str\", utf16, 3));\n  delete [] utf16;\n}\n\nTEST(StringTest, AnsiAndUtf16ConvertPathChars) {\n  const char* ansi = String::Utf16ToAnsi(L\".:\\\\ \\\"*?\");\n  EXPECT_STREQ(\".:\\\\ \\\"*?\", ansi);\n  delete [] ansi;\n  const WCHAR* utf16 = String::AnsiToUtf16(\".:\\\\ \\\"*?\");\n  EXPECT_EQ(0, wcsncmp(L\".:\\\\ \\\"*?\", utf16, 3));\n  delete [] utf16;\n}\n# endif  // GTEST_OS_WINDOWS_MOBILE\n\n#endif  // GTEST_OS_WINDOWS\n\n// Tests 
TestProperty construction.\nTEST(TestPropertyTest, StringValue) {\n  TestProperty property(\"key\", \"1\");\n  EXPECT_STREQ(\"key\", property.key());\n  EXPECT_STREQ(\"1\", property.value());\n}\n\n// Tests TestProperty replacing a value.\nTEST(TestPropertyTest, ReplaceStringValue) {\n  TestProperty property(\"key\", \"1\");\n  EXPECT_STREQ(\"1\", property.value());\n  property.SetValue(\"2\");\n  EXPECT_STREQ(\"2\", property.value());\n}\n\n// AddFatalFailure() and AddNonfatalFailure() must be stand-alone\n// functions (i.e. their definitions cannot be inlined at the call\n// sites), or C++Builder won't compile the code.\nstatic void AddFatalFailure() {\n  FAIL() << \"Expected fatal failure.\";\n}\n\nstatic void AddNonfatalFailure() {\n  ADD_FAILURE() << \"Expected non-fatal failure.\";\n}\n\nclass ScopedFakeTestPartResultReporterTest : public Test {\n public:  // Must be public and not protected due to a bug in g++ 3.4.2.\n  enum FailureMode {\n    FATAL_FAILURE,\n    NONFATAL_FAILURE\n  };\n  static void AddFailure(FailureMode failure) {\n    if (failure == FATAL_FAILURE) {\n      AddFatalFailure();\n    } else {\n      AddNonfatalFailure();\n    }\n  }\n};\n\n// Tests that ScopedFakeTestPartResultReporter intercepts test\n// failures.\nTEST_F(ScopedFakeTestPartResultReporterTest, InterceptsTestFailures) {\n  TestPartResultArray results;\n  {\n    ScopedFakeTestPartResultReporter reporter(\n        ScopedFakeTestPartResultReporter::INTERCEPT_ONLY_CURRENT_THREAD,\n        &results);\n    AddFailure(NONFATAL_FAILURE);\n    AddFailure(FATAL_FAILURE);\n  }\n\n  EXPECT_EQ(2, results.size());\n  EXPECT_TRUE(results.GetTestPartResult(0).nonfatally_failed());\n  EXPECT_TRUE(results.GetTestPartResult(1).fatally_failed());\n}\n\nTEST_F(ScopedFakeTestPartResultReporterTest, DeprecatedConstructor) {\n  TestPartResultArray results;\n  {\n    // Tests, that the deprecated constructor still works.\n    ScopedFakeTestPartResultReporter reporter(&results);\n    
AddFailure(NONFATAL_FAILURE);\n  }\n  EXPECT_EQ(1, results.size());\n}\n\n#if GTEST_IS_THREADSAFE\n\nclass ScopedFakeTestPartResultReporterWithThreadsTest\n  : public ScopedFakeTestPartResultReporterTest {\n protected:\n  static void AddFailureInOtherThread(FailureMode failure) {\n    ThreadWithParam<FailureMode> thread(&AddFailure, failure, nullptr);\n    thread.Join();\n  }\n};\n\nTEST_F(ScopedFakeTestPartResultReporterWithThreadsTest,\n       InterceptsTestFailuresInAllThreads) {\n  TestPartResultArray results;\n  {\n    ScopedFakeTestPartResultReporter reporter(\n        ScopedFakeTestPartResultReporter::INTERCEPT_ALL_THREADS, &results);\n    AddFailure(NONFATAL_FAILURE);\n    AddFailure(FATAL_FAILURE);\n    AddFailureInOtherThread(NONFATAL_FAILURE);\n    AddFailureInOtherThread(FATAL_FAILURE);\n  }\n\n  EXPECT_EQ(4, results.size());\n  EXPECT_TRUE(results.GetTestPartResult(0).nonfatally_failed());\n  EXPECT_TRUE(results.GetTestPartResult(1).fatally_failed());\n  EXPECT_TRUE(results.GetTestPartResult(2).nonfatally_failed());\n  EXPECT_TRUE(results.GetTestPartResult(3).fatally_failed());\n}\n\n#endif  // GTEST_IS_THREADSAFE\n\n// Tests EXPECT_FATAL_FAILURE{,ON_ALL_THREADS}.  
Makes sure that they\n// work even if the failure is generated in a called function rather than\n// the current context.\n\ntypedef ScopedFakeTestPartResultReporterTest ExpectFatalFailureTest;\n\nTEST_F(ExpectFatalFailureTest, CatchesFatalFaliure) {\n  EXPECT_FATAL_FAILURE(AddFatalFailure(), \"Expected fatal failure.\");\n}\n\nTEST_F(ExpectFatalFailureTest, AcceptsStdStringObject) {\n  EXPECT_FATAL_FAILURE(AddFatalFailure(),\n                       ::std::string(\"Expected fatal failure.\"));\n}\n\nTEST_F(ExpectFatalFailureTest, CatchesFatalFailureOnAllThreads) {\n  // We have another test below to verify that the macro catches fatal\n  // failures generated on another thread.\n  EXPECT_FATAL_FAILURE_ON_ALL_THREADS(AddFatalFailure(),\n                                      \"Expected fatal failure.\");\n}\n\n#ifdef __BORLANDC__\n// Silences warnings: \"Condition is always true\"\n# pragma option push -w-ccc\n#endif\n\n// Tests that EXPECT_FATAL_FAILURE() can be used in a non-void\n// function even when the statement in it contains ASSERT_*.\n\nint NonVoidFunction() {\n  EXPECT_FATAL_FAILURE(ASSERT_TRUE(false), \"\");\n  EXPECT_FATAL_FAILURE_ON_ALL_THREADS(FAIL(), \"\");\n  return 0;\n}\n\nTEST_F(ExpectFatalFailureTest, CanBeUsedInNonVoidFunction) {\n  NonVoidFunction();\n}\n\n// Tests that EXPECT_FATAL_FAILURE(statement, ...) 
doesn't abort the\n// current function even though 'statement' generates a fatal failure.\n\nvoid DoesNotAbortHelper(bool* aborted) {\n  EXPECT_FATAL_FAILURE(ASSERT_TRUE(false), \"\");\n  EXPECT_FATAL_FAILURE_ON_ALL_THREADS(FAIL(), \"\");\n\n  *aborted = false;\n}\n\n#ifdef __BORLANDC__\n// Restores warnings after previous \"#pragma option push\" suppressed them.\n# pragma option pop\n#endif\n\nTEST_F(ExpectFatalFailureTest, DoesNotAbort) {\n  bool aborted = true;\n  DoesNotAbortHelper(&aborted);\n  EXPECT_FALSE(aborted);\n}\n\n// Tests that the EXPECT_FATAL_FAILURE{,_ON_ALL_THREADS} accepts a\n// statement that contains a macro which expands to code containing an\n// unprotected comma.\n\nstatic int global_var = 0;\n#define GTEST_USE_UNPROTECTED_COMMA_ global_var++, global_var++\n\nTEST_F(ExpectFatalFailureTest, AcceptsMacroThatExpandsToUnprotectedComma) {\n#ifndef __BORLANDC__\n  // ICE's in C++Builder.\n  EXPECT_FATAL_FAILURE({\n    GTEST_USE_UNPROTECTED_COMMA_;\n    AddFatalFailure();\n  }, \"\");\n#endif\n\n  EXPECT_FATAL_FAILURE_ON_ALL_THREADS({\n    GTEST_USE_UNPROTECTED_COMMA_;\n    AddFatalFailure();\n  }, \"\");\n}\n\n// Tests EXPECT_NONFATAL_FAILURE{,ON_ALL_THREADS}.\n\ntypedef ScopedFakeTestPartResultReporterTest ExpectNonfatalFailureTest;\n\nTEST_F(ExpectNonfatalFailureTest, CatchesNonfatalFailure) {\n  EXPECT_NONFATAL_FAILURE(AddNonfatalFailure(),\n                          \"Expected non-fatal failure.\");\n}\n\nTEST_F(ExpectNonfatalFailureTest, AcceptsStdStringObject) {\n  EXPECT_NONFATAL_FAILURE(AddNonfatalFailure(),\n                          ::std::string(\"Expected non-fatal failure.\"));\n}\n\nTEST_F(ExpectNonfatalFailureTest, CatchesNonfatalFailureOnAllThreads) {\n  // We have another test below to verify that the macro catches\n  // non-fatal failures generated on another thread.\n  EXPECT_NONFATAL_FAILURE_ON_ALL_THREADS(AddNonfatalFailure(),\n                                         \"Expected non-fatal failure.\");\n}\n\n// Tests that the 
EXPECT_NONFATAL_FAILURE{,_ON_ALL_THREADS} accepts a\n// statement that contains a macro which expands to code containing an\n// unprotected comma.\nTEST_F(ExpectNonfatalFailureTest, AcceptsMacroThatExpandsToUnprotectedComma) {\n  EXPECT_NONFATAL_FAILURE({\n    GTEST_USE_UNPROTECTED_COMMA_;\n    AddNonfatalFailure();\n  }, \"\");\n\n  EXPECT_NONFATAL_FAILURE_ON_ALL_THREADS({\n    GTEST_USE_UNPROTECTED_COMMA_;\n    AddNonfatalFailure();\n  }, \"\");\n}\n\n#if GTEST_IS_THREADSAFE\n\ntypedef ScopedFakeTestPartResultReporterWithThreadsTest\n    ExpectFailureWithThreadsTest;\n\nTEST_F(ExpectFailureWithThreadsTest, ExpectFatalFailureOnAllThreads) {\n  EXPECT_FATAL_FAILURE_ON_ALL_THREADS(AddFailureInOtherThread(FATAL_FAILURE),\n                                      \"Expected fatal failure.\");\n}\n\nTEST_F(ExpectFailureWithThreadsTest, ExpectNonFatalFailureOnAllThreads) {\n  EXPECT_NONFATAL_FAILURE_ON_ALL_THREADS(\n      AddFailureInOtherThread(NONFATAL_FAILURE), \"Expected non-fatal failure.\");\n}\n\n#endif  // GTEST_IS_THREADSAFE\n\n// Tests the TestProperty class.\n\nTEST(TestPropertyTest, ConstructorWorks) {\n  const TestProperty property(\"key\", \"value\");\n  EXPECT_STREQ(\"key\", property.key());\n  EXPECT_STREQ(\"value\", property.value());\n}\n\nTEST(TestPropertyTest, SetValue) {\n  TestProperty property(\"key\", \"value_1\");\n  EXPECT_STREQ(\"key\", property.key());\n  property.SetValue(\"value_2\");\n  EXPECT_STREQ(\"key\", property.key());\n  EXPECT_STREQ(\"value_2\", property.value());\n}\n\n// Tests the TestResult class\n\n// The test fixture for testing TestResult.\nclass TestResultTest : public Test {\n protected:\n  typedef std::vector<TestPartResult> TPRVector;\n\n  // We make use of 2 TestPartResult objects,\n  TestPartResult * pr1, * pr2;\n\n  // ... 
and 3 TestResult objects.\n  TestResult * r0, * r1, * r2;\n\n  void SetUp() override {\n    // pr1 is for success.\n    pr1 = new TestPartResult(TestPartResult::kSuccess,\n                             \"foo/bar.cc\",\n                             10,\n                             \"Success!\");\n\n    // pr2 is for fatal failure.\n    pr2 = new TestPartResult(TestPartResult::kFatalFailure,\n                             \"foo/bar.cc\",\n                             -1,  // This line number means \"unknown\"\n                             \"Failure!\");\n\n    // Creates the TestResult objects.\n    r0 = new TestResult();\n    r1 = new TestResult();\n    r2 = new TestResult();\n\n    // In order to test TestResult, we need to modify its internal\n    // state, in particular the TestPartResult vector it holds.\n    // test_part_results() returns a const reference to this vector.\n    // We cast it to a non-const object s.t. it can be modified\n    TPRVector* results1 = const_cast<TPRVector*>(\n        &TestResultAccessor::test_part_results(*r1));\n    TPRVector* results2 = const_cast<TPRVector*>(\n        &TestResultAccessor::test_part_results(*r2));\n\n    // r0 is an empty TestResult.\n\n    // r1 contains a single SUCCESS TestPartResult.\n    results1->push_back(*pr1);\n\n    // r2 contains a SUCCESS, and a FAILURE.\n    results2->push_back(*pr1);\n    results2->push_back(*pr2);\n  }\n\n  void TearDown() override {\n    delete pr1;\n    delete pr2;\n\n    delete r0;\n    delete r1;\n    delete r2;\n  }\n\n  // Helper that compares two TestPartResults.\n  static void CompareTestPartResult(const TestPartResult& expected,\n                                    const TestPartResult& actual) {\n    EXPECT_EQ(expected.type(), actual.type());\n    EXPECT_STREQ(expected.file_name(), actual.file_name());\n    EXPECT_EQ(expected.line_number(), actual.line_number());\n    EXPECT_STREQ(expected.summary(), actual.summary());\n    EXPECT_STREQ(expected.message(), 
actual.message());\n    EXPECT_EQ(expected.passed(), actual.passed());\n    EXPECT_EQ(expected.failed(), actual.failed());\n    EXPECT_EQ(expected.nonfatally_failed(), actual.nonfatally_failed());\n    EXPECT_EQ(expected.fatally_failed(), actual.fatally_failed());\n  }\n};\n\n// Tests TestResult::total_part_count().\nTEST_F(TestResultTest, total_part_count) {\n  ASSERT_EQ(0, r0->total_part_count());\n  ASSERT_EQ(1, r1->total_part_count());\n  ASSERT_EQ(2, r2->total_part_count());\n}\n\n// Tests TestResult::Passed().\nTEST_F(TestResultTest, Passed) {\n  ASSERT_TRUE(r0->Passed());\n  ASSERT_TRUE(r1->Passed());\n  ASSERT_FALSE(r2->Passed());\n}\n\n// Tests TestResult::Failed().\nTEST_F(TestResultTest, Failed) {\n  ASSERT_FALSE(r0->Failed());\n  ASSERT_FALSE(r1->Failed());\n  ASSERT_TRUE(r2->Failed());\n}\n\n// Tests TestResult::GetTestPartResult().\n\ntypedef TestResultTest TestResultDeathTest;\n\nTEST_F(TestResultDeathTest, GetTestPartResult) {\n  CompareTestPartResult(*pr1, r2->GetTestPartResult(0));\n  CompareTestPartResult(*pr2, r2->GetTestPartResult(1));\n  EXPECT_DEATH_IF_SUPPORTED(r2->GetTestPartResult(2), \"\");\n  EXPECT_DEATH_IF_SUPPORTED(r2->GetTestPartResult(-1), \"\");\n}\n\n// Tests TestResult has no properties when none are added.\nTEST(TestResultPropertyTest, NoPropertiesFoundWhenNoneAreAdded) {\n  TestResult test_result;\n  ASSERT_EQ(0, test_result.test_property_count());\n}\n\n// Tests TestResult has the expected property when added.\nTEST(TestResultPropertyTest, OnePropertyFoundWhenAdded) {\n  TestResult test_result;\n  TestProperty property(\"key_1\", \"1\");\n  TestResultAccessor::RecordProperty(&test_result, \"testcase\", property);\n  ASSERT_EQ(1, test_result.test_property_count());\n  const TestProperty& actual_property = test_result.GetTestProperty(0);\n  EXPECT_STREQ(\"key_1\", actual_property.key());\n  EXPECT_STREQ(\"1\", actual_property.value());\n}\n\n// Tests TestResult has multiple properties when added.\nTEST(TestResultPropertyTest, 
MultiplePropertiesFoundWhenAdded) {\n  TestResult test_result;\n  TestProperty property_1(\"key_1\", \"1\");\n  TestProperty property_2(\"key_2\", \"2\");\n  TestResultAccessor::RecordProperty(&test_result, \"testcase\", property_1);\n  TestResultAccessor::RecordProperty(&test_result, \"testcase\", property_2);\n  ASSERT_EQ(2, test_result.test_property_count());\n  const TestProperty& actual_property_1 = test_result.GetTestProperty(0);\n  EXPECT_STREQ(\"key_1\", actual_property_1.key());\n  EXPECT_STREQ(\"1\", actual_property_1.value());\n\n  const TestProperty& actual_property_2 = test_result.GetTestProperty(1);\n  EXPECT_STREQ(\"key_2\", actual_property_2.key());\n  EXPECT_STREQ(\"2\", actual_property_2.value());\n}\n\n// Tests TestResult::RecordProperty() overrides values for duplicate keys.\nTEST(TestResultPropertyTest, OverridesValuesForDuplicateKeys) {\n  TestResult test_result;\n  TestProperty property_1_1(\"key_1\", \"1\");\n  TestProperty property_2_1(\"key_2\", \"2\");\n  TestProperty property_1_2(\"key_1\", \"12\");\n  TestProperty property_2_2(\"key_2\", \"22\");\n  TestResultAccessor::RecordProperty(&test_result, \"testcase\", property_1_1);\n  TestResultAccessor::RecordProperty(&test_result, \"testcase\", property_2_1);\n  TestResultAccessor::RecordProperty(&test_result, \"testcase\", property_1_2);\n  TestResultAccessor::RecordProperty(&test_result, \"testcase\", property_2_2);\n\n  ASSERT_EQ(2, test_result.test_property_count());\n  const TestProperty& actual_property_1 = test_result.GetTestProperty(0);\n  EXPECT_STREQ(\"key_1\", actual_property_1.key());\n  EXPECT_STREQ(\"12\", actual_property_1.value());\n\n  const TestProperty& actual_property_2 = test_result.GetTestProperty(1);\n  EXPECT_STREQ(\"key_2\", actual_property_2.key());\n  EXPECT_STREQ(\"22\", actual_property_2.value());\n}\n\n// Tests TestResult::GetTestProperty().\nTEST(TestResultPropertyTest, GetTestProperty) {\n  TestResult test_result;\n  TestProperty property_1(\"key_1\", 
\"1\");\n  TestProperty property_2(\"key_2\", \"2\");\n  TestProperty property_3(\"key_3\", \"3\");\n  TestResultAccessor::RecordProperty(&test_result, \"testcase\", property_1);\n  TestResultAccessor::RecordProperty(&test_result, \"testcase\", property_2);\n  TestResultAccessor::RecordProperty(&test_result, \"testcase\", property_3);\n\n  const TestProperty& fetched_property_1 = test_result.GetTestProperty(0);\n  const TestProperty& fetched_property_2 = test_result.GetTestProperty(1);\n  const TestProperty& fetched_property_3 = test_result.GetTestProperty(2);\n\n  EXPECT_STREQ(\"key_1\", fetched_property_1.key());\n  EXPECT_STREQ(\"1\", fetched_property_1.value());\n\n  EXPECT_STREQ(\"key_2\", fetched_property_2.key());\n  EXPECT_STREQ(\"2\", fetched_property_2.value());\n\n  EXPECT_STREQ(\"key_3\", fetched_property_3.key());\n  EXPECT_STREQ(\"3\", fetched_property_3.value());\n\n  EXPECT_DEATH_IF_SUPPORTED(test_result.GetTestProperty(3), \"\");\n  EXPECT_DEATH_IF_SUPPORTED(test_result.GetTestProperty(-1), \"\");\n}\n\n// Tests the Test class.\n//\n// It's difficult to test every public method of this class (we are\n// already stretching the limit of Google Test by using it to test itself!).\n// Fortunately, we don't have to do that, as we are already testing\n// the functionalities of the Test class extensively by using Google Test\n// alone.\n//\n// Therefore, this section only contains one test.\n\n// Tests that GTestFlagSaver works on Windows and Mac.\n\nclass GTestFlagSaverTest : public Test {\n protected:\n  // Saves the Google Test flags such that we can restore them later, and\n  // then sets them to their default values.  
This will be called\n  // before the first test in this test case is run.\n  static void SetUpTestSuite() {\n    saver_ = new GTestFlagSaver;\n\n    GTEST_FLAG(also_run_disabled_tests) = false;\n    GTEST_FLAG(break_on_failure) = false;\n    GTEST_FLAG(catch_exceptions) = false;\n    GTEST_FLAG(death_test_use_fork) = false;\n    GTEST_FLAG(color) = \"auto\";\n    GTEST_FLAG(filter) = \"\";\n    GTEST_FLAG(list_tests) = false;\n    GTEST_FLAG(output) = \"\";\n    GTEST_FLAG(print_time) = true;\n    GTEST_FLAG(random_seed) = 0;\n    GTEST_FLAG(repeat) = 1;\n    GTEST_FLAG(shuffle) = false;\n    GTEST_FLAG(stack_trace_depth) = kMaxStackTraceDepth;\n    GTEST_FLAG(stream_result_to) = \"\";\n    GTEST_FLAG(throw_on_failure) = false;\n  }\n\n  // Restores the Google Test flags that the tests have modified.  This will\n  // be called after the last test in this test case is run.\n  static void TearDownTestSuite() {\n    delete saver_;\n    saver_ = nullptr;\n  }\n\n  // Verifies that the Google Test flags have their default values, and then\n  // modifies each of them.\n  void VerifyAndModifyFlags() {\n    EXPECT_FALSE(GTEST_FLAG(also_run_disabled_tests));\n    EXPECT_FALSE(GTEST_FLAG(break_on_failure));\n    EXPECT_FALSE(GTEST_FLAG(catch_exceptions));\n    EXPECT_STREQ(\"auto\", GTEST_FLAG(color).c_str());\n    EXPECT_FALSE(GTEST_FLAG(death_test_use_fork));\n    EXPECT_STREQ(\"\", GTEST_FLAG(filter).c_str());\n    EXPECT_FALSE(GTEST_FLAG(list_tests));\n    EXPECT_STREQ(\"\", GTEST_FLAG(output).c_str());\n    EXPECT_TRUE(GTEST_FLAG(print_time));\n    EXPECT_EQ(0, GTEST_FLAG(random_seed));\n    EXPECT_EQ(1, GTEST_FLAG(repeat));\n    EXPECT_FALSE(GTEST_FLAG(shuffle));\n    EXPECT_EQ(kMaxStackTraceDepth, GTEST_FLAG(stack_trace_depth));\n    EXPECT_STREQ(\"\", GTEST_FLAG(stream_result_to).c_str());\n    EXPECT_FALSE(GTEST_FLAG(throw_on_failure));\n\n    GTEST_FLAG(also_run_disabled_tests) = true;\n    GTEST_FLAG(break_on_failure) = true;\n    GTEST_FLAG(catch_exceptions) = 
true;\n    GTEST_FLAG(color) = \"no\";\n    GTEST_FLAG(death_test_use_fork) = true;\n    GTEST_FLAG(filter) = \"abc\";\n    GTEST_FLAG(list_tests) = true;\n    GTEST_FLAG(output) = \"xml:foo.xml\";\n    GTEST_FLAG(print_time) = false;\n    GTEST_FLAG(random_seed) = 1;\n    GTEST_FLAG(repeat) = 100;\n    GTEST_FLAG(shuffle) = true;\n    GTEST_FLAG(stack_trace_depth) = 1;\n    GTEST_FLAG(stream_result_to) = \"localhost:1234\";\n    GTEST_FLAG(throw_on_failure) = true;\n  }\n\n private:\n  // For saving Google Test flags during this test case.\n  static GTestFlagSaver* saver_;\n};\n\nGTestFlagSaver* GTestFlagSaverTest::saver_ = nullptr;\n\n// Google Test doesn't guarantee the order of tests.  The following two\n// tests are designed to work regardless of their order.\n\n// Modifies the Google Test flags in the test body.\nTEST_F(GTestFlagSaverTest, ModifyGTestFlags) {\n  VerifyAndModifyFlags();\n}\n\n// Verifies that the Google Test flags in the body of the previous test were\n// restored to their original values.\nTEST_F(GTestFlagSaverTest, VerifyGTestFlags) {\n  VerifyAndModifyFlags();\n}\n\n// Sets an environment variable with the given name to the given\n// value.  If the value argument is \"\", unsets the environment\n// variable.  
The caller must ensure that both arguments are not NULL.\nstatic void SetEnv(const char* name, const char* value) {\n#if GTEST_OS_WINDOWS_MOBILE\n  // Environment variables are not supported on Windows CE.\n  return;\n#elif defined(__BORLANDC__) || defined(__SunOS_5_8) || defined(__SunOS_5_9)\n  // C++Builder's putenv only stores a pointer to its parameter; we have to\n  // ensure that the string remains valid as long as it might be needed.\n  // We use an std::map to do so.\n  static std::map<std::string, std::string*> added_env;\n\n  // Because putenv stores a pointer to the string buffer, we can't delete the\n  // previous string (if present) until after it's replaced.\n  std::string *prev_env = NULL;\n  if (added_env.find(name) != added_env.end()) {\n    prev_env = added_env[name];\n  }\n  added_env[name] = new std::string(\n      (Message() << name << \"=\" << value).GetString());\n\n  // The standard signature of putenv accepts a 'char*' argument. Other\n  // implementations, like C++Builder's, accept a 'const char*'.\n  // We cast away the 'const' since that would work for both variants.\n  putenv(const_cast<char*>(added_env[name]->c_str()));\n  delete prev_env;\n#elif GTEST_OS_WINDOWS  // If we are on Windows proper.\n  _putenv((Message() << name << \"=\" << value).GetString().c_str());\n#else\n  if (*value == '\\0') {\n    unsetenv(name);\n  } else {\n    setenv(name, value, 1);\n  }\n#endif  // GTEST_OS_WINDOWS_MOBILE\n}\n\n#if !GTEST_OS_WINDOWS_MOBILE\n// Environment variables are not supported on Windows CE.\n\nusing testing::internal::Int32FromGTestEnv;\n\n// Tests Int32FromGTestEnv().\n\n// Tests that Int32FromGTestEnv() returns the default value when the\n// environment variable is not set.\nTEST(Int32FromGTestEnvTest, ReturnsDefaultWhenVariableIsNotSet) {\n  SetEnv(GTEST_FLAG_PREFIX_UPPER_ \"TEMP\", \"\");\n  EXPECT_EQ(10, Int32FromGTestEnv(\"temp\", 10));\n}\n\n# if !defined(GTEST_GET_INT32_FROM_ENV_)\n\n// Tests that Int32FromGTestEnv() returns 
the default value when the\n// environment variable overflows as an Int32.\nTEST(Int32FromGTestEnvTest, ReturnsDefaultWhenValueOverflows) {\n  printf(\"(expecting 2 warnings)\\n\");\n\n  SetEnv(GTEST_FLAG_PREFIX_UPPER_ \"TEMP\", \"12345678987654321\");\n  EXPECT_EQ(20, Int32FromGTestEnv(\"temp\", 20));\n\n  SetEnv(GTEST_FLAG_PREFIX_UPPER_ \"TEMP\", \"-12345678987654321\");\n  EXPECT_EQ(30, Int32FromGTestEnv(\"temp\", 30));\n}\n\n// Tests that Int32FromGTestEnv() returns the default value when the\n// environment variable does not represent a valid decimal integer.\nTEST(Int32FromGTestEnvTest, ReturnsDefaultWhenValueIsInvalid) {\n  printf(\"(expecting 2 warnings)\\n\");\n\n  SetEnv(GTEST_FLAG_PREFIX_UPPER_ \"TEMP\", \"A1\");\n  EXPECT_EQ(40, Int32FromGTestEnv(\"temp\", 40));\n\n  SetEnv(GTEST_FLAG_PREFIX_UPPER_ \"TEMP\", \"12X\");\n  EXPECT_EQ(50, Int32FromGTestEnv(\"temp\", 50));\n}\n\n# endif  // !defined(GTEST_GET_INT32_FROM_ENV_)\n\n// Tests that Int32FromGTestEnv() parses and returns the value of the\n// environment variable when it represents a valid decimal integer in\n// the range of an Int32.\nTEST(Int32FromGTestEnvTest, ParsesAndReturnsValidValue) {\n  SetEnv(GTEST_FLAG_PREFIX_UPPER_ \"TEMP\", \"123\");\n  EXPECT_EQ(123, Int32FromGTestEnv(\"temp\", 0));\n\n  SetEnv(GTEST_FLAG_PREFIX_UPPER_ \"TEMP\", \"-321\");\n  EXPECT_EQ(-321, Int32FromGTestEnv(\"temp\", 0));\n}\n#endif  // !GTEST_OS_WINDOWS_MOBILE\n\n// Tests ParseInt32Flag().\n\n// Tests that ParseInt32Flag() returns false and doesn't change the\n// output value when the flag has wrong format\nTEST(ParseInt32FlagTest, ReturnsFalseForInvalidFlag) {\n  Int32 value = 123;\n  EXPECT_FALSE(ParseInt32Flag(\"--a=100\", \"b\", &value));\n  EXPECT_EQ(123, value);\n\n  EXPECT_FALSE(ParseInt32Flag(\"a=100\", \"a\", &value));\n  EXPECT_EQ(123, value);\n}\n\n// Tests that ParseInt32Flag() returns false and doesn't change the\n// output value when the flag overflows as an Int32.\nTEST(ParseInt32FlagTest, 
ReturnsDefaultWhenValueOverflows) {\n  printf(\"(expecting 2 warnings)\\n\");\n\n  Int32 value = 123;\n  EXPECT_FALSE(ParseInt32Flag(\"--abc=12345678987654321\", \"abc\", &value));\n  EXPECT_EQ(123, value);\n\n  EXPECT_FALSE(ParseInt32Flag(\"--abc=-12345678987654321\", \"abc\", &value));\n  EXPECT_EQ(123, value);\n}\n\n// Tests that ParseInt32Flag() returns false and doesn't change the\n// output value when the flag does not represent a valid decimal\n// integer.\nTEST(ParseInt32FlagTest, ReturnsDefaultWhenValueIsInvalid) {\n  printf(\"(expecting 2 warnings)\\n\");\n\n  Int32 value = 123;\n  EXPECT_FALSE(ParseInt32Flag(\"--abc=A1\", \"abc\", &value));\n  EXPECT_EQ(123, value);\n\n  EXPECT_FALSE(ParseInt32Flag(\"--abc=12X\", \"abc\", &value));\n  EXPECT_EQ(123, value);\n}\n\n// Tests that ParseInt32Flag() parses the value of the flag and\n// returns true when the flag represents a valid decimal integer in\n// the range of an Int32.\nTEST(ParseInt32FlagTest, ParsesAndReturnsValidValue) {\n  Int32 value = 123;\n  EXPECT_TRUE(ParseInt32Flag(\"--\" GTEST_FLAG_PREFIX_ \"abc=456\", \"abc\", &value));\n  EXPECT_EQ(456, value);\n\n  EXPECT_TRUE(ParseInt32Flag(\"--\" GTEST_FLAG_PREFIX_ \"abc=-789\",\n                             \"abc\", &value));\n  EXPECT_EQ(-789, value);\n}\n\n// Tests that Int32FromEnvOrDie() parses the value of the var or\n// returns the correct default.\n// Environment variables are not supported on Windows CE.\n#if !GTEST_OS_WINDOWS_MOBILE\nTEST(Int32FromEnvOrDieTest, ParsesAndReturnsValidValue) {\n  EXPECT_EQ(333, Int32FromEnvOrDie(GTEST_FLAG_PREFIX_UPPER_ \"UnsetVar\", 333));\n  SetEnv(GTEST_FLAG_PREFIX_UPPER_ \"UnsetVar\", \"123\");\n  EXPECT_EQ(123, Int32FromEnvOrDie(GTEST_FLAG_PREFIX_UPPER_ \"UnsetVar\", 333));\n  SetEnv(GTEST_FLAG_PREFIX_UPPER_ \"UnsetVar\", \"-123\");\n  EXPECT_EQ(-123, Int32FromEnvOrDie(GTEST_FLAG_PREFIX_UPPER_ \"UnsetVar\", 333));\n}\n#endif  // !GTEST_OS_WINDOWS_MOBILE\n\n// Tests that Int32FromEnvOrDie() aborts with an 
error message\n// if the variable is not an Int32.\nTEST(Int32FromEnvOrDieDeathTest, AbortsOnFailure) {\n  SetEnv(GTEST_FLAG_PREFIX_UPPER_ \"VAR\", \"xxx\");\n  EXPECT_DEATH_IF_SUPPORTED(\n      Int32FromEnvOrDie(GTEST_FLAG_PREFIX_UPPER_ \"VAR\", 123),\n      \".*\");\n}\n\n// Tests that Int32FromEnvOrDie() aborts with an error message\n// if the variable cannot be represented by an Int32.\nTEST(Int32FromEnvOrDieDeathTest, AbortsOnInt32Overflow) {\n  SetEnv(GTEST_FLAG_PREFIX_UPPER_ \"VAR\", \"1234567891234567891234\");\n  EXPECT_DEATH_IF_SUPPORTED(\n      Int32FromEnvOrDie(GTEST_FLAG_PREFIX_UPPER_ \"VAR\", 123),\n      \".*\");\n}\n\n// Tests that ShouldRunTestOnShard() selects all tests\n// where there is 1 shard.\nTEST(ShouldRunTestOnShardTest, IsPartitionWhenThereIsOneShard) {\n  EXPECT_TRUE(ShouldRunTestOnShard(1, 0, 0));\n  EXPECT_TRUE(ShouldRunTestOnShard(1, 0, 1));\n  EXPECT_TRUE(ShouldRunTestOnShard(1, 0, 2));\n  EXPECT_TRUE(ShouldRunTestOnShard(1, 0, 3));\n  EXPECT_TRUE(ShouldRunTestOnShard(1, 0, 4));\n}\n\nclass ShouldShardTest : public testing::Test {\n protected:\n  void SetUp() override {\n    index_var_ = GTEST_FLAG_PREFIX_UPPER_ \"INDEX\";\n    total_var_ = GTEST_FLAG_PREFIX_UPPER_ \"TOTAL\";\n  }\n\n  void TearDown() override {\n    SetEnv(index_var_, \"\");\n    SetEnv(total_var_, \"\");\n  }\n\n  const char* index_var_;\n  const char* total_var_;\n};\n\n// Tests that sharding is disabled if neither of the environment variables\n// are set.\nTEST_F(ShouldShardTest, ReturnsFalseWhenNeitherEnvVarIsSet) {\n  SetEnv(index_var_, \"\");\n  SetEnv(total_var_, \"\");\n\n  EXPECT_FALSE(ShouldShard(total_var_, index_var_, false));\n  EXPECT_FALSE(ShouldShard(total_var_, index_var_, true));\n}\n\n// Tests that sharding is not enabled if total_shards  == 1.\nTEST_F(ShouldShardTest, ReturnsFalseWhenTotalShardIsOne) {\n  SetEnv(index_var_, \"0\");\n  SetEnv(total_var_, \"1\");\n  EXPECT_FALSE(ShouldShard(total_var_, index_var_, false));\n  
EXPECT_FALSE(ShouldShard(total_var_, index_var_, true));\n}\n\n// Tests that sharding is enabled if total_shards > 1 and\n// we are not in a death test subprocess.\n// Environment variables are not supported on Windows CE.\n#if !GTEST_OS_WINDOWS_MOBILE\nTEST_F(ShouldShardTest, WorksWhenShardEnvVarsAreValid) {\n  SetEnv(index_var_, \"4\");\n  SetEnv(total_var_, \"22\");\n  EXPECT_TRUE(ShouldShard(total_var_, index_var_, false));\n  EXPECT_FALSE(ShouldShard(total_var_, index_var_, true));\n\n  SetEnv(index_var_, \"8\");\n  SetEnv(total_var_, \"9\");\n  EXPECT_TRUE(ShouldShard(total_var_, index_var_, false));\n  EXPECT_FALSE(ShouldShard(total_var_, index_var_, true));\n\n  SetEnv(index_var_, \"0\");\n  SetEnv(total_var_, \"9\");\n  EXPECT_TRUE(ShouldShard(total_var_, index_var_, false));\n  EXPECT_FALSE(ShouldShard(total_var_, index_var_, true));\n}\n#endif  // !GTEST_OS_WINDOWS_MOBILE\n\n// Tests that we exit in error if the sharding values are not valid.\n\ntypedef ShouldShardTest ShouldShardDeathTest;\n\nTEST_F(ShouldShardDeathTest, AbortsWhenShardingEnvVarsAreInvalid) {\n  SetEnv(index_var_, \"4\");\n  SetEnv(total_var_, \"4\");\n  EXPECT_DEATH_IF_SUPPORTED(ShouldShard(total_var_, index_var_, false), \".*\");\n\n  SetEnv(index_var_, \"4\");\n  SetEnv(total_var_, \"-2\");\n  EXPECT_DEATH_IF_SUPPORTED(ShouldShard(total_var_, index_var_, false), \".*\");\n\n  SetEnv(index_var_, \"5\");\n  SetEnv(total_var_, \"\");\n  EXPECT_DEATH_IF_SUPPORTED(ShouldShard(total_var_, index_var_, false), \".*\");\n\n  SetEnv(index_var_, \"\");\n  SetEnv(total_var_, \"5\");\n  EXPECT_DEATH_IF_SUPPORTED(ShouldShard(total_var_, index_var_, false), \".*\");\n}\n\n// Tests that ShouldRunTestOnShard is a partition when 5\n// shards are used.\nTEST(ShouldRunTestOnShardTest, IsPartitionWhenThereAreFiveShards) {\n  // Choose an arbitrary number of tests and shards.\n  const int num_tests = 17;\n  const int num_shards = 5;\n\n  // Check partitioning: each test should be on exactly 1 shard.\n  
for (int test_id = 0; test_id < num_tests; test_id++) {\n    int prev_selected_shard_index = -1;\n    for (int shard_index = 0; shard_index < num_shards; shard_index++) {\n      if (ShouldRunTestOnShard(num_shards, shard_index, test_id)) {\n        if (prev_selected_shard_index < 0) {\n          prev_selected_shard_index = shard_index;\n        } else {\n          ADD_FAILURE() << \"Shard \" << prev_selected_shard_index << \" and \"\n            << shard_index << \" are both selected to run test \" << test_id;\n        }\n      }\n    }\n  }\n\n  // Check balance: This is not required by the sharding protocol, but is a\n  // desirable property for performance.\n  for (int shard_index = 0; shard_index < num_shards; shard_index++) {\n    int num_tests_on_shard = 0;\n    for (int test_id = 0; test_id < num_tests; test_id++) {\n      num_tests_on_shard +=\n        ShouldRunTestOnShard(num_shards, shard_index, test_id);\n    }\n    EXPECT_GE(num_tests_on_shard, num_tests / num_shards);\n  }\n}\n\n// For the same reason we are not explicitly testing everything in the\n// Test class, there are no separate tests for the following classes\n// (except for some trivial cases):\n//\n//   TestSuite, UnitTest, UnitTestResultPrinter.\n//\n// Similarly, there are no separate tests for the following macros:\n//\n//   TEST, TEST_F, RUN_ALL_TESTS\n\nTEST(UnitTestTest, CanGetOriginalWorkingDir) {\n  ASSERT_TRUE(UnitTest::GetInstance()->original_working_dir() != nullptr);\n  EXPECT_STRNE(UnitTest::GetInstance()->original_working_dir(), \"\");\n}\n\nTEST(UnitTestTest, ReturnsPlausibleTimestamp) {\n  EXPECT_LT(0, UnitTest::GetInstance()->start_timestamp());\n  EXPECT_LE(UnitTest::GetInstance()->start_timestamp(), GetTimeInMillis());\n}\n\n// When a property using a reserved key is supplied to this function, it\n// tests that a non-fatal failure is added, a fatal failure is not added,\n// and that the property is not recorded.\nvoid ExpectNonFatalFailureRecordingPropertyWithReservedKey(\n 
   const TestResult& test_result, const char* key) {\n  EXPECT_NONFATAL_FAILURE(Test::RecordProperty(key, \"1\"), \"Reserved key\");\n  ASSERT_EQ(0, test_result.test_property_count()) << \"Property for key '\" << key\n                                                  << \"' recorded unexpectedly.\";\n}\n\nvoid ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTest(\n    const char* key) {\n  const TestInfo* test_info = UnitTest::GetInstance()->current_test_info();\n  ASSERT_TRUE(test_info != nullptr);\n  ExpectNonFatalFailureRecordingPropertyWithReservedKey(*test_info->result(),\n                                                        key);\n}\n\nvoid ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTestSuite(\n    const char* key) {\n  const testing::TestSuite* test_suite =\n      UnitTest::GetInstance()->current_test_suite();\n  ASSERT_TRUE(test_suite != nullptr);\n  ExpectNonFatalFailureRecordingPropertyWithReservedKey(\n      test_suite->ad_hoc_test_result(), key);\n}\n\nvoid ExpectNonFatalFailureRecordingPropertyWithReservedKeyOutsideOfTestSuite(\n    const char* key) {\n  ExpectNonFatalFailureRecordingPropertyWithReservedKey(\n      UnitTest::GetInstance()->ad_hoc_test_result(), key);\n}\n\n// Tests that property recording functions in UnitTest outside of tests\n// functions correcly.  
Creating a separate instance of UnitTest ensures it\n// is in a state similar to the UnitTest's singleton's between tests.\nclass UnitTestRecordPropertyTest :\n    public testing::internal::UnitTestRecordPropertyTestHelper {\n public:\n  static void SetUpTestSuite() {\n    ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTestSuite(\n        \"disabled\");\n    ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTestSuite(\n        \"errors\");\n    ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTestSuite(\n        \"failures\");\n    ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTestSuite(\n        \"name\");\n    ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTestSuite(\n        \"tests\");\n    ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTestSuite(\n        \"time\");\n\n    Test::RecordProperty(\"test_case_key_1\", \"1\");\n\n    const testing::TestSuite* test_suite =\n        UnitTest::GetInstance()->current_test_suite();\n\n    ASSERT_TRUE(test_suite != nullptr);\n\n    ASSERT_EQ(1, test_suite->ad_hoc_test_result().test_property_count());\n    EXPECT_STREQ(\"test_case_key_1\",\n                 test_suite->ad_hoc_test_result().GetTestProperty(0).key());\n    EXPECT_STREQ(\"1\",\n                 test_suite->ad_hoc_test_result().GetTestProperty(0).value());\n  }\n};\n\n// Tests TestResult has the expected property when added.\nTEST_F(UnitTestRecordPropertyTest, OnePropertyFoundWhenAdded) {\n  UnitTestRecordProperty(\"key_1\", \"1\");\n\n  ASSERT_EQ(1, unit_test_.ad_hoc_test_result().test_property_count());\n\n  EXPECT_STREQ(\"key_1\",\n               unit_test_.ad_hoc_test_result().GetTestProperty(0).key());\n  EXPECT_STREQ(\"1\",\n               unit_test_.ad_hoc_test_result().GetTestProperty(0).value());\n}\n\n// Tests TestResult has multiple properties when added.\nTEST_F(UnitTestRecordPropertyTest, MultiplePropertiesFoundWhenAdded) {\n  
UnitTestRecordProperty(\"key_1\", \"1\");\n  UnitTestRecordProperty(\"key_2\", \"2\");\n\n  ASSERT_EQ(2, unit_test_.ad_hoc_test_result().test_property_count());\n\n  EXPECT_STREQ(\"key_1\",\n               unit_test_.ad_hoc_test_result().GetTestProperty(0).key());\n  EXPECT_STREQ(\"1\", unit_test_.ad_hoc_test_result().GetTestProperty(0).value());\n\n  EXPECT_STREQ(\"key_2\",\n               unit_test_.ad_hoc_test_result().GetTestProperty(1).key());\n  EXPECT_STREQ(\"2\", unit_test_.ad_hoc_test_result().GetTestProperty(1).value());\n}\n\n// Tests TestResult::RecordProperty() overrides values for duplicate keys.\nTEST_F(UnitTestRecordPropertyTest, OverridesValuesForDuplicateKeys) {\n  UnitTestRecordProperty(\"key_1\", \"1\");\n  UnitTestRecordProperty(\"key_2\", \"2\");\n  UnitTestRecordProperty(\"key_1\", \"12\");\n  UnitTestRecordProperty(\"key_2\", \"22\");\n\n  ASSERT_EQ(2, unit_test_.ad_hoc_test_result().test_property_count());\n\n  EXPECT_STREQ(\"key_1\",\n               unit_test_.ad_hoc_test_result().GetTestProperty(0).key());\n  EXPECT_STREQ(\"12\",\n               unit_test_.ad_hoc_test_result().GetTestProperty(0).value());\n\n  EXPECT_STREQ(\"key_2\",\n               unit_test_.ad_hoc_test_result().GetTestProperty(1).key());\n  EXPECT_STREQ(\"22\",\n               unit_test_.ad_hoc_test_result().GetTestProperty(1).value());\n}\n\nTEST_F(UnitTestRecordPropertyTest,\n       AddFailureInsideTestsWhenUsingTestSuiteReservedKeys) {\n  ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTest(\n      \"name\");\n  ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTest(\n      \"value_param\");\n  ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTest(\n      \"type_param\");\n  ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTest(\n      \"status\");\n  ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTest(\n      \"time\");\n  ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTest(\n   
   \"classname\");\n}\n\nTEST_F(UnitTestRecordPropertyTest,\n       AddRecordWithReservedKeysGeneratesCorrectPropertyList) {\n  EXPECT_NONFATAL_FAILURE(\n      Test::RecordProperty(\"name\", \"1\"),\n      \"'classname', 'name', 'status', 'time', 'type_param', 'value_param',\"\n      \" 'file', and 'line' are reserved\");\n}\n\nclass UnitTestRecordPropertyTestEnvironment : public Environment {\n public:\n  void TearDown() override {\n    ExpectNonFatalFailureRecordingPropertyWithReservedKeyOutsideOfTestSuite(\n        \"tests\");\n    ExpectNonFatalFailureRecordingPropertyWithReservedKeyOutsideOfTestSuite(\n        \"failures\");\n    ExpectNonFatalFailureRecordingPropertyWithReservedKeyOutsideOfTestSuite(\n        \"disabled\");\n    ExpectNonFatalFailureRecordingPropertyWithReservedKeyOutsideOfTestSuite(\n        \"errors\");\n    ExpectNonFatalFailureRecordingPropertyWithReservedKeyOutsideOfTestSuite(\n        \"name\");\n    ExpectNonFatalFailureRecordingPropertyWithReservedKeyOutsideOfTestSuite(\n        \"timestamp\");\n    ExpectNonFatalFailureRecordingPropertyWithReservedKeyOutsideOfTestSuite(\n        \"time\");\n    ExpectNonFatalFailureRecordingPropertyWithReservedKeyOutsideOfTestSuite(\n        \"random_seed\");\n  }\n};\n\n// This will test property recording outside of any test or test case.\nstatic Environment* record_property_env GTEST_ATTRIBUTE_UNUSED_ =\n    AddGlobalTestEnvironment(new UnitTestRecordPropertyTestEnvironment);\n\n// This group of tests is for predicate assertions (ASSERT_PRED*, etc)\n// of various arities.  They do not attempt to be exhaustive.  
Rather,\n// view them as smoke tests that can be easily reviewed and verified.\n// A more complete set of tests for predicate assertions can be found\n// in gtest_pred_impl_unittest.cc.\n\n// First, some predicates and predicate-formatters needed by the tests.\n\n// Returns true if and only if the argument is an even number.\nbool IsEven(int n) {\n  return (n % 2) == 0;\n}\n\n// A functor that returns true if and only if the argument is an even number.\nstruct IsEvenFunctor {\n  bool operator()(int n) { return IsEven(n); }\n};\n\n// A predicate-formatter function that asserts the argument is an even\n// number.\nAssertionResult AssertIsEven(const char* expr, int n) {\n  if (IsEven(n)) {\n    return AssertionSuccess();\n  }\n\n  Message msg;\n  msg << expr << \" evaluates to \" << n << \", which is not even.\";\n  return AssertionFailure(msg);\n}\n\n// A predicate function that returns AssertionResult for use in\n// EXPECT/ASSERT_TRUE/FALSE.\nAssertionResult ResultIsEven(int n) {\n  if (IsEven(n))\n    return AssertionSuccess() << n << \" is even\";\n  else\n    return AssertionFailure() << n << \" is odd\";\n}\n\n// A predicate function that returns AssertionResult but gives no\n// explanation why it succeeds. 
Needed for testing that\n// EXPECT/ASSERT_FALSE handles such functions correctly.\nAssertionResult ResultIsEvenNoExplanation(int n) {\n  if (IsEven(n))\n    return AssertionSuccess();\n  else\n    return AssertionFailure() << n << \" is odd\";\n}\n\n// A predicate-formatter functor that asserts the argument is an even\n// number.\nstruct AssertIsEvenFunctor {\n  AssertionResult operator()(const char* expr, int n) {\n    return AssertIsEven(expr, n);\n  }\n};\n\n// Returns true if and only if the sum of the arguments is an even number.\nbool SumIsEven2(int n1, int n2) {\n  return IsEven(n1 + n2);\n}\n\n// A functor that returns true if and only if the sum of the arguments is an\n// even number.\nstruct SumIsEven3Functor {\n  bool operator()(int n1, int n2, int n3) {\n    return IsEven(n1 + n2 + n3);\n  }\n};\n\n// A predicate-formatter function that asserts the sum of the\n// arguments is an even number.\nAssertionResult AssertSumIsEven4(\n    const char* e1, const char* e2, const char* e3, const char* e4,\n    int n1, int n2, int n3, int n4) {\n  const int sum = n1 + n2 + n3 + n4;\n  if (IsEven(sum)) {\n    return AssertionSuccess();\n  }\n\n  Message msg;\n  msg << e1 << \" + \" << e2 << \" + \" << e3 << \" + \" << e4\n      << \" (\" << n1 << \" + \" << n2 << \" + \" << n3 << \" + \" << n4\n      << \") evaluates to \" << sum << \", which is not even.\";\n  return AssertionFailure(msg);\n}\n\n// A predicate-formatter functor that asserts the sum of the arguments\n// is an even number.\nstruct AssertSumIsEven5Functor {\n  AssertionResult operator()(\n      const char* e1, const char* e2, const char* e3, const char* e4,\n      const char* e5, int n1, int n2, int n3, int n4, int n5) {\n    const int sum = n1 + n2 + n3 + n4 + n5;\n    if (IsEven(sum)) {\n      return AssertionSuccess();\n    }\n\n    Message msg;\n    msg << e1 << \" + \" << e2 << \" + \" << e3 << \" + \" << e4 << \" + \" << e5\n        << \" (\"\n        << n1 << \" + \" << n2 << \" + \" << n3 << \" 
+ \" << n4 << \" + \" << n5\n        << \") evaluates to \" << sum << \", which is not even.\";\n    return AssertionFailure(msg);\n  }\n};\n\n\n// Tests unary predicate assertions.\n\n// Tests unary predicate assertions that don't use a custom formatter.\nTEST(Pred1Test, WithoutFormat) {\n  // Success cases.\n  EXPECT_PRED1(IsEvenFunctor(), 2) << \"This failure is UNEXPECTED!\";\n  ASSERT_PRED1(IsEven, 4);\n\n  // Failure cases.\n  EXPECT_NONFATAL_FAILURE({  // NOLINT\n    EXPECT_PRED1(IsEven, 5) << \"This failure is expected.\";\n  }, \"This failure is expected.\");\n  EXPECT_FATAL_FAILURE(ASSERT_PRED1(IsEvenFunctor(), 5),\n                       \"evaluates to false\");\n}\n\n// Tests unary predicate assertions that use a custom formatter.\nTEST(Pred1Test, WithFormat) {\n  // Success cases.\n  EXPECT_PRED_FORMAT1(AssertIsEven, 2);\n  ASSERT_PRED_FORMAT1(AssertIsEvenFunctor(), 4)\n    << \"This failure is UNEXPECTED!\";\n\n  // Failure cases.\n  const int n = 5;\n  EXPECT_NONFATAL_FAILURE(EXPECT_PRED_FORMAT1(AssertIsEvenFunctor(), n),\n                          \"n evaluates to 5, which is not even.\");\n  EXPECT_FATAL_FAILURE({  // NOLINT\n    ASSERT_PRED_FORMAT1(AssertIsEven, 5) << \"This failure is expected.\";\n  }, \"This failure is expected.\");\n}\n\n// Tests that unary predicate assertions evaluates their arguments\n// exactly once.\nTEST(Pred1Test, SingleEvaluationOnFailure) {\n  // A success case.\n  static int n = 0;\n  EXPECT_PRED1(IsEven, n++);\n  EXPECT_EQ(1, n) << \"The argument is not evaluated exactly once.\";\n\n  // A failure case.\n  EXPECT_FATAL_FAILURE({  // NOLINT\n    ASSERT_PRED_FORMAT1(AssertIsEvenFunctor(), n++)\n        << \"This failure is expected.\";\n  }, \"This failure is expected.\");\n  EXPECT_EQ(2, n) << \"The argument is not evaluated exactly once.\";\n}\n\n\n// Tests predicate assertions whose arity is >= 2.\n\n// Tests predicate assertions that don't use a custom formatter.\nTEST(PredTest, WithoutFormat) {\n  // Success 
cases.\n  ASSERT_PRED2(SumIsEven2, 2, 4) << \"This failure is UNEXPECTED!\";\n  EXPECT_PRED3(SumIsEven3Functor(), 4, 6, 8);\n\n  // Failure cases.\n  const int n1 = 1;\n  const int n2 = 2;\n  EXPECT_NONFATAL_FAILURE({  // NOLINT\n    EXPECT_PRED2(SumIsEven2, n1, n2) << \"This failure is expected.\";\n  }, \"This failure is expected.\");\n  EXPECT_FATAL_FAILURE({  // NOLINT\n    ASSERT_PRED3(SumIsEven3Functor(), 1, 2, 4);\n  }, \"evaluates to false\");\n}\n\n// Tests predicate assertions that use a custom formatter.\nTEST(PredTest, WithFormat) {\n  // Success cases.\n  ASSERT_PRED_FORMAT4(AssertSumIsEven4, 4, 6, 8, 10) <<\n    \"This failure is UNEXPECTED!\";\n  EXPECT_PRED_FORMAT5(AssertSumIsEven5Functor(), 2, 4, 6, 8, 10);\n\n  // Failure cases.\n  const int n1 = 1;\n  const int n2 = 2;\n  const int n3 = 4;\n  const int n4 = 6;\n  EXPECT_NONFATAL_FAILURE({  // NOLINT\n    EXPECT_PRED_FORMAT4(AssertSumIsEven4, n1, n2, n3, n4);\n  }, \"evaluates to 13, which is not even.\");\n  EXPECT_FATAL_FAILURE({  // NOLINT\n    ASSERT_PRED_FORMAT5(AssertSumIsEven5Functor(), 1, 2, 4, 6, 8)\n        << \"This failure is expected.\";\n  }, \"This failure is expected.\");\n}\n\n// Tests that predicate assertions evaluates their arguments\n// exactly once.\nTEST(PredTest, SingleEvaluationOnFailure) {\n  // A success case.\n  int n1 = 0;\n  int n2 = 0;\n  EXPECT_PRED2(SumIsEven2, n1++, n2++);\n  EXPECT_EQ(1, n1) << \"Argument 1 is not evaluated exactly once.\";\n  EXPECT_EQ(1, n2) << \"Argument 2 is not evaluated exactly once.\";\n\n  // Another success case.\n  n1 = n2 = 0;\n  int n3 = 0;\n  int n4 = 0;\n  int n5 = 0;\n  ASSERT_PRED_FORMAT5(AssertSumIsEven5Functor(),\n                      n1++, n2++, n3++, n4++, n5++)\n                        << \"This failure is UNEXPECTED!\";\n  EXPECT_EQ(1, n1) << \"Argument 1 is not evaluated exactly once.\";\n  EXPECT_EQ(1, n2) << \"Argument 2 is not evaluated exactly once.\";\n  EXPECT_EQ(1, n3) << \"Argument 3 is not evaluated exactly 
once.\";\n  EXPECT_EQ(1, n4) << \"Argument 4 is not evaluated exactly once.\";\n  EXPECT_EQ(1, n5) << \"Argument 5 is not evaluated exactly once.\";\n\n  // A failure case.\n  n1 = n2 = n3 = 0;\n  EXPECT_NONFATAL_FAILURE({  // NOLINT\n    EXPECT_PRED3(SumIsEven3Functor(), ++n1, n2++, n3++)\n        << \"This failure is expected.\";\n  }, \"This failure is expected.\");\n  EXPECT_EQ(1, n1) << \"Argument 1 is not evaluated exactly once.\";\n  EXPECT_EQ(1, n2) << \"Argument 2 is not evaluated exactly once.\";\n  EXPECT_EQ(1, n3) << \"Argument 3 is not evaluated exactly once.\";\n\n  // Another failure case.\n  n1 = n2 = n3 = n4 = 0;\n  EXPECT_NONFATAL_FAILURE({  // NOLINT\n    EXPECT_PRED_FORMAT4(AssertSumIsEven4, ++n1, n2++, n3++, n4++);\n  }, \"evaluates to 1, which is not even.\");\n  EXPECT_EQ(1, n1) << \"Argument 1 is not evaluated exactly once.\";\n  EXPECT_EQ(1, n2) << \"Argument 2 is not evaluated exactly once.\";\n  EXPECT_EQ(1, n3) << \"Argument 3 is not evaluated exactly once.\";\n  EXPECT_EQ(1, n4) << \"Argument 4 is not evaluated exactly once.\";\n}\n\n// Test predicate assertions for sets\nTEST(PredTest, ExpectPredEvalFailure) {\n  std::set<int> set_a = {2, 1, 3, 4, 5};\n  std::set<int> set_b = {0, 4, 8};\n  const auto compare_sets = [] (std::set<int>, std::set<int>) { return false; };\n  EXPECT_NONFATAL_FAILURE(\n      EXPECT_PRED2(compare_sets, set_a, set_b),\n      \"compare_sets(set_a, set_b) evaluates to false, where\\nset_a evaluates \"\n      \"to { 1, 2, 3, 4, 5 }\\nset_b evaluates to { 0, 4, 8 }\");\n}\n\n// Some helper functions for testing using overloaded/template\n// functions with ASSERT_PREDn and EXPECT_PREDn.\n\nbool IsPositive(double x) {\n  return x > 0;\n}\n\ntemplate <typename T>\nbool IsNegative(T x) {\n  return x < 0;\n}\n\ntemplate <typename T1, typename T2>\nbool GreaterThan(T1 x1, T2 x2) {\n  return x1 > x2;\n}\n\n// Tests that overloaded functions can be used in *_PRED* as long as\n// their types are explicitly 
specified.\nTEST(PredicateAssertionTest, AcceptsOverloadedFunction) {\n  // C++Builder requires C-style casts rather than static_cast.\n  EXPECT_PRED1((bool (*)(int))(IsPositive), 5);  // NOLINT\n  ASSERT_PRED1((bool (*)(double))(IsPositive), 6.0);  // NOLINT\n}\n\n// Tests that template functions can be used in *_PRED* as long as\n// their types are explicitly specified.\nTEST(PredicateAssertionTest, AcceptsTemplateFunction) {\n  EXPECT_PRED1(IsNegative<int>, -5);\n  // Makes sure that we can handle templates with more than one\n  // parameter.\n  ASSERT_PRED2((GreaterThan<int, int>), 5, 0);\n}\n\n\n// Some helper functions for testing using overloaded/template\n// functions with ASSERT_PRED_FORMATn and EXPECT_PRED_FORMATn.\n\nAssertionResult IsPositiveFormat(const char* /* expr */, int n) {\n  return n > 0 ? AssertionSuccess() :\n      AssertionFailure(Message() << \"Failure\");\n}\n\nAssertionResult IsPositiveFormat(const char* /* expr */, double x) {\n  return x > 0 ? AssertionSuccess() :\n      AssertionFailure(Message() << \"Failure\");\n}\n\ntemplate <typename T>\nAssertionResult IsNegativeFormat(const char* /* expr */, T x) {\n  return x < 0 ? AssertionSuccess() :\n      AssertionFailure(Message() << \"Failure\");\n}\n\ntemplate <typename T1, typename T2>\nAssertionResult EqualsFormat(const char* /* expr1 */, const char* /* expr2 */,\n                             const T1& x1, const T2& x2) {\n  return x1 == x2 ? 
AssertionSuccess() :\n      AssertionFailure(Message() << \"Failure\");\n}\n\n// Tests that overloaded functions can be used in *_PRED_FORMAT*\n// without explicitly specifying their types.\nTEST(PredicateFormatAssertionTest, AcceptsOverloadedFunction) {\n  EXPECT_PRED_FORMAT1(IsPositiveFormat, 5);\n  ASSERT_PRED_FORMAT1(IsPositiveFormat, 6.0);\n}\n\n// Tests that template functions can be used in *_PRED_FORMAT* without\n// explicitly specifying their types.\nTEST(PredicateFormatAssertionTest, AcceptsTemplateFunction) {\n  EXPECT_PRED_FORMAT1(IsNegativeFormat, -5);\n  ASSERT_PRED_FORMAT2(EqualsFormat, 3, 3);\n}\n\n\n// Tests string assertions.\n\n// Tests ASSERT_STREQ with non-NULL arguments.\nTEST(StringAssertionTest, ASSERT_STREQ) {\n  const char * const p1 = \"good\";\n  ASSERT_STREQ(p1, p1);\n\n  // Let p2 have the same content as p1, but be at a different address.\n  const char p2[] = \"good\";\n  ASSERT_STREQ(p1, p2);\n\n  EXPECT_FATAL_FAILURE(ASSERT_STREQ(\"bad\", \"good\"),\n                       \"  \\\"bad\\\"\\n  \\\"good\\\"\");\n}\n\n// Tests ASSERT_STREQ with NULL arguments.\nTEST(StringAssertionTest, ASSERT_STREQ_Null) {\n  ASSERT_STREQ(static_cast<const char*>(nullptr), nullptr);\n  EXPECT_FATAL_FAILURE(ASSERT_STREQ(nullptr, \"non-null\"), \"non-null\");\n}\n\n// Tests ASSERT_STREQ with NULL arguments.\nTEST(StringAssertionTest, ASSERT_STREQ_Null2) {\n  EXPECT_FATAL_FAILURE(ASSERT_STREQ(\"non-null\", nullptr), \"non-null\");\n}\n\n// Tests ASSERT_STRNE.\nTEST(StringAssertionTest, ASSERT_STRNE) {\n  ASSERT_STRNE(\"hi\", \"Hi\");\n  ASSERT_STRNE(\"Hi\", nullptr);\n  ASSERT_STRNE(nullptr, \"Hi\");\n  ASSERT_STRNE(\"\", nullptr);\n  ASSERT_STRNE(nullptr, \"\");\n  ASSERT_STRNE(\"\", \"Hi\");\n  ASSERT_STRNE(\"Hi\", \"\");\n  EXPECT_FATAL_FAILURE(ASSERT_STRNE(\"Hi\", \"Hi\"),\n                       \"\\\"Hi\\\" vs \\\"Hi\\\"\");\n}\n\n// Tests ASSERT_STRCASEEQ.\nTEST(StringAssertionTest, ASSERT_STRCASEEQ) {\n  ASSERT_STRCASEEQ(\"hi\", \"Hi\");\n  
ASSERT_STRCASEEQ(static_cast<const char*>(nullptr), nullptr);\n\n  ASSERT_STRCASEEQ(\"\", \"\");\n  EXPECT_FATAL_FAILURE(ASSERT_STRCASEEQ(\"Hi\", \"hi2\"),\n                       \"Ignoring case\");\n}\n\n// Tests ASSERT_STRCASENE.\nTEST(StringAssertionTest, ASSERT_STRCASENE) {\n  ASSERT_STRCASENE(\"hi1\", \"Hi2\");\n  ASSERT_STRCASENE(\"Hi\", nullptr);\n  ASSERT_STRCASENE(nullptr, \"Hi\");\n  ASSERT_STRCASENE(\"\", nullptr);\n  ASSERT_STRCASENE(nullptr, \"\");\n  ASSERT_STRCASENE(\"\", \"Hi\");\n  ASSERT_STRCASENE(\"Hi\", \"\");\n  EXPECT_FATAL_FAILURE(ASSERT_STRCASENE(\"Hi\", \"hi\"),\n                       \"(ignoring case)\");\n}\n\n// Tests *_STREQ on wide strings.\nTEST(StringAssertionTest, STREQ_Wide) {\n  // NULL strings.\n  ASSERT_STREQ(static_cast<const wchar_t*>(nullptr), nullptr);\n\n  // Empty strings.\n  ASSERT_STREQ(L\"\", L\"\");\n\n  // Non-null vs NULL.\n  EXPECT_NONFATAL_FAILURE(EXPECT_STREQ(L\"non-null\", nullptr), \"non-null\");\n\n  // Equal strings.\n  EXPECT_STREQ(L\"Hi\", L\"Hi\");\n\n  // Unequal strings.\n  EXPECT_NONFATAL_FAILURE(EXPECT_STREQ(L\"abc\", L\"Abc\"),\n                          \"Abc\");\n\n  // Strings containing wide characters.\n  EXPECT_NONFATAL_FAILURE(EXPECT_STREQ(L\"abc\\x8119\", L\"abc\\x8120\"),\n                          \"abc\");\n\n  // The streaming variation.\n  EXPECT_NONFATAL_FAILURE({  // NOLINT\n    EXPECT_STREQ(L\"abc\\x8119\", L\"abc\\x8121\") << \"Expected failure\";\n  }, \"Expected failure\");\n}\n\n// Tests *_STRNE on wide strings.\nTEST(StringAssertionTest, STRNE_Wide) {\n  // NULL strings.\n  EXPECT_NONFATAL_FAILURE(\n      {  // NOLINT\n        EXPECT_STRNE(static_cast<const wchar_t*>(nullptr), nullptr);\n      },\n      \"\");\n\n  // Empty strings.\n  EXPECT_NONFATAL_FAILURE(EXPECT_STRNE(L\"\", L\"\"),\n                          \"L\\\"\\\"\");\n\n  // Non-null vs NULL.\n  ASSERT_STRNE(L\"non-null\", nullptr);\n\n  // Equal strings.\n  EXPECT_NONFATAL_FAILURE(EXPECT_STRNE(L\"Hi\", L\"Hi\"),\n    
                      \"L\\\"Hi\\\"\");\n\n  // Unequal strings.\n  EXPECT_STRNE(L\"abc\", L\"Abc\");\n\n  // Strings containing wide characters.\n  EXPECT_NONFATAL_FAILURE(EXPECT_STRNE(L\"abc\\x8119\", L\"abc\\x8119\"),\n                          \"abc\");\n\n  // The streaming variation.\n  ASSERT_STRNE(L\"abc\\x8119\", L\"abc\\x8120\") << \"This shouldn't happen\";\n}\n\n// Tests for ::testing::IsSubstring().\n\n// Tests that IsSubstring() returns the correct result when the input\n// argument type is const char*.\nTEST(IsSubstringTest, ReturnsCorrectResultForCString) {\n  EXPECT_FALSE(IsSubstring(\"\", \"\", nullptr, \"a\"));\n  EXPECT_FALSE(IsSubstring(\"\", \"\", \"b\", nullptr));\n  EXPECT_FALSE(IsSubstring(\"\", \"\", \"needle\", \"haystack\"));\n\n  EXPECT_TRUE(IsSubstring(\"\", \"\", static_cast<const char*>(nullptr), nullptr));\n  EXPECT_TRUE(IsSubstring(\"\", \"\", \"needle\", \"two needles\"));\n}\n\n// Tests that IsSubstring() returns the correct result when the input\n// argument type is const wchar_t*.\nTEST(IsSubstringTest, ReturnsCorrectResultForWideCString) {\n  EXPECT_FALSE(IsSubstring(\"\", \"\", kNull, L\"a\"));\n  EXPECT_FALSE(IsSubstring(\"\", \"\", L\"b\", kNull));\n  EXPECT_FALSE(IsSubstring(\"\", \"\", L\"needle\", L\"haystack\"));\n\n  EXPECT_TRUE(\n      IsSubstring(\"\", \"\", static_cast<const wchar_t*>(nullptr), nullptr));\n  EXPECT_TRUE(IsSubstring(\"\", \"\", L\"needle\", L\"two needles\"));\n}\n\n// Tests that IsSubstring() generates the correct message when the input\n// argument type is const char*.\nTEST(IsSubstringTest, GeneratesCorrectMessageForCString) {\n  EXPECT_STREQ(\"Value of: needle_expr\\n\"\n               \"  Actual: \\\"needle\\\"\\n\"\n               \"Expected: a substring of haystack_expr\\n\"\n               \"Which is: \\\"haystack\\\"\",\n               IsSubstring(\"needle_expr\", \"haystack_expr\",\n                           \"needle\", \"haystack\").failure_message());\n}\n\n// Tests that IsSubstring 
returns the correct result when the input\n// argument type is ::std::string.\nTEST(IsSubstringTest, ReturnsCorrectResultsForStdString) {\n  EXPECT_TRUE(IsSubstring(\"\", \"\", std::string(\"hello\"), \"ahellob\"));\n  EXPECT_FALSE(IsSubstring(\"\", \"\", \"hello\", std::string(\"world\")));\n}\n\n#if GTEST_HAS_STD_WSTRING\n// Tests that IsSubstring returns the correct result when the input\n// argument type is ::std::wstring.\nTEST(IsSubstringTest, ReturnsCorrectResultForStdWstring) {\n  EXPECT_TRUE(IsSubstring(\"\", \"\", ::std::wstring(L\"needle\"), L\"two needles\"));\n  EXPECT_FALSE(IsSubstring(\"\", \"\", L\"needle\", ::std::wstring(L\"haystack\")));\n}\n\n// Tests that IsSubstring() generates the correct message when the input\n// argument type is ::std::wstring.\nTEST(IsSubstringTest, GeneratesCorrectMessageForWstring) {\n  EXPECT_STREQ(\"Value of: needle_expr\\n\"\n               \"  Actual: L\\\"needle\\\"\\n\"\n               \"Expected: a substring of haystack_expr\\n\"\n               \"Which is: L\\\"haystack\\\"\",\n               IsSubstring(\n                   \"needle_expr\", \"haystack_expr\",\n                   ::std::wstring(L\"needle\"), L\"haystack\").failure_message());\n}\n\n#endif  // GTEST_HAS_STD_WSTRING\n\n// Tests for ::testing::IsNotSubstring().\n\n// Tests that IsNotSubstring() returns the correct result when the input\n// argument type is const char*.\nTEST(IsNotSubstringTest, ReturnsCorrectResultForCString) {\n  EXPECT_TRUE(IsNotSubstring(\"\", \"\", \"needle\", \"haystack\"));\n  EXPECT_FALSE(IsNotSubstring(\"\", \"\", \"needle\", \"two needles\"));\n}\n\n// Tests that IsNotSubstring() returns the correct result when the input\n// argument type is const wchar_t*.\nTEST(IsNotSubstringTest, ReturnsCorrectResultForWideCString) {\n  EXPECT_TRUE(IsNotSubstring(\"\", \"\", L\"needle\", L\"haystack\"));\n  EXPECT_FALSE(IsNotSubstring(\"\", \"\", L\"needle\", L\"two needles\"));\n}\n\n// Tests that IsNotSubstring() generates the correct 
message when the input\n// argument type is const wchar_t*.\nTEST(IsNotSubstringTest, GeneratesCorrectMessageForWideCString) {\n  EXPECT_STREQ(\"Value of: needle_expr\\n\"\n               \"  Actual: L\\\"needle\\\"\\n\"\n               \"Expected: not a substring of haystack_expr\\n\"\n               \"Which is: L\\\"two needles\\\"\",\n               IsNotSubstring(\n                   \"needle_expr\", \"haystack_expr\",\n                   L\"needle\", L\"two needles\").failure_message());\n}\n\n// Tests that IsNotSubstring returns the correct result when the input\n// argument type is ::std::string.\nTEST(IsNotSubstringTest, ReturnsCorrectResultsForStdString) {\n  EXPECT_FALSE(IsNotSubstring(\"\", \"\", std::string(\"hello\"), \"ahellob\"));\n  EXPECT_TRUE(IsNotSubstring(\"\", \"\", \"hello\", std::string(\"world\")));\n}\n\n// Tests that IsNotSubstring() generates the correct message when the input\n// argument type is ::std::string.\nTEST(IsNotSubstringTest, GeneratesCorrectMessageForStdString) {\n  EXPECT_STREQ(\"Value of: needle_expr\\n\"\n               \"  Actual: \\\"needle\\\"\\n\"\n               \"Expected: not a substring of haystack_expr\\n\"\n               \"Which is: \\\"two needles\\\"\",\n               IsNotSubstring(\n                   \"needle_expr\", \"haystack_expr\",\n                   ::std::string(\"needle\"), \"two needles\").failure_message());\n}\n\n#if GTEST_HAS_STD_WSTRING\n\n// Tests that IsNotSubstring returns the correct result when the input\n// argument type is ::std::wstring.\nTEST(IsNotSubstringTest, ReturnsCorrectResultForStdWstring) {\n  EXPECT_FALSE(\n      IsNotSubstring(\"\", \"\", ::std::wstring(L\"needle\"), L\"two needles\"));\n  EXPECT_TRUE(IsNotSubstring(\"\", \"\", L\"needle\", ::std::wstring(L\"haystack\")));\n}\n\n#endif  // GTEST_HAS_STD_WSTRING\n\n// Tests floating-point assertions.\n\ntemplate <typename RawType>\nclass FloatingPointTest : public Test {\n protected:\n  // Pre-calculated numbers to be used by 
the tests.\n  struct TestValues {\n    RawType close_to_positive_zero;\n    RawType close_to_negative_zero;\n    RawType further_from_negative_zero;\n\n    RawType close_to_one;\n    RawType further_from_one;\n\n    RawType infinity;\n    RawType close_to_infinity;\n    RawType further_from_infinity;\n\n    RawType nan1;\n    RawType nan2;\n  };\n\n  typedef typename testing::internal::FloatingPoint<RawType> Floating;\n  typedef typename Floating::Bits Bits;\n\n  void SetUp() override {\n    const size_t max_ulps = Floating::kMaxUlps;\n\n    // The bits that represent 0.0.\n    const Bits zero_bits = Floating(0).bits();\n\n    // Makes some numbers close to 0.0.\n    values_.close_to_positive_zero = Floating::ReinterpretBits(\n        zero_bits + max_ulps/2);\n    values_.close_to_negative_zero = -Floating::ReinterpretBits(\n        zero_bits + max_ulps - max_ulps/2);\n    values_.further_from_negative_zero = -Floating::ReinterpretBits(\n        zero_bits + max_ulps + 1 - max_ulps/2);\n\n    // The bits that represent 1.0.\n    const Bits one_bits = Floating(1).bits();\n\n    // Makes some numbers close to 1.0.\n    values_.close_to_one = Floating::ReinterpretBits(one_bits + max_ulps);\n    values_.further_from_one = Floating::ReinterpretBits(\n        one_bits + max_ulps + 1);\n\n    // +infinity.\n    values_.infinity = Floating::Infinity();\n\n    // The bits that represent +infinity.\n    const Bits infinity_bits = Floating(values_.infinity).bits();\n\n    // Makes some numbers close to infinity.\n    values_.close_to_infinity = Floating::ReinterpretBits(\n        infinity_bits - max_ulps);\n    values_.further_from_infinity = Floating::ReinterpretBits(\n        infinity_bits - max_ulps - 1);\n\n    // Makes some NAN's.  
Sets the most significant bit of the fraction so that\n    // our NaN's are quiet; trying to process a signaling NaN would raise an\n    // exception if our environment enables floating point exceptions.\n    values_.nan1 = Floating::ReinterpretBits(Floating::kExponentBitMask\n        | (static_cast<Bits>(1) << (Floating::kFractionBitCount - 1)) | 1);\n    values_.nan2 = Floating::ReinterpretBits(Floating::kExponentBitMask\n        | (static_cast<Bits>(1) << (Floating::kFractionBitCount - 1)) | 200);\n  }\n\n  void TestSize() {\n    EXPECT_EQ(sizeof(RawType), sizeof(Bits));\n  }\n\n  static TestValues values_;\n};\n\ntemplate <typename RawType>\ntypename FloatingPointTest<RawType>::TestValues\n    FloatingPointTest<RawType>::values_;\n\n// Instantiates FloatingPointTest for testing *_FLOAT_EQ.\ntypedef FloatingPointTest<float> FloatTest;\n\n// Tests that the size of Float::Bits matches the size of float.\nTEST_F(FloatTest, Size) {\n  TestSize();\n}\n\n// Tests comparing with +0 and -0.\nTEST_F(FloatTest, Zeros) {\n  EXPECT_FLOAT_EQ(0.0, -0.0);\n  EXPECT_NONFATAL_FAILURE(EXPECT_FLOAT_EQ(-0.0, 1.0),\n                          \"1.0\");\n  EXPECT_FATAL_FAILURE(ASSERT_FLOAT_EQ(0.0, 1.5),\n                       \"1.5\");\n}\n\n// Tests comparing numbers close to 0.\n//\n// This ensures that *_FLOAT_EQ handles the sign correctly and no\n// overflow occurs when comparing numbers whose absolute value is very\n// small.\nTEST_F(FloatTest, AlmostZeros) {\n  // In C++Builder, names within local classes (such as used by\n  // EXPECT_FATAL_FAILURE) cannot be resolved against static members of the\n  // scoping class.  
Use a static local alias as a workaround.\n  // We use the assignment syntax since some compilers, like Sun Studio,\n  // don't allow initializing references using construction syntax\n  // (parentheses).\n  static const FloatTest::TestValues& v = this->values_;\n\n  EXPECT_FLOAT_EQ(0.0, v.close_to_positive_zero);\n  EXPECT_FLOAT_EQ(-0.0, v.close_to_negative_zero);\n  EXPECT_FLOAT_EQ(v.close_to_positive_zero, v.close_to_negative_zero);\n\n  EXPECT_FATAL_FAILURE({  // NOLINT\n    ASSERT_FLOAT_EQ(v.close_to_positive_zero,\n                    v.further_from_negative_zero);\n  }, \"v.further_from_negative_zero\");\n}\n\n// Tests comparing numbers close to each other.\nTEST_F(FloatTest, SmallDiff) {\n  EXPECT_FLOAT_EQ(1.0, values_.close_to_one);\n  EXPECT_NONFATAL_FAILURE(EXPECT_FLOAT_EQ(1.0, values_.further_from_one),\n                          \"values_.further_from_one\");\n}\n\n// Tests comparing numbers far apart.\nTEST_F(FloatTest, LargeDiff) {\n  EXPECT_NONFATAL_FAILURE(EXPECT_FLOAT_EQ(2.5, 3.0),\n                          \"3.0\");\n}\n\n// Tests comparing with infinity.\n//\n// This ensures that no overflow occurs when comparing numbers whose\n// absolute value is very large.\nTEST_F(FloatTest, Infinity) {\n  EXPECT_FLOAT_EQ(values_.infinity, values_.close_to_infinity);\n  EXPECT_FLOAT_EQ(-values_.infinity, -values_.close_to_infinity);\n  EXPECT_NONFATAL_FAILURE(EXPECT_FLOAT_EQ(values_.infinity, -values_.infinity),\n                          \"-values_.infinity\");\n\n  // This is interesting as the representations of infinity and nan1\n  // are only 1 DLP apart.\n  EXPECT_NONFATAL_FAILURE(EXPECT_FLOAT_EQ(values_.infinity, values_.nan1),\n                          \"values_.nan1\");\n}\n\n// Tests that comparing with NAN always returns false.\nTEST_F(FloatTest, NaN) {\n  // In C++Builder, names within local classes (such as used by\n  // EXPECT_FATAL_FAILURE) cannot be resolved against static members of the\n  // scoping class.  
Use a static local alias as a workaround.\n  // We use the assignment syntax since some compilers, like Sun Studio,\n  // don't allow initializing references using construction syntax\n  // (parentheses).\n  static const FloatTest::TestValues& v = this->values_;\n\n  EXPECT_NONFATAL_FAILURE(EXPECT_FLOAT_EQ(v.nan1, v.nan1),\n                          \"v.nan1\");\n  EXPECT_NONFATAL_FAILURE(EXPECT_FLOAT_EQ(v.nan1, v.nan2),\n                          \"v.nan2\");\n  EXPECT_NONFATAL_FAILURE(EXPECT_FLOAT_EQ(1.0, v.nan1),\n                          \"v.nan1\");\n\n  EXPECT_FATAL_FAILURE(ASSERT_FLOAT_EQ(v.nan1, v.infinity),\n                       \"v.infinity\");\n}\n\n// Tests that *_FLOAT_EQ are reflexive.\nTEST_F(FloatTest, Reflexive) {\n  EXPECT_FLOAT_EQ(0.0, 0.0);\n  EXPECT_FLOAT_EQ(1.0, 1.0);\n  ASSERT_FLOAT_EQ(values_.infinity, values_.infinity);\n}\n\n// Tests that *_FLOAT_EQ are commutative.\nTEST_F(FloatTest, Commutative) {\n  // We already tested EXPECT_FLOAT_EQ(1.0, values_.close_to_one).\n  EXPECT_FLOAT_EQ(values_.close_to_one, 1.0);\n\n  // We already tested EXPECT_FLOAT_EQ(1.0, values_.further_from_one).\n  EXPECT_NONFATAL_FAILURE(EXPECT_FLOAT_EQ(values_.further_from_one, 1.0),\n                          \"1.0\");\n}\n\n// Tests EXPECT_NEAR.\nTEST_F(FloatTest, EXPECT_NEAR) {\n  EXPECT_NEAR(-1.0f, -1.1f, 0.2f);\n  EXPECT_NEAR(2.0f, 3.0f, 1.0f);\n  EXPECT_NONFATAL_FAILURE(EXPECT_NEAR(1.0f,1.5f, 0.25f),  // NOLINT\n                          \"The difference between 1.0f and 1.5f is 0.5, \"\n                          \"which exceeds 0.25f\");\n  // To work around a bug in gcc 2.95.0, there is intentionally no\n  // space after the first comma in the previous line.\n}\n\n// Tests ASSERT_NEAR.\nTEST_F(FloatTest, ASSERT_NEAR) {\n  ASSERT_NEAR(-1.0f, -1.1f, 0.2f);\n  ASSERT_NEAR(2.0f, 3.0f, 1.0f);\n  EXPECT_FATAL_FAILURE(ASSERT_NEAR(1.0f,1.5f, 0.25f),  // NOLINT\n                       \"The difference between 1.0f and 1.5f is 0.5, \"\n                       
\"which exceeds 0.25f\");\n  // To work around a bug in gcc 2.95.0, there is intentionally no\n  // space after the first comma in the previous line.\n}\n\n// Tests the cases where FloatLE() should succeed.\nTEST_F(FloatTest, FloatLESucceeds) {\n  EXPECT_PRED_FORMAT2(FloatLE, 1.0f, 2.0f);  // When val1 < val2,\n  ASSERT_PRED_FORMAT2(FloatLE, 1.0f, 1.0f);  // val1 == val2,\n\n  // or when val1 is greater than, but almost equals to, val2.\n  EXPECT_PRED_FORMAT2(FloatLE, values_.close_to_positive_zero, 0.0f);\n}\n\n// Tests the cases where FloatLE() should fail.\nTEST_F(FloatTest, FloatLEFails) {\n  // When val1 is greater than val2 by a large margin,\n  EXPECT_NONFATAL_FAILURE(EXPECT_PRED_FORMAT2(FloatLE, 2.0f, 1.0f),\n                          \"(2.0f) <= (1.0f)\");\n\n  // or by a small yet non-negligible margin,\n  EXPECT_NONFATAL_FAILURE({  // NOLINT\n    EXPECT_PRED_FORMAT2(FloatLE, values_.further_from_one, 1.0f);\n  }, \"(values_.further_from_one) <= (1.0f)\");\n\n  EXPECT_NONFATAL_FAILURE({  // NOLINT\n    EXPECT_PRED_FORMAT2(FloatLE, values_.nan1, values_.infinity);\n  }, \"(values_.nan1) <= (values_.infinity)\");\n  EXPECT_NONFATAL_FAILURE({  // NOLINT\n    EXPECT_PRED_FORMAT2(FloatLE, -values_.infinity, values_.nan1);\n  }, \"(-values_.infinity) <= (values_.nan1)\");\n  EXPECT_FATAL_FAILURE({  // NOLINT\n    ASSERT_PRED_FORMAT2(FloatLE, values_.nan1, values_.nan1);\n  }, \"(values_.nan1) <= (values_.nan1)\");\n}\n\n// Instantiates FloatingPointTest for testing *_DOUBLE_EQ.\ntypedef FloatingPointTest<double> DoubleTest;\n\n// Tests that the size of Double::Bits matches the size of double.\nTEST_F(DoubleTest, Size) {\n  TestSize();\n}\n\n// Tests comparing with +0 and -0.\nTEST_F(DoubleTest, Zeros) {\n  EXPECT_DOUBLE_EQ(0.0, -0.0);\n  EXPECT_NONFATAL_FAILURE(EXPECT_DOUBLE_EQ(-0.0, 1.0),\n                          \"1.0\");\n  EXPECT_FATAL_FAILURE(ASSERT_DOUBLE_EQ(0.0, 1.0),\n                       \"1.0\");\n}\n\n// Tests comparing numbers close to 
0.\n//\n// This ensures that *_DOUBLE_EQ handles the sign correctly and no\n// overflow occurs when comparing numbers whose absolute value is very\n// small.\nTEST_F(DoubleTest, AlmostZeros) {\n  // In C++Builder, names within local classes (such as used by\n  // EXPECT_FATAL_FAILURE) cannot be resolved against static members of the\n  // scoping class.  Use a static local alias as a workaround.\n  // We use the assignment syntax since some compilers, like Sun Studio,\n  // don't allow initializing references using construction syntax\n  // (parentheses).\n  static const DoubleTest::TestValues& v = this->values_;\n\n  EXPECT_DOUBLE_EQ(0.0, v.close_to_positive_zero);\n  EXPECT_DOUBLE_EQ(-0.0, v.close_to_negative_zero);\n  EXPECT_DOUBLE_EQ(v.close_to_positive_zero, v.close_to_negative_zero);\n\n  EXPECT_FATAL_FAILURE({  // NOLINT\n    ASSERT_DOUBLE_EQ(v.close_to_positive_zero,\n                     v.further_from_negative_zero);\n  }, \"v.further_from_negative_zero\");\n}\n\n// Tests comparing numbers close to each other.\nTEST_F(DoubleTest, SmallDiff) {\n  EXPECT_DOUBLE_EQ(1.0, values_.close_to_one);\n  EXPECT_NONFATAL_FAILURE(EXPECT_DOUBLE_EQ(1.0, values_.further_from_one),\n                          \"values_.further_from_one\");\n}\n\n// Tests comparing numbers far apart.\nTEST_F(DoubleTest, LargeDiff) {\n  EXPECT_NONFATAL_FAILURE(EXPECT_DOUBLE_EQ(2.0, 3.0),\n                          \"3.0\");\n}\n\n// Tests comparing with infinity.\n//\n// This ensures that no overflow occurs when comparing numbers whose\n// absolute value is very large.\nTEST_F(DoubleTest, Infinity) {\n  EXPECT_DOUBLE_EQ(values_.infinity, values_.close_to_infinity);\n  EXPECT_DOUBLE_EQ(-values_.infinity, -values_.close_to_infinity);\n  EXPECT_NONFATAL_FAILURE(EXPECT_DOUBLE_EQ(values_.infinity, -values_.infinity),\n                          \"-values_.infinity\");\n\n  // This is interesting as the representations of infinity_ and nan1_\n  // are only 1 DLP apart.\n  
EXPECT_NONFATAL_FAILURE(EXPECT_DOUBLE_EQ(values_.infinity, values_.nan1),\n                          \"values_.nan1\");\n}\n\n// Tests that comparing with NAN always returns false.\nTEST_F(DoubleTest, NaN) {\n  static const DoubleTest::TestValues& v = this->values_;\n\n  // Nokia's STLport crashes if we try to output infinity or NaN.\n  EXPECT_NONFATAL_FAILURE(EXPECT_DOUBLE_EQ(v.nan1, v.nan1),\n                          \"v.nan1\");\n  EXPECT_NONFATAL_FAILURE(EXPECT_DOUBLE_EQ(v.nan1, v.nan2), \"v.nan2\");\n  EXPECT_NONFATAL_FAILURE(EXPECT_DOUBLE_EQ(1.0, v.nan1), \"v.nan1\");\n  EXPECT_FATAL_FAILURE(ASSERT_DOUBLE_EQ(v.nan1, v.infinity),\n                       \"v.infinity\");\n}\n\n// Tests that *_DOUBLE_EQ are reflexive.\nTEST_F(DoubleTest, Reflexive) {\n  EXPECT_DOUBLE_EQ(0.0, 0.0);\n  EXPECT_DOUBLE_EQ(1.0, 1.0);\n  ASSERT_DOUBLE_EQ(values_.infinity, values_.infinity);\n}\n\n// Tests that *_DOUBLE_EQ are commutative.\nTEST_F(DoubleTest, Commutative) {\n  // We already tested EXPECT_DOUBLE_EQ(1.0, values_.close_to_one).\n  EXPECT_DOUBLE_EQ(values_.close_to_one, 1.0);\n\n  // We already tested EXPECT_DOUBLE_EQ(1.0, values_.further_from_one).\n  EXPECT_NONFATAL_FAILURE(EXPECT_DOUBLE_EQ(values_.further_from_one, 1.0),\n                          \"1.0\");\n}\n\n// Tests EXPECT_NEAR.\nTEST_F(DoubleTest, EXPECT_NEAR) {\n  EXPECT_NEAR(-1.0, -1.1, 0.2);\n  EXPECT_NEAR(2.0, 3.0, 1.0);\n  EXPECT_NONFATAL_FAILURE(EXPECT_NEAR(1.0, 1.5, 0.25),  // NOLINT\n                          \"The difference between 1.0 and 1.5 is 0.5, \"\n                          \"which exceeds 0.25\");\n  // To work around a bug in gcc 2.95.0, there is intentionally no\n  // space after the first comma in the previous statement.\n}\n\n// Tests ASSERT_NEAR.\nTEST_F(DoubleTest, ASSERT_NEAR) {\n  ASSERT_NEAR(-1.0, -1.1, 0.2);\n  ASSERT_NEAR(2.0, 3.0, 1.0);\n  EXPECT_FATAL_FAILURE(ASSERT_NEAR(1.0, 1.5, 0.25),  // NOLINT\n                       \"The difference between 1.0 and 1.5 is 0.5, \"\n             
          \"which exceeds 0.25\");\n  // To work around a bug in gcc 2.95.0, there is intentionally no\n  // space after the first comma in the previous statement.\n}\n\n// Tests the cases where DoubleLE() should succeed.\nTEST_F(DoubleTest, DoubleLESucceeds) {\n  EXPECT_PRED_FORMAT2(DoubleLE, 1.0, 2.0);  // When val1 < val2,\n  ASSERT_PRED_FORMAT2(DoubleLE, 1.0, 1.0);  // val1 == val2,\n\n  // or when val1 is greater than, but almost equals to, val2.\n  EXPECT_PRED_FORMAT2(DoubleLE, values_.close_to_positive_zero, 0.0);\n}\n\n// Tests the cases where DoubleLE() should fail.\nTEST_F(DoubleTest, DoubleLEFails) {\n  // When val1 is greater than val2 by a large margin,\n  EXPECT_NONFATAL_FAILURE(EXPECT_PRED_FORMAT2(DoubleLE, 2.0, 1.0),\n                          \"(2.0) <= (1.0)\");\n\n  // or by a small yet non-negligible margin,\n  EXPECT_NONFATAL_FAILURE({  // NOLINT\n    EXPECT_PRED_FORMAT2(DoubleLE, values_.further_from_one, 1.0);\n  }, \"(values_.further_from_one) <= (1.0)\");\n\n  EXPECT_NONFATAL_FAILURE({  // NOLINT\n    EXPECT_PRED_FORMAT2(DoubleLE, values_.nan1, values_.infinity);\n  }, \"(values_.nan1) <= (values_.infinity)\");\n  EXPECT_NONFATAL_FAILURE({  // NOLINT\n    EXPECT_PRED_FORMAT2(DoubleLE, -values_.infinity, values_.nan1);\n  }, \" (-values_.infinity) <= (values_.nan1)\");\n  EXPECT_FATAL_FAILURE({  // NOLINT\n    ASSERT_PRED_FORMAT2(DoubleLE, values_.nan1, values_.nan1);\n  }, \"(values_.nan1) <= (values_.nan1)\");\n}\n\n\n// Verifies that a test or test case whose name starts with DISABLED_ is\n// not run.\n\n// A test whose name starts with DISABLED_.\n// Should not run.\nTEST(DisabledTest, DISABLED_TestShouldNotRun) {\n  FAIL() << \"Unexpected failure: Disabled test should not be run.\";\n}\n\n// A test whose name does not start with DISABLED_.\n// Should run.\nTEST(DisabledTest, NotDISABLED_TestShouldRun) {\n  EXPECT_EQ(1, 1);\n}\n\n// A test case whose name starts with DISABLED_.\n// Should not run.\nTEST(DISABLED_TestSuite, 
TestShouldNotRun) {\n  FAIL() << \"Unexpected failure: Test in disabled test case should not be run.\";\n}\n\n// A test case and test whose names start with DISABLED_.\n// Should not run.\nTEST(DISABLED_TestSuite, DISABLED_TestShouldNotRun) {\n  FAIL() << \"Unexpected failure: Test in disabled test case should not be run.\";\n}\n\n// Check that when all tests in a test case are disabled, SetUpTestSuite() and\n// TearDownTestSuite() are not called.\nclass DisabledTestsTest : public Test {\n protected:\n  static void SetUpTestSuite() {\n    FAIL() << \"Unexpected failure: All tests disabled in test case. \"\n              \"SetUpTestSuite() should not be called.\";\n  }\n\n  static void TearDownTestSuite() {\n    FAIL() << \"Unexpected failure: All tests disabled in test case. \"\n              \"TearDownTestSuite() should not be called.\";\n  }\n};\n\nTEST_F(DisabledTestsTest, DISABLED_TestShouldNotRun_1) {\n  FAIL() << \"Unexpected failure: Disabled test should not be run.\";\n}\n\nTEST_F(DisabledTestsTest, DISABLED_TestShouldNotRun_2) {\n  FAIL() << \"Unexpected failure: Disabled test should not be run.\";\n}\n\n// Tests that disabled typed tests aren't run.\n\n#if GTEST_HAS_TYPED_TEST\n\ntemplate <typename T>\nclass TypedTest : public Test {\n};\n\ntypedef testing::Types<int, double> NumericTypes;\nTYPED_TEST_SUITE(TypedTest, NumericTypes);\n\nTYPED_TEST(TypedTest, DISABLED_ShouldNotRun) {\n  FAIL() << \"Unexpected failure: Disabled typed test should not run.\";\n}\n\ntemplate <typename T>\nclass DISABLED_TypedTest : public Test {\n};\n\nTYPED_TEST_SUITE(DISABLED_TypedTest, NumericTypes);\n\nTYPED_TEST(DISABLED_TypedTest, ShouldNotRun) {\n  FAIL() << \"Unexpected failure: Disabled typed test should not run.\";\n}\n\n#endif  // GTEST_HAS_TYPED_TEST\n\n// Tests that disabled type-parameterized tests aren't run.\n\n#if GTEST_HAS_TYPED_TEST_P\n\ntemplate <typename T>\nclass TypedTestP : public Test {\n};\n\nTYPED_TEST_SUITE_P(TypedTestP);\n\nTYPED_TEST_P(TypedTestP, 
DISABLED_ShouldNotRun) {\n  FAIL() << \"Unexpected failure: \"\n         << \"Disabled type-parameterized test should not run.\";\n}\n\nREGISTER_TYPED_TEST_SUITE_P(TypedTestP, DISABLED_ShouldNotRun);\n\nINSTANTIATE_TYPED_TEST_SUITE_P(My, TypedTestP, NumericTypes);\n\ntemplate <typename T>\nclass DISABLED_TypedTestP : public Test {\n};\n\nTYPED_TEST_SUITE_P(DISABLED_TypedTestP);\n\nTYPED_TEST_P(DISABLED_TypedTestP, ShouldNotRun) {\n  FAIL() << \"Unexpected failure: \"\n         << \"Disabled type-parameterized test should not run.\";\n}\n\nREGISTER_TYPED_TEST_SUITE_P(DISABLED_TypedTestP, ShouldNotRun);\n\nINSTANTIATE_TYPED_TEST_SUITE_P(My, DISABLED_TypedTestP, NumericTypes);\n\n#endif  // GTEST_HAS_TYPED_TEST_P\n\n// Tests that assertion macros evaluate their arguments exactly once.\n\nclass SingleEvaluationTest : public Test {\n public:  // Must be public and not protected due to a bug in g++ 3.4.2.\n  // This helper function is needed by the FailedASSERT_STREQ test\n  // below.  It's public to work around C++Builder's bug with scoping local\n  // classes.\n  static void CompareAndIncrementCharPtrs() {\n    ASSERT_STREQ(p1_++, p2_++);\n  }\n\n  // This helper function is needed by the FailedASSERT_NE test below.  
It's\n  // public to work around C++Builder's bug with scoping local classes.\n  static void CompareAndIncrementInts() {\n    ASSERT_NE(a_++, b_++);\n  }\n\n protected:\n  SingleEvaluationTest() {\n    p1_ = s1_;\n    p2_ = s2_;\n    a_ = 0;\n    b_ = 0;\n  }\n\n  static const char* const s1_;\n  static const char* const s2_;\n  static const char* p1_;\n  static const char* p2_;\n\n  static int a_;\n  static int b_;\n};\n\nconst char* const SingleEvaluationTest::s1_ = \"01234\";\nconst char* const SingleEvaluationTest::s2_ = \"abcde\";\nconst char* SingleEvaluationTest::p1_;\nconst char* SingleEvaluationTest::p2_;\nint SingleEvaluationTest::a_;\nint SingleEvaluationTest::b_;\n\n// Tests that when ASSERT_STREQ fails, it evaluates its arguments\n// exactly once.\nTEST_F(SingleEvaluationTest, FailedASSERT_STREQ) {\n  EXPECT_FATAL_FAILURE(SingleEvaluationTest::CompareAndIncrementCharPtrs(),\n                       \"p2_++\");\n  EXPECT_EQ(s1_ + 1, p1_);\n  EXPECT_EQ(s2_ + 1, p2_);\n}\n\n// Tests that string assertion arguments are evaluated exactly once.\nTEST_F(SingleEvaluationTest, ASSERT_STR) {\n  // successful EXPECT_STRNE\n  EXPECT_STRNE(p1_++, p2_++);\n  EXPECT_EQ(s1_ + 1, p1_);\n  EXPECT_EQ(s2_ + 1, p2_);\n\n  // failed EXPECT_STRCASEEQ\n  EXPECT_NONFATAL_FAILURE(EXPECT_STRCASEEQ(p1_++, p2_++),\n                          \"Ignoring case\");\n  EXPECT_EQ(s1_ + 2, p1_);\n  EXPECT_EQ(s2_ + 2, p2_);\n}\n\n// Tests that when ASSERT_NE fails, it evaluates its arguments exactly\n// once.\nTEST_F(SingleEvaluationTest, FailedASSERT_NE) {\n  EXPECT_FATAL_FAILURE(SingleEvaluationTest::CompareAndIncrementInts(),\n                       \"(a_++) != (b_++)\");\n  EXPECT_EQ(1, a_);\n  EXPECT_EQ(1, b_);\n}\n\n// Tests that assertion arguments are evaluated exactly once.\nTEST_F(SingleEvaluationTest, OtherCases) {\n  // successful EXPECT_TRUE\n  EXPECT_TRUE(0 == a_++);  // NOLINT\n  EXPECT_EQ(1, a_);\n\n  // failed EXPECT_TRUE\n  EXPECT_NONFATAL_FAILURE(EXPECT_TRUE(-1 == a_++), 
\"-1 == a_++\");\n  EXPECT_EQ(2, a_);\n\n  // successful EXPECT_GT\n  EXPECT_GT(a_++, b_++);\n  EXPECT_EQ(3, a_);\n  EXPECT_EQ(1, b_);\n\n  // failed EXPECT_LT\n  EXPECT_NONFATAL_FAILURE(EXPECT_LT(a_++, b_++), \"(a_++) < (b_++)\");\n  EXPECT_EQ(4, a_);\n  EXPECT_EQ(2, b_);\n\n  // successful ASSERT_TRUE\n  ASSERT_TRUE(0 < a_++);  // NOLINT\n  EXPECT_EQ(5, a_);\n\n  // successful ASSERT_GT\n  ASSERT_GT(a_++, b_++);\n  EXPECT_EQ(6, a_);\n  EXPECT_EQ(3, b_);\n}\n\n#if GTEST_HAS_EXCEPTIONS\n\nvoid ThrowAnInteger() {\n  throw 1;\n}\nvoid ThrowRuntimeError(const char* what) {\n  throw std::runtime_error(what);\n}\n\n// Tests that assertion arguments are evaluated exactly once.\nTEST_F(SingleEvaluationTest, ExceptionTests) {\n  // successful EXPECT_THROW\n  EXPECT_THROW({  // NOLINT\n    a_++;\n    ThrowAnInteger();\n  }, int);\n  EXPECT_EQ(1, a_);\n\n  // failed EXPECT_THROW, throws different\n  EXPECT_NONFATAL_FAILURE(EXPECT_THROW({  // NOLINT\n    a_++;\n    ThrowAnInteger();\n  }, bool), \"throws a different type\");\n  EXPECT_EQ(2, a_);\n\n  // failed EXPECT_THROW, throws nothing\n  EXPECT_NONFATAL_FAILURE(EXPECT_THROW(a_++, bool), \"throws nothing\");\n  EXPECT_EQ(3, a_);\n\n  // successful EXPECT_NO_THROW\n  EXPECT_NO_THROW(a_++);\n  EXPECT_EQ(4, a_);\n\n  // failed EXPECT_NO_THROW\n  EXPECT_NONFATAL_FAILURE(EXPECT_NO_THROW({  // NOLINT\n    a_++;\n    ThrowAnInteger();\n  }), \"it throws\");\n  EXPECT_EQ(5, a_);\n\n  // successful EXPECT_ANY_THROW\n  EXPECT_ANY_THROW({  // NOLINT\n    a_++;\n    ThrowAnInteger();\n  });\n  EXPECT_EQ(6, a_);\n\n  // failed EXPECT_ANY_THROW\n  EXPECT_NONFATAL_FAILURE(EXPECT_ANY_THROW(a_++), \"it doesn't\");\n  EXPECT_EQ(7, a_);\n}\n\n#endif  // GTEST_HAS_EXCEPTIONS\n\n// Tests {ASSERT|EXPECT}_NO_FATAL_FAILURE.\nclass NoFatalFailureTest : public Test {\n protected:\n  void Succeeds() {}\n  void FailsNonFatal() {\n    ADD_FAILURE() << \"some non-fatal failure\";\n  }\n  void Fails() {\n    FAIL() << \"some fatal failure\";\n  }\n\n  
void DoAssertNoFatalFailureOnFails() {\n    ASSERT_NO_FATAL_FAILURE(Fails());\n    ADD_FAILURE() << \"should not reach here.\";\n  }\n\n  void DoExpectNoFatalFailureOnFails() {\n    EXPECT_NO_FATAL_FAILURE(Fails());\n    ADD_FAILURE() << \"other failure\";\n  }\n};\n\nTEST_F(NoFatalFailureTest, NoFailure) {\n  EXPECT_NO_FATAL_FAILURE(Succeeds());\n  ASSERT_NO_FATAL_FAILURE(Succeeds());\n}\n\nTEST_F(NoFatalFailureTest, NonFatalIsNoFailure) {\n  EXPECT_NONFATAL_FAILURE(\n      EXPECT_NO_FATAL_FAILURE(FailsNonFatal()),\n      \"some non-fatal failure\");\n  EXPECT_NONFATAL_FAILURE(\n      ASSERT_NO_FATAL_FAILURE(FailsNonFatal()),\n      \"some non-fatal failure\");\n}\n\nTEST_F(NoFatalFailureTest, AssertNoFatalFailureOnFatalFailure) {\n  TestPartResultArray gtest_failures;\n  {\n    ScopedFakeTestPartResultReporter gtest_reporter(&gtest_failures);\n    DoAssertNoFatalFailureOnFails();\n  }\n  ASSERT_EQ(2, gtest_failures.size());\n  EXPECT_EQ(TestPartResult::kFatalFailure,\n            gtest_failures.GetTestPartResult(0).type());\n  EXPECT_EQ(TestPartResult::kFatalFailure,\n            gtest_failures.GetTestPartResult(1).type());\n  EXPECT_PRED_FORMAT2(testing::IsSubstring, \"some fatal failure\",\n                      gtest_failures.GetTestPartResult(0).message());\n  EXPECT_PRED_FORMAT2(testing::IsSubstring, \"it does\",\n                      gtest_failures.GetTestPartResult(1).message());\n}\n\nTEST_F(NoFatalFailureTest, ExpectNoFatalFailureOnFatalFailure) {\n  TestPartResultArray gtest_failures;\n  {\n    ScopedFakeTestPartResultReporter gtest_reporter(&gtest_failures);\n    DoExpectNoFatalFailureOnFails();\n  }\n  ASSERT_EQ(3, gtest_failures.size());\n  EXPECT_EQ(TestPartResult::kFatalFailure,\n            gtest_failures.GetTestPartResult(0).type());\n  EXPECT_EQ(TestPartResult::kNonFatalFailure,\n            gtest_failures.GetTestPartResult(1).type());\n  EXPECT_EQ(TestPartResult::kNonFatalFailure,\n            gtest_failures.GetTestPartResult(2).type());\n  
EXPECT_PRED_FORMAT2(testing::IsSubstring, \"some fatal failure\",\n                      gtest_failures.GetTestPartResult(0).message());\n  EXPECT_PRED_FORMAT2(testing::IsSubstring, \"it does\",\n                      gtest_failures.GetTestPartResult(1).message());\n  EXPECT_PRED_FORMAT2(testing::IsSubstring, \"other failure\",\n                      gtest_failures.GetTestPartResult(2).message());\n}\n\nTEST_F(NoFatalFailureTest, MessageIsStreamable) {\n  TestPartResultArray gtest_failures;\n  {\n    ScopedFakeTestPartResultReporter gtest_reporter(&gtest_failures);\n    EXPECT_NO_FATAL_FAILURE(FAIL() << \"foo\") << \"my message\";\n  }\n  ASSERT_EQ(2, gtest_failures.size());\n  EXPECT_EQ(TestPartResult::kNonFatalFailure,\n            gtest_failures.GetTestPartResult(0).type());\n  EXPECT_EQ(TestPartResult::kNonFatalFailure,\n            gtest_failures.GetTestPartResult(1).type());\n  EXPECT_PRED_FORMAT2(testing::IsSubstring, \"foo\",\n                      gtest_failures.GetTestPartResult(0).message());\n  EXPECT_PRED_FORMAT2(testing::IsSubstring, \"my message\",\n                      gtest_failures.GetTestPartResult(1).message());\n}\n\n// Tests non-string assertions.\n\nstd::string EditsToString(const std::vector<EditType>& edits) {\n  std::string out;\n  for (size_t i = 0; i < edits.size(); ++i) {\n    static const char kEdits[] = \" +-/\";\n    out.append(1, kEdits[edits[i]]);\n  }\n  return out;\n}\n\nstd::vector<size_t> CharsToIndices(const std::string& str) {\n  std::vector<size_t> out;\n  for (size_t i = 0; i < str.size(); ++i) {\n    out.push_back(static_cast<size_t>(str[i]));\n  }\n  return out;\n}\n\nstd::vector<std::string> CharsToLines(const std::string& str) {\n  std::vector<std::string> out;\n  for (size_t i = 0; i < str.size(); ++i) {\n    out.push_back(str.substr(i, 1));\n  }\n  return out;\n}\n\nTEST(EditDistance, TestSuites) {\n  struct Case {\n    int line;\n    const char* left;\n    const char* right;\n    const char* expected_edits;\n    
const char* expected_diff;\n  };\n  static const Case kCases[] = {\n      // No change.\n      {__LINE__, \"A\", \"A\", \" \", \"\"},\n      {__LINE__, \"ABCDE\", \"ABCDE\", \"     \", \"\"},\n      // Simple adds.\n      {__LINE__, \"X\", \"XA\", \" +\", \"@@ +1,2 @@\\n X\\n+A\\n\"},\n      {__LINE__, \"X\", \"XABCD\", \" ++++\", \"@@ +1,5 @@\\n X\\n+A\\n+B\\n+C\\n+D\\n\"},\n      // Simple removes.\n      {__LINE__, \"XA\", \"X\", \" -\", \"@@ -1,2 @@\\n X\\n-A\\n\"},\n      {__LINE__, \"XABCD\", \"X\", \" ----\", \"@@ -1,5 @@\\n X\\n-A\\n-B\\n-C\\n-D\\n\"},\n      // Simple replaces.\n      {__LINE__, \"A\", \"a\", \"/\", \"@@ -1,1 +1,1 @@\\n-A\\n+a\\n\"},\n      {__LINE__, \"ABCD\", \"abcd\", \"////\",\n       \"@@ -1,4 +1,4 @@\\n-A\\n-B\\n-C\\n-D\\n+a\\n+b\\n+c\\n+d\\n\"},\n      // Path finding.\n      {__LINE__, \"ABCDEFGH\", \"ABXEGH1\", \"  -/ -  +\",\n       \"@@ -1,8 +1,7 @@\\n A\\n B\\n-C\\n-D\\n+X\\n E\\n-F\\n G\\n H\\n+1\\n\"},\n      {__LINE__, \"AAAABCCCC\", \"ABABCDCDC\", \"- /   + / \",\n       \"@@ -1,9 +1,9 @@\\n-A\\n A\\n-A\\n+B\\n A\\n B\\n C\\n+D\\n C\\n-C\\n+D\\n C\\n\"},\n      {__LINE__, \"ABCDE\", \"BCDCD\", \"-   +/\",\n       \"@@ -1,5 +1,5 @@\\n-A\\n B\\n C\\n D\\n-E\\n+C\\n+D\\n\"},\n      {__LINE__, \"ABCDEFGHIJKL\", \"BCDCDEFGJKLJK\", \"- ++     --   ++\",\n       \"@@ -1,4 +1,5 @@\\n-A\\n B\\n+C\\n+D\\n C\\n D\\n\"\n       \"@@ -6,7 +7,7 @@\\n F\\n G\\n-H\\n-I\\n J\\n K\\n L\\n+J\\n+K\\n\"},\n      {}};\n  for (const Case* c = kCases; c->left; ++c) {\n    EXPECT_TRUE(c->expected_edits ==\n                EditsToString(CalculateOptimalEdits(CharsToIndices(c->left),\n                                                    CharsToIndices(c->right))))\n        << \"Left <\" << c->left << \"> Right <\" << c->right << \"> Edits <\"\n        << EditsToString(CalculateOptimalEdits(\n               CharsToIndices(c->left), CharsToIndices(c->right))) << \">\";\n    EXPECT_TRUE(c->expected_diff == CreateUnifiedDiff(CharsToLines(c->left),\n        
                                              CharsToLines(c->right)))\n        << \"Left <\" << c->left << \"> Right <\" << c->right << \"> Diff <\"\n        << CreateUnifiedDiff(CharsToLines(c->left), CharsToLines(c->right))\n        << \">\";\n  }\n}\n\n// Tests EqFailure(), used for implementing *EQ* assertions.\nTEST(AssertionTest, EqFailure) {\n  const std::string foo_val(\"5\"), bar_val(\"6\");\n  const std::string msg1(\n      EqFailure(\"foo\", \"bar\", foo_val, bar_val, false)\n      .failure_message());\n  EXPECT_STREQ(\n      \"Expected equality of these values:\\n\"\n      \"  foo\\n\"\n      \"    Which is: 5\\n\"\n      \"  bar\\n\"\n      \"    Which is: 6\",\n      msg1.c_str());\n\n  const std::string msg2(\n      EqFailure(\"foo\", \"6\", foo_val, bar_val, false)\n      .failure_message());\n  EXPECT_STREQ(\n      \"Expected equality of these values:\\n\"\n      \"  foo\\n\"\n      \"    Which is: 5\\n\"\n      \"  6\",\n      msg2.c_str());\n\n  const std::string msg3(\n      EqFailure(\"5\", \"bar\", foo_val, bar_val, false)\n      .failure_message());\n  EXPECT_STREQ(\n      \"Expected equality of these values:\\n\"\n      \"  5\\n\"\n      \"  bar\\n\"\n      \"    Which is: 6\",\n      msg3.c_str());\n\n  const std::string msg4(\n      EqFailure(\"5\", \"6\", foo_val, bar_val, false).failure_message());\n  EXPECT_STREQ(\n      \"Expected equality of these values:\\n\"\n      \"  5\\n\"\n      \"  6\",\n      msg4.c_str());\n\n  const std::string msg5(\n      EqFailure(\"foo\", \"bar\",\n                std::string(\"\\\"x\\\"\"), std::string(\"\\\"y\\\"\"),\n                true).failure_message());\n  EXPECT_STREQ(\n      \"Expected equality of these values:\\n\"\n      \"  foo\\n\"\n      \"    Which is: \\\"x\\\"\\n\"\n      \"  bar\\n\"\n      \"    Which is: \\\"y\\\"\\n\"\n      \"Ignoring case\",\n      msg5.c_str());\n}\n\nTEST(AssertionTest, EqFailureWithDiff) {\n  const std::string left(\n      
\"1\\\\n2XXX\\\\n3\\\\n5\\\\n6\\\\n7\\\\n8\\\\n9\\\\n10\\\\n11\\\\n12XXX\\\\n13\\\\n14\\\\n15\");\n  const std::string right(\n      \"1\\\\n2\\\\n3\\\\n4\\\\n5\\\\n6\\\\n7\\\\n8\\\\n9\\\\n11\\\\n12\\\\n13\\\\n14\");\n  const std::string msg1(\n      EqFailure(\"left\", \"right\", left, right, false).failure_message());\n  EXPECT_STREQ(\n      \"Expected equality of these values:\\n\"\n      \"  left\\n\"\n      \"    Which is: \"\n      \"1\\\\n2XXX\\\\n3\\\\n5\\\\n6\\\\n7\\\\n8\\\\n9\\\\n10\\\\n11\\\\n12XXX\\\\n13\\\\n14\\\\n15\\n\"\n      \"  right\\n\"\n      \"    Which is: 1\\\\n2\\\\n3\\\\n4\\\\n5\\\\n6\\\\n7\\\\n8\\\\n9\\\\n11\\\\n12\\\\n13\\\\n14\\n\"\n      \"With diff:\\n@@ -1,5 +1,6 @@\\n 1\\n-2XXX\\n+2\\n 3\\n+4\\n 5\\n 6\\n\"\n      \"@@ -7,8 +8,6 @@\\n 8\\n 9\\n-10\\n 11\\n-12XXX\\n+12\\n 13\\n 14\\n-15\\n\",\n      msg1.c_str());\n}\n\n// Tests AppendUserMessage(), used for implementing the *EQ* macros.\nTEST(AssertionTest, AppendUserMessage) {\n  const std::string foo(\"foo\");\n\n  Message msg;\n  EXPECT_STREQ(\"foo\",\n               AppendUserMessage(foo, msg).c_str());\n\n  msg << \"bar\";\n  EXPECT_STREQ(\"foo\\nbar\",\n               AppendUserMessage(foo, msg).c_str());\n}\n\n#ifdef __BORLANDC__\n// Silences warnings: \"Condition is always true\", \"Unreachable code\"\n# pragma option push -w-ccc -w-rch\n#endif\n\n// Tests ASSERT_TRUE.\nTEST(AssertionTest, ASSERT_TRUE) {\n  ASSERT_TRUE(2 > 1);  // NOLINT\n  EXPECT_FATAL_FAILURE(ASSERT_TRUE(2 < 1),\n                       \"2 < 1\");\n}\n\n// Tests ASSERT_TRUE(predicate) for predicates returning AssertionResult.\nTEST(AssertionTest, AssertTrueWithAssertionResult) {\n  ASSERT_TRUE(ResultIsEven(2));\n#ifndef __BORLANDC__\n  // ICE's in C++Builder.\n  EXPECT_FATAL_FAILURE(ASSERT_TRUE(ResultIsEven(3)),\n                       \"Value of: ResultIsEven(3)\\n\"\n                       \"  Actual: false (3 is odd)\\n\"\n                       \"Expected: true\");\n#endif\n  
ASSERT_TRUE(ResultIsEvenNoExplanation(2));\n  EXPECT_FATAL_FAILURE(ASSERT_TRUE(ResultIsEvenNoExplanation(3)),\n                       \"Value of: ResultIsEvenNoExplanation(3)\\n\"\n                       \"  Actual: false (3 is odd)\\n\"\n                       \"Expected: true\");\n}\n\n// Tests ASSERT_FALSE.\nTEST(AssertionTest, ASSERT_FALSE) {\n  ASSERT_FALSE(2 < 1);  // NOLINT\n  EXPECT_FATAL_FAILURE(ASSERT_FALSE(2 > 1),\n                       \"Value of: 2 > 1\\n\"\n                       \"  Actual: true\\n\"\n                       \"Expected: false\");\n}\n\n// Tests ASSERT_FALSE(predicate) for predicates returning AssertionResult.\nTEST(AssertionTest, AssertFalseWithAssertionResult) {\n  ASSERT_FALSE(ResultIsEven(3));\n#ifndef __BORLANDC__\n  // ICE's in C++Builder.\n  EXPECT_FATAL_FAILURE(ASSERT_FALSE(ResultIsEven(2)),\n                       \"Value of: ResultIsEven(2)\\n\"\n                       \"  Actual: true (2 is even)\\n\"\n                       \"Expected: false\");\n#endif\n  ASSERT_FALSE(ResultIsEvenNoExplanation(3));\n  EXPECT_FATAL_FAILURE(ASSERT_FALSE(ResultIsEvenNoExplanation(2)),\n                       \"Value of: ResultIsEvenNoExplanation(2)\\n\"\n                       \"  Actual: true\\n\"\n                       \"Expected: false\");\n}\n\n#ifdef __BORLANDC__\n// Restores warnings after previous \"#pragma option push\" suppressed them\n# pragma option pop\n#endif\n\n// Tests using ASSERT_EQ on double values.  
The purpose is to make\n// sure that the specialization we did for integer and anonymous enums\n// isn't used for double arguments.\nTEST(ExpectTest, ASSERT_EQ_Double) {\n  // A success.\n  ASSERT_EQ(5.6, 5.6);\n\n  // A failure.\n  EXPECT_FATAL_FAILURE(ASSERT_EQ(5.1, 5.2),\n                       \"5.1\");\n}\n\n// Tests ASSERT_EQ.\nTEST(AssertionTest, ASSERT_EQ) {\n  ASSERT_EQ(5, 2 + 3);\n  EXPECT_FATAL_FAILURE(ASSERT_EQ(5, 2*3),\n                       \"Expected equality of these values:\\n\"\n                       \"  5\\n\"\n                       \"  2*3\\n\"\n                       \"    Which is: 6\");\n}\n\n// Tests ASSERT_EQ(NULL, pointer).\nTEST(AssertionTest, ASSERT_EQ_NULL) {\n  // A success.\n  const char* p = nullptr;\n  // Some older GCC versions may issue a spurious warning in this or the next\n  // assertion statement. This warning should not be suppressed with\n  // static_cast since the test verifies the ability to use bare NULL as the\n  // expected parameter to the macro.\n  ASSERT_EQ(nullptr, p);\n\n  // A failure.\n  static int n = 0;\n  EXPECT_FATAL_FAILURE(ASSERT_EQ(nullptr, &n), \"  &n\\n    Which is:\");\n}\n\n// Tests ASSERT_EQ(0, non_pointer).  
Since the literal 0 can be\n// treated as a null pointer by the compiler, we need to make sure\n// that ASSERT_EQ(0, non_pointer) isn't interpreted by Google Test as\n// ASSERT_EQ(static_cast<void*>(NULL), non_pointer).\nTEST(ExpectTest, ASSERT_EQ_0) {\n  int n = 0;\n\n  // A success.\n  ASSERT_EQ(0, n);\n\n  // A failure.\n  EXPECT_FATAL_FAILURE(ASSERT_EQ(0, 5.6),\n                       \"  0\\n  5.6\");\n}\n\n// Tests ASSERT_NE.\nTEST(AssertionTest, ASSERT_NE) {\n  ASSERT_NE(6, 7);\n  EXPECT_FATAL_FAILURE(ASSERT_NE('a', 'a'),\n                       \"Expected: ('a') != ('a'), \"\n                       \"actual: 'a' (97, 0x61) vs 'a' (97, 0x61)\");\n}\n\n// Tests ASSERT_LE.\nTEST(AssertionTest, ASSERT_LE) {\n  ASSERT_LE(2, 3);\n  ASSERT_LE(2, 2);\n  EXPECT_FATAL_FAILURE(ASSERT_LE(2, 0),\n                       \"Expected: (2) <= (0), actual: 2 vs 0\");\n}\n\n// Tests ASSERT_LT.\nTEST(AssertionTest, ASSERT_LT) {\n  ASSERT_LT(2, 3);\n  EXPECT_FATAL_FAILURE(ASSERT_LT(2, 2),\n                       \"Expected: (2) < (2), actual: 2 vs 2\");\n}\n\n// Tests ASSERT_GE.\nTEST(AssertionTest, ASSERT_GE) {\n  ASSERT_GE(2, 1);\n  ASSERT_GE(2, 2);\n  EXPECT_FATAL_FAILURE(ASSERT_GE(2, 3),\n                       \"Expected: (2) >= (3), actual: 2 vs 3\");\n}\n\n// Tests ASSERT_GT.\nTEST(AssertionTest, ASSERT_GT) {\n  ASSERT_GT(2, 1);\n  EXPECT_FATAL_FAILURE(ASSERT_GT(2, 2),\n                       \"Expected: (2) > (2), actual: 2 vs 2\");\n}\n\n#if GTEST_HAS_EXCEPTIONS\n\nvoid ThrowNothing() {}\n\n// Tests ASSERT_THROW.\nTEST(AssertionTest, ASSERT_THROW) {\n  ASSERT_THROW(ThrowAnInteger(), int);\n\n# ifndef __BORLANDC__\n\n  // ICE's in C++Builder 2007 and 2009.\n  EXPECT_FATAL_FAILURE(\n      ASSERT_THROW(ThrowAnInteger(), bool),\n      \"Expected: ThrowAnInteger() throws an exception of type bool.\\n\"\n      \"  Actual: it throws a different type.\");\n# endif\n\n  EXPECT_FATAL_FAILURE(\n      ASSERT_THROW(ThrowNothing(), bool),\n      \"Expected: ThrowNothing() throws an 
exception of type bool.\\n\"\n      \"  Actual: it throws nothing.\");\n}\n\n// Tests ASSERT_NO_THROW.\nTEST(AssertionTest, ASSERT_NO_THROW) {\n  ASSERT_NO_THROW(ThrowNothing());\n  EXPECT_FATAL_FAILURE(ASSERT_NO_THROW(ThrowAnInteger()),\n                       \"Expected: ThrowAnInteger() doesn't throw an exception.\"\n                       \"\\n  Actual: it throws.\");\n  EXPECT_FATAL_FAILURE(ASSERT_NO_THROW(ThrowRuntimeError(\"A description\")),\n                       \"Expected: ThrowRuntimeError(\\\"A description\\\") \"\n                       \"doesn't throw an exception.\\n  \"\n                       \"Actual: it throws std::exception-derived exception \"\n                       \"with description: \\\"A description\\\".\");\n}\n\n// Tests ASSERT_ANY_THROW.\nTEST(AssertionTest, ASSERT_ANY_THROW) {\n  ASSERT_ANY_THROW(ThrowAnInteger());\n  EXPECT_FATAL_FAILURE(\n      ASSERT_ANY_THROW(ThrowNothing()),\n      \"Expected: ThrowNothing() throws an exception.\\n\"\n      \"  Actual: it doesn't.\");\n}\n\n#endif  // GTEST_HAS_EXCEPTIONS\n\n// Makes sure we deal with the precedence of <<.  
This test should\n// compile.\nTEST(AssertionTest, AssertPrecedence) {\n  ASSERT_EQ(1 < 2, true);\n  bool false_value = false;\n  ASSERT_EQ(true && false_value, false);\n}\n\n// A subroutine used by the following test.\nvoid TestEq1(int x) {\n  ASSERT_EQ(1, x);\n}\n\n// Tests calling a test subroutine that's not part of a fixture.\nTEST(AssertionTest, NonFixtureSubroutine) {\n  EXPECT_FATAL_FAILURE(TestEq1(2),\n                       \"  x\\n    Which is: 2\");\n}\n\n// An uncopyable class.\nclass Uncopyable {\n public:\n  explicit Uncopyable(int a_value) : value_(a_value) {}\n\n  int value() const { return value_; }\n  bool operator==(const Uncopyable& rhs) const {\n    return value() == rhs.value();\n  }\n private:\n  // This constructor deliberately has no implementation, as we don't\n  // want this class to be copyable.\n  Uncopyable(const Uncopyable&);  // NOLINT\n\n  int value_;\n};\n\n::std::ostream& operator<<(::std::ostream& os, const Uncopyable& value) {\n  return os << value.value();\n}\n\n\nbool IsPositiveUncopyable(const Uncopyable& x) {\n  return x.value() > 0;\n}\n\n// A subroutine used by the following test.\nvoid TestAssertNonPositive() {\n  Uncopyable y(-1);\n  ASSERT_PRED1(IsPositiveUncopyable, y);\n}\n// A subroutine used by the following test.\nvoid TestAssertEqualsUncopyable() {\n  Uncopyable x(5);\n  Uncopyable y(-1);\n  ASSERT_EQ(x, y);\n}\n\n// Tests that uncopyable objects can be used in assertions.\nTEST(AssertionTest, AssertWorksWithUncopyableObject) {\n  Uncopyable x(5);\n  ASSERT_PRED1(IsPositiveUncopyable, x);\n  ASSERT_EQ(x, x);\n  EXPECT_FATAL_FAILURE(TestAssertNonPositive(),\n    \"IsPositiveUncopyable(y) evaluates to false, where\\ny evaluates to -1\");\n  EXPECT_FATAL_FAILURE(TestAssertEqualsUncopyable(),\n                       \"Expected equality of these values:\\n\"\n                       \"  x\\n    Which is: 5\\n  y\\n    Which is: -1\");\n}\n\n// Tests that uncopyable objects can be used in expects.\nTEST(AssertionTest, 
ExpectWorksWithUncopyableObject) {\n  Uncopyable x(5);\n  EXPECT_PRED1(IsPositiveUncopyable, x);\n  Uncopyable y(-1);\n  EXPECT_NONFATAL_FAILURE(EXPECT_PRED1(IsPositiveUncopyable, y),\n    \"IsPositiveUncopyable(y) evaluates to false, where\\ny evaluates to -1\");\n  EXPECT_EQ(x, x);\n  EXPECT_NONFATAL_FAILURE(EXPECT_EQ(x, y),\n                          \"Expected equality of these values:\\n\"\n                          \"  x\\n    Which is: 5\\n  y\\n    Which is: -1\");\n}\n\nenum NamedEnum {\n  kE1 = 0,\n  kE2 = 1\n};\n\nTEST(AssertionTest, NamedEnum) {\n  EXPECT_EQ(kE1, kE1);\n  EXPECT_LT(kE1, kE2);\n  EXPECT_NONFATAL_FAILURE(EXPECT_EQ(kE1, kE2), \"Which is: 0\");\n  EXPECT_NONFATAL_FAILURE(EXPECT_EQ(kE1, kE2), \"Which is: 1\");\n}\n\n// Sun Studio and HP aCC2reject this code.\n#if !defined(__SUNPRO_CC) && !defined(__HP_aCC)\n\n// Tests using assertions with anonymous enums.\nenum {\n  kCaseA = -1,\n\n# if GTEST_OS_LINUX\n\n  // We want to test the case where the size of the anonymous enum is\n  // larger than sizeof(int), to make sure our implementation of the\n  // assertions doesn't truncate the enums.  However, MSVC\n  // (incorrectly) doesn't allow an enum value to exceed the range of\n  // an int, so this has to be conditionally compiled.\n  //\n  // On Linux, kCaseB and kCaseA have the same value when truncated to\n  // int size.  
We want to test whether this will confuse the\n  // assertions.\n  kCaseB = testing::internal::kMaxBiggestInt,\n\n# else\n\n  kCaseB = INT_MAX,\n\n# endif  // GTEST_OS_LINUX\n\n  kCaseC = 42\n};\n\nTEST(AssertionTest, AnonymousEnum) {\n# if GTEST_OS_LINUX\n\n  EXPECT_EQ(static_cast<int>(kCaseA), static_cast<int>(kCaseB));\n\n# endif  // GTEST_OS_LINUX\n\n  EXPECT_EQ(kCaseA, kCaseA);\n  EXPECT_NE(kCaseA, kCaseB);\n  EXPECT_LT(kCaseA, kCaseB);\n  EXPECT_LE(kCaseA, kCaseB);\n  EXPECT_GT(kCaseB, kCaseA);\n  EXPECT_GE(kCaseA, kCaseA);\n  EXPECT_NONFATAL_FAILURE(EXPECT_GE(kCaseA, kCaseB),\n                          \"(kCaseA) >= (kCaseB)\");\n  EXPECT_NONFATAL_FAILURE(EXPECT_GE(kCaseA, kCaseC),\n                          \"-1 vs 42\");\n\n  ASSERT_EQ(kCaseA, kCaseA);\n  ASSERT_NE(kCaseA, kCaseB);\n  ASSERT_LT(kCaseA, kCaseB);\n  ASSERT_LE(kCaseA, kCaseB);\n  ASSERT_GT(kCaseB, kCaseA);\n  ASSERT_GE(kCaseA, kCaseA);\n\n# ifndef __BORLANDC__\n\n  // ICE's in C++Builder.\n  EXPECT_FATAL_FAILURE(ASSERT_EQ(kCaseA, kCaseB),\n                       \"  kCaseB\\n    Which is: \");\n  EXPECT_FATAL_FAILURE(ASSERT_EQ(kCaseA, kCaseC),\n                       \"\\n    Which is: 42\");\n# endif\n\n  EXPECT_FATAL_FAILURE(ASSERT_EQ(kCaseA, kCaseC),\n                       \"\\n    Which is: -1\");\n}\n\n#endif  // !GTEST_OS_MAC && !defined(__SUNPRO_CC)\n\n#if GTEST_OS_WINDOWS\n\nstatic HRESULT UnexpectedHRESULTFailure() {\n  return E_UNEXPECTED;\n}\n\nstatic HRESULT OkHRESULTSuccess() {\n  return S_OK;\n}\n\nstatic HRESULT FalseHRESULTSuccess() {\n  return S_FALSE;\n}\n\n// HRESULT assertion tests test both zero and non-zero\n// success codes as well as failure message for each.\n//\n// Windows CE doesn't support message texts.\nTEST(HRESULTAssertionTest, EXPECT_HRESULT_SUCCEEDED) {\n  EXPECT_HRESULT_SUCCEEDED(S_OK);\n  EXPECT_HRESULT_SUCCEEDED(S_FALSE);\n\n  EXPECT_NONFATAL_FAILURE(EXPECT_HRESULT_SUCCEEDED(UnexpectedHRESULTFailure()),\n    \"Expected: (UnexpectedHRESULTFailure()) 
succeeds.\\n\"\n    \"  Actual: 0x8000FFFF\");\n}\n\nTEST(HRESULTAssertionTest, ASSERT_HRESULT_SUCCEEDED) {\n  ASSERT_HRESULT_SUCCEEDED(S_OK);\n  ASSERT_HRESULT_SUCCEEDED(S_FALSE);\n\n  EXPECT_FATAL_FAILURE(ASSERT_HRESULT_SUCCEEDED(UnexpectedHRESULTFailure()),\n    \"Expected: (UnexpectedHRESULTFailure()) succeeds.\\n\"\n    \"  Actual: 0x8000FFFF\");\n}\n\nTEST(HRESULTAssertionTest, EXPECT_HRESULT_FAILED) {\n  EXPECT_HRESULT_FAILED(E_UNEXPECTED);\n\n  EXPECT_NONFATAL_FAILURE(EXPECT_HRESULT_FAILED(OkHRESULTSuccess()),\n    \"Expected: (OkHRESULTSuccess()) fails.\\n\"\n    \"  Actual: 0x0\");\n  EXPECT_NONFATAL_FAILURE(EXPECT_HRESULT_FAILED(FalseHRESULTSuccess()),\n    \"Expected: (FalseHRESULTSuccess()) fails.\\n\"\n    \"  Actual: 0x1\");\n}\n\nTEST(HRESULTAssertionTest, ASSERT_HRESULT_FAILED) {\n  ASSERT_HRESULT_FAILED(E_UNEXPECTED);\n\n# ifndef __BORLANDC__\n\n  // ICE's in C++Builder 2007 and 2009.\n  EXPECT_FATAL_FAILURE(ASSERT_HRESULT_FAILED(OkHRESULTSuccess()),\n    \"Expected: (OkHRESULTSuccess()) fails.\\n\"\n    \"  Actual: 0x0\");\n# endif\n\n  EXPECT_FATAL_FAILURE(ASSERT_HRESULT_FAILED(FalseHRESULTSuccess()),\n    \"Expected: (FalseHRESULTSuccess()) fails.\\n\"\n    \"  Actual: 0x1\");\n}\n\n// Tests that streaming to the HRESULT macros works.\nTEST(HRESULTAssertionTest, Streaming) {\n  EXPECT_HRESULT_SUCCEEDED(S_OK) << \"unexpected failure\";\n  ASSERT_HRESULT_SUCCEEDED(S_OK) << \"unexpected failure\";\n  EXPECT_HRESULT_FAILED(E_UNEXPECTED) << \"unexpected failure\";\n  ASSERT_HRESULT_FAILED(E_UNEXPECTED) << \"unexpected failure\";\n\n  EXPECT_NONFATAL_FAILURE(\n      EXPECT_HRESULT_SUCCEEDED(E_UNEXPECTED) << \"expected failure\",\n      \"expected failure\");\n\n# ifndef __BORLANDC__\n\n  // ICE's in C++Builder 2007 and 2009.\n  EXPECT_FATAL_FAILURE(\n      ASSERT_HRESULT_SUCCEEDED(E_UNEXPECTED) << \"expected failure\",\n      \"expected failure\");\n# endif\n\n  EXPECT_NONFATAL_FAILURE(\n      EXPECT_HRESULT_FAILED(S_OK) << \"expected failure\",\n    
  \"expected failure\");\n\n  EXPECT_FATAL_FAILURE(\n      ASSERT_HRESULT_FAILED(S_OK) << \"expected failure\",\n      \"expected failure\");\n}\n\n#endif  // GTEST_OS_WINDOWS\n\n#ifdef __BORLANDC__\n// Silences warnings: \"Condition is always true\", \"Unreachable code\"\n# pragma option push -w-ccc -w-rch\n#endif\n\n// Tests that the assertion macros behave like single statements.\nTEST(AssertionSyntaxTest, BasicAssertionsBehavesLikeSingleStatement) {\n  if (AlwaysFalse())\n    ASSERT_TRUE(false) << \"This should never be executed; \"\n                          \"It's a compilation test only.\";\n\n  if (AlwaysTrue())\n    EXPECT_FALSE(false);\n  else\n    ;  // NOLINT\n\n  if (AlwaysFalse())\n    ASSERT_LT(1, 3);\n\n  if (AlwaysFalse())\n    ;  // NOLINT\n  else\n    EXPECT_GT(3, 2) << \"\";\n}\n\n#if GTEST_HAS_EXCEPTIONS\n// Tests that the compiler will not complain about unreachable code in the\n// EXPECT_THROW/EXPECT_ANY_THROW/EXPECT_NO_THROW macros.\nTEST(ExpectThrowTest, DoesNotGenerateUnreachableCodeWarning) {\n  int n = 0;\n\n  EXPECT_THROW(throw 1, int);\n  EXPECT_NONFATAL_FAILURE(EXPECT_THROW(n++, int), \"\");\n  EXPECT_NONFATAL_FAILURE(EXPECT_THROW(throw 1, const char*), \"\");\n  EXPECT_NO_THROW(n++);\n  EXPECT_NONFATAL_FAILURE(EXPECT_NO_THROW(throw 1), \"\");\n  EXPECT_ANY_THROW(throw 1);\n  EXPECT_NONFATAL_FAILURE(EXPECT_ANY_THROW(n++), \"\");\n}\n\nTEST(AssertionSyntaxTest, ExceptionAssertionsBehavesLikeSingleStatement) {\n  if (AlwaysFalse())\n    EXPECT_THROW(ThrowNothing(), bool);\n\n  if (AlwaysTrue())\n    EXPECT_THROW(ThrowAnInteger(), int);\n  else\n    ;  // NOLINT\n\n  if (AlwaysFalse())\n    EXPECT_NO_THROW(ThrowAnInteger());\n\n  if (AlwaysTrue())\n    EXPECT_NO_THROW(ThrowNothing());\n  else\n    ;  // NOLINT\n\n  if (AlwaysFalse())\n    EXPECT_ANY_THROW(ThrowNothing());\n\n  if (AlwaysTrue())\n    EXPECT_ANY_THROW(ThrowAnInteger());\n  else\n    ;  // NOLINT\n}\n#endif  // GTEST_HAS_EXCEPTIONS\n\nTEST(AssertionSyntaxTest, 
NoFatalFailureAssertionsBehavesLikeSingleStatement) {\n  if (AlwaysFalse())\n    EXPECT_NO_FATAL_FAILURE(FAIL()) << \"This should never be executed. \"\n                                    << \"It's a compilation test only.\";\n  else\n    ;  // NOLINT\n\n  if (AlwaysFalse())\n    ASSERT_NO_FATAL_FAILURE(FAIL()) << \"\";\n  else\n    ;  // NOLINT\n\n  if (AlwaysTrue())\n    EXPECT_NO_FATAL_FAILURE(SUCCEED());\n  else\n    ;  // NOLINT\n\n  if (AlwaysFalse())\n    ;  // NOLINT\n  else\n    ASSERT_NO_FATAL_FAILURE(SUCCEED());\n}\n\n// Tests that the assertion macros work well with switch statements.\nTEST(AssertionSyntaxTest, WorksWithSwitch) {\n  switch (0) {\n    case 1:\n      break;\n    default:\n      ASSERT_TRUE(true);\n  }\n\n  switch (0)\n    case 0:\n      EXPECT_FALSE(false) << \"EXPECT_FALSE failed in switch case\";\n\n  // Binary assertions are implemented using a different code path\n  // than the Boolean assertions.  Hence we test them separately.\n  switch (0) {\n    case 1:\n    default:\n      ASSERT_EQ(1, 1) << \"ASSERT_EQ failed in default switch handler\";\n  }\n\n  switch (0)\n    case 0:\n      EXPECT_NE(1, 2);\n}\n\n#if GTEST_HAS_EXCEPTIONS\n\nvoid ThrowAString() {\n    throw \"std::string\";\n}\n\n// Test that the exception assertion macros compile and work with const\n// type qualifier.\nTEST(AssertionSyntaxTest, WorksWithConst) {\n    ASSERT_THROW(ThrowAString(), const char*);\n\n    EXPECT_THROW(ThrowAString(), const char*);\n}\n\n#endif  // GTEST_HAS_EXCEPTIONS\n\n}  // namespace\n\nnamespace testing {\n\n// Tests that Google Test tracks SUCCEED*.\nTEST(SuccessfulAssertionTest, SUCCEED) {\n  SUCCEED();\n  SUCCEED() << \"OK\";\n  EXPECT_EQ(2, GetUnitTestImpl()->current_test_result()->total_part_count());\n}\n\n// Tests that Google Test doesn't track successful EXPECT_*.\nTEST(SuccessfulAssertionTest, EXPECT) {\n  EXPECT_TRUE(true);\n  EXPECT_EQ(0, GetUnitTestImpl()->current_test_result()->total_part_count());\n}\n\n// Tests that Google 
Test doesn't track successful EXPECT_STR*.\nTEST(SuccessfulAssertionTest, EXPECT_STR) {\n  EXPECT_STREQ(\"\", \"\");\n  EXPECT_EQ(0, GetUnitTestImpl()->current_test_result()->total_part_count());\n}\n\n// Tests that Google Test doesn't track successful ASSERT_*.\nTEST(SuccessfulAssertionTest, ASSERT) {\n  ASSERT_TRUE(true);\n  EXPECT_EQ(0, GetUnitTestImpl()->current_test_result()->total_part_count());\n}\n\n// Tests that Google Test doesn't track successful ASSERT_STR*.\nTEST(SuccessfulAssertionTest, ASSERT_STR) {\n  ASSERT_STREQ(\"\", \"\");\n  EXPECT_EQ(0, GetUnitTestImpl()->current_test_result()->total_part_count());\n}\n\n}  // namespace testing\n\nnamespace {\n\n// Tests the message streaming variation of assertions.\n\nTEST(AssertionWithMessageTest, EXPECT) {\n  EXPECT_EQ(1, 1) << \"This should succeed.\";\n  EXPECT_NONFATAL_FAILURE(EXPECT_NE(1, 1) << \"Expected failure #1.\",\n                          \"Expected failure #1\");\n  EXPECT_LE(1, 2) << \"This should succeed.\";\n  EXPECT_NONFATAL_FAILURE(EXPECT_LT(1, 0) << \"Expected failure #2.\",\n                          \"Expected failure #2.\");\n  EXPECT_GE(1, 0) << \"This should succeed.\";\n  EXPECT_NONFATAL_FAILURE(EXPECT_GT(1, 2) << \"Expected failure #3.\",\n                          \"Expected failure #3.\");\n\n  EXPECT_STREQ(\"1\", \"1\") << \"This should succeed.\";\n  EXPECT_NONFATAL_FAILURE(EXPECT_STRNE(\"1\", \"1\") << \"Expected failure #4.\",\n                          \"Expected failure #4.\");\n  EXPECT_STRCASEEQ(\"a\", \"A\") << \"This should succeed.\";\n  EXPECT_NONFATAL_FAILURE(EXPECT_STRCASENE(\"a\", \"A\") << \"Expected failure #5.\",\n                          \"Expected failure #5.\");\n\n  EXPECT_FLOAT_EQ(1, 1) << \"This should succeed.\";\n  EXPECT_NONFATAL_FAILURE(EXPECT_DOUBLE_EQ(1, 1.2) << \"Expected failure #6.\",\n                          \"Expected failure #6.\");\n  EXPECT_NEAR(1, 1.1, 0.2) << \"This should succeed.\";\n}\n\nTEST(AssertionWithMessageTest, ASSERT) {\n  
ASSERT_EQ(1, 1) << \"This should succeed.\";\n  ASSERT_NE(1, 2) << \"This should succeed.\";\n  ASSERT_LE(1, 2) << \"This should succeed.\";\n  ASSERT_LT(1, 2) << \"This should succeed.\";\n  ASSERT_GE(1, 0) << \"This should succeed.\";\n  EXPECT_FATAL_FAILURE(ASSERT_GT(1, 2) << \"Expected failure.\",\n                       \"Expected failure.\");\n}\n\nTEST(AssertionWithMessageTest, ASSERT_STR) {\n  ASSERT_STREQ(\"1\", \"1\") << \"This should succeed.\";\n  ASSERT_STRNE(\"1\", \"2\") << \"This should succeed.\";\n  ASSERT_STRCASEEQ(\"a\", \"A\") << \"This should succeed.\";\n  EXPECT_FATAL_FAILURE(ASSERT_STRCASENE(\"a\", \"A\") << \"Expected failure.\",\n                       \"Expected failure.\");\n}\n\nTEST(AssertionWithMessageTest, ASSERT_FLOATING) {\n  ASSERT_FLOAT_EQ(1, 1) << \"This should succeed.\";\n  ASSERT_DOUBLE_EQ(1, 1) << \"This should succeed.\";\n  EXPECT_FATAL_FAILURE(ASSERT_NEAR(1,1.2, 0.1) << \"Expect failure.\",  // NOLINT\n                       \"Expect failure.\");\n  // To work around a bug in gcc 2.95.0, there is intentionally no\n  // space after the first comma in the previous statement.\n}\n\n// Tests using ASSERT_FALSE with a streamed message.\nTEST(AssertionWithMessageTest, ASSERT_FALSE) {\n  ASSERT_FALSE(false) << \"This shouldn't fail.\";\n  EXPECT_FATAL_FAILURE({  // NOLINT\n    ASSERT_FALSE(true) << \"Expected failure: \" << 2 << \" > \" << 1\n                       << \" evaluates to \" << true;\n  }, \"Expected failure\");\n}\n\n// Tests using FAIL with a streamed message.\nTEST(AssertionWithMessageTest, FAIL) {\n  EXPECT_FATAL_FAILURE(FAIL() << 0,\n                       \"0\");\n}\n\n// Tests using SUCCEED with a streamed message.\nTEST(AssertionWithMessageTest, SUCCEED) {\n  SUCCEED() << \"Success == \" << 1;\n}\n\n// Tests using ASSERT_TRUE with a streamed message.\nTEST(AssertionWithMessageTest, ASSERT_TRUE) {\n  ASSERT_TRUE(true) << \"This should succeed.\";\n  ASSERT_TRUE(true) << true;\n  EXPECT_FATAL_FAILURE(\n      { 
 // NOLINT\n        ASSERT_TRUE(false) << static_cast<const char*>(nullptr)\n                           << static_cast<char*>(nullptr);\n      },\n      \"(null)(null)\");\n}\n\n#if GTEST_OS_WINDOWS\n// Tests using wide strings in assertion messages.\nTEST(AssertionWithMessageTest, WideStringMessage) {\n  EXPECT_NONFATAL_FAILURE({  // NOLINT\n    EXPECT_TRUE(false) << L\"This failure is expected.\\x8119\";\n  }, \"This failure is expected.\");\n  EXPECT_FATAL_FAILURE({  // NOLINT\n    ASSERT_EQ(1, 2) << \"This failure is \"\n                    << L\"expected too.\\x8120\";\n  }, \"This failure is expected too.\");\n}\n#endif  // GTEST_OS_WINDOWS\n\n// Tests EXPECT_TRUE.\nTEST(ExpectTest, EXPECT_TRUE) {\n  EXPECT_TRUE(true) << \"Intentional success\";\n  EXPECT_NONFATAL_FAILURE(EXPECT_TRUE(false) << \"Intentional failure #1.\",\n                          \"Intentional failure #1.\");\n  EXPECT_NONFATAL_FAILURE(EXPECT_TRUE(false) << \"Intentional failure #2.\",\n                          \"Intentional failure #2.\");\n  EXPECT_TRUE(2 > 1);  // NOLINT\n  EXPECT_NONFATAL_FAILURE(EXPECT_TRUE(2 < 1),\n                          \"Value of: 2 < 1\\n\"\n                          \"  Actual: false\\n\"\n                          \"Expected: true\");\n  EXPECT_NONFATAL_FAILURE(EXPECT_TRUE(2 > 3),\n                          \"2 > 3\");\n}\n\n// Tests EXPECT_TRUE(predicate) for predicates returning AssertionResult.\nTEST(ExpectTest, ExpectTrueWithAssertionResult) {\n  EXPECT_TRUE(ResultIsEven(2));\n  EXPECT_NONFATAL_FAILURE(EXPECT_TRUE(ResultIsEven(3)),\n                          \"Value of: ResultIsEven(3)\\n\"\n                          \"  Actual: false (3 is odd)\\n\"\n                          \"Expected: true\");\n  EXPECT_TRUE(ResultIsEvenNoExplanation(2));\n  EXPECT_NONFATAL_FAILURE(EXPECT_TRUE(ResultIsEvenNoExplanation(3)),\n                          \"Value of: ResultIsEvenNoExplanation(3)\\n\"\n                          \"  Actual: false (3 is odd)\\n\"\n            
              \"Expected: true\");\n}\n\n// Tests EXPECT_FALSE with a streamed message.\nTEST(ExpectTest, EXPECT_FALSE) {\n  EXPECT_FALSE(2 < 1);  // NOLINT\n  EXPECT_FALSE(false) << \"Intentional success\";\n  EXPECT_NONFATAL_FAILURE(EXPECT_FALSE(true) << \"Intentional failure #1.\",\n                          \"Intentional failure #1.\");\n  EXPECT_NONFATAL_FAILURE(EXPECT_FALSE(true) << \"Intentional failure #2.\",\n                          \"Intentional failure #2.\");\n  EXPECT_NONFATAL_FAILURE(EXPECT_FALSE(2 > 1),\n                          \"Value of: 2 > 1\\n\"\n                          \"  Actual: true\\n\"\n                          \"Expected: false\");\n  EXPECT_NONFATAL_FAILURE(EXPECT_FALSE(2 < 3),\n                          \"2 < 3\");\n}\n\n// Tests EXPECT_FALSE(predicate) for predicates returning AssertionResult.\nTEST(ExpectTest, ExpectFalseWithAssertionResult) {\n  EXPECT_FALSE(ResultIsEven(3));\n  EXPECT_NONFATAL_FAILURE(EXPECT_FALSE(ResultIsEven(2)),\n                          \"Value of: ResultIsEven(2)\\n\"\n                          \"  Actual: true (2 is even)\\n\"\n                          \"Expected: false\");\n  EXPECT_FALSE(ResultIsEvenNoExplanation(3));\n  EXPECT_NONFATAL_FAILURE(EXPECT_FALSE(ResultIsEvenNoExplanation(2)),\n                          \"Value of: ResultIsEvenNoExplanation(2)\\n\"\n                          \"  Actual: true\\n\"\n                          \"Expected: false\");\n}\n\n#ifdef __BORLANDC__\n// Restores warnings after previous \"#pragma option push\" suppressed them\n# pragma option pop\n#endif\n\n// Tests EXPECT_EQ.\nTEST(ExpectTest, EXPECT_EQ) {\n  EXPECT_EQ(5, 2 + 3);\n  EXPECT_NONFATAL_FAILURE(EXPECT_EQ(5, 2*3),\n                          \"Expected equality of these values:\\n\"\n                          \"  5\\n\"\n                          \"  2*3\\n\"\n                          \"    Which is: 6\");\n  EXPECT_NONFATAL_FAILURE(EXPECT_EQ(5, 2 - 3),\n                          \"2 - 3\");\n}\n\n// Tests 
using EXPECT_EQ on double values.  The purpose is to make\n// sure that the specialization we did for integer and anonymous enums\n// isn't used for double arguments.\nTEST(ExpectTest, EXPECT_EQ_Double) {\n  // A success.\n  EXPECT_EQ(5.6, 5.6);\n\n  // A failure.\n  EXPECT_NONFATAL_FAILURE(EXPECT_EQ(5.1, 5.2),\n                          \"5.1\");\n}\n\n// Tests EXPECT_EQ(NULL, pointer).\nTEST(ExpectTest, EXPECT_EQ_NULL) {\n  // A success.\n  const char* p = nullptr;\n  // Some older GCC versions may issue a spurious warning in this or the next\n  // assertion statement. This warning should not be suppressed with\n  // static_cast since the test verifies the ability to use bare NULL as the\n  // expected parameter to the macro.\n  EXPECT_EQ(nullptr, p);\n\n  // A failure.\n  int n = 0;\n  EXPECT_NONFATAL_FAILURE(EXPECT_EQ(nullptr, &n), \"  &n\\n    Which is:\");\n}\n\n// Tests EXPECT_EQ(0, non_pointer).  Since the literal 0 can be\n// treated as a null pointer by the compiler, we need to make sure\n// that EXPECT_EQ(0, non_pointer) isn't interpreted by Google Test as\n// EXPECT_EQ(static_cast<void*>(NULL), non_pointer).\nTEST(ExpectTest, EXPECT_EQ_0) {\n  int n = 0;\n\n  // A success.\n  EXPECT_EQ(0, n);\n\n  // A failure.\n  EXPECT_NONFATAL_FAILURE(EXPECT_EQ(0, 5.6),\n                          \"  0\\n  5.6\");\n}\n\n// Tests EXPECT_NE.\nTEST(ExpectTest, EXPECT_NE) {\n  EXPECT_NE(6, 7);\n\n  EXPECT_NONFATAL_FAILURE(EXPECT_NE('a', 'a'),\n                          \"Expected: ('a') != ('a'), \"\n                          \"actual: 'a' (97, 0x61) vs 'a' (97, 0x61)\");\n  EXPECT_NONFATAL_FAILURE(EXPECT_NE(2, 2),\n                          \"2\");\n  char* const p0 = nullptr;\n  EXPECT_NONFATAL_FAILURE(EXPECT_NE(p0, p0),\n                          \"p0\");\n  // Only way to get the Nokia compiler to compile the cast\n  // is to have a separate void* variable first. 
Putting\n  // the two casts on the same line doesn't work, neither does\n  // a direct C-style to char*.\n  void* pv1 = (void*)0x1234;  // NOLINT\n  char* const p1 = reinterpret_cast<char*>(pv1);\n  EXPECT_NONFATAL_FAILURE(EXPECT_NE(p1, p1),\n                          \"p1\");\n}\n\n// Tests EXPECT_LE.\nTEST(ExpectTest, EXPECT_LE) {\n  EXPECT_LE(2, 3);\n  EXPECT_LE(2, 2);\n  EXPECT_NONFATAL_FAILURE(EXPECT_LE(2, 0),\n                          \"Expected: (2) <= (0), actual: 2 vs 0\");\n  EXPECT_NONFATAL_FAILURE(EXPECT_LE(1.1, 0.9),\n                          \"(1.1) <= (0.9)\");\n}\n\n// Tests EXPECT_LT.\nTEST(ExpectTest, EXPECT_LT) {\n  EXPECT_LT(2, 3);\n  EXPECT_NONFATAL_FAILURE(EXPECT_LT(2, 2),\n                          \"Expected: (2) < (2), actual: 2 vs 2\");\n  EXPECT_NONFATAL_FAILURE(EXPECT_LT(2, 1),\n                          \"(2) < (1)\");\n}\n\n// Tests EXPECT_GE.\nTEST(ExpectTest, EXPECT_GE) {\n  EXPECT_GE(2, 1);\n  EXPECT_GE(2, 2);\n  EXPECT_NONFATAL_FAILURE(EXPECT_GE(2, 3),\n                          \"Expected: (2) >= (3), actual: 2 vs 3\");\n  EXPECT_NONFATAL_FAILURE(EXPECT_GE(0.9, 1.1),\n                          \"(0.9) >= (1.1)\");\n}\n\n// Tests EXPECT_GT.\nTEST(ExpectTest, EXPECT_GT) {\n  EXPECT_GT(2, 1);\n  EXPECT_NONFATAL_FAILURE(EXPECT_GT(2, 2),\n                          \"Expected: (2) > (2), actual: 2 vs 2\");\n  EXPECT_NONFATAL_FAILURE(EXPECT_GT(2, 3),\n                          \"(2) > (3)\");\n}\n\n#if GTEST_HAS_EXCEPTIONS\n\n// Tests EXPECT_THROW.\nTEST(ExpectTest, EXPECT_THROW) {\n  EXPECT_THROW(ThrowAnInteger(), int);\n  EXPECT_NONFATAL_FAILURE(EXPECT_THROW(ThrowAnInteger(), bool),\n                          \"Expected: ThrowAnInteger() throws an exception of \"\n                          \"type bool.\\n  Actual: it throws a different type.\");\n  EXPECT_NONFATAL_FAILURE(\n      EXPECT_THROW(ThrowNothing(), bool),\n      \"Expected: ThrowNothing() throws an exception of type bool.\\n\"\n      \"  Actual: it throws 
nothing.\");\n}\n\n// Tests EXPECT_NO_THROW.\nTEST(ExpectTest, EXPECT_NO_THROW) {\n  EXPECT_NO_THROW(ThrowNothing());\n  EXPECT_NONFATAL_FAILURE(EXPECT_NO_THROW(ThrowAnInteger()),\n                          \"Expected: ThrowAnInteger() doesn't throw an \"\n                          \"exception.\\n  Actual: it throws.\");\n  EXPECT_NONFATAL_FAILURE(EXPECT_NO_THROW(ThrowRuntimeError(\"A description\")),\n                          \"Expected: ThrowRuntimeError(\\\"A description\\\") \"\n                          \"doesn't throw an exception.\\n  \"\n                          \"Actual: it throws std::exception-derived exception \"\n                          \"with description: \\\"A description\\\".\");\n}\n\n// Tests EXPECT_ANY_THROW.\nTEST(ExpectTest, EXPECT_ANY_THROW) {\n  EXPECT_ANY_THROW(ThrowAnInteger());\n  EXPECT_NONFATAL_FAILURE(\n      EXPECT_ANY_THROW(ThrowNothing()),\n      \"Expected: ThrowNothing() throws an exception.\\n\"\n      \"  Actual: it doesn't.\");\n}\n\n#endif  // GTEST_HAS_EXCEPTIONS\n\n// Make sure we deal with the precedence of <<.\nTEST(ExpectTest, ExpectPrecedence) {\n  EXPECT_EQ(1 < 2, true);\n  EXPECT_NONFATAL_FAILURE(EXPECT_EQ(true, true && false),\n                          \"  true && false\\n    Which is: false\");\n}\n\n\n// Tests the StreamableToString() function.\n\n// Tests using StreamableToString() on a scalar.\nTEST(StreamableToStringTest, Scalar) {\n  EXPECT_STREQ(\"5\", StreamableToString(5).c_str());\n}\n\n// Tests using StreamableToString() on a non-char pointer.\nTEST(StreamableToStringTest, Pointer) {\n  int n = 0;\n  int* p = &n;\n  EXPECT_STRNE(\"(null)\", StreamableToString(p).c_str());\n}\n\n// Tests using StreamableToString() on a NULL non-char pointer.\nTEST(StreamableToStringTest, NullPointer) {\n  int* p = nullptr;\n  EXPECT_STREQ(\"(null)\", StreamableToString(p).c_str());\n}\n\n// Tests using StreamableToString() on a C string.\nTEST(StreamableToStringTest, CString) {\n  EXPECT_STREQ(\"Foo\", 
StreamableToString(\"Foo\").c_str());\n}\n\n// Tests using StreamableToString() on a NULL C string.\nTEST(StreamableToStringTest, NullCString) {\n  char* p = nullptr;\n  EXPECT_STREQ(\"(null)\", StreamableToString(p).c_str());\n}\n\n// Tests using streamable values as assertion messages.\n\n// Tests using std::string as an assertion message.\nTEST(StreamableTest, string) {\n  static const std::string str(\n      \"This failure message is a std::string, and is expected.\");\n  EXPECT_FATAL_FAILURE(FAIL() << str,\n                       str.c_str());\n}\n\n// Tests that we can output strings containing embedded NULs.\n// Limited to Linux because we can only do this with std::string's.\nTEST(StreamableTest, stringWithEmbeddedNUL) {\n  static const char char_array_with_nul[] =\n      \"Here's a NUL\\0 and some more string\";\n  static const std::string string_with_nul(char_array_with_nul,\n                                           sizeof(char_array_with_nul)\n                                           - 1);  // drops the trailing NUL\n  EXPECT_FATAL_FAILURE(FAIL() << string_with_nul,\n                       \"Here's a NUL\\\\0 and some more string\");\n}\n\n// Tests that we can output a NUL char.\nTEST(StreamableTest, NULChar) {\n  EXPECT_FATAL_FAILURE({  // NOLINT\n    FAIL() << \"A NUL\" << '\\0' << \" and some more string\";\n  }, \"A NUL\\\\0 and some more string\");\n}\n\n// Tests using int as an assertion message.\nTEST(StreamableTest, int) {\n  EXPECT_FATAL_FAILURE(FAIL() << 900913,\n                       \"900913\");\n}\n\n// Tests using NULL char pointer as an assertion message.\n//\n// In MSVC, streaming a NULL char * causes access violation.  Google Test\n// implemented a workaround (substituting \"(null)\" for NULL).  
This\n// tests whether the workaround works.\nTEST(StreamableTest, NullCharPtr) {\n  EXPECT_FATAL_FAILURE(FAIL() << static_cast<const char*>(nullptr), \"(null)\");\n}\n\n// Tests that basic IO manipulators (endl, ends, and flush) can be\n// streamed to testing::Message.\nTEST(StreamableTest, BasicIoManip) {\n  EXPECT_FATAL_FAILURE({  // NOLINT\n    FAIL() << \"Line 1.\" << std::endl\n           << \"A NUL char \" << std::ends << std::flush << \" in line 2.\";\n  }, \"Line 1.\\nA NUL char \\\\0 in line 2.\");\n}\n\n// Tests the macros that haven't been covered so far.\n\nvoid AddFailureHelper(bool* aborted) {\n  *aborted = true;\n  ADD_FAILURE() << \"Intentional failure.\";\n  *aborted = false;\n}\n\n// Tests ADD_FAILURE.\nTEST(MacroTest, ADD_FAILURE) {\n  bool aborted = true;\n  EXPECT_NONFATAL_FAILURE(AddFailureHelper(&aborted),\n                          \"Intentional failure.\");\n  EXPECT_FALSE(aborted);\n}\n\n// Tests ADD_FAILURE_AT.\nTEST(MacroTest, ADD_FAILURE_AT) {\n  // Verifies that ADD_FAILURE_AT does generate a nonfatal failure and\n  // the failure message contains the user-streamed part.\n  EXPECT_NONFATAL_FAILURE(ADD_FAILURE_AT(\"foo.cc\", 42) << \"Wrong!\", \"Wrong!\");\n\n  // Verifies that the user-streamed part is optional.\n  EXPECT_NONFATAL_FAILURE(ADD_FAILURE_AT(\"foo.cc\", 42), \"Failed\");\n\n  // Unfortunately, we cannot verify that the failure message contains\n  // the right file path and line number the same way, as\n  // EXPECT_NONFATAL_FAILURE() doesn't get to see the file path and\n  // line number.  
Instead, we do that in googletest-output-test_.cc.\n}\n\n// Tests FAIL.\nTEST(MacroTest, FAIL) {\n  EXPECT_FATAL_FAILURE(FAIL(),\n                       \"Failed\");\n  EXPECT_FATAL_FAILURE(FAIL() << \"Intentional failure.\",\n                       \"Intentional failure.\");\n}\n\n// Tests GTEST_FAIL_AT.\nTEST(MacroTest, GTEST_FAIL_AT) {\n  // Verifies that GTEST_FAIL_AT does generate a fatal failure and\n  // the failure message contains the user-streamed part.\n  EXPECT_FATAL_FAILURE(GTEST_FAIL_AT(\"foo.cc\", 42) << \"Wrong!\", \"Wrong!\");\n\n  // Verifies that the user-streamed part is optional.\n  EXPECT_FATAL_FAILURE(GTEST_FAIL_AT(\"foo.cc\", 42), \"Failed\");\n\n  // See the ADD_FAIL_AT test above to see how we test that the failure message\n  // contains the right filename and line number -- the same applies here.\n}\n\n// Tests SUCCEED\nTEST(MacroTest, SUCCEED) {\n  SUCCEED();\n  SUCCEED() << \"Explicit success.\";\n}\n\n// Tests for EXPECT_EQ() and ASSERT_EQ().\n//\n// These tests fail *intentionally*, s.t. 
the failure messages can be\n// generated and tested.\n//\n// We have different tests for different argument types.\n\n// Tests using bool values in {EXPECT|ASSERT}_EQ.\nTEST(EqAssertionTest, Bool) {\n  EXPECT_EQ(true,  true);\n  EXPECT_FATAL_FAILURE({\n      bool false_value = false;\n      ASSERT_EQ(false_value, true);\n    }, \"  false_value\\n    Which is: false\\n  true\");\n}\n\n// Tests using int values in {EXPECT|ASSERT}_EQ.\nTEST(EqAssertionTest, Int) {\n  ASSERT_EQ(32, 32);\n  EXPECT_NONFATAL_FAILURE(EXPECT_EQ(32, 33),\n                          \"  32\\n  33\");\n}\n\n// Tests using time_t values in {EXPECT|ASSERT}_EQ.\nTEST(EqAssertionTest, Time_T) {\n  EXPECT_EQ(static_cast<time_t>(0),\n            static_cast<time_t>(0));\n  EXPECT_FATAL_FAILURE(ASSERT_EQ(static_cast<time_t>(0),\n                                 static_cast<time_t>(1234)),\n                       \"1234\");\n}\n\n// Tests using char values in {EXPECT|ASSERT}_EQ.\nTEST(EqAssertionTest, Char) {\n  ASSERT_EQ('z', 'z');\n  const char ch = 'b';\n  EXPECT_NONFATAL_FAILURE(EXPECT_EQ('\\0', ch),\n                          \"  ch\\n    Which is: 'b'\");\n  EXPECT_NONFATAL_FAILURE(EXPECT_EQ('a', ch),\n                          \"  ch\\n    Which is: 'b'\");\n}\n\n// Tests using wchar_t values in {EXPECT|ASSERT}_EQ.\nTEST(EqAssertionTest, WideChar) {\n  EXPECT_EQ(L'b', L'b');\n\n  EXPECT_NONFATAL_FAILURE(EXPECT_EQ(L'\\0', L'x'),\n                          \"Expected equality of these values:\\n\"\n                          \"  L'\\0'\\n\"\n                          \"    Which is: L'\\0' (0, 0x0)\\n\"\n                          \"  L'x'\\n\"\n                          \"    Which is: L'x' (120, 0x78)\");\n\n  static wchar_t wchar;\n  wchar = L'b';\n  EXPECT_NONFATAL_FAILURE(EXPECT_EQ(L'a', wchar),\n                          \"wchar\");\n  wchar = 0x8119;\n  EXPECT_FATAL_FAILURE(ASSERT_EQ(static_cast<wchar_t>(0x8120), wchar),\n                       \"  wchar\\n    Which is: L'\");\n}\n\n// Tests 
using ::std::string values in {EXPECT|ASSERT}_EQ.\nTEST(EqAssertionTest, StdString) {\n  // Compares a const char* to an std::string that has identical\n  // content.\n  ASSERT_EQ(\"Test\", ::std::string(\"Test\"));\n\n  // Compares two identical std::strings.\n  static const ::std::string str1(\"A * in the middle\");\n  static const ::std::string str2(str1);\n  EXPECT_EQ(str1, str2);\n\n  // Compares a const char* to an std::string that has different\n  // content\n  EXPECT_NONFATAL_FAILURE(EXPECT_EQ(\"Test\", ::std::string(\"test\")),\n                          \"\\\"test\\\"\");\n\n  // Compares an std::string to a char* that has different content.\n  char* const p1 = const_cast<char*>(\"foo\");\n  EXPECT_NONFATAL_FAILURE(EXPECT_EQ(::std::string(\"bar\"), p1),\n                          \"p1\");\n\n  // Compares two std::strings that have different contents, one of\n  // which having a NUL character in the middle.  This should fail.\n  static ::std::string str3(str1);\n  str3.at(2) = '\\0';\n  EXPECT_FATAL_FAILURE(ASSERT_EQ(str1, str3),\n                       \"  str3\\n    Which is: \\\"A \\\\0 in the middle\\\"\");\n}\n\n#if GTEST_HAS_STD_WSTRING\n\n// Tests using ::std::wstring values in {EXPECT|ASSERT}_EQ.\nTEST(EqAssertionTest, StdWideString) {\n  // Compares two identical std::wstrings.\n  const ::std::wstring wstr1(L\"A * in the middle\");\n  const ::std::wstring wstr2(wstr1);\n  ASSERT_EQ(wstr1, wstr2);\n\n  // Compares an std::wstring to a const wchar_t* that has identical\n  // content.\n  const wchar_t kTestX8119[] = { 'T', 'e', 's', 't', 0x8119, '\\0' };\n  EXPECT_EQ(::std::wstring(kTestX8119), kTestX8119);\n\n  // Compares an std::wstring to a const wchar_t* that has different\n  // content.\n  const wchar_t kTestX8120[] = { 'T', 'e', 's', 't', 0x8120, '\\0' };\n  EXPECT_NONFATAL_FAILURE({  // NOLINT\n    EXPECT_EQ(::std::wstring(kTestX8119), kTestX8120);\n  }, \"kTestX8120\");\n\n  // Compares two std::wstrings that have different contents, one 
of\n  // which having a NUL character in the middle.\n  ::std::wstring wstr3(wstr1);\n  wstr3.at(2) = L'\\0';\n  EXPECT_NONFATAL_FAILURE(EXPECT_EQ(wstr1, wstr3),\n                          \"wstr3\");\n\n  // Compares a wchar_t* to an std::wstring that has different\n  // content.\n  EXPECT_FATAL_FAILURE({  // NOLINT\n    ASSERT_EQ(const_cast<wchar_t*>(L\"foo\"), ::std::wstring(L\"bar\"));\n  }, \"\");\n}\n\n#endif  // GTEST_HAS_STD_WSTRING\n\n// Tests using char pointers in {EXPECT|ASSERT}_EQ.\nTEST(EqAssertionTest, CharPointer) {\n  char* const p0 = nullptr;\n  // Only way to get the Nokia compiler to compile the cast\n  // is to have a separate void* variable first. Putting\n  // the two casts on the same line doesn't work, neither does\n  // a direct C-style to char*.\n  void* pv1 = (void*)0x1234;  // NOLINT\n  void* pv2 = (void*)0xABC0;  // NOLINT\n  char* const p1 = reinterpret_cast<char*>(pv1);\n  char* const p2 = reinterpret_cast<char*>(pv2);\n  ASSERT_EQ(p1, p1);\n\n  EXPECT_NONFATAL_FAILURE(EXPECT_EQ(p0, p2),\n                          \"  p2\\n    Which is:\");\n  EXPECT_NONFATAL_FAILURE(EXPECT_EQ(p1, p2),\n                          \"  p2\\n    Which is:\");\n  EXPECT_FATAL_FAILURE(ASSERT_EQ(reinterpret_cast<char*>(0x1234),\n                                 reinterpret_cast<char*>(0xABC0)),\n                       \"ABC0\");\n}\n\n// Tests using wchar_t pointers in {EXPECT|ASSERT}_EQ.\nTEST(EqAssertionTest, WideCharPointer) {\n  wchar_t* const p0 = nullptr;\n  // Only way to get the Nokia compiler to compile the cast\n  // is to have a separate void* variable first. 
Putting\n  // the two casts on the same line doesn't work, neither does\n  // a direct C-style to char*.\n  void* pv1 = (void*)0x1234;  // NOLINT\n  void* pv2 = (void*)0xABC0;  // NOLINT\n  wchar_t* const p1 = reinterpret_cast<wchar_t*>(pv1);\n  wchar_t* const p2 = reinterpret_cast<wchar_t*>(pv2);\n  EXPECT_EQ(p0, p0);\n\n  EXPECT_NONFATAL_FAILURE(EXPECT_EQ(p0, p2),\n                          \"  p2\\n    Which is:\");\n  EXPECT_NONFATAL_FAILURE(EXPECT_EQ(p1, p2),\n                          \"  p2\\n    Which is:\");\n  void* pv3 = (void*)0x1234;  // NOLINT\n  void* pv4 = (void*)0xABC0;  // NOLINT\n  const wchar_t* p3 = reinterpret_cast<const wchar_t*>(pv3);\n  const wchar_t* p4 = reinterpret_cast<const wchar_t*>(pv4);\n  EXPECT_NONFATAL_FAILURE(EXPECT_EQ(p3, p4),\n                          \"p4\");\n}\n\n// Tests using other types of pointers in {EXPECT|ASSERT}_EQ.\nTEST(EqAssertionTest, OtherPointer) {\n  ASSERT_EQ(static_cast<const int*>(nullptr), static_cast<const int*>(nullptr));\n  EXPECT_FATAL_FAILURE(ASSERT_EQ(static_cast<const int*>(nullptr),\n                                 reinterpret_cast<const int*>(0x1234)),\n                       \"0x1234\");\n}\n\n// A class that supports binary comparison operators but not streaming.\nclass UnprintableChar {\n public:\n  explicit UnprintableChar(char ch) : char_(ch) {}\n\n  bool operator==(const UnprintableChar& rhs) const {\n    return char_ == rhs.char_;\n  }\n  bool operator!=(const UnprintableChar& rhs) const {\n    return char_ != rhs.char_;\n  }\n  bool operator<(const UnprintableChar& rhs) const {\n    return char_ < rhs.char_;\n  }\n  bool operator<=(const UnprintableChar& rhs) const {\n    return char_ <= rhs.char_;\n  }\n  bool operator>(const UnprintableChar& rhs) const {\n    return char_ > rhs.char_;\n  }\n  bool operator>=(const UnprintableChar& rhs) const {\n    return char_ >= rhs.char_;\n  }\n\n private:\n  char char_;\n};\n\n// Tests that ASSERT_EQ() and friends don't require the arguments 
to\n// be printable.\nTEST(ComparisonAssertionTest, AcceptsUnprintableArgs) {\n  const UnprintableChar x('x'), y('y');\n  ASSERT_EQ(x, x);\n  EXPECT_NE(x, y);\n  ASSERT_LT(x, y);\n  EXPECT_LE(x, y);\n  ASSERT_GT(y, x);\n  EXPECT_GE(x, x);\n\n  EXPECT_NONFATAL_FAILURE(EXPECT_EQ(x, y), \"1-byte object <78>\");\n  EXPECT_NONFATAL_FAILURE(EXPECT_EQ(x, y), \"1-byte object <79>\");\n  EXPECT_NONFATAL_FAILURE(EXPECT_LT(y, y), \"1-byte object <79>\");\n  EXPECT_NONFATAL_FAILURE(EXPECT_GT(x, y), \"1-byte object <78>\");\n  EXPECT_NONFATAL_FAILURE(EXPECT_GT(x, y), \"1-byte object <79>\");\n\n  // Code tested by EXPECT_FATAL_FAILURE cannot reference local\n  // variables, so we have to write UnprintableChar('x') instead of x.\n#ifndef __BORLANDC__\n  // ICE's in C++Builder.\n  EXPECT_FATAL_FAILURE(ASSERT_NE(UnprintableChar('x'), UnprintableChar('x')),\n                       \"1-byte object <78>\");\n  EXPECT_FATAL_FAILURE(ASSERT_LE(UnprintableChar('y'), UnprintableChar('x')),\n                       \"1-byte object <78>\");\n#endif\n  EXPECT_FATAL_FAILURE(ASSERT_LE(UnprintableChar('y'), UnprintableChar('x')),\n                       \"1-byte object <79>\");\n  EXPECT_FATAL_FAILURE(ASSERT_GE(UnprintableChar('x'), UnprintableChar('y')),\n                       \"1-byte object <78>\");\n  EXPECT_FATAL_FAILURE(ASSERT_GE(UnprintableChar('x'), UnprintableChar('y')),\n                       \"1-byte object <79>\");\n}\n\n// Tests the FRIEND_TEST macro.\n\n// This class has a private member we want to test.  We will test it\n// both in a TEST and in a TEST_F.\nclass Foo {\n public:\n  Foo() {}\n\n private:\n  int Bar() const { return 1; }\n\n  // Declares the friend tests that can access the private member\n  // Bar().\n  FRIEND_TEST(FRIEND_TEST_Test, TEST);\n  FRIEND_TEST(FRIEND_TEST_Test2, TEST_F);\n};\n\n// Tests that the FRIEND_TEST declaration allows a TEST to access a\n// class's private members.  
This should compile.\nTEST(FRIEND_TEST_Test, TEST) {\n  ASSERT_EQ(1, Foo().Bar());\n}\n\n// The fixture needed to test using FRIEND_TEST with TEST_F.\nclass FRIEND_TEST_Test2 : public Test {\n protected:\n  Foo foo;\n};\n\n// Tests that the FRIEND_TEST declaration allows a TEST_F to access a\n// class's private members.  This should compile.\nTEST_F(FRIEND_TEST_Test2, TEST_F) {\n  ASSERT_EQ(1, foo.Bar());\n}\n\n// Tests the life cycle of Test objects.\n\n// The test fixture for testing the life cycle of Test objects.\n//\n// This class counts the number of live test objects that uses this\n// fixture.\nclass TestLifeCycleTest : public Test {\n protected:\n  // Constructor.  Increments the number of test objects that uses\n  // this fixture.\n  TestLifeCycleTest() { count_++; }\n\n  // Destructor.  Decrements the number of test objects that uses this\n  // fixture.\n  ~TestLifeCycleTest() override { count_--; }\n\n  // Returns the number of live test objects that uses this fixture.\n  int count() const { return count_; }\n\n private:\n  static int count_;\n};\n\nint TestLifeCycleTest::count_ = 0;\n\n// Tests the life cycle of test objects.\nTEST_F(TestLifeCycleTest, Test1) {\n  // There should be only one test object in this test case that's\n  // currently alive.\n  ASSERT_EQ(1, count());\n}\n\n// Tests the life cycle of test objects.\nTEST_F(TestLifeCycleTest, Test2) {\n  // After Test1 is done and Test2 is started, there should still be\n  // only one live test object, as the object for Test1 should've been\n  // deleted.\n  ASSERT_EQ(1, count());\n}\n\n}  // namespace\n\n// Tests that the copy constructor works when it is NOT optimized away by\n// the compiler.\nTEST(AssertionResultTest, CopyConstructorWorksWhenNotOptimied) {\n  // Checks that the copy constructor doesn't try to dereference NULL pointers\n  // in the source object.\n  AssertionResult r1 = AssertionSuccess();\n  AssertionResult r2 = r1;\n  // The following line is added to prevent the compiler 
from optimizing\n  // away the constructor call.\n  r1 << \"abc\";\n\n  AssertionResult r3 = r1;\n  EXPECT_EQ(static_cast<bool>(r3), static_cast<bool>(r1));\n  EXPECT_STREQ(\"abc\", r1.message());\n}\n\n// Tests that AssertionSuccess and AssertionFailure construct\n// AssertionResult objects as expected.\nTEST(AssertionResultTest, ConstructionWorks) {\n  AssertionResult r1 = AssertionSuccess();\n  EXPECT_TRUE(r1);\n  EXPECT_STREQ(\"\", r1.message());\n\n  AssertionResult r2 = AssertionSuccess() << \"abc\";\n  EXPECT_TRUE(r2);\n  EXPECT_STREQ(\"abc\", r2.message());\n\n  AssertionResult r3 = AssertionFailure();\n  EXPECT_FALSE(r3);\n  EXPECT_STREQ(\"\", r3.message());\n\n  AssertionResult r4 = AssertionFailure() << \"def\";\n  EXPECT_FALSE(r4);\n  EXPECT_STREQ(\"def\", r4.message());\n\n  AssertionResult r5 = AssertionFailure(Message() << \"ghi\");\n  EXPECT_FALSE(r5);\n  EXPECT_STREQ(\"ghi\", r5.message());\n}\n\n// Tests that the negation flips the predicate result but keeps the message.\nTEST(AssertionResultTest, NegationWorks) {\n  AssertionResult r1 = AssertionSuccess() << \"abc\";\n  EXPECT_FALSE(!r1);\n  EXPECT_STREQ(\"abc\", (!r1).message());\n\n  AssertionResult r2 = AssertionFailure() << \"def\";\n  EXPECT_TRUE(!r2);\n  EXPECT_STREQ(\"def\", (!r2).message());\n}\n\nTEST(AssertionResultTest, StreamingWorks) {\n  AssertionResult r = AssertionSuccess();\n  r << \"abc\" << 'd' << 0 << true;\n  EXPECT_STREQ(\"abcd0true\", r.message());\n}\n\nTEST(AssertionResultTest, CanStreamOstreamManipulators) {\n  AssertionResult r = AssertionSuccess();\n  r << \"Data\" << std::endl << std::flush << std::ends << \"Will be visible\";\n  EXPECT_STREQ(\"Data\\n\\\\0Will be visible\", r.message());\n}\n\n// The next test uses explicit conversion operators\n\nTEST(AssertionResultTest, ConstructibleFromContextuallyConvertibleToBool) {\n  struct ExplicitlyConvertibleToBool {\n    explicit operator bool() const { return value; }\n    bool value;\n  };\n  ExplicitlyConvertibleToBool 
v1 = {false};\n  ExplicitlyConvertibleToBool v2 = {true};\n  EXPECT_FALSE(v1);\n  EXPECT_TRUE(v2);\n}\n\nstruct ConvertibleToAssertionResult {\n  operator AssertionResult() const { return AssertionResult(true); }\n};\n\nTEST(AssertionResultTest, ConstructibleFromImplicitlyConvertible) {\n  ConvertibleToAssertionResult obj;\n  EXPECT_TRUE(obj);\n}\n\n// Tests streaming a user type whose definition and operator << are\n// both in the global namespace.\nclass Base {\n public:\n  explicit Base(int an_x) : x_(an_x) {}\n  int x() const { return x_; }\n private:\n  int x_;\n};\nstd::ostream& operator<<(std::ostream& os,\n                         const Base& val) {\n  return os << val.x();\n}\nstd::ostream& operator<<(std::ostream& os,\n                         const Base* pointer) {\n  return os << \"(\" << pointer->x() << \")\";\n}\n\nTEST(MessageTest, CanStreamUserTypeInGlobalNameSpace) {\n  Message msg;\n  Base a(1);\n\n  msg << a << &a;  // Uses ::operator<<.\n  EXPECT_STREQ(\"1(1)\", msg.GetString().c_str());\n}\n\n// Tests streaming a user type whose definition and operator<< are\n// both in an unnamed namespace.\nnamespace {\nclass MyTypeInUnnamedNameSpace : public Base {\n public:\n  explicit MyTypeInUnnamedNameSpace(int an_x): Base(an_x) {}\n};\nstd::ostream& operator<<(std::ostream& os,\n                         const MyTypeInUnnamedNameSpace& val) {\n  return os << val.x();\n}\nstd::ostream& operator<<(std::ostream& os,\n                         const MyTypeInUnnamedNameSpace* pointer) {\n  return os << \"(\" << pointer->x() << \")\";\n}\n}  // namespace\n\nTEST(MessageTest, CanStreamUserTypeInUnnamedNameSpace) {\n  Message msg;\n  MyTypeInUnnamedNameSpace a(1);\n\n  msg << a << &a;  // Uses <unnamed_namespace>::operator<<.\n  EXPECT_STREQ(\"1(1)\", msg.GetString().c_str());\n}\n\n// Tests streaming a user type whose definition and operator<< are\n// both in a user namespace.\nnamespace namespace1 {\nclass MyTypeInNameSpace1 : public Base {\n public:\n  
explicit MyTypeInNameSpace1(int an_x): Base(an_x) {}\n};\nstd::ostream& operator<<(std::ostream& os,\n                         const MyTypeInNameSpace1& val) {\n  return os << val.x();\n}\nstd::ostream& operator<<(std::ostream& os,\n                         const MyTypeInNameSpace1* pointer) {\n  return os << \"(\" << pointer->x() << \")\";\n}\n}  // namespace namespace1\n\nTEST(MessageTest, CanStreamUserTypeInUserNameSpace) {\n  Message msg;\n  namespace1::MyTypeInNameSpace1 a(1);\n\n  msg << a << &a;  // Uses namespace1::operator<<.\n  EXPECT_STREQ(\"1(1)\", msg.GetString().c_str());\n}\n\n// Tests streaming a user type whose definition is in a user namespace\n// but whose operator<< is in the global namespace.\nnamespace namespace2 {\nclass MyTypeInNameSpace2 : public ::Base {\n public:\n  explicit MyTypeInNameSpace2(int an_x): Base(an_x) {}\n};\n}  // namespace namespace2\nstd::ostream& operator<<(std::ostream& os,\n                         const namespace2::MyTypeInNameSpace2& val) {\n  return os << val.x();\n}\nstd::ostream& operator<<(std::ostream& os,\n                         const namespace2::MyTypeInNameSpace2* pointer) {\n  return os << \"(\" << pointer->x() << \")\";\n}\n\nTEST(MessageTest, CanStreamUserTypeInUserNameSpaceWithStreamOperatorInGlobal) {\n  Message msg;\n  namespace2::MyTypeInNameSpace2 a(1);\n\n  msg << a << &a;  // Uses ::operator<<.\n  EXPECT_STREQ(\"1(1)\", msg.GetString().c_str());\n}\n\n// Tests streaming NULL pointers to testing::Message.\nTEST(MessageTest, NullPointers) {\n  Message msg;\n  char* const p1 = nullptr;\n  unsigned char* const p2 = nullptr;\n  int* p3 = nullptr;\n  double* p4 = nullptr;\n  bool* p5 = nullptr;\n  Message* p6 = nullptr;\n\n  msg << p1 << p2 << p3 << p4 << p5 << p6;\n  ASSERT_STREQ(\"(null)(null)(null)(null)(null)(null)\",\n               msg.GetString().c_str());\n}\n\n// Tests streaming wide strings to testing::Message.\nTEST(MessageTest, WideStrings) {\n  // Streams a NULL of type const wchar_t*.\n  
const wchar_t* const_wstr = nullptr;\n  EXPECT_STREQ(\"(null)\",\n               (Message() << const_wstr).GetString().c_str());\n\n  // Streams a NULL of type wchar_t*.\n  wchar_t* wstr = nullptr;\n  EXPECT_STREQ(\"(null)\",\n               (Message() << wstr).GetString().c_str());\n\n  // Streams a non-NULL of type const wchar_t*.\n  const_wstr = L\"abc\\x8119\";\n  EXPECT_STREQ(\"abc\\xe8\\x84\\x99\",\n               (Message() << const_wstr).GetString().c_str());\n\n  // Streams a non-NULL of type wchar_t*.\n  wstr = const_cast<wchar_t*>(const_wstr);\n  EXPECT_STREQ(\"abc\\xe8\\x84\\x99\",\n               (Message() << wstr).GetString().c_str());\n}\n\n\n// This line tests that we can define tests in the testing namespace.\nnamespace testing {\n\n// Tests the TestInfo class.\n\nclass TestInfoTest : public Test {\n protected:\n  static const TestInfo* GetTestInfo(const char* test_name) {\n    const TestSuite* const test_suite =\n        GetUnitTestImpl()->GetTestSuite(\"TestInfoTest\", \"\", nullptr, nullptr);\n\n    for (int i = 0; i < test_suite->total_test_count(); ++i) {\n      const TestInfo* const test_info = test_suite->GetTestInfo(i);\n      if (strcmp(test_name, test_info->name()) == 0)\n        return test_info;\n    }\n    return nullptr;\n  }\n\n  static const TestResult* GetTestResult(\n      const TestInfo* test_info) {\n    return test_info->result();\n  }\n};\n\n// Tests TestInfo::test_case_name() and TestInfo::name().\nTEST_F(TestInfoTest, Names) {\n  const TestInfo* const test_info = GetTestInfo(\"Names\");\n\n  ASSERT_STREQ(\"TestInfoTest\", test_info->test_case_name());\n  ASSERT_STREQ(\"Names\", test_info->name());\n}\n\n// Tests TestInfo::result().\nTEST_F(TestInfoTest, result) {\n  const TestInfo* const test_info = GetTestInfo(\"result\");\n\n  // Initially, there is no TestPartResult for this test.\n  ASSERT_EQ(0, GetTestResult(test_info)->total_part_count());\n\n  // After the previous assertion, there is still none.\n  ASSERT_EQ(0, 
GetTestResult(test_info)->total_part_count());\n}\n\n#define VERIFY_CODE_LOCATION \\\n  const int expected_line = __LINE__ - 1; \\\n  const TestInfo* const test_info = GetUnitTestImpl()->current_test_info(); \\\n  ASSERT_TRUE(test_info); \\\n  EXPECT_STREQ(__FILE__, test_info->file()); \\\n  EXPECT_EQ(expected_line, test_info->line())\n\nTEST(CodeLocationForTEST, Verify) {\n  VERIFY_CODE_LOCATION;\n}\n\nclass CodeLocationForTESTF : public Test {\n};\n\nTEST_F(CodeLocationForTESTF, Verify) {\n  VERIFY_CODE_LOCATION;\n}\n\nclass CodeLocationForTESTP : public TestWithParam<int> {\n};\n\nTEST_P(CodeLocationForTESTP, Verify) {\n  VERIFY_CODE_LOCATION;\n}\n\nINSTANTIATE_TEST_SUITE_P(All, CodeLocationForTESTP, Values(0));\n\ntemplate <typename T>\nclass CodeLocationForTYPEDTEST : public Test {\n};\n\nTYPED_TEST_SUITE(CodeLocationForTYPEDTEST, int);\n\nTYPED_TEST(CodeLocationForTYPEDTEST, Verify) {\n  VERIFY_CODE_LOCATION;\n}\n\ntemplate <typename T>\nclass CodeLocationForTYPEDTESTP : public Test {\n};\n\nTYPED_TEST_SUITE_P(CodeLocationForTYPEDTESTP);\n\nTYPED_TEST_P(CodeLocationForTYPEDTESTP, Verify) {\n  VERIFY_CODE_LOCATION;\n}\n\nREGISTER_TYPED_TEST_SUITE_P(CodeLocationForTYPEDTESTP, Verify);\n\nINSTANTIATE_TYPED_TEST_SUITE_P(My, CodeLocationForTYPEDTESTP, int);\n\n#undef VERIFY_CODE_LOCATION\n\n// Tests setting up and tearing down a test case.\n// Legacy API is deprecated but still available\n#ifndef REMOVE_LEGACY_TEST_CASEAPI\nclass SetUpTestCaseTest : public Test {\n protected:\n  // This will be called once before the first test in this test case\n  // is run.\n  static void SetUpTestCase() {\n    printf(\"Setting up the test case . . .\\n\");\n\n    // Initializes some shared resource.  In this simple example, we\n    // just create a C string.  
More complex stuff can be done if\n    // desired.\n    shared_resource_ = \"123\";\n\n    // Increments the number of test cases that have been set up.\n    counter_++;\n\n    // SetUpTestCase() should be called only once.\n    EXPECT_EQ(1, counter_);\n  }\n\n  // This will be called once after the last test in this test case is\n  // run.\n  static void TearDownTestCase() {\n    printf(\"Tearing down the test case . . .\\n\");\n\n    // Decrements the number of test cases that have been set up.\n    counter_--;\n\n    // TearDownTestCase() should be called only once.\n    EXPECT_EQ(0, counter_);\n\n    // Cleans up the shared resource.\n    shared_resource_ = nullptr;\n  }\n\n  // This will be called before each test in this test case.\n  void SetUp() override {\n    // SetUpTestCase() should be called only once, so counter_ should\n    // always be 1.\n    EXPECT_EQ(1, counter_);\n  }\n\n  // Number of test cases that have been set up.\n  static int counter_;\n\n  // Some resource to be shared by all tests in this test case.\n  static const char* shared_resource_;\n};\n\nint SetUpTestCaseTest::counter_ = 0;\nconst char* SetUpTestCaseTest::shared_resource_ = nullptr;\n\n// A test that uses the shared resource.\nTEST_F(SetUpTestCaseTest, Test1) { EXPECT_STRNE(nullptr, shared_resource_); }\n\n// Another test that uses the shared resource.\nTEST_F(SetUpTestCaseTest, Test2) {\n  EXPECT_STREQ(\"123\", shared_resource_);\n}\n#endif  //  REMOVE_LEGACY_TEST_CASEAPI\n\n// Tests SetupTestSuite/TearDown TestSuite\nclass SetUpTestSuiteTest : public Test {\n protected:\n  // This will be called once before the first test in this test case\n  // is run.\n  static void SetUpTestSuite() {\n    printf(\"Setting up the test suite . . .\\n\");\n\n    // Initializes some shared resource.  In this simple example, we\n    // just create a C string.  
More complex stuff can be done if\n    // desired.\n    shared_resource_ = \"123\";\n\n    // Increments the number of test cases that have been set up.\n    counter_++;\n\n    // SetUpTestSuite() should be called only once.\n    EXPECT_EQ(1, counter_);\n  }\n\n  // This will be called once after the last test in this test case is\n  // run.\n  static void TearDownTestSuite() {\n    printf(\"Tearing down the test suite . . .\\n\");\n\n    // Decrements the number of test suites that have been set up.\n    counter_--;\n\n    // TearDownTestSuite() should be called only once.\n    EXPECT_EQ(0, counter_);\n\n    // Cleans up the shared resource.\n    shared_resource_ = nullptr;\n  }\n\n  // This will be called before each test in this test case.\n  void SetUp() override {\n    // SetUpTestSuite() should be called only once, so counter_ should\n    // always be 1.\n    EXPECT_EQ(1, counter_);\n  }\n\n  // Number of test suites that have been set up.\n  static int counter_;\n\n  // Some resource to be shared by all tests in this test case.\n  static const char* shared_resource_;\n};\n\nint SetUpTestSuiteTest::counter_ = 0;\nconst char* SetUpTestSuiteTest::shared_resource_ = nullptr;\n\n// A test that uses the shared resource.\nTEST_F(SetUpTestSuiteTest, TestSetupTestSuite1) {\n  EXPECT_STRNE(nullptr, shared_resource_);\n}\n\n// Another test that uses the shared resource.\nTEST_F(SetUpTestSuiteTest, TestSetupTestSuite2) {\n  EXPECT_STREQ(\"123\", shared_resource_);\n}\n\n// The ParseFlagsTest test case tests ParseGoogleTestFlagsOnly.\n\n// The Flags struct stores a copy of all Google Test flags.\nstruct Flags {\n  // Constructs a Flags struct where each flag has its default value.\n  Flags() : also_run_disabled_tests(false),\n            break_on_failure(false),\n            catch_exceptions(false),\n            death_test_use_fork(false),\n            filter(\"\"),\n            list_tests(false),\n            output(\"\"),\n            print_time(true),\n            
random_seed(0),\n            repeat(1),\n            shuffle(false),\n            stack_trace_depth(kMaxStackTraceDepth),\n            stream_result_to(\"\"),\n            throw_on_failure(false) {}\n\n  // Factory methods.\n\n  // Creates a Flags struct where the gtest_also_run_disabled_tests flag has\n  // the given value.\n  static Flags AlsoRunDisabledTests(bool also_run_disabled_tests) {\n    Flags flags;\n    flags.also_run_disabled_tests = also_run_disabled_tests;\n    return flags;\n  }\n\n  // Creates a Flags struct where the gtest_break_on_failure flag has\n  // the given value.\n  static Flags BreakOnFailure(bool break_on_failure) {\n    Flags flags;\n    flags.break_on_failure = break_on_failure;\n    return flags;\n  }\n\n  // Creates a Flags struct where the gtest_catch_exceptions flag has\n  // the given value.\n  static Flags CatchExceptions(bool catch_exceptions) {\n    Flags flags;\n    flags.catch_exceptions = catch_exceptions;\n    return flags;\n  }\n\n  // Creates a Flags struct where the gtest_death_test_use_fork flag has\n  // the given value.\n  static Flags DeathTestUseFork(bool death_test_use_fork) {\n    Flags flags;\n    flags.death_test_use_fork = death_test_use_fork;\n    return flags;\n  }\n\n  // Creates a Flags struct where the gtest_filter flag has the given\n  // value.\n  static Flags Filter(const char* filter) {\n    Flags flags;\n    flags.filter = filter;\n    return flags;\n  }\n\n  // Creates a Flags struct where the gtest_list_tests flag has the\n  // given value.\n  static Flags ListTests(bool list_tests) {\n    Flags flags;\n    flags.list_tests = list_tests;\n    return flags;\n  }\n\n  // Creates a Flags struct where the gtest_output flag has the given\n  // value.\n  static Flags Output(const char* output) {\n    Flags flags;\n    flags.output = output;\n    return flags;\n  }\n\n  // Creates a Flags struct where the gtest_print_time flag has the given\n  // value.\n  static Flags PrintTime(bool print_time) {\n    
Flags flags;\n    flags.print_time = print_time;\n    return flags;\n  }\n\n  // Creates a Flags struct where the gtest_random_seed flag has the given\n  // value.\n  static Flags RandomSeed(Int32 random_seed) {\n    Flags flags;\n    flags.random_seed = random_seed;\n    return flags;\n  }\n\n  // Creates a Flags struct where the gtest_repeat flag has the given\n  // value.\n  static Flags Repeat(Int32 repeat) {\n    Flags flags;\n    flags.repeat = repeat;\n    return flags;\n  }\n\n  // Creates a Flags struct where the gtest_shuffle flag has the given\n  // value.\n  static Flags Shuffle(bool shuffle) {\n    Flags flags;\n    flags.shuffle = shuffle;\n    return flags;\n  }\n\n  // Creates a Flags struct where the GTEST_FLAG(stack_trace_depth) flag has\n  // the given value.\n  static Flags StackTraceDepth(Int32 stack_trace_depth) {\n    Flags flags;\n    flags.stack_trace_depth = stack_trace_depth;\n    return flags;\n  }\n\n  // Creates a Flags struct where the GTEST_FLAG(stream_result_to) flag has\n  // the given value.\n  static Flags StreamResultTo(const char* stream_result_to) {\n    Flags flags;\n    flags.stream_result_to = stream_result_to;\n    return flags;\n  }\n\n  // Creates a Flags struct where the gtest_throw_on_failure flag has\n  // the given value.\n  static Flags ThrowOnFailure(bool throw_on_failure) {\n    Flags flags;\n    flags.throw_on_failure = throw_on_failure;\n    return flags;\n  }\n\n  // These fields store the flag values.\n  bool also_run_disabled_tests;\n  bool break_on_failure;\n  bool catch_exceptions;\n  bool death_test_use_fork;\n  const char* filter;\n  bool list_tests;\n  const char* output;\n  bool print_time;\n  Int32 random_seed;\n  Int32 repeat;\n  bool shuffle;\n  Int32 stack_trace_depth;\n  const char* stream_result_to;\n  bool throw_on_failure;\n};\n\n// Fixture for testing ParseGoogleTestFlagsOnly().\nclass ParseFlagsTest : public Test {\n protected:\n  // Clears the flags before each test.\n  void SetUp() override 
{\n    GTEST_FLAG(also_run_disabled_tests) = false;\n    GTEST_FLAG(break_on_failure) = false;\n    GTEST_FLAG(catch_exceptions) = false;\n    GTEST_FLAG(death_test_use_fork) = false;\n    GTEST_FLAG(filter) = \"\";\n    GTEST_FLAG(list_tests) = false;\n    GTEST_FLAG(output) = \"\";\n    GTEST_FLAG(print_time) = true;\n    GTEST_FLAG(random_seed) = 0;\n    GTEST_FLAG(repeat) = 1;\n    GTEST_FLAG(shuffle) = false;\n    GTEST_FLAG(stack_trace_depth) = kMaxStackTraceDepth;\n    GTEST_FLAG(stream_result_to) = \"\";\n    GTEST_FLAG(throw_on_failure) = false;\n  }\n\n  // Asserts that two narrow or wide string arrays are equal.\n  template <typename CharType>\n  static void AssertStringArrayEq(int size1, CharType** array1, int size2,\n                                  CharType** array2) {\n    ASSERT_EQ(size1, size2) << \" Array sizes different.\";\n\n    for (int i = 0; i != size1; i++) {\n      ASSERT_STREQ(array1[i], array2[i]) << \" where i == \" << i;\n    }\n  }\n\n  // Verifies that the flag values match the expected values.\n  static void CheckFlags(const Flags& expected) {\n    EXPECT_EQ(expected.also_run_disabled_tests,\n              GTEST_FLAG(also_run_disabled_tests));\n    EXPECT_EQ(expected.break_on_failure, GTEST_FLAG(break_on_failure));\n    EXPECT_EQ(expected.catch_exceptions, GTEST_FLAG(catch_exceptions));\n    EXPECT_EQ(expected.death_test_use_fork, GTEST_FLAG(death_test_use_fork));\n    EXPECT_STREQ(expected.filter, GTEST_FLAG(filter).c_str());\n    EXPECT_EQ(expected.list_tests, GTEST_FLAG(list_tests));\n    EXPECT_STREQ(expected.output, GTEST_FLAG(output).c_str());\n    EXPECT_EQ(expected.print_time, GTEST_FLAG(print_time));\n    EXPECT_EQ(expected.random_seed, GTEST_FLAG(random_seed));\n    EXPECT_EQ(expected.repeat, GTEST_FLAG(repeat));\n    EXPECT_EQ(expected.shuffle, GTEST_FLAG(shuffle));\n    EXPECT_EQ(expected.stack_trace_depth, GTEST_FLAG(stack_trace_depth));\n    EXPECT_STREQ(expected.stream_result_to,\n                 
GTEST_FLAG(stream_result_to).c_str());\n    EXPECT_EQ(expected.throw_on_failure, GTEST_FLAG(throw_on_failure));\n  }\n\n  // Parses a command line (specified by argc1 and argv1), then\n  // verifies that the flag values are expected and that the\n  // recognized flags are removed from the command line.\n  template <typename CharType>\n  static void TestParsingFlags(int argc1, const CharType** argv1,\n                               int argc2, const CharType** argv2,\n                               const Flags& expected, bool should_print_help) {\n    const bool saved_help_flag = ::testing::internal::g_help_flag;\n    ::testing::internal::g_help_flag = false;\n\n# if GTEST_HAS_STREAM_REDIRECTION\n    CaptureStdout();\n# endif\n\n    // Parses the command line.\n    internal::ParseGoogleTestFlagsOnly(&argc1, const_cast<CharType**>(argv1));\n\n# if GTEST_HAS_STREAM_REDIRECTION\n    const std::string captured_stdout = GetCapturedStdout();\n# endif\n\n    // Verifies the flag values.\n    CheckFlags(expected);\n\n    // Verifies that the recognized flags are removed from the command\n    // line.\n    AssertStringArrayEq(argc1 + 1, argv1, argc2 + 1, argv2);\n\n    // ParseGoogleTestFlagsOnly should neither set g_help_flag nor print the\n    // help message for the flags it recognizes.\n    EXPECT_EQ(should_print_help, ::testing::internal::g_help_flag);\n\n# if GTEST_HAS_STREAM_REDIRECTION\n    const char* const expected_help_fragment =\n        \"This program contains tests written using\";\n    if (should_print_help) {\n      EXPECT_PRED_FORMAT2(IsSubstring, expected_help_fragment, captured_stdout);\n    } else {\n      EXPECT_PRED_FORMAT2(IsNotSubstring,\n                          expected_help_fragment, captured_stdout);\n    }\n# endif  // GTEST_HAS_STREAM_REDIRECTION\n\n    ::testing::internal::g_help_flag = saved_help_flag;\n  }\n\n  // This macro wraps TestParsingFlags s.t. 
the user doesn't need\n  // to specify the array sizes.\n\n# define GTEST_TEST_PARSING_FLAGS_(argv1, argv2, expected, should_print_help) \\\n  TestParsingFlags(sizeof(argv1)/sizeof(*argv1) - 1, argv1, \\\n                   sizeof(argv2)/sizeof(*argv2) - 1, argv2, \\\n                   expected, should_print_help)\n};\n\n// Tests parsing an empty command line.\nTEST_F(ParseFlagsTest, Empty) {\n  const char* argv[] = {nullptr};\n\n  const char* argv2[] = {nullptr};\n\n  GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags(), false);\n}\n\n// Tests parsing a command line that has no flag.\nTEST_F(ParseFlagsTest, NoFlag) {\n  const char* argv[] = {\"foo.exe\", nullptr};\n\n  const char* argv2[] = {\"foo.exe\", nullptr};\n\n  GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags(), false);\n}\n\n// Tests parsing a bad --gtest_filter flag.\nTEST_F(ParseFlagsTest, FilterBad) {\n  const char* argv[] = {\"foo.exe\", \"--gtest_filter\", nullptr};\n\n  const char* argv2[] = {\"foo.exe\", \"--gtest_filter\", nullptr};\n\n  GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::Filter(\"\"), true);\n}\n\n// Tests parsing an empty --gtest_filter flag.\nTEST_F(ParseFlagsTest, FilterEmpty) {\n  const char* argv[] = {\"foo.exe\", \"--gtest_filter=\", nullptr};\n\n  const char* argv2[] = {\"foo.exe\", nullptr};\n\n  GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::Filter(\"\"), false);\n}\n\n// Tests parsing a non-empty --gtest_filter flag.\nTEST_F(ParseFlagsTest, FilterNonEmpty) {\n  const char* argv[] = {\"foo.exe\", \"--gtest_filter=abc\", nullptr};\n\n  const char* argv2[] = {\"foo.exe\", nullptr};\n\n  GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::Filter(\"abc\"), false);\n}\n\n// Tests parsing --gtest_break_on_failure.\nTEST_F(ParseFlagsTest, BreakOnFailureWithoutValue) {\n  const char* argv[] = {\"foo.exe\", \"--gtest_break_on_failure\", nullptr};\n\n  const char* argv2[] = {\"foo.exe\", nullptr};\n\n  GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::BreakOnFailure(true), false);\n}\n\n// Tests parsing 
--gtest_break_on_failure=0.\nTEST_F(ParseFlagsTest, BreakOnFailureFalse_0) {\n  const char* argv[] = {\"foo.exe\", \"--gtest_break_on_failure=0\", nullptr};\n\n  const char* argv2[] = {\"foo.exe\", nullptr};\n\n  GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::BreakOnFailure(false), false);\n}\n\n// Tests parsing --gtest_break_on_failure=f.\nTEST_F(ParseFlagsTest, BreakOnFailureFalse_f) {\n  const char* argv[] = {\"foo.exe\", \"--gtest_break_on_failure=f\", nullptr};\n\n  const char* argv2[] = {\"foo.exe\", nullptr};\n\n  GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::BreakOnFailure(false), false);\n}\n\n// Tests parsing --gtest_break_on_failure=F.\nTEST_F(ParseFlagsTest, BreakOnFailureFalse_F) {\n  const char* argv[] = {\"foo.exe\", \"--gtest_break_on_failure=F\", nullptr};\n\n  const char* argv2[] = {\"foo.exe\", nullptr};\n\n  GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::BreakOnFailure(false), false);\n}\n\n// Tests parsing a --gtest_break_on_failure flag that has a \"true\"\n// definition.\nTEST_F(ParseFlagsTest, BreakOnFailureTrue) {\n  const char* argv[] = {\"foo.exe\", \"--gtest_break_on_failure=1\", nullptr};\n\n  const char* argv2[] = {\"foo.exe\", nullptr};\n\n  GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::BreakOnFailure(true), false);\n}\n\n// Tests parsing --gtest_catch_exceptions.\nTEST_F(ParseFlagsTest, CatchExceptions) {\n  const char* argv[] = {\"foo.exe\", \"--gtest_catch_exceptions\", nullptr};\n\n  const char* argv2[] = {\"foo.exe\", nullptr};\n\n  GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::CatchExceptions(true), false);\n}\n\n// Tests parsing --gtest_death_test_use_fork.\nTEST_F(ParseFlagsTest, DeathTestUseFork) {\n  const char* argv[] = {\"foo.exe\", \"--gtest_death_test_use_fork\", nullptr};\n\n  const char* argv2[] = {\"foo.exe\", nullptr};\n\n  GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::DeathTestUseFork(true), false);\n}\n\n// Tests having the same flag twice with different values.  
The\n// expected behavior is that the one coming last takes precedence.\nTEST_F(ParseFlagsTest, DuplicatedFlags) {\n  const char* argv[] = {\"foo.exe\", \"--gtest_filter=a\", \"--gtest_filter=b\",\n                        nullptr};\n\n  const char* argv2[] = {\"foo.exe\", nullptr};\n\n  GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::Filter(\"b\"), false);\n}\n\n// Tests having an unrecognized flag on the command line.\nTEST_F(ParseFlagsTest, UnrecognizedFlag) {\n  const char* argv[] = {\"foo.exe\", \"--gtest_break_on_failure\",\n                        \"bar\",  // Unrecognized by Google Test.\n                        \"--gtest_filter=b\", nullptr};\n\n  const char* argv2[] = {\"foo.exe\", \"bar\", nullptr};\n\n  Flags flags;\n  flags.break_on_failure = true;\n  flags.filter = \"b\";\n  GTEST_TEST_PARSING_FLAGS_(argv, argv2, flags, false);\n}\n\n// Tests having a --gtest_list_tests flag\nTEST_F(ParseFlagsTest, ListTestsFlag) {\n  const char* argv[] = {\"foo.exe\", \"--gtest_list_tests\", nullptr};\n\n  const char* argv2[] = {\"foo.exe\", nullptr};\n\n  GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::ListTests(true), false);\n}\n\n// Tests having a --gtest_list_tests flag with a \"true\" value\nTEST_F(ParseFlagsTest, ListTestsTrue) {\n  const char* argv[] = {\"foo.exe\", \"--gtest_list_tests=1\", nullptr};\n\n  const char* argv2[] = {\"foo.exe\", nullptr};\n\n  GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::ListTests(true), false);\n}\n\n// Tests having a --gtest_list_tests flag with a \"false\" value\nTEST_F(ParseFlagsTest, ListTestsFalse) {\n  const char* argv[] = {\"foo.exe\", \"--gtest_list_tests=0\", nullptr};\n\n  const char* argv2[] = {\"foo.exe\", nullptr};\n\n  GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::ListTests(false), false);\n}\n\n// Tests parsing --gtest_list_tests=f.\nTEST_F(ParseFlagsTest, ListTestsFalse_f) {\n  const char* argv[] = {\"foo.exe\", \"--gtest_list_tests=f\", nullptr};\n\n  const char* argv2[] = {\"foo.exe\", nullptr};\n\n  
GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::ListTests(false), false);\n}\n\n// Tests parsing --gtest_list_tests=F.\nTEST_F(ParseFlagsTest, ListTestsFalse_F) {\n  const char* argv[] = {\"foo.exe\", \"--gtest_list_tests=F\", nullptr};\n\n  const char* argv2[] = {\"foo.exe\", nullptr};\n\n  GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::ListTests(false), false);\n}\n\n// Tests parsing --gtest_output (invalid).\nTEST_F(ParseFlagsTest, OutputEmpty) {\n  const char* argv[] = {\"foo.exe\", \"--gtest_output\", nullptr};\n\n  const char* argv2[] = {\"foo.exe\", \"--gtest_output\", nullptr};\n\n  GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags(), true);\n}\n\n// Tests parsing --gtest_output=xml\nTEST_F(ParseFlagsTest, OutputXml) {\n  const char* argv[] = {\"foo.exe\", \"--gtest_output=xml\", nullptr};\n\n  const char* argv2[] = {\"foo.exe\", nullptr};\n\n  GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::Output(\"xml\"), false);\n}\n\n// Tests parsing --gtest_output=xml:file\nTEST_F(ParseFlagsTest, OutputXmlFile) {\n  const char* argv[] = {\"foo.exe\", \"--gtest_output=xml:file\", nullptr};\n\n  const char* argv2[] = {\"foo.exe\", nullptr};\n\n  GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::Output(\"xml:file\"), false);\n}\n\n// Tests parsing --gtest_output=xml:directory/path/\nTEST_F(ParseFlagsTest, OutputXmlDirectory) {\n  const char* argv[] = {\"foo.exe\", \"--gtest_output=xml:directory/path/\",\n                        nullptr};\n\n  const char* argv2[] = {\"foo.exe\", nullptr};\n\n  GTEST_TEST_PARSING_FLAGS_(argv, argv2,\n                            Flags::Output(\"xml:directory/path/\"), false);\n}\n\n// Tests having a --gtest_print_time flag\nTEST_F(ParseFlagsTest, PrintTimeFlag) {\n  const char* argv[] = {\"foo.exe\", \"--gtest_print_time\", nullptr};\n\n  const char* argv2[] = {\"foo.exe\", nullptr};\n\n  GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::PrintTime(true), false);\n}\n\n// Tests having a --gtest_print_time flag with a \"true\" value\nTEST_F(ParseFlagsTest, 
PrintTimeTrue) {\n  const char* argv[] = {\"foo.exe\", \"--gtest_print_time=1\", nullptr};\n\n  const char* argv2[] = {\"foo.exe\", nullptr};\n\n  GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::PrintTime(true), false);\n}\n\n// Tests having a --gtest_print_time flag with a \"false\" value\nTEST_F(ParseFlagsTest, PrintTimeFalse) {\n  const char* argv[] = {\"foo.exe\", \"--gtest_print_time=0\", nullptr};\n\n  const char* argv2[] = {\"foo.exe\", nullptr};\n\n  GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::PrintTime(false), false);\n}\n\n// Tests parsing --gtest_print_time=f.\nTEST_F(ParseFlagsTest, PrintTimeFalse_f) {\n  const char* argv[] = {\"foo.exe\", \"--gtest_print_time=f\", nullptr};\n\n  const char* argv2[] = {\"foo.exe\", nullptr};\n\n  GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::PrintTime(false), false);\n}\n\n// Tests parsing --gtest_print_time=F.\nTEST_F(ParseFlagsTest, PrintTimeFalse_F) {\n  const char* argv[] = {\"foo.exe\", \"--gtest_print_time=F\", nullptr};\n\n  const char* argv2[] = {\"foo.exe\", nullptr};\n\n  GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::PrintTime(false), false);\n}\n\n// Tests parsing --gtest_random_seed=number\nTEST_F(ParseFlagsTest, RandomSeed) {\n  const char* argv[] = {\"foo.exe\", \"--gtest_random_seed=1000\", nullptr};\n\n  const char* argv2[] = {\"foo.exe\", nullptr};\n\n  GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::RandomSeed(1000), false);\n}\n\n// Tests parsing --gtest_repeat=number\nTEST_F(ParseFlagsTest, Repeat) {\n  const char* argv[] = {\"foo.exe\", \"--gtest_repeat=1000\", nullptr};\n\n  const char* argv2[] = {\"foo.exe\", nullptr};\n\n  GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::Repeat(1000), false);\n}\n\n// Tests having a --gtest_also_run_disabled_tests flag\nTEST_F(ParseFlagsTest, AlsoRunDisabledTestsFlag) {\n  const char* argv[] = {\"foo.exe\", \"--gtest_also_run_disabled_tests\", nullptr};\n\n  const char* argv2[] = {\"foo.exe\", nullptr};\n\n  GTEST_TEST_PARSING_FLAGS_(argv, argv2, 
Flags::AlsoRunDisabledTests(true),\n                            false);\n}\n\n// Tests having a --gtest_also_run_disabled_tests flag with a \"true\" value\nTEST_F(ParseFlagsTest, AlsoRunDisabledTestsTrue) {\n  const char* argv[] = {\"foo.exe\", \"--gtest_also_run_disabled_tests=1\",\n                        nullptr};\n\n  const char* argv2[] = {\"foo.exe\", nullptr};\n\n  GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::AlsoRunDisabledTests(true),\n                            false);\n}\n\n// Tests having a --gtest_also_run_disabled_tests flag with a \"false\" value\nTEST_F(ParseFlagsTest, AlsoRunDisabledTestsFalse) {\n  const char* argv[] = {\"foo.exe\", \"--gtest_also_run_disabled_tests=0\",\n                        nullptr};\n\n  const char* argv2[] = {\"foo.exe\", nullptr};\n\n  GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::AlsoRunDisabledTests(false),\n                            false);\n}\n\n// Tests parsing --gtest_shuffle.\nTEST_F(ParseFlagsTest, ShuffleWithoutValue) {\n  const char* argv[] = {\"foo.exe\", \"--gtest_shuffle\", nullptr};\n\n  const char* argv2[] = {\"foo.exe\", nullptr};\n\n  GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::Shuffle(true), false);\n}\n\n// Tests parsing --gtest_shuffle=0.\nTEST_F(ParseFlagsTest, ShuffleFalse_0) {\n  const char* argv[] = {\"foo.exe\", \"--gtest_shuffle=0\", nullptr};\n\n  const char* argv2[] = {\"foo.exe\", nullptr};\n\n  GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::Shuffle(false), false);\n}\n\n// Tests parsing a --gtest_shuffle flag that has a \"true\" definition.\nTEST_F(ParseFlagsTest, ShuffleTrue) {\n  const char* argv[] = {\"foo.exe\", \"--gtest_shuffle=1\", nullptr};\n\n  const char* argv2[] = {\"foo.exe\", nullptr};\n\n  GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::Shuffle(true), false);\n}\n\n// Tests parsing --gtest_stack_trace_depth=number.\nTEST_F(ParseFlagsTest, StackTraceDepth) {\n  const char* argv[] = {\"foo.exe\", \"--gtest_stack_trace_depth=5\", nullptr};\n\n  const char* argv2[] = {\"foo.exe\", 
nullptr};\n\n  GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::StackTraceDepth(5), false);\n}\n\nTEST_F(ParseFlagsTest, StreamResultTo) {\n  const char* argv[] = {\"foo.exe\", \"--gtest_stream_result_to=localhost:1234\",\n                        nullptr};\n\n  const char* argv2[] = {\"foo.exe\", nullptr};\n\n  GTEST_TEST_PARSING_FLAGS_(\n      argv, argv2, Flags::StreamResultTo(\"localhost:1234\"), false);\n}\n\n// Tests parsing --gtest_throw_on_failure.\nTEST_F(ParseFlagsTest, ThrowOnFailureWithoutValue) {\n  const char* argv[] = {\"foo.exe\", \"--gtest_throw_on_failure\", nullptr};\n\n  const char* argv2[] = {\"foo.exe\", nullptr};\n\n  GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::ThrowOnFailure(true), false);\n}\n\n// Tests parsing --gtest_throw_on_failure=0.\nTEST_F(ParseFlagsTest, ThrowOnFailureFalse_0) {\n  const char* argv[] = {\"foo.exe\", \"--gtest_throw_on_failure=0\", nullptr};\n\n  const char* argv2[] = {\"foo.exe\", nullptr};\n\n  GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::ThrowOnFailure(false), false);\n}\n\n// Tests parsing a --gtest_throw_on_failure flag that has a \"true\"\n// definition.\nTEST_F(ParseFlagsTest, ThrowOnFailureTrue) {\n  const char* argv[] = {\"foo.exe\", \"--gtest_throw_on_failure=1\", nullptr};\n\n  const char* argv2[] = {\"foo.exe\", nullptr};\n\n  GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::ThrowOnFailure(true), false);\n}\n\n# if GTEST_OS_WINDOWS\n// Tests parsing wide strings.\nTEST_F(ParseFlagsTest, WideStrings) {\n  const wchar_t* argv[] = {\n    L\"foo.exe\",\n    L\"--gtest_filter=Foo*\",\n    L\"--gtest_list_tests=1\",\n    L\"--gtest_break_on_failure\",\n    L\"--non_gtest_flag\",\n    NULL\n  };\n\n  const wchar_t* argv2[] = {\n    L\"foo.exe\",\n    L\"--non_gtest_flag\",\n    NULL\n  };\n\n  Flags expected_flags;\n  expected_flags.break_on_failure = true;\n  expected_flags.filter = \"Foo*\";\n  expected_flags.list_tests = true;\n\n  GTEST_TEST_PARSING_FLAGS_(argv, argv2, expected_flags, false);\n}\n# endif  // 
GTEST_OS_WINDOWS\n\n#if GTEST_USE_OWN_FLAGFILE_FLAG_\nclass FlagfileTest : public ParseFlagsTest {\n public:\n  void SetUp() override {\n    ParseFlagsTest::SetUp();\n\n    testdata_path_.Set(internal::FilePath(\n        testing::TempDir() + internal::GetCurrentExecutableName().string() +\n        \"_flagfile_test\"));\n    testing::internal::posix::RmDir(testdata_path_.c_str());\n    EXPECT_TRUE(testdata_path_.CreateFolder());\n  }\n\n  void TearDown() override {\n    testing::internal::posix::RmDir(testdata_path_.c_str());\n    ParseFlagsTest::TearDown();\n  }\n\n  internal::FilePath CreateFlagfile(const char* contents) {\n    internal::FilePath file_path(internal::FilePath::GenerateUniqueFileName(\n        testdata_path_, internal::FilePath(\"unique\"), \"txt\"));\n    FILE* f = testing::internal::posix::FOpen(file_path.c_str(), \"w\");\n    fprintf(f, \"%s\", contents);\n    fclose(f);\n    return file_path;\n  }\n\n private:\n  internal::FilePath testdata_path_;\n};\n\n// Tests an empty flagfile.\nTEST_F(FlagfileTest, Empty) {\n  internal::FilePath flagfile_path(CreateFlagfile(\"\"));\n  std::string flagfile_flag =\n      std::string(\"--\" GTEST_FLAG_PREFIX_ \"flagfile=\") + flagfile_path.c_str();\n\n  const char* argv[] = {\"foo.exe\", flagfile_flag.c_str(), nullptr};\n\n  const char* argv2[] = {\"foo.exe\", nullptr};\n\n  GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags(), false);\n}\n\n// Tests passing a non-empty --gtest_filter flag via --gtest_flagfile.\nTEST_F(FlagfileTest, FilterNonEmpty) {\n  internal::FilePath flagfile_path(CreateFlagfile(\n      \"--\"  GTEST_FLAG_PREFIX_  \"filter=abc\"));\n  std::string flagfile_flag =\n      std::string(\"--\" GTEST_FLAG_PREFIX_ \"flagfile=\") + flagfile_path.c_str();\n\n  const char* argv[] = {\"foo.exe\", flagfile_flag.c_str(), nullptr};\n\n  const char* argv2[] = {\"foo.exe\", nullptr};\n\n  GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::Filter(\"abc\"), false);\n}\n\n// Tests passing several flags via 
--gtest_flagfile.\nTEST_F(FlagfileTest, SeveralFlags) {\n  internal::FilePath flagfile_path(CreateFlagfile(\n      \"--\"  GTEST_FLAG_PREFIX_  \"filter=abc\\n\"\n      \"--\"  GTEST_FLAG_PREFIX_  \"break_on_failure\\n\"\n      \"--\"  GTEST_FLAG_PREFIX_  \"list_tests\"));\n  std::string flagfile_flag =\n      std::string(\"--\" GTEST_FLAG_PREFIX_ \"flagfile=\") + flagfile_path.c_str();\n\n  const char* argv[] = {\"foo.exe\", flagfile_flag.c_str(), nullptr};\n\n  const char* argv2[] = {\"foo.exe\", nullptr};\n\n  Flags expected_flags;\n  expected_flags.break_on_failure = true;\n  expected_flags.filter = \"abc\";\n  expected_flags.list_tests = true;\n\n  GTEST_TEST_PARSING_FLAGS_(argv, argv2, expected_flags, false);\n}\n#endif  // GTEST_USE_OWN_FLAGFILE_FLAG_\n\n// Tests current_test_info() in UnitTest.\nclass CurrentTestInfoTest : public Test {\n protected:\n  // Tests that current_test_info() returns NULL before the first test in\n  // the test case is run.\n  static void SetUpTestSuite() {\n    // There should be no tests running at this point.\n    const TestInfo* test_info =\n      UnitTest::GetInstance()->current_test_info();\n    EXPECT_TRUE(test_info == nullptr)\n        << \"There should be no tests running at this point.\";\n  }\n\n  // Tests that current_test_info() returns NULL after the last test in\n  // the test case has run.\n  static void TearDownTestSuite() {\n    const TestInfo* test_info =\n      UnitTest::GetInstance()->current_test_info();\n    EXPECT_TRUE(test_info == nullptr)\n        << \"There should be no tests running at this point.\";\n  }\n};\n\n// Tests that current_test_info() returns TestInfo for currently running\n// test by checking the expected test name against the actual one.\nTEST_F(CurrentTestInfoTest, WorksForFirstTestInATestSuite) {\n  const TestInfo* test_info =\n    UnitTest::GetInstance()->current_test_info();\n  ASSERT_TRUE(nullptr != test_info)\n      << \"There is a test running so we should have a valid TestInfo.\";\n  
EXPECT_STREQ(\"CurrentTestInfoTest\", test_info->test_case_name())\n      << \"Expected the name of the currently running test case.\";\n  EXPECT_STREQ(\"WorksForFirstTestInATestSuite\", test_info->name())\n      << \"Expected the name of the currently running test.\";\n}\n\n// Tests that current_test_info() returns TestInfo for currently running\n// test by checking the expected test name against the actual one.  We\n// use this test to see that the TestInfo object actually changed from\n// the previous invocation.\nTEST_F(CurrentTestInfoTest, WorksForSecondTestInATestSuite) {\n  const TestInfo* test_info =\n    UnitTest::GetInstance()->current_test_info();\n  ASSERT_TRUE(nullptr != test_info)\n      << \"There is a test running so we should have a valid TestInfo.\";\n  EXPECT_STREQ(\"CurrentTestInfoTest\", test_info->test_case_name())\n      << \"Expected the name of the currently running test case.\";\n  EXPECT_STREQ(\"WorksForSecondTestInATestSuite\", test_info->name())\n      << \"Expected the name of the currently running test.\";\n}\n\n}  // namespace testing\n\n\n// These two lines test that we can define tests in a namespace that\n// has the name \"testing\" and is nested in another namespace.\nnamespace my_namespace {\nnamespace testing {\n\n// Makes sure that TEST knows to use ::testing::Test instead of\n// ::my_namespace::testing::Test.\nclass Test {};\n\n// Makes sure that an assertion knows to use ::testing::Message instead of\n// ::my_namespace::testing::Message.\nclass Message {};\n\n// Makes sure that an assertion knows to use\n// ::testing::AssertionResult instead of\n// ::my_namespace::testing::AssertionResult.\nclass AssertionResult {};\n\n// Tests that an assertion that should succeed works as expected.\nTEST(NestedTestingNamespaceTest, Success) {\n  EXPECT_EQ(1, 1) << \"This shouldn't fail.\";\n}\n\n// Tests that an assertion that should fail works as expected.\nTEST(NestedTestingNamespaceTest, Failure) {\n  EXPECT_FATAL_FAILURE(FAIL() << 
\"This failure is expected.\",\n                       \"This failure is expected.\");\n}\n\n}  // namespace testing\n}  // namespace my_namespace\n\n// Tests that one can call superclass SetUp and TearDown methods--\n// that is, that they are not private.\n// No tests are based on this fixture; the test \"passes\" if it compiles\n// successfully.\nclass ProtectedFixtureMethodsTest : public Test {\n protected:\n  void SetUp() override { Test::SetUp(); }\n  void TearDown() override { Test::TearDown(); }\n};\n\n// StreamingAssertionsTest tests the streaming versions of a representative\n// sample of assertions.\nTEST(StreamingAssertionsTest, Unconditional) {\n  SUCCEED() << \"expected success\";\n  EXPECT_NONFATAL_FAILURE(ADD_FAILURE() << \"expected failure\",\n                          \"expected failure\");\n  EXPECT_FATAL_FAILURE(FAIL() << \"expected failure\",\n                       \"expected failure\");\n}\n\n#ifdef __BORLANDC__\n// Silences warnings: \"Condition is always true\", \"Unreachable code\"\n# pragma option push -w-ccc -w-rch\n#endif\n\nTEST(StreamingAssertionsTest, Truth) {\n  EXPECT_TRUE(true) << \"unexpected failure\";\n  ASSERT_TRUE(true) << \"unexpected failure\";\n  EXPECT_NONFATAL_FAILURE(EXPECT_TRUE(false) << \"expected failure\",\n                          \"expected failure\");\n  EXPECT_FATAL_FAILURE(ASSERT_TRUE(false) << \"expected failure\",\n                       \"expected failure\");\n}\n\nTEST(StreamingAssertionsTest, Truth2) {\n  EXPECT_FALSE(false) << \"unexpected failure\";\n  ASSERT_FALSE(false) << \"unexpected failure\";\n  EXPECT_NONFATAL_FAILURE(EXPECT_FALSE(true) << \"expected failure\",\n                          \"expected failure\");\n  EXPECT_FATAL_FAILURE(ASSERT_FALSE(true) << \"expected failure\",\n                       \"expected failure\");\n}\n\n#ifdef __BORLANDC__\n// Restores warnings after previous \"#pragma option push\" suppressed them\n# pragma option pop\n#endif\n\nTEST(StreamingAssertionsTest, 
IntegerEquals) {\n  EXPECT_EQ(1, 1) << \"unexpected failure\";\n  ASSERT_EQ(1, 1) << \"unexpected failure\";\n  EXPECT_NONFATAL_FAILURE(EXPECT_EQ(1, 2) << \"expected failure\",\n                          \"expected failure\");\n  EXPECT_FATAL_FAILURE(ASSERT_EQ(1, 2) << \"expected failure\",\n                       \"expected failure\");\n}\n\nTEST(StreamingAssertionsTest, IntegerLessThan) {\n  EXPECT_LT(1, 2) << \"unexpected failure\";\n  ASSERT_LT(1, 2) << \"unexpected failure\";\n  EXPECT_NONFATAL_FAILURE(EXPECT_LT(2, 1) << \"expected failure\",\n                          \"expected failure\");\n  EXPECT_FATAL_FAILURE(ASSERT_LT(2, 1) << \"expected failure\",\n                       \"expected failure\");\n}\n\nTEST(StreamingAssertionsTest, StringsEqual) {\n  EXPECT_STREQ(\"foo\", \"foo\") << \"unexpected failure\";\n  ASSERT_STREQ(\"foo\", \"foo\") << \"unexpected failure\";\n  EXPECT_NONFATAL_FAILURE(EXPECT_STREQ(\"foo\", \"bar\") << \"expected failure\",\n                          \"expected failure\");\n  EXPECT_FATAL_FAILURE(ASSERT_STREQ(\"foo\", \"bar\") << \"expected failure\",\n                       \"expected failure\");\n}\n\nTEST(StreamingAssertionsTest, StringsNotEqual) {\n  EXPECT_STRNE(\"foo\", \"bar\") << \"unexpected failure\";\n  ASSERT_STRNE(\"foo\", \"bar\") << \"unexpected failure\";\n  EXPECT_NONFATAL_FAILURE(EXPECT_STRNE(\"foo\", \"foo\") << \"expected failure\",\n                          \"expected failure\");\n  EXPECT_FATAL_FAILURE(ASSERT_STRNE(\"foo\", \"foo\") << \"expected failure\",\n                       \"expected failure\");\n}\n\nTEST(StreamingAssertionsTest, StringsEqualIgnoringCase) {\n  EXPECT_STRCASEEQ(\"foo\", \"FOO\") << \"unexpected failure\";\n  ASSERT_STRCASEEQ(\"foo\", \"FOO\") << \"unexpected failure\";\n  EXPECT_NONFATAL_FAILURE(EXPECT_STRCASEEQ(\"foo\", \"bar\") << \"expected failure\",\n                          \"expected failure\");\n  EXPECT_FATAL_FAILURE(ASSERT_STRCASEEQ(\"foo\", \"bar\") << \"expected 
failure\",\n                       \"expected failure\");\n}\n\nTEST(StreamingAssertionsTest, StringNotEqualIgnoringCase) {\n  EXPECT_STRCASENE(\"foo\", \"bar\") << \"unexpected failure\";\n  ASSERT_STRCASENE(\"foo\", \"bar\") << \"unexpected failure\";\n  EXPECT_NONFATAL_FAILURE(EXPECT_STRCASENE(\"foo\", \"FOO\") << \"expected failure\",\n                          \"expected failure\");\n  EXPECT_FATAL_FAILURE(ASSERT_STRCASENE(\"bar\", \"BAR\") << \"expected failure\",\n                       \"expected failure\");\n}\n\nTEST(StreamingAssertionsTest, FloatingPointEquals) {\n  EXPECT_FLOAT_EQ(1.0, 1.0) << \"unexpected failure\";\n  ASSERT_FLOAT_EQ(1.0, 1.0) << \"unexpected failure\";\n  EXPECT_NONFATAL_FAILURE(EXPECT_FLOAT_EQ(0.0, 1.0) << \"expected failure\",\n                          \"expected failure\");\n  EXPECT_FATAL_FAILURE(ASSERT_FLOAT_EQ(0.0, 1.0) << \"expected failure\",\n                       \"expected failure\");\n}\n\n#if GTEST_HAS_EXCEPTIONS\n\nTEST(StreamingAssertionsTest, Throw) {\n  EXPECT_THROW(ThrowAnInteger(), int) << \"unexpected failure\";\n  ASSERT_THROW(ThrowAnInteger(), int) << \"unexpected failure\";\n  EXPECT_NONFATAL_FAILURE(EXPECT_THROW(ThrowAnInteger(), bool) <<\n                          \"expected failure\", \"expected failure\");\n  EXPECT_FATAL_FAILURE(ASSERT_THROW(ThrowAnInteger(), bool) <<\n                       \"expected failure\", \"expected failure\");\n}\n\nTEST(StreamingAssertionsTest, NoThrow) {\n  EXPECT_NO_THROW(ThrowNothing()) << \"unexpected failure\";\n  ASSERT_NO_THROW(ThrowNothing()) << \"unexpected failure\";\n  EXPECT_NONFATAL_FAILURE(EXPECT_NO_THROW(ThrowAnInteger()) <<\n                          \"expected failure\", \"expected failure\");\n  EXPECT_FATAL_FAILURE(ASSERT_NO_THROW(ThrowAnInteger()) <<\n                       \"expected failure\", \"expected failure\");\n}\n\nTEST(StreamingAssertionsTest, AnyThrow) {\n  EXPECT_ANY_THROW(ThrowAnInteger()) << \"unexpected failure\";\n  
ASSERT_ANY_THROW(ThrowAnInteger()) << \"unexpected failure\";\n  EXPECT_NONFATAL_FAILURE(EXPECT_ANY_THROW(ThrowNothing()) <<\n                          \"expected failure\", \"expected failure\");\n  EXPECT_FATAL_FAILURE(ASSERT_ANY_THROW(ThrowNothing()) <<\n                       \"expected failure\", \"expected failure\");\n}\n\n#endif  // GTEST_HAS_EXCEPTIONS\n\n// Tests that Google Test correctly decides whether to use colors in the output.\n\nTEST(ColoredOutputTest, UsesColorsWhenGTestColorFlagIsYes) {\n  GTEST_FLAG(color) = \"yes\";\n\n  SetEnv(\"TERM\", \"xterm\");  // TERM supports colors.\n  EXPECT_TRUE(ShouldUseColor(true));  // Stdout is a TTY.\n  EXPECT_TRUE(ShouldUseColor(false));  // Stdout is not a TTY.\n\n  SetEnv(\"TERM\", \"dumb\");  // TERM doesn't support colors.\n  EXPECT_TRUE(ShouldUseColor(true));  // Stdout is a TTY.\n  EXPECT_TRUE(ShouldUseColor(false));  // Stdout is not a TTY.\n}\n\nTEST(ColoredOutputTest, UsesColorsWhenGTestColorFlagIsAliasOfYes) {\n  SetEnv(\"TERM\", \"dumb\");  // TERM doesn't support colors.\n\n  GTEST_FLAG(color) = \"True\";\n  EXPECT_TRUE(ShouldUseColor(false));  // Stdout is not a TTY.\n\n  GTEST_FLAG(color) = \"t\";\n  EXPECT_TRUE(ShouldUseColor(false));  // Stdout is not a TTY.\n\n  GTEST_FLAG(color) = \"1\";\n  EXPECT_TRUE(ShouldUseColor(false));  // Stdout is not a TTY.\n}\n\nTEST(ColoredOutputTest, UsesNoColorWhenGTestColorFlagIsNo) {\n  GTEST_FLAG(color) = \"no\";\n\n  SetEnv(\"TERM\", \"xterm\");  // TERM supports colors.\n  EXPECT_FALSE(ShouldUseColor(true));  // Stdout is a TTY.\n  EXPECT_FALSE(ShouldUseColor(false));  // Stdout is not a TTY.\n\n  SetEnv(\"TERM\", \"dumb\");  // TERM doesn't support colors.\n  EXPECT_FALSE(ShouldUseColor(true));  // Stdout is a TTY.\n  EXPECT_FALSE(ShouldUseColor(false));  // Stdout is not a TTY.\n}\n\nTEST(ColoredOutputTest, UsesNoColorWhenGTestColorFlagIsInvalid) {\n  SetEnv(\"TERM\", \"xterm\");  // TERM supports colors.\n\n  GTEST_FLAG(color) = \"F\";\n  
EXPECT_FALSE(ShouldUseColor(true));  // Stdout is a TTY.\n\n  GTEST_FLAG(color) = \"0\";\n  EXPECT_FALSE(ShouldUseColor(true));  // Stdout is a TTY.\n\n  GTEST_FLAG(color) = \"unknown\";\n  EXPECT_FALSE(ShouldUseColor(true));  // Stdout is a TTY.\n}\n\nTEST(ColoredOutputTest, UsesColorsWhenStdoutIsTty) {\n  GTEST_FLAG(color) = \"auto\";\n\n  SetEnv(\"TERM\", \"xterm\");  // TERM supports colors.\n  EXPECT_FALSE(ShouldUseColor(false));  // Stdout is not a TTY.\n  EXPECT_TRUE(ShouldUseColor(true));    // Stdout is a TTY.\n}\n\nTEST(ColoredOutputTest, UsesColorsWhenTermSupportsColors) {\n  GTEST_FLAG(color) = \"auto\";\n\n#if GTEST_OS_WINDOWS && !GTEST_OS_WINDOWS_MINGW\n  // On Windows, we ignore the TERM variable as it's usually not set.\n\n  SetEnv(\"TERM\", \"dumb\");\n  EXPECT_TRUE(ShouldUseColor(true));  // Stdout is a TTY.\n\n  SetEnv(\"TERM\", \"\");\n  EXPECT_TRUE(ShouldUseColor(true));  // Stdout is a TTY.\n\n  SetEnv(\"TERM\", \"xterm\");\n  EXPECT_TRUE(ShouldUseColor(true));  // Stdout is a TTY.\n#else\n  // On non-Windows platforms, we rely on TERM to determine if the\n  // terminal supports colors.\n\n  SetEnv(\"TERM\", \"dumb\");  // TERM doesn't support colors.\n  EXPECT_FALSE(ShouldUseColor(true));  // Stdout is a TTY.\n\n  SetEnv(\"TERM\", \"emacs\");  // TERM doesn't support colors.\n  EXPECT_FALSE(ShouldUseColor(true));  // Stdout is a TTY.\n\n  SetEnv(\"TERM\", \"vt100\");  // TERM doesn't support colors.\n  EXPECT_FALSE(ShouldUseColor(true));  // Stdout is a TTY.\n\n  SetEnv(\"TERM\", \"xterm-mono\");  // TERM doesn't support colors.\n  EXPECT_FALSE(ShouldUseColor(true));  // Stdout is a TTY.\n\n  SetEnv(\"TERM\", \"xterm\");  // TERM supports colors.\n  EXPECT_TRUE(ShouldUseColor(true));  // Stdout is a TTY.\n\n  SetEnv(\"TERM\", \"xterm-color\");  // TERM supports colors.\n  EXPECT_TRUE(ShouldUseColor(true));  // Stdout is a TTY.\n\n  SetEnv(\"TERM\", \"xterm-256color\");  // TERM supports colors.\n  EXPECT_TRUE(ShouldUseColor(true));  // Stdout 
is a TTY.\n\n  SetEnv(\"TERM\", \"screen\");  // TERM supports colors.\n  EXPECT_TRUE(ShouldUseColor(true));  // Stdout is a TTY.\n\n  SetEnv(\"TERM\", \"screen-256color\");  // TERM supports colors.\n  EXPECT_TRUE(ShouldUseColor(true));  // Stdout is a TTY.\n\n  SetEnv(\"TERM\", \"tmux\");  // TERM supports colors.\n  EXPECT_TRUE(ShouldUseColor(true));  // Stdout is a TTY.\n\n  SetEnv(\"TERM\", \"tmux-256color\");  // TERM supports colors.\n  EXPECT_TRUE(ShouldUseColor(true));  // Stdout is a TTY.\n\n  SetEnv(\"TERM\", \"rxvt-unicode\");  // TERM supports colors.\n  EXPECT_TRUE(ShouldUseColor(true));  // Stdout is a TTY.\n\n  SetEnv(\"TERM\", \"rxvt-unicode-256color\");  // TERM supports colors.\n  EXPECT_TRUE(ShouldUseColor(true));  // Stdout is a TTY.\n\n  SetEnv(\"TERM\", \"linux\");  // TERM supports colors.\n  EXPECT_TRUE(ShouldUseColor(true));  // Stdout is a TTY.\n\n  SetEnv(\"TERM\", \"cygwin\");  // TERM supports colors.\n  EXPECT_TRUE(ShouldUseColor(true));  // Stdout is a TTY.\n#endif  // GTEST_OS_WINDOWS\n}\n\n// Verifies that StaticAssertTypeEq works in a namespace scope.\n\nstatic bool dummy1 GTEST_ATTRIBUTE_UNUSED_ = StaticAssertTypeEq<bool, bool>();\nstatic bool dummy2 GTEST_ATTRIBUTE_UNUSED_ =\n    StaticAssertTypeEq<const int, const int>();\n\n// Verifies that StaticAssertTypeEq works in a class.\n\ntemplate <typename T>\nclass StaticAssertTypeEqTestHelper {\n public:\n  StaticAssertTypeEqTestHelper() { StaticAssertTypeEq<bool, T>(); }\n};\n\nTEST(StaticAssertTypeEqTest, WorksInClass) {\n  StaticAssertTypeEqTestHelper<bool>();\n}\n\n// Verifies that StaticAssertTypeEq works inside a function.\n\ntypedef int IntAlias;\n\nTEST(StaticAssertTypeEqTest, CompilesForEqualTypes) {\n  StaticAssertTypeEq<int, IntAlias>();\n  StaticAssertTypeEq<int*, IntAlias*>();\n}\n\nTEST(HasNonfatalFailureTest, ReturnsFalseWhenThereIsNoFailure) {\n  EXPECT_FALSE(HasNonfatalFailure());\n}\n\nstatic void FailFatally() { FAIL(); }\n\nTEST(HasNonfatalFailureTest, 
ReturnsFalseWhenThereIsOnlyFatalFailure) {\n  FailFatally();\n  const bool has_nonfatal_failure = HasNonfatalFailure();\n  ClearCurrentTestPartResults();\n  EXPECT_FALSE(has_nonfatal_failure);\n}\n\nTEST(HasNonfatalFailureTest, ReturnsTrueWhenThereIsNonfatalFailure) {\n  ADD_FAILURE();\n  const bool has_nonfatal_failure = HasNonfatalFailure();\n  ClearCurrentTestPartResults();\n  EXPECT_TRUE(has_nonfatal_failure);\n}\n\nTEST(HasNonfatalFailureTest, ReturnsTrueWhenThereAreFatalAndNonfatalFailures) {\n  FailFatally();\n  ADD_FAILURE();\n  const bool has_nonfatal_failure = HasNonfatalFailure();\n  ClearCurrentTestPartResults();\n  EXPECT_TRUE(has_nonfatal_failure);\n}\n\n// A wrapper for calling HasNonfatalFailure outside of a test body.\nstatic bool HasNonfatalFailureHelper() {\n  return testing::Test::HasNonfatalFailure();\n}\n\nTEST(HasNonfatalFailureTest, WorksOutsideOfTestBody) {\n  EXPECT_FALSE(HasNonfatalFailureHelper());\n}\n\nTEST(HasNonfatalFailureTest, WorksOutsideOfTestBody2) {\n  ADD_FAILURE();\n  const bool has_nonfatal_failure = HasNonfatalFailureHelper();\n  ClearCurrentTestPartResults();\n  EXPECT_TRUE(has_nonfatal_failure);\n}\n\nTEST(HasFailureTest, ReturnsFalseWhenThereIsNoFailure) {\n  EXPECT_FALSE(HasFailure());\n}\n\nTEST(HasFailureTest, ReturnsTrueWhenThereIsFatalFailure) {\n  FailFatally();\n  const bool has_failure = HasFailure();\n  ClearCurrentTestPartResults();\n  EXPECT_TRUE(has_failure);\n}\n\nTEST(HasFailureTest, ReturnsTrueWhenThereIsNonfatalFailure) {\n  ADD_FAILURE();\n  const bool has_failure = HasFailure();\n  ClearCurrentTestPartResults();\n  EXPECT_TRUE(has_failure);\n}\n\nTEST(HasFailureTest, ReturnsTrueWhenThereAreFatalAndNonfatalFailures) {\n  FailFatally();\n  ADD_FAILURE();\n  const bool has_failure = HasFailure();\n  ClearCurrentTestPartResults();\n  EXPECT_TRUE(has_failure);\n}\n\n// A wrapper for calling HasFailure outside of a test body.\nstatic bool HasFailureHelper() { return testing::Test::HasFailure(); 
}\n\nTEST(HasFailureTest, WorksOutsideOfTestBody) {\n  EXPECT_FALSE(HasFailureHelper());\n}\n\nTEST(HasFailureTest, WorksOutsideOfTestBody2) {\n  ADD_FAILURE();\n  const bool has_failure = HasFailureHelper();\n  ClearCurrentTestPartResults();\n  EXPECT_TRUE(has_failure);\n}\n\nclass TestListener : public EmptyTestEventListener {\n public:\n  TestListener() : on_start_counter_(nullptr), is_destroyed_(nullptr) {}\n  TestListener(int* on_start_counter, bool* is_destroyed)\n      : on_start_counter_(on_start_counter),\n        is_destroyed_(is_destroyed) {}\n\n  ~TestListener() override {\n    if (is_destroyed_)\n      *is_destroyed_ = true;\n  }\n\n protected:\n  void OnTestProgramStart(const UnitTest& /*unit_test*/) override {\n    if (on_start_counter_ != nullptr) (*on_start_counter_)++;\n  }\n\n private:\n  int* on_start_counter_;\n  bool* is_destroyed_;\n};\n\n// Tests the constructor.\nTEST(TestEventListenersTest, ConstructionWorks) {\n  TestEventListeners listeners;\n\n  EXPECT_TRUE(TestEventListenersAccessor::GetRepeater(&listeners) != nullptr);\n  EXPECT_TRUE(listeners.default_result_printer() == nullptr);\n  EXPECT_TRUE(listeners.default_xml_generator() == nullptr);\n}\n\n// Tests that the TestEventListeners destructor deletes all the listeners it\n// owns.\nTEST(TestEventListenersTest, DestructionWorks) {\n  bool default_result_printer_is_destroyed = false;\n  bool default_xml_printer_is_destroyed = false;\n  bool extra_listener_is_destroyed = false;\n  TestListener* default_result_printer =\n      new TestListener(nullptr, &default_result_printer_is_destroyed);\n  TestListener* default_xml_printer =\n      new TestListener(nullptr, &default_xml_printer_is_destroyed);\n  TestListener* extra_listener =\n      new TestListener(nullptr, &extra_listener_is_destroyed);\n\n  {\n    TestEventListeners listeners;\n    TestEventListenersAccessor::SetDefaultResultPrinter(&listeners,\n                                                        default_result_printer);\n    
TestEventListenersAccessor::SetDefaultXmlGenerator(&listeners,\n                                                       default_xml_printer);\n    listeners.Append(extra_listener);\n  }\n  EXPECT_TRUE(default_result_printer_is_destroyed);\n  EXPECT_TRUE(default_xml_printer_is_destroyed);\n  EXPECT_TRUE(extra_listener_is_destroyed);\n}\n\n// Tests that a listener Append'ed to a TestEventListeners list starts\n// receiving events.\nTEST(TestEventListenersTest, Append) {\n  int on_start_counter = 0;\n  bool is_destroyed = false;\n  TestListener* listener = new TestListener(&on_start_counter, &is_destroyed);\n  {\n    TestEventListeners listeners;\n    listeners.Append(listener);\n    TestEventListenersAccessor::GetRepeater(&listeners)->OnTestProgramStart(\n        *UnitTest::GetInstance());\n    EXPECT_EQ(1, on_start_counter);\n  }\n  EXPECT_TRUE(is_destroyed);\n}\n\n// Tests that listeners receive events in the order they were appended to\n// the list, except for *End requests, which must be received in the reverse\n// order.\nclass SequenceTestingListener : public EmptyTestEventListener {\n public:\n  SequenceTestingListener(std::vector<std::string>* vector, const char* id)\n      : vector_(vector), id_(id) {}\n\n protected:\n  void OnTestProgramStart(const UnitTest& /*unit_test*/) override {\n    vector_->push_back(GetEventDescription(\"OnTestProgramStart\"));\n  }\n\n  void OnTestProgramEnd(const UnitTest& /*unit_test*/) override {\n    vector_->push_back(GetEventDescription(\"OnTestProgramEnd\"));\n  }\n\n  void OnTestIterationStart(const UnitTest& /*unit_test*/,\n                            int /*iteration*/) override {\n    vector_->push_back(GetEventDescription(\"OnTestIterationStart\"));\n  }\n\n  void OnTestIterationEnd(const UnitTest& /*unit_test*/,\n                          int /*iteration*/) override {\n    vector_->push_back(GetEventDescription(\"OnTestIterationEnd\"));\n  }\n\n private:\n  std::string GetEventDescription(const char* method) {\n    
Message message;\n    message << id_ << \".\" << method;\n    return message.GetString();\n  }\n\n  std::vector<std::string>* vector_;\n  const char* const id_;\n\n  GTEST_DISALLOW_COPY_AND_ASSIGN_(SequenceTestingListener);\n};\n\nTEST(EventListenerTest, AppendKeepsOrder) {\n  std::vector<std::string> vec;\n  TestEventListeners listeners;\n  listeners.Append(new SequenceTestingListener(&vec, \"1st\"));\n  listeners.Append(new SequenceTestingListener(&vec, \"2nd\"));\n  listeners.Append(new SequenceTestingListener(&vec, \"3rd\"));\n\n  TestEventListenersAccessor::GetRepeater(&listeners)->OnTestProgramStart(\n      *UnitTest::GetInstance());\n  ASSERT_EQ(3U, vec.size());\n  EXPECT_STREQ(\"1st.OnTestProgramStart\", vec[0].c_str());\n  EXPECT_STREQ(\"2nd.OnTestProgramStart\", vec[1].c_str());\n  EXPECT_STREQ(\"3rd.OnTestProgramStart\", vec[2].c_str());\n\n  vec.clear();\n  TestEventListenersAccessor::GetRepeater(&listeners)->OnTestProgramEnd(\n      *UnitTest::GetInstance());\n  ASSERT_EQ(3U, vec.size());\n  EXPECT_STREQ(\"3rd.OnTestProgramEnd\", vec[0].c_str());\n  EXPECT_STREQ(\"2nd.OnTestProgramEnd\", vec[1].c_str());\n  EXPECT_STREQ(\"1st.OnTestProgramEnd\", vec[2].c_str());\n\n  vec.clear();\n  TestEventListenersAccessor::GetRepeater(&listeners)->OnTestIterationStart(\n      *UnitTest::GetInstance(), 0);\n  ASSERT_EQ(3U, vec.size());\n  EXPECT_STREQ(\"1st.OnTestIterationStart\", vec[0].c_str());\n  EXPECT_STREQ(\"2nd.OnTestIterationStart\", vec[1].c_str());\n  EXPECT_STREQ(\"3rd.OnTestIterationStart\", vec[2].c_str());\n\n  vec.clear();\n  TestEventListenersAccessor::GetRepeater(&listeners)->OnTestIterationEnd(\n      *UnitTest::GetInstance(), 0);\n  ASSERT_EQ(3U, vec.size());\n  EXPECT_STREQ(\"3rd.OnTestIterationEnd\", vec[0].c_str());\n  EXPECT_STREQ(\"2nd.OnTestIterationEnd\", vec[1].c_str());\n  EXPECT_STREQ(\"1st.OnTestIterationEnd\", vec[2].c_str());\n}\n\n// Tests that a listener removed from a TestEventListeners list stops receiving\n// events and is not 
deleted when the list is destroyed.\nTEST(TestEventListenersTest, Release) {\n  int on_start_counter = 0;\n  bool is_destroyed = false;\n  // Although Append passes the ownership of this object to the list,\n  // the following calls release it, and we need to delete it before the\n  // test ends.\n  TestListener* listener = new TestListener(&on_start_counter, &is_destroyed);\n  {\n    TestEventListeners listeners;\n    listeners.Append(listener);\n    EXPECT_EQ(listener, listeners.Release(listener));\n    TestEventListenersAccessor::GetRepeater(&listeners)->OnTestProgramStart(\n        *UnitTest::GetInstance());\n    EXPECT_TRUE(listeners.Release(listener) == nullptr);\n  }\n  EXPECT_EQ(0, on_start_counter);\n  EXPECT_FALSE(is_destroyed);\n  delete listener;\n}\n\n// Tests that no events are forwarded when event forwarding is disabled.\nTEST(EventListenerTest, SuppressEventForwarding) {\n  int on_start_counter = 0;\n  TestListener* listener = new TestListener(&on_start_counter, nullptr);\n\n  TestEventListeners listeners;\n  listeners.Append(listener);\n  ASSERT_TRUE(TestEventListenersAccessor::EventForwardingEnabled(listeners));\n  TestEventListenersAccessor::SuppressEventForwarding(&listeners);\n  ASSERT_FALSE(TestEventListenersAccessor::EventForwardingEnabled(listeners));\n  TestEventListenersAccessor::GetRepeater(&listeners)->OnTestProgramStart(\n      *UnitTest::GetInstance());\n  EXPECT_EQ(0, on_start_counter);\n}\n\n// Tests that events generated by Google Test are not forwarded in\n// death test subprocesses.\nTEST(EventListenerDeathTest, EventsNotForwardedInDeathTestSubprecesses) {\n  EXPECT_DEATH_IF_SUPPORTED({\n      GTEST_CHECK_(TestEventListenersAccessor::EventForwardingEnabled(\n          *GetUnitTestImpl()->listeners())) << \"expected failure\";},\n      \"expected failure\");\n}\n\n// Tests that a listener installed via SetDefaultResultPrinter() starts\n// receiving events and is returned via default_result_printer() and that\n// the previous 
default_result_printer is removed from the list and deleted.\nTEST(EventListenerTest, default_result_printer) {\n  int on_start_counter = 0;\n  bool is_destroyed = false;\n  TestListener* listener = new TestListener(&on_start_counter, &is_destroyed);\n\n  TestEventListeners listeners;\n  TestEventListenersAccessor::SetDefaultResultPrinter(&listeners, listener);\n\n  EXPECT_EQ(listener, listeners.default_result_printer());\n\n  TestEventListenersAccessor::GetRepeater(&listeners)->OnTestProgramStart(\n      *UnitTest::GetInstance());\n\n  EXPECT_EQ(1, on_start_counter);\n\n  // Replacing default_result_printer with something else should remove it\n  // from the list and destroy it.\n  TestEventListenersAccessor::SetDefaultResultPrinter(&listeners, nullptr);\n\n  EXPECT_TRUE(listeners.default_result_printer() == nullptr);\n  EXPECT_TRUE(is_destroyed);\n\n  // After broadcasting an event the counter is still the same, indicating\n  // the listener is not in the list anymore.\n  TestEventListenersAccessor::GetRepeater(&listeners)->OnTestProgramStart(\n      *UnitTest::GetInstance());\n  EXPECT_EQ(1, on_start_counter);\n}\n\n// Tests that the default_result_printer listener stops receiving events\n// when removed via Release and that is not owned by the list anymore.\nTEST(EventListenerTest, RemovingDefaultResultPrinterWorks) {\n  int on_start_counter = 0;\n  bool is_destroyed = false;\n  // Although Append passes the ownership of this object to the list,\n  // the following calls release it, and we need to delete it before the\n  // test ends.\n  TestListener* listener = new TestListener(&on_start_counter, &is_destroyed);\n  {\n    TestEventListeners listeners;\n    TestEventListenersAccessor::SetDefaultResultPrinter(&listeners, listener);\n\n    EXPECT_EQ(listener, listeners.Release(listener));\n    EXPECT_TRUE(listeners.default_result_printer() == nullptr);\n    EXPECT_FALSE(is_destroyed);\n\n    // Broadcasting events now should not affect default_result_printer.\n   
 TestEventListenersAccessor::GetRepeater(&listeners)->OnTestProgramStart(\n        *UnitTest::GetInstance());\n    EXPECT_EQ(0, on_start_counter);\n  }\n  // Destroying the list should not affect the listener now, too.\n  EXPECT_FALSE(is_destroyed);\n  delete listener;\n}\n\n// Tests that a listener installed via SetDefaultXmlGenerator() starts\n// receiving events and is returned via default_xml_generator() and that\n// the previous default_xml_generator is removed from the list and deleted.\nTEST(EventListenerTest, default_xml_generator) {\n  int on_start_counter = 0;\n  bool is_destroyed = false;\n  TestListener* listener = new TestListener(&on_start_counter, &is_destroyed);\n\n  TestEventListeners listeners;\n  TestEventListenersAccessor::SetDefaultXmlGenerator(&listeners, listener);\n\n  EXPECT_EQ(listener, listeners.default_xml_generator());\n\n  TestEventListenersAccessor::GetRepeater(&listeners)->OnTestProgramStart(\n      *UnitTest::GetInstance());\n\n  EXPECT_EQ(1, on_start_counter);\n\n  // Replacing default_xml_generator with something else should remove it\n  // from the list and destroy it.\n  TestEventListenersAccessor::SetDefaultXmlGenerator(&listeners, nullptr);\n\n  EXPECT_TRUE(listeners.default_xml_generator() == nullptr);\n  EXPECT_TRUE(is_destroyed);\n\n  // After broadcasting an event the counter is still the same, indicating\n  // the listener is not in the list anymore.\n  TestEventListenersAccessor::GetRepeater(&listeners)->OnTestProgramStart(\n      *UnitTest::GetInstance());\n  EXPECT_EQ(1, on_start_counter);\n}\n\n// Tests that the default_xml_generator listener stops receiving events\n// when removed via Release and that is not owned by the list anymore.\nTEST(EventListenerTest, RemovingDefaultXmlGeneratorWorks) {\n  int on_start_counter = 0;\n  bool is_destroyed = false;\n  // Although Append passes the ownership of this object to the list,\n  // the following calls release it, and we need to delete it before the\n  // test ends.\n  
TestListener* listener = new TestListener(&on_start_counter, &is_destroyed);\n  {\n    TestEventListeners listeners;\n    TestEventListenersAccessor::SetDefaultXmlGenerator(&listeners, listener);\n\n    EXPECT_EQ(listener, listeners.Release(listener));\n    EXPECT_TRUE(listeners.default_xml_generator() == nullptr);\n    EXPECT_FALSE(is_destroyed);\n\n    // Broadcasting events now should not affect default_xml_generator.\n    TestEventListenersAccessor::GetRepeater(&listeners)->OnTestProgramStart(\n        *UnitTest::GetInstance());\n    EXPECT_EQ(0, on_start_counter);\n  }\n  // Destroying the list should not affect the listener now, too.\n  EXPECT_FALSE(is_destroyed);\n  delete listener;\n}\n\n// Sanity tests to ensure that the alternative, verbose spellings of\n// some of the macros work.  We don't test them thoroughly as that\n// would be quite involved.  Since their implementations are\n// straightforward, and they are rarely used, we'll just rely on the\n// users to tell us when they are broken.\nGTEST_TEST(AlternativeNameTest, Works) {  // GTEST_TEST is the same as TEST.\n  GTEST_SUCCEED() << \"OK\";  // GTEST_SUCCEED is the same as SUCCEED.\n\n  // GTEST_FAIL is the same as FAIL.\n  EXPECT_FATAL_FAILURE(GTEST_FAIL() << \"An expected failure\",\n                       \"An expected failure\");\n\n  // GTEST_ASSERT_XY is the same as ASSERT_XY.\n\n  GTEST_ASSERT_EQ(0, 0);\n  EXPECT_FATAL_FAILURE(GTEST_ASSERT_EQ(0, 1) << \"An expected failure\",\n                       \"An expected failure\");\n  EXPECT_FATAL_FAILURE(GTEST_ASSERT_EQ(1, 0) << \"An expected failure\",\n                       \"An expected failure\");\n\n  GTEST_ASSERT_NE(0, 1);\n  GTEST_ASSERT_NE(1, 0);\n  EXPECT_FATAL_FAILURE(GTEST_ASSERT_NE(0, 0) << \"An expected failure\",\n                       \"An expected failure\");\n\n  GTEST_ASSERT_LE(0, 0);\n  GTEST_ASSERT_LE(0, 1);\n  EXPECT_FATAL_FAILURE(GTEST_ASSERT_LE(1, 0) << \"An expected failure\",\n                       \"An expected 
failure\");\n\n  GTEST_ASSERT_LT(0, 1);\n  EXPECT_FATAL_FAILURE(GTEST_ASSERT_LT(0, 0) << \"An expected failure\",\n                       \"An expected failure\");\n  EXPECT_FATAL_FAILURE(GTEST_ASSERT_LT(1, 0) << \"An expected failure\",\n                       \"An expected failure\");\n\n  GTEST_ASSERT_GE(0, 0);\n  GTEST_ASSERT_GE(1, 0);\n  EXPECT_FATAL_FAILURE(GTEST_ASSERT_GE(0, 1) << \"An expected failure\",\n                       \"An expected failure\");\n\n  GTEST_ASSERT_GT(1, 0);\n  EXPECT_FATAL_FAILURE(GTEST_ASSERT_GT(0, 1) << \"An expected failure\",\n                       \"An expected failure\");\n  EXPECT_FATAL_FAILURE(GTEST_ASSERT_GT(1, 1) << \"An expected failure\",\n                       \"An expected failure\");\n}\n\n// Tests for internal utilities necessary for implementation of the universal\n// printing.\n\nclass ConversionHelperBase {};\nclass ConversionHelperDerived : public ConversionHelperBase {};\n\n// Tests that IsAProtocolMessage<T>::value is a compile-time constant.\nTEST(IsAProtocolMessageTest, ValueIsCompileTimeConstant) {\n  GTEST_COMPILE_ASSERT_(IsAProtocolMessage<::proto2::Message>::value,\n                        const_true);\n  GTEST_COMPILE_ASSERT_(!IsAProtocolMessage<int>::value, const_false);\n}\n\n// Tests that IsAProtocolMessage<T>::value is true when T is\n// proto2::Message or a sub-class of it.\nTEST(IsAProtocolMessageTest, ValueIsTrueWhenTypeIsAProtocolMessage) {\n  EXPECT_TRUE(IsAProtocolMessage< ::proto2::Message>::value);\n}\n\n// Tests that IsAProtocolMessage<T>::value is false when T is neither\n// ::proto2::Message nor a sub-class of it.\nTEST(IsAProtocolMessageTest, ValueIsFalseWhenTypeIsNotAProtocolMessage) {\n  EXPECT_FALSE(IsAProtocolMessage<int>::value);\n  EXPECT_FALSE(IsAProtocolMessage<const ConversionHelperBase>::value);\n}\n\n// Tests GTEST_REMOVE_REFERENCE_AND_CONST_.\n\ntemplate <typename T1, typename T2>\nvoid TestGTestRemoveReferenceAndConst() {\n  static_assert(std::is_same<T1, 
GTEST_REMOVE_REFERENCE_AND_CONST_(T2)>::value,\n                \"GTEST_REMOVE_REFERENCE_AND_CONST_ failed.\");\n}\n\nTEST(RemoveReferenceToConstTest, Works) {\n  TestGTestRemoveReferenceAndConst<int, int>();\n  TestGTestRemoveReferenceAndConst<double, double&>();\n  TestGTestRemoveReferenceAndConst<char, const char>();\n  TestGTestRemoveReferenceAndConst<char, const char&>();\n  TestGTestRemoveReferenceAndConst<const char*, const char*>();\n}\n\n// Tests GTEST_REFERENCE_TO_CONST_.\n\ntemplate <typename T1, typename T2>\nvoid TestGTestReferenceToConst() {\n  static_assert(std::is_same<T1, GTEST_REFERENCE_TO_CONST_(T2)>::value,\n                \"GTEST_REFERENCE_TO_CONST_ failed.\");\n}\n\nTEST(GTestReferenceToConstTest, Works) {\n  TestGTestReferenceToConst<const char&, char>();\n  TestGTestReferenceToConst<const int&, const int>();\n  TestGTestReferenceToConst<const double&, double>();\n  TestGTestReferenceToConst<const std::string&, const std::string&>();\n}\n\n\n// Tests IsContainerTest.\n\nclass NonContainer {};\n\nTEST(IsContainerTestTest, WorksForNonContainer) {\n  EXPECT_EQ(sizeof(IsNotContainer), sizeof(IsContainerTest<int>(0)));\n  EXPECT_EQ(sizeof(IsNotContainer), sizeof(IsContainerTest<char[5]>(0)));\n  EXPECT_EQ(sizeof(IsNotContainer), sizeof(IsContainerTest<NonContainer>(0)));\n}\n\nTEST(IsContainerTestTest, WorksForContainer) {\n  EXPECT_EQ(sizeof(IsContainer),\n            sizeof(IsContainerTest<std::vector<bool> >(0)));\n  EXPECT_EQ(sizeof(IsContainer),\n            sizeof(IsContainerTest<std::map<int, double> >(0)));\n}\n\nstruct ConstOnlyContainerWithPointerIterator {\n  using const_iterator = int*;\n  const_iterator begin() const;\n  const_iterator end() const;\n};\n\nstruct ConstOnlyContainerWithClassIterator {\n  struct const_iterator {\n    const int& operator*() const;\n    const_iterator& operator++(/* pre-increment */);\n  };\n  const_iterator begin() const;\n  const_iterator end() const;\n};\n\nTEST(IsContainerTestTest, ConstOnlyContainer) 
{\n  EXPECT_EQ(sizeof(IsContainer),\n            sizeof(IsContainerTest<ConstOnlyContainerWithPointerIterator>(0)));\n  EXPECT_EQ(sizeof(IsContainer),\n            sizeof(IsContainerTest<ConstOnlyContainerWithClassIterator>(0)));\n}\n\n// Tests IsHashTable.\nstruct AHashTable {\n  typedef void hasher;\n};\nstruct NotReallyAHashTable {\n  typedef void hasher;\n  typedef void reverse_iterator;\n};\nTEST(IsHashTable, Basic) {\n  EXPECT_TRUE(testing::internal::IsHashTable<AHashTable>::value);\n  EXPECT_FALSE(testing::internal::IsHashTable<NotReallyAHashTable>::value);\n  EXPECT_FALSE(testing::internal::IsHashTable<std::vector<int>>::value);\n  EXPECT_TRUE(testing::internal::IsHashTable<std::unordered_set<int>>::value);\n}\n\n// Tests ArrayEq().\n\nTEST(ArrayEqTest, WorksForDegeneratedArrays) {\n  EXPECT_TRUE(ArrayEq(5, 5L));\n  EXPECT_FALSE(ArrayEq('a', 0));\n}\n\nTEST(ArrayEqTest, WorksForOneDimensionalArrays) {\n  // Note that a and b are distinct but compatible types.\n  const int a[] = { 0, 1 };\n  long b[] = { 0, 1 };\n  EXPECT_TRUE(ArrayEq(a, b));\n  EXPECT_TRUE(ArrayEq(a, 2, b));\n\n  b[0] = 2;\n  EXPECT_FALSE(ArrayEq(a, b));\n  EXPECT_FALSE(ArrayEq(a, 1, b));\n}\n\nTEST(ArrayEqTest, WorksForTwoDimensionalArrays) {\n  const char a[][3] = { \"hi\", \"lo\" };\n  const char b[][3] = { \"hi\", \"lo\" };\n  const char c[][3] = { \"hi\", \"li\" };\n\n  EXPECT_TRUE(ArrayEq(a, b));\n  EXPECT_TRUE(ArrayEq(a, 2, b));\n\n  EXPECT_FALSE(ArrayEq(a, c));\n  EXPECT_FALSE(ArrayEq(a, 2, c));\n}\n\n// Tests ArrayAwareFind().\n\nTEST(ArrayAwareFindTest, WorksForOneDimensionalArray) {\n  const char a[] = \"hello\";\n  EXPECT_EQ(a + 4, ArrayAwareFind(a, a + 5, 'o'));\n  EXPECT_EQ(a + 5, ArrayAwareFind(a, a + 5, 'x'));\n}\n\nTEST(ArrayAwareFindTest, WorksForTwoDimensionalArray) {\n  int a[][2] = { { 0, 1 }, { 2, 3 }, { 4, 5 } };\n  const int b[2] = { 2, 3 };\n  EXPECT_EQ(a + 1, ArrayAwareFind(a, a + 3, b));\n\n  const int c[2] = { 6, 7 };\n  EXPECT_EQ(a + 3, ArrayAwareFind(a, a + 3, 
c));\n}\n\n// Tests CopyArray().\n\nTEST(CopyArrayTest, WorksForDegeneratedArrays) {\n  int n = 0;\n  CopyArray('a', &n);\n  EXPECT_EQ('a', n);\n}\n\nTEST(CopyArrayTest, WorksForOneDimensionalArrays) {\n  const char a[3] = \"hi\";\n  int b[3];\n#ifndef __BORLANDC__  // C++Builder cannot compile some array size deductions.\n  CopyArray(a, &b);\n  EXPECT_TRUE(ArrayEq(a, b));\n#endif\n\n  int c[3];\n  CopyArray(a, 3, c);\n  EXPECT_TRUE(ArrayEq(a, c));\n}\n\nTEST(CopyArrayTest, WorksForTwoDimensionalArrays) {\n  const int a[2][3] = { { 0, 1, 2 }, { 3, 4, 5 } };\n  int b[2][3];\n#ifndef __BORLANDC__  // C++Builder cannot compile some array size deductions.\n  CopyArray(a, &b);\n  EXPECT_TRUE(ArrayEq(a, b));\n#endif\n\n  int c[2][3];\n  CopyArray(a, 2, c);\n  EXPECT_TRUE(ArrayEq(a, c));\n}\n\n// Tests NativeArray.\n\nTEST(NativeArrayTest, ConstructorFromArrayWorks) {\n  const int a[3] = { 0, 1, 2 };\n  NativeArray<int> na(a, 3, RelationToSourceReference());\n  EXPECT_EQ(3U, na.size());\n  EXPECT_EQ(a, na.begin());\n}\n\nTEST(NativeArrayTest, CreatesAndDeletesCopyOfArrayWhenAskedTo) {\n  typedef int Array[2];\n  Array* a = new Array[1];\n  (*a)[0] = 0;\n  (*a)[1] = 1;\n  NativeArray<int> na(*a, 2, RelationToSourceCopy());\n  EXPECT_NE(*a, na.begin());\n  delete[] a;\n  EXPECT_EQ(0, na.begin()[0]);\n  EXPECT_EQ(1, na.begin()[1]);\n\n  // We rely on the heap checker to verify that na deletes the copy of\n  // array.\n}\n\nTEST(NativeArrayTest, TypeMembersAreCorrect) {\n  StaticAssertTypeEq<char, NativeArray<char>::value_type>();\n  StaticAssertTypeEq<int[2], NativeArray<int[2]>::value_type>();\n\n  StaticAssertTypeEq<const char*, NativeArray<char>::const_iterator>();\n  StaticAssertTypeEq<const bool(*)[2], NativeArray<bool[2]>::const_iterator>();\n}\n\nTEST(NativeArrayTest, MethodsWork) {\n  const int a[3] = { 0, 1, 2 };\n  NativeArray<int> na(a, 3, RelationToSourceCopy());\n  ASSERT_EQ(3U, na.size());\n  EXPECT_EQ(3, na.end() - na.begin());\n\n  
NativeArray<int>::const_iterator it = na.begin();\n  EXPECT_EQ(0, *it);\n  ++it;\n  EXPECT_EQ(1, *it);\n  it++;\n  EXPECT_EQ(2, *it);\n  ++it;\n  EXPECT_EQ(na.end(), it);\n\n  EXPECT_TRUE(na == na);\n\n  NativeArray<int> na2(a, 3, RelationToSourceReference());\n  EXPECT_TRUE(na == na2);\n\n  const int b1[3] = { 0, 1, 1 };\n  const int b2[4] = { 0, 1, 2, 3 };\n  EXPECT_FALSE(na == NativeArray<int>(b1, 3, RelationToSourceReference()));\n  EXPECT_FALSE(na == NativeArray<int>(b2, 4, RelationToSourceCopy()));\n}\n\nTEST(NativeArrayTest, WorksForTwoDimensionalArray) {\n  const char a[2][3] = { \"hi\", \"lo\" };\n  NativeArray<char[3]> na(a, 2, RelationToSourceReference());\n  ASSERT_EQ(2U, na.size());\n  EXPECT_EQ(a, na.begin());\n}\n\n// IndexSequence\nTEST(IndexSequence, MakeIndexSequence) {\n  using testing::internal::IndexSequence;\n  using testing::internal::MakeIndexSequence;\n  EXPECT_TRUE(\n      (std::is_same<IndexSequence<>, MakeIndexSequence<0>::type>::value));\n  EXPECT_TRUE(\n      (std::is_same<IndexSequence<0>, MakeIndexSequence<1>::type>::value));\n  EXPECT_TRUE(\n      (std::is_same<IndexSequence<0, 1>, MakeIndexSequence<2>::type>::value));\n  EXPECT_TRUE((\n      std::is_same<IndexSequence<0, 1, 2>, MakeIndexSequence<3>::type>::value));\n  EXPECT_TRUE(\n      (std::is_base_of<IndexSequence<0, 1, 2>, MakeIndexSequence<3>>::value));\n}\n\n// ElemFromList\nTEST(ElemFromList, Basic) {\n  using testing::internal::ElemFromList;\n  EXPECT_TRUE(\n      (std::is_same<int, ElemFromList<0, int, double, char>::type>::value));\n  EXPECT_TRUE(\n      (std::is_same<double, ElemFromList<1, int, double, char>::type>::value));\n  EXPECT_TRUE(\n      (std::is_same<char, ElemFromList<2, int, double, char>::type>::value));\n  EXPECT_TRUE((\n      std::is_same<char, ElemFromList<7, int, int, int, int, int, int, int,\n                                      char, int, int, int, int>::type>::value));\n}\n\n// FlatTuple\nTEST(FlatTuple, Basic) {\n  using 
testing::internal::FlatTuple;\n\n  FlatTuple<int, double, const char*> tuple = {};\n  EXPECT_EQ(0, tuple.Get<0>());\n  EXPECT_EQ(0.0, tuple.Get<1>());\n  EXPECT_EQ(nullptr, tuple.Get<2>());\n\n  tuple = FlatTuple<int, double, const char*>(7, 3.2, \"Foo\");\n  EXPECT_EQ(7, tuple.Get<0>());\n  EXPECT_EQ(3.2, tuple.Get<1>());\n  EXPECT_EQ(std::string(\"Foo\"), tuple.Get<2>());\n\n  tuple.Get<1>() = 5.1;\n  EXPECT_EQ(5.1, tuple.Get<1>());\n}\n\nTEST(FlatTuple, ManyTypes) {\n  using testing::internal::FlatTuple;\n\n  // Instantiate FlatTuple with 257 ints.\n  // Tests show that we can do it with thousands of elements, but very long\n  // compile times makes it unusuitable for this test.\n#define GTEST_FLAT_TUPLE_INT8 int, int, int, int, int, int, int, int,\n#define GTEST_FLAT_TUPLE_INT16 GTEST_FLAT_TUPLE_INT8 GTEST_FLAT_TUPLE_INT8\n#define GTEST_FLAT_TUPLE_INT32 GTEST_FLAT_TUPLE_INT16 GTEST_FLAT_TUPLE_INT16\n#define GTEST_FLAT_TUPLE_INT64 GTEST_FLAT_TUPLE_INT32 GTEST_FLAT_TUPLE_INT32\n#define GTEST_FLAT_TUPLE_INT128 GTEST_FLAT_TUPLE_INT64 GTEST_FLAT_TUPLE_INT64\n#define GTEST_FLAT_TUPLE_INT256 GTEST_FLAT_TUPLE_INT128 GTEST_FLAT_TUPLE_INT128\n\n  // Let's make sure that we can have a very long list of types without blowing\n  // up the template instantiation depth.\n  FlatTuple<GTEST_FLAT_TUPLE_INT256 int> tuple;\n\n  tuple.Get<0>() = 7;\n  tuple.Get<99>() = 17;\n  tuple.Get<256>() = 1000;\n  EXPECT_EQ(7, tuple.Get<0>());\n  EXPECT_EQ(17, tuple.Get<99>());\n  EXPECT_EQ(1000, tuple.Get<256>());\n}\n\n// Tests SkipPrefix().\n\nTEST(SkipPrefixTest, SkipsWhenPrefixMatches) {\n  const char* const str = \"hello\";\n\n  const char* p = str;\n  EXPECT_TRUE(SkipPrefix(\"\", &p));\n  EXPECT_EQ(str, p);\n\n  p = str;\n  EXPECT_TRUE(SkipPrefix(\"hell\", &p));\n  EXPECT_EQ(str + 4, p);\n}\n\nTEST(SkipPrefixTest, DoesNotSkipWhenPrefixDoesNotMatch) {\n  const char* const str = \"world\";\n\n  const char* p = str;\n  EXPECT_FALSE(SkipPrefix(\"W\", &p));\n  EXPECT_EQ(str, p);\n\n  p = 
str;\n  EXPECT_FALSE(SkipPrefix(\"world!\", &p));\n  EXPECT_EQ(str, p);\n}\n\n// Tests ad_hoc_test_result().\n\nclass AdHocTestResultTest : public testing::Test {\n protected:\n  static void SetUpTestSuite() {\n    FAIL() << \"A failure happened inside SetUpTestSuite().\";\n  }\n};\n\nTEST_F(AdHocTestResultTest, AdHocTestResultForTestSuiteShowsFailure) {\n  const testing::TestResult& test_result = testing::UnitTest::GetInstance()\n                                               ->current_test_suite()\n                                               ->ad_hoc_test_result();\n  EXPECT_TRUE(test_result.Failed());\n}\n\nTEST_F(AdHocTestResultTest, AdHocTestResultTestForUnitTestDoesNotShowFailure) {\n  const testing::TestResult& test_result =\n      testing::UnitTest::GetInstance()->ad_hoc_test_result();\n  EXPECT_FALSE(test_result.Failed());\n}\n\nclass DynamicUnitTestFixture : public testing::Test {};\n\nclass DynamicTest : public DynamicUnitTestFixture {\n  void TestBody() override { EXPECT_TRUE(true); }\n};\n\nauto* dynamic_test = testing::RegisterTest(\n    \"DynamicUnitTestFixture\", \"DynamicTest\", \"TYPE\", \"VALUE\", __FILE__,\n    __LINE__, []() -> DynamicUnitTestFixture* { return new DynamicTest; });\n\nTEST(RegisterTest, WasRegistered) {\n  auto* unittest = testing::UnitTest::GetInstance();\n  for (int i = 0; i < unittest->total_test_suite_count(); ++i) {\n    auto* tests = unittest->GetTestSuite(i);\n    if (tests->name() != std::string(\"DynamicUnitTestFixture\")) continue;\n    for (int j = 0; j < tests->total_test_count(); ++j) {\n      if (tests->GetTestInfo(j)->name() != std::string(\"DynamicTest\")) continue;\n      // Found it.\n      EXPECT_STREQ(tests->GetTestInfo(j)->value_param(), \"VALUE\");\n      EXPECT_STREQ(tests->GetTestInfo(j)->type_param(), \"TYPE\");\n      return;\n    }\n  }\n\n  FAIL() << \"Didn't find the test!\";\n}\n"
  },
  {
    "path": "test/googletest/test/gtest_xml_outfile1_test_.cc",
    "content": "// Copyright 2008, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n//\n// gtest_xml_outfile1_test_ writes some xml via TestProperty used by\n// gtest_xml_outfiles_test.py\n\n#include \"gtest/gtest.h\"\n\nclass PropertyOne : public testing::Test {\n protected:\n  void SetUp() override { RecordProperty(\"SetUpProp\", 1); }\n  void TearDown() override { RecordProperty(\"TearDownProp\", 1); }\n};\n\nTEST_F(PropertyOne, TestSomeProperties) {\n  RecordProperty(\"TestSomeProperty\", 1);\n}\n"
  },
  {
    "path": "test/googletest/test/gtest_xml_outfile2_test_.cc",
    "content": "// Copyright 2008, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n//\n// gtest_xml_outfile2_test_ writes some xml via TestProperty used by\n// gtest_xml_outfiles_test.py\n\n#include \"gtest/gtest.h\"\n\nclass PropertyTwo : public testing::Test {\n protected:\n  void SetUp() override { RecordProperty(\"SetUpProp\", 2); }\n  void TearDown() override { RecordProperty(\"TearDownProp\", 2); }\n};\n\nTEST_F(PropertyTwo, TestSomeProperties) {\n  RecordProperty(\"TestSomeProperty\", 2);\n}\n"
  },
  {
    "path": "test/googletest/test/gtest_xml_outfiles_test.py",
    "content": "#!/usr/bin/env python\n#\n# Copyright 2008, Google Inc.\n# All rights reserved.\n#\n# Redistribution and use in source and binary forms, with or without\n# modification, are permitted provided that the following conditions are\n# met:\n#\n#     * Redistributions of source code must retain the above copyright\n# notice, this list of conditions and the following disclaimer.\n#     * Redistributions in binary form must reproduce the above\n# copyright notice, this list of conditions and the following disclaimer\n# in the documentation and/or other materials provided with the\n# distribution.\n#     * Neither the name of Google Inc. nor the names of its\n# contributors may be used to endorse or promote products derived from\n# this software without specific prior written permission.\n#\n# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n# \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\"\"\"Unit test for the gtest_xml_output module.\"\"\"\n\nimport os\nfrom xml.dom import minidom, Node\nimport gtest_test_utils\nimport gtest_xml_test_utils\n\nGTEST_OUTPUT_SUBDIR = \"xml_outfiles\"\nGTEST_OUTPUT_1_TEST = \"gtest_xml_outfile1_test_\"\nGTEST_OUTPUT_2_TEST = \"gtest_xml_outfile2_test_\"\n\nEXPECTED_XML_1 = \"\"\"<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<testsuites tests=\"1\" failures=\"0\" disabled=\"0\" errors=\"0\" time=\"*\" timestamp=\"*\" name=\"AllTests\">\n  <testsuite name=\"PropertyOne\" tests=\"1\" failures=\"0\" disabled=\"0\" errors=\"0\" time=\"*\" timestamp=\"*\">\n    <testcase name=\"TestSomeProperties\" status=\"run\" result=\"completed\" time=\"*\" timestamp=\"*\" classname=\"PropertyOne\">\n      <properties>\n        <property name=\"SetUpProp\" value=\"1\"/>\n        <property name=\"TestSomeProperty\" value=\"1\"/>\n        <property name=\"TearDownProp\" value=\"1\"/>\n      </properties>\n    </testcase>\n  </testsuite>\n</testsuites>\n\"\"\"\n\nEXPECTED_XML_2 = \"\"\"<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<testsuites tests=\"1\" failures=\"0\" disabled=\"0\" errors=\"0\" time=\"*\" timestamp=\"*\" name=\"AllTests\">\n  <testsuite name=\"PropertyTwo\" tests=\"1\" failures=\"0\" disabled=\"0\" errors=\"0\" time=\"*\" timestamp=\"*\">\n    <testcase name=\"TestSomeProperties\" status=\"run\" result=\"completed\" time=\"*\" timestamp=\"*\" classname=\"PropertyTwo\">\n      <properties>\n       
 <property name=\"SetUpProp\" value=\"2\"/>\n        <property name=\"TestSomeProperty\" value=\"2\"/>\n        <property name=\"TearDownProp\" value=\"2\"/>\n      </properties>\n    </testcase>\n  </testsuite>\n</testsuites>\n\"\"\"\n\n\nclass GTestXMLOutFilesTest(gtest_xml_test_utils.GTestXMLTestCase):\n  \"\"\"Unit test for Google Test's XML output functionality.\"\"\"\n\n  def setUp(self):\n    # We want the trailing '/' that the last \"\" provides in os.path.join, for\n    # telling Google Test to create an output directory instead of a single file\n    # for xml output.\n    self.output_dir_ = os.path.join(gtest_test_utils.GetTempDir(),\n                                    GTEST_OUTPUT_SUBDIR, \"\")\n    self.DeleteFilesAndDir()\n\n  def tearDown(self):\n    self.DeleteFilesAndDir()\n\n  def DeleteFilesAndDir(self):\n    try:\n      os.remove(os.path.join(self.output_dir_, GTEST_OUTPUT_1_TEST + \".xml\"))\n    except os.error:\n      pass\n    try:\n      os.remove(os.path.join(self.output_dir_, GTEST_OUTPUT_2_TEST + \".xml\"))\n    except os.error:\n      pass\n    try:\n      os.rmdir(self.output_dir_)\n    except os.error:\n      pass\n\n  def testOutfile1(self):\n    self._TestOutFile(GTEST_OUTPUT_1_TEST, EXPECTED_XML_1)\n\n  def testOutfile2(self):\n    self._TestOutFile(GTEST_OUTPUT_2_TEST, EXPECTED_XML_2)\n\n  def _TestOutFile(self, test_name, expected_xml):\n    gtest_prog_path = gtest_test_utils.GetTestExecutablePath(test_name)\n    command = [gtest_prog_path, \"--gtest_output=xml:%s\" % self.output_dir_]\n    p = gtest_test_utils.Subprocess(command,\n                                    working_dir=gtest_test_utils.GetTempDir())\n    self.assert_(p.exited)\n    self.assertEquals(0, p.exit_code)\n\n    output_file_name1 = test_name + \".xml\"\n    output_file1 = os.path.join(self.output_dir_, output_file_name1)\n    output_file_name2 = 'lt-' + output_file_name1\n    output_file2 = os.path.join(self.output_dir_, output_file_name2)\n    
self.assert_(os.path.isfile(output_file1) or os.path.isfile(output_file2),\n                 output_file1)\n\n    expected = minidom.parseString(expected_xml)\n    if os.path.isfile(output_file1):\n      actual = minidom.parse(output_file1)\n    else:\n      actual = minidom.parse(output_file2)\n    self.NormalizeXml(actual.documentElement)\n    self.AssertEquivalentNodes(expected.documentElement,\n                               actual.documentElement)\n    expected.unlink()\n    actual.unlink()\n\n\nif __name__ == \"__main__\":\n  os.environ[\"GTEST_STACK_TRACE_DEPTH\"] = \"0\"\n  gtest_test_utils.Main()\n"
  },
  {
    "path": "test/googletest/test/gtest_xml_output_unittest.py",
    "content": "#!/usr/bin/env python\n#\n# Copyright 2006, Google Inc.\n# All rights reserved.\n#\n# Redistribution and use in source and binary forms, with or without\n# modification, are permitted provided that the following conditions are\n# met:\n#\n#     * Redistributions of source code must retain the above copyright\n# notice, this list of conditions and the following disclaimer.\n#     * Redistributions in binary form must reproduce the above\n# copyright notice, this list of conditions and the following disclaimer\n# in the documentation and/or other materials provided with the\n# distribution.\n#     * Neither the name of Google Inc. nor the names of its\n# contributors may be used to endorse or promote products derived from\n# this software without specific prior written permission.\n#\n# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n# \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\"\"\"Unit test for the gtest_xml_output module\"\"\"\n\nimport datetime\nimport errno\nimport os\nimport re\nimport sys\nfrom xml.dom import minidom, Node\n\nimport gtest_test_utils\nimport gtest_xml_test_utils\n\nGTEST_FILTER_FLAG = '--gtest_filter'\nGTEST_LIST_TESTS_FLAG = '--gtest_list_tests'\nGTEST_OUTPUT_FLAG = '--gtest_output'\nGTEST_DEFAULT_OUTPUT_FILE = 'test_detail.xml'\nGTEST_PROGRAM_NAME = 'gtest_xml_output_unittest_'\n\n# The flag indicating stacktraces are not supported\nNO_STACKTRACE_SUPPORT_FLAG = '--no_stacktrace_support'\n\n# The environment variables for test sharding.\nTOTAL_SHARDS_ENV_VAR = 'GTEST_TOTAL_SHARDS'\nSHARD_INDEX_ENV_VAR = 'GTEST_SHARD_INDEX'\nSHARD_STATUS_FILE_ENV_VAR = 'GTEST_SHARD_STATUS_FILE'\n\nSUPPORTS_STACK_TRACES = NO_STACKTRACE_SUPPORT_FLAG not in sys.argv\n\nif SUPPORTS_STACK_TRACES:\n  STACK_TRACE_TEMPLATE = '\\nStack trace:\\n*'\nelse:\n  STACK_TRACE_TEMPLATE = ''\n  # unittest.main() can't handle unknown flags\n  sys.argv.remove(NO_STACKTRACE_SUPPORT_FLAG)\n\nEXPECTED_NON_EMPTY_XML = \"\"\"<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<testsuites tests=\"24\" failures=\"4\" disabled=\"2\" errors=\"0\" time=\"*\" timestamp=\"*\" name=\"AllTests\" ad_hoc_property=\"42\">\n  <testsuite name=\"SuccessfulTest\" tests=\"1\" failures=\"0\" disabled=\"0\" errors=\"0\" time=\"*\" timestamp=\"*\">\n    <testcase name=\"Succeeds\" status=\"run\" result=\"completed\" time=\"*\" timestamp=\"*\" 
classname=\"SuccessfulTest\"/>\n  </testsuite>\n  <testsuite name=\"FailedTest\" tests=\"1\" failures=\"1\" disabled=\"0\" errors=\"0\" time=\"*\" timestamp=\"*\">\n    <testcase name=\"Fails\" status=\"run\" result=\"completed\" time=\"*\" timestamp=\"*\" classname=\"FailedTest\">\n      <failure message=\"gtest_xml_output_unittest_.cc:*&#x0A;Expected equality of these values:&#x0A;  1&#x0A;  2\" type=\"\"><![CDATA[gtest_xml_output_unittest_.cc:*\nExpected equality of these values:\n  1\n  2%(stack)s]]></failure>\n    </testcase>\n  </testsuite>\n  <testsuite name=\"MixedResultTest\" tests=\"3\" failures=\"1\" disabled=\"1\" errors=\"0\" time=\"*\" timestamp=\"*\">\n    <testcase name=\"Succeeds\" status=\"run\" result=\"completed\" time=\"*\" timestamp=\"*\" classname=\"MixedResultTest\"/>\n    <testcase name=\"Fails\" status=\"run\" result=\"completed\" time=\"*\" timestamp=\"*\" classname=\"MixedResultTest\">\n      <failure message=\"gtest_xml_output_unittest_.cc:*&#x0A;Expected equality of these values:&#x0A;  1&#x0A;  2\" type=\"\"><![CDATA[gtest_xml_output_unittest_.cc:*\nExpected equality of these values:\n  1\n  2%(stack)s]]></failure>\n      <failure message=\"gtest_xml_output_unittest_.cc:*&#x0A;Expected equality of these values:&#x0A;  2&#x0A;  3\" type=\"\"><![CDATA[gtest_xml_output_unittest_.cc:*\nExpected equality of these values:\n  2\n  3%(stack)s]]></failure>\n    </testcase>\n    <testcase name=\"DISABLED_test\" status=\"notrun\" result=\"suppressed\" time=\"*\" timestamp=\"*\" classname=\"MixedResultTest\"/>\n  </testsuite>\n  <testsuite name=\"XmlQuotingTest\" tests=\"1\" failures=\"1\" disabled=\"0\" errors=\"0\" time=\"*\" timestamp=\"*\">\n    <testcase name=\"OutputsCData\" status=\"run\" result=\"completed\" time=\"*\" timestamp=\"*\" classname=\"XmlQuotingTest\">\n      <failure message=\"gtest_xml_output_unittest_.cc:*&#x0A;Failed&#x0A;XML output: &lt;?xml encoding=&quot;utf-8&quot;&gt;&lt;top&gt;&lt;![CDATA[cdata 
text]]&gt;&lt;/top&gt;\" type=\"\"><![CDATA[gtest_xml_output_unittest_.cc:*\nFailed\nXML output: <?xml encoding=\"utf-8\"><top><![CDATA[cdata text]]>]]&gt;<![CDATA[</top>%(stack)s]]></failure>\n    </testcase>\n  </testsuite>\n  <testsuite name=\"InvalidCharactersTest\" tests=\"1\" failures=\"1\" disabled=\"0\" errors=\"0\" time=\"*\" timestamp=\"*\">\n    <testcase name=\"InvalidCharactersInMessage\" status=\"run\" result=\"completed\" time=\"*\" timestamp=\"*\" classname=\"InvalidCharactersTest\">\n      <failure message=\"gtest_xml_output_unittest_.cc:*&#x0A;Failed&#x0A;Invalid characters in brackets []\" type=\"\"><![CDATA[gtest_xml_output_unittest_.cc:*\nFailed\nInvalid characters in brackets []%(stack)s]]></failure>\n    </testcase>\n  </testsuite>\n  <testsuite name=\"DisabledTest\" tests=\"1\" failures=\"0\" disabled=\"1\" errors=\"0\" time=\"*\" timestamp=\"*\">\n    <testcase name=\"DISABLED_test_not_run\" status=\"notrun\" result=\"suppressed\" time=\"*\" timestamp=\"*\" classname=\"DisabledTest\"/>\n  </testsuite>\n  <testsuite name=\"SkippedTest\" tests=\"1\" failures=\"0\" disabled=\"0\" errors=\"0\" time=\"*\" timestamp=\"*\">\n    <testcase name=\"Skipped\" status=\"run\" result=\"skipped\" time=\"*\" timestamp=\"*\" classname=\"SkippedTest\"/>\n  </testsuite>\n  <testsuite name=\"PropertyRecordingTest\" tests=\"4\" failures=\"0\" disabled=\"0\" errors=\"0\" time=\"*\" timestamp=\"*\" SetUpTestSuite=\"yes\" TearDownTestSuite=\"aye\">\n    <testcase name=\"OneProperty\" status=\"run\" result=\"completed\" time=\"*\" timestamp=\"*\" classname=\"PropertyRecordingTest\">\n      <properties>\n        <property name=\"key_1\" value=\"1\"/>\n      </properties>\n    </testcase>\n    <testcase name=\"IntValuedProperty\" status=\"run\" result=\"completed\" time=\"*\" timestamp=\"*\" classname=\"PropertyRecordingTest\">\n      <properties>\n        <property name=\"key_int\" value=\"1\"/>\n      </properties>\n    </testcase>\n    <testcase 
name=\"ThreeProperties\" status=\"run\" result=\"completed\" time=\"*\" timestamp=\"*\" classname=\"PropertyRecordingTest\">\n      <properties>\n        <property name=\"key_1\" value=\"1\"/>\n        <property name=\"key_2\" value=\"2\"/>\n        <property name=\"key_3\" value=\"3\"/>\n      </properties>\n    </testcase>\n    <testcase name=\"TwoValuesForOneKeyUsesLastValue\" status=\"run\" result=\"completed\" time=\"*\" timestamp=\"*\" classname=\"PropertyRecordingTest\">\n      <properties>\n        <property name=\"key_1\" value=\"2\"/>\n      </properties>\n    </testcase>\n  </testsuite>\n  <testsuite name=\"NoFixtureTest\" tests=\"3\" failures=\"0\" disabled=\"0\" errors=\"0\" time=\"*\" timestamp=\"*\">\n     <testcase name=\"RecordProperty\" status=\"run\" result=\"completed\" time=\"*\" timestamp=\"*\" classname=\"NoFixtureTest\">\n       <properties>\n         <property name=\"key\" value=\"1\"/>\n       </properties>\n     </testcase>\n     <testcase name=\"ExternalUtilityThatCallsRecordIntValuedProperty\" status=\"run\" result=\"completed\" time=\"*\" timestamp=\"*\" classname=\"NoFixtureTest\">\n       <properties>\n         <property name=\"key_for_utility_int\" value=\"1\"/>\n       </properties>\n     </testcase>\n     <testcase name=\"ExternalUtilityThatCallsRecordStringValuedProperty\" status=\"run\" result=\"completed\" time=\"*\" timestamp=\"*\" classname=\"NoFixtureTest\">\n       <properties>\n         <property name=\"key_for_utility_string\" value=\"1\"/>\n       </properties>\n     </testcase>\n  </testsuite>\n  <testsuite name=\"Single/ValueParamTest\" tests=\"4\" failures=\"0\" disabled=\"0\" errors=\"0\" time=\"*\" timestamp=\"*\">\n    <testcase name=\"HasValueParamAttribute/0\" value_param=\"33\" status=\"run\" result=\"completed\" time=\"*\" timestamp=\"*\" classname=\"Single/ValueParamTest\" />\n    <testcase name=\"HasValueParamAttribute/1\" value_param=\"42\" status=\"run\" result=\"completed\" time=\"*\" timestamp=\"*\" 
classname=\"Single/ValueParamTest\" />\n    <testcase name=\"AnotherTestThatHasValueParamAttribute/0\" value_param=\"33\" status=\"run\" result=\"completed\" time=\"*\" timestamp=\"*\" classname=\"Single/ValueParamTest\" />\n    <testcase name=\"AnotherTestThatHasValueParamAttribute/1\" value_param=\"42\" status=\"run\" result=\"completed\" time=\"*\" timestamp=\"*\" classname=\"Single/ValueParamTest\" />\n  </testsuite>\n  <testsuite name=\"TypedTest/0\" tests=\"1\" failures=\"0\" disabled=\"0\" errors=\"0\" time=\"*\" timestamp=\"*\">\n    <testcase name=\"HasTypeParamAttribute\" type_param=\"*\" status=\"run\" result=\"completed\" time=\"*\" timestamp=\"*\" classname=\"TypedTest/0\" />\n  </testsuite>\n  <testsuite name=\"TypedTest/1\" tests=\"1\" failures=\"0\" disabled=\"0\" errors=\"0\" time=\"*\" timestamp=\"*\">\n    <testcase name=\"HasTypeParamAttribute\" type_param=\"*\" status=\"run\" result=\"completed\" time=\"*\" timestamp=\"*\" classname=\"TypedTest/1\" />\n  </testsuite>\n  <testsuite name=\"Single/TypeParameterizedTestSuite/0\" tests=\"1\" failures=\"0\" disabled=\"0\" errors=\"0\" time=\"*\" timestamp=\"*\">\n    <testcase name=\"HasTypeParamAttribute\" type_param=\"*\" status=\"run\" result=\"completed\" time=\"*\" timestamp=\"*\" classname=\"Single/TypeParameterizedTestSuite/0\" />\n  </testsuite>\n  <testsuite name=\"Single/TypeParameterizedTestSuite/1\" tests=\"1\" failures=\"0\" disabled=\"0\" errors=\"0\" time=\"*\" timestamp=\"*\">\n    <testcase name=\"HasTypeParamAttribute\" type_param=\"*\" status=\"run\" result=\"completed\" time=\"*\" timestamp=\"*\" classname=\"Single/TypeParameterizedTestSuite/1\" />\n  </testsuite>\n</testsuites>\"\"\" % {\n    'stack': STACK_TRACE_TEMPLATE\n}\n\nEXPECTED_FILTERED_TEST_XML = \"\"\"<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<testsuites tests=\"1\" failures=\"0\" disabled=\"0\" errors=\"0\" time=\"*\"\n            timestamp=\"*\" name=\"AllTests\" ad_hoc_property=\"42\">\n  <testsuite 
name=\"SuccessfulTest\" tests=\"1\" failures=\"0\" disabled=\"0\"\n             errors=\"0\" time=\"*\" timestamp=\"*\">\n    <testcase name=\"Succeeds\" status=\"run\" result=\"completed\" time=\"*\" timestamp=\"*\" classname=\"SuccessfulTest\"/>\n  </testsuite>\n</testsuites>\"\"\"\n\nEXPECTED_SHARDED_TEST_XML = \"\"\"<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<testsuites tests=\"3\" failures=\"0\" disabled=\"0\" errors=\"0\" time=\"*\" timestamp=\"*\" name=\"AllTests\" ad_hoc_property=\"42\">\n  <testsuite name=\"SuccessfulTest\" tests=\"1\" failures=\"0\" disabled=\"0\" errors=\"0\" time=\"*\" timestamp=\"*\">\n    <testcase name=\"Succeeds\" status=\"run\" result=\"completed\" time=\"*\" timestamp=\"*\" classname=\"SuccessfulTest\"/>\n  </testsuite>\n  <testsuite name=\"PropertyRecordingTest\" tests=\"1\" failures=\"0\" disabled=\"0\" errors=\"0\" time=\"*\" timestamp=\"*\" SetUpTestSuite=\"yes\" TearDownTestSuite=\"aye\">\n    <testcase name=\"TwoValuesForOneKeyUsesLastValue\" status=\"run\" result=\"completed\" time=\"*\" timestamp=\"*\" classname=\"PropertyRecordingTest\">\n      <properties>\n        <property name=\"key_1\" value=\"2\"/>\n      </properties>\n    </testcase>\n  </testsuite>\n  <testsuite name=\"Single/ValueParamTest\" tests=\"1\" failures=\"0\" disabled=\"0\" errors=\"0\" time=\"*\" timestamp=\"*\">\n    <testcase name=\"AnotherTestThatHasValueParamAttribute/0\" value_param=\"33\" status=\"run\" result=\"completed\" time=\"*\" timestamp=\"*\" classname=\"Single/ValueParamTest\" />\n  </testsuite>\n</testsuites>\"\"\"\n\nEXPECTED_EMPTY_XML = \"\"\"<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<testsuites tests=\"0\" failures=\"0\" disabled=\"0\" errors=\"0\" time=\"*\"\n            timestamp=\"*\" name=\"AllTests\">\n</testsuites>\"\"\"\n\nGTEST_PROGRAM_PATH = gtest_test_utils.GetTestExecutablePath(GTEST_PROGRAM_NAME)\n\nSUPPORTS_TYPED_TESTS = 'TypedTest' in gtest_test_utils.Subprocess(\n    [GTEST_PROGRAM_PATH, GTEST_LIST_TESTS_FLAG], 
capture_stderr=False).output\n\n\nclass GTestXMLOutputUnitTest(gtest_xml_test_utils.GTestXMLTestCase):\n  \"\"\"\n  Unit test for Google Test's XML output functionality.\n  \"\"\"\n\n  # This test currently breaks on platforms that do not support typed and\n  # type-parameterized tests, so we don't run it under them.\n  if SUPPORTS_TYPED_TESTS:\n    def testNonEmptyXmlOutput(self):\n      \"\"\"\n      Runs a test program that generates a non-empty XML output, and\n      tests that the XML output is expected.\n      \"\"\"\n      self._TestXmlOutput(GTEST_PROGRAM_NAME, EXPECTED_NON_EMPTY_XML, 1)\n\n  def testEmptyXmlOutput(self):\n    \"\"\"Verifies XML output for a Google Test binary without actual tests.\n\n    Runs a test program that generates an empty XML output, and\n    tests that the XML output is expected.\n    \"\"\"\n\n    self._TestXmlOutput('gtest_no_test_unittest', EXPECTED_EMPTY_XML, 0)\n\n  def testTimestampValue(self):\n    \"\"\"Checks whether the timestamp attribute in the XML output is valid.\n\n    Runs a test program that generates an empty XML output, and checks if\n    the timestamp attribute in the testsuites tag is valid.\n    \"\"\"\n    actual = self._GetXmlOutput('gtest_no_test_unittest', [], {}, 0)\n    date_time_str = actual.documentElement.getAttributeNode('timestamp').value\n    # datetime.strptime() is only available in Python 2.5+ so we have to\n    # parse the expected datetime manually.\n    match = re.match(r'(\\d+)-(\\d\\d)-(\\d\\d)T(\\d\\d):(\\d\\d):(\\d\\d)', date_time_str)\n    self.assertTrue(\n        re.match,\n        'XML datettime string %s has incorrect format' % date_time_str)\n    date_time_from_xml = datetime.datetime(\n        year=int(match.group(1)), month=int(match.group(2)),\n        day=int(match.group(3)), hour=int(match.group(4)),\n        minute=int(match.group(5)), second=int(match.group(6)))\n\n    time_delta = abs(datetime.datetime.now() - date_time_from_xml)\n    # timestamp value should be near the 
current local time\n    self.assertTrue(time_delta < datetime.timedelta(seconds=600),\n                    'time_delta is %s' % time_delta)\n    actual.unlink()\n\n  def testDefaultOutputFile(self):\n    \"\"\"\n    Confirms that Google Test produces an XML output file with the expected\n    default name if no name is explicitly specified.\n    \"\"\"\n    output_file = os.path.join(gtest_test_utils.GetTempDir(),\n                               GTEST_DEFAULT_OUTPUT_FILE)\n    gtest_prog_path = gtest_test_utils.GetTestExecutablePath(\n        'gtest_no_test_unittest')\n    try:\n      os.remove(output_file)\n    except OSError:\n      e = sys.exc_info()[1]\n      if e.errno != errno.ENOENT:\n        raise\n\n    p = gtest_test_utils.Subprocess(\n        [gtest_prog_path, '%s=xml' % GTEST_OUTPUT_FLAG],\n        working_dir=gtest_test_utils.GetTempDir())\n    self.assert_(p.exited)\n    self.assertEquals(0, p.exit_code)\n    self.assert_(os.path.isfile(output_file))\n\n  def testSuppressedXmlOutput(self):\n    \"\"\"\n    Tests that no XML file is generated if the default XML listener is\n    shut down before RUN_ALL_TESTS is invoked.\n    \"\"\"\n\n    xml_path = os.path.join(gtest_test_utils.GetTempDir(),\n                            GTEST_PROGRAM_NAME + 'out.xml')\n    if os.path.isfile(xml_path):\n      os.remove(xml_path)\n\n    command = [GTEST_PROGRAM_PATH,\n               '%s=xml:%s' % (GTEST_OUTPUT_FLAG, xml_path),\n               '--shut_down_xml']\n    p = gtest_test_utils.Subprocess(command)\n    if p.terminated_by_signal:\n      # p.signal is available only if p.terminated_by_signal is True.\n      self.assertFalse(\n          p.terminated_by_signal,\n          '%s was killed by signal %d' % (GTEST_PROGRAM_NAME, p.signal))\n    else:\n      self.assert_(p.exited)\n      self.assertEquals(1, p.exit_code,\n                        \"'%s' exited with code %s, which doesn't match \"\n                        'the expected exit code %s.'\n                        
% (command, p.exit_code, 1))\n\n    self.assert_(not os.path.isfile(xml_path))\n\n  def testFilteredTestXmlOutput(self):\n    \"\"\"Verifies XML output when a filter is applied.\n\n    Runs a test program that executes only some tests and verifies that\n    non-selected tests do not show up in the XML output.\n    \"\"\"\n\n    self._TestXmlOutput(GTEST_PROGRAM_NAME, EXPECTED_FILTERED_TEST_XML, 0,\n                        extra_args=['%s=SuccessfulTest.*' % GTEST_FILTER_FLAG])\n\n  def testShardedTestXmlOutput(self):\n    \"\"\"Verifies XML output when run using multiple shards.\n\n    Runs a test program that executes only one shard and verifies that tests\n    from other shards do not show up in the XML output.\n    \"\"\"\n\n    self._TestXmlOutput(\n        GTEST_PROGRAM_NAME,\n        EXPECTED_SHARDED_TEST_XML,\n        0,\n        extra_env={SHARD_INDEX_ENV_VAR: '0',\n                   TOTAL_SHARDS_ENV_VAR: '10'})\n\n  def _GetXmlOutput(self, gtest_prog_name, extra_args, extra_env,\n                    expected_exit_code):\n    \"\"\"\n    Returns the xml output generated by running the program gtest_prog_name.\n    Furthermore, the program's exit code must be expected_exit_code.\n    \"\"\"\n    xml_path = os.path.join(gtest_test_utils.GetTempDir(),\n                            gtest_prog_name + 'out.xml')\n    gtest_prog_path = gtest_test_utils.GetTestExecutablePath(gtest_prog_name)\n\n    command = ([gtest_prog_path, '%s=xml:%s' % (GTEST_OUTPUT_FLAG, xml_path)] +\n               extra_args)\n    environ_copy = os.environ.copy()\n    if extra_env:\n      environ_copy.update(extra_env)\n    p = gtest_test_utils.Subprocess(command, env=environ_copy)\n\n    if p.terminated_by_signal:\n      self.assert_(False,\n                   '%s was killed by signal %d' % (gtest_prog_name, p.signal))\n    else:\n      self.assert_(p.exited)\n      self.assertEquals(expected_exit_code, p.exit_code,\n                        \"'%s' exited with code %s, which doesn't match 
\"\n                        'the expected exit code %s.'\n                        % (command, p.exit_code, expected_exit_code))\n    actual = minidom.parse(xml_path)\n    return actual\n\n  def _TestXmlOutput(self, gtest_prog_name, expected_xml,\n                     expected_exit_code, extra_args=None, extra_env=None):\n    \"\"\"\n    Asserts that the XML document generated by running the program\n    gtest_prog_name matches expected_xml, a string containing another\n    XML document.  Furthermore, the program's exit code must be\n    expected_exit_code.\n    \"\"\"\n\n    actual = self._GetXmlOutput(gtest_prog_name, extra_args or [],\n                                extra_env or {}, expected_exit_code)\n    expected = minidom.parseString(expected_xml)\n    self.NormalizeXml(actual.documentElement)\n    self.AssertEquivalentNodes(expected.documentElement,\n                               actual.documentElement)\n    expected.unlink()\n    actual.unlink()\n\n\nif __name__ == '__main__':\n  os.environ['GTEST_STACK_TRACE_DEPTH'] = '1'\n  gtest_test_utils.Main()\n"
  },
  {
    "path": "test/googletest/test/gtest_xml_output_unittest_.cc",
    "content": "// Copyright 2006, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n// Unit test for Google Test XML output.\n//\n// A user can specify XML output in a Google Test program to run via\n// either the GTEST_OUTPUT environment variable or the --gtest_output\n// flag.  This is used for testing such functionality.\n//\n// This program will be invoked from a Python unit test.  
Don't run it\n// directly.\n\n#include \"gtest/gtest.h\"\n\nusing ::testing::InitGoogleTest;\nusing ::testing::TestEventListeners;\nusing ::testing::TestWithParam;\nusing ::testing::UnitTest;\nusing ::testing::Test;\nusing ::testing::Values;\n\nclass SuccessfulTest : public Test {\n};\n\nTEST_F(SuccessfulTest, Succeeds) {\n  SUCCEED() << \"This is a success.\";\n  ASSERT_EQ(1, 1);\n}\n\nclass FailedTest : public Test {\n};\n\nTEST_F(FailedTest, Fails) {\n  ASSERT_EQ(1, 2);\n}\n\nclass DisabledTest : public Test {\n};\n\nTEST_F(DisabledTest, DISABLED_test_not_run) {\n  FAIL() << \"Unexpected failure: Disabled test should not be run\";\n}\n\nclass SkippedTest : public Test {\n};\n\nTEST_F(SkippedTest, Skipped) {\n  GTEST_SKIP();\n}\n\nTEST(MixedResultTest, Succeeds) {\n  EXPECT_EQ(1, 1);\n  ASSERT_EQ(1, 1);\n}\n\nTEST(MixedResultTest, Fails) {\n  EXPECT_EQ(1, 2);\n  ASSERT_EQ(2, 3);\n}\n\nTEST(MixedResultTest, DISABLED_test) {\n  FAIL() << \"Unexpected failure: Disabled test should not be run\";\n}\n\nTEST(XmlQuotingTest, OutputsCData) {\n  FAIL() << \"XML output: \"\n            \"<?xml encoding=\\\"utf-8\\\"><top><![CDATA[cdata text]]></top>\";\n}\n\n// Helps to test that invalid characters produced by test code do not make\n// it into the XML file.\nTEST(InvalidCharactersTest, InvalidCharactersInMessage) {\n  FAIL() << \"Invalid characters in brackets [\\x1\\x2]\";\n}\n\nclass PropertyRecordingTest : public Test {\n public:\n  static void SetUpTestSuite() { RecordProperty(\"SetUpTestSuite\", \"yes\"); }\n  static void TearDownTestSuite() {\n    RecordProperty(\"TearDownTestSuite\", \"aye\");\n  }\n};\n\nTEST_F(PropertyRecordingTest, OneProperty) {\n  RecordProperty(\"key_1\", \"1\");\n}\n\nTEST_F(PropertyRecordingTest, IntValuedProperty) {\n  RecordProperty(\"key_int\", 1);\n}\n\nTEST_F(PropertyRecordingTest, ThreeProperties) {\n  RecordProperty(\"key_1\", \"1\");\n  RecordProperty(\"key_2\", \"2\");\n  RecordProperty(\"key_3\", 
\"3\");\n}\n\nTEST_F(PropertyRecordingTest, TwoValuesForOneKeyUsesLastValue) {\n  RecordProperty(\"key_1\", \"1\");\n  RecordProperty(\"key_1\", \"2\");\n}\n\nTEST(NoFixtureTest, RecordProperty) {\n  RecordProperty(\"key\", \"1\");\n}\n\nvoid ExternalUtilityThatCallsRecordProperty(const std::string& key, int value) {\n  testing::Test::RecordProperty(key, value);\n}\n\nvoid ExternalUtilityThatCallsRecordProperty(const std::string& key,\n                                            const std::string& value) {\n  testing::Test::RecordProperty(key, value);\n}\n\nTEST(NoFixtureTest, ExternalUtilityThatCallsRecordIntValuedProperty) {\n  ExternalUtilityThatCallsRecordProperty(\"key_for_utility_int\", 1);\n}\n\nTEST(NoFixtureTest, ExternalUtilityThatCallsRecordStringValuedProperty) {\n  ExternalUtilityThatCallsRecordProperty(\"key_for_utility_string\", \"1\");\n}\n\n// Verifies that the test parameter value is output in the 'value_param'\n// XML attribute for value-parameterized tests.\nclass ValueParamTest : public TestWithParam<int> {};\nTEST_P(ValueParamTest, HasValueParamAttribute) {}\nTEST_P(ValueParamTest, AnotherTestThatHasValueParamAttribute) {}\nINSTANTIATE_TEST_SUITE_P(Single, ValueParamTest, Values(33, 42));\n\n#if GTEST_HAS_TYPED_TEST\n// Verifies that the type parameter name is output in the 'type_param'\n// XML attribute for typed tests.\ntemplate <typename T> class TypedTest : public Test {};\ntypedef testing::Types<int, long> TypedTestTypes;\nTYPED_TEST_SUITE(TypedTest, TypedTestTypes);\nTYPED_TEST(TypedTest, HasTypeParamAttribute) {}\n#endif\n\n#if GTEST_HAS_TYPED_TEST_P\n// Verifies that the type parameter name is output in the 'type_param'\n// XML attribute for type-parameterized tests.\ntemplate <typename T>\nclass TypeParameterizedTestSuite : public Test {};\nTYPED_TEST_SUITE_P(TypeParameterizedTestSuite);\nTYPED_TEST_P(TypeParameterizedTestSuite, HasTypeParamAttribute) {}\nREGISTER_TYPED_TEST_SUITE_P(TypeParameterizedTestSuite, 
HasTypeParamAttribute);\ntypedef testing::Types<int, long> TypeParameterizedTestSuiteTypes;  // NOLINT\nINSTANTIATE_TYPED_TEST_SUITE_P(Single, TypeParameterizedTestSuite,\n                               TypeParameterizedTestSuiteTypes);\n#endif\n\nint main(int argc, char** argv) {\n  InitGoogleTest(&argc, argv);\n\n  if (argc > 1 && strcmp(argv[1], \"--shut_down_xml\") == 0) {\n    TestEventListeners& listeners = UnitTest::GetInstance()->listeners();\n    delete listeners.Release(listeners.default_xml_generator());\n  }\n  testing::Test::RecordProperty(\"ad_hoc_property\", \"42\");\n  return RUN_ALL_TESTS();\n}\n"
  },
  {
    "path": "test/googletest/test/gtest_xml_test_utils.py",
    "content": "# Copyright 2006, Google Inc.\n# All rights reserved.\n#\n# Redistribution and use in source and binary forms, with or without\n# modification, are permitted provided that the following conditions are\n# met:\n#\n#     * Redistributions of source code must retain the above copyright\n# notice, this list of conditions and the following disclaimer.\n#     * Redistributions in binary form must reproduce the above\n# copyright notice, this list of conditions and the following disclaimer\n# in the documentation and/or other materials provided with the\n# distribution.\n#     * Neither the name of Google Inc. nor the names of its\n# contributors may be used to endorse or promote products derived from\n# this software without specific prior written permission.\n#\n# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n# \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\"\"\"Unit test utilities for gtest_xml_output\"\"\"\n\nimport re\nfrom xml.dom import minidom, Node\nimport gtest_test_utils\n\nGTEST_DEFAULT_OUTPUT_FILE = 'test_detail.xml'\n\nclass GTestXMLTestCase(gtest_test_utils.TestCase):\n  \"\"\"\n  Base class for tests of Google Test's XML output functionality.\n  \"\"\"\n\n\n  def AssertEquivalentNodes(self, expected_node, actual_node):\n    \"\"\"\n    Asserts that actual_node (a DOM node object) is equivalent to\n    expected_node (another DOM node object), in that either both of\n    them are CDATA nodes and have the same value, or both are DOM\n    elements and actual_node meets all of the following conditions:\n\n    *  It has the same tag name as expected_node.\n    *  It has the same set of attributes as expected_node, each with\n       the same value as the corresponding attribute of expected_node.\n       Exceptions are any attribute named \"time\", which needs only be\n       convertible to a floating-point number and any attribute named\n       \"type_param\" which only has to be non-empty.\n    *  It has an equivalent set of child nodes (including elements and\n       CDATA sections) as expected_node.  
Note that we ignore the\n       order of the children as they are not guaranteed to be in any\n       particular order.\n    \"\"\"\n\n    if expected_node.nodeType == Node.CDATA_SECTION_NODE:\n      self.assertEquals(Node.CDATA_SECTION_NODE, actual_node.nodeType)\n      self.assertEquals(expected_node.nodeValue, actual_node.nodeValue)\n      return\n\n    self.assertEquals(Node.ELEMENT_NODE, actual_node.nodeType)\n    self.assertEquals(Node.ELEMENT_NODE, expected_node.nodeType)\n    self.assertEquals(expected_node.tagName, actual_node.tagName)\n\n    expected_attributes = expected_node.attributes\n    actual_attributes   = actual_node  .attributes\n    self.assertEquals(\n        expected_attributes.length, actual_attributes.length,\n        'attribute numbers differ in element %s:\\nExpected: %r\\nActual: %r' % (\n            actual_node.tagName, expected_attributes.keys(),\n            actual_attributes.keys()))\n    for i in range(expected_attributes.length):\n      expected_attr = expected_attributes.item(i)\n      actual_attr   = actual_attributes.get(expected_attr.name)\n      self.assert_(\n          actual_attr is not None,\n          'expected attribute %s not found in element %s' %\n          (expected_attr.name, actual_node.tagName))\n      self.assertEquals(\n          expected_attr.value, actual_attr.value,\n          ' values of attribute %s in element %s differ: %s vs %s' %\n          (expected_attr.name, actual_node.tagName,\n           expected_attr.value, actual_attr.value))\n\n    expected_children = self._GetChildren(expected_node)\n    actual_children = self._GetChildren(actual_node)\n    self.assertEquals(\n        len(expected_children), len(actual_children),\n        'number of child elements differ in element ' + actual_node.tagName)\n    for child_id, child in expected_children.items():\n      self.assert_(child_id in actual_children,\n                   '<%s> is not in <%s> (in element %s)' %\n                   (child_id, 
actual_children, actual_node.tagName))\n      self.AssertEquivalentNodes(child, actual_children[child_id])\n\n  identifying_attribute = {\n      'testsuites': 'name',\n      'testsuite': 'name',\n      'testcase': 'name',\n      'failure': 'message',\n      'property': 'name',\n  }\n\n  def _GetChildren(self, element):\n    \"\"\"\n    Fetches all of the child nodes of element, a DOM Element object.\n    Returns them as the values of a dictionary keyed by the IDs of the\n    children.  For <testsuites>, <testsuite>, <testcase>, and <property>\n    elements, the ID is the value of their \"name\" attribute; for <failure>\n    elements, it is the value of the \"message\" attribute; for <properties>\n    elements, it is the value of their parent's \"name\" attribute plus the\n    literal string \"properties\"; CDATA sections and non-whitespace\n    text nodes are concatenated into a single CDATA section with ID\n    \"detail\".  An exception is raised if any element other than the above\n    four is encountered, if two child elements with the same identifying\n    attributes are encountered, or if any other type of node is encountered.\n    \"\"\"\n\n    children = {}\n    for child in element.childNodes:\n      if child.nodeType == Node.ELEMENT_NODE:\n        if child.tagName == 'properties':\n          self.assert_(child.parentNode is not None,\n                       'Encountered <properties> element without a parent')\n          child_id = child.parentNode.getAttribute('name') + '-properties'\n        else:\n          self.assert_(child.tagName in self.identifying_attribute,\n                       'Encountered unknown element <%s>' % child.tagName)\n          child_id = child.getAttribute(\n              self.identifying_attribute[child.tagName])\n        self.assert_(child_id not in children)\n        children[child_id] = child\n      elif child.nodeType in [Node.TEXT_NODE, Node.CDATA_SECTION_NODE]:\n        if 'detail' not in children:\n          if 
(child.nodeType == Node.CDATA_SECTION_NODE or\n              not child.nodeValue.isspace()):\n            children['detail'] = child.ownerDocument.createCDATASection(\n                child.nodeValue)\n        else:\n          children['detail'].nodeValue += child.nodeValue\n      else:\n        self.fail('Encountered unexpected node type %d' % child.nodeType)\n    return children\n\n  def NormalizeXml(self, element):\n    \"\"\"\n    Normalizes Google Test's XML output to eliminate references to transient\n    information that may change from run to run.\n\n    *  The \"time\" attribute of <testsuites>, <testsuite> and <testcase>\n       elements is replaced with a single asterisk, if it contains\n       only digit characters.\n    *  The \"timestamp\" attribute of <testsuites> elements is replaced with a\n       single asterisk, if it contains a valid ISO8601 datetime value.\n    *  The \"type_param\" attribute of <testcase> elements is replaced with a\n       single asterisk (if it sn non-empty) as it is the type name returned\n       by the compiler and is platform dependent.\n    *  The line info reported in the first line of the \"message\"\n       attribute and CDATA section of <failure> elements is replaced with the\n       file's basename and a single asterisk for the line number.\n    *  The directory names in file paths are removed.\n    *  The stack traces are removed.\n    \"\"\"\n\n    if element.tagName in ('testsuites', 'testsuite', 'testcase'):\n      timestamp = element.getAttributeNode('timestamp')\n      timestamp.value = re.sub(r'^\\d{4}-\\d\\d-\\d\\dT\\d\\d:\\d\\d:\\d\\d$',\n                               '*', timestamp.value)\n    if element.tagName in ('testsuites', 'testsuite', 'testcase'):\n      time = element.getAttributeNode('time')\n      time.value = re.sub(r'^\\d+(\\.\\d+)?$', '*', time.value)\n      type_param = element.getAttributeNode('type_param')\n      if type_param and type_param.value:\n        type_param.value = '*'\n    
elif element.tagName == 'failure':\n      source_line_pat = r'^.*[/\\\\](.*:)\\d+\\n'\n      # Replaces the source line information with a normalized form.\n      message = element.getAttributeNode('message')\n      message.value = re.sub(source_line_pat, '\\\\1*\\n', message.value)\n      for child in element.childNodes:\n        if child.nodeType == Node.CDATA_SECTION_NODE:\n          # Replaces the source line information with a normalized form.\n          cdata = re.sub(source_line_pat, '\\\\1*\\n', child.nodeValue)\n          # Removes the actual stack trace.\n          child.nodeValue = re.sub(r'Stack trace:\\n(.|\\n)*',\n                                   'Stack trace:\\n*', cdata)\n    for child in element.childNodes:\n      if child.nodeType == Node.ELEMENT_NODE:\n        self.NormalizeXml(child)\n"
  },
  {
    "path": "test/googletest/test/production.cc",
    "content": "// Copyright 2006, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n//\n// This is part of the unit test for gtest_prod.h.\n\n#include \"production.h\"\n\nPrivateCode::PrivateCode() : x_(0) {}\n"
  },
  {
    "path": "test/googletest/test/production.h",
    "content": "// Copyright 2006, Google Inc.\n// All rights reserved.\n//\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions are\n// met:\n//\n//     * Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n//     * Redistributions in binary form must reproduce the above\n// copyright notice, this list of conditions and the following disclaimer\n// in the documentation and/or other materials provided with the\n// distribution.\n//     * Neither the name of Google Inc. nor the names of its\n// contributors may be used to endorse or promote products derived from\n// this software without specific prior written permission.\n//\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT\n// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n//\n// This is part of the unit test for gtest_prod.h.\n\n#ifndef GTEST_TEST_PRODUCTION_H_\n#define GTEST_TEST_PRODUCTION_H_\n\n#include \"gtest/gtest_prod.h\"\n\nclass PrivateCode {\n public:\n  // Declares a friend test that does not use a fixture.\n  FRIEND_TEST(PrivateCodeTest, CanAccessPrivateMembers);\n\n  // Declares a friend test that uses a fixture.\n  FRIEND_TEST(PrivateCodeFixtureTest, CanAccessPrivateMembers);\n\n  PrivateCode();\n\n  int x() const { return x_; }\n private:\n  void set_x(int an_x) { x_ = an_x; }\n  int x_;\n};\n\n#endif  // GTEST_TEST_PRODUCTION_H_\n"
  },
  {
    "path": "test/inifile_test.cpp",
    "content": "#include <iostream>\n#include <string>\n#include <limits.h>\n#include \"gtest/gtest.h\"\n#define private   public\n#define protected public\n\n#include \"inifile.h\"\n\nusing std::cout;\nusing std::endl;\n\nusing namespace inifile;\nnamespace {\nTEST(inifile, trim)\n{\n    string buf = \"   aaa    \";\n    IniFile::trim(buf);\n\n    EXPECT_EQ(buf, string(\"aaa\"));\n\n    buf =  \"   aaa\";\n    IniFile::trim(buf);\n    EXPECT_EQ(buf, string(\"aaa\"));\n\n    buf =  \"aaa    \";\n    IniFile::trim(buf);\n    EXPECT_EQ(buf, string(\"aaa\"));\n}\n\nTEST(inifile, trimleft)\n{\n    string p = \"   aaa    \";\n    IniFile::trimleft(p);\n\n    EXPECT_EQ(p, string(\"aaa    \"));\n\n    p =  \"   aaa\";\n    IniFile::trimleft(p);\n    EXPECT_EQ(p, string(\"aaa\"));\n\n    p = \"aaa    \";\n    IniFile::trimleft(p);\n    EXPECT_EQ(p, string(\"aaa    \"));\n}\n\nTEST(inifile, trimright)\n{\n    string p = \"   aaa    \";\n    IniFile::trimright(p);\n\n    EXPECT_EQ(p, string(\"   aaa\"));\n\n    p =  \"   aaa\";\n    IniFile::trimright(p);\n    EXPECT_EQ(p, string(\"   aaa\"));\n\n    p = \"aaa    \";\n    IniFile::trimright(p);\n    EXPECT_EQ(p, string(\"aaa\"));\n}\n\n////////////////////////////\n\nTEST(IniFile, pasre)\n{\n    IniFile section;\n    string s = \"DB=sys\";\n    string key, value;\n\n    EXPECT_EQ(section.parse(s, &key, &value), true);\n    EXPECT_EQ(key, \"DB\");\n    EXPECT_EQ(value, \"sys\");\n\n    s = \"DB\";\n    key = value = \"\";\n\n    EXPECT_EQ(section.parse(s, &key, &value), false);\n\n    s = \"DB=\";\n    key = value = \"\";\n\n    EXPECT_EQ(section.parse(s, &key, &value), true);\n    EXPECT_EQ(key, \"DB\");\n    EXPECT_EQ(value, \"\");\n\n    s = \"=sys\";\n    key = value = \"\";\n\n    EXPECT_EQ(section.parse(s, &key, &value), true);\n    EXPECT_EQ(key, \"\");\n    EXPECT_EQ(value, \"sys\");\n}\n\nTEST(IniFile, hasSection_and_getValue)\n{\n    // create a new ini file\n    char filepath[] = \"test.ini\";\n    FILE *fp = 
fopen(filepath, \"w+\");\n    char content[] = \" USER=root \\r\\n [COMMON] \\n DB=sys   \t\\nPASSWD=tt   \\n#commit   \\n #--------- \\n[DEFINE] \\nname=cxy\\nvalue=1\\nbooltest=True #测试\\n\";\n    fwrite(content, 1, strlen(content), fp);\n    fclose(fp);\n\n    // test\n    IniFile ini;\n    ini.Load(filepath);\n\n    EXPECT_EQ(ini.HasSection(string(\"\")), true);\n    EXPECT_EQ(ini.HasSection(\"COMMON\"), true);\n    EXPECT_EQ(ini.HasSection(\"DEFINE\"), true);\n    EXPECT_EQ(ini.HasSection(\"ss\"), false);\n\n    string value;\n    int intValue;\n    bool boolValue;\n    EXPECT_EQ(ini.GetStringValue(\"\", \"USER\", &value), 0);\n    EXPECT_EQ(value, string(\"root\"));\n    EXPECT_EQ(ini.GetStringValue(\"COMMON\", \"DB\", &value), 0);\n    EXPECT_EQ(value, string(\"sys\"));\n    EXPECT_EQ(ini.GetStringValue(\"COMMON\", \"PASSWD\", &value), 0);\n    EXPECT_EQ(value, string(\"tt\"));\n    EXPECT_EQ(ini.GetStringValue(\"DEFINE\", \"name\", &value), 0);\n    EXPECT_EQ(value, string(\"cxy\"));\n    EXPECT_EQ(ini.GetIntValue(\"DEFINE\", \"value\", &intValue), 0);\n    EXPECT_EQ(intValue, 1);\n    EXPECT_EQ(ini.GetBoolValue(\"DEFINE\", \"booltest\", &boolValue), 0);\n    EXPECT_EQ(boolValue, true);\n}\n\nTEST(IniFile, getValue)\n{\n    // write config\n    char filepath[] = \"getValueTest.ini\";\n    FILE *fp = fopen(filepath, \"w\");\n    char content[] = \" USER=root \\r\\n [COMMON] \\n DB=sys \\nPASSWD=tt \\nPASSWD=dd \\n#commit \\n ;--------- \\n[DEFINE] \\nname=cxy\\nvalue=1 #test\";\n    fwrite(content, sizeof(char), strlen(content), fp);\n    fclose(fp);\n\n    // read config\n    IniFile ini;\n    ini.Load(filepath);\n    string value;\n    vector<string> valueList;\n    ini.getValue(\"\", \"USER\", &value);\n    EXPECT_EQ(value, string(\"root\"));\n    ini.getValue(\"COMMON\", \"DB\", &value);\n    EXPECT_EQ(value, string(\"sys\"));\n    ini.GetValues(\"COMMON\", \"PASSWD\", &valueList);\n    EXPECT_EQ(valueList.size(), 2);\n    EXPECT_EQ(valueList[0], string(\"tt\"));\n    EXPECT_EQ(valueList[1], string(\"dd\"));\n}\n\nTEST(IniFile, reopen)\n{\n    // create a new ini file\n    char filepath[] = \"test.ini\";\n    FILE *fp = fopen(filepath, \"w\");\n    char content[] = \" USER=root \\r\\n [COMMON] \\n DB=sys   \t\\nPASSWD=tt   \\n#commit   \\n #--------- \\n[DEFINE] \\nname=cxy\\n\";\n    fwrite(content, sizeof(char), strlen(content), fp);\n    fclose(fp);\n\n    // test\n    IniFile ini;\n    ini.Load(filepath);\n\n    // reopen\n\n    fp = fopen(filepath, \"w\");\n    strcpy(content, \" USER=root2 \\r\\n [COMMON] \\n DB=sys2   \t\\n\\n#commit   \\n #--------- \\n\\n\\n\");\n    fwrite(content, sizeof(char), strlen(content), fp);\n    fclose(fp);\n\n    // test\n    ini.Load(filepath);\n\n    EXPECT_EQ(ini.HasSection(\"\"), true);\n    EXPECT_EQ(ini.HasSection(\"COMMON\"), true);\n    EXPECT_EQ(ini.HasSection(\"DEFINE\"), false);\n\n    string value;\n    EXPECT_EQ(ini.GetStringValue(\"\", \"USER\", &value), 0);\n    EXPECT_EQ(value, string(\"root2\"));\n    EXPECT_EQ(ini.GetStringValue(\"COMMON\", \"DB\", &value), 0);\n    EXPECT_EQ(value, string(\"sys2\"));\n    EXPECT_EQ(ini.GetStringValue(\"COMMON\", \"PASSWD\", &value), ERR_NOT_FOUND_KEY);\n    EXPECT_EQ(ini.GetStringValue(\"DEFINE\", \"name\", &value), ERR_NOT_FOUND_SECTION);\n}\n\nTEST(IniFile, SaveAs)\n{\n    // create a new ini file\n    char filepath[] = \"test.ini\";\n    FILE *fp = fopen(filepath, \"w\");\n    char content[] = \" USER=root \\r\\n [COMMON] \\n DB=sys   \t\\nPASSWD=tt   \\n#commit   \\n #--------- \\n[DEFINE] \\nname=cxy\\nvalue=1 #测试\";\n    fwrite(content, sizeof(char), strlen(content), fp);\n    fclose(fp);\n\n    IniFile ini;\n    ini.Load(filepath);\n\n    char saveas_path[] = \"test_save_as.ini\";\n\n    ini.SaveAs(saveas_path);\n\n    fp = fopen(saveas_path, \"r\");\n    char buf[200];\n    memset(buf, 0, 200 * sizeof(char));\n\n    fread(buf, sizeof(char), 200, fp);\n    fclose(fp);\n    EXPECT_EQ(buf, string(\"USER=root\\n[COMMON]\\nDB=sys\\nPASSWD=tt\\n#commit\\n#---------\\n[DEFINE]\\nname=cxy\\nvalue=1 #测试\\n\"));\n}\n\nTEST(IniFile, setValue)\n{\n    IniFile ini;\n\n    ini.setValue(\"COMMON\", \"DB\", \"sys\", \"数据库\");\n    ini.setValue(\"COMMON\", \"PASSWD\", \"root\", \"数据库密码\");\n    ini.setValue(\"\", \"NAME\", \"cxy\", \"\");\n\n    char filepath[] = \"test_set.ini\";\n    ini.SaveAs(filepath);\n\n    FILE *fp = fopen(filepath, \"r\");\n    char buf[200];\n    memset(buf, 0, 200 * sizeof(char));\n\n    fread(buf, sizeof(char), 200, fp);\n    fclose(fp);\n    EXPECT_EQ(buf, string(\"NAME=cxy\\n[COMMON]\\n#数据库\\nDB=sys\\n#数据库密码\\nPASSWD=root\\n\"));\n}\nTEST(IniFile, DeleteSection)\n{\n    // create a new ini file\n    char filepath[] = \"test.ini\";\n    FILE *fp = fopen(filepath, \"w\");\n    char content[] = \" USER=root \\r\\n [COMMON] \\n DB=sys   \t\\nPASSWD=tt   \\n#commit   \\n #--------- \\n[DEFINE] \\nname=cxy\\n\";\n    fwrite(content, sizeof(char), strlen(content), fp);\n    fclose(fp);\n\n    IniFile ini;\n    ini.Load(filepath);\n\n    ini.DeleteSection(\"COMMON\");\n\n    char saveas_path[] = \"test_save_deleteS.ini\";\n\n    ini.SaveAs(saveas_path);\n\n    fp = fopen(saveas_path, \"r\");\n    char buf[200];\n    memset(buf, 0, 200 * sizeof(char));\n\n    fread(buf, sizeof(char), 200, fp);\n    fclose(fp);\n    EXPECT_EQ(buf, string(\"USER=root\\n#commit\\n#---------\\n[DEFINE]\\nname=cxy\\n\"));\n}\n\nTEST(IniFile, DeleteKey)\n{\n    // create a new ini file\n    char filepath[] = \"test.ini\";\n    FILE *fp = fopen(filepath, \"w\");\n    char content[] = \" USER=root \\r\\n [COMMON] \\n DB=sys   \t\\nPASSWD=tt   \\n#commit   \\n #--------- \\n[DEFINE] \\nname=cxy\\n\";\n    fwrite(content, sizeof(char), strlen(content), fp);\n    fclose(fp);\n\n    IniFile ini;\n    ini.Load(filepath);\n\n    ini.DeleteKey(\"COMMON\", \"DB\");\n\n    char saveas_path[] = \"test_save_deleteK.ini\";\n\n    ini.SaveAs(saveas_path);\n\n    fp = fopen(saveas_path, \"r\");\n    char buf[200];\n    memset(buf, 0, 200 * sizeof(char));\n\n    fread(buf, sizeof(char), 200, fp);\n    fclose(fp);\n    EXPECT_EQ(buf, string(\"USER=root\\n[COMMON]\\nPASSWD=tt\\n#commit\\n#---------\\n[DEFINE]\\nname=cxy\\n\"));\n}\n\n}\n"
  }
]