[
  {
    "path": ".gitignore",
    "content": "/docker/clipboard.txt\n/projects/LISA/.ipynb_checkpoints/*\n!/projects/LISA/.SOAP.ipynb.icloud\n*/.DS_Store \n.DS_Store\n# output dir\noutput\ninstant_test_output\ninference_test_output\n\n\n\n*.txt\n\n# compilation and distribution\n__pycache__\n_ext\n*.pyc\n*.so\n*egg-info/\nbuild/\ndist/\n\n# pytorch/python/numpy formats\n*.pth\n*.pkl\n*.npy\n\n# ipython/jupyter notebooks\n*.ipynb\n**/.ipynb_checkpoints/\n\n# Editor temporaries\n*.swn\n*.swo\n*.swp\n*~\n\n# Pycharm editor settings\n.idea\n.vscode\n.python-version\n\n# project dirs\n/datasets/coco\n/datasets/lvis\n/datasets/cityscapes\n/models\n/debug\n/weights\neval.sh\n\n*.DS_Store\n"
  },
  {
    "path": "LICENSE",
    "content": "Apache License\nVersion 2.0, January 2004\nhttp://www.apache.org/licenses/\n\nTERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n1. Definitions.\n\n\"License\" shall mean the terms and conditions for use, reproduction,\nand distribution as defined by Sections 1 through 9 of this document.\n\n\"Licensor\" shall mean the copyright owner or entity authorized by\nthe copyright owner that is granting the License.\n\n\"Legal Entity\" shall mean the union of the acting entity and all\nother entities that control, are controlled by, or are under common\ncontrol with that entity. For the purposes of this definition,\n\"control\" means (i) the power, direct or indirect, to cause the\ndirection or management of such entity, whether by contract or\notherwise, or (ii) ownership of fifty percent (50%) or more of the\noutstanding shares, or (iii) beneficial ownership of such entity.\n\n\"You\" (or \"Your\") shall mean an individual or Legal Entity\nexercising permissions granted by this License.\n\n\"Source\" form shall mean the preferred form for making modifications,\nincluding but not limited to software source code, documentation\nsource, and configuration files.\n\n\"Object\" form shall mean any form resulting from mechanical\ntransformation or translation of a Source form, including but\nnot limited to compiled object code, generated documentation,\nand conversions to other media types.\n\n\"Work\" shall mean the work of authorship, whether in Source or\nObject form, made available under the License, as indicated by a\ncopyright notice that is included in or attached to the work\n(an example is provided in the Appendix below).\n\n\"Derivative Works\" shall mean any work, whether in Source or Object\nform, that is based on (or derived from) the Work and for which the\neditorial revisions, annotations, elaborations, or other modifications\nrepresent, as a whole, an original work of authorship. 
For the purposes\nof this License, Derivative Works shall not include works that remain\nseparable from, or merely link (or bind by name) to the interfaces of,\nthe Work and Derivative Works thereof.\n\n\"Contribution\" shall mean any work of authorship, including\nthe original version of the Work and any modifications or additions\nto that Work or Derivative Works thereof, that is intentionally\nsubmitted to Licensor for inclusion in the Work by the copyright owner\nor by an individual or Legal Entity authorized to submit on behalf of\nthe copyright owner. For the purposes of this definition, \"submitted\"\nmeans any form of electronic, verbal, or written communication sent\nto the Licensor or its representatives, including but not limited to\ncommunication on electronic mailing lists, source code control systems,\nand issue tracking systems that are managed by, or on behalf of, the\nLicensor for the purpose of discussing and improving the Work, but\nexcluding communication that is conspicuously marked or otherwise\ndesignated in writing by the copyright owner as \"Not a Contribution.\"\n\n\"Contributor\" shall mean Licensor and any individual or Legal Entity\non behalf of whom a Contribution has been received by Licensor and\nsubsequently incorporated within the Work.\n\n2. Grant of Copyright License. Subject to the terms and conditions of\nthis License, each Contributor hereby grants to You a perpetual,\nworldwide, non-exclusive, no-charge, royalty-free, irrevocable\ncopyright license to reproduce, prepare Derivative Works of,\npublicly display, publicly perform, sublicense, and distribute the\nWork and such Derivative Works in Source or Object form.\n\n3. Grant of Patent License. 
Subject to the terms and conditions of\nthis License, each Contributor hereby grants to You a perpetual,\nworldwide, non-exclusive, no-charge, royalty-free, irrevocable\n(except as stated in this section) patent license to make, have made,\nuse, offer to sell, sell, import, and otherwise transfer the Work,\nwhere such license applies only to those patent claims licensable\nby such Contributor that are necessarily infringed by their\nContribution(s) alone or by combination of their Contribution(s)\nwith the Work to which such Contribution(s) was submitted. If You\ninstitute patent litigation against any entity (including a\ncross-claim or counterclaim in a lawsuit) alleging that the Work\nor a Contribution incorporated within the Work constitutes direct\nor contributory patent infringement, then any patent licenses\ngranted to You under this License for that Work shall terminate\nas of the date such litigation is filed.\n\n4. Redistribution. You may reproduce and distribute copies of the\nWork or Derivative Works thereof in any medium, with or without\nmodifications, and in Source or Object form, provided that You\nmeet the following conditions:\n\n(a) You must give any other recipients of the Work or\nDerivative Works a copy of this License; and\n\n(b) You must cause any modified files to carry prominent notices\nstating that You changed the files; and\n\n(c) You must retain, in the Source form of any Derivative Works\nthat You distribute, all copyright, patent, trademark, and\nattribution notices from the Source form of the Work,\nexcluding those notices that do not pertain to any part of\nthe Derivative Works; and\n\n(d) If the Work includes a \"NOTICE\" text file as part of its\ndistribution, then any Derivative Works that You distribute must\ninclude a readable copy of the attribution notices contained\nwithin such NOTICE file, excluding those notices that do not\npertain to any part of the Derivative Works, in at least one\nof the following places: within a 
NOTICE text file distributed\nas part of the Derivative Works; within the Source form or\ndocumentation, if provided along with the Derivative Works; or,\nwithin a display generated by the Derivative Works, if and\nwherever such third-party notices normally appear. The contents\nof the NOTICE file are for informational purposes only and\ndo not modify the License. You may add Your own attribution\nnotices within Derivative Works that You distribute, alongside\nor as an addendum to the NOTICE text from the Work, provided\nthat such additional attribution notices cannot be construed\nas modifying the License.\n\nYou may add Your own copyright statement to Your modifications and\nmay provide additional or different license terms and conditions\nfor use, reproduction, or distribution of Your modifications, or\nfor any such Derivative Works as a whole, provided Your use,\nreproduction, and distribution of the Work otherwise complies with\nthe conditions stated in this License.\n\n5. Submission of Contributions. Unless You explicitly state otherwise,\nany Contribution intentionally submitted for inclusion in the Work\nby You to the Licensor shall be under the terms and conditions of\nthis License, without any additional terms or conditions.\nNotwithstanding the above, nothing herein shall supersede or modify\nthe terms of any separate license agreement you may have executed\nwith Licensor regarding such Contributions.\n\n6. Trademarks. This License does not grant permission to use the trade\nnames, trademarks, service marks, or product names of the Licensor,\nexcept as required for reasonable and customary use in describing the\norigin of the Work and reproducing the content of the NOTICE file.\n\n7. Disclaimer of Warranty. 
Unless required by applicable law or\nagreed to in writing, Licensor provides the Work (and each\nContributor provides its Contributions) on an \"AS IS\" BASIS,\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\nimplied, including, without limitation, any warranties or conditions\nof TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\nPARTICULAR PURPOSE. You are solely responsible for determining the\nappropriateness of using or redistributing the Work and assume any\nrisks associated with Your exercise of permissions under this License.\n\n8. Limitation of Liability. In no event and under no legal theory,\nwhether in tort (including negligence), contract, or otherwise,\nunless required by applicable law (such as deliberate and grossly\nnegligent acts) or agreed to in writing, shall any Contributor be\nliable to You for damages, including any direct, indirect, special,\nincidental, or consequential damages of any character arising as a\nresult of this License or out of the use or inability to use the\nWork (including but not limited to damages for loss of goodwill,\nwork stoppage, computer failure or malfunction, or any and all\nother commercial damages or losses), even if such Contributor\nhas been advised of the possibility of such damages.\n\n9. Accepting Warranty or Additional Liability. While redistributing\nthe Work or Derivative Works thereof, You may choose to offer,\nand charge a fee for, acceptance of support, warranty, indemnity,\nor other liability obligations and/or rights consistent with this\nLicense. 
However, in accepting such obligations, You may act only\non Your own behalf and on Your sole responsibility, not on behalf\nof any other Contributor, and only if You agree to indemnify,\ndefend, and hold each Contributor harmless for any liability\nincurred by, or claims asserted against, such Contributor by reason\nof your accepting any such warranty or additional liability.\n\nEND OF TERMS AND CONDITIONS\n\nAPPENDIX: How to apply the Apache License to your work.\n\nTo apply the Apache License to your work, attach the following\nboilerplate notice, with the fields enclosed by brackets \"[]\"\nreplaced with your own identifying information. (Don't include\nthe brackets!)  The text should be enclosed in the appropriate\ncomment syntax for the file format. We also recommend that a\nfile or class name and description of purpose be included on the\nsame \"printed page\" as the copyright notice for easier\nidentification within third-party archives.\n\nCopyright 2019, Facebook, Inc\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\nyou may not use this file except in compliance with the License.\nYou may obtain a copy of the License at\n\nhttp://www.apache.org/licenses/LICENSE-2.0\n\nUnless required by applicable law or agreed to in writing, software\ndistributed under the License is distributed on an \"AS IS\" BASIS,\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\nSee the License for the specific language governing permissions and\nlimitations under the License.\n"
  },
  {
    "path": "PythonAPI/Makefile",
    "content": "all:\n    # install pycocotools locally\n\tpython setup.py build_ext --inplace\n\trm -rf build\n\ninstall:\n\t# install pycocotools to the Python site-packages\n\tpython setup.py build_ext install\n\trm -rf build"
  },
  {
    "path": "PythonAPI/common/gason.cpp",
    "content": "// https://github.com/vivkin/gason - pulled January 10, 2016\n#include \"gason.h\"\n#include <stdlib.h>\n\n#define JSON_ZONE_SIZE 4096\n#define JSON_STACK_SIZE 32\n\nconst char *jsonStrError(int err) {\n    switch (err) {\n#define XX(no, str) \\\n    case JSON_##no: \\\n        return str;\n        JSON_ERRNO_MAP(XX)\n#undef XX\n    default:\n        return \"unknown\";\n    }\n}\n\nvoid *JsonAllocator::allocate(size_t size) {\n    size = (size + 7) & ~7;\n\n    if (head && head->used + size <= JSON_ZONE_SIZE) {\n        char *p = (char *)head + head->used;\n        head->used += size;\n        return p;\n    }\n\n    size_t allocSize = sizeof(Zone) + size;\n    Zone *zone = (Zone *)malloc(allocSize <= JSON_ZONE_SIZE ? JSON_ZONE_SIZE : allocSize);\n    if (zone == nullptr)\n        return nullptr;\n    zone->used = allocSize;\n    if (allocSize <= JSON_ZONE_SIZE || head == nullptr) {\n        zone->next = head;\n        head = zone;\n    } else {\n        zone->next = head->next;\n        head->next = zone;\n    }\n    return (char *)zone + sizeof(Zone);\n}\n\nvoid JsonAllocator::deallocate() {\n    while (head) {\n        Zone *next = head->next;\n        free(head);\n        head = next;\n    }\n}\n\nstatic inline bool isspace(char c) {\n    return c == ' ' || (c >= '\\t' && c <= '\\r');\n}\n\nstatic inline bool isdelim(char c) {\n    return c == ',' || c == ':' || c == ']' || c == '}' || isspace(c) || !c;\n}\n\nstatic inline bool isdigit(char c) {\n    return c >= '0' && c <= '9';\n}\n\nstatic inline bool isxdigit(char c) {\n    return (c >= '0' && c <= '9') || ((c & ~' ') >= 'A' && (c & ~' ') <= 'F');\n}\n\nstatic inline int char2int(char c) {\n    if (c <= '9')\n        return c - '0';\n    return (c & ~' ') - 'A' + 10;\n}\n\nstatic double string2double(char *s, char **endptr) {\n    char ch = *s;\n    if (ch == '-')\n        ++s;\n\n    double result = 0;\n    while (isdigit(*s))\n        result = (result * 10) + (*s++ - '0');\n\n    if (*s == 
'.') {\n        ++s;\n\n        double fraction = 1;\n        while (isdigit(*s)) {\n            fraction *= 0.1;\n            result += (*s++ - '0') * fraction;\n        }\n    }\n\n    if (*s == 'e' || *s == 'E') {\n        ++s;\n\n        double base = 10;\n        if (*s == '+')\n            ++s;\n        else if (*s == '-') {\n            ++s;\n            base = 0.1;\n        }\n\n        unsigned int exponent = 0;\n        while (isdigit(*s))\n            exponent = (exponent * 10) + (*s++ - '0');\n\n        double power = 1;\n        for (; exponent; exponent >>= 1, base *= base)\n            if (exponent & 1)\n                power *= base;\n\n        result *= power;\n    }\n\n    *endptr = s;\n    return ch == '-' ? -result : result;\n}\n\nstatic inline JsonNode *insertAfter(JsonNode *tail, JsonNode *node) {\n    if (!tail)\n        return node->next = node;\n    node->next = tail->next;\n    tail->next = node;\n    return node;\n}\n\nstatic inline JsonValue listToValue(JsonTag tag, JsonNode *tail) {\n    if (tail) {\n        auto head = tail->next;\n        tail->next = nullptr;\n        return JsonValue(tag, head);\n    }\n    return JsonValue(tag, nullptr);\n}\n\nint jsonParse(char *s, char **endptr, JsonValue *value, JsonAllocator &allocator) {\n    JsonNode *tails[JSON_STACK_SIZE];\n    JsonTag tags[JSON_STACK_SIZE];\n    char *keys[JSON_STACK_SIZE];\n    JsonValue o;\n    int pos = -1;\n    bool separator = true;\n    JsonNode *node;\n    *endptr = s;\n\n    while (*s) {\n        while (isspace(*s)) {\n            ++s;\n            if (!*s) break;\n        }\n        *endptr = s++;\n        switch (**endptr) {\n        case '-':\n            if (!isdigit(*s) && *s != '.') {\n                *endptr = s;\n                return JSON_BAD_NUMBER;\n            }\n        case '0':\n        case '1':\n        case '2':\n        case '3':\n        case '4':\n        case '5':\n        case '6':\n        case '7':\n        case '8':\n        case '9':\n   
         o = JsonValue(string2double(*endptr, &s));\n            if (!isdelim(*s)) {\n                *endptr = s;\n                return JSON_BAD_NUMBER;\n            }\n            break;\n        case '\"':\n            o = JsonValue(JSON_STRING, s);\n            for (char *it = s; *s; ++it, ++s) {\n                int c = *it = *s;\n                if (c == '\\\\') {\n                    c = *++s;\n                    switch (c) {\n                    case '\\\\':\n                    case '\"':\n                    case '/':\n                        *it = c;\n                        break;\n                    case 'b':\n                        *it = '\\b';\n                        break;\n                    case 'f':\n                        *it = '\\f';\n                        break;\n                    case 'n':\n                        *it = '\\n';\n                        break;\n                    case 'r':\n                        *it = '\\r';\n                        break;\n                    case 't':\n                        *it = '\\t';\n                        break;\n                    case 'u':\n                        c = 0;\n                        for (int i = 0; i < 4; ++i) {\n                            if (isxdigit(*++s)) {\n                                c = c * 16 + char2int(*s);\n                            } else {\n                                *endptr = s;\n                                return JSON_BAD_STRING;\n                            }\n                        }\n                        if (c < 0x80) {\n                            *it = c;\n                        } else if (c < 0x800) {\n                            *it++ = 0xC0 | (c >> 6);\n                            *it = 0x80 | (c & 0x3F);\n                        } else {\n                            *it++ = 0xE0 | (c >> 12);\n                            *it++ = 0x80 | ((c >> 6) & 0x3F);\n                            *it = 0x80 | (c & 0x3F);\n                     
   }\n                        break;\n                    default:\n                        *endptr = s;\n                        return JSON_BAD_STRING;\n                    }\n                } else if ((unsigned int)c < ' ' || c == '\\x7F') {\n                    *endptr = s;\n                    return JSON_BAD_STRING;\n                } else if (c == '\"') {\n                    *it = 0;\n                    ++s;\n                    break;\n                }\n            }\n            if (!isdelim(*s)) {\n                *endptr = s;\n                return JSON_BAD_STRING;\n            }\n            break;\n        case 't':\n            if (!(s[0] == 'r' && s[1] == 'u' && s[2] == 'e' && isdelim(s[3])))\n                return JSON_BAD_IDENTIFIER;\n            o = JsonValue(JSON_TRUE);\n            s += 3;\n            break;\n        case 'f':\n            if (!(s[0] == 'a' && s[1] == 'l' && s[2] == 's' && s[3] == 'e' && isdelim(s[4])))\n                return JSON_BAD_IDENTIFIER;\n            o = JsonValue(JSON_FALSE);\n            s += 4;\n            break;\n        case 'n':\n            if (!(s[0] == 'u' && s[1] == 'l' && s[2] == 'l' && isdelim(s[3])))\n                return JSON_BAD_IDENTIFIER;\n            o = JsonValue(JSON_NULL);\n            s += 3;\n            break;\n        case ']':\n            if (pos == -1)\n                return JSON_STACK_UNDERFLOW;\n            if (tags[pos] != JSON_ARRAY)\n                return JSON_MISMATCH_BRACKET;\n            o = listToValue(JSON_ARRAY, tails[pos--]);\n            break;\n        case '}':\n            if (pos == -1)\n                return JSON_STACK_UNDERFLOW;\n            if (tags[pos] != JSON_OBJECT)\n                return JSON_MISMATCH_BRACKET;\n            if (keys[pos] != nullptr)\n                return JSON_UNEXPECTED_CHARACTER;\n            o = listToValue(JSON_OBJECT, tails[pos--]);\n            break;\n        case '[':\n            if (++pos == JSON_STACK_SIZE)\n                
return JSON_STACK_OVERFLOW;\n            tails[pos] = nullptr;\n            tags[pos] = JSON_ARRAY;\n            keys[pos] = nullptr;\n            separator = true;\n            continue;\n        case '{':\n            if (++pos == JSON_STACK_SIZE)\n                return JSON_STACK_OVERFLOW;\n            tails[pos] = nullptr;\n            tags[pos] = JSON_OBJECT;\n            keys[pos] = nullptr;\n            separator = true;\n            continue;\n        case ':':\n            if (separator || keys[pos] == nullptr)\n                return JSON_UNEXPECTED_CHARACTER;\n            separator = true;\n            continue;\n        case ',':\n            if (separator || keys[pos] != nullptr)\n                return JSON_UNEXPECTED_CHARACTER;\n            separator = true;\n            continue;\n        case '\\0':\n            continue;\n        default:\n            return JSON_UNEXPECTED_CHARACTER;\n        }\n\n        separator = false;\n\n        if (pos == -1) {\n            *endptr = s;\n            *value = o;\n            return JSON_OK;\n        }\n\n        if (tags[pos] == JSON_OBJECT) {\n            if (!keys[pos]) {\n                if (o.getTag() != JSON_STRING)\n                    return JSON_UNQUOTED_KEY;\n                keys[pos] = o.toString();\n                continue;\n            }\n            if ((node = (JsonNode *) allocator.allocate(sizeof(JsonNode))) == nullptr)\n                return JSON_ALLOCATION_FAILURE;\n            tails[pos] = insertAfter(tails[pos], node);\n            tails[pos]->key = keys[pos];\n            keys[pos] = nullptr;\n        } else {\n            if ((node = (JsonNode *) allocator.allocate(sizeof(JsonNode) - sizeof(char *))) == nullptr)\n                return JSON_ALLOCATION_FAILURE;\n            tails[pos] = insertAfter(tails[pos], node);\n        }\n        tails[pos]->value = o;\n    }\n    return JSON_BREAKING_BAD;\n}\n"
  },
  {
    "path": "PythonAPI/common/gason.h",
    "content": "// https://github.com/vivkin/gason - pulled January 10, 2016\n#pragma once\n\n#include <stdint.h>\n#include <stddef.h>\n#include <assert.h>\n\nenum JsonTag {\n    JSON_NUMBER = 0,\n    JSON_STRING,\n    JSON_ARRAY,\n    JSON_OBJECT,\n    JSON_TRUE,\n    JSON_FALSE,\n    JSON_NULL = 0xF\n};\n\nstruct JsonNode;\n\n#define JSON_VALUE_PAYLOAD_MASK 0x00007FFFFFFFFFFFULL\n#define JSON_VALUE_NAN_MASK 0x7FF8000000000000ULL\n#define JSON_VALUE_TAG_MASK 0xF\n#define JSON_VALUE_TAG_SHIFT 47\n\nunion JsonValue {\n    uint64_t ival;\n    double fval;\n\n    JsonValue(double x)\n        : fval(x) {\n    }\n    JsonValue(JsonTag tag = JSON_NULL, void *payload = nullptr) {\n        assert((uintptr_t)payload <= JSON_VALUE_PAYLOAD_MASK);\n        ival = JSON_VALUE_NAN_MASK | ((uint64_t)tag << JSON_VALUE_TAG_SHIFT) | (uintptr_t)payload;\n    }\n    bool isDouble() const {\n        return (int64_t)ival <= (int64_t)JSON_VALUE_NAN_MASK;\n    }\n    JsonTag getTag() const {\n        return isDouble() ? 
JSON_NUMBER : JsonTag((ival >> JSON_VALUE_TAG_SHIFT) & JSON_VALUE_TAG_MASK);\n    }\n    uint64_t getPayload() const {\n        assert(!isDouble());\n        return ival & JSON_VALUE_PAYLOAD_MASK;\n    }\n    double toNumber() const {\n        assert(getTag() == JSON_NUMBER);\n        return fval;\n    }\n    char *toString() const {\n        assert(getTag() == JSON_STRING);\n        return (char *)getPayload();\n    }\n    JsonNode *toNode() const {\n        assert(getTag() == JSON_ARRAY || getTag() == JSON_OBJECT);\n        return (JsonNode *)getPayload();\n    }\n};\n\nstruct JsonNode {\n    JsonValue value;\n    JsonNode *next;\n    char *key;\n};\n\nstruct JsonIterator {\n    JsonNode *p;\n\n    void operator++() {\n        p = p->next;\n    }\n    bool operator!=(const JsonIterator &x) const {\n        return p != x.p;\n    }\n    JsonNode *operator*() const {\n        return p;\n    }\n    JsonNode *operator->() const {\n        return p;\n    }\n};\n\ninline JsonIterator begin(JsonValue o) {\n    return JsonIterator{o.toNode()};\n}\ninline JsonIterator end(JsonValue) {\n    return JsonIterator{nullptr};\n}\n\n#define JSON_ERRNO_MAP(XX)                           \\\n    XX(OK, \"ok\")                                     \\\n    XX(BAD_NUMBER, \"bad number\")                     \\\n    XX(BAD_STRING, \"bad string\")                     \\\n    XX(BAD_IDENTIFIER, \"bad identifier\")             \\\n    XX(STACK_OVERFLOW, \"stack overflow\")             \\\n    XX(STACK_UNDERFLOW, \"stack underflow\")           \\\n    XX(MISMATCH_BRACKET, \"mismatch bracket\")         \\\n    XX(UNEXPECTED_CHARACTER, \"unexpected character\") \\\n    XX(UNQUOTED_KEY, \"unquoted key\")                 \\\n    XX(BREAKING_BAD, \"breaking bad\")                 \\\n    XX(ALLOCATION_FAILURE, \"allocation failure\")\n\nenum JsonErrno {\n#define XX(no, str) JSON_##no,\n    JSON_ERRNO_MAP(XX)\n#undef XX\n};\n\nconst char *jsonStrError(int err);\n\nclass JsonAllocator {\n    struct 
Zone {\n        Zone *next;\n        size_t used;\n    } *head = nullptr;\n\npublic:\n    JsonAllocator() = default;\n    JsonAllocator(const JsonAllocator &) = delete;\n    JsonAllocator &operator=(const JsonAllocator &) = delete;\n    JsonAllocator(JsonAllocator &&x) : head(x.head) {\n        x.head = nullptr;\n    }\n    JsonAllocator &operator=(JsonAllocator &&x) {\n        head = x.head;\n        x.head = nullptr;\n        return *this;\n    }\n    ~JsonAllocator() {\n        deallocate();\n    }\n    void *allocate(size_t size);\n    void deallocate();\n};\n\nint jsonParse(char *str, char **endptr, JsonValue *value, JsonAllocator &allocator);\n"
  },
  {
    "path": "PythonAPI/common/maskApi.c",
    "content": "/**************************************************************************\n* Microsoft COCO Toolbox.      version 2.0\n* Data, paper, and tutorials available at:  http://mscoco.org/\n* Code written by Piotr Dollar and Tsung-Yi Lin, 2015.\n* Licensed under the Simplified BSD License [see coco/license.txt]\n**************************************************************************/\n#include \"maskApi.h\"\n#include <math.h>\n#include <stdlib.h>\n\nuint umin( uint a, uint b ) { return (a<b) ? a : b; }\nuint umax( uint a, uint b ) { return (a>b) ? a : b; }\n\nvoid rleInit( RLE *R, siz h, siz w, siz m, uint *cnts ) {\n  R->h=h; R->w=w; R->m=m; R->cnts=(m==0)?0:malloc(sizeof(uint)*m);\n  siz j; if(cnts) for(j=0; j<m; j++) R->cnts[j]=cnts[j];\n}\n\nvoid rleFree( RLE *R ) {\n  free(R->cnts); R->cnts=0;\n}\n\nvoid rlesInit( RLE **R, siz n ) {\n  siz i; *R = (RLE*) malloc(sizeof(RLE)*n);\n  for(i=0; i<n; i++) rleInit((*R)+i,0,0,0,0);\n}\n\nvoid rlesFree( RLE **R, siz n ) {\n  siz i; for(i=0; i<n; i++) rleFree((*R)+i); free(*R); *R=0;\n}\n\nvoid rleEncode( RLE *R, const byte *M, siz h, siz w, siz n ) {\n  siz i, j, k, a=w*h; uint c, *cnts; byte p;\n  cnts = malloc(sizeof(uint)*(a+1));\n  for(i=0; i<n; i++) {\n    const byte *T=M+a*i; k=0; p=0; c=0;\n    for(j=0; j<a; j++) { if(T[j]!=p) { cnts[k++]=c; c=0; p=T[j]; } c++; }\n    cnts[k++]=c; rleInit(R+i,h,w,k,cnts);\n  }\n  free(cnts);\n}\n\nvoid rleDecode( const RLE *R, byte *M, siz n ) {\n  siz i, j, k; for( i=0; i<n; i++ ) {\n    byte v=0; for( j=0; j<R[i].m; j++ ) {\n      for( k=0; k<R[i].cnts[j]; k++ ) *(M++)=v; v=!v; }}\n}\n\nvoid rleMerge( const RLE *R, RLE *M, siz n, int intersect ) {\n  uint *cnts, c, ca, cb, cc, ct; int v, va, vb, vp;\n  siz i, a, b, h=R[0].h, w=R[0].w, m=R[0].m; RLE A, B;\n  if(n==0) { rleInit(M,0,0,0,0); return; }\n  if(n==1) { rleInit(M,h,w,m,R[0].cnts); return; }\n  cnts = malloc(sizeof(uint)*(h*w+1));\n  for( a=0; a<m; a++ ) cnts[a]=R[0].cnts[a];\n  for( i=1; i<n; i++ ) {\n    
B=R[i]; if(B.h!=h||B.w!=w) { h=w=m=0; break; }\n    rleInit(&A,h,w,m,cnts); ca=A.cnts[0]; cb=B.cnts[0];\n    v=va=vb=0; m=0; a=b=1; cc=0; ct=1;\n    while( ct>0 ) {\n      c=umin(ca,cb); cc+=c; ct=0;\n      ca-=c; if(!ca && a<A.m) { ca=A.cnts[a++]; va=!va; } ct+=ca;\n      cb-=c; if(!cb && b<B.m) { cb=B.cnts[b++]; vb=!vb; } ct+=cb;\n      vp=v; if(intersect) v=va&&vb; else v=va||vb;\n      if( v!=vp||ct==0 ) { cnts[m++]=cc; cc=0; }\n    }\n    rleFree(&A);\n  }\n  rleInit(M,h,w,m,cnts); free(cnts);\n}\n\nvoid rleArea( const RLE *R, siz n, uint *a ) {\n  siz i, j; for( i=0; i<n; i++ ) {\n    a[i]=0; for( j=1; j<R[i].m; j+=2 ) a[i]+=R[i].cnts[j]; }\n}\n\nvoid rleIou( RLE *dt, RLE *gt, siz m, siz n, byte *iscrowd, double *o ) {\n  siz g, d; BB db, gb; int crowd;\n  db=malloc(sizeof(double)*m*4); rleToBbox(dt,db,m);\n  gb=malloc(sizeof(double)*n*4); rleToBbox(gt,gb,n);\n  bbIou(db,gb,m,n,iscrowd,o); free(db); free(gb);\n  for( g=0; g<n; g++ ) for( d=0; d<m; d++ ) if(o[g*m+d]>0) {\n    crowd=iscrowd!=NULL && iscrowd[g];\n    if(dt[d].h!=gt[g].h || dt[d].w!=gt[g].w) { o[g*m+d]=-1; continue; }\n    siz ka, kb, a, b; uint c, ca, cb, ct, i, u; int va, vb;\n    ca=dt[d].cnts[0]; ka=dt[d].m; va=vb=0;\n    cb=gt[g].cnts[0]; kb=gt[g].m; a=b=1; i=u=0; ct=1;\n    while( ct>0 ) {\n      c=umin(ca,cb); if(va||vb) { u+=c; if(va&&vb) i+=c; } ct=0;\n      ca-=c; if(!ca && a<ka) { ca=dt[d].cnts[a++]; va=!va; } ct+=ca;\n      cb-=c; if(!cb && b<kb) { cb=gt[g].cnts[b++]; vb=!vb; } ct+=cb;\n    }\n    if(i==0) u=1; else if(crowd) rleArea(dt+d,1,&u);\n    o[g*m+d] = (double)i/(double)u;\n  }\n}\n\nvoid rleNms( RLE *dt, siz n, uint *keep, double thr ) {\n  siz i, j; double u;\n  for( i=0; i<n; i++ ) keep[i]=1;\n  for( i=0; i<n; i++ ) if(keep[i]) {\n    for( j=i+1; j<n; j++ ) if(keep[j]) {\n      rleIou(dt+i,dt+j,1,1,0,&u);\n      if(u>thr) keep[j]=0;\n    }\n  }\n}\n\nvoid bbIou( BB dt, BB gt, siz m, siz n, byte *iscrowd, double *o ) {\n  double h, w, i, u, ga, da; siz g, d; int crowd;\n  
for( g=0; g<n; g++ ) {\n    BB G=gt+g*4; ga=G[2]*G[3]; crowd=iscrowd!=NULL && iscrowd[g];\n    for( d=0; d<m; d++ ) {\n      BB D=dt+d*4; da=D[2]*D[3]; o[g*m+d]=0;\n      w=fmin(D[2]+D[0],G[2]+G[0])-fmax(D[0],G[0]); if(w<=0) continue;\n      h=fmin(D[3]+D[1],G[3]+G[1])-fmax(D[1],G[1]); if(h<=0) continue;\n      i=w*h; u = crowd ? da : da+ga-i; o[g*m+d]=i/u;\n    }\n  }\n}\n\nvoid bbNms( BB dt, siz n, uint *keep, double thr ) {\n  siz i, j; double u;\n  for( i=0; i<n; i++ ) keep[i]=1;\n  for( i=0; i<n; i++ ) if(keep[i]) {\n    for( j=i+1; j<n; j++ ) if(keep[j]) {\n      bbIou(dt+i*4,dt+j*4,1,1,0,&u);\n      if(u>thr) keep[j]=0;\n    }\n  }\n}\n\nvoid rleToBbox( const RLE *R, BB bb, siz n ) {\n  siz i; for( i=0; i<n; i++ ) {\n    uint h, w, x, y, xs, ys, xe, ye, xp, cc, t; siz j, m;\n    h=(uint)R[i].h; w=(uint)R[i].w; m=R[i].m;\n    m=((siz)(m/2))*2; xs=w; ys=h; xe=ye=0; cc=0;\n    if(m==0) { bb[4*i+0]=bb[4*i+1]=bb[4*i+2]=bb[4*i+3]=0; continue; }\n    for( j=0; j<m; j++ ) {\n      cc+=R[i].cnts[j]; t=cc-j%2; y=t%h; x=(t-y)/h;\n      if(j%2==0) xp=x; else if(xp<x) { ys=0; ye=h-1; }\n      xs=umin(xs,x); xe=umax(xe,x); ys=umin(ys,y); ye=umax(ye,y);\n    }\n    bb[4*i+0]=xs; bb[4*i+2]=xe-xs+1;\n    bb[4*i+1]=ys; bb[4*i+3]=ye-ys+1;\n  }\n}\n\nvoid rleFrBbox( RLE *R, const BB bb, siz h, siz w, siz n ) {\n  siz i; for( i=0; i<n; i++ ) {\n    double xs=bb[4*i+0], xe=xs+bb[4*i+2];\n    double ys=bb[4*i+1], ye=ys+bb[4*i+3];\n    double xy[8] = {xs,ys,xs,ye,xe,ye,xe,ys};\n    rleFrPoly( R+i, xy, 4, h, w );\n  }\n}\n\nint uintCompare(const void *a, const void *b) {\n  uint c=*((uint*)a), d=*((uint*)b); return c>d?1:c<d?-1:0;\n}\n\nvoid rleFrPoly( RLE *R, const double *xy, siz k, siz h, siz w ) {\n  /* upsample and get discrete points densely along entire boundary */\n  siz j, m=0; double scale=5; int *x, *y, *u, *v; uint *a, *b;\n  x=malloc(sizeof(int)*(k+1)); y=malloc(sizeof(int)*(k+1));\n  for(j=0; j<k; j++) x[j]=(int)(scale*xy[j*2+0]+.5); x[k]=x[0];\n  for(j=0; j<k; j++) 
y[j]=(int)(scale*xy[j*2+1]+.5); y[k]=y[0];\n  for(j=0; j<k; j++) m+=umax(abs(x[j]-x[j+1]),abs(y[j]-y[j+1]))+1;\n  u=malloc(sizeof(int)*m); v=malloc(sizeof(int)*m); m=0;\n  for( j=0; j<k; j++ ) {\n    int xs=x[j], xe=x[j+1], ys=y[j], ye=y[j+1], dx, dy, t, d;\n    int flip; double s; dx=abs(xe-xs); dy=abs(ys-ye);\n    flip = (dx>=dy && xs>xe) || (dx<dy && ys>ye);\n    if(flip) { t=xs; xs=xe; xe=t; t=ys; ys=ye; ye=t; }\n    s = dx>=dy ? (double)(ye-ys)/dx : (double)(xe-xs)/dy;\n    if(dx>=dy) for( d=0; d<=dx; d++ ) {\n      t=flip?dx-d:d; u[m]=t+xs; v[m]=(int)(ys+s*t+.5); m++;\n    } else for( d=0; d<=dy; d++ ) {\n      t=flip?dy-d:d; v[m]=t+ys; u[m]=(int)(xs+s*t+.5); m++;\n    }\n  }\n  /* get points along y-boundary and downsample */\n  free(x); free(y); k=m; m=0; double xd, yd;\n  x=malloc(sizeof(int)*k); y=malloc(sizeof(int)*k);\n  for( j=1; j<k; j++ ) if(u[j]!=u[j-1]) {\n    xd=(double)(u[j]<u[j-1]?u[j]:u[j]-1); xd=(xd+.5)/scale-.5;\n    if( floor(xd)!=xd || xd<0 || xd>w-1 ) continue;\n    yd=(double)(v[j]<v[j-1]?v[j]:v[j-1]); yd=(yd+.5)/scale-.5;\n    if(yd<0) yd=0; else if(yd>h) yd=h; yd=ceil(yd);\n    x[m]=(int) xd; y[m]=(int) yd; m++;\n  }\n  /* compute rle encoding given y-boundary points */\n  k=m; a=malloc(sizeof(uint)*(k+1));\n  for( j=0; j<k; j++ ) a[j]=(uint)(x[j]*(int)(h)+y[j]);\n  a[k++]=(uint)(h*w); free(u); free(v); free(x); free(y);\n  qsort(a,k,sizeof(uint),uintCompare); uint p=0;\n  for( j=0; j<k; j++ ) { uint t=a[j]; a[j]-=p; p=t; }\n  b=malloc(sizeof(uint)*k); j=m=0; b[m++]=a[j++];\n  while(j<k) if(a[j]>0) b[m++]=a[j++]; else {\n    j++; if(j<k) b[m-1]+=a[j++]; }\n  rleInit(R,h,w,m,b); free(a); free(b);\n}\n\nchar* rleToString( const RLE *R ) {\n  /* Similar to LEB128 but using 6 bits/char and ascii chars 48-111. 
*/\n  siz i, m=R->m, p=0; long x; int more;\n  char *s=malloc(sizeof(char)*m*6);\n  for( i=0; i<m; i++ ) {\n    x=(long) R->cnts[i]; if(i>2) x-=(long) R->cnts[i-2]; more=1;\n    while( more ) {\n      char c=x & 0x1f; x >>= 5; more=(c & 0x10) ? x!=-1 : x!=0;\n      if(more) c |= 0x20; c+=48; s[p++]=c;\n    }\n  }\n  s[p]=0; return s;\n}\n\nvoid rleFrString( RLE *R, char *s, siz h, siz w ) {\n  siz m=0, p=0, k; long x; int more; uint *cnts;\n  while( s[m] ) m++; cnts=malloc(sizeof(uint)*m); m=0;\n  while( s[p] ) {\n    x=0; k=0; more=1;\n    while( more ) {\n      char c=s[p]-48; x |= (c & 0x1f) << 5*k;\n      more = c & 0x20; p++; k++;\n      if(!more && (c & 0x10)) x |= -1 << 5*k;\n    }\n    if(m>2) x+=(long) cnts[m-2]; cnts[m++]=(uint) x;\n  }\n  rleInit(R,h,w,m,cnts); free(cnts);\n}\n"
  },
  {
    "path": "PythonAPI/common/maskApi.h",
    "content": "/**************************************************************************\n* Microsoft COCO Toolbox.      version 2.0\n* Data, paper, and tutorials available at:  http://mscoco.org/\n* Code written by Piotr Dollar and Tsung-Yi Lin, 2015.\n* Licensed under the Simplified BSD License [see coco/license.txt]\n**************************************************************************/\n#pragma once\n\ntypedef unsigned int uint;\ntypedef unsigned long siz;\ntypedef unsigned char byte;\ntypedef double* BB;\ntypedef struct { siz h, w, m; uint *cnts; } RLE;\n\n/* Initialize/destroy RLE. */\nvoid rleInit( RLE *R, siz h, siz w, siz m, uint *cnts );\nvoid rleFree( RLE *R );\n\n/* Initialize/destroy RLE array. */\nvoid rlesInit( RLE **R, siz n );\nvoid rlesFree( RLE **R, siz n );\n\n/* Encode binary masks using RLE. */\nvoid rleEncode( RLE *R, const byte *mask, siz h, siz w, siz n );\n\n/* Decode binary masks encoded via RLE. */\nvoid rleDecode( const RLE *R, byte *mask, siz n );\n\n/* Compute union or intersection of encoded masks. */\nvoid rleMerge( const RLE *R, RLE *M, siz n, int intersect );\n\n/* Compute area of encoded masks. */\nvoid rleArea( const RLE *R, siz n, uint *a );\n\n/* Compute intersection over union between masks. */\nvoid rleIou( RLE *dt, RLE *gt, siz m, siz n, byte *iscrowd, double *o );\n\n/* Compute non-maximum suppression between bounding masks */\nvoid rleNms( RLE *dt, siz n, uint *keep, double thr );\n\n/* Compute intersection over union between bounding boxes. */\nvoid bbIou( BB dt, BB gt, siz m, siz n, byte *iscrowd, double *o );\n\n/* Compute non-maximum suppression between bounding boxes */\nvoid bbNms( BB dt, siz n, uint *keep, double thr );\n\n/* Get bounding boxes surrounding encoded masks. */\nvoid rleToBbox( const RLE *R, BB bb, siz n );\n\n/* Convert bounding boxes to encoded masks. */\nvoid rleFrBbox( RLE *R, const BB bb, siz h, siz w, siz n );\n\n/* Convert polygon to encoded mask. 
*/\nvoid rleFrPoly( RLE *R, const double *xy, siz k, siz h, siz w );\n\n/* Get compressed string representation of encoded mask. */\nchar* rleToString( const RLE *R );\n\n/* Convert from compressed string representation of encoded mask. */\nvoid rleFrString( RLE *R, char *s, siz h, siz w );\n"
  },
  {
    "path": "PythonAPI/pysobatools/__init__.py",
    "content": "__author__ = 'tylin'\n"
  },
  {
    "path": "PythonAPI/pysobatools/_mask.c",
    "content": "/* Generated by Cython 0.29.13 */\n\n#define PY_SSIZE_T_CLEAN\n#include \"Python.h\"\n#ifndef Py_PYTHON_H\n    #error Python headers needed to compile C extensions, please install development version of Python.\n#elif PY_VERSION_HEX < 0x02060000 || (0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03030000)\n    #error Cython requires Python 2.6+ or Python 3.3+.\n#else\n#define CYTHON_ABI \"0_29_13\"\n#define CYTHON_HEX_VERSION 0x001D0DF0\n#define CYTHON_FUTURE_DIVISION 0\n#include <stddef.h>\n#ifndef offsetof\n  #define offsetof(type, member) ( (size_t) & ((type*)0) -> member )\n#endif\n#if !defined(WIN32) && !defined(MS_WINDOWS)\n  #ifndef __stdcall\n    #define __stdcall\n  #endif\n  #ifndef __cdecl\n    #define __cdecl\n  #endif\n  #ifndef __fastcall\n    #define __fastcall\n  #endif\n#endif\n#ifndef DL_IMPORT\n  #define DL_IMPORT(t) t\n#endif\n#ifndef DL_EXPORT\n  #define DL_EXPORT(t) t\n#endif\n#define __PYX_COMMA ,\n#ifndef HAVE_LONG_LONG\n  #if PY_VERSION_HEX >= 0x02070000\n    #define HAVE_LONG_LONG\n  #endif\n#endif\n#ifndef PY_LONG_LONG\n  #define PY_LONG_LONG LONG_LONG\n#endif\n#ifndef Py_HUGE_VAL\n  #define Py_HUGE_VAL HUGE_VAL\n#endif\n#ifdef PYPY_VERSION\n  #define CYTHON_COMPILING_IN_PYPY 1\n  #define CYTHON_COMPILING_IN_PYSTON 0\n  #define CYTHON_COMPILING_IN_CPYTHON 0\n  #undef CYTHON_USE_TYPE_SLOTS\n  #define CYTHON_USE_TYPE_SLOTS 0\n  #undef CYTHON_USE_PYTYPE_LOOKUP\n  #define CYTHON_USE_PYTYPE_LOOKUP 0\n  #if PY_VERSION_HEX < 0x03050000\n    #undef CYTHON_USE_ASYNC_SLOTS\n    #define CYTHON_USE_ASYNC_SLOTS 0\n  #elif !defined(CYTHON_USE_ASYNC_SLOTS)\n    #define CYTHON_USE_ASYNC_SLOTS 1\n  #endif\n  #undef CYTHON_USE_PYLIST_INTERNALS\n  #define CYTHON_USE_PYLIST_INTERNALS 0\n  #undef CYTHON_USE_UNICODE_INTERNALS\n  #define CYTHON_USE_UNICODE_INTERNALS 0\n  #undef CYTHON_USE_UNICODE_WRITER\n  #define CYTHON_USE_UNICODE_WRITER 0\n  #undef CYTHON_USE_PYLONG_INTERNALS\n  #define CYTHON_USE_PYLONG_INTERNALS 0\n  #undef 
CYTHON_AVOID_BORROWED_REFS\n  #define CYTHON_AVOID_BORROWED_REFS 1\n  #undef CYTHON_ASSUME_SAFE_MACROS\n  #define CYTHON_ASSUME_SAFE_MACROS 0\n  #undef CYTHON_UNPACK_METHODS\n  #define CYTHON_UNPACK_METHODS 0\n  #undef CYTHON_FAST_THREAD_STATE\n  #define CYTHON_FAST_THREAD_STATE 0\n  #undef CYTHON_FAST_PYCALL\n  #define CYTHON_FAST_PYCALL 0\n  #undef CYTHON_PEP489_MULTI_PHASE_INIT\n  #define CYTHON_PEP489_MULTI_PHASE_INIT 0\n  #undef CYTHON_USE_TP_FINALIZE\n  #define CYTHON_USE_TP_FINALIZE 0\n  #undef CYTHON_USE_DICT_VERSIONS\n  #define CYTHON_USE_DICT_VERSIONS 0\n  #undef CYTHON_USE_EXC_INFO_STACK\n  #define CYTHON_USE_EXC_INFO_STACK 0\n#elif defined(PYSTON_VERSION)\n  #define CYTHON_COMPILING_IN_PYPY 0\n  #define CYTHON_COMPILING_IN_PYSTON 1\n  #define CYTHON_COMPILING_IN_CPYTHON 0\n  #ifndef CYTHON_USE_TYPE_SLOTS\n    #define CYTHON_USE_TYPE_SLOTS 1\n  #endif\n  #undef CYTHON_USE_PYTYPE_LOOKUP\n  #define CYTHON_USE_PYTYPE_LOOKUP 0\n  #undef CYTHON_USE_ASYNC_SLOTS\n  #define CYTHON_USE_ASYNC_SLOTS 0\n  #undef CYTHON_USE_PYLIST_INTERNALS\n  #define CYTHON_USE_PYLIST_INTERNALS 0\n  #ifndef CYTHON_USE_UNICODE_INTERNALS\n    #define CYTHON_USE_UNICODE_INTERNALS 1\n  #endif\n  #undef CYTHON_USE_UNICODE_WRITER\n  #define CYTHON_USE_UNICODE_WRITER 0\n  #undef CYTHON_USE_PYLONG_INTERNALS\n  #define CYTHON_USE_PYLONG_INTERNALS 0\n  #ifndef CYTHON_AVOID_BORROWED_REFS\n    #define CYTHON_AVOID_BORROWED_REFS 0\n  #endif\n  #ifndef CYTHON_ASSUME_SAFE_MACROS\n    #define CYTHON_ASSUME_SAFE_MACROS 1\n  #endif\n  #ifndef CYTHON_UNPACK_METHODS\n    #define CYTHON_UNPACK_METHODS 1\n  #endif\n  #undef CYTHON_FAST_THREAD_STATE\n  #define CYTHON_FAST_THREAD_STATE 0\n  #undef CYTHON_FAST_PYCALL\n  #define CYTHON_FAST_PYCALL 0\n  #undef CYTHON_PEP489_MULTI_PHASE_INIT\n  #define CYTHON_PEP489_MULTI_PHASE_INIT 0\n  #undef CYTHON_USE_TP_FINALIZE\n  #define CYTHON_USE_TP_FINALIZE 0\n  #undef CYTHON_USE_DICT_VERSIONS\n  #define CYTHON_USE_DICT_VERSIONS 0\n  #undef 
CYTHON_USE_EXC_INFO_STACK\n  #define CYTHON_USE_EXC_INFO_STACK 0\n#else\n  #define CYTHON_COMPILING_IN_PYPY 0\n  #define CYTHON_COMPILING_IN_PYSTON 0\n  #define CYTHON_COMPILING_IN_CPYTHON 1\n  #ifndef CYTHON_USE_TYPE_SLOTS\n    #define CYTHON_USE_TYPE_SLOTS 1\n  #endif\n  #if PY_VERSION_HEX < 0x02070000\n    #undef CYTHON_USE_PYTYPE_LOOKUP\n    #define CYTHON_USE_PYTYPE_LOOKUP 0\n  #elif !defined(CYTHON_USE_PYTYPE_LOOKUP)\n    #define CYTHON_USE_PYTYPE_LOOKUP 1\n  #endif\n  #if PY_MAJOR_VERSION < 3\n    #undef CYTHON_USE_ASYNC_SLOTS\n    #define CYTHON_USE_ASYNC_SLOTS 0\n  #elif !defined(CYTHON_USE_ASYNC_SLOTS)\n    #define CYTHON_USE_ASYNC_SLOTS 1\n  #endif\n  #if PY_VERSION_HEX < 0x02070000\n    #undef CYTHON_USE_PYLONG_INTERNALS\n    #define CYTHON_USE_PYLONG_INTERNALS 0\n  #elif !defined(CYTHON_USE_PYLONG_INTERNALS)\n    #define CYTHON_USE_PYLONG_INTERNALS 1\n  #endif\n  #ifndef CYTHON_USE_PYLIST_INTERNALS\n    #define CYTHON_USE_PYLIST_INTERNALS 1\n  #endif\n  #ifndef CYTHON_USE_UNICODE_INTERNALS\n    #define CYTHON_USE_UNICODE_INTERNALS 1\n  #endif\n  #if PY_VERSION_HEX < 0x030300F0\n    #undef CYTHON_USE_UNICODE_WRITER\n    #define CYTHON_USE_UNICODE_WRITER 0\n  #elif !defined(CYTHON_USE_UNICODE_WRITER)\n    #define CYTHON_USE_UNICODE_WRITER 1\n  #endif\n  #ifndef CYTHON_AVOID_BORROWED_REFS\n    #define CYTHON_AVOID_BORROWED_REFS 0\n  #endif\n  #ifndef CYTHON_ASSUME_SAFE_MACROS\n    #define CYTHON_ASSUME_SAFE_MACROS 1\n  #endif\n  #ifndef CYTHON_UNPACK_METHODS\n    #define CYTHON_UNPACK_METHODS 1\n  #endif\n  #ifndef CYTHON_FAST_THREAD_STATE\n    #define CYTHON_FAST_THREAD_STATE 1\n  #endif\n  #ifndef CYTHON_FAST_PYCALL\n    #define CYTHON_FAST_PYCALL 1\n  #endif\n  #ifndef CYTHON_PEP489_MULTI_PHASE_INIT\n    #define CYTHON_PEP489_MULTI_PHASE_INIT (PY_VERSION_HEX >= 0x03050000)\n  #endif\n  #ifndef CYTHON_USE_TP_FINALIZE\n    #define CYTHON_USE_TP_FINALIZE (PY_VERSION_HEX >= 0x030400a1)\n  #endif\n  #ifndef CYTHON_USE_DICT_VERSIONS\n    #define 
CYTHON_USE_DICT_VERSIONS (PY_VERSION_HEX >= 0x030600B1)\n  #endif\n  #ifndef CYTHON_USE_EXC_INFO_STACK\n    #define CYTHON_USE_EXC_INFO_STACK (PY_VERSION_HEX >= 0x030700A3)\n  #endif\n#endif\n#if !defined(CYTHON_FAST_PYCCALL)\n#define CYTHON_FAST_PYCCALL  (CYTHON_FAST_PYCALL && PY_VERSION_HEX >= 0x030600B1)\n#endif\n#if CYTHON_USE_PYLONG_INTERNALS\n  #include \"longintrepr.h\"\n  #undef SHIFT\n  #undef BASE\n  #undef MASK\n  #ifdef SIZEOF_VOID_P\n    enum { __pyx_check_sizeof_voidp = 1 / (int)(SIZEOF_VOID_P == sizeof(void*)) };\n  #endif\n#endif\n#ifndef __has_attribute\n  #define __has_attribute(x) 0\n#endif\n#ifndef __has_cpp_attribute\n  #define __has_cpp_attribute(x) 0\n#endif\n#ifndef CYTHON_RESTRICT\n  #if defined(__GNUC__)\n    #define CYTHON_RESTRICT __restrict__\n  #elif defined(_MSC_VER) && _MSC_VER >= 1400\n    #define CYTHON_RESTRICT __restrict\n  #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L\n    #define CYTHON_RESTRICT restrict\n  #else\n    #define CYTHON_RESTRICT\n  #endif\n#endif\n#ifndef CYTHON_UNUSED\n# if defined(__GNUC__)\n#   if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4))\n#     define CYTHON_UNUSED __attribute__ ((__unused__))\n#   else\n#     define CYTHON_UNUSED\n#   endif\n# elif defined(__ICC) || (defined(__INTEL_COMPILER) && !defined(_MSC_VER))\n#   define CYTHON_UNUSED __attribute__ ((__unused__))\n# else\n#   define CYTHON_UNUSED\n# endif\n#endif\n#ifndef CYTHON_MAYBE_UNUSED_VAR\n#  if defined(__cplusplus)\n     template<class T> void CYTHON_MAYBE_UNUSED_VAR( const T& ) { }\n#  else\n#    define CYTHON_MAYBE_UNUSED_VAR(x) (void)(x)\n#  endif\n#endif\n#ifndef CYTHON_NCP_UNUSED\n# if CYTHON_COMPILING_IN_CPYTHON\n#  define CYTHON_NCP_UNUSED\n# else\n#  define CYTHON_NCP_UNUSED CYTHON_UNUSED\n# endif\n#endif\n#define __Pyx_void_to_None(void_result) ((void)(void_result), Py_INCREF(Py_None), Py_None)\n#ifdef _MSC_VER\n    #ifndef _MSC_STDINT_H_\n        #if _MSC_VER < 1300\n           
typedef unsigned char     uint8_t;\n           typedef unsigned int      uint32_t;\n        #else\n           typedef unsigned __int8   uint8_t;\n           typedef unsigned __int32  uint32_t;\n        #endif\n    #endif\n#else\n   #include <stdint.h>\n#endif\n#ifndef CYTHON_FALLTHROUGH\n  #if defined(__cplusplus) && __cplusplus >= 201103L\n    #if __has_cpp_attribute(fallthrough)\n      #define CYTHON_FALLTHROUGH [[fallthrough]]\n    #elif __has_cpp_attribute(clang::fallthrough)\n      #define CYTHON_FALLTHROUGH [[clang::fallthrough]]\n    #elif __has_cpp_attribute(gnu::fallthrough)\n      #define CYTHON_FALLTHROUGH [[gnu::fallthrough]]\n    #endif\n  #endif\n  #ifndef CYTHON_FALLTHROUGH\n    #if __has_attribute(fallthrough)\n      #define CYTHON_FALLTHROUGH __attribute__((fallthrough))\n    #else\n      #define CYTHON_FALLTHROUGH\n    #endif\n  #endif\n  #if defined(__clang__ ) && defined(__apple_build_version__)\n    #if __apple_build_version__ < 7000000\n      #undef  CYTHON_FALLTHROUGH\n      #define CYTHON_FALLTHROUGH\n    #endif\n  #endif\n#endif\n\n#ifndef CYTHON_INLINE\n  #if defined(__clang__)\n    #define CYTHON_INLINE __inline__ __attribute__ ((__unused__))\n  #elif defined(__GNUC__)\n    #define CYTHON_INLINE __inline__\n  #elif defined(_MSC_VER)\n    #define CYTHON_INLINE __inline\n  #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L\n    #define CYTHON_INLINE inline\n  #else\n    #define CYTHON_INLINE\n  #endif\n#endif\n\n#if CYTHON_COMPILING_IN_PYPY && PY_VERSION_HEX < 0x02070600 && !defined(Py_OptimizeFlag)\n  #define Py_OptimizeFlag 0\n#endif\n#define __PYX_BUILD_PY_SSIZE_T \"n\"\n#define CYTHON_FORMAT_SSIZE_T \"z\"\n#if PY_MAJOR_VERSION < 3\n  #define __Pyx_BUILTIN_MODULE_NAME \"__builtin__\"\n  #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\\\n          PyCode_New(a+k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\n  #define __Pyx_DefaultClassType PyClass_Type\n#else\n  #define 
__Pyx_BUILTIN_MODULE_NAME \"builtins\"\n#if PY_VERSION_HEX >= 0x030800A4 && PY_VERSION_HEX < 0x030800B2\n  #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\\\n          PyCode_New(a, 0, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\n#else\n  #define __Pyx_PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\\\n          PyCode_New(a, k, l, s, f, code, c, n, v, fv, cell, fn, name, fline, lnos)\n#endif\n  #define __Pyx_DefaultClassType PyType_Type\n#endif\n#ifndef Py_TPFLAGS_CHECKTYPES\n  #define Py_TPFLAGS_CHECKTYPES 0\n#endif\n#ifndef Py_TPFLAGS_HAVE_INDEX\n  #define Py_TPFLAGS_HAVE_INDEX 0\n#endif\n#ifndef Py_TPFLAGS_HAVE_NEWBUFFER\n  #define Py_TPFLAGS_HAVE_NEWBUFFER 0\n#endif\n#ifndef Py_TPFLAGS_HAVE_FINALIZE\n  #define Py_TPFLAGS_HAVE_FINALIZE 0\n#endif\n#ifndef METH_STACKLESS\n  #define METH_STACKLESS 0\n#endif\n#if PY_VERSION_HEX <= 0x030700A3 || !defined(METH_FASTCALL)\n  #ifndef METH_FASTCALL\n     #define METH_FASTCALL 0x80\n  #endif\n  typedef PyObject *(*__Pyx_PyCFunctionFast) (PyObject *self, PyObject *const *args, Py_ssize_t nargs);\n  typedef PyObject *(*__Pyx_PyCFunctionFastWithKeywords) (PyObject *self, PyObject *const *args,\n                                                          Py_ssize_t nargs, PyObject *kwnames);\n#else\n  #define __Pyx_PyCFunctionFast _PyCFunctionFast\n  #define __Pyx_PyCFunctionFastWithKeywords _PyCFunctionFastWithKeywords\n#endif\n#if CYTHON_FAST_PYCCALL\n#define __Pyx_PyFastCFunction_Check(func)\\\n    ((PyCFunction_Check(func) && (METH_FASTCALL == (PyCFunction_GET_FLAGS(func) & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS | METH_STACKLESS)))))\n#else\n#define __Pyx_PyFastCFunction_Check(func) 0\n#endif\n#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Malloc)\n  #define PyObject_Malloc(s)   PyMem_Malloc(s)\n  #define PyObject_Free(p)     PyMem_Free(p)\n  #define PyObject_Realloc(p)  PyMem_Realloc(p)\n#endif\n#if 
CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX < 0x030400A1\n  #define PyMem_RawMalloc(n)           PyMem_Malloc(n)\n  #define PyMem_RawRealloc(p, n)       PyMem_Realloc(p, n)\n  #define PyMem_RawFree(p)             PyMem_Free(p)\n#endif\n#if CYTHON_COMPILING_IN_PYSTON\n  #define __Pyx_PyCode_HasFreeVars(co)  PyCode_HasFreeVars(co)\n  #define __Pyx_PyFrame_SetLineNumber(frame, lineno) PyFrame_SetLineNumber(frame, lineno)\n#else\n  #define __Pyx_PyCode_HasFreeVars(co)  (PyCode_GetNumFree(co) > 0)\n  #define __Pyx_PyFrame_SetLineNumber(frame, lineno)  (frame)->f_lineno = (lineno)\n#endif\n#if !CYTHON_FAST_THREAD_STATE || PY_VERSION_HEX < 0x02070000\n  #define __Pyx_PyThreadState_Current PyThreadState_GET()\n#elif PY_VERSION_HEX >= 0x03060000\n  #define __Pyx_PyThreadState_Current _PyThreadState_UncheckedGet()\n#elif PY_VERSION_HEX >= 0x03000000\n  #define __Pyx_PyThreadState_Current PyThreadState_GET()\n#else\n  #define __Pyx_PyThreadState_Current _PyThreadState_Current\n#endif\n#if PY_VERSION_HEX < 0x030700A2 && !defined(PyThread_tss_create) && !defined(Py_tss_NEEDS_INIT)\n#include \"pythread.h\"\n#define Py_tss_NEEDS_INIT 0\ntypedef int Py_tss_t;\nstatic CYTHON_INLINE int PyThread_tss_create(Py_tss_t *key) {\n  *key = PyThread_create_key();\n  return 0;\n}\nstatic CYTHON_INLINE Py_tss_t * PyThread_tss_alloc(void) {\n  Py_tss_t *key = (Py_tss_t *)PyObject_Malloc(sizeof(Py_tss_t));\n  *key = Py_tss_NEEDS_INIT;\n  return key;\n}\nstatic CYTHON_INLINE void PyThread_tss_free(Py_tss_t *key) {\n  PyObject_Free(key);\n}\nstatic CYTHON_INLINE int PyThread_tss_is_created(Py_tss_t *key) {\n  return *key != Py_tss_NEEDS_INIT;\n}\nstatic CYTHON_INLINE void PyThread_tss_delete(Py_tss_t *key) {\n  PyThread_delete_key(*key);\n  *key = Py_tss_NEEDS_INIT;\n}\nstatic CYTHON_INLINE int PyThread_tss_set(Py_tss_t *key, void *value) {\n  return PyThread_set_key_value(*key, value);\n}\nstatic CYTHON_INLINE void * PyThread_tss_get(Py_tss_t *key) {\n  return 
PyThread_get_key_value(*key);\n}\n#endif\n#if CYTHON_COMPILING_IN_CPYTHON || defined(_PyDict_NewPresized)\n#define __Pyx_PyDict_NewPresized(n)  ((n <= 8) ? PyDict_New() : _PyDict_NewPresized(n))\n#else\n#define __Pyx_PyDict_NewPresized(n)  PyDict_New()\n#endif\n#if PY_MAJOR_VERSION >= 3 || CYTHON_FUTURE_DIVISION\n  #define __Pyx_PyNumber_Divide(x,y)         PyNumber_TrueDivide(x,y)\n  #define __Pyx_PyNumber_InPlaceDivide(x,y)  PyNumber_InPlaceTrueDivide(x,y)\n#else\n  #define __Pyx_PyNumber_Divide(x,y)         PyNumber_Divide(x,y)\n  #define __Pyx_PyNumber_InPlaceDivide(x,y)  PyNumber_InPlaceDivide(x,y)\n#endif\n#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1 && CYTHON_USE_UNICODE_INTERNALS\n#define __Pyx_PyDict_GetItemStr(dict, name)  _PyDict_GetItem_KnownHash(dict, name, ((PyASCIIObject *) name)->hash)\n#else\n#define __Pyx_PyDict_GetItemStr(dict, name)  PyDict_GetItem(dict, name)\n#endif\n#if PY_VERSION_HEX > 0x03030000 && defined(PyUnicode_KIND)\n  #define CYTHON_PEP393_ENABLED 1\n  #define __Pyx_PyUnicode_READY(op)       (likely(PyUnicode_IS_READY(op)) ?\\\n                                              0 : _PyUnicode_Ready((PyObject *)(op)))\n  #define __Pyx_PyUnicode_GET_LENGTH(u)   PyUnicode_GET_LENGTH(u)\n  #define __Pyx_PyUnicode_READ_CHAR(u, i) PyUnicode_READ_CHAR(u, i)\n  #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u)   PyUnicode_MAX_CHAR_VALUE(u)\n  #define __Pyx_PyUnicode_KIND(u)         PyUnicode_KIND(u)\n  #define __Pyx_PyUnicode_DATA(u)         PyUnicode_DATA(u)\n  #define __Pyx_PyUnicode_READ(k, d, i)   PyUnicode_READ(k, d, i)\n  #define __Pyx_PyUnicode_WRITE(k, d, i, ch)  PyUnicode_WRITE(k, d, i, ch)\n  #define __Pyx_PyUnicode_IS_TRUE(u)      (0 != (likely(PyUnicode_IS_READY(u)) ? 
PyUnicode_GET_LENGTH(u) : PyUnicode_GET_SIZE(u)))\n#else\n  #define CYTHON_PEP393_ENABLED 0\n  #define PyUnicode_1BYTE_KIND  1\n  #define PyUnicode_2BYTE_KIND  2\n  #define PyUnicode_4BYTE_KIND  4\n  #define __Pyx_PyUnicode_READY(op)       (0)\n  #define __Pyx_PyUnicode_GET_LENGTH(u)   PyUnicode_GET_SIZE(u)\n  #define __Pyx_PyUnicode_READ_CHAR(u, i) ((Py_UCS4)(PyUnicode_AS_UNICODE(u)[i]))\n  #define __Pyx_PyUnicode_MAX_CHAR_VALUE(u)   ((sizeof(Py_UNICODE) == 2) ? 65535 : 1114111)\n  #define __Pyx_PyUnicode_KIND(u)         (sizeof(Py_UNICODE))\n  #define __Pyx_PyUnicode_DATA(u)         ((void*)PyUnicode_AS_UNICODE(u))\n  #define __Pyx_PyUnicode_READ(k, d, i)   ((void)(k), (Py_UCS4)(((Py_UNICODE*)d)[i]))\n  #define __Pyx_PyUnicode_WRITE(k, d, i, ch)  (((void)(k)), ((Py_UNICODE*)d)[i] = ch)\n  #define __Pyx_PyUnicode_IS_TRUE(u)      (0 != PyUnicode_GET_SIZE(u))\n#endif\n#if CYTHON_COMPILING_IN_PYPY\n  #define __Pyx_PyUnicode_Concat(a, b)      PyNumber_Add(a, b)\n  #define __Pyx_PyUnicode_ConcatSafe(a, b)  PyNumber_Add(a, b)\n#else\n  #define __Pyx_PyUnicode_Concat(a, b)      PyUnicode_Concat(a, b)\n  #define __Pyx_PyUnicode_ConcatSafe(a, b)  ((unlikely((a) == Py_None) || unlikely((b) == Py_None)) ?\\\n      PyNumber_Add(a, b) : __Pyx_PyUnicode_Concat(a, b))\n#endif\n#if CYTHON_COMPILING_IN_PYPY && !defined(PyUnicode_Contains)\n  #define PyUnicode_Contains(u, s)  PySequence_Contains(u, s)\n#endif\n#if CYTHON_COMPILING_IN_PYPY && !defined(PyByteArray_Check)\n  #define PyByteArray_Check(obj)  PyObject_TypeCheck(obj, &PyByteArray_Type)\n#endif\n#if CYTHON_COMPILING_IN_PYPY && !defined(PyObject_Format)\n  #define PyObject_Format(obj, fmt)  PyObject_CallMethod(obj, \"__format__\", \"O\", fmt)\n#endif\n#define __Pyx_PyString_FormatSafe(a, b)   ((unlikely((a) == Py_None || (PyString_Check(b) && !PyString_CheckExact(b)))) ? 
PyNumber_Remainder(a, b) : __Pyx_PyString_Format(a, b))\n#define __Pyx_PyUnicode_FormatSafe(a, b)  ((unlikely((a) == Py_None || (PyUnicode_Check(b) && !PyUnicode_CheckExact(b)))) ? PyNumber_Remainder(a, b) : PyUnicode_Format(a, b))\n#if PY_MAJOR_VERSION >= 3\n  #define __Pyx_PyString_Format(a, b)  PyUnicode_Format(a, b)\n#else\n  #define __Pyx_PyString_Format(a, b)  PyString_Format(a, b)\n#endif\n#if PY_MAJOR_VERSION < 3 && !defined(PyObject_ASCII)\n  #define PyObject_ASCII(o)            PyObject_Repr(o)\n#endif\n#if PY_MAJOR_VERSION >= 3\n  #define PyBaseString_Type            PyUnicode_Type\n  #define PyStringObject               PyUnicodeObject\n  #define PyString_Type                PyUnicode_Type\n  #define PyString_Check               PyUnicode_Check\n  #define PyString_CheckExact          PyUnicode_CheckExact\n  #define PyObject_Unicode             PyObject_Str\n#endif\n#if PY_MAJOR_VERSION >= 3\n  #define __Pyx_PyBaseString_Check(obj) PyUnicode_Check(obj)\n  #define __Pyx_PyBaseString_CheckExact(obj) PyUnicode_CheckExact(obj)\n#else\n  #define __Pyx_PyBaseString_Check(obj) (PyString_Check(obj) || PyUnicode_Check(obj))\n  #define __Pyx_PyBaseString_CheckExact(obj) (PyString_CheckExact(obj) || PyUnicode_CheckExact(obj))\n#endif\n#ifndef PySet_CheckExact\n  #define PySet_CheckExact(obj)        (Py_TYPE(obj) == &PySet_Type)\n#endif\n#if CYTHON_ASSUME_SAFE_MACROS\n  #define __Pyx_PySequence_SIZE(seq)  Py_SIZE(seq)\n#else\n  #define __Pyx_PySequence_SIZE(seq)  PySequence_Size(seq)\n#endif\n#if PY_MAJOR_VERSION >= 3\n  #define PyIntObject                  PyLongObject\n  #define PyInt_Type                   PyLong_Type\n  #define PyInt_Check(op)              PyLong_Check(op)\n  #define PyInt_CheckExact(op)         PyLong_CheckExact(op)\n  #define PyInt_FromString             PyLong_FromString\n  #define PyInt_FromUnicode            PyLong_FromUnicode\n  #define PyInt_FromLong               PyLong_FromLong\n  #define PyInt_FromSize_t             PyLong_FromSize_t\n 
 #define PyInt_FromSsize_t            PyLong_FromSsize_t\n  #define PyInt_AsLong                 PyLong_AsLong\n  #define PyInt_AS_LONG                PyLong_AS_LONG\n  #define PyInt_AsSsize_t              PyLong_AsSsize_t\n  #define PyInt_AsUnsignedLongMask     PyLong_AsUnsignedLongMask\n  #define PyInt_AsUnsignedLongLongMask PyLong_AsUnsignedLongLongMask\n  #define PyNumber_Int                 PyNumber_Long\n#endif\n#if PY_MAJOR_VERSION >= 3\n  #define PyBoolObject                 PyLongObject\n#endif\n#if PY_MAJOR_VERSION >= 3 && CYTHON_COMPILING_IN_PYPY\n  #ifndef PyUnicode_InternFromString\n    #define PyUnicode_InternFromString(s) PyUnicode_FromString(s)\n  #endif\n#endif\n#if PY_VERSION_HEX < 0x030200A4\n  typedef long Py_hash_t;\n  #define __Pyx_PyInt_FromHash_t PyInt_FromLong\n  #define __Pyx_PyInt_AsHash_t   PyInt_AsLong\n#else\n  #define __Pyx_PyInt_FromHash_t PyInt_FromSsize_t\n  #define __Pyx_PyInt_AsHash_t   PyInt_AsSsize_t\n#endif\n#if PY_MAJOR_VERSION >= 3\n  #define __Pyx_PyMethod_New(func, self, klass) ((self) ? 
PyMethod_New(func, self) : (Py_INCREF(func), func))\n#else\n  #define __Pyx_PyMethod_New(func, self, klass) PyMethod_New(func, self, klass)\n#endif\n#if CYTHON_USE_ASYNC_SLOTS\n  #if PY_VERSION_HEX >= 0x030500B1\n    #define __Pyx_PyAsyncMethodsStruct PyAsyncMethods\n    #define __Pyx_PyType_AsAsync(obj) (Py_TYPE(obj)->tp_as_async)\n  #else\n    #define __Pyx_PyType_AsAsync(obj) ((__Pyx_PyAsyncMethodsStruct*) (Py_TYPE(obj)->tp_reserved))\n  #endif\n#else\n  #define __Pyx_PyType_AsAsync(obj) NULL\n#endif\n#ifndef __Pyx_PyAsyncMethodsStruct\n    typedef struct {\n        unaryfunc am_await;\n        unaryfunc am_aiter;\n        unaryfunc am_anext;\n    } __Pyx_PyAsyncMethodsStruct;\n#endif\n\n#if defined(WIN32) || defined(MS_WINDOWS)\n  #define _USE_MATH_DEFINES\n#endif\n#include <math.h>\n#ifdef NAN\n#define __PYX_NAN() ((float) NAN)\n#else\nstatic CYTHON_INLINE float __PYX_NAN() {\n  float value;\n  memset(&value, 0xFF, sizeof(value));\n  return value;\n}\n#endif\n#if defined(__CYGWIN__) && defined(_LDBL_EQ_DBL)\n#define __Pyx_truncl trunc\n#else\n#define __Pyx_truncl truncl\n#endif\n\n\n#define __PYX_ERR(f_index, lineno, Ln_error) \\\n{ \\\n  __pyx_filename = __pyx_f[f_index]; __pyx_lineno = lineno; __pyx_clineno = __LINE__; goto Ln_error; \\\n}\n\n#ifndef __PYX_EXTERN_C\n  #ifdef __cplusplus\n    #define __PYX_EXTERN_C extern \"C\"\n  #else\n    #define __PYX_EXTERN_C extern\n  #endif\n#endif\n\n#define __PYX_HAVE__pysobatools___mask\n#define __PYX_HAVE_API__pysobatools___mask\n/* Early includes */\n#include <string.h>\n#include <stdio.h>\n#include \"numpy/arrayobject.h\"\n#include \"numpy/ufuncobject.h\"\n#include <stdlib.h>\n#include \"maskApi.h\"\n#ifdef _OPENMP\n#include <omp.h>\n#endif /* _OPENMP */\n\n#if defined(PYREX_WITHOUT_ASSERTIONS) && !defined(CYTHON_WITHOUT_ASSERTIONS)\n#define CYTHON_WITHOUT_ASSERTIONS\n#endif\n\ntypedef struct {PyObject **p; const char *s; const Py_ssize_t n; const char* encoding;\n                const char is_unicode; const char 
is_str; const char intern; } __Pyx_StringTabEntry;\n\n#define __PYX_DEFAULT_STRING_ENCODING_IS_ASCII 0\n#define __PYX_DEFAULT_STRING_ENCODING_IS_UTF8 0\n#define __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT (PY_MAJOR_VERSION >= 3 && __PYX_DEFAULT_STRING_ENCODING_IS_UTF8)\n#define __PYX_DEFAULT_STRING_ENCODING \"\"\n#define __Pyx_PyObject_FromString __Pyx_PyBytes_FromString\n#define __Pyx_PyObject_FromStringAndSize __Pyx_PyBytes_FromStringAndSize\n#define __Pyx_uchar_cast(c) ((unsigned char)c)\n#define __Pyx_long_cast(x) ((long)x)\n#define __Pyx_fits_Py_ssize_t(v, type, is_signed)  (\\\n    (sizeof(type) < sizeof(Py_ssize_t))  ||\\\n    (sizeof(type) > sizeof(Py_ssize_t) &&\\\n          likely(v < (type)PY_SSIZE_T_MAX ||\\\n                 v == (type)PY_SSIZE_T_MAX)  &&\\\n          (!is_signed || likely(v > (type)PY_SSIZE_T_MIN ||\\\n                                v == (type)PY_SSIZE_T_MIN)))  ||\\\n    (sizeof(type) == sizeof(Py_ssize_t) &&\\\n          (is_signed || likely(v < (type)PY_SSIZE_T_MAX ||\\\n                               v == (type)PY_SSIZE_T_MAX)))  )\nstatic CYTHON_INLINE int __Pyx_is_valid_index(Py_ssize_t i, Py_ssize_t limit) {\n    return (size_t) i < (size_t) limit;\n}\n#if defined (__cplusplus) && __cplusplus >= 201103L\n    #include <cstdlib>\n    #define __Pyx_sst_abs(value) std::abs(value)\n#elif SIZEOF_INT >= SIZEOF_SIZE_T\n    #define __Pyx_sst_abs(value) abs(value)\n#elif SIZEOF_LONG >= SIZEOF_SIZE_T\n    #define __Pyx_sst_abs(value) labs(value)\n#elif defined (_MSC_VER)\n    #define __Pyx_sst_abs(value) ((Py_ssize_t)_abs64(value))\n#elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L\n    #define __Pyx_sst_abs(value) llabs(value)\n#elif defined (__GNUC__)\n    #define __Pyx_sst_abs(value) __builtin_llabs(value)\n#else\n    #define __Pyx_sst_abs(value) ((value<0) ? 
-value : value)\n#endif\nstatic CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject*);\nstatic CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject*, Py_ssize_t* length);\n#define __Pyx_PyByteArray_FromString(s) PyByteArray_FromStringAndSize((const char*)s, strlen((const char*)s))\n#define __Pyx_PyByteArray_FromStringAndSize(s, l) PyByteArray_FromStringAndSize((const char*)s, l)\n#define __Pyx_PyBytes_FromString        PyBytes_FromString\n#define __Pyx_PyBytes_FromStringAndSize PyBytes_FromStringAndSize\nstatic CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char*);\n#if PY_MAJOR_VERSION < 3\n    #define __Pyx_PyStr_FromString        __Pyx_PyBytes_FromString\n    #define __Pyx_PyStr_FromStringAndSize __Pyx_PyBytes_FromStringAndSize\n#else\n    #define __Pyx_PyStr_FromString        __Pyx_PyUnicode_FromString\n    #define __Pyx_PyStr_FromStringAndSize __Pyx_PyUnicode_FromStringAndSize\n#endif\n#define __Pyx_PyBytes_AsWritableString(s)     ((char*) PyBytes_AS_STRING(s))\n#define __Pyx_PyBytes_AsWritableSString(s)    ((signed char*) PyBytes_AS_STRING(s))\n#define __Pyx_PyBytes_AsWritableUString(s)    ((unsigned char*) PyBytes_AS_STRING(s))\n#define __Pyx_PyBytes_AsString(s)     ((const char*) PyBytes_AS_STRING(s))\n#define __Pyx_PyBytes_AsSString(s)    ((const signed char*) PyBytes_AS_STRING(s))\n#define __Pyx_PyBytes_AsUString(s)    ((const unsigned char*) PyBytes_AS_STRING(s))\n#define __Pyx_PyObject_AsWritableString(s)    ((char*) __Pyx_PyObject_AsString(s))\n#define __Pyx_PyObject_AsWritableSString(s)    ((signed char*) __Pyx_PyObject_AsString(s))\n#define __Pyx_PyObject_AsWritableUString(s)    ((unsigned char*) __Pyx_PyObject_AsString(s))\n#define __Pyx_PyObject_AsSString(s)    ((const signed char*) __Pyx_PyObject_AsString(s))\n#define __Pyx_PyObject_AsUString(s)    ((const unsigned char*) __Pyx_PyObject_AsString(s))\n#define __Pyx_PyObject_FromCString(s)  __Pyx_PyObject_FromString((const char*)s)\n#define __Pyx_PyBytes_FromCString(s)  
 __Pyx_PyBytes_FromString((const char*)s)\n#define __Pyx_PyByteArray_FromCString(s)   __Pyx_PyByteArray_FromString((const char*)s)\n#define __Pyx_PyStr_FromCString(s)     __Pyx_PyStr_FromString((const char*)s)\n#define __Pyx_PyUnicode_FromCString(s) __Pyx_PyUnicode_FromString((const char*)s)\nstatic CYTHON_INLINE size_t __Pyx_Py_UNICODE_strlen(const Py_UNICODE *u) {\n    const Py_UNICODE *u_end = u;\n    while (*u_end++) ;\n    return (size_t)(u_end - u - 1);\n}\n#define __Pyx_PyUnicode_FromUnicode(u)       PyUnicode_FromUnicode(u, __Pyx_Py_UNICODE_strlen(u))\n#define __Pyx_PyUnicode_FromUnicodeAndLength PyUnicode_FromUnicode\n#define __Pyx_PyUnicode_AsUnicode            PyUnicode_AsUnicode\n#define __Pyx_NewRef(obj) (Py_INCREF(obj), obj)\n#define __Pyx_Owned_Py_None(b) __Pyx_NewRef(Py_None)\nstatic CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b);\nstatic CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*);\nstatic CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject*);\nstatic CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x);\n#define __Pyx_PySequence_Tuple(obj)\\\n    (likely(PyTuple_CheckExact(obj)) ? __Pyx_NewRef(obj) : PySequence_Tuple(obj))\nstatic CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*);\nstatic CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t);\n#if CYTHON_ASSUME_SAFE_MACROS\n#define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x))\n#else\n#define __pyx_PyFloat_AsDouble(x) PyFloat_AsDouble(x)\n#endif\n#define __pyx_PyFloat_AsFloat(x) ((float) __pyx_PyFloat_AsDouble(x))\n#if PY_MAJOR_VERSION >= 3\n#define __Pyx_PyNumber_Int(x) (PyLong_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Long(x))\n#else\n#define __Pyx_PyNumber_Int(x) (PyInt_CheckExact(x) ? __Pyx_NewRef(x) : PyNumber_Int(x))\n#endif\n#define __Pyx_PyNumber_Float(x) (PyFloat_CheckExact(x) ? 
__Pyx_NewRef(x) : PyNumber_Float(x))\n#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII\nstatic int __Pyx_sys_getdefaultencoding_not_ascii;\nstatic int __Pyx_init_sys_getdefaultencoding_params(void) {\n    PyObject* sys;\n    PyObject* default_encoding = NULL;\n    PyObject* ascii_chars_u = NULL;\n    PyObject* ascii_chars_b = NULL;\n    const char* default_encoding_c;\n    sys = PyImport_ImportModule(\"sys\");\n    if (!sys) goto bad;\n    default_encoding = PyObject_CallMethod(sys, (char*) \"getdefaultencoding\", NULL);\n    Py_DECREF(sys);\n    if (!default_encoding) goto bad;\n    default_encoding_c = PyBytes_AsString(default_encoding);\n    if (!default_encoding_c) goto bad;\n    if (strcmp(default_encoding_c, \"ascii\") == 0) {\n        __Pyx_sys_getdefaultencoding_not_ascii = 0;\n    } else {\n        char ascii_chars[128];\n        int c;\n        for (c = 0; c < 128; c++) {\n            ascii_chars[c] = c;\n        }\n        __Pyx_sys_getdefaultencoding_not_ascii = 1;\n        ascii_chars_u = PyUnicode_DecodeASCII(ascii_chars, 128, NULL);\n        if (!ascii_chars_u) goto bad;\n        ascii_chars_b = PyUnicode_AsEncodedString(ascii_chars_u, default_encoding_c, NULL);\n        if (!ascii_chars_b || !PyBytes_Check(ascii_chars_b) || memcmp(ascii_chars, PyBytes_AS_STRING(ascii_chars_b), 128) != 0) {\n            PyErr_Format(\n                PyExc_ValueError,\n                \"This module compiled with c_string_encoding=ascii, but default encoding '%.200s' is not a superset of ascii.\",\n                default_encoding_c);\n            goto bad;\n        }\n        Py_DECREF(ascii_chars_u);\n        Py_DECREF(ascii_chars_b);\n    }\n    Py_DECREF(default_encoding);\n    return 0;\nbad:\n    Py_XDECREF(default_encoding);\n    Py_XDECREF(ascii_chars_u);\n    Py_XDECREF(ascii_chars_b);\n    return -1;\n}\n#endif\n#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT && PY_MAJOR_VERSION >= 3\n#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) 
PyUnicode_DecodeUTF8(c_str, size, NULL)
#else
#define __Pyx_PyUnicode_FromStringAndSize(c_str, size) PyUnicode_Decode(c_str, size, __PYX_DEFAULT_STRING_ENCODING, NULL)
#if __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT
static char* __PYX_DEFAULT_STRING_ENCODING;
/* Module-init helper (default c_string_encoding build): caches a malloc'd
 * copy of sys.getdefaultencoding() in __PYX_DEFAULT_STRING_ENCODING for use
 * by the string-conversion macros above.  Returns 0 on success, -1 with a
 * Python exception set on failure.  The copy is deliberately never freed:
 * it must live for the lifetime of the interpreter/module. */
static int __Pyx_init_sys_getdefaultencoding_params(void) {
    PyObject* sys;
    PyObject* default_encoding = NULL;
    char* default_encoding_c;
    sys = PyImport_ImportModule("sys");
    if (!sys) goto bad;
    default_encoding = PyObject_CallMethod(sys, (char*) (const char*) "getdefaultencoding", NULL);
    Py_DECREF(sys);
    if (!default_encoding) goto bad;
    default_encoding_c = PyBytes_AsString(default_encoding);
    if (!default_encoding_c) goto bad;
    __PYX_DEFAULT_STRING_ENCODING = (char*) malloc(strlen(default_encoding_c) + 1);
    if (!__PYX_DEFAULT_STRING_ENCODING) goto bad;
    strcpy(__PYX_DEFAULT_STRING_ENCODING, default_encoding_c);
    Py_DECREF(default_encoding);
    return 0;
bad:
    Py_XDECREF(default_encoding);
    return -1;
}
#endif
#endif


/* Test for GCC > 2.95 */
#if defined(__GNUC__)     && (__GNUC__ > 2 || (__GNUC__ == 2 && (__GNUC_MINOR__ > 95)))
  #define likely(x)   __builtin_expect(!!(x), 1)
  #define unlikely(x) __builtin_expect(!!(x), 0)
#else /* !__GNUC__ or GCC < 2.95 */
  #define likely(x)   (x)
  #define unlikely(x) (x)
#endif /* __GNUC__ */
/* No-op used to silence "may be used uninitialized" warnings. */
static CYTHON_INLINE void __Pyx_pretend_to_initialize(void* ptr) { (void)ptr; }

/* Module-level state: module object, its dict, builtins, and the shared
 * empty-container singletons; __pyx_lineno/__pyx_filename track the source
 * position for traceback generation. */
static PyObject *__pyx_m = NULL;
static PyObject *__pyx_d;
static PyObject *__pyx_b;
static PyObject *__pyx_cython_runtime = NULL;
static PyObject *__pyx_empty_tuple;
static PyObject *__pyx_empty_bytes;
static PyObject *__pyx_empty_unicode;
static int __pyx_lineno;
static int __pyx_clineno = 0;
static const char * __pyx_cfilenm= __FILE__;
static const char *__pyx_filename;

/* Header.proto */
#if !defined(CYTHON_CCOMPLEX)
  #if defined(__cplusplus)
    #define CYTHON_CCOMPLEX 1
  #elif 
defined(_Complex_I)\n    #define CYTHON_CCOMPLEX 1\n  #else\n    #define CYTHON_CCOMPLEX 0\n  #endif\n#endif\n#if CYTHON_CCOMPLEX\n  #ifdef __cplusplus\n    #include <complex>\n  #else\n    #include <complex.h>\n  #endif\n#endif\n#if CYTHON_CCOMPLEX && !defined(__cplusplus) && defined(__sun__) && defined(__GNUC__)\n  #undef _Complex_I\n  #define _Complex_I 1.0fj\n#endif\n\n\nstatic const char *__pyx_f[] = {\n  \"pysobatools/_mask.pyx\",\n  \"stringsource\",\n  \"__init__.pxd\",\n  \"type.pxd\",\n};\n/* BufferFormatStructs.proto */\n#define IS_UNSIGNED(type) (((type) -1) > 0)\nstruct __Pyx_StructField_;\n#define __PYX_BUF_FLAGS_PACKED_STRUCT (1 << 0)\ntypedef struct {\n  const char* name;\n  struct __Pyx_StructField_* fields;\n  size_t size;\n  size_t arraysize[8];\n  int ndim;\n  char typegroup;\n  char is_unsigned;\n  int flags;\n} __Pyx_TypeInfo;\ntypedef struct __Pyx_StructField_ {\n  __Pyx_TypeInfo* type;\n  const char* name;\n  size_t offset;\n} __Pyx_StructField;\ntypedef struct {\n  __Pyx_StructField* field;\n  size_t parent_offset;\n} __Pyx_BufFmt_StackElem;\ntypedef struct {\n  __Pyx_StructField root;\n  __Pyx_BufFmt_StackElem* head;\n  size_t fmt_offset;\n  size_t new_count, enc_count;\n  size_t struct_alignment;\n  int is_complex;\n  char enc_type;\n  char new_packmode;\n  char enc_packmode;\n  char is_valid_array;\n} __Pyx_BufFmt_Context;\n\n\n/* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":776\n * # in Cython to enable them only on the right systems.\n * \n * ctypedef npy_int8       int8_t             # <<<<<<<<<<<<<<\n * ctypedef npy_int16      int16_t\n * ctypedef npy_int32      int32_t\n */\ntypedef npy_int8 __pyx_t_5numpy_int8_t;\n\n/* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":777\n * \n * ctypedef npy_int8       int8_t\n * ctypedef npy_int16      int16_t             # <<<<<<<<<<<<<<\n * ctypedef npy_int32      int32_t\n * ctypedef npy_int64      
int64_t\n */\ntypedef npy_int16 __pyx_t_5numpy_int16_t;\n\n/* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":778\n * ctypedef npy_int8       int8_t\n * ctypedef npy_int16      int16_t\n * ctypedef npy_int32      int32_t             # <<<<<<<<<<<<<<\n * ctypedef npy_int64      int64_t\n * #ctypedef npy_int96      int96_t\n */\ntypedef npy_int32 __pyx_t_5numpy_int32_t;\n\n/* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":779\n * ctypedef npy_int16      int16_t\n * ctypedef npy_int32      int32_t\n * ctypedef npy_int64      int64_t             # <<<<<<<<<<<<<<\n * #ctypedef npy_int96      int96_t\n * #ctypedef npy_int128     int128_t\n */\ntypedef npy_int64 __pyx_t_5numpy_int64_t;\n\n/* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":783\n * #ctypedef npy_int128     int128_t\n * \n * ctypedef npy_uint8      uint8_t             # <<<<<<<<<<<<<<\n * ctypedef npy_uint16     uint16_t\n * ctypedef npy_uint32     uint32_t\n */\ntypedef npy_uint8 __pyx_t_5numpy_uint8_t;\n\n/* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":784\n * \n * ctypedef npy_uint8      uint8_t\n * ctypedef npy_uint16     uint16_t             # <<<<<<<<<<<<<<\n * ctypedef npy_uint32     uint32_t\n * ctypedef npy_uint64     uint64_t\n */\ntypedef npy_uint16 __pyx_t_5numpy_uint16_t;\n\n/* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":785\n * ctypedef npy_uint8      uint8_t\n * ctypedef npy_uint16     uint16_t\n * ctypedef npy_uint32     uint32_t             # <<<<<<<<<<<<<<\n * ctypedef npy_uint64     uint64_t\n * #ctypedef npy_uint96     uint96_t\n */\ntypedef npy_uint32 __pyx_t_5numpy_uint32_t;\n\n/* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":786\n * ctypedef npy_uint16     uint16_t\n * ctypedef npy_uint32     uint32_t\n * 
ctypedef npy_uint64     uint64_t             # <<<<<<<<<<<<<<\n * #ctypedef npy_uint96     uint96_t\n * #ctypedef npy_uint128    uint128_t\n */\ntypedef npy_uint64 __pyx_t_5numpy_uint64_t;\n\n/* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":790\n * #ctypedef npy_uint128    uint128_t\n * \n * ctypedef npy_float32    float32_t             # <<<<<<<<<<<<<<\n * ctypedef npy_float64    float64_t\n * #ctypedef npy_float80    float80_t\n */\ntypedef npy_float32 __pyx_t_5numpy_float32_t;\n\n/* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":791\n * \n * ctypedef npy_float32    float32_t\n * ctypedef npy_float64    float64_t             # <<<<<<<<<<<<<<\n * #ctypedef npy_float80    float80_t\n * #ctypedef npy_float128   float128_t\n */\ntypedef npy_float64 __pyx_t_5numpy_float64_t;\n\n/* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":800\n * # The int types are mapped a bit surprising --\n * # numpy.int corresponds to 'l' and numpy.long to 'q'\n * ctypedef npy_long       int_t             # <<<<<<<<<<<<<<\n * ctypedef npy_longlong   long_t\n * ctypedef npy_longlong   longlong_t\n */\ntypedef npy_long __pyx_t_5numpy_int_t;\n\n/* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":801\n * # numpy.int corresponds to 'l' and numpy.long to 'q'\n * ctypedef npy_long       int_t\n * ctypedef npy_longlong   long_t             # <<<<<<<<<<<<<<\n * ctypedef npy_longlong   longlong_t\n * \n */\ntypedef npy_longlong __pyx_t_5numpy_long_t;\n\n/* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":802\n * ctypedef npy_long       int_t\n * ctypedef npy_longlong   long_t\n * ctypedef npy_longlong   longlong_t             # <<<<<<<<<<<<<<\n * \n * ctypedef npy_ulong      uint_t\n */\ntypedef npy_longlong __pyx_t_5numpy_longlong_t;\n\n/* 
\"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":804\n * ctypedef npy_longlong   longlong_t\n * \n * ctypedef npy_ulong      uint_t             # <<<<<<<<<<<<<<\n * ctypedef npy_ulonglong  ulong_t\n * ctypedef npy_ulonglong  ulonglong_t\n */\ntypedef npy_ulong __pyx_t_5numpy_uint_t;\n\n/* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":805\n * \n * ctypedef npy_ulong      uint_t\n * ctypedef npy_ulonglong  ulong_t             # <<<<<<<<<<<<<<\n * ctypedef npy_ulonglong  ulonglong_t\n * \n */\ntypedef npy_ulonglong __pyx_t_5numpy_ulong_t;\n\n/* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":806\n * ctypedef npy_ulong      uint_t\n * ctypedef npy_ulonglong  ulong_t\n * ctypedef npy_ulonglong  ulonglong_t             # <<<<<<<<<<<<<<\n * \n * ctypedef npy_intp       intp_t\n */\ntypedef npy_ulonglong __pyx_t_5numpy_ulonglong_t;\n\n/* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":808\n * ctypedef npy_ulonglong  ulonglong_t\n * \n * ctypedef npy_intp       intp_t             # <<<<<<<<<<<<<<\n * ctypedef npy_uintp      uintp_t\n * \n */\ntypedef npy_intp __pyx_t_5numpy_intp_t;\n\n/* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":809\n * \n * ctypedef npy_intp       intp_t\n * ctypedef npy_uintp      uintp_t             # <<<<<<<<<<<<<<\n * \n * ctypedef npy_double     float_t\n */\ntypedef npy_uintp __pyx_t_5numpy_uintp_t;\n\n/* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":811\n * ctypedef npy_uintp      uintp_t\n * \n * ctypedef npy_double     float_t             # <<<<<<<<<<<<<<\n * ctypedef npy_double     double_t\n * ctypedef npy_longdouble longdouble_t\n */\ntypedef npy_double __pyx_t_5numpy_float_t;\n\n/* 
\"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":812\n * \n * ctypedef npy_double     float_t\n * ctypedef npy_double     double_t             # <<<<<<<<<<<<<<\n * ctypedef npy_longdouble longdouble_t\n * \n */\ntypedef npy_double __pyx_t_5numpy_double_t;\n\n/* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":813\n * ctypedef npy_double     float_t\n * ctypedef npy_double     double_t\n * ctypedef npy_longdouble longdouble_t             # <<<<<<<<<<<<<<\n * \n * ctypedef npy_cfloat      cfloat_t\n */\ntypedef npy_longdouble __pyx_t_5numpy_longdouble_t;\n/* Declarations.proto */\n#if CYTHON_CCOMPLEX\n  #ifdef __cplusplus\n    typedef ::std::complex< float > __pyx_t_float_complex;\n  #else\n    typedef float _Complex __pyx_t_float_complex;\n  #endif\n#else\n    typedef struct { float real, imag; } __pyx_t_float_complex;\n#endif\nstatic CYTHON_INLINE __pyx_t_float_complex __pyx_t_float_complex_from_parts(float, float);\n\n/* Declarations.proto */\n#if CYTHON_CCOMPLEX\n  #ifdef __cplusplus\n    typedef ::std::complex< double > __pyx_t_double_complex;\n  #else\n    typedef double _Complex __pyx_t_double_complex;\n  #endif\n#else\n    typedef struct { double real, imag; } __pyx_t_double_complex;\n#endif\nstatic CYTHON_INLINE __pyx_t_double_complex __pyx_t_double_complex_from_parts(double, double);\n\n\n/*--- Type declarations ---*/\nstruct __pyx_obj_11pysobatools_5_mask_RLEs;\nstruct __pyx_obj_11pysobatools_5_mask_Masks;\n\n/* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":815\n * ctypedef npy_longdouble longdouble_t\n * \n * ctypedef npy_cfloat      cfloat_t             # <<<<<<<<<<<<<<\n * ctypedef npy_cdouble     cdouble_t\n * ctypedef npy_clongdouble clongdouble_t\n */\ntypedef npy_cfloat __pyx_t_5numpy_cfloat_t;\n\n/* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":816\n * \n * ctypedef 
npy_cfloat      cfloat_t\n * ctypedef npy_cdouble     cdouble_t             # <<<<<<<<<<<<<<\n * ctypedef npy_clongdouble clongdouble_t\n * \n */\ntypedef npy_cdouble __pyx_t_5numpy_cdouble_t;\n\n/* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":817\n * ctypedef npy_cfloat      cfloat_t\n * ctypedef npy_cdouble     cdouble_t\n * ctypedef npy_clongdouble clongdouble_t             # <<<<<<<<<<<<<<\n * \n * ctypedef npy_cdouble     complex_t\n */\ntypedef npy_clongdouble __pyx_t_5numpy_clongdouble_t;\n\n/* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":819\n * ctypedef npy_clongdouble clongdouble_t\n * \n * ctypedef npy_cdouble     complex_t             # <<<<<<<<<<<<<<\n * \n * cdef inline object PyArray_MultiIterNew1(a):\n */\ntypedef npy_cdouble __pyx_t_5numpy_complex_t;\n\n/* \"pysobatools/_mask.pyx\":56\n * # python class to wrap RLE array in C\n * # the class handles the memory allocation and deallocation\n * cdef class RLEs:             # <<<<<<<<<<<<<<\n *     cdef RLE *_R\n *     cdef siz _n\n */\nstruct __pyx_obj_11pysobatools_5_mask_RLEs {\n  PyObject_HEAD\n  RLE *_R;\n  siz _n;\n};\n\n\n/* \"pysobatools/_mask.pyx\":77\n * # python class to wrap Mask array in C\n * # the class handles the memory allocation and deallocation\n * cdef class Masks:             # <<<<<<<<<<<<<<\n *     cdef byte *_mask\n *     cdef siz _h\n */\nstruct __pyx_obj_11pysobatools_5_mask_Masks {\n  PyObject_HEAD\n  byte *_mask;\n  siz _h;\n  siz _w;\n  siz _n;\n};\n\n\n/* --- Runtime support code (head) --- */\n/* Refnanny.proto */\n#ifndef CYTHON_REFNANNY\n  #define CYTHON_REFNANNY 0\n#endif\n#if CYTHON_REFNANNY\n  typedef struct {\n    void (*INCREF)(void*, PyObject*, int);\n    void (*DECREF)(void*, PyObject*, int);\n    void (*GOTREF)(void*, PyObject*, int);\n    void (*GIVEREF)(void*, PyObject*, int);\n    void* (*SetupContext)(const char*, int, const char*);\n    void 
(*FinishContext)(void**);\n  } __Pyx_RefNannyAPIStruct;\n  static __Pyx_RefNannyAPIStruct *__Pyx_RefNanny = NULL;\n  static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname);\n  #define __Pyx_RefNannyDeclarations void *__pyx_refnanny = NULL;\n#ifdef WITH_THREAD\n  #define __Pyx_RefNannySetupContext(name, acquire_gil)\\\n          if (acquire_gil) {\\\n              PyGILState_STATE __pyx_gilstate_save = PyGILState_Ensure();\\\n              __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\\\n              PyGILState_Release(__pyx_gilstate_save);\\\n          } else {\\\n              __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__);\\\n          }\n#else\n  #define __Pyx_RefNannySetupContext(name, acquire_gil)\\\n          __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__)\n#endif\n  #define __Pyx_RefNannyFinishContext()\\\n          __Pyx_RefNanny->FinishContext(&__pyx_refnanny)\n  #define __Pyx_INCREF(r)  __Pyx_RefNanny->INCREF(__pyx_refnanny, (PyObject *)(r), __LINE__)\n  #define __Pyx_DECREF(r)  __Pyx_RefNanny->DECREF(__pyx_refnanny, (PyObject *)(r), __LINE__)\n  #define __Pyx_GOTREF(r)  __Pyx_RefNanny->GOTREF(__pyx_refnanny, (PyObject *)(r), __LINE__)\n  #define __Pyx_GIVEREF(r) __Pyx_RefNanny->GIVEREF(__pyx_refnanny, (PyObject *)(r), __LINE__)\n  #define __Pyx_XINCREF(r)  do { if((r) != NULL) {__Pyx_INCREF(r); }} while(0)\n  #define __Pyx_XDECREF(r)  do { if((r) != NULL) {__Pyx_DECREF(r); }} while(0)\n  #define __Pyx_XGOTREF(r)  do { if((r) != NULL) {__Pyx_GOTREF(r); }} while(0)\n  #define __Pyx_XGIVEREF(r) do { if((r) != NULL) {__Pyx_GIVEREF(r);}} while(0)\n#else\n  #define __Pyx_RefNannyDeclarations\n  #define __Pyx_RefNannySetupContext(name, acquire_gil)\n  #define __Pyx_RefNannyFinishContext()\n  #define __Pyx_INCREF(r) Py_INCREF(r)\n  #define __Pyx_DECREF(r) Py_DECREF(r)\n  #define __Pyx_GOTREF(r)\n  #define __Pyx_GIVEREF(r)\n  #define __Pyx_XINCREF(r) 
Py_XINCREF(r)\n  #define __Pyx_XDECREF(r) Py_XDECREF(r)\n  #define __Pyx_XGOTREF(r)\n  #define __Pyx_XGIVEREF(r)\n#endif\n#define __Pyx_XDECREF_SET(r, v) do {\\\n        PyObject *tmp = (PyObject *) r;\\\n        r = v; __Pyx_XDECREF(tmp);\\\n    } while (0)\n#define __Pyx_DECREF_SET(r, v) do {\\\n        PyObject *tmp = (PyObject *) r;\\\n        r = v; __Pyx_DECREF(tmp);\\\n    } while (0)\n#define __Pyx_CLEAR(r)    do { PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);} while(0)\n#define __Pyx_XCLEAR(r)   do { if((r) != NULL) {PyObject* tmp = ((PyObject*)(r)); r = NULL; __Pyx_DECREF(tmp);}} while(0)\n\n/* PyObjectGetAttrStr.proto */\n#if CYTHON_USE_TYPE_SLOTS\nstatic CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name);\n#else\n#define __Pyx_PyObject_GetAttrStr(o,n) PyObject_GetAttr(o,n)\n#endif\n\n/* GetBuiltinName.proto */\nstatic PyObject *__Pyx_GetBuiltinName(PyObject *name);\n\n/* RaiseDoubleKeywords.proto */\nstatic void __Pyx_RaiseDoubleKeywordsError(const char* func_name, PyObject* kw_name);\n\n/* ParseKeywords.proto */\nstatic int __Pyx_ParseOptionalKeywords(PyObject *kwds, PyObject **argnames[],\\\n    PyObject *kwds2, PyObject *values[], Py_ssize_t num_pos_args,\\\n    const char* function_name);\n\n/* RaiseArgTupleInvalid.proto */\nstatic void __Pyx_RaiseArgtupleInvalid(const char* func_name, int exact,\n    Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found);\n\n/* IncludeStringH.proto */\n#include <string.h>\n\n/* BytesEquals.proto */\nstatic CYTHON_INLINE int __Pyx_PyBytes_Equals(PyObject* s1, PyObject* s2, int equals);\n\n/* UnicodeEquals.proto */\nstatic CYTHON_INLINE int __Pyx_PyUnicode_Equals(PyObject* s1, PyObject* s2, int equals);\n\n/* StrEquals.proto */\n#if PY_MAJOR_VERSION >= 3\n#define __Pyx_PyString_Equals __Pyx_PyUnicode_Equals\n#else\n#define __Pyx_PyString_Equals __Pyx_PyBytes_Equals\n#endif\n\n/* PyCFunctionFastCall.proto */\n#if CYTHON_FAST_PYCCALL\nstatic CYTHON_INLINE 
PyObject *__Pyx_PyCFunction_FastCall(PyObject *func, PyObject **args, Py_ssize_t nargs);\n#else\n#define __Pyx_PyCFunction_FastCall(func, args, nargs)  (assert(0), NULL)\n#endif\n\n/* PyFunctionFastCall.proto */\n#if CYTHON_FAST_PYCALL\n#define __Pyx_PyFunction_FastCall(func, args, nargs)\\\n    __Pyx_PyFunction_FastCallDict((func), (args), (nargs), NULL)\n#if 1 || PY_VERSION_HEX < 0x030600B1\nstatic PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, Py_ssize_t nargs, PyObject *kwargs);\n#else\n#define __Pyx_PyFunction_FastCallDict(func, args, nargs, kwargs) _PyFunction_FastCallDict(func, args, nargs, kwargs)\n#endif\n#define __Pyx_BUILD_ASSERT_EXPR(cond)\\\n    (sizeof(char [1 - 2*!(cond)]) - 1)\n#ifndef Py_MEMBER_SIZE\n#define Py_MEMBER_SIZE(type, member) sizeof(((type *)0)->member)\n#endif\n  static size_t __pyx_pyframe_localsplus_offset = 0;\n  #include \"frameobject.h\"\n  #define __Pxy_PyFrame_Initialize_Offsets()\\\n    ((void)__Pyx_BUILD_ASSERT_EXPR(sizeof(PyFrameObject) == offsetof(PyFrameObject, f_localsplus) + Py_MEMBER_SIZE(PyFrameObject, f_localsplus)),\\\n     (void)(__pyx_pyframe_localsplus_offset = ((size_t)PyFrame_Type.tp_basicsize) - Py_MEMBER_SIZE(PyFrameObject, f_localsplus)))\n  #define __Pyx_PyFrame_GetLocalsplus(frame)\\\n    (assert(__pyx_pyframe_localsplus_offset), (PyObject **)(((char *)(frame)) + __pyx_pyframe_localsplus_offset))\n#endif\n\n/* PyObjectCall.proto */\n#if CYTHON_COMPILING_IN_CPYTHON\nstatic CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw);\n#else\n#define __Pyx_PyObject_Call(func, arg, kw) PyObject_Call(func, arg, kw)\n#endif\n\n/* PyObjectCallMethO.proto */\n#if CYTHON_COMPILING_IN_CPYTHON\nstatic CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg);\n#endif\n\n/* PyObjectCallOneArg.proto */\nstatic CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg);\n\n/* PyThreadStateGet.proto */\n#if 
CYTHON_FAST_THREAD_STATE\n#define __Pyx_PyThreadState_declare  PyThreadState *__pyx_tstate;\n#define __Pyx_PyThreadState_assign  __pyx_tstate = __Pyx_PyThreadState_Current;\n#define __Pyx_PyErr_Occurred()  __pyx_tstate->curexc_type\n#else\n#define __Pyx_PyThreadState_declare\n#define __Pyx_PyThreadState_assign\n#define __Pyx_PyErr_Occurred()  PyErr_Occurred()\n#endif\n\n/* PyErrFetchRestore.proto */\n#if CYTHON_FAST_THREAD_STATE\n#define __Pyx_PyErr_Clear() __Pyx_ErrRestore(NULL, NULL, NULL)\n#define __Pyx_ErrRestoreWithState(type, value, tb)  __Pyx_ErrRestoreInState(PyThreadState_GET(), type, value, tb)\n#define __Pyx_ErrFetchWithState(type, value, tb)    __Pyx_ErrFetchInState(PyThreadState_GET(), type, value, tb)\n#define __Pyx_ErrRestore(type, value, tb)  __Pyx_ErrRestoreInState(__pyx_tstate, type, value, tb)\n#define __Pyx_ErrFetch(type, value, tb)    __Pyx_ErrFetchInState(__pyx_tstate, type, value, tb)\nstatic CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb);\nstatic CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb);\n#if CYTHON_COMPILING_IN_CPYTHON\n#define __Pyx_PyErr_SetNone(exc) (Py_INCREF(exc), __Pyx_ErrRestore((exc), NULL, NULL))\n#else\n#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc)\n#endif\n#else\n#define __Pyx_PyErr_Clear() PyErr_Clear()\n#define __Pyx_PyErr_SetNone(exc) PyErr_SetNone(exc)\n#define __Pyx_ErrRestoreWithState(type, value, tb)  PyErr_Restore(type, value, tb)\n#define __Pyx_ErrFetchWithState(type, value, tb)  PyErr_Fetch(type, value, tb)\n#define __Pyx_ErrRestoreInState(tstate, type, value, tb)  PyErr_Restore(type, value, tb)\n#define __Pyx_ErrFetchInState(tstate, type, value, tb)  PyErr_Fetch(type, value, tb)\n#define __Pyx_ErrRestore(type, value, tb)  PyErr_Restore(type, value, tb)\n#define __Pyx_ErrFetch(type, value, tb)  PyErr_Fetch(type, value, tb)\n#endif\n\n/* RaiseException.proto */\nstatic void 
__Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause);\n\n/* ExtTypeTest.proto */\nstatic CYTHON_INLINE int __Pyx_TypeTest(PyObject *obj, PyTypeObject *type);\n\n/* ArgTypeTest.proto */\n#define __Pyx_ArgTypeTest(obj, type, none_allowed, name, exact)\\\n    ((likely((Py_TYPE(obj) == type) | (none_allowed && (obj == Py_None)))) ? 1 :\\\n        __Pyx__ArgTypeTest(obj, type, name, exact))\nstatic int __Pyx__ArgTypeTest(PyObject *obj, PyTypeObject *type, const char *name, int exact);\n\n/* ListAppend.proto */\n#if CYTHON_USE_PYLIST_INTERNALS && CYTHON_ASSUME_SAFE_MACROS\nstatic CYTHON_INLINE int __Pyx_PyList_Append(PyObject* list, PyObject* x) {\n    PyListObject* L = (PyListObject*) list;\n    Py_ssize_t len = Py_SIZE(list);\n    if (likely(L->allocated > len) & likely(len > (L->allocated >> 1))) {\n        Py_INCREF(x);\n        PyList_SET_ITEM(list, len, x);\n        Py_SIZE(list) = len+1;\n        return 0;\n    }\n    return PyList_Append(list, x);\n}\n#else\n#define __Pyx_PyList_Append(L,x) PyList_Append(L,x)\n#endif\n\n/* PyIntBinop.proto */\n#if !CYTHON_COMPILING_IN_PYPY\nstatic PyObject* __Pyx_PyInt_AddObjC(PyObject *op1, PyObject *op2, long intval, int inplace, int zerodivision_check);\n#else\n#define __Pyx_PyInt_AddObjC(op1, op2, intval, inplace, zerodivision_check)\\\n    (inplace ? 
PyNumber_InPlaceAdd(op1, op2) : PyNumber_Add(op1, op2))\n#endif\n\n/* PyIntCompare.proto */\nstatic CYTHON_INLINE PyObject* __Pyx_PyInt_EqObjC(PyObject *op1, PyObject *op2, long intval, long inplace);\n\n/* PyDictVersioning.proto */\n#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS\n#define __PYX_DICT_VERSION_INIT  ((PY_UINT64_T) -1)\n#define __PYX_GET_DICT_VERSION(dict)  (((PyDictObject*)(dict))->ma_version_tag)\n#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var)\\\n    (version_var) = __PYX_GET_DICT_VERSION(dict);\\\n    (cache_var) = (value);\n#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP) {\\\n    static PY_UINT64_T __pyx_dict_version = 0;\\\n    static PyObject *__pyx_dict_cached_value = NULL;\\\n    if (likely(__PYX_GET_DICT_VERSION(DICT) == __pyx_dict_version)) {\\\n        (VAR) = __pyx_dict_cached_value;\\\n    } else {\\\n        (VAR) = __pyx_dict_cached_value = (LOOKUP);\\\n        __pyx_dict_version = __PYX_GET_DICT_VERSION(DICT);\\\n    }\\\n}\nstatic CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj);\nstatic CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj);\nstatic CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version);\n#else\n#define __PYX_GET_DICT_VERSION(dict)  (0)\n#define __PYX_UPDATE_DICT_CACHE(dict, value, cache_var, version_var)\n#define __PYX_PY_DICT_LOOKUP_IF_MODIFIED(VAR, DICT, LOOKUP)  (VAR) = (LOOKUP);\n#endif\n\n/* GetModuleGlobalName.proto */\n#if CYTHON_USE_DICT_VERSIONS\n#define __Pyx_GetModuleGlobalName(var, name)  {\\\n    static PY_UINT64_T __pyx_dict_version = 0;\\\n    static PyObject *__pyx_dict_cached_value = NULL;\\\n    (var) = (likely(__pyx_dict_version == __PYX_GET_DICT_VERSION(__pyx_d))) ?\\\n        (likely(__pyx_dict_cached_value) ? 
__Pyx_NewRef(__pyx_dict_cached_value) : __Pyx_GetBuiltinName(name)) :\\\n        __Pyx__GetModuleGlobalName(name, &__pyx_dict_version, &__pyx_dict_cached_value);\\\n}\n#define __Pyx_GetModuleGlobalNameUncached(var, name)  {\\\n    PY_UINT64_T __pyx_dict_version;\\\n    PyObject *__pyx_dict_cached_value;\\\n    (var) = __Pyx__GetModuleGlobalName(name, &__pyx_dict_version, &__pyx_dict_cached_value);\\\n}\nstatic PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_version, PyObject **dict_cached_value);\n#else\n#define __Pyx_GetModuleGlobalName(var, name)  (var) = __Pyx__GetModuleGlobalName(name)\n#define __Pyx_GetModuleGlobalNameUncached(var, name)  (var) = __Pyx__GetModuleGlobalName(name)\nstatic CYTHON_INLINE PyObject *__Pyx__GetModuleGlobalName(PyObject *name);\n#endif\n\n/* DictGetItem.proto */\n#if PY_MAJOR_VERSION >= 3 && !CYTHON_COMPILING_IN_PYPY\nstatic PyObject *__Pyx_PyDict_GetItem(PyObject *d, PyObject* key);\n#define __Pyx_PyObject_Dict_GetItem(obj, name)\\\n    (likely(PyDict_CheckExact(obj)) ?\\\n     __Pyx_PyDict_GetItem(obj, name) : PyObject_GetItem(obj, name))\n#else\n#define __Pyx_PyDict_GetItem(d, key) PyObject_GetItem(d, key)\n#define __Pyx_PyObject_Dict_GetItem(obj, name)  PyObject_GetItem(obj, name)\n#endif\n\n/* PyObjectCall2Args.proto */\nstatic CYTHON_UNUSED PyObject* __Pyx_PyObject_Call2Args(PyObject* function, PyObject* arg1, PyObject* arg2);\n\n/* GetItemInt.proto */\n#define __Pyx_GetItemInt(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\\\n    (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\\\n    __Pyx_GetItemInt_Fast(o, (Py_ssize_t)i, is_list, wraparound, boundscheck) :\\\n    (is_list ? 
(PyErr_SetString(PyExc_IndexError, \"list index out of range\"), (PyObject*)NULL) :\\\n               __Pyx_GetItemInt_Generic(o, to_py_func(i))))\n#define __Pyx_GetItemInt_List(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\\\n    (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\\\n    __Pyx_GetItemInt_List_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) :\\\n    (PyErr_SetString(PyExc_IndexError, \"list index out of range\"), (PyObject*)NULL))\nstatic CYTHON_INLINE PyObject *__Pyx_GetItemInt_List_Fast(PyObject *o, Py_ssize_t i,\n                                                              int wraparound, int boundscheck);\n#define __Pyx_GetItemInt_Tuple(o, i, type, is_signed, to_py_func, is_list, wraparound, boundscheck)\\\n    (__Pyx_fits_Py_ssize_t(i, type, is_signed) ?\\\n    __Pyx_GetItemInt_Tuple_Fast(o, (Py_ssize_t)i, wraparound, boundscheck) :\\\n    (PyErr_SetString(PyExc_IndexError, \"tuple index out of range\"), (PyObject*)NULL))\nstatic CYTHON_INLINE PyObject *__Pyx_GetItemInt_Tuple_Fast(PyObject *o, Py_ssize_t i,\n                                                              int wraparound, int boundscheck);\nstatic PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j);\nstatic CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i,\n                                                     int is_list, int wraparound, int boundscheck);\n\n/* IsLittleEndian.proto */\nstatic CYTHON_INLINE int __Pyx_Is_Little_Endian(void);\n\n/* BufferFormatCheck.proto */\nstatic const char* __Pyx_BufFmt_CheckString(__Pyx_BufFmt_Context* ctx, const char* ts);\nstatic void __Pyx_BufFmt_Init(__Pyx_BufFmt_Context* ctx,\n                              __Pyx_BufFmt_StackElem* stack,\n                              __Pyx_TypeInfo* type);\n\n/* BufferGetAndValidate.proto */\n#define __Pyx_GetBufferAndValidate(buf, obj, dtype, flags, nd, cast, stack)\\\n    ((obj == Py_None || obj == NULL) ?\\\n    (__Pyx_ZeroBuffer(buf), 0) :\\\n    
__Pyx__GetBufferAndValidate(buf, obj, dtype, flags, nd, cast, stack))\nstatic int  __Pyx__GetBufferAndValidate(Py_buffer* buf, PyObject* obj,\n    __Pyx_TypeInfo* dtype, int flags, int nd, int cast, __Pyx_BufFmt_StackElem* stack);\nstatic void __Pyx_ZeroBuffer(Py_buffer* buf);\nstatic CYTHON_INLINE void __Pyx_SafeReleaseBuffer(Py_buffer* info);\nstatic Py_ssize_t __Pyx_minusones[] = { -1, -1, -1, -1, -1, -1, -1, -1 };\nstatic Py_ssize_t __Pyx_zeros[] = { 0, 0, 0, 0, 0, 0, 0, 0 };\n\n/* ListCompAppend.proto */\n#if CYTHON_USE_PYLIST_INTERNALS && CYTHON_ASSUME_SAFE_MACROS\nstatic CYTHON_INLINE int __Pyx_ListComp_Append(PyObject* list, PyObject* x) {\n    PyListObject* L = (PyListObject*) list;\n    Py_ssize_t len = Py_SIZE(list);\n    if (likely(L->allocated > len)) {\n        Py_INCREF(x);\n        PyList_SET_ITEM(list, len, x);\n        Py_SIZE(list) = len+1;\n        return 0;\n    }\n    return PyList_Append(list, x);\n}\n#else\n#define __Pyx_ListComp_Append(L,x) PyList_Append(L,x)\n#endif\n\n/* FetchCommonType.proto */\nstatic PyTypeObject* __Pyx_FetchCommonType(PyTypeObject* type);\n\n/* CythonFunction.proto */\n#define __Pyx_CyFunction_USED 1\n#define __Pyx_CYFUNCTION_STATICMETHOD  0x01\n#define __Pyx_CYFUNCTION_CLASSMETHOD   0x02\n#define __Pyx_CYFUNCTION_CCLASS        0x04\n#define __Pyx_CyFunction_GetClosure(f)\\\n    (((__pyx_CyFunctionObject *) (f))->func_closure)\n#define __Pyx_CyFunction_GetClassObj(f)\\\n    (((__pyx_CyFunctionObject *) (f))->func_classobj)\n#define __Pyx_CyFunction_Defaults(type, f)\\\n    ((type *)(((__pyx_CyFunctionObject *) (f))->defaults))\n#define __Pyx_CyFunction_SetDefaultsGetter(f, g)\\\n    ((__pyx_CyFunctionObject *) (f))->defaults_getter = (g)\ntypedef struct {\n    PyCFunctionObject func;\n#if PY_VERSION_HEX < 0x030500A0\n    PyObject *func_weakreflist;\n#endif\n    PyObject *func_dict;\n    PyObject *func_name;\n    PyObject *func_qualname;\n    PyObject *func_doc;\n    PyObject *func_globals;\n    PyObject *func_code;\n  
  PyObject *func_closure;\n    PyObject *func_classobj;\n    void *defaults;\n    int defaults_pyobjects;\n    int flags;\n    PyObject *defaults_tuple;\n    PyObject *defaults_kwdict;\n    PyObject *(*defaults_getter)(PyObject *);\n    PyObject *func_annotations;\n} __pyx_CyFunctionObject;\nstatic PyTypeObject *__pyx_CyFunctionType = 0;\n#define __Pyx_CyFunction_Check(obj)  (__Pyx_TypeCheck(obj, __pyx_CyFunctionType))\n#define __Pyx_CyFunction_NewEx(ml, flags, qualname, self, module, globals, code)\\\n    __Pyx_CyFunction_New(__pyx_CyFunctionType, ml, flags, qualname, self, module, globals, code)\nstatic PyObject *__Pyx_CyFunction_New(PyTypeObject *, PyMethodDef *ml,\n                                      int flags, PyObject* qualname,\n                                      PyObject *self,\n                                      PyObject *module, PyObject *globals,\n                                      PyObject* code);\nstatic CYTHON_INLINE void *__Pyx_CyFunction_InitDefaults(PyObject *m,\n                                                         size_t size,\n                                                         int pyobjects);\nstatic CYTHON_INLINE void __Pyx_CyFunction_SetDefaultsTuple(PyObject *m,\n                                                            PyObject *tuple);\nstatic CYTHON_INLINE void __Pyx_CyFunction_SetDefaultsKwDict(PyObject *m,\n                                                             PyObject *dict);\nstatic CYTHON_INLINE void __Pyx_CyFunction_SetAnnotationsDict(PyObject *m,\n                                                              PyObject *dict);\nstatic int __pyx_CyFunction_init(void);\n\n/* BufferFallbackError.proto */\nstatic void __Pyx_RaiseBufferFallbackError(void);\n\n/* None.proto */\nstatic CYTHON_INLINE Py_ssize_t __Pyx_div_Py_ssize_t(Py_ssize_t, Py_ssize_t);\n\n/* BufferIndexError.proto */\nstatic void __Pyx_RaiseBufferIndexError(int axis);\n\n#define __Pyx_BufPtrStrided1d(type, buf, i0, s0) (type)((char*)buf + i0 * 
s0)\n/* PySequenceContains.proto */\nstatic CYTHON_INLINE int __Pyx_PySequence_ContainsTF(PyObject* item, PyObject* seq, int eq) {\n    int result = PySequence_Contains(seq, item);\n    return unlikely(result < 0) ? result : (result == (eq == Py_EQ));\n}\n\n/* RaiseTooManyValuesToUnpack.proto */\nstatic CYTHON_INLINE void __Pyx_RaiseTooManyValuesError(Py_ssize_t expected);\n\n/* RaiseNeedMoreValuesToUnpack.proto */\nstatic CYTHON_INLINE void __Pyx_RaiseNeedMoreValuesError(Py_ssize_t index);\n\n/* RaiseNoneIterError.proto */\nstatic CYTHON_INLINE void __Pyx_RaiseNoneNotIterableError(void);\n\n/* GetTopmostException.proto */\n#if CYTHON_USE_EXC_INFO_STACK\nstatic _PyErr_StackItem * __Pyx_PyErr_GetTopmostException(PyThreadState *tstate);\n#endif\n\n/* SaveResetException.proto */\n#if CYTHON_FAST_THREAD_STATE\n#define __Pyx_ExceptionSave(type, value, tb)  __Pyx__ExceptionSave(__pyx_tstate, type, value, tb)\nstatic CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb);\n#define __Pyx_ExceptionReset(type, value, tb)  __Pyx__ExceptionReset(__pyx_tstate, type, value, tb)\nstatic CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb);\n#else\n#define __Pyx_ExceptionSave(type, value, tb)   PyErr_GetExcInfo(type, value, tb)\n#define __Pyx_ExceptionReset(type, value, tb)  PyErr_SetExcInfo(type, value, tb)\n#endif\n\n/* PyErrExceptionMatches.proto */\n#if CYTHON_FAST_THREAD_STATE\n#define __Pyx_PyErr_ExceptionMatches(err) __Pyx_PyErr_ExceptionMatchesInState(__pyx_tstate, err)\nstatic CYTHON_INLINE int __Pyx_PyErr_ExceptionMatchesInState(PyThreadState* tstate, PyObject* err);\n#else\n#define __Pyx_PyErr_ExceptionMatches(err)  PyErr_ExceptionMatches(err)\n#endif\n\n/* GetException.proto */\n#if CYTHON_FAST_THREAD_STATE\n#define __Pyx_GetException(type, value, tb)  __Pyx__GetException(__pyx_tstate, type, value, tb)\nstatic int __Pyx__GetException(PyThreadState 
*tstate, PyObject **type, PyObject **value, PyObject **tb);\n#else\nstatic int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb);\n#endif\n\n/* PyObject_GenericGetAttrNoDict.proto */\n#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000\nstatic CYTHON_INLINE PyObject* __Pyx_PyObject_GenericGetAttrNoDict(PyObject* obj, PyObject* attr_name);\n#else\n#define __Pyx_PyObject_GenericGetAttrNoDict PyObject_GenericGetAttr\n#endif\n\n/* PyObject_GenericGetAttr.proto */\n#if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000\nstatic PyObject* __Pyx_PyObject_GenericGetAttr(PyObject* obj, PyObject* attr_name);\n#else\n#define __Pyx_PyObject_GenericGetAttr PyObject_GenericGetAttr\n#endif\n\n/* SetupReduce.proto */\nstatic int __Pyx_setup_reduce(PyObject* type_obj);\n\n/* TypeImport.proto */\n#ifndef __PYX_HAVE_RT_ImportType_proto\n#define __PYX_HAVE_RT_ImportType_proto\nenum __Pyx_ImportType_CheckSize {\n   __Pyx_ImportType_CheckSize_Error = 0,\n   __Pyx_ImportType_CheckSize_Warn = 1,\n   __Pyx_ImportType_CheckSize_Ignore = 2\n};\nstatic PyTypeObject *__Pyx_ImportType(PyObject* module, const char *module_name, const char *class_name, size_t size, enum __Pyx_ImportType_CheckSize check_size);\n#endif\n\n/* Import.proto */\nstatic PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level);\n\n/* CLineInTraceback.proto */\n#ifdef CYTHON_CLINE_IN_TRACEBACK\n#define __Pyx_CLineForTraceback(tstate, c_line)  (((CYTHON_CLINE_IN_TRACEBACK)) ? 
c_line : 0)\n#else\nstatic int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line);\n#endif\n\n/* CodeObjectCache.proto */\ntypedef struct {\n    PyCodeObject* code_object;\n    int code_line;\n} __Pyx_CodeObjectCacheEntry;\nstruct __Pyx_CodeObjectCache {\n    int count;\n    int max_count;\n    __Pyx_CodeObjectCacheEntry* entries;\n};\nstatic struct __Pyx_CodeObjectCache __pyx_code_cache = {0,0,NULL};\nstatic int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line);\nstatic PyCodeObject *__pyx_find_code_object(int code_line);\nstatic void __pyx_insert_code_object(int code_line, PyCodeObject* code_object);\n\n/* AddTraceback.proto */\nstatic void __Pyx_AddTraceback(const char *funcname, int c_line,\n                               int py_line, const char *filename);\n\n/* BufferStructDeclare.proto */\ntypedef struct {\n  Py_ssize_t shape, strides, suboffsets;\n} __Pyx_Buf_DimInfo;\ntypedef struct {\n  size_t refcount;\n  Py_buffer pybuffer;\n} __Pyx_Buffer;\ntypedef struct {\n  __Pyx_Buffer *rcbuffer;\n  char *data;\n  __Pyx_Buf_DimInfo diminfo[8];\n} __Pyx_LocalBuf_ND;\n\n#if PY_MAJOR_VERSION < 3\n    static int __Pyx_GetBuffer(PyObject *obj, Py_buffer *view, int flags);\n    static void __Pyx_ReleaseBuffer(Py_buffer *view);\n#else\n    #define __Pyx_GetBuffer PyObject_GetBuffer\n    #define __Pyx_ReleaseBuffer PyBuffer_Release\n#endif\n\n\n/* CIntToPy.proto */\nstatic CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value);\n\n/* CIntToPy.proto */\nstatic CYTHON_INLINE PyObject* __Pyx_PyInt_From_siz(siz value);\n\n/* CIntToPy.proto */\nstatic CYTHON_INLINE PyObject* __Pyx_PyInt_From_Py_intptr_t(Py_intptr_t value);\n\n/* RealImag.proto */\n#if CYTHON_CCOMPLEX\n  #ifdef __cplusplus\n    #define __Pyx_CREAL(z) ((z).real())\n    #define __Pyx_CIMAG(z) ((z).imag())\n  #else\n    #define __Pyx_CREAL(z) (__real__(z))\n    #define __Pyx_CIMAG(z) (__imag__(z))\n  #endif\n#else\n    #define __Pyx_CREAL(z) ((z).real)\n    #define 
__Pyx_CIMAG(z) ((z).imag)\n#endif\n#if defined(__cplusplus) && CYTHON_CCOMPLEX\\\n        && (defined(_WIN32) || defined(__clang__) || (defined(__GNUC__) && (__GNUC__ >= 5 || __GNUC__ == 4 && __GNUC_MINOR__ >= 4 )) || __cplusplus >= 201103)\n    #define __Pyx_SET_CREAL(z,x) ((z).real(x))\n    #define __Pyx_SET_CIMAG(z,y) ((z).imag(y))\n#else\n    #define __Pyx_SET_CREAL(z,x) __Pyx_CREAL(z) = (x)\n    #define __Pyx_SET_CIMAG(z,y) __Pyx_CIMAG(z) = (y)\n#endif\n\n/* Arithmetic.proto */\n#if CYTHON_CCOMPLEX\n    #define __Pyx_c_eq_float(a, b)   ((a)==(b))\n    #define __Pyx_c_sum_float(a, b)  ((a)+(b))\n    #define __Pyx_c_diff_float(a, b) ((a)-(b))\n    #define __Pyx_c_prod_float(a, b) ((a)*(b))\n    #define __Pyx_c_quot_float(a, b) ((a)/(b))\n    #define __Pyx_c_neg_float(a)     (-(a))\n  #ifdef __cplusplus\n    #define __Pyx_c_is_zero_float(z) ((z)==(float)0)\n    #define __Pyx_c_conj_float(z)    (::std::conj(z))\n    #if 1\n        #define __Pyx_c_abs_float(z)     (::std::abs(z))\n        #define __Pyx_c_pow_float(a, b)  (::std::pow(a, b))\n    #endif\n  #else\n    #define __Pyx_c_is_zero_float(z) ((z)==0)\n    #define __Pyx_c_conj_float(z)    (conjf(z))\n    #if 1\n        #define __Pyx_c_abs_float(z)     (cabsf(z))\n        #define __Pyx_c_pow_float(a, b)  (cpowf(a, b))\n    #endif\n #endif\n#else\n    static CYTHON_INLINE int __Pyx_c_eq_float(__pyx_t_float_complex, __pyx_t_float_complex);\n    static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_sum_float(__pyx_t_float_complex, __pyx_t_float_complex);\n    static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_diff_float(__pyx_t_float_complex, __pyx_t_float_complex);\n    static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_prod_float(__pyx_t_float_complex, __pyx_t_float_complex);\n    static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_quot_float(__pyx_t_float_complex, __pyx_t_float_complex);\n    static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_neg_float(__pyx_t_float_complex);\n    static CYTHON_INLINE int 
__Pyx_c_is_zero_float(__pyx_t_float_complex);\n    static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_conj_float(__pyx_t_float_complex);\n    #if 1\n        static CYTHON_INLINE float __Pyx_c_abs_float(__pyx_t_float_complex);\n        static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_pow_float(__pyx_t_float_complex, __pyx_t_float_complex);\n    #endif\n#endif\n\n/* Arithmetic.proto */\n#if CYTHON_CCOMPLEX\n    #define __Pyx_c_eq_double(a, b)   ((a)==(b))\n    #define __Pyx_c_sum_double(a, b)  ((a)+(b))\n    #define __Pyx_c_diff_double(a, b) ((a)-(b))\n    #define __Pyx_c_prod_double(a, b) ((a)*(b))\n    #define __Pyx_c_quot_double(a, b) ((a)/(b))\n    #define __Pyx_c_neg_double(a)     (-(a))\n  #ifdef __cplusplus\n    #define __Pyx_c_is_zero_double(z) ((z)==(double)0)\n    #define __Pyx_c_conj_double(z)    (::std::conj(z))\n    #if 1\n        #define __Pyx_c_abs_double(z)     (::std::abs(z))\n        #define __Pyx_c_pow_double(a, b)  (::std::pow(a, b))\n    #endif\n  #else\n    #define __Pyx_c_is_zero_double(z) ((z)==0)\n    #define __Pyx_c_conj_double(z)    (conj(z))\n    #if 1\n        #define __Pyx_c_abs_double(z)     (cabs(z))\n        #define __Pyx_c_pow_double(a, b)  (cpow(a, b))\n    #endif\n #endif\n#else\n    static CYTHON_INLINE int __Pyx_c_eq_double(__pyx_t_double_complex, __pyx_t_double_complex);\n    static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_sum_double(__pyx_t_double_complex, __pyx_t_double_complex);\n    static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_diff_double(__pyx_t_double_complex, __pyx_t_double_complex);\n    static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_prod_double(__pyx_t_double_complex, __pyx_t_double_complex);\n    static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_quot_double(__pyx_t_double_complex, __pyx_t_double_complex);\n    static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_neg_double(__pyx_t_double_complex);\n    static CYTHON_INLINE int __Pyx_c_is_zero_double(__pyx_t_double_complex);\n    static CYTHON_INLINE 
__pyx_t_double_complex __Pyx_c_conj_double(__pyx_t_double_complex);\n    #if 1\n        static CYTHON_INLINE double __Pyx_c_abs_double(__pyx_t_double_complex);\n        static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_pow_double(__pyx_t_double_complex, __pyx_t_double_complex);\n    #endif\n#endif\n\n/* CIntToPy.proto */\nstatic CYTHON_INLINE PyObject* __Pyx_PyInt_From_int(int value);\n\n/* CIntToPy.proto */\nstatic CYTHON_INLINE PyObject* __Pyx_PyInt_From_enum__NPY_TYPES(enum NPY_TYPES value);\n\n/* CIntFromPy.proto */\nstatic CYTHON_INLINE siz __Pyx_PyInt_As_siz(PyObject *);\n\n/* CIntFromPy.proto */\nstatic CYTHON_INLINE size_t __Pyx_PyInt_As_size_t(PyObject *);\n\n/* CIntFromPy.proto */\nstatic CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *);\n\n/* CIntFromPy.proto */\nstatic CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *);\n\n/* FastTypeChecks.proto */\n#if CYTHON_COMPILING_IN_CPYTHON\n#define __Pyx_TypeCheck(obj, type) __Pyx_IsSubtype(Py_TYPE(obj), (PyTypeObject *)type)\nstatic CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b);\nstatic CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject *type);\nstatic CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *type1, PyObject *type2);\n#else\n#define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type)\n#define __Pyx_PyErr_GivenExceptionMatches(err, type) PyErr_GivenExceptionMatches(err, type)\n#define __Pyx_PyErr_GivenExceptionMatches2(err, type1, type2) (PyErr_GivenExceptionMatches(err, type1) || PyErr_GivenExceptionMatches(err, type2))\n#endif\n#define __Pyx_PyException_Check(obj) __Pyx_TypeCheck(obj, PyExc_Exception)\n\n/* CheckBinaryVersion.proto */\nstatic int __Pyx_check_binary_version(void);\n\n/* InitStrings.proto */\nstatic int __Pyx_InitStrings(__Pyx_StringTabEntry *t);\n\n\n/* Module declarations from 'cpython.buffer' */\n\n/* Module declarations from 'libc.string' */\n\n/* Module declarations from 'libc.stdio' 
*/\n\n/* Module declarations from '__builtin__' */\n\n/* Module declarations from 'cpython.type' */\nstatic PyTypeObject *__pyx_ptype_7cpython_4type_type = 0;\n\n/* Module declarations from 'cpython' */\n\n/* Module declarations from 'cpython.object' */\n\n/* Module declarations from 'cpython.ref' */\n\n/* Module declarations from 'cpython.mem' */\n\n/* Module declarations from 'numpy' */\n\n/* Module declarations from 'numpy' */\nstatic PyTypeObject *__pyx_ptype_5numpy_dtype = 0;\nstatic PyTypeObject *__pyx_ptype_5numpy_flatiter = 0;\nstatic PyTypeObject *__pyx_ptype_5numpy_broadcast = 0;\nstatic PyTypeObject *__pyx_ptype_5numpy_ndarray = 0;\nstatic PyTypeObject *__pyx_ptype_5numpy_ufunc = 0;\nstatic CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *, char *, char *, int *); /*proto*/\nstatic CYTHON_INLINE int __pyx_f_5numpy_import_array(void); /*proto*/\n\n/* Module declarations from 'libc.stdlib' */\n\n/* Module declarations from 'pysobatools._mask' */\nstatic PyTypeObject *__pyx_ptype_11pysobatools_5_mask_RLEs = 0;\nstatic PyTypeObject *__pyx_ptype_11pysobatools_5_mask_Masks = 0;\nstatic __Pyx_TypeInfo __Pyx_TypeInfo_nn___pyx_t_5numpy_uint8_t = { \"uint8_t\", NULL, sizeof(__pyx_t_5numpy_uint8_t), { 0 }, 0, IS_UNSIGNED(__pyx_t_5numpy_uint8_t) ? 'U' : 'I', IS_UNSIGNED(__pyx_t_5numpy_uint8_t), 0 };\nstatic __Pyx_TypeInfo __Pyx_TypeInfo_nn___pyx_t_5numpy_double_t = { \"double_t\", NULL, sizeof(__pyx_t_5numpy_double_t), { 0 }, 0, 'R', 0, 0 };\nstatic __Pyx_TypeInfo __Pyx_TypeInfo_nn___pyx_t_5numpy_uint32_t = { \"uint32_t\", NULL, sizeof(__pyx_t_5numpy_uint32_t), { 0 }, 0, IS_UNSIGNED(__pyx_t_5numpy_uint32_t) ? 
'U' : 'I', IS_UNSIGNED(__pyx_t_5numpy_uint32_t), 0 };\n#define __Pyx_MODULE_NAME \"pysobatools._mask\"\nextern int __pyx_module_is_main_pysobatools___mask;\nint __pyx_module_is_main_pysobatools___mask = 0;\n\n/* Implementation of 'pysobatools._mask' */\nstatic PyObject *__pyx_builtin_range;\nstatic PyObject *__pyx_builtin_AttributeError;\nstatic PyObject *__pyx_builtin_TypeError;\nstatic PyObject *__pyx_builtin_enumerate;\nstatic PyObject *__pyx_builtin_ValueError;\nstatic PyObject *__pyx_builtin_RuntimeError;\nstatic PyObject *__pyx_builtin_ImportError;\nstatic const char __pyx_k_F[] = \"F\";\nstatic const char __pyx_k_N[] = \"N\";\nstatic const char __pyx_k_R[] = \"R\";\nstatic const char __pyx_k_a[] = \"_a\";\nstatic const char __pyx_k_h[] = \"h\";\nstatic const char __pyx_k_i[] = \"i\";\nstatic const char __pyx_k_j[] = \"j\";\nstatic const char __pyx_k_m[] = \"m\";\nstatic const char __pyx_k_n[] = \"n\";\nstatic const char __pyx_k_p[] = \"p\";\nstatic const char __pyx_k_w[] = \"w\";\nstatic const char __pyx_k_Rs[] = \"Rs\";\nstatic const char __pyx_k_bb[] = \"bb\";\nstatic const char __pyx_k_dt[] = \"dt\";\nstatic const char __pyx_k_gt[] = \"gt\";\nstatic const char __pyx_k_np[] = \"np\";\nstatic const char __pyx_k_a_2[] = \"a\";\nstatic const char __pyx_k_all[] = \"all\";\nstatic const char __pyx_k_iou[] = \"_iou\";\nstatic const char __pyx_k_len[] = \"_len\";\nstatic const char __pyx_k_obj[] = \"obj\";\nstatic const char __pyx_k_sys[] = \"sys\";\nstatic const char __pyx_k_RLEs[] = \"RLEs\";\nstatic const char __pyx_k_area[] = \"area\";\nstatic const char __pyx_k_bb_2[] = \"_bb\";\nstatic const char __pyx_k_cnts[] = \"cnts\";\nstatic const char __pyx_k_data[] = \"data\";\nstatic const char __pyx_k_main[] = \"__main__\";\nstatic const char __pyx_k_mask[] = \"mask\";\nstatic const char __pyx_k_name[] = \"__name__\";\nstatic const char __pyx_k_objs[] = \"objs\";\nstatic const char __pyx_k_poly[] = \"poly\";\nstatic const char __pyx_k_size[] = \"size\";\nstatic 
const char __pyx_k_test[] = \"__test__\";\nstatic const char __pyx_k_utf8[] = \"utf8\";\nstatic const char __pyx_k_Masks[] = \"Masks\";\nstatic const char __pyx_k_array[] = \"array\";\nstatic const char __pyx_k_bbIou[] = \"_bbIou\";\nstatic const char __pyx_k_dtype[] = \"dtype\";\nstatic const char __pyx_k_iou_2[] = \"iou\";\nstatic const char __pyx_k_isbox[] = \"isbox\";\nstatic const char __pyx_k_isrle[] = \"isrle\";\nstatic const char __pyx_k_masks[] = \"masks\";\nstatic const char __pyx_k_merge[] = \"merge\";\nstatic const char __pyx_k_numpy[] = \"numpy\";\nstatic const char __pyx_k_order[] = \"order\";\nstatic const char __pyx_k_pyobj[] = \"pyobj\";\nstatic const char __pyx_k_range[] = \"range\";\nstatic const char __pyx_k_shape[] = \"shape\";\nstatic const char __pyx_k_uint8[] = \"uint8\";\nstatic const char __pyx_k_zeros[] = \"zeros\";\nstatic const char __pyx_k_astype[] = \"astype\";\nstatic const char __pyx_k_author[] = \"__author__\";\nstatic const char __pyx_k_counts[] = \"counts\";\nstatic const char __pyx_k_decode[] = \"decode\";\nstatic const char __pyx_k_double[] = \"double\";\nstatic const char __pyx_k_encode[] = \"encode\";\nstatic const char __pyx_k_frBbox[] = \"frBbox\";\nstatic const char __pyx_k_frPoly[] = \"frPoly\";\nstatic const char __pyx_k_import[] = \"__import__\";\nstatic const char __pyx_k_iouFun[] = \"_iouFun\";\nstatic const char __pyx_k_reduce[] = \"__reduce__\";\nstatic const char __pyx_k_rleIou[] = \"_rleIou\";\nstatic const char __pyx_k_toBbox[] = \"toBbox\";\nstatic const char __pyx_k_ucRles[] = \"ucRles\";\nstatic const char __pyx_k_uint32[] = \"uint32\";\nstatic const char __pyx_k_iscrowd[] = \"iscrowd\";\nstatic const char __pyx_k_np_poly[] = \"np_poly\";\nstatic const char __pyx_k_preproc[] = \"_preproc\";\nstatic const char __pyx_k_reshape[] = \"reshape\";\nstatic const char __pyx_k_rleObjs[] = \"rleObjs\";\nstatic const char __pyx_k_tsungyi[] = \"tsungyi\";\nstatic const char __pyx_k_c_string[] = \"c_string\";\nstatic const 
char __pyx_k_frString[] = \"_frString\";\nstatic const char __pyx_k_getstate[] = \"__getstate__\";\nstatic const char __pyx_k_setstate[] = \"__setstate__\";\nstatic const char __pyx_k_toString[] = \"_toString\";\nstatic const char __pyx_k_TypeError[] = \"TypeError\";\nstatic const char __pyx_k_enumerate[] = \"enumerate\";\nstatic const char __pyx_k_intersect[] = \"intersect\";\nstatic const char __pyx_k_py_string[] = \"py_string\";\nstatic const char __pyx_k_pyiscrowd[] = \"pyiscrowd\";\nstatic const char __pyx_k_reduce_ex[] = \"__reduce_ex__\";\nstatic const char __pyx_k_ValueError[] = \"ValueError\";\nstatic const char __pyx_k_ImportError[] = \"ImportError\";\nstatic const char __pyx_k_frPyObjects[] = \"frPyObjects\";\nstatic const char __pyx_k_RuntimeError[] = \"RuntimeError\";\nstatic const char __pyx_k_version_info[] = \"version_info\";\nstatic const char __pyx_k_reduce_cython[] = \"__reduce_cython__\";\nstatic const char __pyx_k_AttributeError[] = \"AttributeError\";\nstatic const char __pyx_k_PYTHON_VERSION[] = \"PYTHON_VERSION\";\nstatic const char __pyx_k_iou_locals__len[] = \"iou.<locals>._len\";\nstatic const char __pyx_k_setstate_cython[] = \"__setstate_cython__\";\nstatic const char __pyx_k_frUncompressedRLE[] = \"frUncompressedRLE\";\nstatic const char __pyx_k_iou_locals__bbIou[] = \"iou.<locals>._bbIou\";\nstatic const char __pyx_k_pysobatools__mask[] = \"pysobatools._mask\";\nstatic const char __pyx_k_cline_in_traceback[] = \"cline_in_traceback\";\nstatic const char __pyx_k_iou_locals__rleIou[] = \"iou.<locals>._rleIou\";\nstatic const char __pyx_k_iou_locals__preproc[] = \"iou.<locals>._preproc\";\nstatic const char __pyx_k_pysobatools__mask_pyx[] = \"pysobatools/_mask.pyx\";\nstatic const char __pyx_k_input_data_type_not_allowed[] = \"input data type not allowed.\";\nstatic const char __pyx_k_input_type_is_not_supported[] = \"input type is not supported.\";\nstatic const char __pyx_k_ndarray_is_not_C_contiguous[] = \"ndarray is not C 
contiguous\";\nstatic const char __pyx_k_Python_version_must_be_2_or_3[] = \"Python version must be 2 or 3\";\nstatic const char __pyx_k_numpy_core_multiarray_failed_to[] = \"numpy.core.multiarray failed to import\";\nstatic const char __pyx_k_numpy_ndarray_input_is_only_for[] = \"numpy ndarray input is only for *bounding boxes* and should have Nx4 dimension\";\nstatic const char __pyx_k_unknown_dtype_code_in_numpy_pxd[] = \"unknown dtype code in numpy.pxd (%d)\";\nstatic const char __pyx_k_unrecognized_type_The_following[] = \"unrecognized type.  The following type: RLEs (rle), np.ndarray (box), and list (box) are supported.\";\nstatic const char __pyx_k_Format_string_allocated_too_shor[] = \"Format string allocated too short, see comment in numpy.pxd\";\nstatic const char __pyx_k_Non_native_byte_order_not_suppor[] = \"Non-native byte order not supported\";\nstatic const char __pyx_k_The_dt_and_gt_should_have_the_sa[] = \"The dt and gt should have the same data type, either RLEs, list or np.ndarray\";\nstatic const char __pyx_k_list_input_can_be_bounding_box_N[] = \"list input can be bounding box (Nx4) or RLEs ([RLE])\";\nstatic const char __pyx_k_ndarray_is_not_Fortran_contiguou[] = \"ndarray is not Fortran contiguous\";\nstatic const char __pyx_k_no_default___reduce___due_to_non[] = \"no default __reduce__ due to non-trivial __cinit__\";\nstatic const char __pyx_k_numpy_core_umath_failed_to_impor[] = \"numpy.core.umath failed to import\";\nstatic const char __pyx_k_Format_string_allocated_too_shor_2[] = \"Format string allocated too short.\";\nstatic PyObject *__pyx_n_s_AttributeError;\nstatic PyObject *__pyx_n_s_F;\nstatic PyObject *__pyx_kp_u_Format_string_allocated_too_shor;\nstatic PyObject *__pyx_kp_u_Format_string_allocated_too_shor_2;\nstatic PyObject *__pyx_n_s_ImportError;\nstatic PyObject *__pyx_n_s_Masks;\nstatic PyObject *__pyx_n_s_N;\nstatic PyObject *__pyx_kp_u_Non_native_byte_order_not_suppor;\nstatic PyObject *__pyx_n_s_PYTHON_VERSION;\nstatic 
PyObject *__pyx_kp_s_Python_version_must_be_2_or_3;\nstatic PyObject *__pyx_n_s_R;\nstatic PyObject *__pyx_n_s_RLEs;\nstatic PyObject *__pyx_n_s_Rs;\nstatic PyObject *__pyx_n_s_RuntimeError;\nstatic PyObject *__pyx_kp_s_The_dt_and_gt_should_have_the_sa;\nstatic PyObject *__pyx_n_s_TypeError;\nstatic PyObject *__pyx_n_s_ValueError;\nstatic PyObject *__pyx_n_s_a;\nstatic PyObject *__pyx_n_s_a_2;\nstatic PyObject *__pyx_n_s_all;\nstatic PyObject *__pyx_n_s_area;\nstatic PyObject *__pyx_n_s_array;\nstatic PyObject *__pyx_n_s_astype;\nstatic PyObject *__pyx_n_s_author;\nstatic PyObject *__pyx_n_s_bb;\nstatic PyObject *__pyx_n_s_bbIou;\nstatic PyObject *__pyx_n_s_bb_2;\nstatic PyObject *__pyx_n_s_c_string;\nstatic PyObject *__pyx_n_s_cline_in_traceback;\nstatic PyObject *__pyx_n_s_cnts;\nstatic PyObject *__pyx_n_s_counts;\nstatic PyObject *__pyx_n_s_data;\nstatic PyObject *__pyx_n_s_decode;\nstatic PyObject *__pyx_n_s_double;\nstatic PyObject *__pyx_n_s_dt;\nstatic PyObject *__pyx_n_s_dtype;\nstatic PyObject *__pyx_n_s_encode;\nstatic PyObject *__pyx_n_s_enumerate;\nstatic PyObject *__pyx_n_s_frBbox;\nstatic PyObject *__pyx_n_s_frPoly;\nstatic PyObject *__pyx_n_s_frPyObjects;\nstatic PyObject *__pyx_n_s_frString;\nstatic PyObject *__pyx_n_s_frUncompressedRLE;\nstatic PyObject *__pyx_n_s_getstate;\nstatic PyObject *__pyx_n_s_gt;\nstatic PyObject *__pyx_n_s_h;\nstatic PyObject *__pyx_n_s_i;\nstatic PyObject *__pyx_n_s_import;\nstatic PyObject *__pyx_kp_s_input_data_type_not_allowed;\nstatic PyObject *__pyx_kp_s_input_type_is_not_supported;\nstatic PyObject *__pyx_n_s_intersect;\nstatic PyObject *__pyx_n_s_iou;\nstatic PyObject *__pyx_n_s_iouFun;\nstatic PyObject *__pyx_n_s_iou_2;\nstatic PyObject *__pyx_n_s_iou_locals__bbIou;\nstatic PyObject *__pyx_n_s_iou_locals__len;\nstatic PyObject *__pyx_n_s_iou_locals__preproc;\nstatic PyObject *__pyx_n_s_iou_locals__rleIou;\nstatic PyObject *__pyx_n_s_isbox;\nstatic PyObject *__pyx_n_s_iscrowd;\nstatic PyObject 
*__pyx_n_s_isrle;\nstatic PyObject *__pyx_n_s_j;\nstatic PyObject *__pyx_n_s_len;\nstatic PyObject *__pyx_kp_s_list_input_can_be_bounding_box_N;\nstatic PyObject *__pyx_n_s_m;\nstatic PyObject *__pyx_n_s_main;\nstatic PyObject *__pyx_n_s_mask;\nstatic PyObject *__pyx_n_s_masks;\nstatic PyObject *__pyx_n_s_merge;\nstatic PyObject *__pyx_n_s_n;\nstatic PyObject *__pyx_n_s_name;\nstatic PyObject *__pyx_kp_u_ndarray_is_not_C_contiguous;\nstatic PyObject *__pyx_kp_u_ndarray_is_not_Fortran_contiguou;\nstatic PyObject *__pyx_kp_s_no_default___reduce___due_to_non;\nstatic PyObject *__pyx_n_s_np;\nstatic PyObject *__pyx_n_s_np_poly;\nstatic PyObject *__pyx_n_s_numpy;\nstatic PyObject *__pyx_kp_s_numpy_core_multiarray_failed_to;\nstatic PyObject *__pyx_kp_s_numpy_core_umath_failed_to_impor;\nstatic PyObject *__pyx_kp_s_numpy_ndarray_input_is_only_for;\nstatic PyObject *__pyx_n_s_obj;\nstatic PyObject *__pyx_n_s_objs;\nstatic PyObject *__pyx_n_s_order;\nstatic PyObject *__pyx_n_s_p;\nstatic PyObject *__pyx_n_s_poly;\nstatic PyObject *__pyx_n_s_preproc;\nstatic PyObject *__pyx_n_s_py_string;\nstatic PyObject *__pyx_n_s_pysobatools__mask;\nstatic PyObject *__pyx_kp_s_pysobatools__mask_pyx;\nstatic PyObject *__pyx_n_s_pyiscrowd;\nstatic PyObject *__pyx_n_s_pyobj;\nstatic PyObject *__pyx_n_s_range;\nstatic PyObject *__pyx_n_s_reduce;\nstatic PyObject *__pyx_n_s_reduce_cython;\nstatic PyObject *__pyx_n_s_reduce_ex;\nstatic PyObject *__pyx_n_s_reshape;\nstatic PyObject *__pyx_n_s_rleIou;\nstatic PyObject *__pyx_n_s_rleObjs;\nstatic PyObject *__pyx_n_s_setstate;\nstatic PyObject *__pyx_n_s_setstate_cython;\nstatic PyObject *__pyx_n_s_shape;\nstatic PyObject *__pyx_n_s_size;\nstatic PyObject *__pyx_n_s_sys;\nstatic PyObject *__pyx_n_s_test;\nstatic PyObject *__pyx_n_s_toBbox;\nstatic PyObject *__pyx_n_s_toString;\nstatic PyObject *__pyx_n_s_tsungyi;\nstatic PyObject *__pyx_n_s_ucRles;\nstatic PyObject *__pyx_n_s_uint32;\nstatic PyObject *__pyx_n_s_uint8;\nstatic PyObject 
*__pyx_kp_u_unknown_dtype_code_in_numpy_pxd;\nstatic PyObject *__pyx_kp_s_unrecognized_type_The_following;\nstatic PyObject *__pyx_n_s_utf8;\nstatic PyObject *__pyx_n_s_version_info;\nstatic PyObject *__pyx_n_s_w;\nstatic PyObject *__pyx_n_s_zeros;\nstatic int __pyx_pf_11pysobatools_5_mask_4RLEs___cinit__(struct __pyx_obj_11pysobatools_5_mask_RLEs *__pyx_v_self, siz __pyx_v_n); /* proto */\nstatic void __pyx_pf_11pysobatools_5_mask_4RLEs_2__dealloc__(struct __pyx_obj_11pysobatools_5_mask_RLEs *__pyx_v_self); /* proto */\nstatic PyObject *__pyx_pf_11pysobatools_5_mask_4RLEs_4__getattr__(struct __pyx_obj_11pysobatools_5_mask_RLEs *__pyx_v_self, PyObject *__pyx_v_key); /* proto */\nstatic PyObject *__pyx_pf_11pysobatools_5_mask_4RLEs_6__reduce_cython__(CYTHON_UNUSED struct __pyx_obj_11pysobatools_5_mask_RLEs *__pyx_v_self); /* proto */\nstatic PyObject *__pyx_pf_11pysobatools_5_mask_4RLEs_8__setstate_cython__(CYTHON_UNUSED struct __pyx_obj_11pysobatools_5_mask_RLEs *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v___pyx_state); /* proto */\nstatic int __pyx_pf_11pysobatools_5_mask_5Masks___cinit__(struct __pyx_obj_11pysobatools_5_mask_Masks *__pyx_v_self, PyObject *__pyx_v_h, PyObject *__pyx_v_w, PyObject *__pyx_v_n); /* proto */\nstatic PyObject *__pyx_pf_11pysobatools_5_mask_5Masks_2__array__(struct __pyx_obj_11pysobatools_5_mask_Masks *__pyx_v_self); /* proto */\nstatic PyObject *__pyx_pf_11pysobatools_5_mask_5Masks_4__reduce_cython__(CYTHON_UNUSED struct __pyx_obj_11pysobatools_5_mask_Masks *__pyx_v_self); /* proto */\nstatic PyObject *__pyx_pf_11pysobatools_5_mask_5Masks_6__setstate_cython__(CYTHON_UNUSED struct __pyx_obj_11pysobatools_5_mask_Masks *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v___pyx_state); /* proto */\nstatic PyObject *__pyx_pf_11pysobatools_5_mask__toString(CYTHON_UNUSED PyObject *__pyx_self, struct __pyx_obj_11pysobatools_5_mask_RLEs *__pyx_v_Rs); /* proto */\nstatic PyObject *__pyx_pf_11pysobatools_5_mask_2_frString(CYTHON_UNUSED PyObject 
*__pyx_self, PyObject *__pyx_v_rleObjs); /* proto */\nstatic PyObject *__pyx_pf_11pysobatools_5_mask_4encode(CYTHON_UNUSED PyObject *__pyx_self, PyArrayObject *__pyx_v_mask); /* proto */\nstatic PyObject *__pyx_pf_11pysobatools_5_mask_6decode(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_rleObjs); /* proto */\nstatic PyObject *__pyx_pf_11pysobatools_5_mask_8merge(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_rleObjs, PyObject *__pyx_v_intersect); /* proto */\nstatic PyObject *__pyx_pf_11pysobatools_5_mask_10area(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_rleObjs); /* proto */\nstatic PyObject *__pyx_pf_11pysobatools_5_mask_3iou__preproc(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_objs); /* proto */\nstatic PyObject *__pyx_pf_11pysobatools_5_mask_3iou_2_rleIou(CYTHON_UNUSED PyObject *__pyx_self, struct __pyx_obj_11pysobatools_5_mask_RLEs *__pyx_v_dt, struct __pyx_obj_11pysobatools_5_mask_RLEs *__pyx_v_gt, PyArrayObject *__pyx_v_iscrowd, siz __pyx_v_m, siz __pyx_v_n, PyArrayObject *__pyx_v__iou); /* proto */\nstatic PyObject *__pyx_pf_11pysobatools_5_mask_3iou_4_bbIou(CYTHON_UNUSED PyObject *__pyx_self, PyArrayObject *__pyx_v_dt, PyArrayObject *__pyx_v_gt, PyArrayObject *__pyx_v_iscrowd, siz __pyx_v_m, siz __pyx_v_n, PyArrayObject *__pyx_v__iou); /* proto */\nstatic PyObject *__pyx_pf_11pysobatools_5_mask_3iou_6_len(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_obj); /* proto */\nstatic PyObject *__pyx_pf_11pysobatools_5_mask_12iou(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_dt, PyObject *__pyx_v_gt, PyObject *__pyx_v_pyiscrowd); /* proto */\nstatic PyObject *__pyx_pf_11pysobatools_5_mask_14toBbox(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_rleObjs); /* proto */\nstatic PyObject *__pyx_pf_11pysobatools_5_mask_16frBbox(CYTHON_UNUSED PyObject *__pyx_self, PyArrayObject *__pyx_v_bb, siz __pyx_v_h, siz __pyx_v_w); /* proto */\nstatic PyObject *__pyx_pf_11pysobatools_5_mask_18frPoly(CYTHON_UNUSED PyObject 
*__pyx_self, PyObject *__pyx_v_poly, siz __pyx_v_h, siz __pyx_v_w); /* proto */\nstatic PyObject *__pyx_pf_11pysobatools_5_mask_20frUncompressedRLE(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_ucRles, CYTHON_UNUSED siz __pyx_v_h, CYTHON_UNUSED siz __pyx_v_w); /* proto */\nstatic PyObject *__pyx_pf_11pysobatools_5_mask_22frPyObjects(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_pyobj, PyObject *__pyx_v_h, PyObject *__pyx_v_w); /* proto */\nstatic int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, Py_buffer *__pyx_v_info, int __pyx_v_flags); /* proto */\nstatic void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_self, Py_buffer *__pyx_v_info); /* proto */\nstatic PyObject *__pyx_tp_new_11pysobatools_5_mask_RLEs(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/\nstatic PyObject *__pyx_tp_new_11pysobatools_5_mask_Masks(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/\nstatic PyObject *__pyx_int_0;\nstatic PyObject *__pyx_int_1;\nstatic PyObject *__pyx_int_2;\nstatic PyObject *__pyx_int_3;\nstatic PyObject *__pyx_int_4;\nstatic PyObject *__pyx_tuple_;\nstatic PyObject *__pyx_tuple__2;\nstatic PyObject *__pyx_tuple__3;\nstatic PyObject *__pyx_tuple__4;\nstatic PyObject *__pyx_tuple__5;\nstatic PyObject *__pyx_tuple__6;\nstatic PyObject *__pyx_tuple__7;\nstatic PyObject *__pyx_tuple__8;\nstatic PyObject *__pyx_tuple__9;\nstatic PyObject *__pyx_tuple__10;\nstatic PyObject *__pyx_tuple__12;\nstatic PyObject *__pyx_tuple__14;\nstatic PyObject *__pyx_tuple__16;\nstatic PyObject *__pyx_tuple__18;\nstatic PyObject *__pyx_tuple__19;\nstatic PyObject *__pyx_tuple__20;\nstatic PyObject *__pyx_tuple__21;\nstatic PyObject *__pyx_tuple__22;\nstatic PyObject *__pyx_tuple__23;\nstatic PyObject *__pyx_tuple__24;\nstatic PyObject *__pyx_tuple__25;\nstatic PyObject *__pyx_tuple__26;\nstatic PyObject *__pyx_tuple__27;\nstatic PyObject *__pyx_tuple__28;\nstatic PyObject *__pyx_tuple__30;\nstatic PyObject 
*__pyx_tuple__32;\nstatic PyObject *__pyx_tuple__34;\nstatic PyObject *__pyx_tuple__36;\nstatic PyObject *__pyx_tuple__38;\nstatic PyObject *__pyx_tuple__40;\nstatic PyObject *__pyx_tuple__42;\nstatic PyObject *__pyx_tuple__44;\nstatic PyObject *__pyx_tuple__46;\nstatic PyObject *__pyx_tuple__48;\nstatic PyObject *__pyx_tuple__50;\nstatic PyObject *__pyx_codeobj__11;\nstatic PyObject *__pyx_codeobj__13;\nstatic PyObject *__pyx_codeobj__15;\nstatic PyObject *__pyx_codeobj__17;\nstatic PyObject *__pyx_codeobj__29;\nstatic PyObject *__pyx_codeobj__31;\nstatic PyObject *__pyx_codeobj__33;\nstatic PyObject *__pyx_codeobj__35;\nstatic PyObject *__pyx_codeobj__37;\nstatic PyObject *__pyx_codeobj__39;\nstatic PyObject *__pyx_codeobj__41;\nstatic PyObject *__pyx_codeobj__43;\nstatic PyObject *__pyx_codeobj__45;\nstatic PyObject *__pyx_codeobj__47;\nstatic PyObject *__pyx_codeobj__49;\nstatic PyObject *__pyx_codeobj__51;\n/* Late includes */\n\n/* \"pysobatools/_mask.pyx\":60\n *     cdef siz _n\n * \n *     def __cinit__(self, siz n =0):             # <<<<<<<<<<<<<<\n *         rlesInit(&self._R, n)\n *         self._n = n\n */\n\n/* Python wrapper */\nstatic int __pyx_pw_11pysobatools_5_mask_4RLEs_1__cinit__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/\nstatic int __pyx_pw_11pysobatools_5_mask_4RLEs_1__cinit__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) {\n  siz __pyx_v_n;\n  int __pyx_r;\n  __Pyx_RefNannyDeclarations\n  __Pyx_RefNannySetupContext(\"__cinit__ (wrapper)\", 0);\n  {\n    static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_n,0};\n    PyObject* values[1] = {0};\n    if (unlikely(__pyx_kwds)) {\n      Py_ssize_t kw_args;\n      const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args);\n      switch (pos_args) {\n        case  1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0);\n        CYTHON_FALLTHROUGH;\n        case  0: break;\n        default: goto __pyx_L5_argtuple_error;\n      }\n      kw_args = 
PyDict_Size(__pyx_kwds);\n      switch (pos_args) {\n        case  0:\n        if (kw_args > 0) {\n          PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_n);\n          if (value) { values[0] = value; kw_args--; }\n        }\n      }\n      if (unlikely(kw_args > 0)) {\n        if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, \"__cinit__\") < 0)) __PYX_ERR(0, 60, __pyx_L3_error)\n      }\n    } else {\n      switch (PyTuple_GET_SIZE(__pyx_args)) {\n        case  1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0);\n        CYTHON_FALLTHROUGH;\n        case  0: break;\n        default: goto __pyx_L5_argtuple_error;\n      }\n    }\n    if (values[0]) {\n      __pyx_v_n = __Pyx_PyInt_As_siz(values[0]); if (unlikely((__pyx_v_n == ((siz)-1)) && PyErr_Occurred())) __PYX_ERR(0, 60, __pyx_L3_error)\n    } else {\n      __pyx_v_n = ((siz)0);\n    }\n  }\n  goto __pyx_L4_argument_unpacking_done;\n  __pyx_L5_argtuple_error:;\n  __Pyx_RaiseArgtupleInvalid(\"__cinit__\", 0, 0, 1, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 60, __pyx_L3_error)\n  __pyx_L3_error:;\n  __Pyx_AddTraceback(\"pysobatools._mask.RLEs.__cinit__\", __pyx_clineno, __pyx_lineno, __pyx_filename);\n  __Pyx_RefNannyFinishContext();\n  return -1;\n  __pyx_L4_argument_unpacking_done:;\n  __pyx_r = __pyx_pf_11pysobatools_5_mask_4RLEs___cinit__(((struct __pyx_obj_11pysobatools_5_mask_RLEs *)__pyx_v_self), __pyx_v_n);\n\n  /* function exit code */\n  __Pyx_RefNannyFinishContext();\n  return __pyx_r;\n}\n\nstatic int __pyx_pf_11pysobatools_5_mask_4RLEs___cinit__(struct __pyx_obj_11pysobatools_5_mask_RLEs *__pyx_v_self, siz __pyx_v_n) {\n  int __pyx_r;\n  __Pyx_RefNannyDeclarations\n  __Pyx_RefNannySetupContext(\"__cinit__\", 0);\n\n  /* \"pysobatools/_mask.pyx\":61\n * \n *     def __cinit__(self, siz n =0):\n *         rlesInit(&self._R, n)             # <<<<<<<<<<<<<<\n *         self._n = n\n * \n */\n  rlesInit((&__pyx_v_self->_R), __pyx_v_n);\n\n  /* 
\"pysobatools/_mask.pyx\":62\n *     def __cinit__(self, siz n =0):\n *         rlesInit(&self._R, n)\n *         self._n = n             # <<<<<<<<<<<<<<\n * \n *     # free the RLE array here\n */\n  __pyx_v_self->_n = __pyx_v_n;\n\n  /* \"pysobatools/_mask.pyx\":60\n *     cdef siz _n\n * \n *     def __cinit__(self, siz n =0):             # <<<<<<<<<<<<<<\n *         rlesInit(&self._R, n)\n *         self._n = n\n */\n\n  /* function exit code */\n  __pyx_r = 0;\n  __Pyx_RefNannyFinishContext();\n  return __pyx_r;\n}\n\n/* \"pysobatools/_mask.pyx\":65\n * \n *     # free the RLE array here\n *     def __dealloc__(self):             # <<<<<<<<<<<<<<\n *         if self._R is not NULL:\n *             for i in range(self._n):\n */\n\n/* Python wrapper */\nstatic void __pyx_pw_11pysobatools_5_mask_4RLEs_3__dealloc__(PyObject *__pyx_v_self); /*proto*/\nstatic void __pyx_pw_11pysobatools_5_mask_4RLEs_3__dealloc__(PyObject *__pyx_v_self) {\n  __Pyx_RefNannyDeclarations\n  __Pyx_RefNannySetupContext(\"__dealloc__ (wrapper)\", 0);\n  __pyx_pf_11pysobatools_5_mask_4RLEs_2__dealloc__(((struct __pyx_obj_11pysobatools_5_mask_RLEs *)__pyx_v_self));\n\n  /* function exit code */\n  __Pyx_RefNannyFinishContext();\n}\n\nstatic void __pyx_pf_11pysobatools_5_mask_4RLEs_2__dealloc__(struct __pyx_obj_11pysobatools_5_mask_RLEs *__pyx_v_self) {\n  siz __pyx_v_i;\n  __Pyx_RefNannyDeclarations\n  int __pyx_t_1;\n  siz __pyx_t_2;\n  siz __pyx_t_3;\n  siz __pyx_t_4;\n  __Pyx_RefNannySetupContext(\"__dealloc__\", 0);\n\n  /* \"pysobatools/_mask.pyx\":66\n *     # free the RLE array here\n *     def __dealloc__(self):\n *         if self._R is not NULL:             # <<<<<<<<<<<<<<\n *             for i in range(self._n):\n *                 free(self._R[i].cnts)\n */\n  __pyx_t_1 = ((__pyx_v_self->_R != NULL) != 0);\n  if (__pyx_t_1) {\n\n    /* \"pysobatools/_mask.pyx\":67\n *     def __dealloc__(self):\n *         if self._R is not NULL:\n *             for i in range(self._n):         
    # <<<<<<<<<<<<<<\n *                 free(self._R[i].cnts)\n *             free(self._R)\n */\n    __pyx_t_2 = __pyx_v_self->_n;\n    __pyx_t_3 = __pyx_t_2;\n    for (__pyx_t_4 = 0; __pyx_t_4 < __pyx_t_3; __pyx_t_4+=1) {\n      __pyx_v_i = __pyx_t_4;\n\n      /* \"pysobatools/_mask.pyx\":68\n *         if self._R is not NULL:\n *             for i in range(self._n):\n *                 free(self._R[i].cnts)             # <<<<<<<<<<<<<<\n *             free(self._R)\n *     def __getattr__(self, key):\n */\n      free((__pyx_v_self->_R[__pyx_v_i]).cnts);\n    }\n\n    /* \"pysobatools/_mask.pyx\":69\n *             for i in range(self._n):\n *                 free(self._R[i].cnts)\n *             free(self._R)             # <<<<<<<<<<<<<<\n *     def __getattr__(self, key):\n *         if key == 'n':\n */\n    free(__pyx_v_self->_R);\n\n    /* \"pysobatools/_mask.pyx\":66\n *     # free the RLE array here\n *     def __dealloc__(self):\n *         if self._R is not NULL:             # <<<<<<<<<<<<<<\n *             for i in range(self._n):\n *                 free(self._R[i].cnts)\n */\n  }\n\n  /* \"pysobatools/_mask.pyx\":65\n * \n *     # free the RLE array here\n *     def __dealloc__(self):             # <<<<<<<<<<<<<<\n *         if self._R is not NULL:\n *             for i in range(self._n):\n */\n\n  /* function exit code */\n  __Pyx_RefNannyFinishContext();\n}\n\n/* \"pysobatools/_mask.pyx\":70\n *                 free(self._R[i].cnts)\n *             free(self._R)\n *     def __getattr__(self, key):             # <<<<<<<<<<<<<<\n *         if key == 'n':\n *             return self._n\n */\n\n/* Python wrapper */\nstatic PyObject *__pyx_pw_11pysobatools_5_mask_4RLEs_5__getattr__(PyObject *__pyx_v_self, PyObject *__pyx_v_key); /*proto*/\nstatic PyObject *__pyx_pw_11pysobatools_5_mask_4RLEs_5__getattr__(PyObject *__pyx_v_self, PyObject *__pyx_v_key) {\n  PyObject *__pyx_r = 0;\n  __Pyx_RefNannyDeclarations\n  __Pyx_RefNannySetupContext(\"__getattr__ 
(wrapper)\", 0);\n  __pyx_r = __pyx_pf_11pysobatools_5_mask_4RLEs_4__getattr__(((struct __pyx_obj_11pysobatools_5_mask_RLEs *)__pyx_v_self), ((PyObject *)__pyx_v_key));\n\n  /* function exit code */\n  __Pyx_RefNannyFinishContext();\n  return __pyx_r;\n}\n\nstatic PyObject *__pyx_pf_11pysobatools_5_mask_4RLEs_4__getattr__(struct __pyx_obj_11pysobatools_5_mask_RLEs *__pyx_v_self, PyObject *__pyx_v_key) {\n  PyObject *__pyx_r = NULL;\n  __Pyx_RefNannyDeclarations\n  int __pyx_t_1;\n  PyObject *__pyx_t_2 = NULL;\n  __Pyx_RefNannySetupContext(\"__getattr__\", 0);\n\n  /* \"pysobatools/_mask.pyx\":71\n *             free(self._R)\n *     def __getattr__(self, key):\n *         if key == 'n':             # <<<<<<<<<<<<<<\n *             return self._n\n *         raise AttributeError(key)\n */\n  __pyx_t_1 = (__Pyx_PyString_Equals(__pyx_v_key, __pyx_n_s_n, Py_EQ)); if (unlikely(__pyx_t_1 < 0)) __PYX_ERR(0, 71, __pyx_L1_error)\n  if (__pyx_t_1) {\n\n    /* \"pysobatools/_mask.pyx\":72\n *     def __getattr__(self, key):\n *         if key == 'n':\n *             return self._n             # <<<<<<<<<<<<<<\n *         raise AttributeError(key)\n * \n */\n    __Pyx_XDECREF(__pyx_r);\n    __pyx_t_2 = __Pyx_PyInt_From_siz(__pyx_v_self->_n); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 72, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_2);\n    __pyx_r = __pyx_t_2;\n    __pyx_t_2 = 0;\n    goto __pyx_L0;\n\n    /* \"pysobatools/_mask.pyx\":71\n *             free(self._R)\n *     def __getattr__(self, key):\n *         if key == 'n':             # <<<<<<<<<<<<<<\n *             return self._n\n *         raise AttributeError(key)\n */\n  }\n\n  /* \"pysobatools/_mask.pyx\":73\n *         if key == 'n':\n *             return self._n\n *         raise AttributeError(key)             # <<<<<<<<<<<<<<\n * \n * # python class to wrap Mask array in C\n */\n  __pyx_t_2 = __Pyx_PyObject_CallOneArg(__pyx_builtin_AttributeError, __pyx_v_key); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 73, 
__pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_2);\n  __Pyx_Raise(__pyx_t_2, 0, 0, 0);\n  __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;\n  __PYX_ERR(0, 73, __pyx_L1_error)\n\n  /* \"pysobatools/_mask.pyx\":70\n *                 free(self._R[i].cnts)\n *             free(self._R)\n *     def __getattr__(self, key):             # <<<<<<<<<<<<<<\n *         if key == 'n':\n *             return self._n\n */\n\n  /* function exit code */\n  __pyx_L1_error:;\n  __Pyx_XDECREF(__pyx_t_2);\n  __Pyx_AddTraceback(\"pysobatools._mask.RLEs.__getattr__\", __pyx_clineno, __pyx_lineno, __pyx_filename);\n  __pyx_r = NULL;\n  __pyx_L0:;\n  __Pyx_XGIVEREF(__pyx_r);\n  __Pyx_RefNannyFinishContext();\n  return __pyx_r;\n}\n\n/* \"(tree fragment)\":1\n * def __reduce_cython__(self):             # <<<<<<<<<<<<<<\n *     raise TypeError(\"no default __reduce__ due to non-trivial __cinit__\")\n * def __setstate_cython__(self, __pyx_state):\n */\n\n/* Python wrapper */\nstatic PyObject *__pyx_pw_11pysobatools_5_mask_4RLEs_7__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/\nstatic PyObject *__pyx_pw_11pysobatools_5_mask_4RLEs_7__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) {\n  PyObject *__pyx_r = 0;\n  __Pyx_RefNannyDeclarations\n  __Pyx_RefNannySetupContext(\"__reduce_cython__ (wrapper)\", 0);\n  __pyx_r = __pyx_pf_11pysobatools_5_mask_4RLEs_6__reduce_cython__(((struct __pyx_obj_11pysobatools_5_mask_RLEs *)__pyx_v_self));\n\n  /* function exit code */\n  __Pyx_RefNannyFinishContext();\n  return __pyx_r;\n}\n\nstatic PyObject *__pyx_pf_11pysobatools_5_mask_4RLEs_6__reduce_cython__(CYTHON_UNUSED struct __pyx_obj_11pysobatools_5_mask_RLEs *__pyx_v_self) {\n  PyObject *__pyx_r = NULL;\n  __Pyx_RefNannyDeclarations\n  PyObject *__pyx_t_1 = NULL;\n  __Pyx_RefNannySetupContext(\"__reduce_cython__\", 0);\n\n  /* \"(tree fragment)\":2\n * def __reduce_cython__(self):\n *     raise TypeError(\"no default __reduce__ due to non-trivial __cinit__\")    
         # <<<<<<<<<<<<<<\n * def __setstate_cython__(self, __pyx_state):\n *     raise TypeError(\"no default __reduce__ due to non-trivial __cinit__\")\n */\n  __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple_, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 2, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_1);\n  __Pyx_Raise(__pyx_t_1, 0, 0, 0);\n  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;\n  __PYX_ERR(1, 2, __pyx_L1_error)\n\n  /* \"(tree fragment)\":1\n * def __reduce_cython__(self):             # <<<<<<<<<<<<<<\n *     raise TypeError(\"no default __reduce__ due to non-trivial __cinit__\")\n * def __setstate_cython__(self, __pyx_state):\n */\n\n  /* function exit code */\n  __pyx_L1_error:;\n  __Pyx_XDECREF(__pyx_t_1);\n  __Pyx_AddTraceback(\"pysobatools._mask.RLEs.__reduce_cython__\", __pyx_clineno, __pyx_lineno, __pyx_filename);\n  __pyx_r = NULL;\n  __Pyx_XGIVEREF(__pyx_r);\n  __Pyx_RefNannyFinishContext();\n  return __pyx_r;\n}\n\n/* \"(tree fragment)\":3\n * def __reduce_cython__(self):\n *     raise TypeError(\"no default __reduce__ due to non-trivial __cinit__\")\n * def __setstate_cython__(self, __pyx_state):             # <<<<<<<<<<<<<<\n *     raise TypeError(\"no default __reduce__ due to non-trivial __cinit__\")\n */\n\n/* Python wrapper */\nstatic PyObject *__pyx_pw_11pysobatools_5_mask_4RLEs_9__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state); /*proto*/\nstatic PyObject *__pyx_pw_11pysobatools_5_mask_4RLEs_9__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state) {\n  PyObject *__pyx_r = 0;\n  __Pyx_RefNannyDeclarations\n  __Pyx_RefNannySetupContext(\"__setstate_cython__ (wrapper)\", 0);\n  __pyx_r = __pyx_pf_11pysobatools_5_mask_4RLEs_8__setstate_cython__(((struct __pyx_obj_11pysobatools_5_mask_RLEs *)__pyx_v_self), ((PyObject *)__pyx_v___pyx_state));\n\n  /* function exit code */\n  __Pyx_RefNannyFinishContext();\n  return __pyx_r;\n}\n\nstatic PyObject 
*__pyx_pf_11pysobatools_5_mask_4RLEs_8__setstate_cython__(CYTHON_UNUSED struct __pyx_obj_11pysobatools_5_mask_RLEs *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v___pyx_state) {\n  PyObject *__pyx_r = NULL;\n  __Pyx_RefNannyDeclarations\n  PyObject *__pyx_t_1 = NULL;\n  __Pyx_RefNannySetupContext(\"__setstate_cython__\", 0);\n\n  /* \"(tree fragment)\":4\n *     raise TypeError(\"no default __reduce__ due to non-trivial __cinit__\")\n * def __setstate_cython__(self, __pyx_state):\n *     raise TypeError(\"no default __reduce__ due to non-trivial __cinit__\")             # <<<<<<<<<<<<<<\n */\n  __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__2, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 4, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_1);\n  __Pyx_Raise(__pyx_t_1, 0, 0, 0);\n  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;\n  __PYX_ERR(1, 4, __pyx_L1_error)\n\n  /* \"(tree fragment)\":3\n * def __reduce_cython__(self):\n *     raise TypeError(\"no default __reduce__ due to non-trivial __cinit__\")\n * def __setstate_cython__(self, __pyx_state):             # <<<<<<<<<<<<<<\n *     raise TypeError(\"no default __reduce__ due to non-trivial __cinit__\")\n */\n\n  /* function exit code */\n  __pyx_L1_error:;\n  __Pyx_XDECREF(__pyx_t_1);\n  __Pyx_AddTraceback(\"pysobatools._mask.RLEs.__setstate_cython__\", __pyx_clineno, __pyx_lineno, __pyx_filename);\n  __pyx_r = NULL;\n  __Pyx_XGIVEREF(__pyx_r);\n  __Pyx_RefNannyFinishContext();\n  return __pyx_r;\n}\n\n/* \"pysobatools/_mask.pyx\":83\n *     cdef siz _n\n * \n *     def __cinit__(self, h, w, n):             # <<<<<<<<<<<<<<\n *         self._mask = <byte*> malloc(h*w*n* sizeof(byte))\n *         self._h = h\n */\n\n/* Python wrapper */\nstatic int __pyx_pw_11pysobatools_5_mask_5Masks_1__cinit__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/\nstatic int __pyx_pw_11pysobatools_5_mask_5Masks_1__cinit__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) {\n  
PyObject *__pyx_v_h = 0;\n  PyObject *__pyx_v_w = 0;\n  PyObject *__pyx_v_n = 0;\n  int __pyx_r;\n  __Pyx_RefNannyDeclarations\n  __Pyx_RefNannySetupContext(\"__cinit__ (wrapper)\", 0);\n  {\n    static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_h,&__pyx_n_s_w,&__pyx_n_s_n,0};\n    PyObject* values[3] = {0,0,0};\n    if (unlikely(__pyx_kwds)) {\n      Py_ssize_t kw_args;\n      const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args);\n      switch (pos_args) {\n        case  3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2);\n        CYTHON_FALLTHROUGH;\n        case  2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1);\n        CYTHON_FALLTHROUGH;\n        case  1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0);\n        CYTHON_FALLTHROUGH;\n        case  0: break;\n        default: goto __pyx_L5_argtuple_error;\n      }\n      kw_args = PyDict_Size(__pyx_kwds);\n      switch (pos_args) {\n        case  0:\n        if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_h)) != 0)) kw_args--;\n        else goto __pyx_L5_argtuple_error;\n        CYTHON_FALLTHROUGH;\n        case  1:\n        if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_w)) != 0)) kw_args--;\n        else {\n          __Pyx_RaiseArgtupleInvalid(\"__cinit__\", 1, 3, 3, 1); __PYX_ERR(0, 83, __pyx_L3_error)\n        }\n        CYTHON_FALLTHROUGH;\n        case  2:\n        if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_n)) != 0)) kw_args--;\n        else {\n          __Pyx_RaiseArgtupleInvalid(\"__cinit__\", 1, 3, 3, 2); __PYX_ERR(0, 83, __pyx_L3_error)\n        }\n      }\n      if (unlikely(kw_args > 0)) {\n        if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, \"__cinit__\") < 0)) __PYX_ERR(0, 83, __pyx_L3_error)\n      }\n    } else if (PyTuple_GET_SIZE(__pyx_args) != 3) {\n      goto __pyx_L5_argtuple_error;\n    } else {\n      values[0] = PyTuple_GET_ITEM(__pyx_args, 0);\n      values[1] = 
PyTuple_GET_ITEM(__pyx_args, 1);\n      values[2] = PyTuple_GET_ITEM(__pyx_args, 2);\n    }\n    __pyx_v_h = values[0];\n    __pyx_v_w = values[1];\n    __pyx_v_n = values[2];\n  }\n  goto __pyx_L4_argument_unpacking_done;\n  __pyx_L5_argtuple_error:;\n  __Pyx_RaiseArgtupleInvalid(\"__cinit__\", 1, 3, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 83, __pyx_L3_error)\n  __pyx_L3_error:;\n  __Pyx_AddTraceback(\"pysobatools._mask.Masks.__cinit__\", __pyx_clineno, __pyx_lineno, __pyx_filename);\n  __Pyx_RefNannyFinishContext();\n  return -1;\n  __pyx_L4_argument_unpacking_done:;\n  __pyx_r = __pyx_pf_11pysobatools_5_mask_5Masks___cinit__(((struct __pyx_obj_11pysobatools_5_mask_Masks *)__pyx_v_self), __pyx_v_h, __pyx_v_w, __pyx_v_n);\n\n  /* function exit code */\n  __Pyx_RefNannyFinishContext();\n  return __pyx_r;\n}\n\nstatic int __pyx_pf_11pysobatools_5_mask_5Masks___cinit__(struct __pyx_obj_11pysobatools_5_mask_Masks *__pyx_v_self, PyObject *__pyx_v_h, PyObject *__pyx_v_w, PyObject *__pyx_v_n) {\n  int __pyx_r;\n  __Pyx_RefNannyDeclarations\n  PyObject *__pyx_t_1 = NULL;\n  PyObject *__pyx_t_2 = NULL;\n  PyObject *__pyx_t_3 = NULL;\n  size_t __pyx_t_4;\n  siz __pyx_t_5;\n  __Pyx_RefNannySetupContext(\"__cinit__\", 0);\n\n  /* \"pysobatools/_mask.pyx\":84\n * \n *     def __cinit__(self, h, w, n):\n *         self._mask = <byte*> malloc(h*w*n* sizeof(byte))             # <<<<<<<<<<<<<<\n *         self._h = h\n *         self._w = w\n */\n  __pyx_t_1 = PyNumber_Multiply(__pyx_v_h, __pyx_v_w); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 84, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_1);\n  __pyx_t_2 = PyNumber_Multiply(__pyx_t_1, __pyx_v_n); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 84, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_2);\n  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;\n  __pyx_t_1 = __Pyx_PyInt_FromSize_t((sizeof(byte))); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 84, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_1);\n  __pyx_t_3 = PyNumber_Multiply(__pyx_t_2, __pyx_t_1); if 
(unlikely(!__pyx_t_3)) __PYX_ERR(0, 84, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_3);\n  __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;\n  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;\n  __pyx_t_4 = __Pyx_PyInt_As_size_t(__pyx_t_3); if (unlikely((__pyx_t_4 == (size_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 84, __pyx_L1_error)\n  __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;\n  __pyx_v_self->_mask = ((byte *)malloc(__pyx_t_4));\n\n  /* \"pysobatools/_mask.pyx\":85\n *     def __cinit__(self, h, w, n):\n *         self._mask = <byte*> malloc(h*w*n* sizeof(byte))\n *         self._h = h             # <<<<<<<<<<<<<<\n *         self._w = w\n *         self._n = n\n */\n  __pyx_t_5 = __Pyx_PyInt_As_siz(__pyx_v_h); if (unlikely((__pyx_t_5 == ((siz)-1)) && PyErr_Occurred())) __PYX_ERR(0, 85, __pyx_L1_error)\n  __pyx_v_self->_h = __pyx_t_5;\n\n  /* \"pysobatools/_mask.pyx\":86\n *         self._mask = <byte*> malloc(h*w*n* sizeof(byte))\n *         self._h = h\n *         self._w = w             # <<<<<<<<<<<<<<\n *         self._n = n\n *     # def __dealloc__(self):\n */\n  __pyx_t_5 = __Pyx_PyInt_As_siz(__pyx_v_w); if (unlikely((__pyx_t_5 == ((siz)-1)) && PyErr_Occurred())) __PYX_ERR(0, 86, __pyx_L1_error)\n  __pyx_v_self->_w = __pyx_t_5;\n\n  /* \"pysobatools/_mask.pyx\":87\n *         self._h = h\n *         self._w = w\n *         self._n = n             # <<<<<<<<<<<<<<\n *     # def __dealloc__(self):\n *         # the memory management of _mask has been passed to np.ndarray\n */\n  __pyx_t_5 = __Pyx_PyInt_As_siz(__pyx_v_n); if (unlikely((__pyx_t_5 == ((siz)-1)) && PyErr_Occurred())) __PYX_ERR(0, 87, __pyx_L1_error)\n  __pyx_v_self->_n = __pyx_t_5;\n\n  /* \"pysobatools/_mask.pyx\":83\n *     cdef siz _n\n * \n *     def __cinit__(self, h, w, n):             # <<<<<<<<<<<<<<\n *         self._mask = <byte*> malloc(h*w*n* sizeof(byte))\n *         self._h = h\n */\n\n  /* function exit code */\n  __pyx_r = 0;\n  goto __pyx_L0;\n  __pyx_L1_error:;\n  __Pyx_XDECREF(__pyx_t_1);\n  
__Pyx_XDECREF(__pyx_t_2);\n  __Pyx_XDECREF(__pyx_t_3);\n  __Pyx_AddTraceback(\"pysobatools._mask.Masks.__cinit__\", __pyx_clineno, __pyx_lineno, __pyx_filename);\n  __pyx_r = -1;\n  __pyx_L0:;\n  __Pyx_RefNannyFinishContext();\n  return __pyx_r;\n}\n\n/* \"pysobatools/_mask.pyx\":93\n * \n *     # called when passing into np.array() and return an np.ndarray in column-major order\n *     def __array__(self):             # <<<<<<<<<<<<<<\n *         cdef np.npy_intp shape[1]\n *         shape[0] = <np.npy_intp> self._h*self._w*self._n\n */\n\n/* Python wrapper */\nstatic PyObject *__pyx_pw_11pysobatools_5_mask_5Masks_3__array__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/\nstatic PyObject *__pyx_pw_11pysobatools_5_mask_5Masks_3__array__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) {\n  PyObject *__pyx_r = 0;\n  __Pyx_RefNannyDeclarations\n  __Pyx_RefNannySetupContext(\"__array__ (wrapper)\", 0);\n  __pyx_r = __pyx_pf_11pysobatools_5_mask_5Masks_2__array__(((struct __pyx_obj_11pysobatools_5_mask_Masks *)__pyx_v_self));\n\n  /* function exit code */\n  __Pyx_RefNannyFinishContext();\n  return __pyx_r;\n}\n\nstatic PyObject *__pyx_pf_11pysobatools_5_mask_5Masks_2__array__(struct __pyx_obj_11pysobatools_5_mask_Masks *__pyx_v_self) {\n  npy_intp __pyx_v_shape[1];\n  PyObject *__pyx_v_ndarray = NULL;\n  PyObject *__pyx_r = NULL;\n  __Pyx_RefNannyDeclarations\n  PyObject *__pyx_t_1 = NULL;\n  PyObject *__pyx_t_2 = NULL;\n  PyObject *__pyx_t_3 = NULL;\n  PyObject *__pyx_t_4 = NULL;\n  PyObject *__pyx_t_5 = NULL;\n  __Pyx_RefNannySetupContext(\"__array__\", 0);\n\n  /* \"pysobatools/_mask.pyx\":95\n *     def __array__(self):\n *         cdef np.npy_intp shape[1]\n *         shape[0] = <np.npy_intp> self._h*self._w*self._n             # <<<<<<<<<<<<<<\n *         # Create a 1D array, and reshape it to fortran/Matlab column-major array\n *         ndarray = np.PyArray_SimpleNewFromData(1, shape, np.NPY_UINT8, self._mask).reshape((self._h, 
self._w, self._n), order='F')\n */\n  (__pyx_v_shape[0]) = ((((npy_intp)__pyx_v_self->_h) * __pyx_v_self->_w) * __pyx_v_self->_n);\n\n  /* \"pysobatools/_mask.pyx\":97\n *         shape[0] = <np.npy_intp> self._h*self._w*self._n\n *         # Create a 1D array, and reshape it to fortran/Matlab column-major array\n *         ndarray = np.PyArray_SimpleNewFromData(1, shape, np.NPY_UINT8, self._mask).reshape((self._h, self._w, self._n), order='F')             # <<<<<<<<<<<<<<\n *         # The _mask allocated by Masks is now handled by ndarray\n *         PyArray_ENABLEFLAGS(ndarray, np.NPY_OWNDATA)\n */\n  __pyx_t_1 = PyArray_SimpleNewFromData(1, __pyx_v_shape, NPY_UINT8, __pyx_v_self->_mask); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 97, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_1);\n  __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_reshape); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 97, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_2);\n  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;\n  __pyx_t_1 = __Pyx_PyInt_From_siz(__pyx_v_self->_h); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 97, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_1);\n  __pyx_t_3 = __Pyx_PyInt_From_siz(__pyx_v_self->_w); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 97, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_3);\n  __pyx_t_4 = __Pyx_PyInt_From_siz(__pyx_v_self->_n); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 97, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_4);\n  __pyx_t_5 = PyTuple_New(3); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 97, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_5);\n  __Pyx_GIVEREF(__pyx_t_1);\n  PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_t_1);\n  __Pyx_GIVEREF(__pyx_t_3);\n  PyTuple_SET_ITEM(__pyx_t_5, 1, __pyx_t_3);\n  __Pyx_GIVEREF(__pyx_t_4);\n  PyTuple_SET_ITEM(__pyx_t_5, 2, __pyx_t_4);\n  __pyx_t_1 = 0;\n  __pyx_t_3 = 0;\n  __pyx_t_4 = 0;\n  __pyx_t_4 = PyTuple_New(1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 97, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_4);\n  __Pyx_GIVEREF(__pyx_t_5);\n  PyTuple_SET_ITEM(__pyx_t_4, 0, 
__pyx_t_5);\n  __pyx_t_5 = 0;\n  __pyx_t_5 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 97, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_5);\n  if (PyDict_SetItem(__pyx_t_5, __pyx_n_s_order, __pyx_n_s_F) < 0) __PYX_ERR(0, 97, __pyx_L1_error)\n  __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_t_4, __pyx_t_5); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 97, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_3);\n  __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;\n  __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;\n  __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;\n  __pyx_v_ndarray = __pyx_t_3;\n  __pyx_t_3 = 0;\n\n  /* \"pysobatools/_mask.pyx\":99\n *         ndarray = np.PyArray_SimpleNewFromData(1, shape, np.NPY_UINT8, self._mask).reshape((self._h, self._w, self._n), order='F')\n *         # The _mask allocated by Masks is now handled by ndarray\n *         PyArray_ENABLEFLAGS(ndarray, np.NPY_OWNDATA)             # <<<<<<<<<<<<<<\n *         return ndarray\n * \n */\n  if (!(likely(((__pyx_v_ndarray) == Py_None) || likely(__Pyx_TypeTest(__pyx_v_ndarray, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 99, __pyx_L1_error)\n  PyArray_ENABLEFLAGS(((PyArrayObject *)__pyx_v_ndarray), NPY_OWNDATA);\n\n  /* \"pysobatools/_mask.pyx\":100\n *         # The _mask allocated by Masks is now handled by ndarray\n *         PyArray_ENABLEFLAGS(ndarray, np.NPY_OWNDATA)\n *         return ndarray             # <<<<<<<<<<<<<<\n * \n * # internal conversion from Python RLEs object to compressed RLE format\n */\n  __Pyx_XDECREF(__pyx_r);\n  __Pyx_INCREF(__pyx_v_ndarray);\n  __pyx_r = __pyx_v_ndarray;\n  goto __pyx_L0;\n\n  /* \"pysobatools/_mask.pyx\":93\n * \n *     # called when passing into np.array() and return an np.ndarray in column-major order\n *     def __array__(self):             # <<<<<<<<<<<<<<\n *         cdef np.npy_intp shape[1]\n *         shape[0] = <np.npy_intp> self._h*self._w*self._n\n */\n\n  /* function exit code */\n  __pyx_L1_error:;\n  __Pyx_XDECREF(__pyx_t_1);\n  
__Pyx_XDECREF(__pyx_t_2);\n  __Pyx_XDECREF(__pyx_t_3);\n  __Pyx_XDECREF(__pyx_t_4);\n  __Pyx_XDECREF(__pyx_t_5);\n  __Pyx_AddTraceback(\"pysobatools._mask.Masks.__array__\", __pyx_clineno, __pyx_lineno, __pyx_filename);\n  __pyx_r = NULL;\n  __pyx_L0:;\n  __Pyx_XDECREF(__pyx_v_ndarray);\n  __Pyx_XGIVEREF(__pyx_r);\n  __Pyx_RefNannyFinishContext();\n  return __pyx_r;\n}\n\n/* \"(tree fragment)\":1\n * def __reduce_cython__(self):             # <<<<<<<<<<<<<<\n *     raise TypeError(\"no default __reduce__ due to non-trivial __cinit__\")\n * def __setstate_cython__(self, __pyx_state):\n */\n\n/* Python wrapper */\nstatic PyObject *__pyx_pw_11pysobatools_5_mask_5Masks_5__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/\nstatic PyObject *__pyx_pw_11pysobatools_5_mask_5Masks_5__reduce_cython__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) {\n  PyObject *__pyx_r = 0;\n  __Pyx_RefNannyDeclarations\n  __Pyx_RefNannySetupContext(\"__reduce_cython__ (wrapper)\", 0);\n  __pyx_r = __pyx_pf_11pysobatools_5_mask_5Masks_4__reduce_cython__(((struct __pyx_obj_11pysobatools_5_mask_Masks *)__pyx_v_self));\n\n  /* function exit code */\n  __Pyx_RefNannyFinishContext();\n  return __pyx_r;\n}\n\nstatic PyObject *__pyx_pf_11pysobatools_5_mask_5Masks_4__reduce_cython__(CYTHON_UNUSED struct __pyx_obj_11pysobatools_5_mask_Masks *__pyx_v_self) {\n  PyObject *__pyx_r = NULL;\n  __Pyx_RefNannyDeclarations\n  PyObject *__pyx_t_1 = NULL;\n  __Pyx_RefNannySetupContext(\"__reduce_cython__\", 0);\n\n  /* \"(tree fragment)\":2\n * def __reduce_cython__(self):\n *     raise TypeError(\"no default __reduce__ due to non-trivial __cinit__\")             # <<<<<<<<<<<<<<\n * def __setstate_cython__(self, __pyx_state):\n *     raise TypeError(\"no default __reduce__ due to non-trivial __cinit__\")\n */\n  __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__3, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 2, __pyx_L1_error)\n  
__Pyx_GOTREF(__pyx_t_1);\n  __Pyx_Raise(__pyx_t_1, 0, 0, 0);\n  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;\n  __PYX_ERR(1, 2, __pyx_L1_error)\n\n  /* \"(tree fragment)\":1\n * def __reduce_cython__(self):             # <<<<<<<<<<<<<<\n *     raise TypeError(\"no default __reduce__ due to non-trivial __cinit__\")\n * def __setstate_cython__(self, __pyx_state):\n */\n\n  /* function exit code */\n  __pyx_L1_error:;\n  __Pyx_XDECREF(__pyx_t_1);\n  __Pyx_AddTraceback(\"pysobatools._mask.Masks.__reduce_cython__\", __pyx_clineno, __pyx_lineno, __pyx_filename);\n  __pyx_r = NULL;\n  __Pyx_XGIVEREF(__pyx_r);\n  __Pyx_RefNannyFinishContext();\n  return __pyx_r;\n}\n\n/* \"(tree fragment)\":3\n * def __reduce_cython__(self):\n *     raise TypeError(\"no default __reduce__ due to non-trivial __cinit__\")\n * def __setstate_cython__(self, __pyx_state):             # <<<<<<<<<<<<<<\n *     raise TypeError(\"no default __reduce__ due to non-trivial __cinit__\")\n */\n\n/* Python wrapper */\nstatic PyObject *__pyx_pw_11pysobatools_5_mask_5Masks_7__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state); /*proto*/\nstatic PyObject *__pyx_pw_11pysobatools_5_mask_5Masks_7__setstate_cython__(PyObject *__pyx_v_self, PyObject *__pyx_v___pyx_state) {\n  PyObject *__pyx_r = 0;\n  __Pyx_RefNannyDeclarations\n  __Pyx_RefNannySetupContext(\"__setstate_cython__ (wrapper)\", 0);\n  __pyx_r = __pyx_pf_11pysobatools_5_mask_5Masks_6__setstate_cython__(((struct __pyx_obj_11pysobatools_5_mask_Masks *)__pyx_v_self), ((PyObject *)__pyx_v___pyx_state));\n\n  /* function exit code */\n  __Pyx_RefNannyFinishContext();\n  return __pyx_r;\n}\n\nstatic PyObject *__pyx_pf_11pysobatools_5_mask_5Masks_6__setstate_cython__(CYTHON_UNUSED struct __pyx_obj_11pysobatools_5_mask_Masks *__pyx_v_self, CYTHON_UNUSED PyObject *__pyx_v___pyx_state) {\n  PyObject *__pyx_r = NULL;\n  __Pyx_RefNannyDeclarations\n  PyObject *__pyx_t_1 = NULL;\n  __Pyx_RefNannySetupContext(\"__setstate_cython__\", 0);\n\n  /* 
\"(tree fragment)\":4\n *     raise TypeError(\"no default __reduce__ due to non-trivial __cinit__\")\n * def __setstate_cython__(self, __pyx_state):\n *     raise TypeError(\"no default __reduce__ due to non-trivial __cinit__\")             # <<<<<<<<<<<<<<\n */\n  __pyx_t_1 = __Pyx_PyObject_Call(__pyx_builtin_TypeError, __pyx_tuple__4, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(1, 4, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_1);\n  __Pyx_Raise(__pyx_t_1, 0, 0, 0);\n  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;\n  __PYX_ERR(1, 4, __pyx_L1_error)\n\n  /* \"(tree fragment)\":3\n * def __reduce_cython__(self):\n *     raise TypeError(\"no default __reduce__ due to non-trivial __cinit__\")\n * def __setstate_cython__(self, __pyx_state):             # <<<<<<<<<<<<<<\n *     raise TypeError(\"no default __reduce__ due to non-trivial __cinit__\")\n */\n\n  /* function exit code */\n  __pyx_L1_error:;\n  __Pyx_XDECREF(__pyx_t_1);\n  __Pyx_AddTraceback(\"pysobatools._mask.Masks.__setstate_cython__\", __pyx_clineno, __pyx_lineno, __pyx_filename);\n  __pyx_r = NULL;\n  __Pyx_XGIVEREF(__pyx_r);\n  __Pyx_RefNannyFinishContext();\n  return __pyx_r;\n}\n\n/* \"pysobatools/_mask.pyx\":103\n * \n * # internal conversion from Python RLEs object to compressed RLE format\n * def _toString(RLEs Rs):             # <<<<<<<<<<<<<<\n *     cdef siz n = Rs.n\n *     cdef bytes py_string\n */\n\n/* Python wrapper */\nstatic PyObject *__pyx_pw_11pysobatools_5_mask_1_toString(PyObject *__pyx_self, PyObject *__pyx_v_Rs); /*proto*/\nstatic PyMethodDef __pyx_mdef_11pysobatools_5_mask_1_toString = {\"_toString\", (PyCFunction)__pyx_pw_11pysobatools_5_mask_1_toString, METH_O, 0};\nstatic PyObject *__pyx_pw_11pysobatools_5_mask_1_toString(PyObject *__pyx_self, PyObject *__pyx_v_Rs) {\n  PyObject *__pyx_r = 0;\n  __Pyx_RefNannyDeclarations\n  __Pyx_RefNannySetupContext(\"_toString (wrapper)\", 0);\n  if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_Rs), __pyx_ptype_11pysobatools_5_mask_RLEs, 1, 
\"Rs\", 0))) __PYX_ERR(0, 103, __pyx_L1_error)\n  __pyx_r = __pyx_pf_11pysobatools_5_mask__toString(__pyx_self, ((struct __pyx_obj_11pysobatools_5_mask_RLEs *)__pyx_v_Rs));\n\n  /* function exit code */\n  goto __pyx_L0;\n  __pyx_L1_error:;\n  __pyx_r = NULL;\n  __pyx_L0:;\n  __Pyx_RefNannyFinishContext();\n  return __pyx_r;\n}\n\nstatic PyObject *__pyx_pf_11pysobatools_5_mask__toString(CYTHON_UNUSED PyObject *__pyx_self, struct __pyx_obj_11pysobatools_5_mask_RLEs *__pyx_v_Rs) {\n  siz __pyx_v_n;\n  PyObject *__pyx_v_py_string = 0;\n  char *__pyx_v_c_string;\n  PyObject *__pyx_v_objs = NULL;\n  siz __pyx_v_i;\n  PyObject *__pyx_r = NULL;\n  __Pyx_RefNannyDeclarations\n  PyObject *__pyx_t_1 = NULL;\n  siz __pyx_t_2;\n  siz __pyx_t_3;\n  siz __pyx_t_4;\n  PyObject *__pyx_t_5 = NULL;\n  PyObject *__pyx_t_6 = NULL;\n  PyObject *__pyx_t_7 = NULL;\n  int __pyx_t_8;\n  __Pyx_RefNannySetupContext(\"_toString\", 0);\n\n  /* \"pysobatools/_mask.pyx\":104\n * # internal conversion from Python RLEs object to compressed RLE format\n * def _toString(RLEs Rs):\n *     cdef siz n = Rs.n             # <<<<<<<<<<<<<<\n *     cdef bytes py_string\n *     cdef char* c_string\n */\n  __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_Rs), __pyx_n_s_n); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 104, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_1);\n  __pyx_t_2 = __Pyx_PyInt_As_siz(__pyx_t_1); if (unlikely((__pyx_t_2 == ((siz)-1)) && PyErr_Occurred())) __PYX_ERR(0, 104, __pyx_L1_error)\n  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;\n  __pyx_v_n = __pyx_t_2;\n\n  /* \"pysobatools/_mask.pyx\":107\n *     cdef bytes py_string\n *     cdef char* c_string\n *     objs = []             # <<<<<<<<<<<<<<\n *     for i in range(n):\n *         c_string = rleToString( <RLE*> &Rs._R[i] )\n */\n  __pyx_t_1 = PyList_New(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 107, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_1);\n  __pyx_v_objs = ((PyObject*)__pyx_t_1);\n  __pyx_t_1 = 0;\n\n  /* 
\"pysobatools/_mask.pyx\":108\n *     cdef char* c_string\n *     objs = []\n *     for i in range(n):             # <<<<<<<<<<<<<<\n *         c_string = rleToString( <RLE*> &Rs._R[i] )\n *         py_string = c_string\n */\n  __pyx_t_2 = __pyx_v_n;\n  __pyx_t_3 = __pyx_t_2;\n  for (__pyx_t_4 = 0; __pyx_t_4 < __pyx_t_3; __pyx_t_4+=1) {\n    __pyx_v_i = __pyx_t_4;\n\n    /* \"pysobatools/_mask.pyx\":109\n *     objs = []\n *     for i in range(n):\n *         c_string = rleToString( <RLE*> &Rs._R[i] )             # <<<<<<<<<<<<<<\n *         py_string = c_string\n *         objs.append({\n */\n    __pyx_v_c_string = rleToString(((RLE *)(&(__pyx_v_Rs->_R[__pyx_v_i]))));\n\n    /* \"pysobatools/_mask.pyx\":110\n *     for i in range(n):\n *         c_string = rleToString( <RLE*> &Rs._R[i] )\n *         py_string = c_string             # <<<<<<<<<<<<<<\n *         objs.append({\n *             'size': [Rs._R[i].h, Rs._R[i].w],\n */\n    __pyx_t_1 = __Pyx_PyBytes_FromString(__pyx_v_c_string); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 110, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_1);\n    __Pyx_XDECREF_SET(__pyx_v_py_string, ((PyObject*)__pyx_t_1));\n    __pyx_t_1 = 0;\n\n    /* \"pysobatools/_mask.pyx\":112\n *         py_string = c_string\n *         objs.append({\n *             'size': [Rs._R[i].h, Rs._R[i].w],             # <<<<<<<<<<<<<<\n *             'counts': py_string\n *         })\n */\n    __pyx_t_1 = __Pyx_PyDict_NewPresized(2); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 112, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_1);\n    __pyx_t_5 = __Pyx_PyInt_From_siz((__pyx_v_Rs->_R[__pyx_v_i]).h); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 112, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_5);\n    __pyx_t_6 = __Pyx_PyInt_From_siz((__pyx_v_Rs->_R[__pyx_v_i]).w); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 112, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_6);\n    __pyx_t_7 = PyList_New(2); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 112, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_7);\n    
__Pyx_GIVEREF(__pyx_t_5);\n    PyList_SET_ITEM(__pyx_t_7, 0, __pyx_t_5);\n    __Pyx_GIVEREF(__pyx_t_6);\n    PyList_SET_ITEM(__pyx_t_7, 1, __pyx_t_6);\n    __pyx_t_5 = 0;\n    __pyx_t_6 = 0;\n    if (PyDict_SetItem(__pyx_t_1, __pyx_n_s_size, __pyx_t_7) < 0) __PYX_ERR(0, 112, __pyx_L1_error)\n    __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;\n\n    /* \"pysobatools/_mask.pyx\":113\n *         objs.append({\n *             'size': [Rs._R[i].h, Rs._R[i].w],\n *             'counts': py_string             # <<<<<<<<<<<<<<\n *         })\n *         free(c_string)\n */\n    if (PyDict_SetItem(__pyx_t_1, __pyx_n_s_counts, __pyx_v_py_string) < 0) __PYX_ERR(0, 112, __pyx_L1_error)\n\n    /* \"pysobatools/_mask.pyx\":111\n *         c_string = rleToString( <RLE*> &Rs._R[i] )\n *         py_string = c_string\n *         objs.append({             # <<<<<<<<<<<<<<\n *             'size': [Rs._R[i].h, Rs._R[i].w],\n *             'counts': py_string\n */\n    __pyx_t_8 = __Pyx_PyList_Append(__pyx_v_objs, __pyx_t_1); if (unlikely(__pyx_t_8 == ((int)-1))) __PYX_ERR(0, 111, __pyx_L1_error)\n    __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;\n\n    /* \"pysobatools/_mask.pyx\":115\n *             'counts': py_string\n *         })\n *         free(c_string)             # <<<<<<<<<<<<<<\n *     return objs\n * \n */\n    free(__pyx_v_c_string);\n  }\n\n  /* \"pysobatools/_mask.pyx\":116\n *         })\n *         free(c_string)\n *     return objs             # <<<<<<<<<<<<<<\n * \n * # internal conversion from compressed RLE format to Python RLEs object\n */\n  __Pyx_XDECREF(__pyx_r);\n  __Pyx_INCREF(__pyx_v_objs);\n  __pyx_r = __pyx_v_objs;\n  goto __pyx_L0;\n\n  /* \"pysobatools/_mask.pyx\":103\n * \n * # internal conversion from Python RLEs object to compressed RLE format\n * def _toString(RLEs Rs):             # <<<<<<<<<<<<<<\n *     cdef siz n = Rs.n\n *     cdef bytes py_string\n */\n\n  /* function exit code */\n  __pyx_L1_error:;\n  __Pyx_XDECREF(__pyx_t_1);\n  
__Pyx_XDECREF(__pyx_t_5);\n  __Pyx_XDECREF(__pyx_t_6);\n  __Pyx_XDECREF(__pyx_t_7);\n  __Pyx_AddTraceback(\"pysobatools._mask._toString\", __pyx_clineno, __pyx_lineno, __pyx_filename);\n  __pyx_r = NULL;\n  __pyx_L0:;\n  __Pyx_XDECREF(__pyx_v_py_string);\n  __Pyx_XDECREF(__pyx_v_objs);\n  __Pyx_XGIVEREF(__pyx_r);\n  __Pyx_RefNannyFinishContext();\n  return __pyx_r;\n}\n\n/* \"pysobatools/_mask.pyx\":119\n * \n * # internal conversion from compressed RLE format to Python RLEs object\n * def _frString(rleObjs):             # <<<<<<<<<<<<<<\n *     cdef siz n = len(rleObjs)\n *     Rs = RLEs(n)\n */\n\n/* Python wrapper */\nstatic PyObject *__pyx_pw_11pysobatools_5_mask_3_frString(PyObject *__pyx_self, PyObject *__pyx_v_rleObjs); /*proto*/\nstatic PyMethodDef __pyx_mdef_11pysobatools_5_mask_3_frString = {\"_frString\", (PyCFunction)__pyx_pw_11pysobatools_5_mask_3_frString, METH_O, 0};\nstatic PyObject *__pyx_pw_11pysobatools_5_mask_3_frString(PyObject *__pyx_self, PyObject *__pyx_v_rleObjs) {\n  PyObject *__pyx_r = 0;\n  __Pyx_RefNannyDeclarations\n  __Pyx_RefNannySetupContext(\"_frString (wrapper)\", 0);\n  __pyx_r = __pyx_pf_11pysobatools_5_mask_2_frString(__pyx_self, ((PyObject *)__pyx_v_rleObjs));\n\n  /* function exit code */\n  __Pyx_RefNannyFinishContext();\n  return __pyx_r;\n}\n\nstatic PyObject *__pyx_pf_11pysobatools_5_mask_2_frString(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_rleObjs) {\n  siz __pyx_v_n;\n  struct __pyx_obj_11pysobatools_5_mask_RLEs *__pyx_v_Rs = NULL;\n  PyObject *__pyx_v_py_string = 0;\n  char *__pyx_v_c_string;\n  PyObject *__pyx_v_i = NULL;\n  PyObject *__pyx_v_obj = NULL;\n  PyObject *__pyx_r = NULL;\n  __Pyx_RefNannyDeclarations\n  Py_ssize_t __pyx_t_1;\n  PyObject *__pyx_t_2 = NULL;\n  PyObject *__pyx_t_3 = NULL;\n  PyObject *(*__pyx_t_4)(PyObject *);\n  PyObject *__pyx_t_5 = NULL;\n  PyObject *__pyx_t_6 = NULL;\n  int __pyx_t_7;\n  PyObject *__pyx_t_8 = NULL;\n  PyObject *__pyx_t_9 = NULL;\n  PyObject *__pyx_t_10 = 
NULL;\n  char *__pyx_t_11;\n  Py_ssize_t __pyx_t_12;\n  siz __pyx_t_13;\n  siz __pyx_t_14;\n  __Pyx_RefNannySetupContext(\"_frString\", 0);\n\n  /* \"pysobatools/_mask.pyx\":120\n * # internal conversion from compressed RLE format to Python RLEs object\n * def _frString(rleObjs):\n *     cdef siz n = len(rleObjs)             # <<<<<<<<<<<<<<\n *     Rs = RLEs(n)\n *     cdef bytes py_string\n */\n  __pyx_t_1 = PyObject_Length(__pyx_v_rleObjs); if (unlikely(__pyx_t_1 == ((Py_ssize_t)-1))) __PYX_ERR(0, 120, __pyx_L1_error)\n  __pyx_v_n = __pyx_t_1;\n\n  /* \"pysobatools/_mask.pyx\":121\n * def _frString(rleObjs):\n *     cdef siz n = len(rleObjs)\n *     Rs = RLEs(n)             # <<<<<<<<<<<<<<\n *     cdef bytes py_string\n *     cdef char* c_string\n */\n  __pyx_t_2 = __Pyx_PyInt_From_siz(__pyx_v_n); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 121, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_2);\n  __pyx_t_3 = __Pyx_PyObject_CallOneArg(((PyObject *)__pyx_ptype_11pysobatools_5_mask_RLEs), __pyx_t_2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 121, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_3);\n  __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;\n  __pyx_v_Rs = ((struct __pyx_obj_11pysobatools_5_mask_RLEs *)__pyx_t_3);\n  __pyx_t_3 = 0;\n\n  /* \"pysobatools/_mask.pyx\":124\n *     cdef bytes py_string\n *     cdef char* c_string\n *     for i, obj in enumerate(rleObjs):             # <<<<<<<<<<<<<<\n *         if PYTHON_VERSION == 2:\n *             py_string = str(obj['counts']).encode('utf8')\n */\n  __Pyx_INCREF(__pyx_int_0);\n  __pyx_t_3 = __pyx_int_0;\n  if (likely(PyList_CheckExact(__pyx_v_rleObjs)) || PyTuple_CheckExact(__pyx_v_rleObjs)) {\n    __pyx_t_2 = __pyx_v_rleObjs; __Pyx_INCREF(__pyx_t_2); __pyx_t_1 = 0;\n    __pyx_t_4 = NULL;\n  } else {\n    __pyx_t_1 = -1; __pyx_t_2 = PyObject_GetIter(__pyx_v_rleObjs); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 124, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_2);\n    __pyx_t_4 = Py_TYPE(__pyx_t_2)->tp_iternext; if (unlikely(!__pyx_t_4)) 
__PYX_ERR(0, 124, __pyx_L1_error)\n  }\n  for (;;) {\n    if (likely(!__pyx_t_4)) {\n      if (likely(PyList_CheckExact(__pyx_t_2))) {\n        if (__pyx_t_1 >= PyList_GET_SIZE(__pyx_t_2)) break;\n        #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS\n        __pyx_t_5 = PyList_GET_ITEM(__pyx_t_2, __pyx_t_1); __Pyx_INCREF(__pyx_t_5); __pyx_t_1++; if (unlikely(0 < 0)) __PYX_ERR(0, 124, __pyx_L1_error)\n        #else\n        __pyx_t_5 = PySequence_ITEM(__pyx_t_2, __pyx_t_1); __pyx_t_1++; if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 124, __pyx_L1_error)\n        __Pyx_GOTREF(__pyx_t_5);\n        #endif\n      } else {\n        if (__pyx_t_1 >= PyTuple_GET_SIZE(__pyx_t_2)) break;\n        #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS\n        __pyx_t_5 = PyTuple_GET_ITEM(__pyx_t_2, __pyx_t_1); __Pyx_INCREF(__pyx_t_5); __pyx_t_1++; if (unlikely(0 < 0)) __PYX_ERR(0, 124, __pyx_L1_error)\n        #else\n        __pyx_t_5 = PySequence_ITEM(__pyx_t_2, __pyx_t_1); __pyx_t_1++; if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 124, __pyx_L1_error)\n        __Pyx_GOTREF(__pyx_t_5);\n        #endif\n      }\n    } else {\n      __pyx_t_5 = __pyx_t_4(__pyx_t_2);\n      if (unlikely(!__pyx_t_5)) {\n        PyObject* exc_type = PyErr_Occurred();\n        if (exc_type) {\n          if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear();\n          else __PYX_ERR(0, 124, __pyx_L1_error)\n        }\n        break;\n      }\n      __Pyx_GOTREF(__pyx_t_5);\n    }\n    __Pyx_XDECREF_SET(__pyx_v_obj, __pyx_t_5);\n    __pyx_t_5 = 0;\n    __Pyx_INCREF(__pyx_t_3);\n    __Pyx_XDECREF_SET(__pyx_v_i, __pyx_t_3);\n    __pyx_t_5 = __Pyx_PyInt_AddObjC(__pyx_t_3, __pyx_int_1, 1, 0, 0); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 124, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_5);\n    __Pyx_DECREF(__pyx_t_3);\n    __pyx_t_3 = __pyx_t_5;\n    __pyx_t_5 = 0;\n\n    /* \"pysobatools/_mask.pyx\":125\n *     cdef char* c_string\n *     for i, obj in 
enumerate(rleObjs):\n *         if PYTHON_VERSION == 2:             # <<<<<<<<<<<<<<\n *             py_string = str(obj['counts']).encode('utf8')\n *         elif PYTHON_VERSION == 3:\n */\n    __Pyx_GetModuleGlobalName(__pyx_t_5, __pyx_n_s_PYTHON_VERSION); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 125, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_5);\n    __pyx_t_6 = __Pyx_PyInt_EqObjC(__pyx_t_5, __pyx_int_2, 2, 0); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 125, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_6);\n    __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;\n    __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_6); if (unlikely(__pyx_t_7 < 0)) __PYX_ERR(0, 125, __pyx_L1_error)\n    __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;\n    if (__pyx_t_7) {\n\n      /* \"pysobatools/_mask.pyx\":126\n *     for i, obj in enumerate(rleObjs):\n *         if PYTHON_VERSION == 2:\n *             py_string = str(obj['counts']).encode('utf8')             # <<<<<<<<<<<<<<\n *         elif PYTHON_VERSION == 3:\n *             py_string = str.encode(obj['counts']) if type(obj['counts']) == str else obj['counts']\n */\n      __pyx_t_5 = __Pyx_PyObject_Dict_GetItem(__pyx_v_obj, __pyx_n_s_counts); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 126, __pyx_L1_error)\n      __Pyx_GOTREF(__pyx_t_5);\n      __pyx_t_8 = __Pyx_PyObject_CallOneArg(((PyObject *)(&PyString_Type)), __pyx_t_5); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 126, __pyx_L1_error)\n      __Pyx_GOTREF(__pyx_t_8);\n      __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;\n      __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_t_8, __pyx_n_s_encode); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 126, __pyx_L1_error)\n      __Pyx_GOTREF(__pyx_t_5);\n      __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0;\n      __pyx_t_8 = NULL;\n      if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_5))) {\n        __pyx_t_8 = PyMethod_GET_SELF(__pyx_t_5);\n        if (likely(__pyx_t_8)) {\n          PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_5);\n          __Pyx_INCREF(__pyx_t_8);\n          
__Pyx_INCREF(function);\n          __Pyx_DECREF_SET(__pyx_t_5, function);\n        }\n      }\n      __pyx_t_6 = (__pyx_t_8) ? __Pyx_PyObject_Call2Args(__pyx_t_5, __pyx_t_8, __pyx_n_s_utf8) : __Pyx_PyObject_CallOneArg(__pyx_t_5, __pyx_n_s_utf8);\n      __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0;\n      if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 126, __pyx_L1_error)\n      __Pyx_GOTREF(__pyx_t_6);\n      __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;\n      if (!(likely(PyBytes_CheckExact(__pyx_t_6))||((__pyx_t_6) == Py_None)||(PyErr_Format(PyExc_TypeError, \"Expected %.16s, got %.200s\", \"bytes\", Py_TYPE(__pyx_t_6)->tp_name), 0))) __PYX_ERR(0, 126, __pyx_L1_error)\n      __Pyx_XDECREF_SET(__pyx_v_py_string, ((PyObject*)__pyx_t_6));\n      __pyx_t_6 = 0;\n\n      /* \"pysobatools/_mask.pyx\":125\n *     cdef char* c_string\n *     for i, obj in enumerate(rleObjs):\n *         if PYTHON_VERSION == 2:             # <<<<<<<<<<<<<<\n *             py_string = str(obj['counts']).encode('utf8')\n *         elif PYTHON_VERSION == 3:\n */\n      goto __pyx_L5;\n    }\n\n    /* \"pysobatools/_mask.pyx\":127\n *         if PYTHON_VERSION == 2:\n *             py_string = str(obj['counts']).encode('utf8')\n *         elif PYTHON_VERSION == 3:             # <<<<<<<<<<<<<<\n *             py_string = str.encode(obj['counts']) if type(obj['counts']) == str else obj['counts']\n *         else:\n */\n    __Pyx_GetModuleGlobalName(__pyx_t_6, __pyx_n_s_PYTHON_VERSION); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 127, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_6);\n    __pyx_t_5 = __Pyx_PyInt_EqObjC(__pyx_t_6, __pyx_int_3, 3, 0); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 127, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_5);\n    __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;\n    __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_7 < 0)) __PYX_ERR(0, 127, __pyx_L1_error)\n    __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;\n    if (likely(__pyx_t_7)) {\n\n      /* \"pysobatools/_mask.pyx\":128\n *           
  py_string = str(obj['counts']).encode('utf8')\n *         elif PYTHON_VERSION == 3:\n *             py_string = str.encode(obj['counts']) if type(obj['counts']) == str else obj['counts']             # <<<<<<<<<<<<<<\n *         else:\n *             raise Exception('Python version must be 2 or 3')\n */\n      __pyx_t_6 = __Pyx_PyObject_Dict_GetItem(__pyx_v_obj, __pyx_n_s_counts); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 128, __pyx_L1_error)\n      __Pyx_GOTREF(__pyx_t_6);\n      __pyx_t_8 = PyObject_RichCompare(((PyObject *)Py_TYPE(__pyx_t_6)), ((PyObject *)(&PyString_Type)), Py_EQ); __Pyx_XGOTREF(__pyx_t_8); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 128, __pyx_L1_error)\n      __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;\n      __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_8); if (unlikely(__pyx_t_7 < 0)) __PYX_ERR(0, 128, __pyx_L1_error)\n      __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0;\n      if (__pyx_t_7) {\n        __pyx_t_6 = __Pyx_PyObject_GetAttrStr(((PyObject *)(&PyString_Type)), __pyx_n_s_encode); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 128, __pyx_L1_error)\n        __Pyx_GOTREF(__pyx_t_6);\n        __pyx_t_9 = __Pyx_PyObject_Dict_GetItem(__pyx_v_obj, __pyx_n_s_counts); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 128, __pyx_L1_error)\n        __Pyx_GOTREF(__pyx_t_9);\n        __pyx_t_10 = NULL;\n        if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_6))) {\n          __pyx_t_10 = PyMethod_GET_SELF(__pyx_t_6);\n          if (likely(__pyx_t_10)) {\n            PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6);\n            __Pyx_INCREF(__pyx_t_10);\n            __Pyx_INCREF(function);\n            __Pyx_DECREF_SET(__pyx_t_6, function);\n          }\n        }\n        __pyx_t_8 = (__pyx_t_10) ? 
__Pyx_PyObject_Call2Args(__pyx_t_6, __pyx_t_10, __pyx_t_9) : __Pyx_PyObject_CallOneArg(__pyx_t_6, __pyx_t_9);\n        __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0;\n        __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0;\n        if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 128, __pyx_L1_error)\n        __Pyx_GOTREF(__pyx_t_8);\n        __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;\n        if (!(likely(PyBytes_CheckExact(__pyx_t_8))||((__pyx_t_8) == Py_None)||(PyErr_Format(PyExc_TypeError, \"Expected %.16s, got %.200s\", \"bytes\", Py_TYPE(__pyx_t_8)->tp_name), 0))) __PYX_ERR(0, 128, __pyx_L1_error)\n        __pyx_t_5 = __pyx_t_8;\n        __pyx_t_8 = 0;\n      } else {\n        __pyx_t_8 = __Pyx_PyObject_Dict_GetItem(__pyx_v_obj, __pyx_n_s_counts); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 128, __pyx_L1_error)\n        __Pyx_GOTREF(__pyx_t_8);\n        if (!(likely(PyBytes_CheckExact(__pyx_t_8))||((__pyx_t_8) == Py_None)||(PyErr_Format(PyExc_TypeError, \"Expected %.16s, got %.200s\", \"bytes\", Py_TYPE(__pyx_t_8)->tp_name), 0))) __PYX_ERR(0, 128, __pyx_L1_error)\n        __pyx_t_5 = __pyx_t_8;\n        __pyx_t_8 = 0;\n      }\n      __Pyx_XDECREF_SET(__pyx_v_py_string, ((PyObject*)__pyx_t_5));\n      __pyx_t_5 = 0;\n\n      /* \"pysobatools/_mask.pyx\":127\n *         if PYTHON_VERSION == 2:\n *             py_string = str(obj['counts']).encode('utf8')\n *         elif PYTHON_VERSION == 3:             # <<<<<<<<<<<<<<\n *             py_string = str.encode(obj['counts']) if type(obj['counts']) == str else obj['counts']\n *         else:\n */\n      goto __pyx_L5;\n    }\n\n    /* \"pysobatools/_mask.pyx\":130\n *             py_string = str.encode(obj['counts']) if type(obj['counts']) == str else obj['counts']\n *         else:\n *             raise Exception('Python version must be 2 or 3')             # <<<<<<<<<<<<<<\n *         c_string = py_string\n *         rleFrString( <RLE*> &Rs._R[i], <char*> c_string, obj['size'][0], obj['size'][1] )\n */\n    /*else*/ {\n      __pyx_t_5 = 
__Pyx_PyObject_Call(((PyObject *)(&((PyTypeObject*)PyExc_Exception)[0])), __pyx_tuple__5, NULL); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 130, __pyx_L1_error)\n      __Pyx_GOTREF(__pyx_t_5);\n      __Pyx_Raise(__pyx_t_5, 0, 0, 0);\n      __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;\n      __PYX_ERR(0, 130, __pyx_L1_error)\n    }\n    __pyx_L5:;\n\n    /* \"pysobatools/_mask.pyx\":131\n *         else:\n *             raise Exception('Python version must be 2 or 3')\n *         c_string = py_string             # <<<<<<<<<<<<<<\n *         rleFrString( <RLE*> &Rs._R[i], <char*> c_string, obj['size'][0], obj['size'][1] )\n *     return Rs\n */\n    if (unlikely(__pyx_v_py_string == Py_None)) {\n      PyErr_SetString(PyExc_TypeError, \"expected bytes, NoneType found\");\n      __PYX_ERR(0, 131, __pyx_L1_error)\n    }\n    __pyx_t_11 = __Pyx_PyBytes_AsWritableString(__pyx_v_py_string); if (unlikely((!__pyx_t_11) && PyErr_Occurred())) __PYX_ERR(0, 131, __pyx_L1_error)\n    __pyx_v_c_string = __pyx_t_11;\n\n    /* \"pysobatools/_mask.pyx\":132\n *             raise Exception('Python version must be 2 or 3')\n *         c_string = py_string\n *         rleFrString( <RLE*> &Rs._R[i], <char*> c_string, obj['size'][0], obj['size'][1] )             # <<<<<<<<<<<<<<\n *     return Rs\n * \n */\n    __pyx_t_12 = __Pyx_PyIndex_AsSsize_t(__pyx_v_i); if (unlikely((__pyx_t_12 == (Py_ssize_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 132, __pyx_L1_error)\n    __pyx_t_5 = __Pyx_PyObject_Dict_GetItem(__pyx_v_obj, __pyx_n_s_size); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 132, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_5);\n    __pyx_t_8 = __Pyx_GetItemInt(__pyx_t_5, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 132, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_8);\n    __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;\n    __pyx_t_13 = __Pyx_PyInt_As_siz(__pyx_t_8); if (unlikely((__pyx_t_13 == ((siz)-1)) && PyErr_Occurred())) __PYX_ERR(0, 132, __pyx_L1_error)\n    
__Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0;\n    __pyx_t_8 = __Pyx_PyObject_Dict_GetItem(__pyx_v_obj, __pyx_n_s_size); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 132, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_8);\n    __pyx_t_5 = __Pyx_GetItemInt(__pyx_t_8, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 132, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_5);\n    __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0;\n    __pyx_t_14 = __Pyx_PyInt_As_siz(__pyx_t_5); if (unlikely((__pyx_t_14 == ((siz)-1)) && PyErr_Occurred())) __PYX_ERR(0, 132, __pyx_L1_error)\n    __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;\n    rleFrString(((RLE *)(&(__pyx_v_Rs->_R[__pyx_t_12]))), ((char *)__pyx_v_c_string), __pyx_t_13, __pyx_t_14);\n\n    /* \"pysobatools/_mask.pyx\":124\n *     cdef bytes py_string\n *     cdef char* c_string\n *     for i, obj in enumerate(rleObjs):             # <<<<<<<<<<<<<<\n *         if PYTHON_VERSION == 2:\n *             py_string = str(obj['counts']).encode('utf8')\n */\n  }\n  __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;\n  __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;\n\n  /* \"pysobatools/_mask.pyx\":133\n *         c_string = py_string\n *         rleFrString( <RLE*> &Rs._R[i], <char*> c_string, obj['size'][0], obj['size'][1] )\n *     return Rs             # <<<<<<<<<<<<<<\n * \n * # encode mask to RLEs objects\n */\n  __Pyx_XDECREF(__pyx_r);\n  __Pyx_INCREF(((PyObject *)__pyx_v_Rs));\n  __pyx_r = ((PyObject *)__pyx_v_Rs);\n  goto __pyx_L0;\n\n  /* \"pysobatools/_mask.pyx\":119\n * \n * # internal conversion from compressed RLE format to Python RLEs object\n * def _frString(rleObjs):             # <<<<<<<<<<<<<<\n *     cdef siz n = len(rleObjs)\n *     Rs = RLEs(n)\n */\n\n  /* function exit code */\n  __pyx_L1_error:;\n  __Pyx_XDECREF(__pyx_t_2);\n  __Pyx_XDECREF(__pyx_t_3);\n  __Pyx_XDECREF(__pyx_t_5);\n  __Pyx_XDECREF(__pyx_t_6);\n  __Pyx_XDECREF(__pyx_t_8);\n  __Pyx_XDECREF(__pyx_t_9);\n  __Pyx_XDECREF(__pyx_t_10);\n  
__Pyx_AddTraceback(\"pysobatools._mask._frString\", __pyx_clineno, __pyx_lineno, __pyx_filename);\n  __pyx_r = NULL;\n  __pyx_L0:;\n  __Pyx_XDECREF((PyObject *)__pyx_v_Rs);\n  __Pyx_XDECREF(__pyx_v_py_string);\n  __Pyx_XDECREF(__pyx_v_i);\n  __Pyx_XDECREF(__pyx_v_obj);\n  __Pyx_XGIVEREF(__pyx_r);\n  __Pyx_RefNannyFinishContext();\n  return __pyx_r;\n}\n\n/* \"pysobatools/_mask.pyx\":137\n * # encode mask to RLEs objects\n * # list of RLE string can be generated by RLEs member function\n * def encode(np.ndarray[np.uint8_t, ndim=3, mode='fortran'] mask):             # <<<<<<<<<<<<<<\n *     h, w, n = mask.shape[0], mask.shape[1], mask.shape[2]\n *     cdef RLEs Rs = RLEs(n)\n */\n\n/* Python wrapper */\nstatic PyObject *__pyx_pw_11pysobatools_5_mask_5encode(PyObject *__pyx_self, PyObject *__pyx_v_mask); /*proto*/\nstatic PyMethodDef __pyx_mdef_11pysobatools_5_mask_5encode = {\"encode\", (PyCFunction)__pyx_pw_11pysobatools_5_mask_5encode, METH_O, 0};\nstatic PyObject *__pyx_pw_11pysobatools_5_mask_5encode(PyObject *__pyx_self, PyObject *__pyx_v_mask) {\n  PyObject *__pyx_r = 0;\n  __Pyx_RefNannyDeclarations\n  __Pyx_RefNannySetupContext(\"encode (wrapper)\", 0);\n  if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_mask), __pyx_ptype_5numpy_ndarray, 1, \"mask\", 0))) __PYX_ERR(0, 137, __pyx_L1_error)\n  __pyx_r = __pyx_pf_11pysobatools_5_mask_4encode(__pyx_self, ((PyArrayObject *)__pyx_v_mask));\n\n  /* function exit code */\n  goto __pyx_L0;\n  __pyx_L1_error:;\n  __pyx_r = NULL;\n  __pyx_L0:;\n  __Pyx_RefNannyFinishContext();\n  return __pyx_r;\n}\n\nstatic PyObject *__pyx_pf_11pysobatools_5_mask_4encode(CYTHON_UNUSED PyObject *__pyx_self, PyArrayObject *__pyx_v_mask) {\n  npy_intp __pyx_v_h;\n  npy_intp __pyx_v_w;\n  npy_intp __pyx_v_n;\n  struct __pyx_obj_11pysobatools_5_mask_RLEs *__pyx_v_Rs = 0;\n  PyObject *__pyx_v_objs = NULL;\n  __Pyx_LocalBuf_ND __pyx_pybuffernd_mask;\n  __Pyx_Buffer __pyx_pybuffer_mask;\n  PyObject *__pyx_r = NULL;\n  
__Pyx_RefNannyDeclarations\n  npy_intp __pyx_t_1;\n  npy_intp __pyx_t_2;\n  npy_intp __pyx_t_3;\n  PyObject *__pyx_t_4 = NULL;\n  PyObject *__pyx_t_5 = NULL;\n  PyObject *__pyx_t_6 = NULL;\n  __Pyx_RefNannySetupContext(\"encode\", 0);\n  __pyx_pybuffer_mask.pybuffer.buf = NULL;\n  __pyx_pybuffer_mask.refcount = 0;\n  __pyx_pybuffernd_mask.data = NULL;\n  __pyx_pybuffernd_mask.rcbuffer = &__pyx_pybuffer_mask;\n  {\n    __Pyx_BufFmt_StackElem __pyx_stack[1];\n    if (unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_mask.rcbuffer->pybuffer, (PyObject*)__pyx_v_mask, &__Pyx_TypeInfo_nn___pyx_t_5numpy_uint8_t, PyBUF_FORMAT| PyBUF_F_CONTIGUOUS, 3, 0, __pyx_stack) == -1)) __PYX_ERR(0, 137, __pyx_L1_error)\n  }\n  __pyx_pybuffernd_mask.diminfo[0].strides = __pyx_pybuffernd_mask.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_mask.diminfo[0].shape = __pyx_pybuffernd_mask.rcbuffer->pybuffer.shape[0]; __pyx_pybuffernd_mask.diminfo[1].strides = __pyx_pybuffernd_mask.rcbuffer->pybuffer.strides[1]; __pyx_pybuffernd_mask.diminfo[1].shape = __pyx_pybuffernd_mask.rcbuffer->pybuffer.shape[1]; __pyx_pybuffernd_mask.diminfo[2].strides = __pyx_pybuffernd_mask.rcbuffer->pybuffer.strides[2]; __pyx_pybuffernd_mask.diminfo[2].shape = __pyx_pybuffernd_mask.rcbuffer->pybuffer.shape[2];\n\n  /* \"pysobatools/_mask.pyx\":138\n * # list of RLE string can be generated by RLEs member function\n * def encode(np.ndarray[np.uint8_t, ndim=3, mode='fortran'] mask):\n *     h, w, n = mask.shape[0], mask.shape[1], mask.shape[2]             # <<<<<<<<<<<<<<\n *     cdef RLEs Rs = RLEs(n)\n *     rleEncode(Rs._R,<byte*>mask.data,h,w,n)\n */\n  __pyx_t_1 = (__pyx_v_mask->dimensions[0]);\n  __pyx_t_2 = (__pyx_v_mask->dimensions[1]);\n  __pyx_t_3 = (__pyx_v_mask->dimensions[2]);\n  __pyx_v_h = __pyx_t_1;\n  __pyx_v_w = __pyx_t_2;\n  __pyx_v_n = __pyx_t_3;\n\n  /* \"pysobatools/_mask.pyx\":139\n * def encode(np.ndarray[np.uint8_t, ndim=3, mode='fortran'] mask):\n *     h, w, n = mask.shape[0], 
mask.shape[1], mask.shape[2]\n *     cdef RLEs Rs = RLEs(n)             # <<<<<<<<<<<<<<\n *     rleEncode(Rs._R,<byte*>mask.data,h,w,n)\n *     objs = _toString(Rs)\n */\n  __pyx_t_4 = __Pyx_PyInt_From_Py_intptr_t(__pyx_v_n); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 139, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_4);\n  __pyx_t_5 = __Pyx_PyObject_CallOneArg(((PyObject *)__pyx_ptype_11pysobatools_5_mask_RLEs), __pyx_t_4); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 139, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_5);\n  __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;\n  __pyx_v_Rs = ((struct __pyx_obj_11pysobatools_5_mask_RLEs *)__pyx_t_5);\n  __pyx_t_5 = 0;\n\n  /* \"pysobatools/_mask.pyx\":140\n *     h, w, n = mask.shape[0], mask.shape[1], mask.shape[2]\n *     cdef RLEs Rs = RLEs(n)\n *     rleEncode(Rs._R,<byte*>mask.data,h,w,n)             # <<<<<<<<<<<<<<\n *     objs = _toString(Rs)\n *     return objs\n */\n  rleEncode(__pyx_v_Rs->_R, ((byte *)__pyx_v_mask->data), __pyx_v_h, __pyx_v_w, __pyx_v_n);\n\n  /* \"pysobatools/_mask.pyx\":141\n *     cdef RLEs Rs = RLEs(n)\n *     rleEncode(Rs._R,<byte*>mask.data,h,w,n)\n *     objs = _toString(Rs)             # <<<<<<<<<<<<<<\n *     return objs\n * \n */\n  __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_toString); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 141, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_4);\n  __pyx_t_6 = NULL;\n  if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_4))) {\n    __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_4);\n    if (likely(__pyx_t_6)) {\n      PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4);\n      __Pyx_INCREF(__pyx_t_6);\n      __Pyx_INCREF(function);\n      __Pyx_DECREF_SET(__pyx_t_4, function);\n    }\n  }\n  __pyx_t_5 = (__pyx_t_6) ? 
__Pyx_PyObject_Call2Args(__pyx_t_4, __pyx_t_6, ((PyObject *)__pyx_v_Rs)) : __Pyx_PyObject_CallOneArg(__pyx_t_4, ((PyObject *)__pyx_v_Rs));\n  __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0;\n  if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 141, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_5);\n  __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;\n  __pyx_v_objs = __pyx_t_5;\n  __pyx_t_5 = 0;\n\n  /* \"pysobatools/_mask.pyx\":142\n *     rleEncode(Rs._R,<byte*>mask.data,h,w,n)\n *     objs = _toString(Rs)\n *     return objs             # <<<<<<<<<<<<<<\n * \n * # decode mask from compressed list of RLE string or RLEs object\n */\n  __Pyx_XDECREF(__pyx_r);\n  __Pyx_INCREF(__pyx_v_objs);\n  __pyx_r = __pyx_v_objs;\n  goto __pyx_L0;\n\n  /* \"pysobatools/_mask.pyx\":137\n * # encode mask to RLEs objects\n * # list of RLE string can be generated by RLEs member function\n * def encode(np.ndarray[np.uint8_t, ndim=3, mode='fortran'] mask):             # <<<<<<<<<<<<<<\n *     h, w, n = mask.shape[0], mask.shape[1], mask.shape[2]\n *     cdef RLEs Rs = RLEs(n)\n */\n\n  /* function exit code */\n  __pyx_L1_error:;\n  __Pyx_XDECREF(__pyx_t_4);\n  __Pyx_XDECREF(__pyx_t_5);\n  __Pyx_XDECREF(__pyx_t_6);\n  { PyObject *__pyx_type, *__pyx_value, *__pyx_tb;\n    __Pyx_PyThreadState_declare\n    __Pyx_PyThreadState_assign\n    __Pyx_ErrFetch(&__pyx_type, &__pyx_value, &__pyx_tb);\n    __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_mask.rcbuffer->pybuffer);\n  __Pyx_ErrRestore(__pyx_type, __pyx_value, __pyx_tb);}\n  __Pyx_AddTraceback(\"pysobatools._mask.encode\", __pyx_clineno, __pyx_lineno, __pyx_filename);\n  __pyx_r = NULL;\n  goto __pyx_L2;\n  __pyx_L0:;\n  __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_mask.rcbuffer->pybuffer);\n  __pyx_L2:;\n  __Pyx_XDECREF((PyObject *)__pyx_v_Rs);\n  __Pyx_XDECREF(__pyx_v_objs);\n  __Pyx_XGIVEREF(__pyx_r);\n  __Pyx_RefNannyFinishContext();\n  return __pyx_r;\n}\n\n/* \"pysobatools/_mask.pyx\":145\n * \n * # decode mask from compressed list of RLE string or RLEs object\n * def 
decode(rleObjs):             # <<<<<<<<<<<<<<\n *     cdef RLEs Rs = _frString(rleObjs)\n *     h, w, n = Rs._R[0].h, Rs._R[0].w, Rs._n\n */\n\n/* Python wrapper */\nstatic PyObject *__pyx_pw_11pysobatools_5_mask_7decode(PyObject *__pyx_self, PyObject *__pyx_v_rleObjs); /*proto*/\nstatic PyMethodDef __pyx_mdef_11pysobatools_5_mask_7decode = {\"decode\", (PyCFunction)__pyx_pw_11pysobatools_5_mask_7decode, METH_O, 0};\nstatic PyObject *__pyx_pw_11pysobatools_5_mask_7decode(PyObject *__pyx_self, PyObject *__pyx_v_rleObjs) {\n  PyObject *__pyx_r = 0;\n  __Pyx_RefNannyDeclarations\n  __Pyx_RefNannySetupContext(\"decode (wrapper)\", 0);\n  __pyx_r = __pyx_pf_11pysobatools_5_mask_6decode(__pyx_self, ((PyObject *)__pyx_v_rleObjs));\n\n  /* function exit code */\n  __Pyx_RefNannyFinishContext();\n  return __pyx_r;\n}\n\nstatic PyObject *__pyx_pf_11pysobatools_5_mask_6decode(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_rleObjs) {\n  struct __pyx_obj_11pysobatools_5_mask_RLEs *__pyx_v_Rs = 0;\n  siz __pyx_v_h;\n  siz __pyx_v_w;\n  siz __pyx_v_n;\n  struct __pyx_obj_11pysobatools_5_mask_Masks *__pyx_v_masks = NULL;\n  PyObject *__pyx_r = NULL;\n  __Pyx_RefNannyDeclarations\n  PyObject *__pyx_t_1 = NULL;\n  PyObject *__pyx_t_2 = NULL;\n  PyObject *__pyx_t_3 = NULL;\n  siz __pyx_t_4;\n  siz __pyx_t_5;\n  siz __pyx_t_6;\n  PyObject *__pyx_t_7 = NULL;\n  __Pyx_RefNannySetupContext(\"decode\", 0);\n\n  /* \"pysobatools/_mask.pyx\":146\n * # decode mask from compressed list of RLE string or RLEs object\n * def decode(rleObjs):\n *     cdef RLEs Rs = _frString(rleObjs)             # <<<<<<<<<<<<<<\n *     h, w, n = Rs._R[0].h, Rs._R[0].w, Rs._n\n *     masks = Masks(h, w, n)\n */\n  __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_frString); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 146, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_2);\n  __pyx_t_3 = NULL;\n  if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_2))) {\n    __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2);\n    if 
(likely(__pyx_t_3)) {\n      PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2);\n      __Pyx_INCREF(__pyx_t_3);\n      __Pyx_INCREF(function);\n      __Pyx_DECREF_SET(__pyx_t_2, function);\n    }\n  }\n  __pyx_t_1 = (__pyx_t_3) ? __Pyx_PyObject_Call2Args(__pyx_t_2, __pyx_t_3, __pyx_v_rleObjs) : __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_v_rleObjs);\n  __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0;\n  if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 146, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_1);\n  __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;\n  if (!(likely(((__pyx_t_1) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_1, __pyx_ptype_11pysobatools_5_mask_RLEs))))) __PYX_ERR(0, 146, __pyx_L1_error)\n  __pyx_v_Rs = ((struct __pyx_obj_11pysobatools_5_mask_RLEs *)__pyx_t_1);\n  __pyx_t_1 = 0;\n\n  /* \"pysobatools/_mask.pyx\":147\n * def decode(rleObjs):\n *     cdef RLEs Rs = _frString(rleObjs)\n *     h, w, n = Rs._R[0].h, Rs._R[0].w, Rs._n             # <<<<<<<<<<<<<<\n *     masks = Masks(h, w, n)\n *     rleDecode(<RLE*>Rs._R, masks._mask, n);\n */\n  __pyx_t_4 = (__pyx_v_Rs->_R[0]).h;\n  __pyx_t_5 = (__pyx_v_Rs->_R[0]).w;\n  __pyx_t_6 = __pyx_v_Rs->_n;\n  __pyx_v_h = __pyx_t_4;\n  __pyx_v_w = __pyx_t_5;\n  __pyx_v_n = __pyx_t_6;\n\n  /* \"pysobatools/_mask.pyx\":148\n *     cdef RLEs Rs = _frString(rleObjs)\n *     h, w, n = Rs._R[0].h, Rs._R[0].w, Rs._n\n *     masks = Masks(h, w, n)             # <<<<<<<<<<<<<<\n *     rleDecode(<RLE*>Rs._R, masks._mask, n);\n *     return np.array(masks)\n */\n  __pyx_t_1 = __Pyx_PyInt_From_siz(__pyx_v_h); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 148, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_1);\n  __pyx_t_2 = __Pyx_PyInt_From_siz(__pyx_v_w); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 148, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_2);\n  __pyx_t_3 = __Pyx_PyInt_From_siz(__pyx_v_n); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 148, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_3);\n  __pyx_t_7 = PyTuple_New(3); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 148, 
__pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_7);\n  __Pyx_GIVEREF(__pyx_t_1);\n  PyTuple_SET_ITEM(__pyx_t_7, 0, __pyx_t_1);\n  __Pyx_GIVEREF(__pyx_t_2);\n  PyTuple_SET_ITEM(__pyx_t_7, 1, __pyx_t_2);\n  __Pyx_GIVEREF(__pyx_t_3);\n  PyTuple_SET_ITEM(__pyx_t_7, 2, __pyx_t_3);\n  __pyx_t_1 = 0;\n  __pyx_t_2 = 0;\n  __pyx_t_3 = 0;\n  __pyx_t_3 = __Pyx_PyObject_Call(((PyObject *)__pyx_ptype_11pysobatools_5_mask_Masks), __pyx_t_7, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 148, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_3);\n  __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;\n  __pyx_v_masks = ((struct __pyx_obj_11pysobatools_5_mask_Masks *)__pyx_t_3);\n  __pyx_t_3 = 0;\n\n  /* \"pysobatools/_mask.pyx\":149\n *     h, w, n = Rs._R[0].h, Rs._R[0].w, Rs._n\n *     masks = Masks(h, w, n)\n *     rleDecode(<RLE*>Rs._R, masks._mask, n);             # <<<<<<<<<<<<<<\n *     return np.array(masks)\n * \n */\n  rleDecode(((RLE *)__pyx_v_Rs->_R), __pyx_v_masks->_mask, __pyx_v_n);\n\n  /* \"pysobatools/_mask.pyx\":150\n *     masks = Masks(h, w, n)\n *     rleDecode(<RLE*>Rs._R, masks._mask, n);\n *     return np.array(masks)             # <<<<<<<<<<<<<<\n * \n * def merge(rleObjs, intersect=0):\n */\n  __Pyx_XDECREF(__pyx_r);\n  __Pyx_GetModuleGlobalName(__pyx_t_7, __pyx_n_s_np); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 150, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_7);\n  __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_7, __pyx_n_s_array); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 150, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_2);\n  __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;\n  __pyx_t_7 = NULL;\n  if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_2))) {\n    __pyx_t_7 = PyMethod_GET_SELF(__pyx_t_2);\n    if (likely(__pyx_t_7)) {\n      PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2);\n      __Pyx_INCREF(__pyx_t_7);\n      __Pyx_INCREF(function);\n      __Pyx_DECREF_SET(__pyx_t_2, function);\n    }\n  }\n  __pyx_t_3 = (__pyx_t_7) ? 
__Pyx_PyObject_Call2Args(__pyx_t_2, __pyx_t_7, ((PyObject *)__pyx_v_masks)) : __Pyx_PyObject_CallOneArg(__pyx_t_2, ((PyObject *)__pyx_v_masks));\n  __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0;\n  if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 150, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_3);\n  __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;\n  __pyx_r = __pyx_t_3;\n  __pyx_t_3 = 0;\n  goto __pyx_L0;\n\n  /* \"pysobatools/_mask.pyx\":145\n * \n * # decode mask from compressed list of RLE string or RLEs object\n * def decode(rleObjs):             # <<<<<<<<<<<<<<\n *     cdef RLEs Rs = _frString(rleObjs)\n *     h, w, n = Rs._R[0].h, Rs._R[0].w, Rs._n\n */\n\n  /* function exit code */\n  __pyx_L1_error:;\n  __Pyx_XDECREF(__pyx_t_1);\n  __Pyx_XDECREF(__pyx_t_2);\n  __Pyx_XDECREF(__pyx_t_3);\n  __Pyx_XDECREF(__pyx_t_7);\n  __Pyx_AddTraceback(\"pysobatools._mask.decode\", __pyx_clineno, __pyx_lineno, __pyx_filename);\n  __pyx_r = NULL;\n  __pyx_L0:;\n  __Pyx_XDECREF((PyObject *)__pyx_v_Rs);\n  __Pyx_XDECREF((PyObject *)__pyx_v_masks);\n  __Pyx_XGIVEREF(__pyx_r);\n  __Pyx_RefNannyFinishContext();\n  return __pyx_r;\n}\n\n/* \"pysobatools/_mask.pyx\":152\n *     return np.array(masks)\n * \n * def merge(rleObjs, intersect=0):             # <<<<<<<<<<<<<<\n *     cdef RLEs Rs = _frString(rleObjs)\n *     cdef RLEs R = RLEs(1)\n */\n\n/* Python wrapper */\nstatic PyObject *__pyx_pw_11pysobatools_5_mask_9merge(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/\nstatic PyMethodDef __pyx_mdef_11pysobatools_5_mask_9merge = {\"merge\", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_11pysobatools_5_mask_9merge, METH_VARARGS|METH_KEYWORDS, 0};\nstatic PyObject *__pyx_pw_11pysobatools_5_mask_9merge(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) {\n  PyObject *__pyx_v_rleObjs = 0;\n  PyObject *__pyx_v_intersect = 0;\n  PyObject *__pyx_r = 0;\n  __Pyx_RefNannyDeclarations\n  __Pyx_RefNannySetupContext(\"merge (wrapper)\", 0);\n  {\n    static PyObject 
**__pyx_pyargnames[] = {&__pyx_n_s_rleObjs,&__pyx_n_s_intersect,0};\n    PyObject* values[2] = {0,0};\n    values[1] = ((PyObject *)__pyx_int_0);\n    if (unlikely(__pyx_kwds)) {\n      Py_ssize_t kw_args;\n      const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args);\n      switch (pos_args) {\n        case  2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1);\n        CYTHON_FALLTHROUGH;\n        case  1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0);\n        CYTHON_FALLTHROUGH;\n        case  0: break;\n        default: goto __pyx_L5_argtuple_error;\n      }\n      kw_args = PyDict_Size(__pyx_kwds);\n      switch (pos_args) {\n        case  0:\n        if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_rleObjs)) != 0)) kw_args--;\n        else goto __pyx_L5_argtuple_error;\n        CYTHON_FALLTHROUGH;\n        case  1:\n        if (kw_args > 0) {\n          PyObject* value = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_intersect);\n          if (value) { values[1] = value; kw_args--; }\n        }\n      }\n      if (unlikely(kw_args > 0)) {\n        if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, \"merge\") < 0)) __PYX_ERR(0, 152, __pyx_L3_error)\n      }\n    } else {\n      switch (PyTuple_GET_SIZE(__pyx_args)) {\n        case  2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1);\n        CYTHON_FALLTHROUGH;\n        case  1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0);\n        break;\n        default: goto __pyx_L5_argtuple_error;\n      }\n    }\n    __pyx_v_rleObjs = values[0];\n    __pyx_v_intersect = values[1];\n  }\n  goto __pyx_L4_argument_unpacking_done;\n  __pyx_L5_argtuple_error:;\n  __Pyx_RaiseArgtupleInvalid(\"merge\", 0, 1, 2, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 152, __pyx_L3_error)\n  __pyx_L3_error:;\n  __Pyx_AddTraceback(\"pysobatools._mask.merge\", __pyx_clineno, __pyx_lineno, __pyx_filename);\n  __Pyx_RefNannyFinishContext();\n  return NULL;\n  
__pyx_L4_argument_unpacking_done:;\n  __pyx_r = __pyx_pf_11pysobatools_5_mask_8merge(__pyx_self, __pyx_v_rleObjs, __pyx_v_intersect);\n\n  /* function exit code */\n  __Pyx_RefNannyFinishContext();\n  return __pyx_r;\n}\n\nstatic PyObject *__pyx_pf_11pysobatools_5_mask_8merge(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_rleObjs, PyObject *__pyx_v_intersect) {\n  struct __pyx_obj_11pysobatools_5_mask_RLEs *__pyx_v_Rs = 0;\n  struct __pyx_obj_11pysobatools_5_mask_RLEs *__pyx_v_R = 0;\n  PyObject *__pyx_v_obj = NULL;\n  PyObject *__pyx_r = NULL;\n  __Pyx_RefNannyDeclarations\n  PyObject *__pyx_t_1 = NULL;\n  PyObject *__pyx_t_2 = NULL;\n  PyObject *__pyx_t_3 = NULL;\n  int __pyx_t_4;\n  __Pyx_RefNannySetupContext(\"merge\", 0);\n\n  /* \"pysobatools/_mask.pyx\":153\n * \n * def merge(rleObjs, intersect=0):\n *     cdef RLEs Rs = _frString(rleObjs)             # <<<<<<<<<<<<<<\n *     cdef RLEs R = RLEs(1)\n *     rleMerge(<RLE*>Rs._R, <RLE*> R._R, <siz> Rs._n, intersect)\n */\n  __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_frString); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 153, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_2);\n  __pyx_t_3 = NULL;\n  if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_2))) {\n    __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2);\n    if (likely(__pyx_t_3)) {\n      PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2);\n      __Pyx_INCREF(__pyx_t_3);\n      __Pyx_INCREF(function);\n      __Pyx_DECREF_SET(__pyx_t_2, function);\n    }\n  }\n  __pyx_t_1 = (__pyx_t_3) ? 
__Pyx_PyObject_Call2Args(__pyx_t_2, __pyx_t_3, __pyx_v_rleObjs) : __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_v_rleObjs);\n  __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0;\n  if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 153, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_1);\n  __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;\n  if (!(likely(((__pyx_t_1) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_1, __pyx_ptype_11pysobatools_5_mask_RLEs))))) __PYX_ERR(0, 153, __pyx_L1_error)\n  __pyx_v_Rs = ((struct __pyx_obj_11pysobatools_5_mask_RLEs *)__pyx_t_1);\n  __pyx_t_1 = 0;\n\n  /* \"pysobatools/_mask.pyx\":154\n * def merge(rleObjs, intersect=0):\n *     cdef RLEs Rs = _frString(rleObjs)\n *     cdef RLEs R = RLEs(1)             # <<<<<<<<<<<<<<\n *     rleMerge(<RLE*>Rs._R, <RLE*> R._R, <siz> Rs._n, intersect)\n *     obj = _toString(R)[0]\n */\n  __pyx_t_1 = __Pyx_PyObject_Call(((PyObject *)__pyx_ptype_11pysobatools_5_mask_RLEs), __pyx_tuple__6, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 154, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_1);\n  __pyx_v_R = ((struct __pyx_obj_11pysobatools_5_mask_RLEs *)__pyx_t_1);\n  __pyx_t_1 = 0;\n\n  /* \"pysobatools/_mask.pyx\":155\n *     cdef RLEs Rs = _frString(rleObjs)\n *     cdef RLEs R = RLEs(1)\n *     rleMerge(<RLE*>Rs._R, <RLE*> R._R, <siz> Rs._n, intersect)             # <<<<<<<<<<<<<<\n *     obj = _toString(R)[0]\n *     return obj\n */\n  __pyx_t_4 = __Pyx_PyInt_As_int(__pyx_v_intersect); if (unlikely((__pyx_t_4 == (int)-1) && PyErr_Occurred())) __PYX_ERR(0, 155, __pyx_L1_error)\n  rleMerge(((RLE *)__pyx_v_Rs->_R), ((RLE *)__pyx_v_R->_R), ((siz)__pyx_v_Rs->_n), __pyx_t_4);\n\n  /* \"pysobatools/_mask.pyx\":156\n *     cdef RLEs R = RLEs(1)\n *     rleMerge(<RLE*>Rs._R, <RLE*> R._R, <siz> Rs._n, intersect)\n *     obj = _toString(R)[0]             # <<<<<<<<<<<<<<\n *     return obj\n * \n */\n  __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_toString); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 156, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_2);\n  
__pyx_t_3 = NULL;\n  if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_2))) {\n    __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2);\n    if (likely(__pyx_t_3)) {\n      PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2);\n      __Pyx_INCREF(__pyx_t_3);\n      __Pyx_INCREF(function);\n      __Pyx_DECREF_SET(__pyx_t_2, function);\n    }\n  }\n  __pyx_t_1 = (__pyx_t_3) ? __Pyx_PyObject_Call2Args(__pyx_t_2, __pyx_t_3, ((PyObject *)__pyx_v_R)) : __Pyx_PyObject_CallOneArg(__pyx_t_2, ((PyObject *)__pyx_v_R));\n  __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0;\n  if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 156, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_1);\n  __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;\n  __pyx_t_2 = __Pyx_GetItemInt(__pyx_t_1, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 156, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_2);\n  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;\n  __pyx_v_obj = __pyx_t_2;\n  __pyx_t_2 = 0;\n\n  /* \"pysobatools/_mask.pyx\":157\n *     rleMerge(<RLE*>Rs._R, <RLE*> R._R, <siz> Rs._n, intersect)\n *     obj = _toString(R)[0]\n *     return obj             # <<<<<<<<<<<<<<\n * \n * def area(rleObjs):\n */\n  __Pyx_XDECREF(__pyx_r);\n  __Pyx_INCREF(__pyx_v_obj);\n  __pyx_r = __pyx_v_obj;\n  goto __pyx_L0;\n\n  /* \"pysobatools/_mask.pyx\":152\n *     return np.array(masks)\n * \n * def merge(rleObjs, intersect=0):             # <<<<<<<<<<<<<<\n *     cdef RLEs Rs = _frString(rleObjs)\n *     cdef RLEs R = RLEs(1)\n */\n\n  /* function exit code */\n  __pyx_L1_error:;\n  __Pyx_XDECREF(__pyx_t_1);\n  __Pyx_XDECREF(__pyx_t_2);\n  __Pyx_XDECREF(__pyx_t_3);\n  __Pyx_AddTraceback(\"pysobatools._mask.merge\", __pyx_clineno, __pyx_lineno, __pyx_filename);\n  __pyx_r = NULL;\n  __pyx_L0:;\n  __Pyx_XDECREF((PyObject *)__pyx_v_Rs);\n  __Pyx_XDECREF((PyObject *)__pyx_v_R);\n  __Pyx_XDECREF(__pyx_v_obj);\n  __Pyx_XGIVEREF(__pyx_r);\n  __Pyx_RefNannyFinishContext();\n  return __pyx_r;\n}\n\n/* \"pysobatools/_mask.pyx\":159\n *    
 return obj\n * \n * def area(rleObjs):             # <<<<<<<<<<<<<<\n *     cdef RLEs Rs = _frString(rleObjs)\n *     cdef uint* _a = <uint*> malloc(Rs._n* sizeof(uint))\n */\n\n/* Python wrapper */\nstatic PyObject *__pyx_pw_11pysobatools_5_mask_11area(PyObject *__pyx_self, PyObject *__pyx_v_rleObjs); /*proto*/\nstatic PyMethodDef __pyx_mdef_11pysobatools_5_mask_11area = {\"area\", (PyCFunction)__pyx_pw_11pysobatools_5_mask_11area, METH_O, 0};\nstatic PyObject *__pyx_pw_11pysobatools_5_mask_11area(PyObject *__pyx_self, PyObject *__pyx_v_rleObjs) {\n  PyObject *__pyx_r = 0;\n  __Pyx_RefNannyDeclarations\n  __Pyx_RefNannySetupContext(\"area (wrapper)\", 0);\n  __pyx_r = __pyx_pf_11pysobatools_5_mask_10area(__pyx_self, ((PyObject *)__pyx_v_rleObjs));\n\n  /* function exit code */\n  __Pyx_RefNannyFinishContext();\n  return __pyx_r;\n}\n\nstatic PyObject *__pyx_pf_11pysobatools_5_mask_10area(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_rleObjs) {\n  struct __pyx_obj_11pysobatools_5_mask_RLEs *__pyx_v_Rs = 0;\n  uint *__pyx_v__a;\n  npy_intp __pyx_v_shape[1];\n  PyObject *__pyx_v_a = NULL;\n  PyObject *__pyx_r = NULL;\n  __Pyx_RefNannyDeclarations\n  PyObject *__pyx_t_1 = NULL;\n  PyObject *__pyx_t_2 = NULL;\n  PyObject *__pyx_t_3 = NULL;\n  PyObject *__pyx_t_4 = NULL;\n  PyObject *__pyx_t_5 = NULL;\n  __Pyx_RefNannySetupContext(\"area\", 0);\n\n  /* \"pysobatools/_mask.pyx\":160\n * \n * def area(rleObjs):\n *     cdef RLEs Rs = _frString(rleObjs)             # <<<<<<<<<<<<<<\n *     cdef uint* _a = <uint*> malloc(Rs._n* sizeof(uint))\n *     rleArea(Rs._R, Rs._n, _a)\n */\n  __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_frString); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 160, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_2);\n  __pyx_t_3 = NULL;\n  if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_2))) {\n    __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2);\n    if (likely(__pyx_t_3)) {\n      PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2);\n      
__Pyx_INCREF(__pyx_t_3);\n      __Pyx_INCREF(function);\n      __Pyx_DECREF_SET(__pyx_t_2, function);\n    }\n  }\n  __pyx_t_1 = (__pyx_t_3) ? __Pyx_PyObject_Call2Args(__pyx_t_2, __pyx_t_3, __pyx_v_rleObjs) : __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_v_rleObjs);\n  __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0;\n  if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 160, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_1);\n  __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;\n  if (!(likely(((__pyx_t_1) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_1, __pyx_ptype_11pysobatools_5_mask_RLEs))))) __PYX_ERR(0, 160, __pyx_L1_error)\n  __pyx_v_Rs = ((struct __pyx_obj_11pysobatools_5_mask_RLEs *)__pyx_t_1);\n  __pyx_t_1 = 0;\n\n  /* \"pysobatools/_mask.pyx\":161\n * def area(rleObjs):\n *     cdef RLEs Rs = _frString(rleObjs)\n *     cdef uint* _a = <uint*> malloc(Rs._n* sizeof(uint))             # <<<<<<<<<<<<<<\n *     rleArea(Rs._R, Rs._n, _a)\n *     cdef np.npy_intp shape[1]\n */\n  __pyx_v__a = ((uint *)malloc((__pyx_v_Rs->_n * (sizeof(unsigned int)))));\n\n  /* \"pysobatools/_mask.pyx\":162\n *     cdef RLEs Rs = _frString(rleObjs)\n *     cdef uint* _a = <uint*> malloc(Rs._n* sizeof(uint))\n *     rleArea(Rs._R, Rs._n, _a)             # <<<<<<<<<<<<<<\n *     cdef np.npy_intp shape[1]\n *     shape[0] = <np.npy_intp> Rs._n\n */\n  rleArea(__pyx_v_Rs->_R, __pyx_v_Rs->_n, __pyx_v__a);\n\n  /* \"pysobatools/_mask.pyx\":164\n *     rleArea(Rs._R, Rs._n, _a)\n *     cdef np.npy_intp shape[1]\n *     shape[0] = <np.npy_intp> Rs._n             # <<<<<<<<<<<<<<\n *     a = np.array((Rs._n, ), dtype=np.uint8)\n *     a = np.PyArray_SimpleNewFromData(1, shape, np.NPY_UINT32, _a)\n */\n  (__pyx_v_shape[0]) = ((npy_intp)__pyx_v_Rs->_n);\n\n  /* \"pysobatools/_mask.pyx\":165\n *     cdef np.npy_intp shape[1]\n *     shape[0] = <np.npy_intp> Rs._n\n *     a = np.array((Rs._n, ), dtype=np.uint8)             # <<<<<<<<<<<<<<\n *     a = np.PyArray_SimpleNewFromData(1, shape, np.NPY_UINT32, _a)\n *     
PyArray_ENABLEFLAGS(a, np.NPY_OWNDATA)\n */\n  __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_np); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 165, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_1);\n  __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_array); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 165, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_2);\n  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;\n  __pyx_t_1 = __Pyx_PyInt_From_siz(__pyx_v_Rs->_n); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 165, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_1);\n  __pyx_t_3 = PyTuple_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 165, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_3);\n  __Pyx_GIVEREF(__pyx_t_1);\n  PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_1);\n  __pyx_t_1 = 0;\n  __pyx_t_1 = PyTuple_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 165, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_1);\n  __Pyx_GIVEREF(__pyx_t_3);\n  PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_t_3);\n  __pyx_t_3 = 0;\n  __pyx_t_3 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 165, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_3);\n  __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_np); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 165, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_4);\n  __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_t_4, __pyx_n_s_uint8); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 165, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_5);\n  __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;\n  if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_dtype, __pyx_t_5) < 0) __PYX_ERR(0, 165, __pyx_L1_error)\n  __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;\n  __pyx_t_5 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_t_1, __pyx_t_3); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 165, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_5);\n  __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;\n  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;\n  __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;\n  __pyx_v_a = __pyx_t_5;\n  __pyx_t_5 = 0;\n\n  /* \"pysobatools/_mask.pyx\":166\n *     shape[0] = <np.npy_intp> Rs._n\n *     a = 
np.array((Rs._n, ), dtype=np.uint8)\n *     a = np.PyArray_SimpleNewFromData(1, shape, np.NPY_UINT32, _a)             # <<<<<<<<<<<<<<\n *     PyArray_ENABLEFLAGS(a, np.NPY_OWNDATA)\n *     return a\n */\n  __pyx_t_5 = PyArray_SimpleNewFromData(1, __pyx_v_shape, NPY_UINT32, __pyx_v__a); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 166, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_5);\n  __Pyx_DECREF_SET(__pyx_v_a, __pyx_t_5);\n  __pyx_t_5 = 0;\n\n  /* \"pysobatools/_mask.pyx\":167\n *     a = np.array((Rs._n, ), dtype=np.uint8)\n *     a = np.PyArray_SimpleNewFromData(1, shape, np.NPY_UINT32, _a)\n *     PyArray_ENABLEFLAGS(a, np.NPY_OWNDATA)             # <<<<<<<<<<<<<<\n *     return a\n * \n */\n  if (!(likely(((__pyx_v_a) == Py_None) || likely(__Pyx_TypeTest(__pyx_v_a, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 167, __pyx_L1_error)\n  PyArray_ENABLEFLAGS(((PyArrayObject *)__pyx_v_a), NPY_OWNDATA);\n\n  /* \"pysobatools/_mask.pyx\":168\n *     a = np.PyArray_SimpleNewFromData(1, shape, np.NPY_UINT32, _a)\n *     PyArray_ENABLEFLAGS(a, np.NPY_OWNDATA)\n *     return a             # <<<<<<<<<<<<<<\n * \n * # iou computation. 
support function overload (RLEs-RLEs and bbox-bbox).\n */\n  __Pyx_XDECREF(__pyx_r);\n  __Pyx_INCREF(__pyx_v_a);\n  __pyx_r = __pyx_v_a;\n  goto __pyx_L0;\n\n  /* \"pysobatools/_mask.pyx\":159\n *     return obj\n * \n * def area(rleObjs):             # <<<<<<<<<<<<<<\n *     cdef RLEs Rs = _frString(rleObjs)\n *     cdef uint* _a = <uint*> malloc(Rs._n* sizeof(uint))\n */\n\n  /* function exit code */\n  __pyx_L1_error:;\n  __Pyx_XDECREF(__pyx_t_1);\n  __Pyx_XDECREF(__pyx_t_2);\n  __Pyx_XDECREF(__pyx_t_3);\n  __Pyx_XDECREF(__pyx_t_4);\n  __Pyx_XDECREF(__pyx_t_5);\n  __Pyx_AddTraceback(\"pysobatools._mask.area\", __pyx_clineno, __pyx_lineno, __pyx_filename);\n  __pyx_r = NULL;\n  __pyx_L0:;\n  __Pyx_XDECREF((PyObject *)__pyx_v_Rs);\n  __Pyx_XDECREF(__pyx_v_a);\n  __Pyx_XGIVEREF(__pyx_r);\n  __Pyx_RefNannyFinishContext();\n  return __pyx_r;\n}\n\n/* \"pysobatools/_mask.pyx\":171\n * \n * # iou computation. support function overload (RLEs-RLEs and bbox-bbox).\n * def iou( dt, gt, pyiscrowd ):             # <<<<<<<<<<<<<<\n *     def _preproc(objs):\n *         if len(objs) == 0:\n */\n\n/* Python wrapper */\nstatic PyObject *__pyx_pw_11pysobatools_5_mask_13iou(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/\nstatic PyMethodDef __pyx_mdef_11pysobatools_5_mask_13iou = {\"iou\", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_11pysobatools_5_mask_13iou, METH_VARARGS|METH_KEYWORDS, 0};\nstatic PyObject *__pyx_pw_11pysobatools_5_mask_13iou(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) {\n  PyObject *__pyx_v_dt = 0;\n  PyObject *__pyx_v_gt = 0;\n  PyObject *__pyx_v_pyiscrowd = 0;\n  PyObject *__pyx_r = 0;\n  __Pyx_RefNannyDeclarations\n  __Pyx_RefNannySetupContext(\"iou (wrapper)\", 0);\n  {\n    static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_dt,&__pyx_n_s_gt,&__pyx_n_s_pyiscrowd,0};\n    PyObject* values[3] = {0,0,0};\n    if (unlikely(__pyx_kwds)) {\n      Py_ssize_t kw_args;\n      const Py_ssize_t pos_args = 
PyTuple_GET_SIZE(__pyx_args);\n      switch (pos_args) {\n        case  3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2);\n        CYTHON_FALLTHROUGH;\n        case  2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1);\n        CYTHON_FALLTHROUGH;\n        case  1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0);\n        CYTHON_FALLTHROUGH;\n        case  0: break;\n        default: goto __pyx_L5_argtuple_error;\n      }\n      kw_args = PyDict_Size(__pyx_kwds);\n      switch (pos_args) {\n        case  0:\n        if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_dt)) != 0)) kw_args--;\n        else goto __pyx_L5_argtuple_error;\n        CYTHON_FALLTHROUGH;\n        case  1:\n        if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_gt)) != 0)) kw_args--;\n        else {\n          __Pyx_RaiseArgtupleInvalid(\"iou\", 1, 3, 3, 1); __PYX_ERR(0, 171, __pyx_L3_error)\n        }\n        CYTHON_FALLTHROUGH;\n        case  2:\n        if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyiscrowd)) != 0)) kw_args--;\n        else {\n          __Pyx_RaiseArgtupleInvalid(\"iou\", 1, 3, 3, 2); __PYX_ERR(0, 171, __pyx_L3_error)\n        }\n      }\n      if (unlikely(kw_args > 0)) {\n        if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, \"iou\") < 0)) __PYX_ERR(0, 171, __pyx_L3_error)\n      }\n    } else if (PyTuple_GET_SIZE(__pyx_args) != 3) {\n      goto __pyx_L5_argtuple_error;\n    } else {\n      values[0] = PyTuple_GET_ITEM(__pyx_args, 0);\n      values[1] = PyTuple_GET_ITEM(__pyx_args, 1);\n      values[2] = PyTuple_GET_ITEM(__pyx_args, 2);\n    }\n    __pyx_v_dt = values[0];\n    __pyx_v_gt = values[1];\n    __pyx_v_pyiscrowd = values[2];\n  }\n  goto __pyx_L4_argument_unpacking_done;\n  __pyx_L5_argtuple_error:;\n  __Pyx_RaiseArgtupleInvalid(\"iou\", 1, 3, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 171, __pyx_L3_error)\n  __pyx_L3_error:;\n  
__Pyx_AddTraceback(\"pysobatools._mask.iou\", __pyx_clineno, __pyx_lineno, __pyx_filename);\n  __Pyx_RefNannyFinishContext();\n  return NULL;\n  __pyx_L4_argument_unpacking_done:;\n  __pyx_r = __pyx_pf_11pysobatools_5_mask_12iou(__pyx_self, __pyx_v_dt, __pyx_v_gt, __pyx_v_pyiscrowd);\n\n  /* function exit code */\n  __Pyx_RefNannyFinishContext();\n  return __pyx_r;\n}\n\n/* \"pysobatools/_mask.pyx\":172\n * # iou computation. support function overload (RLEs-RLEs and bbox-bbox).\n * def iou( dt, gt, pyiscrowd ):\n *     def _preproc(objs):             # <<<<<<<<<<<<<<\n *         if len(objs) == 0:\n *             return objs\n */\n\n/* Python wrapper */\nstatic PyObject *__pyx_pw_11pysobatools_5_mask_3iou_1_preproc(PyObject *__pyx_self, PyObject *__pyx_v_objs); /*proto*/\nstatic PyMethodDef __pyx_mdef_11pysobatools_5_mask_3iou_1_preproc = {\"_preproc\", (PyCFunction)__pyx_pw_11pysobatools_5_mask_3iou_1_preproc, METH_O, 0};\nstatic PyObject *__pyx_pw_11pysobatools_5_mask_3iou_1_preproc(PyObject *__pyx_self, PyObject *__pyx_v_objs) {\n  PyObject *__pyx_r = 0;\n  __Pyx_RefNannyDeclarations\n  __Pyx_RefNannySetupContext(\"_preproc (wrapper)\", 0);\n  __pyx_r = __pyx_pf_11pysobatools_5_mask_3iou__preproc(__pyx_self, ((PyObject *)__pyx_v_objs));\n\n  /* function exit code */\n  __Pyx_RefNannyFinishContext();\n  return __pyx_r;\n}\n\nstatic PyObject *__pyx_pf_11pysobatools_5_mask_3iou__preproc(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_objs) {\n  PyObject *__pyx_v_isbox = NULL;\n  PyObject *__pyx_v_isrle = NULL;\n  PyObject *__pyx_v_obj = NULL;\n  PyObject *__pyx_r = NULL;\n  __Pyx_RefNannyDeclarations\n  Py_ssize_t __pyx_t_1;\n  int __pyx_t_2;\n  PyObject *__pyx_t_3 = NULL;\n  PyObject *__pyx_t_4 = NULL;\n  PyObject *__pyx_t_5 = NULL;\n  PyObject *__pyx_t_6 = NULL;\n  int __pyx_t_7;\n  int __pyx_t_8;\n  PyObject *__pyx_t_9 = NULL;\n  PyObject *__pyx_t_10 = NULL;\n  PyObject *(*__pyx_t_11)(PyObject *);\n  PyObject *__pyx_t_12 = NULL;\n  Py_ssize_t __pyx_t_13;\n 
 PyObject *__pyx_t_14 = NULL;\n  __Pyx_RefNannySetupContext(\"_preproc\", 0);\n  __Pyx_INCREF(__pyx_v_objs);\n\n  /* \"pysobatools/_mask.pyx\":173\n * def iou( dt, gt, pyiscrowd ):\n *     def _preproc(objs):\n *         if len(objs) == 0:             # <<<<<<<<<<<<<<\n *             return objs\n *         if type(objs) == np.ndarray:\n */\n  __pyx_t_1 = PyObject_Length(__pyx_v_objs); if (unlikely(__pyx_t_1 == ((Py_ssize_t)-1))) __PYX_ERR(0, 173, __pyx_L1_error)\n  __pyx_t_2 = ((__pyx_t_1 == 0) != 0);\n  if (__pyx_t_2) {\n\n    /* \"pysobatools/_mask.pyx\":174\n *     def _preproc(objs):\n *         if len(objs) == 0:\n *             return objs             # <<<<<<<<<<<<<<\n *         if type(objs) == np.ndarray:\n *             if len(objs.shape) == 1:\n */\n    __Pyx_XDECREF(__pyx_r);\n    __Pyx_INCREF(__pyx_v_objs);\n    __pyx_r = __pyx_v_objs;\n    goto __pyx_L0;\n\n    /* \"pysobatools/_mask.pyx\":173\n * def iou( dt, gt, pyiscrowd ):\n *     def _preproc(objs):\n *         if len(objs) == 0:             # <<<<<<<<<<<<<<\n *             return objs\n *         if type(objs) == np.ndarray:\n */\n  }\n\n  /* \"pysobatools/_mask.pyx\":175\n *         if len(objs) == 0:\n *             return objs\n *         if type(objs) == np.ndarray:             # <<<<<<<<<<<<<<\n *             if len(objs.shape) == 1:\n *                 objs = objs.reshape((objs[0], 1))\n */\n  __pyx_t_3 = PyObject_RichCompare(((PyObject *)Py_TYPE(__pyx_v_objs)), ((PyObject *)__pyx_ptype_5numpy_ndarray), Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 175, __pyx_L1_error)\n  __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 175, __pyx_L1_error)\n  __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;\n  if (__pyx_t_2) {\n\n    /* \"pysobatools/_mask.pyx\":176\n *             return objs\n *         if type(objs) == np.ndarray:\n *             if len(objs.shape) == 1:             # <<<<<<<<<<<<<<\n *                 objs = 
objs.reshape((objs[0], 1))\n *             # check if it's Nx4 bbox\n */\n    __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_objs, __pyx_n_s_shape); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 176, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_3);\n    __pyx_t_1 = PyObject_Length(__pyx_t_3); if (unlikely(__pyx_t_1 == ((Py_ssize_t)-1))) __PYX_ERR(0, 176, __pyx_L1_error)\n    __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;\n    __pyx_t_2 = ((__pyx_t_1 == 1) != 0);\n    if (__pyx_t_2) {\n\n      /* \"pysobatools/_mask.pyx\":177\n *         if type(objs) == np.ndarray:\n *             if len(objs.shape) == 1:\n *                 objs = objs.reshape((objs[0], 1))             # <<<<<<<<<<<<<<\n *             # check if it's Nx4 bbox\n *             if not len(objs.shape) == 2 or not objs.shape[1] == 4:\n */\n      __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_objs, __pyx_n_s_reshape); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 177, __pyx_L1_error)\n      __Pyx_GOTREF(__pyx_t_4);\n      __pyx_t_5 = __Pyx_GetItemInt(__pyx_v_objs, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 177, __pyx_L1_error)\n      __Pyx_GOTREF(__pyx_t_5);\n      __pyx_t_6 = PyTuple_New(2); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 177, __pyx_L1_error)\n      __Pyx_GOTREF(__pyx_t_6);\n      __Pyx_GIVEREF(__pyx_t_5);\n      PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_t_5);\n      __Pyx_INCREF(__pyx_int_1);\n      __Pyx_GIVEREF(__pyx_int_1);\n      PyTuple_SET_ITEM(__pyx_t_6, 1, __pyx_int_1);\n      __pyx_t_5 = 0;\n      __pyx_t_5 = NULL;\n      if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_4))) {\n        __pyx_t_5 = PyMethod_GET_SELF(__pyx_t_4);\n        if (likely(__pyx_t_5)) {\n          PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4);\n          __Pyx_INCREF(__pyx_t_5);\n          __Pyx_INCREF(function);\n          __Pyx_DECREF_SET(__pyx_t_4, function);\n        }\n      }\n      __pyx_t_3 = (__pyx_t_5) ? 
__Pyx_PyObject_Call2Args(__pyx_t_4, __pyx_t_5, __pyx_t_6) : __Pyx_PyObject_CallOneArg(__pyx_t_4, __pyx_t_6);\n      __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0;\n      __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;\n      if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 177, __pyx_L1_error)\n      __Pyx_GOTREF(__pyx_t_3);\n      __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;\n      __Pyx_DECREF_SET(__pyx_v_objs, __pyx_t_3);\n      __pyx_t_3 = 0;\n\n      /* \"pysobatools/_mask.pyx\":176\n *             return objs\n *         if type(objs) == np.ndarray:\n *             if len(objs.shape) == 1:             # <<<<<<<<<<<<<<\n *                 objs = objs.reshape((objs[0], 1))\n *             # check if it's Nx4 bbox\n */\n    }\n\n    /* \"pysobatools/_mask.pyx\":179\n *                 objs = objs.reshape((objs[0], 1))\n *             # check if it's Nx4 bbox\n *             if not len(objs.shape) == 2 or not objs.shape[1] == 4:             # <<<<<<<<<<<<<<\n *                 raise Exception('numpy ndarray input is only for *bounding boxes* and should have Nx4 dimension')\n *             objs = objs.astype(np.double)\n */\n    __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_objs, __pyx_n_s_shape); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 179, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_3);\n    __pyx_t_1 = PyObject_Length(__pyx_t_3); if (unlikely(__pyx_t_1 == ((Py_ssize_t)-1))) __PYX_ERR(0, 179, __pyx_L1_error)\n    __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;\n    __pyx_t_7 = ((!((__pyx_t_1 == 2) != 0)) != 0);\n    if (!__pyx_t_7) {\n    } else {\n      __pyx_t_2 = __pyx_t_7;\n      goto __pyx_L7_bool_binop_done;\n    }\n    __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_objs, __pyx_n_s_shape); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 179, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_3);\n    __pyx_t_4 = __Pyx_GetItemInt(__pyx_t_3, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 179, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_4);\n    __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 
0;\n    __pyx_t_3 = __Pyx_PyInt_EqObjC(__pyx_t_4, __pyx_int_4, 4, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 179, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_3);\n    __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;\n    __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_7 < 0)) __PYX_ERR(0, 179, __pyx_L1_error)\n    __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;\n    __pyx_t_8 = ((!__pyx_t_7) != 0);\n    __pyx_t_2 = __pyx_t_8;\n    __pyx_L7_bool_binop_done:;\n    if (unlikely(__pyx_t_2)) {\n\n      /* \"pysobatools/_mask.pyx\":180\n *             # check if it's Nx4 bbox\n *             if not len(objs.shape) == 2 or not objs.shape[1] == 4:\n *                 raise Exception('numpy ndarray input is only for *bounding boxes* and should have Nx4 dimension')             # <<<<<<<<<<<<<<\n *             objs = objs.astype(np.double)\n *         elif type(objs) == list:\n */\n      __pyx_t_3 = __Pyx_PyObject_Call(((PyObject *)(&((PyTypeObject*)PyExc_Exception)[0])), __pyx_tuple__7, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 180, __pyx_L1_error)\n      __Pyx_GOTREF(__pyx_t_3);\n      __Pyx_Raise(__pyx_t_3, 0, 0, 0);\n      __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;\n      __PYX_ERR(0, 180, __pyx_L1_error)\n\n      /* \"pysobatools/_mask.pyx\":179\n *                 objs = objs.reshape((objs[0], 1))\n *             # check if it's Nx4 bbox\n *             if not len(objs.shape) == 2 or not objs.shape[1] == 4:             # <<<<<<<<<<<<<<\n *                 raise Exception('numpy ndarray input is only for *bounding boxes* and should have Nx4 dimension')\n *             objs = objs.astype(np.double)\n */\n    }\n\n    /* \"pysobatools/_mask.pyx\":181\n *             if not len(objs.shape) == 2 or not objs.shape[1] == 4:\n *                 raise Exception('numpy ndarray input is only for *bounding boxes* and should have Nx4 dimension')\n *             objs = objs.astype(np.double)             # <<<<<<<<<<<<<<\n *         elif type(objs) == list:\n *             # check if 
list is in box format and convert it to np.ndarray\n */\n    __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_v_objs, __pyx_n_s_astype); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 181, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_4);\n    __Pyx_GetModuleGlobalName(__pyx_t_6, __pyx_n_s_np); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 181, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_6);\n    __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_t_6, __pyx_n_s_double); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 181, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_5);\n    __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;\n    __pyx_t_6 = NULL;\n    if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_4))) {\n      __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_4);\n      if (likely(__pyx_t_6)) {\n        PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4);\n        __Pyx_INCREF(__pyx_t_6);\n        __Pyx_INCREF(function);\n        __Pyx_DECREF_SET(__pyx_t_4, function);\n      }\n    }\n    __pyx_t_3 = (__pyx_t_6) ? __Pyx_PyObject_Call2Args(__pyx_t_4, __pyx_t_6, __pyx_t_5) : __Pyx_PyObject_CallOneArg(__pyx_t_4, __pyx_t_5);\n    __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0;\n    __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;\n    if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 181, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_3);\n    __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;\n    __Pyx_DECREF_SET(__pyx_v_objs, __pyx_t_3);\n    __pyx_t_3 = 0;\n\n    /* \"pysobatools/_mask.pyx\":175\n *         if len(objs) == 0:\n *             return objs\n *         if type(objs) == np.ndarray:             # <<<<<<<<<<<<<<\n *             if len(objs.shape) == 1:\n *                 objs = objs.reshape((objs[0], 1))\n */\n    goto __pyx_L4;\n  }\n\n  /* \"pysobatools/_mask.pyx\":182\n *                 raise Exception('numpy ndarray input is only for *bounding boxes* and should have Nx4 dimension')\n *             objs = objs.astype(np.double)\n *         elif type(objs) == list:             # <<<<<<<<<<<<<<\n *             # check if list is in box format 
and convert it to np.ndarray\n *             isbox = np.all(np.array([(len(obj)==4) and ((type(obj)==list) or (type(obj)==np.ndarray)) for obj in objs]))\n */\n  __pyx_t_3 = PyObject_RichCompare(((PyObject *)Py_TYPE(__pyx_v_objs)), ((PyObject *)(&PyList_Type)), Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 182, __pyx_L1_error)\n  __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 182, __pyx_L1_error)\n  __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;\n  if (likely(__pyx_t_2)) {\n\n    /* \"pysobatools/_mask.pyx\":184\n *         elif type(objs) == list:\n *             # check if list is in box format and convert it to np.ndarray\n *             isbox = np.all(np.array([(len(obj)==4) and ((type(obj)==list) or (type(obj)==np.ndarray)) for obj in objs]))             # <<<<<<<<<<<<<<\n *             isrle = np.all(np.array([type(obj) == dict for obj in objs]))\n *             if isbox:\n */\n    __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_np); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 184, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_4);\n    __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_t_4, __pyx_n_s_all); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 184, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_5);\n    __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;\n    __Pyx_GetModuleGlobalName(__pyx_t_6, __pyx_n_s_np); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 184, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_6);\n    __pyx_t_9 = __Pyx_PyObject_GetAttrStr(__pyx_t_6, __pyx_n_s_array); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 184, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_9);\n    __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;\n    __pyx_t_6 = PyList_New(0); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 184, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_6);\n    if (likely(PyList_CheckExact(__pyx_v_objs)) || PyTuple_CheckExact(__pyx_v_objs)) {\n      __pyx_t_10 = __pyx_v_objs; __Pyx_INCREF(__pyx_t_10); __pyx_t_1 = 0;\n      __pyx_t_11 = NULL;\n    } else {\n      
__pyx_t_1 = -1; __pyx_t_10 = PyObject_GetIter(__pyx_v_objs); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 184, __pyx_L1_error)\n      __Pyx_GOTREF(__pyx_t_10);\n      __pyx_t_11 = Py_TYPE(__pyx_t_10)->tp_iternext; if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 184, __pyx_L1_error)\n    }\n    for (;;) {\n      if (likely(!__pyx_t_11)) {\n        if (likely(PyList_CheckExact(__pyx_t_10))) {\n          if (__pyx_t_1 >= PyList_GET_SIZE(__pyx_t_10)) break;\n          #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS\n          __pyx_t_12 = PyList_GET_ITEM(__pyx_t_10, __pyx_t_1); __Pyx_INCREF(__pyx_t_12); __pyx_t_1++; if (unlikely(0 < 0)) __PYX_ERR(0, 184, __pyx_L1_error)\n          #else\n          __pyx_t_12 = PySequence_ITEM(__pyx_t_10, __pyx_t_1); __pyx_t_1++; if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 184, __pyx_L1_error)\n          __Pyx_GOTREF(__pyx_t_12);\n          #endif\n        } else {\n          if (__pyx_t_1 >= PyTuple_GET_SIZE(__pyx_t_10)) break;\n          #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS\n          __pyx_t_12 = PyTuple_GET_ITEM(__pyx_t_10, __pyx_t_1); __Pyx_INCREF(__pyx_t_12); __pyx_t_1++; if (unlikely(0 < 0)) __PYX_ERR(0, 184, __pyx_L1_error)\n          #else\n          __pyx_t_12 = PySequence_ITEM(__pyx_t_10, __pyx_t_1); __pyx_t_1++; if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 184, __pyx_L1_error)\n          __Pyx_GOTREF(__pyx_t_12);\n          #endif\n        }\n      } else {\n        __pyx_t_12 = __pyx_t_11(__pyx_t_10);\n        if (unlikely(!__pyx_t_12)) {\n          PyObject* exc_type = PyErr_Occurred();\n          if (exc_type) {\n            if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear();\n            else __PYX_ERR(0, 184, __pyx_L1_error)\n          }\n          break;\n        }\n        __Pyx_GOTREF(__pyx_t_12);\n      }\n      __Pyx_XDECREF_SET(__pyx_v_obj, __pyx_t_12);\n      __pyx_t_12 = 0;\n      __pyx_t_13 = PyObject_Length(__pyx_v_obj); if (unlikely(__pyx_t_13 == 
((Py_ssize_t)-1))) __PYX_ERR(0, 184, __pyx_L1_error)\n      __pyx_t_2 = (__pyx_t_13 == 4);\n      if (__pyx_t_2) {\n      } else {\n        __pyx_t_14 = __Pyx_PyBool_FromLong(__pyx_t_2); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 184, __pyx_L1_error)\n        __Pyx_GOTREF(__pyx_t_14);\n        __pyx_t_12 = __pyx_t_14;\n        __pyx_t_14 = 0;\n        goto __pyx_L11_bool_binop_done;\n      }\n      __pyx_t_14 = PyObject_RichCompare(((PyObject *)Py_TYPE(__pyx_v_obj)), ((PyObject *)(&PyList_Type)), Py_EQ); __Pyx_XGOTREF(__pyx_t_14); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 184, __pyx_L1_error)\n      __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_14); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 184, __pyx_L1_error)\n      if (!__pyx_t_2) {\n        __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0;\n      } else {\n        __Pyx_INCREF(__pyx_t_14);\n        __pyx_t_12 = __pyx_t_14;\n        __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0;\n        goto __pyx_L11_bool_binop_done;\n      }\n      __pyx_t_14 = PyObject_RichCompare(((PyObject *)Py_TYPE(__pyx_v_obj)), ((PyObject *)__pyx_ptype_5numpy_ndarray), Py_EQ); __Pyx_XGOTREF(__pyx_t_14); if (unlikely(!__pyx_t_14)) __PYX_ERR(0, 184, __pyx_L1_error)\n      __Pyx_INCREF(__pyx_t_14);\n      __pyx_t_12 = __pyx_t_14;\n      __Pyx_DECREF(__pyx_t_14); __pyx_t_14 = 0;\n      __pyx_L11_bool_binop_done:;\n      if (unlikely(__Pyx_ListComp_Append(__pyx_t_6, (PyObject*)__pyx_t_12))) __PYX_ERR(0, 184, __pyx_L1_error)\n      __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0;\n    }\n    __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0;\n    __pyx_t_10 = NULL;\n    if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_9))) {\n      __pyx_t_10 = PyMethod_GET_SELF(__pyx_t_9);\n      if (likely(__pyx_t_10)) {\n        PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_9);\n        __Pyx_INCREF(__pyx_t_10);\n        __Pyx_INCREF(function);\n        __Pyx_DECREF_SET(__pyx_t_9, function);\n      }\n    }\n    __pyx_t_4 = (__pyx_t_10) ? 
__Pyx_PyObject_Call2Args(__pyx_t_9, __pyx_t_10, __pyx_t_6) : __Pyx_PyObject_CallOneArg(__pyx_t_9, __pyx_t_6);\n    __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0;\n    __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;\n    if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 184, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_4);\n    __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0;\n    __pyx_t_9 = NULL;\n    if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_5))) {\n      __pyx_t_9 = PyMethod_GET_SELF(__pyx_t_5);\n      if (likely(__pyx_t_9)) {\n        PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_5);\n        __Pyx_INCREF(__pyx_t_9);\n        __Pyx_INCREF(function);\n        __Pyx_DECREF_SET(__pyx_t_5, function);\n      }\n    }\n    __pyx_t_3 = (__pyx_t_9) ? __Pyx_PyObject_Call2Args(__pyx_t_5, __pyx_t_9, __pyx_t_4) : __Pyx_PyObject_CallOneArg(__pyx_t_5, __pyx_t_4);\n    __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0;\n    __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;\n    if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 184, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_3);\n    __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;\n    __pyx_v_isbox = __pyx_t_3;\n    __pyx_t_3 = 0;\n\n    /* \"pysobatools/_mask.pyx\":185\n *             # check if list is in box format and convert it to np.ndarray\n *             isbox = np.all(np.array([(len(obj)==4) and ((type(obj)==list) or (type(obj)==np.ndarray)) for obj in objs]))\n *             isrle = np.all(np.array([type(obj) == dict for obj in objs]))             # <<<<<<<<<<<<<<\n *             if isbox:\n *                 objs = np.array(objs, dtype=np.double)\n */\n    __Pyx_GetModuleGlobalName(__pyx_t_5, __pyx_n_s_np); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 185, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_5);\n    __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_t_5, __pyx_n_s_all); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 185, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_4);\n    __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;\n    __Pyx_GetModuleGlobalName(__pyx_t_9, __pyx_n_s_np); if 
(unlikely(!__pyx_t_9)) __PYX_ERR(0, 185, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_9);\n    __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_t_9, __pyx_n_s_array); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 185, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_6);\n    __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0;\n    __pyx_t_9 = PyList_New(0); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 185, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_9);\n    if (likely(PyList_CheckExact(__pyx_v_objs)) || PyTuple_CheckExact(__pyx_v_objs)) {\n      __pyx_t_10 = __pyx_v_objs; __Pyx_INCREF(__pyx_t_10); __pyx_t_1 = 0;\n      __pyx_t_11 = NULL;\n    } else {\n      __pyx_t_1 = -1; __pyx_t_10 = PyObject_GetIter(__pyx_v_objs); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 185, __pyx_L1_error)\n      __Pyx_GOTREF(__pyx_t_10);\n      __pyx_t_11 = Py_TYPE(__pyx_t_10)->tp_iternext; if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 185, __pyx_L1_error)\n    }\n    for (;;) {\n      if (likely(!__pyx_t_11)) {\n        if (likely(PyList_CheckExact(__pyx_t_10))) {\n          if (__pyx_t_1 >= PyList_GET_SIZE(__pyx_t_10)) break;\n          #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS\n          __pyx_t_12 = PyList_GET_ITEM(__pyx_t_10, __pyx_t_1); __Pyx_INCREF(__pyx_t_12); __pyx_t_1++; if (unlikely(0 < 0)) __PYX_ERR(0, 185, __pyx_L1_error)\n          #else\n          __pyx_t_12 = PySequence_ITEM(__pyx_t_10, __pyx_t_1); __pyx_t_1++; if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 185, __pyx_L1_error)\n          __Pyx_GOTREF(__pyx_t_12);\n          #endif\n        } else {\n          if (__pyx_t_1 >= PyTuple_GET_SIZE(__pyx_t_10)) break;\n          #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS\n          __pyx_t_12 = PyTuple_GET_ITEM(__pyx_t_10, __pyx_t_1); __Pyx_INCREF(__pyx_t_12); __pyx_t_1++; if (unlikely(0 < 0)) __PYX_ERR(0, 185, __pyx_L1_error)\n          #else\n          __pyx_t_12 = PySequence_ITEM(__pyx_t_10, __pyx_t_1); __pyx_t_1++; if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 185, __pyx_L1_error)\n          
__Pyx_GOTREF(__pyx_t_12);\n          #endif\n        }\n      } else {\n        __pyx_t_12 = __pyx_t_11(__pyx_t_10);\n        if (unlikely(!__pyx_t_12)) {\n          PyObject* exc_type = PyErr_Occurred();\n          if (exc_type) {\n            if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear();\n            else __PYX_ERR(0, 185, __pyx_L1_error)\n          }\n          break;\n        }\n        __Pyx_GOTREF(__pyx_t_12);\n      }\n      __Pyx_XDECREF_SET(__pyx_v_obj, __pyx_t_12);\n      __pyx_t_12 = 0;\n      __pyx_t_12 = PyObject_RichCompare(((PyObject *)Py_TYPE(__pyx_v_obj)), ((PyObject *)(&PyDict_Type)), Py_EQ); __Pyx_XGOTREF(__pyx_t_12); if (unlikely(!__pyx_t_12)) __PYX_ERR(0, 185, __pyx_L1_error)\n      if (unlikely(__Pyx_ListComp_Append(__pyx_t_9, (PyObject*)__pyx_t_12))) __PYX_ERR(0, 185, __pyx_L1_error)\n      __Pyx_DECREF(__pyx_t_12); __pyx_t_12 = 0;\n    }\n    __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0;\n    __pyx_t_10 = NULL;\n    if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_6))) {\n      __pyx_t_10 = PyMethod_GET_SELF(__pyx_t_6);\n      if (likely(__pyx_t_10)) {\n        PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6);\n        __Pyx_INCREF(__pyx_t_10);\n        __Pyx_INCREF(function);\n        __Pyx_DECREF_SET(__pyx_t_6, function);\n      }\n    }\n    __pyx_t_5 = (__pyx_t_10) ? 
__Pyx_PyObject_Call2Args(__pyx_t_6, __pyx_t_10, __pyx_t_9) : __Pyx_PyObject_CallOneArg(__pyx_t_6, __pyx_t_9);\n    __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0;\n    __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0;\n    if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 185, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_5);\n    __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;\n    __pyx_t_6 = NULL;\n    if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_4))) {\n      __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_4);\n      if (likely(__pyx_t_6)) {\n        PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_4);\n        __Pyx_INCREF(__pyx_t_6);\n        __Pyx_INCREF(function);\n        __Pyx_DECREF_SET(__pyx_t_4, function);\n      }\n    }\n    __pyx_t_3 = (__pyx_t_6) ? __Pyx_PyObject_Call2Args(__pyx_t_4, __pyx_t_6, __pyx_t_5) : __Pyx_PyObject_CallOneArg(__pyx_t_4, __pyx_t_5);\n    __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0;\n    __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;\n    if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 185, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_3);\n    __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;\n    __pyx_v_isrle = __pyx_t_3;\n    __pyx_t_3 = 0;\n\n    /* \"pysobatools/_mask.pyx\":186\n *             isbox = np.all(np.array([(len(obj)==4) and ((type(obj)==list) or (type(obj)==np.ndarray)) for obj in objs]))\n *             isrle = np.all(np.array([type(obj) == dict for obj in objs]))\n *             if isbox:             # <<<<<<<<<<<<<<\n *                 objs = np.array(objs, dtype=np.double)\n *                 if len(objs.shape) == 1:\n */\n    __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_v_isbox); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 186, __pyx_L1_error)\n    if (__pyx_t_2) {\n\n      /* \"pysobatools/_mask.pyx\":187\n *             isrle = np.all(np.array([type(obj) == dict for obj in objs]))\n *             if isbox:\n *                 objs = np.array(objs, dtype=np.double)             # <<<<<<<<<<<<<<\n *                 if len(objs.shape) == 1:\n *                     objs = 
objs.reshape((1,objs.shape[0]))\n */\n      __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_np); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 187, __pyx_L1_error)\n      __Pyx_GOTREF(__pyx_t_3);\n      __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_array); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 187, __pyx_L1_error)\n      __Pyx_GOTREF(__pyx_t_4);\n      __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;\n      __pyx_t_3 = PyTuple_New(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 187, __pyx_L1_error)\n      __Pyx_GOTREF(__pyx_t_3);\n      __Pyx_INCREF(__pyx_v_objs);\n      __Pyx_GIVEREF(__pyx_v_objs);\n      PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_v_objs);\n      __pyx_t_5 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 187, __pyx_L1_error)\n      __Pyx_GOTREF(__pyx_t_5);\n      __Pyx_GetModuleGlobalName(__pyx_t_6, __pyx_n_s_np); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 187, __pyx_L1_error)\n      __Pyx_GOTREF(__pyx_t_6);\n      __pyx_t_9 = __Pyx_PyObject_GetAttrStr(__pyx_t_6, __pyx_n_s_double); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 187, __pyx_L1_error)\n      __Pyx_GOTREF(__pyx_t_9);\n      __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;\n      if (PyDict_SetItem(__pyx_t_5, __pyx_n_s_dtype, __pyx_t_9) < 0) __PYX_ERR(0, 187, __pyx_L1_error)\n      __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0;\n      __pyx_t_9 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_3, __pyx_t_5); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 187, __pyx_L1_error)\n      __Pyx_GOTREF(__pyx_t_9);\n      __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;\n      __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;\n      __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;\n      __Pyx_DECREF_SET(__pyx_v_objs, __pyx_t_9);\n      __pyx_t_9 = 0;\n\n      /* \"pysobatools/_mask.pyx\":188\n *             if isbox:\n *                 objs = np.array(objs, dtype=np.double)\n *                 if len(objs.shape) == 1:             # <<<<<<<<<<<<<<\n *                     objs = objs.reshape((1,objs.shape[0]))\n *             elif isrle:\n */\n      __pyx_t_9 
= __Pyx_PyObject_GetAttrStr(__pyx_v_objs, __pyx_n_s_shape); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 188, __pyx_L1_error)\n      __Pyx_GOTREF(__pyx_t_9);\n      __pyx_t_1 = PyObject_Length(__pyx_t_9); if (unlikely(__pyx_t_1 == ((Py_ssize_t)-1))) __PYX_ERR(0, 188, __pyx_L1_error)\n      __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0;\n      __pyx_t_2 = ((__pyx_t_1 == 1) != 0);\n      if (__pyx_t_2) {\n\n        /* \"pysobatools/_mask.pyx\":189\n *                 objs = np.array(objs, dtype=np.double)\n *                 if len(objs.shape) == 1:\n *                     objs = objs.reshape((1,objs.shape[0]))             # <<<<<<<<<<<<<<\n *             elif isrle:\n *                 objs = _frString(objs)\n */\n        __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_v_objs, __pyx_n_s_reshape); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 189, __pyx_L1_error)\n        __Pyx_GOTREF(__pyx_t_5);\n        __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_v_objs, __pyx_n_s_shape); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 189, __pyx_L1_error)\n        __Pyx_GOTREF(__pyx_t_3);\n        __pyx_t_4 = __Pyx_GetItemInt(__pyx_t_3, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 189, __pyx_L1_error)\n        __Pyx_GOTREF(__pyx_t_4);\n        __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;\n        __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 189, __pyx_L1_error)\n        __Pyx_GOTREF(__pyx_t_3);\n        __Pyx_INCREF(__pyx_int_1);\n        __Pyx_GIVEREF(__pyx_int_1);\n        PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_int_1);\n        __Pyx_GIVEREF(__pyx_t_4);\n        PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_t_4);\n        __pyx_t_4 = 0;\n        __pyx_t_4 = NULL;\n        if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_5))) {\n          __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_5);\n          if (likely(__pyx_t_4)) {\n            PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_5);\n            __Pyx_INCREF(__pyx_t_4);\n            __Pyx_INCREF(function);\n  
          __Pyx_DECREF_SET(__pyx_t_5, function);\n          }\n        }\n        __pyx_t_9 = (__pyx_t_4) ? __Pyx_PyObject_Call2Args(__pyx_t_5, __pyx_t_4, __pyx_t_3) : __Pyx_PyObject_CallOneArg(__pyx_t_5, __pyx_t_3);\n        __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0;\n        __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;\n        if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 189, __pyx_L1_error)\n        __Pyx_GOTREF(__pyx_t_9);\n        __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;\n        __Pyx_DECREF_SET(__pyx_v_objs, __pyx_t_9);\n        __pyx_t_9 = 0;\n\n        /* \"pysobatools/_mask.pyx\":188\n *             if isbox:\n *                 objs = np.array(objs, dtype=np.double)\n *                 if len(objs.shape) == 1:             # <<<<<<<<<<<<<<\n *                     objs = objs.reshape((1,objs.shape[0]))\n *             elif isrle:\n */\n      }\n\n      /* \"pysobatools/_mask.pyx\":186\n *             isbox = np.all(np.array([(len(obj)==4) and ((type(obj)==list) or (type(obj)==np.ndarray)) for obj in objs]))\n *             isrle = np.all(np.array([type(obj) == dict for obj in objs]))\n *             if isbox:             # <<<<<<<<<<<<<<\n *                 objs = np.array(objs, dtype=np.double)\n *                 if len(objs.shape) == 1:\n */\n      goto __pyx_L16;\n    }\n\n    /* \"pysobatools/_mask.pyx\":190\n *                 if len(objs.shape) == 1:\n *                     objs = objs.reshape((1,objs.shape[0]))\n *             elif isrle:             # <<<<<<<<<<<<<<\n *                 objs = _frString(objs)\n *             else:\n */\n    __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_v_isrle); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 190, __pyx_L1_error)\n    if (likely(__pyx_t_2)) {\n\n      /* \"pysobatools/_mask.pyx\":191\n *                     objs = objs.reshape((1,objs.shape[0]))\n *             elif isrle:\n *                 objs = _frString(objs)             # <<<<<<<<<<<<<<\n *             else:\n *                 raise Exception('list input can be 
bounding box (Nx4) or RLEs ([RLE])')\n */\n      __Pyx_GetModuleGlobalName(__pyx_t_5, __pyx_n_s_frString); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 191, __pyx_L1_error)\n      __Pyx_GOTREF(__pyx_t_5);\n      __pyx_t_3 = NULL;\n      if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_5))) {\n        __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_5);\n        if (likely(__pyx_t_3)) {\n          PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_5);\n          __Pyx_INCREF(__pyx_t_3);\n          __Pyx_INCREF(function);\n          __Pyx_DECREF_SET(__pyx_t_5, function);\n        }\n      }\n      __pyx_t_9 = (__pyx_t_3) ? __Pyx_PyObject_Call2Args(__pyx_t_5, __pyx_t_3, __pyx_v_objs) : __Pyx_PyObject_CallOneArg(__pyx_t_5, __pyx_v_objs);\n      __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0;\n      if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 191, __pyx_L1_error)\n      __Pyx_GOTREF(__pyx_t_9);\n      __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;\n      __Pyx_DECREF_SET(__pyx_v_objs, __pyx_t_9);\n      __pyx_t_9 = 0;\n\n      /* \"pysobatools/_mask.pyx\":190\n *                 if len(objs.shape) == 1:\n *                     objs = objs.reshape((1,objs.shape[0]))\n *             elif isrle:             # <<<<<<<<<<<<<<\n *                 objs = _frString(objs)\n *             else:\n */\n      goto __pyx_L16;\n    }\n\n    /* \"pysobatools/_mask.pyx\":193\n *                 objs = _frString(objs)\n *             else:\n *                 raise Exception('list input can be bounding box (Nx4) or RLEs ([RLE])')             # <<<<<<<<<<<<<<\n *         else:\n *             raise Exception('unrecognized type.  
The following type: RLEs (rle), np.ndarray (box), and list (box) are supported.')\n */\n    /*else*/ {\n      __pyx_t_9 = __Pyx_PyObject_Call(((PyObject *)(&((PyTypeObject*)PyExc_Exception)[0])), __pyx_tuple__8, NULL); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 193, __pyx_L1_error)\n      __Pyx_GOTREF(__pyx_t_9);\n      __Pyx_Raise(__pyx_t_9, 0, 0, 0);\n      __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0;\n      __PYX_ERR(0, 193, __pyx_L1_error)\n    }\n    __pyx_L16:;\n\n    /* \"pysobatools/_mask.pyx\":182\n *                 raise Exception('numpy ndarray input is only for *bounding boxes* and should have Nx4 dimension')\n *             objs = objs.astype(np.double)\n *         elif type(objs) == list:             # <<<<<<<<<<<<<<\n *             # check if list is in box format and convert it to np.ndarray\n *             isbox = np.all(np.array([(len(obj)==4) and ((type(obj)==list) or (type(obj)==np.ndarray)) for obj in objs]))\n */\n    goto __pyx_L4;\n  }\n\n  /* \"pysobatools/_mask.pyx\":195\n *                 raise Exception('list input can be bounding box (Nx4) or RLEs ([RLE])')\n *         else:\n *             raise Exception('unrecognized type.  The following type: RLEs (rle), np.ndarray (box), and list (box) are supported.')             # <<<<<<<<<<<<<<\n *         return objs\n *     def _rleIou(RLEs dt, RLEs gt, np.ndarray[np.uint8_t, ndim=1] iscrowd, siz m, siz n, np.ndarray[np.double_t,  ndim=1] _iou):\n */\n  /*else*/ {\n    __pyx_t_9 = __Pyx_PyObject_Call(((PyObject *)(&((PyTypeObject*)PyExc_Exception)[0])), __pyx_tuple__9, NULL); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 195, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_9);\n    __Pyx_Raise(__pyx_t_9, 0, 0, 0);\n    __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0;\n    __PYX_ERR(0, 195, __pyx_L1_error)\n  }\n  __pyx_L4:;\n\n  /* \"pysobatools/_mask.pyx\":196\n *         else:\n *             raise Exception('unrecognized type.  
The following type: RLEs (rle), np.ndarray (box), and list (box) are supported.')\n *         return objs             # <<<<<<<<<<<<<<\n *     def _rleIou(RLEs dt, RLEs gt, np.ndarray[np.uint8_t, ndim=1] iscrowd, siz m, siz n, np.ndarray[np.double_t,  ndim=1] _iou):\n *         rleIou( <RLE*> dt._R, <RLE*> gt._R, m, n, <byte*> iscrowd.data, <double*> _iou.data )\n */\n  __Pyx_XDECREF(__pyx_r);\n  __Pyx_INCREF(__pyx_v_objs);\n  __pyx_r = __pyx_v_objs;\n  goto __pyx_L0;\n\n  /* \"pysobatools/_mask.pyx\":172\n * # iou computation. support function overload (RLEs-RLEs and bbox-bbox).\n * def iou( dt, gt, pyiscrowd ):\n *     def _preproc(objs):             # <<<<<<<<<<<<<<\n *         if len(objs) == 0:\n *             return objs\n */\n\n  /* function exit code */\n  __pyx_L1_error:;\n  __Pyx_XDECREF(__pyx_t_3);\n  __Pyx_XDECREF(__pyx_t_4);\n  __Pyx_XDECREF(__pyx_t_5);\n  __Pyx_XDECREF(__pyx_t_6);\n  __Pyx_XDECREF(__pyx_t_9);\n  __Pyx_XDECREF(__pyx_t_10);\n  __Pyx_XDECREF(__pyx_t_12);\n  __Pyx_XDECREF(__pyx_t_14);\n  __Pyx_AddTraceback(\"pysobatools._mask.iou._preproc\", __pyx_clineno, __pyx_lineno, __pyx_filename);\n  __pyx_r = NULL;\n  __pyx_L0:;\n  __Pyx_XDECREF(__pyx_v_isbox);\n  __Pyx_XDECREF(__pyx_v_isrle);\n  __Pyx_XDECREF(__pyx_v_obj);\n  __Pyx_XDECREF(__pyx_v_objs);\n  __Pyx_XGIVEREF(__pyx_r);\n  __Pyx_RefNannyFinishContext();\n  return __pyx_r;\n}\n\n/* \"pysobatools/_mask.pyx\":197\n *             raise Exception('unrecognized type.  
The following type: RLEs (rle), np.ndarray (box), and list (box) are supported.')\n *         return objs\n *     def _rleIou(RLEs dt, RLEs gt, np.ndarray[np.uint8_t, ndim=1] iscrowd, siz m, siz n, np.ndarray[np.double_t,  ndim=1] _iou):             # <<<<<<<<<<<<<<\n *         rleIou( <RLE*> dt._R, <RLE*> gt._R, m, n, <byte*> iscrowd.data, <double*> _iou.data )\n *     def _bbIou(np.ndarray[np.double_t, ndim=2] dt, np.ndarray[np.double_t, ndim=2] gt, np.ndarray[np.uint8_t, ndim=1] iscrowd, siz m, siz n, np.ndarray[np.double_t, ndim=1] _iou):\n */\n\n/* Python wrapper */\nstatic PyObject *__pyx_pw_11pysobatools_5_mask_3iou_3_rleIou(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/\nstatic PyMethodDef __pyx_mdef_11pysobatools_5_mask_3iou_3_rleIou = {\"_rleIou\", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_11pysobatools_5_mask_3iou_3_rleIou, METH_VARARGS|METH_KEYWORDS, 0};\nstatic PyObject *__pyx_pw_11pysobatools_5_mask_3iou_3_rleIou(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) {\n  struct __pyx_obj_11pysobatools_5_mask_RLEs *__pyx_v_dt = 0;\n  struct __pyx_obj_11pysobatools_5_mask_RLEs *__pyx_v_gt = 0;\n  PyArrayObject *__pyx_v_iscrowd = 0;\n  siz __pyx_v_m;\n  siz __pyx_v_n;\n  PyArrayObject *__pyx_v__iou = 0;\n  PyObject *__pyx_r = 0;\n  __Pyx_RefNannyDeclarations\n  __Pyx_RefNannySetupContext(\"_rleIou (wrapper)\", 0);\n  {\n    static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_dt,&__pyx_n_s_gt,&__pyx_n_s_iscrowd,&__pyx_n_s_m,&__pyx_n_s_n,&__pyx_n_s_iou,0};\n    PyObject* values[6] = {0,0,0,0,0,0};\n    if (unlikely(__pyx_kwds)) {\n      Py_ssize_t kw_args;\n      const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args);\n      switch (pos_args) {\n        case  6: values[5] = PyTuple_GET_ITEM(__pyx_args, 5);\n        CYTHON_FALLTHROUGH;\n        case  5: values[4] = PyTuple_GET_ITEM(__pyx_args, 4);\n        CYTHON_FALLTHROUGH;\n        case  4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3);\n        
CYTHON_FALLTHROUGH;\n        case  3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2);\n        CYTHON_FALLTHROUGH;\n        case  2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1);\n        CYTHON_FALLTHROUGH;\n        case  1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0);\n        CYTHON_FALLTHROUGH;\n        case  0: break;\n        default: goto __pyx_L5_argtuple_error;\n      }\n      kw_args = PyDict_Size(__pyx_kwds);\n      switch (pos_args) {\n        case  0:\n        if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_dt)) != 0)) kw_args--;\n        else goto __pyx_L5_argtuple_error;\n        CYTHON_FALLTHROUGH;\n        case  1:\n        if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_gt)) != 0)) kw_args--;\n        else {\n          __Pyx_RaiseArgtupleInvalid(\"_rleIou\", 1, 6, 6, 1); __PYX_ERR(0, 197, __pyx_L3_error)\n        }\n        CYTHON_FALLTHROUGH;\n        case  2:\n        if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_iscrowd)) != 0)) kw_args--;\n        else {\n          __Pyx_RaiseArgtupleInvalid(\"_rleIou\", 1, 6, 6, 2); __PYX_ERR(0, 197, __pyx_L3_error)\n        }\n        CYTHON_FALLTHROUGH;\n        case  3:\n        if (likely((values[3] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_m)) != 0)) kw_args--;\n        else {\n          __Pyx_RaiseArgtupleInvalid(\"_rleIou\", 1, 6, 6, 3); __PYX_ERR(0, 197, __pyx_L3_error)\n        }\n        CYTHON_FALLTHROUGH;\n        case  4:\n        if (likely((values[4] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_n)) != 0)) kw_args--;\n        else {\n          __Pyx_RaiseArgtupleInvalid(\"_rleIou\", 1, 6, 6, 4); __PYX_ERR(0, 197, __pyx_L3_error)\n        }\n        CYTHON_FALLTHROUGH;\n        case  5:\n        if (likely((values[5] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_iou)) != 0)) kw_args--;\n        else {\n          __Pyx_RaiseArgtupleInvalid(\"_rleIou\", 1, 6, 6, 5); __PYX_ERR(0, 197, __pyx_L3_error)\n        }\n      }\n     
 if (unlikely(kw_args > 0)) {\n        if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, \"_rleIou\") < 0)) __PYX_ERR(0, 197, __pyx_L3_error)\n      }\n    } else if (PyTuple_GET_SIZE(__pyx_args) != 6) {\n      goto __pyx_L5_argtuple_error;\n    } else {\n      values[0] = PyTuple_GET_ITEM(__pyx_args, 0);\n      values[1] = PyTuple_GET_ITEM(__pyx_args, 1);\n      values[2] = PyTuple_GET_ITEM(__pyx_args, 2);\n      values[3] = PyTuple_GET_ITEM(__pyx_args, 3);\n      values[4] = PyTuple_GET_ITEM(__pyx_args, 4);\n      values[5] = PyTuple_GET_ITEM(__pyx_args, 5);\n    }\n    __pyx_v_dt = ((struct __pyx_obj_11pysobatools_5_mask_RLEs *)values[0]);\n    __pyx_v_gt = ((struct __pyx_obj_11pysobatools_5_mask_RLEs *)values[1]);\n    __pyx_v_iscrowd = ((PyArrayObject *)values[2]);\n    __pyx_v_m = __Pyx_PyInt_As_siz(values[3]); if (unlikely((__pyx_v_m == ((siz)-1)) && PyErr_Occurred())) __PYX_ERR(0, 197, __pyx_L3_error)\n    __pyx_v_n = __Pyx_PyInt_As_siz(values[4]); if (unlikely((__pyx_v_n == ((siz)-1)) && PyErr_Occurred())) __PYX_ERR(0, 197, __pyx_L3_error)\n    __pyx_v__iou = ((PyArrayObject *)values[5]);\n  }\n  goto __pyx_L4_argument_unpacking_done;\n  __pyx_L5_argtuple_error:;\n  __Pyx_RaiseArgtupleInvalid(\"_rleIou\", 1, 6, 6, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 197, __pyx_L3_error)\n  __pyx_L3_error:;\n  __Pyx_AddTraceback(\"pysobatools._mask.iou._rleIou\", __pyx_clineno, __pyx_lineno, __pyx_filename);\n  __Pyx_RefNannyFinishContext();\n  return NULL;\n  __pyx_L4_argument_unpacking_done:;\n  if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_dt), __pyx_ptype_11pysobatools_5_mask_RLEs, 1, \"dt\", 0))) __PYX_ERR(0, 197, __pyx_L1_error)\n  if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_gt), __pyx_ptype_11pysobatools_5_mask_RLEs, 1, \"gt\", 0))) __PYX_ERR(0, 197, __pyx_L1_error)\n  if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_iscrowd), __pyx_ptype_5numpy_ndarray, 1, \"iscrowd\", 0))) __PYX_ERR(0, 197, 
__pyx_L1_error)\n  if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v__iou), __pyx_ptype_5numpy_ndarray, 1, \"_iou\", 0))) __PYX_ERR(0, 197, __pyx_L1_error)\n  __pyx_r = __pyx_pf_11pysobatools_5_mask_3iou_2_rleIou(__pyx_self, __pyx_v_dt, __pyx_v_gt, __pyx_v_iscrowd, __pyx_v_m, __pyx_v_n, __pyx_v__iou);\n\n  /* function exit code */\n  goto __pyx_L0;\n  __pyx_L1_error:;\n  __pyx_r = NULL;\n  __pyx_L0:;\n  __Pyx_RefNannyFinishContext();\n  return __pyx_r;\n}\n\nstatic PyObject *__pyx_pf_11pysobatools_5_mask_3iou_2_rleIou(CYTHON_UNUSED PyObject *__pyx_self, struct __pyx_obj_11pysobatools_5_mask_RLEs *__pyx_v_dt, struct __pyx_obj_11pysobatools_5_mask_RLEs *__pyx_v_gt, PyArrayObject *__pyx_v_iscrowd, siz __pyx_v_m, siz __pyx_v_n, PyArrayObject *__pyx_v__iou) {\n  __Pyx_LocalBuf_ND __pyx_pybuffernd__iou;\n  __Pyx_Buffer __pyx_pybuffer__iou;\n  __Pyx_LocalBuf_ND __pyx_pybuffernd_iscrowd;\n  __Pyx_Buffer __pyx_pybuffer_iscrowd;\n  PyObject *__pyx_r = NULL;\n  __Pyx_RefNannyDeclarations\n  __Pyx_RefNannySetupContext(\"_rleIou\", 0);\n  __pyx_pybuffer_iscrowd.pybuffer.buf = NULL;\n  __pyx_pybuffer_iscrowd.refcount = 0;\n  __pyx_pybuffernd_iscrowd.data = NULL;\n  __pyx_pybuffernd_iscrowd.rcbuffer = &__pyx_pybuffer_iscrowd;\n  __pyx_pybuffer__iou.pybuffer.buf = NULL;\n  __pyx_pybuffer__iou.refcount = 0;\n  __pyx_pybuffernd__iou.data = NULL;\n  __pyx_pybuffernd__iou.rcbuffer = &__pyx_pybuffer__iou;\n  {\n    __Pyx_BufFmt_StackElem __pyx_stack[1];\n    if (unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_iscrowd.rcbuffer->pybuffer, (PyObject*)__pyx_v_iscrowd, &__Pyx_TypeInfo_nn___pyx_t_5numpy_uint8_t, PyBUF_FORMAT| PyBUF_STRIDES, 1, 0, __pyx_stack) == -1)) __PYX_ERR(0, 197, __pyx_L1_error)\n  }\n  __pyx_pybuffernd_iscrowd.diminfo[0].strides = __pyx_pybuffernd_iscrowd.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_iscrowd.diminfo[0].shape = __pyx_pybuffernd_iscrowd.rcbuffer->pybuffer.shape[0];\n  {\n    __Pyx_BufFmt_StackElem __pyx_stack[1];\n    if 
(unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd__iou.rcbuffer->pybuffer, (PyObject*)__pyx_v__iou, &__Pyx_TypeInfo_nn___pyx_t_5numpy_double_t, PyBUF_FORMAT| PyBUF_STRIDES, 1, 0, __pyx_stack) == -1)) __PYX_ERR(0, 197, __pyx_L1_error)\n  }\n  __pyx_pybuffernd__iou.diminfo[0].strides = __pyx_pybuffernd__iou.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd__iou.diminfo[0].shape = __pyx_pybuffernd__iou.rcbuffer->pybuffer.shape[0];\n\n  /* \"pysobatools/_mask.pyx\":198\n *         return objs\n *     def _rleIou(RLEs dt, RLEs gt, np.ndarray[np.uint8_t, ndim=1] iscrowd, siz m, siz n, np.ndarray[np.double_t,  ndim=1] _iou):\n *         rleIou( <RLE*> dt._R, <RLE*> gt._R, m, n, <byte*> iscrowd.data, <double*> _iou.data )             # <<<<<<<<<<<<<<\n *     def _bbIou(np.ndarray[np.double_t, ndim=2] dt, np.ndarray[np.double_t, ndim=2] gt, np.ndarray[np.uint8_t, ndim=1] iscrowd, siz m, siz n, np.ndarray[np.double_t, ndim=1] _iou):\n *         bbIou( <BB> dt.data, <BB> gt.data, m, n, <byte*> iscrowd.data, <double*>_iou.data )\n */\n  rleIou(((RLE *)__pyx_v_dt->_R), ((RLE *)__pyx_v_gt->_R), __pyx_v_m, __pyx_v_n, ((byte *)__pyx_v_iscrowd->data), ((double *)__pyx_v__iou->data));\n\n  /* \"pysobatools/_mask.pyx\":197\n *             raise Exception('unrecognized type.  
The following type: RLEs (rle), np.ndarray (box), and list (box) are supported.')\n *         return objs\n *     def _rleIou(RLEs dt, RLEs gt, np.ndarray[np.uint8_t, ndim=1] iscrowd, siz m, siz n, np.ndarray[np.double_t,  ndim=1] _iou):             # <<<<<<<<<<<<<<\n *         rleIou( <RLE*> dt._R, <RLE*> gt._R, m, n, <byte*> iscrowd.data, <double*> _iou.data )\n *     def _bbIou(np.ndarray[np.double_t, ndim=2] dt, np.ndarray[np.double_t, ndim=2] gt, np.ndarray[np.uint8_t, ndim=1] iscrowd, siz m, siz n, np.ndarray[np.double_t, ndim=1] _iou):\n */\n\n  /* function exit code */\n  __pyx_r = Py_None; __Pyx_INCREF(Py_None);\n  goto __pyx_L0;\n  __pyx_L1_error:;\n  { PyObject *__pyx_type, *__pyx_value, *__pyx_tb;\n    __Pyx_PyThreadState_declare\n    __Pyx_PyThreadState_assign\n    __Pyx_ErrFetch(&__pyx_type, &__pyx_value, &__pyx_tb);\n    __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd__iou.rcbuffer->pybuffer);\n    __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_iscrowd.rcbuffer->pybuffer);\n  __Pyx_ErrRestore(__pyx_type, __pyx_value, __pyx_tb);}\n  __Pyx_AddTraceback(\"pysobatools._mask.iou._rleIou\", __pyx_clineno, __pyx_lineno, __pyx_filename);\n  __pyx_r = NULL;\n  goto __pyx_L2;\n  __pyx_L0:;\n  __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd__iou.rcbuffer->pybuffer);\n  __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_iscrowd.rcbuffer->pybuffer);\n  __pyx_L2:;\n  __Pyx_XGIVEREF(__pyx_r);\n  __Pyx_RefNannyFinishContext();\n  return __pyx_r;\n}\n\n/* \"pysobatools/_mask.pyx\":199\n *     def _rleIou(RLEs dt, RLEs gt, np.ndarray[np.uint8_t, ndim=1] iscrowd, siz m, siz n, np.ndarray[np.double_t,  ndim=1] _iou):\n *         rleIou( <RLE*> dt._R, <RLE*> gt._R, m, n, <byte*> iscrowd.data, <double*> _iou.data )\n *     def _bbIou(np.ndarray[np.double_t, ndim=2] dt, np.ndarray[np.double_t, ndim=2] gt, np.ndarray[np.uint8_t, ndim=1] iscrowd, siz m, siz n, np.ndarray[np.double_t, ndim=1] _iou):             # <<<<<<<<<<<<<<\n *         bbIou( <BB> dt.data, <BB> gt.data, m, n, <byte*> iscrowd.data, 
<double*>_iou.data )\n *     def _len(obj):\n */\n\n/* Python wrapper */\nstatic PyObject *__pyx_pw_11pysobatools_5_mask_3iou_5_bbIou(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/\nstatic PyMethodDef __pyx_mdef_11pysobatools_5_mask_3iou_5_bbIou = {\"_bbIou\", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_11pysobatools_5_mask_3iou_5_bbIou, METH_VARARGS|METH_KEYWORDS, 0};\nstatic PyObject *__pyx_pw_11pysobatools_5_mask_3iou_5_bbIou(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) {\n  PyArrayObject *__pyx_v_dt = 0;\n  PyArrayObject *__pyx_v_gt = 0;\n  PyArrayObject *__pyx_v_iscrowd = 0;\n  siz __pyx_v_m;\n  siz __pyx_v_n;\n  PyArrayObject *__pyx_v__iou = 0;\n  PyObject *__pyx_r = 0;\n  __Pyx_RefNannyDeclarations\n  __Pyx_RefNannySetupContext(\"_bbIou (wrapper)\", 0);\n  {\n    static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_dt,&__pyx_n_s_gt,&__pyx_n_s_iscrowd,&__pyx_n_s_m,&__pyx_n_s_n,&__pyx_n_s_iou,0};\n    PyObject* values[6] = {0,0,0,0,0,0};\n    if (unlikely(__pyx_kwds)) {\n      Py_ssize_t kw_args;\n      const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args);\n      switch (pos_args) {\n        case  6: values[5] = PyTuple_GET_ITEM(__pyx_args, 5);\n        CYTHON_FALLTHROUGH;\n        case  5: values[4] = PyTuple_GET_ITEM(__pyx_args, 4);\n        CYTHON_FALLTHROUGH;\n        case  4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3);\n        CYTHON_FALLTHROUGH;\n        case  3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2);\n        CYTHON_FALLTHROUGH;\n        case  2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1);\n        CYTHON_FALLTHROUGH;\n        case  1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0);\n        CYTHON_FALLTHROUGH;\n        case  0: break;\n        default: goto __pyx_L5_argtuple_error;\n      }\n      kw_args = PyDict_Size(__pyx_kwds);\n      switch (pos_args) {\n        case  0:\n        if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_dt)) != 0)) kw_args--;\n        
else goto __pyx_L5_argtuple_error;\n        CYTHON_FALLTHROUGH;\n        case  1:\n        if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_gt)) != 0)) kw_args--;\n        else {\n          __Pyx_RaiseArgtupleInvalid(\"_bbIou\", 1, 6, 6, 1); __PYX_ERR(0, 199, __pyx_L3_error)\n        }\n        CYTHON_FALLTHROUGH;\n        case  2:\n        if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_iscrowd)) != 0)) kw_args--;\n        else {\n          __Pyx_RaiseArgtupleInvalid(\"_bbIou\", 1, 6, 6, 2); __PYX_ERR(0, 199, __pyx_L3_error)\n        }\n        CYTHON_FALLTHROUGH;\n        case  3:\n        if (likely((values[3] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_m)) != 0)) kw_args--;\n        else {\n          __Pyx_RaiseArgtupleInvalid(\"_bbIou\", 1, 6, 6, 3); __PYX_ERR(0, 199, __pyx_L3_error)\n        }\n        CYTHON_FALLTHROUGH;\n        case  4:\n        if (likely((values[4] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_n)) != 0)) kw_args--;\n        else {\n          __Pyx_RaiseArgtupleInvalid(\"_bbIou\", 1, 6, 6, 4); __PYX_ERR(0, 199, __pyx_L3_error)\n        }\n        CYTHON_FALLTHROUGH;\n        case  5:\n        if (likely((values[5] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_iou)) != 0)) kw_args--;\n        else {\n          __Pyx_RaiseArgtupleInvalid(\"_bbIou\", 1, 6, 6, 5); __PYX_ERR(0, 199, __pyx_L3_error)\n        }\n      }\n      if (unlikely(kw_args > 0)) {\n        if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, \"_bbIou\") < 0)) __PYX_ERR(0, 199, __pyx_L3_error)\n      }\n    } else if (PyTuple_GET_SIZE(__pyx_args) != 6) {\n      goto __pyx_L5_argtuple_error;\n    } else {\n      values[0] = PyTuple_GET_ITEM(__pyx_args, 0);\n      values[1] = PyTuple_GET_ITEM(__pyx_args, 1);\n      values[2] = PyTuple_GET_ITEM(__pyx_args, 2);\n      values[3] = PyTuple_GET_ITEM(__pyx_args, 3);\n      values[4] = PyTuple_GET_ITEM(__pyx_args, 4);\n      values[5] = 
PyTuple_GET_ITEM(__pyx_args, 5);\n    }\n    __pyx_v_dt = ((PyArrayObject *)values[0]);\n    __pyx_v_gt = ((PyArrayObject *)values[1]);\n    __pyx_v_iscrowd = ((PyArrayObject *)values[2]);\n    __pyx_v_m = __Pyx_PyInt_As_siz(values[3]); if (unlikely((__pyx_v_m == ((siz)-1)) && PyErr_Occurred())) __PYX_ERR(0, 199, __pyx_L3_error)\n    __pyx_v_n = __Pyx_PyInt_As_siz(values[4]); if (unlikely((__pyx_v_n == ((siz)-1)) && PyErr_Occurred())) __PYX_ERR(0, 199, __pyx_L3_error)\n    __pyx_v__iou = ((PyArrayObject *)values[5]);\n  }\n  goto __pyx_L4_argument_unpacking_done;\n  __pyx_L5_argtuple_error:;\n  __Pyx_RaiseArgtupleInvalid(\"_bbIou\", 1, 6, 6, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 199, __pyx_L3_error)\n  __pyx_L3_error:;\n  __Pyx_AddTraceback(\"pysobatools._mask.iou._bbIou\", __pyx_clineno, __pyx_lineno, __pyx_filename);\n  __Pyx_RefNannyFinishContext();\n  return NULL;\n  __pyx_L4_argument_unpacking_done:;\n  if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_dt), __pyx_ptype_5numpy_ndarray, 1, \"dt\", 0))) __PYX_ERR(0, 199, __pyx_L1_error)\n  if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_gt), __pyx_ptype_5numpy_ndarray, 1, \"gt\", 0))) __PYX_ERR(0, 199, __pyx_L1_error)\n  if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_iscrowd), __pyx_ptype_5numpy_ndarray, 1, \"iscrowd\", 0))) __PYX_ERR(0, 199, __pyx_L1_error)\n  if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v__iou), __pyx_ptype_5numpy_ndarray, 1, \"_iou\", 0))) __PYX_ERR(0, 199, __pyx_L1_error)\n  __pyx_r = __pyx_pf_11pysobatools_5_mask_3iou_4_bbIou(__pyx_self, __pyx_v_dt, __pyx_v_gt, __pyx_v_iscrowd, __pyx_v_m, __pyx_v_n, __pyx_v__iou);\n\n  /* function exit code */\n  goto __pyx_L0;\n  __pyx_L1_error:;\n  __pyx_r = NULL;\n  __pyx_L0:;\n  __Pyx_RefNannyFinishContext();\n  return __pyx_r;\n}\n\nstatic PyObject *__pyx_pf_11pysobatools_5_mask_3iou_4_bbIou(CYTHON_UNUSED PyObject *__pyx_self, PyArrayObject *__pyx_v_dt, PyArrayObject *__pyx_v_gt, PyArrayObject *__pyx_v_iscrowd, siz __pyx_v_m, 
siz __pyx_v_n, PyArrayObject *__pyx_v__iou) {\n  __Pyx_LocalBuf_ND __pyx_pybuffernd__iou;\n  __Pyx_Buffer __pyx_pybuffer__iou;\n  __Pyx_LocalBuf_ND __pyx_pybuffernd_dt;\n  __Pyx_Buffer __pyx_pybuffer_dt;\n  __Pyx_LocalBuf_ND __pyx_pybuffernd_gt;\n  __Pyx_Buffer __pyx_pybuffer_gt;\n  __Pyx_LocalBuf_ND __pyx_pybuffernd_iscrowd;\n  __Pyx_Buffer __pyx_pybuffer_iscrowd;\n  PyObject *__pyx_r = NULL;\n  __Pyx_RefNannyDeclarations\n  __Pyx_RefNannySetupContext(\"_bbIou\", 0);\n  __pyx_pybuffer_dt.pybuffer.buf = NULL;\n  __pyx_pybuffer_dt.refcount = 0;\n  __pyx_pybuffernd_dt.data = NULL;\n  __pyx_pybuffernd_dt.rcbuffer = &__pyx_pybuffer_dt;\n  __pyx_pybuffer_gt.pybuffer.buf = NULL;\n  __pyx_pybuffer_gt.refcount = 0;\n  __pyx_pybuffernd_gt.data = NULL;\n  __pyx_pybuffernd_gt.rcbuffer = &__pyx_pybuffer_gt;\n  __pyx_pybuffer_iscrowd.pybuffer.buf = NULL;\n  __pyx_pybuffer_iscrowd.refcount = 0;\n  __pyx_pybuffernd_iscrowd.data = NULL;\n  __pyx_pybuffernd_iscrowd.rcbuffer = &__pyx_pybuffer_iscrowd;\n  __pyx_pybuffer__iou.pybuffer.buf = NULL;\n  __pyx_pybuffer__iou.refcount = 0;\n  __pyx_pybuffernd__iou.data = NULL;\n  __pyx_pybuffernd__iou.rcbuffer = &__pyx_pybuffer__iou;\n  {\n    __Pyx_BufFmt_StackElem __pyx_stack[1];\n    if (unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_dt.rcbuffer->pybuffer, (PyObject*)__pyx_v_dt, &__Pyx_TypeInfo_nn___pyx_t_5numpy_double_t, PyBUF_FORMAT| PyBUF_STRIDES, 2, 0, __pyx_stack) == -1)) __PYX_ERR(0, 199, __pyx_L1_error)\n  }\n  __pyx_pybuffernd_dt.diminfo[0].strides = __pyx_pybuffernd_dt.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_dt.diminfo[0].shape = __pyx_pybuffernd_dt.rcbuffer->pybuffer.shape[0]; __pyx_pybuffernd_dt.diminfo[1].strides = __pyx_pybuffernd_dt.rcbuffer->pybuffer.strides[1]; __pyx_pybuffernd_dt.diminfo[1].shape = __pyx_pybuffernd_dt.rcbuffer->pybuffer.shape[1];\n  {\n    __Pyx_BufFmt_StackElem __pyx_stack[1];\n    if (unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_gt.rcbuffer->pybuffer, (PyObject*)__pyx_v_gt, 
&__Pyx_TypeInfo_nn___pyx_t_5numpy_double_t, PyBUF_FORMAT| PyBUF_STRIDES, 2, 0, __pyx_stack) == -1)) __PYX_ERR(0, 199, __pyx_L1_error)\n  }\n  __pyx_pybuffernd_gt.diminfo[0].strides = __pyx_pybuffernd_gt.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_gt.diminfo[0].shape = __pyx_pybuffernd_gt.rcbuffer->pybuffer.shape[0]; __pyx_pybuffernd_gt.diminfo[1].strides = __pyx_pybuffernd_gt.rcbuffer->pybuffer.strides[1]; __pyx_pybuffernd_gt.diminfo[1].shape = __pyx_pybuffernd_gt.rcbuffer->pybuffer.shape[1];\n  {\n    __Pyx_BufFmt_StackElem __pyx_stack[1];\n    if (unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_iscrowd.rcbuffer->pybuffer, (PyObject*)__pyx_v_iscrowd, &__Pyx_TypeInfo_nn___pyx_t_5numpy_uint8_t, PyBUF_FORMAT| PyBUF_STRIDES, 1, 0, __pyx_stack) == -1)) __PYX_ERR(0, 199, __pyx_L1_error)\n  }\n  __pyx_pybuffernd_iscrowd.diminfo[0].strides = __pyx_pybuffernd_iscrowd.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_iscrowd.diminfo[0].shape = __pyx_pybuffernd_iscrowd.rcbuffer->pybuffer.shape[0];\n  {\n    __Pyx_BufFmt_StackElem __pyx_stack[1];\n    if (unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd__iou.rcbuffer->pybuffer, (PyObject*)__pyx_v__iou, &__Pyx_TypeInfo_nn___pyx_t_5numpy_double_t, PyBUF_FORMAT| PyBUF_STRIDES, 1, 0, __pyx_stack) == -1)) __PYX_ERR(0, 199, __pyx_L1_error)\n  }\n  __pyx_pybuffernd__iou.diminfo[0].strides = __pyx_pybuffernd__iou.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd__iou.diminfo[0].shape = __pyx_pybuffernd__iou.rcbuffer->pybuffer.shape[0];\n\n  /* \"pysobatools/_mask.pyx\":200\n *         rleIou( <RLE*> dt._R, <RLE*> gt._R, m, n, <byte*> iscrowd.data, <double*> _iou.data )\n *     def _bbIou(np.ndarray[np.double_t, ndim=2] dt, np.ndarray[np.double_t, ndim=2] gt, np.ndarray[np.uint8_t, ndim=1] iscrowd, siz m, siz n, np.ndarray[np.double_t, ndim=1] _iou):\n *         bbIou( <BB> dt.data, <BB> gt.data, m, n, <byte*> iscrowd.data, <double*>_iou.data )             # <<<<<<<<<<<<<<\n *     def _len(obj):\n *         cdef siz N = 0\n 
*/\n  bbIou(((BB)__pyx_v_dt->data), ((BB)__pyx_v_gt->data), __pyx_v_m, __pyx_v_n, ((byte *)__pyx_v_iscrowd->data), ((double *)__pyx_v__iou->data));\n\n  /* \"pysobatools/_mask.pyx\":199\n *     def _rleIou(RLEs dt, RLEs gt, np.ndarray[np.uint8_t, ndim=1] iscrowd, siz m, siz n, np.ndarray[np.double_t,  ndim=1] _iou):\n *         rleIou( <RLE*> dt._R, <RLE*> gt._R, m, n, <byte*> iscrowd.data, <double*> _iou.data )\n *     def _bbIou(np.ndarray[np.double_t, ndim=2] dt, np.ndarray[np.double_t, ndim=2] gt, np.ndarray[np.uint8_t, ndim=1] iscrowd, siz m, siz n, np.ndarray[np.double_t, ndim=1] _iou):             # <<<<<<<<<<<<<<\n *         bbIou( <BB> dt.data, <BB> gt.data, m, n, <byte*> iscrowd.data, <double*>_iou.data )\n *     def _len(obj):\n */\n\n  /* function exit code */\n  __pyx_r = Py_None; __Pyx_INCREF(Py_None);\n  goto __pyx_L0;\n  __pyx_L1_error:;\n  { PyObject *__pyx_type, *__pyx_value, *__pyx_tb;\n    __Pyx_PyThreadState_declare\n    __Pyx_PyThreadState_assign\n    __Pyx_ErrFetch(&__pyx_type, &__pyx_value, &__pyx_tb);\n    __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd__iou.rcbuffer->pybuffer);\n    __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_dt.rcbuffer->pybuffer);\n    __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_gt.rcbuffer->pybuffer);\n    __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_iscrowd.rcbuffer->pybuffer);\n  __Pyx_ErrRestore(__pyx_type, __pyx_value, __pyx_tb);}\n  __Pyx_AddTraceback(\"pysobatools._mask.iou._bbIou\", __pyx_clineno, __pyx_lineno, __pyx_filename);\n  __pyx_r = NULL;\n  goto __pyx_L2;\n  __pyx_L0:;\n  __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd__iou.rcbuffer->pybuffer);\n  __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_dt.rcbuffer->pybuffer);\n  __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_gt.rcbuffer->pybuffer);\n  __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_iscrowd.rcbuffer->pybuffer);\n  __pyx_L2:;\n  __Pyx_XGIVEREF(__pyx_r);\n  __Pyx_RefNannyFinishContext();\n  return __pyx_r;\n}\n\n/* \"pysobatools/_mask.pyx\":201\n *     def 
_bbIou(np.ndarray[np.double_t, ndim=2] dt, np.ndarray[np.double_t, ndim=2] gt, np.ndarray[np.uint8_t, ndim=1] iscrowd, siz m, siz n, np.ndarray[np.double_t, ndim=1] _iou):\n *         bbIou( <BB> dt.data, <BB> gt.data, m, n, <byte*> iscrowd.data, <double*>_iou.data )\n *     def _len(obj):             # <<<<<<<<<<<<<<\n *         cdef siz N = 0\n *         if type(obj) == RLEs:\n */\n\n/* Python wrapper */\nstatic PyObject *__pyx_pw_11pysobatools_5_mask_3iou_7_len(PyObject *__pyx_self, PyObject *__pyx_v_obj); /*proto*/\nstatic PyMethodDef __pyx_mdef_11pysobatools_5_mask_3iou_7_len = {\"_len\", (PyCFunction)__pyx_pw_11pysobatools_5_mask_3iou_7_len, METH_O, 0};\nstatic PyObject *__pyx_pw_11pysobatools_5_mask_3iou_7_len(PyObject *__pyx_self, PyObject *__pyx_v_obj) {\n  PyObject *__pyx_r = 0;\n  __Pyx_RefNannyDeclarations\n  __Pyx_RefNannySetupContext(\"_len (wrapper)\", 0);\n  __pyx_r = __pyx_pf_11pysobatools_5_mask_3iou_6_len(__pyx_self, ((PyObject *)__pyx_v_obj));\n\n  /* function exit code */\n  __Pyx_RefNannyFinishContext();\n  return __pyx_r;\n}\n\nstatic PyObject *__pyx_pf_11pysobatools_5_mask_3iou_6_len(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_obj) {\n  siz __pyx_v_N;\n  PyObject *__pyx_r = NULL;\n  __Pyx_RefNannyDeclarations\n  PyObject *__pyx_t_1 = NULL;\n  int __pyx_t_2;\n  siz __pyx_t_3;\n  Py_ssize_t __pyx_t_4;\n  PyObject *__pyx_t_5 = NULL;\n  __Pyx_RefNannySetupContext(\"_len\", 0);\n\n  /* \"pysobatools/_mask.pyx\":202\n *         bbIou( <BB> dt.data, <BB> gt.data, m, n, <byte*> iscrowd.data, <double*>_iou.data )\n *     def _len(obj):\n *         cdef siz N = 0             # <<<<<<<<<<<<<<\n *         if type(obj) == RLEs:\n *             N = obj.n\n */\n  __pyx_v_N = 0;\n\n  /* \"pysobatools/_mask.pyx\":203\n *     def _len(obj):\n *         cdef siz N = 0\n *         if type(obj) == RLEs:             # <<<<<<<<<<<<<<\n *             N = obj.n\n *         elif len(obj)==0:\n */\n  __pyx_t_1 = PyObject_RichCompare(((PyObject 
*)Py_TYPE(__pyx_v_obj)), ((PyObject *)__pyx_ptype_11pysobatools_5_mask_RLEs), Py_EQ); __Pyx_XGOTREF(__pyx_t_1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 203, __pyx_L1_error)\n  __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 203, __pyx_L1_error)\n  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;\n  if (__pyx_t_2) {\n\n    /* \"pysobatools/_mask.pyx\":204\n *         cdef siz N = 0\n *         if type(obj) == RLEs:\n *             N = obj.n             # <<<<<<<<<<<<<<\n *         elif len(obj)==0:\n *             pass\n */\n    __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_obj, __pyx_n_s_n); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 204, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_1);\n    __pyx_t_3 = __Pyx_PyInt_As_siz(__pyx_t_1); if (unlikely((__pyx_t_3 == ((siz)-1)) && PyErr_Occurred())) __PYX_ERR(0, 204, __pyx_L1_error)\n    __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;\n    __pyx_v_N = __pyx_t_3;\n\n    /* \"pysobatools/_mask.pyx\":203\n *     def _len(obj):\n *         cdef siz N = 0\n *         if type(obj) == RLEs:             # <<<<<<<<<<<<<<\n *             N = obj.n\n *         elif len(obj)==0:\n */\n    goto __pyx_L3;\n  }\n\n  /* \"pysobatools/_mask.pyx\":205\n *         if type(obj) == RLEs:\n *             N = obj.n\n *         elif len(obj)==0:             # <<<<<<<<<<<<<<\n *             pass\n *         elif type(obj) == np.ndarray:\n */\n  __pyx_t_4 = PyObject_Length(__pyx_v_obj); if (unlikely(__pyx_t_4 == ((Py_ssize_t)-1))) __PYX_ERR(0, 205, __pyx_L1_error)\n  __pyx_t_2 = ((__pyx_t_4 == 0) != 0);\n  if (__pyx_t_2) {\n    goto __pyx_L3;\n  }\n\n  /* \"pysobatools/_mask.pyx\":207\n *         elif len(obj)==0:\n *             pass\n *         elif type(obj) == np.ndarray:             # <<<<<<<<<<<<<<\n *             N = obj.shape[0]\n *         return N\n */\n  __pyx_t_1 = PyObject_RichCompare(((PyObject *)Py_TYPE(__pyx_v_obj)), ((PyObject *)__pyx_ptype_5numpy_ndarray), Py_EQ); __Pyx_XGOTREF(__pyx_t_1); if (unlikely(!__pyx_t_1)) 
__PYX_ERR(0, 207, __pyx_L1_error)\n  __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 207, __pyx_L1_error)\n  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;\n  if (__pyx_t_2) {\n\n    /* \"pysobatools/_mask.pyx\":208\n *             pass\n *         elif type(obj) == np.ndarray:\n *             N = obj.shape[0]             # <<<<<<<<<<<<<<\n *         return N\n *     # convert iscrowd to numpy array\n */\n    __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_v_obj, __pyx_n_s_shape); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 208, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_1);\n    __pyx_t_5 = __Pyx_GetItemInt(__pyx_t_1, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 208, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_5);\n    __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;\n    __pyx_t_3 = __Pyx_PyInt_As_siz(__pyx_t_5); if (unlikely((__pyx_t_3 == ((siz)-1)) && PyErr_Occurred())) __PYX_ERR(0, 208, __pyx_L1_error)\n    __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;\n    __pyx_v_N = __pyx_t_3;\n\n    /* \"pysobatools/_mask.pyx\":207\n *         elif len(obj)==0:\n *             pass\n *         elif type(obj) == np.ndarray:             # <<<<<<<<<<<<<<\n *             N = obj.shape[0]\n *         return N\n */\n  }\n  __pyx_L3:;\n\n  /* \"pysobatools/_mask.pyx\":209\n *         elif type(obj) == np.ndarray:\n *             N = obj.shape[0]\n *         return N             # <<<<<<<<<<<<<<\n *     # convert iscrowd to numpy array\n *     cdef np.ndarray[np.uint8_t, ndim=1] iscrowd = np.array(pyiscrowd, dtype=np.uint8)\n */\n  __Pyx_XDECREF(__pyx_r);\n  __pyx_t_5 = __Pyx_PyInt_From_siz(__pyx_v_N); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 209, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_5);\n  __pyx_r = __pyx_t_5;\n  __pyx_t_5 = 0;\n  goto __pyx_L0;\n\n  /* \"pysobatools/_mask.pyx\":201\n *     def _bbIou(np.ndarray[np.double_t, ndim=2] dt, np.ndarray[np.double_t, ndim=2] gt, np.ndarray[np.uint8_t, ndim=1] iscrowd, siz m, siz n, 
np.ndarray[np.double_t, ndim=1] _iou):\n *         bbIou( <BB> dt.data, <BB> gt.data, m, n, <byte*> iscrowd.data, <double*>_iou.data )\n *     def _len(obj):             # <<<<<<<<<<<<<<\n *         cdef siz N = 0\n *         if type(obj) == RLEs:\n */\n\n  /* function exit code */\n  __pyx_L1_error:;\n  __Pyx_XDECREF(__pyx_t_1);\n  __Pyx_XDECREF(__pyx_t_5);\n  __Pyx_AddTraceback(\"pysobatools._mask.iou._len\", __pyx_clineno, __pyx_lineno, __pyx_filename);\n  __pyx_r = NULL;\n  __pyx_L0:;\n  __Pyx_XGIVEREF(__pyx_r);\n  __Pyx_RefNannyFinishContext();\n  return __pyx_r;\n}\n\n/* \"pysobatools/_mask.pyx\":171\n * \n * # iou computation. support function overload (RLEs-RLEs and bbox-bbox).\n * def iou( dt, gt, pyiscrowd ):             # <<<<<<<<<<<<<<\n *     def _preproc(objs):\n *         if len(objs) == 0:\n */\n\nstatic PyObject *__pyx_pf_11pysobatools_5_mask_12iou(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_dt, PyObject *__pyx_v_gt, PyObject *__pyx_v_pyiscrowd) {\n  PyObject *__pyx_v__preproc = 0;\n  PyObject *__pyx_v__rleIou = 0;\n  PyObject *__pyx_v__bbIou = 0;\n  PyObject *__pyx_v__len = 0;\n  PyArrayObject *__pyx_v_iscrowd = 0;\n  siz __pyx_v_m;\n  siz __pyx_v_n;\n  double *__pyx_v__iou;\n  npy_intp __pyx_v_shape[1];\n  PyObject *__pyx_v__iouFun = NULL;\n  PyObject *__pyx_v_iou = NULL;\n  __Pyx_LocalBuf_ND __pyx_pybuffernd_iscrowd;\n  __Pyx_Buffer __pyx_pybuffer_iscrowd;\n  PyObject *__pyx_r = NULL;\n  __Pyx_RefNannyDeclarations\n  PyObject *__pyx_t_1 = NULL;\n  PyObject *__pyx_t_2 = NULL;\n  PyObject *__pyx_t_3 = NULL;\n  PyObject *__pyx_t_4 = NULL;\n  PyObject *__pyx_t_5 = NULL;\n  PyArrayObject *__pyx_t_6 = NULL;\n  siz __pyx_t_7;\n  int __pyx_t_8;\n  int __pyx_t_9;\n  int __pyx_t_10;\n  PyObject *__pyx_t_11 = NULL;\n  __Pyx_RefNannySetupContext(\"iou\", 0);\n  __Pyx_INCREF(__pyx_v_dt);\n  __Pyx_INCREF(__pyx_v_gt);\n  __pyx_pybuffer_iscrowd.pybuffer.buf = NULL;\n  __pyx_pybuffer_iscrowd.refcount = 0;\n  __pyx_pybuffernd_iscrowd.data = NULL;\n  
__pyx_pybuffernd_iscrowd.rcbuffer = &__pyx_pybuffer_iscrowd;\n\n  /* \"pysobatools/_mask.pyx\":172\n * # iou computation. support function overload (RLEs-RLEs and bbox-bbox).\n * def iou( dt, gt, pyiscrowd ):\n *     def _preproc(objs):             # <<<<<<<<<<<<<<\n *         if len(objs) == 0:\n *             return objs\n */\n  __pyx_t_1 = __Pyx_CyFunction_NewEx(&__pyx_mdef_11pysobatools_5_mask_3iou_1_preproc, 0, __pyx_n_s_iou_locals__preproc, NULL, __pyx_n_s_pysobatools__mask, __pyx_d, ((PyObject *)__pyx_codeobj__11)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 172, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_1);\n  __pyx_v__preproc = __pyx_t_1;\n  __pyx_t_1 = 0;\n\n  /* \"pysobatools/_mask.pyx\":197\n *             raise Exception('unrecognized type.  The following type: RLEs (rle), np.ndarray (box), and list (box) are supported.')\n *         return objs\n *     def _rleIou(RLEs dt, RLEs gt, np.ndarray[np.uint8_t, ndim=1] iscrowd, siz m, siz n, np.ndarray[np.double_t,  ndim=1] _iou):             # <<<<<<<<<<<<<<\n *         rleIou( <RLE*> dt._R, <RLE*> gt._R, m, n, <byte*> iscrowd.data, <double*> _iou.data )\n *     def _bbIou(np.ndarray[np.double_t, ndim=2] dt, np.ndarray[np.double_t, ndim=2] gt, np.ndarray[np.uint8_t, ndim=1] iscrowd, siz m, siz n, np.ndarray[np.double_t, ndim=1] _iou):\n */\n  __pyx_t_1 = __Pyx_CyFunction_NewEx(&__pyx_mdef_11pysobatools_5_mask_3iou_3_rleIou, 0, __pyx_n_s_iou_locals__rleIou, NULL, __pyx_n_s_pysobatools__mask, __pyx_d, ((PyObject *)__pyx_codeobj__13)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 197, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_1);\n  __pyx_v__rleIou = __pyx_t_1;\n  __pyx_t_1 = 0;\n\n  /* \"pysobatools/_mask.pyx\":199\n *     def _rleIou(RLEs dt, RLEs gt, np.ndarray[np.uint8_t, ndim=1] iscrowd, siz m, siz n, np.ndarray[np.double_t,  ndim=1] _iou):\n *         rleIou( <RLE*> dt._R, <RLE*> gt._R, m, n, <byte*> iscrowd.data, <double*> _iou.data )\n *     def _bbIou(np.ndarray[np.double_t, ndim=2] dt, np.ndarray[np.double_t, 
ndim=2] gt, np.ndarray[np.uint8_t, ndim=1] iscrowd, siz m, siz n, np.ndarray[np.double_t, ndim=1] _iou):             # <<<<<<<<<<<<<<\n *         bbIou( <BB> dt.data, <BB> gt.data, m, n, <byte*> iscrowd.data, <double*>_iou.data )\n *     def _len(obj):\n */\n  __pyx_t_1 = __Pyx_CyFunction_NewEx(&__pyx_mdef_11pysobatools_5_mask_3iou_5_bbIou, 0, __pyx_n_s_iou_locals__bbIou, NULL, __pyx_n_s_pysobatools__mask, __pyx_d, ((PyObject *)__pyx_codeobj__15)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 199, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_1);\n  __pyx_v__bbIou = __pyx_t_1;\n  __pyx_t_1 = 0;\n\n  /* \"pysobatools/_mask.pyx\":201\n *     def _bbIou(np.ndarray[np.double_t, ndim=2] dt, np.ndarray[np.double_t, ndim=2] gt, np.ndarray[np.uint8_t, ndim=1] iscrowd, siz m, siz n, np.ndarray[np.double_t, ndim=1] _iou):\n *         bbIou( <BB> dt.data, <BB> gt.data, m, n, <byte*> iscrowd.data, <double*>_iou.data )\n *     def _len(obj):             # <<<<<<<<<<<<<<\n *         cdef siz N = 0\n *         if type(obj) == RLEs:\n */\n  __pyx_t_1 = __Pyx_CyFunction_NewEx(&__pyx_mdef_11pysobatools_5_mask_3iou_7_len, 0, __pyx_n_s_iou_locals__len, NULL, __pyx_n_s_pysobatools__mask, __pyx_d, ((PyObject *)__pyx_codeobj__17)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 201, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_1);\n  __pyx_v__len = __pyx_t_1;\n  __pyx_t_1 = 0;\n\n  /* \"pysobatools/_mask.pyx\":211\n *         return N\n *     # convert iscrowd to numpy array\n *     cdef np.ndarray[np.uint8_t, ndim=1] iscrowd = np.array(pyiscrowd, dtype=np.uint8)             # <<<<<<<<<<<<<<\n *     # simple type checking\n *     cdef siz m, n\n */\n  __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_np); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 211, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_1);\n  __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_array); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 211, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_2);\n  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;\n  __pyx_t_1 = 
PyTuple_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 211, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_1);\n  __Pyx_INCREF(__pyx_v_pyiscrowd);\n  __Pyx_GIVEREF(__pyx_v_pyiscrowd);\n  PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_v_pyiscrowd);\n  __pyx_t_3 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 211, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_3);\n  __Pyx_GetModuleGlobalName(__pyx_t_4, __pyx_n_s_np); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 211, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_4);\n  __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_t_4, __pyx_n_s_uint8); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 211, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_5);\n  __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;\n  if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_dtype, __pyx_t_5) < 0) __PYX_ERR(0, 211, __pyx_L1_error)\n  __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;\n  __pyx_t_5 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_t_1, __pyx_t_3); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 211, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_5);\n  __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;\n  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;\n  __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;\n  if (!(likely(((__pyx_t_5) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_5, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 211, __pyx_L1_error)\n  __pyx_t_6 = ((PyArrayObject *)__pyx_t_5);\n  {\n    __Pyx_BufFmt_StackElem __pyx_stack[1];\n    if (unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_iscrowd.rcbuffer->pybuffer, (PyObject*)__pyx_t_6, &__Pyx_TypeInfo_nn___pyx_t_5numpy_uint8_t, PyBUF_FORMAT| PyBUF_STRIDES, 1, 0, __pyx_stack) == -1)) {\n      __pyx_v_iscrowd = ((PyArrayObject *)Py_None); __Pyx_INCREF(Py_None); __pyx_pybuffernd_iscrowd.rcbuffer->pybuffer.buf = NULL;\n      __PYX_ERR(0, 211, __pyx_L1_error)\n    } else {__pyx_pybuffernd_iscrowd.diminfo[0].strides = __pyx_pybuffernd_iscrowd.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_iscrowd.diminfo[0].shape = __pyx_pybuffernd_iscrowd.rcbuffer->pybuffer.shape[0];\n    }\n  }\n  __pyx_t_6 = 
0;\n  __pyx_v_iscrowd = ((PyArrayObject *)__pyx_t_5);\n  __pyx_t_5 = 0;\n\n  /* \"pysobatools/_mask.pyx\":214\n *     # simple type checking\n *     cdef siz m, n\n *     dt = _preproc(dt)             # <<<<<<<<<<<<<<\n *     gt = _preproc(gt)\n *     m = _len(dt)\n */\n  __pyx_t_5 = __pyx_pf_11pysobatools_5_mask_3iou__preproc(__pyx_v__preproc, __pyx_v_dt); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 214, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_5);\n  __Pyx_DECREF_SET(__pyx_v_dt, __pyx_t_5);\n  __pyx_t_5 = 0;\n\n  /* \"pysobatools/_mask.pyx\":215\n *     cdef siz m, n\n *     dt = _preproc(dt)\n *     gt = _preproc(gt)             # <<<<<<<<<<<<<<\n *     m = _len(dt)\n *     n = _len(gt)\n */\n  __pyx_t_5 = __pyx_pf_11pysobatools_5_mask_3iou__preproc(__pyx_v__preproc, __pyx_v_gt); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 215, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_5);\n  __Pyx_DECREF_SET(__pyx_v_gt, __pyx_t_5);\n  __pyx_t_5 = 0;\n\n  /* \"pysobatools/_mask.pyx\":216\n *     dt = _preproc(dt)\n *     gt = _preproc(gt)\n *     m = _len(dt)             # <<<<<<<<<<<<<<\n *     n = _len(gt)\n *     if m == 0 or n == 0:\n */\n  __pyx_t_5 = __pyx_pf_11pysobatools_5_mask_3iou_6_len(__pyx_v__len, __pyx_v_dt); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 216, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_5);\n  __pyx_t_7 = __Pyx_PyInt_As_siz(__pyx_t_5); if (unlikely((__pyx_t_7 == ((siz)-1)) && PyErr_Occurred())) __PYX_ERR(0, 216, __pyx_L1_error)\n  __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;\n  __pyx_v_m = __pyx_t_7;\n\n  /* \"pysobatools/_mask.pyx\":217\n *     gt = _preproc(gt)\n *     m = _len(dt)\n *     n = _len(gt)             # <<<<<<<<<<<<<<\n *     if m == 0 or n == 0:\n *         return []\n */\n  __pyx_t_5 = __pyx_pf_11pysobatools_5_mask_3iou_6_len(__pyx_v__len, __pyx_v_gt); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 217, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_5);\n  __pyx_t_7 = __Pyx_PyInt_As_siz(__pyx_t_5); if (unlikely((__pyx_t_7 == ((siz)-1)) && PyErr_Occurred())) __PYX_ERR(0, 217, 
__pyx_L1_error)\n  __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;\n  __pyx_v_n = __pyx_t_7;\n\n  /* \"pysobatools/_mask.pyx\":218\n *     m = _len(dt)\n *     n = _len(gt)\n *     if m == 0 or n == 0:             # <<<<<<<<<<<<<<\n *         return []\n *     if not type(dt) == type(gt):\n */\n  __pyx_t_9 = ((__pyx_v_m == 0) != 0);\n  if (!__pyx_t_9) {\n  } else {\n    __pyx_t_8 = __pyx_t_9;\n    goto __pyx_L4_bool_binop_done;\n  }\n  __pyx_t_9 = ((__pyx_v_n == 0) != 0);\n  __pyx_t_8 = __pyx_t_9;\n  __pyx_L4_bool_binop_done:;\n  if (__pyx_t_8) {\n\n    /* \"pysobatools/_mask.pyx\":219\n *     n = _len(gt)\n *     if m == 0 or n == 0:\n *         return []             # <<<<<<<<<<<<<<\n *     if not type(dt) == type(gt):\n *         raise Exception('The dt and gt should have the same data type, either RLEs, list or np.ndarray')\n */\n    __Pyx_XDECREF(__pyx_r);\n    __pyx_t_5 = PyList_New(0); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 219, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_5);\n    __pyx_r = __pyx_t_5;\n    __pyx_t_5 = 0;\n    goto __pyx_L0;\n\n    /* \"pysobatools/_mask.pyx\":218\n *     m = _len(dt)\n *     n = _len(gt)\n *     if m == 0 or n == 0:             # <<<<<<<<<<<<<<\n *         return []\n *     if not type(dt) == type(gt):\n */\n  }\n\n  /* \"pysobatools/_mask.pyx\":220\n *     if m == 0 or n == 0:\n *         return []\n *     if not type(dt) == type(gt):             # <<<<<<<<<<<<<<\n *         raise Exception('The dt and gt should have the same data type, either RLEs, list or np.ndarray')\n * \n */\n  __pyx_t_5 = PyObject_RichCompare(((PyObject *)Py_TYPE(__pyx_v_dt)), ((PyObject *)Py_TYPE(__pyx_v_gt)), Py_EQ); __Pyx_XGOTREF(__pyx_t_5); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 220, __pyx_L1_error)\n  __pyx_t_8 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_8 < 0)) __PYX_ERR(0, 220, __pyx_L1_error)\n  __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;\n  __pyx_t_9 = ((!__pyx_t_8) != 0);\n  if (unlikely(__pyx_t_9)) {\n\n    /* \"pysobatools/_mask.pyx\":221\n 
*         return []\n *     if not type(dt) == type(gt):\n *         raise Exception('The dt and gt should have the same data type, either RLEs, list or np.ndarray')             # <<<<<<<<<<<<<<\n * \n *     # define local variables\n */\n    __pyx_t_5 = __Pyx_PyObject_Call(((PyObject *)(&((PyTypeObject*)PyExc_Exception)[0])), __pyx_tuple__18, NULL); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 221, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_5);\n    __Pyx_Raise(__pyx_t_5, 0, 0, 0);\n    __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;\n    __PYX_ERR(0, 221, __pyx_L1_error)\n\n    /* \"pysobatools/_mask.pyx\":220\n *     if m == 0 or n == 0:\n *         return []\n *     if not type(dt) == type(gt):             # <<<<<<<<<<<<<<\n *         raise Exception('The dt and gt should have the same data type, either RLEs, list or np.ndarray')\n * \n */\n  }\n\n  /* \"pysobatools/_mask.pyx\":224\n * \n *     # define local variables\n *     cdef double* _iou = <double*> 0             # <<<<<<<<<<<<<<\n *     cdef np.npy_intp shape[1]\n *     # check type and assign iou function\n */\n  __pyx_v__iou = ((double *)0);\n\n  /* \"pysobatools/_mask.pyx\":227\n *     cdef np.npy_intp shape[1]\n *     # check type and assign iou function\n *     if type(dt) == RLEs:             # <<<<<<<<<<<<<<\n *         _iouFun = _rleIou\n *     elif type(dt) == np.ndarray:\n */\n  __pyx_t_5 = PyObject_RichCompare(((PyObject *)Py_TYPE(__pyx_v_dt)), ((PyObject *)__pyx_ptype_11pysobatools_5_mask_RLEs), Py_EQ); __Pyx_XGOTREF(__pyx_t_5); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 227, __pyx_L1_error)\n  __pyx_t_9 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 227, __pyx_L1_error)\n  __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;\n  if (__pyx_t_9) {\n\n    /* \"pysobatools/_mask.pyx\":228\n *     # check type and assign iou function\n *     if type(dt) == RLEs:\n *         _iouFun = _rleIou             # <<<<<<<<<<<<<<\n *     elif type(dt) == np.ndarray:\n *         _iouFun = _bbIou\n */\n    
__Pyx_INCREF(__pyx_v__rleIou);\n    __pyx_v__iouFun = __pyx_v__rleIou;\n\n    /* \"pysobatools/_mask.pyx\":227\n *     cdef np.npy_intp shape[1]\n *     # check type and assign iou function\n *     if type(dt) == RLEs:             # <<<<<<<<<<<<<<\n *         _iouFun = _rleIou\n *     elif type(dt) == np.ndarray:\n */\n    goto __pyx_L7;\n  }\n\n  /* \"pysobatools/_mask.pyx\":229\n *     if type(dt) == RLEs:\n *         _iouFun = _rleIou\n *     elif type(dt) == np.ndarray:             # <<<<<<<<<<<<<<\n *         _iouFun = _bbIou\n *     else:\n */\n  __pyx_t_5 = PyObject_RichCompare(((PyObject *)Py_TYPE(__pyx_v_dt)), ((PyObject *)__pyx_ptype_5numpy_ndarray), Py_EQ); __Pyx_XGOTREF(__pyx_t_5); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 229, __pyx_L1_error)\n  __pyx_t_9 = __Pyx_PyObject_IsTrue(__pyx_t_5); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 229, __pyx_L1_error)\n  __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;\n  if (likely(__pyx_t_9)) {\n\n    /* \"pysobatools/_mask.pyx\":230\n *         _iouFun = _rleIou\n *     elif type(dt) == np.ndarray:\n *         _iouFun = _bbIou             # <<<<<<<<<<<<<<\n *     else:\n *         raise Exception('input data type not allowed.')\n */\n    __Pyx_INCREF(__pyx_v__bbIou);\n    __pyx_v__iouFun = __pyx_v__bbIou;\n\n    /* \"pysobatools/_mask.pyx\":229\n *     if type(dt) == RLEs:\n *         _iouFun = _rleIou\n *     elif type(dt) == np.ndarray:             # <<<<<<<<<<<<<<\n *         _iouFun = _bbIou\n *     else:\n */\n    goto __pyx_L7;\n  }\n\n  /* \"pysobatools/_mask.pyx\":232\n *         _iouFun = _bbIou\n *     else:\n *         raise Exception('input data type not allowed.')             # <<<<<<<<<<<<<<\n *     _iou = <double*> malloc(m*n* sizeof(double))\n *     iou = np.zeros((m*n, ), dtype=np.double)\n */\n  /*else*/ {\n    __pyx_t_5 = __Pyx_PyObject_Call(((PyObject *)(&((PyTypeObject*)PyExc_Exception)[0])), __pyx_tuple__19, NULL); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 232, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_5);\n 
   __Pyx_Raise(__pyx_t_5, 0, 0, 0);\n    __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;\n    __PYX_ERR(0, 232, __pyx_L1_error)\n  }\n  __pyx_L7:;\n\n  /* \"pysobatools/_mask.pyx\":233\n *     else:\n *         raise Exception('input data type not allowed.')\n *     _iou = <double*> malloc(m*n* sizeof(double))             # <<<<<<<<<<<<<<\n *     iou = np.zeros((m*n, ), dtype=np.double)\n *     shape[0] = <np.npy_intp> m*n\n */\n  __pyx_v__iou = ((double *)malloc(((__pyx_v_m * __pyx_v_n) * (sizeof(double)))));\n\n  /* \"pysobatools/_mask.pyx\":234\n *         raise Exception('input data type not allowed.')\n *     _iou = <double*> malloc(m*n* sizeof(double))\n *     iou = np.zeros((m*n, ), dtype=np.double)             # <<<<<<<<<<<<<<\n *     shape[0] = <np.npy_intp> m*n\n *     iou = np.PyArray_SimpleNewFromData(1, shape, np.NPY_DOUBLE, _iou)\n */\n  __Pyx_GetModuleGlobalName(__pyx_t_5, __pyx_n_s_np); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 234, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_5);\n  __pyx_t_3 = __Pyx_PyObject_GetAttrStr(__pyx_t_5, __pyx_n_s_zeros); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 234, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_3);\n  __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;\n  __pyx_t_5 = __Pyx_PyInt_From_siz((__pyx_v_m * __pyx_v_n)); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 234, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_5);\n  __pyx_t_1 = PyTuple_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 234, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_1);\n  __Pyx_GIVEREF(__pyx_t_5);\n  PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_t_5);\n  __pyx_t_5 = 0;\n  __pyx_t_5 = PyTuple_New(1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 234, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_5);\n  __Pyx_GIVEREF(__pyx_t_1);\n  PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_t_1);\n  __pyx_t_1 = 0;\n  __pyx_t_1 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 234, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_1);\n  __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_np); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 
234, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_2);\n  __pyx_t_4 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_double); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 234, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_4);\n  __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;\n  if (PyDict_SetItem(__pyx_t_1, __pyx_n_s_dtype, __pyx_t_4) < 0) __PYX_ERR(0, 234, __pyx_L1_error)\n  __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;\n  __pyx_t_4 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_5, __pyx_t_1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 234, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_4);\n  __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;\n  __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;\n  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;\n  __pyx_v_iou = __pyx_t_4;\n  __pyx_t_4 = 0;\n\n  /* \"pysobatools/_mask.pyx\":235\n *     _iou = <double*> malloc(m*n* sizeof(double))\n *     iou = np.zeros((m*n, ), dtype=np.double)\n *     shape[0] = <np.npy_intp> m*n             # <<<<<<<<<<<<<<\n *     iou = np.PyArray_SimpleNewFromData(1, shape, np.NPY_DOUBLE, _iou)\n *     PyArray_ENABLEFLAGS(iou, np.NPY_OWNDATA)\n */\n  (__pyx_v_shape[0]) = (((npy_intp)__pyx_v_m) * __pyx_v_n);\n\n  /* \"pysobatools/_mask.pyx\":236\n *     iou = np.zeros((m*n, ), dtype=np.double)\n *     shape[0] = <np.npy_intp> m*n\n *     iou = np.PyArray_SimpleNewFromData(1, shape, np.NPY_DOUBLE, _iou)             # <<<<<<<<<<<<<<\n *     PyArray_ENABLEFLAGS(iou, np.NPY_OWNDATA)\n *     _iouFun(dt, gt, iscrowd, m, n, iou)\n */\n  __pyx_t_4 = PyArray_SimpleNewFromData(1, __pyx_v_shape, NPY_DOUBLE, __pyx_v__iou); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 236, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_4);\n  __Pyx_DECREF_SET(__pyx_v_iou, __pyx_t_4);\n  __pyx_t_4 = 0;\n\n  /* \"pysobatools/_mask.pyx\":237\n *     shape[0] = <np.npy_intp> m*n\n *     iou = np.PyArray_SimpleNewFromData(1, shape, np.NPY_DOUBLE, _iou)\n *     PyArray_ENABLEFLAGS(iou, np.NPY_OWNDATA)             # <<<<<<<<<<<<<<\n *     _iouFun(dt, gt, iscrowd, m, n, iou)\n *     return iou.reshape((m,n), 
order='F')\n */\n  if (!(likely(((__pyx_v_iou) == Py_None) || likely(__Pyx_TypeTest(__pyx_v_iou, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 237, __pyx_L1_error)\n  PyArray_ENABLEFLAGS(((PyArrayObject *)__pyx_v_iou), NPY_OWNDATA);\n\n  /* \"pysobatools/_mask.pyx\":238\n *     iou = np.PyArray_SimpleNewFromData(1, shape, np.NPY_DOUBLE, _iou)\n *     PyArray_ENABLEFLAGS(iou, np.NPY_OWNDATA)\n *     _iouFun(dt, gt, iscrowd, m, n, iou)             # <<<<<<<<<<<<<<\n *     return iou.reshape((m,n), order='F')\n * \n */\n  __pyx_t_1 = __Pyx_PyInt_From_siz(__pyx_v_m); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 238, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_1);\n  __pyx_t_5 = __Pyx_PyInt_From_siz(__pyx_v_n); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 238, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_5);\n  __Pyx_INCREF(__pyx_v__iouFun);\n  __pyx_t_3 = __pyx_v__iouFun; __pyx_t_2 = NULL;\n  __pyx_t_10 = 0;\n  if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) {\n    __pyx_t_2 = PyMethod_GET_SELF(__pyx_t_3);\n    if (likely(__pyx_t_2)) {\n      PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3);\n      __Pyx_INCREF(__pyx_t_2);\n      __Pyx_INCREF(function);\n      __Pyx_DECREF_SET(__pyx_t_3, function);\n      __pyx_t_10 = 1;\n    }\n  }\n  #if CYTHON_FAST_PYCALL\n  if (PyFunction_Check(__pyx_t_3)) {\n    PyObject *__pyx_temp[7] = {__pyx_t_2, __pyx_v_dt, __pyx_v_gt, ((PyObject *)__pyx_v_iscrowd), __pyx_t_1, __pyx_t_5, __pyx_v_iou};\n    __pyx_t_4 = __Pyx_PyFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_10, 6+__pyx_t_10); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 238, __pyx_L1_error)\n    __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0;\n    __Pyx_GOTREF(__pyx_t_4);\n    __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;\n    __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;\n  } else\n  #endif\n  #if CYTHON_FAST_PYCCALL\n  if (__Pyx_PyFastCFunction_Check(__pyx_t_3)) {\n    PyObject *__pyx_temp[7] = {__pyx_t_2, __pyx_v_dt, __pyx_v_gt, ((PyObject *)__pyx_v_iscrowd), __pyx_t_1, __pyx_t_5, __pyx_v_iou};\n 
   __pyx_t_4 = __Pyx_PyCFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_10, 6+__pyx_t_10); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 238, __pyx_L1_error)\n    __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0;\n    __Pyx_GOTREF(__pyx_t_4);\n    __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;\n    __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;\n  } else\n  #endif\n  {\n    __pyx_t_11 = PyTuple_New(6+__pyx_t_10); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 238, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_11);\n    if (__pyx_t_2) {\n      __Pyx_GIVEREF(__pyx_t_2); PyTuple_SET_ITEM(__pyx_t_11, 0, __pyx_t_2); __pyx_t_2 = NULL;\n    }\n    __Pyx_INCREF(__pyx_v_dt);\n    __Pyx_GIVEREF(__pyx_v_dt);\n    PyTuple_SET_ITEM(__pyx_t_11, 0+__pyx_t_10, __pyx_v_dt);\n    __Pyx_INCREF(__pyx_v_gt);\n    __Pyx_GIVEREF(__pyx_v_gt);\n    PyTuple_SET_ITEM(__pyx_t_11, 1+__pyx_t_10, __pyx_v_gt);\n    __Pyx_INCREF(((PyObject *)__pyx_v_iscrowd));\n    __Pyx_GIVEREF(((PyObject *)__pyx_v_iscrowd));\n    PyTuple_SET_ITEM(__pyx_t_11, 2+__pyx_t_10, ((PyObject *)__pyx_v_iscrowd));\n    __Pyx_GIVEREF(__pyx_t_1);\n    PyTuple_SET_ITEM(__pyx_t_11, 3+__pyx_t_10, __pyx_t_1);\n    __Pyx_GIVEREF(__pyx_t_5);\n    PyTuple_SET_ITEM(__pyx_t_11, 4+__pyx_t_10, __pyx_t_5);\n    __Pyx_INCREF(__pyx_v_iou);\n    __Pyx_GIVEREF(__pyx_v_iou);\n    PyTuple_SET_ITEM(__pyx_t_11, 5+__pyx_t_10, __pyx_v_iou);\n    __pyx_t_1 = 0;\n    __pyx_t_5 = 0;\n    __pyx_t_4 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_11, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 238, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_4);\n    __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0;\n  }\n  __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;\n  __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;\n\n  /* \"pysobatools/_mask.pyx\":239\n *     PyArray_ENABLEFLAGS(iou, np.NPY_OWNDATA)\n *     _iouFun(dt, gt, iscrowd, m, n, iou)\n *     return iou.reshape((m,n), order='F')             # <<<<<<<<<<<<<<\n * \n * def toBbox( rleObjs ):\n */\n  __Pyx_XDECREF(__pyx_r);\n  __pyx_t_4 = 
__Pyx_PyObject_GetAttrStr(__pyx_v_iou, __pyx_n_s_reshape); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 239, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_4);\n  __pyx_t_3 = __Pyx_PyInt_From_siz(__pyx_v_m); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 239, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_3);\n  __pyx_t_11 = __Pyx_PyInt_From_siz(__pyx_v_n); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 239, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_11);\n  __pyx_t_5 = PyTuple_New(2); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 239, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_5);\n  __Pyx_GIVEREF(__pyx_t_3);\n  PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_t_3);\n  __Pyx_GIVEREF(__pyx_t_11);\n  PyTuple_SET_ITEM(__pyx_t_5, 1, __pyx_t_11);\n  __pyx_t_3 = 0;\n  __pyx_t_11 = 0;\n  __pyx_t_11 = PyTuple_New(1); if (unlikely(!__pyx_t_11)) __PYX_ERR(0, 239, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_11);\n  __Pyx_GIVEREF(__pyx_t_5);\n  PyTuple_SET_ITEM(__pyx_t_11, 0, __pyx_t_5);\n  __pyx_t_5 = 0;\n  __pyx_t_5 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 239, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_5);\n  if (PyDict_SetItem(__pyx_t_5, __pyx_n_s_order, __pyx_n_s_F) < 0) __PYX_ERR(0, 239, __pyx_L1_error)\n  __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_4, __pyx_t_11, __pyx_t_5); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 239, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_3);\n  __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;\n  __Pyx_DECREF(__pyx_t_11); __pyx_t_11 = 0;\n  __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;\n  __pyx_r = __pyx_t_3;\n  __pyx_t_3 = 0;\n  goto __pyx_L0;\n\n  /* \"pysobatools/_mask.pyx\":171\n * \n * # iou computation. 
support function overload (RLEs-RLEs and bbox-bbox).\n * def iou( dt, gt, pyiscrowd ):             # <<<<<<<<<<<<<<\n *     def _preproc(objs):\n *         if len(objs) == 0:\n */\n\n  /* function exit code */\n  __pyx_L1_error:;\n  __Pyx_XDECREF(__pyx_t_1);\n  __Pyx_XDECREF(__pyx_t_2);\n  __Pyx_XDECREF(__pyx_t_3);\n  __Pyx_XDECREF(__pyx_t_4);\n  __Pyx_XDECREF(__pyx_t_5);\n  __Pyx_XDECREF(__pyx_t_11);\n  { PyObject *__pyx_type, *__pyx_value, *__pyx_tb;\n    __Pyx_PyThreadState_declare\n    __Pyx_PyThreadState_assign\n    __Pyx_ErrFetch(&__pyx_type, &__pyx_value, &__pyx_tb);\n    __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_iscrowd.rcbuffer->pybuffer);\n  __Pyx_ErrRestore(__pyx_type, __pyx_value, __pyx_tb);}\n  __Pyx_AddTraceback(\"pysobatools._mask.iou\", __pyx_clineno, __pyx_lineno, __pyx_filename);\n  __pyx_r = NULL;\n  goto __pyx_L2;\n  __pyx_L0:;\n  __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_iscrowd.rcbuffer->pybuffer);\n  __pyx_L2:;\n  __Pyx_XDECREF(__pyx_v__preproc);\n  __Pyx_XDECREF(__pyx_v__rleIou);\n  __Pyx_XDECREF(__pyx_v__bbIou);\n  __Pyx_XDECREF(__pyx_v__len);\n  __Pyx_XDECREF((PyObject *)__pyx_v_iscrowd);\n  __Pyx_XDECREF(__pyx_v__iouFun);\n  __Pyx_XDECREF(__pyx_v_iou);\n  __Pyx_XDECREF(__pyx_v_dt);\n  __Pyx_XDECREF(__pyx_v_gt);\n  __Pyx_XGIVEREF(__pyx_r);\n  __Pyx_RefNannyFinishContext();\n  return __pyx_r;\n}\n\n/* \"pysobatools/_mask.pyx\":241\n *     return iou.reshape((m,n), order='F')\n * \n * def toBbox( rleObjs ):             # <<<<<<<<<<<<<<\n *     cdef RLEs Rs = _frString(rleObjs)\n *     cdef siz n = Rs.n\n */\n\n/* Python wrapper */\nstatic PyObject *__pyx_pw_11pysobatools_5_mask_15toBbox(PyObject *__pyx_self, PyObject *__pyx_v_rleObjs); /*proto*/\nstatic PyMethodDef __pyx_mdef_11pysobatools_5_mask_15toBbox = {\"toBbox\", (PyCFunction)__pyx_pw_11pysobatools_5_mask_15toBbox, METH_O, 0};\nstatic PyObject *__pyx_pw_11pysobatools_5_mask_15toBbox(PyObject *__pyx_self, PyObject *__pyx_v_rleObjs) {\n  PyObject *__pyx_r = 0;\n  
__Pyx_RefNannyDeclarations\n  __Pyx_RefNannySetupContext(\"toBbox (wrapper)\", 0);\n  __pyx_r = __pyx_pf_11pysobatools_5_mask_14toBbox(__pyx_self, ((PyObject *)__pyx_v_rleObjs));\n\n  /* function exit code */\n  __Pyx_RefNannyFinishContext();\n  return __pyx_r;\n}\n\nstatic PyObject *__pyx_pf_11pysobatools_5_mask_14toBbox(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_rleObjs) {\n  struct __pyx_obj_11pysobatools_5_mask_RLEs *__pyx_v_Rs = 0;\n  siz __pyx_v_n;\n  BB __pyx_v__bb;\n  npy_intp __pyx_v_shape[1];\n  PyObject *__pyx_v_bb = NULL;\n  PyObject *__pyx_r = NULL;\n  __Pyx_RefNannyDeclarations\n  PyObject *__pyx_t_1 = NULL;\n  PyObject *__pyx_t_2 = NULL;\n  PyObject *__pyx_t_3 = NULL;\n  siz __pyx_t_4;\n  PyObject *__pyx_t_5 = NULL;\n  PyObject *__pyx_t_6 = NULL;\n  __Pyx_RefNannySetupContext(\"toBbox\", 0);\n\n  /* \"pysobatools/_mask.pyx\":242\n * \n * def toBbox( rleObjs ):\n *     cdef RLEs Rs = _frString(rleObjs)             # <<<<<<<<<<<<<<\n *     cdef siz n = Rs.n\n *     cdef BB _bb = <BB> malloc(4*n* sizeof(double))\n */\n  __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_frString); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 242, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_2);\n  __pyx_t_3 = NULL;\n  if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_2))) {\n    __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_2);\n    if (likely(__pyx_t_3)) {\n      PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2);\n      __Pyx_INCREF(__pyx_t_3);\n      __Pyx_INCREF(function);\n      __Pyx_DECREF_SET(__pyx_t_2, function);\n    }\n  }\n  __pyx_t_1 = (__pyx_t_3) ? 
__Pyx_PyObject_Call2Args(__pyx_t_2, __pyx_t_3, __pyx_v_rleObjs) : __Pyx_PyObject_CallOneArg(__pyx_t_2, __pyx_v_rleObjs);\n  __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0;\n  if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 242, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_1);\n  __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;\n  if (!(likely(((__pyx_t_1) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_1, __pyx_ptype_11pysobatools_5_mask_RLEs))))) __PYX_ERR(0, 242, __pyx_L1_error)\n  __pyx_v_Rs = ((struct __pyx_obj_11pysobatools_5_mask_RLEs *)__pyx_t_1);\n  __pyx_t_1 = 0;\n\n  /* \"pysobatools/_mask.pyx\":243\n * def toBbox( rleObjs ):\n *     cdef RLEs Rs = _frString(rleObjs)\n *     cdef siz n = Rs.n             # <<<<<<<<<<<<<<\n *     cdef BB _bb = <BB> malloc(4*n* sizeof(double))\n *     rleToBbox( <const RLE*> Rs._R, _bb, n )\n */\n  __pyx_t_1 = __Pyx_PyObject_GetAttrStr(((PyObject *)__pyx_v_Rs), __pyx_n_s_n); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 243, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_1);\n  __pyx_t_4 = __Pyx_PyInt_As_siz(__pyx_t_1); if (unlikely((__pyx_t_4 == ((siz)-1)) && PyErr_Occurred())) __PYX_ERR(0, 243, __pyx_L1_error)\n  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;\n  __pyx_v_n = __pyx_t_4;\n\n  /* \"pysobatools/_mask.pyx\":244\n *     cdef RLEs Rs = _frString(rleObjs)\n *     cdef siz n = Rs.n\n *     cdef BB _bb = <BB> malloc(4*n* sizeof(double))             # <<<<<<<<<<<<<<\n *     rleToBbox( <const RLE*> Rs._R, _bb, n )\n *     cdef np.npy_intp shape[1]\n */\n  __pyx_v__bb = ((BB)malloc(((4 * __pyx_v_n) * (sizeof(double)))));\n\n  /* \"pysobatools/_mask.pyx\":245\n *     cdef siz n = Rs.n\n *     cdef BB _bb = <BB> malloc(4*n* sizeof(double))\n *     rleToBbox( <const RLE*> Rs._R, _bb, n )             # <<<<<<<<<<<<<<\n *     cdef np.npy_intp shape[1]\n *     shape[0] = <np.npy_intp> 4*n\n */\n  rleToBbox(((RLE const *)__pyx_v_Rs->_R), __pyx_v__bb, __pyx_v_n);\n\n  /* \"pysobatools/_mask.pyx\":247\n *     rleToBbox( <const RLE*> Rs._R, _bb, n )\n *     cdef np.npy_intp 
shape[1]\n *     shape[0] = <np.npy_intp> 4*n             # <<<<<<<<<<<<<<\n *     bb = np.array((1,4*n), dtype=np.double)\n *     bb = np.PyArray_SimpleNewFromData(1, shape, np.NPY_DOUBLE, _bb).reshape((n, 4))\n */\n  (__pyx_v_shape[0]) = (((npy_intp)4) * __pyx_v_n);\n\n  /* \"pysobatools/_mask.pyx\":248\n *     cdef np.npy_intp shape[1]\n *     shape[0] = <np.npy_intp> 4*n\n *     bb = np.array((1,4*n), dtype=np.double)             # <<<<<<<<<<<<<<\n *     bb = np.PyArray_SimpleNewFromData(1, shape, np.NPY_DOUBLE, _bb).reshape((n, 4))\n *     PyArray_ENABLEFLAGS(bb, np.NPY_OWNDATA)\n */\n  __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_np); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 248, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_1);\n  __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_array); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 248, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_2);\n  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;\n  __pyx_t_1 = __Pyx_PyInt_From_siz((4 * __pyx_v_n)); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 248, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_1);\n  __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 248, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_3);\n  __Pyx_INCREF(__pyx_int_1);\n  __Pyx_GIVEREF(__pyx_int_1);\n  PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_int_1);\n  __Pyx_GIVEREF(__pyx_t_1);\n  PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_t_1);\n  __pyx_t_1 = 0;\n  __pyx_t_1 = PyTuple_New(1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 248, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_1);\n  __Pyx_GIVEREF(__pyx_t_3);\n  PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_t_3);\n  __pyx_t_3 = 0;\n  __pyx_t_3 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 248, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_3);\n  __Pyx_GetModuleGlobalName(__pyx_t_5, __pyx_n_s_np); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 248, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_5);\n  __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_t_5, __pyx_n_s_double); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 
248, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_6);\n  __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;\n  if (PyDict_SetItem(__pyx_t_3, __pyx_n_s_dtype, __pyx_t_6) < 0) __PYX_ERR(0, 248, __pyx_L1_error)\n  __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;\n  __pyx_t_6 = __Pyx_PyObject_Call(__pyx_t_2, __pyx_t_1, __pyx_t_3); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 248, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_6);\n  __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;\n  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;\n  __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;\n  __pyx_v_bb = __pyx_t_6;\n  __pyx_t_6 = 0;\n\n  /* \"pysobatools/_mask.pyx\":249\n *     shape[0] = <np.npy_intp> 4*n\n *     bb = np.array((1,4*n), dtype=np.double)\n *     bb = np.PyArray_SimpleNewFromData(1, shape, np.NPY_DOUBLE, _bb).reshape((n, 4))             # <<<<<<<<<<<<<<\n *     PyArray_ENABLEFLAGS(bb, np.NPY_OWNDATA)\n *     return bb\n */\n  __pyx_t_3 = PyArray_SimpleNewFromData(1, __pyx_v_shape, NPY_DOUBLE, __pyx_v__bb); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 249, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_3);\n  __pyx_t_1 = __Pyx_PyObject_GetAttrStr(__pyx_t_3, __pyx_n_s_reshape); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 249, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_1);\n  __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;\n  __pyx_t_3 = __Pyx_PyInt_From_siz(__pyx_v_n); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 249, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_3);\n  __pyx_t_2 = PyTuple_New(2); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 249, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_2);\n  __Pyx_GIVEREF(__pyx_t_3);\n  PyTuple_SET_ITEM(__pyx_t_2, 0, __pyx_t_3);\n  __Pyx_INCREF(__pyx_int_4);\n  __Pyx_GIVEREF(__pyx_int_4);\n  PyTuple_SET_ITEM(__pyx_t_2, 1, __pyx_int_4);\n  __pyx_t_3 = 0;\n  __pyx_t_3 = NULL;\n  if (CYTHON_UNPACK_METHODS && likely(PyMethod_Check(__pyx_t_1))) {\n    __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_1);\n    if (likely(__pyx_t_3)) {\n      PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1);\n      __Pyx_INCREF(__pyx_t_3);\n      
__Pyx_INCREF(function);\n      __Pyx_DECREF_SET(__pyx_t_1, function);\n    }\n  }\n  __pyx_t_6 = (__pyx_t_3) ? __Pyx_PyObject_Call2Args(__pyx_t_1, __pyx_t_3, __pyx_t_2) : __Pyx_PyObject_CallOneArg(__pyx_t_1, __pyx_t_2);\n  __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0;\n  __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;\n  if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 249, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_6);\n  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;\n  __Pyx_DECREF_SET(__pyx_v_bb, __pyx_t_6);\n  __pyx_t_6 = 0;\n\n  /* \"pysobatools/_mask.pyx\":250\n *     bb = np.array((1,4*n), dtype=np.double)\n *     bb = np.PyArray_SimpleNewFromData(1, shape, np.NPY_DOUBLE, _bb).reshape((n, 4))\n *     PyArray_ENABLEFLAGS(bb, np.NPY_OWNDATA)             # <<<<<<<<<<<<<<\n *     return bb\n * \n */\n  if (!(likely(((__pyx_v_bb) == Py_None) || likely(__Pyx_TypeTest(__pyx_v_bb, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 250, __pyx_L1_error)\n  PyArray_ENABLEFLAGS(((PyArrayObject *)__pyx_v_bb), NPY_OWNDATA);\n\n  /* \"pysobatools/_mask.pyx\":251\n *     bb = np.PyArray_SimpleNewFromData(1, shape, np.NPY_DOUBLE, _bb).reshape((n, 4))\n *     PyArray_ENABLEFLAGS(bb, np.NPY_OWNDATA)\n *     return bb             # <<<<<<<<<<<<<<\n * \n * def frBbox(np.ndarray[np.double_t, ndim=2] bb, siz h, siz w ):\n */\n  __Pyx_XDECREF(__pyx_r);\n  __Pyx_INCREF(__pyx_v_bb);\n  __pyx_r = __pyx_v_bb;\n  goto __pyx_L0;\n\n  /* \"pysobatools/_mask.pyx\":241\n *     return iou.reshape((m,n), order='F')\n * \n * def toBbox( rleObjs ):             # <<<<<<<<<<<<<<\n *     cdef RLEs Rs = _frString(rleObjs)\n *     cdef siz n = Rs.n\n */\n\n  /* function exit code */\n  __pyx_L1_error:;\n  __Pyx_XDECREF(__pyx_t_1);\n  __Pyx_XDECREF(__pyx_t_2);\n  __Pyx_XDECREF(__pyx_t_3);\n  __Pyx_XDECREF(__pyx_t_5);\n  __Pyx_XDECREF(__pyx_t_6);\n  __Pyx_AddTraceback(\"pysobatools._mask.toBbox\", __pyx_clineno, __pyx_lineno, __pyx_filename);\n  __pyx_r = NULL;\n  __pyx_L0:;\n  __Pyx_XDECREF((PyObject *)__pyx_v_Rs);\n  
__Pyx_XDECREF(__pyx_v_bb);\n  __Pyx_XGIVEREF(__pyx_r);\n  __Pyx_RefNannyFinishContext();\n  return __pyx_r;\n}\n\n/* \"pysobatools/_mask.pyx\":253\n *     return bb\n * \n * def frBbox(np.ndarray[np.double_t, ndim=2] bb, siz h, siz w ):             # <<<<<<<<<<<<<<\n *     cdef siz n = bb.shape[0]\n *     Rs = RLEs(n)\n */\n\n/* Python wrapper */\nstatic PyObject *__pyx_pw_11pysobatools_5_mask_17frBbox(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/\nstatic PyMethodDef __pyx_mdef_11pysobatools_5_mask_17frBbox = {\"frBbox\", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_11pysobatools_5_mask_17frBbox, METH_VARARGS|METH_KEYWORDS, 0};\nstatic PyObject *__pyx_pw_11pysobatools_5_mask_17frBbox(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) {\n  PyArrayObject *__pyx_v_bb = 0;\n  siz __pyx_v_h;\n  siz __pyx_v_w;\n  PyObject *__pyx_r = 0;\n  __Pyx_RefNannyDeclarations\n  __Pyx_RefNannySetupContext(\"frBbox (wrapper)\", 0);\n  {\n    static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_bb,&__pyx_n_s_h,&__pyx_n_s_w,0};\n    PyObject* values[3] = {0,0,0};\n    if (unlikely(__pyx_kwds)) {\n      Py_ssize_t kw_args;\n      const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args);\n      switch (pos_args) {\n        case  3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2);\n        CYTHON_FALLTHROUGH;\n        case  2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1);\n        CYTHON_FALLTHROUGH;\n        case  1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0);\n        CYTHON_FALLTHROUGH;\n        case  0: break;\n        default: goto __pyx_L5_argtuple_error;\n      }\n      kw_args = PyDict_Size(__pyx_kwds);\n      switch (pos_args) {\n        case  0:\n        if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_bb)) != 0)) kw_args--;\n        else goto __pyx_L5_argtuple_error;\n        CYTHON_FALLTHROUGH;\n        case  1:\n        if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_h)) != 0)) 
kw_args--;\n        else {\n          __Pyx_RaiseArgtupleInvalid(\"frBbox\", 1, 3, 3, 1); __PYX_ERR(0, 253, __pyx_L3_error)\n        }\n        CYTHON_FALLTHROUGH;\n        case  2:\n        if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_w)) != 0)) kw_args--;\n        else {\n          __Pyx_RaiseArgtupleInvalid(\"frBbox\", 1, 3, 3, 2); __PYX_ERR(0, 253, __pyx_L3_error)\n        }\n      }\n      if (unlikely(kw_args > 0)) {\n        if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, \"frBbox\") < 0)) __PYX_ERR(0, 253, __pyx_L3_error)\n      }\n    } else if (PyTuple_GET_SIZE(__pyx_args) != 3) {\n      goto __pyx_L5_argtuple_error;\n    } else {\n      values[0] = PyTuple_GET_ITEM(__pyx_args, 0);\n      values[1] = PyTuple_GET_ITEM(__pyx_args, 1);\n      values[2] = PyTuple_GET_ITEM(__pyx_args, 2);\n    }\n    __pyx_v_bb = ((PyArrayObject *)values[0]);\n    __pyx_v_h = __Pyx_PyInt_As_siz(values[1]); if (unlikely((__pyx_v_h == ((siz)-1)) && PyErr_Occurred())) __PYX_ERR(0, 253, __pyx_L3_error)\n    __pyx_v_w = __Pyx_PyInt_As_siz(values[2]); if (unlikely((__pyx_v_w == ((siz)-1)) && PyErr_Occurred())) __PYX_ERR(0, 253, __pyx_L3_error)\n  }\n  goto __pyx_L4_argument_unpacking_done;\n  __pyx_L5_argtuple_error:;\n  __Pyx_RaiseArgtupleInvalid(\"frBbox\", 1, 3, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 253, __pyx_L3_error)\n  __pyx_L3_error:;\n  __Pyx_AddTraceback(\"pysobatools._mask.frBbox\", __pyx_clineno, __pyx_lineno, __pyx_filename);\n  __Pyx_RefNannyFinishContext();\n  return NULL;\n  __pyx_L4_argument_unpacking_done:;\n  if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_bb), __pyx_ptype_5numpy_ndarray, 1, \"bb\", 0))) __PYX_ERR(0, 253, __pyx_L1_error)\n  __pyx_r = __pyx_pf_11pysobatools_5_mask_16frBbox(__pyx_self, __pyx_v_bb, __pyx_v_h, __pyx_v_w);\n\n  /* function exit code */\n  goto __pyx_L0;\n  __pyx_L1_error:;\n  __pyx_r = NULL;\n  __pyx_L0:;\n  __Pyx_RefNannyFinishContext();\n  return 
__pyx_r;\n}\n\nstatic PyObject *__pyx_pf_11pysobatools_5_mask_16frBbox(CYTHON_UNUSED PyObject *__pyx_self, PyArrayObject *__pyx_v_bb, siz __pyx_v_h, siz __pyx_v_w) {\n  siz __pyx_v_n;\n  struct __pyx_obj_11pysobatools_5_mask_RLEs *__pyx_v_Rs = NULL;\n  PyObject *__pyx_v_objs = NULL;\n  __Pyx_LocalBuf_ND __pyx_pybuffernd_bb;\n  __Pyx_Buffer __pyx_pybuffer_bb;\n  PyObject *__pyx_r = NULL;\n  __Pyx_RefNannyDeclarations\n  PyObject *__pyx_t_1 = NULL;\n  PyObject *__pyx_t_2 = NULL;\n  PyObject *__pyx_t_3 = NULL;\n  __Pyx_RefNannySetupContext(\"frBbox\", 0);\n  __pyx_pybuffer_bb.pybuffer.buf = NULL;\n  __pyx_pybuffer_bb.refcount = 0;\n  __pyx_pybuffernd_bb.data = NULL;\n  __pyx_pybuffernd_bb.rcbuffer = &__pyx_pybuffer_bb;\n  {\n    __Pyx_BufFmt_StackElem __pyx_stack[1];\n    if (unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_bb.rcbuffer->pybuffer, (PyObject*)__pyx_v_bb, &__Pyx_TypeInfo_nn___pyx_t_5numpy_double_t, PyBUF_FORMAT| PyBUF_STRIDES, 2, 0, __pyx_stack) == -1)) __PYX_ERR(0, 253, __pyx_L1_error)\n  }\n  __pyx_pybuffernd_bb.diminfo[0].strides = __pyx_pybuffernd_bb.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_bb.diminfo[0].shape = __pyx_pybuffernd_bb.rcbuffer->pybuffer.shape[0]; __pyx_pybuffernd_bb.diminfo[1].strides = __pyx_pybuffernd_bb.rcbuffer->pybuffer.strides[1]; __pyx_pybuffernd_bb.diminfo[1].shape = __pyx_pybuffernd_bb.rcbuffer->pybuffer.shape[1];\n\n  /* \"pysobatools/_mask.pyx\":254\n * \n * def frBbox(np.ndarray[np.double_t, ndim=2] bb, siz h, siz w ):\n *     cdef siz n = bb.shape[0]             # <<<<<<<<<<<<<<\n *     Rs = RLEs(n)\n *     rleFrBbox( <RLE*> Rs._R, <const BB> bb.data, h, w, n )\n */\n  __pyx_v_n = (__pyx_v_bb->dimensions[0]);\n\n  /* \"pysobatools/_mask.pyx\":255\n * def frBbox(np.ndarray[np.double_t, ndim=2] bb, siz h, siz w ):\n *     cdef siz n = bb.shape[0]\n *     Rs = RLEs(n)             # <<<<<<<<<<<<<<\n *     rleFrBbox( <RLE*> Rs._R, <const BB> bb.data, h, w, n )\n *     objs = _toString(Rs)\n */\n  __pyx_t_1 = 
__Pyx_PyInt_From_siz(__pyx_v_n); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 255, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_1);\n  __pyx_t_2 = __Pyx_PyObject_CallOneArg(((PyObject *)__pyx_ptype_11pysobatools_5_mask_RLEs), __pyx_t_1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 255, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_2);\n  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;\n  __pyx_v_Rs = ((struct __pyx_obj_11pysobatools_5_mask_RLEs *)__pyx_t_2);\n  __pyx_t_2 = 0;\n\n  /* \"pysobatools/_mask.pyx\":256\n *     cdef siz n = bb.shape[0]\n *     Rs = RLEs(n)\n *     rleFrBbox( <RLE*> Rs._R, <const BB> bb.data, h, w, n )             # <<<<<<<<<<<<<<\n *     objs = _toString(Rs)\n *     return objs\n */\n  rleFrBbox(((RLE *)__pyx_v_Rs->_R), ((BB const )__pyx_v_bb->data), __pyx_v_h, __pyx_v_w, __pyx_v_n);\n\n  /* \"pysobatools/_mask.pyx\":257\n *     Rs = RLEs(n)\n *     rleFrBbox( <RLE*> Rs._R, <const BB> bb.data, h, w, n )\n *     objs = _toString(Rs)             # <<<<<<<<<<<<<<\n *     return objs\n * \n */\n  __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_toString); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 257, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_1);\n  __pyx_t_3 = NULL;\n  if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_1))) {\n    __pyx_t_3 = PyMethod_GET_SELF(__pyx_t_1);\n    if (likely(__pyx_t_3)) {\n      PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1);\n      __Pyx_INCREF(__pyx_t_3);\n      __Pyx_INCREF(function);\n      __Pyx_DECREF_SET(__pyx_t_1, function);\n    }\n  }\n  __pyx_t_2 = (__pyx_t_3) ? 
__Pyx_PyObject_Call2Args(__pyx_t_1, __pyx_t_3, ((PyObject *)__pyx_v_Rs)) : __Pyx_PyObject_CallOneArg(__pyx_t_1, ((PyObject *)__pyx_v_Rs));\n  __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0;\n  if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 257, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_2);\n  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;\n  __pyx_v_objs = __pyx_t_2;\n  __pyx_t_2 = 0;\n\n  /* \"pysobatools/_mask.pyx\":258\n *     rleFrBbox( <RLE*> Rs._R, <const BB> bb.data, h, w, n )\n *     objs = _toString(Rs)\n *     return objs             # <<<<<<<<<<<<<<\n * \n * def frPoly( poly, siz h, siz w ):\n */\n  __Pyx_XDECREF(__pyx_r);\n  __Pyx_INCREF(__pyx_v_objs);\n  __pyx_r = __pyx_v_objs;\n  goto __pyx_L0;\n\n  /* \"pysobatools/_mask.pyx\":253\n *     return bb\n * \n * def frBbox(np.ndarray[np.double_t, ndim=2] bb, siz h, siz w ):             # <<<<<<<<<<<<<<\n *     cdef siz n = bb.shape[0]\n *     Rs = RLEs(n)\n */\n\n  /* function exit code */\n  __pyx_L1_error:;\n  __Pyx_XDECREF(__pyx_t_1);\n  __Pyx_XDECREF(__pyx_t_2);\n  __Pyx_XDECREF(__pyx_t_3);\n  { PyObject *__pyx_type, *__pyx_value, *__pyx_tb;\n    __Pyx_PyThreadState_declare\n    __Pyx_PyThreadState_assign\n    __Pyx_ErrFetch(&__pyx_type, &__pyx_value, &__pyx_tb);\n    __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_bb.rcbuffer->pybuffer);\n  __Pyx_ErrRestore(__pyx_type, __pyx_value, __pyx_tb);}\n  __Pyx_AddTraceback(\"pysobatools._mask.frBbox\", __pyx_clineno, __pyx_lineno, __pyx_filename);\n  __pyx_r = NULL;\n  goto __pyx_L2;\n  __pyx_L0:;\n  __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_bb.rcbuffer->pybuffer);\n  __pyx_L2:;\n  __Pyx_XDECREF((PyObject *)__pyx_v_Rs);\n  __Pyx_XDECREF(__pyx_v_objs);\n  __Pyx_XGIVEREF(__pyx_r);\n  __Pyx_RefNannyFinishContext();\n  return __pyx_r;\n}\n\n/* \"pysobatools/_mask.pyx\":260\n *     return objs\n * \n * def frPoly( poly, siz h, siz w ):             # <<<<<<<<<<<<<<\n *     cdef np.ndarray[np.double_t, ndim=1] np_poly\n *     n = len(poly)\n */\n\n/* Python wrapper */\nstatic PyObject 
*__pyx_pw_11pysobatools_5_mask_19frPoly(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/\nstatic PyMethodDef __pyx_mdef_11pysobatools_5_mask_19frPoly = {\"frPoly\", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_11pysobatools_5_mask_19frPoly, METH_VARARGS|METH_KEYWORDS, 0};\nstatic PyObject *__pyx_pw_11pysobatools_5_mask_19frPoly(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) {\n  PyObject *__pyx_v_poly = 0;\n  siz __pyx_v_h;\n  siz __pyx_v_w;\n  PyObject *__pyx_r = 0;\n  __Pyx_RefNannyDeclarations\n  __Pyx_RefNannySetupContext(\"frPoly (wrapper)\", 0);\n  {\n    static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_poly,&__pyx_n_s_h,&__pyx_n_s_w,0};\n    PyObject* values[3] = {0,0,0};\n    if (unlikely(__pyx_kwds)) {\n      Py_ssize_t kw_args;\n      const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args);\n      switch (pos_args) {\n        case  3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2);\n        CYTHON_FALLTHROUGH;\n        case  2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1);\n        CYTHON_FALLTHROUGH;\n        case  1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0);\n        CYTHON_FALLTHROUGH;\n        case  0: break;\n        default: goto __pyx_L5_argtuple_error;\n      }\n      kw_args = PyDict_Size(__pyx_kwds);\n      switch (pos_args) {\n        case  0:\n        if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_poly)) != 0)) kw_args--;\n        else goto __pyx_L5_argtuple_error;\n        CYTHON_FALLTHROUGH;\n        case  1:\n        if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_h)) != 0)) kw_args--;\n        else {\n          __Pyx_RaiseArgtupleInvalid(\"frPoly\", 1, 3, 3, 1); __PYX_ERR(0, 260, __pyx_L3_error)\n        }\n        CYTHON_FALLTHROUGH;\n        case  2:\n        if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_w)) != 0)) kw_args--;\n        else {\n          __Pyx_RaiseArgtupleInvalid(\"frPoly\", 1, 3, 3, 2); 
__PYX_ERR(0, 260, __pyx_L3_error)\n        }\n      }\n      if (unlikely(kw_args > 0)) {\n        if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, \"frPoly\") < 0)) __PYX_ERR(0, 260, __pyx_L3_error)\n      }\n    } else if (PyTuple_GET_SIZE(__pyx_args) != 3) {\n      goto __pyx_L5_argtuple_error;\n    } else {\n      values[0] = PyTuple_GET_ITEM(__pyx_args, 0);\n      values[1] = PyTuple_GET_ITEM(__pyx_args, 1);\n      values[2] = PyTuple_GET_ITEM(__pyx_args, 2);\n    }\n    __pyx_v_poly = values[0];\n    __pyx_v_h = __Pyx_PyInt_As_siz(values[1]); if (unlikely((__pyx_v_h == ((siz)-1)) && PyErr_Occurred())) __PYX_ERR(0, 260, __pyx_L3_error)\n    __pyx_v_w = __Pyx_PyInt_As_siz(values[2]); if (unlikely((__pyx_v_w == ((siz)-1)) && PyErr_Occurred())) __PYX_ERR(0, 260, __pyx_L3_error)\n  }\n  goto __pyx_L4_argument_unpacking_done;\n  __pyx_L5_argtuple_error:;\n  __Pyx_RaiseArgtupleInvalid(\"frPoly\", 1, 3, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 260, __pyx_L3_error)\n  __pyx_L3_error:;\n  __Pyx_AddTraceback(\"pysobatools._mask.frPoly\", __pyx_clineno, __pyx_lineno, __pyx_filename);\n  __Pyx_RefNannyFinishContext();\n  return NULL;\n  __pyx_L4_argument_unpacking_done:;\n  __pyx_r = __pyx_pf_11pysobatools_5_mask_18frPoly(__pyx_self, __pyx_v_poly, __pyx_v_h, __pyx_v_w);\n\n  /* function exit code */\n  __Pyx_RefNannyFinishContext();\n  return __pyx_r;\n}\n\nstatic PyObject *__pyx_pf_11pysobatools_5_mask_18frPoly(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_poly, siz __pyx_v_h, siz __pyx_v_w) {\n  PyArrayObject *__pyx_v_np_poly = 0;\n  Py_ssize_t __pyx_v_n;\n  struct __pyx_obj_11pysobatools_5_mask_RLEs *__pyx_v_Rs = NULL;\n  PyObject *__pyx_v_i = NULL;\n  PyObject *__pyx_v_p = NULL;\n  PyObject *__pyx_v_objs = NULL;\n  __Pyx_LocalBuf_ND __pyx_pybuffernd_np_poly;\n  __Pyx_Buffer __pyx_pybuffer_np_poly;\n  PyObject *__pyx_r = NULL;\n  __Pyx_RefNannyDeclarations\n  Py_ssize_t __pyx_t_1;\n  PyObject *__pyx_t_2 = NULL;\n 
 PyObject *__pyx_t_3 = NULL;\n  PyObject *(*__pyx_t_4)(PyObject *);\n  PyObject *__pyx_t_5 = NULL;\n  PyObject *__pyx_t_6 = NULL;\n  PyObject *__pyx_t_7 = NULL;\n  PyObject *__pyx_t_8 = NULL;\n  PyObject *__pyx_t_9 = NULL;\n  PyArrayObject *__pyx_t_10 = NULL;\n  int __pyx_t_11;\n  PyObject *__pyx_t_12 = NULL;\n  PyObject *__pyx_t_13 = NULL;\n  PyObject *__pyx_t_14 = NULL;\n  Py_ssize_t __pyx_t_15;\n  Py_ssize_t __pyx_t_16;\n  __Pyx_RefNannySetupContext(\"frPoly\", 0);\n  __pyx_pybuffer_np_poly.pybuffer.buf = NULL;\n  __pyx_pybuffer_np_poly.refcount = 0;\n  __pyx_pybuffernd_np_poly.data = NULL;\n  __pyx_pybuffernd_np_poly.rcbuffer = &__pyx_pybuffer_np_poly;\n\n  /* \"pysobatools/_mask.pyx\":262\n * def frPoly( poly, siz h, siz w ):\n *     cdef np.ndarray[np.double_t, ndim=1] np_poly\n *     n = len(poly)             # <<<<<<<<<<<<<<\n *     Rs = RLEs(n)\n *     for i, p in enumerate(poly):\n */\n  __pyx_t_1 = PyObject_Length(__pyx_v_poly); if (unlikely(__pyx_t_1 == ((Py_ssize_t)-1))) __PYX_ERR(0, 262, __pyx_L1_error)\n  __pyx_v_n = __pyx_t_1;\n\n  /* \"pysobatools/_mask.pyx\":263\n *     cdef np.ndarray[np.double_t, ndim=1] np_poly\n *     n = len(poly)\n *     Rs = RLEs(n)             # <<<<<<<<<<<<<<\n *     for i, p in enumerate(poly):\n *         np_poly = np.array(p, dtype=np.double, order='F')\n */\n  __pyx_t_2 = PyInt_FromSsize_t(__pyx_v_n); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 263, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_2);\n  __pyx_t_3 = __Pyx_PyObject_CallOneArg(((PyObject *)__pyx_ptype_11pysobatools_5_mask_RLEs), __pyx_t_2); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 263, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_3);\n  __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;\n  __pyx_v_Rs = ((struct __pyx_obj_11pysobatools_5_mask_RLEs *)__pyx_t_3);\n  __pyx_t_3 = 0;\n\n  /* \"pysobatools/_mask.pyx\":264\n *     n = len(poly)\n *     Rs = RLEs(n)\n *     for i, p in enumerate(poly):             # <<<<<<<<<<<<<<\n *         np_poly = np.array(p, dtype=np.double, order='F')\n 
*         rleFrPoly( <RLE*>&Rs._R[i], <const double*> np_poly.data, int(len(p)/2), h, w )\n */\n  __Pyx_INCREF(__pyx_int_0);\n  __pyx_t_3 = __pyx_int_0;\n  if (likely(PyList_CheckExact(__pyx_v_poly)) || PyTuple_CheckExact(__pyx_v_poly)) {\n    __pyx_t_2 = __pyx_v_poly; __Pyx_INCREF(__pyx_t_2); __pyx_t_1 = 0;\n    __pyx_t_4 = NULL;\n  } else {\n    __pyx_t_1 = -1; __pyx_t_2 = PyObject_GetIter(__pyx_v_poly); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 264, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_2);\n    __pyx_t_4 = Py_TYPE(__pyx_t_2)->tp_iternext; if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 264, __pyx_L1_error)\n  }\n  for (;;) {\n    if (likely(!__pyx_t_4)) {\n      if (likely(PyList_CheckExact(__pyx_t_2))) {\n        if (__pyx_t_1 >= PyList_GET_SIZE(__pyx_t_2)) break;\n        #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS\n        __pyx_t_5 = PyList_GET_ITEM(__pyx_t_2, __pyx_t_1); __Pyx_INCREF(__pyx_t_5); __pyx_t_1++; if (unlikely(0 < 0)) __PYX_ERR(0, 264, __pyx_L1_error)\n        #else\n        __pyx_t_5 = PySequence_ITEM(__pyx_t_2, __pyx_t_1); __pyx_t_1++; if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 264, __pyx_L1_error)\n        __Pyx_GOTREF(__pyx_t_5);\n        #endif\n      } else {\n        if (__pyx_t_1 >= PyTuple_GET_SIZE(__pyx_t_2)) break;\n        #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS\n        __pyx_t_5 = PyTuple_GET_ITEM(__pyx_t_2, __pyx_t_1); __Pyx_INCREF(__pyx_t_5); __pyx_t_1++; if (unlikely(0 < 0)) __PYX_ERR(0, 264, __pyx_L1_error)\n        #else\n        __pyx_t_5 = PySequence_ITEM(__pyx_t_2, __pyx_t_1); __pyx_t_1++; if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 264, __pyx_L1_error)\n        __Pyx_GOTREF(__pyx_t_5);\n        #endif\n      }\n    } else {\n      __pyx_t_5 = __pyx_t_4(__pyx_t_2);\n      if (unlikely(!__pyx_t_5)) {\n        PyObject* exc_type = PyErr_Occurred();\n        if (exc_type) {\n          if (likely(__Pyx_PyErr_GivenExceptionMatches(exc_type, PyExc_StopIteration))) PyErr_Clear();\n          else __PYX_ERR(0, 
264, __pyx_L1_error)\n        }\n        break;\n      }\n      __Pyx_GOTREF(__pyx_t_5);\n    }\n    __Pyx_XDECREF_SET(__pyx_v_p, __pyx_t_5);\n    __pyx_t_5 = 0;\n    __Pyx_INCREF(__pyx_t_3);\n    __Pyx_XDECREF_SET(__pyx_v_i, __pyx_t_3);\n    __pyx_t_5 = __Pyx_PyInt_AddObjC(__pyx_t_3, __pyx_int_1, 1, 0, 0); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 264, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_5);\n    __Pyx_DECREF(__pyx_t_3);\n    __pyx_t_3 = __pyx_t_5;\n    __pyx_t_5 = 0;\n\n    /* \"pysobatools/_mask.pyx\":265\n *     Rs = RLEs(n)\n *     for i, p in enumerate(poly):\n *         np_poly = np.array(p, dtype=np.double, order='F')             # <<<<<<<<<<<<<<\n *         rleFrPoly( <RLE*>&Rs._R[i], <const double*> np_poly.data, int(len(p)/2), h, w )\n *     objs = _toString(Rs)\n */\n    __Pyx_GetModuleGlobalName(__pyx_t_5, __pyx_n_s_np); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 265, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_5);\n    __pyx_t_6 = __Pyx_PyObject_GetAttrStr(__pyx_t_5, __pyx_n_s_array); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 265, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_6);\n    __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;\n    __pyx_t_5 = PyTuple_New(1); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 265, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_5);\n    __Pyx_INCREF(__pyx_v_p);\n    __Pyx_GIVEREF(__pyx_v_p);\n    PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_v_p);\n    __pyx_t_7 = __Pyx_PyDict_NewPresized(2); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 265, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_7);\n    __Pyx_GetModuleGlobalName(__pyx_t_8, __pyx_n_s_np); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 265, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_8);\n    __pyx_t_9 = __Pyx_PyObject_GetAttrStr(__pyx_t_8, __pyx_n_s_double); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 265, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_9);\n    __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0;\n    if (PyDict_SetItem(__pyx_t_7, __pyx_n_s_dtype, __pyx_t_9) < 0) __PYX_ERR(0, 265, __pyx_L1_error)\n    
__Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0;\n    if (PyDict_SetItem(__pyx_t_7, __pyx_n_s_order, __pyx_n_s_F) < 0) __PYX_ERR(0, 265, __pyx_L1_error)\n    __pyx_t_9 = __Pyx_PyObject_Call(__pyx_t_6, __pyx_t_5, __pyx_t_7); if (unlikely(!__pyx_t_9)) __PYX_ERR(0, 265, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_9);\n    __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;\n    __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;\n    __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;\n    if (!(likely(((__pyx_t_9) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_9, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 265, __pyx_L1_error)\n    __pyx_t_10 = ((PyArrayObject *)__pyx_t_9);\n    {\n      __Pyx_BufFmt_StackElem __pyx_stack[1];\n      __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_np_poly.rcbuffer->pybuffer);\n      __pyx_t_11 = __Pyx_GetBufferAndValidate(&__pyx_pybuffernd_np_poly.rcbuffer->pybuffer, (PyObject*)__pyx_t_10, &__Pyx_TypeInfo_nn___pyx_t_5numpy_double_t, PyBUF_FORMAT| PyBUF_STRIDES, 1, 0, __pyx_stack);\n      if (unlikely(__pyx_t_11 < 0)) {\n        PyErr_Fetch(&__pyx_t_12, &__pyx_t_13, &__pyx_t_14);\n        if (unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_np_poly.rcbuffer->pybuffer, (PyObject*)__pyx_v_np_poly, &__Pyx_TypeInfo_nn___pyx_t_5numpy_double_t, PyBUF_FORMAT| PyBUF_STRIDES, 1, 0, __pyx_stack) == -1)) {\n          Py_XDECREF(__pyx_t_12); Py_XDECREF(__pyx_t_13); Py_XDECREF(__pyx_t_14);\n          __Pyx_RaiseBufferFallbackError();\n        } else {\n          PyErr_Restore(__pyx_t_12, __pyx_t_13, __pyx_t_14);\n        }\n        __pyx_t_12 = __pyx_t_13 = __pyx_t_14 = 0;\n      }\n      __pyx_pybuffernd_np_poly.diminfo[0].strides = __pyx_pybuffernd_np_poly.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_np_poly.diminfo[0].shape = __pyx_pybuffernd_np_poly.rcbuffer->pybuffer.shape[0];\n      if (unlikely(__pyx_t_11 < 0)) __PYX_ERR(0, 265, __pyx_L1_error)\n    }\n    __pyx_t_10 = 0;\n    __Pyx_XDECREF_SET(__pyx_v_np_poly, ((PyArrayObject *)__pyx_t_9));\n    __pyx_t_9 = 0;\n\n    /* 
\"pysobatools/_mask.pyx\":266\n *     for i, p in enumerate(poly):\n *         np_poly = np.array(p, dtype=np.double, order='F')\n *         rleFrPoly( <RLE*>&Rs._R[i], <const double*> np_poly.data, int(len(p)/2), h, w )             # <<<<<<<<<<<<<<\n *     objs = _toString(Rs)\n *     return objs\n */\n    __pyx_t_15 = __Pyx_PyIndex_AsSsize_t(__pyx_v_i); if (unlikely((__pyx_t_15 == (Py_ssize_t)-1) && PyErr_Occurred())) __PYX_ERR(0, 266, __pyx_L1_error)\n    __pyx_t_16 = PyObject_Length(__pyx_v_p); if (unlikely(__pyx_t_16 == ((Py_ssize_t)-1))) __PYX_ERR(0, 266, __pyx_L1_error)\n    rleFrPoly(((RLE *)(&(__pyx_v_Rs->_R[__pyx_t_15]))), ((double const *)__pyx_v_np_poly->data), ((siz)__Pyx_div_Py_ssize_t(__pyx_t_16, 2)), __pyx_v_h, __pyx_v_w);\n\n    /* \"pysobatools/_mask.pyx\":264\n *     n = len(poly)\n *     Rs = RLEs(n)\n *     for i, p in enumerate(poly):             # <<<<<<<<<<<<<<\n *         np_poly = np.array(p, dtype=np.double, order='F')\n *         rleFrPoly( <RLE*>&Rs._R[i], <const double*> np_poly.data, int(len(p)/2), h, w )\n */\n  }\n  __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;\n  __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;\n\n  /* \"pysobatools/_mask.pyx\":267\n *         np_poly = np.array(p, dtype=np.double, order='F')\n *         rleFrPoly( <RLE*>&Rs._R[i], <const double*> np_poly.data, int(len(p)/2), h, w )\n *     objs = _toString(Rs)             # <<<<<<<<<<<<<<\n *     return objs\n * \n */\n  __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_toString); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 267, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_2);\n  __pyx_t_9 = NULL;\n  if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_2))) {\n    __pyx_t_9 = PyMethod_GET_SELF(__pyx_t_2);\n    if (likely(__pyx_t_9)) {\n      PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_2);\n      __Pyx_INCREF(__pyx_t_9);\n      __Pyx_INCREF(function);\n      __Pyx_DECREF_SET(__pyx_t_2, function);\n    }\n  }\n  __pyx_t_3 = (__pyx_t_9) ? 
__Pyx_PyObject_Call2Args(__pyx_t_2, __pyx_t_9, ((PyObject *)__pyx_v_Rs)) : __Pyx_PyObject_CallOneArg(__pyx_t_2, ((PyObject *)__pyx_v_Rs));\n  __Pyx_XDECREF(__pyx_t_9); __pyx_t_9 = 0;\n  if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 267, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_3);\n  __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;\n  __pyx_v_objs = __pyx_t_3;\n  __pyx_t_3 = 0;\n\n  /* \"pysobatools/_mask.pyx\":268\n *         rleFrPoly( <RLE*>&Rs._R[i], <const double*> np_poly.data, int(len(p)/2), h, w )\n *     objs = _toString(Rs)\n *     return objs             # <<<<<<<<<<<<<<\n * \n * def frUncompressedRLE(ucRles, siz h, siz w):\n */\n  __Pyx_XDECREF(__pyx_r);\n  __Pyx_INCREF(__pyx_v_objs);\n  __pyx_r = __pyx_v_objs;\n  goto __pyx_L0;\n\n  /* \"pysobatools/_mask.pyx\":260\n *     return objs\n * \n * def frPoly( poly, siz h, siz w ):             # <<<<<<<<<<<<<<\n *     cdef np.ndarray[np.double_t, ndim=1] np_poly\n *     n = len(poly)\n */\n\n  /* function exit code */\n  __pyx_L1_error:;\n  __Pyx_XDECREF(__pyx_t_2);\n  __Pyx_XDECREF(__pyx_t_3);\n  __Pyx_XDECREF(__pyx_t_5);\n  __Pyx_XDECREF(__pyx_t_6);\n  __Pyx_XDECREF(__pyx_t_7);\n  __Pyx_XDECREF(__pyx_t_8);\n  __Pyx_XDECREF(__pyx_t_9);\n  { PyObject *__pyx_type, *__pyx_value, *__pyx_tb;\n    __Pyx_PyThreadState_declare\n    __Pyx_PyThreadState_assign\n    __Pyx_ErrFetch(&__pyx_type, &__pyx_value, &__pyx_tb);\n    __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_np_poly.rcbuffer->pybuffer);\n  __Pyx_ErrRestore(__pyx_type, __pyx_value, __pyx_tb);}\n  __Pyx_AddTraceback(\"pysobatools._mask.frPoly\", __pyx_clineno, __pyx_lineno, __pyx_filename);\n  __pyx_r = NULL;\n  goto __pyx_L2;\n  __pyx_L0:;\n  __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_np_poly.rcbuffer->pybuffer);\n  __pyx_L2:;\n  __Pyx_XDECREF((PyObject *)__pyx_v_np_poly);\n  __Pyx_XDECREF((PyObject *)__pyx_v_Rs);\n  __Pyx_XDECREF(__pyx_v_i);\n  __Pyx_XDECREF(__pyx_v_p);\n  __Pyx_XDECREF(__pyx_v_objs);\n  __Pyx_XGIVEREF(__pyx_r);\n  __Pyx_RefNannyFinishContext();\n  return 
__pyx_r;\n}\n\n/* \"pysobatools/_mask.pyx\":270\n *     return objs\n * \n * def frUncompressedRLE(ucRles, siz h, siz w):             # <<<<<<<<<<<<<<\n *     cdef np.ndarray[np.uint32_t, ndim=1] cnts\n *     cdef RLE R\n */\n\n/* Python wrapper */\nstatic PyObject *__pyx_pw_11pysobatools_5_mask_21frUncompressedRLE(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/\nstatic PyMethodDef __pyx_mdef_11pysobatools_5_mask_21frUncompressedRLE = {\"frUncompressedRLE\", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_11pysobatools_5_mask_21frUncompressedRLE, METH_VARARGS|METH_KEYWORDS, 0};\nstatic PyObject *__pyx_pw_11pysobatools_5_mask_21frUncompressedRLE(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) {\n  PyObject *__pyx_v_ucRles = 0;\n  CYTHON_UNUSED siz __pyx_v_h;\n  CYTHON_UNUSED siz __pyx_v_w;\n  PyObject *__pyx_r = 0;\n  __Pyx_RefNannyDeclarations\n  __Pyx_RefNannySetupContext(\"frUncompressedRLE (wrapper)\", 0);\n  {\n    static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_ucRles,&__pyx_n_s_h,&__pyx_n_s_w,0};\n    PyObject* values[3] = {0,0,0};\n    if (unlikely(__pyx_kwds)) {\n      Py_ssize_t kw_args;\n      const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args);\n      switch (pos_args) {\n        case  3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2);\n        CYTHON_FALLTHROUGH;\n        case  2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1);\n        CYTHON_FALLTHROUGH;\n        case  1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0);\n        CYTHON_FALLTHROUGH;\n        case  0: break;\n        default: goto __pyx_L5_argtuple_error;\n      }\n      kw_args = PyDict_Size(__pyx_kwds);\n      switch (pos_args) {\n        case  0:\n        if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_ucRles)) != 0)) kw_args--;\n        else goto __pyx_L5_argtuple_error;\n        CYTHON_FALLTHROUGH;\n        case  1:\n        if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_h)) != 0)) 
kw_args--;\n        else {\n          __Pyx_RaiseArgtupleInvalid(\"frUncompressedRLE\", 1, 3, 3, 1); __PYX_ERR(0, 270, __pyx_L3_error)\n        }\n        CYTHON_FALLTHROUGH;\n        case  2:\n        if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_w)) != 0)) kw_args--;\n        else {\n          __Pyx_RaiseArgtupleInvalid(\"frUncompressedRLE\", 1, 3, 3, 2); __PYX_ERR(0, 270, __pyx_L3_error)\n        }\n      }\n      if (unlikely(kw_args > 0)) {\n        if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, \"frUncompressedRLE\") < 0)) __PYX_ERR(0, 270, __pyx_L3_error)\n      }\n    } else if (PyTuple_GET_SIZE(__pyx_args) != 3) {\n      goto __pyx_L5_argtuple_error;\n    } else {\n      values[0] = PyTuple_GET_ITEM(__pyx_args, 0);\n      values[1] = PyTuple_GET_ITEM(__pyx_args, 1);\n      values[2] = PyTuple_GET_ITEM(__pyx_args, 2);\n    }\n    __pyx_v_ucRles = values[0];\n    __pyx_v_h = __Pyx_PyInt_As_siz(values[1]); if (unlikely((__pyx_v_h == ((siz)-1)) && PyErr_Occurred())) __PYX_ERR(0, 270, __pyx_L3_error)\n    __pyx_v_w = __Pyx_PyInt_As_siz(values[2]); if (unlikely((__pyx_v_w == ((siz)-1)) && PyErr_Occurred())) __PYX_ERR(0, 270, __pyx_L3_error)\n  }\n  goto __pyx_L4_argument_unpacking_done;\n  __pyx_L5_argtuple_error:;\n  __Pyx_RaiseArgtupleInvalid(\"frUncompressedRLE\", 1, 3, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 270, __pyx_L3_error)\n  __pyx_L3_error:;\n  __Pyx_AddTraceback(\"pysobatools._mask.frUncompressedRLE\", __pyx_clineno, __pyx_lineno, __pyx_filename);\n  __Pyx_RefNannyFinishContext();\n  return NULL;\n  __pyx_L4_argument_unpacking_done:;\n  __pyx_r = __pyx_pf_11pysobatools_5_mask_20frUncompressedRLE(__pyx_self, __pyx_v_ucRles, __pyx_v_h, __pyx_v_w);\n\n  /* function exit code */\n  __Pyx_RefNannyFinishContext();\n  return __pyx_r;\n}\n\nstatic PyObject *__pyx_pf_11pysobatools_5_mask_20frUncompressedRLE(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_ucRles, CYTHON_UNUSED 
siz __pyx_v_h, CYTHON_UNUSED siz __pyx_v_w) {\n  PyArrayObject *__pyx_v_cnts = 0;\n  RLE __pyx_v_R;\n  uint *__pyx_v_data;\n  Py_ssize_t __pyx_v_n;\n  PyObject *__pyx_v_objs = NULL;\n  Py_ssize_t __pyx_v_i;\n  struct __pyx_obj_11pysobatools_5_mask_RLEs *__pyx_v_Rs = NULL;\n  Py_ssize_t __pyx_v_j;\n  __Pyx_LocalBuf_ND __pyx_pybuffernd_cnts;\n  __Pyx_Buffer __pyx_pybuffer_cnts;\n  PyObject *__pyx_r = NULL;\n  __Pyx_RefNannyDeclarations\n  Py_ssize_t __pyx_t_1;\n  PyObject *__pyx_t_2 = NULL;\n  Py_ssize_t __pyx_t_3;\n  Py_ssize_t __pyx_t_4;\n  PyObject *__pyx_t_5 = NULL;\n  PyObject *__pyx_t_6 = NULL;\n  PyObject *__pyx_t_7 = NULL;\n  PyObject *__pyx_t_8 = NULL;\n  PyArrayObject *__pyx_t_9 = NULL;\n  int __pyx_t_10;\n  PyObject *__pyx_t_11 = NULL;\n  PyObject *__pyx_t_12 = NULL;\n  PyObject *__pyx_t_13 = NULL;\n  Py_ssize_t __pyx_t_14;\n  Py_ssize_t __pyx_t_15;\n  Py_ssize_t __pyx_t_16;\n  Py_ssize_t __pyx_t_17;\n  RLE __pyx_t_18;\n  siz __pyx_t_19;\n  int __pyx_t_20;\n  __Pyx_RefNannySetupContext(\"frUncompressedRLE\", 0);\n  __pyx_pybuffer_cnts.pybuffer.buf = NULL;\n  __pyx_pybuffer_cnts.refcount = 0;\n  __pyx_pybuffernd_cnts.data = NULL;\n  __pyx_pybuffernd_cnts.rcbuffer = &__pyx_pybuffer_cnts;\n\n  /* \"pysobatools/_mask.pyx\":274\n *     cdef RLE R\n *     cdef uint *data\n *     n = len(ucRles)             # <<<<<<<<<<<<<<\n *     objs = []\n *     for i in range(n):\n */\n  __pyx_t_1 = PyObject_Length(__pyx_v_ucRles); if (unlikely(__pyx_t_1 == ((Py_ssize_t)-1))) __PYX_ERR(0, 274, __pyx_L1_error)\n  __pyx_v_n = __pyx_t_1;\n\n  /* \"pysobatools/_mask.pyx\":275\n *     cdef uint *data\n *     n = len(ucRles)\n *     objs = []             # <<<<<<<<<<<<<<\n *     for i in range(n):\n *         Rs = RLEs(1)\n */\n  __pyx_t_2 = PyList_New(0); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 275, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_2);\n  __pyx_v_objs = ((PyObject*)__pyx_t_2);\n  __pyx_t_2 = 0;\n\n  /* \"pysobatools/_mask.pyx\":276\n *     n = len(ucRles)\n *     objs = 
[]\n *     for i in range(n):             # <<<<<<<<<<<<<<\n *         Rs = RLEs(1)\n *         cnts = np.array(ucRles[i]['counts'], dtype=np.uint32)\n */\n  __pyx_t_1 = __pyx_v_n;\n  __pyx_t_3 = __pyx_t_1;\n  for (__pyx_t_4 = 0; __pyx_t_4 < __pyx_t_3; __pyx_t_4+=1) {\n    __pyx_v_i = __pyx_t_4;\n\n    /* \"pysobatools/_mask.pyx\":277\n *     objs = []\n *     for i in range(n):\n *         Rs = RLEs(1)             # <<<<<<<<<<<<<<\n *         cnts = np.array(ucRles[i]['counts'], dtype=np.uint32)\n *         # time for malloc can be saved here but it's fine\n */\n    __pyx_t_2 = __Pyx_PyObject_Call(((PyObject *)__pyx_ptype_11pysobatools_5_mask_RLEs), __pyx_tuple__6, NULL); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 277, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_2);\n    __Pyx_XDECREF_SET(__pyx_v_Rs, ((struct __pyx_obj_11pysobatools_5_mask_RLEs *)__pyx_t_2));\n    __pyx_t_2 = 0;\n\n    /* \"pysobatools/_mask.pyx\":278\n *     for i in range(n):\n *         Rs = RLEs(1)\n *         cnts = np.array(ucRles[i]['counts'], dtype=np.uint32)             # <<<<<<<<<<<<<<\n *         # time for malloc can be saved here but it's fine\n *         data = <uint*> malloc(len(cnts)* sizeof(uint))\n */\n    __Pyx_GetModuleGlobalName(__pyx_t_2, __pyx_n_s_np); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 278, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_2);\n    __pyx_t_5 = __Pyx_PyObject_GetAttrStr(__pyx_t_2, __pyx_n_s_array); if (unlikely(!__pyx_t_5)) __PYX_ERR(0, 278, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_5);\n    __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;\n    __pyx_t_2 = __Pyx_GetItemInt(__pyx_v_ucRles, __pyx_v_i, Py_ssize_t, 1, PyInt_FromSsize_t, 0, 1, 1); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 278, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_2);\n    __pyx_t_6 = __Pyx_PyObject_Dict_GetItem(__pyx_t_2, __pyx_n_s_counts); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 278, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_6);\n    __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;\n    __pyx_t_2 = PyTuple_New(1); if 
(unlikely(!__pyx_t_2)) __PYX_ERR(0, 278, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_2);\n    __Pyx_GIVEREF(__pyx_t_6);\n    PyTuple_SET_ITEM(__pyx_t_2, 0, __pyx_t_6);\n    __pyx_t_6 = 0;\n    __pyx_t_6 = __Pyx_PyDict_NewPresized(1); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 278, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_6);\n    __Pyx_GetModuleGlobalName(__pyx_t_7, __pyx_n_s_np); if (unlikely(!__pyx_t_7)) __PYX_ERR(0, 278, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_7);\n    __pyx_t_8 = __Pyx_PyObject_GetAttrStr(__pyx_t_7, __pyx_n_s_uint32); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 278, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_8);\n    __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;\n    if (PyDict_SetItem(__pyx_t_6, __pyx_n_s_dtype, __pyx_t_8) < 0) __PYX_ERR(0, 278, __pyx_L1_error)\n    __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0;\n    __pyx_t_8 = __Pyx_PyObject_Call(__pyx_t_5, __pyx_t_2, __pyx_t_6); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 278, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_8);\n    __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;\n    __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;\n    __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;\n    if (!(likely(((__pyx_t_8) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_8, __pyx_ptype_5numpy_ndarray))))) __PYX_ERR(0, 278, __pyx_L1_error)\n    __pyx_t_9 = ((PyArrayObject *)__pyx_t_8);\n    {\n      __Pyx_BufFmt_StackElem __pyx_stack[1];\n      __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_cnts.rcbuffer->pybuffer);\n      __pyx_t_10 = __Pyx_GetBufferAndValidate(&__pyx_pybuffernd_cnts.rcbuffer->pybuffer, (PyObject*)__pyx_t_9, &__Pyx_TypeInfo_nn___pyx_t_5numpy_uint32_t, PyBUF_FORMAT| PyBUF_STRIDES, 1, 0, __pyx_stack);\n      if (unlikely(__pyx_t_10 < 0)) {\n        PyErr_Fetch(&__pyx_t_11, &__pyx_t_12, &__pyx_t_13);\n        if (unlikely(__Pyx_GetBufferAndValidate(&__pyx_pybuffernd_cnts.rcbuffer->pybuffer, (PyObject*)__pyx_v_cnts, &__Pyx_TypeInfo_nn___pyx_t_5numpy_uint32_t, PyBUF_FORMAT| PyBUF_STRIDES, 1, 0, __pyx_stack) == -1)) {\n          Py_XDECREF(__pyx_t_11); 
Py_XDECREF(__pyx_t_12); Py_XDECREF(__pyx_t_13);\n          __Pyx_RaiseBufferFallbackError();\n        } else {\n          PyErr_Restore(__pyx_t_11, __pyx_t_12, __pyx_t_13);\n        }\n        __pyx_t_11 = __pyx_t_12 = __pyx_t_13 = 0;\n      }\n      __pyx_pybuffernd_cnts.diminfo[0].strides = __pyx_pybuffernd_cnts.rcbuffer->pybuffer.strides[0]; __pyx_pybuffernd_cnts.diminfo[0].shape = __pyx_pybuffernd_cnts.rcbuffer->pybuffer.shape[0];\n      if (unlikely(__pyx_t_10 < 0)) __PYX_ERR(0, 278, __pyx_L1_error)\n    }\n    __pyx_t_9 = 0;\n    __Pyx_XDECREF_SET(__pyx_v_cnts, ((PyArrayObject *)__pyx_t_8));\n    __pyx_t_8 = 0;\n\n    /* \"pysobatools/_mask.pyx\":280\n *         cnts = np.array(ucRles[i]['counts'], dtype=np.uint32)\n *         # time for malloc can be saved here but it's fine\n *         data = <uint*> malloc(len(cnts)* sizeof(uint))             # <<<<<<<<<<<<<<\n *         for j in range(len(cnts)):\n *             data[j] = <uint> cnts[j]\n */\n    __pyx_t_14 = PyObject_Length(((PyObject *)__pyx_v_cnts)); if (unlikely(__pyx_t_14 == ((Py_ssize_t)-1))) __PYX_ERR(0, 280, __pyx_L1_error)\n    __pyx_v_data = ((uint *)malloc((__pyx_t_14 * (sizeof(unsigned int)))));\n\n    /* \"pysobatools/_mask.pyx\":281\n *         # time for malloc can be saved here but it's fine\n *         data = <uint*> malloc(len(cnts)* sizeof(uint))\n *         for j in range(len(cnts)):             # <<<<<<<<<<<<<<\n *             data[j] = <uint> cnts[j]\n *         R = RLE(ucRles[i]['size'][0], ucRles[i]['size'][1], len(cnts), <uint*> data)\n */\n    __pyx_t_14 = PyObject_Length(((PyObject *)__pyx_v_cnts)); if (unlikely(__pyx_t_14 == ((Py_ssize_t)-1))) __PYX_ERR(0, 281, __pyx_L1_error)\n    __pyx_t_15 = __pyx_t_14;\n    for (__pyx_t_16 = 0; __pyx_t_16 < __pyx_t_15; __pyx_t_16+=1) {\n      __pyx_v_j = __pyx_t_16;\n\n      /* \"pysobatools/_mask.pyx\":282\n *         data = <uint*> malloc(len(cnts)* sizeof(uint))\n *         for j in range(len(cnts)):\n *             data[j] = <uint> 
cnts[j]             # <<<<<<<<<<<<<<\n *         R = RLE(ucRles[i]['size'][0], ucRles[i]['size'][1], len(cnts), <uint*> data)\n *         Rs._R[0] = R\n */\n      __pyx_t_17 = __pyx_v_j;\n      __pyx_t_10 = -1;\n      if (__pyx_t_17 < 0) {\n        __pyx_t_17 += __pyx_pybuffernd_cnts.diminfo[0].shape;\n        if (unlikely(__pyx_t_17 < 0)) __pyx_t_10 = 0;\n      } else if (unlikely(__pyx_t_17 >= __pyx_pybuffernd_cnts.diminfo[0].shape)) __pyx_t_10 = 0;\n      if (unlikely(__pyx_t_10 != -1)) {\n        __Pyx_RaiseBufferIndexError(__pyx_t_10);\n        __PYX_ERR(0, 282, __pyx_L1_error)\n      }\n      (__pyx_v_data[__pyx_v_j]) = ((uint)(*__Pyx_BufPtrStrided1d(__pyx_t_5numpy_uint32_t *, __pyx_pybuffernd_cnts.rcbuffer->pybuffer.buf, __pyx_t_17, __pyx_pybuffernd_cnts.diminfo[0].strides)));\n    }\n\n    /* \"pysobatools/_mask.pyx\":283\n *         for j in range(len(cnts)):\n *             data[j] = <uint> cnts[j]\n *         R = RLE(ucRles[i]['size'][0], ucRles[i]['size'][1], len(cnts), <uint*> data)             # <<<<<<<<<<<<<<\n *         Rs._R[0] = R\n *         objs.append(_toString(Rs)[0])\n */\n    __pyx_t_8 = __Pyx_GetItemInt(__pyx_v_ucRles, __pyx_v_i, Py_ssize_t, 1, PyInt_FromSsize_t, 0, 1, 1); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 283, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_8);\n    __pyx_t_6 = __Pyx_PyObject_Dict_GetItem(__pyx_t_8, __pyx_n_s_size); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 283, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_6);\n    __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0;\n    __pyx_t_8 = __Pyx_GetItemInt(__pyx_t_6, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 283, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_8);\n    __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;\n    __pyx_t_19 = __Pyx_PyInt_As_siz(__pyx_t_8); if (unlikely((__pyx_t_19 == ((siz)-1)) && PyErr_Occurred())) __PYX_ERR(0, 283, __pyx_L1_error)\n    __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0;\n    __pyx_t_18.h = __pyx_t_19;\n    __pyx_t_8 = 
__Pyx_GetItemInt(__pyx_v_ucRles, __pyx_v_i, Py_ssize_t, 1, PyInt_FromSsize_t, 0, 1, 1); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 283, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_8);\n    __pyx_t_6 = __Pyx_PyObject_Dict_GetItem(__pyx_t_8, __pyx_n_s_size); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 283, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_6);\n    __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0;\n    __pyx_t_8 = __Pyx_GetItemInt(__pyx_t_6, 1, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 283, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_8);\n    __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;\n    __pyx_t_19 = __Pyx_PyInt_As_siz(__pyx_t_8); if (unlikely((__pyx_t_19 == ((siz)-1)) && PyErr_Occurred())) __PYX_ERR(0, 283, __pyx_L1_error)\n    __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0;\n    __pyx_t_18.w = __pyx_t_19;\n    __pyx_t_14 = PyObject_Length(((PyObject *)__pyx_v_cnts)); if (unlikely(__pyx_t_14 == ((Py_ssize_t)-1))) __PYX_ERR(0, 283, __pyx_L1_error)\n    __pyx_t_18.m = __pyx_t_14;\n    __pyx_t_18.cnts = ((uint *)__pyx_v_data);\n    __pyx_v_R = __pyx_t_18;\n\n    /* \"pysobatools/_mask.pyx\":284\n *             data[j] = <uint> cnts[j]\n *         R = RLE(ucRles[i]['size'][0], ucRles[i]['size'][1], len(cnts), <uint*> data)\n *         Rs._R[0] = R             # <<<<<<<<<<<<<<\n *         objs.append(_toString(Rs)[0])\n *     return objs\n */\n    (__pyx_v_Rs->_R[0]) = __pyx_v_R;\n\n    /* \"pysobatools/_mask.pyx\":285\n *         R = RLE(ucRles[i]['size'][0], ucRles[i]['size'][1], len(cnts), <uint*> data)\n *         Rs._R[0] = R\n *         objs.append(_toString(Rs)[0])             # <<<<<<<<<<<<<<\n *     return objs\n * \n */\n    __Pyx_GetModuleGlobalName(__pyx_t_6, __pyx_n_s_toString); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 285, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_6);\n    __pyx_t_2 = NULL;\n    if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_6))) {\n      __pyx_t_2 = PyMethod_GET_SELF(__pyx_t_6);\n      if (likely(__pyx_t_2)) {\n  
      PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_6);\n        __Pyx_INCREF(__pyx_t_2);\n        __Pyx_INCREF(function);\n        __Pyx_DECREF_SET(__pyx_t_6, function);\n      }\n    }\n    __pyx_t_8 = (__pyx_t_2) ? __Pyx_PyObject_Call2Args(__pyx_t_6, __pyx_t_2, ((PyObject *)__pyx_v_Rs)) : __Pyx_PyObject_CallOneArg(__pyx_t_6, ((PyObject *)__pyx_v_Rs));\n    __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0;\n    if (unlikely(!__pyx_t_8)) __PYX_ERR(0, 285, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_8);\n    __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;\n    __pyx_t_6 = __Pyx_GetItemInt(__pyx_t_8, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 285, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_6);\n    __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0;\n    __pyx_t_20 = __Pyx_PyList_Append(__pyx_v_objs, __pyx_t_6); if (unlikely(__pyx_t_20 == ((int)-1))) __PYX_ERR(0, 285, __pyx_L1_error)\n    __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;\n  }\n\n  /* \"pysobatools/_mask.pyx\":286\n *         Rs._R[0] = R\n *         objs.append(_toString(Rs)[0])\n *     return objs             # <<<<<<<<<<<<<<\n * \n * def frPyObjects(pyobj, h, w):\n */\n  __Pyx_XDECREF(__pyx_r);\n  __Pyx_INCREF(__pyx_v_objs);\n  __pyx_r = __pyx_v_objs;\n  goto __pyx_L0;\n\n  /* \"pysobatools/_mask.pyx\":270\n *     return objs\n * \n * def frUncompressedRLE(ucRles, siz h, siz w):             # <<<<<<<<<<<<<<\n *     cdef np.ndarray[np.uint32_t, ndim=1] cnts\n *     cdef RLE R\n */\n\n  /* function exit code */\n  __pyx_L1_error:;\n  __Pyx_XDECREF(__pyx_t_2);\n  __Pyx_XDECREF(__pyx_t_5);\n  __Pyx_XDECREF(__pyx_t_6);\n  __Pyx_XDECREF(__pyx_t_7);\n  __Pyx_XDECREF(__pyx_t_8);\n  { PyObject *__pyx_type, *__pyx_value, *__pyx_tb;\n    __Pyx_PyThreadState_declare\n    __Pyx_PyThreadState_assign\n    __Pyx_ErrFetch(&__pyx_type, &__pyx_value, &__pyx_tb);\n    __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_cnts.rcbuffer->pybuffer);\n  __Pyx_ErrRestore(__pyx_type, __pyx_value, __pyx_tb);}\n  
__Pyx_AddTraceback(\"pysobatools._mask.frUncompressedRLE\", __pyx_clineno, __pyx_lineno, __pyx_filename);\n  __pyx_r = NULL;\n  goto __pyx_L2;\n  __pyx_L0:;\n  __Pyx_SafeReleaseBuffer(&__pyx_pybuffernd_cnts.rcbuffer->pybuffer);\n  __pyx_L2:;\n  __Pyx_XDECREF((PyObject *)__pyx_v_cnts);\n  __Pyx_XDECREF(__pyx_v_objs);\n  __Pyx_XDECREF((PyObject *)__pyx_v_Rs);\n  __Pyx_XGIVEREF(__pyx_r);\n  __Pyx_RefNannyFinishContext();\n  return __pyx_r;\n}\n\n/* \"pysobatools/_mask.pyx\":288\n *     return objs\n * \n * def frPyObjects(pyobj, h, w):             # <<<<<<<<<<<<<<\n *     # encode rle from a list of python objects\n *     if type(pyobj) == np.ndarray:\n */\n\n/* Python wrapper */\nstatic PyObject *__pyx_pw_11pysobatools_5_mask_23frPyObjects(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/\nstatic PyMethodDef __pyx_mdef_11pysobatools_5_mask_23frPyObjects = {\"frPyObjects\", (PyCFunction)(void*)(PyCFunctionWithKeywords)__pyx_pw_11pysobatools_5_mask_23frPyObjects, METH_VARARGS|METH_KEYWORDS, 0};\nstatic PyObject *__pyx_pw_11pysobatools_5_mask_23frPyObjects(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) {\n  PyObject *__pyx_v_pyobj = 0;\n  PyObject *__pyx_v_h = 0;\n  PyObject *__pyx_v_w = 0;\n  PyObject *__pyx_r = 0;\n  __Pyx_RefNannyDeclarations\n  __Pyx_RefNannySetupContext(\"frPyObjects (wrapper)\", 0);\n  {\n    static PyObject **__pyx_pyargnames[] = {&__pyx_n_s_pyobj,&__pyx_n_s_h,&__pyx_n_s_w,0};\n    PyObject* values[3] = {0,0,0};\n    if (unlikely(__pyx_kwds)) {\n      Py_ssize_t kw_args;\n      const Py_ssize_t pos_args = PyTuple_GET_SIZE(__pyx_args);\n      switch (pos_args) {\n        case  3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2);\n        CYTHON_FALLTHROUGH;\n        case  2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1);\n        CYTHON_FALLTHROUGH;\n        case  1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0);\n        CYTHON_FALLTHROUGH;\n        case  0: break;\n        default: goto 
__pyx_L5_argtuple_error;\n      }\n      kw_args = PyDict_Size(__pyx_kwds);\n      switch (pos_args) {\n        case  0:\n        if (likely((values[0] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_pyobj)) != 0)) kw_args--;\n        else goto __pyx_L5_argtuple_error;\n        CYTHON_FALLTHROUGH;\n        case  1:\n        if (likely((values[1] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_h)) != 0)) kw_args--;\n        else {\n          __Pyx_RaiseArgtupleInvalid(\"frPyObjects\", 1, 3, 3, 1); __PYX_ERR(0, 288, __pyx_L3_error)\n        }\n        CYTHON_FALLTHROUGH;\n        case  2:\n        if (likely((values[2] = __Pyx_PyDict_GetItemStr(__pyx_kwds, __pyx_n_s_w)) != 0)) kw_args--;\n        else {\n          __Pyx_RaiseArgtupleInvalid(\"frPyObjects\", 1, 3, 3, 2); __PYX_ERR(0, 288, __pyx_L3_error)\n        }\n      }\n      if (unlikely(kw_args > 0)) {\n        if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, pos_args, \"frPyObjects\") < 0)) __PYX_ERR(0, 288, __pyx_L3_error)\n      }\n    } else if (PyTuple_GET_SIZE(__pyx_args) != 3) {\n      goto __pyx_L5_argtuple_error;\n    } else {\n      values[0] = PyTuple_GET_ITEM(__pyx_args, 0);\n      values[1] = PyTuple_GET_ITEM(__pyx_args, 1);\n      values[2] = PyTuple_GET_ITEM(__pyx_args, 2);\n    }\n    __pyx_v_pyobj = values[0];\n    __pyx_v_h = values[1];\n    __pyx_v_w = values[2];\n  }\n  goto __pyx_L4_argument_unpacking_done;\n  __pyx_L5_argtuple_error:;\n  __Pyx_RaiseArgtupleInvalid(\"frPyObjects\", 1, 3, 3, PyTuple_GET_SIZE(__pyx_args)); __PYX_ERR(0, 288, __pyx_L3_error)\n  __pyx_L3_error:;\n  __Pyx_AddTraceback(\"pysobatools._mask.frPyObjects\", __pyx_clineno, __pyx_lineno, __pyx_filename);\n  __Pyx_RefNannyFinishContext();\n  return NULL;\n  __pyx_L4_argument_unpacking_done:;\n  __pyx_r = __pyx_pf_11pysobatools_5_mask_22frPyObjects(__pyx_self, __pyx_v_pyobj, __pyx_v_h, __pyx_v_w);\n\n  /* function exit code */\n  __Pyx_RefNannyFinishContext();\n  return 
__pyx_r;\n}\n\nstatic PyObject *__pyx_pf_11pysobatools_5_mask_22frPyObjects(CYTHON_UNUSED PyObject *__pyx_self, PyObject *__pyx_v_pyobj, PyObject *__pyx_v_h, PyObject *__pyx_v_w) {\n  PyObject *__pyx_v_objs = NULL;\n  PyObject *__pyx_r = NULL;\n  __Pyx_RefNannyDeclarations\n  PyObject *__pyx_t_1 = NULL;\n  int __pyx_t_2;\n  PyObject *__pyx_t_3 = NULL;\n  PyObject *__pyx_t_4 = NULL;\n  int __pyx_t_5;\n  PyObject *__pyx_t_6 = NULL;\n  int __pyx_t_7;\n  Py_ssize_t __pyx_t_8;\n  int __pyx_t_9;\n  PyObject *__pyx_t_10 = NULL;\n  __Pyx_RefNannySetupContext(\"frPyObjects\", 0);\n\n  /* \"pysobatools/_mask.pyx\":290\n * def frPyObjects(pyobj, h, w):\n *     # encode rle from a list of python objects\n *     if type(pyobj) == np.ndarray:             # <<<<<<<<<<<<<<\n *         objs = frBbox(pyobj, h, w)\n *     elif type(pyobj) == list and len(pyobj[0]) == 4:\n */\n  __pyx_t_1 = PyObject_RichCompare(((PyObject *)Py_TYPE(__pyx_v_pyobj)), ((PyObject *)__pyx_ptype_5numpy_ndarray), Py_EQ); __Pyx_XGOTREF(__pyx_t_1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 290, __pyx_L1_error)\n  __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_2 < 0)) __PYX_ERR(0, 290, __pyx_L1_error)\n  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;\n  if (__pyx_t_2) {\n\n    /* \"pysobatools/_mask.pyx\":291\n *     # encode rle from a list of python objects\n *     if type(pyobj) == np.ndarray:\n *         objs = frBbox(pyobj, h, w)             # <<<<<<<<<<<<<<\n *     elif type(pyobj) == list and len(pyobj[0]) == 4:\n *         objs = frBbox(pyobj, h, w)\n */\n    __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_frBbox); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 291, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_3);\n    __pyx_t_4 = NULL;\n    __pyx_t_5 = 0;\n    if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) {\n      __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3);\n      if (likely(__pyx_t_4)) {\n        PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3);\n        __Pyx_INCREF(__pyx_t_4);\n 
       __Pyx_INCREF(function);\n        __Pyx_DECREF_SET(__pyx_t_3, function);\n        __pyx_t_5 = 1;\n      }\n    }\n    #if CYTHON_FAST_PYCALL\n    if (PyFunction_Check(__pyx_t_3)) {\n      PyObject *__pyx_temp[4] = {__pyx_t_4, __pyx_v_pyobj, __pyx_v_h, __pyx_v_w};\n      __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_5, 3+__pyx_t_5); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 291, __pyx_L1_error)\n      __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0;\n      __Pyx_GOTREF(__pyx_t_1);\n    } else\n    #endif\n    #if CYTHON_FAST_PYCCALL\n    if (__Pyx_PyFastCFunction_Check(__pyx_t_3)) {\n      PyObject *__pyx_temp[4] = {__pyx_t_4, __pyx_v_pyobj, __pyx_v_h, __pyx_v_w};\n      __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_5, 3+__pyx_t_5); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 291, __pyx_L1_error)\n      __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0;\n      __Pyx_GOTREF(__pyx_t_1);\n    } else\n    #endif\n    {\n      __pyx_t_6 = PyTuple_New(3+__pyx_t_5); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 291, __pyx_L1_error)\n      __Pyx_GOTREF(__pyx_t_6);\n      if (__pyx_t_4) {\n        __Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_t_4); __pyx_t_4 = NULL;\n      }\n      __Pyx_INCREF(__pyx_v_pyobj);\n      __Pyx_GIVEREF(__pyx_v_pyobj);\n      PyTuple_SET_ITEM(__pyx_t_6, 0+__pyx_t_5, __pyx_v_pyobj);\n      __Pyx_INCREF(__pyx_v_h);\n      __Pyx_GIVEREF(__pyx_v_h);\n      PyTuple_SET_ITEM(__pyx_t_6, 1+__pyx_t_5, __pyx_v_h);\n      __Pyx_INCREF(__pyx_v_w);\n      __Pyx_GIVEREF(__pyx_v_w);\n      PyTuple_SET_ITEM(__pyx_t_6, 2+__pyx_t_5, __pyx_v_w);\n      __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_6, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 291, __pyx_L1_error)\n      __Pyx_GOTREF(__pyx_t_1);\n      __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;\n    }\n    __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;\n    __pyx_v_objs = __pyx_t_1;\n    __pyx_t_1 = 0;\n\n    /* \"pysobatools/_mask.pyx\":290\n * def frPyObjects(pyobj, h, w):\n *   
  # encode rle from a list of python objects\n *     if type(pyobj) == np.ndarray:             # <<<<<<<<<<<<<<\n *         objs = frBbox(pyobj, h, w)\n *     elif type(pyobj) == list and len(pyobj[0]) == 4:\n */\n    goto __pyx_L3;\n  }\n\n  /* \"pysobatools/_mask.pyx\":292\n *     if type(pyobj) == np.ndarray:\n *         objs = frBbox(pyobj, h, w)\n *     elif type(pyobj) == list and len(pyobj[0]) == 4:             # <<<<<<<<<<<<<<\n *         objs = frBbox(pyobj, h, w)\n *     elif type(pyobj) == list and len(pyobj[0]) > 4:\n */\n  __pyx_t_1 = PyObject_RichCompare(((PyObject *)Py_TYPE(__pyx_v_pyobj)), ((PyObject *)(&PyList_Type)), Py_EQ); __Pyx_XGOTREF(__pyx_t_1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 292, __pyx_L1_error)\n  __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_7 < 0)) __PYX_ERR(0, 292, __pyx_L1_error)\n  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;\n  if (__pyx_t_7) {\n  } else {\n    __pyx_t_2 = __pyx_t_7;\n    goto __pyx_L4_bool_binop_done;\n  }\n  __pyx_t_1 = __Pyx_GetItemInt(__pyx_v_pyobj, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 292, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_1);\n  __pyx_t_8 = PyObject_Length(__pyx_t_1); if (unlikely(__pyx_t_8 == ((Py_ssize_t)-1))) __PYX_ERR(0, 292, __pyx_L1_error)\n  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;\n  __pyx_t_7 = ((__pyx_t_8 == 4) != 0);\n  __pyx_t_2 = __pyx_t_7;\n  __pyx_L4_bool_binop_done:;\n  if (__pyx_t_2) {\n\n    /* \"pysobatools/_mask.pyx\":293\n *         objs = frBbox(pyobj, h, w)\n *     elif type(pyobj) == list and len(pyobj[0]) == 4:\n *         objs = frBbox(pyobj, h, w)             # <<<<<<<<<<<<<<\n *     elif type(pyobj) == list and len(pyobj[0]) > 4:\n *         objs = frPoly(pyobj, h, w)\n */\n    __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_frBbox); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 293, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_3);\n    __pyx_t_6 = NULL;\n    __pyx_t_5 = 0;\n    if (CYTHON_UNPACK_METHODS && 
unlikely(PyMethod_Check(__pyx_t_3))) {\n      __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_3);\n      if (likely(__pyx_t_6)) {\n        PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3);\n        __Pyx_INCREF(__pyx_t_6);\n        __Pyx_INCREF(function);\n        __Pyx_DECREF_SET(__pyx_t_3, function);\n        __pyx_t_5 = 1;\n      }\n    }\n    #if CYTHON_FAST_PYCALL\n    if (PyFunction_Check(__pyx_t_3)) {\n      PyObject *__pyx_temp[4] = {__pyx_t_6, __pyx_v_pyobj, __pyx_v_h, __pyx_v_w};\n      __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_5, 3+__pyx_t_5); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 293, __pyx_L1_error)\n      __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0;\n      __Pyx_GOTREF(__pyx_t_1);\n    } else\n    #endif\n    #if CYTHON_FAST_PYCCALL\n    if (__Pyx_PyFastCFunction_Check(__pyx_t_3)) {\n      PyObject *__pyx_temp[4] = {__pyx_t_6, __pyx_v_pyobj, __pyx_v_h, __pyx_v_w};\n      __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_5, 3+__pyx_t_5); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 293, __pyx_L1_error)\n      __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0;\n      __Pyx_GOTREF(__pyx_t_1);\n    } else\n    #endif\n    {\n      __pyx_t_4 = PyTuple_New(3+__pyx_t_5); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 293, __pyx_L1_error)\n      __Pyx_GOTREF(__pyx_t_4);\n      if (__pyx_t_6) {\n        __Pyx_GIVEREF(__pyx_t_6); PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_6); __pyx_t_6 = NULL;\n      }\n      __Pyx_INCREF(__pyx_v_pyobj);\n      __Pyx_GIVEREF(__pyx_v_pyobj);\n      PyTuple_SET_ITEM(__pyx_t_4, 0+__pyx_t_5, __pyx_v_pyobj);\n      __Pyx_INCREF(__pyx_v_h);\n      __Pyx_GIVEREF(__pyx_v_h);\n      PyTuple_SET_ITEM(__pyx_t_4, 1+__pyx_t_5, __pyx_v_h);\n      __Pyx_INCREF(__pyx_v_w);\n      __Pyx_GIVEREF(__pyx_v_w);\n      PyTuple_SET_ITEM(__pyx_t_4, 2+__pyx_t_5, __pyx_v_w);\n      __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_4, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 293, __pyx_L1_error)\n      __Pyx_GOTREF(__pyx_t_1);\n      
__Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;\n    }\n    __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;\n    __pyx_v_objs = __pyx_t_1;\n    __pyx_t_1 = 0;\n\n    /* \"pysobatools/_mask.pyx\":292\n *     if type(pyobj) == np.ndarray:\n *         objs = frBbox(pyobj, h, w)\n *     elif type(pyobj) == list and len(pyobj[0]) == 4:             # <<<<<<<<<<<<<<\n *         objs = frBbox(pyobj, h, w)\n *     elif type(pyobj) == list and len(pyobj[0]) > 4:\n */\n    goto __pyx_L3;\n  }\n\n  /* \"pysobatools/_mask.pyx\":294\n *     elif type(pyobj) == list and len(pyobj[0]) == 4:\n *         objs = frBbox(pyobj, h, w)\n *     elif type(pyobj) == list and len(pyobj[0]) > 4:             # <<<<<<<<<<<<<<\n *         objs = frPoly(pyobj, h, w)\n *     elif type(pyobj) == list and type(pyobj[0]) == dict \\\n */\n  __pyx_t_1 = PyObject_RichCompare(((PyObject *)Py_TYPE(__pyx_v_pyobj)), ((PyObject *)(&PyList_Type)), Py_EQ); __Pyx_XGOTREF(__pyx_t_1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 294, __pyx_L1_error)\n  __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_7 < 0)) __PYX_ERR(0, 294, __pyx_L1_error)\n  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;\n  if (__pyx_t_7) {\n  } else {\n    __pyx_t_2 = __pyx_t_7;\n    goto __pyx_L6_bool_binop_done;\n  }\n  __pyx_t_1 = __Pyx_GetItemInt(__pyx_v_pyobj, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 294, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_1);\n  __pyx_t_8 = PyObject_Length(__pyx_t_1); if (unlikely(__pyx_t_8 == ((Py_ssize_t)-1))) __PYX_ERR(0, 294, __pyx_L1_error)\n  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;\n  __pyx_t_7 = ((__pyx_t_8 > 4) != 0);\n  __pyx_t_2 = __pyx_t_7;\n  __pyx_L6_bool_binop_done:;\n  if (__pyx_t_2) {\n\n    /* \"pysobatools/_mask.pyx\":295\n *         objs = frBbox(pyobj, h, w)\n *     elif type(pyobj) == list and len(pyobj[0]) > 4:\n *         objs = frPoly(pyobj, h, w)             # <<<<<<<<<<<<<<\n *     elif type(pyobj) == list and type(pyobj[0]) == dict \\\n *         and 
'counts' in pyobj[0] and 'size' in pyobj[0]:\n */\n    __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_frPoly); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 295, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_3);\n    __pyx_t_4 = NULL;\n    __pyx_t_5 = 0;\n    if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) {\n      __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3);\n      if (likely(__pyx_t_4)) {\n        PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3);\n        __Pyx_INCREF(__pyx_t_4);\n        __Pyx_INCREF(function);\n        __Pyx_DECREF_SET(__pyx_t_3, function);\n        __pyx_t_5 = 1;\n      }\n    }\n    #if CYTHON_FAST_PYCALL\n    if (PyFunction_Check(__pyx_t_3)) {\n      PyObject *__pyx_temp[4] = {__pyx_t_4, __pyx_v_pyobj, __pyx_v_h, __pyx_v_w};\n      __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_5, 3+__pyx_t_5); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 295, __pyx_L1_error)\n      __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0;\n      __Pyx_GOTREF(__pyx_t_1);\n    } else\n    #endif\n    #if CYTHON_FAST_PYCCALL\n    if (__Pyx_PyFastCFunction_Check(__pyx_t_3)) {\n      PyObject *__pyx_temp[4] = {__pyx_t_4, __pyx_v_pyobj, __pyx_v_h, __pyx_v_w};\n      __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_5, 3+__pyx_t_5); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 295, __pyx_L1_error)\n      __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0;\n      __Pyx_GOTREF(__pyx_t_1);\n    } else\n    #endif\n    {\n      __pyx_t_6 = PyTuple_New(3+__pyx_t_5); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 295, __pyx_L1_error)\n      __Pyx_GOTREF(__pyx_t_6);\n      if (__pyx_t_4) {\n        __Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_t_4); __pyx_t_4 = NULL;\n      }\n      __Pyx_INCREF(__pyx_v_pyobj);\n      __Pyx_GIVEREF(__pyx_v_pyobj);\n      PyTuple_SET_ITEM(__pyx_t_6, 0+__pyx_t_5, __pyx_v_pyobj);\n      __Pyx_INCREF(__pyx_v_h);\n      __Pyx_GIVEREF(__pyx_v_h);\n      PyTuple_SET_ITEM(__pyx_t_6, 1+__pyx_t_5, __pyx_v_h);\n      
__Pyx_INCREF(__pyx_v_w);\n      __Pyx_GIVEREF(__pyx_v_w);\n      PyTuple_SET_ITEM(__pyx_t_6, 2+__pyx_t_5, __pyx_v_w);\n      __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_6, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 295, __pyx_L1_error)\n      __Pyx_GOTREF(__pyx_t_1);\n      __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;\n    }\n    __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;\n    __pyx_v_objs = __pyx_t_1;\n    __pyx_t_1 = 0;\n\n    /* \"pysobatools/_mask.pyx\":294\n *     elif type(pyobj) == list and len(pyobj[0]) == 4:\n *         objs = frBbox(pyobj, h, w)\n *     elif type(pyobj) == list and len(pyobj[0]) > 4:             # <<<<<<<<<<<<<<\n *         objs = frPoly(pyobj, h, w)\n *     elif type(pyobj) == list and type(pyobj[0]) == dict \\\n */\n    goto __pyx_L3;\n  }\n\n  /* \"pysobatools/_mask.pyx\":296\n *     elif type(pyobj) == list and len(pyobj[0]) > 4:\n *         objs = frPoly(pyobj, h, w)\n *     elif type(pyobj) == list and type(pyobj[0]) == dict \\             # <<<<<<<<<<<<<<\n *         and 'counts' in pyobj[0] and 'size' in pyobj[0]:\n *         objs = frUncompressedRLE(pyobj, h, w)\n */\n  __pyx_t_1 = PyObject_RichCompare(((PyObject *)Py_TYPE(__pyx_v_pyobj)), ((PyObject *)(&PyList_Type)), Py_EQ); __Pyx_XGOTREF(__pyx_t_1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 296, __pyx_L1_error)\n  __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_7 < 0)) __PYX_ERR(0, 296, __pyx_L1_error)\n  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;\n  if (__pyx_t_7) {\n  } else {\n    __pyx_t_2 = __pyx_t_7;\n    goto __pyx_L8_bool_binop_done;\n  }\n\n  /* \"pysobatools/_mask.pyx\":297\n *         objs = frPoly(pyobj, h, w)\n *     elif type(pyobj) == list and type(pyobj[0]) == dict \\\n *         and 'counts' in pyobj[0] and 'size' in pyobj[0]:             # <<<<<<<<<<<<<<\n *         objs = frUncompressedRLE(pyobj, h, w)\n *     # encode rle from single python object\n */\n  __pyx_t_1 = __Pyx_GetItemInt(__pyx_v_pyobj, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); 
if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 296, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_1);\n\n  /* \"pysobatools/_mask.pyx\":296\n *     elif type(pyobj) == list and len(pyobj[0]) > 4:\n *         objs = frPoly(pyobj, h, w)\n *     elif type(pyobj) == list and type(pyobj[0]) == dict \\             # <<<<<<<<<<<<<<\n *         and 'counts' in pyobj[0] and 'size' in pyobj[0]:\n *         objs = frUncompressedRLE(pyobj, h, w)\n */\n  __pyx_t_3 = PyObject_RichCompare(((PyObject *)Py_TYPE(__pyx_t_1)), ((PyObject *)(&PyDict_Type)), Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 296, __pyx_L1_error)\n  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;\n  __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_7 < 0)) __PYX_ERR(0, 296, __pyx_L1_error)\n  __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;\n  if (__pyx_t_7) {\n  } else {\n    __pyx_t_2 = __pyx_t_7;\n    goto __pyx_L8_bool_binop_done;\n  }\n\n  /* \"pysobatools/_mask.pyx\":297\n *         objs = frPoly(pyobj, h, w)\n *     elif type(pyobj) == list and type(pyobj[0]) == dict \\\n *         and 'counts' in pyobj[0] and 'size' in pyobj[0]:             # <<<<<<<<<<<<<<\n *         objs = frUncompressedRLE(pyobj, h, w)\n *     # encode rle from single python object\n */\n  __pyx_t_3 = __Pyx_GetItemInt(__pyx_v_pyobj, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 297, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_3);\n  __pyx_t_7 = (__Pyx_PySequence_ContainsTF(__pyx_n_s_counts, __pyx_t_3, Py_EQ)); if (unlikely(__pyx_t_7 < 0)) __PYX_ERR(0, 297, __pyx_L1_error)\n  __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;\n  __pyx_t_9 = (__pyx_t_7 != 0);\n  if (__pyx_t_9) {\n  } else {\n    __pyx_t_2 = __pyx_t_9;\n    goto __pyx_L8_bool_binop_done;\n  }\n  __pyx_t_3 = __Pyx_GetItemInt(__pyx_v_pyobj, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 297, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_3);\n  __pyx_t_9 = (__Pyx_PySequence_ContainsTF(__pyx_n_s_size, 
__pyx_t_3, Py_EQ)); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 297, __pyx_L1_error)\n  __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;\n  __pyx_t_7 = (__pyx_t_9 != 0);\n  __pyx_t_2 = __pyx_t_7;\n  __pyx_L8_bool_binop_done:;\n\n  /* \"pysobatools/_mask.pyx\":296\n *     elif type(pyobj) == list and len(pyobj[0]) > 4:\n *         objs = frPoly(pyobj, h, w)\n *     elif type(pyobj) == list and type(pyobj[0]) == dict \\             # <<<<<<<<<<<<<<\n *         and 'counts' in pyobj[0] and 'size' in pyobj[0]:\n *         objs = frUncompressedRLE(pyobj, h, w)\n */\n  if (__pyx_t_2) {\n\n    /* \"pysobatools/_mask.pyx\":298\n *     elif type(pyobj) == list and type(pyobj[0]) == dict \\\n *         and 'counts' in pyobj[0] and 'size' in pyobj[0]:\n *         objs = frUncompressedRLE(pyobj, h, w)             # <<<<<<<<<<<<<<\n *     # encode rle from single python object\n *     elif type(pyobj) == list and len(pyobj) == 4:\n */\n    __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_frUncompressedRLE); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 298, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_1);\n    __pyx_t_6 = NULL;\n    __pyx_t_5 = 0;\n    if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_1))) {\n      __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_1);\n      if (likely(__pyx_t_6)) {\n        PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1);\n        __Pyx_INCREF(__pyx_t_6);\n        __Pyx_INCREF(function);\n        __Pyx_DECREF_SET(__pyx_t_1, function);\n        __pyx_t_5 = 1;\n      }\n    }\n    #if CYTHON_FAST_PYCALL\n    if (PyFunction_Check(__pyx_t_1)) {\n      PyObject *__pyx_temp[4] = {__pyx_t_6, __pyx_v_pyobj, __pyx_v_h, __pyx_v_w};\n      __pyx_t_3 = __Pyx_PyFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_5, 3+__pyx_t_5); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 298, __pyx_L1_error)\n      __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0;\n      __Pyx_GOTREF(__pyx_t_3);\n    } else\n    #endif\n    #if CYTHON_FAST_PYCCALL\n    if (__Pyx_PyFastCFunction_Check(__pyx_t_1)) {\n      
PyObject *__pyx_temp[4] = {__pyx_t_6, __pyx_v_pyobj, __pyx_v_h, __pyx_v_w};\n      __pyx_t_3 = __Pyx_PyCFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_5, 3+__pyx_t_5); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 298, __pyx_L1_error)\n      __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0;\n      __Pyx_GOTREF(__pyx_t_3);\n    } else\n    #endif\n    {\n      __pyx_t_4 = PyTuple_New(3+__pyx_t_5); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 298, __pyx_L1_error)\n      __Pyx_GOTREF(__pyx_t_4);\n      if (__pyx_t_6) {\n        __Pyx_GIVEREF(__pyx_t_6); PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_6); __pyx_t_6 = NULL;\n      }\n      __Pyx_INCREF(__pyx_v_pyobj);\n      __Pyx_GIVEREF(__pyx_v_pyobj);\n      PyTuple_SET_ITEM(__pyx_t_4, 0+__pyx_t_5, __pyx_v_pyobj);\n      __Pyx_INCREF(__pyx_v_h);\n      __Pyx_GIVEREF(__pyx_v_h);\n      PyTuple_SET_ITEM(__pyx_t_4, 1+__pyx_t_5, __pyx_v_h);\n      __Pyx_INCREF(__pyx_v_w);\n      __Pyx_GIVEREF(__pyx_v_w);\n      PyTuple_SET_ITEM(__pyx_t_4, 2+__pyx_t_5, __pyx_v_w);\n      __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_4, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 298, __pyx_L1_error)\n      __Pyx_GOTREF(__pyx_t_3);\n      __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;\n    }\n    __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;\n    __pyx_v_objs = __pyx_t_3;\n    __pyx_t_3 = 0;\n\n    /* \"pysobatools/_mask.pyx\":296\n *     elif type(pyobj) == list and len(pyobj[0]) > 4:\n *         objs = frPoly(pyobj, h, w)\n *     elif type(pyobj) == list and type(pyobj[0]) == dict \\             # <<<<<<<<<<<<<<\n *         and 'counts' in pyobj[0] and 'size' in pyobj[0]:\n *         objs = frUncompressedRLE(pyobj, h, w)\n */\n    goto __pyx_L3;\n  }\n\n  /* \"pysobatools/_mask.pyx\":300\n *         objs = frUncompressedRLE(pyobj, h, w)\n *     # encode rle from single python object\n *     elif type(pyobj) == list and len(pyobj) == 4:             # <<<<<<<<<<<<<<\n *         objs = frBbox([pyobj], h, w)[0]\n *     elif type(pyobj) == list and len(pyobj) > 4:\n */\n  
__pyx_t_3 = PyObject_RichCompare(((PyObject *)Py_TYPE(__pyx_v_pyobj)), ((PyObject *)(&PyList_Type)), Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 300, __pyx_L1_error)\n  __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_7 < 0)) __PYX_ERR(0, 300, __pyx_L1_error)\n  __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;\n  if (__pyx_t_7) {\n  } else {\n    __pyx_t_2 = __pyx_t_7;\n    goto __pyx_L12_bool_binop_done;\n  }\n  __pyx_t_8 = PyObject_Length(__pyx_v_pyobj); if (unlikely(__pyx_t_8 == ((Py_ssize_t)-1))) __PYX_ERR(0, 300, __pyx_L1_error)\n  __pyx_t_7 = ((__pyx_t_8 == 4) != 0);\n  __pyx_t_2 = __pyx_t_7;\n  __pyx_L12_bool_binop_done:;\n  if (__pyx_t_2) {\n\n    /* \"pysobatools/_mask.pyx\":301\n *     # encode rle from single python object\n *     elif type(pyobj) == list and len(pyobj) == 4:\n *         objs = frBbox([pyobj], h, w)[0]             # <<<<<<<<<<<<<<\n *     elif type(pyobj) == list and len(pyobj) > 4:\n *         objs = frPoly([pyobj], h, w)[0]\n */\n    __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_frBbox); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 301, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_1);\n    __pyx_t_4 = PyList_New(1); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 301, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_4);\n    __Pyx_INCREF(__pyx_v_pyobj);\n    __Pyx_GIVEREF(__pyx_v_pyobj);\n    PyList_SET_ITEM(__pyx_t_4, 0, __pyx_v_pyobj);\n    __pyx_t_6 = NULL;\n    __pyx_t_5 = 0;\n    if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_1))) {\n      __pyx_t_6 = PyMethod_GET_SELF(__pyx_t_1);\n      if (likely(__pyx_t_6)) {\n        PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1);\n        __Pyx_INCREF(__pyx_t_6);\n        __Pyx_INCREF(function);\n        __Pyx_DECREF_SET(__pyx_t_1, function);\n        __pyx_t_5 = 1;\n      }\n    }\n    #if CYTHON_FAST_PYCALL\n    if (PyFunction_Check(__pyx_t_1)) {\n      PyObject *__pyx_temp[4] = {__pyx_t_6, __pyx_t_4, __pyx_v_h, __pyx_v_w};\n      __pyx_t_3 = 
__Pyx_PyFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_5, 3+__pyx_t_5); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 301, __pyx_L1_error)\n      __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0;\n      __Pyx_GOTREF(__pyx_t_3);\n      __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;\n    } else\n    #endif\n    #if CYTHON_FAST_PYCCALL\n    if (__Pyx_PyFastCFunction_Check(__pyx_t_1)) {\n      PyObject *__pyx_temp[4] = {__pyx_t_6, __pyx_t_4, __pyx_v_h, __pyx_v_w};\n      __pyx_t_3 = __Pyx_PyCFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_5, 3+__pyx_t_5); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 301, __pyx_L1_error)\n      __Pyx_XDECREF(__pyx_t_6); __pyx_t_6 = 0;\n      __Pyx_GOTREF(__pyx_t_3);\n      __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;\n    } else\n    #endif\n    {\n      __pyx_t_10 = PyTuple_New(3+__pyx_t_5); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 301, __pyx_L1_error)\n      __Pyx_GOTREF(__pyx_t_10);\n      if (__pyx_t_6) {\n        __Pyx_GIVEREF(__pyx_t_6); PyTuple_SET_ITEM(__pyx_t_10, 0, __pyx_t_6); __pyx_t_6 = NULL;\n      }\n      __Pyx_GIVEREF(__pyx_t_4);\n      PyTuple_SET_ITEM(__pyx_t_10, 0+__pyx_t_5, __pyx_t_4);\n      __Pyx_INCREF(__pyx_v_h);\n      __Pyx_GIVEREF(__pyx_v_h);\n      PyTuple_SET_ITEM(__pyx_t_10, 1+__pyx_t_5, __pyx_v_h);\n      __Pyx_INCREF(__pyx_v_w);\n      __Pyx_GIVEREF(__pyx_v_w);\n      PyTuple_SET_ITEM(__pyx_t_10, 2+__pyx_t_5, __pyx_v_w);\n      __pyx_t_4 = 0;\n      __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_10, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 301, __pyx_L1_error)\n      __Pyx_GOTREF(__pyx_t_3);\n      __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0;\n    }\n    __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;\n    __pyx_t_1 = __Pyx_GetItemInt(__pyx_t_3, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 301, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_1);\n    __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;\n    __pyx_v_objs = __pyx_t_1;\n    __pyx_t_1 = 0;\n\n    /* \"pysobatools/_mask.pyx\":300\n *         objs = 
frUncompressedRLE(pyobj, h, w)\n *     # encode rle from single python object\n *     elif type(pyobj) == list and len(pyobj) == 4:             # <<<<<<<<<<<<<<\n *         objs = frBbox([pyobj], h, w)[0]\n *     elif type(pyobj) == list and len(pyobj) > 4:\n */\n    goto __pyx_L3;\n  }\n\n  /* \"pysobatools/_mask.pyx\":302\n *     elif type(pyobj) == list and len(pyobj) == 4:\n *         objs = frBbox([pyobj], h, w)[0]\n *     elif type(pyobj) == list and len(pyobj) > 4:             # <<<<<<<<<<<<<<\n *         objs = frPoly([pyobj], h, w)[0]\n *     elif type(pyobj) == dict and 'counts' in pyobj and 'size' in pyobj:\n */\n  __pyx_t_1 = PyObject_RichCompare(((PyObject *)Py_TYPE(__pyx_v_pyobj)), ((PyObject *)(&PyList_Type)), Py_EQ); __Pyx_XGOTREF(__pyx_t_1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 302, __pyx_L1_error)\n  __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_7 < 0)) __PYX_ERR(0, 302, __pyx_L1_error)\n  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;\n  if (__pyx_t_7) {\n  } else {\n    __pyx_t_2 = __pyx_t_7;\n    goto __pyx_L14_bool_binop_done;\n  }\n  __pyx_t_8 = PyObject_Length(__pyx_v_pyobj); if (unlikely(__pyx_t_8 == ((Py_ssize_t)-1))) __PYX_ERR(0, 302, __pyx_L1_error)\n  __pyx_t_7 = ((__pyx_t_8 > 4) != 0);\n  __pyx_t_2 = __pyx_t_7;\n  __pyx_L14_bool_binop_done:;\n  if (__pyx_t_2) {\n\n    /* \"pysobatools/_mask.pyx\":303\n *         objs = frBbox([pyobj], h, w)[0]\n *     elif type(pyobj) == list and len(pyobj) > 4:\n *         objs = frPoly([pyobj], h, w)[0]             # <<<<<<<<<<<<<<\n *     elif type(pyobj) == dict and 'counts' in pyobj and 'size' in pyobj:\n *         objs = frUncompressedRLE([pyobj], h, w)[0]\n */\n    __Pyx_GetModuleGlobalName(__pyx_t_3, __pyx_n_s_frPoly); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 303, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_3);\n    __pyx_t_10 = PyList_New(1); if (unlikely(!__pyx_t_10)) __PYX_ERR(0, 303, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_10);\n    __Pyx_INCREF(__pyx_v_pyobj);\n    
__Pyx_GIVEREF(__pyx_v_pyobj);\n    PyList_SET_ITEM(__pyx_t_10, 0, __pyx_v_pyobj);\n    __pyx_t_4 = NULL;\n    __pyx_t_5 = 0;\n    if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_3))) {\n      __pyx_t_4 = PyMethod_GET_SELF(__pyx_t_3);\n      if (likely(__pyx_t_4)) {\n        PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_3);\n        __Pyx_INCREF(__pyx_t_4);\n        __Pyx_INCREF(function);\n        __Pyx_DECREF_SET(__pyx_t_3, function);\n        __pyx_t_5 = 1;\n      }\n    }\n    #if CYTHON_FAST_PYCALL\n    if (PyFunction_Check(__pyx_t_3)) {\n      PyObject *__pyx_temp[4] = {__pyx_t_4, __pyx_t_10, __pyx_v_h, __pyx_v_w};\n      __pyx_t_1 = __Pyx_PyFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_5, 3+__pyx_t_5); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 303, __pyx_L1_error)\n      __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0;\n      __Pyx_GOTREF(__pyx_t_1);\n      __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0;\n    } else\n    #endif\n    #if CYTHON_FAST_PYCCALL\n    if (__Pyx_PyFastCFunction_Check(__pyx_t_3)) {\n      PyObject *__pyx_temp[4] = {__pyx_t_4, __pyx_t_10, __pyx_v_h, __pyx_v_w};\n      __pyx_t_1 = __Pyx_PyCFunction_FastCall(__pyx_t_3, __pyx_temp+1-__pyx_t_5, 3+__pyx_t_5); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 303, __pyx_L1_error)\n      __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0;\n      __Pyx_GOTREF(__pyx_t_1);\n      __Pyx_DECREF(__pyx_t_10); __pyx_t_10 = 0;\n    } else\n    #endif\n    {\n      __pyx_t_6 = PyTuple_New(3+__pyx_t_5); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 303, __pyx_L1_error)\n      __Pyx_GOTREF(__pyx_t_6);\n      if (__pyx_t_4) {\n        __Pyx_GIVEREF(__pyx_t_4); PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_t_4); __pyx_t_4 = NULL;\n      }\n      __Pyx_GIVEREF(__pyx_t_10);\n      PyTuple_SET_ITEM(__pyx_t_6, 0+__pyx_t_5, __pyx_t_10);\n      __Pyx_INCREF(__pyx_v_h);\n      __Pyx_GIVEREF(__pyx_v_h);\n      PyTuple_SET_ITEM(__pyx_t_6, 1+__pyx_t_5, __pyx_v_h);\n      __Pyx_INCREF(__pyx_v_w);\n      __Pyx_GIVEREF(__pyx_v_w);\n      
PyTuple_SET_ITEM(__pyx_t_6, 2+__pyx_t_5, __pyx_v_w);\n      __pyx_t_10 = 0;\n      __pyx_t_1 = __Pyx_PyObject_Call(__pyx_t_3, __pyx_t_6, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 303, __pyx_L1_error)\n      __Pyx_GOTREF(__pyx_t_1);\n      __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;\n    }\n    __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;\n    __pyx_t_3 = __Pyx_GetItemInt(__pyx_t_1, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 303, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_3);\n    __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;\n    __pyx_v_objs = __pyx_t_3;\n    __pyx_t_3 = 0;\n\n    /* \"pysobatools/_mask.pyx\":302\n *     elif type(pyobj) == list and len(pyobj) == 4:\n *         objs = frBbox([pyobj], h, w)[0]\n *     elif type(pyobj) == list and len(pyobj) > 4:             # <<<<<<<<<<<<<<\n *         objs = frPoly([pyobj], h, w)[0]\n *     elif type(pyobj) == dict and 'counts' in pyobj and 'size' in pyobj:\n */\n    goto __pyx_L3;\n  }\n\n  /* \"pysobatools/_mask.pyx\":304\n *     elif type(pyobj) == list and len(pyobj) > 4:\n *         objs = frPoly([pyobj], h, w)[0]\n *     elif type(pyobj) == dict and 'counts' in pyobj and 'size' in pyobj:             # <<<<<<<<<<<<<<\n *         objs = frUncompressedRLE([pyobj], h, w)[0]\n *     else:\n */\n  __pyx_t_3 = PyObject_RichCompare(((PyObject *)Py_TYPE(__pyx_v_pyobj)), ((PyObject *)(&PyDict_Type)), Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 304, __pyx_L1_error)\n  __pyx_t_7 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_7 < 0)) __PYX_ERR(0, 304, __pyx_L1_error)\n  __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;\n  if (__pyx_t_7) {\n  } else {\n    __pyx_t_2 = __pyx_t_7;\n    goto __pyx_L16_bool_binop_done;\n  }\n  __pyx_t_7 = (__Pyx_PySequence_ContainsTF(__pyx_n_s_counts, __pyx_v_pyobj, Py_EQ)); if (unlikely(__pyx_t_7 < 0)) __PYX_ERR(0, 304, __pyx_L1_error)\n  __pyx_t_9 = (__pyx_t_7 != 0);\n  if (__pyx_t_9) {\n  } else {\n    __pyx_t_2 = __pyx_t_9;\n    
goto __pyx_L16_bool_binop_done;\n  }\n  __pyx_t_9 = (__Pyx_PySequence_ContainsTF(__pyx_n_s_size, __pyx_v_pyobj, Py_EQ)); if (unlikely(__pyx_t_9 < 0)) __PYX_ERR(0, 304, __pyx_L1_error)\n  __pyx_t_7 = (__pyx_t_9 != 0);\n  __pyx_t_2 = __pyx_t_7;\n  __pyx_L16_bool_binop_done:;\n  if (likely(__pyx_t_2)) {\n\n    /* \"pysobatools/_mask.pyx\":305\n *         objs = frPoly([pyobj], h, w)[0]\n *     elif type(pyobj) == dict and 'counts' in pyobj and 'size' in pyobj:\n *         objs = frUncompressedRLE([pyobj], h, w)[0]             # <<<<<<<<<<<<<<\n *     else:\n *         raise Exception('input type is not supported.')\n */\n    __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_frUncompressedRLE); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 305, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_1);\n    __pyx_t_6 = PyList_New(1); if (unlikely(!__pyx_t_6)) __PYX_ERR(0, 305, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_6);\n    __Pyx_INCREF(__pyx_v_pyobj);\n    __Pyx_GIVEREF(__pyx_v_pyobj);\n    PyList_SET_ITEM(__pyx_t_6, 0, __pyx_v_pyobj);\n    __pyx_t_10 = NULL;\n    __pyx_t_5 = 0;\n    if (CYTHON_UNPACK_METHODS && unlikely(PyMethod_Check(__pyx_t_1))) {\n      __pyx_t_10 = PyMethod_GET_SELF(__pyx_t_1);\n      if (likely(__pyx_t_10)) {\n        PyObject* function = PyMethod_GET_FUNCTION(__pyx_t_1);\n        __Pyx_INCREF(__pyx_t_10);\n        __Pyx_INCREF(function);\n        __Pyx_DECREF_SET(__pyx_t_1, function);\n        __pyx_t_5 = 1;\n      }\n    }\n    #if CYTHON_FAST_PYCALL\n    if (PyFunction_Check(__pyx_t_1)) {\n      PyObject *__pyx_temp[4] = {__pyx_t_10, __pyx_t_6, __pyx_v_h, __pyx_v_w};\n      __pyx_t_3 = __Pyx_PyFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_5, 3+__pyx_t_5); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 305, __pyx_L1_error)\n      __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0;\n      __Pyx_GOTREF(__pyx_t_3);\n      __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;\n    } else\n    #endif\n    #if CYTHON_FAST_PYCCALL\n    if (__Pyx_PyFastCFunction_Check(__pyx_t_1)) {\n      PyObject 
*__pyx_temp[4] = {__pyx_t_10, __pyx_t_6, __pyx_v_h, __pyx_v_w};\n      __pyx_t_3 = __Pyx_PyCFunction_FastCall(__pyx_t_1, __pyx_temp+1-__pyx_t_5, 3+__pyx_t_5); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 305, __pyx_L1_error)\n      __Pyx_XDECREF(__pyx_t_10); __pyx_t_10 = 0;\n      __Pyx_GOTREF(__pyx_t_3);\n      __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;\n    } else\n    #endif\n    {\n      __pyx_t_4 = PyTuple_New(3+__pyx_t_5); if (unlikely(!__pyx_t_4)) __PYX_ERR(0, 305, __pyx_L1_error)\n      __Pyx_GOTREF(__pyx_t_4);\n      if (__pyx_t_10) {\n        __Pyx_GIVEREF(__pyx_t_10); PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_10); __pyx_t_10 = NULL;\n      }\n      __Pyx_GIVEREF(__pyx_t_6);\n      PyTuple_SET_ITEM(__pyx_t_4, 0+__pyx_t_5, __pyx_t_6);\n      __Pyx_INCREF(__pyx_v_h);\n      __Pyx_GIVEREF(__pyx_v_h);\n      PyTuple_SET_ITEM(__pyx_t_4, 1+__pyx_t_5, __pyx_v_h);\n      __Pyx_INCREF(__pyx_v_w);\n      __Pyx_GIVEREF(__pyx_v_w);\n      PyTuple_SET_ITEM(__pyx_t_4, 2+__pyx_t_5, __pyx_v_w);\n      __pyx_t_6 = 0;\n      __pyx_t_3 = __Pyx_PyObject_Call(__pyx_t_1, __pyx_t_4, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(0, 305, __pyx_L1_error)\n      __Pyx_GOTREF(__pyx_t_3);\n      __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;\n    }\n    __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;\n    __pyx_t_1 = __Pyx_GetItemInt(__pyx_t_3, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 305, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_1);\n    __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;\n    __pyx_v_objs = __pyx_t_1;\n    __pyx_t_1 = 0;\n\n    /* \"pysobatools/_mask.pyx\":304\n *     elif type(pyobj) == list and len(pyobj) > 4:\n *         objs = frPoly([pyobj], h, w)[0]\n *     elif type(pyobj) == dict and 'counts' in pyobj and 'size' in pyobj:             # <<<<<<<<<<<<<<\n *         objs = frUncompressedRLE([pyobj], h, w)[0]\n *     else:\n */\n    goto __pyx_L3;\n  }\n\n  /* \"pysobatools/_mask.pyx\":307\n *         objs = frUncompressedRLE([pyobj], h, w)[0]\n *     else:\n 
*         raise Exception('input type is not supported.')             # <<<<<<<<<<<<<<\n *     return objs\n */\n  /*else*/ {\n    __pyx_t_1 = __Pyx_PyObject_Call(((PyObject *)(&((PyTypeObject*)PyExc_Exception)[0])), __pyx_tuple__20, NULL); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 307, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_1);\n    __Pyx_Raise(__pyx_t_1, 0, 0, 0);\n    __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;\n    __PYX_ERR(0, 307, __pyx_L1_error)\n  }\n  __pyx_L3:;\n\n  /* \"pysobatools/_mask.pyx\":308\n *     else:\n *         raise Exception('input type is not supported.')\n *     return objs             # <<<<<<<<<<<<<<\n */\n  __Pyx_XDECREF(__pyx_r);\n  __Pyx_INCREF(__pyx_v_objs);\n  __pyx_r = __pyx_v_objs;\n  goto __pyx_L0;\n\n  /* \"pysobatools/_mask.pyx\":288\n *     return objs\n * \n * def frPyObjects(pyobj, h, w):             # <<<<<<<<<<<<<<\n *     # encode rle from a list of python objects\n *     if type(pyobj) == np.ndarray:\n */\n\n  /* function exit code */\n  __pyx_L1_error:;\n  __Pyx_XDECREF(__pyx_t_1);\n  __Pyx_XDECREF(__pyx_t_3);\n  __Pyx_XDECREF(__pyx_t_4);\n  __Pyx_XDECREF(__pyx_t_6);\n  __Pyx_XDECREF(__pyx_t_10);\n  __Pyx_AddTraceback(\"pysobatools._mask.frPyObjects\", __pyx_clineno, __pyx_lineno, __pyx_filename);\n  __pyx_r = NULL;\n  __pyx_L0:;\n  __Pyx_XDECREF(__pyx_v_objs);\n  __Pyx_XGIVEREF(__pyx_r);\n  __Pyx_RefNannyFinishContext();\n  return __pyx_r;\n}\n\n/* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":258\n *         # experimental exception made for __getbuffer__ and __releasebuffer__\n *         # -- the details of this may change.\n *         def __getbuffer__(ndarray self, Py_buffer* info, int flags):             # <<<<<<<<<<<<<<\n *             # This implementation of getbuffer is geared towards Cython\n *             # requirements, and does not yet fulfill the PEP.\n */\n\n/* Python wrapper */\nstatic CYTHON_UNUSED int __pyx_pw_5numpy_7ndarray_1__getbuffer__(PyObject 
*__pyx_v_self, Py_buffer *__pyx_v_info, int __pyx_v_flags); /*proto*/\nstatic CYTHON_UNUSED int __pyx_pw_5numpy_7ndarray_1__getbuffer__(PyObject *__pyx_v_self, Py_buffer *__pyx_v_info, int __pyx_v_flags) {\n  int __pyx_r;\n  __Pyx_RefNannyDeclarations\n  __Pyx_RefNannySetupContext(\"__getbuffer__ (wrapper)\", 0);\n  __pyx_r = __pyx_pf_5numpy_7ndarray___getbuffer__(((PyArrayObject *)__pyx_v_self), ((Py_buffer *)__pyx_v_info), ((int)__pyx_v_flags));\n\n  /* function exit code */\n  __Pyx_RefNannyFinishContext();\n  return __pyx_r;\n}\n\nstatic int __pyx_pf_5numpy_7ndarray___getbuffer__(PyArrayObject *__pyx_v_self, Py_buffer *__pyx_v_info, int __pyx_v_flags) {\n  int __pyx_v_i;\n  int __pyx_v_ndim;\n  int __pyx_v_endian_detector;\n  int __pyx_v_little_endian;\n  int __pyx_v_t;\n  char *__pyx_v_f;\n  PyArray_Descr *__pyx_v_descr = 0;\n  int __pyx_v_offset;\n  int __pyx_r;\n  __Pyx_RefNannyDeclarations\n  int __pyx_t_1;\n  int __pyx_t_2;\n  PyObject *__pyx_t_3 = NULL;\n  int __pyx_t_4;\n  int __pyx_t_5;\n  int __pyx_t_6;\n  PyArray_Descr *__pyx_t_7;\n  PyObject *__pyx_t_8 = NULL;\n  char *__pyx_t_9;\n  if (__pyx_v_info == NULL) {\n    PyErr_SetString(PyExc_BufferError, \"PyObject_GetBuffer: view==NULL argument is obsolete\");\n    return -1;\n  }\n  __Pyx_RefNannySetupContext(\"__getbuffer__\", 0);\n  __pyx_v_info->obj = Py_None; __Pyx_INCREF(Py_None);\n  __Pyx_GIVEREF(__pyx_v_info->obj);\n\n  /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":265\n * \n *             cdef int i, ndim\n *             cdef int endian_detector = 1             # <<<<<<<<<<<<<<\n *             cdef bint little_endian = ((<char*>&endian_detector)[0] != 0)\n * \n */\n  __pyx_v_endian_detector = 1;\n\n  /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":266\n *             cdef int i, ndim\n *             cdef int endian_detector = 1\n *             cdef bint little_endian = ((<char*>&endian_detector)[0] 
!= 0)             # <<<<<<<<<<<<<<\n * \n *             ndim = PyArray_NDIM(self)\n */\n  __pyx_v_little_endian = ((((char *)(&__pyx_v_endian_detector))[0]) != 0);\n\n  /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":268\n *             cdef bint little_endian = ((<char*>&endian_detector)[0] != 0)\n * \n *             ndim = PyArray_NDIM(self)             # <<<<<<<<<<<<<<\n * \n *             if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS)\n */\n  __pyx_v_ndim = PyArray_NDIM(__pyx_v_self);\n\n  /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":270\n *             ndim = PyArray_NDIM(self)\n * \n *             if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS)             # <<<<<<<<<<<<<<\n *                 and not PyArray_CHKFLAGS(self, NPY_ARRAY_C_CONTIGUOUS)):\n *                 raise ValueError(u\"ndarray is not C contiguous\")\n */\n  __pyx_t_2 = (((__pyx_v_flags & PyBUF_C_CONTIGUOUS) == PyBUF_C_CONTIGUOUS) != 0);\n  if (__pyx_t_2) {\n  } else {\n    __pyx_t_1 = __pyx_t_2;\n    goto __pyx_L4_bool_binop_done;\n  }\n\n  /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":271\n * \n *             if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS)\n *                 and not PyArray_CHKFLAGS(self, NPY_ARRAY_C_CONTIGUOUS)):             # <<<<<<<<<<<<<<\n *                 raise ValueError(u\"ndarray is not C contiguous\")\n * \n */\n  __pyx_t_2 = ((!(PyArray_CHKFLAGS(__pyx_v_self, NPY_ARRAY_C_CONTIGUOUS) != 0)) != 0);\n  __pyx_t_1 = __pyx_t_2;\n  __pyx_L4_bool_binop_done:;\n\n  /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":270\n *             ndim = PyArray_NDIM(self)\n * \n *             if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS)             # <<<<<<<<<<<<<<\n *                 and not 
PyArray_CHKFLAGS(self, NPY_ARRAY_C_CONTIGUOUS)):\n *                 raise ValueError(u\"ndarray is not C contiguous\")\n */\n  if (unlikely(__pyx_t_1)) {\n\n    /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":272\n *             if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS)\n *                 and not PyArray_CHKFLAGS(self, NPY_ARRAY_C_CONTIGUOUS)):\n *                 raise ValueError(u\"ndarray is not C contiguous\")             # <<<<<<<<<<<<<<\n * \n *             if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS)\n */\n    __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__21, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 272, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_3);\n    __Pyx_Raise(__pyx_t_3, 0, 0, 0);\n    __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;\n    __PYX_ERR(2, 272, __pyx_L1_error)\n\n    /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":270\n *             ndim = PyArray_NDIM(self)\n * \n *             if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS)             # <<<<<<<<<<<<<<\n *                 and not PyArray_CHKFLAGS(self, NPY_ARRAY_C_CONTIGUOUS)):\n *                 raise ValueError(u\"ndarray is not C contiguous\")\n */\n  }\n\n  /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":274\n *                 raise ValueError(u\"ndarray is not C contiguous\")\n * \n *             if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS)             # <<<<<<<<<<<<<<\n *                 and not PyArray_CHKFLAGS(self, NPY_ARRAY_F_CONTIGUOUS)):\n *                 raise ValueError(u\"ndarray is not Fortran contiguous\")\n */\n  __pyx_t_2 = (((__pyx_v_flags & PyBUF_F_CONTIGUOUS) == PyBUF_F_CONTIGUOUS) != 0);\n  if (__pyx_t_2) {\n  } else {\n    __pyx_t_1 = __pyx_t_2;\n    goto __pyx_L7_bool_binop_done;\n  }\n\n  /* 
\"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":275\n * \n *             if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS)\n *                 and not PyArray_CHKFLAGS(self, NPY_ARRAY_F_CONTIGUOUS)):             # <<<<<<<<<<<<<<\n *                 raise ValueError(u\"ndarray is not Fortran contiguous\")\n * \n */\n  __pyx_t_2 = ((!(PyArray_CHKFLAGS(__pyx_v_self, NPY_ARRAY_F_CONTIGUOUS) != 0)) != 0);\n  __pyx_t_1 = __pyx_t_2;\n  __pyx_L7_bool_binop_done:;\n\n  /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":274\n *                 raise ValueError(u\"ndarray is not C contiguous\")\n * \n *             if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS)             # <<<<<<<<<<<<<<\n *                 and not PyArray_CHKFLAGS(self, NPY_ARRAY_F_CONTIGUOUS)):\n *                 raise ValueError(u\"ndarray is not Fortran contiguous\")\n */\n  if (unlikely(__pyx_t_1)) {\n\n    /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":276\n *             if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS)\n *                 and not PyArray_CHKFLAGS(self, NPY_ARRAY_F_CONTIGUOUS)):\n *                 raise ValueError(u\"ndarray is not Fortran contiguous\")             # <<<<<<<<<<<<<<\n * \n *             info.buf = PyArray_DATA(self)\n */\n    __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__22, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 276, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_3);\n    __Pyx_Raise(__pyx_t_3, 0, 0, 0);\n    __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;\n    __PYX_ERR(2, 276, __pyx_L1_error)\n\n    /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":274\n *                 raise ValueError(u\"ndarray is not C contiguous\")\n * \n *             if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS)   
          # <<<<<<<<<<<<<<\n *                 and not PyArray_CHKFLAGS(self, NPY_ARRAY_F_CONTIGUOUS)):\n *                 raise ValueError(u\"ndarray is not Fortran contiguous\")\n */\n  }\n\n  /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":278\n *                 raise ValueError(u\"ndarray is not Fortran contiguous\")\n * \n *             info.buf = PyArray_DATA(self)             # <<<<<<<<<<<<<<\n *             info.ndim = ndim\n *             if sizeof(npy_intp) != sizeof(Py_ssize_t):\n */\n  __pyx_v_info->buf = PyArray_DATA(__pyx_v_self);\n\n  /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":279\n * \n *             info.buf = PyArray_DATA(self)\n *             info.ndim = ndim             # <<<<<<<<<<<<<<\n *             if sizeof(npy_intp) != sizeof(Py_ssize_t):\n *                 # Allocate new buffer for strides and shape info.\n */\n  __pyx_v_info->ndim = __pyx_v_ndim;\n\n  /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":280\n *             info.buf = PyArray_DATA(self)\n *             info.ndim = ndim\n *             if sizeof(npy_intp) != sizeof(Py_ssize_t):             # <<<<<<<<<<<<<<\n *                 # Allocate new buffer for strides and shape info.\n *                 # This is allocated as one block, strides first.\n */\n  __pyx_t_1 = (((sizeof(npy_intp)) != (sizeof(Py_ssize_t))) != 0);\n  if (__pyx_t_1) {\n\n    /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":283\n *                 # Allocate new buffer for strides and shape info.\n *                 # This is allocated as one block, strides first.\n *                 info.strides = <Py_ssize_t*>PyObject_Malloc(sizeof(Py_ssize_t) * 2 * <size_t>ndim)             # <<<<<<<<<<<<<<\n *                 info.shape = info.strides + ndim\n *                 for i in range(ndim):\n */\n    __pyx_v_info->strides 
= ((Py_ssize_t *)PyObject_Malloc((((sizeof(Py_ssize_t)) * 2) * ((size_t)__pyx_v_ndim))));\n\n    /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":284\n *                 # This is allocated as one block, strides first.\n *                 info.strides = <Py_ssize_t*>PyObject_Malloc(sizeof(Py_ssize_t) * 2 * <size_t>ndim)\n *                 info.shape = info.strides + ndim             # <<<<<<<<<<<<<<\n *                 for i in range(ndim):\n *                     info.strides[i] = PyArray_STRIDES(self)[i]\n */\n    __pyx_v_info->shape = (__pyx_v_info->strides + __pyx_v_ndim);\n\n    /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":285\n *                 info.strides = <Py_ssize_t*>PyObject_Malloc(sizeof(Py_ssize_t) * 2 * <size_t>ndim)\n *                 info.shape = info.strides + ndim\n *                 for i in range(ndim):             # <<<<<<<<<<<<<<\n *                     info.strides[i] = PyArray_STRIDES(self)[i]\n *                     info.shape[i] = PyArray_DIMS(self)[i]\n */\n    __pyx_t_4 = __pyx_v_ndim;\n    __pyx_t_5 = __pyx_t_4;\n    for (__pyx_t_6 = 0; __pyx_t_6 < __pyx_t_5; __pyx_t_6+=1) {\n      __pyx_v_i = __pyx_t_6;\n\n      /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":286\n *                 info.shape = info.strides + ndim\n *                 for i in range(ndim):\n *                     info.strides[i] = PyArray_STRIDES(self)[i]             # <<<<<<<<<<<<<<\n *                     info.shape[i] = PyArray_DIMS(self)[i]\n *             else:\n */\n      (__pyx_v_info->strides[__pyx_v_i]) = (PyArray_STRIDES(__pyx_v_self)[__pyx_v_i]);\n\n      /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":287\n *                 for i in range(ndim):\n *                     info.strides[i] = PyArray_STRIDES(self)[i]\n *                     info.shape[i] = 
PyArray_DIMS(self)[i]             # <<<<<<<<<<<<<<\n *             else:\n *                 info.strides = <Py_ssize_t*>PyArray_STRIDES(self)\n */\n      (__pyx_v_info->shape[__pyx_v_i]) = (PyArray_DIMS(__pyx_v_self)[__pyx_v_i]);\n    }\n\n    /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":280\n *             info.buf = PyArray_DATA(self)\n *             info.ndim = ndim\n *             if sizeof(npy_intp) != sizeof(Py_ssize_t):             # <<<<<<<<<<<<<<\n *                 # Allocate new buffer for strides and shape info.\n *                 # This is allocated as one block, strides first.\n */\n    goto __pyx_L9;\n  }\n\n  /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":289\n *                     info.shape[i] = PyArray_DIMS(self)[i]\n *             else:\n *                 info.strides = <Py_ssize_t*>PyArray_STRIDES(self)             # <<<<<<<<<<<<<<\n *                 info.shape = <Py_ssize_t*>PyArray_DIMS(self)\n *             info.suboffsets = NULL\n */\n  /*else*/ {\n    __pyx_v_info->strides = ((Py_ssize_t *)PyArray_STRIDES(__pyx_v_self));\n\n    /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":290\n *             else:\n *                 info.strides = <Py_ssize_t*>PyArray_STRIDES(self)\n *                 info.shape = <Py_ssize_t*>PyArray_DIMS(self)             # <<<<<<<<<<<<<<\n *             info.suboffsets = NULL\n *             info.itemsize = PyArray_ITEMSIZE(self)\n */\n    __pyx_v_info->shape = ((Py_ssize_t *)PyArray_DIMS(__pyx_v_self));\n  }\n  __pyx_L9:;\n\n  /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":291\n *                 info.strides = <Py_ssize_t*>PyArray_STRIDES(self)\n *                 info.shape = <Py_ssize_t*>PyArray_DIMS(self)\n *             info.suboffsets = NULL             # <<<<<<<<<<<<<<\n *             info.itemsize = 
PyArray_ITEMSIZE(self)\n *             info.readonly = not PyArray_ISWRITEABLE(self)\n */\n  __pyx_v_info->suboffsets = NULL;\n\n  /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":292\n *                 info.shape = <Py_ssize_t*>PyArray_DIMS(self)\n *             info.suboffsets = NULL\n *             info.itemsize = PyArray_ITEMSIZE(self)             # <<<<<<<<<<<<<<\n *             info.readonly = not PyArray_ISWRITEABLE(self)\n * \n */\n  __pyx_v_info->itemsize = PyArray_ITEMSIZE(__pyx_v_self);\n\n  /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":293\n *             info.suboffsets = NULL\n *             info.itemsize = PyArray_ITEMSIZE(self)\n *             info.readonly = not PyArray_ISWRITEABLE(self)             # <<<<<<<<<<<<<<\n * \n *             cdef int t\n */\n  __pyx_v_info->readonly = (!(PyArray_ISWRITEABLE(__pyx_v_self) != 0));\n\n  /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":296\n * \n *             cdef int t\n *             cdef char* f = NULL             # <<<<<<<<<<<<<<\n *             cdef dtype descr = <dtype>PyArray_DESCR(self)\n *             cdef int offset\n */\n  __pyx_v_f = NULL;\n\n  /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":297\n *             cdef int t\n *             cdef char* f = NULL\n *             cdef dtype descr = <dtype>PyArray_DESCR(self)             # <<<<<<<<<<<<<<\n *             cdef int offset\n * \n */\n  __pyx_t_7 = PyArray_DESCR(__pyx_v_self);\n  __pyx_t_3 = ((PyObject *)__pyx_t_7);\n  __Pyx_INCREF(__pyx_t_3);\n  __pyx_v_descr = ((PyArray_Descr *)__pyx_t_3);\n  __pyx_t_3 = 0;\n\n  /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":300\n *             cdef int offset\n * \n *             info.obj = self             # <<<<<<<<<<<<<<\n * \n *             if not 
PyDataType_HASFIELDS(descr):\n */\n  __Pyx_INCREF(((PyObject *)__pyx_v_self));\n  __Pyx_GIVEREF(((PyObject *)__pyx_v_self));\n  __Pyx_GOTREF(__pyx_v_info->obj);\n  __Pyx_DECREF(__pyx_v_info->obj);\n  __pyx_v_info->obj = ((PyObject *)__pyx_v_self);\n\n  /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":302\n *             info.obj = self\n * \n *             if not PyDataType_HASFIELDS(descr):             # <<<<<<<<<<<<<<\n *                 t = descr.type_num\n *                 if ((descr.byteorder == c'>' and little_endian) or\n */\n  __pyx_t_1 = ((!(PyDataType_HASFIELDS(__pyx_v_descr) != 0)) != 0);\n  if (__pyx_t_1) {\n\n    /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":303\n * \n *             if not PyDataType_HASFIELDS(descr):\n *                 t = descr.type_num             # <<<<<<<<<<<<<<\n *                 if ((descr.byteorder == c'>' and little_endian) or\n *                     (descr.byteorder == c'<' and not little_endian)):\n */\n    __pyx_t_4 = __pyx_v_descr->type_num;\n    __pyx_v_t = __pyx_t_4;\n\n    /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":304\n *             if not PyDataType_HASFIELDS(descr):\n *                 t = descr.type_num\n *                 if ((descr.byteorder == c'>' and little_endian) or             # <<<<<<<<<<<<<<\n *                     (descr.byteorder == c'<' and not little_endian)):\n *                     raise ValueError(u\"Non-native byte order not supported\")\n */\n    __pyx_t_2 = ((__pyx_v_descr->byteorder == '>') != 0);\n    if (!__pyx_t_2) {\n      goto __pyx_L15_next_or;\n    } else {\n    }\n    __pyx_t_2 = (__pyx_v_little_endian != 0);\n    if (!__pyx_t_2) {\n    } else {\n      __pyx_t_1 = __pyx_t_2;\n      goto __pyx_L14_bool_binop_done;\n    }\n    __pyx_L15_next_or:;\n\n    /* 
\"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":305\n *                 t = descr.type_num\n *                 if ((descr.byteorder == c'>' and little_endian) or\n *                     (descr.byteorder == c'<' and not little_endian)):             # <<<<<<<<<<<<<<\n *                     raise ValueError(u\"Non-native byte order not supported\")\n *                 if   t == NPY_BYTE:        f = \"b\"\n */\n    __pyx_t_2 = ((__pyx_v_descr->byteorder == '<') != 0);\n    if (__pyx_t_2) {\n    } else {\n      __pyx_t_1 = __pyx_t_2;\n      goto __pyx_L14_bool_binop_done;\n    }\n    __pyx_t_2 = ((!(__pyx_v_little_endian != 0)) != 0);\n    __pyx_t_1 = __pyx_t_2;\n    __pyx_L14_bool_binop_done:;\n\n    /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":304\n *             if not PyDataType_HASFIELDS(descr):\n *                 t = descr.type_num\n *                 if ((descr.byteorder == c'>' and little_endian) or             # <<<<<<<<<<<<<<\n *                     (descr.byteorder == c'<' and not little_endian)):\n *                     raise ValueError(u\"Non-native byte order not supported\")\n */\n    if (unlikely(__pyx_t_1)) {\n\n      /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":306\n *                 if ((descr.byteorder == c'>' and little_endian) or\n *                     (descr.byteorder == c'<' and not little_endian)):\n *                     raise ValueError(u\"Non-native byte order not supported\")             # <<<<<<<<<<<<<<\n *                 if   t == NPY_BYTE:        f = \"b\"\n *                 elif t == NPY_UBYTE:       f = \"B\"\n */\n      __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__23, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 306, __pyx_L1_error)\n      __Pyx_GOTREF(__pyx_t_3);\n      __Pyx_Raise(__pyx_t_3, 0, 0, 0);\n      __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;\n      
__PYX_ERR(2, 306, __pyx_L1_error)\n\n      /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":304\n *             if not PyDataType_HASFIELDS(descr):\n *                 t = descr.type_num\n *                 if ((descr.byteorder == c'>' and little_endian) or             # <<<<<<<<<<<<<<\n *                     (descr.byteorder == c'<' and not little_endian)):\n *                     raise ValueError(u\"Non-native byte order not supported\")\n */\n    }\n\n    /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":307\n *                     (descr.byteorder == c'<' and not little_endian)):\n *                     raise ValueError(u\"Non-native byte order not supported\")\n *                 if   t == NPY_BYTE:        f = \"b\"             # <<<<<<<<<<<<<<\n *                 elif t == NPY_UBYTE:       f = \"B\"\n *                 elif t == NPY_SHORT:       f = \"h\"\n */\n    switch (__pyx_v_t) {\n      case NPY_BYTE:\n      __pyx_v_f = ((char *)\"b\");\n      break;\n      case NPY_UBYTE:\n\n      /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":308\n *                     raise ValueError(u\"Non-native byte order not supported\")\n *                 if   t == NPY_BYTE:        f = \"b\"\n *                 elif t == NPY_UBYTE:       f = \"B\"             # <<<<<<<<<<<<<<\n *                 elif t == NPY_SHORT:       f = \"h\"\n *                 elif t == NPY_USHORT:      f = \"H\"\n */\n      __pyx_v_f = ((char *)\"B\");\n      break;\n      case NPY_SHORT:\n\n      /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":309\n *                 if   t == NPY_BYTE:        f = \"b\"\n *                 elif t == NPY_UBYTE:       f = \"B\"\n *                 elif t == NPY_SHORT:       f = \"h\"             # <<<<<<<<<<<<<<\n *                 elif t == NPY_USHORT:      f = \"H\"\n *           
      elif t == NPY_INT:         f = \"i\"\n */\n      __pyx_v_f = ((char *)\"h\");\n      break;\n      case NPY_USHORT:\n\n      /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":310\n *                 elif t == NPY_UBYTE:       f = \"B\"\n *                 elif t == NPY_SHORT:       f = \"h\"\n *                 elif t == NPY_USHORT:      f = \"H\"             # <<<<<<<<<<<<<<\n *                 elif t == NPY_INT:         f = \"i\"\n *                 elif t == NPY_UINT:        f = \"I\"\n */\n      __pyx_v_f = ((char *)\"H\");\n      break;\n      case NPY_INT:\n\n      /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":311\n *                 elif t == NPY_SHORT:       f = \"h\"\n *                 elif t == NPY_USHORT:      f = \"H\"\n *                 elif t == NPY_INT:         f = \"i\"             # <<<<<<<<<<<<<<\n *                 elif t == NPY_UINT:        f = \"I\"\n *                 elif t == NPY_LONG:        f = \"l\"\n */\n      __pyx_v_f = ((char *)\"i\");\n      break;\n      case NPY_UINT:\n\n      /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":312\n *                 elif t == NPY_USHORT:      f = \"H\"\n *                 elif t == NPY_INT:         f = \"i\"\n *                 elif t == NPY_UINT:        f = \"I\"             # <<<<<<<<<<<<<<\n *                 elif t == NPY_LONG:        f = \"l\"\n *                 elif t == NPY_ULONG:       f = \"L\"\n */\n      __pyx_v_f = ((char *)\"I\");\n      break;\n      case NPY_LONG:\n\n      /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":313\n *                 elif t == NPY_INT:         f = \"i\"\n *                 elif t == NPY_UINT:        f = \"I\"\n *                 elif t == NPY_LONG:        f = \"l\"             # <<<<<<<<<<<<<<\n *                 elif t == NPY_ULONG:       f = \"L\"\n *          
       elif t == NPY_LONGLONG:    f = \"q\"\n */\n      __pyx_v_f = ((char *)\"l\");\n      break;\n      case NPY_ULONG:\n\n      /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":314\n *                 elif t == NPY_UINT:        f = \"I\"\n *                 elif t == NPY_LONG:        f = \"l\"\n *                 elif t == NPY_ULONG:       f = \"L\"             # <<<<<<<<<<<<<<\n *                 elif t == NPY_LONGLONG:    f = \"q\"\n *                 elif t == NPY_ULONGLONG:   f = \"Q\"\n */\n      __pyx_v_f = ((char *)\"L\");\n      break;\n      case NPY_LONGLONG:\n\n      /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":315\n *                 elif t == NPY_LONG:        f = \"l\"\n *                 elif t == NPY_ULONG:       f = \"L\"\n *                 elif t == NPY_LONGLONG:    f = \"q\"             # <<<<<<<<<<<<<<\n *                 elif t == NPY_ULONGLONG:   f = \"Q\"\n *                 elif t == NPY_FLOAT:       f = \"f\"\n */\n      __pyx_v_f = ((char *)\"q\");\n      break;\n      case NPY_ULONGLONG:\n\n      /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":316\n *                 elif t == NPY_ULONG:       f = \"L\"\n *                 elif t == NPY_LONGLONG:    f = \"q\"\n *                 elif t == NPY_ULONGLONG:   f = \"Q\"             # <<<<<<<<<<<<<<\n *                 elif t == NPY_FLOAT:       f = \"f\"\n *                 elif t == NPY_DOUBLE:      f = \"d\"\n */\n      __pyx_v_f = ((char *)\"Q\");\n      break;\n      case NPY_FLOAT:\n\n      /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":317\n *                 elif t == NPY_LONGLONG:    f = \"q\"\n *                 elif t == NPY_ULONGLONG:   f = \"Q\"\n *                 elif t == NPY_FLOAT:       f = \"f\"             # <<<<<<<<<<<<<<\n *                 elif t == NPY_DOUBLE:      f = \"d\"\n 
*                 elif t == NPY_LONGDOUBLE:  f = \"g\"\n */\n      __pyx_v_f = ((char *)\"f\");\n      break;\n      case NPY_DOUBLE:\n\n      /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":318\n *                 elif t == NPY_ULONGLONG:   f = \"Q\"\n *                 elif t == NPY_FLOAT:       f = \"f\"\n *                 elif t == NPY_DOUBLE:      f = \"d\"             # <<<<<<<<<<<<<<\n *                 elif t == NPY_LONGDOUBLE:  f = \"g\"\n *                 elif t == NPY_CFLOAT:      f = \"Zf\"\n */\n      __pyx_v_f = ((char *)\"d\");\n      break;\n      case NPY_LONGDOUBLE:\n\n      /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":319\n *                 elif t == NPY_FLOAT:       f = \"f\"\n *                 elif t == NPY_DOUBLE:      f = \"d\"\n *                 elif t == NPY_LONGDOUBLE:  f = \"g\"             # <<<<<<<<<<<<<<\n *                 elif t == NPY_CFLOAT:      f = \"Zf\"\n *                 elif t == NPY_CDOUBLE:     f = \"Zd\"\n */\n      __pyx_v_f = ((char *)\"g\");\n      break;\n      case NPY_CFLOAT:\n\n      /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":320\n *                 elif t == NPY_DOUBLE:      f = \"d\"\n *                 elif t == NPY_LONGDOUBLE:  f = \"g\"\n *                 elif t == NPY_CFLOAT:      f = \"Zf\"             # <<<<<<<<<<<<<<\n *                 elif t == NPY_CDOUBLE:     f = \"Zd\"\n *                 elif t == NPY_CLONGDOUBLE: f = \"Zg\"\n */\n      __pyx_v_f = ((char *)\"Zf\");\n      break;\n      case NPY_CDOUBLE:\n\n      /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":321\n *                 elif t == NPY_LONGDOUBLE:  f = \"g\"\n *                 elif t == NPY_CFLOAT:      f = \"Zf\"\n *                 elif t == NPY_CDOUBLE:     f = \"Zd\"             # <<<<<<<<<<<<<<\n *                 elif t == 
NPY_CLONGDOUBLE: f = \"Zg\"\n *                 elif t == NPY_OBJECT:      f = \"O\"\n */\n      __pyx_v_f = ((char *)\"Zd\");\n      break;\n      case NPY_CLONGDOUBLE:\n\n      /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":322\n *                 elif t == NPY_CFLOAT:      f = \"Zf\"\n *                 elif t == NPY_CDOUBLE:     f = \"Zd\"\n *                 elif t == NPY_CLONGDOUBLE: f = \"Zg\"             # <<<<<<<<<<<<<<\n *                 elif t == NPY_OBJECT:      f = \"O\"\n *                 else:\n */\n      __pyx_v_f = ((char *)\"Zg\");\n      break;\n      case NPY_OBJECT:\n\n      /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":323\n *                 elif t == NPY_CDOUBLE:     f = \"Zd\"\n *                 elif t == NPY_CLONGDOUBLE: f = \"Zg\"\n *                 elif t == NPY_OBJECT:      f = \"O\"             # <<<<<<<<<<<<<<\n *                 else:\n *                     raise ValueError(u\"unknown dtype code in numpy.pxd (%d)\" % t)\n */\n      __pyx_v_f = ((char *)\"O\");\n      break;\n      default:\n\n      /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":325\n *                 elif t == NPY_OBJECT:      f = \"O\"\n *                 else:\n *                     raise ValueError(u\"unknown dtype code in numpy.pxd (%d)\" % t)             # <<<<<<<<<<<<<<\n *                 info.format = f\n *                 return\n */\n      __pyx_t_3 = __Pyx_PyInt_From_int(__pyx_v_t); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 325, __pyx_L1_error)\n      __Pyx_GOTREF(__pyx_t_3);\n      __pyx_t_8 = PyUnicode_Format(__pyx_kp_u_unknown_dtype_code_in_numpy_pxd, __pyx_t_3); if (unlikely(!__pyx_t_8)) __PYX_ERR(2, 325, __pyx_L1_error)\n      __Pyx_GOTREF(__pyx_t_8);\n      __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;\n      __pyx_t_3 = __Pyx_PyObject_CallOneArg(__pyx_builtin_ValueError, __pyx_t_8); if 
(unlikely(!__pyx_t_3)) __PYX_ERR(2, 325, __pyx_L1_error)\n      __Pyx_GOTREF(__pyx_t_3);\n      __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0;\n      __Pyx_Raise(__pyx_t_3, 0, 0, 0);\n      __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;\n      __PYX_ERR(2, 325, __pyx_L1_error)\n      break;\n    }\n\n    /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":326\n *                 else:\n *                     raise ValueError(u\"unknown dtype code in numpy.pxd (%d)\" % t)\n *                 info.format = f             # <<<<<<<<<<<<<<\n *                 return\n *             else:\n */\n    __pyx_v_info->format = __pyx_v_f;\n\n    /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":327\n *                     raise ValueError(u\"unknown dtype code in numpy.pxd (%d)\" % t)\n *                 info.format = f\n *                 return             # <<<<<<<<<<<<<<\n *             else:\n *                 info.format = <char*>PyObject_Malloc(_buffer_format_string_len)\n */\n    __pyx_r = 0;\n    goto __pyx_L0;\n\n    /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":302\n *             info.obj = self\n * \n *             if not PyDataType_HASFIELDS(descr):             # <<<<<<<<<<<<<<\n *                 t = descr.type_num\n *                 if ((descr.byteorder == c'>' and little_endian) or\n */\n  }\n\n  /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":329\n *                 return\n *             else:\n *                 info.format = <char*>PyObject_Malloc(_buffer_format_string_len)             # <<<<<<<<<<<<<<\n *                 info.format[0] = c'^' # Native data types, manual alignment\n *                 offset = 0\n */\n  /*else*/ {\n    __pyx_v_info->format = ((char *)PyObject_Malloc(0xFF));\n\n    /* 
\"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":330\n *             else:\n *                 info.format = <char*>PyObject_Malloc(_buffer_format_string_len)\n *                 info.format[0] = c'^' # Native data types, manual alignment             # <<<<<<<<<<<<<<\n *                 offset = 0\n *                 f = _util_dtypestring(descr, info.format + 1,\n */\n    (__pyx_v_info->format[0]) = '^';\n\n    /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":331\n *                 info.format = <char*>PyObject_Malloc(_buffer_format_string_len)\n *                 info.format[0] = c'^' # Native data types, manual alignment\n *                 offset = 0             # <<<<<<<<<<<<<<\n *                 f = _util_dtypestring(descr, info.format + 1,\n *                                       info.format + _buffer_format_string_len,\n */\n    __pyx_v_offset = 0;\n\n    /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":332\n *                 info.format[0] = c'^' # Native data types, manual alignment\n *                 offset = 0\n *                 f = _util_dtypestring(descr, info.format + 1,             # <<<<<<<<<<<<<<\n *                                       info.format + _buffer_format_string_len,\n *                                       &offset)\n */\n    __pyx_t_9 = __pyx_f_5numpy__util_dtypestring(__pyx_v_descr, (__pyx_v_info->format + 1), (__pyx_v_info->format + 0xFF), (&__pyx_v_offset)); if (unlikely(__pyx_t_9 == ((char *)NULL))) __PYX_ERR(2, 332, __pyx_L1_error)\n    __pyx_v_f = __pyx_t_9;\n\n    /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":335\n *                                       info.format + _buffer_format_string_len,\n *                                       &offset)\n *                 f[0] = c'\\0' # Terminate format string             # <<<<<<<<<<<<<<\n * \n 
*         def __releasebuffer__(ndarray self, Py_buffer* info):\n */\n    (__pyx_v_f[0]) = '\\x00';\n  }\n\n  /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":258\n *         # experimental exception made for __getbuffer__ and __releasebuffer__\n *         # -- the details of this may change.\n *         def __getbuffer__(ndarray self, Py_buffer* info, int flags):             # <<<<<<<<<<<<<<\n *             # This implementation of getbuffer is geared towards Cython\n *             # requirements, and does not yet fulfill the PEP.\n */\n\n  /* function exit code */\n  __pyx_r = 0;\n  goto __pyx_L0;\n  __pyx_L1_error:;\n  __Pyx_XDECREF(__pyx_t_3);\n  __Pyx_XDECREF(__pyx_t_8);\n  __Pyx_AddTraceback(\"numpy.ndarray.__getbuffer__\", __pyx_clineno, __pyx_lineno, __pyx_filename);\n  __pyx_r = -1;\n  if (__pyx_v_info->obj != NULL) {\n    __Pyx_GOTREF(__pyx_v_info->obj);\n    __Pyx_DECREF(__pyx_v_info->obj); __pyx_v_info->obj = 0;\n  }\n  goto __pyx_L2;\n  __pyx_L0:;\n  if (__pyx_v_info->obj == Py_None) {\n    __Pyx_GOTREF(__pyx_v_info->obj);\n    __Pyx_DECREF(__pyx_v_info->obj); __pyx_v_info->obj = 0;\n  }\n  __pyx_L2:;\n  __Pyx_XDECREF((PyObject *)__pyx_v_descr);\n  __Pyx_RefNannyFinishContext();\n  return __pyx_r;\n}\n\n/* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":337\n *                 f[0] = c'\\0' # Terminate format string\n * \n *         def __releasebuffer__(ndarray self, Py_buffer* info):             # <<<<<<<<<<<<<<\n *             if PyArray_HASFIELDS(self):\n *                 PyObject_Free(info.format)\n */\n\n/* Python wrapper */\nstatic CYTHON_UNUSED void __pyx_pw_5numpy_7ndarray_3__releasebuffer__(PyObject *__pyx_v_self, Py_buffer *__pyx_v_info); /*proto*/\nstatic CYTHON_UNUSED void __pyx_pw_5numpy_7ndarray_3__releasebuffer__(PyObject *__pyx_v_self, Py_buffer *__pyx_v_info) {\n  __Pyx_RefNannyDeclarations\n  __Pyx_RefNannySetupContext(\"__releasebuffer__ 
(wrapper)\", 0);\n  __pyx_pf_5numpy_7ndarray_2__releasebuffer__(((PyArrayObject *)__pyx_v_self), ((Py_buffer *)__pyx_v_info));\n\n  /* function exit code */\n  __Pyx_RefNannyFinishContext();\n}\n\nstatic void __pyx_pf_5numpy_7ndarray_2__releasebuffer__(PyArrayObject *__pyx_v_self, Py_buffer *__pyx_v_info) {\n  __Pyx_RefNannyDeclarations\n  int __pyx_t_1;\n  __Pyx_RefNannySetupContext(\"__releasebuffer__\", 0);\n\n  /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":338\n * \n *         def __releasebuffer__(ndarray self, Py_buffer* info):\n *             if PyArray_HASFIELDS(self):             # <<<<<<<<<<<<<<\n *                 PyObject_Free(info.format)\n *             if sizeof(npy_intp) != sizeof(Py_ssize_t):\n */\n  __pyx_t_1 = (PyArray_HASFIELDS(__pyx_v_self) != 0);\n  if (__pyx_t_1) {\n\n    /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":339\n *         def __releasebuffer__(ndarray self, Py_buffer* info):\n *             if PyArray_HASFIELDS(self):\n *                 PyObject_Free(info.format)             # <<<<<<<<<<<<<<\n *             if sizeof(npy_intp) != sizeof(Py_ssize_t):\n *                 PyObject_Free(info.strides)\n */\n    PyObject_Free(__pyx_v_info->format);\n\n    /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":338\n * \n *         def __releasebuffer__(ndarray self, Py_buffer* info):\n *             if PyArray_HASFIELDS(self):             # <<<<<<<<<<<<<<\n *                 PyObject_Free(info.format)\n *             if sizeof(npy_intp) != sizeof(Py_ssize_t):\n */\n  }\n\n  /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":340\n *             if PyArray_HASFIELDS(self):\n *                 PyObject_Free(info.format)\n *             if sizeof(npy_intp) != sizeof(Py_ssize_t):             # <<<<<<<<<<<<<<\n *                 PyObject_Free(info.strides)\n *  
               # info.shape was stored after info.strides in the same block\n */\n  __pyx_t_1 = (((sizeof(npy_intp)) != (sizeof(Py_ssize_t))) != 0);\n  if (__pyx_t_1) {\n\n    /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":341\n *                 PyObject_Free(info.format)\n *             if sizeof(npy_intp) != sizeof(Py_ssize_t):\n *                 PyObject_Free(info.strides)             # <<<<<<<<<<<<<<\n *                 # info.shape was stored after info.strides in the same block\n * \n */\n    PyObject_Free(__pyx_v_info->strides);\n\n    /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":340\n *             if PyArray_HASFIELDS(self):\n *                 PyObject_Free(info.format)\n *             if sizeof(npy_intp) != sizeof(Py_ssize_t):             # <<<<<<<<<<<<<<\n *                 PyObject_Free(info.strides)\n *                 # info.shape was stored after info.strides in the same block\n */\n  }\n\n  /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":337\n *                 f[0] = c'\\0' # Terminate format string\n * \n *         def __releasebuffer__(ndarray self, Py_buffer* info):             # <<<<<<<<<<<<<<\n *             if PyArray_HASFIELDS(self):\n *                 PyObject_Free(info.format)\n */\n\n  /* function exit code */\n  __Pyx_RefNannyFinishContext();\n}\n\n/* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":821\n * ctypedef npy_cdouble     complex_t\n * \n * cdef inline object PyArray_MultiIterNew1(a):             # <<<<<<<<<<<<<<\n *     return PyArray_MultiIterNew(1, <void*>a)\n * \n */\n\nstatic CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew1(PyObject *__pyx_v_a) {\n  PyObject *__pyx_r = NULL;\n  __Pyx_RefNannyDeclarations\n  PyObject *__pyx_t_1 = NULL;\n  __Pyx_RefNannySetupContext(\"PyArray_MultiIterNew1\", 0);\n\n  /* 
\"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":822\n * \n * cdef inline object PyArray_MultiIterNew1(a):\n *     return PyArray_MultiIterNew(1, <void*>a)             # <<<<<<<<<<<<<<\n * \n * cdef inline object PyArray_MultiIterNew2(a, b):\n */\n  __Pyx_XDECREF(__pyx_r);\n  __pyx_t_1 = PyArray_MultiIterNew(1, ((void *)__pyx_v_a)); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 822, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_1);\n  __pyx_r = __pyx_t_1;\n  __pyx_t_1 = 0;\n  goto __pyx_L0;\n\n  /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":821\n * ctypedef npy_cdouble     complex_t\n * \n * cdef inline object PyArray_MultiIterNew1(a):             # <<<<<<<<<<<<<<\n *     return PyArray_MultiIterNew(1, <void*>a)\n * \n */\n\n  /* function exit code */\n  __pyx_L1_error:;\n  __Pyx_XDECREF(__pyx_t_1);\n  __Pyx_AddTraceback(\"numpy.PyArray_MultiIterNew1\", __pyx_clineno, __pyx_lineno, __pyx_filename);\n  __pyx_r = 0;\n  __pyx_L0:;\n  __Pyx_XGIVEREF(__pyx_r);\n  __Pyx_RefNannyFinishContext();\n  return __pyx_r;\n}\n\n/* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":824\n *     return PyArray_MultiIterNew(1, <void*>a)\n * \n * cdef inline object PyArray_MultiIterNew2(a, b):             # <<<<<<<<<<<<<<\n *     return PyArray_MultiIterNew(2, <void*>a, <void*>b)\n * \n */\n\nstatic CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew2(PyObject *__pyx_v_a, PyObject *__pyx_v_b) {\n  PyObject *__pyx_r = NULL;\n  __Pyx_RefNannyDeclarations\n  PyObject *__pyx_t_1 = NULL;\n  __Pyx_RefNannySetupContext(\"PyArray_MultiIterNew2\", 0);\n\n  /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":825\n * \n * cdef inline object PyArray_MultiIterNew2(a, b):\n *     return PyArray_MultiIterNew(2, <void*>a, <void*>b)             # <<<<<<<<<<<<<<\n * \n * cdef inline object PyArray_MultiIterNew3(a, b, c):\n */\n  
__Pyx_XDECREF(__pyx_r);\n  __pyx_t_1 = PyArray_MultiIterNew(2, ((void *)__pyx_v_a), ((void *)__pyx_v_b)); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 825, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_1);\n  __pyx_r = __pyx_t_1;\n  __pyx_t_1 = 0;\n  goto __pyx_L0;\n\n  /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":824\n *     return PyArray_MultiIterNew(1, <void*>a)\n * \n * cdef inline object PyArray_MultiIterNew2(a, b):             # <<<<<<<<<<<<<<\n *     return PyArray_MultiIterNew(2, <void*>a, <void*>b)\n * \n */\n\n  /* function exit code */\n  __pyx_L1_error:;\n  __Pyx_XDECREF(__pyx_t_1);\n  __Pyx_AddTraceback(\"numpy.PyArray_MultiIterNew2\", __pyx_clineno, __pyx_lineno, __pyx_filename);\n  __pyx_r = 0;\n  __pyx_L0:;\n  __Pyx_XGIVEREF(__pyx_r);\n  __Pyx_RefNannyFinishContext();\n  return __pyx_r;\n}\n\n/* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":827\n *     return PyArray_MultiIterNew(2, <void*>a, <void*>b)\n * \n * cdef inline object PyArray_MultiIterNew3(a, b, c):             # <<<<<<<<<<<<<<\n *     return PyArray_MultiIterNew(3, <void*>a, <void*>b, <void*> c)\n * \n */\n\nstatic CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew3(PyObject *__pyx_v_a, PyObject *__pyx_v_b, PyObject *__pyx_v_c) {\n  PyObject *__pyx_r = NULL;\n  __Pyx_RefNannyDeclarations\n  PyObject *__pyx_t_1 = NULL;\n  __Pyx_RefNannySetupContext(\"PyArray_MultiIterNew3\", 0);\n\n  /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":828\n * \n * cdef inline object PyArray_MultiIterNew3(a, b, c):\n *     return PyArray_MultiIterNew(3, <void*>a, <void*>b, <void*> c)             # <<<<<<<<<<<<<<\n * \n * cdef inline object PyArray_MultiIterNew4(a, b, c, d):\n */\n  __Pyx_XDECREF(__pyx_r);\n  __pyx_t_1 = PyArray_MultiIterNew(3, ((void *)__pyx_v_a), ((void *)__pyx_v_b), ((void *)__pyx_v_c)); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 828, __pyx_L1_error)\n  
__Pyx_GOTREF(__pyx_t_1);\n  __pyx_r = __pyx_t_1;\n  __pyx_t_1 = 0;\n  goto __pyx_L0;\n\n  /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":827\n *     return PyArray_MultiIterNew(2, <void*>a, <void*>b)\n * \n * cdef inline object PyArray_MultiIterNew3(a, b, c):             # <<<<<<<<<<<<<<\n *     return PyArray_MultiIterNew(3, <void*>a, <void*>b, <void*> c)\n * \n */\n\n  /* function exit code */\n  __pyx_L1_error:;\n  __Pyx_XDECREF(__pyx_t_1);\n  __Pyx_AddTraceback(\"numpy.PyArray_MultiIterNew3\", __pyx_clineno, __pyx_lineno, __pyx_filename);\n  __pyx_r = 0;\n  __pyx_L0:;\n  __Pyx_XGIVEREF(__pyx_r);\n  __Pyx_RefNannyFinishContext();\n  return __pyx_r;\n}\n\n/* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":830\n *     return PyArray_MultiIterNew(3, <void*>a, <void*>b, <void*> c)\n * \n * cdef inline object PyArray_MultiIterNew4(a, b, c, d):             # <<<<<<<<<<<<<<\n *     return PyArray_MultiIterNew(4, <void*>a, <void*>b, <void*>c, <void*> d)\n * \n */\n\nstatic CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew4(PyObject *__pyx_v_a, PyObject *__pyx_v_b, PyObject *__pyx_v_c, PyObject *__pyx_v_d) {\n  PyObject *__pyx_r = NULL;\n  __Pyx_RefNannyDeclarations\n  PyObject *__pyx_t_1 = NULL;\n  __Pyx_RefNannySetupContext(\"PyArray_MultiIterNew4\", 0);\n\n  /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":831\n * \n * cdef inline object PyArray_MultiIterNew4(a, b, c, d):\n *     return PyArray_MultiIterNew(4, <void*>a, <void*>b, <void*>c, <void*> d)             # <<<<<<<<<<<<<<\n * \n * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e):\n */\n  __Pyx_XDECREF(__pyx_r);\n  __pyx_t_1 = PyArray_MultiIterNew(4, ((void *)__pyx_v_a), ((void *)__pyx_v_b), ((void *)__pyx_v_c), ((void *)__pyx_v_d)); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 831, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_1);\n  __pyx_r = __pyx_t_1;\n  __pyx_t_1 
= 0;\n  goto __pyx_L0;\n\n  /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":830\n *     return PyArray_MultiIterNew(3, <void*>a, <void*>b, <void*> c)\n * \n * cdef inline object PyArray_MultiIterNew4(a, b, c, d):             # <<<<<<<<<<<<<<\n *     return PyArray_MultiIterNew(4, <void*>a, <void*>b, <void*>c, <void*> d)\n * \n */\n\n  /* function exit code */\n  __pyx_L1_error:;\n  __Pyx_XDECREF(__pyx_t_1);\n  __Pyx_AddTraceback(\"numpy.PyArray_MultiIterNew4\", __pyx_clineno, __pyx_lineno, __pyx_filename);\n  __pyx_r = 0;\n  __pyx_L0:;\n  __Pyx_XGIVEREF(__pyx_r);\n  __Pyx_RefNannyFinishContext();\n  return __pyx_r;\n}\n\n/* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":833\n *     return PyArray_MultiIterNew(4, <void*>a, <void*>b, <void*>c, <void*> d)\n * \n * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e):             # <<<<<<<<<<<<<<\n *     return PyArray_MultiIterNew(5, <void*>a, <void*>b, <void*>c, <void*> d, <void*> e)\n * \n */\n\nstatic CYTHON_INLINE PyObject *__pyx_f_5numpy_PyArray_MultiIterNew5(PyObject *__pyx_v_a, PyObject *__pyx_v_b, PyObject *__pyx_v_c, PyObject *__pyx_v_d, PyObject *__pyx_v_e) {\n  PyObject *__pyx_r = NULL;\n  __Pyx_RefNannyDeclarations\n  PyObject *__pyx_t_1 = NULL;\n  __Pyx_RefNannySetupContext(\"PyArray_MultiIterNew5\", 0);\n\n  /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":834\n * \n * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e):\n *     return PyArray_MultiIterNew(5, <void*>a, <void*>b, <void*>c, <void*> d, <void*> e)             # <<<<<<<<<<<<<<\n * \n * cdef inline tuple PyDataType_SHAPE(dtype d):\n */\n  __Pyx_XDECREF(__pyx_r);\n  __pyx_t_1 = PyArray_MultiIterNew(5, ((void *)__pyx_v_a), ((void *)__pyx_v_b), ((void *)__pyx_v_c), ((void *)__pyx_v_d), ((void *)__pyx_v_e)); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 834, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_1);\n  
__pyx_r = __pyx_t_1;\n  __pyx_t_1 = 0;\n  goto __pyx_L0;\n\n  /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":833\n *     return PyArray_MultiIterNew(4, <void*>a, <void*>b, <void*>c, <void*> d)\n * \n * cdef inline object PyArray_MultiIterNew5(a, b, c, d, e):             # <<<<<<<<<<<<<<\n *     return PyArray_MultiIterNew(5, <void*>a, <void*>b, <void*>c, <void*> d, <void*> e)\n * \n */\n\n  /* function exit code */\n  __pyx_L1_error:;\n  __Pyx_XDECREF(__pyx_t_1);\n  __Pyx_AddTraceback(\"numpy.PyArray_MultiIterNew5\", __pyx_clineno, __pyx_lineno, __pyx_filename);\n  __pyx_r = 0;\n  __pyx_L0:;\n  __Pyx_XGIVEREF(__pyx_r);\n  __Pyx_RefNannyFinishContext();\n  return __pyx_r;\n}\n\n/* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":836\n *     return PyArray_MultiIterNew(5, <void*>a, <void*>b, <void*>c, <void*> d, <void*> e)\n * \n * cdef inline tuple PyDataType_SHAPE(dtype d):             # <<<<<<<<<<<<<<\n *     if PyDataType_HASSUBARRAY(d):\n *         return <tuple>d.subarray.shape\n */\n\nstatic CYTHON_INLINE PyObject *__pyx_f_5numpy_PyDataType_SHAPE(PyArray_Descr *__pyx_v_d) {\n  PyObject *__pyx_r = NULL;\n  __Pyx_RefNannyDeclarations\n  int __pyx_t_1;\n  __Pyx_RefNannySetupContext(\"PyDataType_SHAPE\", 0);\n\n  /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":837\n * \n * cdef inline tuple PyDataType_SHAPE(dtype d):\n *     if PyDataType_HASSUBARRAY(d):             # <<<<<<<<<<<<<<\n *         return <tuple>d.subarray.shape\n *     else:\n */\n  __pyx_t_1 = (PyDataType_HASSUBARRAY(__pyx_v_d) != 0);\n  if (__pyx_t_1) {\n\n    /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":838\n * cdef inline tuple PyDataType_SHAPE(dtype d):\n *     if PyDataType_HASSUBARRAY(d):\n *         return <tuple>d.subarray.shape             # <<<<<<<<<<<<<<\n *     else:\n *         return ()\n */\n    
__Pyx_XDECREF(__pyx_r);\n    __Pyx_INCREF(((PyObject*)__pyx_v_d->subarray->shape));\n    __pyx_r = ((PyObject*)__pyx_v_d->subarray->shape);\n    goto __pyx_L0;\n\n    /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":837\n * \n * cdef inline tuple PyDataType_SHAPE(dtype d):\n *     if PyDataType_HASSUBARRAY(d):             # <<<<<<<<<<<<<<\n *         return <tuple>d.subarray.shape\n *     else:\n */\n  }\n\n  /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":840\n *         return <tuple>d.subarray.shape\n *     else:\n *         return ()             # <<<<<<<<<<<<<<\n * \n * cdef inline char* _util_dtypestring(dtype descr, char* f, char* end, int* offset) except NULL:\n */\n  /*else*/ {\n    __Pyx_XDECREF(__pyx_r);\n    __Pyx_INCREF(__pyx_empty_tuple);\n    __pyx_r = __pyx_empty_tuple;\n    goto __pyx_L0;\n  }\n\n  /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":836\n *     return PyArray_MultiIterNew(5, <void*>a, <void*>b, <void*>c, <void*> d, <void*> e)\n * \n * cdef inline tuple PyDataType_SHAPE(dtype d):             # <<<<<<<<<<<<<<\n *     if PyDataType_HASSUBARRAY(d):\n *         return <tuple>d.subarray.shape\n */\n\n  /* function exit code */\n  __pyx_L0:;\n  __Pyx_XGIVEREF(__pyx_r);\n  __Pyx_RefNannyFinishContext();\n  return __pyx_r;\n}\n\n/* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":842\n *         return ()\n * \n * cdef inline char* _util_dtypestring(dtype descr, char* f, char* end, int* offset) except NULL:             # <<<<<<<<<<<<<<\n *     # Recursive utility function used in __getbuffer__ to get format\n *     # string. 
The new location in the format string is returned.\n */\n\nstatic CYTHON_INLINE char *__pyx_f_5numpy__util_dtypestring(PyArray_Descr *__pyx_v_descr, char *__pyx_v_f, char *__pyx_v_end, int *__pyx_v_offset) {\n  PyArray_Descr *__pyx_v_child = 0;\n  int __pyx_v_endian_detector;\n  int __pyx_v_little_endian;\n  PyObject *__pyx_v_fields = 0;\n  PyObject *__pyx_v_childname = NULL;\n  PyObject *__pyx_v_new_offset = NULL;\n  PyObject *__pyx_v_t = NULL;\n  char *__pyx_r;\n  __Pyx_RefNannyDeclarations\n  PyObject *__pyx_t_1 = NULL;\n  Py_ssize_t __pyx_t_2;\n  PyObject *__pyx_t_3 = NULL;\n  PyObject *__pyx_t_4 = NULL;\n  int __pyx_t_5;\n  int __pyx_t_6;\n  int __pyx_t_7;\n  long __pyx_t_8;\n  char *__pyx_t_9;\n  __Pyx_RefNannySetupContext(\"_util_dtypestring\", 0);\n\n  /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":847\n * \n *     cdef dtype child\n *     cdef int endian_detector = 1             # <<<<<<<<<<<<<<\n *     cdef bint little_endian = ((<char*>&endian_detector)[0] != 0)\n *     cdef tuple fields\n */\n  __pyx_v_endian_detector = 1;\n\n  /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":848\n *     cdef dtype child\n *     cdef int endian_detector = 1\n *     cdef bint little_endian = ((<char*>&endian_detector)[0] != 0)             # <<<<<<<<<<<<<<\n *     cdef tuple fields\n * \n */\n  __pyx_v_little_endian = ((((char *)(&__pyx_v_endian_detector))[0]) != 0);\n\n  /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":851\n *     cdef tuple fields\n * \n *     for childname in descr.names:             # <<<<<<<<<<<<<<\n *         fields = descr.fields[childname]\n *         child, new_offset = fields\n */\n  if (unlikely(__pyx_v_descr->names == Py_None)) {\n    PyErr_SetString(PyExc_TypeError, \"'NoneType' object is not iterable\");\n    __PYX_ERR(2, 851, __pyx_L1_error)\n  }\n  __pyx_t_1 = __pyx_v_descr->names; 
__Pyx_INCREF(__pyx_t_1); __pyx_t_2 = 0;\n  for (;;) {\n    if (__pyx_t_2 >= PyTuple_GET_SIZE(__pyx_t_1)) break;\n    #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS\n    __pyx_t_3 = PyTuple_GET_ITEM(__pyx_t_1, __pyx_t_2); __Pyx_INCREF(__pyx_t_3); __pyx_t_2++; if (unlikely(0 < 0)) __PYX_ERR(2, 851, __pyx_L1_error)\n    #else\n    __pyx_t_3 = PySequence_ITEM(__pyx_t_1, __pyx_t_2); __pyx_t_2++; if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 851, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_3);\n    #endif\n    __Pyx_XDECREF_SET(__pyx_v_childname, __pyx_t_3);\n    __pyx_t_3 = 0;\n\n    /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":852\n * \n *     for childname in descr.names:\n *         fields = descr.fields[childname]             # <<<<<<<<<<<<<<\n *         child, new_offset = fields\n * \n */\n    if (unlikely(__pyx_v_descr->fields == Py_None)) {\n      PyErr_SetString(PyExc_TypeError, \"'NoneType' object is not subscriptable\");\n      __PYX_ERR(2, 852, __pyx_L1_error)\n    }\n    __pyx_t_3 = __Pyx_PyDict_GetItem(__pyx_v_descr->fields, __pyx_v_childname); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 852, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_3);\n    if (!(likely(PyTuple_CheckExact(__pyx_t_3))||((__pyx_t_3) == Py_None)||(PyErr_Format(PyExc_TypeError, \"Expected %.16s, got %.200s\", \"tuple\", Py_TYPE(__pyx_t_3)->tp_name), 0))) __PYX_ERR(2, 852, __pyx_L1_error)\n    __Pyx_XDECREF_SET(__pyx_v_fields, ((PyObject*)__pyx_t_3));\n    __pyx_t_3 = 0;\n\n    /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":853\n *     for childname in descr.names:\n *         fields = descr.fields[childname]\n *         child, new_offset = fields             # <<<<<<<<<<<<<<\n * \n *         if (end - f) - <int>(new_offset - offset[0]) < 15:\n */\n    if (likely(__pyx_v_fields != Py_None)) {\n      PyObject* sequence = __pyx_v_fields;\n      Py_ssize_t size = __Pyx_PySequence_SIZE(sequence);\n 
     if (unlikely(size != 2)) {\n        if (size > 2) __Pyx_RaiseTooManyValuesError(2);\n        else if (size >= 0) __Pyx_RaiseNeedMoreValuesError(size);\n        __PYX_ERR(2, 853, __pyx_L1_error)\n      }\n      #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS\n      __pyx_t_3 = PyTuple_GET_ITEM(sequence, 0); \n      __pyx_t_4 = PyTuple_GET_ITEM(sequence, 1); \n      __Pyx_INCREF(__pyx_t_3);\n      __Pyx_INCREF(__pyx_t_4);\n      #else\n      __pyx_t_3 = PySequence_ITEM(sequence, 0); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 853, __pyx_L1_error)\n      __Pyx_GOTREF(__pyx_t_3);\n      __pyx_t_4 = PySequence_ITEM(sequence, 1); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 853, __pyx_L1_error)\n      __Pyx_GOTREF(__pyx_t_4);\n      #endif\n    } else {\n      __Pyx_RaiseNoneNotIterableError(); __PYX_ERR(2, 853, __pyx_L1_error)\n    }\n    if (!(likely(((__pyx_t_3) == Py_None) || likely(__Pyx_TypeTest(__pyx_t_3, __pyx_ptype_5numpy_dtype))))) __PYX_ERR(2, 853, __pyx_L1_error)\n    __Pyx_XDECREF_SET(__pyx_v_child, ((PyArray_Descr *)__pyx_t_3));\n    __pyx_t_3 = 0;\n    __Pyx_XDECREF_SET(__pyx_v_new_offset, __pyx_t_4);\n    __pyx_t_4 = 0;\n\n    /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":855\n *         child, new_offset = fields\n * \n *         if (end - f) - <int>(new_offset - offset[0]) < 15:             # <<<<<<<<<<<<<<\n *             raise RuntimeError(u\"Format string allocated too short, see comment in numpy.pxd\")\n * \n */\n    __pyx_t_4 = __Pyx_PyInt_From_int((__pyx_v_offset[0])); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 855, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_4);\n    __pyx_t_3 = PyNumber_Subtract(__pyx_v_new_offset, __pyx_t_4); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 855, __pyx_L1_error)\n    __Pyx_GOTREF(__pyx_t_3);\n    __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;\n    __pyx_t_5 = __Pyx_PyInt_As_int(__pyx_t_3); if (unlikely((__pyx_t_5 == (int)-1) && PyErr_Occurred())) __PYX_ERR(2, 855, 
__pyx_L1_error)\n    __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;\n    __pyx_t_6 = ((((__pyx_v_end - __pyx_v_f) - ((int)__pyx_t_5)) < 15) != 0);\n    if (unlikely(__pyx_t_6)) {\n\n      /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":856\n * \n *         if (end - f) - <int>(new_offset - offset[0]) < 15:\n *             raise RuntimeError(u\"Format string allocated too short, see comment in numpy.pxd\")             # <<<<<<<<<<<<<<\n * \n *         if ((child.byteorder == c'>' and little_endian) or\n */\n      __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_RuntimeError, __pyx_tuple__24, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 856, __pyx_L1_error)\n      __Pyx_GOTREF(__pyx_t_3);\n      __Pyx_Raise(__pyx_t_3, 0, 0, 0);\n      __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;\n      __PYX_ERR(2, 856, __pyx_L1_error)\n\n      /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":855\n *         child, new_offset = fields\n * \n *         if (end - f) - <int>(new_offset - offset[0]) < 15:             # <<<<<<<<<<<<<<\n *             raise RuntimeError(u\"Format string allocated too short, see comment in numpy.pxd\")\n * \n */\n    }\n\n    /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":858\n *             raise RuntimeError(u\"Format string allocated too short, see comment in numpy.pxd\")\n * \n *         if ((child.byteorder == c'>' and little_endian) or             # <<<<<<<<<<<<<<\n *             (child.byteorder == c'<' and not little_endian)):\n *             raise ValueError(u\"Non-native byte order not supported\")\n */\n    __pyx_t_7 = ((__pyx_v_child->byteorder == '>') != 0);\n    if (!__pyx_t_7) {\n      goto __pyx_L8_next_or;\n    } else {\n    }\n    __pyx_t_7 = (__pyx_v_little_endian != 0);\n    if (!__pyx_t_7) {\n    } else {\n      __pyx_t_6 = __pyx_t_7;\n      goto __pyx_L7_bool_binop_done;\n    }\n    __pyx_L8_next_or:;\n\n    
/* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":859\n * \n *         if ((child.byteorder == c'>' and little_endian) or\n *             (child.byteorder == c'<' and not little_endian)):             # <<<<<<<<<<<<<<\n *             raise ValueError(u\"Non-native byte order not supported\")\n *             # One could encode it in the format string and have Cython\n */\n    __pyx_t_7 = ((__pyx_v_child->byteorder == '<') != 0);\n    if (__pyx_t_7) {\n    } else {\n      __pyx_t_6 = __pyx_t_7;\n      goto __pyx_L7_bool_binop_done;\n    }\n    __pyx_t_7 = ((!(__pyx_v_little_endian != 0)) != 0);\n    __pyx_t_6 = __pyx_t_7;\n    __pyx_L7_bool_binop_done:;\n\n    /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":858\n *             raise RuntimeError(u\"Format string allocated too short, see comment in numpy.pxd\")\n * \n *         if ((child.byteorder == c'>' and little_endian) or             # <<<<<<<<<<<<<<\n *             (child.byteorder == c'<' and not little_endian)):\n *             raise ValueError(u\"Non-native byte order not supported\")\n */\n    if (unlikely(__pyx_t_6)) {\n\n      /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":860\n *         if ((child.byteorder == c'>' and little_endian) or\n *             (child.byteorder == c'<' and not little_endian)):\n *             raise ValueError(u\"Non-native byte order not supported\")             # <<<<<<<<<<<<<<\n *             # One could encode it in the format string and have Cython\n *             # complain instead, BUT: < and > in format strings also imply\n */\n      __pyx_t_3 = __Pyx_PyObject_Call(__pyx_builtin_ValueError, __pyx_tuple__23, NULL); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 860, __pyx_L1_error)\n      __Pyx_GOTREF(__pyx_t_3);\n      __Pyx_Raise(__pyx_t_3, 0, 0, 0);\n      __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;\n      __PYX_ERR(2, 860, 
__pyx_L1_error)\n\n      /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":858\n *             raise RuntimeError(u\"Format string allocated too short, see comment in numpy.pxd\")\n * \n *         if ((child.byteorder == c'>' and little_endian) or             # <<<<<<<<<<<<<<\n *             (child.byteorder == c'<' and not little_endian)):\n *             raise ValueError(u\"Non-native byte order not supported\")\n */\n    }\n\n    /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":870\n * \n *         # Output padding bytes\n *         while offset[0] < new_offset:             # <<<<<<<<<<<<<<\n *             f[0] = 120 # \"x\"; pad byte\n *             f += 1\n */\n    while (1) {\n      __pyx_t_3 = __Pyx_PyInt_From_int((__pyx_v_offset[0])); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 870, __pyx_L1_error)\n      __Pyx_GOTREF(__pyx_t_3);\n      __pyx_t_4 = PyObject_RichCompare(__pyx_t_3, __pyx_v_new_offset, Py_LT); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 870, __pyx_L1_error)\n      __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;\n      __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(2, 870, __pyx_L1_error)\n      __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;\n      if (!__pyx_t_6) break;\n\n      /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":871\n *         # Output padding bytes\n *         while offset[0] < new_offset:\n *             f[0] = 120 # \"x\"; pad byte             # <<<<<<<<<<<<<<\n *             f += 1\n *             offset[0] += 1\n */\n      (__pyx_v_f[0]) = 0x78;\n\n      /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":872\n *         while offset[0] < new_offset:\n *             f[0] = 120 # \"x\"; pad byte\n *             f += 1             # <<<<<<<<<<<<<<\n *             offset[0] += 1\n * \n */\n      __pyx_v_f = 
(__pyx_v_f + 1);\n\n      /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":873\n *             f[0] = 120 # \"x\"; pad byte\n *             f += 1\n *             offset[0] += 1             # <<<<<<<<<<<<<<\n * \n *         offset[0] += child.itemsize\n */\n      __pyx_t_8 = 0;\n      (__pyx_v_offset[__pyx_t_8]) = ((__pyx_v_offset[__pyx_t_8]) + 1);\n    }\n\n    /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":875\n *             offset[0] += 1\n * \n *         offset[0] += child.itemsize             # <<<<<<<<<<<<<<\n * \n *         if not PyDataType_HASFIELDS(child):\n */\n    __pyx_t_8 = 0;\n    (__pyx_v_offset[__pyx_t_8]) = ((__pyx_v_offset[__pyx_t_8]) + __pyx_v_child->elsize);\n\n    /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":877\n *         offset[0] += child.itemsize\n * \n *         if not PyDataType_HASFIELDS(child):             # <<<<<<<<<<<<<<\n *             t = child.type_num\n *             if end - f < 5:\n */\n    __pyx_t_6 = ((!(PyDataType_HASFIELDS(__pyx_v_child) != 0)) != 0);\n    if (__pyx_t_6) {\n\n      /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":878\n * \n *         if not PyDataType_HASFIELDS(child):\n *             t = child.type_num             # <<<<<<<<<<<<<<\n *             if end - f < 5:\n *                 raise RuntimeError(u\"Format string allocated too short.\")\n */\n      __pyx_t_4 = __Pyx_PyInt_From_int(__pyx_v_child->type_num); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 878, __pyx_L1_error)\n      __Pyx_GOTREF(__pyx_t_4);\n      __Pyx_XDECREF_SET(__pyx_v_t, __pyx_t_4);\n      __pyx_t_4 = 0;\n\n      /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":879\n *         if not PyDataType_HASFIELDS(child):\n *             t = child.type_num\n *             if end - f < 5:             # 
<<<<<<<<<<<<<<\n *                 raise RuntimeError(u\"Format string allocated too short.\")\n * \n */\n      __pyx_t_6 = (((__pyx_v_end - __pyx_v_f) < 5) != 0);\n      if (unlikely(__pyx_t_6)) {\n\n        /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":880\n *             t = child.type_num\n *             if end - f < 5:\n *                 raise RuntimeError(u\"Format string allocated too short.\")             # <<<<<<<<<<<<<<\n * \n *             # Until ticket #99 is fixed, use integers to avoid warnings\n */\n        __pyx_t_4 = __Pyx_PyObject_Call(__pyx_builtin_RuntimeError, __pyx_tuple__25, NULL); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 880, __pyx_L1_error)\n        __Pyx_GOTREF(__pyx_t_4);\n        __Pyx_Raise(__pyx_t_4, 0, 0, 0);\n        __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;\n        __PYX_ERR(2, 880, __pyx_L1_error)\n\n        /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":879\n *         if not PyDataType_HASFIELDS(child):\n *             t = child.type_num\n *             if end - f < 5:             # <<<<<<<<<<<<<<\n *                 raise RuntimeError(u\"Format string allocated too short.\")\n * \n */\n      }\n\n      /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":883\n * \n *             # Until ticket #99 is fixed, use integers to avoid warnings\n *             if   t == NPY_BYTE:        f[0] =  98 #\"b\"             # <<<<<<<<<<<<<<\n *             elif t == NPY_UBYTE:       f[0] =  66 #\"B\"\n *             elif t == NPY_SHORT:       f[0] = 104 #\"h\"\n */\n      __pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_BYTE); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 883, __pyx_L1_error)\n      __Pyx_GOTREF(__pyx_t_4);\n      __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 883, __pyx_L1_error)\n      __Pyx_DECREF(__pyx_t_4); 
__pyx_t_4 = 0;\n      __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(2, 883, __pyx_L1_error)\n      __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;\n      if (__pyx_t_6) {\n        (__pyx_v_f[0]) = 98;\n        goto __pyx_L15;\n      }\n\n      /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":884\n *             # Until ticket #99 is fixed, use integers to avoid warnings\n *             if   t == NPY_BYTE:        f[0] =  98 #\"b\"\n *             elif t == NPY_UBYTE:       f[0] =  66 #\"B\"             # <<<<<<<<<<<<<<\n *             elif t == NPY_SHORT:       f[0] = 104 #\"h\"\n *             elif t == NPY_USHORT:      f[0] =  72 #\"H\"\n */\n      __pyx_t_3 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_UBYTE); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 884, __pyx_L1_error)\n      __Pyx_GOTREF(__pyx_t_3);\n      __pyx_t_4 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 884, __pyx_L1_error)\n      __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;\n      __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(2, 884, __pyx_L1_error)\n      __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;\n      if (__pyx_t_6) {\n        (__pyx_v_f[0]) = 66;\n        goto __pyx_L15;\n      }\n\n      /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":885\n *             if   t == NPY_BYTE:        f[0] =  98 #\"b\"\n *             elif t == NPY_UBYTE:       f[0] =  66 #\"B\"\n *             elif t == NPY_SHORT:       f[0] = 104 #\"h\"             # <<<<<<<<<<<<<<\n *             elif t == NPY_USHORT:      f[0] =  72 #\"H\"\n *             elif t == NPY_INT:         f[0] = 105 #\"i\"\n */\n      __pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_SHORT); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 885, __pyx_L1_error)\n      __Pyx_GOTREF(__pyx_t_4);\n      __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, 
__pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 885, __pyx_L1_error)\n      __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;\n      __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(2, 885, __pyx_L1_error)\n      __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;\n      if (__pyx_t_6) {\n        (__pyx_v_f[0]) = 0x68;\n        goto __pyx_L15;\n      }\n\n      /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":886\n *             elif t == NPY_UBYTE:       f[0] =  66 #\"B\"\n *             elif t == NPY_SHORT:       f[0] = 104 #\"h\"\n *             elif t == NPY_USHORT:      f[0] =  72 #\"H\"             # <<<<<<<<<<<<<<\n *             elif t == NPY_INT:         f[0] = 105 #\"i\"\n *             elif t == NPY_UINT:        f[0] =  73 #\"I\"\n */\n      __pyx_t_3 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_USHORT); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 886, __pyx_L1_error)\n      __Pyx_GOTREF(__pyx_t_3);\n      __pyx_t_4 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 886, __pyx_L1_error)\n      __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;\n      __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(2, 886, __pyx_L1_error)\n      __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;\n      if (__pyx_t_6) {\n        (__pyx_v_f[0]) = 72;\n        goto __pyx_L15;\n      }\n\n      /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":887\n *             elif t == NPY_SHORT:       f[0] = 104 #\"h\"\n *             elif t == NPY_USHORT:      f[0] =  72 #\"H\"\n *             elif t == NPY_INT:         f[0] = 105 #\"i\"             # <<<<<<<<<<<<<<\n *             elif t == NPY_UINT:        f[0] =  73 #\"I\"\n *             elif t == NPY_LONG:        f[0] = 108 #\"l\"\n */\n      __pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_INT); if (unlikely(!__pyx_t_4)) 
__PYX_ERR(2, 887, __pyx_L1_error)\n      __Pyx_GOTREF(__pyx_t_4);\n      __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 887, __pyx_L1_error)\n      __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;\n      __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(2, 887, __pyx_L1_error)\n      __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;\n      if (__pyx_t_6) {\n        (__pyx_v_f[0]) = 0x69;\n        goto __pyx_L15;\n      }\n\n      /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":888\n *             elif t == NPY_USHORT:      f[0] =  72 #\"H\"\n *             elif t == NPY_INT:         f[0] = 105 #\"i\"\n *             elif t == NPY_UINT:        f[0] =  73 #\"I\"             # <<<<<<<<<<<<<<\n *             elif t == NPY_LONG:        f[0] = 108 #\"l\"\n *             elif t == NPY_ULONG:       f[0] = 76  #\"L\"\n */\n      __pyx_t_3 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_UINT); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 888, __pyx_L1_error)\n      __Pyx_GOTREF(__pyx_t_3);\n      __pyx_t_4 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 888, __pyx_L1_error)\n      __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;\n      __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(2, 888, __pyx_L1_error)\n      __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;\n      if (__pyx_t_6) {\n        (__pyx_v_f[0]) = 73;\n        goto __pyx_L15;\n      }\n\n      /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":889\n *             elif t == NPY_INT:         f[0] = 105 #\"i\"\n *             elif t == NPY_UINT:        f[0] =  73 #\"I\"\n *             elif t == NPY_LONG:        f[0] = 108 #\"l\"             # <<<<<<<<<<<<<<\n *             elif t == NPY_ULONG:       f[0] = 76  #\"L\"\n *             elif t == 
NPY_LONGLONG:    f[0] = 113 #\"q\"\n */\n      __pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_LONG); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 889, __pyx_L1_error)\n      __Pyx_GOTREF(__pyx_t_4);\n      __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 889, __pyx_L1_error)\n      __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;\n      __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(2, 889, __pyx_L1_error)\n      __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;\n      if (__pyx_t_6) {\n        (__pyx_v_f[0]) = 0x6C;\n        goto __pyx_L15;\n      }\n\n      /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":890\n *             elif t == NPY_UINT:        f[0] =  73 #\"I\"\n *             elif t == NPY_LONG:        f[0] = 108 #\"l\"\n *             elif t == NPY_ULONG:       f[0] = 76  #\"L\"             # <<<<<<<<<<<<<<\n *             elif t == NPY_LONGLONG:    f[0] = 113 #\"q\"\n *             elif t == NPY_ULONGLONG:   f[0] = 81  #\"Q\"\n */\n      __pyx_t_3 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_ULONG); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 890, __pyx_L1_error)\n      __Pyx_GOTREF(__pyx_t_3);\n      __pyx_t_4 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 890, __pyx_L1_error)\n      __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;\n      __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(2, 890, __pyx_L1_error)\n      __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;\n      if (__pyx_t_6) {\n        (__pyx_v_f[0]) = 76;\n        goto __pyx_L15;\n      }\n\n      /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":891\n *             elif t == NPY_LONG:        f[0] = 108 #\"l\"\n *             elif t == NPY_ULONG:       f[0] = 76  #\"L\"\n *             elif t == NPY_LONGLONG:    f[0] = 113 
#\"q\"             # <<<<<<<<<<<<<<\n *             elif t == NPY_ULONGLONG:   f[0] = 81  #\"Q\"\n *             elif t == NPY_FLOAT:       f[0] = 102 #\"f\"\n */\n      __pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_LONGLONG); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 891, __pyx_L1_error)\n      __Pyx_GOTREF(__pyx_t_4);\n      __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 891, __pyx_L1_error)\n      __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;\n      __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(2, 891, __pyx_L1_error)\n      __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;\n      if (__pyx_t_6) {\n        (__pyx_v_f[0]) = 0x71;\n        goto __pyx_L15;\n      }\n\n      /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":892\n *             elif t == NPY_ULONG:       f[0] = 76  #\"L\"\n *             elif t == NPY_LONGLONG:    f[0] = 113 #\"q\"\n *             elif t == NPY_ULONGLONG:   f[0] = 81  #\"Q\"             # <<<<<<<<<<<<<<\n *             elif t == NPY_FLOAT:       f[0] = 102 #\"f\"\n *             elif t == NPY_DOUBLE:      f[0] = 100 #\"d\"\n */\n      __pyx_t_3 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_ULONGLONG); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 892, __pyx_L1_error)\n      __Pyx_GOTREF(__pyx_t_3);\n      __pyx_t_4 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 892, __pyx_L1_error)\n      __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;\n      __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(2, 892, __pyx_L1_error)\n      __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;\n      if (__pyx_t_6) {\n        (__pyx_v_f[0]) = 81;\n        goto __pyx_L15;\n      }\n\n      /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":893\n *             elif t == NPY_LONGLONG:    f[0] = 
113 #\"q\"\n *             elif t == NPY_ULONGLONG:   f[0] = 81  #\"Q\"\n *             elif t == NPY_FLOAT:       f[0] = 102 #\"f\"             # <<<<<<<<<<<<<<\n *             elif t == NPY_DOUBLE:      f[0] = 100 #\"d\"\n *             elif t == NPY_LONGDOUBLE:  f[0] = 103 #\"g\"\n */\n      __pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_FLOAT); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 893, __pyx_L1_error)\n      __Pyx_GOTREF(__pyx_t_4);\n      __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 893, __pyx_L1_error)\n      __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;\n      __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(2, 893, __pyx_L1_error)\n      __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;\n      if (__pyx_t_6) {\n        (__pyx_v_f[0]) = 0x66;\n        goto __pyx_L15;\n      }\n\n      /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":894\n *             elif t == NPY_ULONGLONG:   f[0] = 81  #\"Q\"\n *             elif t == NPY_FLOAT:       f[0] = 102 #\"f\"\n *             elif t == NPY_DOUBLE:      f[0] = 100 #\"d\"             # <<<<<<<<<<<<<<\n *             elif t == NPY_LONGDOUBLE:  f[0] = 103 #\"g\"\n *             elif t == NPY_CFLOAT:      f[0] = 90; f[1] = 102; f += 1 # Zf\n */\n      __pyx_t_3 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_DOUBLE); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 894, __pyx_L1_error)\n      __Pyx_GOTREF(__pyx_t_3);\n      __pyx_t_4 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 894, __pyx_L1_error)\n      __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;\n      __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(2, 894, __pyx_L1_error)\n      __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;\n      if (__pyx_t_6) {\n        (__pyx_v_f[0]) = 0x64;\n        goto __pyx_L15;\n      }\n\n      /* 
\"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":895\n *             elif t == NPY_FLOAT:       f[0] = 102 #\"f\"\n *             elif t == NPY_DOUBLE:      f[0] = 100 #\"d\"\n *             elif t == NPY_LONGDOUBLE:  f[0] = 103 #\"g\"             # <<<<<<<<<<<<<<\n *             elif t == NPY_CFLOAT:      f[0] = 90; f[1] = 102; f += 1 # Zf\n *             elif t == NPY_CDOUBLE:     f[0] = 90; f[1] = 100; f += 1 # Zd\n */\n      __pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_LONGDOUBLE); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 895, __pyx_L1_error)\n      __Pyx_GOTREF(__pyx_t_4);\n      __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 895, __pyx_L1_error)\n      __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;\n      __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(2, 895, __pyx_L1_error)\n      __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;\n      if (__pyx_t_6) {\n        (__pyx_v_f[0]) = 0x67;\n        goto __pyx_L15;\n      }\n\n      /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":896\n *             elif t == NPY_DOUBLE:      f[0] = 100 #\"d\"\n *             elif t == NPY_LONGDOUBLE:  f[0] = 103 #\"g\"\n *             elif t == NPY_CFLOAT:      f[0] = 90; f[1] = 102; f += 1 # Zf             # <<<<<<<<<<<<<<\n *             elif t == NPY_CDOUBLE:     f[0] = 90; f[1] = 100; f += 1 # Zd\n *             elif t == NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg\n */\n      __pyx_t_3 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_CFLOAT); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 896, __pyx_L1_error)\n      __Pyx_GOTREF(__pyx_t_3);\n      __pyx_t_4 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 896, __pyx_L1_error)\n      __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;\n      __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); 
if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(2, 896, __pyx_L1_error)\n      __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;\n      if (__pyx_t_6) {\n        (__pyx_v_f[0]) = 90;\n        (__pyx_v_f[1]) = 0x66;\n        __pyx_v_f = (__pyx_v_f + 1);\n        goto __pyx_L15;\n      }\n\n      /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":897\n *             elif t == NPY_LONGDOUBLE:  f[0] = 103 #\"g\"\n *             elif t == NPY_CFLOAT:      f[0] = 90; f[1] = 102; f += 1 # Zf\n *             elif t == NPY_CDOUBLE:     f[0] = 90; f[1] = 100; f += 1 # Zd             # <<<<<<<<<<<<<<\n *             elif t == NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg\n *             elif t == NPY_OBJECT:      f[0] = 79 #\"O\"\n */\n      __pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_CDOUBLE); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 897, __pyx_L1_error)\n      __Pyx_GOTREF(__pyx_t_4);\n      __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 897, __pyx_L1_error)\n      __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;\n      __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(2, 897, __pyx_L1_error)\n      __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;\n      if (__pyx_t_6) {\n        (__pyx_v_f[0]) = 90;\n        (__pyx_v_f[1]) = 0x64;\n        __pyx_v_f = (__pyx_v_f + 1);\n        goto __pyx_L15;\n      }\n\n      /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":898\n *             elif t == NPY_CFLOAT:      f[0] = 90; f[1] = 102; f += 1 # Zf\n *             elif t == NPY_CDOUBLE:     f[0] = 90; f[1] = 100; f += 1 # Zd\n *             elif t == NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg             # <<<<<<<<<<<<<<\n *             elif t == NPY_OBJECT:      f[0] = 79 #\"O\"\n *             else:\n */\n      __pyx_t_3 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_CLONGDOUBLE); if 
(unlikely(!__pyx_t_3)) __PYX_ERR(2, 898, __pyx_L1_error)\n      __Pyx_GOTREF(__pyx_t_3);\n      __pyx_t_4 = PyObject_RichCompare(__pyx_v_t, __pyx_t_3, Py_EQ); __Pyx_XGOTREF(__pyx_t_4); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 898, __pyx_L1_error)\n      __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;\n      __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(2, 898, __pyx_L1_error)\n      __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;\n      if (__pyx_t_6) {\n        (__pyx_v_f[0]) = 90;\n        (__pyx_v_f[1]) = 0x67;\n        __pyx_v_f = (__pyx_v_f + 1);\n        goto __pyx_L15;\n      }\n\n      /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":899\n *             elif t == NPY_CDOUBLE:     f[0] = 90; f[1] = 100; f += 1 # Zd\n *             elif t == NPY_CLONGDOUBLE: f[0] = 90; f[1] = 103; f += 1 # Zg\n *             elif t == NPY_OBJECT:      f[0] = 79 #\"O\"             # <<<<<<<<<<<<<<\n *             else:\n *                 raise ValueError(u\"unknown dtype code in numpy.pxd (%d)\" % t)\n */\n      __pyx_t_4 = __Pyx_PyInt_From_enum__NPY_TYPES(NPY_OBJECT); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 899, __pyx_L1_error)\n      __Pyx_GOTREF(__pyx_t_4);\n      __pyx_t_3 = PyObject_RichCompare(__pyx_v_t, __pyx_t_4, Py_EQ); __Pyx_XGOTREF(__pyx_t_3); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 899, __pyx_L1_error)\n      __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;\n      __pyx_t_6 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_6 < 0)) __PYX_ERR(2, 899, __pyx_L1_error)\n      __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;\n      if (likely(__pyx_t_6)) {\n        (__pyx_v_f[0]) = 79;\n        goto __pyx_L15;\n      }\n\n      /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":901\n *             elif t == NPY_OBJECT:      f[0] = 79 #\"O\"\n *             else:\n *                 raise ValueError(u\"unknown dtype code in numpy.pxd (%d)\" % t)             # 
<<<<<<<<<<<<<<\n *             f += 1\n *         else:\n */\n      /*else*/ {\n        __pyx_t_3 = __Pyx_PyUnicode_FormatSafe(__pyx_kp_u_unknown_dtype_code_in_numpy_pxd, __pyx_v_t); if (unlikely(!__pyx_t_3)) __PYX_ERR(2, 901, __pyx_L1_error)\n        __Pyx_GOTREF(__pyx_t_3);\n        __pyx_t_4 = __Pyx_PyObject_CallOneArg(__pyx_builtin_ValueError, __pyx_t_3); if (unlikely(!__pyx_t_4)) __PYX_ERR(2, 901, __pyx_L1_error)\n        __Pyx_GOTREF(__pyx_t_4);\n        __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;\n        __Pyx_Raise(__pyx_t_4, 0, 0, 0);\n        __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;\n        __PYX_ERR(2, 901, __pyx_L1_error)\n      }\n      __pyx_L15:;\n\n      /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":902\n *             else:\n *                 raise ValueError(u\"unknown dtype code in numpy.pxd (%d)\" % t)\n *             f += 1             # <<<<<<<<<<<<<<\n *         else:\n *             # Cython ignores struct boundary information (\"T{...}\"),\n */\n      __pyx_v_f = (__pyx_v_f + 1);\n\n      /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":877\n *         offset[0] += child.itemsize\n * \n *         if not PyDataType_HASFIELDS(child):             # <<<<<<<<<<<<<<\n *             t = child.type_num\n *             if end - f < 5:\n */\n      goto __pyx_L13;\n    }\n\n    /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":906\n *             # Cython ignores struct boundary information (\"T{...}\"),\n *             # so don't output it\n *             f = _util_dtypestring(child, f, end, offset)             # <<<<<<<<<<<<<<\n *     return f\n * \n */\n    /*else*/ {\n      __pyx_t_9 = __pyx_f_5numpy__util_dtypestring(__pyx_v_child, __pyx_v_f, __pyx_v_end, __pyx_v_offset); if (unlikely(__pyx_t_9 == ((char *)NULL))) __PYX_ERR(2, 906, __pyx_L1_error)\n      __pyx_v_f = __pyx_t_9;\n    }\n    __pyx_L13:;\n\n    
/* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":851\n *     cdef tuple fields\n * \n *     for childname in descr.names:             # <<<<<<<<<<<<<<\n *         fields = descr.fields[childname]\n *         child, new_offset = fields\n */\n  }\n  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;\n\n  /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":907\n *             # so don't output it\n *             f = _util_dtypestring(child, f, end, offset)\n *     return f             # <<<<<<<<<<<<<<\n * \n * \n */\n  __pyx_r = __pyx_v_f;\n  goto __pyx_L0;\n\n  /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":842\n *         return ()\n * \n * cdef inline char* _util_dtypestring(dtype descr, char* f, char* end, int* offset) except NULL:             # <<<<<<<<<<<<<<\n *     # Recursive utility function used in __getbuffer__ to get format\n *     # string. The new location in the format string is returned.\n */\n\n  /* function exit code */\n  __pyx_L1_error:;\n  __Pyx_XDECREF(__pyx_t_1);\n  __Pyx_XDECREF(__pyx_t_3);\n  __Pyx_XDECREF(__pyx_t_4);\n  __Pyx_AddTraceback(\"numpy._util_dtypestring\", __pyx_clineno, __pyx_lineno, __pyx_filename);\n  __pyx_r = NULL;\n  __pyx_L0:;\n  __Pyx_XDECREF((PyObject *)__pyx_v_child);\n  __Pyx_XDECREF(__pyx_v_fields);\n  __Pyx_XDECREF(__pyx_v_childname);\n  __Pyx_XDECREF(__pyx_v_new_offset);\n  __Pyx_XDECREF(__pyx_v_t);\n  __Pyx_RefNannyFinishContext();\n  return __pyx_r;\n}\n\n/* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":1022\n *     int _import_umath() except -1\n * \n * cdef inline void set_array_base(ndarray arr, object base):             # <<<<<<<<<<<<<<\n *     Py_INCREF(base) # important to do this before stealing the reference below!\n *     PyArray_SetBaseObject(arr, base)\n */\n\nstatic CYTHON_INLINE void __pyx_f_5numpy_set_array_base(PyArrayObject 
*__pyx_v_arr, PyObject *__pyx_v_base) {\n  __Pyx_RefNannyDeclarations\n  __Pyx_RefNannySetupContext(\"set_array_base\", 0);\n\n  /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":1023\n * \n * cdef inline void set_array_base(ndarray arr, object base):\n *     Py_INCREF(base) # important to do this before stealing the reference below!             # <<<<<<<<<<<<<<\n *     PyArray_SetBaseObject(arr, base)\n * \n */\n  Py_INCREF(__pyx_v_base);\n\n  /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":1024\n * cdef inline void set_array_base(ndarray arr, object base):\n *     Py_INCREF(base) # important to do this before stealing the reference below!\n *     PyArray_SetBaseObject(arr, base)             # <<<<<<<<<<<<<<\n * \n * cdef inline object get_array_base(ndarray arr):\n */\n  (void)(PyArray_SetBaseObject(__pyx_v_arr, __pyx_v_base));\n\n  /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":1022\n *     int _import_umath() except -1\n * \n * cdef inline void set_array_base(ndarray arr, object base):             # <<<<<<<<<<<<<<\n *     Py_INCREF(base) # important to do this before stealing the reference below!\n *     PyArray_SetBaseObject(arr, base)\n */\n\n  /* function exit code */\n  __Pyx_RefNannyFinishContext();\n}\n\n/* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":1026\n *     PyArray_SetBaseObject(arr, base)\n * \n * cdef inline object get_array_base(ndarray arr):             # <<<<<<<<<<<<<<\n *     base = PyArray_BASE(arr)\n *     if base is NULL:\n */\n\nstatic CYTHON_INLINE PyObject *__pyx_f_5numpy_get_array_base(PyArrayObject *__pyx_v_arr) {\n  PyObject *__pyx_v_base;\n  PyObject *__pyx_r = NULL;\n  __Pyx_RefNannyDeclarations\n  int __pyx_t_1;\n  __Pyx_RefNannySetupContext(\"get_array_base\", 0);\n\n  /* 
\"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":1027\n * \n * cdef inline object get_array_base(ndarray arr):\n *     base = PyArray_BASE(arr)             # <<<<<<<<<<<<<<\n *     if base is NULL:\n *         return None\n */\n  __pyx_v_base = PyArray_BASE(__pyx_v_arr);\n\n  /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":1028\n * cdef inline object get_array_base(ndarray arr):\n *     base = PyArray_BASE(arr)\n *     if base is NULL:             # <<<<<<<<<<<<<<\n *         return None\n *     return <object>base\n */\n  __pyx_t_1 = ((__pyx_v_base == NULL) != 0);\n  if (__pyx_t_1) {\n\n    /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":1029\n *     base = PyArray_BASE(arr)\n *     if base is NULL:\n *         return None             # <<<<<<<<<<<<<<\n *     return <object>base\n * \n */\n    __Pyx_XDECREF(__pyx_r);\n    __pyx_r = Py_None; __Pyx_INCREF(Py_None);\n    goto __pyx_L0;\n\n    /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":1028\n * cdef inline object get_array_base(ndarray arr):\n *     base = PyArray_BASE(arr)\n *     if base is NULL:             # <<<<<<<<<<<<<<\n *         return None\n *     return <object>base\n */\n  }\n\n  /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":1030\n *     if base is NULL:\n *         return None\n *     return <object>base             # <<<<<<<<<<<<<<\n * \n * # Versions of the import_* functions which are more suitable for\n */\n  __Pyx_XDECREF(__pyx_r);\n  __Pyx_INCREF(((PyObject *)__pyx_v_base));\n  __pyx_r = ((PyObject *)__pyx_v_base);\n  goto __pyx_L0;\n\n  /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":1026\n *     PyArray_SetBaseObject(arr, base)\n * \n * cdef inline object get_array_base(ndarray arr):             # 
<<<<<<<<<<<<<<\n *     base = PyArray_BASE(arr)\n *     if base is NULL:\n */\n\n  /* function exit code */\n  __pyx_L0:;\n  __Pyx_XGIVEREF(__pyx_r);\n  __Pyx_RefNannyFinishContext();\n  return __pyx_r;\n}\n\n/* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":1034\n * # Versions of the import_* functions which are more suitable for\n * # Cython code.\n * cdef inline int import_array() except -1:             # <<<<<<<<<<<<<<\n *     try:\n *         _import_array()\n */\n\nstatic CYTHON_INLINE int __pyx_f_5numpy_import_array(void) {\n  int __pyx_r;\n  __Pyx_RefNannyDeclarations\n  PyObject *__pyx_t_1 = NULL;\n  PyObject *__pyx_t_2 = NULL;\n  PyObject *__pyx_t_3 = NULL;\n  int __pyx_t_4;\n  PyObject *__pyx_t_5 = NULL;\n  PyObject *__pyx_t_6 = NULL;\n  PyObject *__pyx_t_7 = NULL;\n  PyObject *__pyx_t_8 = NULL;\n  __Pyx_RefNannySetupContext(\"import_array\", 0);\n\n  /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":1035\n * # Cython code.\n * cdef inline int import_array() except -1:\n *     try:             # <<<<<<<<<<<<<<\n *         _import_array()\n *     except Exception:\n */\n  {\n    __Pyx_PyThreadState_declare\n    __Pyx_PyThreadState_assign\n    __Pyx_ExceptionSave(&__pyx_t_1, &__pyx_t_2, &__pyx_t_3);\n    __Pyx_XGOTREF(__pyx_t_1);\n    __Pyx_XGOTREF(__pyx_t_2);\n    __Pyx_XGOTREF(__pyx_t_3);\n    /*try:*/ {\n\n      /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":1036\n * cdef inline int import_array() except -1:\n *     try:\n *         _import_array()             # <<<<<<<<<<<<<<\n *     except Exception:\n *         raise ImportError(\"numpy.core.multiarray failed to import\")\n */\n      __pyx_t_4 = _import_array(); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(2, 1036, __pyx_L3_error)\n\n      /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":1035\n * # Cython code.\n 
* cdef inline int import_array() except -1:\n *     try:             # <<<<<<<<<<<<<<\n *         _import_array()\n *     except Exception:\n */\n    }\n    __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0;\n    __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0;\n    __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0;\n    goto __pyx_L8_try_end;\n    __pyx_L3_error:;\n\n    /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":1037\n *     try:\n *         _import_array()\n *     except Exception:             # <<<<<<<<<<<<<<\n *         raise ImportError(\"numpy.core.multiarray failed to import\")\n * \n */\n    __pyx_t_4 = __Pyx_PyErr_ExceptionMatches(((PyObject *)(&((PyTypeObject*)PyExc_Exception)[0])));\n    if (__pyx_t_4) {\n      __Pyx_AddTraceback(\"numpy.import_array\", __pyx_clineno, __pyx_lineno, __pyx_filename);\n      if (__Pyx_GetException(&__pyx_t_5, &__pyx_t_6, &__pyx_t_7) < 0) __PYX_ERR(2, 1037, __pyx_L5_except_error)\n      __Pyx_GOTREF(__pyx_t_5);\n      __Pyx_GOTREF(__pyx_t_6);\n      __Pyx_GOTREF(__pyx_t_7);\n\n      /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":1038\n *         _import_array()\n *     except Exception:\n *         raise ImportError(\"numpy.core.multiarray failed to import\")             # <<<<<<<<<<<<<<\n * \n * cdef inline int import_umath() except -1:\n */\n      __pyx_t_8 = __Pyx_PyObject_Call(__pyx_builtin_ImportError, __pyx_tuple__26, NULL); if (unlikely(!__pyx_t_8)) __PYX_ERR(2, 1038, __pyx_L5_except_error)\n      __Pyx_GOTREF(__pyx_t_8);\n      __Pyx_Raise(__pyx_t_8, 0, 0, 0);\n      __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0;\n      __PYX_ERR(2, 1038, __pyx_L5_except_error)\n    }\n    goto __pyx_L5_except_error;\n    __pyx_L5_except_error:;\n\n    /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":1035\n * # Cython code.\n * cdef inline int import_array() except -1:\n *     try:             # <<<<<<<<<<<<<<\n *        
 _import_array()\n *     except Exception:\n */\n    __Pyx_XGIVEREF(__pyx_t_1);\n    __Pyx_XGIVEREF(__pyx_t_2);\n    __Pyx_XGIVEREF(__pyx_t_3);\n    __Pyx_ExceptionReset(__pyx_t_1, __pyx_t_2, __pyx_t_3);\n    goto __pyx_L1_error;\n    __pyx_L8_try_end:;\n  }\n\n  /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":1034\n * # Versions of the import_* functions which are more suitable for\n * # Cython code.\n * cdef inline int import_array() except -1:             # <<<<<<<<<<<<<<\n *     try:\n *         _import_array()\n */\n\n  /* function exit code */\n  __pyx_r = 0;\n  goto __pyx_L0;\n  __pyx_L1_error:;\n  __Pyx_XDECREF(__pyx_t_5);\n  __Pyx_XDECREF(__pyx_t_6);\n  __Pyx_XDECREF(__pyx_t_7);\n  __Pyx_XDECREF(__pyx_t_8);\n  __Pyx_AddTraceback(\"numpy.import_array\", __pyx_clineno, __pyx_lineno, __pyx_filename);\n  __pyx_r = -1;\n  __pyx_L0:;\n  __Pyx_RefNannyFinishContext();\n  return __pyx_r;\n}\n\n/* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":1040\n *         raise ImportError(\"numpy.core.multiarray failed to import\")\n * \n * cdef inline int import_umath() except -1:             # <<<<<<<<<<<<<<\n *     try:\n *         _import_umath()\n */\n\nstatic CYTHON_INLINE int __pyx_f_5numpy_import_umath(void) {\n  int __pyx_r;\n  __Pyx_RefNannyDeclarations\n  PyObject *__pyx_t_1 = NULL;\n  PyObject *__pyx_t_2 = NULL;\n  PyObject *__pyx_t_3 = NULL;\n  int __pyx_t_4;\n  PyObject *__pyx_t_5 = NULL;\n  PyObject *__pyx_t_6 = NULL;\n  PyObject *__pyx_t_7 = NULL;\n  PyObject *__pyx_t_8 = NULL;\n  __Pyx_RefNannySetupContext(\"import_umath\", 0);\n\n  /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":1041\n * \n * cdef inline int import_umath() except -1:\n *     try:             # <<<<<<<<<<<<<<\n *         _import_umath()\n *     except Exception:\n */\n  {\n    __Pyx_PyThreadState_declare\n    __Pyx_PyThreadState_assign\n    
__Pyx_ExceptionSave(&__pyx_t_1, &__pyx_t_2, &__pyx_t_3);\n    __Pyx_XGOTREF(__pyx_t_1);\n    __Pyx_XGOTREF(__pyx_t_2);\n    __Pyx_XGOTREF(__pyx_t_3);\n    /*try:*/ {\n\n      /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":1042\n * cdef inline int import_umath() except -1:\n *     try:\n *         _import_umath()             # <<<<<<<<<<<<<<\n *     except Exception:\n *         raise ImportError(\"numpy.core.umath failed to import\")\n */\n      __pyx_t_4 = _import_umath(); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(2, 1042, __pyx_L3_error)\n\n      /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":1041\n * \n * cdef inline int import_umath() except -1:\n *     try:             # <<<<<<<<<<<<<<\n *         _import_umath()\n *     except Exception:\n */\n    }\n    __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0;\n    __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0;\n    __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0;\n    goto __pyx_L8_try_end;\n    __pyx_L3_error:;\n\n    /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":1043\n *     try:\n *         _import_umath()\n *     except Exception:             # <<<<<<<<<<<<<<\n *         raise ImportError(\"numpy.core.umath failed to import\")\n * \n */\n    __pyx_t_4 = __Pyx_PyErr_ExceptionMatches(((PyObject *)(&((PyTypeObject*)PyExc_Exception)[0])));\n    if (__pyx_t_4) {\n      __Pyx_AddTraceback(\"numpy.import_umath\", __pyx_clineno, __pyx_lineno, __pyx_filename);\n      if (__Pyx_GetException(&__pyx_t_5, &__pyx_t_6, &__pyx_t_7) < 0) __PYX_ERR(2, 1043, __pyx_L5_except_error)\n      __Pyx_GOTREF(__pyx_t_5);\n      __Pyx_GOTREF(__pyx_t_6);\n      __Pyx_GOTREF(__pyx_t_7);\n\n      /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":1044\n *         _import_umath()\n *     except Exception:\n *         raise ImportError(\"numpy.core.umath failed to 
import\")             # <<<<<<<<<<<<<<\n * \n * cdef inline int import_ufunc() except -1:\n */\n      __pyx_t_8 = __Pyx_PyObject_Call(__pyx_builtin_ImportError, __pyx_tuple__27, NULL); if (unlikely(!__pyx_t_8)) __PYX_ERR(2, 1044, __pyx_L5_except_error)\n      __Pyx_GOTREF(__pyx_t_8);\n      __Pyx_Raise(__pyx_t_8, 0, 0, 0);\n      __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0;\n      __PYX_ERR(2, 1044, __pyx_L5_except_error)\n    }\n    goto __pyx_L5_except_error;\n    __pyx_L5_except_error:;\n\n    /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":1041\n * \n * cdef inline int import_umath() except -1:\n *     try:             # <<<<<<<<<<<<<<\n *         _import_umath()\n *     except Exception:\n */\n    __Pyx_XGIVEREF(__pyx_t_1);\n    __Pyx_XGIVEREF(__pyx_t_2);\n    __Pyx_XGIVEREF(__pyx_t_3);\n    __Pyx_ExceptionReset(__pyx_t_1, __pyx_t_2, __pyx_t_3);\n    goto __pyx_L1_error;\n    __pyx_L8_try_end:;\n  }\n\n  /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":1040\n *         raise ImportError(\"numpy.core.multiarray failed to import\")\n * \n * cdef inline int import_umath() except -1:             # <<<<<<<<<<<<<<\n *     try:\n *         _import_umath()\n */\n\n  /* function exit code */\n  __pyx_r = 0;\n  goto __pyx_L0;\n  __pyx_L1_error:;\n  __Pyx_XDECREF(__pyx_t_5);\n  __Pyx_XDECREF(__pyx_t_6);\n  __Pyx_XDECREF(__pyx_t_7);\n  __Pyx_XDECREF(__pyx_t_8);\n  __Pyx_AddTraceback(\"numpy.import_umath\", __pyx_clineno, __pyx_lineno, __pyx_filename);\n  __pyx_r = -1;\n  __pyx_L0:;\n  __Pyx_RefNannyFinishContext();\n  return __pyx_r;\n}\n\n/* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":1046\n *         raise ImportError(\"numpy.core.umath failed to import\")\n * \n * cdef inline int import_ufunc() except -1:             # <<<<<<<<<<<<<<\n *     try:\n *         _import_umath()\n */\n\nstatic CYTHON_INLINE int 
__pyx_f_5numpy_import_ufunc(void) {\n  int __pyx_r;\n  __Pyx_RefNannyDeclarations\n  PyObject *__pyx_t_1 = NULL;\n  PyObject *__pyx_t_2 = NULL;\n  PyObject *__pyx_t_3 = NULL;\n  int __pyx_t_4;\n  PyObject *__pyx_t_5 = NULL;\n  PyObject *__pyx_t_6 = NULL;\n  PyObject *__pyx_t_7 = NULL;\n  PyObject *__pyx_t_8 = NULL;\n  __Pyx_RefNannySetupContext(\"import_ufunc\", 0);\n\n  /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":1047\n * \n * cdef inline int import_ufunc() except -1:\n *     try:             # <<<<<<<<<<<<<<\n *         _import_umath()\n *     except Exception:\n */\n  {\n    __Pyx_PyThreadState_declare\n    __Pyx_PyThreadState_assign\n    __Pyx_ExceptionSave(&__pyx_t_1, &__pyx_t_2, &__pyx_t_3);\n    __Pyx_XGOTREF(__pyx_t_1);\n    __Pyx_XGOTREF(__pyx_t_2);\n    __Pyx_XGOTREF(__pyx_t_3);\n    /*try:*/ {\n\n      /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":1048\n * cdef inline int import_ufunc() except -1:\n *     try:\n *         _import_umath()             # <<<<<<<<<<<<<<\n *     except Exception:\n *         raise ImportError(\"numpy.core.umath failed to import\")\n */\n      __pyx_t_4 = _import_umath(); if (unlikely(__pyx_t_4 == ((int)-1))) __PYX_ERR(2, 1048, __pyx_L3_error)\n\n      /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":1047\n * \n * cdef inline int import_ufunc() except -1:\n *     try:             # <<<<<<<<<<<<<<\n *         _import_umath()\n *     except Exception:\n */\n    }\n    __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0;\n    __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0;\n    __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0;\n    goto __pyx_L8_try_end;\n    __pyx_L3_error:;\n\n    /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":1049\n *     try:\n *         _import_umath()\n *     except Exception:             # <<<<<<<<<<<<<<\n *         raise 
ImportError(\"numpy.core.umath failed to import\")\n */\n    __pyx_t_4 = __Pyx_PyErr_ExceptionMatches(((PyObject *)(&((PyTypeObject*)PyExc_Exception)[0])));\n    if (__pyx_t_4) {\n      __Pyx_AddTraceback(\"numpy.import_ufunc\", __pyx_clineno, __pyx_lineno, __pyx_filename);\n      if (__Pyx_GetException(&__pyx_t_5, &__pyx_t_6, &__pyx_t_7) < 0) __PYX_ERR(2, 1049, __pyx_L5_except_error)\n      __Pyx_GOTREF(__pyx_t_5);\n      __Pyx_GOTREF(__pyx_t_6);\n      __Pyx_GOTREF(__pyx_t_7);\n\n      /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":1050\n *         _import_umath()\n *     except Exception:\n *         raise ImportError(\"numpy.core.umath failed to import\")             # <<<<<<<<<<<<<<\n */\n      __pyx_t_8 = __Pyx_PyObject_Call(__pyx_builtin_ImportError, __pyx_tuple__27, NULL); if (unlikely(!__pyx_t_8)) __PYX_ERR(2, 1050, __pyx_L5_except_error)\n      __Pyx_GOTREF(__pyx_t_8);\n      __Pyx_Raise(__pyx_t_8, 0, 0, 0);\n      __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0;\n      __PYX_ERR(2, 1050, __pyx_L5_except_error)\n    }\n    goto __pyx_L5_except_error;\n    __pyx_L5_except_error:;\n\n    /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":1047\n * \n * cdef inline int import_ufunc() except -1:\n *     try:             # <<<<<<<<<<<<<<\n *         _import_umath()\n *     except Exception:\n */\n    __Pyx_XGIVEREF(__pyx_t_1);\n    __Pyx_XGIVEREF(__pyx_t_2);\n    __Pyx_XGIVEREF(__pyx_t_3);\n    __Pyx_ExceptionReset(__pyx_t_1, __pyx_t_2, __pyx_t_3);\n    goto __pyx_L1_error;\n    __pyx_L8_try_end:;\n  }\n\n  /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":1046\n *         raise ImportError(\"numpy.core.umath failed to import\")\n * \n * cdef inline int import_ufunc() except -1:             # <<<<<<<<<<<<<<\n *     try:\n *         _import_umath()\n */\n\n  /* function exit code */\n  __pyx_r = 0;\n  goto __pyx_L0;\n  
__pyx_L1_error:;\n  __Pyx_XDECREF(__pyx_t_5);\n  __Pyx_XDECREF(__pyx_t_6);\n  __Pyx_XDECREF(__pyx_t_7);\n  __Pyx_XDECREF(__pyx_t_8);\n  __Pyx_AddTraceback(\"numpy.import_ufunc\", __pyx_clineno, __pyx_lineno, __pyx_filename);\n  __pyx_r = -1;\n  __pyx_L0:;\n  __Pyx_RefNannyFinishContext();\n  return __pyx_r;\n}\n\nstatic PyObject *__pyx_tp_new_11pysobatools_5_mask_RLEs(PyTypeObject *t, PyObject *a, PyObject *k) {\n  PyObject *o;\n  if (likely((t->tp_flags & Py_TPFLAGS_IS_ABSTRACT) == 0)) {\n    o = (*t->tp_alloc)(t, 0);\n  } else {\n    o = (PyObject *) PyBaseObject_Type.tp_new(t, __pyx_empty_tuple, 0);\n  }\n  if (unlikely(!o)) return 0;\n  if (unlikely(__pyx_pw_11pysobatools_5_mask_4RLEs_1__cinit__(o, a, k) < 0)) goto bad;\n  return o;\n  bad:\n  Py_DECREF(o); o = 0;\n  return NULL;\n}\n\nstatic void __pyx_tp_dealloc_11pysobatools_5_mask_RLEs(PyObject *o) {\n  #if CYTHON_USE_TP_FINALIZE\n  if (unlikely(PyType_HasFeature(Py_TYPE(o), Py_TPFLAGS_HAVE_FINALIZE) && Py_TYPE(o)->tp_finalize) && (!PyType_IS_GC(Py_TYPE(o)) || !_PyGC_FINALIZED(o))) {\n    if (PyObject_CallFinalizerFromDealloc(o)) return;\n  }\n  #endif\n  {\n    PyObject *etype, *eval, *etb;\n    PyErr_Fetch(&etype, &eval, &etb);\n    Py_INCREF(o);\n    __pyx_pw_11pysobatools_5_mask_4RLEs_3__dealloc__(o);\n    Py_DECREF(o);\n    PyErr_Restore(etype, eval, etb);\n  }\n  (*Py_TYPE(o)->tp_free)(o);\n}\n\nstatic PyObject *__pyx_tp_getattro_11pysobatools_5_mask_RLEs(PyObject *o, PyObject *n) {\n  PyObject *v = __Pyx_PyObject_GenericGetAttr(o, n);\n  if (!v && PyErr_ExceptionMatches(PyExc_AttributeError)) {\n    PyErr_Clear();\n    v = __pyx_pw_11pysobatools_5_mask_4RLEs_5__getattr__(o, n);\n  }\n  return v;\n}\n\nstatic PyMethodDef __pyx_methods_11pysobatools_5_mask_RLEs[] = {\n  {\"__getattr__\", (PyCFunction)__pyx_pw_11pysobatools_5_mask_4RLEs_5__getattr__, METH_O|METH_COEXIST, 0},\n  {\"__reduce_cython__\", (PyCFunction)__pyx_pw_11pysobatools_5_mask_4RLEs_7__reduce_cython__, METH_NOARGS, 0},\n  
{\"__setstate_cython__\", (PyCFunction)__pyx_pw_11pysobatools_5_mask_4RLEs_9__setstate_cython__, METH_O, 0},\n  {0, 0, 0, 0}\n};\n\nstatic PyTypeObject __pyx_type_11pysobatools_5_mask_RLEs = {\n  PyVarObject_HEAD_INIT(0, 0)\n  \"pysobatools._mask.RLEs\", /*tp_name*/\n  sizeof(struct __pyx_obj_11pysobatools_5_mask_RLEs), /*tp_basicsize*/\n  0, /*tp_itemsize*/\n  __pyx_tp_dealloc_11pysobatools_5_mask_RLEs, /*tp_dealloc*/\n  0, /*tp_print*/\n  0, /*tp_getattr*/\n  0, /*tp_setattr*/\n  #if PY_MAJOR_VERSION < 3\n  0, /*tp_compare*/\n  #endif\n  #if PY_MAJOR_VERSION >= 3\n  0, /*tp_as_async*/\n  #endif\n  0, /*tp_repr*/\n  0, /*tp_as_number*/\n  0, /*tp_as_sequence*/\n  0, /*tp_as_mapping*/\n  0, /*tp_hash*/\n  0, /*tp_call*/\n  0, /*tp_str*/\n  __pyx_tp_getattro_11pysobatools_5_mask_RLEs, /*tp_getattro*/\n  0, /*tp_setattro*/\n  0, /*tp_as_buffer*/\n  Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE, /*tp_flags*/\n  0, /*tp_doc*/\n  0, /*tp_traverse*/\n  0, /*tp_clear*/\n  0, /*tp_richcompare*/\n  0, /*tp_weaklistoffset*/\n  0, /*tp_iter*/\n  0, /*tp_iternext*/\n  __pyx_methods_11pysobatools_5_mask_RLEs, /*tp_methods*/\n  0, /*tp_members*/\n  0, /*tp_getset*/\n  0, /*tp_base*/\n  0, /*tp_dict*/\n  0, /*tp_descr_get*/\n  0, /*tp_descr_set*/\n  0, /*tp_dictoffset*/\n  0, /*tp_init*/\n  0, /*tp_alloc*/\n  __pyx_tp_new_11pysobatools_5_mask_RLEs, /*tp_new*/\n  0, /*tp_free*/\n  0, /*tp_is_gc*/\n  0, /*tp_bases*/\n  0, /*tp_mro*/\n  0, /*tp_cache*/\n  0, /*tp_subclasses*/\n  0, /*tp_weaklist*/\n  0, /*tp_del*/\n  0, /*tp_version_tag*/\n  #if PY_VERSION_HEX >= 0x030400a1\n  0, /*tp_finalize*/\n  #endif\n  #if PY_VERSION_HEX >= 0x030800b1\n  0, /*tp_vectorcall*/\n  #endif\n};\n\nstatic PyObject *__pyx_tp_new_11pysobatools_5_mask_Masks(PyTypeObject *t, PyObject *a, PyObject *k) {\n  PyObject *o;\n  if (likely((t->tp_flags & Py_TPFLAGS_IS_ABSTRACT) == 0)) {\n    o = (*t->tp_alloc)(t, 0);\n  } else {\n    o = 
(PyObject *) PyBaseObject_Type.tp_new(t, __pyx_empty_tuple, 0);\n  }\n  if (unlikely(!o)) return 0;\n  if (unlikely(__pyx_pw_11pysobatools_5_mask_5Masks_1__cinit__(o, a, k) < 0)) goto bad;\n  return o;\n  bad:\n  Py_DECREF(o); o = 0;\n  return NULL;\n}\n\nstatic void __pyx_tp_dealloc_11pysobatools_5_mask_Masks(PyObject *o) {\n  #if CYTHON_USE_TP_FINALIZE\n  if (unlikely(PyType_HasFeature(Py_TYPE(o), Py_TPFLAGS_HAVE_FINALIZE) && Py_TYPE(o)->tp_finalize) && (!PyType_IS_GC(Py_TYPE(o)) || !_PyGC_FINALIZED(o))) {\n    if (PyObject_CallFinalizerFromDealloc(o)) return;\n  }\n  #endif\n  (*Py_TYPE(o)->tp_free)(o);\n}\n\nstatic PyMethodDef __pyx_methods_11pysobatools_5_mask_Masks[] = {\n  {\"__array__\", (PyCFunction)__pyx_pw_11pysobatools_5_mask_5Masks_3__array__, METH_NOARGS, 0},\n  {\"__reduce_cython__\", (PyCFunction)__pyx_pw_11pysobatools_5_mask_5Masks_5__reduce_cython__, METH_NOARGS, 0},\n  {\"__setstate_cython__\", (PyCFunction)__pyx_pw_11pysobatools_5_mask_5Masks_7__setstate_cython__, METH_O, 0},\n  {0, 0, 0, 0}\n};\n\nstatic PyTypeObject __pyx_type_11pysobatools_5_mask_Masks = {\n  PyVarObject_HEAD_INIT(0, 0)\n  \"pysobatools._mask.Masks\", /*tp_name*/\n  sizeof(struct __pyx_obj_11pysobatools_5_mask_Masks), /*tp_basicsize*/\n  0, /*tp_itemsize*/\n  __pyx_tp_dealloc_11pysobatools_5_mask_Masks, /*tp_dealloc*/\n  0, /*tp_print*/\n  0, /*tp_getattr*/\n  0, /*tp_setattr*/\n  #if PY_MAJOR_VERSION < 3\n  0, /*tp_compare*/\n  #endif\n  #if PY_MAJOR_VERSION >= 3\n  0, /*tp_as_async*/\n  #endif\n  0, /*tp_repr*/\n  0, /*tp_as_number*/\n  0, /*tp_as_sequence*/\n  0, /*tp_as_mapping*/\n  0, /*tp_hash*/\n  0, /*tp_call*/\n  0, /*tp_str*/\n  0, /*tp_getattro*/\n  0, /*tp_setattro*/\n  0, /*tp_as_buffer*/\n  Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_VERSION_TAG|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE, /*tp_flags*/\n  0, /*tp_doc*/\n  0, /*tp_traverse*/\n  0, /*tp_clear*/\n  0, /*tp_richcompare*/\n  0, /*tp_weaklistoffset*/\n  0, /*tp_iter*/\n  0, 
/*tp_iternext*/\n  __pyx_methods_11pysobatools_5_mask_Masks, /*tp_methods*/\n  0, /*tp_members*/\n  0, /*tp_getset*/\n  0, /*tp_base*/\n  0, /*tp_dict*/\n  0, /*tp_descr_get*/\n  0, /*tp_descr_set*/\n  0, /*tp_dictoffset*/\n  0, /*tp_init*/\n  0, /*tp_alloc*/\n  __pyx_tp_new_11pysobatools_5_mask_Masks, /*tp_new*/\n  0, /*tp_free*/\n  0, /*tp_is_gc*/\n  0, /*tp_bases*/\n  0, /*tp_mro*/\n  0, /*tp_cache*/\n  0, /*tp_subclasses*/\n  0, /*tp_weaklist*/\n  0, /*tp_del*/\n  0, /*tp_version_tag*/\n  #if PY_VERSION_HEX >= 0x030400a1\n  0, /*tp_finalize*/\n  #endif\n  #if PY_VERSION_HEX >= 0x030800b1\n  0, /*tp_vectorcall*/\n  #endif\n};\n\nstatic PyMethodDef __pyx_methods[] = {\n  {0, 0, 0, 0}\n};\n\n#if PY_MAJOR_VERSION >= 3\n#if CYTHON_PEP489_MULTI_PHASE_INIT\nstatic PyObject* __pyx_pymod_create(PyObject *spec, PyModuleDef *def); /*proto*/\nstatic int __pyx_pymod_exec__mask(PyObject* module); /*proto*/\nstatic PyModuleDef_Slot __pyx_moduledef_slots[] = {\n  {Py_mod_create, (void*)__pyx_pymod_create},\n  {Py_mod_exec, (void*)__pyx_pymod_exec__mask},\n  {0, NULL}\n};\n#endif\n\nstatic struct PyModuleDef __pyx_moduledef = {\n    PyModuleDef_HEAD_INIT,\n    \"_mask\",\n    0, /* m_doc */\n  #if CYTHON_PEP489_MULTI_PHASE_INIT\n    0, /* m_size */\n  #else\n    -1, /* m_size */\n  #endif\n    __pyx_methods /* m_methods */,\n  #if CYTHON_PEP489_MULTI_PHASE_INIT\n    __pyx_moduledef_slots, /* m_slots */\n  #else\n    NULL, /* m_reload */\n  #endif\n    NULL, /* m_traverse */\n    NULL, /* m_clear */\n    NULL /* m_free */\n};\n#endif\n#ifndef CYTHON_SMALL_CODE\n#if defined(__clang__)\n    #define CYTHON_SMALL_CODE\n#elif defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 3))\n    #define CYTHON_SMALL_CODE __attribute__((cold))\n#else\n    #define CYTHON_SMALL_CODE\n#endif\n#endif\n\nstatic __Pyx_StringTabEntry __pyx_string_tab[] = {\n  {&__pyx_n_s_AttributeError, __pyx_k_AttributeError, sizeof(__pyx_k_AttributeError), 0, 0, 1, 1},\n  {&__pyx_n_s_F, 
__pyx_k_F, sizeof(__pyx_k_F), 0, 0, 1, 1},\n  {&__pyx_kp_u_Format_string_allocated_too_shor, __pyx_k_Format_string_allocated_too_shor, sizeof(__pyx_k_Format_string_allocated_too_shor), 0, 1, 0, 0},\n  {&__pyx_kp_u_Format_string_allocated_too_shor_2, __pyx_k_Format_string_allocated_too_shor_2, sizeof(__pyx_k_Format_string_allocated_too_shor_2), 0, 1, 0, 0},\n  {&__pyx_n_s_ImportError, __pyx_k_ImportError, sizeof(__pyx_k_ImportError), 0, 0, 1, 1},\n  {&__pyx_n_s_Masks, __pyx_k_Masks, sizeof(__pyx_k_Masks), 0, 0, 1, 1},\n  {&__pyx_n_s_N, __pyx_k_N, sizeof(__pyx_k_N), 0, 0, 1, 1},\n  {&__pyx_kp_u_Non_native_byte_order_not_suppor, __pyx_k_Non_native_byte_order_not_suppor, sizeof(__pyx_k_Non_native_byte_order_not_suppor), 0, 1, 0, 0},\n  {&__pyx_n_s_PYTHON_VERSION, __pyx_k_PYTHON_VERSION, sizeof(__pyx_k_PYTHON_VERSION), 0, 0, 1, 1},\n  {&__pyx_kp_s_Python_version_must_be_2_or_3, __pyx_k_Python_version_must_be_2_or_3, sizeof(__pyx_k_Python_version_must_be_2_or_3), 0, 0, 1, 0},\n  {&__pyx_n_s_R, __pyx_k_R, sizeof(__pyx_k_R), 0, 0, 1, 1},\n  {&__pyx_n_s_RLEs, __pyx_k_RLEs, sizeof(__pyx_k_RLEs), 0, 0, 1, 1},\n  {&__pyx_n_s_Rs, __pyx_k_Rs, sizeof(__pyx_k_Rs), 0, 0, 1, 1},\n  {&__pyx_n_s_RuntimeError, __pyx_k_RuntimeError, sizeof(__pyx_k_RuntimeError), 0, 0, 1, 1},\n  {&__pyx_kp_s_The_dt_and_gt_should_have_the_sa, __pyx_k_The_dt_and_gt_should_have_the_sa, sizeof(__pyx_k_The_dt_and_gt_should_have_the_sa), 0, 0, 1, 0},\n  {&__pyx_n_s_TypeError, __pyx_k_TypeError, sizeof(__pyx_k_TypeError), 0, 0, 1, 1},\n  {&__pyx_n_s_ValueError, __pyx_k_ValueError, sizeof(__pyx_k_ValueError), 0, 0, 1, 1},\n  {&__pyx_n_s_a, __pyx_k_a, sizeof(__pyx_k_a), 0, 0, 1, 1},\n  {&__pyx_n_s_a_2, __pyx_k_a_2, sizeof(__pyx_k_a_2), 0, 0, 1, 1},\n  {&__pyx_n_s_all, __pyx_k_all, sizeof(__pyx_k_all), 0, 0, 1, 1},\n  {&__pyx_n_s_area, __pyx_k_area, sizeof(__pyx_k_area), 0, 0, 1, 1},\n  {&__pyx_n_s_array, __pyx_k_array, sizeof(__pyx_k_array), 0, 0, 1, 1},\n  {&__pyx_n_s_astype, __pyx_k_astype, 
sizeof(__pyx_k_astype), 0, 0, 1, 1},\n  {&__pyx_n_s_author, __pyx_k_author, sizeof(__pyx_k_author), 0, 0, 1, 1},\n  {&__pyx_n_s_bb, __pyx_k_bb, sizeof(__pyx_k_bb), 0, 0, 1, 1},\n  {&__pyx_n_s_bbIou, __pyx_k_bbIou, sizeof(__pyx_k_bbIou), 0, 0, 1, 1},\n  {&__pyx_n_s_bb_2, __pyx_k_bb_2, sizeof(__pyx_k_bb_2), 0, 0, 1, 1},\n  {&__pyx_n_s_c_string, __pyx_k_c_string, sizeof(__pyx_k_c_string), 0, 0, 1, 1},\n  {&__pyx_n_s_cline_in_traceback, __pyx_k_cline_in_traceback, sizeof(__pyx_k_cline_in_traceback), 0, 0, 1, 1},\n  {&__pyx_n_s_cnts, __pyx_k_cnts, sizeof(__pyx_k_cnts), 0, 0, 1, 1},\n  {&__pyx_n_s_counts, __pyx_k_counts, sizeof(__pyx_k_counts), 0, 0, 1, 1},\n  {&__pyx_n_s_data, __pyx_k_data, sizeof(__pyx_k_data), 0, 0, 1, 1},\n  {&__pyx_n_s_decode, __pyx_k_decode, sizeof(__pyx_k_decode), 0, 0, 1, 1},\n  {&__pyx_n_s_double, __pyx_k_double, sizeof(__pyx_k_double), 0, 0, 1, 1},\n  {&__pyx_n_s_dt, __pyx_k_dt, sizeof(__pyx_k_dt), 0, 0, 1, 1},\n  {&__pyx_n_s_dtype, __pyx_k_dtype, sizeof(__pyx_k_dtype), 0, 0, 1, 1},\n  {&__pyx_n_s_encode, __pyx_k_encode, sizeof(__pyx_k_encode), 0, 0, 1, 1},\n  {&__pyx_n_s_enumerate, __pyx_k_enumerate, sizeof(__pyx_k_enumerate), 0, 0, 1, 1},\n  {&__pyx_n_s_frBbox, __pyx_k_frBbox, sizeof(__pyx_k_frBbox), 0, 0, 1, 1},\n  {&__pyx_n_s_frPoly, __pyx_k_frPoly, sizeof(__pyx_k_frPoly), 0, 0, 1, 1},\n  {&__pyx_n_s_frPyObjects, __pyx_k_frPyObjects, sizeof(__pyx_k_frPyObjects), 0, 0, 1, 1},\n  {&__pyx_n_s_frString, __pyx_k_frString, sizeof(__pyx_k_frString), 0, 0, 1, 1},\n  {&__pyx_n_s_frUncompressedRLE, __pyx_k_frUncompressedRLE, sizeof(__pyx_k_frUncompressedRLE), 0, 0, 1, 1},\n  {&__pyx_n_s_getstate, __pyx_k_getstate, sizeof(__pyx_k_getstate), 0, 0, 1, 1},\n  {&__pyx_n_s_gt, __pyx_k_gt, sizeof(__pyx_k_gt), 0, 0, 1, 1},\n  {&__pyx_n_s_h, __pyx_k_h, sizeof(__pyx_k_h), 0, 0, 1, 1},\n  {&__pyx_n_s_i, __pyx_k_i, sizeof(__pyx_k_i), 0, 0, 1, 1},\n  {&__pyx_n_s_import, __pyx_k_import, sizeof(__pyx_k_import), 0, 0, 1, 1},\n  
{&__pyx_kp_s_input_data_type_not_allowed, __pyx_k_input_data_type_not_allowed, sizeof(__pyx_k_input_data_type_not_allowed), 0, 0, 1, 0},\n  {&__pyx_kp_s_input_type_is_not_supported, __pyx_k_input_type_is_not_supported, sizeof(__pyx_k_input_type_is_not_supported), 0, 0, 1, 0},\n  {&__pyx_n_s_intersect, __pyx_k_intersect, sizeof(__pyx_k_intersect), 0, 0, 1, 1},\n  {&__pyx_n_s_iou, __pyx_k_iou, sizeof(__pyx_k_iou), 0, 0, 1, 1},\n  {&__pyx_n_s_iouFun, __pyx_k_iouFun, sizeof(__pyx_k_iouFun), 0, 0, 1, 1},\n  {&__pyx_n_s_iou_2, __pyx_k_iou_2, sizeof(__pyx_k_iou_2), 0, 0, 1, 1},\n  {&__pyx_n_s_iou_locals__bbIou, __pyx_k_iou_locals__bbIou, sizeof(__pyx_k_iou_locals__bbIou), 0, 0, 1, 1},\n  {&__pyx_n_s_iou_locals__len, __pyx_k_iou_locals__len, sizeof(__pyx_k_iou_locals__len), 0, 0, 1, 1},\n  {&__pyx_n_s_iou_locals__preproc, __pyx_k_iou_locals__preproc, sizeof(__pyx_k_iou_locals__preproc), 0, 0, 1, 1},\n  {&__pyx_n_s_iou_locals__rleIou, __pyx_k_iou_locals__rleIou, sizeof(__pyx_k_iou_locals__rleIou), 0, 0, 1, 1},\n  {&__pyx_n_s_isbox, __pyx_k_isbox, sizeof(__pyx_k_isbox), 0, 0, 1, 1},\n  {&__pyx_n_s_iscrowd, __pyx_k_iscrowd, sizeof(__pyx_k_iscrowd), 0, 0, 1, 1},\n  {&__pyx_n_s_isrle, __pyx_k_isrle, sizeof(__pyx_k_isrle), 0, 0, 1, 1},\n  {&__pyx_n_s_j, __pyx_k_j, sizeof(__pyx_k_j), 0, 0, 1, 1},\n  {&__pyx_n_s_len, __pyx_k_len, sizeof(__pyx_k_len), 0, 0, 1, 1},\n  {&__pyx_kp_s_list_input_can_be_bounding_box_N, __pyx_k_list_input_can_be_bounding_box_N, sizeof(__pyx_k_list_input_can_be_bounding_box_N), 0, 0, 1, 0},\n  {&__pyx_n_s_m, __pyx_k_m, sizeof(__pyx_k_m), 0, 0, 1, 1},\n  {&__pyx_n_s_main, __pyx_k_main, sizeof(__pyx_k_main), 0, 0, 1, 1},\n  {&__pyx_n_s_mask, __pyx_k_mask, sizeof(__pyx_k_mask), 0, 0, 1, 1},\n  {&__pyx_n_s_masks, __pyx_k_masks, sizeof(__pyx_k_masks), 0, 0, 1, 1},\n  {&__pyx_n_s_merge, __pyx_k_merge, sizeof(__pyx_k_merge), 0, 0, 1, 1},\n  {&__pyx_n_s_n, __pyx_k_n, sizeof(__pyx_k_n), 0, 0, 1, 1},\n  {&__pyx_n_s_name, __pyx_k_name, sizeof(__pyx_k_name), 0, 0, 1, 
1},\n  {&__pyx_kp_u_ndarray_is_not_C_contiguous, __pyx_k_ndarray_is_not_C_contiguous, sizeof(__pyx_k_ndarray_is_not_C_contiguous), 0, 1, 0, 0},\n  {&__pyx_kp_u_ndarray_is_not_Fortran_contiguou, __pyx_k_ndarray_is_not_Fortran_contiguou, sizeof(__pyx_k_ndarray_is_not_Fortran_contiguou), 0, 1, 0, 0},\n  {&__pyx_kp_s_no_default___reduce___due_to_non, __pyx_k_no_default___reduce___due_to_non, sizeof(__pyx_k_no_default___reduce___due_to_non), 0, 0, 1, 0},\n  {&__pyx_n_s_np, __pyx_k_np, sizeof(__pyx_k_np), 0, 0, 1, 1},\n  {&__pyx_n_s_np_poly, __pyx_k_np_poly, sizeof(__pyx_k_np_poly), 0, 0, 1, 1},\n  {&__pyx_n_s_numpy, __pyx_k_numpy, sizeof(__pyx_k_numpy), 0, 0, 1, 1},\n  {&__pyx_kp_s_numpy_core_multiarray_failed_to, __pyx_k_numpy_core_multiarray_failed_to, sizeof(__pyx_k_numpy_core_multiarray_failed_to), 0, 0, 1, 0},\n  {&__pyx_kp_s_numpy_core_umath_failed_to_impor, __pyx_k_numpy_core_umath_failed_to_impor, sizeof(__pyx_k_numpy_core_umath_failed_to_impor), 0, 0, 1, 0},\n  {&__pyx_kp_s_numpy_ndarray_input_is_only_for, __pyx_k_numpy_ndarray_input_is_only_for, sizeof(__pyx_k_numpy_ndarray_input_is_only_for), 0, 0, 1, 0},\n  {&__pyx_n_s_obj, __pyx_k_obj, sizeof(__pyx_k_obj), 0, 0, 1, 1},\n  {&__pyx_n_s_objs, __pyx_k_objs, sizeof(__pyx_k_objs), 0, 0, 1, 1},\n  {&__pyx_n_s_order, __pyx_k_order, sizeof(__pyx_k_order), 0, 0, 1, 1},\n  {&__pyx_n_s_p, __pyx_k_p, sizeof(__pyx_k_p), 0, 0, 1, 1},\n  {&__pyx_n_s_poly, __pyx_k_poly, sizeof(__pyx_k_poly), 0, 0, 1, 1},\n  {&__pyx_n_s_preproc, __pyx_k_preproc, sizeof(__pyx_k_preproc), 0, 0, 1, 1},\n  {&__pyx_n_s_py_string, __pyx_k_py_string, sizeof(__pyx_k_py_string), 0, 0, 1, 1},\n  {&__pyx_n_s_pysobatools__mask, __pyx_k_pysobatools__mask, sizeof(__pyx_k_pysobatools__mask), 0, 0, 1, 1},\n  {&__pyx_kp_s_pysobatools__mask_pyx, __pyx_k_pysobatools__mask_pyx, sizeof(__pyx_k_pysobatools__mask_pyx), 0, 0, 1, 0},\n  {&__pyx_n_s_pyiscrowd, __pyx_k_pyiscrowd, sizeof(__pyx_k_pyiscrowd), 0, 0, 1, 1},\n  {&__pyx_n_s_pyobj, __pyx_k_pyobj, 
sizeof(__pyx_k_pyobj), 0, 0, 1, 1},\n  {&__pyx_n_s_range, __pyx_k_range, sizeof(__pyx_k_range), 0, 0, 1, 1},\n  {&__pyx_n_s_reduce, __pyx_k_reduce, sizeof(__pyx_k_reduce), 0, 0, 1, 1},\n  {&__pyx_n_s_reduce_cython, __pyx_k_reduce_cython, sizeof(__pyx_k_reduce_cython), 0, 0, 1, 1},\n  {&__pyx_n_s_reduce_ex, __pyx_k_reduce_ex, sizeof(__pyx_k_reduce_ex), 0, 0, 1, 1},\n  {&__pyx_n_s_reshape, __pyx_k_reshape, sizeof(__pyx_k_reshape), 0, 0, 1, 1},\n  {&__pyx_n_s_rleIou, __pyx_k_rleIou, sizeof(__pyx_k_rleIou), 0, 0, 1, 1},\n  {&__pyx_n_s_rleObjs, __pyx_k_rleObjs, sizeof(__pyx_k_rleObjs), 0, 0, 1, 1},\n  {&__pyx_n_s_setstate, __pyx_k_setstate, sizeof(__pyx_k_setstate), 0, 0, 1, 1},\n  {&__pyx_n_s_setstate_cython, __pyx_k_setstate_cython, sizeof(__pyx_k_setstate_cython), 0, 0, 1, 1},\n  {&__pyx_n_s_shape, __pyx_k_shape, sizeof(__pyx_k_shape), 0, 0, 1, 1},\n  {&__pyx_n_s_size, __pyx_k_size, sizeof(__pyx_k_size), 0, 0, 1, 1},\n  {&__pyx_n_s_sys, __pyx_k_sys, sizeof(__pyx_k_sys), 0, 0, 1, 1},\n  {&__pyx_n_s_test, __pyx_k_test, sizeof(__pyx_k_test), 0, 0, 1, 1},\n  {&__pyx_n_s_toBbox, __pyx_k_toBbox, sizeof(__pyx_k_toBbox), 0, 0, 1, 1},\n  {&__pyx_n_s_toString, __pyx_k_toString, sizeof(__pyx_k_toString), 0, 0, 1, 1},\n  {&__pyx_n_s_tsungyi, __pyx_k_tsungyi, sizeof(__pyx_k_tsungyi), 0, 0, 1, 1},\n  {&__pyx_n_s_ucRles, __pyx_k_ucRles, sizeof(__pyx_k_ucRles), 0, 0, 1, 1},\n  {&__pyx_n_s_uint32, __pyx_k_uint32, sizeof(__pyx_k_uint32), 0, 0, 1, 1},\n  {&__pyx_n_s_uint8, __pyx_k_uint8, sizeof(__pyx_k_uint8), 0, 0, 1, 1},\n  {&__pyx_kp_u_unknown_dtype_code_in_numpy_pxd, __pyx_k_unknown_dtype_code_in_numpy_pxd, sizeof(__pyx_k_unknown_dtype_code_in_numpy_pxd), 0, 1, 0, 0},\n  {&__pyx_kp_s_unrecognized_type_The_following, __pyx_k_unrecognized_type_The_following, sizeof(__pyx_k_unrecognized_type_The_following), 0, 0, 1, 0},\n  {&__pyx_n_s_utf8, __pyx_k_utf8, sizeof(__pyx_k_utf8), 0, 0, 1, 1},\n  {&__pyx_n_s_version_info, __pyx_k_version_info, sizeof(__pyx_k_version_info), 0, 0, 1, 1},\n  
{&__pyx_n_s_w, __pyx_k_w, sizeof(__pyx_k_w), 0, 0, 1, 1},\n  {&__pyx_n_s_zeros, __pyx_k_zeros, sizeof(__pyx_k_zeros), 0, 0, 1, 1},\n  {0, 0, 0, 0, 0, 0, 0}\n};\nstatic CYTHON_SMALL_CODE int __Pyx_InitCachedBuiltins(void) {\n  __pyx_builtin_range = __Pyx_GetBuiltinName(__pyx_n_s_range); if (!__pyx_builtin_range) __PYX_ERR(0, 67, __pyx_L1_error)\n  __pyx_builtin_AttributeError = __Pyx_GetBuiltinName(__pyx_n_s_AttributeError); if (!__pyx_builtin_AttributeError) __PYX_ERR(0, 73, __pyx_L1_error)\n  __pyx_builtin_TypeError = __Pyx_GetBuiltinName(__pyx_n_s_TypeError); if (!__pyx_builtin_TypeError) __PYX_ERR(1, 2, __pyx_L1_error)\n  __pyx_builtin_enumerate = __Pyx_GetBuiltinName(__pyx_n_s_enumerate); if (!__pyx_builtin_enumerate) __PYX_ERR(0, 124, __pyx_L1_error)\n  __pyx_builtin_ValueError = __Pyx_GetBuiltinName(__pyx_n_s_ValueError); if (!__pyx_builtin_ValueError) __PYX_ERR(2, 272, __pyx_L1_error)\n  __pyx_builtin_RuntimeError = __Pyx_GetBuiltinName(__pyx_n_s_RuntimeError); if (!__pyx_builtin_RuntimeError) __PYX_ERR(2, 856, __pyx_L1_error)\n  __pyx_builtin_ImportError = __Pyx_GetBuiltinName(__pyx_n_s_ImportError); if (!__pyx_builtin_ImportError) __PYX_ERR(2, 1038, __pyx_L1_error)\n  return 0;\n  __pyx_L1_error:;\n  return -1;\n}\n\nstatic CYTHON_SMALL_CODE int __Pyx_InitCachedConstants(void) {\n  __Pyx_RefNannyDeclarations\n  __Pyx_RefNannySetupContext(\"__Pyx_InitCachedConstants\", 0);\n\n  /* \"(tree fragment)\":2\n * def __reduce_cython__(self):\n *     raise TypeError(\"no default __reduce__ due to non-trivial __cinit__\")             # <<<<<<<<<<<<<<\n * def __setstate_cython__(self, __pyx_state):\n *     raise TypeError(\"no default __reduce__ due to non-trivial __cinit__\")\n */\n  __pyx_tuple_ = PyTuple_Pack(1, __pyx_kp_s_no_default___reduce___due_to_non); if (unlikely(!__pyx_tuple_)) __PYX_ERR(1, 2, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_tuple_);\n  __Pyx_GIVEREF(__pyx_tuple_);\n\n  /* \"(tree fragment)\":4\n *     raise TypeError(\"no default __reduce__ due to 
non-trivial __cinit__\")\n * def __setstate_cython__(self, __pyx_state):\n *     raise TypeError(\"no default __reduce__ due to non-trivial __cinit__\")             # <<<<<<<<<<<<<<\n */\n  __pyx_tuple__2 = PyTuple_Pack(1, __pyx_kp_s_no_default___reduce___due_to_non); if (unlikely(!__pyx_tuple__2)) __PYX_ERR(1, 4, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_tuple__2);\n  __Pyx_GIVEREF(__pyx_tuple__2);\n\n  /* \"(tree fragment)\":2\n * def __reduce_cython__(self):\n *     raise TypeError(\"no default __reduce__ due to non-trivial __cinit__\")             # <<<<<<<<<<<<<<\n * def __setstate_cython__(self, __pyx_state):\n *     raise TypeError(\"no default __reduce__ due to non-trivial __cinit__\")\n */\n  __pyx_tuple__3 = PyTuple_Pack(1, __pyx_kp_s_no_default___reduce___due_to_non); if (unlikely(!__pyx_tuple__3)) __PYX_ERR(1, 2, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_tuple__3);\n  __Pyx_GIVEREF(__pyx_tuple__3);\n\n  /* \"(tree fragment)\":4\n *     raise TypeError(\"no default __reduce__ due to non-trivial __cinit__\")\n * def __setstate_cython__(self, __pyx_state):\n *     raise TypeError(\"no default __reduce__ due to non-trivial __cinit__\")             # <<<<<<<<<<<<<<\n */\n  __pyx_tuple__4 = PyTuple_Pack(1, __pyx_kp_s_no_default___reduce___due_to_non); if (unlikely(!__pyx_tuple__4)) __PYX_ERR(1, 4, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_tuple__4);\n  __Pyx_GIVEREF(__pyx_tuple__4);\n\n  /* \"pysobatools/_mask.pyx\":130\n *             py_string = str.encode(obj['counts']) if type(obj['counts']) == str else obj['counts']\n *         else:\n *             raise Exception('Python version must be 2 or 3')             # <<<<<<<<<<<<<<\n *         c_string = py_string\n *         rleFrString( <RLE*> &Rs._R[i], <char*> c_string, obj['size'][0], obj['size'][1] )\n */\n  __pyx_tuple__5 = PyTuple_Pack(1, __pyx_kp_s_Python_version_must_be_2_or_3); if (unlikely(!__pyx_tuple__5)) __PYX_ERR(0, 130, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_tuple__5);\n  __Pyx_GIVEREF(__pyx_tuple__5);\n\n 
 /* \"pysobatools/_mask.pyx\":154\n * def merge(rleObjs, intersect=0):\n *     cdef RLEs Rs = _frString(rleObjs)\n *     cdef RLEs R = RLEs(1)             # <<<<<<<<<<<<<<\n *     rleMerge(<RLE*>Rs._R, <RLE*> R._R, <siz> Rs._n, intersect)\n *     obj = _toString(R)[0]\n */\n  __pyx_tuple__6 = PyTuple_Pack(1, __pyx_int_1); if (unlikely(!__pyx_tuple__6)) __PYX_ERR(0, 154, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_tuple__6);\n  __Pyx_GIVEREF(__pyx_tuple__6);\n\n  /* \"pysobatools/_mask.pyx\":180\n *             # check if it's Nx4 bbox\n *             if not len(objs.shape) == 2 or not objs.shape[1] == 4:\n *                 raise Exception('numpy ndarray input is only for *bounding boxes* and should have Nx4 dimension')             # <<<<<<<<<<<<<<\n *             objs = objs.astype(np.double)\n *         elif type(objs) == list:\n */\n  __pyx_tuple__7 = PyTuple_Pack(1, __pyx_kp_s_numpy_ndarray_input_is_only_for); if (unlikely(!__pyx_tuple__7)) __PYX_ERR(0, 180, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_tuple__7);\n  __Pyx_GIVEREF(__pyx_tuple__7);\n\n  /* \"pysobatools/_mask.pyx\":193\n *                 objs = _frString(objs)\n *             else:\n *                 raise Exception('list input can be bounding box (Nx4) or RLEs ([RLE])')             # <<<<<<<<<<<<<<\n *         else:\n *             raise Exception('unrecognized type.  The following type: RLEs (rle), np.ndarray (box), and list (box) are supported.')\n */\n  __pyx_tuple__8 = PyTuple_Pack(1, __pyx_kp_s_list_input_can_be_bounding_box_N); if (unlikely(!__pyx_tuple__8)) __PYX_ERR(0, 193, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_tuple__8);\n  __Pyx_GIVEREF(__pyx_tuple__8);\n\n  /* \"pysobatools/_mask.pyx\":195\n *                 raise Exception('list input can be bounding box (Nx4) or RLEs ([RLE])')\n *         else:\n *             raise Exception('unrecognized type.  
The following type: RLEs (rle), np.ndarray (box), and list (box) are supported.')             # <<<<<<<<<<<<<<\n *         return objs\n *     def _rleIou(RLEs dt, RLEs gt, np.ndarray[np.uint8_t, ndim=1] iscrowd, siz m, siz n, np.ndarray[np.double_t,  ndim=1] _iou):\n */\n  __pyx_tuple__9 = PyTuple_Pack(1, __pyx_kp_s_unrecognized_type_The_following); if (unlikely(!__pyx_tuple__9)) __PYX_ERR(0, 195, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_tuple__9);\n  __Pyx_GIVEREF(__pyx_tuple__9);\n\n  /* \"pysobatools/_mask.pyx\":172\n * # iou computation. support function overload (RLEs-RLEs and bbox-bbox).\n * def iou( dt, gt, pyiscrowd ):\n *     def _preproc(objs):             # <<<<<<<<<<<<<<\n *         if len(objs) == 0:\n *             return objs\n */\n  __pyx_tuple__10 = PyTuple_Pack(4, __pyx_n_s_objs, __pyx_n_s_isbox, __pyx_n_s_isrle, __pyx_n_s_obj); if (unlikely(!__pyx_tuple__10)) __PYX_ERR(0, 172, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_tuple__10);\n  __Pyx_GIVEREF(__pyx_tuple__10);\n  __pyx_codeobj__11 = (PyObject*)__Pyx_PyCode_New(1, 0, 4, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__10, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_pysobatools__mask_pyx, __pyx_n_s_preproc, 172, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__11)) __PYX_ERR(0, 172, __pyx_L1_error)\n\n  /* \"pysobatools/_mask.pyx\":197\n *             raise Exception('unrecognized type.  
The following type: RLEs (rle), np.ndarray (box), and list (box) are supported.')\n *         return objs\n *     def _rleIou(RLEs dt, RLEs gt, np.ndarray[np.uint8_t, ndim=1] iscrowd, siz m, siz n, np.ndarray[np.double_t,  ndim=1] _iou):             # <<<<<<<<<<<<<<\n *         rleIou( <RLE*> dt._R, <RLE*> gt._R, m, n, <byte*> iscrowd.data, <double*> _iou.data )\n *     def _bbIou(np.ndarray[np.double_t, ndim=2] dt, np.ndarray[np.double_t, ndim=2] gt, np.ndarray[np.uint8_t, ndim=1] iscrowd, siz m, siz n, np.ndarray[np.double_t, ndim=1] _iou):\n */\n  __pyx_tuple__12 = PyTuple_Pack(6, __pyx_n_s_dt, __pyx_n_s_gt, __pyx_n_s_iscrowd, __pyx_n_s_m, __pyx_n_s_n, __pyx_n_s_iou); if (unlikely(!__pyx_tuple__12)) __PYX_ERR(0, 197, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_tuple__12);\n  __Pyx_GIVEREF(__pyx_tuple__12);\n  __pyx_codeobj__13 = (PyObject*)__Pyx_PyCode_New(6, 0, 6, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__12, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_pysobatools__mask_pyx, __pyx_n_s_rleIou, 197, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__13)) __PYX_ERR(0, 197, __pyx_L1_error)\n\n  /* \"pysobatools/_mask.pyx\":199\n *     def _rleIou(RLEs dt, RLEs gt, np.ndarray[np.uint8_t, ndim=1] iscrowd, siz m, siz n, np.ndarray[np.double_t,  ndim=1] _iou):\n *         rleIou( <RLE*> dt._R, <RLE*> gt._R, m, n, <byte*> iscrowd.data, <double*> _iou.data )\n *     def _bbIou(np.ndarray[np.double_t, ndim=2] dt, np.ndarray[np.double_t, ndim=2] gt, np.ndarray[np.uint8_t, ndim=1] iscrowd, siz m, siz n, np.ndarray[np.double_t, ndim=1] _iou):             # <<<<<<<<<<<<<<\n *         bbIou( <BB> dt.data, <BB> gt.data, m, n, <byte*> iscrowd.data, <double*>_iou.data )\n *     def _len(obj):\n */\n  __pyx_tuple__14 = PyTuple_Pack(6, __pyx_n_s_dt, __pyx_n_s_gt, __pyx_n_s_iscrowd, __pyx_n_s_m, __pyx_n_s_n, __pyx_n_s_iou); if (unlikely(!__pyx_tuple__14)) __PYX_ERR(0, 199, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_tuple__14);\n  
__Pyx_GIVEREF(__pyx_tuple__14);\n  __pyx_codeobj__15 = (PyObject*)__Pyx_PyCode_New(6, 0, 6, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__14, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_pysobatools__mask_pyx, __pyx_n_s_bbIou, 199, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__15)) __PYX_ERR(0, 199, __pyx_L1_error)\n\n  /* \"pysobatools/_mask.pyx\":201\n *     def _bbIou(np.ndarray[np.double_t, ndim=2] dt, np.ndarray[np.double_t, ndim=2] gt, np.ndarray[np.uint8_t, ndim=1] iscrowd, siz m, siz n, np.ndarray[np.double_t, ndim=1] _iou):\n *         bbIou( <BB> dt.data, <BB> gt.data, m, n, <byte*> iscrowd.data, <double*>_iou.data )\n *     def _len(obj):             # <<<<<<<<<<<<<<\n *         cdef siz N = 0\n *         if type(obj) == RLEs:\n */\n  __pyx_tuple__16 = PyTuple_Pack(2, __pyx_n_s_obj, __pyx_n_s_N); if (unlikely(!__pyx_tuple__16)) __PYX_ERR(0, 201, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_tuple__16);\n  __Pyx_GIVEREF(__pyx_tuple__16);\n  __pyx_codeobj__17 = (PyObject*)__Pyx_PyCode_New(1, 0, 2, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__16, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_pysobatools__mask_pyx, __pyx_n_s_len, 201, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__17)) __PYX_ERR(0, 201, __pyx_L1_error)\n\n  /* \"pysobatools/_mask.pyx\":221\n *         return []\n *     if not type(dt) == type(gt):\n *         raise Exception('The dt and gt should have the same data type, either RLEs, list or np.ndarray')             # <<<<<<<<<<<<<<\n * \n *     # define local variables\n */\n  __pyx_tuple__18 = PyTuple_Pack(1, __pyx_kp_s_The_dt_and_gt_should_have_the_sa); if (unlikely(!__pyx_tuple__18)) __PYX_ERR(0, 221, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_tuple__18);\n  __Pyx_GIVEREF(__pyx_tuple__18);\n\n  /* \"pysobatools/_mask.pyx\":232\n *         _iouFun = _bbIou\n *     else:\n *         raise Exception('input data type not allowed.')     
        # <<<<<<<<<<<<<<\n *     _iou = <double*> malloc(m*n* sizeof(double))\n *     iou = np.zeros((m*n, ), dtype=np.double)\n */\n  __pyx_tuple__19 = PyTuple_Pack(1, __pyx_kp_s_input_data_type_not_allowed); if (unlikely(!__pyx_tuple__19)) __PYX_ERR(0, 232, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_tuple__19);\n  __Pyx_GIVEREF(__pyx_tuple__19);\n\n  /* \"pysobatools/_mask.pyx\":307\n *         objs = frUncompressedRLE([pyobj], h, w)[0]\n *     else:\n *         raise Exception('input type is not supported.')             # <<<<<<<<<<<<<<\n *     return objs\n */\n  __pyx_tuple__20 = PyTuple_Pack(1, __pyx_kp_s_input_type_is_not_supported); if (unlikely(!__pyx_tuple__20)) __PYX_ERR(0, 307, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_tuple__20);\n  __Pyx_GIVEREF(__pyx_tuple__20);\n\n  /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":272\n *             if ((flags & pybuf.PyBUF_C_CONTIGUOUS == pybuf.PyBUF_C_CONTIGUOUS)\n *                 and not PyArray_CHKFLAGS(self, NPY_ARRAY_C_CONTIGUOUS)):\n *                 raise ValueError(u\"ndarray is not C contiguous\")             # <<<<<<<<<<<<<<\n * \n *             if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS)\n */\n  __pyx_tuple__21 = PyTuple_Pack(1, __pyx_kp_u_ndarray_is_not_C_contiguous); if (unlikely(!__pyx_tuple__21)) __PYX_ERR(2, 272, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_tuple__21);\n  __Pyx_GIVEREF(__pyx_tuple__21);\n\n  /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":276\n *             if ((flags & pybuf.PyBUF_F_CONTIGUOUS == pybuf.PyBUF_F_CONTIGUOUS)\n *                 and not PyArray_CHKFLAGS(self, NPY_ARRAY_F_CONTIGUOUS)):\n *                 raise ValueError(u\"ndarray is not Fortran contiguous\")             # <<<<<<<<<<<<<<\n * \n *             info.buf = PyArray_DATA(self)\n */\n  __pyx_tuple__22 = PyTuple_Pack(1, __pyx_kp_u_ndarray_is_not_Fortran_contiguou); if (unlikely(!__pyx_tuple__22)) 
__PYX_ERR(2, 276, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_tuple__22);\n  __Pyx_GIVEREF(__pyx_tuple__22);\n\n  /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":306\n *                 if ((descr.byteorder == c'>' and little_endian) or\n *                     (descr.byteorder == c'<' and not little_endian)):\n *                     raise ValueError(u\"Non-native byte order not supported\")             # <<<<<<<<<<<<<<\n *                 if   t == NPY_BYTE:        f = \"b\"\n *                 elif t == NPY_UBYTE:       f = \"B\"\n */\n  __pyx_tuple__23 = PyTuple_Pack(1, __pyx_kp_u_Non_native_byte_order_not_suppor); if (unlikely(!__pyx_tuple__23)) __PYX_ERR(2, 306, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_tuple__23);\n  __Pyx_GIVEREF(__pyx_tuple__23);\n\n  /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":856\n * \n *         if (end - f) - <int>(new_offset - offset[0]) < 15:\n *             raise RuntimeError(u\"Format string allocated too short, see comment in numpy.pxd\")             # <<<<<<<<<<<<<<\n * \n *         if ((child.byteorder == c'>' and little_endian) or\n */\n  __pyx_tuple__24 = PyTuple_Pack(1, __pyx_kp_u_Format_string_allocated_too_shor); if (unlikely(!__pyx_tuple__24)) __PYX_ERR(2, 856, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_tuple__24);\n  __Pyx_GIVEREF(__pyx_tuple__24);\n\n  /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":880\n *             t = child.type_num\n *             if end - f < 5:\n *                 raise RuntimeError(u\"Format string allocated too short.\")             # <<<<<<<<<<<<<<\n * \n *             # Until ticket #99 is fixed, use integers to avoid warnings\n */\n  __pyx_tuple__25 = PyTuple_Pack(1, __pyx_kp_u_Format_string_allocated_too_shor_2); if (unlikely(!__pyx_tuple__25)) __PYX_ERR(2, 880, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_tuple__25);\n  __Pyx_GIVEREF(__pyx_tuple__25);\n\n  /* 
\"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":1038\n *         _import_array()\n *     except Exception:\n *         raise ImportError(\"numpy.core.multiarray failed to import\")             # <<<<<<<<<<<<<<\n * \n * cdef inline int import_umath() except -1:\n */\n  __pyx_tuple__26 = PyTuple_Pack(1, __pyx_kp_s_numpy_core_multiarray_failed_to); if (unlikely(!__pyx_tuple__26)) __PYX_ERR(2, 1038, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_tuple__26);\n  __Pyx_GIVEREF(__pyx_tuple__26);\n\n  /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":1044\n *         _import_umath()\n *     except Exception:\n *         raise ImportError(\"numpy.core.umath failed to import\")             # <<<<<<<<<<<<<<\n * \n * cdef inline int import_ufunc() except -1:\n */\n  __pyx_tuple__27 = PyTuple_Pack(1, __pyx_kp_s_numpy_core_umath_failed_to_impor); if (unlikely(!__pyx_tuple__27)) __PYX_ERR(2, 1044, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_tuple__27);\n  __Pyx_GIVEREF(__pyx_tuple__27);\n\n  /* \"pysobatools/_mask.pyx\":103\n * \n * # internal conversion from Python RLEs object to compressed RLE format\n * def _toString(RLEs Rs):             # <<<<<<<<<<<<<<\n *     cdef siz n = Rs.n\n *     cdef bytes py_string\n */\n  __pyx_tuple__28 = PyTuple_Pack(6, __pyx_n_s_Rs, __pyx_n_s_n, __pyx_n_s_py_string, __pyx_n_s_c_string, __pyx_n_s_objs, __pyx_n_s_i); if (unlikely(!__pyx_tuple__28)) __PYX_ERR(0, 103, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_tuple__28);\n  __Pyx_GIVEREF(__pyx_tuple__28);\n  __pyx_codeobj__29 = (PyObject*)__Pyx_PyCode_New(1, 0, 6, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__28, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_pysobatools__mask_pyx, __pyx_n_s_toString, 103, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__29)) __PYX_ERR(0, 103, __pyx_L1_error)\n\n  /* \"pysobatools/_mask.pyx\":119\n * \n * # internal conversion from 
compressed RLE format to Python RLEs object\n * def _frString(rleObjs):             # <<<<<<<<<<<<<<\n *     cdef siz n = len(rleObjs)\n *     Rs = RLEs(n)\n */\n  __pyx_tuple__30 = PyTuple_Pack(7, __pyx_n_s_rleObjs, __pyx_n_s_n, __pyx_n_s_Rs, __pyx_n_s_py_string, __pyx_n_s_c_string, __pyx_n_s_i, __pyx_n_s_obj); if (unlikely(!__pyx_tuple__30)) __PYX_ERR(0, 119, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_tuple__30);\n  __Pyx_GIVEREF(__pyx_tuple__30);\n  __pyx_codeobj__31 = (PyObject*)__Pyx_PyCode_New(1, 0, 7, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__30, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_pysobatools__mask_pyx, __pyx_n_s_frString, 119, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__31)) __PYX_ERR(0, 119, __pyx_L1_error)\n\n  /* \"pysobatools/_mask.pyx\":137\n * # encode mask to RLEs objects\n * # list of RLE string can be generated by RLEs member function\n * def encode(np.ndarray[np.uint8_t, ndim=3, mode='fortran'] mask):             # <<<<<<<<<<<<<<\n *     h, w, n = mask.shape[0], mask.shape[1], mask.shape[2]\n *     cdef RLEs Rs = RLEs(n)\n */\n  __pyx_tuple__32 = PyTuple_Pack(6, __pyx_n_s_mask, __pyx_n_s_h, __pyx_n_s_w, __pyx_n_s_n, __pyx_n_s_Rs, __pyx_n_s_objs); if (unlikely(!__pyx_tuple__32)) __PYX_ERR(0, 137, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_tuple__32);\n  __Pyx_GIVEREF(__pyx_tuple__32);\n  __pyx_codeobj__33 = (PyObject*)__Pyx_PyCode_New(1, 0, 6, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__32, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_pysobatools__mask_pyx, __pyx_n_s_encode, 137, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__33)) __PYX_ERR(0, 137, __pyx_L1_error)\n\n  /* \"pysobatools/_mask.pyx\":145\n * \n * # decode mask from compressed list of RLE string or RLEs object\n * def decode(rleObjs):             # <<<<<<<<<<<<<<\n *     cdef RLEs Rs = _frString(rleObjs)\n *     h, w, n = Rs._R[0].h, Rs._R[0].w, Rs._n\n */\n  
__pyx_tuple__34 = PyTuple_Pack(6, __pyx_n_s_rleObjs, __pyx_n_s_Rs, __pyx_n_s_h, __pyx_n_s_w, __pyx_n_s_n, __pyx_n_s_masks); if (unlikely(!__pyx_tuple__34)) __PYX_ERR(0, 145, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_tuple__34);\n  __Pyx_GIVEREF(__pyx_tuple__34);\n  __pyx_codeobj__35 = (PyObject*)__Pyx_PyCode_New(1, 0, 6, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__34, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_pysobatools__mask_pyx, __pyx_n_s_decode, 145, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__35)) __PYX_ERR(0, 145, __pyx_L1_error)\n\n  /* \"pysobatools/_mask.pyx\":152\n *     return np.array(masks)\n * \n * def merge(rleObjs, intersect=0):             # <<<<<<<<<<<<<<\n *     cdef RLEs Rs = _frString(rleObjs)\n *     cdef RLEs R = RLEs(1)\n */\n  __pyx_tuple__36 = PyTuple_Pack(5, __pyx_n_s_rleObjs, __pyx_n_s_intersect, __pyx_n_s_Rs, __pyx_n_s_R, __pyx_n_s_obj); if (unlikely(!__pyx_tuple__36)) __PYX_ERR(0, 152, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_tuple__36);\n  __Pyx_GIVEREF(__pyx_tuple__36);\n  __pyx_codeobj__37 = (PyObject*)__Pyx_PyCode_New(2, 0, 5, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__36, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_pysobatools__mask_pyx, __pyx_n_s_merge, 152, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__37)) __PYX_ERR(0, 152, __pyx_L1_error)\n\n  /* \"pysobatools/_mask.pyx\":159\n *     return obj\n * \n * def area(rleObjs):             # <<<<<<<<<<<<<<\n *     cdef RLEs Rs = _frString(rleObjs)\n *     cdef uint* _a = <uint*> malloc(Rs._n* sizeof(uint))\n */\n  __pyx_tuple__38 = PyTuple_Pack(5, __pyx_n_s_rleObjs, __pyx_n_s_Rs, __pyx_n_s_a, __pyx_n_s_shape, __pyx_n_s_a_2); if (unlikely(!__pyx_tuple__38)) __PYX_ERR(0, 159, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_tuple__38);\n  __Pyx_GIVEREF(__pyx_tuple__38);\n  __pyx_codeobj__39 = (PyObject*)__Pyx_PyCode_New(1, 0, 5, 0, CO_OPTIMIZED|CO_NEWLOCALS, 
__pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__38, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_pysobatools__mask_pyx, __pyx_n_s_area, 159, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__39)) __PYX_ERR(0, 159, __pyx_L1_error)\n\n  /* \"pysobatools/_mask.pyx\":171\n * \n * # iou computation. support function overload (RLEs-RLEs and bbox-bbox).\n * def iou( dt, gt, pyiscrowd ):             # <<<<<<<<<<<<<<\n *     def _preproc(objs):\n *         if len(objs) == 0:\n */\n  __pyx_tuple__40 = PyTuple_Pack(18, __pyx_n_s_dt, __pyx_n_s_gt, __pyx_n_s_pyiscrowd, __pyx_n_s_preproc, __pyx_n_s_preproc, __pyx_n_s_rleIou, __pyx_n_s_rleIou, __pyx_n_s_bbIou, __pyx_n_s_bbIou, __pyx_n_s_len, __pyx_n_s_len, __pyx_n_s_iscrowd, __pyx_n_s_m, __pyx_n_s_n, __pyx_n_s_iou, __pyx_n_s_shape, __pyx_n_s_iouFun, __pyx_n_s_iou_2); if (unlikely(!__pyx_tuple__40)) __PYX_ERR(0, 171, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_tuple__40);\n  __Pyx_GIVEREF(__pyx_tuple__40);\n  __pyx_codeobj__41 = (PyObject*)__Pyx_PyCode_New(3, 0, 18, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__40, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_pysobatools__mask_pyx, __pyx_n_s_iou_2, 171, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__41)) __PYX_ERR(0, 171, __pyx_L1_error)\n\n  /* \"pysobatools/_mask.pyx\":241\n *     return iou.reshape((m,n), order='F')\n * \n * def toBbox( rleObjs ):             # <<<<<<<<<<<<<<\n *     cdef RLEs Rs = _frString(rleObjs)\n *     cdef siz n = Rs.n\n */\n  __pyx_tuple__42 = PyTuple_Pack(6, __pyx_n_s_rleObjs, __pyx_n_s_Rs, __pyx_n_s_n, __pyx_n_s_bb_2, __pyx_n_s_shape, __pyx_n_s_bb); if (unlikely(!__pyx_tuple__42)) __PYX_ERR(0, 241, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_tuple__42);\n  __Pyx_GIVEREF(__pyx_tuple__42);\n  __pyx_codeobj__43 = (PyObject*)__Pyx_PyCode_New(1, 0, 6, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__42, __pyx_empty_tuple, 
__pyx_empty_tuple, __pyx_kp_s_pysobatools__mask_pyx, __pyx_n_s_toBbox, 241, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__43)) __PYX_ERR(0, 241, __pyx_L1_error)\n\n  /* \"pysobatools/_mask.pyx\":253\n *     return bb\n * \n * def frBbox(np.ndarray[np.double_t, ndim=2] bb, siz h, siz w ):             # <<<<<<<<<<<<<<\n *     cdef siz n = bb.shape[0]\n *     Rs = RLEs(n)\n */\n  __pyx_tuple__44 = PyTuple_Pack(6, __pyx_n_s_bb, __pyx_n_s_h, __pyx_n_s_w, __pyx_n_s_n, __pyx_n_s_Rs, __pyx_n_s_objs); if (unlikely(!__pyx_tuple__44)) __PYX_ERR(0, 253, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_tuple__44);\n  __Pyx_GIVEREF(__pyx_tuple__44);\n  __pyx_codeobj__45 = (PyObject*)__Pyx_PyCode_New(3, 0, 6, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__44, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_pysobatools__mask_pyx, __pyx_n_s_frBbox, 253, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__45)) __PYX_ERR(0, 253, __pyx_L1_error)\n\n  /* \"pysobatools/_mask.pyx\":260\n *     return objs\n * \n * def frPoly( poly, siz h, siz w ):             # <<<<<<<<<<<<<<\n *     cdef np.ndarray[np.double_t, ndim=1] np_poly\n *     n = len(poly)\n */\n  __pyx_tuple__46 = PyTuple_Pack(9, __pyx_n_s_poly, __pyx_n_s_h, __pyx_n_s_w, __pyx_n_s_np_poly, __pyx_n_s_n, __pyx_n_s_Rs, __pyx_n_s_i, __pyx_n_s_p, __pyx_n_s_objs); if (unlikely(!__pyx_tuple__46)) __PYX_ERR(0, 260, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_tuple__46);\n  __Pyx_GIVEREF(__pyx_tuple__46);\n  __pyx_codeobj__47 = (PyObject*)__Pyx_PyCode_New(3, 0, 9, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__46, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_pysobatools__mask_pyx, __pyx_n_s_frPoly, 260, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__47)) __PYX_ERR(0, 260, __pyx_L1_error)\n\n  /* \"pysobatools/_mask.pyx\":270\n *     return objs\n * \n * def frUncompressedRLE(ucRles, siz h, siz w):             # <<<<<<<<<<<<<<\n *     cdef 
np.ndarray[np.uint32_t, ndim=1] cnts\n *     cdef RLE R\n */\n  __pyx_tuple__48 = PyTuple_Pack(11, __pyx_n_s_ucRles, __pyx_n_s_h, __pyx_n_s_w, __pyx_n_s_cnts, __pyx_n_s_R, __pyx_n_s_data, __pyx_n_s_n, __pyx_n_s_objs, __pyx_n_s_i, __pyx_n_s_Rs, __pyx_n_s_j); if (unlikely(!__pyx_tuple__48)) __PYX_ERR(0, 270, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_tuple__48);\n  __Pyx_GIVEREF(__pyx_tuple__48);\n  __pyx_codeobj__49 = (PyObject*)__Pyx_PyCode_New(3, 0, 11, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__48, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_pysobatools__mask_pyx, __pyx_n_s_frUncompressedRLE, 270, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__49)) __PYX_ERR(0, 270, __pyx_L1_error)\n\n  /* \"pysobatools/_mask.pyx\":288\n *     return objs\n * \n * def frPyObjects(pyobj, h, w):             # <<<<<<<<<<<<<<\n *     # encode rle from a list of python objects\n *     if type(pyobj) == np.ndarray:\n */\n  __pyx_tuple__50 = PyTuple_Pack(4, __pyx_n_s_pyobj, __pyx_n_s_h, __pyx_n_s_w, __pyx_n_s_objs); if (unlikely(!__pyx_tuple__50)) __PYX_ERR(0, 288, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_tuple__50);\n  __Pyx_GIVEREF(__pyx_tuple__50);\n  __pyx_codeobj__51 = (PyObject*)__Pyx_PyCode_New(3, 0, 4, 0, CO_OPTIMIZED|CO_NEWLOCALS, __pyx_empty_bytes, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_tuple__50, __pyx_empty_tuple, __pyx_empty_tuple, __pyx_kp_s_pysobatools__mask_pyx, __pyx_n_s_frPyObjects, 288, __pyx_empty_bytes); if (unlikely(!__pyx_codeobj__51)) __PYX_ERR(0, 288, __pyx_L1_error)\n  __Pyx_RefNannyFinishContext();\n  return 0;\n  __pyx_L1_error:;\n  __Pyx_RefNannyFinishContext();\n  return -1;\n}\n\nstatic CYTHON_SMALL_CODE int __Pyx_InitGlobals(void) {\n  if (__Pyx_InitStrings(__pyx_string_tab) < 0) __PYX_ERR(0, 1, __pyx_L1_error);\n  __pyx_int_0 = PyInt_FromLong(0); if (unlikely(!__pyx_int_0)) __PYX_ERR(0, 1, __pyx_L1_error)\n  __pyx_int_1 = PyInt_FromLong(1); if (unlikely(!__pyx_int_1)) __PYX_ERR(0, 1, 
__pyx_L1_error)\n  __pyx_int_2 = PyInt_FromLong(2); if (unlikely(!__pyx_int_2)) __PYX_ERR(0, 1, __pyx_L1_error)\n  __pyx_int_3 = PyInt_FromLong(3); if (unlikely(!__pyx_int_3)) __PYX_ERR(0, 1, __pyx_L1_error)\n  __pyx_int_4 = PyInt_FromLong(4); if (unlikely(!__pyx_int_4)) __PYX_ERR(0, 1, __pyx_L1_error)\n  return 0;\n  __pyx_L1_error:;\n  return -1;\n}\n\nstatic CYTHON_SMALL_CODE int __Pyx_modinit_global_init_code(void); /*proto*/\nstatic CYTHON_SMALL_CODE int __Pyx_modinit_variable_export_code(void); /*proto*/\nstatic CYTHON_SMALL_CODE int __Pyx_modinit_function_export_code(void); /*proto*/\nstatic CYTHON_SMALL_CODE int __Pyx_modinit_type_init_code(void); /*proto*/\nstatic CYTHON_SMALL_CODE int __Pyx_modinit_type_import_code(void); /*proto*/\nstatic CYTHON_SMALL_CODE int __Pyx_modinit_variable_import_code(void); /*proto*/\nstatic CYTHON_SMALL_CODE int __Pyx_modinit_function_import_code(void); /*proto*/\n\nstatic int __Pyx_modinit_global_init_code(void) {\n  __Pyx_RefNannyDeclarations\n  __Pyx_RefNannySetupContext(\"__Pyx_modinit_global_init_code\", 0);\n  /*--- Global init code ---*/\n  __Pyx_RefNannyFinishContext();\n  return 0;\n}\n\nstatic int __Pyx_modinit_variable_export_code(void) {\n  __Pyx_RefNannyDeclarations\n  __Pyx_RefNannySetupContext(\"__Pyx_modinit_variable_export_code\", 0);\n  /*--- Variable export code ---*/\n  __Pyx_RefNannyFinishContext();\n  return 0;\n}\n\nstatic int __Pyx_modinit_function_export_code(void) {\n  __Pyx_RefNannyDeclarations\n  __Pyx_RefNannySetupContext(\"__Pyx_modinit_function_export_code\", 0);\n  /*--- Function export code ---*/\n  __Pyx_RefNannyFinishContext();\n  return 0;\n}\n\nstatic int __Pyx_modinit_type_init_code(void) {\n  __Pyx_RefNannyDeclarations\n  __Pyx_RefNannySetupContext(\"__Pyx_modinit_type_init_code\", 0);\n  /*--- Type init code ---*/\n  if (PyType_Ready(&__pyx_type_11pysobatools_5_mask_RLEs) < 0) __PYX_ERR(0, 56, __pyx_L1_error)\n  #if PY_VERSION_HEX < 0x030800B1\n  
__pyx_type_11pysobatools_5_mask_RLEs.tp_print = 0;\n  #endif\n  if (PyObject_SetAttr(__pyx_m, __pyx_n_s_RLEs, (PyObject *)&__pyx_type_11pysobatools_5_mask_RLEs) < 0) __PYX_ERR(0, 56, __pyx_L1_error)\n  if (__Pyx_setup_reduce((PyObject*)&__pyx_type_11pysobatools_5_mask_RLEs) < 0) __PYX_ERR(0, 56, __pyx_L1_error)\n  __pyx_ptype_11pysobatools_5_mask_RLEs = &__pyx_type_11pysobatools_5_mask_RLEs;\n  if (PyType_Ready(&__pyx_type_11pysobatools_5_mask_Masks) < 0) __PYX_ERR(0, 77, __pyx_L1_error)\n  #if PY_VERSION_HEX < 0x030800B1\n  __pyx_type_11pysobatools_5_mask_Masks.tp_print = 0;\n  #endif\n  if ((CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP) && likely(!__pyx_type_11pysobatools_5_mask_Masks.tp_dictoffset && __pyx_type_11pysobatools_5_mask_Masks.tp_getattro == PyObject_GenericGetAttr)) {\n    __pyx_type_11pysobatools_5_mask_Masks.tp_getattro = __Pyx_PyObject_GenericGetAttr;\n  }\n  if (PyObject_SetAttr(__pyx_m, __pyx_n_s_Masks, (PyObject *)&__pyx_type_11pysobatools_5_mask_Masks) < 0) __PYX_ERR(0, 77, __pyx_L1_error)\n  if (__Pyx_setup_reduce((PyObject*)&__pyx_type_11pysobatools_5_mask_Masks) < 0) __PYX_ERR(0, 77, __pyx_L1_error)\n  __pyx_ptype_11pysobatools_5_mask_Masks = &__pyx_type_11pysobatools_5_mask_Masks;\n  __Pyx_RefNannyFinishContext();\n  return 0;\n  __pyx_L1_error:;\n  __Pyx_RefNannyFinishContext();\n  return -1;\n}\n\nstatic int __Pyx_modinit_type_import_code(void) {\n  __Pyx_RefNannyDeclarations\n  PyObject *__pyx_t_1 = NULL;\n  __Pyx_RefNannySetupContext(\"__Pyx_modinit_type_import_code\", 0);\n  /*--- Type import code ---*/\n  __pyx_t_1 = PyImport_ImportModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_t_1)) __PYX_ERR(3, 9, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_1);\n  __pyx_ptype_7cpython_4type_type = __Pyx_ImportType(__pyx_t_1, __Pyx_BUILTIN_MODULE_NAME, \"type\", \n  #if defined(PYPY_VERSION_NUM) && PYPY_VERSION_NUM < 0x050B0000\n  sizeof(PyTypeObject),\n  #else\n  sizeof(PyHeapTypeObject),\n  #endif\n  __Pyx_ImportType_CheckSize_Warn);\n   
if (!__pyx_ptype_7cpython_4type_type) __PYX_ERR(3, 9, __pyx_L1_error)\n  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;\n  __pyx_t_1 = PyImport_ImportModule(\"numpy\"); if (unlikely(!__pyx_t_1)) __PYX_ERR(2, 206, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_1);\n  __pyx_ptype_5numpy_dtype = __Pyx_ImportType(__pyx_t_1, \"numpy\", \"dtype\", sizeof(PyArray_Descr), __Pyx_ImportType_CheckSize_Ignore);\n   if (!__pyx_ptype_5numpy_dtype) __PYX_ERR(2, 206, __pyx_L1_error)\n  __pyx_ptype_5numpy_flatiter = __Pyx_ImportType(__pyx_t_1, \"numpy\", \"flatiter\", sizeof(PyArrayIterObject), __Pyx_ImportType_CheckSize_Warn);\n   if (!__pyx_ptype_5numpy_flatiter) __PYX_ERR(2, 229, __pyx_L1_error)\n  __pyx_ptype_5numpy_broadcast = __Pyx_ImportType(__pyx_t_1, \"numpy\", \"broadcast\", sizeof(PyArrayMultiIterObject), __Pyx_ImportType_CheckSize_Warn);\n   if (!__pyx_ptype_5numpy_broadcast) __PYX_ERR(2, 233, __pyx_L1_error)\n  __pyx_ptype_5numpy_ndarray = __Pyx_ImportType(__pyx_t_1, \"numpy\", \"ndarray\", sizeof(PyArrayObject), __Pyx_ImportType_CheckSize_Ignore);\n   if (!__pyx_ptype_5numpy_ndarray) __PYX_ERR(2, 242, __pyx_L1_error)\n  __pyx_ptype_5numpy_ufunc = __Pyx_ImportType(__pyx_t_1, \"numpy\", \"ufunc\", sizeof(PyUFuncObject), __Pyx_ImportType_CheckSize_Warn);\n   if (!__pyx_ptype_5numpy_ufunc) __PYX_ERR(2, 918, __pyx_L1_error)\n  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;\n  __Pyx_RefNannyFinishContext();\n  return 0;\n  __pyx_L1_error:;\n  __Pyx_XDECREF(__pyx_t_1);\n  __Pyx_RefNannyFinishContext();\n  return -1;\n}\n\nstatic int __Pyx_modinit_variable_import_code(void) {\n  __Pyx_RefNannyDeclarations\n  __Pyx_RefNannySetupContext(\"__Pyx_modinit_variable_import_code\", 0);\n  /*--- Variable import code ---*/\n  __Pyx_RefNannyFinishContext();\n  return 0;\n}\n\nstatic int __Pyx_modinit_function_import_code(void) {\n  __Pyx_RefNannyDeclarations\n  __Pyx_RefNannySetupContext(\"__Pyx_modinit_function_import_code\", 0);\n  /*--- Function import code ---*/\n  __Pyx_RefNannyFinishContext();\n  
return 0;\n}\n\n\n#if PY_MAJOR_VERSION < 3\n#ifdef CYTHON_NO_PYINIT_EXPORT\n#define __Pyx_PyMODINIT_FUNC void\n#else\n#define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC\n#endif\n#else\n#ifdef CYTHON_NO_PYINIT_EXPORT\n#define __Pyx_PyMODINIT_FUNC PyObject *\n#else\n#define __Pyx_PyMODINIT_FUNC PyMODINIT_FUNC\n#endif\n#endif\n\n\n#if PY_MAJOR_VERSION < 3\n__Pyx_PyMODINIT_FUNC init_mask(void) CYTHON_SMALL_CODE; /*proto*/\n__Pyx_PyMODINIT_FUNC init_mask(void)\n#else\n__Pyx_PyMODINIT_FUNC PyInit__mask(void) CYTHON_SMALL_CODE; /*proto*/\n__Pyx_PyMODINIT_FUNC PyInit__mask(void)\n#if CYTHON_PEP489_MULTI_PHASE_INIT\n{\n  return PyModuleDef_Init(&__pyx_moduledef);\n}\nstatic CYTHON_SMALL_CODE int __Pyx_check_single_interpreter(void) {\n    #if PY_VERSION_HEX >= 0x030700A1\n    static PY_INT64_T main_interpreter_id = -1;\n    PY_INT64_T current_id = PyInterpreterState_GetID(PyThreadState_Get()->interp);\n    if (main_interpreter_id == -1) {\n        main_interpreter_id = current_id;\n        return (unlikely(current_id == -1)) ? 
-1 : 0;\n    } else if (unlikely(main_interpreter_id != current_id))\n    #else\n    static PyInterpreterState *main_interpreter = NULL;\n    PyInterpreterState *current_interpreter = PyThreadState_Get()->interp;\n    if (!main_interpreter) {\n        main_interpreter = current_interpreter;\n    } else if (unlikely(main_interpreter != current_interpreter))\n    #endif\n    {\n        PyErr_SetString(\n            PyExc_ImportError,\n            \"Interpreter change detected - this module can only be loaded into one interpreter per process.\");\n        return -1;\n    }\n    return 0;\n}\nstatic CYTHON_SMALL_CODE int __Pyx_copy_spec_to_module(PyObject *spec, PyObject *moddict, const char* from_name, const char* to_name, int allow_none) {\n    PyObject *value = PyObject_GetAttrString(spec, from_name);\n    int result = 0;\n    if (likely(value)) {\n        if (allow_none || value != Py_None) {\n            result = PyDict_SetItemString(moddict, to_name, value);\n        }\n        Py_DECREF(value);\n    } else if (PyErr_ExceptionMatches(PyExc_AttributeError)) {\n        PyErr_Clear();\n    } else {\n        result = -1;\n    }\n    return result;\n}\nstatic CYTHON_SMALL_CODE PyObject* __pyx_pymod_create(PyObject *spec, CYTHON_UNUSED PyModuleDef *def) {\n    PyObject *module = NULL, *moddict, *modname;\n    if (__Pyx_check_single_interpreter())\n        return NULL;\n    if (__pyx_m)\n        return __Pyx_NewRef(__pyx_m);\n    modname = PyObject_GetAttrString(spec, \"name\");\n    if (unlikely(!modname)) goto bad;\n    module = PyModule_NewObject(modname);\n    Py_DECREF(modname);\n    if (unlikely(!module)) goto bad;\n    moddict = PyModule_GetDict(module);\n    if (unlikely(!moddict)) goto bad;\n    if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, \"loader\", \"__loader__\", 1) < 0)) goto bad;\n    if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, \"origin\", \"__file__\", 1) < 0)) goto bad;\n    if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, 
\"parent\", \"__package__\", 1) < 0)) goto bad;\n    if (unlikely(__Pyx_copy_spec_to_module(spec, moddict, \"submodule_search_locations\", \"__path__\", 0) < 0)) goto bad;\n    return module;\nbad:\n    Py_XDECREF(module);\n    return NULL;\n}\n\n\nstatic CYTHON_SMALL_CODE int __pyx_pymod_exec__mask(PyObject *__pyx_pyinit_module)\n#endif\n#endif\n{\n  PyObject *__pyx_t_1 = NULL;\n  PyObject *__pyx_t_2 = NULL;\n  int __pyx_t_3;\n  __Pyx_RefNannyDeclarations\n  #if CYTHON_PEP489_MULTI_PHASE_INIT\n  if (__pyx_m) {\n    if (__pyx_m == __pyx_pyinit_module) return 0;\n    PyErr_SetString(PyExc_RuntimeError, \"Module '_mask' has already been imported. Re-initialisation is not supported.\");\n    return -1;\n  }\n  #elif PY_MAJOR_VERSION >= 3\n  if (__pyx_m) return __Pyx_NewRef(__pyx_m);\n  #endif\n  #if CYTHON_REFNANNY\n__Pyx_RefNanny = __Pyx_RefNannyImportAPI(\"refnanny\");\nif (!__Pyx_RefNanny) {\n  PyErr_Clear();\n  __Pyx_RefNanny = __Pyx_RefNannyImportAPI(\"Cython.Runtime.refnanny\");\n  if (!__Pyx_RefNanny)\n      Py_FatalError(\"failed to import 'refnanny' module\");\n}\n#endif\n  __Pyx_RefNannySetupContext(\"__Pyx_PyMODINIT_FUNC PyInit__mask(void)\", 0);\n  if (__Pyx_check_binary_version() < 0) __PYX_ERR(0, 1, __pyx_L1_error)\n  #ifdef __Pxy_PyFrame_Initialize_Offsets\n  __Pxy_PyFrame_Initialize_Offsets();\n  #endif\n  __pyx_empty_tuple = PyTuple_New(0); if (unlikely(!__pyx_empty_tuple)) __PYX_ERR(0, 1, __pyx_L1_error)\n  __pyx_empty_bytes = PyBytes_FromStringAndSize(\"\", 0); if (unlikely(!__pyx_empty_bytes)) __PYX_ERR(0, 1, __pyx_L1_error)\n  __pyx_empty_unicode = PyUnicode_FromStringAndSize(\"\", 0); if (unlikely(!__pyx_empty_unicode)) __PYX_ERR(0, 1, __pyx_L1_error)\n  #ifdef __Pyx_CyFunction_USED\n  if (__pyx_CyFunction_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error)\n  #endif\n  #ifdef __Pyx_FusedFunction_USED\n  if (__pyx_FusedFunction_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error)\n  #endif\n  #ifdef __Pyx_Coroutine_USED\n  if (__pyx_Coroutine_init() < 0) 
__PYX_ERR(0, 1, __pyx_L1_error)\n  #endif\n  #ifdef __Pyx_Generator_USED\n  if (__pyx_Generator_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error)\n  #endif\n  #ifdef __Pyx_AsyncGen_USED\n  if (__pyx_AsyncGen_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error)\n  #endif\n  #ifdef __Pyx_StopAsyncIteration_USED\n  if (__pyx_StopAsyncIteration_init() < 0) __PYX_ERR(0, 1, __pyx_L1_error)\n  #endif\n  /*--- Library function declarations ---*/\n  /*--- Threads initialization code ---*/\n  #if defined(__PYX_FORCE_INIT_THREADS) && __PYX_FORCE_INIT_THREADS\n  #ifdef WITH_THREAD /* Python build with threading support? */\n  PyEval_InitThreads();\n  #endif\n  #endif\n  /*--- Module creation code ---*/\n  #if CYTHON_PEP489_MULTI_PHASE_INIT\n  __pyx_m = __pyx_pyinit_module;\n  Py_INCREF(__pyx_m);\n  #else\n  #if PY_MAJOR_VERSION < 3\n  __pyx_m = Py_InitModule4(\"_mask\", __pyx_methods, 0, 0, PYTHON_API_VERSION); Py_XINCREF(__pyx_m);\n  #else\n  __pyx_m = PyModule_Create(&__pyx_moduledef);\n  #endif\n  if (unlikely(!__pyx_m)) __PYX_ERR(0, 1, __pyx_L1_error)\n  #endif\n  __pyx_d = PyModule_GetDict(__pyx_m); if (unlikely(!__pyx_d)) __PYX_ERR(0, 1, __pyx_L1_error)\n  Py_INCREF(__pyx_d);\n  __pyx_b = PyImport_AddModule(__Pyx_BUILTIN_MODULE_NAME); if (unlikely(!__pyx_b)) __PYX_ERR(0, 1, __pyx_L1_error)\n  Py_INCREF(__pyx_b);\n  __pyx_cython_runtime = PyImport_AddModule((char *) \"cython_runtime\"); if (unlikely(!__pyx_cython_runtime)) __PYX_ERR(0, 1, __pyx_L1_error)\n  Py_INCREF(__pyx_cython_runtime);\n  if (PyObject_SetAttrString(__pyx_m, \"__builtins__\", __pyx_b) < 0) __PYX_ERR(0, 1, __pyx_L1_error);\n  /*--- Initialize various global constants etc. 
---*/\n  if (__Pyx_InitGlobals() < 0) __PYX_ERR(0, 1, __pyx_L1_error)\n  #if PY_MAJOR_VERSION < 3 && (__PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT)\n  if (__Pyx_init_sys_getdefaultencoding_params() < 0) __PYX_ERR(0, 1, __pyx_L1_error)\n  #endif\n  if (__pyx_module_is_main_pysobatools___mask) {\n    if (PyObject_SetAttr(__pyx_m, __pyx_n_s_name, __pyx_n_s_main) < 0) __PYX_ERR(0, 1, __pyx_L1_error)\n  }\n  #if PY_MAJOR_VERSION >= 3\n  {\n    PyObject *modules = PyImport_GetModuleDict(); if (unlikely(!modules)) __PYX_ERR(0, 1, __pyx_L1_error)\n    if (!PyDict_GetItemString(modules, \"pysobatools._mask\")) {\n      if (unlikely(PyDict_SetItemString(modules, \"pysobatools._mask\", __pyx_m) < 0)) __PYX_ERR(0, 1, __pyx_L1_error)\n    }\n  }\n  #endif\n  /*--- Builtin init code ---*/\n  if (__Pyx_InitCachedBuiltins() < 0) goto __pyx_L1_error;\n  /*--- Constants init code ---*/\n  if (__Pyx_InitCachedConstants() < 0) goto __pyx_L1_error;\n  /*--- Global type/function init code ---*/\n  (void)__Pyx_modinit_global_init_code();\n  (void)__Pyx_modinit_variable_export_code();\n  (void)__Pyx_modinit_function_export_code();\n  if (unlikely(__Pyx_modinit_type_init_code() != 0)) goto __pyx_L1_error;\n  if (unlikely(__Pyx_modinit_type_import_code() != 0)) goto __pyx_L1_error;\n  (void)__Pyx_modinit_variable_import_code();\n  (void)__Pyx_modinit_function_import_code();\n  /*--- Execution code ---*/\n  #if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED)\n  if (__Pyx_patch_abc() < 0) __PYX_ERR(0, 1, __pyx_L1_error)\n  #endif\n\n  /* \"pysobatools/_mask.pyx\":11\n * #**************************************************************************\n * \n * __author__ = 'tsungyi'             # <<<<<<<<<<<<<<\n * \n * import sys\n */\n  if (PyDict_SetItem(__pyx_d, __pyx_n_s_author, __pyx_n_s_tsungyi) < 0) __PYX_ERR(0, 11, __pyx_L1_error)\n\n  /* \"pysobatools/_mask.pyx\":13\n * __author__ = 'tsungyi'\n * \n * import sys             # 
<<<<<<<<<<<<<<\n * PYTHON_VERSION = sys.version_info[0]\n * \n */\n  __pyx_t_1 = __Pyx_Import(__pyx_n_s_sys, 0, -1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 13, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_1);\n  if (PyDict_SetItem(__pyx_d, __pyx_n_s_sys, __pyx_t_1) < 0) __PYX_ERR(0, 13, __pyx_L1_error)\n  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;\n\n  /* \"pysobatools/_mask.pyx\":14\n * \n * import sys\n * PYTHON_VERSION = sys.version_info[0]             # <<<<<<<<<<<<<<\n * \n * # import both Python-level and C-level symbols of Numpy\n */\n  __Pyx_GetModuleGlobalName(__pyx_t_1, __pyx_n_s_sys); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 14, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_1);\n  __pyx_t_2 = __Pyx_PyObject_GetAttrStr(__pyx_t_1, __pyx_n_s_version_info); if (unlikely(!__pyx_t_2)) __PYX_ERR(0, 14, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_2);\n  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;\n  __pyx_t_1 = __Pyx_GetItemInt(__pyx_t_2, 0, long, 1, __Pyx_PyInt_From_long, 0, 0, 1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 14, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_1);\n  __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;\n  if (PyDict_SetItem(__pyx_d, __pyx_n_s_PYTHON_VERSION, __pyx_t_1) < 0) __PYX_ERR(0, 14, __pyx_L1_error)\n  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;\n\n  /* \"pysobatools/_mask.pyx\":18\n * # import both Python-level and C-level symbols of Numpy\n * # the API uses Numpy to interface C and Python\n * import numpy as np             # <<<<<<<<<<<<<<\n * cimport numpy as np\n * from libc.stdlib cimport malloc, free\n */\n  __pyx_t_1 = __Pyx_Import(__pyx_n_s_numpy, 0, -1); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 18, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_1);\n  if (PyDict_SetItem(__pyx_d, __pyx_n_s_np, __pyx_t_1) < 0) __PYX_ERR(0, 18, __pyx_L1_error)\n  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;\n\n  /* \"pysobatools/_mask.pyx\":23\n * \n * # intialized Numpy. 
must do.\n * np.import_array()             # <<<<<<<<<<<<<<\n * \n * # import numpy C function\n */\n  __pyx_t_3 = __pyx_f_5numpy_import_array(); if (unlikely(__pyx_t_3 == ((int)-1))) __PYX_ERR(0, 23, __pyx_L1_error)\n\n  /* \"pysobatools/_mask.pyx\":103\n * \n * # internal conversion from Python RLEs object to compressed RLE format\n * def _toString(RLEs Rs):             # <<<<<<<<<<<<<<\n *     cdef siz n = Rs.n\n *     cdef bytes py_string\n */\n  __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_11pysobatools_5_mask_1_toString, NULL, __pyx_n_s_pysobatools__mask); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 103, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_1);\n  if (PyDict_SetItem(__pyx_d, __pyx_n_s_toString, __pyx_t_1) < 0) __PYX_ERR(0, 103, __pyx_L1_error)\n  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;\n\n  /* \"pysobatools/_mask.pyx\":119\n * \n * # internal conversion from compressed RLE format to Python RLEs object\n * def _frString(rleObjs):             # <<<<<<<<<<<<<<\n *     cdef siz n = len(rleObjs)\n *     Rs = RLEs(n)\n */\n  __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_11pysobatools_5_mask_3_frString, NULL, __pyx_n_s_pysobatools__mask); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 119, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_1);\n  if (PyDict_SetItem(__pyx_d, __pyx_n_s_frString, __pyx_t_1) < 0) __PYX_ERR(0, 119, __pyx_L1_error)\n  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;\n\n  /* \"pysobatools/_mask.pyx\":137\n * # encode mask to RLEs objects\n * # list of RLE string can be generated by RLEs member function\n * def encode(np.ndarray[np.uint8_t, ndim=3, mode='fortran'] mask):             # <<<<<<<<<<<<<<\n *     h, w, n = mask.shape[0], mask.shape[1], mask.shape[2]\n *     cdef RLEs Rs = RLEs(n)\n */\n  __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_11pysobatools_5_mask_5encode, NULL, __pyx_n_s_pysobatools__mask); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 137, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_1);\n  if (PyDict_SetItem(__pyx_d, __pyx_n_s_encode, __pyx_t_1) < 0) __PYX_ERR(0, 137, 
__pyx_L1_error)\n  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;\n\n  /* \"pysobatools/_mask.pyx\":145\n * \n * # decode mask from compressed list of RLE string or RLEs object\n * def decode(rleObjs):             # <<<<<<<<<<<<<<\n *     cdef RLEs Rs = _frString(rleObjs)\n *     h, w, n = Rs._R[0].h, Rs._R[0].w, Rs._n\n */\n  __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_11pysobatools_5_mask_7decode, NULL, __pyx_n_s_pysobatools__mask); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 145, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_1);\n  if (PyDict_SetItem(__pyx_d, __pyx_n_s_decode, __pyx_t_1) < 0) __PYX_ERR(0, 145, __pyx_L1_error)\n  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;\n\n  /* \"pysobatools/_mask.pyx\":152\n *     return np.array(masks)\n * \n * def merge(rleObjs, intersect=0):             # <<<<<<<<<<<<<<\n *     cdef RLEs Rs = _frString(rleObjs)\n *     cdef RLEs R = RLEs(1)\n */\n  __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_11pysobatools_5_mask_9merge, NULL, __pyx_n_s_pysobatools__mask); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 152, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_1);\n  if (PyDict_SetItem(__pyx_d, __pyx_n_s_merge, __pyx_t_1) < 0) __PYX_ERR(0, 152, __pyx_L1_error)\n  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;\n\n  /* \"pysobatools/_mask.pyx\":159\n *     return obj\n * \n * def area(rleObjs):             # <<<<<<<<<<<<<<\n *     cdef RLEs Rs = _frString(rleObjs)\n *     cdef uint* _a = <uint*> malloc(Rs._n* sizeof(uint))\n */\n  __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_11pysobatools_5_mask_11area, NULL, __pyx_n_s_pysobatools__mask); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 159, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_1);\n  if (PyDict_SetItem(__pyx_d, __pyx_n_s_area, __pyx_t_1) < 0) __PYX_ERR(0, 159, __pyx_L1_error)\n  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;\n\n  /* \"pysobatools/_mask.pyx\":171\n * \n * # iou computation. 
support function overload (RLEs-RLEs and bbox-bbox).\n * def iou( dt, gt, pyiscrowd ):             # <<<<<<<<<<<<<<\n *     def _preproc(objs):\n *         if len(objs) == 0:\n */\n  __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_11pysobatools_5_mask_13iou, NULL, __pyx_n_s_pysobatools__mask); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 171, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_1);\n  if (PyDict_SetItem(__pyx_d, __pyx_n_s_iou_2, __pyx_t_1) < 0) __PYX_ERR(0, 171, __pyx_L1_error)\n  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;\n\n  /* \"pysobatools/_mask.pyx\":241\n *     return iou.reshape((m,n), order='F')\n * \n * def toBbox( rleObjs ):             # <<<<<<<<<<<<<<\n *     cdef RLEs Rs = _frString(rleObjs)\n *     cdef siz n = Rs.n\n */\n  __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_11pysobatools_5_mask_15toBbox, NULL, __pyx_n_s_pysobatools__mask); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 241, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_1);\n  if (PyDict_SetItem(__pyx_d, __pyx_n_s_toBbox, __pyx_t_1) < 0) __PYX_ERR(0, 241, __pyx_L1_error)\n  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;\n\n  /* \"pysobatools/_mask.pyx\":253\n *     return bb\n * \n * def frBbox(np.ndarray[np.double_t, ndim=2] bb, siz h, siz w ):             # <<<<<<<<<<<<<<\n *     cdef siz n = bb.shape[0]\n *     Rs = RLEs(n)\n */\n  __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_11pysobatools_5_mask_17frBbox, NULL, __pyx_n_s_pysobatools__mask); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 253, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_1);\n  if (PyDict_SetItem(__pyx_d, __pyx_n_s_frBbox, __pyx_t_1) < 0) __PYX_ERR(0, 253, __pyx_L1_error)\n  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;\n\n  /* \"pysobatools/_mask.pyx\":260\n *     return objs\n * \n * def frPoly( poly, siz h, siz w ):             # <<<<<<<<<<<<<<\n *     cdef np.ndarray[np.double_t, ndim=1] np_poly\n *     n = len(poly)\n */\n  __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_11pysobatools_5_mask_19frPoly, NULL, __pyx_n_s_pysobatools__mask); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 
260, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_1);\n  if (PyDict_SetItem(__pyx_d, __pyx_n_s_frPoly, __pyx_t_1) < 0) __PYX_ERR(0, 260, __pyx_L1_error)\n  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;\n\n  /* \"pysobatools/_mask.pyx\":270\n *     return objs\n * \n * def frUncompressedRLE(ucRles, siz h, siz w):             # <<<<<<<<<<<<<<\n *     cdef np.ndarray[np.uint32_t, ndim=1] cnts\n *     cdef RLE R\n */\n  __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_11pysobatools_5_mask_21frUncompressedRLE, NULL, __pyx_n_s_pysobatools__mask); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 270, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_1);\n  if (PyDict_SetItem(__pyx_d, __pyx_n_s_frUncompressedRLE, __pyx_t_1) < 0) __PYX_ERR(0, 270, __pyx_L1_error)\n  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;\n\n  /* \"pysobatools/_mask.pyx\":288\n *     return objs\n * \n * def frPyObjects(pyobj, h, w):             # <<<<<<<<<<<<<<\n *     # encode rle from a list of python objects\n *     if type(pyobj) == np.ndarray:\n */\n  __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_11pysobatools_5_mask_23frPyObjects, NULL, __pyx_n_s_pysobatools__mask); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 288, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_1);\n  if (PyDict_SetItem(__pyx_d, __pyx_n_s_frPyObjects, __pyx_t_1) < 0) __PYX_ERR(0, 288, __pyx_L1_error)\n  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;\n\n  /* \"pysobatools/_mask.pyx\":1\n * # distutils: language = c             # <<<<<<<<<<<<<<\n * # distutils: sources = ../common/maskApi.c\n * \n */\n  __pyx_t_1 = __Pyx_PyDict_NewPresized(0); if (unlikely(!__pyx_t_1)) __PYX_ERR(0, 1, __pyx_L1_error)\n  __Pyx_GOTREF(__pyx_t_1);\n  if (PyDict_SetItem(__pyx_d, __pyx_n_s_test, __pyx_t_1) < 0) __PYX_ERR(0, 1, __pyx_L1_error)\n  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;\n\n  /* \"../../miniconda/envs/py36/lib/python3.6/site-packages/Cython/Includes/numpy/__init__.pxd\":1046\n *         raise ImportError(\"numpy.core.umath failed to import\")\n * \n * cdef inline int import_ufunc() except -1:             
# <<<<<<<<<<<<<<
 *     try:
 *         _import_umath()
 */

  /*--- Wrapped vars code ---*/

  goto __pyx_L0;
  __pyx_L1_error:;
  __Pyx_XDECREF(__pyx_t_1);
  __Pyx_XDECREF(__pyx_t_2);
  /* Module init failed: attach a traceback if the module dict exists,
   * drop the half-initialised module, and ensure an exception is set. */
  if (__pyx_m) {
    if (__pyx_d) {
      __Pyx_AddTraceback("init pysobatools._mask", __pyx_clineno, __pyx_lineno, __pyx_filename);
    }
    Py_CLEAR(__pyx_m);
  } else if (!PyErr_Occurred()) {
    PyErr_SetString(PyExc_ImportError, "init pysobatools._mask");
  }
  __pyx_L0:;
  __Pyx_RefNannyFinishContext();
  #if CYTHON_PEP489_MULTI_PHASE_INIT
  return (__pyx_m != NULL) ? 0 : -1;
  #elif PY_MAJOR_VERSION >= 3
  return __pyx_m;
  #else
  return;
  #endif
}

/* --- Runtime support code --- */
/* Refnanny */
#if CYTHON_REFNANNY
/* Import the optional "refnanny" reference-count debugging module and
 * return its C API struct, extracted from the module's RefNannyAPI
 * attribute via PyLong_AsVoidPtr; returns NULL when unavailable. */
static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname) {
    PyObject *m = NULL, *p = NULL;
    void *r = NULL;
    m = PyImport_ImportModule(modname);
    if (!m) goto end;
    p = PyObject_GetAttrString(m, "RefNannyAPI");
    if (!p) goto end;
    r = PyLong_AsVoidPtr(p);
end:
    /* Both refs are released; only the raw API pointer is kept. */
    Py_XDECREF(p);
    Py_XDECREF(m);
    return (__Pyx_RefNannyAPIStruct *)r;
}
#endif

/* PyObjectGetAttrStr */
#if CYTHON_USE_TYPE_SLOTS
/* Attribute lookup that goes straight through the type's tp_getattro
 * (or tp_getattr on Py2) slot, falling back to PyObject_GetAttr. */
static CYTHON_INLINE PyObject* __Pyx_PyObject_GetAttrStr(PyObject* obj, PyObject* attr_name) {
    PyTypeObject* tp = Py_TYPE(obj);
    if (likely(tp->tp_getattro))
        return tp->tp_getattro(obj, attr_name);
#if PY_MAJOR_VERSION < 3
    if (likely(tp->tp_getattr))
        return tp->tp_getattr(obj, PyString_AS_STRING(attr_name));
#endif
    return PyObject_GetAttr(obj, attr_name);
}
#endif

/* GetBuiltinName */
/* Look up `name` on the cached builtins module (__pyx_b, set during
 * module init); on failure raise the familiar NameError message. */
static PyObject *__Pyx_GetBuiltinName(PyObject *name) {
    PyObject* result = __Pyx_PyObject_GetAttrStr(__pyx_b, name);
    if (unlikely(!result)) {
        PyErr_Format(PyExc_NameError,
#if PY_MAJOR_VERSION >= 3
            "name '%U' is not defined", name);
#else
            "name '%.200s' is not defined", 
PyString_AS_STRING(name));
#endif
    }
    return result;
}

/* RaiseDoubleKeywords */
/* Raise the TypeError used when a keyword argument duplicates a value
 * that was already supplied positionally; uses %U (unicode) formatting
 * on Py3 and %s on Py2. */
static void __Pyx_RaiseDoubleKeywordsError(
    const char* func_name,
    PyObject* kw_name)
{
    PyErr_Format(PyExc_TypeError,
        #if PY_MAJOR_VERSION >= 3
        "%s() got multiple values for keyword argument '%U'", func_name, kw_name);
        #else
        "%s() got multiple values for keyword argument '%s'", func_name,
        PyString_AsString(kw_name));
        #endif
}

/* ParseKeywords */
/* Match the entries of `kwds` against the expected argument names in
 * `argnames` (keyword-capable names start at index `num_pos_args`):
 * recognised keywords are stored into `values`, unmatched ones go into
 * the **kwargs dict `kwds2` when one is given, otherwise an error is
 * raised. Returns 0 on success, -1 with an exception set on failure.
 * NOTE(review): the function body continues beyond this excerpt. */
static int __Pyx_ParseOptionalKeywords(
    PyObject *kwds,
    PyObject **argnames[],
    PyObject *kwds2,
    PyObject *values[],
    Py_ssize_t num_pos_args,
    const char* function_name)
{
    PyObject *key = 0, *value = 0;
    Py_ssize_t pos = 0;
    PyObject*** name;
    PyObject*** first_kw_arg = argnames + num_pos_args;
    while (PyDict_Next(kwds, &pos, &key, &value)) {
        /* Fast path: pointer-identity comparison against the expected
         * name objects (no string compare needed when they match). */
        name = first_kw_arg;
        while (*name && (**name != key)) name++;
        if (*name) {
            values[name-argnames] = value;
            continue;
        }
        /* Slow path: compare by value, per string type. */
        name = first_kw_arg;
        #if PY_MAJOR_VERSION < 3
        if (likely(PyString_CheckExact(key)) || likely(PyString_Check(key))) {
            while (*name) {
                if ((CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**name) == PyString_GET_SIZE(key))
                        && _PyString_Eq(**name, key)) {
                    values[name-argnames] = value;
                    break;
                }
                name++;
            }
            if (*name) continue;
            else {
                /* Not a keyword-capable name: check whether it collides
                 * with a positional-only argument already filled in. */
                PyObject*** argname = argnames;
                while (argname != first_kw_arg) {
                    if ((**argname == key) || (
                            (CYTHON_COMPILING_IN_PYPY || PyString_GET_SIZE(**argname) == PyString_GET_SIZE(key))
                             && _PyString_Eq(**argname, key))) {
                        goto 
arg_passed_twice;\n                    }\n                    argname++;\n                }\n            }\n        } else\n        #endif\n        if (likely(PyUnicode_Check(key))) {\n            while (*name) {\n                int cmp = (**name == key) ? 0 :\n                #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3\n                    (PyUnicode_GET_SIZE(**name) != PyUnicode_GET_SIZE(key)) ? 1 :\n                #endif\n                    PyUnicode_Compare(**name, key);\n                if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad;\n                if (cmp == 0) {\n                    values[name-argnames] = value;\n                    break;\n                }\n                name++;\n            }\n            if (*name) continue;\n            else {\n                PyObject*** argname = argnames;\n                while (argname != first_kw_arg) {\n                    int cmp = (**argname == key) ? 0 :\n                    #if !CYTHON_COMPILING_IN_PYPY && PY_MAJOR_VERSION >= 3\n                        (PyUnicode_GET_SIZE(**argname) != PyUnicode_GET_SIZE(key)) ? 
1 :\n                    #endif\n                        PyUnicode_Compare(**argname, key);\n                    if (cmp < 0 && unlikely(PyErr_Occurred())) goto bad;\n                    if (cmp == 0) goto arg_passed_twice;\n                    argname++;\n                }\n            }\n        } else\n            goto invalid_keyword_type;\n        if (kwds2) {\n            if (unlikely(PyDict_SetItem(kwds2, key, value))) goto bad;\n        } else {\n            goto invalid_keyword;\n        }\n    }\n    return 0;\narg_passed_twice:\n    __Pyx_RaiseDoubleKeywordsError(function_name, key);\n    goto bad;\ninvalid_keyword_type:\n    PyErr_Format(PyExc_TypeError,\n        \"%.200s() keywords must be strings\", function_name);\n    goto bad;\ninvalid_keyword:\n    PyErr_Format(PyExc_TypeError,\n    #if PY_MAJOR_VERSION < 3\n        \"%.200s() got an unexpected keyword argument '%.200s'\",\n        function_name, PyString_AsString(key));\n    #else\n        \"%s() got an unexpected keyword argument '%U'\",\n        function_name, key);\n    #endif\nbad:\n    return -1;\n}\n\n/* RaiseArgTupleInvalid */\nstatic void __Pyx_RaiseArgtupleInvalid(\n    const char* func_name,\n    int exact,\n    Py_ssize_t num_min,\n    Py_ssize_t num_max,\n    Py_ssize_t num_found)\n{\n    Py_ssize_t num_expected;\n    const char *more_or_less;\n    if (num_found < num_min) {\n        num_expected = num_min;\n        more_or_less = \"at least\";\n    } else {\n        num_expected = num_max;\n        more_or_less = \"at most\";\n    }\n    if (exact) {\n        more_or_less = \"exactly\";\n    }\n    PyErr_Format(PyExc_TypeError,\n                 \"%.200s() takes %.8s %\" CYTHON_FORMAT_SSIZE_T \"d positional argument%.1s (%\" CYTHON_FORMAT_SSIZE_T \"d given)\",\n                 func_name, more_or_less, num_expected,\n                 (num_expected == 1) ? 
\"\" : \"s\", num_found);\n}\n\n/* BytesEquals */\nstatic CYTHON_INLINE int __Pyx_PyBytes_Equals(PyObject* s1, PyObject* s2, int equals) {\n#if CYTHON_COMPILING_IN_PYPY\n    return PyObject_RichCompareBool(s1, s2, equals);\n#else\n    if (s1 == s2) {\n        return (equals == Py_EQ);\n    } else if (PyBytes_CheckExact(s1) & PyBytes_CheckExact(s2)) {\n        const char *ps1, *ps2;\n        Py_ssize_t length = PyBytes_GET_SIZE(s1);\n        if (length != PyBytes_GET_SIZE(s2))\n            return (equals == Py_NE);\n        ps1 = PyBytes_AS_STRING(s1);\n        ps2 = PyBytes_AS_STRING(s2);\n        if (ps1[0] != ps2[0]) {\n            return (equals == Py_NE);\n        } else if (length == 1) {\n            return (equals == Py_EQ);\n        } else {\n            int result;\n#if CYTHON_USE_UNICODE_INTERNALS\n            Py_hash_t hash1, hash2;\n            hash1 = ((PyBytesObject*)s1)->ob_shash;\n            hash2 = ((PyBytesObject*)s2)->ob_shash;\n            if (hash1 != hash2 && hash1 != -1 && hash2 != -1) {\n                return (equals == Py_NE);\n            }\n#endif\n            result = memcmp(ps1, ps2, (size_t)length);\n            return (equals == Py_EQ) ? 
(result == 0) : (result != 0);\n        }\n    } else if ((s1 == Py_None) & PyBytes_CheckExact(s2)) {\n        return (equals == Py_NE);\n    } else if ((s2 == Py_None) & PyBytes_CheckExact(s1)) {\n        return (equals == Py_NE);\n    } else {\n        int result;\n        PyObject* py_result = PyObject_RichCompare(s1, s2, equals);\n        if (!py_result)\n            return -1;\n        result = __Pyx_PyObject_IsTrue(py_result);\n        Py_DECREF(py_result);\n        return result;\n    }\n#endif\n}\n\n/* UnicodeEquals */\nstatic CYTHON_INLINE int __Pyx_PyUnicode_Equals(PyObject* s1, PyObject* s2, int equals) {\n#if CYTHON_COMPILING_IN_PYPY\n    return PyObject_RichCompareBool(s1, s2, equals);\n#else\n#if PY_MAJOR_VERSION < 3\n    PyObject* owned_ref = NULL;\n#endif\n    int s1_is_unicode, s2_is_unicode;\n    if (s1 == s2) {\n        goto return_eq;\n    }\n    s1_is_unicode = PyUnicode_CheckExact(s1);\n    s2_is_unicode = PyUnicode_CheckExact(s2);\n#if PY_MAJOR_VERSION < 3\n    if ((s1_is_unicode & (!s2_is_unicode)) && PyString_CheckExact(s2)) {\n        owned_ref = PyUnicode_FromObject(s2);\n        if (unlikely(!owned_ref))\n            return -1;\n        s2 = owned_ref;\n        s2_is_unicode = 1;\n    } else if ((s2_is_unicode & (!s1_is_unicode)) && PyString_CheckExact(s1)) {\n        owned_ref = PyUnicode_FromObject(s1);\n        if (unlikely(!owned_ref))\n            return -1;\n        s1 = owned_ref;\n        s1_is_unicode = 1;\n    } else if (((!s2_is_unicode) & (!s1_is_unicode))) {\n        return __Pyx_PyBytes_Equals(s1, s2, equals);\n    }\n#endif\n    if (s1_is_unicode & s2_is_unicode) {\n        Py_ssize_t length;\n        int kind;\n        void *data1, *data2;\n        if (unlikely(__Pyx_PyUnicode_READY(s1) < 0) || unlikely(__Pyx_PyUnicode_READY(s2) < 0))\n            return -1;\n        length = __Pyx_PyUnicode_GET_LENGTH(s1);\n        if (length != __Pyx_PyUnicode_GET_LENGTH(s2)) {\n            goto return_ne;\n        }\n#if 
CYTHON_USE_UNICODE_INTERNALS\n        {\n            Py_hash_t hash1, hash2;\n        #if CYTHON_PEP393_ENABLED\n            hash1 = ((PyASCIIObject*)s1)->hash;\n            hash2 = ((PyASCIIObject*)s2)->hash;\n        #else\n            hash1 = ((PyUnicodeObject*)s1)->hash;\n            hash2 = ((PyUnicodeObject*)s2)->hash;\n        #endif\n            if (hash1 != hash2 && hash1 != -1 && hash2 != -1) {\n                goto return_ne;\n            }\n        }\n#endif\n        kind = __Pyx_PyUnicode_KIND(s1);\n        if (kind != __Pyx_PyUnicode_KIND(s2)) {\n            goto return_ne;\n        }\n        data1 = __Pyx_PyUnicode_DATA(s1);\n        data2 = __Pyx_PyUnicode_DATA(s2);\n        if (__Pyx_PyUnicode_READ(kind, data1, 0) != __Pyx_PyUnicode_READ(kind, data2, 0)) {\n            goto return_ne;\n        } else if (length == 1) {\n            goto return_eq;\n        } else {\n            int result = memcmp(data1, data2, (size_t)(length * kind));\n            #if PY_MAJOR_VERSION < 3\n            Py_XDECREF(owned_ref);\n            #endif\n            return (equals == Py_EQ) ? 
(result == 0) : (result != 0);\n        }\n    } else if ((s1 == Py_None) & s2_is_unicode) {\n        goto return_ne;\n    } else if ((s2 == Py_None) & s1_is_unicode) {\n        goto return_ne;\n    } else {\n        int result;\n        PyObject* py_result = PyObject_RichCompare(s1, s2, equals);\n        #if PY_MAJOR_VERSION < 3\n        Py_XDECREF(owned_ref);\n        #endif\n        if (!py_result)\n            return -1;\n        result = __Pyx_PyObject_IsTrue(py_result);\n        Py_DECREF(py_result);\n        return result;\n    }\nreturn_eq:\n    #if PY_MAJOR_VERSION < 3\n    Py_XDECREF(owned_ref);\n    #endif\n    return (equals == Py_EQ);\nreturn_ne:\n    #if PY_MAJOR_VERSION < 3\n    Py_XDECREF(owned_ref);\n    #endif\n    return (equals == Py_NE);\n#endif\n}\n\n/* PyCFunctionFastCall */\n#if CYTHON_FAST_PYCCALL\nstatic CYTHON_INLINE PyObject * __Pyx_PyCFunction_FastCall(PyObject *func_obj, PyObject **args, Py_ssize_t nargs) {\n    PyCFunctionObject *func = (PyCFunctionObject*)func_obj;\n    PyCFunction meth = PyCFunction_GET_FUNCTION(func);\n    PyObject *self = PyCFunction_GET_SELF(func);\n    int flags = PyCFunction_GET_FLAGS(func);\n    assert(PyCFunction_Check(func));\n    assert(METH_FASTCALL == (flags & ~(METH_CLASS | METH_STATIC | METH_COEXIST | METH_KEYWORDS | METH_STACKLESS)));\n    assert(nargs >= 0);\n    assert(nargs == 0 || args != NULL);\n    /* _PyCFunction_FastCallDict() must not be called with an exception set,\n       because it may clear it (directly or indirectly) and so the\n       caller loses its exception */\n    assert(!PyErr_Occurred());\n    if ((PY_VERSION_HEX < 0x030700A0) || unlikely(flags & METH_KEYWORDS)) {\n        return (*((__Pyx_PyCFunctionFastWithKeywords)(void*)meth)) (self, args, nargs, NULL);\n    } else {\n        return (*((__Pyx_PyCFunctionFast)(void*)meth)) (self, args, nargs);\n    }\n}\n#endif\n\n/* PyFunctionFastCall */\n#if CYTHON_FAST_PYCALL\nstatic PyObject* __Pyx_PyFunction_FastCallNoKw(PyCodeObject *co, 
PyObject **args, Py_ssize_t na,\n                                               PyObject *globals) {\n    PyFrameObject *f;\n    PyThreadState *tstate = __Pyx_PyThreadState_Current;\n    PyObject **fastlocals;\n    Py_ssize_t i;\n    PyObject *result;\n    assert(globals != NULL);\n    /* XXX Perhaps we should create a specialized\n       PyFrame_New() that doesn't take locals, but does\n       take builtins without sanity checking them.\n       */\n    assert(tstate != NULL);\n    f = PyFrame_New(tstate, co, globals, NULL);\n    if (f == NULL) {\n        return NULL;\n    }\n    fastlocals = __Pyx_PyFrame_GetLocalsplus(f);\n    for (i = 0; i < na; i++) {\n        Py_INCREF(*args);\n        fastlocals[i] = *args++;\n    }\n    result = PyEval_EvalFrameEx(f,0);\n    ++tstate->recursion_depth;\n    Py_DECREF(f);\n    --tstate->recursion_depth;\n    return result;\n}\n#if 1 || PY_VERSION_HEX < 0x030600B1\nstatic PyObject *__Pyx_PyFunction_FastCallDict(PyObject *func, PyObject **args, Py_ssize_t nargs, PyObject *kwargs) {\n    PyCodeObject *co = (PyCodeObject *)PyFunction_GET_CODE(func);\n    PyObject *globals = PyFunction_GET_GLOBALS(func);\n    PyObject *argdefs = PyFunction_GET_DEFAULTS(func);\n    PyObject *closure;\n#if PY_MAJOR_VERSION >= 3\n    PyObject *kwdefs;\n#endif\n    PyObject *kwtuple, **k;\n    PyObject **d;\n    Py_ssize_t nd;\n    Py_ssize_t nk;\n    PyObject *result;\n    assert(kwargs == NULL || PyDict_Check(kwargs));\n    nk = kwargs ? 
PyDict_Size(kwargs) : 0;\n    if (Py_EnterRecursiveCall((char*)\" while calling a Python object\")) {\n        return NULL;\n    }\n    if (\n#if PY_MAJOR_VERSION >= 3\n            co->co_kwonlyargcount == 0 &&\n#endif\n            likely(kwargs == NULL || nk == 0) &&\n            co->co_flags == (CO_OPTIMIZED | CO_NEWLOCALS | CO_NOFREE)) {\n        if (argdefs == NULL && co->co_argcount == nargs) {\n            result = __Pyx_PyFunction_FastCallNoKw(co, args, nargs, globals);\n            goto done;\n        }\n        else if (nargs == 0 && argdefs != NULL\n                 && co->co_argcount == Py_SIZE(argdefs)) {\n            /* function called with no arguments, but all parameters have\n               a default value: use default values as arguments .*/\n            args = &PyTuple_GET_ITEM(argdefs, 0);\n            result =__Pyx_PyFunction_FastCallNoKw(co, args, Py_SIZE(argdefs), globals);\n            goto done;\n        }\n    }\n    if (kwargs != NULL) {\n        Py_ssize_t pos, i;\n        kwtuple = PyTuple_New(2 * nk);\n        if (kwtuple == NULL) {\n            result = NULL;\n            goto done;\n        }\n        k = &PyTuple_GET_ITEM(kwtuple, 0);\n        pos = i = 0;\n        while (PyDict_Next(kwargs, &pos, &k[i], &k[i+1])) {\n            Py_INCREF(k[i]);\n            Py_INCREF(k[i+1]);\n            i += 2;\n        }\n        nk = i / 2;\n    }\n    else {\n        kwtuple = NULL;\n        k = NULL;\n    }\n    closure = PyFunction_GET_CLOSURE(func);\n#if PY_MAJOR_VERSION >= 3\n    kwdefs = PyFunction_GET_KW_DEFAULTS(func);\n#endif\n    if (argdefs != NULL) {\n        d = &PyTuple_GET_ITEM(argdefs, 0);\n        nd = Py_SIZE(argdefs);\n    }\n    else {\n        d = NULL;\n        nd = 0;\n    }\n#if PY_MAJOR_VERSION >= 3\n    result = PyEval_EvalCodeEx((PyObject*)co, globals, (PyObject *)NULL,\n                               args, (int)nargs,\n                               k, (int)nk,\n                               d, (int)nd, kwdefs, 
closure);\n#else\n    result = PyEval_EvalCodeEx(co, globals, (PyObject *)NULL,\n                               args, (int)nargs,\n                               k, (int)nk,\n                               d, (int)nd, closure);\n#endif\n    Py_XDECREF(kwtuple);\ndone:\n    Py_LeaveRecursiveCall();\n    return result;\n}\n#endif\n#endif\n\n/* PyObjectCall */\n#if CYTHON_COMPILING_IN_CPYTHON\nstatic CYTHON_INLINE PyObject* __Pyx_PyObject_Call(PyObject *func, PyObject *arg, PyObject *kw) {\n    PyObject *result;\n    ternaryfunc call = func->ob_type->tp_call;\n    if (unlikely(!call))\n        return PyObject_Call(func, arg, kw);\n    if (unlikely(Py_EnterRecursiveCall((char*)\" while calling a Python object\")))\n        return NULL;\n    result = (*call)(func, arg, kw);\n    Py_LeaveRecursiveCall();\n    if (unlikely(!result) && unlikely(!PyErr_Occurred())) {\n        PyErr_SetString(\n            PyExc_SystemError,\n            \"NULL result without error in PyObject_Call\");\n    }\n    return result;\n}\n#endif\n\n/* PyObjectCallMethO */\n#if CYTHON_COMPILING_IN_CPYTHON\nstatic CYTHON_INLINE PyObject* __Pyx_PyObject_CallMethO(PyObject *func, PyObject *arg) {\n    PyObject *self, *result;\n    PyCFunction cfunc;\n    cfunc = PyCFunction_GET_FUNCTION(func);\n    self = PyCFunction_GET_SELF(func);\n    if (unlikely(Py_EnterRecursiveCall((char*)\" while calling a Python object\")))\n        return NULL;\n    result = cfunc(self, arg);\n    Py_LeaveRecursiveCall();\n    if (unlikely(!result) && unlikely(!PyErr_Occurred())) {\n        PyErr_SetString(\n            PyExc_SystemError,\n            \"NULL result without error in PyObject_Call\");\n    }\n    return result;\n}\n#endif\n\n/* PyObjectCallOneArg */\n#if CYTHON_COMPILING_IN_CPYTHON\nstatic PyObject* __Pyx__PyObject_CallOneArg(PyObject *func, PyObject *arg) {\n    PyObject *result;\n    PyObject *args = PyTuple_New(1);\n    if (unlikely(!args)) return NULL;\n    Py_INCREF(arg);\n    PyTuple_SET_ITEM(args, 0, 
arg);\n    result = __Pyx_PyObject_Call(func, args, NULL);\n    Py_DECREF(args);\n    return result;\n}\nstatic CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) {\n#if CYTHON_FAST_PYCALL\n    if (PyFunction_Check(func)) {\n        return __Pyx_PyFunction_FastCall(func, &arg, 1);\n    }\n#endif\n    if (likely(PyCFunction_Check(func))) {\n        if (likely(PyCFunction_GET_FLAGS(func) & METH_O)) {\n            return __Pyx_PyObject_CallMethO(func, arg);\n#if CYTHON_FAST_PYCCALL\n        } else if (PyCFunction_GET_FLAGS(func) & METH_FASTCALL) {\n            return __Pyx_PyCFunction_FastCall(func, &arg, 1);\n#endif\n        }\n    }\n    return __Pyx__PyObject_CallOneArg(func, arg);\n}\n#else\nstatic CYTHON_INLINE PyObject* __Pyx_PyObject_CallOneArg(PyObject *func, PyObject *arg) {\n    PyObject *result;\n    PyObject *args = PyTuple_Pack(1, arg);\n    if (unlikely(!args)) return NULL;\n    result = __Pyx_PyObject_Call(func, args, NULL);\n    Py_DECREF(args);\n    return result;\n}\n#endif\n\n/* PyErrFetchRestore */\n#if CYTHON_FAST_THREAD_STATE\nstatic CYTHON_INLINE void __Pyx_ErrRestoreInState(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) {\n    PyObject *tmp_type, *tmp_value, *tmp_tb;\n    tmp_type = tstate->curexc_type;\n    tmp_value = tstate->curexc_value;\n    tmp_tb = tstate->curexc_traceback;\n    tstate->curexc_type = type;\n    tstate->curexc_value = value;\n    tstate->curexc_traceback = tb;\n    Py_XDECREF(tmp_type);\n    Py_XDECREF(tmp_value);\n    Py_XDECREF(tmp_tb);\n}\nstatic CYTHON_INLINE void __Pyx_ErrFetchInState(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) {\n    *type = tstate->curexc_type;\n    *value = tstate->curexc_value;\n    *tb = tstate->curexc_traceback;\n    tstate->curexc_type = 0;\n    tstate->curexc_value = 0;\n    tstate->curexc_traceback = 0;\n}\n#endif\n\n/* RaiseException */\n#if PY_MAJOR_VERSION < 3\nstatic void __Pyx_Raise(PyObject *type, 
PyObject *value, PyObject *tb,\n                        CYTHON_UNUSED PyObject *cause) {\n    __Pyx_PyThreadState_declare\n    Py_XINCREF(type);\n    if (!value || value == Py_None)\n        value = NULL;\n    else\n        Py_INCREF(value);\n    if (!tb || tb == Py_None)\n        tb = NULL;\n    else {\n        Py_INCREF(tb);\n        if (!PyTraceBack_Check(tb)) {\n            PyErr_SetString(PyExc_TypeError,\n                \"raise: arg 3 must be a traceback or None\");\n            goto raise_error;\n        }\n    }\n    if (PyType_Check(type)) {\n#if CYTHON_COMPILING_IN_PYPY\n        if (!value) {\n            Py_INCREF(Py_None);\n            value = Py_None;\n        }\n#endif\n        PyErr_NormalizeException(&type, &value, &tb);\n    } else {\n        if (value) {\n            PyErr_SetString(PyExc_TypeError,\n                \"instance exception may not have a separate value\");\n            goto raise_error;\n        }\n        value = type;\n        type = (PyObject*) Py_TYPE(type);\n        Py_INCREF(type);\n        if (!PyType_IsSubtype((PyTypeObject *)type, (PyTypeObject *)PyExc_BaseException)) {\n            PyErr_SetString(PyExc_TypeError,\n                \"raise: exception class must be a subclass of BaseException\");\n            goto raise_error;\n        }\n    }\n    __Pyx_PyThreadState_assign\n    __Pyx_ErrRestore(type, value, tb);\n    return;\nraise_error:\n    Py_XDECREF(value);\n    Py_XDECREF(type);\n    Py_XDECREF(tb);\n    return;\n}\n#else\nstatic void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause) {\n    PyObject* owned_instance = NULL;\n    if (tb == Py_None) {\n        tb = 0;\n    } else if (tb && !PyTraceBack_Check(tb)) {\n        PyErr_SetString(PyExc_TypeError,\n            \"raise: arg 3 must be a traceback or None\");\n        goto bad;\n    }\n    if (value == Py_None)\n        value = 0;\n    if (PyExceptionInstance_Check(type)) {\n        if (value) {\n            
PyErr_SetString(PyExc_TypeError,\n                \"instance exception may not have a separate value\");\n            goto bad;\n        }\n        value = type;\n        type = (PyObject*) Py_TYPE(value);\n    } else if (PyExceptionClass_Check(type)) {\n        PyObject *instance_class = NULL;\n        if (value && PyExceptionInstance_Check(value)) {\n            instance_class = (PyObject*) Py_TYPE(value);\n            if (instance_class != type) {\n                int is_subclass = PyObject_IsSubclass(instance_class, type);\n                if (!is_subclass) {\n                    instance_class = NULL;\n                } else if (unlikely(is_subclass == -1)) {\n                    goto bad;\n                } else {\n                    type = instance_class;\n                }\n            }\n        }\n        if (!instance_class) {\n            PyObject *args;\n            if (!value)\n                args = PyTuple_New(0);\n            else if (PyTuple_Check(value)) {\n                Py_INCREF(value);\n                args = value;\n            } else\n                args = PyTuple_Pack(1, value);\n            if (!args)\n                goto bad;\n            owned_instance = PyObject_Call(type, args, NULL);\n            Py_DECREF(args);\n            if (!owned_instance)\n                goto bad;\n            value = owned_instance;\n            if (!PyExceptionInstance_Check(value)) {\n                PyErr_Format(PyExc_TypeError,\n                             \"calling %R should have returned an instance of \"\n                             \"BaseException, not %R\",\n                             type, Py_TYPE(value));\n                goto bad;\n            }\n        }\n    } else {\n        PyErr_SetString(PyExc_TypeError,\n            \"raise: exception class must be a subclass of BaseException\");\n        goto bad;\n    }\n    if (cause) {\n        PyObject *fixed_cause;\n        if (cause == Py_None) {\n            fixed_cause = NULL;\n        } 
else if (PyExceptionClass_Check(cause)) {\n            fixed_cause = PyObject_CallObject(cause, NULL);\n            if (fixed_cause == NULL)\n                goto bad;\n        } else if (PyExceptionInstance_Check(cause)) {\n            fixed_cause = cause;\n            Py_INCREF(fixed_cause);\n        } else {\n            PyErr_SetString(PyExc_TypeError,\n                            \"exception causes must derive from \"\n                            \"BaseException\");\n            goto bad;\n        }\n        PyException_SetCause(value, fixed_cause);\n    }\n    PyErr_SetObject(type, value);\n    if (tb) {\n#if CYTHON_COMPILING_IN_PYPY\n        PyObject *tmp_type, *tmp_value, *tmp_tb;\n        PyErr_Fetch(&tmp_type, &tmp_value, &tmp_tb);\n        Py_INCREF(tb);\n        PyErr_Restore(tmp_type, tmp_value, tb);\n        Py_XDECREF(tmp_tb);\n#else\n        PyThreadState *tstate = __Pyx_PyThreadState_Current;\n        PyObject* tmp_tb = tstate->curexc_traceback;\n        if (tb != tmp_tb) {\n            Py_INCREF(tb);\n            tstate->curexc_traceback = tb;\n            Py_XDECREF(tmp_tb);\n        }\n#endif\n    }\nbad:\n    Py_XDECREF(owned_instance);\n    return;\n}\n#endif\n\n/* ExtTypeTest */\nstatic CYTHON_INLINE int __Pyx_TypeTest(PyObject *obj, PyTypeObject *type) {\n    if (unlikely(!type)) {\n        PyErr_SetString(PyExc_SystemError, \"Missing type object\");\n        return 0;\n    }\n    if (likely(__Pyx_TypeCheck(obj, type)))\n        return 1;\n    PyErr_Format(PyExc_TypeError, \"Cannot convert %.200s to %.200s\",\n                 Py_TYPE(obj)->tp_name, type->tp_name);\n    return 0;\n}\n\n/* ArgTypeTest */\nstatic int __Pyx__ArgTypeTest(PyObject *obj, PyTypeObject *type, const char *name, int exact)\n{\n    if (unlikely(!type)) {\n        PyErr_SetString(PyExc_SystemError, \"Missing type object\");\n        return 0;\n    }\n    else if (exact) {\n        #if PY_MAJOR_VERSION == 2\n        if ((type == &PyBaseString_Type) && 
likely(__Pyx_PyBaseString_CheckExact(obj))) return 1;\n        #endif\n    }\n    else {\n        if (likely(__Pyx_TypeCheck(obj, type))) return 1;\n    }\n    PyErr_Format(PyExc_TypeError,\n        \"Argument '%.200s' has incorrect type (expected %.200s, got %.200s)\",\n        name, type->tp_name, Py_TYPE(obj)->tp_name);\n    return 0;\n}\n\n/* PyIntBinop */\n#if !CYTHON_COMPILING_IN_PYPY\nstatic PyObject* __Pyx_PyInt_AddObjC(PyObject *op1, PyObject *op2, CYTHON_UNUSED long intval, int inplace, int zerodivision_check) {\n    (void)inplace;\n    (void)zerodivision_check;\n    #if PY_MAJOR_VERSION < 3\n    if (likely(PyInt_CheckExact(op1))) {\n        const long b = intval;\n        long x;\n        long a = PyInt_AS_LONG(op1);\n            x = (long)((unsigned long)a + b);\n            if (likely((x^a) >= 0 || (x^b) >= 0))\n                return PyInt_FromLong(x);\n            return PyLong_Type.tp_as_number->nb_add(op1, op2);\n    }\n    #endif\n    #if CYTHON_USE_PYLONG_INTERNALS\n    if (likely(PyLong_CheckExact(op1))) {\n        const long b = intval;\n        long a, x;\n#ifdef HAVE_LONG_LONG\n        const PY_LONG_LONG llb = intval;\n        PY_LONG_LONG lla, llx;\n#endif\n        const digit* digits = ((PyLongObject*)op1)->ob_digit;\n        const Py_ssize_t size = Py_SIZE(op1);\n        if (likely(__Pyx_sst_abs(size) <= 1)) {\n            a = likely(size) ? 
digits[0] : 0;\n            if (size == -1) a = -a;\n        } else {\n            switch (size) {\n                case -2:\n                    if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) {\n                        a = -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]));\n                        break;\n#ifdef HAVE_LONG_LONG\n                    } else if (8 * sizeof(PY_LONG_LONG) - 1 > 2 * PyLong_SHIFT) {\n                        lla = -(PY_LONG_LONG) (((((unsigned PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[0]));\n                        goto long_long;\n#endif\n                    }\n                    CYTHON_FALLTHROUGH;\n                case 2:\n                    if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) {\n                        a = (long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]));\n                        break;\n#ifdef HAVE_LONG_LONG\n                    } else if (8 * sizeof(PY_LONG_LONG) - 1 > 2 * PyLong_SHIFT) {\n                        lla = (PY_LONG_LONG) (((((unsigned PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[0]));\n                        goto long_long;\n#endif\n                    }\n                    CYTHON_FALLTHROUGH;\n                case -3:\n                    if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) {\n                        a = -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]));\n                        break;\n#ifdef HAVE_LONG_LONG\n                    } else if (8 * sizeof(PY_LONG_LONG) - 1 > 3 * PyLong_SHIFT) {\n                        lla = -(PY_LONG_LONG) (((((((unsigned PY_LONG_LONG)digits[2]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[0]));\n                        goto long_long;\n#endif\n                    }\n                    CYTHON_FALLTHROUGH;\n    
            case 3:\n                    if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) {\n                        a = (long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]));\n                        break;\n#ifdef HAVE_LONG_LONG\n                    } else if (8 * sizeof(PY_LONG_LONG) - 1 > 3 * PyLong_SHIFT) {\n                        lla = (PY_LONG_LONG) (((((((unsigned PY_LONG_LONG)digits[2]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[0]));\n                        goto long_long;\n#endif\n                    }\n                    CYTHON_FALLTHROUGH;\n                case -4:\n                    if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) {\n                        a = -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]));\n                        break;\n#ifdef HAVE_LONG_LONG\n                    } else if (8 * sizeof(PY_LONG_LONG) - 1 > 4 * PyLong_SHIFT) {\n                        lla = -(PY_LONG_LONG) (((((((((unsigned PY_LONG_LONG)digits[3]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[2]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[0]));\n                        goto long_long;\n#endif\n                    }\n                    CYTHON_FALLTHROUGH;\n                case 4:\n                    if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) {\n                        a = (long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0]));\n                        break;\n#ifdef HAVE_LONG_LONG\n                    } else if (8 * sizeof(PY_LONG_LONG) - 1 > 4 * PyLong_SHIFT) {\n                        lla = (PY_LONG_LONG) (((((((((unsigned 
PY_LONG_LONG)digits[3]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[2]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[1]) << PyLong_SHIFT) | (unsigned PY_LONG_LONG)digits[0]));\n                        goto long_long;\n#endif\n                    }\n                    CYTHON_FALLTHROUGH;\n                default: return PyLong_Type.tp_as_number->nb_add(op1, op2);\n            }\n        }\n                x = a + b;\n            return PyLong_FromLong(x);\n#ifdef HAVE_LONG_LONG\n        long_long:\n                llx = lla + llb;\n            return PyLong_FromLongLong(llx);\n#endif\n        \n        \n    }\n    #endif\n    if (PyFloat_CheckExact(op1)) {\n        const long b = intval;\n        double a = PyFloat_AS_DOUBLE(op1);\n            double result;\n            PyFPE_START_PROTECT(\"add\", return NULL)\n            result = ((double)a) + (double)b;\n            PyFPE_END_PROTECT(result)\n            return PyFloat_FromDouble(result);\n    }\n    return (inplace ? PyNumber_InPlaceAdd : PyNumber_Add)(op1, op2);\n}\n#endif\n\n/* PyIntCompare */\nstatic CYTHON_INLINE PyObject* __Pyx_PyInt_EqObjC(PyObject *op1, PyObject *op2, CYTHON_UNUSED long intval, CYTHON_UNUSED long inplace) {\n    if (op1 == op2) {\n        Py_RETURN_TRUE;\n    }\n    #if PY_MAJOR_VERSION < 3\n    if (likely(PyInt_CheckExact(op1))) {\n        const long b = intval;\n        long a = PyInt_AS_LONG(op1);\n        if (a == b) Py_RETURN_TRUE; else Py_RETURN_FALSE;\n    }\n    #endif\n    #if CYTHON_USE_PYLONG_INTERNALS\n    if (likely(PyLong_CheckExact(op1))) {\n        int unequal;\n        unsigned long uintval;\n        Py_ssize_t size = Py_SIZE(op1);\n        const digit* digits = ((PyLongObject*)op1)->ob_digit;\n        if (intval == 0) {\n            if (size == 0) Py_RETURN_TRUE; else Py_RETURN_FALSE;\n        } else if (intval < 0) {\n            if (size >= 0)\n                Py_RETURN_FALSE;\n            intval = -intval;\n            size = -size;\n        } else {\n    
        if (size <= 0)\n                Py_RETURN_FALSE;\n        }\n        uintval = (unsigned long) intval;\n#if PyLong_SHIFT * 4 < SIZEOF_LONG*8\n        if (uintval >> (PyLong_SHIFT * 4)) {\n            unequal = (size != 5) || (digits[0] != (uintval & (unsigned long) PyLong_MASK))\n                 | (digits[1] != ((uintval >> (1 * PyLong_SHIFT)) & (unsigned long) PyLong_MASK)) | (digits[2] != ((uintval >> (2 * PyLong_SHIFT)) & (unsigned long) PyLong_MASK)) | (digits[3] != ((uintval >> (3 * PyLong_SHIFT)) & (unsigned long) PyLong_MASK)) | (digits[4] != ((uintval >> (4 * PyLong_SHIFT)) & (unsigned long) PyLong_MASK));\n        } else\n#endif\n#if PyLong_SHIFT * 3 < SIZEOF_LONG*8\n        if (uintval >> (PyLong_SHIFT * 3)) {\n            unequal = (size != 4) || (digits[0] != (uintval & (unsigned long) PyLong_MASK))\n                 | (digits[1] != ((uintval >> (1 * PyLong_SHIFT)) & (unsigned long) PyLong_MASK)) | (digits[2] != ((uintval >> (2 * PyLong_SHIFT)) & (unsigned long) PyLong_MASK)) | (digits[3] != ((uintval >> (3 * PyLong_SHIFT)) & (unsigned long) PyLong_MASK));\n        } else\n#endif\n#if PyLong_SHIFT * 2 < SIZEOF_LONG*8\n        if (uintval >> (PyLong_SHIFT * 2)) {\n            unequal = (size != 3) || (digits[0] != (uintval & (unsigned long) PyLong_MASK))\n                 | (digits[1] != ((uintval >> (1 * PyLong_SHIFT)) & (unsigned long) PyLong_MASK)) | (digits[2] != ((uintval >> (2 * PyLong_SHIFT)) & (unsigned long) PyLong_MASK));\n        } else\n#endif\n#if PyLong_SHIFT * 1 < SIZEOF_LONG*8\n        if (uintval >> (PyLong_SHIFT * 1)) {\n            unequal = (size != 2) || (digits[0] != (uintval & (unsigned long) PyLong_MASK))\n                 | (digits[1] != ((uintval >> (1 * PyLong_SHIFT)) & (unsigned long) PyLong_MASK));\n        } else\n#endif\n            unequal = (size != 1) || (((unsigned long) digits[0]) != (uintval & (unsigned long) PyLong_MASK));\n        if (unequal == 0) Py_RETURN_TRUE; else Py_RETURN_FALSE;\n    }\n    #endif\n  
  if (PyFloat_CheckExact(op1)) {\n        const long b = intval;\n        double a = PyFloat_AS_DOUBLE(op1);\n        if ((double)a == (double)b) Py_RETURN_TRUE; else Py_RETURN_FALSE;\n    }\n    return (\n        PyObject_RichCompare(op1, op2, Py_EQ));\n}\n\n/* PyDictVersioning */\n#if CYTHON_USE_DICT_VERSIONS && CYTHON_USE_TYPE_SLOTS\nstatic CYTHON_INLINE PY_UINT64_T __Pyx_get_tp_dict_version(PyObject *obj) {\n    PyObject *dict = Py_TYPE(obj)->tp_dict;\n    return likely(dict) ? __PYX_GET_DICT_VERSION(dict) : 0;\n}\nstatic CYTHON_INLINE PY_UINT64_T __Pyx_get_object_dict_version(PyObject *obj) {\n    PyObject **dictptr = NULL;\n    Py_ssize_t offset = Py_TYPE(obj)->tp_dictoffset;\n    if (offset) {\n#if CYTHON_COMPILING_IN_CPYTHON\n        dictptr = (likely(offset > 0)) ? (PyObject **) ((char *)obj + offset) : _PyObject_GetDictPtr(obj);\n#else\n        dictptr = _PyObject_GetDictPtr(obj);\n#endif\n    }\n    return (dictptr && *dictptr) ? __PYX_GET_DICT_VERSION(*dictptr) : 0;\n}\nstatic CYTHON_INLINE int __Pyx_object_dict_version_matches(PyObject* obj, PY_UINT64_T tp_dict_version, PY_UINT64_T obj_dict_version) {\n    PyObject *dict = Py_TYPE(obj)->tp_dict;\n    if (unlikely(!dict) || unlikely(tp_dict_version != __PYX_GET_DICT_VERSION(dict)))\n        return 0;\n    return obj_dict_version == __Pyx_get_object_dict_version(obj);\n}\n#endif\n\n/* GetModuleGlobalName */\n#if CYTHON_USE_DICT_VERSIONS\nstatic PyObject *__Pyx__GetModuleGlobalName(PyObject *name, PY_UINT64_T *dict_version, PyObject **dict_cached_value)\n#else\nstatic CYTHON_INLINE PyObject *__Pyx__GetModuleGlobalName(PyObject *name)\n#endif\n{\n    PyObject *result;\n#if !CYTHON_AVOID_BORROWED_REFS\n#if CYTHON_COMPILING_IN_CPYTHON && PY_VERSION_HEX >= 0x030500A1\n    result = _PyDict_GetItem_KnownHash(__pyx_d, name, ((PyASCIIObject *) name)->hash);\n    __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version)\n    if (likely(result)) {\n        return __Pyx_NewRef(result);\n    } else 
if (unlikely(PyErr_Occurred())) {\n        return NULL;\n    }\n#else\n    result = PyDict_GetItem(__pyx_d, name);\n    __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version)\n    if (likely(result)) {\n        return __Pyx_NewRef(result);\n    }\n#endif\n#else\n    result = PyObject_GetItem(__pyx_d, name);\n    __PYX_UPDATE_DICT_CACHE(__pyx_d, result, *dict_cached_value, *dict_version)\n    if (likely(result)) {\n        return __Pyx_NewRef(result);\n    }\n    PyErr_Clear();\n#endif\n    return __Pyx_GetBuiltinName(name);\n}\n\n/* DictGetItem */\n#if PY_MAJOR_VERSION >= 3 && !CYTHON_COMPILING_IN_PYPY\nstatic PyObject *__Pyx_PyDict_GetItem(PyObject *d, PyObject* key) {\n    PyObject *value;\n    value = PyDict_GetItemWithError(d, key);\n    if (unlikely(!value)) {\n        if (!PyErr_Occurred()) {\n            if (unlikely(PyTuple_Check(key))) {\n                PyObject* args = PyTuple_Pack(1, key);\n                if (likely(args)) {\n                    PyErr_SetObject(PyExc_KeyError, args);\n                    Py_DECREF(args);\n                }\n            } else {\n                PyErr_SetObject(PyExc_KeyError, key);\n            }\n        }\n        return NULL;\n    }\n    Py_INCREF(value);\n    return value;\n}\n#endif\n\n/* PyObjectCall2Args */\nstatic CYTHON_UNUSED PyObject* __Pyx_PyObject_Call2Args(PyObject* function, PyObject* arg1, PyObject* arg2) {\n    PyObject *args, *result = NULL;\n    #if CYTHON_FAST_PYCALL\n    if (PyFunction_Check(function)) {\n        PyObject *args[2] = {arg1, arg2};\n        return __Pyx_PyFunction_FastCall(function, args, 2);\n    }\n    #endif\n    #if CYTHON_FAST_PYCCALL\n    if (__Pyx_PyFastCFunction_Check(function)) {\n        PyObject *args[2] = {arg1, arg2};\n        return __Pyx_PyCFunction_FastCall(function, args, 2);\n    }\n    #endif\n    args = PyTuple_New(2);\n    if (unlikely(!args)) goto done;\n    Py_INCREF(arg1);\n    PyTuple_SET_ITEM(args, 0, arg1);\n    Py_INCREF(arg2);\n    
PyTuple_SET_ITEM(args, 1, arg2);\n    Py_INCREF(function);\n    result = __Pyx_PyObject_Call(function, args, NULL);\n    Py_DECREF(args);\n    Py_DECREF(function);\ndone:\n    return result;\n}\n\n/* GetItemInt */\nstatic PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j) {\n    PyObject *r;\n    if (!j) return NULL;\n    r = PyObject_GetItem(o, j);\n    Py_DECREF(j);\n    return r;\n}\nstatic CYTHON_INLINE PyObject *__Pyx_GetItemInt_List_Fast(PyObject *o, Py_ssize_t i,\n                                                              CYTHON_NCP_UNUSED int wraparound,\n                                                              CYTHON_NCP_UNUSED int boundscheck) {\n#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS\n    Py_ssize_t wrapped_i = i;\n    if (wraparound & unlikely(i < 0)) {\n        wrapped_i += PyList_GET_SIZE(o);\n    }\n    if ((!boundscheck) || likely(__Pyx_is_valid_index(wrapped_i, PyList_GET_SIZE(o)))) {\n        PyObject *r = PyList_GET_ITEM(o, wrapped_i);\n        Py_INCREF(r);\n        return r;\n    }\n    return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i));\n#else\n    return PySequence_GetItem(o, i);\n#endif\n}\nstatic CYTHON_INLINE PyObject *__Pyx_GetItemInt_Tuple_Fast(PyObject *o, Py_ssize_t i,\n                                                              CYTHON_NCP_UNUSED int wraparound,\n                                                              CYTHON_NCP_UNUSED int boundscheck) {\n#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS\n    Py_ssize_t wrapped_i = i;\n    if (wraparound & unlikely(i < 0)) {\n        wrapped_i += PyTuple_GET_SIZE(o);\n    }\n    if ((!boundscheck) || likely(__Pyx_is_valid_index(wrapped_i, PyTuple_GET_SIZE(o)))) {\n        PyObject *r = PyTuple_GET_ITEM(o, wrapped_i);\n        Py_INCREF(r);\n        return r;\n    }\n    return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i));\n#else\n    return PySequence_GetItem(o, i);\n#endif\n}\nstatic CYTHON_INLINE PyObject 
*__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i, int is_list,\n                                                     CYTHON_NCP_UNUSED int wraparound,\n                                                     CYTHON_NCP_UNUSED int boundscheck) {\n#if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS && CYTHON_USE_TYPE_SLOTS\n    if (is_list || PyList_CheckExact(o)) {\n        Py_ssize_t n = ((!wraparound) | likely(i >= 0)) ? i : i + PyList_GET_SIZE(o);\n        if ((!boundscheck) || (likely(__Pyx_is_valid_index(n, PyList_GET_SIZE(o))))) {\n            PyObject *r = PyList_GET_ITEM(o, n);\n            Py_INCREF(r);\n            return r;\n        }\n    }\n    else if (PyTuple_CheckExact(o)) {\n        Py_ssize_t n = ((!wraparound) | likely(i >= 0)) ? i : i + PyTuple_GET_SIZE(o);\n        if ((!boundscheck) || likely(__Pyx_is_valid_index(n, PyTuple_GET_SIZE(o)))) {\n            PyObject *r = PyTuple_GET_ITEM(o, n);\n            Py_INCREF(r);\n            return r;\n        }\n    } else {\n        PySequenceMethods *m = Py_TYPE(o)->tp_as_sequence;\n        if (likely(m && m->sq_item)) {\n            if (wraparound && unlikely(i < 0) && likely(m->sq_length)) {\n                Py_ssize_t l = m->sq_length(o);\n                if (likely(l >= 0)) {\n                    i += l;\n                } else {\n                    if (!PyErr_ExceptionMatches(PyExc_OverflowError))\n                        return NULL;\n                    PyErr_Clear();\n                }\n            }\n            return m->sq_item(o, i);\n        }\n    }\n#else\n    if (is_list || PySequence_Check(o)) {\n        return PySequence_GetItem(o, i);\n    }\n#endif\n    return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i));\n}\n\n/* IsLittleEndian */\nstatic CYTHON_INLINE int __Pyx_Is_Little_Endian(void)\n{\n  union {\n    uint32_t u32;\n    uint8_t u8[4];\n  } S;\n  S.u32 = 0x01020304;\n  return S.u8[0] == 4;\n}\n\n/* BufferFormatCheck */\nstatic void 
__Pyx_BufFmt_Init(__Pyx_BufFmt_Context* ctx,\n                              __Pyx_BufFmt_StackElem* stack,\n                              __Pyx_TypeInfo* type) {\n  stack[0].field = &ctx->root;\n  stack[0].parent_offset = 0;\n  ctx->root.type = type;\n  ctx->root.name = \"buffer dtype\";\n  ctx->root.offset = 0;\n  ctx->head = stack;\n  ctx->head->field = &ctx->root;\n  ctx->fmt_offset = 0;\n  ctx->head->parent_offset = 0;\n  ctx->new_packmode = '@';\n  ctx->enc_packmode = '@';\n  ctx->new_count = 1;\n  ctx->enc_count = 0;\n  ctx->enc_type = 0;\n  ctx->is_complex = 0;\n  ctx->is_valid_array = 0;\n  ctx->struct_alignment = 0;\n  while (type->typegroup == 'S') {\n    ++ctx->head;\n    ctx->head->field = type->fields;\n    ctx->head->parent_offset = 0;\n    type = type->fields->type;\n  }\n}\nstatic int __Pyx_BufFmt_ParseNumber(const char** ts) {\n    int count;\n    const char* t = *ts;\n    if (*t < '0' || *t > '9') {\n      return -1;\n    } else {\n        count = *t++ - '0';\n        while (*t >= '0' && *t <= '9') {\n            count *= 10;\n            count += *t++ - '0';\n        }\n    }\n    *ts = t;\n    return count;\n}\nstatic int __Pyx_BufFmt_ExpectNumber(const char **ts) {\n    int number = __Pyx_BufFmt_ParseNumber(ts);\n    if (number == -1)\n        PyErr_Format(PyExc_ValueError,\\\n                     \"Does not understand character buffer dtype format string ('%c')\", **ts);\n    return number;\n}\nstatic void __Pyx_BufFmt_RaiseUnexpectedChar(char ch) {\n  PyErr_Format(PyExc_ValueError,\n               \"Unexpected format string character: '%c'\", ch);\n}\nstatic const char* __Pyx_BufFmt_DescribeTypeChar(char ch, int is_complex) {\n  switch (ch) {\n    case 'c': return \"'char'\";\n    case 'b': return \"'signed char'\";\n    case 'B': return \"'unsigned char'\";\n    case 'h': return \"'short'\";\n    case 'H': return \"'unsigned short'\";\n    case 'i': return \"'int'\";\n    case 'I': return \"'unsigned int'\";\n    case 'l': return 
\"'long'\";\n    case 'L': return \"'unsigned long'\";\n    case 'q': return \"'long long'\";\n    case 'Q': return \"'unsigned long long'\";\n    case 'f': return (is_complex ? \"'complex float'\" : \"'float'\");\n    case 'd': return (is_complex ? \"'complex double'\" : \"'double'\");\n    case 'g': return (is_complex ? \"'complex long double'\" : \"'long double'\");\n    case 'T': return \"a struct\";\n    case 'O': return \"Python object\";\n    case 'P': return \"a pointer\";\n    case 's': case 'p': return \"a string\";\n    case 0: return \"end\";\n    default: return \"unparseable format string\";\n  }\n}\nstatic size_t __Pyx_BufFmt_TypeCharToStandardSize(char ch, int is_complex) {\n  switch (ch) {\n    case '?': case 'c': case 'b': case 'B': case 's': case 'p': return 1;\n    case 'h': case 'H': return 2;\n    case 'i': case 'I': case 'l': case 'L': return 4;\n    case 'q': case 'Q': return 8;\n    case 'f': return (is_complex ? 8 : 4);\n    case 'd': return (is_complex ? 16 : 8);\n    case 'g': {\n      PyErr_SetString(PyExc_ValueError, \"Python does not define a standard format string size for long double ('g')..\");\n      return 0;\n    }\n    case 'O': case 'P': return sizeof(void*);\n    default:\n      __Pyx_BufFmt_RaiseUnexpectedChar(ch);\n      return 0;\n    }\n}\nstatic size_t __Pyx_BufFmt_TypeCharToNativeSize(char ch, int is_complex) {\n  switch (ch) {\n    case 'c': case 'b': case 'B': case 's': case 'p': return 1;\n    case 'h': case 'H': return sizeof(short);\n    case 'i': case 'I': return sizeof(int);\n    case 'l': case 'L': return sizeof(long);\n    #ifdef HAVE_LONG_LONG\n    case 'q': case 'Q': return sizeof(PY_LONG_LONG);\n    #endif\n    case 'f': return sizeof(float) * (is_complex ? 2 : 1);\n    case 'd': return sizeof(double) * (is_complex ? 2 : 1);\n    case 'g': return sizeof(long double) * (is_complex ? 
2 : 1);\n    case 'O': case 'P': return sizeof(void*);\n    default: {\n      __Pyx_BufFmt_RaiseUnexpectedChar(ch);\n      return 0;\n    }\n  }\n}\ntypedef struct { char c; short x; } __Pyx_st_short;\ntypedef struct { char c; int x; } __Pyx_st_int;\ntypedef struct { char c; long x; } __Pyx_st_long;\ntypedef struct { char c; float x; } __Pyx_st_float;\ntypedef struct { char c; double x; } __Pyx_st_double;\ntypedef struct { char c; long double x; } __Pyx_st_longdouble;\ntypedef struct { char c; void *x; } __Pyx_st_void_p;\n#ifdef HAVE_LONG_LONG\ntypedef struct { char c; PY_LONG_LONG x; } __Pyx_st_longlong;\n#endif\nstatic size_t __Pyx_BufFmt_TypeCharToAlignment(char ch, CYTHON_UNUSED int is_complex) {\n  switch (ch) {\n    case '?': case 'c': case 'b': case 'B': case 's': case 'p': return 1;\n    case 'h': case 'H': return sizeof(__Pyx_st_short) - sizeof(short);\n    case 'i': case 'I': return sizeof(__Pyx_st_int) - sizeof(int);\n    case 'l': case 'L': return sizeof(__Pyx_st_long) - sizeof(long);\n#ifdef HAVE_LONG_LONG\n    case 'q': case 'Q': return sizeof(__Pyx_st_longlong) - sizeof(PY_LONG_LONG);\n#endif\n    case 'f': return sizeof(__Pyx_st_float) - sizeof(float);\n    case 'd': return sizeof(__Pyx_st_double) - sizeof(double);\n    case 'g': return sizeof(__Pyx_st_longdouble) - sizeof(long double);\n    case 'P': case 'O': return sizeof(__Pyx_st_void_p) - sizeof(void*);\n    default:\n      __Pyx_BufFmt_RaiseUnexpectedChar(ch);\n      return 0;\n    }\n}\n/* These are for computing the padding at the end of the struct to align\n   on the first member of the struct. 
This will probably the same as above,\n   but we don't have any guarantees.\n */\ntypedef struct { short x; char c; } __Pyx_pad_short;\ntypedef struct { int x; char c; } __Pyx_pad_int;\ntypedef struct { long x; char c; } __Pyx_pad_long;\ntypedef struct { float x; char c; } __Pyx_pad_float;\ntypedef struct { double x; char c; } __Pyx_pad_double;\ntypedef struct { long double x; char c; } __Pyx_pad_longdouble;\ntypedef struct { void *x; char c; } __Pyx_pad_void_p;\n#ifdef HAVE_LONG_LONG\ntypedef struct { PY_LONG_LONG x; char c; } __Pyx_pad_longlong;\n#endif\nstatic size_t __Pyx_BufFmt_TypeCharToPadding(char ch, CYTHON_UNUSED int is_complex) {\n  switch (ch) {\n    case '?': case 'c': case 'b': case 'B': case 's': case 'p': return 1;\n    case 'h': case 'H': return sizeof(__Pyx_pad_short) - sizeof(short);\n    case 'i': case 'I': return sizeof(__Pyx_pad_int) - sizeof(int);\n    case 'l': case 'L': return sizeof(__Pyx_pad_long) - sizeof(long);\n#ifdef HAVE_LONG_LONG\n    case 'q': case 'Q': return sizeof(__Pyx_pad_longlong) - sizeof(PY_LONG_LONG);\n#endif\n    case 'f': return sizeof(__Pyx_pad_float) - sizeof(float);\n    case 'd': return sizeof(__Pyx_pad_double) - sizeof(double);\n    case 'g': return sizeof(__Pyx_pad_longdouble) - sizeof(long double);\n    case 'P': case 'O': return sizeof(__Pyx_pad_void_p) - sizeof(void*);\n    default:\n      __Pyx_BufFmt_RaiseUnexpectedChar(ch);\n      return 0;\n    }\n}\nstatic char __Pyx_BufFmt_TypeCharToGroup(char ch, int is_complex) {\n  switch (ch) {\n    case 'c':\n        return 'H';\n    case 'b': case 'h': case 'i':\n    case 'l': case 'q': case 's': case 'p':\n        return 'I';\n    case 'B': case 'H': case 'I': case 'L': case 'Q':\n        return 'U';\n    case 'f': case 'd': case 'g':\n        return (is_complex ? 
'C' : 'R');\n    case 'O':\n        return 'O';\n    case 'P':\n        return 'P';\n    default: {\n      __Pyx_BufFmt_RaiseUnexpectedChar(ch);\n      return 0;\n    }\n  }\n}\nstatic void __Pyx_BufFmt_RaiseExpected(__Pyx_BufFmt_Context* ctx) {\n  if (ctx->head == NULL || ctx->head->field == &ctx->root) {\n    const char* expected;\n    const char* quote;\n    if (ctx->head == NULL) {\n      expected = \"end\";\n      quote = \"\";\n    } else {\n      expected = ctx->head->field->type->name;\n      quote = \"'\";\n    }\n    PyErr_Format(PyExc_ValueError,\n                 \"Buffer dtype mismatch, expected %s%s%s but got %s\",\n                 quote, expected, quote,\n                 __Pyx_BufFmt_DescribeTypeChar(ctx->enc_type, ctx->is_complex));\n  } else {\n    __Pyx_StructField* field = ctx->head->field;\n    __Pyx_StructField* parent = (ctx->head - 1)->field;\n    PyErr_Format(PyExc_ValueError,\n                 \"Buffer dtype mismatch, expected '%s' but got %s in '%s.%s'\",\n                 field->type->name, __Pyx_BufFmt_DescribeTypeChar(ctx->enc_type, ctx->is_complex),\n                 parent->type->name, field->name);\n  }\n}\nstatic int __Pyx_BufFmt_ProcessTypeChunk(__Pyx_BufFmt_Context* ctx) {\n  char group;\n  size_t size, offset, arraysize = 1;\n  if (ctx->enc_type == 0) return 0;\n  if (ctx->head->field->type->arraysize[0]) {\n    int i, ndim = 0;\n    if (ctx->enc_type == 's' || ctx->enc_type == 'p') {\n        ctx->is_valid_array = ctx->head->field->type->ndim == 1;\n        ndim = 1;\n        if (ctx->enc_count != ctx->head->field->type->arraysize[0]) {\n            PyErr_Format(PyExc_ValueError,\n                         \"Expected a dimension of size %zu, got %zu\",\n                         ctx->head->field->type->arraysize[0], ctx->enc_count);\n            return -1;\n        }\n    }\n    if (!ctx->is_valid_array) {\n      PyErr_Format(PyExc_ValueError, \"Expected %d dimensions, got %d\",\n                   ctx->head->field->type->ndim, 
ndim);\n      return -1;\n    }\n    for (i = 0; i < ctx->head->field->type->ndim; i++) {\n      arraysize *= ctx->head->field->type->arraysize[i];\n    }\n    ctx->is_valid_array = 0;\n    ctx->enc_count = 1;\n  }\n  group = __Pyx_BufFmt_TypeCharToGroup(ctx->enc_type, ctx->is_complex);\n  do {\n    __Pyx_StructField* field = ctx->head->field;\n    __Pyx_TypeInfo* type = field->type;\n    if (ctx->enc_packmode == '@' || ctx->enc_packmode == '^') {\n      size = __Pyx_BufFmt_TypeCharToNativeSize(ctx->enc_type, ctx->is_complex);\n    } else {\n      size = __Pyx_BufFmt_TypeCharToStandardSize(ctx->enc_type, ctx->is_complex);\n    }\n    if (ctx->enc_packmode == '@') {\n      size_t align_at = __Pyx_BufFmt_TypeCharToAlignment(ctx->enc_type, ctx->is_complex);\n      size_t align_mod_offset;\n      if (align_at == 0) return -1;\n      align_mod_offset = ctx->fmt_offset % align_at;\n      if (align_mod_offset > 0) ctx->fmt_offset += align_at - align_mod_offset;\n      if (ctx->struct_alignment == 0)\n          ctx->struct_alignment = __Pyx_BufFmt_TypeCharToPadding(ctx->enc_type,\n                                                                 ctx->is_complex);\n    }\n    if (type->size != size || type->typegroup != group) {\n      if (type->typegroup == 'C' && type->fields != NULL) {\n        size_t parent_offset = ctx->head->parent_offset + field->offset;\n        ++ctx->head;\n        ctx->head->field = type->fields;\n        ctx->head->parent_offset = parent_offset;\n        continue;\n      }\n      if ((type->typegroup == 'H' || group == 'H') && type->size == size) {\n      } else {\n          __Pyx_BufFmt_RaiseExpected(ctx);\n          return -1;\n      }\n    }\n    offset = ctx->head->parent_offset + field->offset;\n    if (ctx->fmt_offset != offset) {\n      PyErr_Format(PyExc_ValueError,\n                   \"Buffer dtype mismatch; next field is at offset %\" CYTHON_FORMAT_SSIZE_T \"d but %\" CYTHON_FORMAT_SSIZE_T \"d expected\",\n                   
(Py_ssize_t)ctx->fmt_offset, (Py_ssize_t)offset);\n      return -1;\n    }\n    ctx->fmt_offset += size;\n    if (arraysize)\n      ctx->fmt_offset += (arraysize - 1) * size;\n    --ctx->enc_count;\n    while (1) {\n      if (field == &ctx->root) {\n        ctx->head = NULL;\n        if (ctx->enc_count != 0) {\n          __Pyx_BufFmt_RaiseExpected(ctx);\n          return -1;\n        }\n        break;\n      }\n      ctx->head->field = ++field;\n      if (field->type == NULL) {\n        --ctx->head;\n        field = ctx->head->field;\n        continue;\n      } else if (field->type->typegroup == 'S') {\n        size_t parent_offset = ctx->head->parent_offset + field->offset;\n        if (field->type->fields->type == NULL) continue;\n        field = field->type->fields;\n        ++ctx->head;\n        ctx->head->field = field;\n        ctx->head->parent_offset = parent_offset;\n        break;\n      } else {\n        break;\n      }\n    }\n  } while (ctx->enc_count);\n  ctx->enc_type = 0;\n  ctx->is_complex = 0;\n  return 0;\n}\nstatic PyObject *\n__pyx_buffmt_parse_array(__Pyx_BufFmt_Context* ctx, const char** tsp)\n{\n    const char *ts = *tsp;\n    int i = 0, number;\n    int ndim = ctx->head->field->type->ndim;\n;\n    ++ts;\n    if (ctx->new_count != 1) {\n        PyErr_SetString(PyExc_ValueError,\n                        \"Cannot handle repeated arrays in format string\");\n        return NULL;\n    }\n    if (__Pyx_BufFmt_ProcessTypeChunk(ctx) == -1) return NULL;\n    while (*ts && *ts != ')') {\n        switch (*ts) {\n            case ' ': case '\\f': case '\\r': case '\\n': case '\\t': case '\\v':  continue;\n            default:  break;\n        }\n        number = __Pyx_BufFmt_ExpectNumber(&ts);\n        if (number == -1) return NULL;\n        if (i < ndim && (size_t) number != ctx->head->field->type->arraysize[i])\n            return PyErr_Format(PyExc_ValueError,\n                        \"Expected a dimension of size %zu, got %d\",\n                   
     ctx->head->field->type->arraysize[i], number);\n        if (*ts != ',' && *ts != ')')\n            return PyErr_Format(PyExc_ValueError,\n                                \"Expected a comma in format string, got '%c'\", *ts);\n        if (*ts == ',') ts++;\n        i++;\n    }\n    if (i != ndim)\n        return PyErr_Format(PyExc_ValueError, \"Expected %d dimension(s), got %d\",\n                            ctx->head->field->type->ndim, i);\n    if (!*ts) {\n        PyErr_SetString(PyExc_ValueError,\n                        \"Unexpected end of format string, expected ')'\");\n        return NULL;\n    }\n    ctx->is_valid_array = 1;\n    ctx->new_count = 1;\n    *tsp = ++ts;\n    return Py_None;\n}\nstatic const char* __Pyx_BufFmt_CheckString(__Pyx_BufFmt_Context* ctx, const char* ts) {\n  int got_Z = 0;\n  while (1) {\n    switch(*ts) {\n      case 0:\n        if (ctx->enc_type != 0 && ctx->head == NULL) {\n          __Pyx_BufFmt_RaiseExpected(ctx);\n          return NULL;\n        }\n        if (__Pyx_BufFmt_ProcessTypeChunk(ctx) == -1) return NULL;\n        if (ctx->head != NULL) {\n          __Pyx_BufFmt_RaiseExpected(ctx);\n          return NULL;\n        }\n        return ts;\n      case ' ':\n      case '\\r':\n      case '\\n':\n        ++ts;\n        break;\n      case '<':\n        if (!__Pyx_Is_Little_Endian()) {\n          PyErr_SetString(PyExc_ValueError, \"Little-endian buffer not supported on big-endian compiler\");\n          return NULL;\n        }\n        ctx->new_packmode = '=';\n        ++ts;\n        break;\n      case '>':\n      case '!':\n        if (__Pyx_Is_Little_Endian()) {\n          PyErr_SetString(PyExc_ValueError, \"Big-endian buffer not supported on little-endian compiler\");\n          return NULL;\n        }\n        ctx->new_packmode = '=';\n        ++ts;\n        break;\n      case '=':\n      case '@':\n      case '^':\n        ctx->new_packmode = *ts++;\n        break;\n      case 'T':\n        {\n          const char* 
ts_after_sub;\n          size_t i, struct_count = ctx->new_count;\n          size_t struct_alignment = ctx->struct_alignment;\n          ctx->new_count = 1;\n          ++ts;\n          if (*ts != '{') {\n            PyErr_SetString(PyExc_ValueError, \"Buffer acquisition: Expected '{' after 'T'\");\n            return NULL;\n          }\n          if (__Pyx_BufFmt_ProcessTypeChunk(ctx) == -1) return NULL;\n          ctx->enc_type = 0;\n          ctx->enc_count = 0;\n          ctx->struct_alignment = 0;\n          ++ts;\n          ts_after_sub = ts;\n          for (i = 0; i != struct_count; ++i) {\n            ts_after_sub = __Pyx_BufFmt_CheckString(ctx, ts);\n            if (!ts_after_sub) return NULL;\n          }\n          ts = ts_after_sub;\n          if (struct_alignment) ctx->struct_alignment = struct_alignment;\n        }\n        break;\n      case '}':\n        {\n          size_t alignment = ctx->struct_alignment;\n          ++ts;\n          if (__Pyx_BufFmt_ProcessTypeChunk(ctx) == -1) return NULL;\n          ctx->enc_type = 0;\n          if (alignment && ctx->fmt_offset % alignment) {\n            ctx->fmt_offset += alignment - (ctx->fmt_offset % alignment);\n          }\n        }\n        return ts;\n      case 'x':\n        if (__Pyx_BufFmt_ProcessTypeChunk(ctx) == -1) return NULL;\n        ctx->fmt_offset += ctx->new_count;\n        ctx->new_count = 1;\n        ctx->enc_count = 0;\n        ctx->enc_type = 0;\n        ctx->enc_packmode = ctx->new_packmode;\n        ++ts;\n        break;\n      case 'Z':\n        got_Z = 1;\n        ++ts;\n        if (*ts != 'f' && *ts != 'd' && *ts != 'g') {\n          __Pyx_BufFmt_RaiseUnexpectedChar('Z');\n          return NULL;\n        }\n        CYTHON_FALLTHROUGH;\n      case 'c': case 'b': case 'B': case 'h': case 'H': case 'i': case 'I':\n      case 'l': case 'L': case 'q': case 'Q':\n      case 'f': case 'd': case 'g':\n      case 'O': case 'p':\n        if (ctx->enc_type == *ts && got_Z == ctx->is_complex 
&&\n            ctx->enc_packmode == ctx->new_packmode) {\n          ctx->enc_count += ctx->new_count;\n          ctx->new_count = 1;\n          got_Z = 0;\n          ++ts;\n          break;\n        }\n        CYTHON_FALLTHROUGH;\n      case 's':\n        if (__Pyx_BufFmt_ProcessTypeChunk(ctx) == -1) return NULL;\n        ctx->enc_count = ctx->new_count;\n        ctx->enc_packmode = ctx->new_packmode;\n        ctx->enc_type = *ts;\n        ctx->is_complex = got_Z;\n        ++ts;\n        ctx->new_count = 1;\n        got_Z = 0;\n        break;\n      case ':':\n        ++ts;\n        while(*ts != ':') ++ts;\n        ++ts;\n        break;\n      case '(':\n        if (!__pyx_buffmt_parse_array(ctx, &ts)) return NULL;\n        break;\n      default:\n        {\n          int number = __Pyx_BufFmt_ExpectNumber(&ts);\n          if (number == -1) return NULL;\n          ctx->new_count = (size_t)number;\n        }\n    }\n  }\n}\n\n/* BufferGetAndValidate */\n  static CYTHON_INLINE void __Pyx_SafeReleaseBuffer(Py_buffer* info) {\n  if (unlikely(info->buf == NULL)) return;\n  if (info->suboffsets == __Pyx_minusones) info->suboffsets = NULL;\n  __Pyx_ReleaseBuffer(info);\n}\nstatic void __Pyx_ZeroBuffer(Py_buffer* buf) {\n  buf->buf = NULL;\n  buf->obj = NULL;\n  buf->strides = __Pyx_zeros;\n  buf->shape = __Pyx_zeros;\n  buf->suboffsets = __Pyx_minusones;\n}\nstatic int __Pyx__GetBufferAndValidate(\n        Py_buffer* buf, PyObject* obj,  __Pyx_TypeInfo* dtype, int flags,\n        int nd, int cast, __Pyx_BufFmt_StackElem* stack)\n{\n  buf->buf = NULL;\n  if (unlikely(__Pyx_GetBuffer(obj, buf, flags) == -1)) {\n    __Pyx_ZeroBuffer(buf);\n    return -1;\n  }\n  if (unlikely(buf->ndim != nd)) {\n    PyErr_Format(PyExc_ValueError,\n                 \"Buffer has wrong number of dimensions (expected %d, got %d)\",\n                 nd, buf->ndim);\n    goto fail;\n  }\n  if (!cast) {\n    __Pyx_BufFmt_Context ctx;\n    __Pyx_BufFmt_Init(&ctx, stack, dtype);\n    if 
(!__Pyx_BufFmt_CheckString(&ctx, buf->format)) goto fail;\n  }\n  if (unlikely((size_t)buf->itemsize != dtype->size)) {\n    PyErr_Format(PyExc_ValueError,\n      \"Item size of buffer (%\" CYTHON_FORMAT_SSIZE_T \"d byte%s) does not match size of '%s' (%\" CYTHON_FORMAT_SSIZE_T \"d byte%s)\",\n      buf->itemsize, (buf->itemsize > 1) ? \"s\" : \"\",\n      dtype->name, (Py_ssize_t)dtype->size, (dtype->size > 1) ? \"s\" : \"\");\n    goto fail;\n  }\n  if (buf->suboffsets == NULL) buf->suboffsets = __Pyx_minusones;\n  return 0;\nfail:;\n  __Pyx_SafeReleaseBuffer(buf);\n  return -1;\n}\n\n/* FetchCommonType */\n  static PyTypeObject* __Pyx_FetchCommonType(PyTypeObject* type) {\n    PyObject* fake_module;\n    PyTypeObject* cached_type = NULL;\n    fake_module = PyImport_AddModule((char*) \"_cython_\" CYTHON_ABI);\n    if (!fake_module) return NULL;\n    Py_INCREF(fake_module);\n    cached_type = (PyTypeObject*) PyObject_GetAttrString(fake_module, type->tp_name);\n    if (cached_type) {\n        if (!PyType_Check((PyObject*)cached_type)) {\n            PyErr_Format(PyExc_TypeError,\n                \"Shared Cython type %.200s is not a type object\",\n                type->tp_name);\n            goto bad;\n        }\n        if (cached_type->tp_basicsize != type->tp_basicsize) {\n            PyErr_Format(PyExc_TypeError,\n                \"Shared Cython type %.200s has the wrong size, try recompiling\",\n                type->tp_name);\n            goto bad;\n        }\n    } else {\n        if (!PyErr_ExceptionMatches(PyExc_AttributeError)) goto bad;\n        PyErr_Clear();\n        if (PyType_Ready(type) < 0) goto bad;\n        if (PyObject_SetAttrString(fake_module, type->tp_name, (PyObject*) type) < 0)\n            goto bad;\n        Py_INCREF(type);\n        cached_type = type;\n    }\ndone:\n    Py_DECREF(fake_module);\n    return cached_type;\nbad:\n    Py_XDECREF(cached_type);\n    cached_type = NULL;\n    goto done;\n}\n\n/* CythonFunction */\n  #include 
<structmember.h>\nstatic PyObject *\n__Pyx_CyFunction_get_doc(__pyx_CyFunctionObject *op, CYTHON_UNUSED void *closure)\n{\n    if (unlikely(op->func_doc == NULL)) {\n        if (op->func.m_ml->ml_doc) {\n#if PY_MAJOR_VERSION >= 3\n            op->func_doc = PyUnicode_FromString(op->func.m_ml->ml_doc);\n#else\n            op->func_doc = PyString_FromString(op->func.m_ml->ml_doc);\n#endif\n            if (unlikely(op->func_doc == NULL))\n                return NULL;\n        } else {\n            Py_INCREF(Py_None);\n            return Py_None;\n        }\n    }\n    Py_INCREF(op->func_doc);\n    return op->func_doc;\n}\nstatic int\n__Pyx_CyFunction_set_doc(__pyx_CyFunctionObject *op, PyObject *value, CYTHON_UNUSED void *context)\n{\n    PyObject *tmp = op->func_doc;\n    if (value == NULL) {\n        value = Py_None;\n    }\n    Py_INCREF(value);\n    op->func_doc = value;\n    Py_XDECREF(tmp);\n    return 0;\n}\nstatic PyObject *\n__Pyx_CyFunction_get_name(__pyx_CyFunctionObject *op, CYTHON_UNUSED void *context)\n{\n    if (unlikely(op->func_name == NULL)) {\n#if PY_MAJOR_VERSION >= 3\n        op->func_name = PyUnicode_InternFromString(op->func.m_ml->ml_name);\n#else\n        op->func_name = PyString_InternFromString(op->func.m_ml->ml_name);\n#endif\n        if (unlikely(op->func_name == NULL))\n            return NULL;\n    }\n    Py_INCREF(op->func_name);\n    return op->func_name;\n}\nstatic int\n__Pyx_CyFunction_set_name(__pyx_CyFunctionObject *op, PyObject *value, CYTHON_UNUSED void *context)\n{\n    PyObject *tmp;\n#if PY_MAJOR_VERSION >= 3\n    if (unlikely(value == NULL || !PyUnicode_Check(value)))\n#else\n    if (unlikely(value == NULL || !PyString_Check(value)))\n#endif\n    {\n        PyErr_SetString(PyExc_TypeError,\n                        \"__name__ must be set to a string object\");\n        return -1;\n    }\n    tmp = op->func_name;\n    Py_INCREF(value);\n    op->func_name = value;\n    Py_XDECREF(tmp);\n    return 0;\n}\nstatic PyObject 
*\n__Pyx_CyFunction_get_qualname(__pyx_CyFunctionObject *op, CYTHON_UNUSED void *context)\n{\n    Py_INCREF(op->func_qualname);\n    return op->func_qualname;\n}\nstatic int\n__Pyx_CyFunction_set_qualname(__pyx_CyFunctionObject *op, PyObject *value, CYTHON_UNUSED void *context)\n{\n    PyObject *tmp;\n#if PY_MAJOR_VERSION >= 3\n    if (unlikely(value == NULL || !PyUnicode_Check(value)))\n#else\n    if (unlikely(value == NULL || !PyString_Check(value)))\n#endif\n    {\n        PyErr_SetString(PyExc_TypeError,\n                        \"__qualname__ must be set to a string object\");\n        return -1;\n    }\n    tmp = op->func_qualname;\n    Py_INCREF(value);\n    op->func_qualname = value;\n    Py_XDECREF(tmp);\n    return 0;\n}\nstatic PyObject *\n__Pyx_CyFunction_get_self(__pyx_CyFunctionObject *m, CYTHON_UNUSED void *closure)\n{\n    PyObject *self;\n    self = m->func_closure;\n    if (self == NULL)\n        self = Py_None;\n    Py_INCREF(self);\n    return self;\n}\nstatic PyObject *\n__Pyx_CyFunction_get_dict(__pyx_CyFunctionObject *op, CYTHON_UNUSED void *context)\n{\n    if (unlikely(op->func_dict == NULL)) {\n        op->func_dict = PyDict_New();\n        if (unlikely(op->func_dict == NULL))\n            return NULL;\n    }\n    Py_INCREF(op->func_dict);\n    return op->func_dict;\n}\nstatic int\n__Pyx_CyFunction_set_dict(__pyx_CyFunctionObject *op, PyObject *value, CYTHON_UNUSED void *context)\n{\n    PyObject *tmp;\n    if (unlikely(value == NULL)) {\n        PyErr_SetString(PyExc_TypeError,\n               \"function's dictionary may not be deleted\");\n        return -1;\n    }\n    if (unlikely(!PyDict_Check(value))) {\n        PyErr_SetString(PyExc_TypeError,\n               \"setting function's dictionary to a non-dict\");\n        return -1;\n    }\n    tmp = op->func_dict;\n    Py_INCREF(value);\n    op->func_dict = value;\n    Py_XDECREF(tmp);\n    return 0;\n}\nstatic PyObject *\n__Pyx_CyFunction_get_globals(__pyx_CyFunctionObject *op, 
CYTHON_UNUSED void *context)\n{\n    Py_INCREF(op->func_globals);\n    return op->func_globals;\n}\nstatic PyObject *\n__Pyx_CyFunction_get_closure(CYTHON_UNUSED __pyx_CyFunctionObject *op, CYTHON_UNUSED void *context)\n{\n    Py_INCREF(Py_None);\n    return Py_None;\n}\nstatic PyObject *\n__Pyx_CyFunction_get_code(__pyx_CyFunctionObject *op, CYTHON_UNUSED void *context)\n{\n    PyObject* result = (op->func_code) ? op->func_code : Py_None;\n    Py_INCREF(result);\n    return result;\n}\nstatic int\n__Pyx_CyFunction_init_defaults(__pyx_CyFunctionObject *op) {\n    int result = 0;\n    PyObject *res = op->defaults_getter((PyObject *) op);\n    if (unlikely(!res))\n        return -1;\n    #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS\n    op->defaults_tuple = PyTuple_GET_ITEM(res, 0);\n    Py_INCREF(op->defaults_tuple);\n    op->defaults_kwdict = PyTuple_GET_ITEM(res, 1);\n    Py_INCREF(op->defaults_kwdict);\n    #else\n    op->defaults_tuple = PySequence_ITEM(res, 0);\n    if (unlikely(!op->defaults_tuple)) result = -1;\n    else {\n        op->defaults_kwdict = PySequence_ITEM(res, 1);\n        if (unlikely(!op->defaults_kwdict)) result = -1;\n    }\n    #endif\n    Py_DECREF(res);\n    return result;\n}\nstatic int\n__Pyx_CyFunction_set_defaults(__pyx_CyFunctionObject *op, PyObject* value, CYTHON_UNUSED void *context) {\n    PyObject* tmp;\n    if (!value) {\n        value = Py_None;\n    } else if (value != Py_None && !PyTuple_Check(value)) {\n        PyErr_SetString(PyExc_TypeError,\n                        \"__defaults__ must be set to a tuple object\");\n        return -1;\n    }\n    Py_INCREF(value);\n    tmp = op->defaults_tuple;\n    op->defaults_tuple = value;\n    Py_XDECREF(tmp);\n    return 0;\n}\nstatic PyObject *\n__Pyx_CyFunction_get_defaults(__pyx_CyFunctionObject *op, CYTHON_UNUSED void *context) {\n    PyObject* result = op->defaults_tuple;\n    if (unlikely(!result)) {\n        if (op->defaults_getter) {\n            if 
(__Pyx_CyFunction_init_defaults(op) < 0) return NULL;\n            result = op->defaults_tuple;\n        } else {\n            result = Py_None;\n        }\n    }\n    Py_INCREF(result);\n    return result;\n}\nstatic int\n__Pyx_CyFunction_set_kwdefaults(__pyx_CyFunctionObject *op, PyObject* value, CYTHON_UNUSED void *context) {\n    PyObject* tmp;\n    if (!value) {\n        value = Py_None;\n    } else if (value != Py_None && !PyDict_Check(value)) {\n        PyErr_SetString(PyExc_TypeError,\n                        \"__kwdefaults__ must be set to a dict object\");\n        return -1;\n    }\n    Py_INCREF(value);\n    tmp = op->defaults_kwdict;\n    op->defaults_kwdict = value;\n    Py_XDECREF(tmp);\n    return 0;\n}\nstatic PyObject *\n__Pyx_CyFunction_get_kwdefaults(__pyx_CyFunctionObject *op, CYTHON_UNUSED void *context) {\n    PyObject* result = op->defaults_kwdict;\n    if (unlikely(!result)) {\n        if (op->defaults_getter) {\n            if (__Pyx_CyFunction_init_defaults(op) < 0) return NULL;\n            result = op->defaults_kwdict;\n        } else {\n            result = Py_None;\n        }\n    }\n    Py_INCREF(result);\n    return result;\n}\nstatic int\n__Pyx_CyFunction_set_annotations(__pyx_CyFunctionObject *op, PyObject* value, CYTHON_UNUSED void *context) {\n    PyObject* tmp;\n    if (!value || value == Py_None) {\n        value = NULL;\n    } else if (!PyDict_Check(value)) {\n        PyErr_SetString(PyExc_TypeError,\n                        \"__annotations__ must be set to a dict object\");\n        return -1;\n    }\n    Py_XINCREF(value);\n    tmp = op->func_annotations;\n    op->func_annotations = value;\n    Py_XDECREF(tmp);\n    return 0;\n}\nstatic PyObject *\n__Pyx_CyFunction_get_annotations(__pyx_CyFunctionObject *op, CYTHON_UNUSED void *context) {\n    PyObject* result = op->func_annotations;\n    if (unlikely(!result)) {\n        result = PyDict_New();\n        if (unlikely(!result)) return NULL;\n        op->func_annotations = 
result;\n    }\n    Py_INCREF(result);\n    return result;\n}\nstatic PyGetSetDef __pyx_CyFunction_getsets[] = {\n    {(char *) \"func_doc\", (getter)__Pyx_CyFunction_get_doc, (setter)__Pyx_CyFunction_set_doc, 0, 0},\n    {(char *) \"__doc__\",  (getter)__Pyx_CyFunction_get_doc, (setter)__Pyx_CyFunction_set_doc, 0, 0},\n    {(char *) \"func_name\", (getter)__Pyx_CyFunction_get_name, (setter)__Pyx_CyFunction_set_name, 0, 0},\n    {(char *) \"__name__\", (getter)__Pyx_CyFunction_get_name, (setter)__Pyx_CyFunction_set_name, 0, 0},\n    {(char *) \"__qualname__\", (getter)__Pyx_CyFunction_get_qualname, (setter)__Pyx_CyFunction_set_qualname, 0, 0},\n    {(char *) \"__self__\", (getter)__Pyx_CyFunction_get_self, 0, 0, 0},\n    {(char *) \"func_dict\", (getter)__Pyx_CyFunction_get_dict, (setter)__Pyx_CyFunction_set_dict, 0, 0},\n    {(char *) \"__dict__\", (getter)__Pyx_CyFunction_get_dict, (setter)__Pyx_CyFunction_set_dict, 0, 0},\n    {(char *) \"func_globals\", (getter)__Pyx_CyFunction_get_globals, 0, 0, 0},\n    {(char *) \"__globals__\", (getter)__Pyx_CyFunction_get_globals, 0, 0, 0},\n    {(char *) \"func_closure\", (getter)__Pyx_CyFunction_get_closure, 0, 0, 0},\n    {(char *) \"__closure__\", (getter)__Pyx_CyFunction_get_closure, 0, 0, 0},\n    {(char *) \"func_code\", (getter)__Pyx_CyFunction_get_code, 0, 0, 0},\n    {(char *) \"__code__\", (getter)__Pyx_CyFunction_get_code, 0, 0, 0},\n    {(char *) \"func_defaults\", (getter)__Pyx_CyFunction_get_defaults, (setter)__Pyx_CyFunction_set_defaults, 0, 0},\n    {(char *) \"__defaults__\", (getter)__Pyx_CyFunction_get_defaults, (setter)__Pyx_CyFunction_set_defaults, 0, 0},\n    {(char *) \"__kwdefaults__\", (getter)__Pyx_CyFunction_get_kwdefaults, (setter)__Pyx_CyFunction_set_kwdefaults, 0, 0},\n    {(char *) \"__annotations__\", (getter)__Pyx_CyFunction_get_annotations, (setter)__Pyx_CyFunction_set_annotations, 0, 0},\n    {0, 0, 0, 0, 0}\n};\nstatic PyMemberDef __pyx_CyFunction_members[] = {\n    {(char *) 
\"__module__\", T_OBJECT, offsetof(PyCFunctionObject, m_module), PY_WRITE_RESTRICTED, 0},\n    {0, 0, 0,  0, 0}\n};\nstatic PyObject *\n__Pyx_CyFunction_reduce(__pyx_CyFunctionObject *m, CYTHON_UNUSED PyObject *args)\n{\n#if PY_MAJOR_VERSION >= 3\n    return PyUnicode_FromString(m->func.m_ml->ml_name);\n#else\n    return PyString_FromString(m->func.m_ml->ml_name);\n#endif\n}\nstatic PyMethodDef __pyx_CyFunction_methods[] = {\n    {\"__reduce__\", (PyCFunction)__Pyx_CyFunction_reduce, METH_VARARGS, 0},\n    {0, 0, 0, 0}\n};\n#if PY_VERSION_HEX < 0x030500A0\n#define __Pyx_CyFunction_weakreflist(cyfunc) ((cyfunc)->func_weakreflist)\n#else\n#define __Pyx_CyFunction_weakreflist(cyfunc) ((cyfunc)->func.m_weakreflist)\n#endif\nstatic PyObject *__Pyx_CyFunction_New(PyTypeObject *type, PyMethodDef *ml, int flags, PyObject* qualname,\n                                      PyObject *closure, PyObject *module, PyObject* globals, PyObject* code) {\n    __pyx_CyFunctionObject *op = PyObject_GC_New(__pyx_CyFunctionObject, type);\n    if (op == NULL)\n        return NULL;\n    op->flags = flags;\n    __Pyx_CyFunction_weakreflist(op) = NULL;\n    op->func.m_ml = ml;\n    op->func.m_self = (PyObject *) op;\n    Py_XINCREF(closure);\n    op->func_closure = closure;\n    Py_XINCREF(module);\n    op->func.m_module = module;\n    op->func_dict = NULL;\n    op->func_name = NULL;\n    Py_INCREF(qualname);\n    op->func_qualname = qualname;\n    op->func_doc = NULL;\n    op->func_classobj = NULL;\n    op->func_globals = globals;\n    Py_INCREF(op->func_globals);\n    Py_XINCREF(code);\n    op->func_code = code;\n    op->defaults_pyobjects = 0;\n    op->defaults = NULL;\n    op->defaults_tuple = NULL;\n    op->defaults_kwdict = NULL;\n    op->defaults_getter = NULL;\n    op->func_annotations = NULL;\n    PyObject_GC_Track(op);\n    return (PyObject *) op;\n}\nstatic int\n__Pyx_CyFunction_clear(__pyx_CyFunctionObject *m)\n{\n    Py_CLEAR(m->func_closure);\n    Py_CLEAR(m->func.m_module);\n   
 Py_CLEAR(m->func_dict);\n    Py_CLEAR(m->func_name);\n    Py_CLEAR(m->func_qualname);\n    Py_CLEAR(m->func_doc);\n    Py_CLEAR(m->func_globals);\n    Py_CLEAR(m->func_code);\n    Py_CLEAR(m->func_classobj);\n    Py_CLEAR(m->defaults_tuple);\n    Py_CLEAR(m->defaults_kwdict);\n    Py_CLEAR(m->func_annotations);\n    if (m->defaults) {\n        PyObject **pydefaults = __Pyx_CyFunction_Defaults(PyObject *, m);\n        int i;\n        for (i = 0; i < m->defaults_pyobjects; i++)\n            Py_XDECREF(pydefaults[i]);\n        PyObject_Free(m->defaults);\n        m->defaults = NULL;\n    }\n    return 0;\n}\nstatic void __Pyx__CyFunction_dealloc(__pyx_CyFunctionObject *m)\n{\n    if (__Pyx_CyFunction_weakreflist(m) != NULL)\n        PyObject_ClearWeakRefs((PyObject *) m);\n    __Pyx_CyFunction_clear(m);\n    PyObject_GC_Del(m);\n}\nstatic void __Pyx_CyFunction_dealloc(__pyx_CyFunctionObject *m)\n{\n    PyObject_GC_UnTrack(m);\n    __Pyx__CyFunction_dealloc(m);\n}\nstatic int __Pyx_CyFunction_traverse(__pyx_CyFunctionObject *m, visitproc visit, void *arg)\n{\n    Py_VISIT(m->func_closure);\n    Py_VISIT(m->func.m_module);\n    Py_VISIT(m->func_dict);\n    Py_VISIT(m->func_name);\n    Py_VISIT(m->func_qualname);\n    Py_VISIT(m->func_doc);\n    Py_VISIT(m->func_globals);\n    Py_VISIT(m->func_code);\n    Py_VISIT(m->func_classobj);\n    Py_VISIT(m->defaults_tuple);\n    Py_VISIT(m->defaults_kwdict);\n    if (m->defaults) {\n        PyObject **pydefaults = __Pyx_CyFunction_Defaults(PyObject *, m);\n        int i;\n        for (i = 0; i < m->defaults_pyobjects; i++)\n            Py_VISIT(pydefaults[i]);\n    }\n    return 0;\n}\nstatic PyObject *__Pyx_CyFunction_descr_get(PyObject *func, PyObject *obj, PyObject *type)\n{\n    __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func;\n    if (m->flags & __Pyx_CYFUNCTION_STATICMETHOD) {\n        Py_INCREF(func);\n        return func;\n    }\n    if (m->flags & __Pyx_CYFUNCTION_CLASSMETHOD) {\n        if (type == NULL)\n 
           type = (PyObject *)(Py_TYPE(obj));\n        return __Pyx_PyMethod_New(func, type, (PyObject *)(Py_TYPE(type)));\n    }\n    if (obj == Py_None)\n        obj = NULL;\n    return __Pyx_PyMethod_New(func, obj, type);\n}\nstatic PyObject*\n__Pyx_CyFunction_repr(__pyx_CyFunctionObject *op)\n{\n#if PY_MAJOR_VERSION >= 3\n    return PyUnicode_FromFormat(\"<cyfunction %U at %p>\",\n                                op->func_qualname, (void *)op);\n#else\n    return PyString_FromFormat(\"<cyfunction %s at %p>\",\n                               PyString_AsString(op->func_qualname), (void *)op);\n#endif\n}\nstatic PyObject * __Pyx_CyFunction_CallMethod(PyObject *func, PyObject *self, PyObject *arg, PyObject *kw) {\n    PyCFunctionObject* f = (PyCFunctionObject*)func;\n    PyCFunction meth = f->m_ml->ml_meth;\n    Py_ssize_t size;\n    switch (f->m_ml->ml_flags & (METH_VARARGS | METH_KEYWORDS | METH_NOARGS | METH_O)) {\n    case METH_VARARGS:\n        if (likely(kw == NULL || PyDict_Size(kw) == 0))\n            return (*meth)(self, arg);\n        break;\n    case METH_VARARGS | METH_KEYWORDS:\n        return (*(PyCFunctionWithKeywords)(void*)meth)(self, arg, kw);\n    case METH_NOARGS:\n        if (likely(kw == NULL || PyDict_Size(kw) == 0)) {\n            size = PyTuple_GET_SIZE(arg);\n            if (likely(size == 0))\n                return (*meth)(self, NULL);\n            PyErr_Format(PyExc_TypeError,\n                \"%.200s() takes no arguments (%\" CYTHON_FORMAT_SSIZE_T \"d given)\",\n                f->m_ml->ml_name, size);\n            return NULL;\n        }\n        break;\n    case METH_O:\n        if (likely(kw == NULL || PyDict_Size(kw) == 0)) {\n            size = PyTuple_GET_SIZE(arg);\n            if (likely(size == 1)) {\n                PyObject *result, *arg0;\n                #if CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS\n                arg0 = PyTuple_GET_ITEM(arg, 0);\n                #else\n                arg0 = 
PySequence_ITEM(arg, 0); if (unlikely(!arg0)) return NULL;\n                #endif\n                result = (*meth)(self, arg0);\n                #if !(CYTHON_ASSUME_SAFE_MACROS && !CYTHON_AVOID_BORROWED_REFS)\n                Py_DECREF(arg0);\n                #endif\n                return result;\n            }\n            PyErr_Format(PyExc_TypeError,\n                \"%.200s() takes exactly one argument (%\" CYTHON_FORMAT_SSIZE_T \"d given)\",\n                f->m_ml->ml_name, size);\n            return NULL;\n        }\n        break;\n    default:\n        PyErr_SetString(PyExc_SystemError, \"Bad call flags in \"\n                        \"__Pyx_CyFunction_Call. METH_OLDARGS is no \"\n                        \"longer supported!\");\n        return NULL;\n    }\n    PyErr_Format(PyExc_TypeError, \"%.200s() takes no keyword arguments\",\n                 f->m_ml->ml_name);\n    return NULL;\n}\nstatic CYTHON_INLINE PyObject *__Pyx_CyFunction_Call(PyObject *func, PyObject *arg, PyObject *kw) {\n    return __Pyx_CyFunction_CallMethod(func, ((PyCFunctionObject*)func)->m_self, arg, kw);\n}\nstatic PyObject *__Pyx_CyFunction_CallAsMethod(PyObject *func, PyObject *args, PyObject *kw) {\n    PyObject *result;\n    __pyx_CyFunctionObject *cyfunc = (__pyx_CyFunctionObject *) func;\n    if ((cyfunc->flags & __Pyx_CYFUNCTION_CCLASS) && !(cyfunc->flags & __Pyx_CYFUNCTION_STATICMETHOD)) {\n        Py_ssize_t argc;\n        PyObject *new_args;\n        PyObject *self;\n        argc = PyTuple_GET_SIZE(args);\n        new_args = PyTuple_GetSlice(args, 1, argc);\n        if (unlikely(!new_args))\n            return NULL;\n        self = PyTuple_GetItem(args, 0);\n        if (unlikely(!self)) {\n            Py_DECREF(new_args);\n            return NULL;\n        }\n        result = __Pyx_CyFunction_CallMethod(func, self, new_args, kw);\n        Py_DECREF(new_args);\n    } else {\n        result = __Pyx_CyFunction_Call(func, args, kw);\n    }\n    return result;\n}\nstatic 
PyTypeObject __pyx_CyFunctionType_type = {\n    PyVarObject_HEAD_INIT(0, 0)\n    \"cython_function_or_method\",\n    sizeof(__pyx_CyFunctionObject),\n    0,\n    (destructor) __Pyx_CyFunction_dealloc,\n    0,\n    0,\n    0,\n#if PY_MAJOR_VERSION < 3\n    0,\n#else\n    0,\n#endif\n    (reprfunc) __Pyx_CyFunction_repr,\n    0,\n    0,\n    0,\n    0,\n    __Pyx_CyFunction_CallAsMethod,\n    0,\n    0,\n    0,\n    0,\n    Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC,\n    0,\n    (traverseproc) __Pyx_CyFunction_traverse,\n    (inquiry) __Pyx_CyFunction_clear,\n    0,\n#if PY_VERSION_HEX < 0x030500A0\n    offsetof(__pyx_CyFunctionObject, func_weakreflist),\n#else\n    offsetof(PyCFunctionObject, m_weakreflist),\n#endif\n    0,\n    0,\n    __pyx_CyFunction_methods,\n    __pyx_CyFunction_members,\n    __pyx_CyFunction_getsets,\n    0,\n    0,\n    __Pyx_CyFunction_descr_get,\n    0,\n    offsetof(__pyx_CyFunctionObject, func_dict),\n    0,\n    0,\n    0,\n    0,\n    0,\n    0,\n    0,\n    0,\n    0,\n    0,\n    0,\n    0,\n#if PY_VERSION_HEX >= 0x030400a1\n    0,\n#endif\n#if PY_VERSION_HEX >= 0x030800b1\n    0,\n#endif\n};\nstatic int __pyx_CyFunction_init(void) {\n    __pyx_CyFunctionType = __Pyx_FetchCommonType(&__pyx_CyFunctionType_type);\n    if (unlikely(__pyx_CyFunctionType == NULL)) {\n        return -1;\n    }\n    return 0;\n}\nstatic CYTHON_INLINE void *__Pyx_CyFunction_InitDefaults(PyObject *func, size_t size, int pyobjects) {\n    __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func;\n    m->defaults = PyObject_Malloc(size);\n    if (unlikely(!m->defaults))\n        return PyErr_NoMemory();\n    memset(m->defaults, 0, size);\n    m->defaults_pyobjects = pyobjects;\n    return m->defaults;\n}\nstatic CYTHON_INLINE void __Pyx_CyFunction_SetDefaultsTuple(PyObject *func, PyObject *tuple) {\n    __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func;\n    m->defaults_tuple = tuple;\n    Py_INCREF(tuple);\n}\nstatic CYTHON_INLINE void 
__Pyx_CyFunction_SetDefaultsKwDict(PyObject *func, PyObject *dict) {\n    __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func;\n    m->defaults_kwdict = dict;\n    Py_INCREF(dict);\n}\nstatic CYTHON_INLINE void __Pyx_CyFunction_SetAnnotationsDict(PyObject *func, PyObject *dict) {\n    __pyx_CyFunctionObject *m = (__pyx_CyFunctionObject *) func;\n    m->func_annotations = dict;\n    Py_INCREF(dict);\n}\n\n/* BufferFallbackError */\n  static void __Pyx_RaiseBufferFallbackError(void) {\n  PyErr_SetString(PyExc_ValueError,\n     \"Buffer acquisition failed on assignment; and then reacquiring the old buffer failed too!\");\n}\n\n/* None */\n  static CYTHON_INLINE Py_ssize_t __Pyx_div_Py_ssize_t(Py_ssize_t a, Py_ssize_t b) {\n    Py_ssize_t q = a / b;\n    Py_ssize_t r = a - q*b;\n    q -= ((r != 0) & ((r ^ b) < 0));\n    return q;\n}\n\n/* BufferIndexError */\n  static void __Pyx_RaiseBufferIndexError(int axis) {\n  PyErr_Format(PyExc_IndexError,\n     \"Out of bounds on buffer access (axis %d)\", axis);\n}\n\n/* RaiseTooManyValuesToUnpack */\n  static CYTHON_INLINE void __Pyx_RaiseTooManyValuesError(Py_ssize_t expected) {\n    PyErr_Format(PyExc_ValueError,\n                 \"too many values to unpack (expected %\" CYTHON_FORMAT_SSIZE_T \"d)\", expected);\n}\n\n/* RaiseNeedMoreValuesToUnpack */\n  static CYTHON_INLINE void __Pyx_RaiseNeedMoreValuesError(Py_ssize_t index) {\n    PyErr_Format(PyExc_ValueError,\n                 \"need more than %\" CYTHON_FORMAT_SSIZE_T \"d value%.1s to unpack\",\n                 index, (index == 1) ? 
\"\" : \"s\");\n}\n\n/* RaiseNoneIterError */\n  static CYTHON_INLINE void __Pyx_RaiseNoneNotIterableError(void) {\n    PyErr_SetString(PyExc_TypeError, \"'NoneType' object is not iterable\");\n}\n\n/* GetTopmostException */\n  #if CYTHON_USE_EXC_INFO_STACK\nstatic _PyErr_StackItem *\n__Pyx_PyErr_GetTopmostException(PyThreadState *tstate)\n{\n    _PyErr_StackItem *exc_info = tstate->exc_info;\n    while ((exc_info->exc_type == NULL || exc_info->exc_type == Py_None) &&\n           exc_info->previous_item != NULL)\n    {\n        exc_info = exc_info->previous_item;\n    }\n    return exc_info;\n}\n#endif\n\n/* SaveResetException */\n  #if CYTHON_FAST_THREAD_STATE\nstatic CYTHON_INLINE void __Pyx__ExceptionSave(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb) {\n    #if CYTHON_USE_EXC_INFO_STACK\n    _PyErr_StackItem *exc_info = __Pyx_PyErr_GetTopmostException(tstate);\n    *type = exc_info->exc_type;\n    *value = exc_info->exc_value;\n    *tb = exc_info->exc_traceback;\n    #else\n    *type = tstate->exc_type;\n    *value = tstate->exc_value;\n    *tb = tstate->exc_traceback;\n    #endif\n    Py_XINCREF(*type);\n    Py_XINCREF(*value);\n    Py_XINCREF(*tb);\n}\nstatic CYTHON_INLINE void __Pyx__ExceptionReset(PyThreadState *tstate, PyObject *type, PyObject *value, PyObject *tb) {\n    PyObject *tmp_type, *tmp_value, *tmp_tb;\n    #if CYTHON_USE_EXC_INFO_STACK\n    _PyErr_StackItem *exc_info = tstate->exc_info;\n    tmp_type = exc_info->exc_type;\n    tmp_value = exc_info->exc_value;\n    tmp_tb = exc_info->exc_traceback;\n    exc_info->exc_type = type;\n    exc_info->exc_value = value;\n    exc_info->exc_traceback = tb;\n    #else\n    tmp_type = tstate->exc_type;\n    tmp_value = tstate->exc_value;\n    tmp_tb = tstate->exc_traceback;\n    tstate->exc_type = type;\n    tstate->exc_value = value;\n    tstate->exc_traceback = tb;\n    #endif\n    Py_XDECREF(tmp_type);\n    Py_XDECREF(tmp_value);\n    Py_XDECREF(tmp_tb);\n}\n#endif\n\n/* 
PyErrExceptionMatches */\n  #if CYTHON_FAST_THREAD_STATE\nstatic int __Pyx_PyErr_ExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) {\n    Py_ssize_t i, n;\n    n = PyTuple_GET_SIZE(tuple);\n#if PY_MAJOR_VERSION >= 3\n    for (i=0; i<n; i++) {\n        if (exc_type == PyTuple_GET_ITEM(tuple, i)) return 1;\n    }\n#endif\n    for (i=0; i<n; i++) {\n        if (__Pyx_PyErr_GivenExceptionMatches(exc_type, PyTuple_GET_ITEM(tuple, i))) return 1;\n    }\n    return 0;\n}\nstatic CYTHON_INLINE int __Pyx_PyErr_ExceptionMatchesInState(PyThreadState* tstate, PyObject* err) {\n    PyObject *exc_type = tstate->curexc_type;\n    if (exc_type == err) return 1;\n    if (unlikely(!exc_type)) return 0;\n    if (unlikely(PyTuple_Check(err)))\n        return __Pyx_PyErr_ExceptionMatchesTuple(exc_type, err);\n    return __Pyx_PyErr_GivenExceptionMatches(exc_type, err);\n}\n#endif\n\n/* GetException */\n  #if CYTHON_FAST_THREAD_STATE\nstatic int __Pyx__GetException(PyThreadState *tstate, PyObject **type, PyObject **value, PyObject **tb)\n#else\nstatic int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb)\n#endif\n{\n    PyObject *local_type, *local_value, *local_tb;\n#if CYTHON_FAST_THREAD_STATE\n    PyObject *tmp_type, *tmp_value, *tmp_tb;\n    local_type = tstate->curexc_type;\n    local_value = tstate->curexc_value;\n    local_tb = tstate->curexc_traceback;\n    tstate->curexc_type = 0;\n    tstate->curexc_value = 0;\n    tstate->curexc_traceback = 0;\n#else\n    PyErr_Fetch(&local_type, &local_value, &local_tb);\n#endif\n    PyErr_NormalizeException(&local_type, &local_value, &local_tb);\n#if CYTHON_FAST_THREAD_STATE\n    if (unlikely(tstate->curexc_type))\n#else\n    if (unlikely(PyErr_Occurred()))\n#endif\n        goto bad;\n    #if PY_MAJOR_VERSION >= 3\n    if (local_tb) {\n        if (unlikely(PyException_SetTraceback(local_value, local_tb) < 0))\n            goto bad;\n    }\n    #endif\n    Py_XINCREF(local_tb);\n    Py_XINCREF(local_type);\n    
Py_XINCREF(local_value);\n    *type = local_type;\n    *value = local_value;\n    *tb = local_tb;\n#if CYTHON_FAST_THREAD_STATE\n    #if CYTHON_USE_EXC_INFO_STACK\n    {\n        _PyErr_StackItem *exc_info = tstate->exc_info;\n        tmp_type = exc_info->exc_type;\n        tmp_value = exc_info->exc_value;\n        tmp_tb = exc_info->exc_traceback;\n        exc_info->exc_type = local_type;\n        exc_info->exc_value = local_value;\n        exc_info->exc_traceback = local_tb;\n    }\n    #else\n    tmp_type = tstate->exc_type;\n    tmp_value = tstate->exc_value;\n    tmp_tb = tstate->exc_traceback;\n    tstate->exc_type = local_type;\n    tstate->exc_value = local_value;\n    tstate->exc_traceback = local_tb;\n    #endif\n    Py_XDECREF(tmp_type);\n    Py_XDECREF(tmp_value);\n    Py_XDECREF(tmp_tb);\n#else\n    PyErr_SetExcInfo(local_type, local_value, local_tb);\n#endif\n    return 0;\nbad:\n    *type = 0;\n    *value = 0;\n    *tb = 0;\n    Py_XDECREF(local_type);\n    Py_XDECREF(local_value);\n    Py_XDECREF(local_tb);\n    return -1;\n}\n\n/* PyObject_GenericGetAttrNoDict */\n  #if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000\nstatic PyObject *__Pyx_RaiseGenericGetAttributeError(PyTypeObject *tp, PyObject *attr_name) {\n    PyErr_Format(PyExc_AttributeError,\n#if PY_MAJOR_VERSION >= 3\n                 \"'%.50s' object has no attribute '%U'\",\n                 tp->tp_name, attr_name);\n#else\n                 \"'%.50s' object has no attribute '%.400s'\",\n                 tp->tp_name, PyString_AS_STRING(attr_name));\n#endif\n    return NULL;\n}\nstatic CYTHON_INLINE PyObject* __Pyx_PyObject_GenericGetAttrNoDict(PyObject* obj, PyObject* attr_name) {\n    PyObject *descr;\n    PyTypeObject *tp = Py_TYPE(obj);\n    if (unlikely(!PyString_Check(attr_name))) {\n        return PyObject_GenericGetAttr(obj, attr_name);\n    }\n    assert(!tp->tp_dictoffset);\n    descr = _PyType_Lookup(tp, attr_name);\n    if (unlikely(!descr)) {\n 
       return __Pyx_RaiseGenericGetAttributeError(tp, attr_name);\n    }\n    Py_INCREF(descr);\n    #if PY_MAJOR_VERSION < 3\n    if (likely(PyType_HasFeature(Py_TYPE(descr), Py_TPFLAGS_HAVE_CLASS)))\n    #endif\n    {\n        descrgetfunc f = Py_TYPE(descr)->tp_descr_get;\n        if (unlikely(f)) {\n            PyObject *res = f(descr, obj, (PyObject *)tp);\n            Py_DECREF(descr);\n            return res;\n        }\n    }\n    return descr;\n}\n#endif\n\n/* PyObject_GenericGetAttr */\n  #if CYTHON_USE_TYPE_SLOTS && CYTHON_USE_PYTYPE_LOOKUP && PY_VERSION_HEX < 0x03070000\nstatic PyObject* __Pyx_PyObject_GenericGetAttr(PyObject* obj, PyObject* attr_name) {\n    if (unlikely(Py_TYPE(obj)->tp_dictoffset)) {\n        return PyObject_GenericGetAttr(obj, attr_name);\n    }\n    return __Pyx_PyObject_GenericGetAttrNoDict(obj, attr_name);\n}\n#endif\n\n/* SetupReduce */\n  static int __Pyx_setup_reduce_is_named(PyObject* meth, PyObject* name) {\n  int ret;\n  PyObject *name_attr;\n  name_attr = __Pyx_PyObject_GetAttrStr(meth, __pyx_n_s_name);\n  if (likely(name_attr)) {\n      ret = PyObject_RichCompareBool(name_attr, name, Py_EQ);\n  } else {\n      ret = -1;\n  }\n  if (unlikely(ret < 0)) {\n      PyErr_Clear();\n      ret = 0;\n  }\n  Py_XDECREF(name_attr);\n  return ret;\n}\nstatic int __Pyx_setup_reduce(PyObject* type_obj) {\n    int ret = 0;\n    PyObject *object_reduce = NULL;\n    PyObject *object_reduce_ex = NULL;\n    PyObject *reduce = NULL;\n    PyObject *reduce_ex = NULL;\n    PyObject *reduce_cython = NULL;\n    PyObject *setstate = NULL;\n    PyObject *setstate_cython = NULL;\n#if CYTHON_USE_PYTYPE_LOOKUP\n    if (_PyType_Lookup((PyTypeObject*)type_obj, __pyx_n_s_getstate)) goto GOOD;\n#else\n    if (PyObject_HasAttr(type_obj, __pyx_n_s_getstate)) goto GOOD;\n#endif\n#if CYTHON_USE_PYTYPE_LOOKUP\n    object_reduce_ex = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto BAD;\n#else\n    object_reduce_ex = 
__Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce_ex); if (!object_reduce_ex) goto BAD;\n#endif\n    reduce_ex = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce_ex); if (unlikely(!reduce_ex)) goto BAD;\n    if (reduce_ex == object_reduce_ex) {\n#if CYTHON_USE_PYTYPE_LOOKUP\n        object_reduce = _PyType_Lookup(&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto BAD;\n#else\n        object_reduce = __Pyx_PyObject_GetAttrStr((PyObject*)&PyBaseObject_Type, __pyx_n_s_reduce); if (!object_reduce) goto BAD;\n#endif\n        reduce = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce); if (unlikely(!reduce)) goto BAD;\n        if (reduce == object_reduce || __Pyx_setup_reduce_is_named(reduce, __pyx_n_s_reduce_cython)) {\n            reduce_cython = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_reduce_cython); if (unlikely(!reduce_cython)) goto BAD;\n            ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce, reduce_cython); if (unlikely(ret < 0)) goto BAD;\n            ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_reduce_cython); if (unlikely(ret < 0)) goto BAD;\n            setstate = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_setstate);\n            if (!setstate) PyErr_Clear();\n            if (!setstate || __Pyx_setup_reduce_is_named(setstate, __pyx_n_s_setstate_cython)) {\n                setstate_cython = __Pyx_PyObject_GetAttrStr(type_obj, __pyx_n_s_setstate_cython); if (unlikely(!setstate_cython)) goto BAD;\n                ret = PyDict_SetItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate, setstate_cython); if (unlikely(ret < 0)) goto BAD;\n                ret = PyDict_DelItem(((PyTypeObject*)type_obj)->tp_dict, __pyx_n_s_setstate_cython); if (unlikely(ret < 0)) goto BAD;\n            }\n            PyType_Modified((PyTypeObject*)type_obj);\n        }\n    }\n    goto GOOD;\nBAD:\n    if (!PyErr_Occurred())\n        PyErr_Format(PyExc_RuntimeError, \"Unable to 
initialize pickling for %s\", ((PyTypeObject*)type_obj)->tp_name);\n    ret = -1;\nGOOD:\n#if !CYTHON_USE_PYTYPE_LOOKUP\n    Py_XDECREF(object_reduce);\n    Py_XDECREF(object_reduce_ex);\n#endif\n    Py_XDECREF(reduce);\n    Py_XDECREF(reduce_ex);\n    Py_XDECREF(reduce_cython);\n    Py_XDECREF(setstate);\n    Py_XDECREF(setstate_cython);\n    return ret;\n}\n\n/* TypeImport */\n  #ifndef __PYX_HAVE_RT_ImportType\n#define __PYX_HAVE_RT_ImportType\nstatic PyTypeObject *__Pyx_ImportType(PyObject *module, const char *module_name, const char *class_name,\n    size_t size, enum __Pyx_ImportType_CheckSize check_size)\n{\n    PyObject *result = 0;\n    char warning[200];\n    Py_ssize_t basicsize;\n#ifdef Py_LIMITED_API\n    PyObject *py_basicsize;\n#endif\n    result = PyObject_GetAttrString(module, class_name);\n    if (!result)\n        goto bad;\n    if (!PyType_Check(result)) {\n        PyErr_Format(PyExc_TypeError,\n            \"%.200s.%.200s is not a type object\",\n            module_name, class_name);\n        goto bad;\n    }\n#ifndef Py_LIMITED_API\n    basicsize = ((PyTypeObject *)result)->tp_basicsize;\n#else\n    py_basicsize = PyObject_GetAttrString(result, \"__basicsize__\");\n    if (!py_basicsize)\n        goto bad;\n    basicsize = PyLong_AsSsize_t(py_basicsize);\n    Py_DECREF(py_basicsize);\n    py_basicsize = 0;\n    if (basicsize == (Py_ssize_t)-1 && PyErr_Occurred())\n        goto bad;\n#endif\n    if ((size_t)basicsize < size) {\n        PyErr_Format(PyExc_ValueError,\n            \"%.200s.%.200s size changed, may indicate binary incompatibility. \"\n            \"Expected %zd from C header, got %zd from PyObject\",\n            module_name, class_name, size, basicsize);\n        goto bad;\n    }\n    if (check_size == __Pyx_ImportType_CheckSize_Error && (size_t)basicsize != size) {\n        PyErr_Format(PyExc_ValueError,\n            \"%.200s.%.200s size changed, may indicate binary incompatibility. 
\"\n            \"Expected %zd from C header, got %zd from PyObject\",\n            module_name, class_name, size, basicsize);\n        goto bad;\n    }\n    else if (check_size == __Pyx_ImportType_CheckSize_Warn && (size_t)basicsize > size) {\n        PyOS_snprintf(warning, sizeof(warning),\n            \"%s.%s size changed, may indicate binary incompatibility. \"\n            \"Expected %zd from C header, got %zd from PyObject\",\n            module_name, class_name, size, basicsize);\n        if (PyErr_WarnEx(NULL, warning, 0) < 0) goto bad;\n    }\n    return (PyTypeObject *)result;\nbad:\n    Py_XDECREF(result);\n    return NULL;\n}\n#endif\n\n/* Import */\n  static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, int level) {\n    PyObject *empty_list = 0;\n    PyObject *module = 0;\n    PyObject *global_dict = 0;\n    PyObject *empty_dict = 0;\n    PyObject *list;\n    #if PY_MAJOR_VERSION < 3\n    PyObject *py_import;\n    py_import = __Pyx_PyObject_GetAttrStr(__pyx_b, __pyx_n_s_import);\n    if (!py_import)\n        goto bad;\n    #endif\n    if (from_list)\n        list = from_list;\n    else {\n        empty_list = PyList_New(0);\n        if (!empty_list)\n            goto bad;\n        list = empty_list;\n    }\n    global_dict = PyModule_GetDict(__pyx_m);\n    if (!global_dict)\n        goto bad;\n    empty_dict = PyDict_New();\n    if (!empty_dict)\n        goto bad;\n    {\n        #if PY_MAJOR_VERSION >= 3\n        if (level == -1) {\n            if (strchr(__Pyx_MODULE_NAME, '.')) {\n                module = PyImport_ImportModuleLevelObject(\n                    name, global_dict, empty_dict, list, 1);\n                if (!module) {\n                    if (!PyErr_ExceptionMatches(PyExc_ImportError))\n                        goto bad;\n                    PyErr_Clear();\n                }\n            }\n            level = 0;\n        }\n        #endif\n        if (!module) {\n            #if PY_MAJOR_VERSION < 3\n            PyObject 
*py_level = PyInt_FromLong(level);\n            if (!py_level)\n                goto bad;\n            module = PyObject_CallFunctionObjArgs(py_import,\n                name, global_dict, empty_dict, list, py_level, (PyObject *)NULL);\n            Py_DECREF(py_level);\n            #else\n            module = PyImport_ImportModuleLevelObject(\n                name, global_dict, empty_dict, list, level);\n            #endif\n        }\n    }\nbad:\n    #if PY_MAJOR_VERSION < 3\n    Py_XDECREF(py_import);\n    #endif\n    Py_XDECREF(empty_list);\n    Py_XDECREF(empty_dict);\n    return module;\n}\n\n/* CLineInTraceback */\n  #ifndef CYTHON_CLINE_IN_TRACEBACK\nstatic int __Pyx_CLineForTraceback(PyThreadState *tstate, int c_line) {\n    PyObject *use_cline;\n    PyObject *ptype, *pvalue, *ptraceback;\n#if CYTHON_COMPILING_IN_CPYTHON\n    PyObject **cython_runtime_dict;\n#endif\n    if (unlikely(!__pyx_cython_runtime)) {\n        return c_line;\n    }\n    __Pyx_ErrFetchInState(tstate, &ptype, &pvalue, &ptraceback);\n#if CYTHON_COMPILING_IN_CPYTHON\n    cython_runtime_dict = _PyObject_GetDictPtr(__pyx_cython_runtime);\n    if (likely(cython_runtime_dict)) {\n        __PYX_PY_DICT_LOOKUP_IF_MODIFIED(\n            use_cline, *cython_runtime_dict,\n            __Pyx_PyDict_GetItemStr(*cython_runtime_dict, __pyx_n_s_cline_in_traceback))\n    } else\n#endif\n    {\n      PyObject *use_cline_obj = __Pyx_PyObject_GetAttrStr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback);\n      if (use_cline_obj) {\n        use_cline = PyObject_Not(use_cline_obj) ? 
Py_False : Py_True;\n        Py_DECREF(use_cline_obj);\n      } else {\n        PyErr_Clear();\n        use_cline = NULL;\n      }\n    }\n    if (!use_cline) {\n        c_line = 0;\n        PyObject_SetAttr(__pyx_cython_runtime, __pyx_n_s_cline_in_traceback, Py_False);\n    }\n    else if (use_cline == Py_False || (use_cline != Py_True && PyObject_Not(use_cline) != 0)) {\n        c_line = 0;\n    }\n    __Pyx_ErrRestoreInState(tstate, ptype, pvalue, ptraceback);\n    return c_line;\n}\n#endif\n\n/* CodeObjectCache */\n  static int __pyx_bisect_code_objects(__Pyx_CodeObjectCacheEntry* entries, int count, int code_line) {\n    int start = 0, mid = 0, end = count - 1;\n    if (end >= 0 && code_line > entries[end].code_line) {\n        return count;\n    }\n    while (start < end) {\n        mid = start + (end - start) / 2;\n        if (code_line < entries[mid].code_line) {\n            end = mid;\n        } else if (code_line > entries[mid].code_line) {\n             start = mid + 1;\n        } else {\n            return mid;\n        }\n    }\n    if (code_line <= entries[mid].code_line) {\n        return mid;\n    } else {\n        return mid + 1;\n    }\n}\nstatic PyCodeObject *__pyx_find_code_object(int code_line) {\n    PyCodeObject* code_object;\n    int pos;\n    if (unlikely(!code_line) || unlikely(!__pyx_code_cache.entries)) {\n        return NULL;\n    }\n    pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line);\n    if (unlikely(pos >= __pyx_code_cache.count) || unlikely(__pyx_code_cache.entries[pos].code_line != code_line)) {\n        return NULL;\n    }\n    code_object = __pyx_code_cache.entries[pos].code_object;\n    Py_INCREF(code_object);\n    return code_object;\n}\nstatic void __pyx_insert_code_object(int code_line, PyCodeObject* code_object) {\n    int pos, i;\n    __Pyx_CodeObjectCacheEntry* entries = __pyx_code_cache.entries;\n    if (unlikely(!code_line)) {\n        return;\n    }\n    if 
(unlikely(!entries)) {\n        entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Malloc(64*sizeof(__Pyx_CodeObjectCacheEntry));\n        if (likely(entries)) {\n            __pyx_code_cache.entries = entries;\n            __pyx_code_cache.max_count = 64;\n            __pyx_code_cache.count = 1;\n            entries[0].code_line = code_line;\n            entries[0].code_object = code_object;\n            Py_INCREF(code_object);\n        }\n        return;\n    }\n    pos = __pyx_bisect_code_objects(__pyx_code_cache.entries, __pyx_code_cache.count, code_line);\n    if ((pos < __pyx_code_cache.count) && unlikely(__pyx_code_cache.entries[pos].code_line == code_line)) {\n        PyCodeObject* tmp = entries[pos].code_object;\n        entries[pos].code_object = code_object;\n        Py_DECREF(tmp);\n        return;\n    }\n    if (__pyx_code_cache.count == __pyx_code_cache.max_count) {\n        int new_max = __pyx_code_cache.max_count + 64;\n        entries = (__Pyx_CodeObjectCacheEntry*)PyMem_Realloc(\n            __pyx_code_cache.entries, (size_t)new_max*sizeof(__Pyx_CodeObjectCacheEntry));\n        if (unlikely(!entries)) {\n            return;\n        }\n        __pyx_code_cache.entries = entries;\n        __pyx_code_cache.max_count = new_max;\n    }\n    for (i=__pyx_code_cache.count; i>pos; i--) {\n        entries[i] = entries[i-1];\n    }\n    entries[pos].code_line = code_line;\n    entries[pos].code_object = code_object;\n    __pyx_code_cache.count++;\n    Py_INCREF(code_object);\n}\n\n/* AddTraceback */\n  #include \"compile.h\"\n#include \"frameobject.h\"\n#include \"traceback.h\"\nstatic PyCodeObject* __Pyx_CreateCodeObjectForTraceback(\n            const char *funcname, int c_line,\n            int py_line, const char *filename) {\n    PyCodeObject *py_code = 0;\n    PyObject *py_srcfile = 0;\n    PyObject *py_funcname = 0;\n    #if PY_MAJOR_VERSION < 3\n    py_srcfile = PyString_FromString(filename);\n    #else\n    py_srcfile = 
PyUnicode_FromString(filename);\n    #endif\n    if (!py_srcfile) goto bad;\n    if (c_line) {\n        #if PY_MAJOR_VERSION < 3\n        py_funcname = PyString_FromFormat( \"%s (%s:%d)\", funcname, __pyx_cfilenm, c_line);\n        #else\n        py_funcname = PyUnicode_FromFormat( \"%s (%s:%d)\", funcname, __pyx_cfilenm, c_line);\n        #endif\n    }\n    else {\n        #if PY_MAJOR_VERSION < 3\n        py_funcname = PyString_FromString(funcname);\n        #else\n        py_funcname = PyUnicode_FromString(funcname);\n        #endif\n    }\n    if (!py_funcname) goto bad;\n    py_code = __Pyx_PyCode_New(\n        0,\n        0,\n        0,\n        0,\n        0,\n        __pyx_empty_bytes, /*PyObject *code,*/\n        __pyx_empty_tuple, /*PyObject *consts,*/\n        __pyx_empty_tuple, /*PyObject *names,*/\n        __pyx_empty_tuple, /*PyObject *varnames,*/\n        __pyx_empty_tuple, /*PyObject *freevars,*/\n        __pyx_empty_tuple, /*PyObject *cellvars,*/\n        py_srcfile,   /*PyObject *filename,*/\n        py_funcname,  /*PyObject *name,*/\n        py_line,\n        __pyx_empty_bytes  /*PyObject *lnotab*/\n    );\n    Py_DECREF(py_srcfile);\n    Py_DECREF(py_funcname);\n    return py_code;\nbad:\n    Py_XDECREF(py_srcfile);\n    Py_XDECREF(py_funcname);\n    return NULL;\n}\nstatic void __Pyx_AddTraceback(const char *funcname, int c_line,\n                               int py_line, const char *filename) {\n    PyCodeObject *py_code = 0;\n    PyFrameObject *py_frame = 0;\n    PyThreadState *tstate = __Pyx_PyThreadState_Current;\n    if (c_line) {\n        c_line = __Pyx_CLineForTraceback(tstate, c_line);\n    }\n    py_code = __pyx_find_code_object(c_line ? -c_line : py_line);\n    if (!py_code) {\n        py_code = __Pyx_CreateCodeObjectForTraceback(\n            funcname, c_line, py_line, filename);\n        if (!py_code) goto bad;\n        __pyx_insert_code_object(c_line ? 
-c_line : py_line, py_code);\n    }\n    py_frame = PyFrame_New(\n        tstate,            /*PyThreadState *tstate,*/\n        py_code,           /*PyCodeObject *code,*/\n        __pyx_d,    /*PyObject *globals,*/\n        0                  /*PyObject *locals*/\n    );\n    if (!py_frame) goto bad;\n    __Pyx_PyFrame_SetLineNumber(py_frame, py_line);\n    PyTraceBack_Here(py_frame);\nbad:\n    Py_XDECREF(py_code);\n    Py_XDECREF(py_frame);\n}\n\n#if PY_MAJOR_VERSION < 3\nstatic int __Pyx_GetBuffer(PyObject *obj, Py_buffer *view, int flags) {\n    if (PyObject_CheckBuffer(obj)) return PyObject_GetBuffer(obj, view, flags);\n        if (__Pyx_TypeCheck(obj, __pyx_ptype_5numpy_ndarray)) return __pyx_pw_5numpy_7ndarray_1__getbuffer__(obj, view, flags);\n    PyErr_Format(PyExc_TypeError, \"'%.200s' does not have the buffer interface\", Py_TYPE(obj)->tp_name);\n    return -1;\n}\nstatic void __Pyx_ReleaseBuffer(Py_buffer *view) {\n    PyObject *obj = view->obj;\n    if (!obj) return;\n    if (PyObject_CheckBuffer(obj)) {\n        PyBuffer_Release(view);\n        return;\n    }\n    if ((0)) {}\n        else if (__Pyx_TypeCheck(obj, __pyx_ptype_5numpy_ndarray)) __pyx_pw_5numpy_7ndarray_3__releasebuffer__(obj, view);\n    view->obj = NULL;\n    Py_DECREF(obj);\n}\n#endif\n\n\n  /* CIntToPy */\n  static CYTHON_INLINE PyObject* __Pyx_PyInt_From_long(long value) {\n    const long neg_one = (long) ((long) 0 - (long) 1), const_zero = (long) 0;\n    const int is_unsigned = neg_one > const_zero;\n    if (is_unsigned) {\n        if (sizeof(long) < sizeof(long)) {\n            return PyInt_FromLong((long) value);\n        } else if (sizeof(long) <= sizeof(unsigned long)) {\n            return PyLong_FromUnsignedLong((unsigned long) value);\n#ifdef HAVE_LONG_LONG\n        } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) {\n            return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value);\n#endif\n        }\n    } else {\n        if (sizeof(long) <= 
sizeof(long)) {\n            return PyInt_FromLong((long) value);\n#ifdef HAVE_LONG_LONG\n        } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) {\n            return PyLong_FromLongLong((PY_LONG_LONG) value);\n#endif\n        }\n    }\n    {\n        int one = 1; int little = (int)*(unsigned char *)&one;\n        unsigned char *bytes = (unsigned char *)&value;\n        return _PyLong_FromByteArray(bytes, sizeof(long),\n                                     little, !is_unsigned);\n    }\n}\n\n/* CIntFromPyVerify */\n  #define __PYX_VERIFY_RETURN_INT(target_type, func_type, func_value)\\\n    __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 0)\n#define __PYX_VERIFY_RETURN_INT_EXC(target_type, func_type, func_value)\\\n    __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, 1)\n#define __PYX__VERIFY_RETURN_INT(target_type, func_type, func_value, exc)\\\n    {\\\n        func_type value = func_value;\\\n        if (sizeof(target_type) < sizeof(func_type)) {\\\n            if (unlikely(value != (func_type) (target_type) value)) {\\\n                func_type zero = 0;\\\n                if (exc && unlikely(value == (func_type)-1 && PyErr_Occurred()))\\\n                    return (target_type) -1;\\\n                if (is_unsigned && unlikely(value < zero))\\\n                    goto raise_neg_overflow;\\\n                else\\\n                    goto raise_overflow;\\\n            }\\\n        }\\\n        return (target_type) value;\\\n    }\n\n/* CIntToPy */\n  static CYTHON_INLINE PyObject* __Pyx_PyInt_From_siz(siz value) {\n    const siz neg_one = (siz) ((siz) 0 - (siz) 1), const_zero = (siz) 0;\n    const int is_unsigned = neg_one > const_zero;\n    if (is_unsigned) {\n        if (sizeof(siz) < sizeof(long)) {\n            return PyInt_FromLong((long) value);\n        } else if (sizeof(siz) <= sizeof(unsigned long)) {\n            return PyLong_FromUnsignedLong((unsigned long) value);\n#ifdef HAVE_LONG_LONG\n        } else if 
(sizeof(siz) <= sizeof(unsigned PY_LONG_LONG)) {\n            return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value);\n#endif\n        }\n    } else {\n        if (sizeof(siz) <= sizeof(long)) {\n            return PyInt_FromLong((long) value);\n#ifdef HAVE_LONG_LONG\n        } else if (sizeof(siz) <= sizeof(PY_LONG_LONG)) {\n            return PyLong_FromLongLong((PY_LONG_LONG) value);\n#endif\n        }\n    }\n    {\n        int one = 1; int little = (int)*(unsigned char *)&one;\n        unsigned char *bytes = (unsigned char *)&value;\n        return _PyLong_FromByteArray(bytes, sizeof(siz),\n                                     little, !is_unsigned);\n    }\n}\n\n/* CIntToPy */\n  static CYTHON_INLINE PyObject* __Pyx_PyInt_From_Py_intptr_t(Py_intptr_t value) {\n    const Py_intptr_t neg_one = (Py_intptr_t) ((Py_intptr_t) 0 - (Py_intptr_t) 1), const_zero = (Py_intptr_t) 0;\n    const int is_unsigned = neg_one > const_zero;\n    if (is_unsigned) {\n        if (sizeof(Py_intptr_t) < sizeof(long)) {\n            return PyInt_FromLong((long) value);\n        } else if (sizeof(Py_intptr_t) <= sizeof(unsigned long)) {\n            return PyLong_FromUnsignedLong((unsigned long) value);\n#ifdef HAVE_LONG_LONG\n        } else if (sizeof(Py_intptr_t) <= sizeof(unsigned PY_LONG_LONG)) {\n            return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value);\n#endif\n        }\n    } else {\n        if (sizeof(Py_intptr_t) <= sizeof(long)) {\n            return PyInt_FromLong((long) value);\n#ifdef HAVE_LONG_LONG\n        } else if (sizeof(Py_intptr_t) <= sizeof(PY_LONG_LONG)) {\n            return PyLong_FromLongLong((PY_LONG_LONG) value);\n#endif\n        }\n    }\n    {\n        int one = 1; int little = (int)*(unsigned char *)&one;\n        unsigned char *bytes = (unsigned char *)&value;\n        return _PyLong_FromByteArray(bytes, sizeof(Py_intptr_t),\n                                     little, !is_unsigned);\n    }\n}\n\n/* Declarations */\n  #if 
CYTHON_CCOMPLEX\n  #ifdef __cplusplus\n    static CYTHON_INLINE __pyx_t_float_complex __pyx_t_float_complex_from_parts(float x, float y) {\n      return ::std::complex< float >(x, y);\n    }\n  #else\n    static CYTHON_INLINE __pyx_t_float_complex __pyx_t_float_complex_from_parts(float x, float y) {\n      return x + y*(__pyx_t_float_complex)_Complex_I;\n    }\n  #endif\n#else\n    static CYTHON_INLINE __pyx_t_float_complex __pyx_t_float_complex_from_parts(float x, float y) {\n      __pyx_t_float_complex z;\n      z.real = x;\n      z.imag = y;\n      return z;\n    }\n#endif\n\n/* Arithmetic */\n  #if CYTHON_CCOMPLEX\n#else\n    static CYTHON_INLINE int __Pyx_c_eq_float(__pyx_t_float_complex a, __pyx_t_float_complex b) {\n       return (a.real == b.real) && (a.imag == b.imag);\n    }\n    static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_sum_float(__pyx_t_float_complex a, __pyx_t_float_complex b) {\n        __pyx_t_float_complex z;\n        z.real = a.real + b.real;\n        z.imag = a.imag + b.imag;\n        return z;\n    }\n    static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_diff_float(__pyx_t_float_complex a, __pyx_t_float_complex b) {\n        __pyx_t_float_complex z;\n        z.real = a.real - b.real;\n        z.imag = a.imag - b.imag;\n        return z;\n    }\n    static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_prod_float(__pyx_t_float_complex a, __pyx_t_float_complex b) {\n        __pyx_t_float_complex z;\n        z.real = a.real * b.real - a.imag * b.imag;\n        z.imag = a.real * b.imag + a.imag * b.real;\n        return z;\n    }\n    #if 1\n    static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_quot_float(__pyx_t_float_complex a, __pyx_t_float_complex b) {\n        if (b.imag == 0) {\n            return __pyx_t_float_complex_from_parts(a.real / b.real, a.imag / b.real);\n        } else if (fabsf(b.real) >= fabsf(b.imag)) {\n            if (b.real == 0 && b.imag == 0) {\n                return __pyx_t_float_complex_from_parts(a.real / b.real, 
a.imag / b.imag);\n            } else {\n                float r = b.imag / b.real;\n                float s = (float)(1.0) / (b.real + b.imag * r);\n                return __pyx_t_float_complex_from_parts(\n                    (a.real + a.imag * r) * s, (a.imag - a.real * r) * s);\n            }\n        } else {\n            float r = b.real / b.imag;\n            float s = (float)(1.0) / (b.imag + b.real * r);\n            return __pyx_t_float_complex_from_parts(\n                (a.real * r + a.imag) * s, (a.imag * r - a.real) * s);\n        }\n    }\n    #else\n    static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_quot_float(__pyx_t_float_complex a, __pyx_t_float_complex b) {\n        if (b.imag == 0) {\n            return __pyx_t_float_complex_from_parts(a.real / b.real, a.imag / b.real);\n        } else {\n            float denom = b.real * b.real + b.imag * b.imag;\n            return __pyx_t_float_complex_from_parts(\n                (a.real * b.real + a.imag * b.imag) / denom,\n                (a.imag * b.real - a.real * b.imag) / denom);\n        }\n    }\n    #endif\n    static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_neg_float(__pyx_t_float_complex a) {\n        __pyx_t_float_complex z;\n        z.real = -a.real;\n        z.imag = -a.imag;\n        return z;\n    }\n    static CYTHON_INLINE int __Pyx_c_is_zero_float(__pyx_t_float_complex a) {\n       return (a.real == 0) && (a.imag == 0);\n    }\n    static CYTHON_INLINE __pyx_t_float_complex __Pyx_c_conj_float(__pyx_t_float_complex a) {\n        __pyx_t_float_complex z;\n        z.real =  a.real;\n        z.imag = -a.imag;\n        return z;\n    }\n    #if 1\n        static CYTHON_INLINE float __Pyx_c_abs_float(__pyx_t_float_complex z) {\n          #if !defined(HAVE_HYPOT) || defined(_MSC_VER)\n            return sqrtf(z.real*z.real + z.imag*z.imag);\n          #else\n            return hypotf(z.real, z.imag);\n          #endif\n        }\n        static CYTHON_INLINE __pyx_t_float_complex 
__Pyx_c_pow_float(__pyx_t_float_complex a, __pyx_t_float_complex b) {\n            __pyx_t_float_complex z;\n            float r, lnr, theta, z_r, z_theta;\n            if (b.imag == 0 && b.real == (int)b.real) {\n                if (b.real < 0) {\n                    float denom = a.real * a.real + a.imag * a.imag;\n                    a.real = a.real / denom;\n                    a.imag = -a.imag / denom;\n                    b.real = -b.real;\n                }\n                switch ((int)b.real) {\n                    case 0:\n                        z.real = 1;\n                        z.imag = 0;\n                        return z;\n                    case 1:\n                        return a;\n                    case 2:\n                        z = __Pyx_c_prod_float(a, a);\n                        return __Pyx_c_prod_float(a, a);\n                    case 3:\n                        z = __Pyx_c_prod_float(a, a);\n                        return __Pyx_c_prod_float(z, a);\n                    case 4:\n                        z = __Pyx_c_prod_float(a, a);\n                        return __Pyx_c_prod_float(z, z);\n                }\n            }\n            if (a.imag == 0) {\n                if (a.real == 0) {\n                    return a;\n                } else if (b.imag == 0) {\n                    z.real = powf(a.real, b.real);\n                    z.imag = 0;\n                    return z;\n                } else if (a.real > 0) {\n                    r = a.real;\n                    theta = 0;\n                } else {\n                    r = -a.real;\n                    theta = atan2f(0.0, -1.0);\n                }\n            } else {\n                r = __Pyx_c_abs_float(a);\n                theta = atan2f(a.imag, a.real);\n            }\n            lnr = logf(r);\n            z_r = expf(lnr * b.real - theta * b.imag);\n            z_theta = theta * b.real + lnr * b.imag;\n            z.real = z_r * cosf(z_theta);\n            z.imag = z_r 
* sinf(z_theta);\n            return z;\n        }\n    #endif\n#endif\n\n/* Declarations */\n  #if CYTHON_CCOMPLEX\n  #ifdef __cplusplus\n    static CYTHON_INLINE __pyx_t_double_complex __pyx_t_double_complex_from_parts(double x, double y) {\n      return ::std::complex< double >(x, y);\n    }\n  #else\n    static CYTHON_INLINE __pyx_t_double_complex __pyx_t_double_complex_from_parts(double x, double y) {\n      return x + y*(__pyx_t_double_complex)_Complex_I;\n    }\n  #endif\n#else\n    static CYTHON_INLINE __pyx_t_double_complex __pyx_t_double_complex_from_parts(double x, double y) {\n      __pyx_t_double_complex z;\n      z.real = x;\n      z.imag = y;\n      return z;\n    }\n#endif\n\n/* Arithmetic */\n  #if CYTHON_CCOMPLEX\n#else\n    static CYTHON_INLINE int __Pyx_c_eq_double(__pyx_t_double_complex a, __pyx_t_double_complex b) {\n       return (a.real == b.real) && (a.imag == b.imag);\n    }\n    static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_sum_double(__pyx_t_double_complex a, __pyx_t_double_complex b) {\n        __pyx_t_double_complex z;\n        z.real = a.real + b.real;\n        z.imag = a.imag + b.imag;\n        return z;\n    }\n    static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_diff_double(__pyx_t_double_complex a, __pyx_t_double_complex b) {\n        __pyx_t_double_complex z;\n        z.real = a.real - b.real;\n        z.imag = a.imag - b.imag;\n        return z;\n    }\n    static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_prod_double(__pyx_t_double_complex a, __pyx_t_double_complex b) {\n        __pyx_t_double_complex z;\n        z.real = a.real * b.real - a.imag * b.imag;\n        z.imag = a.real * b.imag + a.imag * b.real;\n        return z;\n    }\n    #if 1\n    static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_quot_double(__pyx_t_double_complex a, __pyx_t_double_complex b) {\n        if (b.imag == 0) {\n            return __pyx_t_double_complex_from_parts(a.real / b.real, a.imag / b.real);\n        } else if (fabs(b.real) >= 
fabs(b.imag)) {\n            if (b.real == 0 && b.imag == 0) {\n                return __pyx_t_double_complex_from_parts(a.real / b.real, a.imag / b.imag);\n            } else {\n                double r = b.imag / b.real;\n                double s = (double)(1.0) / (b.real + b.imag * r);\n                return __pyx_t_double_complex_from_parts(\n                    (a.real + a.imag * r) * s, (a.imag - a.real * r) * s);\n            }\n        } else {\n            double r = b.real / b.imag;\n            double s = (double)(1.0) / (b.imag + b.real * r);\n            return __pyx_t_double_complex_from_parts(\n                (a.real * r + a.imag) * s, (a.imag * r - a.real) * s);\n        }\n    }\n    #else\n    static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_quot_double(__pyx_t_double_complex a, __pyx_t_double_complex b) {\n        if (b.imag == 0) {\n            return __pyx_t_double_complex_from_parts(a.real / b.real, a.imag / b.real);\n        } else {\n            double denom = b.real * b.real + b.imag * b.imag;\n            return __pyx_t_double_complex_from_parts(\n                (a.real * b.real + a.imag * b.imag) / denom,\n                (a.imag * b.real - a.real * b.imag) / denom);\n        }\n    }\n    #endif\n    static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_neg_double(__pyx_t_double_complex a) {\n        __pyx_t_double_complex z;\n        z.real = -a.real;\n        z.imag = -a.imag;\n        return z;\n    }\n    static CYTHON_INLINE int __Pyx_c_is_zero_double(__pyx_t_double_complex a) {\n       return (a.real == 0) && (a.imag == 0);\n    }\n    static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_conj_double(__pyx_t_double_complex a) {\n        __pyx_t_double_complex z;\n        z.real =  a.real;\n        z.imag = -a.imag;\n        return z;\n    }\n    #if 1\n        static CYTHON_INLINE double __Pyx_c_abs_double(__pyx_t_double_complex z) {\n          #if !defined(HAVE_HYPOT) || defined(_MSC_VER)\n            return sqrt(z.real*z.real + 
z.imag*z.imag);\n          #else\n            return hypot(z.real, z.imag);\n          #endif\n        }\n        static CYTHON_INLINE __pyx_t_double_complex __Pyx_c_pow_double(__pyx_t_double_complex a, __pyx_t_double_complex b) {\n            __pyx_t_double_complex z;\n            double r, lnr, theta, z_r, z_theta;\n            if (b.imag == 0 && b.real == (int)b.real) {\n                if (b.real < 0) {\n                    double denom = a.real * a.real + a.imag * a.imag;\n                    a.real = a.real / denom;\n                    a.imag = -a.imag / denom;\n                    b.real = -b.real;\n                }\n                switch ((int)b.real) {\n                    case 0:\n                        z.real = 1;\n                        z.imag = 0;\n                        return z;\n                    case 1:\n                        return a;\n                    case 2:\n                        z = __Pyx_c_prod_double(a, a);\n                        return __Pyx_c_prod_double(a, a);\n                    case 3:\n                        z = __Pyx_c_prod_double(a, a);\n                        return __Pyx_c_prod_double(z, a);\n                    case 4:\n                        z = __Pyx_c_prod_double(a, a);\n                        return __Pyx_c_prod_double(z, z);\n                }\n            }\n            if (a.imag == 0) {\n                if (a.real == 0) {\n                    return a;\n                } else if (b.imag == 0) {\n                    z.real = pow(a.real, b.real);\n                    z.imag = 0;\n                    return z;\n                } else if (a.real > 0) {\n                    r = a.real;\n                    theta = 0;\n                } else {\n                    r = -a.real;\n                    theta = atan2(0.0, -1.0);\n                }\n            } else {\n                r = __Pyx_c_abs_double(a);\n                theta = atan2(a.imag, a.real);\n            }\n            lnr = log(r);\n            
z_r = exp(lnr * b.real - theta * b.imag);\n            z_theta = theta * b.real + lnr * b.imag;\n            z.real = z_r * cos(z_theta);\n            z.imag = z_r * sin(z_theta);\n            return z;\n        }\n    #endif\n#endif\n\n/* CIntToPy */\n  static CYTHON_INLINE PyObject* __Pyx_PyInt_From_int(int value) {\n    const int neg_one = (int) ((int) 0 - (int) 1), const_zero = (int) 0;\n    const int is_unsigned = neg_one > const_zero;\n    if (is_unsigned) {\n        if (sizeof(int) < sizeof(long)) {\n            return PyInt_FromLong((long) value);\n        } else if (sizeof(int) <= sizeof(unsigned long)) {\n            return PyLong_FromUnsignedLong((unsigned long) value);\n#ifdef HAVE_LONG_LONG\n        } else if (sizeof(int) <= sizeof(unsigned PY_LONG_LONG)) {\n            return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value);\n#endif\n        }\n    } else {\n        if (sizeof(int) <= sizeof(long)) {\n            return PyInt_FromLong((long) value);\n#ifdef HAVE_LONG_LONG\n        } else if (sizeof(int) <= sizeof(PY_LONG_LONG)) {\n            return PyLong_FromLongLong((PY_LONG_LONG) value);\n#endif\n        }\n    }\n    {\n        int one = 1; int little = (int)*(unsigned char *)&one;\n        unsigned char *bytes = (unsigned char *)&value;\n        return _PyLong_FromByteArray(bytes, sizeof(int),\n                                     little, !is_unsigned);\n    }\n}\n\n/* CIntToPy */\n  static CYTHON_INLINE PyObject* __Pyx_PyInt_From_enum__NPY_TYPES(enum NPY_TYPES value) {\n    const enum NPY_TYPES neg_one = (enum NPY_TYPES) ((enum NPY_TYPES) 0 - (enum NPY_TYPES) 1), const_zero = (enum NPY_TYPES) 0;\n    const int is_unsigned = neg_one > const_zero;\n    if (is_unsigned) {\n        if (sizeof(enum NPY_TYPES) < sizeof(long)) {\n            return PyInt_FromLong((long) value);\n        } else if (sizeof(enum NPY_TYPES) <= sizeof(unsigned long)) {\n            return PyLong_FromUnsignedLong((unsigned long) value);\n#ifdef HAVE_LONG_LONG\n    
    } else if (sizeof(enum NPY_TYPES) <= sizeof(unsigned PY_LONG_LONG)) {\n            return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG) value);\n#endif\n        }\n    } else {\n        if (sizeof(enum NPY_TYPES) <= sizeof(long)) {\n            return PyInt_FromLong((long) value);\n#ifdef HAVE_LONG_LONG\n        } else if (sizeof(enum NPY_TYPES) <= sizeof(PY_LONG_LONG)) {\n            return PyLong_FromLongLong((PY_LONG_LONG) value);\n#endif\n        }\n    }\n    {\n        int one = 1; int little = (int)*(unsigned char *)&one;\n        unsigned char *bytes = (unsigned char *)&value;\n        return _PyLong_FromByteArray(bytes, sizeof(enum NPY_TYPES),\n                                     little, !is_unsigned);\n    }\n}\n\n/* CIntFromPy */\n  static CYTHON_INLINE siz __Pyx_PyInt_As_siz(PyObject *x) {\n    const siz neg_one = (siz) ((siz) 0 - (siz) 1), const_zero = (siz) 0;\n    const int is_unsigned = neg_one > const_zero;\n#if PY_MAJOR_VERSION < 3\n    if (likely(PyInt_Check(x))) {\n        if (sizeof(siz) < sizeof(long)) {\n            __PYX_VERIFY_RETURN_INT(siz, long, PyInt_AS_LONG(x))\n        } else {\n            long val = PyInt_AS_LONG(x);\n            if (is_unsigned && unlikely(val < 0)) {\n                goto raise_neg_overflow;\n            }\n            return (siz) val;\n        }\n    } else\n#endif\n    if (likely(PyLong_Check(x))) {\n        if (is_unsigned) {\n#if CYTHON_USE_PYLONG_INTERNALS\n            const digit* digits = ((PyLongObject*)x)->ob_digit;\n            switch (Py_SIZE(x)) {\n                case  0: return (siz) 0;\n                case  1: __PYX_VERIFY_RETURN_INT(siz, digit, digits[0])\n                case 2:\n                    if (8 * sizeof(siz) > 1 * PyLong_SHIFT) {\n                        if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) {\n                            __PYX_VERIFY_RETURN_INT(siz, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))\n                     
   } else if (8 * sizeof(siz) >= 2 * PyLong_SHIFT) {\n                            return (siz) (((((siz)digits[1]) << PyLong_SHIFT) | (siz)digits[0]));\n                        }\n                    }\n                    break;\n                case 3:\n                    if (8 * sizeof(siz) > 2 * PyLong_SHIFT) {\n                        if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) {\n                            __PYX_VERIFY_RETURN_INT(siz, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))\n                        } else if (8 * sizeof(siz) >= 3 * PyLong_SHIFT) {\n                            return (siz) (((((((siz)digits[2]) << PyLong_SHIFT) | (siz)digits[1]) << PyLong_SHIFT) | (siz)digits[0]));\n                        }\n                    }\n                    break;\n                case 4:\n                    if (8 * sizeof(siz) > 3 * PyLong_SHIFT) {\n                        if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) {\n                            __PYX_VERIFY_RETURN_INT(siz, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))\n                        } else if (8 * sizeof(siz) >= 4 * PyLong_SHIFT) {\n                            return (siz) (((((((((siz)digits[3]) << PyLong_SHIFT) | (siz)digits[2]) << PyLong_SHIFT) | (siz)digits[1]) << PyLong_SHIFT) | (siz)digits[0]));\n                        }\n                    }\n                    break;\n            }\n#endif\n#if CYTHON_COMPILING_IN_CPYTHON\n            if (unlikely(Py_SIZE(x) < 0)) {\n                goto raise_neg_overflow;\n            }\n#else\n            {\n                int result = PyObject_RichCompareBool(x, Py_False, Py_LT);\n                if (unlikely(result < 0))\n                    return (siz) -1;\n                if (unlikely(result == 1))\n  
                  goto raise_neg_overflow;\n            }\n#endif\n            if (sizeof(siz) <= sizeof(unsigned long)) {\n                __PYX_VERIFY_RETURN_INT_EXC(siz, unsigned long, PyLong_AsUnsignedLong(x))\n#ifdef HAVE_LONG_LONG\n            } else if (sizeof(siz) <= sizeof(unsigned PY_LONG_LONG)) {\n                __PYX_VERIFY_RETURN_INT_EXC(siz, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x))\n#endif\n            }\n        } else {\n#if CYTHON_USE_PYLONG_INTERNALS\n            const digit* digits = ((PyLongObject*)x)->ob_digit;\n            switch (Py_SIZE(x)) {\n                case  0: return (siz) 0;\n                case -1: __PYX_VERIFY_RETURN_INT(siz, sdigit, (sdigit) (-(sdigit)digits[0]))\n                case  1: __PYX_VERIFY_RETURN_INT(siz,  digit, +digits[0])\n                case -2:\n                    if (8 * sizeof(siz) - 1 > 1 * PyLong_SHIFT) {\n                        if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) {\n                            __PYX_VERIFY_RETURN_INT(siz, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))\n                        } else if (8 * sizeof(siz) - 1 > 2 * PyLong_SHIFT) {\n                            return (siz) (((siz)-1)*(((((siz)digits[1]) << PyLong_SHIFT) | (siz)digits[0])));\n                        }\n                    }\n                    break;\n                case 2:\n                    if (8 * sizeof(siz) > 1 * PyLong_SHIFT) {\n                        if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) {\n                            __PYX_VERIFY_RETURN_INT(siz, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))\n                        } else if (8 * sizeof(siz) - 1 > 2 * PyLong_SHIFT) {\n                            return (siz) ((((((siz)digits[1]) << PyLong_SHIFT) | (siz)digits[0])));\n                        }\n                    }\n                    break;\n                case -3:\n                  
  if (8 * sizeof(siz) - 1 > 2 * PyLong_SHIFT) {\n                        if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) {\n                            __PYX_VERIFY_RETURN_INT(siz, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))\n                        } else if (8 * sizeof(siz) - 1 > 3 * PyLong_SHIFT) {\n                            return (siz) (((siz)-1)*(((((((siz)digits[2]) << PyLong_SHIFT) | (siz)digits[1]) << PyLong_SHIFT) | (siz)digits[0])));\n                        }\n                    }\n                    break;\n                case 3:\n                    if (8 * sizeof(siz) > 2 * PyLong_SHIFT) {\n                        if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) {\n                            __PYX_VERIFY_RETURN_INT(siz, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))\n                        } else if (8 * sizeof(siz) - 1 > 3 * PyLong_SHIFT) {\n                            return (siz) ((((((((siz)digits[2]) << PyLong_SHIFT) | (siz)digits[1]) << PyLong_SHIFT) | (siz)digits[0])));\n                        }\n                    }\n                    break;\n                case -4:\n                    if (8 * sizeof(siz) - 1 > 3 * PyLong_SHIFT) {\n                        if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) {\n                            __PYX_VERIFY_RETURN_INT(siz, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))\n                        } else if (8 * sizeof(siz) - 1 > 4 * PyLong_SHIFT) {\n                            return (siz) (((siz)-1)*(((((((((siz)digits[3]) << PyLong_SHIFT) | (siz)digits[2]) << PyLong_SHIFT) | (siz)digits[1]) << PyLong_SHIFT) | (siz)digits[0])));\n                        }\n               
     }\n                    break;\n                case 4:\n                    if (8 * sizeof(siz) > 3 * PyLong_SHIFT) {\n                        if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) {\n                            __PYX_VERIFY_RETURN_INT(siz, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))\n                        } else if (8 * sizeof(siz) - 1 > 4 * PyLong_SHIFT) {\n                            return (siz) ((((((((((siz)digits[3]) << PyLong_SHIFT) | (siz)digits[2]) << PyLong_SHIFT) | (siz)digits[1]) << PyLong_SHIFT) | (siz)digits[0])));\n                        }\n                    }\n                    break;\n            }\n#endif\n            if (sizeof(siz) <= sizeof(long)) {\n                __PYX_VERIFY_RETURN_INT_EXC(siz, long, PyLong_AsLong(x))\n#ifdef HAVE_LONG_LONG\n            } else if (sizeof(siz) <= sizeof(PY_LONG_LONG)) {\n                __PYX_VERIFY_RETURN_INT_EXC(siz, PY_LONG_LONG, PyLong_AsLongLong(x))\n#endif\n            }\n        }\n        {\n#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray)\n            PyErr_SetString(PyExc_RuntimeError,\n                            \"_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers\");\n#else\n            siz val;\n            PyObject *v = __Pyx_PyNumber_IntOrLong(x);\n #if PY_MAJOR_VERSION < 3\n            if (likely(v) && !PyLong_Check(v)) {\n                PyObject *tmp = v;\n                v = PyNumber_Long(tmp);\n                Py_DECREF(tmp);\n            }\n #endif\n            if (likely(v)) {\n                int one = 1; int is_little = (int)*(unsigned char *)&one;\n                unsigned char *bytes = (unsigned char *)&val;\n                int ret = _PyLong_AsByteArray((PyLongObject *)v,\n                                              bytes, sizeof(val),\n                                            
  is_little, !is_unsigned);\n                Py_DECREF(v);\n                if (likely(!ret))\n                    return val;\n            }\n#endif\n            return (siz) -1;\n        }\n    } else {\n        siz val;\n        PyObject *tmp = __Pyx_PyNumber_IntOrLong(x);\n        if (!tmp) return (siz) -1;\n        val = __Pyx_PyInt_As_siz(tmp);\n        Py_DECREF(tmp);\n        return val;\n    }\nraise_overflow:\n    PyErr_SetString(PyExc_OverflowError,\n        \"value too large to convert to siz\");\n    return (siz) -1;\nraise_neg_overflow:\n    PyErr_SetString(PyExc_OverflowError,\n        \"can't convert negative value to siz\");\n    return (siz) -1;\n}\n\n/* CIntFromPy */\n  static CYTHON_INLINE size_t __Pyx_PyInt_As_size_t(PyObject *x) {\n    const size_t neg_one = (size_t) ((size_t) 0 - (size_t) 1), const_zero = (size_t) 0;\n    const int is_unsigned = neg_one > const_zero;\n#if PY_MAJOR_VERSION < 3\n    if (likely(PyInt_Check(x))) {\n        if (sizeof(size_t) < sizeof(long)) {\n            __PYX_VERIFY_RETURN_INT(size_t, long, PyInt_AS_LONG(x))\n        } else {\n            long val = PyInt_AS_LONG(x);\n            if (is_unsigned && unlikely(val < 0)) {\n                goto raise_neg_overflow;\n            }\n            return (size_t) val;\n        }\n    } else\n#endif\n    if (likely(PyLong_Check(x))) {\n        if (is_unsigned) {\n#if CYTHON_USE_PYLONG_INTERNALS\n            const digit* digits = ((PyLongObject*)x)->ob_digit;\n            switch (Py_SIZE(x)) {\n                case  0: return (size_t) 0;\n                case  1: __PYX_VERIFY_RETURN_INT(size_t, digit, digits[0])\n                case 2:\n                    if (8 * sizeof(size_t) > 1 * PyLong_SHIFT) {\n                        if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) {\n                            __PYX_VERIFY_RETURN_INT(size_t, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))\n                        } else if (8 * 
sizeof(size_t) >= 2 * PyLong_SHIFT) {\n                            return (size_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]));\n                        }\n                    }\n                    break;\n                case 3:\n                    if (8 * sizeof(size_t) > 2 * PyLong_SHIFT) {\n                        if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) {\n                            __PYX_VERIFY_RETURN_INT(size_t, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))\n                        } else if (8 * sizeof(size_t) >= 3 * PyLong_SHIFT) {\n                            return (size_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]));\n                        }\n                    }\n                    break;\n                case 4:\n                    if (8 * sizeof(size_t) > 3 * PyLong_SHIFT) {\n                        if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) {\n                            __PYX_VERIFY_RETURN_INT(size_t, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))\n                        } else if (8 * sizeof(size_t) >= 4 * PyLong_SHIFT) {\n                            return (size_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]));\n                        }\n                    }\n                    break;\n            }\n#endif\n#if CYTHON_COMPILING_IN_CPYTHON\n            if (unlikely(Py_SIZE(x) < 0)) {\n                goto raise_neg_overflow;\n            }\n#else\n            {\n                int result = PyObject_RichCompareBool(x, Py_False, Py_LT);\n                if (unlikely(result < 0))\n                    return (size_t) -1;\n    
            if (unlikely(result == 1))\n                    goto raise_neg_overflow;\n            }\n#endif\n            if (sizeof(size_t) <= sizeof(unsigned long)) {\n                __PYX_VERIFY_RETURN_INT_EXC(size_t, unsigned long, PyLong_AsUnsignedLong(x))\n#ifdef HAVE_LONG_LONG\n            } else if (sizeof(size_t) <= sizeof(unsigned PY_LONG_LONG)) {\n                __PYX_VERIFY_RETURN_INT_EXC(size_t, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x))\n#endif\n            }\n        } else {\n#if CYTHON_USE_PYLONG_INTERNALS\n            const digit* digits = ((PyLongObject*)x)->ob_digit;\n            switch (Py_SIZE(x)) {\n                case  0: return (size_t) 0;\n                case -1: __PYX_VERIFY_RETURN_INT(size_t, sdigit, (sdigit) (-(sdigit)digits[0]))\n                case  1: __PYX_VERIFY_RETURN_INT(size_t,  digit, +digits[0])\n                case -2:\n                    if (8 * sizeof(size_t) - 1 > 1 * PyLong_SHIFT) {\n                        if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) {\n                            __PYX_VERIFY_RETURN_INT(size_t, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))\n                        } else if (8 * sizeof(size_t) - 1 > 2 * PyLong_SHIFT) {\n                            return (size_t) (((size_t)-1)*(((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])));\n                        }\n                    }\n                    break;\n                case 2:\n                    if (8 * sizeof(size_t) > 1 * PyLong_SHIFT) {\n                        if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) {\n                            __PYX_VERIFY_RETURN_INT(size_t, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))\n                        } else if (8 * sizeof(size_t) - 1 > 2 * PyLong_SHIFT) {\n                            return (size_t) ((((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])));\n                    
    }\n                    }\n                    break;\n                case -3:\n                    if (8 * sizeof(size_t) - 1 > 2 * PyLong_SHIFT) {\n                        if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) {\n                            __PYX_VERIFY_RETURN_INT(size_t, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))\n                        } else if (8 * sizeof(size_t) - 1 > 3 * PyLong_SHIFT) {\n                            return (size_t) (((size_t)-1)*(((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])));\n                        }\n                    }\n                    break;\n                case 3:\n                    if (8 * sizeof(size_t) > 2 * PyLong_SHIFT) {\n                        if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) {\n                            __PYX_VERIFY_RETURN_INT(size_t, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))\n                        } else if (8 * sizeof(size_t) - 1 > 3 * PyLong_SHIFT) {\n                            return (size_t) ((((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])));\n                        }\n                    }\n                    break;\n                case -4:\n                    if (8 * sizeof(size_t) - 1 > 3 * PyLong_SHIFT) {\n                        if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) {\n                            __PYX_VERIFY_RETURN_INT(size_t, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))\n                        } else if (8 * sizeof(size_t) - 1 > 4 * PyLong_SHIFT) {\n                            return (size_t) 
(((size_t)-1)*(((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])));\n                        }\n                    }\n                    break;\n                case 4:\n                    if (8 * sizeof(size_t) > 3 * PyLong_SHIFT) {\n                        if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) {\n                            __PYX_VERIFY_RETURN_INT(size_t, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))\n                        } else if (8 * sizeof(size_t) - 1 > 4 * PyLong_SHIFT) {\n                            return (size_t) ((((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0])));\n                        }\n                    }\n                    break;\n            }\n#endif\n            if (sizeof(size_t) <= sizeof(long)) {\n                __PYX_VERIFY_RETURN_INT_EXC(size_t, long, PyLong_AsLong(x))\n#ifdef HAVE_LONG_LONG\n            } else if (sizeof(size_t) <= sizeof(PY_LONG_LONG)) {\n                __PYX_VERIFY_RETURN_INT_EXC(size_t, PY_LONG_LONG, PyLong_AsLongLong(x))\n#endif\n            }\n        }\n        {\n#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray)\n            PyErr_SetString(PyExc_RuntimeError,\n                            \"_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers\");\n#else\n            size_t val;\n            PyObject *v = __Pyx_PyNumber_IntOrLong(x);\n #if PY_MAJOR_VERSION < 3\n            if (likely(v) && !PyLong_Check(v)) {\n                PyObject *tmp = v;\n                v = PyNumber_Long(tmp);\n                Py_DECREF(tmp);\n            }\n #endif\n            if (likely(v)) {\n                int one = 1; int is_little = (int)*(unsigned char 
*)&one;\n                unsigned char *bytes = (unsigned char *)&val;\n                int ret = _PyLong_AsByteArray((PyLongObject *)v,\n                                              bytes, sizeof(val),\n                                              is_little, !is_unsigned);\n                Py_DECREF(v);\n                if (likely(!ret))\n                    return val;\n            }\n#endif\n            return (size_t) -1;\n        }\n    } else {\n        size_t val;\n        PyObject *tmp = __Pyx_PyNumber_IntOrLong(x);\n        if (!tmp) return (size_t) -1;\n        val = __Pyx_PyInt_As_size_t(tmp);\n        Py_DECREF(tmp);\n        return val;\n    }\nraise_overflow:\n    PyErr_SetString(PyExc_OverflowError,\n        \"value too large to convert to size_t\");\n    return (size_t) -1;\nraise_neg_overflow:\n    PyErr_SetString(PyExc_OverflowError,\n        \"can't convert negative value to size_t\");\n    return (size_t) -1;\n}\n\n/* CIntFromPy */\n  static CYTHON_INLINE int __Pyx_PyInt_As_int(PyObject *x) {\n    const int neg_one = (int) ((int) 0 - (int) 1), const_zero = (int) 0;\n    const int is_unsigned = neg_one > const_zero;\n#if PY_MAJOR_VERSION < 3\n    if (likely(PyInt_Check(x))) {\n        if (sizeof(int) < sizeof(long)) {\n            __PYX_VERIFY_RETURN_INT(int, long, PyInt_AS_LONG(x))\n        } else {\n            long val = PyInt_AS_LONG(x);\n            if (is_unsigned && unlikely(val < 0)) {\n                goto raise_neg_overflow;\n            }\n            return (int) val;\n        }\n    } else\n#endif\n    if (likely(PyLong_Check(x))) {\n        if (is_unsigned) {\n#if CYTHON_USE_PYLONG_INTERNALS\n            const digit* digits = ((PyLongObject*)x)->ob_digit;\n            switch (Py_SIZE(x)) {\n                case  0: return (int) 0;\n                case  1: __PYX_VERIFY_RETURN_INT(int, digit, digits[0])\n                case 2:\n                    if (8 * sizeof(int) > 1 * PyLong_SHIFT) {\n                        if (8 * 
sizeof(unsigned long) > 2 * PyLong_SHIFT) {\n                            __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))\n                        } else if (8 * sizeof(int) >= 2 * PyLong_SHIFT) {\n                            return (int) (((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0]));\n                        }\n                    }\n                    break;\n                case 3:\n                    if (8 * sizeof(int) > 2 * PyLong_SHIFT) {\n                        if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) {\n                            __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))\n                        } else if (8 * sizeof(int) >= 3 * PyLong_SHIFT) {\n                            return (int) (((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]));\n                        }\n                    }\n                    break;\n                case 4:\n                    if (8 * sizeof(int) > 3 * PyLong_SHIFT) {\n                        if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) {\n                            __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))\n                        } else if (8 * sizeof(int) >= 4 * PyLong_SHIFT) {\n                            return (int) (((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0]));\n                        }\n                    }\n                    break;\n            }\n#endif\n#if CYTHON_COMPILING_IN_CPYTHON\n            if (unlikely(Py_SIZE(x) < 0)) {\n                goto raise_neg_overflow;\n            }\n#else\n  
          {\n                int result = PyObject_RichCompareBool(x, Py_False, Py_LT);\n                if (unlikely(result < 0))\n                    return (int) -1;\n                if (unlikely(result == 1))\n                    goto raise_neg_overflow;\n            }\n#endif\n            if (sizeof(int) <= sizeof(unsigned long)) {\n                __PYX_VERIFY_RETURN_INT_EXC(int, unsigned long, PyLong_AsUnsignedLong(x))\n#ifdef HAVE_LONG_LONG\n            } else if (sizeof(int) <= sizeof(unsigned PY_LONG_LONG)) {\n                __PYX_VERIFY_RETURN_INT_EXC(int, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x))\n#endif\n            }\n        } else {\n#if CYTHON_USE_PYLONG_INTERNALS\n            const digit* digits = ((PyLongObject*)x)->ob_digit;\n            switch (Py_SIZE(x)) {\n                case  0: return (int) 0;\n                case -1: __PYX_VERIFY_RETURN_INT(int, sdigit, (sdigit) (-(sdigit)digits[0]))\n                case  1: __PYX_VERIFY_RETURN_INT(int,  digit, +digits[0])\n                case -2:\n                    if (8 * sizeof(int) - 1 > 1 * PyLong_SHIFT) {\n                        if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) {\n                            __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))\n                        } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) {\n                            return (int) (((int)-1)*(((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0])));\n                        }\n                    }\n                    break;\n                case 2:\n                    if (8 * sizeof(int) > 1 * PyLong_SHIFT) {\n                        if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) {\n                            __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))\n                        } else if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) {\n       
                     return (int) ((((((int)digits[1]) << PyLong_SHIFT) | (int)digits[0])));\n                        }\n                    }\n                    break;\n                case -3:\n                    if (8 * sizeof(int) - 1 > 2 * PyLong_SHIFT) {\n                        if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) {\n                            __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))\n                        } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) {\n                            return (int) (((int)-1)*(((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])));\n                        }\n                    }\n                    break;\n                case 3:\n                    if (8 * sizeof(int) > 2 * PyLong_SHIFT) {\n                        if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) {\n                            __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))\n                        } else if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) {\n                            return (int) ((((((((int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])));\n                        }\n                    }\n                    break;\n                case -4:\n                    if (8 * sizeof(int) - 1 > 3 * PyLong_SHIFT) {\n                        if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) {\n                            __PYX_VERIFY_RETURN_INT(int, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))\n                        } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) {\n           
                 return (int) (((int)-1)*(((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])));\n                        }\n                    }\n                    break;\n                case 4:\n                    if (8 * sizeof(int) > 3 * PyLong_SHIFT) {\n                        if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) {\n                            __PYX_VERIFY_RETURN_INT(int, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))\n                        } else if (8 * sizeof(int) - 1 > 4 * PyLong_SHIFT) {\n                            return (int) ((((((((((int)digits[3]) << PyLong_SHIFT) | (int)digits[2]) << PyLong_SHIFT) | (int)digits[1]) << PyLong_SHIFT) | (int)digits[0])));\n                        }\n                    }\n                    break;\n            }\n#endif\n            if (sizeof(int) <= sizeof(long)) {\n                __PYX_VERIFY_RETURN_INT_EXC(int, long, PyLong_AsLong(x))\n#ifdef HAVE_LONG_LONG\n            } else if (sizeof(int) <= sizeof(PY_LONG_LONG)) {\n                __PYX_VERIFY_RETURN_INT_EXC(int, PY_LONG_LONG, PyLong_AsLongLong(x))\n#endif\n            }\n        }\n        {\n#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray)\n            PyErr_SetString(PyExc_RuntimeError,\n                            \"_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers\");\n#else\n            int val;\n            PyObject *v = __Pyx_PyNumber_IntOrLong(x);\n #if PY_MAJOR_VERSION < 3\n            if (likely(v) && !PyLong_Check(v)) {\n                PyObject *tmp = v;\n                v = PyNumber_Long(tmp);\n                Py_DECREF(tmp);\n            }\n #endif\n            if (likely(v)) {\n                int one = 1; int is_little = (int)*(unsigned char *)&one;\n                
unsigned char *bytes = (unsigned char *)&val;\n                int ret = _PyLong_AsByteArray((PyLongObject *)v,\n                                              bytes, sizeof(val),\n                                              is_little, !is_unsigned);\n                Py_DECREF(v);\n                if (likely(!ret))\n                    return val;\n            }\n#endif\n            return (int) -1;\n        }\n    } else {\n        int val;\n        PyObject *tmp = __Pyx_PyNumber_IntOrLong(x);\n        if (!tmp) return (int) -1;\n        val = __Pyx_PyInt_As_int(tmp);\n        Py_DECREF(tmp);\n        return val;\n    }\nraise_overflow:\n    PyErr_SetString(PyExc_OverflowError,\n        \"value too large to convert to int\");\n    return (int) -1;\nraise_neg_overflow:\n    PyErr_SetString(PyExc_OverflowError,\n        \"can't convert negative value to int\");\n    return (int) -1;\n}\n\n/* CIntFromPy */\n  static CYTHON_INLINE long __Pyx_PyInt_As_long(PyObject *x) {\n    const long neg_one = (long) ((long) 0 - (long) 1), const_zero = (long) 0;\n    const int is_unsigned = neg_one > const_zero;\n#if PY_MAJOR_VERSION < 3\n    if (likely(PyInt_Check(x))) {\n        if (sizeof(long) < sizeof(long)) {\n            __PYX_VERIFY_RETURN_INT(long, long, PyInt_AS_LONG(x))\n        } else {\n            long val = PyInt_AS_LONG(x);\n            if (is_unsigned && unlikely(val < 0)) {\n                goto raise_neg_overflow;\n            }\n            return (long) val;\n        }\n    } else\n#endif\n    if (likely(PyLong_Check(x))) {\n        if (is_unsigned) {\n#if CYTHON_USE_PYLONG_INTERNALS\n            const digit* digits = ((PyLongObject*)x)->ob_digit;\n            switch (Py_SIZE(x)) {\n                case  0: return (long) 0;\n                case  1: __PYX_VERIFY_RETURN_INT(long, digit, digits[0])\n                case 2:\n                    if (8 * sizeof(long) > 1 * PyLong_SHIFT) {\n                        if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) {\n 
                           __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))\n                        } else if (8 * sizeof(long) >= 2 * PyLong_SHIFT) {\n                            return (long) (((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0]));\n                        }\n                    }\n                    break;\n                case 3:\n                    if (8 * sizeof(long) > 2 * PyLong_SHIFT) {\n                        if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) {\n                            __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))\n                        } else if (8 * sizeof(long) >= 3 * PyLong_SHIFT) {\n                            return (long) (((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]));\n                        }\n                    }\n                    break;\n                case 4:\n                    if (8 * sizeof(long) > 3 * PyLong_SHIFT) {\n                        if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) {\n                            __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))\n                        } else if (8 * sizeof(long) >= 4 * PyLong_SHIFT) {\n                            return (long) (((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0]));\n                        }\n                    }\n                    break;\n            }\n#endif\n#if CYTHON_COMPILING_IN_CPYTHON\n            if (unlikely(Py_SIZE(x) < 0)) {\n                goto raise_neg_overflow;\n            }\n#else\n            {\n             
   int result = PyObject_RichCompareBool(x, Py_False, Py_LT);\n                if (unlikely(result < 0))\n                    return (long) -1;\n                if (unlikely(result == 1))\n                    goto raise_neg_overflow;\n            }\n#endif\n            if (sizeof(long) <= sizeof(unsigned long)) {\n                __PYX_VERIFY_RETURN_INT_EXC(long, unsigned long, PyLong_AsUnsignedLong(x))\n#ifdef HAVE_LONG_LONG\n            } else if (sizeof(long) <= sizeof(unsigned PY_LONG_LONG)) {\n                __PYX_VERIFY_RETURN_INT_EXC(long, unsigned PY_LONG_LONG, PyLong_AsUnsignedLongLong(x))\n#endif\n            }\n        } else {\n#if CYTHON_USE_PYLONG_INTERNALS\n            const digit* digits = ((PyLongObject*)x)->ob_digit;\n            switch (Py_SIZE(x)) {\n                case  0: return (long) 0;\n                case -1: __PYX_VERIFY_RETURN_INT(long, sdigit, (sdigit) (-(sdigit)digits[0]))\n                case  1: __PYX_VERIFY_RETURN_INT(long,  digit, +digits[0])\n                case -2:\n                    if (8 * sizeof(long) - 1 > 1 * PyLong_SHIFT) {\n                        if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) {\n                            __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))\n                        } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) {\n                            return (long) (((long)-1)*(((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0])));\n                        }\n                    }\n                    break;\n                case 2:\n                    if (8 * sizeof(long) > 1 * PyLong_SHIFT) {\n                        if (8 * sizeof(unsigned long) > 2 * PyLong_SHIFT) {\n                            __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))\n                        } else if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) {\n               
             return (long) ((((((long)digits[1]) << PyLong_SHIFT) | (long)digits[0])));\n                        }\n                    }\n                    break;\n                case -3:\n                    if (8 * sizeof(long) - 1 > 2 * PyLong_SHIFT) {\n                        if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) {\n                            __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))\n                        } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) {\n                            return (long) (((long)-1)*(((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])));\n                        }\n                    }\n                    break;\n                case 3:\n                    if (8 * sizeof(long) > 2 * PyLong_SHIFT) {\n                        if (8 * sizeof(unsigned long) > 3 * PyLong_SHIFT) {\n                            __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))\n                        } else if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) {\n                            return (long) ((((((((long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])));\n                        }\n                    }\n                    break;\n                case -4:\n                    if (8 * sizeof(long) - 1 > 3 * PyLong_SHIFT) {\n                        if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) {\n                            __PYX_VERIFY_RETURN_INT(long, long, -(long) (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))\n                        } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) 
{\n                            return (long) (((long)-1)*(((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])));\n                        }\n                    }\n                    break;\n                case 4:\n                    if (8 * sizeof(long) > 3 * PyLong_SHIFT) {\n                        if (8 * sizeof(unsigned long) > 4 * PyLong_SHIFT) {\n                            __PYX_VERIFY_RETURN_INT(long, unsigned long, (((((((((unsigned long)digits[3]) << PyLong_SHIFT) | (unsigned long)digits[2]) << PyLong_SHIFT) | (unsigned long)digits[1]) << PyLong_SHIFT) | (unsigned long)digits[0])))\n                        } else if (8 * sizeof(long) - 1 > 4 * PyLong_SHIFT) {\n                            return (long) ((((((((((long)digits[3]) << PyLong_SHIFT) | (long)digits[2]) << PyLong_SHIFT) | (long)digits[1]) << PyLong_SHIFT) | (long)digits[0])));\n                        }\n                    }\n                    break;\n            }\n#endif\n            if (sizeof(long) <= sizeof(long)) {\n                __PYX_VERIFY_RETURN_INT_EXC(long, long, PyLong_AsLong(x))\n#ifdef HAVE_LONG_LONG\n            } else if (sizeof(long) <= sizeof(PY_LONG_LONG)) {\n                __PYX_VERIFY_RETURN_INT_EXC(long, PY_LONG_LONG, PyLong_AsLongLong(x))\n#endif\n            }\n        }\n        {\n#if CYTHON_COMPILING_IN_PYPY && !defined(_PyLong_AsByteArray)\n            PyErr_SetString(PyExc_RuntimeError,\n                            \"_PyLong_AsByteArray() not available in PyPy, cannot convert large numbers\");\n#else\n            long val;\n            PyObject *v = __Pyx_PyNumber_IntOrLong(x);\n #if PY_MAJOR_VERSION < 3\n            if (likely(v) && !PyLong_Check(v)) {\n                PyObject *tmp = v;\n                v = PyNumber_Long(tmp);\n                Py_DECREF(tmp);\n            }\n #endif\n            if (likely(v)) {\n                int one = 1; int is_little = (int)*(unsigned char 
*)&one;\n                unsigned char *bytes = (unsigned char *)&val;\n                int ret = _PyLong_AsByteArray((PyLongObject *)v,\n                                              bytes, sizeof(val),\n                                              is_little, !is_unsigned);\n                Py_DECREF(v);\n                if (likely(!ret))\n                    return val;\n            }\n#endif\n            return (long) -1;\n        }\n    } else {\n        long val;\n        PyObject *tmp = __Pyx_PyNumber_IntOrLong(x);\n        if (!tmp) return (long) -1;\n        val = __Pyx_PyInt_As_long(tmp);\n        Py_DECREF(tmp);\n        return val;\n    }\nraise_overflow:\n    PyErr_SetString(PyExc_OverflowError,\n        \"value too large to convert to long\");\n    return (long) -1;\nraise_neg_overflow:\n    PyErr_SetString(PyExc_OverflowError,\n        \"can't convert negative value to long\");\n    return (long) -1;\n}\n\n/* FastTypeChecks */\n  #if CYTHON_COMPILING_IN_CPYTHON\nstatic int __Pyx_InBases(PyTypeObject *a, PyTypeObject *b) {\n    while (a) {\n        a = a->tp_base;\n        if (a == b)\n            return 1;\n    }\n    return b == &PyBaseObject_Type;\n}\nstatic CYTHON_INLINE int __Pyx_IsSubtype(PyTypeObject *a, PyTypeObject *b) {\n    PyObject *mro;\n    if (a == b) return 1;\n    mro = a->tp_mro;\n    if (likely(mro)) {\n        Py_ssize_t i, n;\n        n = PyTuple_GET_SIZE(mro);\n        for (i = 0; i < n; i++) {\n            if (PyTuple_GET_ITEM(mro, i) == (PyObject *)b)\n                return 1;\n        }\n        return 0;\n    }\n    return __Pyx_InBases(a, b);\n}\n#if PY_MAJOR_VERSION == 2\nstatic int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject* exc_type2) {\n    PyObject *exception, *value, *tb;\n    int res;\n    __Pyx_PyThreadState_declare\n    __Pyx_PyThreadState_assign\n    __Pyx_ErrFetch(&exception, &value, &tb);\n    res = exc_type1 ? 
PyObject_IsSubclass(err, exc_type1) : 0;\n    if (unlikely(res == -1)) {\n        PyErr_WriteUnraisable(err);\n        res = 0;\n    }\n    if (!res) {\n        res = PyObject_IsSubclass(err, exc_type2);\n        if (unlikely(res == -1)) {\n            PyErr_WriteUnraisable(err);\n            res = 0;\n        }\n    }\n    __Pyx_ErrRestore(exception, value, tb);\n    return res;\n}\n#else\nstatic CYTHON_INLINE int __Pyx_inner_PyErr_GivenExceptionMatches2(PyObject *err, PyObject* exc_type1, PyObject *exc_type2) {\n    int res = exc_type1 ? __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type1) : 0;\n    if (!res) {\n        res = __Pyx_IsSubtype((PyTypeObject*)err, (PyTypeObject*)exc_type2);\n    }\n    return res;\n}\n#endif\nstatic int __Pyx_PyErr_GivenExceptionMatchesTuple(PyObject *exc_type, PyObject *tuple) {\n    Py_ssize_t i, n;\n    assert(PyExceptionClass_Check(exc_type));\n    n = PyTuple_GET_SIZE(tuple);\n#if PY_MAJOR_VERSION >= 3\n    for (i=0; i<n; i++) {\n        if (exc_type == PyTuple_GET_ITEM(tuple, i)) return 1;\n    }\n#endif\n    for (i=0; i<n; i++) {\n        PyObject *t = PyTuple_GET_ITEM(tuple, i);\n        #if PY_MAJOR_VERSION < 3\n        if (likely(exc_type == t)) return 1;\n        #endif\n        if (likely(PyExceptionClass_Check(t))) {\n            if (__Pyx_inner_PyErr_GivenExceptionMatches2(exc_type, NULL, t)) return 1;\n        } else {\n        }\n    }\n    return 0;\n}\nstatic CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches(PyObject *err, PyObject* exc_type) {\n    if (likely(err == exc_type)) return 1;\n    if (likely(PyExceptionClass_Check(err))) {\n        if (likely(PyExceptionClass_Check(exc_type))) {\n            return __Pyx_inner_PyErr_GivenExceptionMatches2(err, NULL, exc_type);\n        } else if (likely(PyTuple_Check(exc_type))) {\n            return __Pyx_PyErr_GivenExceptionMatchesTuple(err, exc_type);\n        } else {\n        }\n    }\n    return PyErr_GivenExceptionMatches(err, exc_type);\n}\nstatic 
CYTHON_INLINE int __Pyx_PyErr_GivenExceptionMatches2(PyObject *err, PyObject *exc_type1, PyObject *exc_type2) {\n    assert(PyExceptionClass_Check(exc_type1));\n    assert(PyExceptionClass_Check(exc_type2));\n    if (likely(err == exc_type1 || err == exc_type2)) return 1;\n    if (likely(PyExceptionClass_Check(err))) {\n        return __Pyx_inner_PyErr_GivenExceptionMatches2(err, exc_type1, exc_type2);\n    }\n    return (PyErr_GivenExceptionMatches(err, exc_type1) || PyErr_GivenExceptionMatches(err, exc_type2));\n}\n#endif\n\n/* CheckBinaryVersion */\n  static int __Pyx_check_binary_version(void) {\n    char ctversion[4], rtversion[4];\n    PyOS_snprintf(ctversion, 4, \"%d.%d\", PY_MAJOR_VERSION, PY_MINOR_VERSION);\n    PyOS_snprintf(rtversion, 4, \"%s\", Py_GetVersion());\n    if (ctversion[0] != rtversion[0] || ctversion[2] != rtversion[2]) {\n        char message[200];\n        PyOS_snprintf(message, sizeof(message),\n                      \"compiletime version %s of module '%.100s' \"\n                      \"does not match runtime version %s\",\n                      ctversion, __Pyx_MODULE_NAME, rtversion);\n        return PyErr_WarnEx(NULL, message, 1);\n    }\n    return 0;\n}\n\n/* InitStrings */\n  static int __Pyx_InitStrings(__Pyx_StringTabEntry *t) {\n    while (t->p) {\n        #if PY_MAJOR_VERSION < 3\n        if (t->is_unicode) {\n            *t->p = PyUnicode_DecodeUTF8(t->s, t->n - 1, NULL);\n        } else if (t->intern) {\n            *t->p = PyString_InternFromString(t->s);\n        } else {\n            *t->p = PyString_FromStringAndSize(t->s, t->n - 1);\n        }\n        #else\n        if (t->is_unicode | t->is_str) {\n            if (t->intern) {\n                *t->p = PyUnicode_InternFromString(t->s);\n            } else if (t->encoding) {\n                *t->p = PyUnicode_Decode(t->s, t->n - 1, t->encoding, NULL);\n            } else {\n                *t->p = PyUnicode_FromStringAndSize(t->s, t->n - 1);\n            }\n        } 
else {\n            *t->p = PyBytes_FromStringAndSize(t->s, t->n - 1);\n        }\n        #endif\n        if (!*t->p)\n            return -1;\n        if (PyObject_Hash(*t->p) == -1)\n            return -1;\n        ++t;\n    }\n    return 0;\n}\n\nstatic CYTHON_INLINE PyObject* __Pyx_PyUnicode_FromString(const char* c_str) {\n    return __Pyx_PyUnicode_FromStringAndSize(c_str, (Py_ssize_t)strlen(c_str));\n}\nstatic CYTHON_INLINE const char* __Pyx_PyObject_AsString(PyObject* o) {\n    Py_ssize_t ignore;\n    return __Pyx_PyObject_AsStringAndSize(o, &ignore);\n}\n#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT\n#if !CYTHON_PEP393_ENABLED\nstatic const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) {\n    char* defenc_c;\n    PyObject* defenc = _PyUnicode_AsDefaultEncodedString(o, NULL);\n    if (!defenc) return NULL;\n    defenc_c = PyBytes_AS_STRING(defenc);\n#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII\n    {\n        char* end = defenc_c + PyBytes_GET_SIZE(defenc);\n        char* c;\n        for (c = defenc_c; c < end; c++) {\n            if ((unsigned char) (*c) >= 128) {\n                PyUnicode_AsASCIIString(o);\n                return NULL;\n            }\n        }\n    }\n#endif\n    *length = PyBytes_GET_SIZE(defenc);\n    return defenc_c;\n}\n#else\nstatic CYTHON_INLINE const char* __Pyx_PyUnicode_AsStringAndSize(PyObject* o, Py_ssize_t *length) {\n    if (unlikely(__Pyx_PyUnicode_READY(o) == -1)) return NULL;\n#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII\n    if (likely(PyUnicode_IS_ASCII(o))) {\n        *length = PyUnicode_GET_LENGTH(o);\n        return PyUnicode_AsUTF8(o);\n    } else {\n        PyUnicode_AsASCIIString(o);\n        return NULL;\n    }\n#else\n    return PyUnicode_AsUTF8AndSize(o, length);\n#endif\n}\n#endif\n#endif\nstatic CYTHON_INLINE const char* __Pyx_PyObject_AsStringAndSize(PyObject* o, Py_ssize_t *length) {\n#if __PYX_DEFAULT_STRING_ENCODING_IS_ASCII || 
__PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT\n    if (\n#if PY_MAJOR_VERSION < 3 && __PYX_DEFAULT_STRING_ENCODING_IS_ASCII\n            __Pyx_sys_getdefaultencoding_not_ascii &&\n#endif\n            PyUnicode_Check(o)) {\n        return __Pyx_PyUnicode_AsStringAndSize(o, length);\n    } else\n#endif\n#if (!CYTHON_COMPILING_IN_PYPY) || (defined(PyByteArray_AS_STRING) && defined(PyByteArray_GET_SIZE))\n    if (PyByteArray_Check(o)) {\n        *length = PyByteArray_GET_SIZE(o);\n        return PyByteArray_AS_STRING(o);\n    } else\n#endif\n    {\n        char* result;\n        int r = PyBytes_AsStringAndSize(o, &result, length);\n        if (unlikely(r < 0)) {\n            return NULL;\n        } else {\n            return result;\n        }\n    }\n}\nstatic CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject* x) {\n   int is_true = x == Py_True;\n   if (is_true | (x == Py_False) | (x == Py_None)) return is_true;\n   else return PyObject_IsTrue(x);\n}\nstatic CYTHON_INLINE int __Pyx_PyObject_IsTrueAndDecref(PyObject* x) {\n    int retval;\n    if (unlikely(!x)) return -1;\n    retval = __Pyx_PyObject_IsTrue(x);\n    Py_DECREF(x);\n    return retval;\n}\nstatic PyObject* __Pyx_PyNumber_IntOrLongWrongResultType(PyObject* result, const char* type_name) {\n#if PY_MAJOR_VERSION >= 3\n    if (PyLong_Check(result)) {\n        if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1,\n                \"__int__ returned non-int (type %.200s).  
\"\n                \"The ability to return an instance of a strict subclass of int \"\n                \"is deprecated, and may be removed in a future version of Python.\",\n                Py_TYPE(result)->tp_name)) {\n            Py_DECREF(result);\n            return NULL;\n        }\n        return result;\n    }\n#endif\n    PyErr_Format(PyExc_TypeError,\n                 \"__%.4s__ returned non-%.4s (type %.200s)\",\n                 type_name, type_name, Py_TYPE(result)->tp_name);\n    Py_DECREF(result);\n    return NULL;\n}\nstatic CYTHON_INLINE PyObject* __Pyx_PyNumber_IntOrLong(PyObject* x) {\n#if CYTHON_USE_TYPE_SLOTS\n  PyNumberMethods *m;\n#endif\n  const char *name = NULL;\n  PyObject *res = NULL;\n#if PY_MAJOR_VERSION < 3\n  if (likely(PyInt_Check(x) || PyLong_Check(x)))\n#else\n  if (likely(PyLong_Check(x)))\n#endif\n    return __Pyx_NewRef(x);\n#if CYTHON_USE_TYPE_SLOTS\n  m = Py_TYPE(x)->tp_as_number;\n  #if PY_MAJOR_VERSION < 3\n  if (m && m->nb_int) {\n    name = \"int\";\n    res = m->nb_int(x);\n  }\n  else if (m && m->nb_long) {\n    name = \"long\";\n    res = m->nb_long(x);\n  }\n  #else\n  if (likely(m && m->nb_int)) {\n    name = \"int\";\n    res = m->nb_int(x);\n  }\n  #endif\n#else\n  if (!PyBytes_CheckExact(x) && !PyUnicode_CheckExact(x)) {\n    res = PyNumber_Int(x);\n  }\n#endif\n  if (likely(res)) {\n#if PY_MAJOR_VERSION < 3\n    if (unlikely(!PyInt_Check(res) && !PyLong_Check(res))) {\n#else\n    if (unlikely(!PyLong_CheckExact(res))) {\n#endif\n        return __Pyx_PyNumber_IntOrLongWrongResultType(res, name);\n    }\n  }\n  else if (!PyErr_Occurred()) {\n    PyErr_SetString(PyExc_TypeError,\n                    \"an integer is required\");\n  }\n  return res;\n}\nstatic CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) {\n  Py_ssize_t ival;\n  PyObject *x;\n#if PY_MAJOR_VERSION < 3\n  if (likely(PyInt_CheckExact(b))) {\n    if (sizeof(Py_ssize_t) >= sizeof(long))\n        return PyInt_AS_LONG(b);\n    else\n        
return PyInt_AsSsize_t(b);\n  }\n#endif\n  if (likely(PyLong_CheckExact(b))) {\n    #if CYTHON_USE_PYLONG_INTERNALS\n    const digit* digits = ((PyLongObject*)b)->ob_digit;\n    const Py_ssize_t size = Py_SIZE(b);\n    if (likely(__Pyx_sst_abs(size) <= 1)) {\n        ival = likely(size) ? digits[0] : 0;\n        if (size == -1) ival = -ival;\n        return ival;\n    } else {\n      switch (size) {\n         case 2:\n           if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) {\n             return (Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]));\n           }\n           break;\n         case -2:\n           if (8 * sizeof(Py_ssize_t) > 2 * PyLong_SHIFT) {\n             return -(Py_ssize_t) (((((size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]));\n           }\n           break;\n         case 3:\n           if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) {\n             return (Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]));\n           }\n           break;\n         case -3:\n           if (8 * sizeof(Py_ssize_t) > 3 * PyLong_SHIFT) {\n             return -(Py_ssize_t) (((((((size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]));\n           }\n           break;\n         case 4:\n           if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) {\n             return (Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]));\n           }\n           break;\n         case -4:\n           if (8 * sizeof(Py_ssize_t) > 4 * PyLong_SHIFT) {\n             return -(Py_ssize_t) (((((((((size_t)digits[3]) << PyLong_SHIFT) | (size_t)digits[2]) << PyLong_SHIFT) | (size_t)digits[1]) << PyLong_SHIFT) | (size_t)digits[0]));\n           }\n           break;\n      }\n    }\n    #endif\n    return PyLong_AsSsize_t(b);\n  }\n  x = PyNumber_Index(b);\n  
if (!x) return -1;\n  ival = PyInt_AsSsize_t(x);\n  Py_DECREF(x);\n  return ival;\n}\nstatic CYTHON_INLINE PyObject * __Pyx_PyBool_FromLong(long b) {\n  return b ? __Pyx_NewRef(Py_True) : __Pyx_NewRef(Py_False);\n}\nstatic CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) {\n    return PyInt_FromSize_t(ival);\n}\n\n\n#endif /* Py_PYTHON_H */\n"
  },
  {
    "path": "PythonAPI/pysobatools/_mask.pyx",
    "content": "# distutils: language = c\n# distutils: sources = ../common/maskApi.c\n\n#**************************************************************************\n# Microsoft COCO Toolbox.      version 2.0\n# Data, paper, and tutorials available at:  http://mscoco.org/\n# Code written by Piotr Dollar and Tsung-Yi Lin, 2015.\n# Licensed under the Simplified BSD License [see coco/license.txt]\n#**************************************************************************\n\n__author__ = 'tsungyi'\n\nimport sys\nPYTHON_VERSION = sys.version_info[0]\n\n# import both Python-level and C-level symbols of Numpy\n# the API uses Numpy to interface C and Python\nimport numpy as np\ncimport numpy as np\nfrom libc.stdlib cimport malloc, free\n\n# intialized Numpy. must do.\nnp.import_array()\n\n# import numpy C function\n# we use PyArray_ENABLEFLAGS to make Numpy ndarray responsible to memoery management\ncdef extern from \"numpy/arrayobject.h\":\n    void PyArray_ENABLEFLAGS(np.ndarray arr, int flags)\n\n# Declare the prototype of the C functions in MaskApi.h\ncdef extern from \"maskApi.h\":\n    ctypedef unsigned int uint\n    ctypedef unsigned long siz\n    ctypedef unsigned char byte\n    ctypedef double* BB\n    ctypedef struct RLE:\n        siz h,\n        siz w,\n        siz m,\n        uint* cnts,\n    void rlesInit( RLE **R, siz n )\n    void rleEncode( RLE *R, const byte *M, siz h, siz w, siz n )\n    void rleDecode( const RLE *R, byte *mask, siz n )\n    void rleMerge( const RLE *R, RLE *M, siz n, int intersect )\n    void rleArea( const RLE *R, siz n, uint *a )\n    void rleIou( RLE *dt, RLE *gt, siz m, siz n, byte *iscrowd, double *o )\n    void bbIou( BB dt, BB gt, siz m, siz n, byte *iscrowd, double *o )\n    void rleToBbox( const RLE *R, BB bb, siz n )\n    void rleFrBbox( RLE *R, const BB bb, siz h, siz w, siz n )\n    void rleFrPoly( RLE *R, const double *xy, siz k, siz h, siz w )\n    char* rleToString( const RLE *R )\n    void rleFrString( RLE *R, char *s, 
siz h, siz w )\n\n# python class to wrap RLE array in C\n# the class handles the memory allocation and deallocation\ncdef class RLEs:\n    cdef RLE *_R\n    cdef siz _n\n\n    def __cinit__(self, siz n =0):\n        rlesInit(&self._R, n)\n        self._n = n\n\n    # free the RLE array here\n    def __dealloc__(self):\n        if self._R is not NULL:\n            for i in range(self._n):\n                free(self._R[i].cnts)\n            free(self._R)\n    def __getattr__(self, key):\n        if key == 'n':\n            return self._n\n        raise AttributeError(key)\n\n# python class to wrap Mask array in C\n# the class handles the memory allocation and deallocation\ncdef class Masks:\n    cdef byte *_mask\n    cdef siz _h\n    cdef siz _w\n    cdef siz _n\n\n    def __cinit__(self, h, w, n):\n        self._mask = <byte*> malloc(h*w*n* sizeof(byte))\n        self._h = h\n        self._w = w\n        self._n = n\n    # def __dealloc__(self):\n        # the memory management of _mask has been passed to np.ndarray\n        # it doesn't need to be freed here\n\n    # called when passing into np.array() and return an np.ndarray in column-major order\n    def __array__(self):\n        cdef np.npy_intp shape[1]\n        shape[0] = <np.npy_intp> self._h*self._w*self._n\n        # Create a 1D array, and reshape it to fortran/Matlab column-major array\n        ndarray = np.PyArray_SimpleNewFromData(1, shape, np.NPY_UINT8, self._mask).reshape((self._h, self._w, self._n), order='F')\n        # The _mask allocated by Masks is now handled by ndarray\n        PyArray_ENABLEFLAGS(ndarray, np.NPY_OWNDATA)\n        return ndarray\n\n# internal conversion from Python RLEs object to compressed RLE format\ndef _toString(RLEs Rs):\n    cdef siz n = Rs.n\n    cdef bytes py_string\n    cdef char* c_string\n    objs = []\n    for i in range(n):\n        c_string = rleToString( <RLE*> &Rs._R[i] )\n        py_string = c_string\n        objs.append({\n            'size': [Rs._R[i].h, 
Rs._R[i].w],\n            'counts': py_string\n        })\n        free(c_string)\n    return objs\n\n# internal conversion from compressed RLE format to Python RLEs object\ndef _frString(rleObjs):\n    cdef siz n = len(rleObjs)\n    Rs = RLEs(n)\n    cdef bytes py_string\n    cdef char* c_string\n    for i, obj in enumerate(rleObjs):\n        if PYTHON_VERSION == 2:\n            py_string = str(obj['counts']).encode('utf8')\n        elif PYTHON_VERSION == 3:\n            py_string = str.encode(obj['counts']) if type(obj['counts']) == str else obj['counts']\n        else:\n            raise Exception('Python version must be 2 or 3')\n        c_string = py_string\n        rleFrString( <RLE*> &Rs._R[i], <char*> c_string, obj['size'][0], obj['size'][1] )\n    return Rs\n\n# encode mask to RLEs objects\n# list of RLE string can be generated by RLEs member function\ndef encode(np.ndarray[np.uint8_t, ndim=3, mode='fortran'] mask):\n    h, w, n = mask.shape[0], mask.shape[1], mask.shape[2]\n    cdef RLEs Rs = RLEs(n)\n    rleEncode(Rs._R,<byte*>mask.data,h,w,n)\n    objs = _toString(Rs)\n    return objs\n\n# decode mask from compressed list of RLE string or RLEs object\ndef decode(rleObjs):\n    cdef RLEs Rs = _frString(rleObjs)\n    h, w, n = Rs._R[0].h, Rs._R[0].w, Rs._n\n    masks = Masks(h, w, n)\n    rleDecode(<RLE*>Rs._R, masks._mask, n);\n    return np.array(masks)\n\ndef merge(rleObjs, intersect=0):\n    cdef RLEs Rs = _frString(rleObjs)\n    cdef RLEs R = RLEs(1)\n    rleMerge(<RLE*>Rs._R, <RLE*> R._R, <siz> Rs._n, intersect)\n    obj = _toString(R)[0]\n    return obj\n\ndef area(rleObjs):\n    cdef RLEs Rs = _frString(rleObjs)\n    cdef uint* _a = <uint*> malloc(Rs._n* sizeof(uint))\n    rleArea(Rs._R, Rs._n, _a)\n    cdef np.npy_intp shape[1]\n    shape[0] = <np.npy_intp> Rs._n\n    a = np.array((Rs._n, ), dtype=np.uint8)\n    a = np.PyArray_SimpleNewFromData(1, shape, np.NPY_UINT32, _a)\n    PyArray_ENABLEFLAGS(a, np.NPY_OWNDATA)\n    return a\n\n# iou 
computation. support function overload (RLEs-RLEs and bbox-bbox).\ndef iou( dt, gt, pyiscrowd ):\n    def _preproc(objs):\n        if len(objs) == 0:\n            return objs\n        if type(objs) == np.ndarray:\n            if len(objs.shape) == 1:\n                objs = objs.reshape((objs[0], 1))\n            # check if it's Nx4 bbox\n            if not len(objs.shape) == 2 or not objs.shape[1] == 4:\n                raise Exception('numpy ndarray input is only for *bounding boxes* and should have Nx4 dimension')\n            objs = objs.astype(np.double)\n        elif type(objs) == list:\n            # check if list is in box format and convert it to np.ndarray\n            isbox = np.all(np.array([(len(obj)==4) and ((type(obj)==list) or (type(obj)==np.ndarray)) for obj in objs]))\n            isrle = np.all(np.array([type(obj) == dict for obj in objs]))\n            if isbox:\n                objs = np.array(objs, dtype=np.double)\n                if len(objs.shape) == 1:\n                    objs = objs.reshape((1,objs.shape[0]))\n            elif isrle:\n                objs = _frString(objs)\n            else:\n                raise Exception('list input can be bounding box (Nx4) or RLEs ([RLE])')\n        else:\n            raise Exception('unrecognized type.  
The following type: RLEs (rle), np.ndarray (box), and list (box) are supported.')\n        return objs\n    def _rleIou(RLEs dt, RLEs gt, np.ndarray[np.uint8_t, ndim=1] iscrowd, siz m, siz n, np.ndarray[np.double_t,  ndim=1] _iou):\n        rleIou( <RLE*> dt._R, <RLE*> gt._R, m, n, <byte*> iscrowd.data, <double*> _iou.data )\n    def _bbIou(np.ndarray[np.double_t, ndim=2] dt, np.ndarray[np.double_t, ndim=2] gt, np.ndarray[np.uint8_t, ndim=1] iscrowd, siz m, siz n, np.ndarray[np.double_t, ndim=1] _iou):\n        bbIou( <BB> dt.data, <BB> gt.data, m, n, <byte*> iscrowd.data, <double*>_iou.data )\n    def _len(obj):\n        cdef siz N = 0\n        if type(obj) == RLEs:\n            N = obj.n\n        elif len(obj)==0:\n            pass\n        elif type(obj) == np.ndarray:\n            N = obj.shape[0]\n        return N\n    # convert iscrowd to numpy array\n    cdef np.ndarray[np.uint8_t, ndim=1] iscrowd = np.array(pyiscrowd, dtype=np.uint8)\n    # simple type checking\n    cdef siz m, n\n    dt = _preproc(dt)\n    gt = _preproc(gt)\n    m = _len(dt)\n    n = _len(gt)\n    if m == 0 or n == 0:\n        return []\n    if not type(dt) == type(gt):\n        raise Exception('The dt and gt should have the same data type, either RLEs, list or np.ndarray')\n\n    # define local variables\n    cdef double* _iou = <double*> 0\n    cdef np.npy_intp shape[1]\n    # check type and assign iou function\n    if type(dt) == RLEs:\n        _iouFun = _rleIou\n    elif type(dt) == np.ndarray:\n        _iouFun = _bbIou\n    else:\n        raise Exception('input data type not allowed.')\n    _iou = <double*> malloc(m*n* sizeof(double))\n    iou = np.zeros((m*n, ), dtype=np.double)\n    shape[0] = <np.npy_intp> m*n\n    iou = np.PyArray_SimpleNewFromData(1, shape, np.NPY_DOUBLE, _iou)\n    PyArray_ENABLEFLAGS(iou, np.NPY_OWNDATA)\n    _iouFun(dt, gt, iscrowd, m, n, iou)\n    return iou.reshape((m,n), order='F')\n\ndef toBbox( rleObjs ):\n    cdef RLEs Rs = _frString(rleObjs)\n    cdef 
siz n = Rs.n\n    cdef BB _bb = <BB> malloc(4*n* sizeof(double))\n    rleToBbox( <const RLE*> Rs._R, _bb, n )\n    cdef np.npy_intp shape[1]\n    shape[0] = <np.npy_intp> 4*n\n    bb = np.array((1,4*n), dtype=np.double)\n    bb = np.PyArray_SimpleNewFromData(1, shape, np.NPY_DOUBLE, _bb).reshape((n, 4))\n    PyArray_ENABLEFLAGS(bb, np.NPY_OWNDATA)\n    return bb\n\ndef frBbox(np.ndarray[np.double_t, ndim=2] bb, siz h, siz w ):\n    cdef siz n = bb.shape[0]\n    Rs = RLEs(n)\n    rleFrBbox( <RLE*> Rs._R, <const BB> bb.data, h, w, n )\n    objs = _toString(Rs)\n    return objs\n\ndef frPoly( poly, siz h, siz w ):\n    cdef np.ndarray[np.double_t, ndim=1] np_poly\n    n = len(poly)\n    Rs = RLEs(n)\n    for i, p in enumerate(poly):\n        np_poly = np.array(p, dtype=np.double, order='F')\n        rleFrPoly( <RLE*>&Rs._R[i], <const double*> np_poly.data, int(len(p)/2), h, w )\n    objs = _toString(Rs)\n    return objs\n\ndef frUncompressedRLE(ucRles, siz h, siz w):\n    cdef np.ndarray[np.uint32_t, ndim=1] cnts\n    cdef RLE R\n    cdef uint *data\n    n = len(ucRles)\n    objs = []\n    for i in range(n):\n        Rs = RLEs(1)\n        cnts = np.array(ucRles[i]['counts'], dtype=np.uint32)\n        # time for malloc can be saved here but it's fine\n        data = <uint*> malloc(len(cnts)* sizeof(uint))\n        for j in range(len(cnts)):\n            data[j] = <uint> cnts[j]\n        R = RLE(ucRles[i]['size'][0], ucRles[i]['size'][1], len(cnts), <uint*> data)\n        Rs._R[0] = R\n        objs.append(_toString(Rs)[0])\n    return objs\n\ndef frPyObjects(pyobj, h, w):\n    # encode rle from a list of python objects\n    if type(pyobj) == np.ndarray:\n        objs = frBbox(pyobj, h, w)\n    elif type(pyobj) == list and len(pyobj[0]) == 4:\n        objs = frBbox(pyobj, h, w)\n    elif type(pyobj) == list and len(pyobj[0]) > 4:\n        objs = frPoly(pyobj, h, w)\n    elif type(pyobj) == list and type(pyobj[0]) == dict \\\n        and 'counts' in pyobj[0] and 'size' in 
pyobj[0]:\n        objs = frUncompressedRLE(pyobj, h, w)\n    # encode rle from single python object\n    elif type(pyobj) == list and len(pyobj) == 4:\n        objs = frBbox([pyobj], h, w)[0]\n    elif type(pyobj) == list and len(pyobj) > 4:\n        objs = frPoly([pyobj], h, w)[0]\n    elif type(pyobj) == dict and 'counts' in pyobj and 'size' in pyobj:\n        objs = frUncompressedRLE([pyobj], h, w)[0]\n    else:\n        raise Exception('input type is not supported.')\n    return objs\n"
  },
  {
    "path": "PythonAPI/pysobatools/cocoeval.py",
    "content": "__author__ = 'tsungyi'\n\nimport numpy as np\nimport datetime\nimport time\nfrom collections import defaultdict\nfrom . import mask as maskUtils\nimport copy\n\nclass COCOeval:\n    # Interface for evaluating detection on the Microsoft COCO dataset.\n    #\n    # The usage for CocoEval is as follows:\n    #  cocoGt=..., cocoDt=...       # load dataset and results\n    #  E = CocoEval(cocoGt,cocoDt); # initialize CocoEval object\n    #  E.params.recThrs = ...;      # set parameters as desired\n    #  E.evaluate();                # run per image evaluation\n    #  E.accumulate();              # accumulate per image results\n    #  E.summarize();               # display summary metrics of results\n    # For example usage see evalDemo.m and http://mscoco.org/.\n    #\n    # The evaluation parameters are as follows (defaults in brackets):\n    #  imgIds     - [all] N img ids to use for evaluation\n    #  catIds     - [all] K cat ids to use for evaluation\n    #  iouThrs    - [.5:.05:.95] T=10 IoU thresholds for evaluation\n    #  recThrs    - [0:.01:1] R=101 recall thresholds for evaluation\n    #  areaRng    - [...] 
A=4 object area ranges for evaluation\n    #  maxDets    - [1 10 100] M=3 thresholds on max detections per image\n    #  iouType    - ['segm'] set iouType to 'segm', 'bbox' or 'keypoints'\n    #  iouType replaced the now DEPRECATED useSegm parameter.\n    #  useCats    - [1] if true use category labels for evaluation\n    # Note: if useCats=0 category labels are ignored as in proposal scoring.\n    # Note: multiple areaRngs [Ax2] and maxDets [Mx1] can be specified.\n    #\n    # evaluate(): evaluates detections on every image and every category and\n    # concats the results into the \"evalImgs\" with fields:\n    #  dtIds      - [1xD] id for each of the D detections (dt)\n    #  gtIds      - [1xG] id for each of the G ground truths (gt)\n    #  dtMatches  - [TxD] matching gt id at each IoU or 0\n    #  gtMatches  - [TxG] matching dt id at each IoU or 0\n    #  dtScores   - [1xD] confidence of each dt\n    #  gtIgnore   - [1xG] ignore flag for each gt\n    #  dtIgnore   - [TxD] ignore flag for each dt at each IoU\n    #\n    # accumulate(): accumulates the per-image, per-category evaluation\n    # results in \"evalImgs\" into the dictionary \"eval\" with fields:\n    #  params     - parameters used for evaluation\n    #  date       - date evaluation was performed\n    #  counts     - [T,R,K,A,M] parameter dimensions (see above)\n    #  precision  - [TxRxKxAxM] precision for every evaluation setting\n    #  recall     - [TxKxAxM] max recall for every evaluation setting\n    # Note: precision and recall==-1 for settings with no gt objects.\n    #\n    # See also coco, mask, pycocoDemo, pycocoEvalDemo\n    #\n    # Microsoft COCO Toolbox.      
version 2.0\n    # Data, paper, and tutorials available at:  http://mscoco.org/\n    # Code written by Piotr Dollar and Tsung-Yi Lin, 2015.\n    # Licensed under the Simplified BSD License [see coco/license.txt]\n    def __init__(self, cocoGt=None, cocoDt=None, iouType='segm'):\n        '''\n        Initialize CocoEval using coco APIs for gt and dt\n        :param cocoGt: coco object with ground truth annotations\n        :param cocoDt: coco object with detection results\n        :return: None\n        '''\n        if not iouType:\n            print('iouType not specified. use default iouType segm')\n        self.cocoGt   = cocoGt              # ground truth COCO API\n        self.cocoDt   = cocoDt              # detections COCO API\n        self.evalImgs = defaultdict(list)   # per-image per-category evaluation results [KxAxI] elements\n        self.eval     = {}                  # accumulated evaluation results\n        self._gts = defaultdict(list)       # gt for evaluation\n        self._dts = defaultdict(list)       # dt for evaluation\n        self.params = Params(iouType=iouType) # parameters\n        self._paramsEval = {}               # parameters for evaluation\n        self.stats = []                     # result summarization\n        self.ious = {}                      # ious between all gts and dts\n        if not cocoGt is None:\n            self.params.imgIds = sorted(cocoGt.getImgIds())\n            self.params.catIds = sorted(cocoGt.getCatIds())\n\n\n    def _prepare(self):\n        '''\n        Prepare ._gts and ._dts for evaluation based on params\n        :return: None\n        '''\n        def _toMask(anns, coco):\n            # modify ann['segmentation'] by reference\n            for ann in anns:\n                rle = coco.annToRLE(ann)\n                ann['segmentation'] = rle\n        p = self.params\n        if p.useCats:\n            gts=self.cocoGt.loadAnns(self.cocoGt.getAnnIds(imgIds=p.imgIds, catIds=p.catIds))\n            
dts=self.cocoDt.loadAnns(self.cocoDt.getAnnIds(imgIds=p.imgIds, catIds=p.catIds))\n        else:\n            gts=self.cocoGt.loadAnns(self.cocoGt.getAnnIds(imgIds=p.imgIds))\n            dts=self.cocoDt.loadAnns(self.cocoDt.getAnnIds(imgIds=p.imgIds))\n\n        # convert ground truth to mask if iouType == 'segm'\n        if p.iouType == 'segm':\n            _toMask(gts, self.cocoGt)\n            _toMask(dts, self.cocoDt)\n        # set ignore flag\n        for gt in gts:\n            gt['ignore'] = gt['ignore'] if 'ignore' in gt else 0\n            gt['ignore'] = 'iscrowd' in gt and gt['iscrowd']\n            if p.iouType == 'keypoints':\n                gt['ignore'] = (gt['num_keypoints'] == 0) or gt['ignore']\n        self._gts = defaultdict(list)       # gt for evaluation\n        self._dts = defaultdict(list)       # dt for evaluation\n        for gt in gts:\n            self._gts[gt['image_id'], gt['category_id']].append(gt)\n        for dt in dts:\n            self._dts[dt['image_id'], dt['category_id']].append(dt)\n        self.evalImgs = defaultdict(list)   # per-image per-category evaluation results\n        self.eval     = {}                  # accumulated evaluation results\n\n    def _prepare_asso(self):\n        '''\n        Prepare ._gts and ._dts for evaluation based on params\n        :return: None\n        '''\n        def _toMask(anns, coco):\n            # modify ann['segmentation'] by reference\n            for ann in anns:\n                rle = coco.annToRLE(ann)\n                ann['segmentation'] = rle\n        self.params.catIds = sorted(self.cocoGt.getAssoIds())\n        p = self.params\n        if p.useCats:\n            gts=self.cocoGt.loadAssoAnns(self.cocoGt.getAssoAnnIds(imgIds=p.imgIds, catIds=p.catIds))\n            dts=self.cocoDt.loadAssoAnns(self.cocoDt.getAssoAnnIds(imgIds=p.imgIds, catIds=p.catIds))\n        else:\n            gts=self.cocoGt.loadAssoAnns(self.cocoGt.getAssoAnnIds(imgIds=p.imgIds))\n            
dts=self.cocoDt.loadAssoAnns(self.cocoDt.getAssoAnnIds(imgIds=p.imgIds))\n\n        # convert ground truth to mask if iouType == 'segm'\n        if p.iouType == 'segm':\n            _toMask(gts, self.cocoGt)\n            _toMask(dts, self.cocoDt)\n        # set ignore flag\n        for gt in gts:\n            gt['ignore'] = gt['ignore'] if 'ignore' in gt else 0\n            gt['ignore'] = 'iscrowd' in gt and gt['iscrowd']\n            if p.iouType == 'keypoints':\n                gt['ignore'] = (gt['num_keypoints'] == 0) or gt['ignore']\n        self._gts = defaultdict(list)       # gt for evaluation\n        self._dts = defaultdict(list)       # dt for evaluation\n        for gt in gts:\n            self._gts[gt['image_id'], gt['category_id']].append(gt)\n        for dt in dts:\n            self._dts[dt['image_id'], dt['category_id']].append(dt)\n        self.evalImgs = defaultdict(list)   # per-image per-category evaluation results\n        self.eval     = {}                  # accumulated evaluation results\n\n    def evaluate(self):\n        '''\n        Run per image evaluation on given images and store results (a list of dict) in self.evalImgs\n        :return: None\n        '''\n        tic = time.time()\n        print('Running per image evaluation...')\n        p = self.params\n        # add backward compatibility if useSegm is specified in params\n        if not p.useSegm is None:\n            p.iouType = 'segm' if p.useSegm == 1 else 'bbox'\n            print('useSegm (deprecated) is not None. 
Running {} evaluation'.format(p.iouType))\n        print('Evaluate annotation type *{}*'.format(p.iouType))\n        p.imgIds = list(np.unique(p.imgIds))\n        if p.useCats:\n            p.catIds = list(np.unique(p.catIds))\n        p.maxDets = sorted(p.maxDets)\n        self.params=p\n\n        self._prepare()\n        # loop through images, area range, max detection number\n        catIds = p.catIds if p.useCats else [-1]\n\n        if p.iouType == 'segm' or p.iouType == 'bbox':\n            computeIoU = self.computeIoU\n        elif p.iouType == 'keypoints':\n            computeIoU = self.computeOks\n        self.ious = {(imgId, catId): computeIoU(imgId, catId) \\\n                        for imgId in p.imgIds\n                        for catId in catIds}\n\n        evaluateImg = self.evaluateImg\n        maxDet = p.maxDets[-1]\n        self.evalImgs = [evaluateImg(imgId, catId, areaRng, maxDet)\n                 for catId in catIds\n                 for areaRng in p.areaRng\n                 for imgId in p.imgIds\n             ]\n        self._paramsEval = copy.deepcopy(self.params)\n        toc = time.time()\n        print('DONE (t={:0.2f}s).'.format(toc-tic))\n\n    def evaluate_asso(self):\n        '''\n        Run per image evaluation on given images and store results (a list of dict) in self.evalImgs\n        :return: None\n        '''\n        tic = time.time()\n        print('Running per image evaluation...')\n        p = self.params\n        # add backward compatibility if useSegm is specified in params\n        if not p.useSegm is None:\n            p.iouType = 'segm' if p.useSegm == 1 else 'bbox'\n            print('useSegm (deprecated) is not None. 
Running {} evaluation'.format(p.iouType))\n        print('Evaluate annotation type *{}*'.format(p.iouType))\n        p.imgIds = list(np.unique(p.imgIds))\n        if p.useCats:\n            p.catIds = list(np.unique(p.catIds))\n        p.maxDets = sorted(p.maxDets)\n        self.params=p\n\n        self._prepare_asso()\n        # loop through images, area range, max detection number\n        catIds = p.catIds if p.useCats else [-1]\n\n        if p.iouType == 'segm' or p.iouType == 'bbox':\n            computeIoU = self.computeIoU\n        elif p.iouType == 'keypoints':\n            computeIoU = self.computeOks\n        self.ious = {(imgId, catId): computeIoU(imgId, catId) \\\n                        for imgId in p.imgIds\n                        for catId in catIds}\n\n        evaluateImg = self.evaluateImg\n        maxDet = p.maxDets[-2]\n        self.evalImgs = [evaluateImg(imgId, catId, areaRng, maxDet)\n                 for catId in catIds\n                 for areaRng in p.areaRng\n                 for imgId in p.imgIds\n             ]\n        self._paramsEval = copy.deepcopy(self.params)\n        toc = time.time()\n        print('DONE (t={:0.2f}s).'.format(toc-tic))\n\n    def computeIoU(self, imgId, catId):\n        p = self.params\n        if p.useCats:\n            gt = self._gts[imgId,catId]\n            dt = self._dts[imgId,catId]\n        else:\n            gt = [_ for cId in p.catIds for _ in self._gts[imgId,cId]]\n            dt = [_ for cId in p.catIds for _ in self._dts[imgId,cId]]\n        if len(gt) == 0 and len(dt) ==0:\n            return []\n        inds = np.argsort([-d['score'] for d in dt], kind='mergesort')\n        dt = [dt[i] for i in inds]\n        if len(dt) > p.maxDets[-1]:\n            dt=dt[0:p.maxDets[-1]]\n\n        if p.iouType == 'segm':\n            g = [g['segmentation'] for g in gt]\n            d = [d['segmentation'] for d in dt]\n        elif p.iouType == 'bbox':\n            g = [g['bbox'] for g in gt]\n            d = 
[d['bbox'] for d in dt]\n        else:\n            raise Exception('unknown iouType for iou computation')\n\n        # compute iou between each dt and gt region\n        iscrowd = [int(o['iscrowd']) for o in gt]\n        ious = maskUtils.iou(d,g,iscrowd)\n        return ious\n\n    def computeOks(self, imgId, catId):\n        p = self.params\n        # dimention here should be Nxm\n        gts = self._gts[imgId, catId]\n        dts = self._dts[imgId, catId]\n        inds = np.argsort([-d['score'] for d in dts], kind='mergesort')\n        dts = [dts[i] for i in inds]\n        if len(dts) > p.maxDets[-1]:\n            dts = dts[0:p.maxDets[-1]]\n        # if len(gts) == 0 and len(dts) == 0:\n        if len(gts) == 0 or len(dts) == 0:\n            return []\n        ious = np.zeros((len(dts), len(gts)))\n        sigmas = p.kpt_oks_sigmas\n        vars = (sigmas * 2)**2\n        k = len(sigmas)\n        # compute oks between each detection and ground truth object\n        for j, gt in enumerate(gts):\n            # create bounds for ignore regions(double the gt bbox)\n            g = np.array(gt['keypoints']) \n            # print(g)\n            xg = g[0::3]; yg = g[1::3]; vg = g[2::3]\n            k1 = np.count_nonzero(vg > 0)\n            bb = gt['bbox']\n            x0 = bb[0] - bb[2]; x1 = bb[0] + bb[2] * 2\n            y0 = bb[1] - bb[3]; y1 = bb[1] + bb[3] * 2\n            for i, dt in enumerate(dts):\n                d = np.array(dt['keypoints'])\n                # print(d)\n                xd = d[0::3]; yd = d[1::3]\n                if k1>0:\n                    # measure the per-keypoint distance if keypoints visible\n                    dx = xd - xg\n                    dy = yd - yg\n                else:\n                    # measure minimum distance to keypoints in (x0,y0) & (x1,y1)\n                    z = np.zeros((k))\n                    dx = np.max((z, x0-xd),axis=0)+np.max((z, xd-x1),axis=0)\n                    dy = np.max((z, 
y0-yd),axis=0)+np.max((z, yd-y1),axis=0)\n                # print(dx,dy,vars,gt['area'])\n                e = (dx**2 + dy**2) / vars / (gt['area']+np.spacing(1)) / 2\n                if k1 > 0:\n                    e=e[vg > 0]\n                ious[i, j] = np.sum(np.exp(-e)) / e.shape[0]\n        return ious\n\n    def evaluateImg(self, imgId, catId, aRng, maxDet):\n        '''\n        perform evaluation for single category and image\n        :return: dict (single image results)\n        '''\n        p = self.params\n        if p.useCats:\n            gt = self._gts[imgId,catId]\n            dt = self._dts[imgId,catId]\n        else:\n            gt = [_ for cId in p.catIds for _ in self._gts[imgId,cId]]\n            dt = [_ for cId in p.catIds for _ in self._dts[imgId,cId]]\n        if len(gt) == 0 and len(dt) ==0:\n            return None\n\n        for g in gt:\n            if g['ignore'] or (g['area']<aRng[0] or g['area']>aRng[1]):\n                g['_ignore'] = 1\n            else:\n                g['_ignore'] = 0\n\n        # sort dt highest score first, sort gt ignore last\n        gtind = np.argsort([g['_ignore'] for g in gt], kind='mergesort')\n        gt = [gt[i] for i in gtind]\n        dtind = np.argsort([-d['score'] for d in dt], kind='mergesort')\n        dt = [dt[i] for i in dtind[0:maxDet]]\n        iscrowd = [int(o['iscrowd']) for o in gt]\n        # load computed ious\n        ious = self.ious[imgId, catId][:, gtind] if len(self.ious[imgId, catId]) > 0 else self.ious[imgId, catId]\n\n        T = len(p.iouThrs)\n        G = len(gt)\n        D = len(dt)\n        gtm  = np.zeros((T,G))\n        dtm  = np.zeros((T,D))\n        gtIg = np.array([g['_ignore'] for g in gt])\n        dtIg = np.zeros((T,D))\n        if not len(ious)==0:\n            for tind, t in enumerate(p.iouThrs):\n                for dind, d in enumerate(dt):\n                    # information about best match so far (m=-1 -> unmatched)\n                    iou = min([t,1-1e-10])\n 
                   m   = -1\n                    for gind, g in enumerate(gt):\n                        # if this gt already matched, and not a crowd, continue\n                        if gtm[tind,gind]>0 and not iscrowd[gind]:\n                            continue\n                        # if dt matched to reg gt, and on ignore gt, stop\n                        if m>-1 and gtIg[m]==0 and gtIg[gind]==1:\n                            break\n                        # continue to next gt unless better match made\n                        if ious[dind,gind] < iou:\n                        #######HERE ADD Assotion！！！！##########\n                            # print(dind,gind)\n                            continue\n                        # if match successful and best so far, store appropriately\n                        iou=ious[dind,gind]\n                        m=gind\n                    # if match made store id of match for both dt and gt\n                    if m ==-1:\n                        continue\n                    dtIg[tind,dind] = gtIg[m]\n                    dtm[tind,dind]  = gt[m]['id']\n                    gtm[tind,m]     = d['id']\n        # set unmatched detections outside of area range to ignore\n        a = np.array([d['area']<aRng[0] or d['area']>aRng[1] for d in dt]).reshape((1, len(dt)))\n        dtIg = np.logical_or(dtIg, np.logical_and(dtm==0, np.repeat(a,T,0)))\n        # store results for given image and category\n        return {\n                'image_id':     imgId,\n                'category_id':  catId,\n                'aRng':         aRng,\n                'maxDet':       maxDet,\n                'dtIds':        [d['id'] for d in dt],\n                'gtIds':        [g['id'] for g in gt],\n                'dtMatches':    dtm,\n                'gtMatches':    gtm,\n                'dtScores':     [d['score'] for d in dt],\n                'gtIgnore':     gtIg,\n                'dtIgnore':     dtIg,\n            }\n\n    def 
accumulate(self, p = None):\n        '''\n        Accumulate per image evaluation results and store the result in self.eval\n        :param p: input params for evaluation\n        :return: None\n        '''\n        print('Accumulating evaluation results...')\n        tic = time.time()\n        if not self.evalImgs:\n            print('Please run evaluate() first')\n        # allows input customized parameters\n        if p is None:\n            p = self.params\n        p.catIds = p.catIds if p.useCats == 1 else [-1]\n        T           = len(p.iouThrs)\n        R           = len(p.recThrs)\n        K           = len(p.catIds) if p.useCats else 1\n        A           = len(p.areaRng)\n        M           = len(p.maxDets)\n        precision   = -np.ones((T,R,K,A,M)) # -1 for the precision of absent categories\n        recall      = -np.ones((T,K,A,M))\n        scores      = -np.ones((T,R,K,A,M))\n\n        # create dictionary for future indexing\n        _pe = self._paramsEval\n        catIds = _pe.catIds if _pe.useCats else [-1]\n        setK = set(catIds)\n        setA = set(map(tuple, _pe.areaRng))\n        setM = set(_pe.maxDets)\n        setI = set(_pe.imgIds)\n        # get inds to evaluate\n        k_list = [n for n, k in enumerate(p.catIds)  if k in setK]\n        m_list = [m for n, m in enumerate(p.maxDets) if m in setM]\n        a_list = [n for n, a in enumerate(map(lambda x: tuple(x), p.areaRng)) if a in setA]\n        i_list = [n for n, i in enumerate(p.imgIds)  if i in setI]\n        I0 = len(_pe.imgIds)\n        A0 = len(_pe.areaRng)\n        # retrieve E at each category, area range, and max number of detections\n        for k, k0 in enumerate(k_list):\n            Nk = k0*A0*I0\n            for a, a0 in enumerate(a_list):\n                Na = a0*I0\n                for m, maxDet in enumerate(m_list):\n                    E = [self.evalImgs[Nk + Na + i] for i in i_list]\n                    E = [e for e in E if not e is None]\n                    if 
len(E) == 0:\n                        continue\n                    dtScores = np.concatenate([e['dtScores'][0:maxDet] for e in E])\n\n                    # different sorting method generates slightly different results.\n                    # mergesort is used to be consistent as Matlab implementation.\n                    inds = np.argsort(-dtScores, kind='mergesort')\n                    dtScoresSorted = dtScores[inds]\n\n                    dtm  = np.concatenate([e['dtMatches'][:,0:maxDet] for e in E], axis=1)[:,inds]\n                    dtIg = np.concatenate([e['dtIgnore'][:,0:maxDet]  for e in E], axis=1)[:,inds]\n                    gtIg = np.concatenate([e['gtIgnore'] for e in E])\n                    npig = np.count_nonzero(gtIg==0 )\n                    if npig == 0:\n                        continue\n                    tps = np.logical_and(               dtm,  np.logical_not(dtIg) )\n                    fps = np.logical_and(np.logical_not(dtm), np.logical_not(dtIg) )\n\n                    tp_sum = np.cumsum(tps, axis=1).astype(dtype=np.float)\n                    fp_sum = np.cumsum(fps, axis=1).astype(dtype=np.float)\n                    for t, (tp, fp) in enumerate(zip(tp_sum, fp_sum)):\n                        tp = np.array(tp)\n                        fp = np.array(fp)\n                        nd = len(tp)\n                        rc = tp / npig\n                        pr = tp / (fp+tp+np.spacing(1))\n                        q  = np.zeros((R,))\n                        ss = np.zeros((R,))\n\n                        if nd:\n                            recall[t,k,a,m] = rc[-1]\n                        else:\n                            recall[t,k,a,m] = 0\n\n                        # numpy is slow without cython optimization for accessing elements\n                        # use python array gets significant speed improvement\n                        pr = pr.tolist(); q = q.tolist()\n\n                        for i in range(nd-1, 0, -1):\n             
               if pr[i] > pr[i-1]:\n                                pr[i-1] = pr[i]\n\n                        inds = np.searchsorted(rc, p.recThrs, side='left')\n                        try:\n                            for ri, pi in enumerate(inds):\n                                q[ri] = pr[pi]\n                                ss[ri] = dtScoresSorted[pi]\n                        except:\n                            pass\n                        precision[t,:,k,a,m] = np.array(q)\n                        scores[t,:,k,a,m] = np.array(ss)\n        self.eval = {\n            'params': p,\n            'counts': [T, R, K, A, M],\n            'date': datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'),\n            'precision': precision,\n            'recall':   recall,\n            'scores': scores,\n        }\n        toc = time.time()\n        print('DONE (t={:0.2f}s).'.format( toc-tic))\n\n    def summarize(self):\n        '''\n        Compute and display summary metrics for evaluation results.\n        Note this functin can *only* be applied on the default parameter setting\n        '''\n        def _summarize( ap=1, iouThr=None, areaRng='all', maxDets=100 ):\n            p = self.params\n            iStr = ' {:<18} {} @[ IoU={:<9} | area={:>6s} | maxDets={:>3d} ] = {:0.3f}'\n            titleStr = 'Average Precision' if ap == 1 else 'Average Recall'\n            typeStr = '(AP)' if ap==1 else '(AR)'\n            iouStr = '{:0.2f}:{:0.2f}'.format(p.iouThrs[0], p.iouThrs[-1]) \\\n                if iouThr is None else '{:0.2f}'.format(iouThr)\n\n            aind = [i for i, aRng in enumerate(p.areaRngLbl) if aRng == areaRng]\n            mind = [i for i, mDet in enumerate(p.maxDets) if mDet == maxDets]\n            if ap == 1:\n                # dimension of precision: [TxRxKxAxM]\n                s = self.eval['precision']\n                # IoU\n                if iouThr is not None:\n                    t = np.where(iouThr == p.iouThrs)[0]\n                  
  s = s[t]\n                s = s[:,:,:,aind,mind]\n            else:\n                # dimension of recall: [TxKxAxM]\n                s = self.eval['recall']\n                if iouThr is not None:\n                    t = np.where(iouThr == p.iouThrs)[0]\n                    s = s[t]\n                s = s[:,:,aind,mind]\n            if len(s[s>-1])==0:\n                mean_s = -1\n            else:\n                mean_s = np.mean(s[s>-1])\n            print(iStr.format(titleStr, typeStr, iouStr, areaRng, maxDets, mean_s))\n            return mean_s\n        def _summarizeDets():\n            stats = np.zeros((12,))\n            stats[0] = _summarize(1)\n            stats[1] = _summarize(1, iouThr=.5, maxDets=self.params.maxDets[2])\n            stats[2] = _summarize(1, iouThr=.75, maxDets=self.params.maxDets[2])\n            stats[3] = _summarize(1, areaRng='small', maxDets=self.params.maxDets[2])\n            stats[4] = _summarize(1, areaRng='medium', maxDets=self.params.maxDets[2])\n            stats[5] = _summarize(1, areaRng='large', maxDets=self.params.maxDets[2])\n            stats[6] = _summarize(0, maxDets=self.params.maxDets[0])\n            stats[7] = _summarize(0, maxDets=self.params.maxDets[1])\n            stats[8] = _summarize(0, maxDets=self.params.maxDets[2])\n            stats[9] = _summarize(0, areaRng='small', maxDets=self.params.maxDets[2])\n            stats[10] = _summarize(0, areaRng='medium', maxDets=self.params.maxDets[2])\n            stats[11] = _summarize(0, areaRng='large', maxDets=self.params.maxDets[2])\n            return stats\n        def _summarizeKps():\n            stats = np.zeros((10,))\n            stats[0] = _summarize(1, maxDets=20)\n            stats[1] = _summarize(1, maxDets=20, iouThr=.5)\n            stats[2] = _summarize(1, maxDets=20, iouThr=.75)\n            stats[3] = _summarize(1, maxDets=20, areaRng='medium')\n            stats[4] = _summarize(1, maxDets=20, areaRng='large')\n            stats[5] = 
_summarize(0, maxDets=20)\n            stats[6] = _summarize(0, maxDets=20, iouThr=.5)\n            stats[7] = _summarize(0, maxDets=20, iouThr=.75)\n            stats[8] = _summarize(0, maxDets=20, areaRng='medium')\n            stats[9] = _summarize(0, maxDets=20, areaRng='large')\n            return stats\n        if not self.eval:\n            raise Exception('Please run accumulate() first')\n        iouType = self.params.iouType\n        if iouType == 'segm' or iouType == 'bbox':\n            summarize = _summarizeDets\n        elif iouType == 'keypoints':\n            summarize = _summarizeKps\n        self.stats = summarize()\n\n    def __str__(self):\n        self.summarize()\n\nclass Params:\n    '''\n    Params for coco evaluation api\n    '''\n    def setDetParams(self):\n        self.imgIds = []\n        self.catIds = []\n        # np.arange causes trouble.  the data point on arange is slightly larger than the true value\n        self.iouThrs = np.linspace(.5, 0.95, int(np.round((0.95 - .5) / .05)) + 1, endpoint=True)\n        self.recThrs = np.linspace(.0, 1.00, int(np.round((1.00 - .0) / .01)) + 1, endpoint=True)\n        self.maxDets = [1, 10, 100]\n        self.areaRng = [[0 ** 2, 1e5 ** 2], [0 ** 2, 32 ** 2], [32 ** 2, 96 ** 2], [96 ** 2, 1e5 ** 2]]\n        self.areaRngLbl = ['all', 'small', 'medium', 'large']\n        self.useCats = 1\n\n    def setKpParams(self):\n        self.imgIds = []\n        self.catIds = []\n        # np.arange causes trouble.  
the data point on arange is slightly larger than the true value\n        self.iouThrs = np.linspace(.5, 0.95, int(np.round((0.95 - .5) / .05)) + 1, endpoint=True)\n        self.recThrs = np.linspace(.0, 1.00, int(np.round((1.00 - .0) / .01)) + 1, endpoint=True)\n        self.maxDets = [20,20,20]\n        self.areaRng = [[0 ** 2, 1e5 ** 2], [32 ** 2, 96 ** 2], [96 ** 2, 1e5 ** 2]]\n        self.areaRngLbl = ['all', 'medium', 'large']\n        self.useCats = 1\n        self.kpt_oks_sigmas = np.array([.26, .25])/10.0\n\n    def __init__(self, iouType='segm'):\n        if iouType == 'segm' or iouType == 'bbox':\n            self.setDetParams()\n        elif iouType == 'keypoints':\n            self.setKpParams()\n        else:\n            raise Exception('iouType not supported')\n        self.iouType = iouType\n        # useSegm is deprecated\n        self.useSegm = None\n"
  },
  {
    "path": "PythonAPI/pysobatools/mask.py",
    "content": "__author__ = 'tsungyi'\n\nimport pysobatools._mask as _mask\n\n# Interface for manipulating masks stored in RLE format.\n#\n# RLE is a simple yet efficient format for storing binary masks. RLE\n# first divides a vector (or vectorized image) into a series of piecewise\n# constant regions and then for each piece simply stores the length of\n# that piece. For example, given M=[0 0 1 1 1 0 1] the RLE counts would\n# be [2 3 1 1], or for M=[1 1 1 1 1 1 0] the counts would be [0 6 1]\n# (note that the odd counts are always the numbers of zeros). Instead of\n# storing the counts directly, additional compression is achieved with a\n# variable bitrate representation based on a common scheme called LEB128.\n#\n# Compression is greatest given large piecewise constant regions.\n# Specifically, the size of the RLE is proportional to the number of\n# *boundaries* in M (or for an image the number of boundaries in the y\n# direction). Assuming fairly simple shapes, the RLE representation is\n# O(sqrt(n)) where n is number of pixels in the object. Hence space usage\n# is substantially lower, especially for large simple objects (large n).\n#\n# Many common operations on masks can be computed directly using the RLE\n# (without need for decoding). This includes computations such as area,\n# union, intersection, etc. All of these operations are linear in the\n# size of the RLE, in other words they are O(sqrt(n)) where n is the area\n# of the object. 
Computing these operations on the original mask is O(n).\n# Thus, using the RLE can result in substantial computational savings.\n#\n# The following API functions are defined:\n#  encode         - Encode binary masks using RLE.\n#  decode         - Decode binary masks encoded via RLE.\n#  merge          - Compute union or intersection of encoded masks.\n#  iou            - Compute intersection over union between masks.\n#  area           - Compute area of encoded masks.\n#  toBbox         - Get bounding boxes surrounding encoded masks.\n#  frPyObjects    - Convert polygon, bbox, and uncompressed RLE to encoded RLE mask.\n#\n# Usage:\n#  Rs     = encode( masks )\n#  masks  = decode( Rs )\n#  R      = merge( Rs, intersect=false )\n#  o      = iou( dt, gt, iscrowd )\n#  a      = area( Rs )\n#  bbs    = toBbox( Rs )\n#  Rs     = frPyObjects( [pyObjects], h, w )\n#\n# In the API the following formats are used:\n#  Rs      - [dict] Run-length encoding of binary masks\n#  R       - dict Run-length encoding of binary mask\n#  masks   - [hxwxn] Binary mask(s) (must have type np.ndarray(dtype=uint8) in column-major order)\n#  iscrowd - [nx1] list of np.ndarray. 1 indicates corresponding gt image has crowd region to ignore\n#  bbs     - [nx4] Bounding box(es) stored as [x y w h]\n#  poly    - Polygon stored as [[x1 y1 x2 y2...],[x1 y1 ...],...] (2D list)\n#  dt,gt   - May be either bounding boxes or encoded masks\n# Both poly and bbs are 0-indexed (bbox=[0 0 1 1] encloses first pixel).\n#\n# Finally, a note about the intersection over union (iou) computation.\n# The standard iou of a ground truth (gt) and detected (dt) object is\n#  iou(gt,dt) = area(intersect(gt,dt)) / area(union(gt,dt))\n# For \"crowd\" regions, we use a modified criteria. If a gt object is\n# marked as \"iscrowd\", we allow a dt to match any subregion of the gt.\n# Choosing gt' in the crowd gt that best matches the dt can be done using\n# gt'=intersect(dt,gt). 
Since by definition union(gt',dt)=dt, computing\n#  iou(gt,dt,iscrowd) = iou(gt',dt) = area(intersect(gt,dt)) / area(dt)\n# For crowd gt regions we use this modified criteria above for the iou.\n#\n# To compile run \"python setup.py build_ext --inplace\"\n# Please do not contact us for help with compiling.\n#\n# Microsoft soba Toolbox.      version 2.0\n# Data, paper, and tutorials available at:  http://mssoba.org/\n# Code written by Piotr Dollar and Tsung-Yi Lin, 2015.\n# Licensed under the Simplified BSD License [see soba/license.txt]\n\niou         = _mask.iou\nmerge       = _mask.merge\nfrPyObjects = _mask.frPyObjects\n\ndef encode(bimask):\n    if len(bimask.shape) == 3:\n        return _mask.encode(bimask)\n    elif len(bimask.shape) == 2:\n        h, w = bimask.shape\n        return _mask.encode(bimask.reshape((h, w, 1), order='F'))[0]\n\ndef decode(rleObjs):\n    if type(rleObjs) == list:\n        return _mask.decode(rleObjs)\n    else:\n        return _mask.decode([rleObjs])[:,:,0]\n\ndef area(rleObjs):\n    if type(rleObjs) == list:\n        return _mask.area(rleObjs)\n    else:\n        return _mask.area([rleObjs])[0]\n\ndef toBbox(rleObjs):\n    if type(rleObjs) == list:\n        return _mask.toBbox(rleObjs)\n    else:\n        return _mask.toBbox([rleObjs])[0]"
  },
  {
    "path": "PythonAPI/pysobatools/soba.py",
    "content": "__author__ = 'tylin'\n__version__ = '2.0'\n# Interface for accessing the Microsoft SOBA dataset.\n\n# Microsoft SOBA is a large image dataset designed for object detection,\n# segmentation, and caption generation. pysobatools is a Python API that\n# assists in loading, parsing and visualizing the annotations in SOBA.\n# Please visit http://mssoba.org/ for more information on SOBA, including\n# for the data, paper, and tutorials. The exact format of the annotations\n# is also described on the SOBA website. For example usage of the pysobatools\n# please see pysobatools_demo.ipynb. In addition to this API, please download both\n# the SOBA images and annotations in order to run the demo.\n\n# An alternative to using the API is to load the annotations directly\n# into Python dictionary\n# Using the API provides additional utility functions. Note that this API\n# supports both *instance* and *caption* annotations. In the case of\n# captions not all functions are defined (e.g. categories are undefined).\n\n# The following API functions are defined:\n#  SOBA       - SOBA api class that loads SOBA annotation file and prepare data structures.\n#  decodeMask - Decode binary mask M encoded via run-length encoding.\n#  encodeMask - Encode binary mask M using run-length encoding.\n#  getAnnIds  - Get ann ids that satisfy given filter conditions.\n#  getCatIds  - Get cat ids that satisfy given filter conditions.\n#  getImgIds  - Get img ids that satisfy given filter conditions.\n#  loadAnns   - Load anns with the specified ids.\n#  loadCats   - Load cats with the specified ids.\n#  loadImgs   - Load imgs with the specified ids.\n#  annToMask  - Convert segmentation in an annotation to binary mask.\n#  showAnns   - Display the specified annotations.\n#  loadRes    - Load algorithm results and create API for accessing them.\n#  download   - Download SOBA images from mssoba.org server.\n# Throughout the API \"ann\"=annotation, \"cat\"=category, and \"img\"=image.\n# 
Help on each functions can be accessed by: \"help SOBA>function\".\n\n# See also SOBA>decodeMask,\n# SOBA>encodeMask, SOBA>getAnnIds, SOBA>getCatIds,\n# SOBA>getImgIds, SOBA>loadAnns, SOBA>loadCats,\n# SOBA>loadImgs, SOBA>annToMask, SOBA>showAnns\n\n# Microsoft SOBA Toolbox.      version 2.0\n# Data, paper, and tutorials available at:  http://mssoba.org/\n# Code written by Piotr Dollar and Tsung-Yi Lin, 2014.\n# Licensed under the Simplified BSD License [see bsd.txt]\n\nimport json\nimport time\nimport matplotlib.pyplot as plt\nfrom matplotlib.collections import PatchCollection\nfrom matplotlib.patches import Polygon\nimport numpy as np\nimport copy\nimport itertools\nfrom . import mask as maskUtils\nimport os\nfrom collections import defaultdict\nimport sys\nPYTHON_VERSION = sys.version_info[0]\nif PYTHON_VERSION == 2:\n    from urllib import urlretrieve\nelif PYTHON_VERSION == 3:\n    from urllib.request import urlretrieve\n\n\ndef _isArrayLike(obj):\n    return hasattr(obj, '__iter__') and hasattr(obj, '__len__')\n\n\nclass SOBA:\n    def __init__(self, annotation_file=None):\n        \"\"\"\n        Constructor of Microsoft SOBA helper class for reading and visualizing annotations.\n        :param annotation_file (str): location of annotation file\n        :param image_folder (str): location to the folder that hosts images.\n        :return:\n        \"\"\"\n        # load dataset\n        self.dataset,self.anns,self.cats,self.imgs = dict(),dict(),dict(),dict()\n        self.association,self.association_anns = dict(),dict()\n        self.imgToAnns, self.catToImgs = defaultdict(list), defaultdict(list)\n        self.imgToAssoAnns,self.assoToImgs = defaultdict(list), defaultdict(list)\n        if not annotation_file == None:\n            print('loading annotations into memory...')\n            tic = time.time()\n            dataset = json.load(open(annotation_file, 'r'))\n            assert type(dataset)==dict, 'annotation file format {} not 
supported'.format(type(dataset))\n            print('Done (t={:0.2f}s)'.format(time.time()- tic))\n            self.dataset = dataset\n            self.createIndex()\n\n    def createIndex(self):\n        # create index\n        print('creating index...')\n        anns, cats, imgs = {}, {}, {}\n        association,association_anns = {},{}\n        imgToAnns,catToImgs = defaultdict(list),defaultdict(list)\n        imgToAssoAnns,assoToImgs = defaultdict(list),defaultdict(list)\n        if 'annotations' in self.dataset:\n            for ann in self.dataset['annotations']:\n                imgToAnns[ann['image_id']].append(ann)\n                anns[ann['id']] = ann\n\n        if 'images' in self.dataset:\n            for img in self.dataset['images']:\n                imgs[img['id']] = img\n\n        if 'categories' in self.dataset:\n            for cat in self.dataset['categories']:\n                cats[cat['id']] = cat\n\n        if 'annotations' in self.dataset and 'categories' in self.dataset:\n            for ann in self.dataset['annotations']:\n                catToImgs[ann['category_id']].append(ann['image_id'])\n\n        if 'association' in self.dataset:\n            for cat in self.dataset['association']:\n                association[cat['id']] = cat\n\n        if 'association_anno'in self.dataset:\n            for ann in self.dataset['association_anno']:\n                imgToAssoAnns[ann['image_id']].append(ann)\n                association_anns[ann['id']] = ann\n\n        if 'association_anno' in self.dataset and 'association' in self.dataset:\n            for ann in self.dataset['association_anno']:\n                assoToImgs[ann['category_id']].append(ann['image_id'])\n\n\n\n        print('index created!')\n\n        # create class members\n        self.anns = anns\n        self.imgToAnns = imgToAnns\n        self.catToImgs = catToImgs\n        self.imgs = imgs\n        self.cats = cats\n        self.association_anns = association_anns\n        
self.assoToImgs = assoToImgs\n        self.association = association\n        self.imgToAssoAnns = imgToAssoAnns\n\n    def info(self):\n        \"\"\"\n        Print information about the annotation file.\n        :return:\n        \"\"\"\n        for key, value in self.dataset['info'].items():\n            print('{}: {}'.format(key, value))\n\n    def getAssoAnnIds(self, imgIds=[], catIds=[], areaRng=[], iscrowd=None):\n        \"\"\"\n        Get ann ids that satisfy given filter conditions. default skips that filter\n        :param imgIds  (int array)     : get anns for given imgs\n               catIds  (int array)     : get anns for given cats\n               areaRng (float array)   : get anns for given area range (e.g. [0 inf])\n               iscrowd (boolean)       : get anns for given crowd label (False or True)\n        :return: ids (int array)       : integer array of ann ids\n        \"\"\"\n        imgIds = imgIds if _isArrayLike(imgIds) else [imgIds]\n        catIds = catIds if _isArrayLike(catIds) else [catIds]\n\n        if len(imgIds) == len(catIds) == len(areaRng) == 0:\n            anns = self.dataset['association_anno']\n        else:\n            if not len(imgIds) == 0:\n                lists = [self.imgToAssoAnns[imgId] for imgId in imgIds if imgId in self.imgToAssoAnns]\n                anns = list(itertools.chain.from_iterable(lists))\n            else:\n                anns = self.dataset['association_anno']\n            anns = anns if len(catIds)  == 0 else [ann for ann in anns if ann['category_id'] in catIds]\n            anns = anns if len(areaRng) == 0 else [ann for ann in anns if ann['area'] > areaRng[0] and ann['area'] < areaRng[1]]\n        if not iscrowd == None:\n            ids = [ann['id'] for ann in anns if ann['iscrowd'] == iscrowd]\n        else:\n            ids = [ann['id'] for ann in anns]\n        return ids\n\n    def getAnnIds(self, imgIds=[], catIds=[], areaRng=[], iscrowd=None):\n        \"\"\"\n        Get ann ids 
that satisfy given filter conditions. default skips that filter\n        :param imgIds  (int array)     : get anns for given imgs\n               catIds  (int array)     : get anns for given cats\n               areaRng (float array)   : get anns for given area range (e.g. [0 inf])\n               iscrowd (boolean)       : get anns for given crowd label (False or True)\n        :return: ids (int array)       : integer array of ann ids\n        \"\"\"\n        imgIds = imgIds if _isArrayLike(imgIds) else [imgIds]\n        catIds = catIds if _isArrayLike(catIds) else [catIds]\n\n        if len(imgIds) == len(catIds) == len(areaRng) == 0:\n            anns = self.dataset['annotations']\n        else:\n            if not len(imgIds) == 0:\n                lists = [self.imgToAnns[imgId] for imgId in imgIds if imgId in self.imgToAnns]\n                anns = list(itertools.chain.from_iterable(lists))\n            else:\n                anns = self.dataset['annotations']\n            anns = anns if len(catIds)  == 0 else [ann for ann in anns if ann['category_id'] in catIds]\n            anns = anns if len(areaRng) == 0 else [ann for ann in anns if ann['area'] > areaRng[0] and ann['area'] < areaRng[1]]\n        if not iscrowd == None:\n            ids = [ann['id'] for ann in anns if ann['iscrowd'] == iscrowd]\n        else:\n            ids = [ann['id'] for ann in anns]\n        return ids\n\n    def getAssoIds(self, catNms=[], supNms=[], catIds=[]):\n        \"\"\"\n        filtering parameters. 
default skips that filter.\n        :param catNms (str array)  : get cats for given cat names\n        :param supNms (str array)  : get cats for given supercategory names\n        :param catIds (int array)  : get cats for given cat ids\n        :return: ids (int array)   : integer array of cat ids\n        \"\"\"\n        catNms = catNms if _isArrayLike(catNms) else [catNms]\n        supNms = supNms if _isArrayLike(supNms) else [supNms]\n        catIds = catIds if _isArrayLike(catIds) else [catIds]\n\n        if len(catNms) == len(supNms) == len(catIds) == 0:\n            cats = self.dataset['association']\n        else:\n            cats = self.dataset['association']\n            cats = cats if len(catNms) == 0 else [cat for cat in cats if cat['name']          in catNms]\n            cats = cats if len(supNms) == 0 else [cat for cat in cats if cat['supercategory'] in supNms]\n            cats = cats if len(catIds) == 0 else [cat for cat in cats if cat['id']            in catIds]\n        ids = [cat['id'] for cat in cats]\n        return ids\n\n    def getCatIds(self, catNms=[], supNms=[], catIds=[]):\n        \"\"\"\n        filtering parameters. 
default skips that filter.\n        :param catNms (str array)  : get cats for given cat names\n        :param supNms (str array)  : get cats for given supercategory names\n        :param catIds (int array)  : get cats for given cat ids\n        :return: ids (int array)   : integer array of cat ids\n        \"\"\"\n        catNms = catNms if _isArrayLike(catNms) else [catNms]\n        supNms = supNms if _isArrayLike(supNms) else [supNms]\n        catIds = catIds if _isArrayLike(catIds) else [catIds]\n\n        if len(catNms) == len(supNms) == len(catIds) == 0:\n            cats = self.dataset['categories']\n        else:\n            cats = self.dataset['categories']\n            cats = cats if len(catNms) == 0 else [cat for cat in cats if cat['name']          in catNms]\n            cats = cats if len(supNms) == 0 else [cat for cat in cats if cat['supercategory'] in supNms]\n            cats = cats if len(catIds) == 0 else [cat for cat in cats if cat['id']            in catIds]\n        ids = [cat['id'] for cat in cats]\n        return ids\n\n    def getImgIds(self, imgIds=[], catIds=[]):\n        '''\n        Get img ids that satisfy given filter conditions.\n        :param imgIds (int array) : get imgs for given ids\n        :param catIds (int array) : get imgs with all given cats\n        :return: ids (int array)  : integer array of img ids\n        '''\n        imgIds = imgIds if _isArrayLike(imgIds) else [imgIds]\n        catIds = catIds if _isArrayLike(catIds) else [catIds]\n\n        if len(imgIds) == len(catIds) == 0:\n            ids = self.imgs.keys()\n        else:\n            ids = set(imgIds)\n            for i, catId in enumerate(catIds):\n                if i == 0 and len(ids) == 0:\n                    ids = set(self.catToImgs[catId])\n                else:\n                    ids &= set(self.catToImgs[catId])\n        return list(ids)\n\n    def loadAnns(self, ids=[]):\n        \"\"\"\n        Load anns with the specified ids.\n        :param ids 
(int array)       : integer ids specifying anns\n        :return: anns (object array) : loaded ann objects\n        \"\"\"\n        if _isArrayLike(ids):\n            return [self.anns[id] for id in ids]\n        elif type(ids) == int:\n            return [self.anns[ids]]\n    \n    def loadAssoAnns(self, ids=[]):\n        if _isArrayLike(ids):\n            return [self.association_anns[id] for id in ids]\n        elif type(ids) == int:\n            return [self.association_anns[ids]]\n\n    def loadCats(self, ids=[]):\n        \"\"\"\n        Load cats with the specified ids.\n        :param ids (int array)       : integer ids specifying cats\n        :return: cats (object array) : loaded cat objects\n        \"\"\"\n        if _isArrayLike(ids):\n            return [self.cats[id] for id in ids]\n        elif type(ids) == int:\n            return [self.cats[ids]]\n    \n    def loadAsso(self, ids=[]):\n        if _isArrayLike(ids):\n            return [self.association[id] for id in ids]\n        elif type(ids) == int:\n            return [self.association[ids]]\n\n    def loadImgs(self, ids=[]):\n        \"\"\"\n        Load anns with the specified ids.\n        :param ids (int array)       : integer ids specifying img\n        :return: imgs (object array) : loaded img objects\n        \"\"\"\n        if _isArrayLike(ids):\n            return [self.imgs[id] for id in ids]\n        elif type(ids) == int:\n            return [self.imgs[ids]]\n\n    def showAnns(self, anns):\n        \"\"\"\n        Display the specified annotations.\n        :param anns (array of object): annotations to display\n        :return: None\n        \"\"\"\n        if len(anns) == 0:\n            return 0\n        if 'segmentation' in anns[0] or 'keypoints' in anns[0]:\n            datasetType = 'instances'\n        elif 'caption' in anns[0]:\n            datasetType = 'captions'\n        else:\n            raise Exception('datasetType not supported')\n        if datasetType == 
'instances':\n            ax = plt.gca()\n            ax.set_autoscale_on(False)\n            polygons = []\n            color = []\n            for ann in anns:\n                c = (np.random.random((1, 3))*0.6+0.4).tolist()[0]\n                if 'segmentation' in ann:\n                    if type(ann['segmentation']) == list:\n                        # polygon\n                        for seg in ann['segmentation']:\n                            poly = np.array(seg).reshape((int(len(seg)/2), 2))\n                            polygons.append(Polygon(poly))\n                            color.append(c)\n                    else:\n                        # mask\n                        t = self.imgs[ann['image_id']]\n                        if type(ann['segmentation']['counts']) == list:\n                            rle = maskUtils.frPyObjects([ann['segmentation']], t['height'], t['width'])\n                        else:\n                            rle = [ann['segmentation']]\n                        m = maskUtils.decode(rle)\n                        img = np.ones( (m.shape[0], m.shape[1], 3) )\n                        if ann['iscrowd'] == 1:\n                            color_mask = np.array([2.0,166.0,101.0])/255\n                        if ann['iscrowd'] == 0:\n                            color_mask = np.random.random((1, 3)).tolist()[0]\n                        for i in range(3):\n                            img[:,:,i] = color_mask[i]\n                        ax.imshow(np.dstack( (img, m*0.5) ))\n                if 'keypoints' in ann and type(ann['keypoints']) == list:\n                    # turn skeleton into zero-based index\n                    sks = np.array(self.loadCats(ann['category_id'])[0]['skeleton'])-1\n                    kp = np.array(ann['keypoints'])\n                    x = kp[0::3]\n                    y = kp[1::3]\n                    v = kp[2::3]\n                    for sk in sks:\n                        if np.all(v[sk]>0):\n                 
           plt.plot(x[sk],y[sk], linewidth=3, color=c)\n                    plt.plot(x[v>0], y[v>0],'o',markersize=8, markerfacecolor=c, markeredgecolor='k',markeredgewidth=2)\n                    plt.plot(x[v>1], y[v>1],'o',markersize=8, markerfacecolor=c, markeredgecolor=c, markeredgewidth=2)\n            p = PatchCollection(polygons, facecolor=color, linewidths=0, alpha=0.4)\n            ax.add_collection(p)\n            p = PatchCollection(polygons, facecolor='none', edgecolors=color, linewidths=2)\n            ax.add_collection(p)\n        elif datasetType == 'captions':\n            for ann in anns:\n                print(ann['caption'])\n\n    def loadRes(self, resFile):\n        \"\"\"\n        Load result file and return a result api object.\n        :param   resFile (str)     : file name of result file\n        :return: res (obj)         : result api object\n        \"\"\"\n        res = SOBA()\n        res.dataset['images'] = [img for img in self.dataset['images']]\n\n        print('Loading and preparing results...')\n        tic = time.time()\n        if type(resFile) == str or (PYTHON_VERSION == 2 and type(resFile) == unicode):\n            anns = json.load(open(resFile))\n        elif type(resFile) == np.ndarray:\n            anns = self.loadNumpyAnnotations(resFile)\n        else:\n            anns = resFile\n        assert type(anns) == list, 'results in not an array of objects'\n        annsImgIds = [ann['image_id'] for ann in anns]\n        assert set(annsImgIds) == (set(annsImgIds) & set(self.getImgIds())), \\\n               'Results do not correspond to current soba set'\n        if 'caption' in anns[0]:\n            imgIds = set([img['id'] for img in res.dataset['images']]) & set([ann['image_id'] for ann in anns])\n            res.dataset['images'] = [img for img in res.dataset['images'] if img['id'] in imgIds]\n            for id, ann in enumerate(anns):\n                ann['id'] = id+1\n        elif 'bbox' in anns[0] and not anns[0]['bbox'] 
== []:\n            res.dataset['categories'] = copy.deepcopy(self.dataset['categories'])\n            for id, ann in enumerate(anns):\n                bb = ann['bbox']\n                x1, x2, y1, y2 = [bb[0], bb[0]+bb[2], bb[1], bb[1]+bb[3]]\n                if not 'segmentation' in ann:\n                    ann['segmentation'] = [[x1, y1, x1, y2, x2, y2, x2, y1]]\n                ann['area'] = bb[2]*bb[3]\n                ann['id'] = id+1\n                ann['iscrowd'] = 0\n        elif 'segmentation' in anns[0]:\n            res.dataset['categories'] = copy.deepcopy(self.dataset['categories'])\n            for id, ann in enumerate(anns):\n                # now only support compressed RLE format as segmentation results\n                ann['area'] = maskUtils.area(ann['segmentation'])\n                if not 'bbox' in ann:\n                    ann['bbox'] = maskUtils.toBbox(ann['segmentation'])\n                ann['id'] = id+1\n                ann['iscrowd'] = 0\n        elif 'keypoints' in anns[0]:\n            res.dataset['categories'] = copy.deepcopy(self.dataset['categories'])\n            for id, ann in enumerate(anns):\n                s = ann['keypoints']\n                x = s[0::3]\n                y = s[1::3]\n                x0,x1,y0,y1 = np.min(x), np.max(x), np.min(y), np.max(y)\n                ann['area'] = (x1-x0)*(y1-y0)\n                ann['id'] = id + 1\n                ann['bbox'] = [x0,y0,x1-x0,y1-y0]\n        print('DONE (t={:0.2f}s)'.format(time.time()- tic))\n\n        res.dataset['annotations'] = anns\n        res.createIndex()\n        return res\n\n    def loadRes_asso(self, resFile):\n        \"\"\"\n        Load result file and return a result api object.\n        :param   resFile (str)     : file name of result file\n        :return: res (obj)         : result api object\n        \"\"\"\n        res = SOBA()\n        res.dataset['images'] = [img for img in self.dataset['images']]\n\n        print('Loading and preparing 
results...')\n        tic = time.time()\n        if type(resFile) == str or (PYTHON_VERSION == 2 and type(resFile) == unicode):\n            anns = json.load(open(resFile))\n        elif type(resFile) == np.ndarray:\n            anns = self.loadNumpyAnnotations(resFile)\n        else:\n            anns = resFile\n        assert type(anns) == list, 'results in not an array of objects'\n        annsImgIds = [ann['image_id'] for ann in anns]\n        assert set(annsImgIds) == (set(annsImgIds) & set(self.getImgIds())), \\\n               'Results do not correspond to current soba set'\n        if 'caption' in anns[0]:\n            imgIds = set([img['id'] for img in res.dataset['images']]) & set([ann['image_id'] for ann in anns])\n            res.dataset['images'] = [img for img in res.dataset['images'] if img['id'] in imgIds]\n            for id, ann in enumerate(anns):\n                ann['id'] = id+1\n        elif 'bbox' in anns[0] and not anns[0]['bbox'] == []:\n            res.dataset['categories'] = copy.deepcopy(self.dataset['categories'])\n            for id, ann in enumerate(anns):\n                bb = ann['bbox']\n                x1, x2, y1, y2 = [bb[0], bb[0]+bb[2], bb[1], bb[1]+bb[3]]\n                if not 'segmentation' in ann:\n                    ann['segmentation'] = [[x1, y1, x1, y2, x2, y2, x2, y1]]\n                ann['area'] = bb[2]*bb[3]\n                ann['id'] = id+1\n                ann['iscrowd'] = 0\n        elif 'segmentation' in anns[0]:\n            res.dataset['categories'] = copy.deepcopy(self.dataset['categories'])\n            for id, ann in enumerate(anns):\n                # now only support compressed RLE format as segmentation results\n                ann['area'] = maskUtils.area(ann['segmentation'])\n                if not 'bbox' in ann:\n                    ann['bbox'] = maskUtils.toBbox(ann['segmentation'])\n                ann['id'] = id+1\n                ann['iscrowd'] = 0\n        elif 'keypoints' in anns[0]:\n          
  res.dataset['categories'] = copy.deepcopy(self.dataset['categories'])\n            for id, ann in enumerate(anns):\n                s = ann['keypoints']\n                x = s[0::3]\n                y = s[1::3]\n                x0,x1,y0,y1 = np.min(x), np.max(x), np.min(y), np.max(y)\n                ann['area'] = (x1-x0)*(y1-y0)\n                ann['id'] = id + 1\n                ann['bbox'] = [x0,y0,x1-x0,y1-y0]\n        print('DONE (t={:0.2f}s)'.format(time.time()- tic))\n\n        res.dataset['association_anno'] = anns\n        res.createIndex()\n        return res\n\n    def download(self, tarDir = None, imgIds = [] ):\n        '''\n        Download SOBA images from mssoba.org server.\n        :param tarDir (str): SOBA results directory name\n               imgIds (list): images to be downloaded\n        :return:\n        '''\n        if tarDir is None:\n            print('Please specify target directory')\n            return -1\n        if len(imgIds) == 0:\n            imgs = self.imgs.values()\n        else:\n            imgs = self.loadImgs(imgIds)\n        N = len(imgs)\n        if not os.path.exists(tarDir):\n            os.makedirs(tarDir)\n        for i, img in enumerate(imgs):\n            tic = time.time()\n            fname = os.path.join(tarDir, img['file_name'])\n            if not os.path.exists(fname):\n                urlretrieve(img['soba_url'], fname)\n            print('downloaded {}/{} images (t={:0.1f}s)'.format(i, N, time.time()- tic))\n\n    def loadNumpyAnnotations(self, data):\n        \"\"\"\n        Convert result data from a numpy array [Nx7] where each row contains {imageID,x1,y1,w,h,score,class}\n        :param  data (numpy.ndarray)\n        :return: annotations (python nested list)\n        \"\"\"\n        print('Converting ndarray to lists...')\n        assert(type(data) == np.ndarray)\n        print(data.shape)\n        assert(data.shape[1] == 7)\n        N = data.shape[0]\n        ann = []\n        for i in range(N):\n      
      if i % 1000000 == 0:\n                print('{}/{}'.format(i,N))\n            ann += [{\n                'image_id'  : int(data[i, 0]),\n                'bbox'  : [ data[i, 1], data[i, 2], data[i, 3], data[i, 4] ],\n                'score' : data[i, 5],\n                'category_id': int(data[i, 6]),\n                }]\n        return ann\n\n    def annToRLE(self, ann):\n        \"\"\"\n        Convert annotation which can be polygons, uncompressed RLE to RLE.\n        :return: binary mask (numpy 2D array)\n        \"\"\"\n        t = self.imgs[ann['image_id']]\n        h, w = t['height'], t['width']\n        segm = ann['segmentation']\n        if type(segm) == list:\n            # polygon -- a single object might consist of multiple parts\n            # we merge all parts into one mask rle code\n            rles = maskUtils.frPyObjects(segm, h, w)\n            rle = maskUtils.merge(rles)\n        elif type(segm['counts']) == list:\n            # uncompressed RLE\n            rle = maskUtils.frPyObjects(segm, h, w)\n        else:\n            # rle\n            rle = ann['segmentation']\n        return rle\n\n    def annToMask(self, ann):\n        \"\"\"\n        Convert annotation which can be polygons, uncompressed RLE, or RLE to binary mask.\n        :return: binary mask (numpy 2D array)\n        \"\"\"\n        rle = self.annToRLE(ann)\n        m = maskUtils.decode(rle)\n        return m"
  },
  {
    "path": "PythonAPI/pysobatools/sobaeval.py",
    "content": "__author__ = 'tianyu'\n\nimport numpy as np\nimport datetime\nimport time\nfrom collections import defaultdict\nfrom . import mask as maskUtils\nimport copy\n\nclass SOAPeval:\n    # Interface for evaluating detection on the SOBA  dataset.\n    #\n    # The usage for isdEval is as follows:\n    #  isdGt=..., isdDt=...       # load dataset and results\n    #  E = isdEval(isdGt,isdDt); # initialize isdEval object\n    #  E.params.recThrs = ...;      # set parameters as desired\n    #  E.evaluate();                # run per image evaluation\n    #  E.accumulate();              # accumulate per image results\n    #  E.summarize();               # display summary metrics of results\n    # For example usage see evalDemo.m and http://msisd.org/.\n    #\n    # The evaluation parameters are as follows (defaults in brackets):\n    #  imgIds     - [all] N img ids to use for evaluation\n    #  catIds     - [all] K cat ids to use for evaluation\n    #  iouThrs    - [.5:.05:.95] T=10 IoU thresholds for evaluation\n    #  recThrs    - [0:.01:1] R=101 recall thresholds for evaluation\n    #  areaRng    - [...] 
A=4 object area ranges for evaluation\n    #  maxDets    - [1 10 100] M=3 thresholds on max detections per image\n    #  iouType    - ['segm'] set iouType to 'segm', 'bbox' or 'keypoints'\n    #  iouType replaced the now DEPRECATED useSegm parameter.\n    #  useCats    - [1] if true use category labels for evaluation\n    # Note: if useCats=0 category labels are ignored as in proposal scoring.\n    # Note: multiple areaRngs [Ax2] and maxDets [Mx1] can be specified.\n    #\n    # evaluate(): evaluates detections on every image and every category and\n    # concats the results into the \"evalImgs\" with fields:\n    #  dtIds      - [1xD] id for each of the D detections (dt)\n    #  gtIds      - [1xG] id for each of the G ground truths (gt)\n    #  dtMatches  - [TxD] matching gt id at each IoU or 0\n    #  gtMatches  - [TxG] matching dt id at each IoU or 0\n    #  dtScores   - [1xD] confidence of each dt\n    #  gtIgnore   - [1xG] ignore flag for each gt\n    #  dtIgnore   - [TxD] ignore flag for each dt at each IoU\n    #\n    # accumulate(): accumulates the per-image, per-category evaluation\n    # results in \"evalImgs\" into the dictionary \"eval\" with fields:\n    #  params     - parameters used for evaluation\n    #  date       - date evaluation was performed\n    #  counts     - [T,R,K,A,M] parameter dimensions (see above)\n    #  precision  - [TxRxKxAxM] precision for every evaluation setting\n    #  recall     - [TxKxAxM] max recall for every evaluation setting\n    # Note: precision and recall==-1 for settings with no gt objects.\n    #\n    # See also isd, mask, pyisdDemo, pyisdEvalDemo\n    #\n    # Microsoft isd Toolbox.      
version 2.0\n    # Data, paper, and tutorials available at:  http://msisd.org/\n    # Code written by Piotr Dollar and Tsung-Yi Lin, 2015.\n    # Licensed under the Simplified BSD License [see isd/license.txt]\n    def __init__(self, isdGt=None, isdDt=None, isdRel = None,iouType='segm'):\n        '''\n        Initialize isdEval using isd APIs for gt and dt\n        :param isdGt: isd object with ground truth annotations\n        :param isdDt: isd object with detection results\n        :return: None\n        '''\n        if not iouType:\n            print('iouType not specified. use default iouType segm')\n        self.isdGt   = isdGt              # ground truth isd API\n        self.isdDt   = isdDt              # detections isd API\n        self.isdRel  = isdRel             # associations isd API\n        self.evalImgs = defaultdict(list)   # per-image per-category evaluation results [KxAxI] elements\n        self.eval     = {}                  # accumulated evaluation results\n        self._gts = defaultdict(list)       # gt for evaluation\n        self._dts = defaultdict(list)       # dt for evaluation\n        self._rel_gts = defaultdict(list)       # gt for evaluation\n        self._rel_dts = defaultdict(list)       # dt for evaluation\n        self.params = Params(iouType=iouType) # parameters\n        self._paramsEval = {}               # parameters for evaluation\n        self.stats = []                     # result summarization\n        self.ious = {}                      # ious between all gts and dts\n        if not isdGt is None:\n            self.params.imgIds = sorted(isdGt.getImgIds())\n            # self.params.catIds = sorted(isdGt.getCatIds())\n\n\n    def _prepare(self):\n        '''\n        Prepare ._gts and ._dts for evaluation based on params\n        :return: None\n        '''\n        def _toMask(anns, isd):\n            # modify ann['segmentation'] by reference\n            for ann in anns:\n                rle = isd.annToRLE(ann)\n         
       ann['segmentation'] = rle\n        p = self.params\n        if p.useCats:\n            gts=self.isdGt.loadAnns(self.isdGt.getAnnIds(imgIds=p.imgIds, catIds=p.catIds))\n            dts=self.isdDt.loadAnns(self.isdDt.getAnnIds(imgIds=p.imgIds, catIds=p.catIds))\n        else:\n            gts=self.isdGt.loadAnns(self.isdGt.getAnnIds(imgIds=p.imgIds))\n            dts=self.isdDt.loadAnns(self.isdDt.getAnnIds(imgIds=p.imgIds))\n\n        # convert ground truth to mask if iouType == 'segm'\n        if p.iouType == 'segm':\n            _toMask(gts, self.isdGt)\n            _toMask(dts, self.isdDt)\n        # set ignore flag\n        for gt in gts:\n            gt['ignore'] = gt['ignore'] if 'ignore' in gt else 0\n            gt['ignore'] = 'iscrowd' in gt and gt['iscrowd']\n            if p.iouType == 'keypoints':\n                gt['ignore'] = (gt['num_keypoints'] == 0) or gt['ignore']\n        self._gts = defaultdict(list)       # gt for evaluation\n        self._dts = defaultdict(list)       # dt for evaluation\n        for gt in gts:\n            self._gts[gt['image_id'], gt['category_id']].append(gt)\n        for dt in dts:\n            self._dts[dt['image_id'], dt['category_id']].append(dt)\n        # self.evalImgs = defaultdict(list)   # per-image per-category evaluation results\n        # self.eval     = {}                  # accumulated evaluation results\n\n    def _prepare_asso(self):\n        '''\n        Prepare ._gts and ._dts for evaluation based on params\n        :return: None\n        '''\n        def _toMask(anns, isd):\n            # modify ann['segmentation'] by reference\n            for ann in anns:\n                rle = isd.annToRLE(ann)\n                ann['segmentation'] = rle\n        self.params.catIds = sorted(self.isdGt.getAssoIds())\n        p = self.params\n        if p.useCats:\n            rel_gts=self.isdGt.loadAssoAnns(self.isdGt.getAssoAnnIds(imgIds=p.imgIds, catIds=1))\n            
rel_dts=self.isdRel.loadAssoAnns(self.isdRel.getAssoAnnIds(imgIds=p.imgIds, catIds=1))\n        else:\n            rel_gts=self.isdGt.loadAssoAnns(self.isdGt.getAssoAnnIds(imgIds=p.imgIds))\n            rel_dts=self.isdDt.loadAssoAnns(self.isdDt.getAssoAnnIds(imgIds=p.imgIds))\n\n        # convert ground truth to mask if iouType == 'segm'\n        if p.iouType == 'segm':\n            _toMask(rel_gts, self.isdGt)\n            _toMask(rel_dts, self.isdRel)\n        # set ignore flag\n        for gt in rel_gts:\n            gt['ignore'] = gt['ignore'] if 'ignore' in gt else 0\n            gt['ignore'] = 'iscrowd' in gt and gt['iscrowd']\n            if p.iouType == 'keypoints':\n                gt['ignore'] = (gt['num_keypoints'] == 0) or gt['ignore']\n        self._rel_gts = defaultdict(list)       # gt for evaluation\n        self._rel_dts = defaultdict(list)       # dt for evaluation\n        for gt in rel_gts:\n            self._rel_gts[gt['image_id'], gt['category_id']].append(gt)\n        for dt in rel_dts:\n            self._rel_dts[dt['image_id'], dt['category_id']].append(dt)\n        self.evalImgs = defaultdict(list)   # per-image per-category evaluation results\n        self.eval     = {}                  # accumulated evaluation results\n\n    def evaluate(self):\n        '''\n        Run per image evaluation on given images and store results (a list of dict) in self.evalImgs\n        :return: None\n        '''\n        tic = time.time()\n        print('Running per image evaluation...')\n        p = self.params\n        # add backward compatibility if useSegm is specified in params\n        if not p.useSegm is None:\n            p.iouType = 'segm' if p.useSegm == 1 else 'bbox'\n            print('useSegm (deprecated) is not None. 
Running {} evaluation'.format(p.iouType))\n        print('Evaluate annotation type *{}*'.format(p.iouType))\n        p.imgIds = list(np.unique(p.imgIds))\n        if p.useCats:\n            p.catIds = list(np.unique(p.catIds))\n        p.maxDets = sorted(p.maxDets)\n        self.params=p\n\n        self._prepare()\n        # loop through images, area range, max detection number\n        catIds = p.catIds if p.useCats else [-1]\n\n        if p.iouType == 'segm' or p.iouType == 'bbox':\n            computeIoU = self.computeIoU\n        elif p.iouType == 'keypoints':\n            computeIoU = self.computeOks\n        self.ious = {(imgId, catId): computeIoU(imgId, catId) \\\n                        for imgId in p.imgIds\n                        for catId in catIds}\n\n        evaluateImg = self.evaluateImg\n        maxDet = p.maxDets[-1]\n        self.evalImgs = [evaluateImg(imgId, catId, areaRng, maxDet)\n                 for catId in catIds\n                 for areaRng in p.areaRng\n                 for imgId in p.imgIds\n             ]\n        self._paramsEval = copy.deepcopy(self.params)\n        toc = time.time()\n        print('DONE (t={:0.2f}s).'.format(toc-tic))\n\n    def evaluate_asso(self):\n        '''\n        Run per image evaluation on given images and store results (a list of dict) in self.evalImgs\n        :return: None\n        '''\n        tic = time.time()\n        print('Running per image evaluation...')\n        p = self.params\n        # add backward compatibility if useSegm is specified in params\n        if not p.useSegm is None:\n            p.iouType = 'segm' if p.useSegm == 1 else 'bbox'\n            print('useSegm (deprecated) is not None. 
Running {} evaluation'.format(p.iouType))\n        print('Evaluate annotation type *{}*'.format(p.iouType))\n        p.imgIds = list(np.unique(p.imgIds))\n        if p.useCats:\n            p.catIds = list(np.unique(p.catIds))\n        p.maxDets = sorted(p.maxDets)\n        self.params=p\n        self._prepare()\n        self._prepare_asso()\n        # loop through images, area range, max detection number\n        catIds = [1]\n\n        if p.iouType == 'segm' or p.iouType == 'bbox':\n            computeIoU = self.computeIoU\n        elif p.iouType == 'keypoints':\n            computeIoU = self.computeOks\n        self.ious = {(imgId, catId): computeIoU(imgId, catId) \\\n                        for imgId in p.imgIds\n                        for catId in catIds}\n\n        evaluateImg = self.evaluateImg\n        maxDet = p.maxDets[-2]\n        # a,b,c,d = evaluateImg(9,1,p.areaRng[0],maxDet)\n        # return a,b,c,d\n        self.evalImgs = [evaluateImg(imgId, catId, areaRng, maxDet)\n                 for catId in catIds\n                 for areaRng in p.areaRng\n                 for imgId in p.imgIds\n             ]\n        self._paramsEval = copy.deepcopy(self.params)\n        toc = time.time()\n        print('DONE (t={:0.2f}s).'.format(toc-tic))\n\n    def computeIoU(self, imgId, catId):\n        p = self.params\n        if p.useCats:\n            gt = self._gts[imgId,catId]\n            dt = self._dts[imgId,catId]\n        else:\n            gt = [_ for cId in p.catIds for _ in self._gts[imgId,cId]]\n            dt = [_ for cId in p.catIds for _ in self._dts[imgId,cId]]\n        if len(gt) == 0 and len(dt) ==0:\n            return []\n        inds = np.argsort([-d['score'] for d in dt], kind='mergesort')\n        dt = [dt[i] for i in inds]\n        if len(dt) > p.maxDets[-1]:\n            dt=dt[0:p.maxDets[-1]]\n\n        if p.iouType == 'segm':\n            g = [g['segmentation'] for g in gt]\n            d = [d['segmentation'] for d in dt]\n        elif 
p.iouType == 'bbox':\n            g = [g['bbox'] for g in gt]\n            d = [d['bbox'] for d in dt]\n        else:\n            raise Exception('unknown iouType for iou computation')\n\n        # compute iou between each dt and gt region\n        iscrowd = [int(o['iscrowd']) for o in gt]\n        ious = maskUtils.iou(d,g,iscrowd)\n        return ious\n\n    def computeOks(self, imgId, catId):\n        p = self.params\n        # dimension here should be Nxm\n        gts = self._gts[imgId, catId]\n        dts = self._dts[imgId, catId]\n        inds = np.argsort([-d['score'] for d in dts], kind='mergesort')\n        dts = [dts[i] for i in inds]\n        if len(dts) > p.maxDets[-1]:\n            dts = dts[0:p.maxDets[-1]]\n        # if len(gts) == 0 and len(dts) == 0:\n        if len(gts) == 0 or len(dts) == 0:\n            return []\n        ious = np.zeros((len(dts), len(gts)))\n        sigmas = p.kpt_oks_sigmas\n        vars = (sigmas * 2)**2\n        k = len(sigmas)\n        # compute oks between each detection and ground truth object\n        for j, gt in enumerate(gts):\n            # create bounds for ignore regions(double the gt bbox)\n            g = np.array(gt['keypoints'])\n            xg = g[0::3]; yg = g[1::3]; vg = g[2::3]\n            k1 = np.count_nonzero(vg > 0)\n            bb = gt['bbox']\n            x0 = bb[0] - bb[2]; x1 = bb[0] + bb[2] * 2\n            y0 = bb[1] - bb[3]; y1 = bb[1] + bb[3] * 2\n            for i, dt in enumerate(dts):\n                d = np.array(dt['keypoints'])\n                xd = d[0::3]; yd = d[1::3]\n                if k1>0:\n                    # measure the per-keypoint distance if keypoints visible\n                    dx = xd - xg\n                    dy = yd - yg\n                else:\n                    # measure minimum distance to keypoints in (x0,y0) & (x1,y1)\n                    z = np.zeros((k))\n                    dx = np.max((z, x0-xd),axis=0)+np.max((z, xd-x1),axis=0)\n                    dy = 
np.max((z, y0-yd),axis=0)+np.max((z, yd-y1),axis=0)\n                e = (dx**2 + dy**2) / vars / (gt['area']+np.spacing(1)) / 2\n                if k1 > 0:\n                    e=e[vg > 0]\n                ious[i, j] = np.sum(np.exp(-e)) / e.shape[0]\n        return ious\n\n    def evaluateImg(self, imgId, catId, aRng, maxDet):\n        '''\n        perform evaluation for single category and image\n        :return: dict (single image results)\n        '''\n        p = self.params\n\n        gt = self._rel_gts[imgId,1]\n        dt = self._rel_dts[imgId,1]\n        ins_gt = [_ for cId in [1,2] for _ in self._gts[imgId,cId]]\n        ins_dt = [_ for cId in [1,2] for _ in self._dts[imgId,cId]]\n        ins_pred_class_ids = [cId for cId in [1,2] for _ in self._dts[imgId,cId]]\n        if len(gt) == 0 and len(dt) ==0:\n            return None\n\n        for g in gt:\n            if g['ignore'] or (g['area']<aRng[0] or g['area']>aRng[1]):\n                g['_ignore'] = 1\n            else:\n                g['_ignore'] = 0\n\n        # sort dt highest score first, sort gt ignore last\n        gtind = np.argsort([g['_ignore'] for g in gt], kind='mergesort')\n        gt = [gt[i] for i in gtind]\n        dtind = np.argsort([-d['score'] for d in dt], kind='mergesort')\n        dt = [dt[i] for i in dtind[0:maxDet]]\n        iscrowd = [int(o['iscrowd']) for o in gt]\n        # load computed ious\n        ious = self.ious[imgId, catId][:, gtind] if len(self.ious[imgId, catId]) > 0 else self.ious[imgId, catId]\n\n        # match association with instances\n        pred_association = [dt_['association_id'] for dt_ in dt]\n        instance_association = [dt_['association_id'] for dt_ in ins_dt]\n        \n        association_map = ['']*len(pred_association)\n        for x,i in enumerate(pred_association):\n            if i == 0:\n                association_map[x]=(None,None)\n                continue\n            idx = []\n            for j,r in 
enumerate(instance_association):\n                if r == i:\n                    if len(idx)==1:\n                        if ins_pred_class_ids[j] == 1:\n                            idx = [j,idx[0]]\n                        else:\n                            idx.append(j)\n                        if len(idx) == 2:\n                            idx = tuple(idx)\n                            break\n                    else:\n                        idx.append(j)\n            association_map[x] = idx\n\n        \n\n        T = len(p.iouThrs) #Threshold\n        G = len(gt)\n        D = len(dt)\n        gtm  = np.zeros((T,G))\n        dtm  = np.zeros((T,D))\n        gtIg = np.array([g['_ignore'] for g in gt])\n        dtIg = np.zeros((T,D))\n        if not len(ious)==0:\n            for tind, t in enumerate(p.iouThrs):\n                for dind, d in enumerate(dt):\n                    # information about best match so far (m=-1 -> unmatched)\n                    iou = min([t,1-1e-10])\n                    m   = -1\n                    for gind, g in enumerate(gt):\n                        # if this gt already matched, and not a crowd, continue\n                        if gtm[tind,gind]>0 and not iscrowd[gind]:\n                            continue\n                        # if dt matched to reg gt, and on ignore gt, stop\n                        if m>-1 and gtIg[m]==0 and gtIg[gind]==1:\n                            break\n                        # continue to next gt unless better match made\n                        if p.iouType =='segm':\n                            ious = maskUtils.iou([gt[gind]['segmentation']],[dt[dind]['segmentation']],[0])\n                        elif p.iouType =='bbox':\n                            ious = maskUtils.iou([gt[gind]['bbox']],[dt[dind]['bbox']],[0])\n                        if ious < iou:\n                        #######HERE ADD Assotion！！！！##########\n                            # print(dind,gind)\n                            
continue\n                        # print(dind,gind)\n                        if p.iouType =='segm':\n                            gt_o = [ins_gt[gind]['segmentation']]\n                            gt_s = [ins_gt[gind+G]['segmentation']]\n                            ins_o = [ins_dt[association_map[dind][0]]['segmentation']]\n                            ins_s = [ins_dt[association_map[dind][1]]['segmentation']]\n                            ious_o = maskUtils.iou(gt_o,ins_o,[0])\n                            ious_s = maskUtils.iou(gt_s,ins_s,[0])\n                            \n                            if ious_s[0][0] < iou or ious_o[0][0] < iou:\n                                continue\n                        elif p.iouType =='bbox':\n\n                            gt_o = [ins_gt[gind]['bbox']]\n                            gt_s = [ins_gt[gind+G]['bbox']]\n                            try:\n                                ins_o = [ins_dt[association_map[dind][0]]['bbox']]\n                                ins_s = [ins_dt[association_map[dind][1]]['bbox']]\n                            except:\n                                continue\n                            \n                            ious_o = maskUtils.iou(ins_o,gt_o,[0])\n                            ious_s = maskUtils.iou(ins_s,gt_s,[0])\n                            if ious_s[0][0] < iou or ious_o[0][0] < iou:\n                                \n                                continue\n                        # if match successful and best so far, store appropriately\n                        iou=ious\n                        m=gind\n                    # if match made store id of match for both dt and gt\n                    if m ==-1:\n                        continue\n                    dtIg[tind,dind] = gtIg[m]\n                    dtm[tind,dind]  = gt[m]['id']\n                    gtm[tind,m]     = d['id']\n        # set unmatched detections outside of area range to ignore\n        a = 
np.array([d['area']<aRng[0] or d['area']>aRng[1] for d in dt]).reshape((1, len(dt)))\n        dtIg = np.logical_or(dtIg, np.logical_and(dtm==0, np.repeat(a,T,0)))\n        # store results for given image and category\n        return {\n                'image_id':     imgId,\n                'category_id':  catId,\n                'aRng':         aRng,\n                'maxDet':       maxDet,\n                'dtIds':        [d['id'] for d in dt],\n                'gtIds':        [g['id'] for g in gt],\n                'dtMatches':    dtm,\n                'gtMatches':    gtm,\n                'dtScores':     [d['score'] for d in dt],\n                'gtIgnore':     gtIg,\n                'dtIgnore':     dtIg,\n            }\n\n    def accumulate(self, p = None):\n        '''\n        Accumulate per image evaluation results and store the result in self.eval\n        :param p: input params for evaluation\n        :return: None\n        '''\n        print('Accumulating evaluation results...')\n        tic = time.time()\n        if not self.evalImgs:\n            print('Please run evaluate() first')\n        # allows input customized parameters\n        if p is None:\n            p = self.params\n        p.catIds = p.catIds if p.useCats == 1 else [-1]\n        T           = len(p.iouThrs)\n        R           = len(p.recThrs)\n        K           = len(p.catIds) if p.useCats else 1\n        A           = len(p.areaRng)\n        M           = len(p.maxDets)\n        precision   = -np.ones((T,R,K,A,M)) # -1 for the precision of absent categories\n        recall      = -np.ones((T,K,A,M))\n        scores      = -np.ones((T,R,K,A,M))\n\n        # create dictionary for future indexing\n        _pe = self._paramsEval\n        catIds = _pe.catIds if _pe.useCats else [-1]\n        setK = set(catIds)\n        setA = set(map(tuple, _pe.areaRng))\n        setM = set(_pe.maxDets)\n        setI = set(_pe.imgIds)\n        # get inds to evaluate\n        k_list = [n for n, k in 
enumerate(p.catIds)  if k in setK]\n        m_list = [m for n, m in enumerate(p.maxDets) if m in setM]\n        a_list = [n for n, a in enumerate(map(lambda x: tuple(x), p.areaRng)) if a in setA]\n        i_list = [n for n, i in enumerate(p.imgIds)  if i in setI]\n        I0 = len(_pe.imgIds)\n        A0 = len(_pe.areaRng)\n        # retrieve E at each category, area range, and max number of detections\n        for k, k0 in enumerate(k_list):\n            Nk = k0*A0*I0\n            for a, a0 in enumerate(a_list):\n                Na = a0*I0\n                for m, maxDet in enumerate(m_list):\n                    E = [self.evalImgs[Nk + Na + i] for i in i_list]\n                    E = [e for e in E if not e is None]\n                    if len(E) == 0:\n                        continue\n                    dtScores = np.concatenate([e['dtScores'][0:maxDet] for e in E])\n\n                    # different sorting method generates slightly different results.\n                    # mergesort is used to be consistent as Matlab implementation.\n                    inds = np.argsort(-dtScores, kind='mergesort')\n                    dtScoresSorted = dtScores[inds]\n\n                    dtm  = np.concatenate([e['dtMatches'][:,0:maxDet] for e in E], axis=1)[:,inds]\n                    dtIg = np.concatenate([e['dtIgnore'][:,0:maxDet]  for e in E], axis=1)[:,inds]\n                    gtIg = np.concatenate([e['gtIgnore'] for e in E])\n                    npig = np.count_nonzero(gtIg==0 )\n                    if npig == 0:\n                        continue\n                    tps = np.logical_and(               dtm,  np.logical_not(dtIg) )\n                    fps = np.logical_and(np.logical_not(dtm), np.logical_not(dtIg) )\n\n                    tp_sum = np.cumsum(tps, axis=1).astype(dtype=np.float)\n                    fp_sum = np.cumsum(fps, axis=1).astype(dtype=np.float)\n                    for t, (tp, fp) in enumerate(zip(tp_sum, fp_sum)):\n                        tp 
= np.array(tp)\n                        fp = np.array(fp)\n                        nd = len(tp)\n                        rc = tp / npig\n                        pr = tp / (fp+tp+np.spacing(1))\n                        q  = np.zeros((R,))\n                        ss = np.zeros((R,))\n\n                        if nd:\n                            recall[t,k,a,m] = rc[-1]\n                        else:\n                            recall[t,k,a,m] = 0\n\n                        # numpy is slow without cython optimization for accessing elements\n                        # use python array gets significant speed improvement\n                        pr = pr.tolist(); q = q.tolist()\n\n                        for i in range(nd-1, 0, -1):\n                            if pr[i] > pr[i-1]:\n                                pr[i-1] = pr[i]\n\n                        inds = np.searchsorted(rc, p.recThrs, side='left')\n                        try:\n                            for ri, pi in enumerate(inds):\n                                q[ri] = pr[pi]\n                                ss[ri] = dtScoresSorted[pi]\n                        except:\n                            pass\n                        precision[t,:,k,a,m] = np.array(q)\n                        scores[t,:,k,a,m] = np.array(ss)\n        self.eval = {\n            'params': p,\n            'counts': [T, R, K, A, M],\n            'date': datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'),\n            'precision': precision,\n            'recall':   recall,\n            'scores': scores,\n        }\n        toc = time.time()\n        print('DONE (t={:0.2f}s).'.format( toc-tic))\n\n    def summarize(self):\n        '''\n        Compute and display summary metrics for evaluation results.\n        Note this function can *only* be applied on the default parameter setting\n        '''\n        def _summarize( ap=1, iouThr=None, areaRng='all', maxDets=100 ):\n            p = self.params\n            iStr = ' {:<18} {} @[ 
IoU={:<9} | area={:>6s} | maxDets={:>3d} ] = {:0.3f}'\n            titleStr = 'Average Precision' if ap == 1 else 'Average Recall'\n            typeStr = '(AP)' if ap==1 else '(AR)'\n            iouStr = '{:0.2f}:{:0.2f}'.format(p.iouThrs[0], p.iouThrs[-1]) \\\n                if iouThr is None else '{:0.2f}'.format(iouThr)\n\n            aind = [i for i, aRng in enumerate(p.areaRngLbl) if aRng == areaRng]\n            mind = [i for i, mDet in enumerate(p.maxDets) if mDet == maxDets]\n            if ap == 1:\n                # dimension of precision: [TxRxKxAxM]\n                s = self.eval['precision']\n                # IoU\n                if iouThr is not None:\n                    t = np.where(iouThr == p.iouThrs)[0]\n                    s = s[t]\n                s = s[:,:,:,aind,mind]\n            else:\n                # dimension of recall: [TxKxAxM]\n                s = self.eval['recall']\n                if iouThr is not None:\n                    t = np.where(iouThr == p.iouThrs)[0]\n                    s = s[t]\n                s = s[:,:,aind,mind]\n            if len(s[s>-1])==0:\n                mean_s = -1\n            else:\n                mean_s = np.mean(s[s>-1])\n            print(iStr.format(titleStr, typeStr, iouStr, areaRng, maxDets, mean_s))\n            return mean_s\n        def _summarizeDets():\n            stats = np.zeros((12,))\n            stats[0] = _summarize(1)\n            stats[1] = _summarize(1, iouThr=.5, maxDets=self.params.maxDets[2])\n            stats[2] = _summarize(1, iouThr=.75, maxDets=self.params.maxDets[2])\n            # stats[3] = _summarize(1, areaRng='small', maxDets=self.params.maxDets[2])\n            # stats[4] = _summarize(1, areaRng='medium', maxDets=self.params.maxDets[2])\n            # stats[5] = _summarize(1, areaRng='large', maxDets=self.params.maxDets[2])\n            stats[6] = _summarize(0, maxDets=self.params.maxDets[0])\n            stats[7] = _summarize(0, maxDets=self.params.maxDets[1])\n      
      stats[8] = _summarize(0, maxDets=self.params.maxDets[2])\n            # stats[9] = _summarize(0, areaRng='small', maxDets=self.params.maxDets[2])\n            # stats[10] = _summarize(0, areaRng='medium', maxDets=self.params.maxDets[2])\n            # stats[11] = _summarize(0, areaRng='large', maxDets=self.params.maxDets[2])\n            return stats\n        def _summarizeKps():\n            stats = np.zeros((10,))\n            stats[0] = _summarize(1, maxDets=20)\n            stats[1] = _summarize(1, maxDets=20, iouThr=.5)\n            stats[2] = _summarize(1, maxDets=20, iouThr=.75)\n            stats[3] = _summarize(1, maxDets=20, areaRng='medium')\n            stats[4] = _summarize(1, maxDets=20, areaRng='large')\n            stats[5] = _summarize(0, maxDets=20)\n            stats[6] = _summarize(0, maxDets=20, iouThr=.5)\n            stats[7] = _summarize(0, maxDets=20, iouThr=.75)\n            stats[8] = _summarize(0, maxDets=20, areaRng='medium')\n            stats[9] = _summarize(0, maxDets=20, areaRng='large')\n            return stats\n        if not self.eval:\n            raise Exception('Please run accumulate() first')\n        iouType = self.params.iouType\n        if iouType == 'segm' or iouType == 'bbox':\n            summarize = _summarizeDets\n        elif iouType == 'keypoints':\n            summarize = _summarizeKps\n        self.stats = summarize()\n\n    def __str__(self):\n        self.summarize()\n\nclass Params:\n    '''\n    Params for isd evaluation api\n    '''\n    def setDetParams(self):\n        self.imgIds = []\n        self.catIds = []\n        # np.arange causes trouble.  
the data point on arange is slightly larger than the true value\n        self.iouThrs = np.linspace(.5, 0.95, int(np.round((0.95 - .5) / .05)) + 1, endpoint=True)\n        self.recThrs = np.linspace(.0, 1.00, int(np.round((1.00 - .0) / .01)) + 1, endpoint=True)\n        self.maxDets = [1, 20, 100]\n        self.areaRng = [[0 ** 2, 1e5 ** 2]]#, [0 ** 2, 32 ** 2], [32 ** 2, 96 ** 2], [96 ** 2, 1e5 ** 2]]\n        self.areaRngLbl = ['all', 'small', 'medium', 'large']\n        self.useCats = 1\n\n    def setKpParams(self):\n        self.imgIds = []\n        self.catIds = []\n        # np.arange causes trouble.  the data point on arange is slightly larger than the true value\n        self.iouThrs = np.linspace(.5, 0.95, int(np.round((0.95 - .5) / .05)) + 1, endpoint=True)\n        self.recThrs = np.linspace(.0, 1.00, int(np.round((1.00 - .0) / .01)) + 1, endpoint=True)\n        self.maxDets = [20]\n        self.areaRng = [[0 ** 2, 1e5 ** 2], [32 ** 2, 96 ** 2], [96 ** 2, 1e5 ** 2]]\n        self.areaRngLbl = ['all', 'medium', 'large']\n        self.useCats = 1\n        self.kpt_oks_sigmas = np.array([.26, .25, .25, .35, .35, .79, .79, .72, .72, .62,.62, 1.07, 1.07, .87, .87, .89, .89])/10.0\n\n    def __init__(self, iouType='segm'):\n        if iouType == 'segm' or iouType == 'bbox':\n            self.setDetParams()\n        elif iouType == 'keypoints':\n            self.setKpParams()\n        else:\n            raise Exception('iouType not supported')\n        self.iouType = iouType\n        # useSegm is deprecated\n        self.useSegm = None\n"
  },
  {
    "path": "PythonAPI/setup.py",
    "content": "from setuptools import setup, Extension\nimport numpy as np\n\n# To compile and install locally run \"python setup.py build_ext --inplace\"\n# To install library to Python site-packages run \"python setup.py build_ext install\"\n\next_modules = [\n    Extension(\n        'pysobatools._mask',\n        sources=['./common/maskApi.c', 'pysobatools/_mask.pyx'],\n        include_dirs = [np.get_include(), './common'],\n        extra_compile_args=['-Wno-cpp', '-Wno-unused-function', '-std=c99'],\n    )\n]\n\nsetup(\n    name='pysobatools',\n    packages=['pysobatools'],\n    package_dir = {'pysobatools': 'pysobatools'},\n    install_requires=[\n        'setuptools>=18.0',\n        'cython>=0.27.3',\n        'matplotlib>=2.1.0'\n    ],\n    version='2.0',\n    ext_modules= ext_modules\n)\n"
  },
  {
    "path": "README.md",
    "content": "# Instance Shadow Detection (CVPR’ 20)\n\n[Tianyu Wang](https://stevewongv.github.io)\\*, [Xiaowei Hu](https://xw-hu.github.io)\\*, Qiong Wang,  Pheng-Ann Heng,  and [Chi-Wing Fu](http://www.cse.cuhk.edu.hk/~cwfu/)\n (\\* Joint first authors.)\n\n[[`openaccess`](http://openaccess.thecvf.com/content_CVPR_2020/papers/Wang_Instance_Shadow_Detection_CVPR_2020_paper.pdf)][[`arXiv`](https://arxiv.org/abs/1911.07034)] [[`BibTeX`](#CitingLISA)]\n\n**News: Our new work on instance shadow detection was accepted in CVPR 2021 as Oral presentation, check [here](https://github.com/stevewongv/SSIS)!**\n\n\n\n\n![-c](demo/demo.jpeg)\n\nInstance shadow detection aims to find shadow instances paired with object instances. We present a dataset, a deep framework, and an evaluation metric to approach this new task. This repo is implemented on [Detectron2](https://github.com/facebookresearch/detectron2).\n\n\n## Dependences\n\n* python>=3.6\n* torch (tested on 1.3.0+cu100 and 1.12.0+cu113)\n* torchvision (tested on 0.4.1+cu100 and 0.13.0+cu113)\n* tensorboard\n* cython\n* jupyter\n* scikit-image\n* numpy\n* opencv-python\n* pycocotools\n\n\n\n\n## Installation \n\nInstall LISA and pysobatools\n\n```bash\n$ cd InstanceShadowDetection\n$ python setup.py install\n$ cd PythonAPI\n$ python setup.py install\n```\n\n## Docker\n\n```bash\n$ cd InstanceShadowDetection/docker\n\n$ docker build --network=host --tag=\"instanceshadow\" -f ./Dockerfile .\n\n$ docker run --gpus all -it --ipc=host --name=instanceshadow --network=host -v /YOURPATH:/data instanceshadow:latest\n```\n\n**(Nvidia-docker)[https://github.com/NVIDIA/nvidia-docker] is needed.**\n\n## Model, dataset and our results\n\nPlease download from [Google Drive](https://drive.google.com/drive/folders/1MKxyq3R6AUeyLai9i9XWzG2C_n5f0ppP). 
Put the model and dataset follow the directory layout below.\n\n    .\n    ├── ...\n    ├── dataset\n    │   ├── SOBA                # put dataset here\n    ├── InstanceShadowDetection # this repo\n    │   ├── projects\n    │   │   ├── LISA\n    │   │   │   ├── output_light\n    │   │   │   │   ├── last_checkpoint.pth\n    │   │   │   │   └── ...\n    │   │   │   └── ...\n    │   └── ...\n    └── ...\n\n\n## Demo\n\n```bash\n$ cd projects/LISA/\n$ python demo.py --input ./demo/web-shadow0573.jpg --output ./ --config ./config/LISA_101_FPN_3x_demo.yaml\n```\n\n## Train\n\n```bash\n$ python train_net.py --num-gpus 2 --config-file ./config/LISA_101_FPN_3x.yaml\n\n```\n## Evaluation\n\n```bash\n$ python train_net.py --num-gpus 2 --config-file ./config/LISA_101_FPN_3x.yaml --eval-only --resume\n$ python SOAP.py\n```\n\n## Visualize\n```bash\npython visualize_json_results.py --ins_input ./output_light/inference/soba_instances_results.json --ass_input ./output_light/inference/soba_association_results.json --output ./output_light/results --dataset soba_cast_shadow_val_full\n```\n## <a name=\"CitingLISA\"></a> Citation\nIf you use LISA, SISS, SOBA, or SOAP, please use the following BibTeX entry.\n\n```\n@InProceedings{Wang_2020_CVPR,\nauthor = {Wang, Tianyu and Hu, Xiaowei and Wang, Qiong and Heng, Pheng-Ann and Fu, Chi-Wing},\ntitle = {Instance Shadow Detection},\nbooktitle = {IEEE/CVF Conference on Computer Vision and Pattern Recognition (CVPR)},\nmonth = {June},\nyear = {2020}\n}\n\n@InProceedings{Wang_2021_CVPR,\nauthor    = {Wang, Tianyu and Hu, Xiaowei and Fu, Chi-Wing and Heng, Pheng-Ann},\ntitle     = {Single-Stage Instance Shadow Detection With Bidirectional Relation Learning},\nbooktitle = {Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition (CVPR)},\nmonth     = {June},\nYear      = {2021},\npages     = {1-11}\n}\n```\n"
  },
  {
    "path": "configs/Base-RCNN-C4.yaml",
    "content": "MODEL:\n  META_ARCHITECTURE: \"GeneralizedRCNN\"\n  RPN:\n    PRE_NMS_TOPK_TEST: 6000\n    POST_NMS_TOPK_TEST: 1000\n  ROI_HEADS:\n    NAME: \"Res5ROIHeads\"\nDATASETS:\n  TRAIN: (\"coco_2017_train\",)\n  TEST: (\"coco_2017_val\",)\nSOLVER:\n  IMS_PER_BATCH: 16\n  BASE_LR: 0.02\n  STEPS: (60000, 80000)\n  MAX_ITER: 90000\nINPUT:\n  MIN_SIZE_TRAIN: (640, 672, 704, 736, 768, 800)\nVERSION: 2\n"
  },
  {
    "path": "configs/Base-RCNN-DilatedC5.yaml",
    "content": "MODEL:\n  META_ARCHITECTURE: \"GeneralizedRCNN\"\n  RESNETS:\n    OUT_FEATURES: [\"res5\"]\n    RES5_DILATION: 2\n  RPN:\n    IN_FEATURES: [\"res5\"]\n    PRE_NMS_TOPK_TEST: 6000\n    POST_NMS_TOPK_TEST: 1000\n  ROI_HEADS:\n    NAME: \"StandardROIHeads\"\n    IN_FEATURES: [\"res5\"]\n  ROI_BOX_HEAD:\n    NAME: \"FastRCNNConvFCHead\"\n    NUM_FC: 2\n    POOLER_RESOLUTION: 7\n  ROI_MASK_HEAD:\n    NAME: \"MaskRCNNConvUpsampleHead\"\n    NUM_CONV: 4\n    POOLER_RESOLUTION: 14\nDATASETS:\n  TRAIN: (\"coco_2017_train\",)\n  TEST: (\"coco_2017_val\",)\nSOLVER:\n  IMS_PER_BATCH: 16\n  BASE_LR: 0.02\n  STEPS: (60000, 80000)\n  MAX_ITER: 90000\nINPUT:\n  MIN_SIZE_TRAIN: (640, 672, 704, 736, 768, 800)\nVERSION: 2\n"
  },
  {
    "path": "configs/Base-RCNN-FPN.yaml",
    "content": "MODEL:\n  META_ARCHITECTURE: \"GeneralizedRCNN\"\n  BACKBONE:\n    NAME: \"build_resnet_fpn_backbone\"\n  RESNETS:\n    OUT_FEATURES: [\"res2\", \"res3\", \"res4\", \"res5\"]\n  FPN:\n    IN_FEATURES: [\"res2\", \"res3\", \"res4\", \"res5\"]\n  ANCHOR_GENERATOR:\n    SIZES: [[32], [64], [128], [256], [512]]  # One size for each in feature map\n    ASPECT_RATIOS: [[0.5, 1.0, 2.0]]  # Three aspect ratios (same for all in feature maps)\n  RPN:\n    IN_FEATURES: [\"p2\", \"p3\", \"p4\", \"p5\", \"p6\"]\n    PRE_NMS_TOPK_TRAIN: 2000  # Per FPN level\n    PRE_NMS_TOPK_TEST: 1000  # Per FPN level\n    # Detectron1 uses 2000 proposals per-batch,\n    # (See \"modeling/rpn/rpn_outputs.py\" for details of this legacy issue)\n    # which is approximately 1000 proposals per-image since the default batch size for FPN is 2.\n    POST_NMS_TOPK_TRAIN: 1000\n    POST_NMS_TOPK_TEST: 1000\n  ROI_HEADS:\n    NAME: \"StandardROIHeads\"\n    IN_FEATURES: [\"p2\", \"p3\", \"p4\", \"p5\"]\n  ROI_BOX_HEAD:\n    NAME: \"FastRCNNConvFCHead\"\n    NUM_FC: 2\n    POOLER_RESOLUTION: 7\n  ROI_MASK_HEAD:\n    NAME: \"MaskRCNNConvUpsampleHead\"\n    NUM_CONV: 4\n    POOLER_RESOLUTION: 14\nDATASETS:\n  TRAIN: (\"coco_2017_train\",)\n  TEST: (\"coco_2017_val\",)\nSOLVER:\n  IMS_PER_BATCH: 16\n  BASE_LR: 0.02\n  STEPS: (60000, 80000)\n  MAX_ITER: 90000\nINPUT:\n  MIN_SIZE_TRAIN: (640, 672, 704, 736, 768, 800)\nVERSION: 2\n"
  },
  {
    "path": "configs/Base-RetinaNet.yaml",
    "content": "MODEL:\n  META_ARCHITECTURE: \"RetinaNet\"\n  BACKBONE:\n    NAME: \"build_retinanet_resnet_fpn_backbone\"\n  RESNETS:\n    OUT_FEATURES: [\"res3\", \"res4\", \"res5\"]\n  ANCHOR_GENERATOR:\n    SIZES: !!python/object/apply:eval [\"[[x, x * 2**(1.0/3), x * 2**(2.0/3) ] for x in [32, 64, 128, 256, 512 ]]\"]\n  FPN:\n    IN_FEATURES: [\"res3\", \"res4\", \"res5\"]\n  RETINANET:\n    IOU_THRESHOLDS: [0.4, 0.5]\n    IOU_LABELS: [0, -1, 1]\nDATASETS:\n  TRAIN: (\"coco_2017_train\",)\n  TEST: (\"coco_2017_val\",)\nSOLVER:\n  IMS_PER_BATCH: 16\n  BASE_LR: 0.01  # Note that RetinaNet uses a different default learning rate\n  STEPS: (60000, 80000)\n  MAX_ITER: 90000\nINPUT:\n  MIN_SIZE_TRAIN: (640, 672, 704, 736, 768, 800)\nVERSION: 2\n"
  },
  {
    "path": "configs/COCO-Detection/fast_rcnn_R_50_FPN_1x.yaml",
    "content": "_BASE_: \"../Base-RCNN-FPN.yaml\"\nMODEL:\n  WEIGHTS: \"detectron2://ImageNetPretrained/MSRA/R-50.pkl\"\n  MASK_ON: False\n  LOAD_PROPOSALS: True\n  RESNETS:\n    DEPTH: 50\n  PROPOSAL_GENERATOR:\n    NAME: \"PrecomputedProposals\"\nDATASETS:\n  TRAIN: (\"coco_2017_train\",)\n  PROPOSAL_FILES_TRAIN: (\"detectron2://COCO-Detection/rpn_R_50_FPN_1x/137258492/coco_2017_train_box_proposals_21bc3a.pkl\", )\n  TEST: (\"coco_2017_val\",)\n  PROPOSAL_FILES_TEST: (\"detectron2://COCO-Detection/rpn_R_50_FPN_1x/137258492/coco_2017_val_box_proposals_ee0dad.pkl\", )\nDATALOADER:\n  # proposals are part of the dataset_dicts, and take a lot of RAM\n  NUM_WORKERS: 2\n"
  },
  {
    "path": "configs/COCO-Detection/faster_rcnn_R_101_C4_3x.yaml",
    "content": "_BASE_: \"../Base-RCNN-C4.yaml\"\nMODEL:\n  WEIGHTS: \"detectron2://ImageNetPretrained/MSRA/R-101.pkl\"\n  MASK_ON: False\n  RESNETS:\n    DEPTH: 101\nSOLVER:\n  STEPS: (210000, 250000)\n  MAX_ITER: 270000\n"
  },
  {
    "path": "configs/COCO-Detection/faster_rcnn_R_101_DC5_3x.yaml",
    "content": "_BASE_: \"../Base-RCNN-DilatedC5.yaml\"\nMODEL:\n  WEIGHTS: \"detectron2://ImageNetPretrained/MSRA/R-101.pkl\"\n  MASK_ON: False\n  RESNETS:\n    DEPTH: 101\nSOLVER:\n  STEPS: (210000, 250000)\n  MAX_ITER: 270000\n"
  },
  {
    "path": "configs/COCO-Detection/faster_rcnn_R_101_FPN_3x.yaml",
    "content": "_BASE_: \"../Base-RCNN-FPN.yaml\"\nMODEL:\n  WEIGHTS: \"detectron2://ImageNetPretrained/MSRA/R-101.pkl\"\n  MASK_ON: False\n  RESNETS:\n    DEPTH: 101\nSOLVER:\n  STEPS: (210000, 250000)\n  MAX_ITER: 270000\n"
  },
  {
    "path": "configs/COCO-Detection/faster_rcnn_R_50_C4_1x.yaml",
    "content": "_BASE_: \"../Base-RCNN-C4.yaml\"\nMODEL:\n  WEIGHTS: \"detectron2://ImageNetPretrained/MSRA/R-50.pkl\"\n  MASK_ON: False\n  RESNETS:\n    DEPTH: 50\n"
  },
  {
    "path": "configs/COCO-Detection/faster_rcnn_R_50_C4_3x.yaml",
    "content": "_BASE_: \"../Base-RCNN-C4.yaml\"\nMODEL:\n  WEIGHTS: \"detectron2://ImageNetPretrained/MSRA/R-50.pkl\"\n  MASK_ON: False\n  RESNETS:\n    DEPTH: 50\nSOLVER:\n  STEPS: (210000, 250000)\n  MAX_ITER: 270000\n"
  },
  {
    "path": "configs/COCO-Detection/faster_rcnn_R_50_DC5_1x.yaml",
    "content": "_BASE_: \"../Base-RCNN-DilatedC5.yaml\"\nMODEL:\n  WEIGHTS: \"detectron2://ImageNetPretrained/MSRA/R-50.pkl\"\n  MASK_ON: False\n  RESNETS:\n    DEPTH: 50\n"
  },
  {
    "path": "configs/COCO-Detection/faster_rcnn_R_50_DC5_3x.yaml",
    "content": "_BASE_: \"../Base-RCNN-DilatedC5.yaml\"\nMODEL:\n  WEIGHTS: \"detectron2://ImageNetPretrained/MSRA/R-50.pkl\"\n  MASK_ON: False\n  RESNETS:\n    DEPTH: 50\nSOLVER:\n  STEPS: (210000, 250000)\n  MAX_ITER: 270000\n"
  },
  {
    "path": "configs/COCO-Detection/faster_rcnn_R_50_FPN_1x.yaml",
    "content": "_BASE_: \"../Base-RCNN-FPN.yaml\"\nMODEL:\n  WEIGHTS: \"detectron2://ImageNetPretrained/MSRA/R-50.pkl\"\n  MASK_ON: False\n  RESNETS:\n    DEPTH: 50\n"
  },
  {
    "path": "configs/COCO-Detection/faster_rcnn_R_50_FPN_3x.yaml",
    "content": "_BASE_: \"../Base-RCNN-FPN.yaml\"\nMODEL:\n  WEIGHTS: \"detectron2://ImageNetPretrained/MSRA/R-50.pkl\"\n  MASK_ON: False\n  RESNETS:\n    DEPTH: 50\nSOLVER:\n  STEPS: (210000, 250000)\n  MAX_ITER: 270000\n"
  },
  {
    "path": "configs/COCO-Detection/faster_rcnn_X_101_32x8d_FPN_3x.yaml",
    "content": "_BASE_: \"../Base-RCNN-FPN.yaml\"\nMODEL:\n  MASK_ON: False\n  WEIGHTS: \"detectron2://ImageNetPretrained/FAIR/X-101-32x8d.pkl\"\n  PIXEL_STD: [57.375, 57.120, 58.395]\n  RESNETS:\n    STRIDE_IN_1X1: False  # this is a C2 model\n    NUM_GROUPS: 32\n    WIDTH_PER_GROUP: 8\n    DEPTH: 101\nSOLVER:\n  STEPS: (210000, 250000)\n  MAX_ITER: 270000\n"
  },
  {
    "path": "configs/COCO-Detection/retinanet_R_101_FPN_3x.yaml",
    "content": "_BASE_: \"../Base-RetinaNet.yaml\"\nMODEL:\n  WEIGHTS: \"detectron2://ImageNetPretrained/MSRA/R-101.pkl\"\n  RESNETS:\n    DEPTH: 101\nSOLVER:\n  STEPS: (210000, 250000)\n  MAX_ITER: 270000\n"
  },
  {
    "path": "configs/COCO-Detection/retinanet_R_50_FPN_1x.yaml",
    "content": "_BASE_: \"../Base-RetinaNet.yaml\"\nMODEL:\n  WEIGHTS: \"detectron2://ImageNetPretrained/MSRA/R-50.pkl\"\n  RESNETS:\n    DEPTH: 50\n"
  },
  {
    "path": "configs/COCO-Detection/retinanet_R_50_FPN_3x.yaml",
    "content": "_BASE_: \"../Base-RetinaNet.yaml\"\nMODEL:\n  WEIGHTS: \"detectron2://ImageNetPretrained/MSRA/R-50.pkl\"\n  RESNETS:\n    DEPTH: 50\nSOLVER:\n  STEPS: (210000, 250000)\n  MAX_ITER: 270000\n"
  },
  {
    "path": "configs/COCO-Detection/rpn_R_50_C4_1x.yaml",
    "content": "_BASE_: \"../Base-RCNN-C4.yaml\"\nMODEL:\n  META_ARCHITECTURE: \"ProposalNetwork\"\n  WEIGHTS: \"detectron2://ImageNetPretrained/MSRA/R-50.pkl\"\n  MASK_ON: False\n  RESNETS:\n    DEPTH: 50\n  RPN:\n    PRE_NMS_TOPK_TEST: 12000\n    POST_NMS_TOPK_TEST: 2000\n"
  },
  {
    "path": "configs/COCO-Detection/rpn_R_50_FPN_1x.yaml",
    "content": "_BASE_: \"../Base-RCNN-FPN.yaml\"\nMODEL:\n  META_ARCHITECTURE: \"ProposalNetwork\"\n  WEIGHTS: \"detectron2://ImageNetPretrained/MSRA/R-50.pkl\"\n  MASK_ON: False\n  RESNETS:\n    DEPTH: 50\n  RPN:\n    POST_NMS_TOPK_TEST: 2000\n"
  },
  {
    "path": "configs/COCO-InstanceSegmentation/mask_rcnn_R_101_C4_3x.yaml",
    "content": "_BASE_: \"../Base-RCNN-C4.yaml\"\nMODEL:\n  WEIGHTS: \"detectron2://ImageNetPretrained/MSRA/R-101.pkl\"\n  MASK_ON: True\n  RESNETS:\n    DEPTH: 101\nSOLVER:\n  STEPS: (210000, 250000)\n  MAX_ITER: 270000\n"
  },
  {
    "path": "configs/COCO-InstanceSegmentation/mask_rcnn_R_101_DC5_3x.yaml",
    "content": "_BASE_: \"../Base-RCNN-DilatedC5.yaml\"\nMODEL:\n  WEIGHTS: \"detectron2://ImageNetPretrained/MSRA/R-101.pkl\"\n  MASK_ON: True\n  RESNETS:\n    DEPTH: 101\nSOLVER:\n  STEPS: (210000, 250000)\n  MAX_ITER: 270000\n"
  },
  {
    "path": "configs/COCO-InstanceSegmentation/mask_rcnn_R_101_FPN_3x.yaml",
    "content": "_BASE_: \"../Base-RCNN-FPN.yaml\"\nMODEL:\n  WEIGHTS: \"detectron2://ImageNetPretrained/MSRA/R-101.pkl\"\n  MASK_ON: True\n  RESNETS:\n    DEPTH: 101\nSOLVER:\n  STEPS: (210000, 250000)\n  MAX_ITER: 270000\n"
  },
  {
    "path": "configs/COCO-InstanceSegmentation/mask_rcnn_R_50_C4_1x.yaml",
    "content": "_BASE_: \"../Base-RCNN-C4.yaml\"\nMODEL:\n  WEIGHTS: \"detectron2://ImageNetPretrained/MSRA/R-50.pkl\"\n  MASK_ON: True\n  RESNETS:\n    DEPTH: 50\n"
  },
  {
    "path": "configs/COCO-InstanceSegmentation/mask_rcnn_R_50_C4_3x.yaml",
    "content": "_BASE_: \"../Base-RCNN-C4.yaml\"\nMODEL:\n  WEIGHTS: \"detectron2://ImageNetPretrained/MSRA/R-50.pkl\"\n  MASK_ON: True\n  RESNETS:\n    DEPTH: 50\nSOLVER:\n  STEPS: (210000, 250000)\n  MAX_ITER: 270000\n"
  },
  {
    "path": "configs/COCO-InstanceSegmentation/mask_rcnn_R_50_DC5_1x.yaml",
    "content": "_BASE_: \"../Base-RCNN-DilatedC5.yaml\"\nMODEL:\n  WEIGHTS: \"detectron2://ImageNetPretrained/MSRA/R-50.pkl\"\n  MASK_ON: True\n  RESNETS:\n    DEPTH: 50\n"
  },
  {
    "path": "configs/COCO-InstanceSegmentation/mask_rcnn_R_50_DC5_3x.yaml",
    "content": "_BASE_: \"../Base-RCNN-DilatedC5.yaml\"\nMODEL:\n  WEIGHTS: \"detectron2://ImageNetPretrained/MSRA/R-50.pkl\"\n  MASK_ON: True\n  RESNETS:\n    DEPTH: 50\nSOLVER:\n  STEPS: (210000, 250000)\n  MAX_ITER: 270000\n"
  },
  {
    "path": "configs/COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_1x.yaml",
    "content": "_BASE_: \"../Base-RCNN-FPN.yaml\"\nMODEL:\n  WEIGHTS: \"detectron2://ImageNetPretrained/MSRA/R-50.pkl\"\n  MASK_ON: True\n  RESNETS:\n    DEPTH: 50\n"
  },
  {
    "path": "configs/COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x.yaml",
    "content": "_BASE_: \"../Base-RCNN-FPN.yaml\"\nMODEL:\n  WEIGHTS: \"detectron2://ImageNetPretrained/MSRA/R-50.pkl\"\n  MASK_ON: True\n  RESNETS:\n    DEPTH: 50\nSOLVER:\n  STEPS: (210000, 250000)\n  MAX_ITER: 270000\n"
  },
  {
    "path": "configs/COCO-InstanceSegmentation/mask_rcnn_X_101_32x8d_FPN_3x.yaml",
    "content": "_BASE_: \"../Base-RCNN-FPN.yaml\"\nMODEL:\n  MASK_ON: True\n  WEIGHTS: \"detectron2://ImageNetPretrained/FAIR/X-101-32x8d.pkl\"\n  PIXEL_STD: [57.375, 57.120, 58.395]\n  RESNETS:\n    STRIDE_IN_1X1: False  # this is a C2 model\n    NUM_GROUPS: 32\n    WIDTH_PER_GROUP: 8\n    DEPTH: 101\nSOLVER:\n  STEPS: (210000, 250000)\n  MAX_ITER: 270000\n"
  },
  {
    "path": "configs/COCO-Keypoints/Base-Keypoint-RCNN-FPN.yaml",
    "content": "_BASE_: \"../Base-RCNN-FPN.yaml\"\nMODEL:\n  KEYPOINT_ON: True\n  ROI_HEADS:\n    NUM_CLASSES: 1\n  ROI_BOX_HEAD:\n    SMOOTH_L1_BETA: 0.5  # Keypoint AP degrades (though box AP improves) when using plain L1 loss\n  RPN:\n    # Detectron1 uses 2000 proposals per-batch, but this option is per-image in detectron2.\n    # 1000 proposals per-image is found to hurt box AP.\n    # Therefore we increase it to 1500 per-image.\n    POST_NMS_TOPK_TRAIN: 1500\nDATASETS:\n  TRAIN: (\"keypoints_coco_2017_train\",)\n  TEST: (\"keypoints_coco_2017_val\",)\n"
  },
  {
    "path": "configs/COCO-Keypoints/keypoint_rcnn_R_101_FPN_3x.yaml",
    "content": "_BASE_: \"Base-Keypoint-RCNN-FPN.yaml\"\nMODEL:\n  WEIGHTS: \"detectron2://ImageNetPretrained/MSRA/R-101.pkl\"\n  RESNETS:\n    DEPTH: 101\nSOLVER:\n  STEPS: (210000, 250000)\n  MAX_ITER: 270000\n"
  },
  {
    "path": "configs/COCO-Keypoints/keypoint_rcnn_R_50_FPN_1x.yaml",
    "content": "_BASE_: \"Base-Keypoint-RCNN-FPN.yaml\"\nMODEL:\n  WEIGHTS: \"detectron2://ImageNetPretrained/MSRA/R-50.pkl\"\n  RESNETS:\n    DEPTH: 50\n"
  },
  {
    "path": "configs/COCO-Keypoints/keypoint_rcnn_R_50_FPN_3x.yaml",
    "content": "_BASE_: \"Base-Keypoint-RCNN-FPN.yaml\"\nMODEL:\n  WEIGHTS: \"detectron2://ImageNetPretrained/MSRA/R-50.pkl\"\n  RESNETS:\n    DEPTH: 50\nSOLVER:\n  STEPS: (210000, 250000)\n  MAX_ITER: 270000\n"
  },
  {
    "path": "configs/COCO-Keypoints/keypoint_rcnn_X_101_32x8d_FPN_3x.yaml",
    "content": "_BASE_: \"Base-Keypoint-RCNN-FPN.yaml\"\nMODEL:\n  WEIGHTS: \"detectron2://ImageNetPretrained/FAIR/X-101-32x8d.pkl\"\n  PIXEL_STD: [57.375, 57.120, 58.395]\n  RESNETS:\n    STRIDE_IN_1X1: False  # this is a C2 model\n    NUM_GROUPS: 32\n    WIDTH_PER_GROUP: 8\n    DEPTH: 101\nSOLVER:\n  STEPS: (210000, 250000)\n  MAX_ITER: 270000\n"
  },
  {
    "path": "configs/COCO-PanopticSegmentation/Base-Panoptic-FPN.yaml",
    "content": "_BASE_: \"../Base-RCNN-FPN.yaml\"\nMODEL:\n  META_ARCHITECTURE: \"PanopticFPN\"\n  MASK_ON: True\n  SEM_SEG_HEAD:\n    LOSS_WEIGHT: 0.5\nDATASETS:\n  TRAIN: (\"coco_2017_train_panoptic_separated\",)\n  TEST: (\"coco_2017_val_panoptic_separated\",)\n"
  },
  {
    "path": "configs/COCO-PanopticSegmentation/panoptic_fpn_R_101_3x.yaml",
    "content": "_BASE_: \"Base-Panoptic-FPN.yaml\"\nMODEL:\n  WEIGHTS: \"detectron2://ImageNetPretrained/MSRA/R-101.pkl\"\n  RESNETS:\n    DEPTH: 101\nSOLVER:\n  STEPS: (210000, 250000)\n  MAX_ITER: 270000\n"
  },
  {
    "path": "configs/COCO-PanopticSegmentation/panoptic_fpn_R_50_1x.yaml",
    "content": "_BASE_: \"Base-Panoptic-FPN.yaml\"\nMODEL:\n  WEIGHTS: \"detectron2://ImageNetPretrained/MSRA/R-50.pkl\"\n  RESNETS:\n    DEPTH: 50\n"
  },
  {
    "path": "configs/COCO-PanopticSegmentation/panoptic_fpn_R_50_3x.yaml",
    "content": "_BASE_: \"Base-Panoptic-FPN.yaml\"\nMODEL:\n  WEIGHTS: \"detectron2://ImageNetPretrained/MSRA/R-50.pkl\"\n  RESNETS:\n    DEPTH: 50\nSOLVER:\n  STEPS: (210000, 250000)\n  MAX_ITER: 270000\n"
  },
  {
    "path": "configs/Cityscapes/mask_rcnn_R_50_FPN.yaml",
    "content": "_BASE_: \"../Base-RCNN-FPN.yaml\"\nMODEL:\n  # WEIGHTS: \"detectron2://ImageNetPretrained/MSRA/R-50.pkl\"\n  # For better, more stable performance initialize from COCO\n  WEIGHTS: \"detectron2://COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x/137849600/model_final_f10217.pkl\"\n  MASK_ON: True\n  ROI_HEADS:\n    NUM_CLASSES: 8\n# This is similar to the setting used in Mask R-CNN paper, Appendix A\n# But there are some differences, e.g., we did not initialize the output\n# layer using the corresponding classes from COCO\nINPUT:\n  MIN_SIZE_TRAIN: (800, 832, 864, 896, 928, 960, 992, 1024)\n  MIN_SIZE_TRAIN_SAMPLING: \"choice\"\n  MIN_SIZE_TEST: 1024\n  MAX_SIZE_TRAIN: 2048\n  MAX_SIZE_TEST: 2048\nDATASETS:\n  TRAIN: (\"cityscapes_fine_instance_seg_train\",)\n  TEST: (\"cityscapes_fine_instance_seg_val\",)\nSOLVER:\n  BASE_LR: 0.01\n  STEPS: (18000,)\n  MAX_ITER: 24000\n  IMS_PER_BATCH: 8\nTEST:\n  EVAL_PERIOD: 8000\n"
  },
  {
    "path": "configs/Detectron1-Comparisons/README.md",
    "content": "\nDetectron2's default settings and a few implementation details are different from Detectron.\n\nThe differences in implementation details are shared in\n[Compatibility with Other Libraries](../../docs/notes/compatibility.md).\n\nThe differences in default config includes:\n* Use scale augmentation during training.\n* Use L1 loss instead of smooth L1 loss.\n* Use `POOLER_SAMPLING_RATIO=0` instead of 2.\n* Use `ROIAlignV2`.\n\nIn this directory, we provide a few configs that mimic Detectron's behavior as close as possible.\nThis provides a fair comparison of accuracy and speed against Detectron.\n\n<!--\n./gen_html_table.py --config 'Detectron1-Comparisons/*.yaml' --name \"Faster R-CNN\" \"Keypoint R-CNN\" \"Mask R-CNN\" --fields lr_sched train_speed inference_speed mem box_AP mask_AP keypoint_AP\n-->\n\n\n<table><tbody>\n<!-- START TABLE -->\n<!-- TABLE HEADER -->\n<th valign=\"bottom\">Name</th>\n<th valign=\"bottom\">lr<br/>sched</th>\n<th valign=\"bottom\">train<br/>time<br/>(s/iter)</th>\n<th valign=\"bottom\">inference<br/>time<br/>(s/im)</th>\n<th valign=\"bottom\">train<br/>mem<br/>(GB)</th>\n<th valign=\"bottom\">box<br/>AP</th>\n<th valign=\"bottom\">mask<br/>AP</th>\n<th valign=\"bottom\">kp.<br/>AP</th>\n<th valign=\"bottom\">model id</th>\n<th valign=\"bottom\">download</th>\n<!-- TABLE BODY -->\n<!-- ROW: faster_rcnn_R_50_FPN_noaug_1x -->\n <tr><td align=\"left\"><a href=\"configs/Detectron1-Comparisons/faster_rcnn_R_50_FPN_noaug_1x.yaml\">Faster R-CNN</a></td>\n<td align=\"center\">1x</td>\n<td align=\"center\">0.219</td>\n<td align=\"center\">0.048</td>\n<td align=\"center\">3.1</td>\n<td align=\"center\">36.9</td>\n<td align=\"center\"></td>\n<td align=\"center\"></td>\n<td align=\"center\">137781054</td>\n<td align=\"center\"><a href=\"https://dl.fbaipublicfiles.com/detectron2/Detectron1-Comparisons/faster_rcnn_R_50_FPN_noaug_1x/137781054/model_final_7ab50c.pkl\">model</a>&nbsp;|&nbsp;<a 
href=\"https://dl.fbaipublicfiles.com/detectron2/Detectron1-Comparisons/faster_rcnn_R_50_FPN_noaug_1x/137781054/metrics.json\">metrics</a></td>\n</tr>\n<!-- ROW: keypoint_rcnn_R_50_FPN_1x -->\n <tr><td align=\"left\"><a href=\"configs/Detectron1-Comparisons/keypoint_rcnn_R_50_FPN_1x.yaml\">Keypoint R-CNN</a></td>\n<td align=\"center\">1x</td>\n<td align=\"center\">0.313</td>\n<td align=\"center\">0.082</td>\n<td align=\"center\">5.0</td>\n<td align=\"center\">53.1</td>\n<td align=\"center\"></td>\n<td align=\"center\">64.2</td>\n<td align=\"center\">137781195</td>\n<td align=\"center\"><a href=\"https://dl.fbaipublicfiles.com/detectron2/Detectron1-Comparisons/keypoint_rcnn_R_50_FPN_1x/137781195/model_final_cce136.pkl\">model</a>&nbsp;|&nbsp;<a href=\"https://dl.fbaipublicfiles.com/detectron2/Detectron1-Comparisons/keypoint_rcnn_R_50_FPN_1x/137781195/metrics.json\">metrics</a></td>\n</tr>\n<!-- ROW: mask_rcnn_R_50_FPN_noaug_1x -->\n <tr><td align=\"left\"><a href=\"configs/Detectron1-Comparisons/mask_rcnn_R_50_FPN_noaug_1x.yaml\">Mask R-CNN</a></td>\n<td align=\"center\">1x</td>\n<td align=\"center\">0.273</td>\n<td align=\"center\">0.052</td>\n<td align=\"center\">3.4</td>\n<td align=\"center\">37.8</td>\n<td align=\"center\">34.9</td>\n<td align=\"center\"></td>\n<td align=\"center\">137781281</td>\n<td align=\"center\"><a href=\"https://dl.fbaipublicfiles.com/detectron2/Detectron1-Comparisons/mask_rcnn_R_50_FPN_noaug_1x/137781281/model_final_62ca52.pkl\">model</a>&nbsp;|&nbsp;<a href=\"https://dl.fbaipublicfiles.com/detectron2/Detectron1-Comparisons/mask_rcnn_R_50_FPN_noaug_1x/137781281/metrics.json\">metrics</a></td>\n</tr>\n</tbody></table>\n\n## Comparisons:\n\n* Faster R-CNN: Detectron's AP is 36.7, similar to ours.\n* Keypoint R-CNN: Detectron's AP is box 53.6, keypoint 64.2. 
Fixing a Detectron\n  [bug](https://github.com/facebookresearch/Detectron/issues/459) led to a drop in box AP, and can be\n\tcompensated back by some parameter tuning.\n* Mask R-CNN: Detectron's AP is box 37.7, mask 33.9. We're 1 AP better in mask AP, due to more correct implementation.\n\nFor speed comparison, see [benchmarks](https://detectron2.readthedocs.io/notes/benchmarks.html).\n"
  },
  {
    "path": "configs/Detectron1-Comparisons/faster_rcnn_R_50_FPN_noaug_1x.yaml",
    "content": "_BASE_: \"../Base-RCNN-FPN.yaml\"\nMODEL:\n  WEIGHTS: \"detectron2://ImageNetPretrained/MSRA/R-50.pkl\"\n  MASK_ON: False\n  RESNETS:\n    DEPTH: 50\n  # Detectron1 uses smooth L1 loss with some magic beta values.\n  # The defaults are changed to L1 loss in Detectron2.\n  RPN:\n    SMOOTH_L1_BETA: 0.1111\n  ROI_BOX_HEAD:\n    SMOOTH_L1_BETA: 1.0\n    POOLER_SAMPLING_RATIO: 2\n    POOLER_TYPE: \"ROIAlign\"\nINPUT:\n  # no scale augmentation\n  MIN_SIZE_TRAIN: (800, )\n"
  },
  {
    "path": "configs/Detectron1-Comparisons/keypoint_rcnn_R_50_FPN_1x.yaml",
    "content": "_BASE_: \"../Base-RCNN-FPN.yaml\"\nMODEL:\n  WEIGHTS: \"detectron2://ImageNetPretrained/MSRA/R-50.pkl\"\n  KEYPOINT_ON: True\n  RESNETS:\n    DEPTH: 50\n  ROI_HEADS:\n    NUM_CLASSES: 1\n  ROI_KEYPOINT_HEAD:\n    POOLER_RESOLUTION: 14\n    POOLER_SAMPLING_RATIO: 2\n    POOLER_TYPE: \"ROIAlign\"\n  # Detectron1 uses smooth L1 loss with some magic beta values.\n  # The defaults are changed to L1 loss in Detectron2.\n  ROI_BOX_HEAD:\n    SMOOTH_L1_BETA: 1.0\n    POOLER_SAMPLING_RATIO: 2\n    POOLER_TYPE: \"ROIAlign\"\n  RPN:\n    SMOOTH_L1_BETA: 0.1111\n    # Detectron1 uses 2000 proposals per-batch, but this option is per-image in detectron2\n    # 1000 proposals per-image is found to hurt box AP.\n    # Therefore we increase it to 1500 per-image.\n    POST_NMS_TOPK_TRAIN: 1500\nDATASETS:\n  TRAIN: (\"keypoints_coco_2017_train\",)\n  TEST: (\"keypoints_coco_2017_val\",)\n"
  },
  {
    "path": "configs/Detectron1-Comparisons/mask_rcnn_R_50_FPN_noaug_1x.yaml",
    "content": "_BASE_: \"../Base-RCNN-FPN.yaml\"\nMODEL:\n  WEIGHTS: \"detectron2://ImageNetPretrained/MSRA/R-50.pkl\"\n  MASK_ON: True\n  RESNETS:\n    DEPTH: 50\n  # Detectron1 uses smooth L1 loss with some magic beta values.\n  # The defaults are changed to L1 loss in Detectron2.\n  RPN:\n    SMOOTH_L1_BETA: 0.1111\n  ROI_BOX_HEAD:\n    SMOOTH_L1_BETA: 1.0\n    POOLER_SAMPLING_RATIO: 2\n    POOLER_TYPE: \"ROIAlign\"\n  ROI_MASK_HEAD:\n    POOLER_SAMPLING_RATIO: 2\n    POOLER_TYPE: \"ROIAlign\"\nINPUT:\n  # no scale augmentation\n  MIN_SIZE_TRAIN: (800, )\n"
  },
  {
    "path": "configs/LVIS-InstanceSegmentation/mask_rcnn_R_101_FPN_1x.yaml",
    "content": "_BASE_: \"../Base-RCNN-FPN.yaml\"\nMODEL:\n  WEIGHTS: \"detectron2://ImageNetPretrained/MSRA/R-101.pkl\"\n  MASK_ON: True\n  RESNETS:\n    DEPTH: 101\n  ROI_HEADS:\n    NUM_CLASSES: 1230\n    SCORE_THRESH_TEST: 0.0001\nINPUT:\n  MIN_SIZE_TRAIN: (640, 672, 704, 736, 768, 800)\nDATASETS:\n  TRAIN: (\"lvis_v0.5_train\",)\n  TEST: (\"lvis_v0.5_val\",)\nTEST:\n  DETECTIONS_PER_IMAGE: 300  # LVIS allows up to 300\nDATALOADER:\n  SAMPLER_TRAIN: \"RepeatFactorTrainingSampler\"\n  REPEAT_THRESHOLD: 0.001\n"
  },
  {
    "path": "configs/LVIS-InstanceSegmentation/mask_rcnn_R_50_FPN_1x.yaml",
    "content": "_BASE_: \"../Base-RCNN-FPN.yaml\"\nMODEL:\n  WEIGHTS: \"detectron2://ImageNetPretrained/MSRA/R-50.pkl\"\n  MASK_ON: True\n  RESNETS:\n    DEPTH: 50\n  ROI_HEADS:\n    NUM_CLASSES: 1230\n    SCORE_THRESH_TEST: 0.0001\nINPUT:\n  MIN_SIZE_TRAIN: (640, 672, 704, 736, 768, 800)\nDATASETS:\n  TRAIN: (\"lvis_v0.5_train\",)\n  TEST: (\"lvis_v0.5_val\",)\nTEST:\n  DETECTIONS_PER_IMAGE: 300  # LVIS allows up to 300\nDATALOADER:\n  SAMPLER_TRAIN: \"RepeatFactorTrainingSampler\"\n  REPEAT_THRESHOLD: 0.001\n"
  },
  {
    "path": "configs/LVIS-InstanceSegmentation/mask_rcnn_X_101_32x8d_FPN_1x.yaml",
    "content": "_BASE_: \"../Base-RCNN-FPN.yaml\"\nMODEL:\n  WEIGHTS: \"detectron2://ImageNetPretrained/FAIR/X-101-32x8d.pkl\"\n  PIXEL_STD: [57.375, 57.120, 58.395]\n  MASK_ON: True\n  RESNETS:\n    STRIDE_IN_1X1: False  # this is a C2 model\n    NUM_GROUPS: 32\n    WIDTH_PER_GROUP: 8\n    DEPTH: 101\n  ROI_HEADS:\n    NUM_CLASSES: 1230\n    SCORE_THRESH_TEST: 0.0001\nINPUT:\n  MIN_SIZE_TRAIN: (640, 672, 704, 736, 768, 800)\nDATASETS:\n  TRAIN: (\"lvis_v0.5_train\",)\n  TEST: (\"lvis_v0.5_val\",)\nTEST:\n  DETECTIONS_PER_IMAGE: 300  # LVIS allows up to 300\nDATALOADER:\n  SAMPLER_TRAIN: \"RepeatFactorTrainingSampler\"\n  REPEAT_THRESHOLD: 0.001\n"
  },
  {
    "path": "configs/Misc/cascade_mask_rcnn_R_50_FPN_1x.yaml",
    "content": "_BASE_: \"../Base-RCNN-FPN.yaml\"\nMODEL:\n  WEIGHTS: \"detectron2://ImageNetPretrained/MSRA/R-50.pkl\"\n  MASK_ON: True\n  RESNETS:\n    DEPTH: 50\n  ROI_HEADS:\n    NAME: CascadeROIHeads\n  ROI_BOX_HEAD:\n    CLS_AGNOSTIC_BBOX_REG: True\n  RPN:\n    POST_NMS_TOPK_TRAIN: 2000\n"
  },
  {
    "path": "configs/Misc/cascade_mask_rcnn_R_50_FPN_3x.yaml",
    "content": "_BASE_: \"../Base-RCNN-FPN.yaml\"\nMODEL:\n  WEIGHTS: \"detectron2://ImageNetPretrained/MSRA/R-50.pkl\"\n  MASK_ON: True\n  RESNETS:\n    DEPTH: 50\n  ROI_HEADS:\n    NAME: CascadeROIHeads\n  ROI_BOX_HEAD:\n    CLS_AGNOSTIC_BBOX_REG: True\n  RPN:\n    POST_NMS_TOPK_TRAIN: 2000\nSOLVER:\n  STEPS: (210000, 250000)\n  MAX_ITER: 270000\n"
  },
  {
    "path": "configs/Misc/cascade_mask_rcnn_X_152_32x8d_FPN_IN5k_gn_dconv.yaml",
    "content": "_BASE_: \"../Base-RCNN-FPN.yaml\"\nMODEL:\n  MASK_ON: True\n  WEIGHTS: \"catalog://ImageNetPretrained/FAIR/X-152-32x8d-IN5k\"\n  RESNETS:\n    STRIDE_IN_1X1: False  # this is a C2 model\n    NUM_GROUPS: 32\n    WIDTH_PER_GROUP: 8\n    DEPTH: 152\n    DEFORM_ON_PER_STAGE: [False, True, True, True]\n  ROI_HEADS:\n    NAME: \"CascadeROIHeads\"\n  ROI_BOX_HEAD:\n    NAME: \"FastRCNNConvFCHead\"\n    NUM_CONV: 4\n    NUM_FC: 1\n    NORM: \"GN\"\n    CLS_AGNOSTIC_BBOX_REG: True\n  ROI_MASK_HEAD:\n    NUM_CONV: 8\n    NORM: \"GN\"\n  RPN:\n    POST_NMS_TOPK_TRAIN: 2000\nSOLVER:\n  IMS_PER_BATCH: 128\n  STEPS: (35000, 45000)\n  MAX_ITER: 50000\n  BASE_LR: 0.16\nINPUT:\n  MIN_SIZE_TRAIN: (640, 864)\n  MIN_SIZE_TRAIN_SAMPLING: \"range\"\n  MAX_SIZE_TRAIN: 1440\n  CROP:\n    ENABLED: True\nTEST:\n  EVAL_PERIOD: 2500\n"
  },
  {
    "path": "configs/Misc/mask_rcnn_R_50_FPN_1x_cls_agnostic.yaml",
    "content": "_BASE_: \"../Base-RCNN-FPN.yaml\"\nMODEL:\n  WEIGHTS: \"detectron2://ImageNetPretrained/MSRA/R-50.pkl\"\n  MASK_ON: True\n  RESNETS:\n    DEPTH: 50\n  ROI_BOX_HEAD:\n    CLS_AGNOSTIC_BBOX_REG: True\n  ROI_MASK_HEAD:\n    CLS_AGNOSTIC_MASK: True\n"
  },
  {
    "path": "configs/Misc/mask_rcnn_R_50_FPN_1x_dconv_c3-c5.yaml",
    "content": "_BASE_: \"../Base-RCNN-FPN.yaml\"\nMODEL:\n  WEIGHTS: \"detectron2://ImageNetPretrained/MSRA/R-50.pkl\"\n  MASK_ON: True\n  RESNETS:\n    DEPTH: 50\n    DEFORM_ON_PER_STAGE: [False, True, True, True] # on Res3,Res4,Res5\n    DEFORM_MODULATED: False\n"
  },
  {
    "path": "configs/Misc/mask_rcnn_R_50_FPN_3x_dconv_c3-c5.yaml",
    "content": "_BASE_: \"../Base-RCNN-FPN.yaml\"\nMODEL:\n  WEIGHTS: \"detectron2://ImageNetPretrained/MSRA/R-50.pkl\"\n  MASK_ON: True\n  RESNETS:\n    DEPTH: 50\n    DEFORM_ON_PER_STAGE: [False, True, True, True] # on Res3,Res4,Res5\n    DEFORM_MODULATED: False\nSOLVER:\n  STEPS: (210000, 250000)\n  MAX_ITER: 270000\n"
  },
  {
    "path": "configs/Misc/mask_rcnn_R_50_FPN_3x_gn.yaml",
    "content": "_BASE_: \"../Base-RCNN-FPN.yaml\"\nMODEL:\n  WEIGHTS: \"catalog://ImageNetPretrained/FAIR/R-50-GN\"\n  MASK_ON: True\n  RESNETS:\n    DEPTH: 50\n    NORM: \"GN\"\n    STRIDE_IN_1X1: False\n  FPN:\n    NORM: \"GN\"\n  ROI_BOX_HEAD:\n    NAME: \"FastRCNNConvFCHead\"\n    NUM_CONV: 4\n    NUM_FC: 1\n    NORM: \"GN\"\n  ROI_MASK_HEAD:\n    NORM: \"GN\"\nSOLVER:\n  # 3x schedule\n  STEPS: (210000, 250000)\n  MAX_ITER: 270000\n"
  },
  {
    "path": "configs/Misc/mask_rcnn_R_50_FPN_3x_syncbn.yaml",
    "content": "_BASE_: \"../Base-RCNN-FPN.yaml\"\nMODEL:\n  WEIGHTS: \"detectron2://ImageNetPretrained/MSRA/R-50.pkl\"\n  MASK_ON: True\n  RESNETS:\n    DEPTH: 50\n    NORM: \"SyncBN\"\n    STRIDE_IN_1X1: False\n  FPN:\n    NORM: \"SyncBN\"\n  ROI_BOX_HEAD:\n    NAME: \"FastRCNNConvFCHead\"\n    NUM_CONV: 4\n    NUM_FC: 1\n    NORM: \"SyncBN\"\n  ROI_MASK_HEAD:\n    NORM: \"SyncBN\"\nSOLVER:\n  # 3x schedule\n  STEPS: (210000, 250000)\n  MAX_ITER: 270000\nTEST:\n  PRECISE_BN:\n    ENABLED: True\n"
  },
  {
    "path": "configs/Misc/panoptic_fpn_R_101_dconv_cascade_gn_3x.yaml",
    "content": "# A large PanopticFPN for demo purposes.\n# Use GN on backbone to support semantic seg.\n# Use Cascade + Deform Conv to improve localization.\n_BASE_: \"../COCO-PanopticSegmentation/Base-Panoptic-FPN.yaml\"\nMODEL:\n  WEIGHTS: \"catalog://ImageNetPretrained/FAIR/R-101-GN\"\n  RESNETS:\n    DEPTH: 101\n    NORM: \"GN\"\n    DEFORM_ON_PER_STAGE: [False, True, True, True]\n    STRIDE_IN_1X1: False\n  FPN:\n    NORM: \"GN\"\n  ROI_HEADS:\n    NAME: CascadeROIHeads\n  ROI_BOX_HEAD:\n    CLS_AGNOSTIC_BBOX_REG: True\n  ROI_MASK_HEAD:\n    NORM: \"GN\"\n  RPN:\n    POST_NMS_TOPK_TRAIN: 2000\nSOLVER:\n  STEPS: (105000, 125000)\n  MAX_ITER: 135000\n  IMS_PER_BATCH: 32\n  BASE_LR: 0.04\n"
  },
  {
    "path": "configs/Misc/scratch_mask_rcnn_R_50_FPN_3x_gn.yaml",
    "content": "_BASE_: \"mask_rcnn_R_50_FPN_3x_gn.yaml\"\n# INPUT:\n  # It makes sense to divide by STD when training from scratch\n  # But it seems to make no difference on the results and C2's models didn't do this.\n  # So we keep things consistent with C2.\n  # PIXEL_STD: [57.375, 57.12, 58.395]\nMODEL:\n  # Train from random initialization.\n  WEIGHTS: \"\"\n  MASK_ON: True\n  BACKBONE:\n    FREEZE_AT: 0\n# NOTE: Please refer to Rethinking ImageNet Pre-training https://arxiv.org/abs/1811.08883\n# to learn what you need for training from scratch.\n"
  },
  {
    "path": "configs/Misc/semantic_R_50_FPN_1x.yaml",
    "content": "_BASE_: \"../Base-RCNN-FPN.yaml\"\nMODEL:\n  META_ARCHITECTURE: \"SemanticSegmentor\"\n  WEIGHTS: \"detectron2://ImageNetPretrained/MSRA/R-50.pkl\"\n  RESNETS:\n    DEPTH: 50\nDATASETS:\n  TRAIN: (\"coco_2017_train_panoptic_stuffonly\",)\n  TEST: (\"coco_2017_val_panoptic_stuffonly\",)\nINPUT:\n  MIN_SIZE_TRAIN: (640, 672, 704, 736, 768, 800)\n"
  },
  {
    "path": "configs/PascalVOC-Detection/faster_rcnn_R_50_C4.yaml",
    "content": "_BASE_: \"../Base-RCNN-C4.yaml\"\nMODEL:\n  WEIGHTS: \"detectron2://ImageNetPretrained/MSRA/R-50.pkl\"\n  MASK_ON: False\n  RESNETS:\n    DEPTH: 50\n  ROI_HEADS:\n    NUM_CLASSES: 20\nINPUT:\n  MIN_SIZE_TRAIN: (480, 512, 544, 576, 608, 640, 672, 704, 736, 768, 800)\n  MIN_SIZE_TEST: 800\nDATASETS:\n  TRAIN: ('voc_2007_trainval', 'voc_2012_trainval')\n  TEST: ('voc_2007_test',)\nSOLVER:\n  STEPS: (12000, 16000)\n  MAX_ITER: 18000  # 17.4 epochs\n  WARMUP_ITERS: 100\n"
  },
  {
    "path": "configs/PascalVOC-Detection/faster_rcnn_R_50_FPN.yaml",
    "content": "_BASE_: \"../Base-RCNN-FPN.yaml\"\nMODEL:\n  WEIGHTS: \"detectron2://ImageNetPretrained/MSRA/R-50.pkl\"\n  MASK_ON: False\n  RESNETS:\n    DEPTH: 50\n  ROI_HEADS:\n    NUM_CLASSES: 20\nINPUT:\n  MIN_SIZE_TRAIN: (480, 512, 544, 576, 608, 640, 672, 704, 736, 768, 800)\n  MIN_SIZE_TEST: 800\nDATASETS:\n  TRAIN: ('voc_2007_trainval', 'voc_2012_trainval')\n  TEST: ('voc_2007_test',)\nSOLVER:\n  STEPS: (12000, 16000)\n  MAX_ITER: 18000  # 17.4 epochs\n  WARMUP_ITERS: 100\n"
  },
  {
    "path": "configs/quick_schedules/README.md",
    "content": "These are quick configs for performance or accuracy regression tracking purposes.\n"
  },
  {
    "path": "configs/quick_schedules/fast_rcnn_R_50_FPN_inference_acc_test.yaml",
    "content": "_BASE_: \"../COCO-Detection/fast_rcnn_R_50_FPN_1x.yaml\"\nMODEL:\n  WEIGHTS: \"detectron2://COCO-Detection/fast_rcnn_R_50_FPN_1x/137635226/model_final_e5f7ce.pkl\"\nDATASETS:\n  TEST: (\"coco_2017_val_100\",)\nTEST:\n  EXPECTED_RESULTS: [[\"bbox\", \"AP\", 45.70, 0.02]]\n"
  },
  {
    "path": "configs/quick_schedules/fast_rcnn_R_50_FPN_instant_test.yaml",
    "content": "_BASE_: \"../COCO-Detection/fast_rcnn_R_50_FPN_1x.yaml\"\nMODEL:\n  WEIGHTS: \"detectron2://ImageNetPretrained/MSRA/R-50.pkl\"\nDATASETS:\n  TRAIN: (\"coco_2017_val_100\",)\n  PROPOSAL_FILES_TRAIN: (\"detectron2://COCO-Detection/rpn_R_50_FPN_1x/137258492/coco_2017_val_box_proposals_ee0dad.pkl\", )\n  TEST: (\"coco_2017_val_100\",)\n  PROPOSAL_FILES_TEST: (\"detectron2://COCO-Detection/rpn_R_50_FPN_1x/137258492/coco_2017_val_box_proposals_ee0dad.pkl\", )\nSOLVER:\n  BASE_LR: 0.005\n  STEPS: (30,)\n  MAX_ITER: 40\n  IMS_PER_BATCH: 4\nDATALOADER:\n  NUM_WORKERS: 2\n"
  },
  {
    "path": "configs/quick_schedules/keypoint_rcnn_R_50_FPN_inference_acc_test.yaml",
    "content": "_BASE_: \"../COCO-Keypoints/keypoint_rcnn_R_50_FPN_3x.yaml\"\nMODEL:\n  WEIGHTS: \"detectron2://COCO-Keypoints/keypoint_rcnn_R_50_FPN_3x/137849621/model_final_a6e10b.pkl\"\nDATASETS:\n  TEST: (\"keypoints_coco_2017_val_100\",)\nTEST:\n  EXPECTED_RESULTS: [[\"bbox\", \"AP\", 52.47, 0.02], [\"keypoints\", \"AP\", 67.36, 0.02]]\n"
  },
  {
    "path": "configs/quick_schedules/keypoint_rcnn_R_50_FPN_instant_test.yaml",
    "content": "_BASE_: \"../Base-RCNN-FPN.yaml\"\nMODEL:\n  WEIGHTS: \"detectron2://ImageNetPretrained/MSRA/R-50.pkl\"\n  KEYPOINT_ON: True\nDATASETS:\n  TRAIN: (\"keypoints_coco_2017_val_100\",)\n  TEST: (\"keypoints_coco_2017_val_100\",)\nSOLVER:\n  BASE_LR: 0.005\n  STEPS: (30,)\n  MAX_ITER: 40\n  IMS_PER_BATCH: 4\nDATALOADER:\n  NUM_WORKERS: 2\n"
  },
  {
    "path": "configs/quick_schedules/keypoint_rcnn_R_50_FPN_normalized_training_acc_test.yaml",
    "content": "_BASE_: \"../Base-RCNN-FPN.yaml\"\nMODEL:\n  WEIGHTS: \"detectron2://ImageNetPretrained/MSRA/R-50.pkl\"\n  KEYPOINT_ON: True\n  RESNETS:\n    DEPTH: 50\n  ROI_HEADS:\n    BATCH_SIZE_PER_IMAGE: 256\n    NUM_CLASSES: 1\n  ROI_KEYPOINT_HEAD:\n    POOLER_RESOLUTION: 14\n    POOLER_SAMPLING_RATIO: 2\n    NORMALIZE_LOSS_BY_VISIBLE_KEYPOINTS: False\n    LOSS_WEIGHT: 4.0\n  ROI_BOX_HEAD:\n    SMOOTH_L1_BETA: 1.0  # Keypoint AP degrades when using plain L1 loss\n  RPN:\n    SMOOTH_L1_BETA: 0.2  # Keypoint AP degrades when using plain L1 loss\nDATASETS:\n  TRAIN: (\"keypoints_coco_2017_val\",)\n  TEST: (\"keypoints_coco_2017_val\",)\nINPUT:\n  MIN_SIZE_TRAIN: (640, 672, 704, 736, 768, 800)\nSOLVER:\n  WARMUP_FACTOR: 0.33333333\n  WARMUP_ITERS: 100\n  STEPS: (5500, 5800)\n  MAX_ITER: 6000\nTEST:\n  EXPECTED_RESULTS: [[\"bbox\", \"AP\", 55.35, 1.0], [\"keypoints\", \"AP\", 76.91, 1.0]]\n"
  },
  {
    "path": "configs/quick_schedules/keypoint_rcnn_R_50_FPN_training_acc_test.yaml",
    "content": "_BASE_: \"../Base-RCNN-FPN.yaml\"\nMODEL:\n  WEIGHTS: \"detectron2://ImageNetPretrained/MSRA/R-50.pkl\"\n  KEYPOINT_ON: True\n  RESNETS:\n    DEPTH: 50\n  ROI_HEADS:\n    BATCH_SIZE_PER_IMAGE: 256\n    NUM_CLASSES: 1\n  ROI_KEYPOINT_HEAD:\n    POOLER_RESOLUTION: 14\n    POOLER_SAMPLING_RATIO: 2\n  ROI_BOX_HEAD:\n    SMOOTH_L1_BETA: 1.0  # Keypoint AP degrades when using plain L1 loss\n  RPN:\n    SMOOTH_L1_BETA: 0.2  # Keypoint AP degrades when using plain L1 loss\nDATASETS:\n  TRAIN: (\"keypoints_coco_2017_val\",)\n  TEST: (\"keypoints_coco_2017_val\",)\nINPUT:\n  MIN_SIZE_TRAIN: (640, 672, 704, 736, 768, 800)\nSOLVER:\n  WARMUP_FACTOR: 0.33333333\n  WARMUP_ITERS: 100\n  STEPS: (5500, 5800)\n  MAX_ITER: 6000\nTEST:\n  EXPECTED_RESULTS: [[\"bbox\", \"AP\", 53.5, 1.0], [\"keypoints\", \"AP\", 72.4, 1.0]]\n"
  },
  {
    "path": "configs/quick_schedules/mask_rcnn_R_50_C4_inference_acc_test.yaml",
    "content": "_BASE_: \"../COCO-InstanceSegmentation/mask_rcnn_R_50_C4_3x.yaml\"\nMODEL:\n  WEIGHTS: \"detectron2://COCO-InstanceSegmentation/mask_rcnn_R_50_C4_3x/137849525/model_final_4ce675.pkl\"\nDATASETS:\n  TEST: (\"coco_2017_val_100\",)\nTEST:\n  EXPECTED_RESULTS: [[\"bbox\", \"AP\", 47.37, 0.02], [\"segm\", \"AP\", 40.99, 0.02]]\n"
  },
  {
    "path": "configs/quick_schedules/mask_rcnn_R_50_C4_instant_test.yaml",
    "content": "_BASE_: \"../Base-RCNN-C4.yaml\"\nMODEL:\n  WEIGHTS: \"detectron2://ImageNetPretrained/MSRA/R-50.pkl\"\n  MASK_ON: True\nDATASETS:\n  TRAIN: (\"coco_2017_val_100\",)\n  TEST: (\"coco_2017_val_100\",)\nSOLVER:\n  BASE_LR: 0.001\n  STEPS: (30,)\n  MAX_ITER: 40\n  IMS_PER_BATCH: 4\nDATALOADER:\n  NUM_WORKERS: 2\n"
  },
  {
    "path": "configs/quick_schedules/mask_rcnn_R_50_C4_training_acc_test.yaml",
    "content": "_BASE_: \"../Base-RCNN-C4.yaml\"\nMODEL:\n  WEIGHTS: \"detectron2://ImageNetPretrained/MSRA/R-50.pkl\"\n  ROI_HEADS:\n    BATCH_SIZE_PER_IMAGE: 256\n  MASK_ON: True\nDATASETS:\n  TRAIN: (\"coco_2017_val\",)\n  TEST: (\"coco_2017_val\",)\nINPUT:\n  MIN_SIZE_TRAIN: (600,)\n  MAX_SIZE_TRAIN: 1000\n  MIN_SIZE_TEST: 800\n  MAX_SIZE_TEST: 1000\nSOLVER:\n  IMS_PER_BATCH: 8  # base uses 16\n  WARMUP_FACTOR: 0.33333\n  WARMUP_ITERS: 100\n  STEPS: (11000, 11600)\n  MAX_ITER: 12000\nTEST:\n  EXPECTED_RESULTS: [[\"bbox\", \"AP\", 41.88, 0.7], [\"segm\", \"AP\", 33.79, 0.5]]\n"
  },
  {
    "path": "configs/quick_schedules/mask_rcnn_R_50_DC5_inference_acc_test.yaml",
    "content": "_BASE_: \"../COCO-InstanceSegmentation/mask_rcnn_R_50_DC5_3x.yaml\"\nMODEL:\n  WEIGHTS: \"detectron2://COCO-InstanceSegmentation/mask_rcnn_R_50_DC5_3x/137849551/model_final_84107b.pkl\"\nDATASETS:\n  TEST: (\"coco_2017_val_100\",)\nTEST:\n  EXPECTED_RESULTS: [[\"bbox\", \"AP\", 47.44, 0.02], [\"segm\", \"AP\", 42.94, 0.02]]\n"
  },
  {
    "path": "configs/quick_schedules/mask_rcnn_R_50_FPN_inference_acc_test.yaml",
    "content": "_BASE_: \"../COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x.yaml\"\nMODEL:\n  WEIGHTS: \"detectron2://COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x/137849600/model_final_f10217.pkl\"\nDATASETS:\n  TEST: (\"coco_2017_val_100\",)\nTEST:\n  EXPECTED_RESULTS: [[\"bbox\", \"AP\", 47.34, 0.02], [\"segm\", \"AP\",  42.67, 0.02]]\n  # expected results do not use test-time augmentation. TTA results are not verified.\n  AUG:\n    ENABLED: True\n    MIN_SIZES: (400, 500)  # to save some time\n"
  },
  {
    "path": "configs/quick_schedules/mask_rcnn_R_50_FPN_instant_test.yaml",
    "content": "_BASE_: \"../Base-RCNN-FPN.yaml\"\nMODEL:\n  WEIGHTS: \"detectron2://ImageNetPretrained/MSRA/R-50.pkl\"\n  MASK_ON: True\nDATASETS:\n  TRAIN: (\"coco_2017_val_100\",)\n  TEST: (\"coco_2017_val_100\",)\nSOLVER:\n  BASE_LR: 0.005\n  STEPS: (30,)\n  MAX_ITER: 40\n  IMS_PER_BATCH: 4\nDATALOADER:\n  NUM_WORKERS: 2\n"
  },
  {
    "path": "configs/quick_schedules/mask_rcnn_R_50_FPN_training_acc_test.yaml",
    "content": "_BASE_: \"../Base-RCNN-FPN.yaml\"\nMODEL:\n  WEIGHTS: \"detectron2://ImageNetPretrained/MSRA/R-50.pkl\"\n  ROI_HEADS:\n    BATCH_SIZE_PER_IMAGE: 256\n  MASK_ON: True\nDATASETS:\n  TRAIN: (\"coco_2017_val\",)\n  TEST: (\"coco_2017_val\",)\nINPUT:\n  MIN_SIZE_TRAIN: (600,)\n  MAX_SIZE_TRAIN: 1000\n  MIN_SIZE_TEST: 800\n  MAX_SIZE_TEST: 1000\nSOLVER:\n  WARMUP_FACTOR: 0.3333333\n  WARMUP_ITERS: 100\n  STEPS: (5500, 5800)\n  MAX_ITER: 6000\nTEST:\n  EXPECTED_RESULTS: [[\"bbox\", \"AP\", 42.0, 1.6], [\"segm\", \"AP\", 35.4, 1.25]]\n"
  },
  {
    "path": "configs/quick_schedules/panoptic_fpn_R_50_inference_acc_test.yaml",
    "content": "_BASE_: \"../COCO-PanopticSegmentation/panoptic_fpn_R_50_3x.yaml\"\nMODEL:\n  WEIGHTS: \"detectron2://COCO-PanopticSegmentation/panoptic_fpn_R_50_3x/139514569/model_final_c10459.pkl\"\nDATASETS:\n  TEST: (\"coco_2017_val_100_panoptic_separated\",)\nTEST:\n  EXPECTED_RESULTS: [[\"bbox\", \"AP\", 46.47, 0.02], [\"segm\", \"AP\", 43.39, 0.02], [\"sem_seg\", \"mIoU\", 42.55, 0.02], [\"panoptic_seg\", \"PQ\", 38.99, 0.02]]\n"
  },
  {
    "path": "configs/quick_schedules/panoptic_fpn_R_50_instant_test.yaml",
    "content": "_BASE_: \"../Base-RCNN-FPN.yaml\"\nMODEL:\n  META_ARCHITECTURE: \"PanopticFPN\"\n  WEIGHTS: \"detectron2://ImageNetPretrained/MSRA/R-50.pkl\"\n  MASK_ON: True\n  RESNETS:\n    DEPTH: 50\n  SEM_SEG_HEAD:\n    LOSS_WEIGHT: 0.5\nDATASETS:\n  TRAIN: (\"coco_2017_val_100_panoptic_separated\",)\n  TEST: (\"coco_2017_val_100_panoptic_separated\",)\nSOLVER:\n  BASE_LR: 0.005\n  STEPS: (30,)\n  MAX_ITER: 40\n  IMS_PER_BATCH: 4\nDATALOADER:\n  NUM_WORKERS: 1\n"
  },
  {
    "path": "configs/quick_schedules/panoptic_fpn_R_50_training_acc_test.yaml",
    "content": "_BASE_: \"../Base-RCNN-FPN.yaml\"\nMODEL:\n  META_ARCHITECTURE: \"PanopticFPN\"\n  WEIGHTS: \"detectron2://ImageNetPretrained/MSRA/R-50.pkl\"\n  MASK_ON: True\n  RESNETS:\n    DEPTH: 50\n  SEM_SEG_HEAD:\n    LOSS_WEIGHT: 0.5\nDATASETS:\n  TRAIN: (\"coco_2017_val_panoptic_separated\",)\n  TEST: (\"coco_2017_val_panoptic_separated\",)\nSOLVER:\n  BASE_LR: 0.01\n  WARMUP_FACTOR: 0.001\n  WARMUP_ITERS: 500\n  STEPS: (5500,)\n  MAX_ITER: 7000\nTEST:\n  EXPECTED_RESULTS: [[\"bbox\", \"AP\", 46.80, 1.1], [\"segm\", \"AP\", 38.93, 0.7], [\"sem_seg\", \"mIoU\", 64.53, 1.0], [\"panoptic_seg\", \"PQ\", 48.23, 0.8]]\n"
  },
  {
    "path": "configs/quick_schedules/retinanet_R_50_FPN_inference_acc_test.yaml",
    "content": "_BASE_: \"../COCO-Detection/retinanet_R_50_FPN_3x.yaml\"\nMODEL:\n  WEIGHTS: \"detectron2://COCO-Detection/retinanet_R_50_FPN_3x/137849486/model_final_4cafe0.pkl\"\nDATASETS:\n  TEST: (\"coco_2017_val_100\",)\nTEST:\n  EXPECTED_RESULTS: [[\"bbox\", \"AP\", 44.36, 0.02]]\n"
  },
  {
    "path": "configs/quick_schedules/retinanet_R_50_FPN_instant_test.yaml",
    "content": "_BASE_: \"../COCO-Detection/retinanet_R_50_FPN_1x.yaml\"\nMODEL:\n  WEIGHTS: \"detectron2://ImageNetPretrained/MSRA/R-50.pkl\"\nDATASETS:\n  TRAIN: (\"coco_2017_val_100\",)\n  TEST: (\"coco_2017_val_100\",)\nSOLVER:\n  BASE_LR: 0.005\n  STEPS: (30,)\n  MAX_ITER: 40\n  IMS_PER_BATCH: 4\nDATALOADER:\n  NUM_WORKERS: 2\n"
  },
  {
    "path": "configs/quick_schedules/rpn_R_50_FPN_inference_acc_test.yaml",
    "content": "_BASE_: \"../COCO-Detection/rpn_R_50_FPN_1x.yaml\"\nMODEL:\n  WEIGHTS: \"detectron2://COCO-Detection/rpn_R_50_FPN_1x/137258492/model_final_02ce48.pkl\"\nDATASETS:\n  TEST: (\"coco_2017_val_100\",)\nTEST:\n  EXPECTED_RESULTS: [[\"box_proposals\", \"AR@1000\", 58.16, 0.02]]\n"
  },
  {
    "path": "configs/quick_schedules/rpn_R_50_FPN_instant_test.yaml",
    "content": "_BASE_: \"../COCO-Detection/rpn_R_50_FPN_1x.yaml\"\nMODEL:\n  WEIGHTS: \"detectron2://ImageNetPretrained/MSRA/R-50.pkl\"\nDATASETS:\n  TRAIN: (\"coco_2017_val_100\",)\n  TEST: (\"coco_2017_val_100\",)\nSOLVER:\n  STEPS: (30,)\n  MAX_ITER: 40\n  BASE_LR: 0.005\n  IMS_PER_BATCH: 4\nDATALOADER:\n  NUM_WORKERS: 2\n"
  },
  {
    "path": "configs/quick_schedules/semantic_R_50_FPN_inference_acc_test.yaml",
    "content": "_BASE_: \"../Base-RCNN-FPN.yaml\"\nMODEL:\n  META_ARCHITECTURE: \"SemanticSegmentor\"\n  WEIGHTS: \"detectron2://semantic_R_50_FPN_1x/111802073/model_final_c18079783c55a94968edc28b7101c5f0.pkl\"\n  RESNETS:\n    DEPTH: 50\nDATASETS:\n  TEST: (\"coco_2017_val_100_panoptic_stuffonly\",)\nTEST:\n  EXPECTED_RESULTS: [[\"sem_seg\", \"mIoU\", 39.53, 0.02], [\"sem_seg\", \"mACC\", 51.50, 0.02]]\n"
  },
  {
    "path": "configs/quick_schedules/semantic_R_50_FPN_instant_test.yaml",
    "content": "_BASE_: \"../Base-RCNN-FPN.yaml\"\nMODEL:\n  META_ARCHITECTURE: \"SemanticSegmentor\"\n  WEIGHTS: \"detectron2://ImageNetPretrained/MSRA/R-50.pkl\"\n  RESNETS:\n    DEPTH: 50\nDATASETS:\n  TRAIN: (\"coco_2017_val_100_panoptic_stuffonly\",)\n  TEST: (\"coco_2017_val_100_panoptic_stuffonly\",)\nINPUT:\n  MIN_SIZE_TRAIN: (640, 672, 704, 736, 768, 800)\nSOLVER:\n  BASE_LR: 0.005\n  STEPS: (30,)\n  MAX_ITER: 40\n  IMS_PER_BATCH: 4\nDATALOADER:\n  NUM_WORKERS: 2\n"
  },
  {
    "path": "configs/quick_schedules/semantic_R_50_FPN_training_acc_test.yaml",
    "content": "_BASE_: \"../Base-RCNN-FPN.yaml\"\nMODEL:\n  META_ARCHITECTURE: \"SemanticSegmentor\"\n  WEIGHTS: \"detectron2://ImageNetPretrained/MSRA/R-50.pkl\"\n  RESNETS:\n    DEPTH: 50\nDATASETS:\n  TRAIN: (\"coco_2017_val_panoptic_stuffonly\",)\n  TEST: (\"coco_2017_val_panoptic_stuffonly\",)\nSOLVER:\n  BASE_LR: 0.01\n  WARMUP_FACTOR: 0.001\n  WARMUP_ITERS: 300\n  STEPS: (5500,)\n  MAX_ITER: 7000\nTEST:\n  EXPECTED_RESULTS: [[\"sem_seg\", \"mIoU\", 76.51, 1.0], [\"sem_seg\", \"mACC\", 83.25, 1.0]]\nINPUT:\n  # no scale augmentation\n  MIN_SIZE_TRAIN: (800, )\n"
  },
  {
    "path": "datasets/README.md",
    "content": "\nFor a few datasets that detectron2 natively supports,\nthe datasets are assumed to exist in a directory called\n\"datasets/\", under the directory where you launch the program.\nThey need to have the following directory structure:\n\n## Expected dataset structure for COCO instance/keypoint detection:\n\n```\ncoco/\n  annotations/\n    instances_{train,val}2017.json\n    person_keypoints_{train,val}2017.json\n  {train,val}2017/\n    # image files that are mentioned in the corresponding json\n```\n\nYou can use the 2014 version of the dataset as well.\n\nSome of the builtin tests (`dev/run_*_tests.sh`) uses a tiny version of the COCO dataset,\nwhich you can download with `./prepare_for_tests.sh`.\n\n## Expected dataset structure for PanopticFPN:\n\n```\ncoco/\n  annotations/\n    panoptic_{train,val}2017.json\n  panoptic_{train,val}2017/\n    # png annotations\n  panoptic_stuff_{train,val}2017/  # generated by the script mentioned below\n```\n\nInstall panopticapi by:\n```\npip install git+https://github.com/cocodataset/panopticapi.git\n```\nThen, run `python prepare_panoptic_fpn.py`, to extract semantic annotations from panoptic annotations.\n\n## Expected dataset structure for LVIS instance segmentation:\n```\ncoco/\n  {train,val,test}2017/\nlvis/\n  lvis_v0.5_{train,val}.json\n  lvis_v0.5_image_info_test.json\n```\n\nInstall lvis-api by:\n```\npip install git+https://github.com/lvis-dataset/lvis-api.git\n```\n\n## Expected dataset structure for cityscapes:\n```\ncityscapes/\n  gtFine/\n    train/\n      aachen/\n        color.png, instanceIds.png, labelIds.png, polygons.json,\n        labelTrainIds.png\n      ...\n    val/\n    test/\n  leftImg8bit/\n    train/\n    val/\n    test/\n```\nInstall cityscapes scripts by:\n```\npip install git+https://github.com/mcordts/cityscapesScripts.git\n```\n\nNote:\nlabelTrainIds.png are created by `cityscapesscripts/preparation/createTrainIdLabelImgs.py`.\nThey are not needed for instance segmentation.\n\n## 
Expected dataset structure for Pascal VOC:\n```\nVOC20{07,12}/\n  Annotations/\n  ImageSets/\n    Main/\n      trainval.txt\n      test.txt\n      # train.txt or val.txt, if you use these splits\n  JPEGImages/\n```\n"
  },
  {
    "path": "datasets/prepare_for_tests.sh",
    "content": "#!/bin/bash -e\n# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\n\n# Download some files needed for running tests.\n\ncd \"${0%/*}\"\n\nBASE=https://dl.fbaipublicfiles.com/detectron2\nmkdir -p coco/annotations\n\nfor anno in instances_val2017_100 \\\n  person_keypoints_val2017_100 \\\n  instances_minival2014_100 \\\n  person_keypoints_minival2014_100; do\n\n  dest=coco/annotations/$anno.json\n  [[ -s $dest ]] && {\n    echo \"$dest exists. Skipping ...\"\n  } || {\n    wget $BASE/annotations/coco/$anno.json -O $dest\n  }\ndone\n"
  },
  {
    "path": "datasets/prepare_panoptic_fpn.py",
    "content": "# -*- coding: utf-8 -*-\n# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\n\nimport time\nimport functools\nimport json\nimport multiprocessing as mp\nimport numpy as np\nimport os\nfrom PIL import Image\n\nfrom detectron2.data.datasets.builtin_meta import COCO_CATEGORIES\nfrom fvcore.common.download import download\n\nfrom panopticapi.utils import rgb2id\n\n\ndef _process_panoptic_to_semantic(input_panoptic, output_semantic, segments, id_map):\n    panoptic = np.asarray(Image.open(input_panoptic), dtype=np.uint32)\n    panoptic = rgb2id(panoptic)\n    output = np.zeros_like(panoptic, dtype=np.uint8) + 255\n    for seg in segments:\n        cat_id = seg[\"category_id\"]\n        new_cat_id = id_map[cat_id]\n        output[panoptic == seg[\"id\"]] = new_cat_id\n    Image.fromarray(output).save(output_semantic)\n\n\ndef separate_coco_semantic_from_panoptic(panoptic_json, panoptic_root, sem_seg_root, categories):\n    \"\"\"\n    Create semantic segmentation annotations from panoptic segmentation\n    annotations, to be used by PanopticFPN.\n\n    It maps all thing categories to class 0, and maps all unlabeled pixels to class 255.\n    It maps all stuff categories to contiguous ids starting from 1.\n\n    Args:\n        panoptic_json (str): path to the panoptic json file, in COCO's format.\n        panoptic_root (str): a directory with panoptic annotation files, in COCO's format.\n        sem_seg_root (str): a directory to output semantic annotation files\n        categories (list[dict]): category metadata. 
Each dict needs to have:\n            \"id\": corresponds to the \"category_id\" in the json annotations\n            \"isthing\": 0 or 1\n    \"\"\"\n    os.makedirs(sem_seg_root, exist_ok=True)\n\n    stuff_ids = [k[\"id\"] for k in categories if k[\"isthing\"] == 0]\n    thing_ids = [k[\"id\"] for k in categories if k[\"isthing\"] == 1]\n    id_map = {}  # map from category id to id in the output semantic annotation\n    assert len(stuff_ids) <= 254\n    for i, stuff_id in enumerate(stuff_ids):\n        id_map[stuff_id] = i + 1\n    for thing_id in thing_ids:\n        id_map[thing_id] = 0\n    id_map[0] = 255\n\n    with open(panoptic_json) as f:\n        obj = json.load(f)\n\n    pool = mp.Pool(processes=max(mp.cpu_count() // 2, 4))\n\n    def iter_annotations():\n        for anno in obj[\"annotations\"]:\n            file_name = anno[\"file_name\"]\n            segments = anno[\"segments_info\"]\n            input = os.path.join(panoptic_root, file_name)\n            output = os.path.join(sem_seg_root, file_name)\n            yield input, output, segments\n\n    print(\"Start writing to {} ...\".format(sem_seg_root))\n    start = time.time()\n    pool.starmap(\n        functools.partial(_process_panoptic_to_semantic, id_map=id_map),\n        iter_annotations(),\n        chunksize=100,\n    )\n    print(\"Finished. 
time: {:.2f}s\".format(time.time() - start))\n\n\nif __name__ == \"__main__\":\n    dataset_dir = os.path.join(os.path.dirname(__file__), \"coco\")\n    for s in [\"val2017\", \"train2017\"]:\n        separate_coco_semantic_from_panoptic(\n            os.path.join(dataset_dir, \"annotations/panoptic_{}.json\".format(s)),\n            os.path.join(dataset_dir, \"panoptic_{}\".format(s)),\n            os.path.join(dataset_dir, \"panoptic_stuff_{}\".format(s)),\n            COCO_CATEGORIES,\n        )\n\n    # Prepare val2017_100 for quick testing:\n\n    dest_dir = os.path.join(dataset_dir, \"annotations/\")\n    URL_PREFIX = \"https://dl.fbaipublicfiles.com/detectron2/\"\n    download(URL_PREFIX + \"annotations/coco/panoptic_val2017_100.json\", dest_dir)\n    with open(os.path.join(dest_dir, \"panoptic_val2017_100.json\")) as f:\n        obj = json.load(f)\n\n    def link_val100(dir_full, dir_100):\n        print(\"Creating \" + dir_100 + \" ...\")\n        os.makedirs(dir_100, exist_ok=True)\n        for img in obj[\"images\"]:\n            basename = os.path.splitext(img[\"file_name\"])[0]\n            src = os.path.join(dir_full, basename + \".png\")\n            dst = os.path.join(dir_100, basename + \".png\")\n            src = os.path.relpath(src, start=dir_100)\n            os.symlink(src, dst)\n\n    link_val100(\n        os.path.join(dataset_dir, \"panoptic_val2017\"),\n        os.path.join(dataset_dir, \"panoptic_val2017_100\"),\n    )\n\n    link_val100(\n        os.path.join(dataset_dir, \"panoptic_stuff_val2017\"),\n        os.path.join(dataset_dir, \"panoptic_stuff_val2017_100\"),\n    )\n"
  },
  {
    "path": "demo/README.md",
    "content": "\n## Detectron2 Demo\n\nWe provide a command line tool to run a simple demo of builtin models.\nThe usage is explained in [GETTING_STARTED.md](../GETTING_STARTED.md).\n\nSee our [blog post](https://ai.facebook.com/blog/-detectron2-a-pytorch-based-modular-object-detection-library-)\nfor a high-quality demo generated with this tool.\n"
  },
  {
    "path": "demo/demo.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport argparse\nimport glob\nimport multiprocessing as mp\nimport os\nimport time\nimport cv2\nimport tqdm\n\nfrom detectron2.config import get_cfg\nfrom detectron2.data.detection_utils import read_image\nfrom detectron2.utils.logger import setup_logger\n\nfrom predictor import VisualizationDemo\n\n# constants\nWINDOW_NAME = \"COCO detections\"\n\n\ndef setup_cfg(args):\n    # load config from file and command-line arguments\n    cfg = get_cfg()\n    cfg.merge_from_file(args.config_file)\n    cfg.merge_from_list(args.opts)\n    # Set score_threshold for builtin models\n    cfg.MODEL.RETINANET.SCORE_THRESH_TEST = args.confidence_threshold\n    cfg.MODEL.ROI_HEADS.SCORE_THRESH_TEST = args.confidence_threshold\n    cfg.MODEL.PANOPTIC_FPN.COMBINE.INSTANCES_CONFIDENCE_THRESH = args.confidence_threshold\n    cfg.freeze()\n    return cfg\n\n\ndef get_parser():\n    parser = argparse.ArgumentParser(description=\"Detectron2 demo for builtin models\")\n    parser.add_argument(\n        \"--config-file\",\n        default=\"configs/quick_schedules/mask_rcnn_R_50_FPN_inference_acc_test.yaml\",\n        metavar=\"FILE\",\n        help=\"path to config file\",\n    )\n    parser.add_argument(\"--webcam\", action=\"store_true\", help=\"Take inputs from webcam.\")\n    parser.add_argument(\"--video-input\", help=\"Path to video file.\")\n    parser.add_argument(\"--input\", nargs=\"+\", help=\"A list of space separated input images\")\n    parser.add_argument(\n        \"--output\",\n        help=\"A file or directory to save output visualizations. 
\"\n        \"If not given, will show output in an OpenCV window.\",\n    )\n\n    parser.add_argument(\n        \"--confidence-threshold\",\n        type=float,\n        default=0.5,\n        help=\"Minimum score for instance predictions to be shown\",\n    )\n    parser.add_argument(\n        \"--opts\",\n        help=\"Modify config options using the command-line 'KEY VALUE' pairs\",\n        default=[],\n        nargs=argparse.REMAINDER,\n    )\n    return parser\n\n\nif __name__ == \"__main__\":\n    mp.set_start_method(\"spawn\", force=True)\n    args = get_parser().parse_args()\n    setup_logger(name=\"fvcore\")\n    logger = setup_logger()\n    logger.info(\"Arguments: \" + str(args))\n\n    cfg = setup_cfg(args)\n\n    demo = VisualizationDemo(cfg)\n\n    if args.input:\n        if len(args.input) == 1:\n            args.input = glob.glob(os.path.expanduser(args.input[0]))\n            assert args.input, \"The input path(s) was not found\"\n        for path in tqdm.tqdm(args.input, disable=not args.output):\n            # use PIL, to be consistent with evaluation\n            img = read_image(path, format=\"BGR\")\n            start_time = time.time()\n            predictions, visualized_output = demo.run_on_image(img)\n            logger.info(\n                \"{}: detected {} instances in {:.2f}s\".format(\n                    path, len(predictions[\"instances\"]), time.time() - start_time\n                )\n            )\n\n            if args.output:\n                if os.path.isdir(args.output):\n                    assert os.path.isdir(args.output), args.output\n                    out_filename = os.path.join(args.output, os.path.basename(path))\n                else:\n                    assert len(args.input) == 1, \"Please specify a directory with args.output\"\n                    out_filename = args.output\n                visualized_output.save(out_filename)\n            else:\n                cv2.namedWindow(WINDOW_NAME, 
cv2.WINDOW_NORMAL)\n                cv2.imshow(WINDOW_NAME, visualized_output.get_image()[:, :, ::-1])\n                if cv2.waitKey(0) == 27:\n                    break  # esc to quit\n    elif args.webcam:\n        assert args.input is None, \"Cannot have both --input and --webcam!\"\n        cam = cv2.VideoCapture(0)\n        for vis in tqdm.tqdm(demo.run_on_video(cam)):\n            cv2.namedWindow(WINDOW_NAME, cv2.WINDOW_NORMAL)\n            cv2.imshow(WINDOW_NAME, vis)\n            if cv2.waitKey(1) == 27:\n                break  # esc to quit\n        cv2.destroyAllWindows()\n    elif args.video_input:\n        video = cv2.VideoCapture(args.video_input)\n        width = int(video.get(cv2.CAP_PROP_FRAME_WIDTH))\n        height = int(video.get(cv2.CAP_PROP_FRAME_HEIGHT))\n        frames_per_second = video.get(cv2.CAP_PROP_FPS)\n        num_frames = int(video.get(cv2.CAP_PROP_FRAME_COUNT))\n        basename = os.path.basename(args.video_input)\n\n        if args.output:\n            if os.path.isdir(args.output):\n                output_fname = os.path.join(args.output, basename)\n                output_fname = os.path.splitext(output_fname)[0] + \".mkv\"\n            else:\n                output_fname = args.output\n            assert not os.path.isfile(output_fname), output_fname\n            output_file = cv2.VideoWriter(\n                filename=output_fname,\n                # some installation of opencv may not support x264 (due to its license),\n                # you can try other format (e.g. 
MPEG)\n                fourcc=cv2.VideoWriter_fourcc(*\"x264\"),\n                fps=float(frames_per_second),\n                frameSize=(width, height),\n                isColor=True,\n            )\n        assert os.path.isfile(args.video_input)\n        for vis_frame in tqdm.tqdm(demo.run_on_video(video), total=num_frames):\n            if args.output:\n                output_file.write(vis_frame)\n            else:\n                cv2.namedWindow(basename, cv2.WINDOW_NORMAL)\n                cv2.imshow(basename, vis_frame)\n                if cv2.waitKey(1) == 27:\n                    break  # esc to quit\n        video.release()\n        if args.output:\n            output_file.release()\n        else:\n            cv2.destroyAllWindows()\n"
  },
  {
    "path": "demo/predictor.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport atexit\nimport bisect\nimport multiprocessing as mp\nfrom collections import deque\nimport cv2\nimport torch\n\nfrom detectron2.data import MetadataCatalog\nfrom detectron2.engine.defaults import DefaultPredictor\nfrom detectron2.utils.video_visualizer import VideoVisualizer\nfrom detectron2.utils.visualizer import ColorMode, Visualizer\n\n\nclass VisualizationDemo(object):\n    def __init__(self, cfg, instance_mode=ColorMode.IMAGE, parallel=False):\n        \"\"\"\n        Args:\n            cfg (CfgNode):\n            instance_mode (ColorMode):\n            parallel (bool): whether to run the model in different processes from visualization.\n                Useful since the visualization logic can be slow.\n        \"\"\"\n        self.metadata = MetadataCatalog.get(\n            cfg.DATASETS.TEST[0] if len(cfg.DATASETS.TEST) else \"__unused\"\n        )\n        self.cpu_device = torch.device(\"cpu\")\n        self.instance_mode = instance_mode\n\n        self.parallel = parallel\n        if parallel:\n            num_gpu = torch.cuda.device_count()\n            self.predictor = AsyncPredictor(cfg, num_gpus=num_gpu)\n        else:\n            self.predictor = DefaultPredictor(cfg)\n\n    def run_on_image(self, image):\n        \"\"\"\n        Args:\n            image (np.ndarray): an image of shape (H, W, C) (in BGR order).\n                This is the format used by OpenCV.\n\n        Returns:\n            predictions (dict): the output of the model.\n            vis_output (VisImage): the visualized image output.\n        \"\"\"\n        vis_output = None\n        predictions = self.predictor(image)\n        # Convert image from OpenCV BGR format to Matplotlib RGB format.\n        image = image[:, :, ::-1]\n        visualizer = Visualizer(image, self.metadata, instance_mode=self.instance_mode)\n        if \"panoptic_seg\" in predictions:\n            panoptic_seg, 
segments_info = predictions[\"panoptic_seg\"]\n            vis_output = visualizer.draw_panoptic_seg_predictions(\n                panoptic_seg.to(self.cpu_device), segments_info\n            )\n        else:\n            if \"sem_seg\" in predictions:\n                vis_output = visualizer.draw_sem_seg(\n                    predictions[\"sem_seg\"].argmax(dim=0).to(self.cpu_device)\n                )\n            if \"instances\" in predictions:\n                instances = predictions[\"instances\"].to(self.cpu_device)\n                vis_output = visualizer.draw_instance_predictions(predictions=instances)\n\n        return predictions, vis_output\n\n    def _frame_from_video(self, video):\n        while video.isOpened():\n            success, frame = video.read()\n            if success:\n                yield frame\n            else:\n                break\n\n    def run_on_video(self, video):\n        \"\"\"\n        Visualizes predictions on frames of the input video.\n\n        Args:\n            video (cv2.VideoCapture): a :class:`VideoCapture` object, whose source can be\n                either a webcam or a video file.\n\n        Yields:\n            ndarray: BGR visualizations of each video frame.\n        \"\"\"\n        video_visualizer = VideoVisualizer(self.metadata, self.instance_mode)\n\n        def process_predictions(frame, predictions):\n            frame = cv2.cvtColor(frame, cv2.COLOR_RGB2BGR)\n            if \"panoptic_seg\" in predictions:\n                panoptic_seg, segments_info = predictions[\"panoptic_seg\"]\n                vis_frame = video_visualizer.draw_panoptic_seg_predictions(\n                    frame, panoptic_seg.to(self.cpu_device), segments_info\n                )\n            elif \"instances\" in predictions:\n                predictions = predictions[\"instances\"].to(self.cpu_device)\n                vis_frame = video_visualizer.draw_instance_predictions(frame, predictions)\n            elif \"sem_seg\" in 
predictions:\n                vis_frame = video_visualizer.draw_sem_seg(\n                    frame, predictions[\"sem_seg\"].argmax(dim=0).to(self.cpu_device)\n                )\n\n            # Converts Matplotlib RGB format to OpenCV BGR format\n            vis_frame = cv2.cvtColor(vis_frame.get_image(), cv2.COLOR_RGB2BGR)\n            return vis_frame\n\n        frame_gen = self._frame_from_video(video)\n        if self.parallel:\n            buffer_size = self.predictor.default_buffer_size\n\n            frame_data = deque()\n\n            for cnt, frame in enumerate(frame_gen):\n                frame_data.append(frame)\n                self.predictor.put(frame)\n\n                if cnt >= buffer_size:\n                    frame = frame_data.popleft()\n                    predictions = self.predictor.get()\n                    yield process_predictions(frame, predictions)\n\n            while len(frame_data):\n                frame = frame_data.popleft()\n                predictions = self.predictor.get()\n                yield process_predictions(frame, predictions)\n        else:\n            for frame in frame_gen:\n                yield process_predictions(frame, self.predictor(frame))\n\n\nclass AsyncPredictor:\n    \"\"\"\n    A predictor that runs the model asynchronously, possibly on >1 GPUs.\n    Because rendering the visualization takes considerably amount of time,\n    this helps improve throughput when rendering videos.\n    \"\"\"\n\n    class _StopToken:\n        pass\n\n    class _PredictWorker(mp.Process):\n        def __init__(self, cfg, task_queue, result_queue):\n            self.cfg = cfg\n            self.task_queue = task_queue\n            self.result_queue = result_queue\n            super().__init__()\n\n        def run(self):\n            predictor = DefaultPredictor(self.cfg)\n\n            while True:\n                task = self.task_queue.get()\n                if isinstance(task, AsyncPredictor._StopToken):\n                    
break\n                idx, data = task\n                result = predictor(data)\n                self.result_queue.put((idx, result))\n\n    def __init__(self, cfg, num_gpus: int = 1):\n        \"\"\"\n        Args:\n            cfg (CfgNode):\n            num_gpus (int): if 0, will run on CPU\n        \"\"\"\n        num_workers = max(num_gpus, 1)\n        self.task_queue = mp.Queue(maxsize=num_workers * 3)\n        self.result_queue = mp.Queue(maxsize=num_workers * 3)\n        self.procs = []\n        for gpuid in range(max(num_gpus, 1)):\n            cfg = cfg.clone()\n            cfg.defrost()\n            cfg.MODEL.DEVICE = \"cuda:{}\".format(gpuid) if num_gpus > 0 else \"cpu\"\n            self.procs.append(\n                AsyncPredictor._PredictWorker(cfg, self.task_queue, self.result_queue)\n            )\n\n        self.put_idx = 0\n        self.get_idx = 0\n        self.result_rank = []\n        self.result_data = []\n\n        for p in self.procs:\n            p.start()\n        atexit.register(self.shutdown)\n\n    def put(self, image):\n        self.put_idx += 1\n        self.task_queue.put((self.put_idx, image))\n\n    def get(self):\n        self.get_idx += 1  # the index needed for this request\n        if len(self.result_rank) and self.result_rank[0] == self.get_idx:\n            res = self.result_data[0]\n            del self.result_data[0], self.result_rank[0]\n            return res\n\n        while True:\n            # make sure the results are returned in the correct order\n            idx, res = self.result_queue.get()\n            if idx == self.get_idx:\n                return res\n            insert = bisect.bisect(self.result_rank, idx)\n            self.result_rank.insert(insert, idx)\n            self.result_data.insert(insert, res)\n\n    def __len__(self):\n        return self.put_idx - self.get_idx\n\n    def __call__(self, image):\n        self.put(image)\n        return self.get()\n\n    def shutdown(self):\n        for _ in 
self.procs:\n            self.task_queue.put(AsyncPredictor._StopToken())\n\n    @property\n    def default_buffer_size(self):\n        return len(self.procs) * 5\n"
  },
  {
    "path": "detectron2/__init__.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.\n\nfrom .utils.env import setup_environment\n\nsetup_environment()\n\n\n__version__ = \"0.1\"\n"
  },
  {
    "path": "detectron2/checkpoint/__init__.py",
    "content": "# -*- coding: utf-8 -*-\n# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\n# File:\n\n\nfrom . import model_zoo as _UNUSED  # register the handler\nfrom .detection_checkpoint import DetectionCheckpointer\nfrom fvcore.common.checkpoint import Checkpointer, PeriodicCheckpointer\n\n__all__ = [\"Checkpointer\", \"PeriodicCheckpointer\", \"DetectionCheckpointer\"]\n"
  },
  {
    "path": "detectron2/checkpoint/c2_model_loading.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport copy\nimport logging\nimport re\nimport torch\nfrom fvcore.common.checkpoint import (\n    get_missing_parameters_message,\n    get_unexpected_parameters_message,\n)\n\n\ndef convert_basic_c2_names(original_keys):\n    \"\"\"\n    Apply some basic name conversion to names in C2 weights.\n    It only deals with typical backbone models.\n\n    Args:\n        original_keys (list[str]):\n    Returns:\n        list[str]: The same number of strings matching those in original_keys.\n    \"\"\"\n    layer_keys = copy.deepcopy(original_keys)\n    layer_keys = [\n        {\"pred_b\": \"linear_b\", \"pred_w\": \"linear_w\"}.get(k, k) for k in layer_keys\n    ]  # some hard-coded mappings\n\n    layer_keys = [k.replace(\"_\", \".\") for k in layer_keys]\n    layer_keys = [re.sub(\"\\\\.b$\", \".bias\", k) for k in layer_keys]\n    layer_keys = [re.sub(\"\\\\.w$\", \".weight\", k) for k in layer_keys]\n    # Uniform both bn and gn names to \"norm\"\n    layer_keys = [re.sub(\"bn\\\\.s$\", \"norm.weight\", k) for k in layer_keys]\n    layer_keys = [re.sub(\"bn\\\\.bias$\", \"norm.bias\", k) for k in layer_keys]\n    layer_keys = [re.sub(\"bn\\\\.rm\", \"norm.running_mean\", k) for k in layer_keys]\n    layer_keys = [re.sub(\"bn\\\\.running.mean$\", \"norm.running_mean\", k) for k in layer_keys]\n    layer_keys = [re.sub(\"bn\\\\.riv$\", \"norm.running_var\", k) for k in layer_keys]\n    layer_keys = [re.sub(\"bn\\\\.running.var$\", \"norm.running_var\", k) for k in layer_keys]\n    layer_keys = [re.sub(\"bn\\\\.gamma$\", \"norm.weight\", k) for k in layer_keys]\n    layer_keys = [re.sub(\"bn\\\\.beta$\", \"norm.bias\", k) for k in layer_keys]\n    layer_keys = [re.sub(\"gn\\\\.s$\", \"norm.weight\", k) for k in layer_keys]\n    layer_keys = [re.sub(\"gn\\\\.bias$\", \"norm.bias\", k) for k in layer_keys]\n\n    # stem\n    layer_keys = [re.sub(\"^res\\\\.conv1\\\\.norm\\\\.\", 
\"conv1.norm.\", k) for k in layer_keys]\n    # to avoid mis-matching with \"conv1\" in other components (e.g. detection head)\n    layer_keys = [re.sub(\"^conv1\\\\.\", \"stem.conv1.\", k) for k in layer_keys]\n\n    # layer1-4 is used by torchvision, however we follow the C2 naming strategy (res2-5)\n    # layer_keys = [re.sub(\"^res2.\", \"layer1.\", k) for k in layer_keys]\n    # layer_keys = [re.sub(\"^res3.\", \"layer2.\", k) for k in layer_keys]\n    # layer_keys = [re.sub(\"^res4.\", \"layer3.\", k) for k in layer_keys]\n    # layer_keys = [re.sub(\"^res5.\", \"layer4.\", k) for k in layer_keys]\n\n    # blocks\n    layer_keys = [k.replace(\".branch1.\", \".shortcut.\") for k in layer_keys]\n    layer_keys = [k.replace(\".branch2a.\", \".conv1.\") for k in layer_keys]\n    layer_keys = [k.replace(\".branch2b.\", \".conv2.\") for k in layer_keys]\n    layer_keys = [k.replace(\".branch2c.\", \".conv3.\") for k in layer_keys]\n\n    # DensePose substitutions\n    layer_keys = [re.sub(\"^body.conv.fcn\", \"body_conv_fcn\", k) for k in layer_keys]\n    layer_keys = [k.replace(\"AnnIndex.lowres\", \"ann_index_lowres\") for k in layer_keys]\n    layer_keys = [k.replace(\"Index.UV.lowres\", \"index_uv_lowres\") for k in layer_keys]\n    layer_keys = [k.replace(\"U.lowres\", \"u_lowres\") for k in layer_keys]\n    layer_keys = [k.replace(\"V.lowres\", \"v_lowres\") for k in layer_keys]\n    return layer_keys\n\n\ndef convert_c2_detectron_names(weights):\n    \"\"\"\n    Map Caffe2 Detectron weight names to Detectron2 names.\n\n    Args:\n        weights (dict): name -> tensor\n\n    Returns:\n        dict: detectron2 names -> tensor\n        dict: detectron2 names -> C2 names\n    \"\"\"\n    logger = logging.getLogger(__name__)\n    logger.info(\"Remapping C2 weights ......\")\n    original_keys = sorted(weights.keys())\n    layer_keys = copy.deepcopy(original_keys)\n\n    layer_keys = convert_basic_c2_names(layer_keys)\n\n    # 
--------------------------------------------------------------------------\n    # RPN hidden representation conv\n    # --------------------------------------------------------------------------\n    # FPN case\n    # In the C2 model, the RPN hidden layer conv is defined for FPN level 2 and then\n    # shared for all other levels, hence the appearance of \"fpn2\"\n    layer_keys = [\n        k.replace(\"conv.rpn.fpn2\", \"proposal_generator.rpn_head.conv\") for k in layer_keys\n    ]\n    # Non-FPN case\n    layer_keys = [k.replace(\"conv.rpn\", \"proposal_generator.rpn_head.conv\") for k in layer_keys]\n\n    # --------------------------------------------------------------------------\n    # RPN box transformation conv\n    # --------------------------------------------------------------------------\n    # FPN case (see note above about \"fpn2\")\n    layer_keys = [\n        k.replace(\"rpn.bbox.pred.fpn2\", \"proposal_generator.rpn_head.anchor_deltas\")\n        for k in layer_keys\n    ]\n    layer_keys = [\n        k.replace(\"rpn.cls.logits.fpn2\", \"proposal_generator.rpn_head.objectness_logits\")\n        for k in layer_keys\n    ]\n    # Non-FPN case\n    layer_keys = [\n        k.replace(\"rpn.bbox.pred\", \"proposal_generator.rpn_head.anchor_deltas\") for k in layer_keys\n    ]\n    layer_keys = [\n        k.replace(\"rpn.cls.logits\", \"proposal_generator.rpn_head.objectness_logits\")\n        for k in layer_keys\n    ]\n\n    # --------------------------------------------------------------------------\n    # Fast R-CNN box head\n    # --------------------------------------------------------------------------\n    layer_keys = [re.sub(\"^bbox\\\\.pred\", \"bbox_pred\", k) for k in layer_keys]\n    layer_keys = [re.sub(\"^cls\\\\.score\", \"cls_score\", k) for k in layer_keys]\n    layer_keys = [re.sub(\"^fc6\\\\.\", \"box_head.fc1.\", k) for k in layer_keys]\n    layer_keys = [re.sub(\"^fc7\\\\.\", \"box_head.fc2.\", k) for k in layer_keys]\n    # 
4conv1fc head tensor names: head_conv1_w, head_conv1_gn_s\n    layer_keys = [re.sub(\"^head\\\\.conv\", \"box_head.conv\", k) for k in layer_keys]\n\n    # --------------------------------------------------------------------------\n    # FPN lateral and output convolutions\n    # --------------------------------------------------------------------------\n    def fpn_map(name):\n        \"\"\"\n        Look for keys with the following patterns:\n        1) Starts with \"fpn.inner.\"\n           Example: \"fpn.inner.res2.2.sum.lateral.weight\"\n           Meaning: These are lateral pathway convolutions\n        2) Starts with \"fpn.res\"\n           Example: \"fpn.res2.2.sum.weight\"\n           Meaning: These are FPN output convolutions\n        \"\"\"\n        splits = name.split(\".\")\n        norm = \".norm\" if \"norm\" in splits else \"\"\n        if name.startswith(\"fpn.inner.\"):\n            # splits example: ['fpn', 'inner', 'res2', '2', 'sum', 'lateral', 'weight']\n            stage = int(splits[2][len(\"res\") :])\n            return \"fpn_lateral{}{}.{}\".format(stage, norm, splits[-1])\n        elif name.startswith(\"fpn.res\"):\n            # splits example: ['fpn', 'res2', '2', 'sum', 'weight']\n            stage = int(splits[1][len(\"res\") :])\n            return \"fpn_output{}{}.{}\".format(stage, norm, splits[-1])\n        return name\n\n    layer_keys = [fpn_map(k) for k in layer_keys]\n\n    # --------------------------------------------------------------------------\n    # Mask R-CNN mask head\n    # --------------------------------------------------------------------------\n    # roi_heads.StandardROIHeads case\n    layer_keys = [k.replace(\".[mask].fcn\", \"mask_head.mask_fcn\") for k in layer_keys]\n    layer_keys = [re.sub(\"^\\\\.mask\\\\.fcn\", \"mask_head.mask_fcn\", k) for k in layer_keys]\n    layer_keys = [k.replace(\"mask.fcn.logits\", \"mask_head.predictor\") for k in layer_keys]\n    # roi_heads.Res5ROIHeads case\n    layer_keys 
= [k.replace(\"conv5.mask\", \"mask_head.deconv\") for k in layer_keys]\n\n    # --------------------------------------------------------------------------\n    # Keypoint R-CNN head\n    # --------------------------------------------------------------------------\n    # interestingly, the keypoint head convs have blob names that are simply \"conv_fcnX\"\n    layer_keys = [k.replace(\"conv.fcn\", \"roi_heads.keypoint_head.conv_fcn\") for k in layer_keys]\n    layer_keys = [\n        k.replace(\"kps.score.lowres\", \"roi_heads.keypoint_head.score_lowres\") for k in layer_keys\n    ]\n    layer_keys = [k.replace(\"kps.score.\", \"roi_heads.keypoint_head.score.\") for k in layer_keys]\n\n    # --------------------------------------------------------------------------\n    # Done with replacements\n    # --------------------------------------------------------------------------\n    assert len(set(layer_keys)) == len(layer_keys)\n    assert len(original_keys) == len(layer_keys)\n\n    new_weights = {}\n    new_keys_to_original_keys = {}\n    for orig, renamed in zip(original_keys, layer_keys):\n        new_keys_to_original_keys[renamed] = orig\n        if renamed.startswith(\"bbox_pred.\") or renamed.startswith(\"mask_head.predictor.\"):\n            # remove the meaningless prediction weight for background class\n            new_start_idx = 4 if renamed.startswith(\"bbox_pred.\") else 1\n            new_weights[renamed] = weights[orig][new_start_idx:]\n            logger.info(\n                \"Remove prediction weight for background class in {}. 
The shape changes from \"\n                \"{} to {}.\".format(\n                    renamed, tuple(weights[orig].shape), tuple(new_weights[renamed].shape)\n                )\n            )\n        elif renamed.startswith(\"cls_score.\"):\n            # move weights of bg class from original index 0 to last index\n            logger.info(\n                \"Move classification weights for background class in {} from index 0 to \"\n                \"index {}.\".format(renamed, weights[orig].shape[0] - 1)\n            )\n            new_weights[renamed] = torch.cat([weights[orig][1:], weights[orig][:1]])\n        else:\n            new_weights[renamed] = weights[orig]\n\n    return new_weights, new_keys_to_original_keys\n\n\n# Note the current matching is not symmetric.\n# it assumes model_state_dict will have longer names.\ndef align_and_update_state_dicts(model_state_dict, ckpt_state_dict, c2_conversion=True):\n    \"\"\"\n    Match names between the two state-dict, and update the values of model_state_dict in-place with\n    copies of the matched tensor in ckpt_state_dict.\n    If `c2_conversion==True`, `ckpt_state_dict` is assumed to be a Caffe2\n    model and will be renamed at first.\n\n    Strategy: suppose that the models that we will create will have prefixes appended\n    to each of its keys, for example due to an extra level of nesting that the original\n    pre-trained weights from ImageNet won't contain. For example, model.state_dict()\n    might return backbone[0].body.res2.conv1.weight, while the pre-trained model contains\n    res2.conv1.weight. We thus want to match both parameters together.\n    For that, we look for each model weight, look among all loaded keys if there is one\n    that is a suffix of the current weight name, and use it if that's the case.\n    If multiple matches exist, take the one with longest size\n    of the corresponding name. 
For example, for the same model as before, the pretrained\n    weight file can contain both res2.conv1.weight, as well as conv1.weight. In this case,\n    we want to match backbone[0].body.conv1.weight to conv1.weight, and\n    backbone[0].body.res2.conv1.weight to res2.conv1.weight.\n    \"\"\"\n    model_keys = sorted(list(model_state_dict.keys()))\n    if c2_conversion:\n        ckpt_state_dict, original_keys = convert_c2_detectron_names(ckpt_state_dict)\n        # original_keys: the name in the original dict (before renaming)\n    else:\n        original_keys = {x: x for x in ckpt_state_dict.keys()}\n    ckpt_keys = sorted(list(ckpt_state_dict.keys()))\n\n    def match(a, b):\n        # Matched ckpt_key should be a complete (starts with '.') suffix.\n        # For example, roi_heads.mesh_head.whatever_conv1 does not match conv1,\n        # but matches whatever_conv1 or mesh_head.whatever_conv1.\n        return a == b or a.endswith(\".\" + b)\n\n    # get a matrix of string matches, where each (i, j) entry correspond to the size of the\n    # ckpt_key string, if it matches\n    match_matrix = [len(j) if match(i, j) else 0 for i in model_keys for j in ckpt_keys]\n    match_matrix = torch.as_tensor(match_matrix).view(len(model_keys), len(ckpt_keys))\n    # use the matched one with longest size in case of multiple matches\n    max_match_size, idxs = match_matrix.max(1)\n    # remove indices that correspond to no-match\n    idxs[max_match_size == 0] = -1\n\n    # used for logging\n    max_len_model = max(len(key) for key in model_keys) if model_keys else 1\n    max_len_ckpt = max(len(key) for key in ckpt_keys) if ckpt_keys else 1\n    log_str_template = \"{: <{}} loaded from {: <{}} of shape {}\"\n    logger = logging.getLogger(__name__)\n    # matched_pairs (matched checkpoint key --> matched model key)\n    matched_keys = {}\n    for idx_model, idx_ckpt in enumerate(idxs.tolist()):\n        if idx_ckpt == -1:\n            continue\n        key_model = 
model_keys[idx_model]\n        key_ckpt = ckpt_keys[idx_ckpt]\n        value_ckpt = ckpt_state_dict[key_ckpt]\n        shape_in_model = model_state_dict[key_model].shape\n\n        if shape_in_model != value_ckpt.shape:\n            logger.warning(\n                \"Shape of {} in checkpoint is {}, while shape of {} in model is {}.\".format(\n                    key_ckpt, value_ckpt.shape, key_model, shape_in_model\n                )\n            )\n            logger.warning(\n                \"{} will not be loaded. Please double check and see if this is desired.\".format(\n                    key_ckpt\n                )\n            )\n            continue\n\n        model_state_dict[key_model] = value_ckpt.clone()\n        if key_ckpt in matched_keys:  # already added to matched_keys\n            logger.error(\n                \"Ambiguity found for {} in checkpoint!\"\n                \"It matches at least two keys in the model ({} and {}).\".format(\n                    key_ckpt, key_model, matched_keys[key_ckpt]\n                )\n            )\n            raise ValueError(\"Cannot match one checkpoint key to multiple keys in the model.\")\n\n        matched_keys[key_ckpt] = key_model\n        logger.info(\n            log_str_template.format(\n                key_model,\n                max_len_model,\n                original_keys[key_ckpt],\n                max_len_ckpt,\n                tuple(shape_in_model),\n            )\n        )\n    matched_model_keys = matched_keys.values()\n    matched_ckpt_keys = matched_keys.keys()\n    # print warnings about unmatched keys on both side\n    unmatched_model_keys = [k for k in model_keys if k not in matched_model_keys]\n    if len(unmatched_model_keys):\n        logger.info(get_missing_parameters_message(unmatched_model_keys))\n\n    unmatched_ckpt_keys = [k for k in ckpt_keys if k not in matched_ckpt_keys]\n    if len(unmatched_ckpt_keys):\n        logger.info(\n            
get_unexpected_parameters_message(original_keys[x] for x in unmatched_ckpt_keys)\n        )\n"
  },
  {
    "path": "detectron2/checkpoint/catalog.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport logging\nfrom fvcore.common.file_io import PathHandler, PathManager\n\n\nclass ModelCatalog(object):\n    \"\"\"\n    Store mappings from names to third-party models.\n    \"\"\"\n\n    S3_C2_DETECTRON_PREFIX = \"https://dl.fbaipublicfiles.com/detectron\"\n\n    # MSRA models have STRIDE_IN_1X1=True. False otherwise.\n    # NOTE: all BN models here have fused BN into an affine layer.\n    # As a result, you should only load them to a model with \"FrozenBN\".\n    # Loading them to a model with regular BN or SyncBN is wrong.\n    # Even when loaded to FrozenBN, it is still different from affine by an epsilon,\n    # which should be negligible for training.\n    # NOTE: all models here uses PIXEL_STD=[1,1,1]\n    C2_IMAGENET_MODELS = {\n        \"MSRA/R-50\": \"ImageNetPretrained/MSRA/R-50.pkl\",\n        \"MSRA/R-101\": \"ImageNetPretrained/MSRA/R-101.pkl\",\n        \"FAIR/R-50-GN\": \"ImageNetPretrained/47261647/R-50-GN.pkl\",\n        \"FAIR/R-101-GN\": \"ImageNetPretrained/47592356/R-101-GN.pkl\",\n        \"FAIR/X-101-32x8d\": \"ImageNetPretrained/20171220/X-101-32x8d.pkl\",\n        \"FAIR/X-101-64x4d\": \"ImageNetPretrained/FBResNeXt/X-101-64x4d.pkl\",\n        \"FAIR/X-152-32x8d-IN5k\": \"ImageNetPretrained/25093814/X-152-32x8d-IN5k.pkl\",\n    }\n\n    C2_DETECTRON_PATH_FORMAT = (\n        \"{prefix}/{url}/output/train/{dataset}/{type}/model_final.pkl\"\n    )  # noqa B950\n\n    C2_DATASET_COCO = \"coco_2014_train%3Acoco_2014_valminusminival\"\n    C2_DATASET_COCO_KEYPOINTS = \"keypoints_coco_2014_train%3Akeypoints_coco_2014_valminusminival\"\n\n    # format: {model_name} -> part of the url\n    C2_DETECTRON_MODELS = {\n        \"35857197/e2e_faster_rcnn_R-50-C4_1x\": \"35857197/12_2017_baselines/e2e_faster_rcnn_R-50-C4_1x.yaml.01_33_49.iAX0mXvW\",  # noqa B950\n        \"35857345/e2e_faster_rcnn_R-50-FPN_1x\": 
\"35857345/12_2017_baselines/e2e_faster_rcnn_R-50-FPN_1x.yaml.01_36_30.cUF7QR7I\",  # noqa B950\n        \"35857890/e2e_faster_rcnn_R-101-FPN_1x\": \"35857890/12_2017_baselines/e2e_faster_rcnn_R-101-FPN_1x.yaml.01_38_50.sNxI7sX7\",  # noqa B950\n        \"36761737/e2e_faster_rcnn_X-101-32x8d-FPN_1x\": \"36761737/12_2017_baselines/e2e_faster_rcnn_X-101-32x8d-FPN_1x.yaml.06_31_39.5MIHi1fZ\",  # noqa B950\n        \"35858791/e2e_mask_rcnn_R-50-C4_1x\": \"35858791/12_2017_baselines/e2e_mask_rcnn_R-50-C4_1x.yaml.01_45_57.ZgkA7hPB\",  # noqa B950\n        \"35858933/e2e_mask_rcnn_R-50-FPN_1x\": \"35858933/12_2017_baselines/e2e_mask_rcnn_R-50-FPN_1x.yaml.01_48_14.DzEQe4wC\",  # noqa B950\n        \"35861795/e2e_mask_rcnn_R-101-FPN_1x\": \"35861795/12_2017_baselines/e2e_mask_rcnn_R-101-FPN_1x.yaml.02_31_37.KqyEK4tT\",  # noqa B950\n        \"36761843/e2e_mask_rcnn_X-101-32x8d-FPN_1x\": \"36761843/12_2017_baselines/e2e_mask_rcnn_X-101-32x8d-FPN_1x.yaml.06_35_59.RZotkLKI\",  # noqa B950\n        \"48616381/e2e_mask_rcnn_R-50-FPN_2x_gn\": \"GN/48616381/04_2018_gn_baselines/e2e_mask_rcnn_R-50-FPN_2x_gn_0416.13_23_38.bTlTI97Q\",  # noqa B950\n        \"37697547/e2e_keypoint_rcnn_R-50-FPN_1x\": \"37697547/12_2017_baselines/e2e_keypoint_rcnn_R-50-FPN_1x.yaml.08_42_54.kdzV35ao\",  # noqa B950\n        \"35998355/rpn_R-50-C4_1x\": \"35998355/12_2017_baselines/rpn_R-50-C4_1x.yaml.08_00_43.njH5oD9L\",  # noqa B950\n        \"35998814/rpn_R-50-FPN_1x\": \"35998814/12_2017_baselines/rpn_R-50-FPN_1x.yaml.08_06_03.Axg0r179\",  # noqa B950\n        \"36225147/fast_R-50-FPN_1x\": \"36225147/12_2017_baselines/fast_rcnn_R-50-FPN_1x.yaml.08_39_09.L3obSdQ2\",  # noqa B950\n    }\n\n    @staticmethod\n    def get(name):\n        if name.startswith(\"Caffe2Detectron/COCO\"):\n            return ModelCatalog._get_c2_detectron_baseline(name)\n        if name.startswith(\"ImageNetPretrained/\"):\n            return ModelCatalog._get_c2_imagenet_pretrained(name)\n        raise RuntimeError(\"model 
not present in the catalog: {}\".format(name))\n\n    @staticmethod\n    def _get_c2_imagenet_pretrained(name):\n        prefix = ModelCatalog.S3_C2_DETECTRON_PREFIX\n        name = name[len(\"ImageNetPretrained/\") :]\n        name = ModelCatalog.C2_IMAGENET_MODELS[name]\n        url = \"/\".join([prefix, name])\n        return url\n\n    @staticmethod\n    def _get_c2_detectron_baseline(name):\n        name = name[len(\"Caffe2Detectron/COCO/\") :]\n        url = ModelCatalog.C2_DETECTRON_MODELS[name]\n        if \"keypoint_rcnn\" in name:\n            dataset = ModelCatalog.C2_DATASET_COCO_KEYPOINTS\n        else:\n            dataset = ModelCatalog.C2_DATASET_COCO\n\n        if \"35998355/rpn_R-50-C4_1x\" in name:\n            # this one model is somehow different from others ..\n            type = \"rpn\"\n        else:\n            type = \"generalized_rcnn\"\n\n        # Detectron C2 models are stored in the structure defined in `C2_DETECTRON_PATH_FORMAT`.\n        url = ModelCatalog.C2_DETECTRON_PATH_FORMAT.format(\n            prefix=ModelCatalog.S3_C2_DETECTRON_PREFIX, url=url, type=type, dataset=dataset\n        )\n        return url\n\n\nclass ModelCatalogHandler(PathHandler):\n    \"\"\"\n    Resolve URL like catalog://.\n    \"\"\"\n\n    PREFIX = \"catalog://\"\n\n    def _get_supported_prefixes(self):\n        return [self.PREFIX]\n\n    def _get_local_path(self, path):\n        logger = logging.getLogger(__name__)\n        catalog_path = ModelCatalog.get(path[len(self.PREFIX) :])\n        logger.info(\"Catalog entry {} points to {}\".format(path, catalog_path))\n        return PathManager.get_local_path(catalog_path)\n\n    def _open(self, path, mode=\"r\", **kwargs):\n        return PathManager.open(self._get_local_path(path), mode, **kwargs)\n\n\nclass Detectron2Handler(PathHandler):\n    \"\"\"\n    Resolve anything that's in Detectron2 model zoo.\n    \"\"\"\n\n    PREFIX = \"detectron2://\"\n    S3_DETECTRON2_PREFIX = 
\"https://dl.fbaipublicfiles.com/detectron2/\"\n\n    def _get_supported_prefixes(self):\n        return [self.PREFIX]\n\n    def _get_local_path(self, path):\n        name = path[len(self.PREFIX) :]\n        return PathManager.get_local_path(self.S3_DETECTRON2_PREFIX + name)\n\n    def _open(self, path, mode=\"r\", **kwargs):\n        return PathManager.open(self._get_local_path(path), mode, **kwargs)\n\n\nPathManager.register_handler(ModelCatalogHandler())\nPathManager.register_handler(Detectron2Handler())\n"
  },
  {
    "path": "detectron2/checkpoint/detection_checkpoint.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport pickle\nfrom fvcore.common.checkpoint import Checkpointer\nfrom fvcore.common.file_io import PathManager\n\nimport detectron2.utils.comm as comm\n\nfrom .c2_model_loading import align_and_update_state_dicts\n\n\nclass DetectionCheckpointer(Checkpointer):\n    \"\"\"\n    Same as :class:`Checkpointer`, but is able to handle models in detectron & detectron2\n    model zoo, and apply conversions for legacy models.\n    \"\"\"\n\n    def __init__(self, model, save_dir=\"\", *, save_to_disk=None, **checkpointables):\n        is_main_process = comm.is_main_process()\n        super().__init__(\n            model,\n            save_dir,\n            save_to_disk=is_main_process if save_to_disk is None else save_to_disk,\n            **checkpointables,\n        )\n\n    def _load_file(self, filename):\n        if filename.endswith(\".pkl\"):\n            with PathManager.open(filename, \"rb\") as f:\n                data = pickle.load(f, encoding=\"latin1\")\n            if \"model\" in data and \"__author__\" in data:\n                # file is in Detectron2 model zoo format\n                self.logger.info(\"Reading a file from '{}'\".format(data[\"__author__\"]))\n                return data\n            else:\n                # assume file is from Caffe2 / Detectron1 model zoo\n                if \"blobs\" in data:\n                    # Detection models have \"blobs\", but ImageNet models don't\n                    data = data[\"blobs\"]\n                data = {k: v for k, v in data.items() if not k.endswith(\"_momentum\")}\n                return {\"model\": data, \"__author__\": \"Caffe2\", \"matching_heuristics\": True}\n\n        loaded = super()._load_file(filename)  # load native pth checkpoint\n        if \"model\" not in loaded:\n            loaded = {\"model\": loaded}\n        return loaded\n\n    def _load_model(self, checkpoint):\n        if 
checkpoint.get(\"matching_heuristics\", False):\n            self._convert_ndarray_to_tensor(checkpoint[\"model\"])\n            # convert weights by name-matching heuristics\n            model_state_dict = self.model.state_dict()\n            align_and_update_state_dicts(\n                model_state_dict,\n                checkpoint[\"model\"],\n                c2_conversion=checkpoint.get(\"__author__\", None) == \"Caffe2\",\n            )\n            checkpoint[\"model\"] = model_state_dict\n        # for non-caffe2 models, use standard ways to load it\n        super()._load_model(checkpoint)\n"
  },
  {
    "path": "detectron2/checkpoint/model_zoo.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport logging\nfrom fvcore.common.file_io import PathHandler, PathManager\n\n\nclass ModelCatalog(object):\n    \"\"\"\n    Store mappings from names to third-party models.\n    \"\"\"\n\n    S3_C2_DETECTRON_PREFIX = \"https://dl.fbaipublicfiles.com/detectron\"\n\n    # MSRA models have STRIDE_IN_1X1=True. False otherwise.\n    # NOTE: all BN models here have fused BN into an affine layer.\n    # As a result, you should only load them to a model with \"FrozenBN\".\n    # Loading them to a model with regular BN or SyncBN is wrong.\n    # Even when loaded to FrozenBN, it is still different from affine by an epsilon,\n    # which should be negligible for training.\n    # NOTE: all models here uses PIXEL_STD=[1,1,1]\n    C2_IMAGENET_MODELS = {\n        \"MSRA/R-50\": \"ImageNetPretrained/MSRA/R-50.pkl\",\n        \"MSRA/R-101\": \"ImageNetPretrained/MSRA/R-101.pkl\",\n        \"FAIR/R-50-GN\": \"ImageNetPretrained/47261647/R-50-GN.pkl\",\n        \"FAIR/R-101-GN\": \"ImageNetPretrained/47592356/R-101-GN.pkl\",\n        \"FAIR/X-101-32x8d\": \"ImageNetPretrained/20171220/X-101-32x8d.pkl\",\n        \"FAIR/X-101-64x4d\": \"ImageNetPretrained/FBResNeXt/X-101-64x4d.pkl\",\n        \"FAIR/X-152-32x8d-IN5k\": \"ImageNetPretrained/25093814/X-152-32x8d-IN5k.pkl\",\n    }\n\n    C2_DETECTRON_PATH_FORMAT = (\n        \"{prefix}/{url}/output/train/{dataset}/{type}/model_final.pkl\"\n    )  # noqa B950\n\n    C2_DATASET_COCO = \"coco_2014_train%3Acoco_2014_valminusminival\"\n    C2_DATASET_COCO_KEYPOINTS = \"keypoints_coco_2014_train%3Akeypoints_coco_2014_valminusminival\"\n\n    # format: {model_name} -> part of the url\n    C2_DETECTRON_MODELS = {\n        \"35857197/e2e_faster_rcnn_R-50-C4_1x\": \"35857197/12_2017_baselines/e2e_faster_rcnn_R-50-C4_1x.yaml.01_33_49.iAX0mXvW\",  # noqa B950\n        \"35857345/e2e_faster_rcnn_R-50-FPN_1x\": 
\"35857345/12_2017_baselines/e2e_faster_rcnn_R-50-FPN_1x.yaml.01_36_30.cUF7QR7I\",  # noqa B950\n        \"35857890/e2e_faster_rcnn_R-101-FPN_1x\": \"35857890/12_2017_baselines/e2e_faster_rcnn_R-101-FPN_1x.yaml.01_38_50.sNxI7sX7\",  # noqa B950\n        \"36761737/e2e_faster_rcnn_X-101-32x8d-FPN_1x\": \"36761737/12_2017_baselines/e2e_faster_rcnn_X-101-32x8d-FPN_1x.yaml.06_31_39.5MIHi1fZ\",  # noqa B950\n        \"35858791/e2e_mask_rcnn_R-50-C4_1x\": \"35858791/12_2017_baselines/e2e_mask_rcnn_R-50-C4_1x.yaml.01_45_57.ZgkA7hPB\",  # noqa B950\n        \"35858933/e2e_mask_rcnn_R-50-FPN_1x\": \"35858933/12_2017_baselines/e2e_mask_rcnn_R-50-FPN_1x.yaml.01_48_14.DzEQe4wC\",  # noqa B950\n        \"35861795/e2e_mask_rcnn_R-101-FPN_1x\": \"35861795/12_2017_baselines/e2e_mask_rcnn_R-101-FPN_1x.yaml.02_31_37.KqyEK4tT\",  # noqa B950\n        \"36761843/e2e_mask_rcnn_X-101-32x8d-FPN_1x\": \"36761843/12_2017_baselines/e2e_mask_rcnn_X-101-32x8d-FPN_1x.yaml.06_35_59.RZotkLKI\",  # noqa B950\n        \"48616381/e2e_mask_rcnn_R-50-FPN_2x_gn\": \"GN/48616381/04_2018_gn_baselines/e2e_mask_rcnn_R-50-FPN_2x_gn_0416.13_23_38.bTlTI97Q\",  # noqa B950\n        \"37697547/e2e_keypoint_rcnn_R-50-FPN_1x\": \"37697547/12_2017_baselines/e2e_keypoint_rcnn_R-50-FPN_1x.yaml.08_42_54.kdzV35ao\",  # noqa B950\n        \"35998355/rpn_R-50-C4_1x\": \"35998355/12_2017_baselines/rpn_R-50-C4_1x.yaml.08_00_43.njH5oD9L\",  # noqa B950\n        \"35998814/rpn_R-50-FPN_1x\": \"35998814/12_2017_baselines/rpn_R-50-FPN_1x.yaml.08_06_03.Axg0r179\",  # noqa B950\n        \"36225147/fast_R-50-FPN_1x\": \"36225147/12_2017_baselines/fast_rcnn_R-50-FPN_1x.yaml.08_39_09.L3obSdQ2\",  # noqa B950\n    }\n\n    @staticmethod\n    def get(name):\n        if name.startswith(\"Caffe2Detectron/COCO\"):\n            return ModelCatalog._get_c2_detectron_baseline(name)\n        if name.startswith(\"ImageNetPretrained/\"):\n            return ModelCatalog._get_c2_imagenet_pretrained(name)\n        raise RuntimeError(\"model 
not present in the catalog: {}\".format(name))\n\n    @staticmethod\n    def _get_c2_imagenet_pretrained(name):\n        prefix = ModelCatalog.S3_C2_DETECTRON_PREFIX\n        name = name[len(\"ImageNetPretrained/\") :]\n        name = ModelCatalog.C2_IMAGENET_MODELS[name]\n        url = \"/\".join([prefix, name])\n        return url\n\n    @staticmethod\n    def _get_c2_detectron_baseline(name):\n        name = name[len(\"Caffe2Detectron/COCO/\") :]\n        url = ModelCatalog.C2_DETECTRON_MODELS[name]\n        if \"keypoint_rcnn\" in name:\n            dataset = ModelCatalog.C2_DATASET_COCO_KEYPOINTS\n        else:\n            dataset = ModelCatalog.C2_DATASET_COCO\n\n        if \"35998355/rpn_R-50-C4_1x\" in name:\n            # this one model is somehow different from others ..\n            type = \"rpn\"\n        else:\n            type = \"generalized_rcnn\"\n\n        # Detectron C2 models are stored in the structure defined in `C2_DETECTRON_PATH_FORMAT`.\n        url = ModelCatalog.C2_DETECTRON_PATH_FORMAT.format(\n            prefix=ModelCatalog.S3_C2_DETECTRON_PREFIX, url=url, type=type, dataset=dataset\n        )\n        return url\n\n\nclass ModelCatalogHandler(PathHandler):\n    \"\"\"\n    Resolve URL like catalog://.\n    \"\"\"\n\n    PREFIX = \"catalog://\"\n\n    def _get_supported_prefixes(self):\n        return [self.PREFIX]\n\n    def _get_local_path(self, path):\n        logger = logging.getLogger(__name__)\n        catalog_path = ModelCatalog.get(path[len(self.PREFIX) :])\n        logger.info(\"Catalog entry {} points to {}\".format(path, catalog_path))\n        return PathManager.get_local_path(catalog_path)\n\n    def _open(self, path, mode=\"r\"):\n        return PathManager.open(self._get_local_path(path), mode)\n\n\nclass Detectron2Handler(PathHandler):\n    \"\"\"\n    Resolve anything that's in Detectron2 model zoo.\n    \"\"\"\n\n    PREFIX = \"detectron2://\"\n    S3_DETECTRON2_PREFIX = 
\"https://dl.fbaipublicfiles.com/detectron2/\"\n\n    def _get_supported_prefixes(self):\n        return [self.PREFIX]\n\n    def _get_local_path(self, path):\n        name = path[len(self.PREFIX) :]\n        return PathManager.get_local_path(self.S3_DETECTRON2_PREFIX + name)\n\n    def _open(self, path, mode=\"r\"):\n        return PathManager.open(self._get_local_path(path), mode)\n\n\nPathManager.register_handler(ModelCatalogHandler())\nPathManager.register_handler(Detectron2Handler())\n"
  },
  {
    "path": "detectron2/config/__init__.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nfrom .compat import downgrade_config, upgrade_config\nfrom .config import CfgNode, get_cfg, global_cfg, set_global_cfg\n\n__all__ = [\n    \"CfgNode\",\n    \"get_cfg\",\n    \"global_cfg\",\n    \"set_global_cfg\",\n    \"downgrade_config\",\n    \"upgrade_config\",\n]\n"
  },
  {
    "path": "detectron2/config/compat.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\n\"\"\"\nBackward compatibility of configs.\n\nInstructions to bump version:\n+ It's not needed to bump version if new keys are added.\n  It's only needed when backward-incompatible changes happen\n  (i.e., some existing keys disappear, or the meaning of a key changes)\n+ To bump version, do the following:\n    1. Increment _C.VERSION in defaults.py\n    2. Add a converter in this file.\n\n      Each ConverterVX has a function \"upgrade\" which in-place upgrades config from X-1 to X,\n      and a function \"downgrade\" which in-place downgrades config from X to X-1\n\n      In each function, VERSION is left unchanged.\n\n      Each converter assumes that its input has the relevant keys\n      (i.e., the input is not a partial config).\n    3. Run the tests (test_config.py) to make sure the upgrade & downgrade\n       functions are consistent.\n\"\"\"\n\nimport logging\nfrom typing import List, Optional, Tuple\n\nfrom .config import CfgNode as CN\nfrom .defaults import _C\n\n__all__ = [\"upgrade_config\", \"downgrade_config\"]\n\n\ndef upgrade_config(cfg: CN, to_version: Optional[int] = None) -> CN:\n    \"\"\"\n    Upgrade a config from its current version to a newer version.\n\n    Args:\n        cfg (CfgNode):\n        to_version (int): defaults to the latest version.\n    \"\"\"\n    cfg = cfg.clone()\n    if to_version is None:\n        to_version = _C.VERSION\n\n    assert cfg.VERSION <= to_version, \"Cannot upgrade from v{} to v{}!\".format(\n        cfg.VERSION, to_version\n    )\n    for k in range(cfg.VERSION, to_version):\n        converter = globals()[\"ConverterV\" + str(k + 1)]\n        converter.upgrade(cfg)\n        cfg.VERSION = k + 1\n    return cfg\n\n\ndef downgrade_config(cfg: CN, to_version: int) -> CN:\n    \"\"\"\n    Downgrade a config from its current version to an older version.\n\n    Args:\n        cfg (CfgNode):\n        to_version (int):\n\n    
Note:\n        A general downgrade of arbitrary configs is not always possible due to the\n        different functionalities in different versions.\n        The purpose of downgrade is only to recover the defaults in old versions,\n        allowing it to load an old partial yaml config.\n        Therefore, the implementation only needs to fill in the default values\n        in the old version when a general downgrade is not possible.\n    \"\"\"\n    cfg = cfg.clone()\n    assert cfg.VERSION >= to_version, \"Cannot downgrade from v{} to v{}!\".format(\n        cfg.VERSION, to_version\n    )\n    for k in range(cfg.VERSION, to_version, -1):\n        converter = globals()[\"ConverterV\" + str(k)]\n        converter.downgrade(cfg)\n        cfg.VERSION = k - 1\n    return cfg\n\n\ndef guess_version(cfg: CN, filename: str) -> int:\n    \"\"\"\n    Guess the version of a partial config where the VERSION field is not specified.\n    Returns the version, or the latest if cannot make a guess.\n\n    This makes it easier for users to migrate.\n    \"\"\"\n    logger = logging.getLogger(__name__)\n\n    def _has(name: str) -> bool:\n        cur = cfg\n        for n in name.split(\".\"):\n            if n not in cur:\n                return False\n            cur = cur[n]\n        return True\n\n    # Most users' partial configs have \"MODEL.WEIGHT\", so guess on it\n    ret = None\n    if _has(\"MODEL.WEIGHT\") or _has(\"TEST.AUG_ON\"):\n        ret = 1\n\n    if ret is not None:\n        logger.warning(\"Config '{}' has no VERSION. Assuming it to be v{}.\".format(filename, ret))\n    else:\n        ret = _C.VERSION\n        logger.warning(\n            \"Config '{}' has no VERSION. 
Assuming it to be compatible with latest v{}.\".format(\n                filename, ret\n            )\n        )\n    return ret\n\n\ndef _rename(cfg: CN, old: str, new: str) -> None:\n    old_keys = old.split(\".\")\n    new_keys = new.split(\".\")\n\n    def _set(key_seq: List[str], val: str) -> None:\n        cur = cfg\n        for k in key_seq[:-1]:\n            if k not in cur:\n                cur[k] = CN()\n            cur = cur[k]\n        cur[key_seq[-1]] = val\n\n    def _get(key_seq: List[str]) -> CN:\n        cur = cfg\n        for k in key_seq:\n            cur = cur[k]\n        return cur\n\n    def _del(key_seq: List[str]) -> None:\n        cur = cfg\n        for k in key_seq[:-1]:\n            cur = cur[k]\n        del cur[key_seq[-1]]\n        if len(cur) == 0 and len(key_seq) > 1:\n            _del(key_seq[:-1])\n\n    _set(new_keys, _get(old_keys))\n    _del(old_keys)\n\n\nclass _RenameConverter:\n    \"\"\"\n    A converter that handles simple rename.\n    \"\"\"\n\n    RENAME: List[Tuple[str, str]] = []  # list of tuples of (old name, new name)\n\n    @classmethod\n    def upgrade(cls, cfg: CN) -> None:\n        for old, new in cls.RENAME:\n            _rename(cfg, old, new)\n\n    @classmethod\n    def downgrade(cls, cfg: CN) -> None:\n        for old, new in cls.RENAME[::-1]:\n            _rename(cfg, new, old)\n\n\nclass ConverterV1(_RenameConverter):\n    RENAME = [(\"MODEL.RPN_HEAD.NAME\", \"MODEL.RPN.HEAD_NAME\")]\n\n\nclass ConverterV2(_RenameConverter):\n    \"\"\"\n    A large bulk of rename, before public release.\n    \"\"\"\n\n    RENAME = [\n        (\"MODEL.WEIGHT\", \"MODEL.WEIGHTS\"),\n        (\"MODEL.PANOPTIC_FPN.SEMANTIC_LOSS_SCALE\", \"MODEL.SEM_SEG_HEAD.LOSS_WEIGHT\"),\n        (\"MODEL.PANOPTIC_FPN.RPN_LOSS_SCALE\", \"MODEL.RPN.LOSS_WEIGHT\"),\n        (\"MODEL.PANOPTIC_FPN.INSTANCE_LOSS_SCALE\", \"MODEL.PANOPTIC_FPN.INSTANCE_LOSS_WEIGHT\"),\n        (\"MODEL.PANOPTIC_FPN.COMBINE_ON\", 
\"MODEL.PANOPTIC_FPN.COMBINE.ENABLED\"),\n        (\n            \"MODEL.PANOPTIC_FPN.COMBINE_OVERLAP_THRESHOLD\",\n            \"MODEL.PANOPTIC_FPN.COMBINE.OVERLAP_THRESH\",\n        ),\n        (\n            \"MODEL.PANOPTIC_FPN.COMBINE_STUFF_AREA_LIMIT\",\n            \"MODEL.PANOPTIC_FPN.COMBINE.STUFF_AREA_LIMIT\",\n        ),\n        (\n            \"MODEL.PANOPTIC_FPN.COMBINE_INSTANCES_CONFIDENCE_THRESHOLD\",\n            \"MODEL.PANOPTIC_FPN.COMBINE.INSTANCES_CONFIDENCE_THRESH\",\n        ),\n        (\"MODEL.ROI_HEADS.SCORE_THRESH\", \"MODEL.ROI_HEADS.SCORE_THRESH_TEST\"),\n        (\"MODEL.ROI_HEADS.NMS\", \"MODEL.ROI_HEADS.NMS_THRESH_TEST\"),\n        (\"MODEL.RETINANET.INFERENCE_SCORE_THRESHOLD\", \"MODEL.RETINANET.SCORE_THRESH_TEST\"),\n        (\"MODEL.RETINANET.INFERENCE_TOPK_CANDIDATES\", \"MODEL.RETINANET.TOPK_CANDIDATES_TEST\"),\n        (\"MODEL.RETINANET.INFERENCE_NMS_THRESHOLD\", \"MODEL.RETINANET.NMS_THRESH_TEST\"),\n        (\"TEST.DETECTIONS_PER_IMG\", \"TEST.DETECTIONS_PER_IMAGE\"),\n        (\"TEST.AUG_ON\", \"TEST.AUG.ENABLED\"),\n        (\"TEST.AUG_MIN_SIZES\", \"TEST.AUG.MIN_SIZES\"),\n        (\"TEST.AUG_MAX_SIZE\", \"TEST.AUG.MAX_SIZE\"),\n        (\"TEST.AUG_FLIP\", \"TEST.AUG.FLIP\"),\n    ]\n\n    @classmethod\n    def upgrade(cls, cfg: CN) -> None:\n        super().upgrade(cfg)\n\n        if cfg.MODEL.META_ARCHITECTURE == \"RetinaNet\":\n            _rename(\n                cfg, \"MODEL.RETINANET.ANCHOR_ASPECT_RATIOS\", \"MODEL.ANCHOR_GENERATOR.ASPECT_RATIOS\"\n            )\n            _rename(cfg, \"MODEL.RETINANET.ANCHOR_SIZES\", \"MODEL.ANCHOR_GENERATOR.SIZES\")\n            del cfg[\"MODEL\"][\"RPN\"][\"ANCHOR_SIZES\"]\n            del cfg[\"MODEL\"][\"RPN\"][\"ANCHOR_ASPECT_RATIOS\"]\n        else:\n            _rename(cfg, \"MODEL.RPN.ANCHOR_ASPECT_RATIOS\", \"MODEL.ANCHOR_GENERATOR.ASPECT_RATIOS\")\n            _rename(cfg, \"MODEL.RPN.ANCHOR_SIZES\", \"MODEL.ANCHOR_GENERATOR.SIZES\")\n            del 
cfg[\"MODEL\"][\"RETINANET\"][\"ANCHOR_SIZES\"]\n            del cfg[\"MODEL\"][\"RETINANET\"][\"ANCHOR_ASPECT_RATIOS\"]\n        del cfg[\"MODEL\"][\"RETINANET\"][\"ANCHOR_STRIDES\"]\n\n    @classmethod\n    def downgrade(cls, cfg: CN) -> None:\n        super().downgrade(cfg)\n\n        _rename(cfg, \"MODEL.ANCHOR_GENERATOR.ASPECT_RATIOS\", \"MODEL.RPN.ANCHOR_ASPECT_RATIOS\")\n        _rename(cfg, \"MODEL.ANCHOR_GENERATOR.SIZES\", \"MODEL.RPN.ANCHOR_SIZES\")\n        cfg.MODEL.RETINANET.ANCHOR_ASPECT_RATIOS = cfg.MODEL.RPN.ANCHOR_ASPECT_RATIOS\n        cfg.MODEL.RETINANET.ANCHOR_SIZES = cfg.MODEL.RPN.ANCHOR_SIZES\n        cfg.MODEL.RETINANET.ANCHOR_STRIDES = []  # this is not used anywhere in any version\n"
  },
  {
    "path": "detectron2/config/config.py",
    "content": "# -*- coding: utf-8 -*-\n# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\n\nimport logging\nfrom fvcore.common.config import CfgNode as _CfgNode\n\n\nclass CfgNode(_CfgNode):\n    \"\"\"\n    The same as `fvcore.common.config.CfgNode`, but different in:\n\n    1. Use unsafe yaml loading by default.\n      Note that this may lead to arbitrary code execution: you must not\n      load a config file from untrusted sources before manually inspecting\n      the content of the file.\n    2. Support config versioning.\n      When attempting to merge an old config, it will convert the old config automatically.\n\n    \"\"\"\n\n    # Note that the default value of allow_unsafe is changed to True\n    def merge_from_file(self, cfg_filename: str, allow_unsafe: bool = True) -> None:\n        loaded_cfg = _CfgNode.load_yaml_with_base(cfg_filename, allow_unsafe=allow_unsafe)\n        loaded_cfg = type(self)(loaded_cfg)\n\n        # defaults.py needs to import CfgNode\n        from .defaults import _C\n\n        latest_ver = _C.VERSION\n        assert (\n            latest_ver == self.VERSION\n        ), \"CfgNode.merge_from_file is only allowed on a config of latest version!\"\n\n        logger = logging.getLogger(__name__)\n\n        loaded_ver = loaded_cfg.get(\"VERSION\", None)\n        if loaded_ver is None:\n            from .compat import guess_version\n\n            loaded_ver = guess_version(loaded_cfg, cfg_filename)\n        assert loaded_ver <= self.VERSION, \"Cannot merge a v{} config into a v{} config.\".format(\n            loaded_ver, self.VERSION\n        )\n\n        if loaded_ver == self.VERSION:\n            self.merge_from_other_cfg(loaded_cfg)\n        else:\n            # compat.py needs to import CfgNode\n            from .compat import upgrade_config, downgrade_config\n\n            logger.warning(\n                \"Loading an old v{} config file '{}' by automatically upgrading to v{}. 
\"\n                \"See docs/CHANGELOG.md for instructions to update your files.\".format(\n                    loaded_ver, cfg_filename, self.VERSION\n                )\n            )\n            # To convert, first obtain a full config at an old version\n            old_self = downgrade_config(self, to_version=loaded_ver)\n            old_self.merge_from_other_cfg(loaded_cfg)\n            new_config = upgrade_config(old_self)\n            self.clear()\n            self.update(new_config)\n\n\nglobal_cfg = CfgNode()\n\n\ndef get_cfg() -> CfgNode:\n    \"\"\"\n    Get a copy of the default config.\n\n    Returns:\n        a detectron2 CfgNode instance.\n    \"\"\"\n    from .defaults import _C\n\n    return _C.clone()\n\n\ndef set_global_cfg(cfg: CfgNode) -> None:\n    \"\"\"\n    Let the global config point to the given cfg.\n\n    Assume that the given \"cfg\" has the key \"KEY\", after calling\n    `set_global_cfg(cfg)`, the key can be accessed by:\n\n    .. code-block:: python\n\n        from detectron2.config import global_cfg\n        print(global_cfg.KEY)\n\n    By using a hacky global config, you can access these configs anywhere,\n    without having to pass the config object or the values deep into the code.\n    This is a hacky feature introduced for quick prototyping / research exploration.\n    \"\"\"\n    global global_cfg\n    global_cfg.clear()\n    global_cfg.update(cfg)\n"
  },
  {
    "path": "detectron2/config/defaults.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nfrom .config import CfgNode as CN\n\n# -----------------------------------------------------------------------------\n# Convention about Training / Test specific parameters\n# -----------------------------------------------------------------------------\n# Whenever an argument can be either used for training or for testing, the\n# corresponding name will be post-fixed by a _TRAIN for a training parameter,\n# or _TEST for a test-specific parameter.\n# For example, the number of images during training will be\n# IMAGES_PER_BATCH_TRAIN, while the number of images for testing will be\n# IMAGES_PER_BATCH_TEST\n\n# -----------------------------------------------------------------------------\n# Config definition\n# -----------------------------------------------------------------------------\n\n_C = CN()\n\n_C.VERSION = 2\n\n_C.MODEL = CN()\n_C.MODEL.LOAD_PROPOSALS = False\n_C.MODEL.MASK_ON = False\n_C.MODEL.KEYPOINT_ON = False\n_C.MODEL.DEVICE = \"cuda\"\n_C.MODEL.META_ARCHITECTURE = \"GeneralizedRCNN\"\n\n# If the WEIGHT starts with a catalog://, like :R-50, the code will look for\n# the path in ModelCatalog. 
Else, it will use it as the specified absolute\n# path\n_C.MODEL.WEIGHTS = \"\"\n\n# Values to be used for image normalization (BGR order)\n# Default values are the mean pixel value from ImageNet: [103.53, 116.28, 123.675]\n_C.MODEL.PIXEL_MEAN = [103.530, 116.280, 123.675]\n# When using pre-trained models in Detectron1 or any MSRA models,\n# std has been absorbed into its conv1 weights, so the std needs to be set 1.\n# Otherwise, you can use [57.375, 57.120, 58.395] (ImageNet std)\n_C.MODEL.PIXEL_STD = [1.0, 1.0, 1.0]\n\n\n# -----------------------------------------------------------------------------\n# INPUT\n# -----------------------------------------------------------------------------\n_C.INPUT = CN()\n# Size of the smallest side of the image during training\n_C.INPUT.MIN_SIZE_TRAIN = (800,)\n# Sample size of smallest side by choice or random selection from range give by\n# INPUT.MIN_SIZE_TRAIN\n_C.INPUT.MIN_SIZE_TRAIN_SAMPLING = \"choice\"\n# Maximum size of the side of the image during training\n_C.INPUT.MAX_SIZE_TRAIN = 1333\n# Size of the smallest side of the image during testing. 
Set to zero to disable resize in testing.\n_C.INPUT.MIN_SIZE_TEST = 800\n# Maximum size of the side of the image during testing\n_C.INPUT.MAX_SIZE_TEST = 1333\n\n# `True` if cropping is used for data augmentation during training\n_C.INPUT.CROP = CN({\"ENABLED\": False})\n# Cropping type:\n# - \"relative\" crop (H * CROP.SIZE[0], W * CROP.SIZE[1]) part of an input of size (H, W)\n# - \"relative_range\" uniformly sample relative crop size from between [CROP.SIZE[0], [CROP.SIZE[1]].\n#   and  [1, 1] and use it as in \"relative\" scenario.\n# - \"absolute\" crop part of an input with absolute size: (CROP.SIZE[0], CROP.SIZE[1]).\n_C.INPUT.CROP.TYPE = \"relative_range\"\n# Size of crop in range (0, 1] if CROP.TYPE is \"relative\" or \"relative_range\" and in number of\n# pixels if CROP.TYPE is \"absolute\"\n_C.INPUT.CROP.SIZE = [0.9, 0.9]\n\n\n# Whether the model needs RGB, YUV, HSV etc.\n# Should be one of the modes defined here, as we use PIL to read the image:\n# https://pillow.readthedocs.io/en/stable/handbook/concepts.html#concept-modes\n# with BGR being the one exception. One can set image format to BGR, we will\n# internally use RGB for conversion and flip the channels over\n_C.INPUT.FORMAT = \"BGR\"\n_C.INPUT.MASK_FORMAT = \"polygon\"  # alternative: \"bitmask\"\n\n\n# -----------------------------------------------------------------------------\n# Dataset\n# -----------------------------------------------------------------------------\n_C.DATASETS = CN()\n# List of the dataset names for training. Must be registered in DatasetCatalog\n_C.DATASETS.TRAIN = ()\n# List of the pre-computed proposal files for training, which must be consistent\n# with datasets listed in DATASETS.TRAIN.\n_C.DATASETS.PROPOSAL_FILES_TRAIN = ()\n# Number of top scoring precomputed proposals to keep for training\n_C.DATASETS.PRECOMPUTED_PROPOSAL_TOPK_TRAIN = 2000\n# List of the dataset names for testing. 
Must be registered in DatasetCatalog\n_C.DATASETS.TEST = ()\n# List of the pre-computed proposal files for test, which must be consistent\n# with datasets listed in DATASETS.TEST.\n_C.DATASETS.PROPOSAL_FILES_TEST = ()\n# Number of top scoring precomputed proposals to keep for test\n_C.DATASETS.PRECOMPUTED_PROPOSAL_TOPK_TEST = 1000\n\n# -----------------------------------------------------------------------------\n# DataLoader\n# -----------------------------------------------------------------------------\n_C.DATALOADER = CN()\n# Number of data loading threads\n_C.DATALOADER.NUM_WORKERS = 4\n# If True, each batch should contain only images for which the aspect ratio\n# is compatible. This groups portrait images together, and landscape images\n# are not batched with portrait images.\n_C.DATALOADER.ASPECT_RATIO_GROUPING = True\n# Options: TrainingSampler, RepeatFactorTrainingSampler\n_C.DATALOADER.SAMPLER_TRAIN = \"TrainingSampler\"\n# Repeat threshold for RepeatFactorTrainingSampler\n_C.DATALOADER.REPEAT_THRESHOLD = 0.0\n\n\n# ---------------------------------------------------------------------------- #\n# Backbone options\n# ---------------------------------------------------------------------------- #\n_C.MODEL.BACKBONE = CN()\n\n_C.MODEL.BACKBONE.NAME = \"build_resnet_backbone\"\n# Add StopGrad at a specified stage so the bottom layers are frozen\n_C.MODEL.BACKBONE.FREEZE_AT = 2\n\n\n# ---------------------------------------------------------------------------- #\n# FPN options\n# ---------------------------------------------------------------------------- #\n_C.MODEL.FPN = CN()\n# Names of the input feature maps to be used by FPN\n# They must have contiguous power of 2 strides\n# e.g., [\"res2\", \"res3\", \"res4\", \"res5\"]\n_C.MODEL.FPN.IN_FEATURES = []\n_C.MODEL.FPN.OUT_CHANNELS = 256\n\n# Options: \"\" (no norm), \"GN\"\n_C.MODEL.FPN.NORM = \"\"\n\n# Types for fusing the FPN top-down and lateral features. 
Can be either \"sum\" or \"avg\"\n_C.MODEL.FPN.FUSE_TYPE = \"sum\"\n\n\n# ---------------------------------------------------------------------------- #\n# Proposal generator options\n# ---------------------------------------------------------------------------- #\n_C.MODEL.PROPOSAL_GENERATOR = CN()\n# Current proposal generators include \"RPN\", \"RRPN\" and \"PrecomputedProposals\"\n_C.MODEL.PROPOSAL_GENERATOR.NAME = \"RPN\"\n# Proposal height and width both need to be greater than MIN_SIZE\n# (a the scale used during training or inference)\n_C.MODEL.PROPOSAL_GENERATOR.MIN_SIZE = 0\n\n\n# ---------------------------------------------------------------------------- #\n# Anchor generator options\n# ---------------------------------------------------------------------------- #\n_C.MODEL.ANCHOR_GENERATOR = CN()\n# The generator can be any name in the ANCHOR_GENERATOR registry\n_C.MODEL.ANCHOR_GENERATOR.NAME = \"DefaultAnchorGenerator\"\n# anchor sizes given in absolute pixels w.r.t. the scaled network input.\n# Format: list of lists of sizes. SIZES[i] specifies the list of sizes\n# to use for IN_FEATURES[i]; len(SIZES) == len(IN_FEATURES) must be true,\n# or len(SIZES) == 1 is true and size list SIZES[0] is used for all\n# IN_FEATURES.\n_C.MODEL.ANCHOR_GENERATOR.SIZES = [[32, 64, 128, 256, 512]]\n# Anchor aspect ratios.\n# Format is list of lists of sizes. 
ASPECT_RATIOS[i] specifies the list of aspect ratios\n# to use for IN_FEATURES[i]; len(ASPECT_RATIOS) == len(IN_FEATURES) must be true,\n# or len(ASPECT_RATIOS) == 1 is true and aspect ratio list ASPECT_RATIOS[0] is used\n# for all IN_FEATURES.\n_C.MODEL.ANCHOR_GENERATOR.ASPECT_RATIOS = [[0.5, 1.0, 2.0]]\n# Anchor angles.\n# list[float], the angle in degrees, for each input feature map.\n# ANGLES[i] specifies the list of angles for IN_FEATURES[i].\n_C.MODEL.ANCHOR_GENERATOR.ANGLES = [[-90, 0, 90]]\n\n\n# ---------------------------------------------------------------------------- #\n# RPN options\n# ---------------------------------------------------------------------------- #\n_C.MODEL.RPN = CN()\n_C.MODEL.RPN.HEAD_NAME = \"StandardRPNHead\"  # used by RPN_HEAD_REGISTRY\n\n# Names of the input feature maps to be used by RPN\n# e.g., [\"p2\", \"p3\", \"p4\", \"p5\", \"p6\"] for FPN\n_C.MODEL.RPN.IN_FEATURES = [\"res4\"]\n# Remove RPN anchors that go outside the image by BOUNDARY_THRESH pixels\n# Set to -1 or a large value, e.g. 
100000, to disable pruning anchors\n_C.MODEL.RPN.BOUNDARY_THRESH = -1\n# IOU overlap ratios [BG_IOU_THRESHOLD, FG_IOU_THRESHOLD]\n# Minimum overlap required between an anchor and ground-truth box for the\n# (anchor, gt box) pair to be a positive example (IoU >= FG_IOU_THRESHOLD\n# ==> positive RPN example: 1)\n# Maximum overlap allowed between an anchor and ground-truth box for the\n# (anchor, gt box) pair to be a negative examples (IoU < BG_IOU_THRESHOLD\n# ==> negative RPN example: 0)\n# Anchors with overlap in between (BG_IOU_THRESHOLD <= IoU < FG_IOU_THRESHOLD)\n# are ignored (-1)\n_C.MODEL.RPN.IOU_THRESHOLDS = [0.3, 0.7]\n_C.MODEL.RPN.IOU_LABELS = [0, -1, 1]\n# Total number of RPN examples per image\n_C.MODEL.RPN.BATCH_SIZE_PER_IMAGE = 256\n# Target fraction of foreground (positive) examples per RPN minibatch\n_C.MODEL.RPN.POSITIVE_FRACTION = 0.5\n# Weights on (dx, dy, dw, dh) for normalizing RPN anchor regression targets\n_C.MODEL.RPN.BBOX_REG_WEIGHTS = (1.0, 1.0, 1.0, 1.0)\n# The transition point from L1 to L2 loss. 
Set to 0.0 to make the loss simply L1.\n_C.MODEL.RPN.SMOOTH_L1_BETA = 0.0\n_C.MODEL.RPN.LOSS_WEIGHT = 1.0\n# Number of top scoring RPN proposals to keep before applying NMS\n# When FPN is used, this is *per FPN level* (not total)\n_C.MODEL.RPN.PRE_NMS_TOPK_TRAIN = 12000\n_C.MODEL.RPN.PRE_NMS_TOPK_TEST = 6000\n# Number of top scoring RPN proposals to keep after applying NMS\n# When FPN is used, this limit is applied per level and then again to the union\n# of proposals from all levels\n# NOTE: When FPN is used, the meaning of this config is different from Detectron1.\n# It means per-batch topk in Detectron1, but per-image topk here.\n# See \"modeling/rpn/rpn_outputs.py\" for details.\n_C.MODEL.RPN.POST_NMS_TOPK_TRAIN = 2000\n_C.MODEL.RPN.POST_NMS_TOPK_TEST = 1000\n# NMS threshold used on RPN proposals\n_C.MODEL.RPN.NMS_THRESH = 0.7\n\n# ---------------------------------------------------------------------------- #\n# ROI HEADS options\n# ---------------------------------------------------------------------------- #\n_C.MODEL.ROI_HEADS = CN()\n_C.MODEL.ROI_HEADS.NAME = \"Res5ROIHeads\"\n# Number of foreground classes\n_C.MODEL.ROI_HEADS.NUM_CLASSES = 80\n# Names of the input feature maps to be used by ROI heads\n# Currently all heads (box, mask, ...) 
use the same input feature map list\n# e.g., [\"p2\", \"p3\", \"p4\", \"p5\"] is commonly used for FPN\n_C.MODEL.ROI_HEADS.IN_FEATURES = [\"res4\"]\n# IOU overlap ratios [IOU_THRESHOLD]\n# Overlap threshold for an RoI to be considered background (if < IOU_THRESHOLD)\n# Overlap threshold for an RoI to be considered foreground (if >= IOU_THRESHOLD)\n_C.MODEL.ROI_HEADS.IOU_THRESHOLDS = [0.5]\n_C.MODEL.ROI_HEADS.IOU_LABELS = [0, 1]\n# RoI minibatch size *per image* (number of regions of interest [ROIs])\n# Total number of RoIs per training minibatch =\n#   ROI_HEADS.BATCH_SIZE_PER_IMAGE * SOLVER.IMS_PER_BATCH\n# E.g., a common configuration is: 512 * 16 = 8192\n_C.MODEL.ROI_HEADS.BATCH_SIZE_PER_IMAGE = 512\n# Target fraction of RoI minibatch that is labeled foreground (i.e. class > 0)\n_C.MODEL.ROI_HEADS.POSITIVE_FRACTION = 0.25\n\n# Only used on test mode\n\n# Minimum score threshold (assuming scores in a [0, 1] range); a value chosen to\n# balance obtaining high recall with not having too many low precision\n# detections that will slow down inference post processing steps (like NMS)\n# A default threshold of 0.0 increases AP by ~0.2-0.3 but significantly slows down\n# inference.\n_C.MODEL.ROI_HEADS.SCORE_THRESH_TEST = 0.05\n# Overlap threshold used for non-maximum suppression (suppress boxes with\n# IoU >= this threshold)\n_C.MODEL.ROI_HEADS.NMS_THRESH_TEST = 0.5\n# If True, augment proposals with ground-truth boxes before sampling proposals to\n# train ROI heads.\n_C.MODEL.ROI_HEADS.PROPOSAL_APPEND_GT = True\n\n\n# ---------------------------------------------------------------------------- #\n# Box Head\n# ---------------------------------------------------------------------------- #\n_C.MODEL.ROI_BOX_HEAD = CN()\n# C4 don't use head name option\n# Options for non-C4 models: FastRCNNConvFCHead,\n_C.MODEL.ROI_BOX_HEAD.NAME = \"\"\n# Default weights on (dx, dy, dw, dh) for normalizing bbox regression targets\n# These are empirically chosen to approximately lead to 
unit variance targets\n_C.MODEL.ROI_BOX_HEAD.BBOX_REG_WEIGHTS = (10.0, 10.0, 5.0, 5.0)\n# The transition point from L1 to L2 loss. Set to 0.0 to make the loss simply L1.\n_C.MODEL.ROI_BOX_HEAD.SMOOTH_L1_BETA = 0.0\n_C.MODEL.ROI_BOX_HEAD.POOLER_RESOLUTION = 14\n_C.MODEL.ROI_BOX_HEAD.POOLER_SAMPLING_RATIO = 0\n# Type of pooling operation applied to the incoming feature map for each RoI\n_C.MODEL.ROI_BOX_HEAD.POOLER_TYPE = \"ROIAlignV2\"\n\n_C.MODEL.ROI_BOX_HEAD.NUM_FC = 0\n# Hidden layer dimension for FC layers in the RoI box head\n_C.MODEL.ROI_BOX_HEAD.FC_DIM = 1024\n_C.MODEL.ROI_BOX_HEAD.NUM_CONV = 0\n# Channel dimension for Conv layers in the RoI box head\n_C.MODEL.ROI_BOX_HEAD.CONV_DIM = 256\n# Normalization method for the convolution layers.\n# Options: \"\" (no norm), \"GN\", \"SyncBN\".\n_C.MODEL.ROI_BOX_HEAD.NORM = \"\"\n# Whether to use class agnostic for bbox regression\n_C.MODEL.ROI_BOX_HEAD.CLS_AGNOSTIC_BBOX_REG = False\n\n# ---------------------------------------------------------------------------- #\n# Cascaded Box Head\n# ---------------------------------------------------------------------------- #\n_C.MODEL.ROI_BOX_CASCADE_HEAD = CN()\n# The number of cascade stages is implicitly defined by the length of the following two configs.\n_C.MODEL.ROI_BOX_CASCADE_HEAD.BBOX_REG_WEIGHTS = (\n    (10.0, 10.0, 5.0, 5.0),\n    (20.0, 20.0, 10.0, 10.0),\n    (30.0, 30.0, 15.0, 15.0),\n)\n_C.MODEL.ROI_BOX_CASCADE_HEAD.IOUS = (0.5, 0.6, 0.7)\n\n\n# ---------------------------------------------------------------------------- #\n# Mask Head\n# ---------------------------------------------------------------------------- #\n_C.MODEL.ROI_MASK_HEAD = CN()\n_C.MODEL.ROI_MASK_HEAD.NAME = \"MaskRCNNConvUpsampleHead\"\n_C.MODEL.ROI_MASK_HEAD.POOLER_RESOLUTION = 14\n_C.MODEL.ROI_MASK_HEAD.POOLER_SAMPLING_RATIO = 0\n_C.MODEL.ROI_MASK_HEAD.NUM_CONV = 0  # The number of convs in the mask head\n_C.MODEL.ROI_MASK_HEAD.CONV_DIM = 256\n# Normalization method for the convolution 
layers.\n# Options: \"\" (no norm), \"GN\", \"SyncBN\".\n_C.MODEL.ROI_MASK_HEAD.NORM = \"\"\n# Whether to use class agnostic for mask prediction\n_C.MODEL.ROI_MASK_HEAD.CLS_AGNOSTIC_MASK = False\n# Type of pooling operation applied to the incoming feature map for each RoI\n_C.MODEL.ROI_MASK_HEAD.POOLER_TYPE = \"ROIAlignV2\"\n\n\n# ---------------------------------------------------------------------------- #\n# Keypoint Head\n# ---------------------------------------------------------------------------- #\n_C.MODEL.ROI_KEYPOINT_HEAD = CN()\n_C.MODEL.ROI_KEYPOINT_HEAD.NAME = \"KRCNNConvDeconvUpsampleHead\"\n_C.MODEL.ROI_KEYPOINT_HEAD.POOLER_RESOLUTION = 14\n_C.MODEL.ROI_KEYPOINT_HEAD.POOLER_SAMPLING_RATIO = 0\n_C.MODEL.ROI_KEYPOINT_HEAD.CONV_DIMS = tuple(512 for _ in range(8))\n_C.MODEL.ROI_KEYPOINT_HEAD.NUM_KEYPOINTS = 17  # 17 is the number of keypoints in COCO.\n\n# Images with too few (or no) keypoints are excluded from training.\n_C.MODEL.ROI_KEYPOINT_HEAD.MIN_KEYPOINTS_PER_IMAGE = 1\n# Normalize by the total number of visible keypoints in the minibatch if True.\n# Otherwise, normalize by the total number of keypoints that could ever exist\n# in the minibatch.\n# The keypoint softmax loss is only calculated on visible keypoints.\n# Since the number of visible keypoints can vary significantly between\n# minibatches, this has the effect of up-weighting the importance of\n# minibatches with few visible keypoints. (Imagine the extreme case of\n# only one visible keypoint versus N: in the case of N, each one\n# contributes 1/N to the gradient compared to the single keypoint\n# determining the gradient direction). Instead, we can normalize the\n# loss by the total number of keypoints, if it were the case that all\n# keypoints were visible in a full minibatch. 
(Returning to the example,\n# this means that the one visible keypoint contributes as much as each\n# of the N keypoints.)\n_C.MODEL.ROI_KEYPOINT_HEAD.NORMALIZE_LOSS_BY_VISIBLE_KEYPOINTS = True\n# Multi-task loss weight to use for keypoints\n# Recommended values:\n#   - use 1.0 if NORMALIZE_LOSS_BY_VISIBLE_KEYPOINTS is True\n#   - use 4.0 if NORMALIZE_LOSS_BY_VISIBLE_KEYPOINTS is False\n_C.MODEL.ROI_KEYPOINT_HEAD.LOSS_WEIGHT = 1.0\n# Type of pooling operation applied to the incoming feature map for each RoI\n_C.MODEL.ROI_KEYPOINT_HEAD.POOLER_TYPE = \"ROIAlignV2\"\n\n# ---------------------------------------------------------------------------- #\n# Semantic Segmentation Head\n# ---------------------------------------------------------------------------- #\n_C.MODEL.SEM_SEG_HEAD = CN()\n_C.MODEL.SEM_SEG_HEAD.NAME = \"SemSegFPNHead\"\n_C.MODEL.SEM_SEG_HEAD.IN_FEATURES = [\"p2\", \"p3\", \"p4\", \"p5\"]\n# Label in the semantic segmentation ground truth that is ignored, i.e., no loss is calculated for\n# the corresponding pixel.\n_C.MODEL.SEM_SEG_HEAD.IGNORE_VALUE = 255\n# Number of classes in the semantic segmentation head\n_C.MODEL.SEM_SEG_HEAD.NUM_CLASSES = 54\n# Number of channels in the 3x3 convs inside semantic-FPN heads.\n_C.MODEL.SEM_SEG_HEAD.CONVS_DIM = 128\n# Outputs from semantic-FPN heads are up-scaled to the COMMON_STRIDE stride.\n_C.MODEL.SEM_SEG_HEAD.COMMON_STRIDE = 4\n# Normalization method for the convolution layers. 
Options: \"\" (no norm), \"GN\".\n_C.MODEL.SEM_SEG_HEAD.NORM = \"GN\"\n_C.MODEL.SEM_SEG_HEAD.LOSS_WEIGHT = 1.0\n\n_C.MODEL.PANOPTIC_FPN = CN()\n# Scaling of all losses from instance detection / segmentation head.\n_C.MODEL.PANOPTIC_FPN.INSTANCE_LOSS_WEIGHT = 1.0\n\n# options when combining instance & semantic segmentation outputs\n_C.MODEL.PANOPTIC_FPN.COMBINE = CN({\"ENABLED\": True})\n_C.MODEL.PANOPTIC_FPN.COMBINE.OVERLAP_THRESH = 0.5\n_C.MODEL.PANOPTIC_FPN.COMBINE.STUFF_AREA_LIMIT = 4096\n_C.MODEL.PANOPTIC_FPN.COMBINE.INSTANCES_CONFIDENCE_THRESH = 0.5\n\n\n# ---------------------------------------------------------------------------- #\n# RetinaNet Head\n# ---------------------------------------------------------------------------- #\n_C.MODEL.RETINANET = CN()\n\n# This is the number of foreground classes.\n_C.MODEL.RETINANET.NUM_CLASSES = 80\n\n_C.MODEL.RETINANET.IN_FEATURES = [\"p3\", \"p4\", \"p5\", \"p6\", \"p7\"]\n\n# Convolutions to use in the cls and bbox tower\n# NOTE: this doesn't include the last conv for logits\n_C.MODEL.RETINANET.NUM_CONVS = 4\n\n# IoU overlap ratio [bg, fg] for labeling anchors.\n# Anchors with < bg are labeled negative (0)\n# Anchors  with >= bg and < fg are ignored (-1)\n# Anchors with >= fg are labeled positive (1)\n_C.MODEL.RETINANET.IOU_THRESHOLDS = [0.4, 0.5]\n_C.MODEL.RETINANET.IOU_LABELS = [0, -1, 1]\n\n# Prior prob for rare case (i.e. 
foreground) at the beginning of training.\n# This is used to set the bias for the logits layer of the classifier subnet.\n# This improves training stability in the case of heavy class imbalance.\n_C.MODEL.RETINANET.PRIOR_PROB = 0.01\n\n# Inference cls score threshold, only anchors with score > INFERENCE_TH are\n# considered for inference (to improve speed)\n_C.MODEL.RETINANET.SCORE_THRESH_TEST = 0.05\n_C.MODEL.RETINANET.TOPK_CANDIDATES_TEST = 1000\n_C.MODEL.RETINANET.NMS_THRESH_TEST = 0.5\n\n# Weights on (dx, dy, dw, dh) for normalizing Retinanet anchor regression targets\n_C.MODEL.RETINANET.BBOX_REG_WEIGHTS = (1.0, 1.0, 1.0, 1.0)\n\n# Loss parameters\n_C.MODEL.RETINANET.FOCAL_LOSS_GAMMA = 2.0\n_C.MODEL.RETINANET.FOCAL_LOSS_ALPHA = 0.25\n_C.MODEL.RETINANET.SMOOTH_L1_LOSS_BETA = 0.1\n\n\n# ---------------------------------------------------------------------------- #\n# ResNe[X]t options (ResNets = {ResNet, ResNeXt}\n# Note that parts of a resnet may be used for both the backbone and the head\n# These options apply to both\n# ---------------------------------------------------------------------------- #\n_C.MODEL.RESNETS = CN()\n\n_C.MODEL.RESNETS.DEPTH = 50\n_C.MODEL.RESNETS.OUT_FEATURES = [\"res4\"]  # res4 for C4 backbone, res2..5 for FPN backbone\n\n# Number of groups to use; 1 ==> ResNet; > 1 ==> ResNeXt\n_C.MODEL.RESNETS.NUM_GROUPS = 1\n\n# Options: FrozenBN, GN, \"SyncBN\", \"BN\"\n_C.MODEL.RESNETS.NORM = \"FrozenBN\"\n\n# Baseline width of each group.\n# Scaling this parameters will scale the width of all bottleneck layers.\n_C.MODEL.RESNETS.WIDTH_PER_GROUP = 64\n\n# Place the stride 2 conv on the 1x1 filter\n# Use True only for the original MSRA ResNet; use False for C2 and Torch models\n_C.MODEL.RESNETS.STRIDE_IN_1X1 = True\n\n# Apply dilation in stage \"res5\"\n_C.MODEL.RESNETS.RES5_DILATION = 1\n\n# Output width of res2. 
Scaling this parameters will scale the width of all 1x1 convs in ResNet\n_C.MODEL.RESNETS.RES2_OUT_CHANNELS = 256\n_C.MODEL.RESNETS.STEM_OUT_CHANNELS = 64\n\n# Apply Deformable Convolution in stages\n# Specify if apply deform_conv on Res2, Res3, Res4, Res5\n_C.MODEL.RESNETS.DEFORM_ON_PER_STAGE = [False, False, False, False]\n# Use True to use modulated deform_conv (DeformableV2, https://arxiv.org/abs/1811.11168);\n# Use False for DeformableV1.\n_C.MODEL.RESNETS.DEFORM_MODULATED = False\n# Number of groups in deformable conv.\n_C.MODEL.RESNETS.DEFORM_NUM_GROUPS = 1\n\n\n# ---------------------------------------------------------------------------- #\n# Solver\n# ---------------------------------------------------------------------------- #\n_C.SOLVER = CN()\n\n# See detectron2/solver/build.py for LR scheduler options\n_C.SOLVER.LR_SCHEDULER_NAME = \"WarmupMultiStepLR\"\n\n_C.SOLVER.MAX_ITER = 40000\n\n_C.SOLVER.BASE_LR = 0.001\n\n_C.SOLVER.MOMENTUM = 0.9\n\n_C.SOLVER.WEIGHT_DECAY = 0.0001\n# The weight decay that's applied to parameters of normalization layers\n# (typically the affine transformation)\n_C.SOLVER.WEIGHT_DECAY_NORM = 0.0\n\n_C.SOLVER.GAMMA = 0.1\n_C.SOLVER.STEPS = (30000,)\n\n_C.SOLVER.WARMUP_FACTOR = 1.0 / 1000\n_C.SOLVER.WARMUP_ITERS = 1000\n_C.SOLVER.WARMUP_METHOD = \"linear\"\n\n_C.SOLVER.CHECKPOINT_PERIOD = 5000\n\n# Number of images per batch across all machines.\n# If we have 16 GPUs and IMS_PER_BATCH = 32,\n# each GPU will see 2 images per batch.\n_C.SOLVER.IMS_PER_BATCH = 16\n\n# Detectron v1 (and previous detection code) used a 2x higher LR and 0 WD for\n# biases. This is not useful (at least for recent models). 
You should avoid\n# changing these and they exist only to reproduce Detectron v1 training if\n# desired.\n_C.SOLVER.BIAS_LR_FACTOR = 1.0\n_C.SOLVER.WEIGHT_DECAY_BIAS = _C.SOLVER.WEIGHT_DECAY\n\n# ---------------------------------------------------------------------------- #\n# Specific test options\n# ---------------------------------------------------------------------------- #\n_C.TEST = CN()\n# For end-to-end tests to verify the expected accuracy.\n# Each item is [task, metric, value, tolerance]\n# e.g.: [['bbox', 'AP', 38.5, 0.2]]\n_C.TEST.EXPECTED_RESULTS = []\n# The period (in terms of steps) to evaluate the model during training.\n# Set to 0 to disable.\n_C.TEST.EVAL_PERIOD = 0\n# The sigmas used to calculate keypoint OKS.\n# When empty it will use the defaults in COCO.\n# Otherwise it should have the same length as ROI_KEYPOINT_HEAD.NUM_KEYPOINTS.\n_C.TEST.KEYPOINT_OKS_SIGMAS = []\n# Maximum number of detections to return per image during inference (100 is\n# based on the limit established for the COCO dataset).\n_C.TEST.DETECTIONS_PER_IMAGE = 100\n\n_C.TEST.AUG = CN({\"ENABLED\": False})\n_C.TEST.AUG.MIN_SIZES = (400, 500, 600, 700, 800, 900, 1000, 1100, 1200)\n_C.TEST.AUG.MAX_SIZE = 4000\n_C.TEST.AUG.FLIP = True\n\n_C.TEST.PRECISE_BN = CN({\"ENABLED\": False})\n_C.TEST.PRECISE_BN.NUM_ITER = 200\n\n# ---------------------------------------------------------------------------- #\n# Misc options\n# ---------------------------------------------------------------------------- #\n# Directory where output files are written\n_C.OUTPUT_DIR = \"./output\"\n# Set seed to negative to fully randomize everything.\n# Set seed to positive to use a fixed seed. Note that a fixed seed does not\n# guarantee fully deterministic behavior.\n_C.SEED = -1\n# Benchmark different cudnn algorithms. It has large overhead for about 10k\n# iterations. 
It usually hurts total time, but can benefit for certain models.\n_C.CUDNN_BENCHMARK = False\n\n# global config is for quick hack purposes.\n# You can set them in command line or config files,\n# and access it with:\n#\n# from detectron2.config import global_cfg\n# print(global_cfg.HACK)\n#\n# Do not commit any configs into it.\n_C.GLOBAL = CN()\n_C.GLOBAL.HACK = 1.0\n"
  },
  {
    "path": "detectron2/data/__init__.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nfrom . import transforms  # isort:skip\n\nfrom .build import (\n    build_detection_test_loader,\n    build_detection_train_loader,\n    get_detection_dataset_dicts,\n    load_proposals_into_dataset,\n    print_instances_class_histogram,\n)\nfrom .catalog import DatasetCatalog, MetadataCatalog\nfrom .common import DatasetFromList, MapDataset\nfrom .dataset_mapper import DatasetMapper\n\n# ensure the builtin datasets are registered\nfrom . import datasets, samplers  # isort:skip\n\n__all__ = [k for k in globals().keys() if not k.startswith(\"_\")]\n"
  },
  {
    "path": "detectron2/data/build.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport bisect\nimport copy\nimport itertools\nimport logging\nimport numpy as np\nimport pickle\nimport torch.utils.data\nfrom fvcore.common.file_io import PathManager\nfrom tabulate import tabulate\nfrom termcolor import colored\n\nfrom detectron2.structures import BoxMode\nfrom detectron2.utils.comm import get_world_size\nfrom detectron2.utils.env import seed_all_rng\nfrom detectron2.utils.logger import log_first_n\n\nfrom . import samplers\nfrom .catalog import DatasetCatalog, MetadataCatalog\nfrom .common import DatasetFromList, MapDataset\nfrom .dataset_mapper import DatasetMapper\nfrom .detection_utils import check_metadata_consistency\n\n\"\"\"\nThis file contains the default logic to build a dataloader for training or testing.\n\"\"\"\n\n__all__ = [\n    \"build_detection_train_loader\",\n    \"build_detection_test_loader\",\n    \"get_detection_dataset_dicts\",\n    \"load_proposals_into_dataset\",\n    \"print_instances_class_histogram\",\n]\n\n\ndef filter_images_with_only_crowd_annotations(dataset_dicts):\n    \"\"\"\n    Filter out images with none annotations or only crowd annotations\n    (i.e., images without non-crowd annotations).\n    A common training-time preprocessing on COCO dataset.\n\n    Args:\n        dataset_dicts (list[dict]): annotations in Detectron2 Dataset format.\n\n    Returns:\n        list[dict]: the same format, but filtered.\n    \"\"\"\n    num_before = len(dataset_dicts)\n\n    def valid(anns):\n        for ann in anns:\n            if ann.get(\"iscrowd\", 0) == 0:\n                return True\n        return False\n\n    dataset_dicts = [x for x in dataset_dicts if valid(x[\"annotations\"])]\n    num_after = len(dataset_dicts)\n    logger = logging.getLogger(__name__)\n    logger.info(\n        \"Removed {} images with no usable annotations. 
{} images left.\".format(\n            num_before - num_after, num_after\n        )\n    )\n    return dataset_dicts\n\n\ndef filter_images_with_few_keypoints(dataset_dicts, min_keypoints_per_image):\n    \"\"\"\n    Filter out images with too few number of keypoints.\n\n    Args:\n        dataset_dicts (list[dict]): annotations in Detectron2 Dataset format.\n\n    Returns:\n        list[dict]: the same format as dataset_dicts, but filtered.\n    \"\"\"\n    num_before = len(dataset_dicts)\n\n    def visible_keypoints_in_image(dic):\n        # Each keypoints field has the format [x1, y1, v1, ...], where v is visibility\n        annotations = dic[\"association_anno\"]\n        return sum(\n            (np.array(ann[\"keypoints\"][2::3]) > 0).sum()\n            for ann in annotations\n            if \"keypoints\" in ann\n        )\n\n    dataset_dicts = [\n        x for x in dataset_dicts if visible_keypoints_in_image(x) >= min_keypoints_per_image\n    ]\n    num_after = len(dataset_dicts)\n    logger = logging.getLogger(__name__)\n    logger.info(\n        \"Removed {} images with fewer than {} keypoints.\".format(\n            num_before - num_after, min_keypoints_per_image\n        )\n    )\n    return dataset_dicts\n\n\ndef load_proposals_into_dataset(dataset_dicts, proposal_file):\n    \"\"\"\n    Load precomputed object proposals into the dataset.\n\n    Args:\n        dataset_dicts (list[dict]): annotations in Detectron2 Dataset format.\n        proposal_file (str): file path of pre-computed proposals, in pkl format.\n\n    Returns:\n        list[dict]: the same format as dataset_dicts, but added proposal field.\n    \"\"\"\n    logger = logging.getLogger(__name__)\n    logger.info(\"Loading proposals from: {}\".format(proposal_file))\n\n    with PathManager.open(proposal_file, \"rb\") as f:\n        proposals = pickle.load(f, encoding=\"latin1\")\n\n    # Rename the key names in D1 proposal files\n    rename_keys = {\"indexes\": \"ids\", \"scores\": 
\"objectness_logits\"}\n    for key in rename_keys:\n        if key in proposals:\n            proposals[rename_keys[key]] = proposals.pop(key)\n\n    # Remove proposals whose ids are not in dataset\n    img_ids = set({entry[\"image_id\"] for entry in dataset_dicts})\n    keep = [i for i, id in enumerate(proposals[\"ids\"]) if id in img_ids]\n    # Sort proposals by ids following the image order in dataset\n    keep = sorted(keep)\n    for key in [\"boxes\", \"ids\", \"objectness_logits\"]:\n        proposals[key] = [proposals[key][i] for i in keep]\n    # Assuming default bbox_mode of precomputed proposals are 'XYXY_ABS'\n    bbox_mode = BoxMode(proposals[\"bbox_mode\"]) if \"bbox_mode\" in proposals else BoxMode.XYXY_ABS\n\n    for i, record in enumerate(dataset_dicts):\n        # Sanity check that these proposals are for the correct image id\n        assert record[\"image_id\"] == proposals[\"ids\"][i]\n\n        boxes = proposals[\"boxes\"][i]\n        objectness_logits = proposals[\"objectness_logits\"][i]\n        # Sort the proposals in descending order of the scores\n        inds = objectness_logits.argsort()[::-1]\n        record[\"proposal_boxes\"] = boxes[inds]\n        record[\"proposal_objectness_logits\"] = objectness_logits[inds]\n        record[\"proposal_bbox_mode\"] = bbox_mode\n        record[\"proposal_light\"] = proposals['light'][i][inds]\n\n    return dataset_dicts\n\n\ndef _quantize(x, bin_edges):\n    bin_edges = copy.copy(bin_edges)\n    bin_edges = sorted(bin_edges)\n    quantized = list(map(lambda y: bisect.bisect_right(bin_edges, y), x))\n    return quantized\n\n\ndef print_instances_class_histogram(dataset_dicts, class_names):\n    \"\"\"\n    Args:\n        dataset_dicts (list[dict]): list of dataset dicts.\n        class_names (list[str]): list of class names (zero-indexed).\n    \"\"\"\n    num_classes = len(class_names)\n    hist_bins = np.arange(num_classes + 1)\n    histogram = np.zeros((num_classes,), dtype=np.int)\n    for 
entry in dataset_dicts:\n        annos = entry[\"annotations\"]\n        classes = [x[\"category_id\"] for x in annos if not x.get(\"iscrowd\", 0)]\n        histogram += np.histogram(classes, bins=hist_bins)[0]\n\n    N_COLS = min(6, len(class_names) * 2)\n\n    def short_name(x):\n        # make long class names shorter. useful for lvis\n        if len(x) > 13:\n            return x[:11] + \"..\"\n        return x\n\n    data = list(\n        itertools.chain(*[[short_name(class_names[i]), int(v)] for i, v in enumerate(histogram)])\n    )\n    total_num_instances = sum(data[1::2])\n    data.extend([None] * (N_COLS - (len(data) % N_COLS)))\n    if num_classes > 1:\n        data.extend([\"total\", total_num_instances])\n    data = itertools.zip_longest(*[data[i::N_COLS] for i in range(N_COLS)])\n    table = tabulate(\n        data,\n        headers=[\"category\", \"#instances\"] * (N_COLS // 2),\n        tablefmt=\"pipe\",\n        numalign=\"left\",\n        stralign=\"center\",\n    )\n    log_first_n(\n        logging.INFO,\n        \"Distribution of training instances among all {} categories:\\n\".format(num_classes)\n        + colored(table, \"cyan\"),\n        key=\"message\",\n    )\n\n\ndef build_batch_data_sampler(\n    sampler, images_per_batch, group_bin_edges=None, grouping_features=None\n):\n    \"\"\"\n    Return a dataset index sampler that batches dataset indices possibly with\n    grouping to improve training efficiency.\n\n    Args:\n        sampler (torch.utils.data.sampler.Sampler): any subclass of\n            :class:`torch.utils.data.sampler.Sampler`.\n        images_per_batch (int): the batch size. 
Note that the sampler may return\n            batches that have between 1 and images_per_batch (inclusive) elements\n            because the underlying index set (and grouping partitions, if grouping\n            is used) may not be divisible by images_per_batch.\n        group_bin_edges (None, list[number], tuple[number]): If None, then grouping\n            is disabled. If a list or tuple is given, the values are used as bin\n            edges for defining len(group_bin_edges) + 1 groups. When batches are\n            sampled, only elements from the same group are returned together.\n        grouping_features (None, list[number], tuple[number]): If None, then grouping\n            is disabled. If a list or tuple is given, it must specify for each index\n            in the underlying dataset the value to be used for placing that dataset\n            index into one of the grouping bins.\n\n    Returns:\n        A BatchSampler or subclass of BatchSampler.\n    \"\"\"\n    if group_bin_edges and grouping_features:\n        assert isinstance(group_bin_edges, (list, tuple))\n        assert isinstance(grouping_features, (list, tuple))\n        group_ids = _quantize(grouping_features, group_bin_edges)\n        batch_sampler = samplers.GroupedBatchSampler(sampler, group_ids, images_per_batch)\n    else:\n        batch_sampler = torch.utils.data.sampler.BatchSampler(\n            sampler, images_per_batch, drop_last=True\n        )  # drop last so the batch always have the same size\n        # NOTE when we add batch inference support, make sure not to use this.\n    return batch_sampler\n\n\ndef get_detection_dataset_dicts(\n    dataset_names, filter_empty=True, min_keypoints=0, proposal_files=None\n):\n    \"\"\"\n    Load and prepare dataset dicts for instance detection/segmentation and semantic segmentation.\n\n    Args:\n        dataset_names (list[str]): a list of dataset names\n        filter_empty (bool): whether to filter out images without instance annotations\n   
     min_keypoints (int): filter out images with fewer keypoints than\n            `min_keypoints`. Set to 0 to do nothing.\n        proposal_files (list[str]): if given, a list of object proposal files\n            that match each dataset in `dataset_names`.\n    \"\"\"\n    assert len(dataset_names)\n    dataset_dicts = [DatasetCatalog.get(dataset_name) for dataset_name in dataset_names]\n\n    if proposal_files is not None:\n        assert len(dataset_names) == len(proposal_files)\n        # load precomputed proposals from proposal files\n        dataset_dicts = [\n            load_proposals_into_dataset(dataset_i_dicts, proposal_file)\n            for dataset_i_dicts, proposal_file in zip(dataset_dicts, proposal_files)\n        ]\n\n    dataset_dicts = list(itertools.chain.from_iterable(dataset_dicts))\n\n    has_instances = \"annotations\" in dataset_dicts[0]\n    # Keep images without instance-level GT if the dataset has semantic labels.\n    if filter_empty and has_instances and \"sem_seg_file_name\" not in dataset_dicts[0]:\n        dataset_dicts = filter_images_with_only_crowd_annotations(dataset_dicts)\n\n    if min_keypoints > 0 and has_instances:\n        dataset_dicts = filter_images_with_few_keypoints(dataset_dicts, min_keypoints)\n\n    if has_instances:\n        try:\n            class_names = MetadataCatalog.get(dataset_names[0]).thing_classes\n            check_metadata_consistency(\"thing_classes\", dataset_names)\n            print_instances_class_histogram(dataset_dicts, class_names)\n        except AttributeError:  # class names are not available for this dataset\n            pass\n    return dataset_dicts\n\n\ndef build_detection_train_loader(cfg, mapper=None):\n    \"\"\"\n    A data loader is created by the following steps:\n\n    1. Use the dataset names in config to query :class:`DatasetCatalog`, and obtain a list of dicts.\n    2. Start workers to work on the dicts. 
Each worker will:\n      * Map each metadata dict into another format to be consumed by the model.\n      * Batch them by simply putting dicts into a list.\n    The batched ``list[mapped_dict]`` is what this dataloader will return.\n\n    Args:\n        cfg (CfgNode): the config\n        mapper (callable): a callable which takes a sample (dict) from dataset and\n            returns the format to be consumed by the model.\n            By default it will be `DatasetMapper(cfg, True)`.\n\n    Returns:\n        a torch DataLoader object\n    \"\"\"\n    num_workers = get_world_size()\n    images_per_batch = cfg.SOLVER.IMS_PER_BATCH\n    assert (\n        images_per_batch % num_workers == 0\n    ), \"SOLVER.IMS_PER_BATCH ({}) must be divisible by the number of workers ({}).\".format(\n        images_per_batch, num_workers\n    )\n    assert (\n        images_per_batch >= num_workers\n    ), \"SOLVER.IMS_PER_BATCH ({}) must be larger than the number of workers ({}).\".format(\n        images_per_batch, num_workers\n    )\n    images_per_worker = images_per_batch // num_workers\n\n    dataset_dicts = get_detection_dataset_dicts(\n        cfg.DATASETS.TRAIN,\n        filter_empty=True,\n        min_keypoints=cfg.MODEL.ROI_KEYPOINT_HEAD.MIN_KEYPOINTS_PER_IMAGE\n        if cfg.MODEL.KEYPOINT_ON\n        else 0,\n        proposal_files=cfg.DATASETS.PROPOSAL_FILES_TRAIN if cfg.MODEL.LOAD_PROPOSALS else None,\n    )\n    # print(dataset_dicts[0])\n    # assert 1 ==0\n    dataset = DatasetFromList(dataset_dicts, copy=False)\n\n    # Bin edges for batching images with similar aspect ratios. 
If ASPECT_RATIO_GROUPING\n    # is enabled, we define two bins with an edge at height / width = 1.\n    group_bin_edges = [1] if cfg.DATALOADER.ASPECT_RATIO_GROUPING else []\n    aspect_ratios = [float(img[\"height\"]) / float(img[\"width\"]) for img in dataset]\n\n    if mapper is None:\n        mapper = DatasetMapper(cfg, True)\n    dataset = MapDataset(dataset, mapper)\n\n    sampler_name = cfg.DATALOADER.SAMPLER_TRAIN\n    logger = logging.getLogger(__name__)\n    logger.info(\"Using training sampler {}\".format(sampler_name))\n    if sampler_name == \"TrainingSampler\":\n        sampler = samplers.TrainingSampler(len(dataset))\n    elif sampler_name == \"RepeatFactorTrainingSampler\":\n        sampler = samplers.RepeatFactorTrainingSampler(\n            dataset_dicts, cfg.DATALOADER.REPEAT_THRESHOLD\n        )\n    else:\n        raise ValueError(\"Unknown training sampler: {}\".format(sampler_name))\n    batch_sampler = build_batch_data_sampler(\n        sampler, images_per_worker, group_bin_edges, aspect_ratios\n    )\n\n    data_loader = torch.utils.data.DataLoader(\n        dataset,\n        num_workers=cfg.DATALOADER.NUM_WORKERS,\n        batch_sampler=batch_sampler,\n        collate_fn=trivial_batch_collator,\n        worker_init_fn=worker_init_reset_seed,\n    )\n    return data_loader\n\n\ndef build_detection_test_loader(cfg, dataset_name, mapper=None):\n    \"\"\"\n    Similar to `build_detection_train_loader`.\n    But this function uses the given `dataset_name` argument (instead of the names in cfg),\n    and uses batch size 1.\n\n    Args:\n        cfg: a detectron2 CfgNode\n        dataset_name (str): a name of the dataset that's available in the DatasetCatalog\n        mapper (callable): a callable which takes a sample (dict) from dataset\n           and returns the format to be consumed by the model.\n           By default it will be `DatasetMapper(cfg, False)`.\n\n    Returns:\n        DataLoader: a torch DataLoader, that loads the given 
detection\n        dataset, with test-time transformation and batching.\n    \"\"\"\n    dataset_dicts = get_detection_dataset_dicts(\n        [dataset_name],\n        filter_empty=False,\n        proposal_files=[\n            cfg.DATASETS.PROPOSAL_FILES_TEST[list(cfg.DATASETS.TEST).index(dataset_name)]\n        ]\n        if cfg.MODEL.LOAD_PROPOSALS\n        else None,\n    )\n\n    dataset = DatasetFromList(dataset_dicts)\n    if mapper is None:\n        mapper = DatasetMapper(cfg, False)\n    dataset = MapDataset(dataset, mapper)\n\n    sampler = samplers.InferenceSampler(len(dataset))\n    # Always use 1 image per worker during inference since this is the\n    # standard when reporting inference time in papers.\n    batch_sampler = torch.utils.data.sampler.BatchSampler(sampler, 1, drop_last=False)\n\n    data_loader = torch.utils.data.DataLoader(\n        dataset,\n        num_workers=cfg.DATALOADER.NUM_WORKERS,\n        batch_sampler=batch_sampler,\n        collate_fn=trivial_batch_collator,\n    )\n    return data_loader\n\n\ndef trivial_batch_collator(batch):\n    \"\"\"\n    A batch collator that does nothing.\n    \"\"\"\n    return batch\n\n\ndef worker_init_reset_seed(worker_id):\n    seed_all_rng(np.random.randint(2 ** 31) + worker_id)\n"
  },
  {
    "path": "detectron2/data/catalog.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport copy\nimport logging\nimport types\nfrom typing import List\n\nfrom detectron2.utils.logger import log_first_n\n\n__all__ = [\"DatasetCatalog\", \"MetadataCatalog\"]\n\n\nclass DatasetCatalog(object):\n    \"\"\"\n    A catalog that stores information about the datasets and how to obtain them.\n\n    It contains a mapping from strings\n    (which are names that identify a dataset, e.g. \"coco_2014_train\")\n    to a function which parses the dataset and returns the samples in the\n    format of `list[dict]`.\n\n    The returned dicts should be in Detectron2 Dataset format (See DATASETS.md for details)\n    if used with the data loader functionalities in `data/build.py,data/detection_transform.py`.\n\n    The purpose of having this catalog is to make it easy to choose\n    different datasets, by just using the strings in the config.\n    \"\"\"\n\n    _REGISTERED = {}\n\n    @staticmethod\n    def register(name, func):\n        \"\"\"\n        Args:\n            name (str): the name that identifies a dataset, e.g. \"coco_2014_train\".\n            func (callable): a callable which takes no arguments and returns a list of dicts.\n        \"\"\"\n        DatasetCatalog._REGISTERED[name] = func\n\n    @staticmethod\n    def get(name):\n        \"\"\"\n        Call the registered function and return its results.\n\n        Args:\n            name (str): the name that identifies a dataset, e.g. \"coco_2014_train\".\n\n        Returns:\n            list[dict]: dataset annotations.0\n        \"\"\"\n        try:\n            f = DatasetCatalog._REGISTERED[name]\n        except KeyError:\n            raise KeyError(\n                \"Dataset '{}' is not registered! 
Available datasets are: {}\".format(\n                    name, \", \".join(DatasetCatalog._REGISTERED.keys())\n                )\n            )\n        return f()\n\n    @staticmethod\n    def list() -> List[str]:\n        \"\"\"\n        List all registered datasets.\n\n        Returns:\n            list[str]\n        \"\"\"\n        return list(DatasetCatalog._REGISTERED.keys())\n\n    @staticmethod\n    def clear():\n        \"\"\"\n        Remove all registered dataset.\n        \"\"\"\n        DatasetCatalog._REGISTERED.clear()\n\n\nclass Metadata(types.SimpleNamespace):\n    \"\"\"\n    A class that supports simple attribute setter/getter.\n    It is intended for storing metadata of a dataset and make it accessible globally.\n\n    Examples:\n\n    .. code-block:: python\n\n        # somewhere when you load the data:\n        MetadataCatalog.get(\"mydataset\").thing_classes = [\"person\", \"dog\"]\n\n        # somewhere when you print statistics or visualize:\n        classes = MetadataCatalog.get(\"mydataset\").thing_classes\n    \"\"\"\n\n    # the name of the dataset\n    # set default to N/A so that `self.name` in the errors will not trigger getattr again\n    name: str = \"N/A\"\n\n    _RENAMED = {\n        \"class_names\": \"thing_classes\",\n        \"dataset_id_to_contiguous_id\": \"thing_dataset_id_to_contiguous_id\",\n        \"stuff_class_names\": \"stuff_classes\",\n    }\n\n    def __getattr__(self, key):\n        if key in self._RENAMED:\n            log_first_n(\n                logging.WARNING,\n                \"Metadata '{}' was renamed to '{}'!\".format(key, self._RENAMED[key]),\n                n=10,\n            )\n            return getattr(self, self._RENAMED[key])\n\n        raise AttributeError(\n            \"Attribute '{}' does not exist in the metadata of '{}'. 
Available keys are {}.\".format(\n                key, self.name, str(self.__dict__.keys())\n            )\n        )\n\n    def __setattr__(self, key, val):\n        if key in self._RENAMED:\n            log_first_n(\n                logging.WARNING,\n                \"Metadata '{}' was renamed to '{}'!\".format(key, self._RENAMED[key]),\n                n=10,\n            )\n            setattr(self, self._RENAMED[key], val)\n\n        # Ensure that metadata of the same name stays consistent\n        try:\n            oldval = getattr(self, key)\n            assert oldval == val, (\n                \"Attribute '{}' in the metadata of '{}' cannot be set \"\n                \"to a different value!\\n{} != {}\".format(key, self.name, oldval, val)\n            )\n        except AttributeError:\n            super().__setattr__(key, val)\n\n    def as_dict(self):\n        \"\"\"\n        Returns all the metadata as a dict.\n        Note that modifications to the returned dict will not reflect on the Metadata object.\n        \"\"\"\n        return copy.copy(self.__dict__)\n\n    def set(self, **kwargs):\n        \"\"\"\n        Set multiple metadata with kwargs.\n        \"\"\"\n        for k, v in kwargs.items():\n            setattr(self, k, v)\n        return self\n\n    def get(self, key, default=None):\n        \"\"\"\n        Access an attribute and return its value if exists.\n        Otherwise return default.\n        \"\"\"\n        try:\n            return getattr(self, key)\n        except AttributeError:\n            return default\n\n\nclass MetadataCatalog:\n    \"\"\"\n    MetadataCatalog provides access to \"Metadata\" of a given dataset.\n\n    The metadata associated with a certain name is a singleton: once created,\n    the metadata will stay alive and will be returned by future calls to `get(name)`.\n\n    It's like global variables, so don't abuse it.\n    It's meant for storing knowledge that's constant and shared across the execution\n    of the 
program, e.g.: the class names in COCO.\n    \"\"\"\n\n    _NAME_TO_META = {}\n\n    @staticmethod\n    def get(name):\n        \"\"\"\n        Args:\n            name (str): name of a dataset (e.g. coco_2014_train).\n\n        Returns:\n            Metadata: The :class:`Metadata` instance associated with this name,\n            or create an empty one if none is available.\n        \"\"\"\n        assert len(name)\n        if name in MetadataCatalog._NAME_TO_META:\n            ret = MetadataCatalog._NAME_TO_META[name]\n            # TODO this is for the BC breaking change in D15247032.\n            # Remove this in the future.\n            if hasattr(ret, \"dataset_name\"):\n                logger = logging.getLogger()\n                logger.warning(\n                    \"\"\"\nThe 'dataset_name' key in metadata is no longer used for\nsharing metadata among splits after D15247032! Add\nmetadata to each split (now called dataset) separately!\n                    \"\"\"\n                )\n                parent_meta = MetadataCatalog.get(ret.dataset_name).as_dict()\n                ret.set(**parent_meta)\n            return ret\n        else:\n            m = MetadataCatalog._NAME_TO_META[name] = Metadata(name=name)\n            return m\n"
  },
  {
    "path": "detectron2/data/common.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport copy\nimport logging\nimport random\nimport torch.utils.data as data\n\nfrom detectron2.utils.serialize import PicklableWrapper\n\n__all__ = [\"MapDataset\", \"DatasetFromList\"]\n\n\nclass MapDataset(data.Dataset):\n    \"\"\"\n    Map a function over the elements in a dataset.\n\n    Args:\n        dataset: a dataset where map function is applied.\n        map_func: a callable which maps the element in dataset. map_func is\n            responsible for error handling, when error happens, it needs to\n            return None so the MapDataset will randomly use other\n            elements from the dataset.\n    \"\"\"\n\n    def __init__(self, dataset, map_func):\n        self._dataset = dataset\n        self._map_func = PicklableWrapper(map_func)  # wrap so that a lambda will work\n\n        self._rng = random.Random(42)\n        self._fallback_candidates = set(range(len(dataset)))\n\n    def __len__(self):\n        return len(self._dataset)\n\n    def __getitem__(self, idx):\n        retry_count = 0\n        cur_idx = int(idx)\n\n        while True:\n            data = self._map_func(self._dataset[cur_idx])\n            if data is not None:\n                self._fallback_candidates.add(cur_idx)\n                return data\n\n            # _map_func fails for this idx, use a random new index from the pool\n            retry_count += 1\n            self._fallback_candidates.discard(cur_idx)\n            cur_idx = self._rng.sample(self._fallback_candidates, k=1)[0]\n\n            if retry_count >= 3:\n                logger = logging.getLogger(__name__)\n                logger.warning(\n                    \"Failed to apply `_map_func` for idx: {}, retry count: {}\".format(\n                        idx, retry_count\n                    )\n                )\n\n\nclass DatasetFromList(data.Dataset):\n    \"\"\"\n    Wrap a list to a torch Dataset. 
It produces elements of the list as data.\n    \"\"\"\n\n    def __init__(self, lst: list, copy: bool = True):\n        \"\"\"\n        Args:\n            lst (list): a list which contains elements to produce.\n            copy (bool): whether to deepcopy the element when producing it,\n                so that the result can be modified in place without affecting the\n                source in the list.\n        \"\"\"\n        self._lst = lst\n        self._copy = copy\n\n    def __len__(self):\n        return len(self._lst)\n\n    def __getitem__(self, idx):\n        if self._copy:\n            return copy.deepcopy(self._lst[idx])\n        else:\n            return self._lst[idx]\n"
  },
  {
    "path": "detectron2/data/dataset_mapper.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport copy\nimport numpy as np\nimport torch\nfrom fvcore.common.file_io import PathManager\nfrom PIL import Image\n\nfrom . import detection_utils as utils\nfrom . import transforms as T\n\n\"\"\"\nThis file contains the default mapping that's applied to \"dataset dicts\".\n\"\"\"\n\n__all__ = [\"DatasetMapper\"]\n\n\nclass DatasetMapper:\n    \"\"\"\n    A callable which takes a dataset dict in Detectron2 Dataset format,\n    and map it into a format used by the model.\n\n    This is the default callable to be used to map your dataset dict into training data.\n    You may need to follow it to implement your own one for customized logic.\n\n    The callable currently does the following:\n    1. Read the image from \"file_name\"\n    2. Applies cropping/geometric transforms to the image and annotations\n    3. Prepare data and annotations to Tensor and :class:`Instances`\n    \"\"\"\n\n    def __init__(self, cfg, is_train=True):\n        self.tfm_gens = utils.build_transform_gen(cfg, is_train)\n\n        if cfg.INPUT.CROP.ENABLED and is_train:\n            self.crop_gen = T.RandomCrop(cfg.INPUT.CROP.TYPE, cfg.INPUT.CROP.SIZE)\n        else:\n            self.crop_gen = None\n\n        # fmt: off\n        self.img_format     = cfg.INPUT.FORMAT\n        self.mask_on        = cfg.MODEL.MASK_ON\n        self.mask_format    = cfg.INPUT.MASK_FORMAT\n        self.keypoint_on    = cfg.MODEL.KEYPOINT_ON\n        self.load_proposals = cfg.MODEL.LOAD_PROPOSALS\n        # fmt: on\n        if self.keypoint_on and is_train:\n            # Flip only makes sense in training\n            self.keypoint_hflip_indices = utils.create_keypoint_hflip_indices(cfg.DATASETS.TRAIN)\n        else:\n            self.keypoint_hflip_indices = None\n\n        if self.load_proposals:\n            self.min_box_side_len = cfg.MODEL.PROPOSAL_GENERATOR.MIN_SIZE\n            self.proposal_topk = (\n                
cfg.DATASETS.PRECOMPUTED_PROPOSAL_TOPK_TRAIN\n                if is_train\n                else cfg.DATASETS.PRECOMPUTED_PROPOSAL_TOPK_TEST\n            )\n        self.is_train = is_train\n\n    def __call__(self, dataset_dict):\n        \"\"\"\n        Args:\n            dataset_dict (dict): Metadata of one image, in Detectron2 Dataset format.\n\n        Returns:\n            dict: a format that builtin models in detectron2 accept\n        \"\"\"\n        dataset_dict = copy.deepcopy(dataset_dict)  # it will be modified by code below\n        # USER: Write your own image loading if it's not from a file\n        image = utils.read_image(dataset_dict[\"file_name\"], format=self.img_format)\n        utils.check_image_size(dataset_dict, image)\n        \n        if \"annotations\" not in dataset_dict:\n            image, transforms = T.apply_transform_gens(\n                ([self.crop_gen] if self.crop_gen else []) + self.tfm_gens, image\n            )\n        else:\n            # Crop around an instance if there are instances in the image.\n            # USER: Remove if you don't use cropping\n            if self.crop_gen:\n                crop_tfm = utils.gen_crop_transform_with_instance(\n                    self.crop_gen.get_crop_size(image.shape[:2]),\n                    image.shape[:2],\n                    np.random.choice(dataset_dict[\"annotations\"]),\n                )\n                image = crop_tfm.apply_image(image)\n            image, transforms = T.apply_transform_gens(self.tfm_gens, image)\n            if self.crop_gen:\n                transforms = crop_tfm + transforms\n\n        image_shape = image.shape[:2]  # h, w\n\n        # Pytorch's dataloader is efficient on torch.Tensor due to shared-memory,\n        # but not efficient on large generic data structures due to the use of pickle & mp.Queue.\n        # Therefore it's important to use torch.Tensor.\n        dataset_dict[\"image\"] = torch.as_tensor(image.transpose(2, 0, 
1).astype(\"float32\"))\n        # Can use uint8 if it turns out to be slow some day\n\n        # USER: Remove if you don't use pre-computed proposals.\n        if self.load_proposals:\n            utils.transform_proposals(\n                dataset_dict, image_shape, transforms, self.min_box_side_len, self.proposal_topk\n            )\n\n        if not self.is_train:\n            dataset_dict.pop(\"annotations\", None)\n            dataset_dict.pop(\"sem_seg_file_name\", None)\n            return dataset_dict\n\n        if \"annotations\" in dataset_dict:\n            # USER: Modify this if you want to keep them for some reason.\n            for anno in dataset_dict[\"annotations\"]:\n                if not self.mask_on:\n                    anno.pop(\"segmentation\", None)\n                if not self.keypoint_on:\n                    anno.pop(\"keypoints\", None)\n\n            # USER: Implement additional transformations if you have other types of data\n            annos = [\n                utils.transform_instance_annotations(\n                    obj, transforms, image_shape, keypoint_hflip_indices=self.keypoint_hflip_indices\n                )\n                for obj in dataset_dict.pop(\"annotations\")\n                if obj.get(\"iscrowd\", 0) == 0\n            ]\n            rela_annos = [\n                utils.transform_instance_annotations(\n                    obj, transforms, image_shape, keypoint_hflip_indices=self.keypoint_hflip_indices\n                )\n                for obj in dataset_dict.pop(\"association_anno\")\n                if obj.get(\"iscrowd\", 0) == 0\n            ]\n            instances = utils.annotations_to_instances(\n                annos, image_shape, mask_format=\"bitmask\"\n            )\n\n            associations = utils.annotations_to_instances(\n                rela_annos, image_shape, mask_format=\"bitmask\"\n            )\n\n            # Create a tight bounding box from masks, useful when image is cropped\n      
      if self.crop_gen and instances.has(\"gt_masks\"):\n                instances.gt_boxes = instances.gt_masks.get_bounding_boxes()\n            dataset_dict[\"instances\"] = utils.filter_empty_instances(instances)\n            dataset_dict[\"associations\"] = utils.filter_empty_instances(associations)\n        # USER: Remove if you don't do semantic/panoptic segmentation.\n        if \"sem_seg_file_name\" in dataset_dict:\n            with PathManager.open(dataset_dict.pop(\"sem_seg_file_name\"), \"rb\") as f:\n                sem_seg_gt = Image.open(f)\n                sem_seg_gt = np.asarray(sem_seg_gt, dtype=\"uint8\")\n            sem_seg_gt = transforms.apply_segmentation(sem_seg_gt)\n            sem_seg_gt = torch.as_tensor(sem_seg_gt.astype(\"long\"))\n            dataset_dict[\"sem_seg\"] = sem_seg_gt\n        return dataset_dict\n"
  },
  {
    "path": "detectron2/data/dataset_mapper.py.bak",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport copy\nimport numpy as np\nimport torch\nfrom fvcore.common.file_io import PathManager\nfrom PIL import Image\n\nfrom . import detection_utils as utils\nfrom . import transforms as T\n\n\"\"\"\nThis file contains the default mapping that's applied to \"dataset dicts\".\n\"\"\"\n\n__all__ = [\"DatasetMapper\"]\n\n\nclass DatasetMapper:\n    \"\"\"\n    A callable which takes a dataset dict in Detectron2 Dataset format,\n    and map it into a format used by the model.\n\n    This is the default callable to be used to map your dataset dict into training data.\n    You may need to follow it to implement your own one for customized logic.\n\n    The callable currently does the following:\n    1. Read the image from \"file_name\"\n    2. Applies cropping/geometric transforms to the image and annotations\n    3. Prepare data and annotations to Tensor and :class:`Instances`\n    \"\"\"\n\n    def __init__(self, cfg, is_train=True):\n        self.tfm_gens = utils.build_transform_gen(cfg, is_train)\n\n        if cfg.INPUT.CROP.ENABLED and is_train:\n            self.crop_gen = T.RandomCrop(cfg.INPUT.CROP.TYPE, cfg.INPUT.CROP.SIZE)\n        else:\n            self.crop_gen = None\n\n        # fmt: off\n        self.img_format     = cfg.INPUT.FORMAT\n        self.mask_on        = cfg.MODEL.MASK_ON\n        self.mask_format    = cfg.INPUT.MASK_FORMAT\n        self.keypoint_on    = cfg.MODEL.KEYPOINT_ON\n        self.load_proposals = cfg.MODEL.LOAD_PROPOSALS\n        # fmt: on\n        if self.keypoint_on and is_train:\n            # Flip only makes sense in training\n            self.keypoint_hflip_indices = utils.create_keypoint_hflip_indices(cfg.DATASETS.TRAIN)\n        else:\n            self.keypoint_hflip_indices = None\n\n        if self.load_proposals:\n            self.min_box_side_len = cfg.MODEL.PROPOSAL_GENERATOR.MIN_SIZE\n            self.proposal_topk = (\n                
cfg.DATASETS.PRECOMPUTED_PROPOSAL_TOPK_TRAIN\n                if is_train\n                else cfg.DATASETS.PRECOMPUTED_PROPOSAL_TOPK_TEST\n            )\n        self.is_train = is_train\n\n    def __call__(self, dataset_dict):\n        \"\"\"\n        Args:\n            dataset_dict (dict): Metadata of one image, in Detectron2 Dataset format.\n\n        Returns:\n            dict: a format that builtin models in detectron2 accept\n        \"\"\"\n        dataset_dict = copy.deepcopy(dataset_dict)  # it will be modified by code below\n        # USER: Write your own image loading if it's not from a file\n        image = utils.read_image(dataset_dict[\"file_name\"], format=self.img_format)\n        utils.check_image_size(dataset_dict, image)\n\n        if \"annotations\" not in dataset_dict:\n            image, transforms = T.apply_transform_gens(\n                ([self.crop_gen] if self.crop_gen else []) + self.tfm_gens, image\n            )\n        else:\n            # Crop around an instance if there are instances in the image.\n            # USER: Remove if you don't use cropping\n            if self.crop_gen:\n                crop_tfm = utils.gen_crop_transform_with_instance(\n                    self.crop_gen.get_crop_size(image.shape[:2]),\n                    image.shape[:2],\n                    np.random.choice(dataset_dict[\"annotations\"]),\n                )\n                image = crop_tfm.apply_image(image)\n            image, transforms = T.apply_transform_gens(self.tfm_gens, image)\n            if self.crop_gen:\n                transforms = crop_tfm + transforms\n\n        image_shape = image.shape[:2]  # h, w\n\n        # Pytorch's dataloader is efficient on torch.Tensor due to shared-memory,\n        # but not efficient on large generic data structures due to the use of pickle & mp.Queue.\n        # Therefore it's important to use torch.Tensor.\n        dataset_dict[\"image\"] = torch.as_tensor(image.transpose(2, 0, 1).astype(\"float32\"))\n  
      # Can use uint8 if it turns out to be slow some day\n\n        # USER: Remove if you don't use pre-computed proposals.\n        if self.load_proposals:\n            utils.transform_proposals(\n                dataset_dict, image_shape, transforms, self.min_box_side_len, self.proposal_topk\n            )\n\n        if not self.is_train:\n            dataset_dict.pop(\"annotations\", None)\n            dataset_dict.pop(\"sem_seg_file_name\", None)\n            return dataset_dict\n\n        if \"annotations\" in dataset_dict:\n            # USER: Modify this if you want to keep them for some reason.\n            for anno in dataset_dict[\"annotations\"]:\n                if not self.mask_on:\n                    anno.pop(\"segmentation\", None)\n                if not self.keypoint_on:\n                    anno.pop(\"keypoints\", None)\n\n            # USER: Implement additional transformations if you have other types of data\n            annos = [\n                utils.transform_instance_annotations(\n                    obj, transforms, image_shape, keypoint_hflip_indices=self.keypoint_hflip_indices\n                )\n                for obj in dataset_dict.pop(\"annotations\")\n                if obj.get(\"iscrowd\", 0) == 0\n            ]\n            instances = utils.annotations_to_instances(\n                annos, image_shape, mask_format=self.mask_format\n            )\n            # Create a tight bounding box from masks, useful when image is cropped\n            if self.crop_gen and instances.has(\"gt_masks\"):\n                instances.gt_boxes = instances.gt_masks.get_bounding_boxes()\n            dataset_dict[\"instances\"] = utils.filter_empty_instances(instances)\n\n        # USER: Remove if you don't do semantic/panoptic segmentation.\n        if \"sem_seg_file_name\" in dataset_dict:\n            with PathManager.open(dataset_dict.pop(\"sem_seg_file_name\"), \"rb\") as f:\n                sem_seg_gt = Image.open(f)\n                
sem_seg_gt = np.asarray(sem_seg_gt, dtype=\"uint8\")\n            sem_seg_gt = transforms.apply_segmentation(sem_seg_gt)\n            sem_seg_gt = torch.as_tensor(sem_seg_gt.astype(\"long\"))\n            dataset_dict[\"sem_seg\"] = sem_seg_gt\n        return dataset_dict\n"
  },
  {
    "path": "detectron2/data/datasets/README.md",
    "content": "\n\n### Common Datasets\n\nThe dataset implemented here does not need to load the data into the final format.\nIt should provide the minimal data structure needed to use the dataset, so it can be very efficient.\n\nFor example, for an image dataset, just provide the file names and labels, but don't read the images.\nLet the downstream decide how to read.\n"
  },
  {
    "path": "detectron2/data/datasets/__init__.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nfrom .cityscapes import load_cityscapes_instances\nfrom .soba import load_soba_json, load_sem_seg\nfrom .lvis import load_lvis_json, register_lvis_instances, get_lvis_instances_meta\nfrom .register_soba import register_soba_instances\nfrom . import builtin  # ensure the builtin datasets are registered\n\n\n__all__ = [k for k in globals().keys() if \"builtin\" not in k and not k.startswith(\"_\")]\n"
  },
  {
    "path": "detectron2/data/datasets/builtin.py",
    "content": "# -*- coding: utf-8 -*-\n# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\n\n\n\"\"\"\nThis file registers pre-defined datasets at hard-coded paths, and their metadata.\n\nWe hard-code metadata for common datasets. This will enable:\n1. Consistency check when loading the datasets\n2. Use models on these standard datasets directly and run demos,\n   without having to download the dataset annotations\n\nWe hard-code some paths to the dataset that's assumed to\nexist in \"./datasets/\".\n\nUsers SHOULD NOT use this file to create new dataset / metadata for new dataset.\nTo add new dataset, refer to the tutorial \"docs/DATASETS.md\".\n\"\"\"\n\nimport os\n\nfrom detectron2.data import MetadataCatalog, DatasetCatalog\nfrom .register_coco import register_coco_instances, register_coco_panoptic_separated\nfrom .lvis import register_lvis_instances, get_lvis_instances_meta\nfrom .cityscapes import load_cityscapes_instances, load_cityscapes_semantic\nfrom .pascal_voc import register_pascal_voc\nfrom .builtin_meta import _get_builtin_metadata\n\n\n# ==== Predefined datasets and splits for COCO ==========\n\n_PREDEFINED_SPLITS_COCO = {}\n_PREDEFINED_SPLITS_COCO[\"coco\"] = {\n    \"coco_2014_train\": (\"coco/train2014\", \"coco/annotations/instances_train2014.json\"),\n    \"coco_2014_val\": (\"coco/val2014\", \"coco/annotations/instances_val2014.json\"),\n    \"coco_2014_minival\": (\"coco/val2014\", \"coco/annotations/instances_minival2014.json\"),\n    \"coco_2014_minival_100\": (\"coco/val2014\", \"coco/annotations/instances_minival2014_100.json\"),\n    \"coco_2014_valminusminival\": (\n        \"coco/val2014\",\n        \"coco/annotations/instances_valminusminival2014.json\",\n    ),\n    \"coco_2017_train\": (\"coco/train2017\", \"coco/annotations/instances_train2017.json\"),\n    \"coco_2017_val\": (\"coco/val2017\", \"coco/annotations/instances_val2017.json\"),\n    \"coco_2017_val_100\": (\"coco/val2017\", 
\"coco/annotations/instances_val2017_100.json\"),\n}\n\n_PREDEFINED_SPLITS_COCO[\"coco_person\"] = {\n    \"keypoints_coco_2014_train\": (\n        \"coco/train2014\",\n        \"coco/annotations/person_keypoints_train2014.json\",\n    ),\n    \"keypoints_coco_2014_val\": (\"coco/val2014\", \"coco/annotations/person_keypoints_val2014.json\"),\n    \"keypoints_coco_2014_minival\": (\n        \"coco/val2014\",\n        \"coco/annotations/person_keypoints_minival2014.json\",\n    ),\n    \"keypoints_coco_2014_valminusminival\": (\n        \"coco/val2014\",\n        \"coco/annotations/person_keypoints_valminusminival2014.json\",\n    ),\n    \"keypoints_coco_2014_minival_100\": (\n        \"coco/val2014\",\n        \"coco/annotations/person_keypoints_minival2014_100.json\",\n    ),\n    \"keypoints_coco_2017_train\": (\n        \"coco/train2017\",\n        \"coco/annotations/person_keypoints_train2017.json\",\n    ),\n    \"keypoints_coco_2017_val\": (\"coco/val2017\", \"coco/annotations/person_keypoints_val2017.json\"),\n    \"keypoints_coco_2017_val_100\": (\n        \"coco/val2017\",\n        \"coco/annotations/person_keypoints_val2017_100.json\",\n    ),\n}\n\n\n_PREDEFINED_SPLITS_COCO_PANOPTIC = {\n    \"coco_2017_train_panoptic\": (\n        # This is the original panoptic annotation directory\n        \"coco/panoptic_train2017\",\n        \"coco/annotations/panoptic_train2017.json\",\n        # This directory contains semantic annotations that are\n        # converted from panoptic annotations.\n        # It is used by PanopticFPN.\n        # You can use the script at detectron2/datasets/prepare_panoptic_fpn.py\n        # to create these directories.\n        \"coco/panoptic_stuff_train2017\",\n    ),\n    \"coco_2017_val_panoptic\": (\n        \"coco/panoptic_val2017\",\n        \"coco/annotations/panoptic_val2017.json\",\n        \"coco/panoptic_stuff_val2017\",\n    ),\n    \"coco_2017_val_100_panoptic\": (\n        \"coco/panoptic_val2017_100\",\n        
\"coco/annotations/panoptic_val2017_100.json\",\n        \"coco/panoptic_stuff_val2017_100\",\n    ),\n}\n\n\ndef register_all_coco(root=\"datasets\"):\n    for dataset_name, splits_per_dataset in _PREDEFINED_SPLITS_COCO.items():\n        for key, (image_root, json_file) in splits_per_dataset.items():\n            # Assume pre-defined datasets live in `./datasets`.\n            register_coco_instances(\n                key,\n                _get_builtin_metadata(dataset_name),\n                os.path.join(root, json_file) if \"://\" not in json_file else json_file,\n                os.path.join(root, image_root),\n            )\n\n    for (\n        prefix,\n        (panoptic_root, panoptic_json, semantic_root),\n    ) in _PREDEFINED_SPLITS_COCO_PANOPTIC.items():\n        prefix_instances = prefix[: -len(\"_panoptic\")]\n        instances_meta = MetadataCatalog.get(prefix_instances)\n        image_root, instances_json = instances_meta.image_root, instances_meta.json_file\n        register_coco_panoptic_separated(\n            prefix,\n            _get_builtin_metadata(\"coco_panoptic_separated\"),\n            image_root,\n            os.path.join(root, panoptic_root),\n            os.path.join(root, panoptic_json),\n            os.path.join(root, semantic_root),\n            instances_json,\n        )\n\n\n# ==== Predefined datasets and splits for LVIS ==========\n\n\n_PREDEFINED_SPLITS_LVIS = {\n    \"lvis_v0.5\": {\n        \"lvis_v0.5_train\": (\"coco/train2017\", \"lvis/lvis_v0.5_train.json\"),\n        \"lvis_v0.5_val\": (\"coco/val2017\", \"lvis/lvis_v0.5_val.json\"),\n        \"lvis_v0.5_val_rand_100\": (\"coco/val2017\", \"lvis/lvis_v0.5_val_rand_100.json\"),\n        \"lvis_v0.5_test\": (\"coco/test2017\", \"lvis/lvis_v0.5_image_info_test.json\"),\n    }\n}\n\n\ndef register_all_lvis(root=\"datasets\"):\n    for dataset_name, splits_per_dataset in _PREDEFINED_SPLITS_LVIS.items():\n        for key, (image_root, json_file) in splits_per_dataset.items():\n  
          # Assume pre-defined datasets live in `./datasets`.\n            register_lvis_instances(\n                key,\n                get_lvis_instances_meta(dataset_name),\n                os.path.join(root, json_file) if \"://\" not in json_file else json_file,\n                os.path.join(root, image_root),\n            )\n\n\n# ==== Predefined splits for raw cityscapes images ===========\n\n\n_RAW_CITYSCAPES_SPLITS = {\n    \"cityscapes_fine_{task}_train\": (\"cityscapes/leftImg8bit/train\", \"cityscapes/gtFine/train\"),\n    \"cityscapes_fine_{task}_val\": (\"cityscapes/leftImg8bit/val\", \"cityscapes/gtFine/val\"),\n    \"cityscapes_fine_{task}_test\": (\"cityscapes/leftImg8bit/test\", \"cityscapes/gtFine/test\"),\n}\n\n\ndef register_all_cityscapes(root=\"datasets\"):\n    for key, (image_dir, gt_dir) in _RAW_CITYSCAPES_SPLITS.items():\n        meta = _get_builtin_metadata(\"cityscapes\")\n        image_dir = os.path.join(root, image_dir)\n        gt_dir = os.path.join(root, gt_dir)\n\n        inst_key = key.format(task=\"instance_seg\")\n        DatasetCatalog.register(\n            inst_key,\n            lambda x=image_dir, y=gt_dir: load_cityscapes_instances(\n                x, y, from_json=True, to_polygons=True\n            ),\n        )\n        MetadataCatalog.get(inst_key).set(\n            image_dir=image_dir, gt_dir=gt_dir, evaluator_type=\"cityscapes\", **meta\n        )\n\n        sem_key = key.format(task=\"sem_seg\")\n        DatasetCatalog.register(\n            sem_key, lambda x=image_dir, y=gt_dir: load_cityscapes_semantic(x, y)\n        )\n        MetadataCatalog.get(sem_key).set(\n            image_dir=image_dir, gt_dir=gt_dir, evaluator_type=\"sem_seg\", **meta\n        )\n\n\n# ==== Predefined splits for PASCAL VOC ===========\ndef register_all_pascal_voc(root=\"datasets\"):\n    SPLITS = [\n        (\"voc_2007_trainval\", \"VOC2007\", \"trainval\"),\n        (\"voc_2007_train\", \"VOC2007\", \"train\"),\n        
(\"voc_2007_val\", \"VOC2007\", \"val\"),\n        (\"voc_2007_test\", \"VOC2007\", \"test\"),\n        (\"voc_2012_trainval\", \"VOC2012\", \"trainval\"),\n        (\"voc_2012_train\", \"VOC2012\", \"train\"),\n        (\"voc_2012_val\", \"VOC2012\", \"val\"),\n    ]\n    for name, dirname, split in SPLITS:\n        year = 2007 if \"2007\" in name else 2012\n        register_pascal_voc(name, os.path.join(root, dirname), split, year)\n        MetadataCatalog.get(name).evaluator_type = \"pascal_voc\"\n\n\n# Register them all under \"./datasets\"\nregister_all_coco()\nregister_all_lvis()\nregister_all_cityscapes()\nregister_all_pascal_voc()\n"
  },
  {
    "path": "detectron2/data/datasets/builtin_meta.py",
    "content": "# -*- coding: utf-8 -*-\n# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\n\n\n# All coco categories, together with their nice-looking visualization colors\n# It's from https://github.com/cocodataset/panopticapi/blob/master/panoptic_coco_categories.json\nCOCO_CATEGORIES = [\n    {\"color\": [220, 20, 60], \"isthing\": 1, \"id\": 1, \"name\": \"person\"},\n    {\"color\": [119, 11, 32], \"isthing\": 1, \"id\": 2, \"name\": \"bicycle\"},\n    {\"color\": [0, 0, 142], \"isthing\": 1, \"id\": 3, \"name\": \"car\"},\n    {\"color\": [0, 0, 230], \"isthing\": 1, \"id\": 4, \"name\": \"motorcycle\"},\n    {\"color\": [106, 0, 228], \"isthing\": 1, \"id\": 5, \"name\": \"airplane\"},\n    {\"color\": [0, 60, 100], \"isthing\": 1, \"id\": 6, \"name\": \"bus\"},\n    {\"color\": [0, 80, 100], \"isthing\": 1, \"id\": 7, \"name\": \"train\"},\n    {\"color\": [0, 0, 70], \"isthing\": 1, \"id\": 8, \"name\": \"truck\"},\n    {\"color\": [0, 0, 192], \"isthing\": 1, \"id\": 9, \"name\": \"boat\"},\n    {\"color\": [250, 170, 30], \"isthing\": 1, \"id\": 10, \"name\": \"traffic light\"},\n    {\"color\": [100, 170, 30], \"isthing\": 1, \"id\": 11, \"name\": \"fire hydrant\"},\n    {\"color\": [220, 220, 0], \"isthing\": 1, \"id\": 13, \"name\": \"stop sign\"},\n    {\"color\": [175, 116, 175], \"isthing\": 1, \"id\": 14, \"name\": \"parking meter\"},\n    {\"color\": [250, 0, 30], \"isthing\": 1, \"id\": 15, \"name\": \"bench\"},\n    {\"color\": [165, 42, 42], \"isthing\": 1, \"id\": 16, \"name\": \"bird\"},\n    {\"color\": [255, 77, 255], \"isthing\": 1, \"id\": 17, \"name\": \"cat\"},\n    {\"color\": [0, 226, 252], \"isthing\": 1, \"id\": 18, \"name\": \"dog\"},\n    {\"color\": [182, 182, 255], \"isthing\": 1, \"id\": 19, \"name\": \"horse\"},\n    {\"color\": [0, 82, 0], \"isthing\": 1, \"id\": 20, \"name\": \"sheep\"},\n    {\"color\": [120, 166, 157], \"isthing\": 1, \"id\": 21, \"name\": \"cow\"},\n    {\"color\": [110, 76, 0], 
\"isthing\": 1, \"id\": 22, \"name\": \"elephant\"},\n    {\"color\": [174, 57, 255], \"isthing\": 1, \"id\": 23, \"name\": \"bear\"},\n    {\"color\": [199, 100, 0], \"isthing\": 1, \"id\": 24, \"name\": \"zebra\"},\n    {\"color\": [72, 0, 118], \"isthing\": 1, \"id\": 25, \"name\": \"giraffe\"},\n    {\"color\": [255, 179, 240], \"isthing\": 1, \"id\": 27, \"name\": \"backpack\"},\n    {\"color\": [0, 125, 92], \"isthing\": 1, \"id\": 28, \"name\": \"umbrella\"},\n    {\"color\": [209, 0, 151], \"isthing\": 1, \"id\": 31, \"name\": \"handbag\"},\n    {\"color\": [188, 208, 182], \"isthing\": 1, \"id\": 32, \"name\": \"tie\"},\n    {\"color\": [0, 220, 176], \"isthing\": 1, \"id\": 33, \"name\": \"suitcase\"},\n    {\"color\": [255, 99, 164], \"isthing\": 1, \"id\": 34, \"name\": \"frisbee\"},\n    {\"color\": [92, 0, 73], \"isthing\": 1, \"id\": 35, \"name\": \"skis\"},\n    {\"color\": [133, 129, 255], \"isthing\": 1, \"id\": 36, \"name\": \"snowboard\"},\n    {\"color\": [78, 180, 255], \"isthing\": 1, \"id\": 37, \"name\": \"sports ball\"},\n    {\"color\": [0, 228, 0], \"isthing\": 1, \"id\": 38, \"name\": \"kite\"},\n    {\"color\": [174, 255, 243], \"isthing\": 1, \"id\": 39, \"name\": \"baseball bat\"},\n    {\"color\": [45, 89, 255], \"isthing\": 1, \"id\": 40, \"name\": \"baseball glove\"},\n    {\"color\": [134, 134, 103], \"isthing\": 1, \"id\": 41, \"name\": \"skateboard\"},\n    {\"color\": [145, 148, 174], \"isthing\": 1, \"id\": 42, \"name\": \"surfboard\"},\n    {\"color\": [255, 208, 186], \"isthing\": 1, \"id\": 43, \"name\": \"tennis racket\"},\n    {\"color\": [197, 226, 255], \"isthing\": 1, \"id\": 44, \"name\": \"bottle\"},\n    {\"color\": [171, 134, 1], \"isthing\": 1, \"id\": 46, \"name\": \"wine glass\"},\n    {\"color\": [109, 63, 54], \"isthing\": 1, \"id\": 47, \"name\": \"cup\"},\n    {\"color\": [207, 138, 255], \"isthing\": 1, \"id\": 48, \"name\": \"fork\"},\n    {\"color\": [151, 0, 95], \"isthing\": 1, \"id\": 49, \"name\": 
\"knife\"},\n    {\"color\": [9, 80, 61], \"isthing\": 1, \"id\": 50, \"name\": \"spoon\"},\n    {\"color\": [84, 105, 51], \"isthing\": 1, \"id\": 51, \"name\": \"bowl\"},\n    {\"color\": [74, 65, 105], \"isthing\": 1, \"id\": 52, \"name\": \"banana\"},\n    {\"color\": [166, 196, 102], \"isthing\": 1, \"id\": 53, \"name\": \"apple\"},\n    {\"color\": [208, 195, 210], \"isthing\": 1, \"id\": 54, \"name\": \"sandwich\"},\n    {\"color\": [255, 109, 65], \"isthing\": 1, \"id\": 55, \"name\": \"orange\"},\n    {\"color\": [0, 143, 149], \"isthing\": 1, \"id\": 56, \"name\": \"broccoli\"},\n    {\"color\": [179, 0, 194], \"isthing\": 1, \"id\": 57, \"name\": \"carrot\"},\n    {\"color\": [209, 99, 106], \"isthing\": 1, \"id\": 58, \"name\": \"hot dog\"},\n    {\"color\": [5, 121, 0], \"isthing\": 1, \"id\": 59, \"name\": \"pizza\"},\n    {\"color\": [227, 255, 205], \"isthing\": 1, \"id\": 60, \"name\": \"donut\"},\n    {\"color\": [147, 186, 208], \"isthing\": 1, \"id\": 61, \"name\": \"cake\"},\n    {\"color\": [153, 69, 1], \"isthing\": 1, \"id\": 62, \"name\": \"chair\"},\n    {\"color\": [3, 95, 161], \"isthing\": 1, \"id\": 63, \"name\": \"couch\"},\n    {\"color\": [163, 255, 0], \"isthing\": 1, \"id\": 64, \"name\": \"potted plant\"},\n    {\"color\": [119, 0, 170], \"isthing\": 1, \"id\": 65, \"name\": \"bed\"},\n    {\"color\": [0, 182, 199], \"isthing\": 1, \"id\": 67, \"name\": \"dining table\"},\n    {\"color\": [0, 165, 120], \"isthing\": 1, \"id\": 70, \"name\": \"toilet\"},\n    {\"color\": [183, 130, 88], \"isthing\": 1, \"id\": 72, \"name\": \"tv\"},\n    {\"color\": [95, 32, 0], \"isthing\": 1, \"id\": 73, \"name\": \"laptop\"},\n    {\"color\": [130, 114, 135], \"isthing\": 1, \"id\": 74, \"name\": \"mouse\"},\n    {\"color\": [110, 129, 133], \"isthing\": 1, \"id\": 75, \"name\": \"remote\"},\n    {\"color\": [166, 74, 118], \"isthing\": 1, \"id\": 76, \"name\": \"keyboard\"},\n    {\"color\": [219, 142, 185], \"isthing\": 1, \"id\": 77, 
\"name\": \"cell phone\"},\n    {\"color\": [79, 210, 114], \"isthing\": 1, \"id\": 78, \"name\": \"microwave\"},\n    {\"color\": [178, 90, 62], \"isthing\": 1, \"id\": 79, \"name\": \"oven\"},\n    {\"color\": [65, 70, 15], \"isthing\": 1, \"id\": 80, \"name\": \"toaster\"},\n    {\"color\": [127, 167, 115], \"isthing\": 1, \"id\": 81, \"name\": \"sink\"},\n    {\"color\": [59, 105, 106], \"isthing\": 1, \"id\": 82, \"name\": \"refrigerator\"},\n    {\"color\": [142, 108, 45], \"isthing\": 1, \"id\": 84, \"name\": \"book\"},\n    {\"color\": [196, 172, 0], \"isthing\": 1, \"id\": 85, \"name\": \"clock\"},\n    {\"color\": [95, 54, 80], \"isthing\": 1, \"id\": 86, \"name\": \"vase\"},\n    {\"color\": [128, 76, 255], \"isthing\": 1, \"id\": 87, \"name\": \"scissors\"},\n    {\"color\": [201, 57, 1], \"isthing\": 1, \"id\": 88, \"name\": \"teddy bear\"},\n    {\"color\": [246, 0, 122], \"isthing\": 1, \"id\": 89, \"name\": \"hair drier\"},\n    {\"color\": [191, 162, 208], \"isthing\": 1, \"id\": 90, \"name\": \"toothbrush\"},\n    {\"color\": [255, 255, 128], \"isthing\": 0, \"id\": 92, \"name\": \"banner\"},\n    {\"color\": [147, 211, 203], \"isthing\": 0, \"id\": 93, \"name\": \"blanket\"},\n    {\"color\": [150, 100, 100], \"isthing\": 0, \"id\": 95, \"name\": \"bridge\"},\n    {\"color\": [168, 171, 172], \"isthing\": 0, \"id\": 100, \"name\": \"cardboard\"},\n    {\"color\": [146, 112, 198], \"isthing\": 0, \"id\": 107, \"name\": \"counter\"},\n    {\"color\": [210, 170, 100], \"isthing\": 0, \"id\": 109, \"name\": \"curtain\"},\n    {\"color\": [92, 136, 89], \"isthing\": 0, \"id\": 112, \"name\": \"door-stuff\"},\n    {\"color\": [218, 88, 184], \"isthing\": 0, \"id\": 118, \"name\": \"floor-wood\"},\n    {\"color\": [241, 129, 0], \"isthing\": 0, \"id\": 119, \"name\": \"flower\"},\n    {\"color\": [217, 17, 255], \"isthing\": 0, \"id\": 122, \"name\": \"fruit\"},\n    {\"color\": [124, 74, 181], \"isthing\": 0, \"id\": 125, \"name\": \"gravel\"},\n    
{\"color\": [70, 70, 70], \"isthing\": 0, \"id\": 128, \"name\": \"house\"},\n    {\"color\": [255, 228, 255], \"isthing\": 0, \"id\": 130, \"name\": \"light\"},\n    {\"color\": [154, 208, 0], \"isthing\": 0, \"id\": 133, \"name\": \"mirror-stuff\"},\n    {\"color\": [193, 0, 92], \"isthing\": 0, \"id\": 138, \"name\": \"net\"},\n    {\"color\": [76, 91, 113], \"isthing\": 0, \"id\": 141, \"name\": \"pillow\"},\n    {\"color\": [255, 180, 195], \"isthing\": 0, \"id\": 144, \"name\": \"platform\"},\n    {\"color\": [106, 154, 176], \"isthing\": 0, \"id\": 145, \"name\": \"playingfield\"},\n    {\"color\": [230, 150, 140], \"isthing\": 0, \"id\": 147, \"name\": \"railroad\"},\n    {\"color\": [60, 143, 255], \"isthing\": 0, \"id\": 148, \"name\": \"river\"},\n    {\"color\": [128, 64, 128], \"isthing\": 0, \"id\": 149, \"name\": \"road\"},\n    {\"color\": [92, 82, 55], \"isthing\": 0, \"id\": 151, \"name\": \"roof\"},\n    {\"color\": [254, 212, 124], \"isthing\": 0, \"id\": 154, \"name\": \"sand\"},\n    {\"color\": [73, 77, 174], \"isthing\": 0, \"id\": 155, \"name\": \"sea\"},\n    {\"color\": [255, 160, 98], \"isthing\": 0, \"id\": 156, \"name\": \"shelf\"},\n    {\"color\": [255, 255, 255], \"isthing\": 0, \"id\": 159, \"name\": \"snow\"},\n    {\"color\": [104, 84, 109], \"isthing\": 0, \"id\": 161, \"name\": \"stairs\"},\n    {\"color\": [169, 164, 131], \"isthing\": 0, \"id\": 166, \"name\": \"tent\"},\n    {\"color\": [225, 199, 255], \"isthing\": 0, \"id\": 168, \"name\": \"towel\"},\n    {\"color\": [137, 54, 74], \"isthing\": 0, \"id\": 171, \"name\": \"wall-brick\"},\n    {\"color\": [135, 158, 223], \"isthing\": 0, \"id\": 175, \"name\": \"wall-stone\"},\n    {\"color\": [7, 246, 231], \"isthing\": 0, \"id\": 176, \"name\": \"wall-tile\"},\n    {\"color\": [107, 255, 200], \"isthing\": 0, \"id\": 177, \"name\": \"wall-wood\"},\n    {\"color\": [58, 41, 149], \"isthing\": 0, \"id\": 178, \"name\": \"water-other\"},\n    {\"color\": [183, 121, 142], 
\"isthing\": 0, \"id\": 180, \"name\": \"window-blind\"},\n    {\"color\": [255, 73, 97], \"isthing\": 0, \"id\": 181, \"name\": \"window-other\"},\n    {\"color\": [107, 142, 35], \"isthing\": 0, \"id\": 184, \"name\": \"tree-merged\"},\n    {\"color\": [190, 153, 153], \"isthing\": 0, \"id\": 185, \"name\": \"fence-merged\"},\n    {\"color\": [146, 139, 141], \"isthing\": 0, \"id\": 186, \"name\": \"ceiling-merged\"},\n    {\"color\": [70, 130, 180], \"isthing\": 0, \"id\": 187, \"name\": \"sky-other-merged\"},\n    {\"color\": [134, 199, 156], \"isthing\": 0, \"id\": 188, \"name\": \"cabinet-merged\"},\n    {\"color\": [209, 226, 140], \"isthing\": 0, \"id\": 189, \"name\": \"table-merged\"},\n    {\"color\": [96, 36, 108], \"isthing\": 0, \"id\": 190, \"name\": \"floor-other-merged\"},\n    {\"color\": [96, 96, 96], \"isthing\": 0, \"id\": 191, \"name\": \"pavement-merged\"},\n    {\"color\": [64, 170, 64], \"isthing\": 0, \"id\": 192, \"name\": \"mountain-merged\"},\n    {\"color\": [152, 251, 152], \"isthing\": 0, \"id\": 193, \"name\": \"grass-merged\"},\n    {\"color\": [208, 229, 228], \"isthing\": 0, \"id\": 194, \"name\": \"dirt-merged\"},\n    {\"color\": [206, 186, 171], \"isthing\": 0, \"id\": 195, \"name\": \"paper-merged\"},\n    {\"color\": [152, 161, 64], \"isthing\": 0, \"id\": 196, \"name\": \"food-other-merged\"},\n    {\"color\": [116, 112, 0], \"isthing\": 0, \"id\": 197, \"name\": \"building-other-merged\"},\n    {\"color\": [0, 114, 143], \"isthing\": 0, \"id\": 198, \"name\": \"rock-merged\"},\n    {\"color\": [102, 102, 156], \"isthing\": 0, \"id\": 199, \"name\": \"wall-other-merged\"},\n    {\"color\": [250, 141, 255], \"isthing\": 0, \"id\": 200, \"name\": \"rug-merged\"},\n]\n\n# fmt: off\nCOCO_PERSON_KEYPOINT_NAMES = (\n    \"nose\",\n    \"left_eye\", \"right_eye\",\n    \"left_ear\", \"right_ear\",\n    \"left_shoulder\", \"right_shoulder\",\n    \"left_elbow\", \"right_elbow\",\n    \"left_wrist\", \"right_wrist\",\n    
\"left_hip\", \"right_hip\",\n    \"left_knee\", \"right_knee\",\n    \"left_ankle\", \"right_ankle\",\n)\n# fmt: on\n\n# Pairs of keypoints that should be exchanged under horizontal flipping\nCOCO_PERSON_KEYPOINT_FLIP_MAP = (\n    (\"left_eye\", \"right_eye\"),\n    (\"left_ear\", \"right_ear\"),\n    (\"left_shoulder\", \"right_shoulder\"),\n    (\"left_elbow\", \"right_elbow\"),\n    (\"left_wrist\", \"right_wrist\"),\n    (\"left_hip\", \"right_hip\"),\n    (\"left_knee\", \"right_knee\"),\n    (\"left_ankle\", \"right_ankle\"),\n)\n\n# rules for pairs of keypoints to draw a line between, and the line color to use.\nKEYPOINT_CONNECTION_RULES = [\n    # face\n    (\"left_ear\", \"left_eye\", (102, 204, 255)),\n    (\"right_ear\", \"right_eye\", (51, 153, 255)),\n    (\"left_eye\", \"nose\", (102, 0, 204)),\n    (\"nose\", \"right_eye\", (51, 102, 255)),\n    # upper-body\n    (\"left_shoulder\", \"right_shoulder\", (255, 128, 0)),\n    (\"left_shoulder\", \"left_elbow\", (153, 255, 204)),\n    (\"right_shoulder\", \"right_elbow\", (128, 229, 255)),\n    (\"left_elbow\", \"left_wrist\", (153, 255, 153)),\n    (\"right_elbow\", \"right_wrist\", (102, 255, 224)),\n    # lower-body\n    (\"left_hip\", \"right_hip\", (255, 102, 0)),\n    (\"left_hip\", \"left_knee\", (255, 255, 77)),\n    (\"right_hip\", \"right_knee\", (153, 255, 204)),\n    (\"left_knee\", \"left_ankle\", (191, 255, 128)),\n    (\"right_knee\", \"right_ankle\", (255, 195, 77)),\n]\n\n\ndef _get_coco_instances_meta():\n    thing_ids = [k[\"id\"] for k in COCO_CATEGORIES if k[\"isthing\"] == 1]\n    thing_colors = [k[\"color\"] for k in COCO_CATEGORIES if k[\"isthing\"] == 1]\n    assert len(thing_ids) == 80, len(thing_ids)\n    # Mapping from the incontiguous COCO category id to an id in [0, 79]\n    thing_dataset_id_to_contiguous_id = {k: i for i, k in enumerate(thing_ids)}\n    thing_classes = [k[\"name\"] for k in COCO_CATEGORIES if k[\"isthing\"] == 1]\n    ret = {\n        
\"thing_dataset_id_to_contiguous_id\": thing_dataset_id_to_contiguous_id,\n        \"thing_classes\": thing_classes,\n        \"thing_colors\": thing_colors,\n    }\n    return ret\n\n\ndef _get_coco_panoptic_separated_meta():\n    \"\"\"\n    Returns metadata for \"separated\" version of the panoptic segmentation dataset.\n    \"\"\"\n    stuff_ids = [k[\"id\"] for k in COCO_CATEGORIES if k[\"isthing\"] == 0]\n    assert len(stuff_ids) == 53, len(stuff_ids)\n\n    # For semantic segmentation, this mapping maps from contiguous stuff id\n    # (in [0, 53], used in models) to ids in the dataset (used for processing results)\n    # The id 0 is mapped to an extra category \"thing\".\n    stuff_dataset_id_to_contiguous_id = {k: i + 1 for i, k in enumerate(stuff_ids)}\n    # When converting COCO panoptic annotations to semantic annotations\n    # We label the \"thing\" category to 0\n    stuff_dataset_id_to_contiguous_id[0] = 0\n\n    # 54 names for COCO stuff categories (including \"things\")\n    stuff_classes = [\"things\"] + [\n        k[\"name\"].replace(\"-other\", \"\").replace(\"-merged\", \"\")\n        for k in COCO_CATEGORIES\n        if k[\"isthing\"] == 0\n    ]\n\n    # NOTE: I randomly picked a color for things\n    stuff_colors = [[82, 18, 128]] + [k[\"color\"] for k in COCO_CATEGORIES if k[\"isthing\"] == 0]\n    ret = {\n        \"stuff_dataset_id_to_contiguous_id\": stuff_dataset_id_to_contiguous_id,\n        \"stuff_classes\": stuff_classes,\n        \"stuff_colors\": stuff_colors,\n    }\n    ret.update(_get_coco_instances_meta())\n    return ret\n\n\ndef _get_builtin_metadata(dataset_name):\n    if dataset_name == \"coco\":\n        return _get_coco_instances_meta()\n    if dataset_name == \"coco_panoptic_separated\":\n        return _get_coco_panoptic_separated_meta()\n    elif dataset_name == \"coco_person\":\n        return {\n            \"thing_classes\": [\"person\"],\n            \"keypoint_names\": COCO_PERSON_KEYPOINT_NAMES,\n            
\"keypoint_flip_map\": COCO_PERSON_KEYPOINT_FLIP_MAP,\n            \"keypoint_connection_rules\": KEYPOINT_CONNECTION_RULES,\n        }\n    elif dataset_name == \"cityscapes\":\n        # fmt: off\n        CITYSCAPES_THING_CLASSES = [\n            \"person\", \"rider\", \"car\", \"truck\",\n            \"bus\", \"train\", \"motorcycle\", \"bicycle\",\n        ]\n        CITYSCAPES_STUFF_CLASSES = [\n            \"road\", \"sidewalk\", \"building\", \"wall\", \"fence\", \"pole\", \"traffic light\",\n            \"traffic sign\", \"vegetation\", \"terrain\", \"sky\", \"person\", \"rider\", \"car\",\n            \"truck\", \"bus\", \"train\", \"motorcycle\", \"bicycle\", \"license plate\",\n        ]\n        # fmt: on\n        return {\n            \"thing_classes\": CITYSCAPES_THING_CLASSES,\n            \"stuff_classes\": CITYSCAPES_STUFF_CLASSES,\n        }\n    raise KeyError(\"No built-in metadata for dataset {}\".format(dataset_name))\n"
  },
  {
    "path": "detectron2/data/datasets/cityscapes.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport functools\nimport glob\nimport json\nimport logging\nimport multiprocessing as mp\nimport numpy as np\nimport os\nfrom itertools import chain\nimport pycocotools.mask as mask_util\nfrom PIL import Image\n\nfrom detectron2.structures import BoxMode\nfrom detectron2.utils.logger import setup_logger\nfrom detectron2.utils.comm import get_world_size\nfrom fvcore.common.file_io import PathManager\n\ntry:\n    import cv2  # noqa\nexcept ImportError:\n    # OpenCV is an optional dependency at the moment\n    pass\n\n\ndef load_cityscapes_instances(image_dir, gt_dir, from_json=True, to_polygons=True):\n    \"\"\"\n    Args:\n        image_dir (str): path to the raw dataset. e.g., \"~/cityscapes/leftImg8bit/train\".\n        gt_dir (str): path to the raw annotations. e.g., \"~/cityscapes/gtFine/train\".\n        from_json (bool): whether to read annotations from the raw json file or the png files.\n        to_polygons (bool): whether to represent the segmentation as polygons\n            (COCO's format) instead of masks (cityscapes's format).\n\n    Returns:\n        list[dict]: a list of dicts in Detectron2 standard format. (See\n        `Using Custom Datasets </tutorials/datasets.html>`_ )\n    \"\"\"\n    if from_json:\n        assert to_polygons, (\n            \"Cityscapes's json annotations are in polygon format. 
\"\n            \"Converting to mask format is not supported now.\"\n        )\n    files = []\n    for image_file in glob.glob(os.path.join(image_dir, \"**/*.png\")):\n        suffix = \"leftImg8bit.png\"\n        assert image_file.endswith(suffix)\n        prefix = image_dir\n        instance_file = gt_dir + image_file[len(prefix) : -len(suffix)] + \"gtFine_instanceIds.png\"\n        assert os.path.isfile(instance_file), instance_file\n\n        label_file = gt_dir + image_file[len(prefix) : -len(suffix)] + \"gtFine_labelIds.png\"\n        assert os.path.isfile(label_file), label_file\n\n        json_file = gt_dir + image_file[len(prefix) : -len(suffix)] + \"gtFine_polygons.json\"\n        files.append((image_file, instance_file, label_file, json_file))\n    assert len(files), \"No images found in {}\".format(image_dir)\n\n    logger = logging.getLogger(__name__)\n    logger.info(\"Preprocessing cityscapes annotations ...\")\n    # This is still not fast: all workers will execute duplicate works and will\n    # take up to 10m on a 8GPU server.\n    pool = mp.Pool(processes=max(mp.cpu_count() // get_world_size() // 2, 4))\n\n    ret = pool.map(\n        functools.partial(cityscapes_files_to_dict, from_json=from_json, to_polygons=to_polygons),\n        files,\n    )\n    logger.info(\"Loaded {} images from {}\".format(len(ret), image_dir))\n\n    # Map cityscape ids to contiguous ids\n    from cityscapesscripts.helpers.labels import labels\n\n    labels = [l for l in labels if l.hasInstances and not l.ignoreInEval]\n    dataset_id_to_contiguous_id = {l.id: idx for idx, l in enumerate(labels)}\n    for dict_per_image in ret:\n        for anno in dict_per_image[\"annotations\"]:\n            anno[\"category_id\"] = dataset_id_to_contiguous_id[anno[\"category_id\"]]\n    return ret\n\n\ndef load_cityscapes_semantic(image_dir, gt_dir):\n    \"\"\"\n    Args:\n        image_dir (str): path to the raw dataset. 
e.g., \"~/cityscapes/leftImg8bit/train\".\n        gt_dir (str): path to the raw annotations. e.g., \"~/cityscapes/gtFine/train\".\n\n    Returns:\n        list[dict]: a list of dict, each has \"file_name\" and\n            \"sem_seg_file_name\".\n    \"\"\"\n    ret = []\n    for image_file in glob.glob(os.path.join(image_dir, \"**/*.png\")):\n        suffix = \"leftImg8bit.png\"\n        assert image_file.endswith(suffix)\n        prefix = image_dir\n\n        label_file = gt_dir + image_file[len(prefix) : -len(suffix)] + \"gtFine_labelTrainIds.png\"\n        assert os.path.isfile(\n            label_file\n        ), \"Please generate labelTrainIds.png with cityscapesscripts/preparation/createTrainIdLabelImgs.py\"  # noqa\n\n        json_file = gt_dir + image_file[len(prefix) : -len(suffix)] + \"gtFine_polygons.json\"\n\n        with PathManager.open(json_file, \"r\") as f:\n            jsonobj = json.load(f)\n        ret.append(\n            {\n                \"file_name\": image_file,\n                \"sem_seg_file_name\": label_file,\n                \"height\": jsonobj[\"imgHeight\"],\n                \"width\": jsonobj[\"imgWidth\"],\n            }\n        )\n    return ret\n\n\ndef cityscapes_files_to_dict(files, from_json, to_polygons):\n    \"\"\"\n    Parse cityscapes annotation files to a dict.\n\n    Args:\n        files (tuple): consists of (image_file, instance_id_file, label_id_file, json_file)\n        from_json (bool): whether to read annotations from the raw json file or the png files.\n        to_polygons (bool): whether to represent the segmentation as polygons\n            (COCO's format) instead of masks (cityscapes's format).\n\n    Returns:\n        A dict in Detectron2 Dataset format.\n    \"\"\"\n    from cityscapesscripts.helpers.labels import id2label, name2label\n\n    image_file, instance_id_file, _, json_file = files\n\n    annos = []\n\n    if from_json:\n        from shapely.geometry import MultiPolygon, Polygon\n\n        with 
PathManager.open(json_file, \"r\") as f:\n            jsonobj = json.load(f)\n        ret = {\n            \"file_name\": image_file,\n            \"image_id\": os.path.basename(image_file),\n            \"height\": jsonobj[\"imgHeight\"],\n            \"width\": jsonobj[\"imgWidth\"],\n        }\n\n        # `polygons_union` contains the union of all valid polygons.\n        polygons_union = Polygon()\n\n        # CityscapesScripts draw the polygons in sequential order\n        # and each polygon *overwrites* existing ones. See\n        # (https://github.com/mcordts/cityscapesScripts/blob/master/cityscapesscripts/preparation/json2instanceImg.py) # noqa\n        # We use reverse order, and each polygon *avoids* early ones.\n        # This will resolve the ploygon overlaps in the same way as CityscapesScripts.\n        for obj in jsonobj[\"objects\"][::-1]:\n            if \"deleted\" in obj:  # cityscapes data format specific\n                continue\n            label_name = obj[\"label\"]\n\n            try:\n                label = name2label[label_name]\n            except KeyError:\n                if label_name.endswith(\"group\"):  # crowd area\n                    label = name2label[label_name[: -len(\"group\")]]\n                else:\n                    raise\n            if label.id < 0:  # cityscapes data format\n                continue\n\n            # Cityscapes's raw annotations uses integer coordinates\n            # Therefore +0.5 here\n            poly_coord = np.asarray(obj[\"polygon\"], dtype=\"f4\") + 0.5\n            # CityscapesScript uses PIL.ImageDraw.polygon to rasterize\n            # polygons for evaluation. 
This function operates in integer space\n            # and draws each pixel whose center falls into the polygon.\n            # Therefore it draws a polygon which is 0.5 \"fatter\" in expectation.\n            # We therefore dilate the input polygon by 0.5 as our input.\n            poly = Polygon(poly_coord).buffer(0.5, resolution=4)\n\n            if not label.hasInstances or label.ignoreInEval:\n                # even if we won't store the polygon it still contributes to overlaps resolution\n                polygons_union = polygons_union.union(poly)\n                continue\n\n            # Take non-overlapping part of the polygon\n            poly_wo_overlaps = poly.difference(polygons_union)\n            if poly_wo_overlaps.is_empty:\n                continue\n            polygons_union = polygons_union.union(poly)\n\n            anno = {}\n            anno[\"iscrowd\"] = label_name.endswith(\"group\")\n            anno[\"category_id\"] = label.id\n\n            if isinstance(poly_wo_overlaps, Polygon):\n                poly_list = [poly_wo_overlaps]\n            elif isinstance(poly_wo_overlaps, MultiPolygon):\n                poly_list = poly_wo_overlaps.geoms\n            else:\n                raise NotImplementedError(\"Unknown geometric structure {}\".format(poly_wo_overlaps))\n\n            poly_coord = []\n            for poly_el in poly_list:\n                # COCO API can work only with exterior boundaries now, hence we store only them.\n                # TODO: store both exterior and interior boundaries once other parts of the\n                # codebase support holes in polygons.\n                poly_coord.append(list(chain(*poly_el.exterior.coords)))\n            anno[\"segmentation\"] = poly_coord\n            (xmin, ymin, xmax, ymax) = poly_wo_overlaps.bounds\n\n            anno[\"bbox\"] = (xmin, ymin, xmax, ymax)\n            anno[\"bbox_mode\"] = BoxMode.XYXY_ABS\n\n            annos.append(anno)\n    else:\n        # See also the 
official annotation parsing scripts at\n        # https://github.com/mcordts/cityscapesScripts/blob/master/cityscapesscripts/evaluation/instances2dict.py  # noqa\n        with PathManager.open(instance_id_file, \"rb\") as f:\n            inst_image = np.asarray(Image.open(f), order=\"F\")\n        # ids < 24 are stuff labels (filtering them first is about 5% faster)\n        flattened_ids = np.unique(inst_image[inst_image >= 24])\n\n        ret = {\n            \"file_name\": image_file,\n            \"image_id\": os.path.basename(image_file),\n            \"height\": inst_image.shape[0],\n            \"width\": inst_image.shape[1],\n        }\n\n        for instance_id in flattened_ids:\n            # For non-crowd annotations, instance_id // 1000 is the label_id\n            # Crowd annotations have <1000 instance ids\n            label_id = instance_id // 1000 if instance_id >= 1000 else instance_id\n            label = id2label[label_id]\n            if not label.hasInstances or label.ignoreInEval:\n                continue\n\n            anno = {}\n            anno[\"iscrowd\"] = instance_id < 1000\n            anno[\"category_id\"] = label.id\n\n            mask = np.asarray(inst_image == instance_id, dtype=np.uint8, order=\"F\")\n\n            inds = np.nonzero(mask)\n            ymin, ymax = inds[0].min(), inds[0].max()\n            xmin, xmax = inds[1].min(), inds[1].max()\n            anno[\"bbox\"] = (xmin, ymin, xmax, ymax)\n            if xmax <= xmin or ymax <= ymin:\n                continue\n            anno[\"bbox_mode\"] = BoxMode.XYXY_ABS\n            if to_polygons:\n                # This conversion comes from D4809743 and D5171122,\n                # when Mask-RCNN was first developed.\n                contours = cv2.findContours(mask.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_NONE)[\n                    -2\n                ]\n                polygons = [c.reshape(-1).tolist() for c in contours if len(c) >= 3]\n                # opencv's can 
produce invalid polygons\n                if len(polygons) == 0:\n                    continue\n                anno[\"segmentation\"] = polygons\n            else:\n                anno[\"segmentation\"] = mask_util.encode(mask[:, :, None])[0]\n            annos.append(anno)\n    ret[\"annotations\"] = annos\n    return ret\n\n\nif __name__ == \"__main__\":\n    \"\"\"\n    Test the cityscapes dataset loader.\n\n    Usage:\n        python -m detectron2.data.datasets.cityscapes \\\n            cityscapes/leftImg8bit/train cityscapes/gtFine/train\n    \"\"\"\n    import argparse\n\n    parser = argparse.ArgumentParser()\n    parser.add_argument(\"image_dir\")\n    parser.add_argument(\"gt_dir\")\n    parser.add_argument(\"--type\", choices=[\"instance\", \"semantic\"], default=\"instance\")\n    args = parser.parse_args()\n    from detectron2.data.catalog import Metadata\n    from detectron2.utils.visualizer import Visualizer\n    from cityscapesscripts.helpers.labels import labels\n\n    logger = setup_logger(name=__name__)\n\n    dirname = \"cityscapes-data-vis\"\n    os.makedirs(dirname, exist_ok=True)\n\n    if args.type == \"instance\":\n        dicts = load_cityscapes_instances(\n            args.image_dir, args.gt_dir, from_json=True, to_polygons=True\n        )\n        logger.info(\"Done loading {} samples.\".format(len(dicts)))\n\n        thing_classes = [k.name for k in labels if k.hasInstances and not k.ignoreInEval]\n        meta = Metadata().set(thing_classes=thing_classes)\n\n    else:\n        dicts = load_cityscapes_semantic(args.image_dir, args.gt_dir)\n        logger.info(\"Done loading {} samples.\".format(len(dicts)))\n\n        stuff_names = [k.name for k in labels if k.trainId != 255]\n        stuff_colors = [k.color for k in labels if k.trainId != 255]\n        meta = Metadata().set(stuff_names=stuff_names, stuff_colors=stuff_colors)\n\n    for d in dicts:\n        img = np.array(Image.open(d[\"file_name\"]))\n        visualizer = 
Visualizer(img, metadata=meta)\n        vis = visualizer.draw_dataset_dict(d)\n        # cv2.imshow(\"a\", vis.get_image()[:, :, ::-1])\n        # cv2.waitKey()\n        fpath = os.path.join(dirname, os.path.basename(d[\"file_name\"]))\n        vis.save(fpath)\n"
  },
  {
    "path": "detectron2/data/datasets/coco.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport io\nimport logging\nimport contextlib\nimport os\nfrom PIL import Image\n\nfrom fvcore.common.timer import Timer\nfrom detectron2.structures import BoxMode\nfrom fvcore.common.file_io import PathManager\n\nfrom .. import MetadataCatalog, DatasetCatalog\n\n\"\"\"\nThis file contains functions to parse COCO-format annotations into dicts in \"Detectron2 format\".\n\"\"\"\n\n\nlogger = logging.getLogger(__name__)\n\n__all__ = [\"load_coco_json\", \"load_sem_seg\"]\n\n\ndef load_coco_json(json_file, image_root, dataset_name=None):\n    \"\"\"\n    Load a json file with COCO's instances annotation format.\n    Currently supports instance detection, instance segmentation,\n    person keypoints and densepose annotations.\n\n    Args:\n        json_file (str): full path to the json file in COCO instances annotation format.\n        image_root (str): the directory where the images in this json file exists.\n        dataset_name (str): the name of the dataset (e.g., coco_2017_train).\n            If provided, this function will also put \"thing_classes\" into\n            the metadata associated with this dataset.\n\n    Returns:\n        list[dict]: a list of dicts in Detectron2 standard format. (See\n        `Using Custom Datasets </tutorials/datasets.html>`_ )\n\n    Notes:\n        1. 
This function does not read the image files.\n           The results do not have the \"image\" field.\n    \"\"\"\n    from pycocotools.coco import COCO\n\n    timer = Timer()\n    json_file = PathManager.get_local_path(json_file)\n    with contextlib.redirect_stdout(io.StringIO()):\n        coco_api = COCO(json_file)\n    if timer.seconds() > 1:\n        logger.info(\"Loading {} takes {:.2f} seconds.\".format(json_file, timer.seconds()))\n\n    id_map = None\n    if dataset_name is not None:\n        meta = MetadataCatalog.get(dataset_name)\n        cat_ids = sorted(coco_api.getCatIds())\n        association_ids = coco_api.getRelaIds()\n        cats = coco_api.loadCats(cat_ids)\n        # association_cats = coco_api.loadRela(association_ids)\n        association = coco_api.loadRela(association_ids)\n        # The categories in a custom json file may not be sorted.\n        thing_classes = [c[\"name\"] for c in sorted(cats, key=lambda x: x[\"id\"])]\n        association_classes = [c[\"name\"] for c in sorted(association, key=lambda x: x[\"id\"])]\n        meta.association_classes = association_classes\n        meta.thing_classes = thing_classes\n        meta.keypoint_names = ['Object','Shadow']\n        meta.keypoint_flip_map = ({'Object':'Shadow'})\n        meta.keypoint_connection_rules = [('Object','Shadow',(255,255,255))]\n        # meta\n\n        # In COCO, certain category ids are artificially removed,\n        # and by convention they are always ignored.\n        # We deal with COCO's id issue and translate\n        # the category ids to contiguous ids in [0, 80).\n\n        # It works by looking at the \"categories\" field in the json, therefore\n        # if users' own json also have incontiguous ids, we'll\n        # apply this mapping as well but print a warning.\n        if not (min(cat_ids) == 1 and max(cat_ids) == len(cat_ids)):\n            if \"coco\" not in dataset_name:\n                logger.warning(\n                    \"\"\"\nCategory ids in 
annotations are not in [1, #categories]! We'll apply a mapping for you.\n\"\"\"\n                )\n        id_map = {v: i for i, v in enumerate(cat_ids)}\n        association_id_map = {v:i for i,v in enumerate(association_ids)}\n        meta.association_dataset_id_to_contiguous_id = association_id_map\n        meta.thing_dataset_id_to_contiguous_id = id_map\n\n    # sort indices for reproducible results\n    img_ids = sorted(list(coco_api.imgs.keys()))\n    # imgs is a list of dicts, each looks something like:\n    # {'license': 4,\n    #  'url': 'http://farm6.staticflickr.com/5454/9413846304_881d5e5c3b_z.jpg',\n    #  'file_name': 'COCO_val2014_000000001268.jpg',\n    #  'height': 427,\n    #  'width': 640,\n    #  'date_captured': '2013-11-17 05:57:24',\n    #  'id': 1268}\n    imgs = coco_api.loadImgs(img_ids)\n    # anns is a list[list[dict]], where each dict is an annotation\n    # record for an object. The inner list enumerates the objects in an image\n    # and the outer list enumerates over images. 
Example of anns[0]:\n    # [{'segmentation': [[192.81,\n    #     247.09,\n    #     ...\n    #     219.03,\n    #     249.06]],\n    #   'area': 1035.749,\n    #   'iscrowd': 0,\n    #   'image_id': 1268,\n    #   'bbox': [192.81, 224.8, 74.73, 33.43],\n    #   'category_id': 16,\n    #   'id': 42986},\n    #  ...]\n    anns = [coco_api.imgToAnns[img_id] for img_id in img_ids]\n    assoAnns = [coco_api.imgToRelaAnns[img_id] for img_id in img_ids]\n\n    if \"minival\" not in json_file:\n        # The popular valminusminival & minival annotations for COCO2014 contain this bug.\n        # However the ratio of buggy annotations there is tiny and does not affect accuracy.\n        # Therefore we explicitly white-list them.\n        ann_ids = [ann[\"id\"] for anns_per_image in anns for ann in anns_per_image]\n        asso_ann_ids = [assoAnn[\"id\"] for anns_per_image in assoAnns for assoAnn in anns_per_image ]\n        assert len(set(ann_ids)) == len(ann_ids), \"Annotation ids in '{}' are not unique!\".format(\n            json_file\n        )\n\n    imgs_anns = list(zip(imgs, anns))\n    imgs_asso_anns = list(zip(imgs,assoAnns))\n    logger.info(\"Loaded {} images in COCO format from {}\".format(len(imgs_anns), json_file))\n\n    dataset_dicts = []\n\n    # TODO: refactoring candidate, one should not have to alter DB reader\n    # every time new data type is added\n    DENSEPOSE_KEYS = [\"dp_x\", \"dp_y\", \"dp_I\", \"dp_U\", \"dp_V\", \"dp_masks\"]\n\n    num_instances_without_valid_segmentation = 0\n\n    for (img_dict, anno_dict_list),(_,asso_anno_dict_list) in zip(imgs_anns,imgs_asso_anns):\n        record = {}\n        record[\"file_name\"] = os.path.join(image_root, img_dict[\"file_name\"])\n        record[\"height\"] = img_dict[\"height\"]\n        record[\"width\"] = img_dict[\"width\"]\n        image_id = record[\"image_id\"] = img_dict[\"id\"]\n\n        objs = []\n        for anno in anno_dict_list:\n            # Check that the image_id in this annotation 
is the same as\n            # the image_id we're looking at.\n            # This fails only when the data parsing logic or the annotation file is buggy.\n\n            # The original COCO valminusminival2014 & minival2014 annotation files\n            # actually contains bugs that, together with certain ways of using COCO API,\n            # can trigger this assertion.\n            assert anno[\"image_id\"] == image_id\n\n            assert anno.get(\"ignore\", 0) == 0\n\n            obj = {\n                field: anno[field]\n                for field in [\"iscrowd\", \"bbox\",\"keypoints\", \"category_id\"] + DENSEPOSE_KEYS\n                if field in anno\n            }\n\n            segm = anno.get(\"segmentation\", None)\n            if segm:  # either list[list[float]] or dict(RLE)\n                if not isinstance(segm, dict):\n                    # filter out invalid polygons (< 3 points)\n                    segm = [poly for poly in segm if len(poly) % 2 == 0 and len(poly) >= 6]\n                    if len(segm) == 0:\n                        num_instances_without_valid_segmentation += 1\n                        continue  # ignore this instance\n                obj[\"segmentation\"] = segm\n\n            keypts = anno.get(\"keypoints\", None)\n            if keypts:  # list[int]\n                for idx, v in enumerate(keypts):\n                    if idx % 3 != 2:\n                        # COCO's segmentation coordinates are floating points in [0, H or W],\n                        # but keypoint coordinates are integers in [0, H-1 or W-1]\n                        # Therefore we assume the coordinates are \"pixel indices\" and\n                        # add 0.5 to convert to floating point coordinates.\n                        keypts[idx] = v + 0.5\n                obj[\"keypoints\"] = keypts\n\n            obj[\"bbox_mode\"] = BoxMode.XYWH_ABS\n            if id_map:\n                obj[\"category_id\"] = id_map[obj[\"category_id\"]]\n            
objs.append(obj)\n        record[\"annotations\"] = objs\n        objs = []\n        for anno in asso_anno_dict_list:\n            # Check that the image_id in this annotation is the same as\n            # the image_id we're looking at.\n            # This fails only when the data parsing logic or the annotation file is buggy.\n\n            # The original COCO valminusminival2014 & minival2014 annotation files\n            # actually contains bugs that, together with certain ways of using COCO API,\n            # can trigger this assertion.\n            assert anno[\"image_id\"] == image_id\n\n            assert anno.get(\"ignore\", 0) == 0\n\n            obj = {\n                field: anno[field]\n                for field in [\"iscrowd\", \"bbox\",'light', \"keypoints\", \"category_id\"] + DENSEPOSE_KEYS\n                if field in anno\n            }\n\n            segm = anno.get(\"segmentation\", None)\n            if segm:  # either list[list[float]] or dict(RLE)\n                if not isinstance(segm, dict):\n                    # filter out invalid polygons (< 3 points)\n                    segm = [poly for poly in segm if len(poly) % 2 == 0 and len(poly) >= 6]\n                    if len(segm) == 0:\n                        num_instances_without_valid_segmentation += 1\n                        continue  # ignore this instance\n                obj[\"segmentation\"] = segm\n\n            keypts = anno.get(\"keypoints\", None)\n            if keypts:  # list[int]\n                for idx, v in enumerate(keypts):\n                    if idx % 3 != 2:\n                        # COCO's segmentation coordinates are floating points in [0, H or W],\n                        # but keypoint coordinates are integers in [0, H-1 or W-1]\n                        # Therefore we assume the coordinates are \"pixel indices\" and\n                        # add 0.5 to convert to floating point coordinates.\n                        keypts[idx] = v + 0.5\n                
obj[\"keypoints\"] = keypts\n\n            obj[\"bbox_mode\"] = BoxMode.XYWH_ABS\n            if id_map:\n                obj[\"category_id\"] = id_map[obj[\"category_id\"]]\n            objs.append(obj)\n        record[\"association_anno\"] = objs\n        \n\n        dataset_dicts.append(record)\n\n    if num_instances_without_valid_segmentation > 0:\n        logger.warn(\n            \"Filtered out {} instances without valid segmentation. \"\n            \"There might be issues in your dataset generation process.\".format(\n                num_instances_without_valid_segmentation\n            )\n        )\n    return dataset_dicts\n\n\n# TODO this function is not specific to COCO, except for the \"image_id\" logic.\ndef load_sem_seg(gt_root, image_root, gt_ext=\"png\", image_ext=\"jpg\"):\n    \"\"\"\n    Load semantic segmentation datasets. All files under \"gt_root\" with \"gt_ext\" extension are\n    treated as ground truth annotations and all files under \"image_root\" with \"image_ext\" extension\n    as input images. Ground truth and input images are matched using file paths relative to\n    \"gt_root\" and \"image_root\" respectively without taking into account file extensions.\n\n    Args:\n        gt_root (str): full path to ground truth semantic segmentation files. Semantic segmentation\n            annotations are stored as images with integer values in pixels that represent\n            corresponding semantic labels.\n        image_root (str): the directory where the input images are.\n        gt_ext (str): file extension for ground truth annotations.\n        image_ext (str): file extension for input images.\n\n    Returns:\n        list[dict]:\n            a list of dicts in detectron2 standard format without instance-level\n            annotation.\n\n    Notes:\n        1. 
This function does not read the image and ground truth files.\n           The results do not have the \"image\" and \"sem_seg\" fields.\n    \"\"\"\n\n    # We match input images with ground truth based on their relative filepaths (without file\n    # extensions) starting from 'image_root' and 'gt_root' respectively. COCO API works with integer\n    # IDs, hence, we try to convert these paths to int if possible.\n    def file2id(folder_path, file_path):\n        # TODO id is not used.\n        # extract relative path starting from `folder_path`\n        image_id = os.path.normpath(os.path.relpath(file_path, start=folder_path))\n        # remove file extension\n        image_id = os.path.splitext(image_id)[0]\n        try:\n            image_id = int(image_id)\n        except ValueError:\n            pass\n        return image_id\n\n    input_files = sorted(\n        (os.path.join(image_root, f) for f in PathManager.ls(image_root) if f.endswith(image_ext)),\n        key=lambda file_path: file2id(image_root, file_path),\n    )\n    gt_files = sorted(\n        (os.path.join(gt_root, f) for f in PathManager.ls(gt_root) if f.endswith(gt_ext)),\n        key=lambda file_path: file2id(gt_root, file_path),\n    )\n\n    assert len(gt_files) > 0, \"No annotations found in {}.\".format(gt_root)\n\n    # Use the intersection, so that val2017_100 annotations can run smoothly with val2017 images\n    if len(input_files) != len(gt_files):\n        logger.warn(\n            \"Directory {} and {} has {} and {} files, respectively.\".format(\n                image_root, gt_root, len(input_files), len(gt_files)\n            )\n        )\n        input_basenames = [os.path.basename(f)[: -len(image_ext)] for f in input_files]\n        gt_basenames = [os.path.basename(f)[: -len(gt_ext)] for f in gt_files]\n        intersect = list(set(input_basenames) & set(gt_basenames))\n        # sort, otherwise each worker may obtain a list[dict] in different order\n        intersect = 
sorted(intersect)\n        logger.warn(\"Will use their intersection of {} files.\".format(len(intersect)))\n        input_files = [os.path.join(image_root, f + image_ext) for f in intersect]\n        gt_files = [os.path.join(gt_root, f + gt_ext) for f in intersect]\n\n    logger.info(\n        \"Loaded {} images with semantic segmentation from {}\".format(len(input_files), image_root)\n    )\n\n    dataset_dicts = []\n    for (img_path, gt_path) in zip(input_files, gt_files):\n        record = {}\n        record[\"file_name\"] = img_path\n        record[\"sem_seg_file_name\"] = gt_path\n        record[\"image_id\"] = file2id(image_root, img_path)\n        assert record[\"image_id\"] == file2id(\n            gt_root, gt_path\n        ), \"there is no ground truth for {}\".format(img_path)\n        with PathManager.open(gt_path, \"rb\") as f:\n            img = Image.open(f)\n            w, h = img.size\n        record[\"height\"] = h\n        record[\"width\"] = w\n        dataset_dicts.append(record)\n\n    return dataset_dicts\n\n\nif __name__ == \"__main__\":\n    \"\"\"\n    Test the COCO json dataset loader.\n\n    Usage:\n        python -m detectron2.data.datasets.coco \\\n            path/to/json path/to/image_root dataset_name\n\n        \"dataset_name\" can be \"coco_2014_minival_100\", or other\n        pre-registered ones\n    \"\"\"\n    import numpy as np\n    from detectron2.utils.logger import setup_logger\n    from detectron2.utils.visualizer import Visualizer\n    import detectron2.data.datasets  # noqa # add pre-defined metadata\n    import sys\n\n    logger = setup_logger(name=__name__)\n    assert sys.argv[3] in DatasetCatalog.list()\n    meta = MetadataCatalog.get(sys.argv[3])\n\n    dicts = load_coco_json(sys.argv[1], sys.argv[2], sys.argv[3])\n    logger.info(\"Done loading {} samples.\".format(len(dicts)))\n\n    dirname = \"coco-data-vis\"\n    os.makedirs(dirname, exist_ok=True)\n    for d in dicts:\n        img = 
np.array(Image.open(d[\"file_name\"]))\n        visualizer = Visualizer(img, metadata=meta)\n        vis = visualizer.draw_dataset_dict(d)\n        fpath = os.path.join(dirname, os.path.basename(d[\"file_name\"]))\n        vis.save(fpath)\n"
  },
  {
    "path": "detectron2/data/datasets/lvis.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport logging\nimport os\n\nfrom fvcore.common.timer import Timer\nfrom detectron2.structures import BoxMode\nfrom fvcore.common.file_io import PathManager\nfrom detectron2.data import DatasetCatalog, MetadataCatalog\n\nfrom .lvis_v0_5_categories import LVIS_CATEGORIES\n\n\"\"\"\nThis file contains functions to parse LVIS-format annotations into dicts in the\n\"Detectron2 format\".\n\"\"\"\n\nlogger = logging.getLogger(__name__)\n\n__all__ = [\"load_lvis_json\", \"register_lvis_instances\", \"get_lvis_instances_meta\"]\n\n\ndef register_lvis_instances(name, metadata, json_file, image_root):\n    \"\"\"\n    Register a dataset in LVIS's json annotation format for instance detection and segmentation.\n\n    Args:\n        name (str): a name that identifies the dataset, e.g. \"lvis_v0.5_train\".\n        metadata (dict): extra metadata associated with this dataset. It can be an empty dict.\n        json_file (str): path to the json instance annotation file.\n        image_root (str): directory which contains all the images.\n    \"\"\"\n    DatasetCatalog.register(name, lambda: load_lvis_json(json_file, image_root, name))\n    MetadataCatalog.get(name).set(\n        json_file=json_file, image_root=image_root, evaluator_type=\"lvis\", **metadata\n    )\n\n\ndef load_lvis_json(json_file, image_root, dataset_name=None):\n    \"\"\"\n    Load a json file in LVIS's annotation format.\n\n    Args:\n        json_file (str): full path to the LVIS json annotation file.\n        image_root (str): the directory where the images in this json file exists.\n        dataset_name (str): the name of the dataset (e.g., \"lvis_v0.5_train\").\n            If provided, this function will put \"thing_classes\" into the metadata\n            associated with this dataset.\n\n    Returns:\n        list[dict]: a list of dicts in Detectron2 standard format. 
(See\n        `Using Custom Datasets </tutorials/datasets.html>`_ )\n\n    Notes:\n        1. This function does not read the image files.\n           The results do not have the \"image\" field.\n    \"\"\"\n    from lvis import LVIS\n\n    json_file = PathManager.get_local_path(json_file)\n\n    timer = Timer()\n    lvis_api = LVIS(json_file)\n    if timer.seconds() > 1:\n        logger.info(\"Loading {} takes {:.2f} seconds.\".format(json_file, timer.seconds()))\n\n    if dataset_name is not None:\n        meta = get_lvis_instances_meta(dataset_name)\n        MetadataCatalog.get(dataset_name).set(**meta)\n\n    # sort indices for reproducible results\n    img_ids = sorted(list(lvis_api.imgs.keys()))\n    # imgs is a list of dicts, each looks something like:\n    # {'license': 4,\n    #  'url': 'http://farm6.staticflickr.com/5454/9413846304_881d5e5c3b_z.jpg',\n    #  'file_name': 'COCO_val2014_000000001268.jpg',\n    #  'height': 427,\n    #  'width': 640,\n    #  'date_captured': '2013-11-17 05:57:24',\n    #  'id': 1268}\n    imgs = lvis_api.load_imgs(img_ids)\n    # anns is a list[list[dict]], where each dict is an annotation\n    # record for an object. The inner list enumerates the objects in an image\n    # and the outer list enumerates over images. 
Example of anns[0]:\n    # [{'segmentation': [[192.81,\n    #     247.09,\n    #     ...\n    #     219.03,\n    #     249.06]],\n    #   'area': 1035.749,\n    #   'image_id': 1268,\n    #   'bbox': [192.81, 224.8, 74.73, 33.43],\n    #   'category_id': 16,\n    #   'id': 42986},\n    #  ...]\n    anns = [lvis_api.img_ann_map[img_id] for img_id in img_ids]\n\n    # Sanity check that each annotation has a unique id\n    ann_ids = [ann[\"id\"] for anns_per_image in anns for ann in anns_per_image]\n    assert len(set(ann_ids)) == len(ann_ids), \"Annotation ids in '{}' are not unique\".format(\n        json_file\n    )\n\n    imgs_anns = list(zip(imgs, anns))\n\n    logger.info(\"Loaded {} images in the LVIS format from {}\".format(len(imgs_anns), json_file))\n\n    dataset_dicts = []\n\n    for (img_dict, anno_dict_list) in imgs_anns:\n        record = {}\n        file_name = img_dict[\"file_name\"]\n        if img_dict[\"file_name\"].startswith(\"COCO\"):\n            # Convert form the COCO 2014 file naming convention of\n            # COCO_[train/val/test]2014_000000000000.jpg to the 2017 naming convention of\n            # 000000000000.jpg (LVIS v1 will fix this naming issue)\n            file_name = file_name[-16:]\n        record[\"file_name\"] = os.path.join(image_root, file_name)\n        record[\"height\"] = img_dict[\"height\"]\n        record[\"width\"] = img_dict[\"width\"]\n        record[\"not_exhaustive_category_ids\"] = img_dict.get(\"not_exhaustive_category_ids\", [])\n        record[\"neg_category_ids\"] = img_dict.get(\"neg_category_ids\", [])\n        image_id = record[\"image_id\"] = img_dict[\"id\"]\n\n        objs = []\n        for anno in anno_dict_list:\n            # Check that the image_id in this annotation is the same as\n            # the image_id we're looking at.\n            # This fails only when the data parsing logic or the annotation file is buggy.\n            assert anno[\"image_id\"] == image_id\n            obj = {\"bbox\": 
anno[\"bbox\"], \"bbox_mode\": BoxMode.XYWH_ABS}\n            obj[\"category_id\"] = anno[\"category_id\"] - 1  # Convert 1-indexed to 0-indexed\n            segm = anno[\"segmentation\"]  # list[list[float]]\n            # filter out invalid polygons (< 3 points)\n            valid_segm = [poly for poly in segm if len(poly) % 2 == 0 and len(poly) >= 6]\n            assert len(segm) == len(\n                valid_segm\n            ), \"Annotation contains an invalid polygon with < 3 points\"\n            assert len(segm) > 0\n            obj[\"segmentation\"] = segm\n            objs.append(obj)\n        record[\"annotations\"] = objs\n        dataset_dicts.append(record)\n\n    return dataset_dicts\n\n\ndef get_lvis_instances_meta(dataset_name):\n    \"\"\"\n    Load LVIS metadata.\n\n    Args:\n        dataset_name (str): LVIS dataset name without the split name (e.g., \"lvis_v0.5\").\n\n    Returns:\n        dict: LVIS metadata with keys: thing_classes\n    \"\"\"\n    if \"v0.5\" in dataset_name:\n        return _get_lvis_instances_meta_v0_5()\n    # There will be a v1 in the future\n    # elif dataset_name == \"lvis_v1\":\n    #   return get_lvis_instances_meta_v1()\n    raise ValueError(\"No built-in metadata for dataset {}\".format(dataset_name))\n\n\ndef _get_lvis_instances_meta_v0_5():\n    assert len(LVIS_CATEGORIES) == 1230\n    cat_ids = [k[\"id\"] for k in LVIS_CATEGORIES]\n    assert min(cat_ids) == 1 and max(cat_ids) == len(\n        cat_ids\n    ), \"Category ids are not in [1, #categories], as expected\"\n    # Ensure that the category list is sorted by id\n    lvis_categories = [k for k in sorted(LVIS_CATEGORIES, key=lambda x: x[\"id\"])]\n    thing_classes = [k[\"synonyms\"][0] for k in lvis_categories]\n    meta = {\"thing_classes\": thing_classes}\n    return meta\n\n\nif __name__ == \"__main__\":\n    \"\"\"\n    Test the LVIS json dataset loader.\n\n    Usage:\n        python -m detectron2.data.datasets.lvis \\\n            path/to/json 
path/to/image_root dataset_name vis_limit\n    \"\"\"\n    import sys\n    import numpy as np\n    from detectron2.utils.logger import setup_logger\n    from PIL import Image\n    import detectron2.data.datasets  # noqa # add pre-defined metadata\n    from detectron2.utils.visualizer import Visualizer\n\n    logger = setup_logger(name=__name__)\n    meta = MetadataCatalog.get(sys.argv[3])\n\n    dicts = load_lvis_json(sys.argv[1], sys.argv[2], sys.argv[3])\n    logger.info(\"Done loading {} samples.\".format(len(dicts)))\n\n    dirname = \"lvis-data-vis\"\n    os.makedirs(dirname, exist_ok=True)\n    for d in dicts[: int(sys.argv[4])]:\n        img = np.array(Image.open(d[\"file_name\"]))\n        visualizer = Visualizer(img, metadata=meta)\n        vis = visualizer.draw_dataset_dict(d)\n        fpath = os.path.join(dirname, os.path.basename(d[\"file_name\"]))\n        vis.save(fpath)\n"
  },
  {
    "path": "detectron2/data/datasets/lvis_v0_5_categories.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\n# Autogen with\n# with open(\"lvis_v0.5_val.json\", \"r\") as f:\n#     a = json.load(f)\n# c = a[\"categories\"]\n# for x in c:\n#     del x[\"image_count\"]\n#     del x[\"instance_count\"]\n# LVIS_CATEGORIES = repr(c) + \"  # noqa\"\n\n# fmt: off\nLVIS_CATEGORIES = [{'frequency': 'r', 'id': 1, 'synset': 'acorn.n.01', 'synonyms': ['acorn'], 'def': 'nut from an oak tree', 'name': 'acorn'}, {'frequency': 'c', 'id': 2, 'synset': 'aerosol.n.02', 'synonyms': ['aerosol_can', 'spray_can'], 'def': 'a dispenser that holds a substance under pressure', 'name': 'aerosol_can'}, {'frequency': 'f', 'id': 3, 'synset': 'air_conditioner.n.01', 'synonyms': ['air_conditioner'], 'def': 'a machine that keeps air cool and dry', 'name': 'air_conditioner'}, {'frequency': 'f', 'id': 4, 'synset': 'airplane.n.01', 'synonyms': ['airplane', 'aeroplane'], 'def': 'an aircraft that has a fixed wing and is powered by propellers or jets', 'name': 'airplane'}, {'frequency': 'c', 'id': 5, 'synset': 'alarm_clock.n.01', 'synonyms': ['alarm_clock'], 'def': 'a clock that wakes a sleeper at some preset time', 'name': 'alarm_clock'}, {'frequency': 'c', 'id': 6, 'synset': 'alcohol.n.01', 'synonyms': ['alcohol', 'alcoholic_beverage'], 'def': 'a liquor or brew containing alcohol as the active agent', 'name': 'alcohol'}, {'frequency': 'r', 'id': 7, 'synset': 'alligator.n.02', 'synonyms': ['alligator', 'gator'], 'def': 'amphibious reptiles related to crocodiles but with shorter broader snouts', 'name': 'alligator'}, {'frequency': 'c', 'id': 8, 'synset': 'almond.n.02', 'synonyms': ['almond'], 'def': 'oval-shaped edible seed of the almond tree', 'name': 'almond'}, {'frequency': 'c', 'id': 9, 'synset': 'ambulance.n.01', 'synonyms': ['ambulance'], 'def': 'a vehicle that takes people to and from hospitals', 'name': 'ambulance'}, {'frequency': 'r', 'id': 10, 'synset': 'amplifier.n.01', 'synonyms': ['amplifier'], 'def': 
'electronic equipment that increases strength of signals', 'name': 'amplifier'}, {'frequency': 'c', 'id': 11, 'synset': 'anklet.n.03', 'synonyms': ['anklet', 'ankle_bracelet'], 'def': 'an ornament worn around the ankle', 'name': 'anklet'}, {'frequency': 'f', 'id': 12, 'synset': 'antenna.n.01', 'synonyms': ['antenna', 'aerial', 'transmitting_aerial'], 'def': 'an electrical device that sends or receives radio or television signals', 'name': 'antenna'}, {'frequency': 'f', 'id': 13, 'synset': 'apple.n.01', 'synonyms': ['apple'], 'def': 'fruit with red or yellow or green skin and sweet to tart crisp whitish flesh', 'name': 'apple'}, {'frequency': 'r', 'id': 14, 'synset': 'apple_juice.n.01', 'synonyms': ['apple_juice'], 'def': 'the juice of apples', 'name': 'apple_juice'}, {'frequency': 'r', 'id': 15, 'synset': 'applesauce.n.01', 'synonyms': ['applesauce'], 'def': 'puree of stewed apples usually sweetened and spiced', 'name': 'applesauce'}, {'frequency': 'r', 'id': 16, 'synset': 'apricot.n.02', 'synonyms': ['apricot'], 'def': 'downy yellow to rosy-colored fruit resembling a small peach', 'name': 'apricot'}, {'frequency': 'f', 'id': 17, 'synset': 'apron.n.01', 'synonyms': ['apron'], 'def': 'a garment of cloth that is tied about the waist and worn to protect clothing', 'name': 'apron'}, {'frequency': 'c', 'id': 18, 'synset': 'aquarium.n.01', 'synonyms': ['aquarium', 'fish_tank'], 'def': 'a tank/pool/bowl filled with water for keeping live fish and underwater animals', 'name': 'aquarium'}, {'frequency': 'c', 'id': 19, 'synset': 'armband.n.02', 'synonyms': ['armband'], 'def': 'a band worn around the upper arm', 'name': 'armband'}, {'frequency': 'f', 'id': 20, 'synset': 'armchair.n.01', 'synonyms': ['armchair'], 'def': 'chair with a support on each side for arms', 'name': 'armchair'}, {'frequency': 'r', 'id': 21, 'synset': 'armoire.n.01', 'synonyms': ['armoire'], 'def': 'a large wardrobe or cabinet', 'name': 'armoire'}, {'frequency': 'r', 'id': 22, 'synset': 'armor.n.01', 
'synonyms': ['armor', 'armour'], 'def': 'protective covering made of metal and used in combat', 'name': 'armor'}, {'frequency': 'c', 'id': 23, 'synset': 'artichoke.n.02', 'synonyms': ['artichoke'], 'def': 'a thistlelike flower head with edible fleshy leaves and heart', 'name': 'artichoke'}, {'frequency': 'f', 'id': 24, 'synset': 'ashcan.n.01', 'synonyms': ['trash_can', 'garbage_can', 'wastebin', 'dustbin', 'trash_barrel', 'trash_bin'], 'def': 'a bin that holds rubbish until it is collected', 'name': 'trash_can'}, {'frequency': 'c', 'id': 25, 'synset': 'ashtray.n.01', 'synonyms': ['ashtray'], 'def': \"a receptacle for the ash from smokers' cigars or cigarettes\", 'name': 'ashtray'}, {'frequency': 'c', 'id': 26, 'synset': 'asparagus.n.02', 'synonyms': ['asparagus'], 'def': 'edible young shoots of the asparagus plant', 'name': 'asparagus'}, {'frequency': 'c', 'id': 27, 'synset': 'atomizer.n.01', 'synonyms': ['atomizer', 'atomiser', 'spray', 'sprayer', 'nebulizer', 'nebuliser'], 'def': 'a dispenser that turns a liquid (such as perfume) into a fine mist', 'name': 'atomizer'}, {'frequency': 'c', 'id': 28, 'synset': 'avocado.n.01', 'synonyms': ['avocado'], 'def': 'a pear-shaped fruit with green or blackish skin and rich yellowish pulp enclosing a single large seed', 'name': 'avocado'}, {'frequency': 'c', 'id': 29, 'synset': 'award.n.02', 'synonyms': ['award', 'accolade'], 'def': 'a tangible symbol signifying approval or distinction', 'name': 'award'}, {'frequency': 'f', 'id': 30, 'synset': 'awning.n.01', 'synonyms': ['awning'], 'def': 'a canopy made of canvas to shelter people or things from rain or sun', 'name': 'awning'}, {'frequency': 'r', 'id': 31, 'synset': 'ax.n.01', 'synonyms': ['ax', 'axe'], 'def': 'an edge tool with a heavy bladed head mounted across a handle', 'name': 'ax'}, {'frequency': 'f', 'id': 32, 'synset': 'baby_buggy.n.01', 'synonyms': ['baby_buggy', 'baby_carriage', 'perambulator', 'pram', 'stroller'], 'def': 'a small vehicle with four wheels in which a 
baby or child is pushed around', 'name': 'baby_buggy'}, {'frequency': 'c', 'id': 33, 'synset': 'backboard.n.01', 'synonyms': ['basketball_backboard'], 'def': 'a raised vertical board with basket attached; used to play basketball', 'name': 'basketball_backboard'}, {'frequency': 'f', 'id': 34, 'synset': 'backpack.n.01', 'synonyms': ['backpack', 'knapsack', 'packsack', 'rucksack', 'haversack'], 'def': 'a bag carried by a strap on your back or shoulder', 'name': 'backpack'}, {'frequency': 'f', 'id': 35, 'synset': 'bag.n.04', 'synonyms': ['handbag', 'purse', 'pocketbook'], 'def': 'a container used for carrying money and small personal items or accessories', 'name': 'handbag'}, {'frequency': 'f', 'id': 36, 'synset': 'bag.n.06', 'synonyms': ['suitcase', 'baggage', 'luggage'], 'def': 'cases used to carry belongings when traveling', 'name': 'suitcase'}, {'frequency': 'c', 'id': 37, 'synset': 'bagel.n.01', 'synonyms': ['bagel', 'beigel'], 'def': 'glazed yeast-raised doughnut-shaped roll with hard crust', 'name': 'bagel'}, {'frequency': 'r', 'id': 38, 'synset': 'bagpipe.n.01', 'synonyms': ['bagpipe'], 'def': 'a tubular wind instrument; the player blows air into a bag and squeezes it out', 'name': 'bagpipe'}, {'frequency': 'r', 'id': 39, 'synset': 'baguet.n.01', 'synonyms': ['baguet', 'baguette'], 'def': 'narrow French stick loaf', 'name': 'baguet'}, {'frequency': 'r', 'id': 40, 'synset': 'bait.n.02', 'synonyms': ['bait', 'lure'], 'def': 'something used to lure fish or other animals into danger so they can be trapped or killed', 'name': 'bait'}, {'frequency': 'f', 'id': 41, 'synset': 'ball.n.06', 'synonyms': ['ball'], 'def': 'a spherical object used as a plaything', 'name': 'ball'}, {'frequency': 'r', 'id': 42, 'synset': 'ballet_skirt.n.01', 'synonyms': ['ballet_skirt', 'tutu'], 'def': 'very short skirt worn by ballerinas', 'name': 'ballet_skirt'}, {'frequency': 'f', 'id': 43, 'synset': 'balloon.n.01', 'synonyms': ['balloon'], 'def': 'large tough nonrigid bag filled with gas 
or heated air', 'name': 'balloon'}, {'frequency': 'c', 'id': 44, 'synset': 'bamboo.n.02', 'synonyms': ['bamboo'], 'def': 'woody tropical grass having hollow woody stems', 'name': 'bamboo'}, {'frequency': 'f', 'id': 45, 'synset': 'banana.n.02', 'synonyms': ['banana'], 'def': 'elongated crescent-shaped yellow fruit with soft sweet flesh', 'name': 'banana'}, {'frequency': 'r', 'id': 46, 'synset': 'band_aid.n.01', 'synonyms': ['Band_Aid'], 'def': 'trade name for an adhesive bandage to cover small cuts or blisters', 'name': 'Band_Aid'}, {'frequency': 'c', 'id': 47, 'synset': 'bandage.n.01', 'synonyms': ['bandage'], 'def': 'a piece of soft material that covers and protects an injured part of the body', 'name': 'bandage'}, {'frequency': 'c', 'id': 48, 'synset': 'bandanna.n.01', 'synonyms': ['bandanna', 'bandana'], 'def': 'large and brightly colored handkerchief; often used as a neckerchief', 'name': 'bandanna'}, {'frequency': 'r', 'id': 49, 'synset': 'banjo.n.01', 'synonyms': ['banjo'], 'def': 'a stringed instrument of the guitar family with a long neck and circular body', 'name': 'banjo'}, {'frequency': 'f', 'id': 50, 'synset': 'banner.n.01', 'synonyms': ['banner', 'streamer'], 'def': 'long strip of cloth or paper used for decoration or advertising', 'name': 'banner'}, {'frequency': 'r', 'id': 51, 'synset': 'barbell.n.01', 'synonyms': ['barbell'], 'def': 'a bar to which heavy discs are attached at each end; used in weightlifting', 'name': 'barbell'}, {'frequency': 'r', 'id': 52, 'synset': 'barge.n.01', 'synonyms': ['barge'], 'def': 'a flatbottom boat for carrying heavy loads (especially on canals)', 'name': 'barge'}, {'frequency': 'f', 'id': 53, 'synset': 'barrel.n.02', 'synonyms': ['barrel', 'cask'], 'def': 'a cylindrical container that holds liquids', 'name': 'barrel'}, {'frequency': 'c', 'id': 54, 'synset': 'barrette.n.01', 'synonyms': ['barrette'], 'def': \"a pin for holding women's hair in place\", 'name': 'barrette'}, {'frequency': 'c', 'id': 55, 'synset': 
'barrow.n.03', 'synonyms': ['barrow', 'garden_cart', 'lawn_cart', 'wheelbarrow'], 'def': 'a cart for carrying small loads; has handles and one or more wheels', 'name': 'barrow'}, {'frequency': 'f', 'id': 56, 'synset': 'base.n.03', 'synonyms': ['baseball_base'], 'def': 'a place that the runner must touch before scoring', 'name': 'baseball_base'}, {'frequency': 'f', 'id': 57, 'synset': 'baseball.n.02', 'synonyms': ['baseball'], 'def': 'a ball used in playing baseball', 'name': 'baseball'}, {'frequency': 'f', 'id': 58, 'synset': 'baseball_bat.n.01', 'synonyms': ['baseball_bat'], 'def': 'an implement used in baseball by the batter', 'name': 'baseball_bat'}, {'frequency': 'f', 'id': 59, 'synset': 'baseball_cap.n.01', 'synonyms': ['baseball_cap', 'jockey_cap', 'golf_cap'], 'def': 'a cap with a bill', 'name': 'baseball_cap'}, {'frequency': 'f', 'id': 60, 'synset': 'baseball_glove.n.01', 'synonyms': ['baseball_glove', 'baseball_mitt'], 'def': 'the handwear used by fielders in playing baseball', 'name': 'baseball_glove'}, {'frequency': 'f', 'id': 61, 'synset': 'basket.n.01', 'synonyms': ['basket', 'handbasket'], 'def': 'a container that is usually woven and has handles', 'name': 'basket'}, {'frequency': 'c', 'id': 62, 'synset': 'basket.n.03', 'synonyms': ['basketball_hoop'], 'def': 'metal hoop supporting a net through which players try to throw the basketball', 'name': 'basketball_hoop'}, {'frequency': 'c', 'id': 63, 'synset': 'basketball.n.02', 'synonyms': ['basketball'], 'def': 'an inflated ball used in playing basketball', 'name': 'basketball'}, {'frequency': 'r', 'id': 64, 'synset': 'bass_horn.n.01', 'synonyms': ['bass_horn', 'sousaphone', 'tuba'], 'def': 'the lowest brass wind instrument', 'name': 'bass_horn'}, {'frequency': 'r', 'id': 65, 'synset': 'bat.n.01', 'synonyms': ['bat_(animal)'], 'def': 'nocturnal mouselike mammal with forelimbs modified to form membranous wings', 'name': 'bat_(animal)'}, {'frequency': 'f', 'id': 66, 'synset': 'bath_mat.n.01', 'synonyms': 
['bath_mat'], 'def': 'a heavy towel or mat to stand on while drying yourself after a bath', 'name': 'bath_mat'}, {'frequency': 'f', 'id': 67, 'synset': 'bath_towel.n.01', 'synonyms': ['bath_towel'], 'def': 'a large towel; to dry yourself after a bath', 'name': 'bath_towel'}, {'frequency': 'c', 'id': 68, 'synset': 'bathrobe.n.01', 'synonyms': ['bathrobe'], 'def': 'a loose-fitting robe of towelling; worn after a bath or swim', 'name': 'bathrobe'}, {'frequency': 'f', 'id': 69, 'synset': 'bathtub.n.01', 'synonyms': ['bathtub', 'bathing_tub'], 'def': 'a large open container that you fill with water and use to wash the body', 'name': 'bathtub'}, {'frequency': 'r', 'id': 70, 'synset': 'batter.n.02', 'synonyms': ['batter_(food)'], 'def': 'a liquid or semiliquid mixture, as of flour, eggs, and milk, used in cooking', 'name': 'batter_(food)'}, {'frequency': 'c', 'id': 71, 'synset': 'battery.n.02', 'synonyms': ['battery'], 'def': 'a portable device that produces electricity', 'name': 'battery'}, {'frequency': 'r', 'id': 72, 'synset': 'beach_ball.n.01', 'synonyms': ['beachball'], 'def': 'large and light ball; for play at the seaside', 'name': 'beachball'}, {'frequency': 'c', 'id': 73, 'synset': 'bead.n.01', 'synonyms': ['bead'], 'def': 'a small ball with a hole through the middle used for ornamentation, jewellery, etc.', 'name': 'bead'}, {'frequency': 'r', 'id': 74, 'synset': 'beaker.n.01', 'synonyms': ['beaker'], 'def': 'a flatbottomed jar made of glass or plastic; used for chemistry', 'name': 'beaker'}, {'frequency': 'c', 'id': 75, 'synset': 'bean_curd.n.01', 'synonyms': ['bean_curd', 'tofu'], 'def': 'cheeselike food made of curdled soybean milk', 'name': 'bean_curd'}, {'frequency': 'c', 'id': 76, 'synset': 'beanbag.n.01', 'synonyms': ['beanbag'], 'def': 'a bag filled with dried beans or similar items; used in games or to sit on', 'name': 'beanbag'}, {'frequency': 'f', 'id': 77, 'synset': 'beanie.n.01', 'synonyms': ['beanie', 'beany'], 'def': 'a small skullcap; formerly worn 
by schoolboys and college freshmen', 'name': 'beanie'}, {'frequency': 'f', 'id': 78, 'synset': 'bear.n.01', 'synonyms': ['bear'], 'def': 'large carnivorous or omnivorous mammals with shaggy coats and claws', 'name': 'bear'}, {'frequency': 'f', 'id': 79, 'synset': 'bed.n.01', 'synonyms': ['bed'], 'def': 'a piece of furniture that provides a place to sleep', 'name': 'bed'}, {'frequency': 'c', 'id': 80, 'synset': 'bedspread.n.01', 'synonyms': ['bedspread', 'bedcover', 'bed_covering', 'counterpane', 'spread'], 'def': 'decorative cover for a bed', 'name': 'bedspread'}, {'frequency': 'f', 'id': 81, 'synset': 'beef.n.01', 'synonyms': ['cow'], 'def': 'cattle that are reared for their meat', 'name': 'cow'}, {'frequency': 'c', 'id': 82, 'synset': 'beef.n.02', 'synonyms': ['beef_(food)', 'boeuf_(food)'], 'def': 'meat from an adult domestic bovine', 'name': 'beef_(food)'}, {'frequency': 'r', 'id': 83, 'synset': 'beeper.n.01', 'synonyms': ['beeper', 'pager'], 'def': 'an device that beeps when the person carrying it is being paged', 'name': 'beeper'}, {'frequency': 'f', 'id': 84, 'synset': 'beer_bottle.n.01', 'synonyms': ['beer_bottle'], 'def': 'a bottle that holds beer', 'name': 'beer_bottle'}, {'frequency': 'c', 'id': 85, 'synset': 'beer_can.n.01', 'synonyms': ['beer_can'], 'def': 'a can that holds beer', 'name': 'beer_can'}, {'frequency': 'r', 'id': 86, 'synset': 'beetle.n.01', 'synonyms': ['beetle'], 'def': 'insect with hard wing covers', 'name': 'beetle'}, {'frequency': 'f', 'id': 87, 'synset': 'bell.n.01', 'synonyms': ['bell'], 'def': 'a hollow device made of metal that makes a ringing sound when struck', 'name': 'bell'}, {'frequency': 'f', 'id': 88, 'synset': 'bell_pepper.n.02', 'synonyms': ['bell_pepper', 'capsicum'], 'def': 'large bell-shaped sweet pepper in green or red or yellow or orange or black varieties', 'name': 'bell_pepper'}, {'frequency': 'f', 'id': 89, 'synset': 'belt.n.02', 'synonyms': ['belt'], 'def': 'a band to tie or buckle around the body (usually at the 
waist)', 'name': 'belt'}, {'frequency': 'f', 'id': 90, 'synset': 'belt_buckle.n.01', 'synonyms': ['belt_buckle'], 'def': 'the buckle used to fasten a belt', 'name': 'belt_buckle'}, {'frequency': 'f', 'id': 91, 'synset': 'bench.n.01', 'synonyms': ['bench'], 'def': 'a long seat for more than one person', 'name': 'bench'}, {'frequency': 'c', 'id': 92, 'synset': 'beret.n.01', 'synonyms': ['beret'], 'def': 'a cap with no brim or bill; made of soft cloth', 'name': 'beret'}, {'frequency': 'c', 'id': 93, 'synset': 'bib.n.02', 'synonyms': ['bib'], 'def': 'a napkin tied under the chin of a child while eating', 'name': 'bib'}, {'frequency': 'r', 'id': 94, 'synset': 'bible.n.01', 'synonyms': ['Bible'], 'def': 'the sacred writings of the Christian religions', 'name': 'Bible'}, {'frequency': 'f', 'id': 95, 'synset': 'bicycle.n.01', 'synonyms': ['bicycle', 'bike_(bicycle)'], 'def': 'a wheeled vehicle that has two wheels and is moved by foot pedals', 'name': 'bicycle'}, {'frequency': 'f', 'id': 96, 'synset': 'bill.n.09', 'synonyms': ['visor', 'vizor'], 'def': 'a brim that projects to the front to shade the eyes', 'name': 'visor'}, {'frequency': 'c', 'id': 97, 'synset': 'binder.n.03', 'synonyms': ['binder', 'ring-binder'], 'def': 'holds loose papers or magazines', 'name': 'binder'}, {'frequency': 'c', 'id': 98, 'synset': 'binoculars.n.01', 'synonyms': ['binoculars', 'field_glasses', 'opera_glasses'], 'def': 'an optical instrument designed for simultaneous use by both eyes', 'name': 'binoculars'}, {'frequency': 'f', 'id': 99, 'synset': 'bird.n.01', 'synonyms': ['bird'], 'def': 'animal characterized by feathers and wings', 'name': 'bird'}, {'frequency': 'r', 'id': 100, 'synset': 'bird_feeder.n.01', 'synonyms': ['birdfeeder'], 'def': 'an outdoor device that supplies food for wild birds', 'name': 'birdfeeder'}, {'frequency': 'r', 'id': 101, 'synset': 'birdbath.n.01', 'synonyms': ['birdbath'], 'def': 'an ornamental basin (usually in a garden) for birds to bathe in', 'name': 'birdbath'}, 
{'frequency': 'c', 'id': 102, 'synset': 'birdcage.n.01', 'synonyms': ['birdcage'], 'def': 'a cage in which a bird can be kept', 'name': 'birdcage'}, {'frequency': 'c', 'id': 103, 'synset': 'birdhouse.n.01', 'synonyms': ['birdhouse'], 'def': 'a shelter for birds', 'name': 'birdhouse'}, {'frequency': 'f', 'id': 104, 'synset': 'birthday_cake.n.01', 'synonyms': ['birthday_cake'], 'def': 'decorated cake served at a birthday party', 'name': 'birthday_cake'}, {'frequency': 'r', 'id': 105, 'synset': 'birthday_card.n.01', 'synonyms': ['birthday_card'], 'def': 'a card expressing a birthday greeting', 'name': 'birthday_card'}, {'frequency': 'r', 'id': 106, 'synset': 'biscuit.n.01', 'synonyms': ['biscuit_(bread)'], 'def': 'small round bread leavened with baking-powder or soda', 'name': 'biscuit_(bread)'}, {'frequency': 'r', 'id': 107, 'synset': 'black_flag.n.01', 'synonyms': ['pirate_flag'], 'def': 'a flag usually bearing a white skull and crossbones on a black background', 'name': 'pirate_flag'}, {'frequency': 'c', 'id': 108, 'synset': 'black_sheep.n.02', 'synonyms': ['black_sheep'], 'def': 'sheep with a black coat', 'name': 'black_sheep'}, {'frequency': 'c', 'id': 109, 'synset': 'blackboard.n.01', 'synonyms': ['blackboard', 'chalkboard'], 'def': 'sheet of slate; for writing with chalk', 'name': 'blackboard'}, {'frequency': 'f', 'id': 110, 'synset': 'blanket.n.01', 'synonyms': ['blanket'], 'def': 'bedding that keeps a person warm in bed', 'name': 'blanket'}, {'frequency': 'c', 'id': 111, 'synset': 'blazer.n.01', 'synonyms': ['blazer', 'sport_jacket', 'sport_coat', 'sports_jacket', 'sports_coat'], 'def': 'lightweight jacket; often striped in the colors of a club or school', 'name': 'blazer'}, {'frequency': 'f', 'id': 112, 'synset': 'blender.n.01', 'synonyms': ['blender', 'liquidizer', 'liquidiser'], 'def': 'an electrically powered mixer that mix or chop or liquefy foods', 'name': 'blender'}, {'frequency': 'r', 'id': 113, 'synset': 'blimp.n.02', 'synonyms': ['blimp'], 'def': 'a 
small nonrigid airship used for observation or as a barrage balloon', 'name': 'blimp'}, {'frequency': 'c', 'id': 114, 'synset': 'blinker.n.01', 'synonyms': ['blinker', 'flasher'], 'def': 'a light that flashes on and off; used as a signal or to send messages', 'name': 'blinker'}, {'frequency': 'c', 'id': 115, 'synset': 'blueberry.n.02', 'synonyms': ['blueberry'], 'def': 'sweet edible dark-blue berries of blueberry plants', 'name': 'blueberry'}, {'frequency': 'r', 'id': 116, 'synset': 'boar.n.02', 'synonyms': ['boar'], 'def': 'an uncastrated male hog', 'name': 'boar'}, {'frequency': 'r', 'id': 117, 'synset': 'board.n.09', 'synonyms': ['gameboard'], 'def': 'a flat portable surface (usually rectangular) designed for board games', 'name': 'gameboard'}, {'frequency': 'f', 'id': 118, 'synset': 'boat.n.01', 'synonyms': ['boat', 'ship_(boat)'], 'def': 'a vessel for travel on water', 'name': 'boat'}, {'frequency': 'c', 'id': 119, 'synset': 'bobbin.n.01', 'synonyms': ['bobbin', 'spool', 'reel'], 'def': 'a thing around which thread/tape/film or other flexible materials can be wound', 'name': 'bobbin'}, {'frequency': 'r', 'id': 120, 'synset': 'bobby_pin.n.01', 'synonyms': ['bobby_pin', 'hairgrip'], 'def': 'a flat wire hairpin used to hold bobbed hair in place', 'name': 'bobby_pin'}, {'frequency': 'c', 'id': 121, 'synset': 'boiled_egg.n.01', 'synonyms': ['boiled_egg', 'coddled_egg'], 'def': 'egg cooked briefly in the shell in gently boiling water', 'name': 'boiled_egg'}, {'frequency': 'r', 'id': 122, 'synset': 'bolo_tie.n.01', 'synonyms': ['bolo_tie', 'bolo', 'bola_tie', 'bola'], 'def': 'a cord fastened around the neck with an ornamental clasp and worn as a necktie', 'name': 'bolo_tie'}, {'frequency': 'c', 'id': 123, 'synset': 'bolt.n.03', 'synonyms': ['deadbolt'], 'def': 'the part of a lock that is engaged or withdrawn with a key', 'name': 'deadbolt'}, {'frequency': 'f', 'id': 124, 'synset': 'bolt.n.06', 'synonyms': ['bolt'], 'def': 'a screw that screws into a nut to form a 
fastener', 'name': 'bolt'}, {'frequency': 'r', 'id': 125, 'synset': 'bonnet.n.01', 'synonyms': ['bonnet'], 'def': 'a hat tied under the chin', 'name': 'bonnet'}, {'frequency': 'f', 'id': 126, 'synset': 'book.n.01', 'synonyms': ['book'], 'def': 'a written work or composition that has been published', 'name': 'book'}, {'frequency': 'r', 'id': 127, 'synset': 'book_bag.n.01', 'synonyms': ['book_bag'], 'def': 'a bag in which students carry their books', 'name': 'book_bag'}, {'frequency': 'c', 'id': 128, 'synset': 'bookcase.n.01', 'synonyms': ['bookcase'], 'def': 'a piece of furniture with shelves for storing books', 'name': 'bookcase'}, {'frequency': 'c', 'id': 129, 'synset': 'booklet.n.01', 'synonyms': ['booklet', 'brochure', 'leaflet', 'pamphlet'], 'def': 'a small book usually having a paper cover', 'name': 'booklet'}, {'frequency': 'r', 'id': 130, 'synset': 'bookmark.n.01', 'synonyms': ['bookmark', 'bookmarker'], 'def': 'a marker (a piece of paper or ribbon) placed between the pages of a book', 'name': 'bookmark'}, {'frequency': 'r', 'id': 131, 'synset': 'boom.n.04', 'synonyms': ['boom_microphone', 'microphone_boom'], 'def': 'a pole carrying an overhead microphone projected over a film or tv set', 'name': 'boom_microphone'}, {'frequency': 'f', 'id': 132, 'synset': 'boot.n.01', 'synonyms': ['boot'], 'def': 'footwear that covers the whole foot and lower leg', 'name': 'boot'}, {'frequency': 'f', 'id': 133, 'synset': 'bottle.n.01', 'synonyms': ['bottle'], 'def': 'a glass or plastic vessel used for storing drinks or other liquids', 'name': 'bottle'}, {'frequency': 'c', 'id': 134, 'synset': 'bottle_opener.n.01', 'synonyms': ['bottle_opener'], 'def': 'an opener for removing caps or corks from bottles', 'name': 'bottle_opener'}, {'frequency': 'c', 'id': 135, 'synset': 'bouquet.n.01', 'synonyms': ['bouquet'], 'def': 'an arrangement of flowers that is usually given as a present', 'name': 'bouquet'}, {'frequency': 'r', 'id': 136, 'synset': 'bow.n.04', 'synonyms': 
['bow_(weapon)'], 'def': 'a weapon for shooting arrows', 'name': 'bow_(weapon)'}, {'frequency': 'f', 'id': 137, 'synset': 'bow.n.08', 'synonyms': ['bow_(decorative_ribbons)'], 'def': 'a decorative interlacing of ribbons', 'name': 'bow_(decorative_ribbons)'}, {'frequency': 'f', 'id': 138, 'synset': 'bow_tie.n.01', 'synonyms': ['bow-tie', 'bowtie'], 'def': \"a man's tie that ties in a bow\", 'name': 'bow-tie'}, {'frequency': 'f', 'id': 139, 'synset': 'bowl.n.03', 'synonyms': ['bowl'], 'def': 'a dish that is round and open at the top for serving foods', 'name': 'bowl'}, {'frequency': 'r', 'id': 140, 'synset': 'bowl.n.08', 'synonyms': ['pipe_bowl'], 'def': 'a small round container that is open at the top for holding tobacco', 'name': 'pipe_bowl'}, {'frequency': 'c', 'id': 141, 'synset': 'bowler_hat.n.01', 'synonyms': ['bowler_hat', 'bowler', 'derby_hat', 'derby', 'plug_hat'], 'def': 'a felt hat that is round and hard with a narrow brim', 'name': 'bowler_hat'}, {'frequency': 'r', 'id': 142, 'synset': 'bowling_ball.n.01', 'synonyms': ['bowling_ball'], 'def': 'a large ball with finger holes used in the sport of bowling', 'name': 'bowling_ball'}, {'frequency': 'r', 'id': 143, 'synset': 'bowling_pin.n.01', 'synonyms': ['bowling_pin'], 'def': 'a club-shaped wooden object used in bowling', 'name': 'bowling_pin'}, {'frequency': 'r', 'id': 144, 'synset': 'boxing_glove.n.01', 'synonyms': ['boxing_glove'], 'def': 'large glove coverings the fists of a fighter worn for the sport of boxing', 'name': 'boxing_glove'}, {'frequency': 'c', 'id': 145, 'synset': 'brace.n.06', 'synonyms': ['suspenders'], 'def': 'elastic straps that hold trousers up (usually used in the plural)', 'name': 'suspenders'}, {'frequency': 'f', 'id': 146, 'synset': 'bracelet.n.02', 'synonyms': ['bracelet', 'bangle'], 'def': 'jewelry worn around the wrist for decoration', 'name': 'bracelet'}, {'frequency': 'r', 'id': 147, 'synset': 'brass.n.07', 'synonyms': ['brass_plaque'], 'def': 'a memorial made of brass', 
'name': 'brass_plaque'}, {'frequency': 'c', 'id': 148, 'synset': 'brassiere.n.01', 'synonyms': ['brassiere', 'bra', 'bandeau'], 'def': 'an undergarment worn by women to support their breasts', 'name': 'brassiere'}, {'frequency': 'c', 'id': 149, 'synset': 'bread-bin.n.01', 'synonyms': ['bread-bin', 'breadbox'], 'def': 'a container used to keep bread or cake in', 'name': 'bread-bin'}, {'frequency': 'r', 'id': 150, 'synset': 'breechcloth.n.01', 'synonyms': ['breechcloth', 'breechclout', 'loincloth'], 'def': 'a garment that provides covering for the loins', 'name': 'breechcloth'}, {'frequency': 'c', 'id': 151, 'synset': 'bridal_gown.n.01', 'synonyms': ['bridal_gown', 'wedding_gown', 'wedding_dress'], 'def': 'a gown worn by the bride at a wedding', 'name': 'bridal_gown'}, {'frequency': 'c', 'id': 152, 'synset': 'briefcase.n.01', 'synonyms': ['briefcase'], 'def': 'a case with a handle; for carrying papers or files or books', 'name': 'briefcase'}, {'frequency': 'c', 'id': 153, 'synset': 'bristle_brush.n.01', 'synonyms': ['bristle_brush'], 'def': 'a brush that is made with the short stiff hairs of an animal or plant', 'name': 'bristle_brush'}, {'frequency': 'f', 'id': 154, 'synset': 'broccoli.n.01', 'synonyms': ['broccoli'], 'def': 'plant with dense clusters of tight green flower buds', 'name': 'broccoli'}, {'frequency': 'r', 'id': 155, 'synset': 'brooch.n.01', 'synonyms': ['broach'], 'def': 'a decorative pin worn by women', 'name': 'broach'}, {'frequency': 'c', 'id': 156, 'synset': 'broom.n.01', 'synonyms': ['broom'], 'def': 'bundle of straws or twigs attached to a long handle; used for cleaning', 'name': 'broom'}, {'frequency': 'c', 'id': 157, 'synset': 'brownie.n.03', 'synonyms': ['brownie'], 'def': 'square or bar of very rich chocolate cake usually with nuts', 'name': 'brownie'}, {'frequency': 'c', 'id': 158, 'synset': 'brussels_sprouts.n.01', 'synonyms': ['brussels_sprouts'], 'def': 'the small edible cabbage-like buds growing along a stalk', 'name': 
'brussels_sprouts'}, {'frequency': 'r', 'id': 159, 'synset': 'bubble_gum.n.01', 'synonyms': ['bubble_gum'], 'def': 'a kind of chewing gum that can be blown into bubbles', 'name': 'bubble_gum'}, {'frequency': 'f', 'id': 160, 'synset': 'bucket.n.01', 'synonyms': ['bucket', 'pail'], 'def': 'a roughly cylindrical vessel that is open at the top', 'name': 'bucket'}, {'frequency': 'r', 'id': 161, 'synset': 'buggy.n.01', 'synonyms': ['horse_buggy'], 'def': 'a small lightweight carriage; drawn by a single horse', 'name': 'horse_buggy'}, {'frequency': 'c', 'id': 162, 'synset': 'bull.n.11', 'synonyms': ['bull'], 'def': 'mature male cow', 'name': 'bull'}, {'frequency': 'r', 'id': 163, 'synset': 'bulldog.n.01', 'synonyms': ['bulldog'], 'def': 'a thickset short-haired dog with a large head and strong undershot lower jaw', 'name': 'bulldog'}, {'frequency': 'r', 'id': 164, 'synset': 'bulldozer.n.01', 'synonyms': ['bulldozer', 'dozer'], 'def': 'large powerful tractor; a large blade in front flattens areas of ground', 'name': 'bulldozer'}, {'frequency': 'c', 'id': 165, 'synset': 'bullet_train.n.01', 'synonyms': ['bullet_train'], 'def': 'a high-speed passenger train', 'name': 'bullet_train'}, {'frequency': 'c', 'id': 166, 'synset': 'bulletin_board.n.02', 'synonyms': ['bulletin_board', 'notice_board'], 'def': 'a board that hangs on a wall; displays announcements', 'name': 'bulletin_board'}, {'frequency': 'r', 'id': 167, 'synset': 'bulletproof_vest.n.01', 'synonyms': ['bulletproof_vest'], 'def': 'a vest capable of resisting the impact of a bullet', 'name': 'bulletproof_vest'}, {'frequency': 'c', 'id': 168, 'synset': 'bullhorn.n.01', 'synonyms': ['bullhorn', 'megaphone'], 'def': 'a portable loudspeaker with built-in microphone and amplifier', 'name': 'bullhorn'}, {'frequency': 'r', 'id': 169, 'synset': 'bully_beef.n.01', 'synonyms': ['corned_beef', 'corn_beef'], 'def': 'beef cured or pickled in brine', 'name': 'corned_beef'}, {'frequency': 'f', 'id': 170, 'synset': 'bun.n.01', 
'synonyms': ['bun', 'roll'], 'def': 'small rounded bread either plain or sweet', 'name': 'bun'}, {'frequency': 'c', 'id': 171, 'synset': 'bunk_bed.n.01', 'synonyms': ['bunk_bed'], 'def': 'beds built one above the other', 'name': 'bunk_bed'}, {'frequency': 'f', 'id': 172, 'synset': 'buoy.n.01', 'synonyms': ['buoy'], 'def': 'a float attached by rope to the seabed to mark channels in a harbor or underwater hazards', 'name': 'buoy'}, {'frequency': 'r', 'id': 173, 'synset': 'burrito.n.01', 'synonyms': ['burrito'], 'def': 'a flour tortilla folded around a filling', 'name': 'burrito'}, {'frequency': 'f', 'id': 174, 'synset': 'bus.n.01', 'synonyms': ['bus_(vehicle)', 'autobus', 'charabanc', 'double-decker', 'motorbus', 'motorcoach'], 'def': 'a vehicle carrying many passengers; used for public transport', 'name': 'bus_(vehicle)'}, {'frequency': 'c', 'id': 175, 'synset': 'business_card.n.01', 'synonyms': ['business_card'], 'def': \"a card on which are printed the person's name and business affiliation\", 'name': 'business_card'}, {'frequency': 'c', 'id': 176, 'synset': 'butcher_knife.n.01', 'synonyms': ['butcher_knife'], 'def': 'a large sharp knife for cutting or trimming meat', 'name': 'butcher_knife'}, {'frequency': 'c', 'id': 177, 'synset': 'butter.n.01', 'synonyms': ['butter'], 'def': 'an edible emulsion of fat globules made by churning milk or cream; for cooking and table use', 'name': 'butter'}, {'frequency': 'c', 'id': 178, 'synset': 'butterfly.n.01', 'synonyms': ['butterfly'], 'def': 'insect typically having a slender body with knobbed antennae and broad colorful wings', 'name': 'butterfly'}, {'frequency': 'f', 'id': 179, 'synset': 'button.n.01', 'synonyms': ['button'], 'def': 'a round fastener sewn to shirts and coats etc to fit through buttonholes', 'name': 'button'}, {'frequency': 'f', 'id': 180, 'synset': 'cab.n.03', 'synonyms': ['cab_(taxi)', 'taxi', 'taxicab'], 'def': 'a car that takes passengers where they want to go in exchange for money', 'name': 
'cab_(taxi)'}, {'frequency': 'r', 'id': 181, 'synset': 'cabana.n.01', 'synonyms': ['cabana'], 'def': 'a small tent used as a dressing room beside the sea or a swimming pool', 'name': 'cabana'}, {'frequency': 'r', 'id': 182, 'synset': 'cabin_car.n.01', 'synonyms': ['cabin_car', 'caboose'], 'def': 'a car on a freight train for use of the train crew; usually the last car on the train', 'name': 'cabin_car'}, {'frequency': 'f', 'id': 183, 'synset': 'cabinet.n.01', 'synonyms': ['cabinet'], 'def': 'a piece of furniture resembling a cupboard with doors and shelves and drawers', 'name': 'cabinet'}, {'frequency': 'r', 'id': 184, 'synset': 'cabinet.n.03', 'synonyms': ['locker', 'storage_locker'], 'def': 'a storage compartment for clothes and valuables; usually it has a lock', 'name': 'locker'}, {'frequency': 'f', 'id': 185, 'synset': 'cake.n.03', 'synonyms': ['cake'], 'def': 'baked goods made from or based on a mixture of flour, sugar, eggs, and fat', 'name': 'cake'}, {'frequency': 'c', 'id': 186, 'synset': 'calculator.n.02', 'synonyms': ['calculator'], 'def': 'a small machine that is used for mathematical calculations', 'name': 'calculator'}, {'frequency': 'f', 'id': 187, 'synset': 'calendar.n.02', 'synonyms': ['calendar'], 'def': 'a list or register of events (appointments/social events/court cases, etc)', 'name': 'calendar'}, {'frequency': 'c', 'id': 188, 'synset': 'calf.n.01', 'synonyms': ['calf'], 'def': 'young of domestic cattle', 'name': 'calf'}, {'frequency': 'c', 'id': 189, 'synset': 'camcorder.n.01', 'synonyms': ['camcorder'], 'def': 'a portable television camera and videocassette recorder', 'name': 'camcorder'}, {'frequency': 'c', 'id': 190, 'synset': 'camel.n.01', 'synonyms': ['camel'], 'def': 'cud-chewing mammal used as a draft or saddle animal in desert regions', 'name': 'camel'}, {'frequency': 'f', 'id': 191, 'synset': 'camera.n.01', 'synonyms': ['camera'], 'def': 'equipment for taking photographs', 'name': 'camera'}, {'frequency': 'c', 'id': 192, 'synset': 
'camera_lens.n.01', 'synonyms': ['camera_lens'], 'def': 'a lens that focuses the image in a camera', 'name': 'camera_lens'}, {'frequency': 'c', 'id': 193, 'synset': 'camper.n.02', 'synonyms': ['camper_(vehicle)', 'camping_bus', 'motor_home'], 'def': 'a recreational vehicle equipped for camping out while traveling', 'name': 'camper_(vehicle)'}, {'frequency': 'f', 'id': 194, 'synset': 'can.n.01', 'synonyms': ['can', 'tin_can'], 'def': 'airtight sealed metal container for food or drink or paint etc.', 'name': 'can'}, {'frequency': 'c', 'id': 195, 'synset': 'can_opener.n.01', 'synonyms': ['can_opener', 'tin_opener'], 'def': 'a device for cutting cans open', 'name': 'can_opener'}, {'frequency': 'r', 'id': 196, 'synset': 'candelabrum.n.01', 'synonyms': ['candelabrum', 'candelabra'], 'def': 'branched candlestick; ornamental; has several lights', 'name': 'candelabrum'}, {'frequency': 'f', 'id': 197, 'synset': 'candle.n.01', 'synonyms': ['candle', 'candlestick'], 'def': 'stick of wax with a wick in the middle', 'name': 'candle'}, {'frequency': 'f', 'id': 198, 'synset': 'candlestick.n.01', 'synonyms': ['candle_holder'], 'def': 'a holder with sockets for candles', 'name': 'candle_holder'}, {'frequency': 'r', 'id': 199, 'synset': 'candy_bar.n.01', 'synonyms': ['candy_bar'], 'def': 'a candy shaped as a bar', 'name': 'candy_bar'}, {'frequency': 'c', 'id': 200, 'synset': 'candy_cane.n.01', 'synonyms': ['candy_cane'], 'def': 'a hard candy in the shape of a rod (usually with stripes)', 'name': 'candy_cane'}, {'frequency': 'c', 'id': 201, 'synset': 'cane.n.01', 'synonyms': ['walking_cane'], 'def': 'a stick that people can lean on to help them walk', 'name': 'walking_cane'}, {'frequency': 'c', 'id': 202, 'synset': 'canister.n.02', 'synonyms': ['canister', 'cannister'], 'def': 'metal container for storing dry foods such as tea or flour', 'name': 'canister'}, {'frequency': 'r', 'id': 203, 'synset': 'cannon.n.02', 'synonyms': ['cannon'], 'def': 'heavy gun fired from a tank', 'name': 
'cannon'}, {'frequency': 'c', 'id': 204, 'synset': 'canoe.n.01', 'synonyms': ['canoe'], 'def': 'small and light boat; pointed at both ends; propelled with a paddle', 'name': 'canoe'}, {'frequency': 'r', 'id': 205, 'synset': 'cantaloup.n.02', 'synonyms': ['cantaloup', 'cantaloupe'], 'def': 'the fruit of a cantaloup vine; small to medium-sized melon with yellowish flesh', 'name': 'cantaloup'}, {'frequency': 'r', 'id': 206, 'synset': 'canteen.n.01', 'synonyms': ['canteen'], 'def': 'a flask for carrying water; used by soldiers or travelers', 'name': 'canteen'}, {'frequency': 'c', 'id': 207, 'synset': 'cap.n.01', 'synonyms': ['cap_(headwear)'], 'def': 'a tight-fitting headwear', 'name': 'cap_(headwear)'}, {'frequency': 'f', 'id': 208, 'synset': 'cap.n.02', 'synonyms': ['bottle_cap', 'cap_(container_lid)'], 'def': 'a top (as for a bottle)', 'name': 'bottle_cap'}, {'frequency': 'r', 'id': 209, 'synset': 'cape.n.02', 'synonyms': ['cape'], 'def': 'a sleeveless garment like a cloak but shorter', 'name': 'cape'}, {'frequency': 'c', 'id': 210, 'synset': 'cappuccino.n.01', 'synonyms': ['cappuccino', 'coffee_cappuccino'], 'def': 'equal parts of espresso and steamed milk', 'name': 'cappuccino'}, {'frequency': 'f', 'id': 211, 'synset': 'car.n.01', 'synonyms': ['car_(automobile)', 'auto_(automobile)', 'automobile'], 'def': 'a motor vehicle with four wheels', 'name': 'car_(automobile)'}, {'frequency': 'f', 'id': 212, 'synset': 'car.n.02', 'synonyms': ['railcar_(part_of_a_train)', 'railway_car_(part_of_a_train)', 'railroad_car_(part_of_a_train)'], 'def': 'a wheeled vehicle adapted to the rails of railroad', 'name': 'railcar_(part_of_a_train)'}, {'frequency': 'r', 'id': 213, 'synset': 'car.n.04', 'synonyms': ['elevator_car'], 'def': 'where passengers ride up and down', 'name': 'elevator_car'}, {'frequency': 'r', 'id': 214, 'synset': 'car_battery.n.01', 'synonyms': ['car_battery', 'automobile_battery'], 'def': 'a battery in a motor vehicle', 'name': 'car_battery'}, {'frequency': 'c', 
'id': 215, 'synset': 'card.n.02', 'synonyms': ['identity_card'], 'def': 'a card certifying the identity of the bearer', 'name': 'identity_card'}, {'frequency': 'c', 'id': 216, 'synset': 'card.n.03', 'synonyms': ['card'], 'def': 'a rectangular piece of paper used to send messages (e.g. greetings or pictures)', 'name': 'card'}, {'frequency': 'r', 'id': 217, 'synset': 'cardigan.n.01', 'synonyms': ['cardigan'], 'def': 'knitted jacket that is fastened up the front with buttons or a zipper', 'name': 'cardigan'}, {'frequency': 'r', 'id': 218, 'synset': 'cargo_ship.n.01', 'synonyms': ['cargo_ship', 'cargo_vessel'], 'def': 'a ship designed to carry cargo', 'name': 'cargo_ship'}, {'frequency': 'r', 'id': 219, 'synset': 'carnation.n.01', 'synonyms': ['carnation'], 'def': 'plant with pink to purple-red spice-scented usually double flowers', 'name': 'carnation'}, {'frequency': 'c', 'id': 220, 'synset': 'carriage.n.02', 'synonyms': ['horse_carriage'], 'def': 'a vehicle with wheels drawn by one or more horses', 'name': 'horse_carriage'}, {'frequency': 'f', 'id': 221, 'synset': 'carrot.n.01', 'synonyms': ['carrot'], 'def': 'deep orange edible root of the cultivated carrot plant', 'name': 'carrot'}, {'frequency': 'c', 'id': 222, 'synset': 'carryall.n.01', 'synonyms': ['tote_bag'], 'def': 'a capacious bag or basket', 'name': 'tote_bag'}, {'frequency': 'c', 'id': 223, 'synset': 'cart.n.01', 'synonyms': ['cart'], 'def': 'a heavy open wagon usually having two wheels and drawn by an animal', 'name': 'cart'}, {'frequency': 'c', 'id': 224, 'synset': 'carton.n.02', 'synonyms': ['carton'], 'def': 'a box made of cardboard; opens by flaps on top', 'name': 'carton'}, {'frequency': 'c', 'id': 225, 'synset': 'cash_register.n.01', 'synonyms': ['cash_register', 'register_(for_cash_transactions)'], 'def': 'a cashbox with an adding machine to register transactions', 'name': 'cash_register'}, {'frequency': 'r', 'id': 226, 'synset': 'casserole.n.01', 'synonyms': ['casserole'], 'def': 'food cooked and 
served in a casserole', 'name': 'casserole'}, {'frequency': 'r', 'id': 227, 'synset': 'cassette.n.01', 'synonyms': ['cassette'], 'def': 'a container that holds a magnetic tape used for recording or playing sound or video', 'name': 'cassette'}, {'frequency': 'c', 'id': 228, 'synset': 'cast.n.05', 'synonyms': ['cast', 'plaster_cast', 'plaster_bandage'], 'def': 'bandage consisting of a firm covering that immobilizes broken bones while they heal', 'name': 'cast'}, {'frequency': 'f', 'id': 229, 'synset': 'cat.n.01', 'synonyms': ['cat'], 'def': 'a domestic house cat', 'name': 'cat'}, {'frequency': 'c', 'id': 230, 'synset': 'cauliflower.n.02', 'synonyms': ['cauliflower'], 'def': 'edible compact head of white undeveloped flowers', 'name': 'cauliflower'}, {'frequency': 'r', 'id': 231, 'synset': 'caviar.n.01', 'synonyms': ['caviar', 'caviare'], 'def': \"salted roe of sturgeon or other large fish; usually served as an hors d'oeuvre\", 'name': 'caviar'}, {'frequency': 'c', 'id': 232, 'synset': 'cayenne.n.02', 'synonyms': ['cayenne_(spice)', 'cayenne_pepper_(spice)', 'red_pepper_(spice)'], 'def': 'ground pods and seeds of pungent red peppers of the genus Capsicum', 'name': 'cayenne_(spice)'}, {'frequency': 'c', 'id': 233, 'synset': 'cd_player.n.01', 'synonyms': ['CD_player'], 'def': 'electronic equipment for playing compact discs (CDs)', 'name': 'CD_player'}, {'frequency': 'c', 'id': 234, 'synset': 'celery.n.01', 'synonyms': ['celery'], 'def': 'widely cultivated herb with aromatic leaf stalks that are eaten raw or cooked', 'name': 'celery'}, {'frequency': 'f', 'id': 235, 'synset': 'cellular_telephone.n.01', 'synonyms': ['cellular_telephone', 'cellular_phone', 'cellphone', 'mobile_phone', 'smart_phone'], 'def': 'a hand-held mobile telephone', 'name': 'cellular_telephone'}, {'frequency': 'r', 'id': 236, 'synset': 'chain_mail.n.01', 'synonyms': ['chain_mail', 'ring_mail', 'chain_armor', 'chain_armour', 'ring_armor', 'ring_armour'], 'def': '(Middle Ages) flexible armor made of 
interlinked metal rings', 'name': 'chain_mail'}, {'frequency': 'f', 'id': 237, 'synset': 'chair.n.01', 'synonyms': ['chair'], 'def': 'a seat for one person, with a support for the back', 'name': 'chair'}, {'frequency': 'r', 'id': 238, 'synset': 'chaise_longue.n.01', 'synonyms': ['chaise_longue', 'chaise', 'daybed'], 'def': 'a long chair; for reclining', 'name': 'chaise_longue'}, {'frequency': 'r', 'id': 239, 'synset': 'champagne.n.01', 'synonyms': ['champagne'], 'def': 'a white sparkling wine produced in Champagne or resembling that produced there', 'name': 'champagne'}, {'frequency': 'f', 'id': 240, 'synset': 'chandelier.n.01', 'synonyms': ['chandelier'], 'def': 'branched lighting fixture; often ornate; hangs from the ceiling', 'name': 'chandelier'}, {'frequency': 'r', 'id': 241, 'synset': 'chap.n.04', 'synonyms': ['chap'], 'def': 'leather leggings without a seat; worn over trousers by cowboys to protect their legs', 'name': 'chap'}, {'frequency': 'r', 'id': 242, 'synset': 'checkbook.n.01', 'synonyms': ['checkbook', 'chequebook'], 'def': 'a book issued to holders of checking accounts', 'name': 'checkbook'}, {'frequency': 'r', 'id': 243, 'synset': 'checkerboard.n.01', 'synonyms': ['checkerboard'], 'def': 'a board having 64 squares of two alternating colors', 'name': 'checkerboard'}, {'frequency': 'c', 'id': 244, 'synset': 'cherry.n.03', 'synonyms': ['cherry'], 'def': 'a red fruit with a single hard stone', 'name': 'cherry'}, {'frequency': 'r', 'id': 245, 'synset': 'chessboard.n.01', 'synonyms': ['chessboard'], 'def': 'a checkerboard used to play chess', 'name': 'chessboard'}, {'frequency': 'r', 'id': 246, 'synset': 'chest_of_drawers.n.01', 'synonyms': ['chest_of_drawers_(furniture)', 'bureau_(furniture)', 'chest_(furniture)'], 'def': 'furniture with drawers for keeping clothes', 'name': 'chest_of_drawers_(furniture)'}, {'frequency': 'c', 'id': 247, 'synset': 'chicken.n.02', 'synonyms': ['chicken_(animal)'], 'def': 'a domestic fowl bred for flesh or eggs', 'name': 
'chicken_(animal)'}, {'frequency': 'c', 'id': 248, 'synset': 'chicken_wire.n.01', 'synonyms': ['chicken_wire'], 'def': 'a galvanized wire network with a hexagonal mesh; used to build fences', 'name': 'chicken_wire'}, {'frequency': 'r', 'id': 249, 'synset': 'chickpea.n.01', 'synonyms': ['chickpea', 'garbanzo'], 'def': 'the seed of the chickpea plant; usually dried', 'name': 'chickpea'}, {'frequency': 'r', 'id': 250, 'synset': 'chihuahua.n.03', 'synonyms': ['Chihuahua'], 'def': 'an old breed of tiny short-haired dog with protruding eyes from Mexico', 'name': 'Chihuahua'}, {'frequency': 'r', 'id': 251, 'synset': 'chili.n.02', 'synonyms': ['chili_(vegetable)', 'chili_pepper_(vegetable)', 'chilli_(vegetable)', 'chilly_(vegetable)', 'chile_(vegetable)'], 'def': 'very hot and finely tapering pepper of special pungency', 'name': 'chili_(vegetable)'}, {'frequency': 'r', 'id': 252, 'synset': 'chime.n.01', 'synonyms': ['chime', 'gong'], 'def': 'an instrument consisting of a set of bells that are struck with a hammer', 'name': 'chime'}, {'frequency': 'r', 'id': 253, 'synset': 'chinaware.n.01', 'synonyms': ['chinaware'], 'def': 'dishware made of high quality porcelain', 'name': 'chinaware'}, {'frequency': 'c', 'id': 254, 'synset': 'chip.n.04', 'synonyms': ['crisp_(potato_chip)', 'potato_chip'], 'def': 'a thin crisp slice of potato fried in deep fat', 'name': 'crisp_(potato_chip)'}, {'frequency': 'r', 'id': 255, 'synset': 'chip.n.06', 'synonyms': ['poker_chip'], 'def': 'a small disk-shaped counter used to represent money when gambling', 'name': 'poker_chip'}, {'frequency': 'c', 'id': 256, 'synset': 'chocolate_bar.n.01', 'synonyms': ['chocolate_bar'], 'def': 'a bar of chocolate candy', 'name': 'chocolate_bar'}, {'frequency': 'c', 'id': 257, 'synset': 'chocolate_cake.n.01', 'synonyms': ['chocolate_cake'], 'def': 'cake containing chocolate', 'name': 'chocolate_cake'}, {'frequency': 'r', 'id': 258, 'synset': 'chocolate_milk.n.01', 'synonyms': ['chocolate_milk'], 'def': 'milk 
flavored with chocolate syrup', 'name': 'chocolate_milk'}, {'frequency': 'r', 'id': 259, 'synset': 'chocolate_mousse.n.01', 'synonyms': ['chocolate_mousse'], 'def': 'dessert mousse made with chocolate', 'name': 'chocolate_mousse'}, {'frequency': 'f', 'id': 260, 'synset': 'choker.n.03', 'synonyms': ['choker', 'collar', 'neckband'], 'def': 'necklace that fits tightly around the neck', 'name': 'choker'}, {'frequency': 'f', 'id': 261, 'synset': 'chopping_board.n.01', 'synonyms': ['chopping_board', 'cutting_board', 'chopping_block'], 'def': 'a wooden board where meats or vegetables can be cut', 'name': 'chopping_board'}, {'frequency': 'c', 'id': 262, 'synset': 'chopstick.n.01', 'synonyms': ['chopstick'], 'def': 'one of a pair of slender sticks used as oriental tableware to eat food with', 'name': 'chopstick'}, {'frequency': 'f', 'id': 263, 'synset': 'christmas_tree.n.05', 'synonyms': ['Christmas_tree'], 'def': 'an ornamented evergreen used as a Christmas decoration', 'name': 'Christmas_tree'}, {'frequency': 'c', 'id': 264, 'synset': 'chute.n.02', 'synonyms': ['slide'], 'def': 'sloping channel through which things can descend', 'name': 'slide'}, {'frequency': 'r', 'id': 265, 'synset': 'cider.n.01', 'synonyms': ['cider', 'cyder'], 'def': 'a beverage made from juice pressed from apples', 'name': 'cider'}, {'frequency': 'r', 'id': 266, 'synset': 'cigar_box.n.01', 'synonyms': ['cigar_box'], 'def': 'a box for holding cigars', 'name': 'cigar_box'}, {'frequency': 'c', 'id': 267, 'synset': 'cigarette.n.01', 'synonyms': ['cigarette'], 'def': 'finely ground tobacco wrapped in paper; for smoking', 'name': 'cigarette'}, {'frequency': 'c', 'id': 268, 'synset': 'cigarette_case.n.01', 'synonyms': ['cigarette_case', 'cigarette_pack'], 'def': 'a small flat case for holding cigarettes', 'name': 'cigarette_case'}, {'frequency': 'f', 'id': 269, 'synset': 'cistern.n.02', 'synonyms': ['cistern', 'water_tank'], 'def': 'a tank that holds the water used to flush a toilet', 'name': 'cistern'}, 
{'frequency': 'r', 'id': 270, 'synset': 'clarinet.n.01', 'synonyms': ['clarinet'], 'def': 'a single-reed instrument with a straight tube', 'name': 'clarinet'}, {'frequency': 'r', 'id': 271, 'synset': 'clasp.n.01', 'synonyms': ['clasp'], 'def': 'a fastener (as a buckle or hook) that is used to hold two things together', 'name': 'clasp'}, {'frequency': 'c', 'id': 272, 'synset': 'cleansing_agent.n.01', 'synonyms': ['cleansing_agent', 'cleanser', 'cleaner'], 'def': 'a preparation used in cleaning something', 'name': 'cleansing_agent'}, {'frequency': 'r', 'id': 273, 'synset': 'clementine.n.01', 'synonyms': ['clementine'], 'def': 'a variety of mandarin orange', 'name': 'clementine'}, {'frequency': 'c', 'id': 274, 'synset': 'clip.n.03', 'synonyms': ['clip'], 'def': 'any of various small fasteners used to hold loose articles together', 'name': 'clip'}, {'frequency': 'c', 'id': 275, 'synset': 'clipboard.n.01', 'synonyms': ['clipboard'], 'def': 'a small writing board with a clip at the top for holding papers', 'name': 'clipboard'}, {'frequency': 'f', 'id': 276, 'synset': 'clock.n.01', 'synonyms': ['clock', 'timepiece', 'timekeeper'], 'def': 'a timepiece that shows the time of day', 'name': 'clock'}, {'frequency': 'f', 'id': 277, 'synset': 'clock_tower.n.01', 'synonyms': ['clock_tower'], 'def': 'a tower with a large clock visible high up on an outside face', 'name': 'clock_tower'}, {'frequency': 'c', 'id': 278, 'synset': 'clothes_hamper.n.01', 'synonyms': ['clothes_hamper', 'laundry_basket', 'clothes_basket'], 'def': 'a hamper that holds dirty clothes to be washed or wet clothes to be dried', 'name': 'clothes_hamper'}, {'frequency': 'c', 'id': 279, 'synset': 'clothespin.n.01', 'synonyms': ['clothespin', 'clothes_peg'], 'def': 'wood or plastic fastener; for holding clothes on a clothesline', 'name': 'clothespin'}, {'frequency': 'r', 'id': 280, 'synset': 'clutch_bag.n.01', 'synonyms': ['clutch_bag'], 'def': \"a woman's strapless purse that is carried in the hand\", 'name': 
'clutch_bag'}, {'frequency': 'f', 'id': 281, 'synset': 'coaster.n.03', 'synonyms': ['coaster'], 'def': 'a covering (plate or mat) that protects the surface of a table', 'name': 'coaster'}, {'frequency': 'f', 'id': 282, 'synset': 'coat.n.01', 'synonyms': ['coat'], 'def': 'an outer garment that has sleeves and covers the body from shoulder down', 'name': 'coat'}, {'frequency': 'c', 'id': 283, 'synset': 'coat_hanger.n.01', 'synonyms': ['coat_hanger', 'clothes_hanger', 'dress_hanger'], 'def': \"a hanger that is shaped like a person's shoulders\", 'name': 'coat_hanger'}, {'frequency': 'r', 'id': 284, 'synset': 'coatrack.n.01', 'synonyms': ['coatrack', 'hatrack'], 'def': 'a rack with hooks for temporarily holding coats and hats', 'name': 'coatrack'}, {'frequency': 'c', 'id': 285, 'synset': 'cock.n.04', 'synonyms': ['cock', 'rooster'], 'def': 'adult male chicken', 'name': 'cock'}, {'frequency': 'c', 'id': 286, 'synset': 'coconut.n.02', 'synonyms': ['coconut', 'cocoanut'], 'def': 'large hard-shelled brown oval nut with a fibrous husk', 'name': 'coconut'}, {'frequency': 'r', 'id': 287, 'synset': 'coffee_filter.n.01', 'synonyms': ['coffee_filter'], 'def': 'filter (usually of paper) that passes the coffee and retains the coffee grounds', 'name': 'coffee_filter'}, {'frequency': 'f', 'id': 288, 'synset': 'coffee_maker.n.01', 'synonyms': ['coffee_maker', 'coffee_machine'], 'def': 'a kitchen appliance for brewing coffee automatically', 'name': 'coffee_maker'}, {'frequency': 'f', 'id': 289, 'synset': 'coffee_table.n.01', 'synonyms': ['coffee_table', 'cocktail_table'], 'def': 'low table where magazines can be placed and coffee or cocktails are served', 'name': 'coffee_table'}, {'frequency': 'c', 'id': 290, 'synset': 'coffeepot.n.01', 'synonyms': ['coffeepot'], 'def': 'tall pot in which coffee is brewed', 'name': 'coffeepot'}, {'frequency': 'r', 'id': 291, 'synset': 'coil.n.05', 'synonyms': ['coil'], 'def': 'tubing that is wound in a spiral', 'name': 'coil'}, {'frequency': 'c', 
'id': 292, 'synset': 'coin.n.01', 'synonyms': ['coin'], 'def': 'a flat metal piece (usually a disc) used as money', 'name': 'coin'}, {'frequency': 'r', 'id': 293, 'synset': 'colander.n.01', 'synonyms': ['colander', 'cullender'], 'def': 'bowl-shaped strainer; used to wash or drain foods', 'name': 'colander'}, {'frequency': 'c', 'id': 294, 'synset': 'coleslaw.n.01', 'synonyms': ['coleslaw', 'slaw'], 'def': 'basically shredded cabbage', 'name': 'coleslaw'}, {'frequency': 'r', 'id': 295, 'synset': 'coloring_material.n.01', 'synonyms': ['coloring_material', 'colouring_material'], 'def': 'any material used for its color', 'name': 'coloring_material'}, {'frequency': 'r', 'id': 296, 'synset': 'combination_lock.n.01', 'synonyms': ['combination_lock'], 'def': 'lock that can be opened only by turning dials in a special sequence', 'name': 'combination_lock'}, {'frequency': 'c', 'id': 297, 'synset': 'comforter.n.04', 'synonyms': ['pacifier', 'teething_ring'], 'def': 'device used for an infant to suck or bite on', 'name': 'pacifier'}, {'frequency': 'r', 'id': 298, 'synset': 'comic_book.n.01', 'synonyms': ['comic_book'], 'def': 'a magazine devoted to comic strips', 'name': 'comic_book'}, {'frequency': 'f', 'id': 299, 'synset': 'computer_keyboard.n.01', 'synonyms': ['computer_keyboard', 'keyboard_(computer)'], 'def': 'a keyboard that is a data input device for computers', 'name': 'computer_keyboard'}, {'frequency': 'r', 'id': 300, 'synset': 'concrete_mixer.n.01', 'synonyms': ['concrete_mixer', 'cement_mixer'], 'def': 'a machine with a large revolving drum in which cement/concrete is mixed', 'name': 'concrete_mixer'}, {'frequency': 'f', 'id': 301, 'synset': 'cone.n.01', 'synonyms': ['cone', 'traffic_cone'], 'def': 'a cone-shaped object used to direct traffic', 'name': 'cone'}, {'frequency': 'f', 'id': 302, 'synset': 'control.n.09', 'synonyms': ['control', 'controller'], 'def': 'a mechanism that controls the operation of a machine', 'name': 'control'}, {'frequency': 'r', 'id': 303, 
'synset': 'convertible.n.01', 'synonyms': ['convertible_(automobile)'], 'def': 'a car that has top that can be folded or removed', 'name': 'convertible_(automobile)'}, {'frequency': 'r', 'id': 304, 'synset': 'convertible.n.03', 'synonyms': ['sofa_bed'], 'def': 'a sofa that can be converted into a bed', 'name': 'sofa_bed'}, {'frequency': 'c', 'id': 305, 'synset': 'cookie.n.01', 'synonyms': ['cookie', 'cooky', 'biscuit_(cookie)'], 'def': \"any of various small flat sweet cakes (`biscuit' is the British term)\", 'name': 'cookie'}, {'frequency': 'r', 'id': 306, 'synset': 'cookie_jar.n.01', 'synonyms': ['cookie_jar', 'cooky_jar'], 'def': 'a jar in which cookies are kept (and sometimes money is hidden)', 'name': 'cookie_jar'}, {'frequency': 'r', 'id': 307, 'synset': 'cooking_utensil.n.01', 'synonyms': ['cooking_utensil'], 'def': 'a kitchen utensil made of material that does not melt easily; used for cooking', 'name': 'cooking_utensil'}, {'frequency': 'f', 'id': 308, 'synset': 'cooler.n.01', 'synonyms': ['cooler_(for_food)', 'ice_chest'], 'def': 'an insulated box for storing food often with ice', 'name': 'cooler_(for_food)'}, {'frequency': 'c', 'id': 309, 'synset': 'cork.n.04', 'synonyms': ['cork_(bottle_plug)', 'bottle_cork'], 'def': 'the plug in the mouth of a bottle (especially a wine bottle)', 'name': 'cork_(bottle_plug)'}, {'frequency': 'r', 'id': 310, 'synset': 'corkboard.n.01', 'synonyms': ['corkboard'], 'def': 'a sheet consisting of cork granules', 'name': 'corkboard'}, {'frequency': 'r', 'id': 311, 'synset': 'corkscrew.n.01', 'synonyms': ['corkscrew', 'bottle_screw'], 'def': 'a bottle opener that pulls corks', 'name': 'corkscrew'}, {'frequency': 'c', 'id': 312, 'synset': 'corn.n.03', 'synonyms': ['edible_corn', 'corn', 'maize'], 'def': 'ears of corn that can be prepared and served for human food', 'name': 'edible_corn'}, {'frequency': 'r', 'id': 313, 'synset': 'cornbread.n.01', 'synonyms': ['cornbread'], 'def': 'bread made primarily of cornmeal', 'name': 
'cornbread'}, {'frequency': 'c', 'id': 314, 'synset': 'cornet.n.01', 'synonyms': ['cornet', 'horn', 'trumpet'], 'def': 'a brass musical instrument with a narrow tube and a flared bell and many valves', 'name': 'cornet'}, {'frequency': 'c', 'id': 315, 'synset': 'cornice.n.01', 'synonyms': ['cornice', 'valance', 'valance_board', 'pelmet'], 'def': 'a decorative framework to conceal curtain fixtures at the top of a window casing', 'name': 'cornice'}, {'frequency': 'r', 'id': 316, 'synset': 'cornmeal.n.01', 'synonyms': ['cornmeal'], 'def': 'coarsely ground corn', 'name': 'cornmeal'}, {'frequency': 'r', 'id': 317, 'synset': 'corset.n.01', 'synonyms': ['corset', 'girdle'], 'def': \"a woman's close-fitting foundation garment\", 'name': 'corset'}, {'frequency': 'r', 'id': 318, 'synset': 'cos.n.02', 'synonyms': ['romaine_lettuce'], 'def': 'lettuce with long dark-green leaves in a loosely packed elongated head', 'name': 'romaine_lettuce'}, {'frequency': 'c', 'id': 319, 'synset': 'costume.n.04', 'synonyms': ['costume'], 'def': 'the attire characteristic of a country or a time or a social class', 'name': 'costume'}, {'frequency': 'r', 'id': 320, 'synset': 'cougar.n.01', 'synonyms': ['cougar', 'puma', 'catamount', 'mountain_lion', 'panther'], 'def': 'large American feline resembling a lion', 'name': 'cougar'}, {'frequency': 'r', 'id': 321, 'synset': 'coverall.n.01', 'synonyms': ['coverall'], 'def': 'a loose-fitting protective garment that is worn over other clothing', 'name': 'coverall'}, {'frequency': 'r', 'id': 322, 'synset': 'cowbell.n.01', 'synonyms': ['cowbell'], 'def': 'a bell hung around the neck of cow so that the cow can be easily located', 'name': 'cowbell'}, {'frequency': 'f', 'id': 323, 'synset': 'cowboy_hat.n.01', 'synonyms': ['cowboy_hat', 'ten-gallon_hat'], 'def': 'a hat with a wide brim and a soft crown; worn by American ranch hands', 'name': 'cowboy_hat'}, {'frequency': 'r', 'id': 324, 'synset': 'crab.n.01', 'synonyms': ['crab_(animal)'], 'def': 'decapod having 
eyes on short stalks and a broad flattened shell and pincers', 'name': 'crab_(animal)'}, {'frequency': 'c', 'id': 325, 'synset': 'cracker.n.01', 'synonyms': ['cracker'], 'def': 'a thin crisp wafer', 'name': 'cracker'}, {'frequency': 'r', 'id': 326, 'synset': 'crape.n.01', 'synonyms': ['crape', 'crepe', 'French_pancake'], 'def': 'small very thin pancake', 'name': 'crape'}, {'frequency': 'f', 'id': 327, 'synset': 'crate.n.01', 'synonyms': ['crate'], 'def': 'a rugged box (usually made of wood); used for shipping', 'name': 'crate'}, {'frequency': 'r', 'id': 328, 'synset': 'crayon.n.01', 'synonyms': ['crayon', 'wax_crayon'], 'def': 'writing or drawing implement made of a colored stick of composition wax', 'name': 'crayon'}, {'frequency': 'r', 'id': 329, 'synset': 'cream_pitcher.n.01', 'synonyms': ['cream_pitcher'], 'def': 'a small pitcher for serving cream', 'name': 'cream_pitcher'}, {'frequency': 'r', 'id': 330, 'synset': 'credit_card.n.01', 'synonyms': ['credit_card', 'charge_card', 'debit_card'], 'def': 'a card, usually plastic, used to pay for goods and services', 'name': 'credit_card'}, {'frequency': 'c', 'id': 331, 'synset': 'crescent_roll.n.01', 'synonyms': ['crescent_roll', 'croissant'], 'def': 'very rich flaky crescent-shaped roll', 'name': 'crescent_roll'}, {'frequency': 'c', 'id': 332, 'synset': 'crib.n.01', 'synonyms': ['crib', 'cot'], 'def': 'baby bed with high sides made of slats', 'name': 'crib'}, {'frequency': 'c', 'id': 333, 'synset': 'crock.n.03', 'synonyms': ['crock_pot', 'earthenware_jar'], 'def': 'an earthen jar (made of baked clay)', 'name': 'crock_pot'}, {'frequency': 'f', 'id': 334, 'synset': 'crossbar.n.01', 'synonyms': ['crossbar'], 'def': 'a horizontal bar that goes across something', 'name': 'crossbar'}, {'frequency': 'r', 'id': 335, 'synset': 'crouton.n.01', 'synonyms': ['crouton'], 'def': 'a small piece of toasted or fried bread; served in soup or salads', 'name': 'crouton'}, {'frequency': 'r', 'id': 336, 'synset': 'crow.n.01', 'synonyms': 
['crow'], 'def': 'black birds having a raucous call', 'name': 'crow'}, {'frequency': 'c', 'id': 337, 'synset': 'crown.n.04', 'synonyms': ['crown'], 'def': 'an ornamental jeweled headdress signifying sovereignty', 'name': 'crown'}, {'frequency': 'c', 'id': 338, 'synset': 'crucifix.n.01', 'synonyms': ['crucifix'], 'def': 'representation of the cross on which Jesus died', 'name': 'crucifix'}, {'frequency': 'c', 'id': 339, 'synset': 'cruise_ship.n.01', 'synonyms': ['cruise_ship', 'cruise_liner'], 'def': 'a passenger ship used commercially for pleasure cruises', 'name': 'cruise_ship'}, {'frequency': 'c', 'id': 340, 'synset': 'cruiser.n.01', 'synonyms': ['police_cruiser', 'patrol_car', 'police_car', 'squad_car'], 'def': 'a car in which policemen cruise the streets', 'name': 'police_cruiser'}, {'frequency': 'c', 'id': 341, 'synset': 'crumb.n.03', 'synonyms': ['crumb'], 'def': 'small piece of e.g. bread or cake', 'name': 'crumb'}, {'frequency': 'r', 'id': 342, 'synset': 'crutch.n.01', 'synonyms': ['crutch'], 'def': 'a wooden or metal staff that fits under the armpit and reaches to the ground', 'name': 'crutch'}, {'frequency': 'c', 'id': 343, 'synset': 'cub.n.03', 'synonyms': ['cub_(animal)'], 'def': 'the young of certain carnivorous mammals such as the bear or wolf or lion', 'name': 'cub_(animal)'}, {'frequency': 'r', 'id': 344, 'synset': 'cube.n.05', 'synonyms': ['cube', 'square_block'], 'def': 'a block in the (approximate) shape of a cube', 'name': 'cube'}, {'frequency': 'f', 'id': 345, 'synset': 'cucumber.n.02', 'synonyms': ['cucumber', 'cuke'], 'def': 'cylindrical green fruit with thin green rind and white flesh eaten as a vegetable', 'name': 'cucumber'}, {'frequency': 'c', 'id': 346, 'synset': 'cufflink.n.01', 'synonyms': ['cufflink'], 'def': 'jewelry consisting of linked buttons used to fasten the cuffs of a shirt', 'name': 'cufflink'}, {'frequency': 'f', 'id': 347, 'synset': 'cup.n.01', 'synonyms': ['cup'], 'def': 'a small open container usually used for drinking; 
usually has a handle', 'name': 'cup'}, {'frequency': 'c', 'id': 348, 'synset': 'cup.n.08', 'synonyms': ['trophy_cup'], 'def': 'a metal vessel with handles that is awarded as a trophy to a competition winner', 'name': 'trophy_cup'}, {'frequency': 'c', 'id': 349, 'synset': 'cupcake.n.01', 'synonyms': ['cupcake'], 'def': 'small cake baked in a muffin tin', 'name': 'cupcake'}, {'frequency': 'r', 'id': 350, 'synset': 'curler.n.01', 'synonyms': ['hair_curler', 'hair_roller', 'hair_crimper'], 'def': 'a cylindrical tube around which the hair is wound to curl it', 'name': 'hair_curler'}, {'frequency': 'r', 'id': 351, 'synset': 'curling_iron.n.01', 'synonyms': ['curling_iron'], 'def': 'a cylindrical home appliance that heats hair that has been curled around it', 'name': 'curling_iron'}, {'frequency': 'f', 'id': 352, 'synset': 'curtain.n.01', 'synonyms': ['curtain', 'drapery'], 'def': 'hanging cloth used as a blind (especially for a window)', 'name': 'curtain'}, {'frequency': 'f', 'id': 353, 'synset': 'cushion.n.03', 'synonyms': ['cushion'], 'def': 'a soft bag filled with air or padding such as feathers or foam rubber', 'name': 'cushion'}, {'frequency': 'r', 'id': 354, 'synset': 'custard.n.01', 'synonyms': ['custard'], 'def': 'sweetened mixture of milk and eggs baked or boiled or frozen', 'name': 'custard'}, {'frequency': 'c', 'id': 355, 'synset': 'cutter.n.06', 'synonyms': ['cutting_tool'], 'def': 'a cutting implement; a tool for cutting', 'name': 'cutting_tool'}, {'frequency': 'r', 'id': 356, 'synset': 'cylinder.n.04', 'synonyms': ['cylinder'], 'def': 'a cylindrical container', 'name': 'cylinder'}, {'frequency': 'r', 'id': 357, 'synset': 'cymbal.n.01', 'synonyms': ['cymbal'], 'def': 'a percussion instrument consisting of a concave brass disk', 'name': 'cymbal'}, {'frequency': 'r', 'id': 358, 'synset': 'dachshund.n.01', 'synonyms': ['dachshund', 'dachsie', 'badger_dog'], 'def': 'small long-bodied short-legged breed of dog having a short sleek coat and long drooping ears', 
'name': 'dachshund'}, {'frequency': 'r', 'id': 359, 'synset': 'dagger.n.01', 'synonyms': ['dagger'], 'def': 'a short knife with a pointed blade used for piercing or stabbing', 'name': 'dagger'}, {'frequency': 'r', 'id': 360, 'synset': 'dartboard.n.01', 'synonyms': ['dartboard'], 'def': 'a circular board of wood or cork used as the target in the game of darts', 'name': 'dartboard'}, {'frequency': 'r', 'id': 361, 'synset': 'date.n.08', 'synonyms': ['date_(fruit)'], 'def': 'sweet edible fruit of the date palm with a single long woody seed', 'name': 'date_(fruit)'}, {'frequency': 'f', 'id': 362, 'synset': 'deck_chair.n.01', 'synonyms': ['deck_chair', 'beach_chair'], 'def': 'a folding chair for use outdoors; a wooden frame supports a length of canvas', 'name': 'deck_chair'}, {'frequency': 'c', 'id': 363, 'synset': 'deer.n.01', 'synonyms': ['deer', 'cervid'], 'def': \"distinguished from Bovidae by the male's having solid deciduous antlers\", 'name': 'deer'}, {'frequency': 'c', 'id': 364, 'synset': 'dental_floss.n.01', 'synonyms': ['dental_floss', 'floss'], 'def': 'a soft thread for cleaning the spaces between the teeth', 'name': 'dental_floss'}, {'frequency': 'f', 'id': 365, 'synset': 'desk.n.01', 'synonyms': ['desk'], 'def': 'a piece of furniture with a writing surface and usually drawers or other compartments', 'name': 'desk'}, {'frequency': 'r', 'id': 366, 'synset': 'detergent.n.01', 'synonyms': ['detergent'], 'def': 'a surface-active chemical widely used in industry and laundering', 'name': 'detergent'}, {'frequency': 'c', 'id': 367, 'synset': 'diaper.n.01', 'synonyms': ['diaper'], 'def': 'garment consisting of a folded cloth drawn up between the legs and fastened at the waist', 'name': 'diaper'}, {'frequency': 'r', 'id': 368, 'synset': 'diary.n.01', 'synonyms': ['diary', 'journal'], 'def': 'a daily written record of (usually personal) experiences and observations', 'name': 'diary'}, {'frequency': 'r', 'id': 369, 'synset': 'die.n.01', 'synonyms': ['die', 'dice'], 
'def': 'a small cube with 1 to 6 spots on the six faces; used in gambling', 'name': 'die'}, {'frequency': 'r', 'id': 370, 'synset': 'dinghy.n.01', 'synonyms': ['dinghy', 'dory', 'rowboat'], 'def': 'a small boat of shallow draft with seats and oars with which it is propelled', 'name': 'dinghy'}, {'frequency': 'f', 'id': 371, 'synset': 'dining_table.n.01', 'synonyms': ['dining_table'], 'def': 'a table at which meals are served', 'name': 'dining_table'}, {'frequency': 'r', 'id': 372, 'synset': 'dinner_jacket.n.01', 'synonyms': ['tux', 'tuxedo'], 'def': 'semiformal evening dress for men', 'name': 'tux'}, {'frequency': 'c', 'id': 373, 'synset': 'dish.n.01', 'synonyms': ['dish'], 'def': 'a piece of dishware normally used as a container for holding or serving food', 'name': 'dish'}, {'frequency': 'c', 'id': 374, 'synset': 'dish.n.05', 'synonyms': ['dish_antenna'], 'def': 'directional antenna consisting of a parabolic reflector', 'name': 'dish_antenna'}, {'frequency': 'c', 'id': 375, 'synset': 'dishrag.n.01', 'synonyms': ['dishrag', 'dishcloth'], 'def': 'a cloth for washing dishes', 'name': 'dishrag'}, {'frequency': 'c', 'id': 376, 'synset': 'dishtowel.n.01', 'synonyms': ['dishtowel', 'tea_towel'], 'def': 'a towel for drying dishes', 'name': 'dishtowel'}, {'frequency': 'f', 'id': 377, 'synset': 'dishwasher.n.01', 'synonyms': ['dishwasher', 'dishwashing_machine'], 'def': 'a machine for washing dishes', 'name': 'dishwasher'}, {'frequency': 'r', 'id': 378, 'synset': 'dishwasher_detergent.n.01', 'synonyms': ['dishwasher_detergent', 'dishwashing_detergent', 'dishwashing_liquid'], 'def': 'a low-sudsing detergent designed for use in dishwashers', 'name': 'dishwasher_detergent'}, {'frequency': 'r', 'id': 379, 'synset': 'diskette.n.01', 'synonyms': ['diskette', 'floppy', 'floppy_disk'], 'def': 'a small plastic magnetic disk enclosed in a stiff envelope used to store data', 'name': 'diskette'}, {'frequency': 'c', 'id': 380, 'synset': 'dispenser.n.01', 'synonyms': ['dispenser'], 
'def': 'a container so designed that the contents can be used in prescribed amounts', 'name': 'dispenser'}, {'frequency': 'c', 'id': 381, 'synset': 'dixie_cup.n.01', 'synonyms': ['Dixie_cup', 'paper_cup'], 'def': 'a disposable cup made of paper; for holding drinks', 'name': 'Dixie_cup'}, {'frequency': 'f', 'id': 382, 'synset': 'dog.n.01', 'synonyms': ['dog'], 'def': 'a common domesticated dog', 'name': 'dog'}, {'frequency': 'f', 'id': 383, 'synset': 'dog_collar.n.01', 'synonyms': ['dog_collar'], 'def': 'a collar for a dog', 'name': 'dog_collar'}, {'frequency': 'c', 'id': 384, 'synset': 'doll.n.01', 'synonyms': ['doll'], 'def': 'a toy replica of a HUMAN (NOT AN ANIMAL)', 'name': 'doll'}, {'frequency': 'r', 'id': 385, 'synset': 'dollar.n.02', 'synonyms': ['dollar', 'dollar_bill', 'one_dollar_bill'], 'def': 'a piece of paper money worth one dollar', 'name': 'dollar'}, {'frequency': 'r', 'id': 386, 'synset': 'dolphin.n.02', 'synonyms': ['dolphin'], 'def': 'any of various small toothed whales with a beaklike snout; larger than porpoises', 'name': 'dolphin'}, {'frequency': 'c', 'id': 387, 'synset': 'domestic_ass.n.01', 'synonyms': ['domestic_ass', 'donkey'], 'def': 'domestic beast of burden descended from the African wild ass; patient but stubborn', 'name': 'domestic_ass'}, {'frequency': 'r', 'id': 388, 'synset': 'domino.n.03', 'synonyms': ['eye_mask'], 'def': 'a mask covering the upper part of the face but with holes for the eyes', 'name': 'eye_mask'}, {'frequency': 'r', 'id': 389, 'synset': 'doorbell.n.01', 'synonyms': ['doorbell', 'buzzer'], 'def': 'a button at an outer door that gives a ringing or buzzing signal when pushed', 'name': 'doorbell'}, {'frequency': 'f', 'id': 390, 'synset': 'doorknob.n.01', 'synonyms': ['doorknob', 'doorhandle'], 'def': \"a knob used to open a door (often called `doorhandle' in Great Britain)\", 'name': 'doorknob'}, {'frequency': 'c', 'id': 391, 'synset': 'doormat.n.02', 'synonyms': ['doormat', 'welcome_mat'], 'def': 'a mat placed outside 
an exterior door for wiping the shoes before entering', 'name': 'doormat'}, {'frequency': 'f', 'id': 392, 'synset': 'doughnut.n.02', 'synonyms': ['doughnut', 'donut'], 'def': 'a small ring-shaped friedcake', 'name': 'doughnut'}, {'frequency': 'r', 'id': 393, 'synset': 'dove.n.01', 'synonyms': ['dove'], 'def': 'any of numerous small pigeons', 'name': 'dove'}, {'frequency': 'r', 'id': 394, 'synset': 'dragonfly.n.01', 'synonyms': ['dragonfly'], 'def': 'slender-bodied non-stinging insect having iridescent wings that are outspread at rest', 'name': 'dragonfly'}, {'frequency': 'f', 'id': 395, 'synset': 'drawer.n.01', 'synonyms': ['drawer'], 'def': 'a boxlike container in a piece of furniture; made so as to slide in and out', 'name': 'drawer'}, {'frequency': 'c', 'id': 396, 'synset': 'drawers.n.01', 'synonyms': ['underdrawers', 'boxers', 'boxershorts'], 'def': 'underpants worn by men', 'name': 'underdrawers'}, {'frequency': 'f', 'id': 397, 'synset': 'dress.n.01', 'synonyms': ['dress', 'frock'], 'def': 'a one-piece garment for a woman; has skirt and bodice', 'name': 'dress'}, {'frequency': 'c', 'id': 398, 'synset': 'dress_hat.n.01', 'synonyms': ['dress_hat', 'high_hat', 'opera_hat', 'silk_hat', 'top_hat'], 'def': \"a man's hat with a tall crown; usually covered with silk or with beaver fur\", 'name': 'dress_hat'}, {'frequency': 'c', 'id': 399, 'synset': 'dress_suit.n.01', 'synonyms': ['dress_suit'], 'def': 'formalwear consisting of full evening dress for men', 'name': 'dress_suit'}, {'frequency': 'c', 'id': 400, 'synset': 'dresser.n.05', 'synonyms': ['dresser'], 'def': 'a cabinet with shelves', 'name': 'dresser'}, {'frequency': 'c', 'id': 401, 'synset': 'drill.n.01', 'synonyms': ['drill'], 'def': 'a tool with a sharp rotating point for making holes in hard materials', 'name': 'drill'}, {'frequency': 'r', 'id': 402, 'synset': 'drinking_fountain.n.01', 'synonyms': ['drinking_fountain'], 'def': 'a public fountain to provide a jet of drinking water', 'name': 
'drinking_fountain'}, {'frequency': 'r', 'id': 403, 'synset': 'drone.n.04', 'synonyms': ['drone'], 'def': 'an aircraft without a pilot that is operated by remote control', 'name': 'drone'}, {'frequency': 'r', 'id': 404, 'synset': 'dropper.n.01', 'synonyms': ['dropper', 'eye_dropper'], 'def': 'pipet consisting of a small tube with a vacuum bulb at one end for drawing liquid in and releasing it a drop at a time', 'name': 'dropper'}, {'frequency': 'c', 'id': 405, 'synset': 'drum.n.01', 'synonyms': ['drum_(musical_instrument)'], 'def': 'a musical percussion instrument; usually consists of a hollow cylinder with a membrane stretched across each end', 'name': 'drum_(musical_instrument)'}, {'frequency': 'r', 'id': 406, 'synset': 'drumstick.n.02', 'synonyms': ['drumstick'], 'def': 'a stick used for playing a drum', 'name': 'drumstick'}, {'frequency': 'f', 'id': 407, 'synset': 'duck.n.01', 'synonyms': ['duck'], 'def': 'small web-footed broad-billed swimming bird', 'name': 'duck'}, {'frequency': 'r', 'id': 408, 'synset': 'duckling.n.02', 'synonyms': ['duckling'], 'def': 'young duck', 'name': 'duckling'}, {'frequency': 'c', 'id': 409, 'synset': 'duct_tape.n.01', 'synonyms': ['duct_tape'], 'def': 'a wide silvery adhesive tape', 'name': 'duct_tape'}, {'frequency': 'f', 'id': 410, 'synset': 'duffel_bag.n.01', 'synonyms': ['duffel_bag', 'duffle_bag', 'duffel', 'duffle'], 'def': 'a large cylindrical bag of heavy cloth', 'name': 'duffel_bag'}, {'frequency': 'r', 'id': 411, 'synset': 'dumbbell.n.01', 'synonyms': ['dumbbell'], 'def': 'an exercising weight with two ball-like ends connected by a short handle', 'name': 'dumbbell'}, {'frequency': 'c', 'id': 412, 'synset': 'dumpster.n.01', 'synonyms': ['dumpster'], 'def': 'a container designed to receive and transport and dump waste', 'name': 'dumpster'}, {'frequency': 'r', 'id': 413, 'synset': 'dustpan.n.02', 'synonyms': ['dustpan'], 'def': 'a short-handled receptacle into which dust can be swept', 'name': 'dustpan'}, {'frequency': 'r', 
'id': 414, 'synset': 'dutch_oven.n.02', 'synonyms': ['Dutch_oven'], 'def': 'iron or earthenware cooking pot; used for stews', 'name': 'Dutch_oven'}, {'frequency': 'c', 'id': 415, 'synset': 'eagle.n.01', 'synonyms': ['eagle'], 'def': 'large birds of prey noted for their broad wings and strong soaring flight', 'name': 'eagle'}, {'frequency': 'f', 'id': 416, 'synset': 'earphone.n.01', 'synonyms': ['earphone', 'earpiece', 'headphone'], 'def': 'device for listening to audio that is held over or inserted into the ear', 'name': 'earphone'}, {'frequency': 'r', 'id': 417, 'synset': 'earplug.n.01', 'synonyms': ['earplug'], 'def': 'a soft plug that is inserted into the ear canal to block sound', 'name': 'earplug'}, {'frequency': 'f', 'id': 418, 'synset': 'earring.n.01', 'synonyms': ['earring'], 'def': 'jewelry to ornament the ear', 'name': 'earring'}, {'frequency': 'c', 'id': 419, 'synset': 'easel.n.01', 'synonyms': ['easel'], 'def': \"an upright tripod for displaying something (usually an artist's canvas)\", 'name': 'easel'}, {'frequency': 'r', 'id': 420, 'synset': 'eclair.n.01', 'synonyms': ['eclair'], 'def': 'oblong cream puff', 'name': 'eclair'}, {'frequency': 'r', 'id': 421, 'synset': 'eel.n.01', 'synonyms': ['eel'], 'def': 'an elongate fish with fatty flesh', 'name': 'eel'}, {'frequency': 'f', 'id': 422, 'synset': 'egg.n.02', 'synonyms': ['egg', 'eggs'], 'def': 'oval reproductive body of a fowl (especially a hen) used as food', 'name': 'egg'}, {'frequency': 'r', 'id': 423, 'synset': 'egg_roll.n.01', 'synonyms': ['egg_roll', 'spring_roll'], 'def': 'minced vegetables and meat wrapped in a pancake and fried', 'name': 'egg_roll'}, {'frequency': 'c', 'id': 424, 'synset': 'egg_yolk.n.01', 'synonyms': ['egg_yolk', 'yolk_(egg)'], 'def': 'the yellow spherical part of an egg', 'name': 'egg_yolk'}, {'frequency': 'c', 'id': 425, 'synset': 'eggbeater.n.02', 'synonyms': ['eggbeater', 'eggwhisk'], 'def': 'a mixer for beating eggs or whipping cream', 'name': 'eggbeater'}, {'frequency': 
'c', 'id': 426, 'synset': 'eggplant.n.01', 'synonyms': ['eggplant', 'aubergine'], 'def': 'egg-shaped vegetable having a shiny skin typically dark purple', 'name': 'eggplant'}, {'frequency': 'r', 'id': 427, 'synset': 'electric_chair.n.01', 'synonyms': ['electric_chair'], 'def': 'a chair-shaped instrument of execution by electrocution', 'name': 'electric_chair'}, {'frequency': 'f', 'id': 428, 'synset': 'electric_refrigerator.n.01', 'synonyms': ['refrigerator'], 'def': 'a refrigerator in which the coolant is pumped around by an electric motor', 'name': 'refrigerator'}, {'frequency': 'f', 'id': 429, 'synset': 'elephant.n.01', 'synonyms': ['elephant'], 'def': 'a common elephant', 'name': 'elephant'}, {'frequency': 'r', 'id': 430, 'synset': 'elk.n.01', 'synonyms': ['elk', 'moose'], 'def': 'large northern deer with enormous flattened antlers in the male', 'name': 'elk'}, {'frequency': 'c', 'id': 431, 'synset': 'envelope.n.01', 'synonyms': ['envelope'], 'def': 'a flat (usually rectangular) container for a letter, thin package, etc.', 'name': 'envelope'}, {'frequency': 'c', 'id': 432, 'synset': 'eraser.n.01', 'synonyms': ['eraser'], 'def': 'an implement used to erase something', 'name': 'eraser'}, {'frequency': 'r', 'id': 433, 'synset': 'escargot.n.01', 'synonyms': ['escargot'], 'def': 'edible snail usually served in the shell with a sauce of melted butter and garlic', 'name': 'escargot'}, {'frequency': 'r', 'id': 434, 'synset': 'eyepatch.n.01', 'synonyms': ['eyepatch'], 'def': 'a protective cloth covering for an injured eye', 'name': 'eyepatch'}, {'frequency': 'r', 'id': 435, 'synset': 'falcon.n.01', 'synonyms': ['falcon'], 'def': 'birds of prey having long pointed powerful wings adapted for swift flight', 'name': 'falcon'}, {'frequency': 'f', 'id': 436, 'synset': 'fan.n.01', 'synonyms': ['fan'], 'def': 'a device for creating a current of air by movement of a surface or surfaces', 'name': 'fan'}, {'frequency': 'f', 'id': 437, 'synset': 'faucet.n.01', 'synonyms': ['faucet', 
'spigot', 'tap'], 'def': 'a regulator for controlling the flow of a liquid from a reservoir', 'name': 'faucet'}, {'frequency': 'r', 'id': 438, 'synset': 'fedora.n.01', 'synonyms': ['fedora'], 'def': 'a hat made of felt with a creased crown', 'name': 'fedora'}, {'frequency': 'r', 'id': 439, 'synset': 'ferret.n.02', 'synonyms': ['ferret'], 'def': 'domesticated albino variety of the European polecat bred for hunting rats and rabbits', 'name': 'ferret'}, {'frequency': 'c', 'id': 440, 'synset': 'ferris_wheel.n.01', 'synonyms': ['Ferris_wheel'], 'def': 'a large wheel with suspended seats that remain upright as the wheel rotates', 'name': 'Ferris_wheel'}, {'frequency': 'r', 'id': 441, 'synset': 'ferry.n.01', 'synonyms': ['ferry', 'ferryboat'], 'def': 'a boat that transports people or vehicles across a body of water and operates on a regular schedule', 'name': 'ferry'}, {'frequency': 'r', 'id': 442, 'synset': 'fig.n.04', 'synonyms': ['fig_(fruit)'], 'def': 'fleshy sweet pear-shaped yellowish or purple fruit eaten fresh or preserved or dried', 'name': 'fig_(fruit)'}, {'frequency': 'c', 'id': 443, 'synset': 'fighter.n.02', 'synonyms': ['fighter_jet', 'fighter_aircraft', 'attack_aircraft'], 'def': 'a high-speed military or naval airplane designed to destroy enemy targets', 'name': 'fighter_jet'}, {'frequency': 'f', 'id': 444, 'synset': 'figurine.n.01', 'synonyms': ['figurine'], 'def': 'a small carved or molded figure', 'name': 'figurine'}, {'frequency': 'c', 'id': 445, 'synset': 'file.n.03', 'synonyms': ['file_cabinet', 'filing_cabinet'], 'def': 'office furniture consisting of a container for keeping papers in order', 'name': 'file_cabinet'}, {'frequency': 'r', 'id': 446, 'synset': 'file.n.04', 'synonyms': ['file_(tool)'], 'def': 'a steel hand tool with small sharp teeth on some or all of its surfaces; used for smoothing wood or metal', 'name': 'file_(tool)'}, {'frequency': 'f', 'id': 447, 'synset': 'fire_alarm.n.02', 'synonyms': ['fire_alarm', 'smoke_alarm'], 'def': 'an 
alarm that is tripped off by fire or smoke', 'name': 'fire_alarm'}, {'frequency': 'c', 'id': 448, 'synset': 'fire_engine.n.01', 'synonyms': ['fire_engine', 'fire_truck'], 'def': 'large trucks that carry firefighters and equipment to the site of a fire', 'name': 'fire_engine'}, {'frequency': 'c', 'id': 449, 'synset': 'fire_extinguisher.n.01', 'synonyms': ['fire_extinguisher', 'extinguisher'], 'def': 'a manually operated device for extinguishing small fires', 'name': 'fire_extinguisher'}, {'frequency': 'c', 'id': 450, 'synset': 'fire_hose.n.01', 'synonyms': ['fire_hose'], 'def': 'a large hose that carries water from a fire hydrant to the site of the fire', 'name': 'fire_hose'}, {'frequency': 'f', 'id': 451, 'synset': 'fireplace.n.01', 'synonyms': ['fireplace'], 'def': 'an open recess in a wall at the base of a chimney where a fire can be built', 'name': 'fireplace'}, {'frequency': 'f', 'id': 452, 'synset': 'fireplug.n.01', 'synonyms': ['fireplug', 'fire_hydrant', 'hydrant'], 'def': 'an upright hydrant for drawing water to use in fighting a fire', 'name': 'fireplug'}, {'frequency': 'c', 'id': 453, 'synset': 'fish.n.01', 'synonyms': ['fish'], 'def': 'any of various mostly cold-blooded aquatic vertebrates usually having scales and breathing through gills', 'name': 'fish'}, {'frequency': 'r', 'id': 454, 'synset': 'fish.n.02', 'synonyms': ['fish_(food)'], 'def': 'the flesh of fish used as food', 'name': 'fish_(food)'}, {'frequency': 'r', 'id': 455, 'synset': 'fishbowl.n.02', 'synonyms': ['fishbowl', 'goldfish_bowl'], 'def': 'a transparent bowl in which small fish are kept', 'name': 'fishbowl'}, {'frequency': 'r', 'id': 456, 'synset': 'fishing_boat.n.01', 'synonyms': ['fishing_boat', 'fishing_vessel'], 'def': 'a vessel for fishing', 'name': 'fishing_boat'}, {'frequency': 'c', 'id': 457, 'synset': 'fishing_rod.n.01', 'synonyms': ['fishing_rod', 'fishing_pole'], 'def': 'a rod that is used in fishing to extend the fishing line', 'name': 'fishing_rod'}, {'frequency': 'f', 
'id': 458, 'synset': 'flag.n.01', 'synonyms': ['flag'], 'def': 'emblem usually consisting of a rectangular piece of cloth of distinctive design (do not include pole)', 'name': 'flag'}, {'frequency': 'f', 'id': 459, 'synset': 'flagpole.n.02', 'synonyms': ['flagpole', 'flagstaff'], 'def': 'a tall staff or pole on which a flag is raised', 'name': 'flagpole'}, {'frequency': 'c', 'id': 460, 'synset': 'flamingo.n.01', 'synonyms': ['flamingo'], 'def': 'large pink web-footed bird with down-bent bill', 'name': 'flamingo'}, {'frequency': 'c', 'id': 461, 'synset': 'flannel.n.01', 'synonyms': ['flannel'], 'def': 'a soft light woolen fabric; used for clothing', 'name': 'flannel'}, {'frequency': 'r', 'id': 462, 'synset': 'flash.n.10', 'synonyms': ['flash', 'flashbulb'], 'def': 'a lamp for providing momentary light to take a photograph', 'name': 'flash'}, {'frequency': 'c', 'id': 463, 'synset': 'flashlight.n.01', 'synonyms': ['flashlight', 'torch'], 'def': 'a small portable battery-powered electric lamp', 'name': 'flashlight'}, {'frequency': 'r', 'id': 464, 'synset': 'fleece.n.03', 'synonyms': ['fleece'], 'def': 'a soft bulky fabric with deep pile; used chiefly for clothing', 'name': 'fleece'}, {'frequency': 'f', 'id': 465, 'synset': 'flip-flop.n.02', 'synonyms': ['flip-flop_(sandal)'], 'def': 'a backless sandal held to the foot by a thong between two toes', 'name': 'flip-flop_(sandal)'}, {'frequency': 'c', 'id': 466, 'synset': 'flipper.n.01', 'synonyms': ['flipper_(footwear)', 'fin_(footwear)'], 'def': 'a shoe to aid a person in swimming', 'name': 'flipper_(footwear)'}, {'frequency': 'f', 'id': 467, 'synset': 'flower_arrangement.n.01', 'synonyms': ['flower_arrangement', 'floral_arrangement'], 'def': 'a decorative arrangement of flowers', 'name': 'flower_arrangement'}, {'frequency': 'c', 'id': 468, 'synset': 'flute.n.02', 'synonyms': ['flute_glass', 'champagne_flute'], 'def': 'a tall narrow wineglass', 'name': 'flute_glass'}, {'frequency': 'r', 'id': 469, 'synset': 'foal.n.01', 
'synonyms': ['foal'], 'def': 'a young horse', 'name': 'foal'}, {'frequency': 'c', 'id': 470, 'synset': 'folding_chair.n.01', 'synonyms': ['folding_chair'], 'def': 'a chair that can be folded flat for storage', 'name': 'folding_chair'}, {'frequency': 'c', 'id': 471, 'synset': 'food_processor.n.01', 'synonyms': ['food_processor'], 'def': 'a kitchen appliance for shredding, blending, chopping, or slicing food', 'name': 'food_processor'}, {'frequency': 'c', 'id': 472, 'synset': 'football.n.02', 'synonyms': ['football_(American)'], 'def': 'the inflated oblong ball used in playing American football', 'name': 'football_(American)'}, {'frequency': 'r', 'id': 473, 'synset': 'football_helmet.n.01', 'synonyms': ['football_helmet'], 'def': 'a padded helmet with a face mask to protect the head of football players', 'name': 'football_helmet'}, {'frequency': 'c', 'id': 474, 'synset': 'footstool.n.01', 'synonyms': ['footstool', 'footrest'], 'def': 'a low seat or a stool to rest the feet of a seated person', 'name': 'footstool'}, {'frequency': 'f', 'id': 475, 'synset': 'fork.n.01', 'synonyms': ['fork'], 'def': 'cutlery used for serving and eating food', 'name': 'fork'}, {'frequency': 'r', 'id': 476, 'synset': 'forklift.n.01', 'synonyms': ['forklift'], 'def': 'an industrial vehicle with a power operated fork in front that can be inserted under loads to lift and move them', 'name': 'forklift'}, {'frequency': 'r', 'id': 477, 'synset': 'freight_car.n.01', 'synonyms': ['freight_car'], 'def': 'a railway car that carries freight', 'name': 'freight_car'}, {'frequency': 'r', 'id': 478, 'synset': 'french_toast.n.01', 'synonyms': ['French_toast'], 'def': 'bread slice dipped in egg and milk and fried', 'name': 'French_toast'}, {'frequency': 'c', 'id': 479, 'synset': 'freshener.n.01', 'synonyms': ['freshener', 'air_freshener'], 'def': 'anything that freshens', 'name': 'freshener'}, {'frequency': 'f', 'id': 480, 'synset': 'frisbee.n.01', 'synonyms': ['frisbee'], 'def': 'a light, plastic disk 
propelled with a flip of the wrist for recreation or competition', 'name': 'frisbee'}, {'frequency': 'c', 'id': 481, 'synset': 'frog.n.01', 'synonyms': ['frog', 'toad', 'toad_frog'], 'def': 'a tailless stout-bodied amphibians with long hind limbs for leaping', 'name': 'frog'}, {'frequency': 'c', 'id': 482, 'synset': 'fruit_juice.n.01', 'synonyms': ['fruit_juice'], 'def': 'drink produced by squeezing or crushing fruit', 'name': 'fruit_juice'}, {'frequency': 'r', 'id': 483, 'synset': 'fruit_salad.n.01', 'synonyms': ['fruit_salad'], 'def': 'salad composed of fruits', 'name': 'fruit_salad'}, {'frequency': 'c', 'id': 484, 'synset': 'frying_pan.n.01', 'synonyms': ['frying_pan', 'frypan', 'skillet'], 'def': 'a pan used for frying foods', 'name': 'frying_pan'}, {'frequency': 'r', 'id': 485, 'synset': 'fudge.n.01', 'synonyms': ['fudge'], 'def': 'soft creamy candy', 'name': 'fudge'}, {'frequency': 'r', 'id': 486, 'synset': 'funnel.n.02', 'synonyms': ['funnel'], 'def': 'a cone-shaped utensil used to channel a substance into a container with a small mouth', 'name': 'funnel'}, {'frequency': 'c', 'id': 487, 'synset': 'futon.n.01', 'synonyms': ['futon'], 'def': 'a pad that is used for sleeping on the floor or on a raised frame', 'name': 'futon'}, {'frequency': 'r', 'id': 488, 'synset': 'gag.n.02', 'synonyms': ['gag', 'muzzle'], 'def': \"restraint put into a person's mouth to prevent speaking or shouting\", 'name': 'gag'}, {'frequency': 'r', 'id': 489, 'synset': 'garbage.n.03', 'synonyms': ['garbage'], 'def': 'a receptacle where waste can be discarded', 'name': 'garbage'}, {'frequency': 'c', 'id': 490, 'synset': 'garbage_truck.n.01', 'synonyms': ['garbage_truck'], 'def': 'a truck for collecting domestic refuse', 'name': 'garbage_truck'}, {'frequency': 'c', 'id': 491, 'synset': 'garden_hose.n.01', 'synonyms': ['garden_hose'], 'def': 'a hose used for watering a lawn or garden', 'name': 'garden_hose'}, {'frequency': 'c', 'id': 492, 'synset': 'gargle.n.01', 'synonyms': ['gargle', 
'mouthwash'], 'def': 'a medicated solution used for gargling and rinsing the mouth', 'name': 'gargle'}, {'frequency': 'r', 'id': 493, 'synset': 'gargoyle.n.02', 'synonyms': ['gargoyle'], 'def': 'an ornament consisting of a grotesquely carved figure of a person or animal', 'name': 'gargoyle'}, {'frequency': 'c', 'id': 494, 'synset': 'garlic.n.02', 'synonyms': ['garlic', 'ail'], 'def': 'aromatic bulb used as seasoning', 'name': 'garlic'}, {'frequency': 'r', 'id': 495, 'synset': 'gasmask.n.01', 'synonyms': ['gasmask', 'respirator', 'gas_helmet'], 'def': 'a protective face mask with a filter', 'name': 'gasmask'}, {'frequency': 'r', 'id': 496, 'synset': 'gazelle.n.01', 'synonyms': ['gazelle'], 'def': 'small swift graceful antelope of Africa and Asia having lustrous eyes', 'name': 'gazelle'}, {'frequency': 'c', 'id': 497, 'synset': 'gelatin.n.02', 'synonyms': ['gelatin', 'jelly'], 'def': 'an edible jelly made with gelatin and used as a dessert or salad base or a coating for foods', 'name': 'gelatin'}, {'frequency': 'r', 'id': 498, 'synset': 'gem.n.02', 'synonyms': ['gemstone'], 'def': 'a crystalline rock that can be cut and polished for jewelry', 'name': 'gemstone'}, {'frequency': 'c', 'id': 499, 'synset': 'giant_panda.n.01', 'synonyms': ['giant_panda', 'panda', 'panda_bear'], 'def': 'large black-and-white herbivorous mammal of bamboo forests of China and Tibet', 'name': 'giant_panda'}, {'frequency': 'c', 'id': 500, 'synset': 'gift_wrap.n.01', 'synonyms': ['gift_wrap'], 'def': 'attractive wrapping paper suitable for wrapping gifts', 'name': 'gift_wrap'}, {'frequency': 'c', 'id': 501, 'synset': 'ginger.n.03', 'synonyms': ['ginger', 'gingerroot'], 'def': 'the root of the common ginger plant; used fresh as a seasoning', 'name': 'ginger'}, {'frequency': 'f', 'id': 502, 'synset': 'giraffe.n.01', 'synonyms': ['giraffe'], 'def': 'tall animal having a spotted coat and small horns and very long neck and legs', 'name': 'giraffe'}, {'frequency': 'c', 'id': 503, 'synset': 
'girdle.n.02', 'synonyms': ['cincture', 'sash', 'waistband', 'waistcloth'], 'def': 'a band of material around the waist that strengthens a skirt or trousers', 'name': 'cincture'}, {'frequency': 'f', 'id': 504, 'synset': 'glass.n.02', 'synonyms': ['glass_(drink_container)', 'drinking_glass'], 'def': 'a container for holding liquids while drinking', 'name': 'glass_(drink_container)'}, {'frequency': 'c', 'id': 505, 'synset': 'globe.n.03', 'synonyms': ['globe'], 'def': 'a sphere on which a map (especially of the earth) is represented', 'name': 'globe'}, {'frequency': 'f', 'id': 506, 'synset': 'glove.n.02', 'synonyms': ['glove'], 'def': 'handwear covering the hand', 'name': 'glove'}, {'frequency': 'c', 'id': 507, 'synset': 'goat.n.01', 'synonyms': ['goat'], 'def': 'a common goat', 'name': 'goat'}, {'frequency': 'f', 'id': 508, 'synset': 'goggles.n.01', 'synonyms': ['goggles'], 'def': 'tight-fitting spectacles worn to protect the eyes', 'name': 'goggles'}, {'frequency': 'r', 'id': 509, 'synset': 'goldfish.n.01', 'synonyms': ['goldfish'], 'def': 'small golden or orange-red freshwater fishes used as pond or aquarium pets', 'name': 'goldfish'}, {'frequency': 'r', 'id': 510, 'synset': 'golf_club.n.02', 'synonyms': ['golf_club', 'golf-club'], 'def': 'golf equipment used by a golfer to hit a golf ball', 'name': 'golf_club'}, {'frequency': 'c', 'id': 511, 'synset': 'golfcart.n.01', 'synonyms': ['golfcart'], 'def': 'a small motor vehicle in which golfers can ride between shots', 'name': 'golfcart'}, {'frequency': 'r', 'id': 512, 'synset': 'gondola.n.02', 'synonyms': ['gondola_(boat)'], 'def': 'long narrow flat-bottomed boat propelled by sculling; traditionally used on canals of Venice', 'name': 'gondola_(boat)'}, {'frequency': 'c', 'id': 513, 'synset': 'goose.n.01', 'synonyms': ['goose'], 'def': 'loud, web-footed long-necked aquatic birds usually larger than ducks', 'name': 'goose'}, {'frequency': 'r', 'id': 514, 'synset': 'gorilla.n.01', 'synonyms': ['gorilla'], 'def': 'largest 
ape', 'name': 'gorilla'}, {'frequency': 'r', 'id': 515, 'synset': 'gourd.n.02', 'synonyms': ['gourd'], 'def': 'any of numerous inedible fruits with hard rinds', 'name': 'gourd'}, {'frequency': 'r', 'id': 516, 'synset': 'gown.n.04', 'synonyms': ['surgical_gown', 'scrubs_(surgical_clothing)'], 'def': 'protective garment worn by surgeons during operations', 'name': 'surgical_gown'}, {'frequency': 'f', 'id': 517, 'synset': 'grape.n.01', 'synonyms': ['grape'], 'def': 'any of various juicy fruit with green or purple skins; grow in clusters', 'name': 'grape'}, {'frequency': 'r', 'id': 518, 'synset': 'grasshopper.n.01', 'synonyms': ['grasshopper'], 'def': 'plant-eating insect with hind legs adapted for leaping', 'name': 'grasshopper'}, {'frequency': 'c', 'id': 519, 'synset': 'grater.n.01', 'synonyms': ['grater'], 'def': 'utensil with sharp perforations for shredding foods (as vegetables or cheese)', 'name': 'grater'}, {'frequency': 'c', 'id': 520, 'synset': 'gravestone.n.01', 'synonyms': ['gravestone', 'headstone', 'tombstone'], 'def': 'a stone that is used to mark a grave', 'name': 'gravestone'}, {'frequency': 'r', 'id': 521, 'synset': 'gravy_boat.n.01', 'synonyms': ['gravy_boat', 'gravy_holder'], 'def': 'a dish (often boat-shaped) for serving gravy or sauce', 'name': 'gravy_boat'}, {'frequency': 'c', 'id': 522, 'synset': 'green_bean.n.02', 'synonyms': ['green_bean'], 'def': 'a common bean plant cultivated for its slender green edible pods', 'name': 'green_bean'}, {'frequency': 'c', 'id': 523, 'synset': 'green_onion.n.01', 'synonyms': ['green_onion', 'spring_onion', 'scallion'], 'def': 'a young onion before the bulb has enlarged', 'name': 'green_onion'}, {'frequency': 'r', 'id': 524, 'synset': 'griddle.n.01', 'synonyms': ['griddle'], 'def': 'cooking utensil consisting of a flat heated surface on which food is cooked', 'name': 'griddle'}, {'frequency': 'r', 'id': 525, 'synset': 'grillroom.n.01', 'synonyms': ['grillroom', 'grill_(restaurant)'], 'def': 'a restaurant where 
food is cooked on a grill', 'name': 'grillroom'}, {'frequency': 'r', 'id': 526, 'synset': 'grinder.n.04', 'synonyms': ['grinder_(tool)'], 'def': 'a machine tool that polishes metal', 'name': 'grinder_(tool)'}, {'frequency': 'r', 'id': 527, 'synset': 'grits.n.01', 'synonyms': ['grits', 'hominy_grits'], 'def': 'coarsely ground corn boiled as a breakfast dish', 'name': 'grits'}, {'frequency': 'c', 'id': 528, 'synset': 'grizzly.n.01', 'synonyms': ['grizzly', 'grizzly_bear'], 'def': 'powerful brownish-yellow bear of the uplands of western North America', 'name': 'grizzly'}, {'frequency': 'c', 'id': 529, 'synset': 'grocery_bag.n.01', 'synonyms': ['grocery_bag'], 'def': \"a sack for holding customer's groceries\", 'name': 'grocery_bag'}, {'frequency': 'r', 'id': 530, 'synset': 'guacamole.n.01', 'synonyms': ['guacamole'], 'def': 'a dip made of mashed avocado mixed with chopped onions and other seasonings', 'name': 'guacamole'}, {'frequency': 'f', 'id': 531, 'synset': 'guitar.n.01', 'synonyms': ['guitar'], 'def': 'a stringed instrument usually having six strings; played by strumming or plucking', 'name': 'guitar'}, {'frequency': 'c', 'id': 532, 'synset': 'gull.n.02', 'synonyms': ['gull', 'seagull'], 'def': 'mostly white aquatic bird having long pointed wings and short legs', 'name': 'gull'}, {'frequency': 'c', 'id': 533, 'synset': 'gun.n.01', 'synonyms': ['gun'], 'def': 'a weapon that discharges a bullet at high velocity from a metal tube', 'name': 'gun'}, {'frequency': 'r', 'id': 534, 'synset': 'hair_spray.n.01', 'synonyms': ['hair_spray'], 'def': 'substance sprayed on the hair to hold it in place', 'name': 'hair_spray'}, {'frequency': 'c', 'id': 535, 'synset': 'hairbrush.n.01', 'synonyms': ['hairbrush'], 'def': \"a brush used to groom a person's hair\", 'name': 'hairbrush'}, {'frequency': 'c', 'id': 536, 'synset': 'hairnet.n.01', 'synonyms': ['hairnet'], 'def': 'a small net that someone wears over their hair to keep it in place', 'name': 'hairnet'}, {'frequency': 'c', 
'id': 537, 'synset': 'hairpin.n.01', 'synonyms': ['hairpin'], 'def': \"a double pronged pin used to hold women's hair in place\", 'name': 'hairpin'}, {'frequency': 'f', 'id': 538, 'synset': 'ham.n.01', 'synonyms': ['ham', 'jambon', 'gammon'], 'def': 'meat cut from the thigh of a hog (usually smoked)', 'name': 'ham'}, {'frequency': 'c', 'id': 539, 'synset': 'hamburger.n.01', 'synonyms': ['hamburger', 'beefburger', 'burger'], 'def': 'a sandwich consisting of a patty of minced beef served on a bun', 'name': 'hamburger'}, {'frequency': 'c', 'id': 540, 'synset': 'hammer.n.02', 'synonyms': ['hammer'], 'def': 'a hand tool with a heavy head and a handle; used to deliver an impulsive force by striking', 'name': 'hammer'}, {'frequency': 'r', 'id': 541, 'synset': 'hammock.n.02', 'synonyms': ['hammock'], 'def': 'a hanging bed of canvas or rope netting (usually suspended between two trees)', 'name': 'hammock'}, {'frequency': 'r', 'id': 542, 'synset': 'hamper.n.02', 'synonyms': ['hamper'], 'def': 'a basket usually with a cover', 'name': 'hamper'}, {'frequency': 'r', 'id': 543, 'synset': 'hamster.n.01', 'synonyms': ['hamster'], 'def': 'short-tailed burrowing rodent with large cheek pouches', 'name': 'hamster'}, {'frequency': 'c', 'id': 544, 'synset': 'hand_blower.n.01', 'synonyms': ['hair_dryer'], 'def': 'a hand-held electric blower that can blow warm air onto the hair', 'name': 'hair_dryer'}, {'frequency': 'r', 'id': 545, 'synset': 'hand_glass.n.01', 'synonyms': ['hand_glass', 'hand_mirror'], 'def': 'a mirror intended to be held in the hand', 'name': 'hand_glass'}, {'frequency': 'f', 'id': 546, 'synset': 'hand_towel.n.01', 'synonyms': ['hand_towel', 'face_towel'], 'def': 'a small towel used to dry the hands or face', 'name': 'hand_towel'}, {'frequency': 'c', 'id': 547, 'synset': 'handcart.n.01', 'synonyms': ['handcart', 'pushcart', 'hand_truck'], 'def': 'wheeled vehicle that can be pushed by a person', 'name': 'handcart'}, {'frequency': 'r', 'id': 548, 'synset': 'handcuff.n.01', 
'synonyms': ['handcuff'], 'def': 'shackle that consists of a metal loop that can be locked around the wrist', 'name': 'handcuff'}, {'frequency': 'c', 'id': 549, 'synset': 'handkerchief.n.01', 'synonyms': ['handkerchief'], 'def': 'a square piece of cloth used for wiping the eyes or nose or as a costume accessory', 'name': 'handkerchief'}, {'frequency': 'f', 'id': 550, 'synset': 'handle.n.01', 'synonyms': ['handle', 'grip', 'handgrip'], 'def': 'the appendage to an object that is designed to be held in order to use or move it', 'name': 'handle'}, {'frequency': 'r', 'id': 551, 'synset': 'handsaw.n.01', 'synonyms': ['handsaw', \"carpenter's_saw\"], 'def': 'a saw used with one hand for cutting wood', 'name': 'handsaw'}, {'frequency': 'r', 'id': 552, 'synset': 'hardback.n.01', 'synonyms': ['hardback_book', 'hardcover_book'], 'def': 'a book with cardboard or cloth or leather covers', 'name': 'hardback_book'}, {'frequency': 'r', 'id': 553, 'synset': 'harmonium.n.01', 'synonyms': ['harmonium', 'organ_(musical_instrument)', 'reed_organ_(musical_instrument)'], 'def': 'a free-reed instrument in which air is forced through the reeds by bellows', 'name': 'harmonium'}, {'frequency': 'f', 'id': 554, 'synset': 'hat.n.01', 'synonyms': ['hat'], 'def': 'headwear that protects the head from bad weather, sun, or worn for fashion', 'name': 'hat'}, {'frequency': 'r', 'id': 555, 'synset': 'hatbox.n.01', 'synonyms': ['hatbox'], 'def': 'a round piece of luggage for carrying hats', 'name': 'hatbox'}, {'frequency': 'r', 'id': 556, 'synset': 'hatch.n.03', 'synonyms': ['hatch'], 'def': 'a movable barrier covering a hatchway', 'name': 'hatch'}, {'frequency': 'c', 'id': 557, 'synset': 'head_covering.n.01', 'synonyms': ['veil'], 'def': 'a garment that covers the head and face', 'name': 'veil'}, {'frequency': 'f', 'id': 558, 'synset': 'headband.n.01', 'synonyms': ['headband'], 'def': 'a band worn around or over the head', 'name': 'headband'}, {'frequency': 'f', 'id': 559, 'synset': 'headboard.n.01', 
'synonyms': ['headboard'], 'def': 'a vertical board or panel forming the head of a bedstead', 'name': 'headboard'}, {'frequency': 'f', 'id': 560, 'synset': 'headlight.n.01', 'synonyms': ['headlight', 'headlamp'], 'def': 'a powerful light with reflector; attached to the front of an automobile or locomotive', 'name': 'headlight'}, {'frequency': 'c', 'id': 561, 'synset': 'headscarf.n.01', 'synonyms': ['headscarf'], 'def': 'a kerchief worn over the head and tied under the chin', 'name': 'headscarf'}, {'frequency': 'r', 'id': 562, 'synset': 'headset.n.01', 'synonyms': ['headset'], 'def': 'receiver consisting of a pair of headphones', 'name': 'headset'}, {'frequency': 'c', 'id': 563, 'synset': 'headstall.n.01', 'synonyms': ['headstall_(for_horses)', 'headpiece_(for_horses)'], 'def': \"the band that is the part of a bridle that fits around a horse's head\", 'name': 'headstall_(for_horses)'}, {'frequency': 'r', 'id': 564, 'synset': 'hearing_aid.n.02', 'synonyms': ['hearing_aid'], 'def': 'an acoustic device used to direct sound to the ear of a hearing-impaired person', 'name': 'hearing_aid'}, {'frequency': 'c', 'id': 565, 'synset': 'heart.n.02', 'synonyms': ['heart'], 'def': 'a muscular organ; its contractions move the blood through the body', 'name': 'heart'}, {'frequency': 'c', 'id': 566, 'synset': 'heater.n.01', 'synonyms': ['heater', 'warmer'], 'def': 'device that heats water or supplies warmth to a room', 'name': 'heater'}, {'frequency': 'c', 'id': 567, 'synset': 'helicopter.n.01', 'synonyms': ['helicopter'], 'def': 'an aircraft without wings that obtains its lift from the rotation of overhead blades', 'name': 'helicopter'}, {'frequency': 'f', 'id': 568, 'synset': 'helmet.n.02', 'synonyms': ['helmet'], 'def': 'a protective headgear made of hard material to resist blows', 'name': 'helmet'}, {'frequency': 'r', 'id': 569, 'synset': 'heron.n.02', 'synonyms': ['heron'], 'def': 'grey or white wading bird with long neck and long legs and (usually) long bill', 'name': 
'heron'}, {'frequency': 'c', 'id': 570, 'synset': 'highchair.n.01', 'synonyms': ['highchair', 'feeding_chair'], 'def': 'a chair for feeding a very young child', 'name': 'highchair'}, {'frequency': 'f', 'id': 571, 'synset': 'hinge.n.01', 'synonyms': ['hinge'], 'def': 'a joint that holds two parts together so that one can swing relative to the other', 'name': 'hinge'}, {'frequency': 'r', 'id': 572, 'synset': 'hippopotamus.n.01', 'synonyms': ['hippopotamus'], 'def': 'massive thick-skinned animal living in or around rivers of tropical Africa', 'name': 'hippopotamus'}, {'frequency': 'r', 'id': 573, 'synset': 'hockey_stick.n.01', 'synonyms': ['hockey_stick'], 'def': 'sports implement consisting of a stick used by hockey players to move the puck', 'name': 'hockey_stick'}, {'frequency': 'c', 'id': 574, 'synset': 'hog.n.03', 'synonyms': ['hog', 'pig'], 'def': 'domestic swine', 'name': 'hog'}, {'frequency': 'f', 'id': 575, 'synset': 'home_plate.n.01', 'synonyms': ['home_plate_(baseball)', 'home_base_(baseball)'], 'def': '(baseball) a rubber slab where the batter stands; it must be touched by a base runner in order to score', 'name': 'home_plate_(baseball)'}, {'frequency': 'c', 'id': 576, 'synset': 'honey.n.01', 'synonyms': ['honey'], 'def': 'a sweet yellow liquid produced by bees', 'name': 'honey'}, {'frequency': 'f', 'id': 577, 'synset': 'hood.n.06', 'synonyms': ['fume_hood', 'exhaust_hood'], 'def': 'metal covering leading to a vent that exhausts smoke or fumes', 'name': 'fume_hood'}, {'frequency': 'f', 'id': 578, 'synset': 'hook.n.05', 'synonyms': ['hook'], 'def': 'a curved or bent implement for suspending or pulling something', 'name': 'hook'}, {'frequency': 'f', 'id': 579, 'synset': 'horse.n.01', 'synonyms': ['horse'], 'def': 'a common horse', 'name': 'horse'}, {'frequency': 'f', 'id': 580, 'synset': 'hose.n.03', 'synonyms': ['hose', 'hosepipe'], 'def': 'a flexible pipe for conveying a liquid or gas', 'name': 'hose'}, {'frequency': 'r', 'id': 581, 'synset': 
'hot-air_balloon.n.01', 'synonyms': ['hot-air_balloon'], 'def': 'balloon for travel through the air in a basket suspended below a large bag of heated air', 'name': 'hot-air_balloon'}, {'frequency': 'r', 'id': 582, 'synset': 'hot_plate.n.01', 'synonyms': ['hotplate'], 'def': 'a portable electric appliance for heating or cooking or keeping food warm', 'name': 'hotplate'}, {'frequency': 'c', 'id': 583, 'synset': 'hot_sauce.n.01', 'synonyms': ['hot_sauce'], 'def': 'a pungent peppery sauce', 'name': 'hot_sauce'}, {'frequency': 'r', 'id': 584, 'synset': 'hourglass.n.01', 'synonyms': ['hourglass'], 'def': 'a sandglass timer that runs for sixty minutes', 'name': 'hourglass'}, {'frequency': 'r', 'id': 585, 'synset': 'houseboat.n.01', 'synonyms': ['houseboat'], 'def': 'a barge that is designed and equipped for use as a dwelling', 'name': 'houseboat'}, {'frequency': 'r', 'id': 586, 'synset': 'hummingbird.n.01', 'synonyms': ['hummingbird'], 'def': 'tiny American bird having brilliant iridescent plumage and long slender bills', 'name': 'hummingbird'}, {'frequency': 'r', 'id': 587, 'synset': 'hummus.n.01', 'synonyms': ['hummus', 'humus', 'hommos', 'hoummos', 'humous'], 'def': 'a thick spread made from mashed chickpeas', 'name': 'hummus'}, {'frequency': 'c', 'id': 588, 'synset': 'ice_bear.n.01', 'synonyms': ['polar_bear'], 'def': 'white bear of Arctic regions', 'name': 'polar_bear'}, {'frequency': 'c', 'id': 589, 'synset': 'ice_cream.n.01', 'synonyms': ['icecream'], 'def': 'frozen dessert containing cream and sugar and flavoring', 'name': 'icecream'}, {'frequency': 'r', 'id': 590, 'synset': 'ice_lolly.n.01', 'synonyms': ['popsicle'], 'def': 'ice cream or water ice on a small wooden stick', 'name': 'popsicle'}, {'frequency': 'c', 'id': 591, 'synset': 'ice_maker.n.01', 'synonyms': ['ice_maker'], 'def': 'an appliance included in some electric refrigerators for making ice cubes', 'name': 'ice_maker'}, {'frequency': 'r', 'id': 592, 'synset': 'ice_pack.n.01', 'synonyms': ['ice_pack', 
'ice_bag'], 'def': 'a waterproof bag filled with ice: applied to the body (especially the head) to cool or reduce swelling', 'name': 'ice_pack'}, {'frequency': 'r', 'id': 593, 'synset': 'ice_skate.n.01', 'synonyms': ['ice_skate'], 'def': 'skate consisting of a boot with a steel blade fitted to the sole', 'name': 'ice_skate'}, {'frequency': 'r', 'id': 594, 'synset': 'ice_tea.n.01', 'synonyms': ['ice_tea', 'iced_tea'], 'def': 'strong tea served over ice', 'name': 'ice_tea'}, {'frequency': 'c', 'id': 595, 'synset': 'igniter.n.01', 'synonyms': ['igniter', 'ignitor', 'lighter'], 'def': 'a substance or device used to start a fire', 'name': 'igniter'}, {'frequency': 'r', 'id': 596, 'synset': 'incense.n.01', 'synonyms': ['incense'], 'def': 'a substance that produces a fragrant odor when burned', 'name': 'incense'}, {'frequency': 'r', 'id': 597, 'synset': 'inhaler.n.01', 'synonyms': ['inhaler', 'inhalator'], 'def': 'a dispenser that produces a chemical vapor to be inhaled through mouth or nose', 'name': 'inhaler'}, {'frequency': 'c', 'id': 598, 'synset': 'ipod.n.01', 'synonyms': ['iPod'], 'def': 'a pocket-sized device used to play music files', 'name': 'iPod'}, {'frequency': 'c', 'id': 599, 'synset': 'iron.n.04', 'synonyms': ['iron_(for_clothing)', 'smoothing_iron_(for_clothing)'], 'def': 'home appliance consisting of a flat metal base that is heated and used to smooth cloth', 'name': 'iron_(for_clothing)'}, {'frequency': 'r', 'id': 600, 'synset': 'ironing_board.n.01', 'synonyms': ['ironing_board'], 'def': 'narrow padded board on collapsible supports; used for ironing clothes', 'name': 'ironing_board'}, {'frequency': 'f', 'id': 601, 'synset': 'jacket.n.01', 'synonyms': ['jacket'], 'def': 'a waist-length coat', 'name': 'jacket'}, {'frequency': 'r', 'id': 602, 'synset': 'jam.n.01', 'synonyms': ['jam'], 'def': 'preserve of crushed fruit', 'name': 'jam'}, {'frequency': 'f', 'id': 603, 'synset': 'jean.n.01', 'synonyms': ['jean', 'blue_jean', 'denim'], 'def': '(usually plural) 
close-fitting trousers of heavy denim for manual work or casual wear', 'name': 'jean'}, {'frequency': 'c', 'id': 604, 'synset': 'jeep.n.01', 'synonyms': ['jeep', 'landrover'], 'def': 'a car suitable for traveling over rough terrain', 'name': 'jeep'}, {'frequency': 'r', 'id': 605, 'synset': 'jelly_bean.n.01', 'synonyms': ['jelly_bean', 'jelly_egg'], 'def': 'sugar-glazed jellied candy', 'name': 'jelly_bean'}, {'frequency': 'f', 'id': 606, 'synset': 'jersey.n.03', 'synonyms': ['jersey', 'T-shirt', 'tee_shirt'], 'def': 'a close-fitting pullover shirt', 'name': 'jersey'}, {'frequency': 'c', 'id': 607, 'synset': 'jet.n.01', 'synonyms': ['jet_plane', 'jet-propelled_plane'], 'def': 'an airplane powered by one or more jet engines', 'name': 'jet_plane'}, {'frequency': 'c', 'id': 608, 'synset': 'jewelry.n.01', 'synonyms': ['jewelry', 'jewellery'], 'def': 'an adornment (as a bracelet or ring or necklace) made of precious metals and set with gems (or imitation gems)', 'name': 'jewelry'}, {'frequency': 'r', 'id': 609, 'synset': 'joystick.n.02', 'synonyms': ['joystick'], 'def': 'a control device for computers consisting of a vertical handle that can move freely in two directions', 'name': 'joystick'}, {'frequency': 'r', 'id': 610, 'synset': 'jump_suit.n.01', 'synonyms': ['jumpsuit'], 'def': \"one-piece garment fashioned after a parachutist's uniform\", 'name': 'jumpsuit'}, {'frequency': 'c', 'id': 611, 'synset': 'kayak.n.01', 'synonyms': ['kayak'], 'def': 'a small canoe consisting of a light frame made watertight with animal skins', 'name': 'kayak'}, {'frequency': 'r', 'id': 612, 'synset': 'keg.n.02', 'synonyms': ['keg'], 'def': 'small cask or barrel', 'name': 'keg'}, {'frequency': 'r', 'id': 613, 'synset': 'kennel.n.01', 'synonyms': ['kennel', 'doghouse'], 'def': 'outbuilding that serves as a shelter for a dog', 'name': 'kennel'}, {'frequency': 'c', 'id': 614, 'synset': 'kettle.n.01', 'synonyms': ['kettle', 'boiler'], 'def': 'a metal pot for stewing or boiling; usually has a 
lid', 'name': 'kettle'}, {'frequency': 'f', 'id': 615, 'synset': 'key.n.01', 'synonyms': ['key'], 'def': 'metal instrument used to unlock a lock', 'name': 'key'}, {'frequency': 'r', 'id': 616, 'synset': 'keycard.n.01', 'synonyms': ['keycard'], 'def': 'a plastic card used to gain access typically to a door', 'name': 'keycard'}, {'frequency': 'r', 'id': 617, 'synset': 'kilt.n.01', 'synonyms': ['kilt'], 'def': 'a knee-length pleated tartan skirt worn by men as part of the traditional dress in the Highlands of northern Scotland', 'name': 'kilt'}, {'frequency': 'c', 'id': 618, 'synset': 'kimono.n.01', 'synonyms': ['kimono'], 'def': 'a loose robe; imitated from robes originally worn by Japanese', 'name': 'kimono'}, {'frequency': 'f', 'id': 619, 'synset': 'kitchen_sink.n.01', 'synonyms': ['kitchen_sink'], 'def': 'a sink in a kitchen', 'name': 'kitchen_sink'}, {'frequency': 'c', 'id': 620, 'synset': 'kitchen_table.n.01', 'synonyms': ['kitchen_table'], 'def': 'a table in the kitchen', 'name': 'kitchen_table'}, {'frequency': 'f', 'id': 621, 'synset': 'kite.n.03', 'synonyms': ['kite'], 'def': 'plaything consisting of a light frame covered with tissue paper; flown in wind at end of a string', 'name': 'kite'}, {'frequency': 'c', 'id': 622, 'synset': 'kitten.n.01', 'synonyms': ['kitten', 'kitty'], 'def': 'young domestic cat', 'name': 'kitten'}, {'frequency': 'c', 'id': 623, 'synset': 'kiwi.n.03', 'synonyms': ['kiwi_fruit'], 'def': 'fuzzy brown egg-shaped fruit with slightly tart green flesh', 'name': 'kiwi_fruit'}, {'frequency': 'f', 'id': 624, 'synset': 'knee_pad.n.01', 'synonyms': ['knee_pad'], 'def': 'protective garment consisting of a pad worn by football or baseball or hockey players', 'name': 'knee_pad'}, {'frequency': 'f', 'id': 625, 'synset': 'knife.n.01', 'synonyms': ['knife'], 'def': 'tool with a blade and point used as a cutting instrument', 'name': 'knife'}, {'frequency': 'r', 'id': 626, 'synset': 'knight.n.02', 'synonyms': ['knight_(chess_piece)', 
'horse_(chess_piece)'], 'def': 'a chess game piece shaped to resemble the head of a horse', 'name': 'knight_(chess_piece)'}, {'frequency': 'r', 'id': 627, 'synset': 'knitting_needle.n.01', 'synonyms': ['knitting_needle'], 'def': 'needle consisting of a slender rod with pointed ends; usually used in pairs', 'name': 'knitting_needle'}, {'frequency': 'f', 'id': 628, 'synset': 'knob.n.02', 'synonyms': ['knob'], 'def': 'a round handle often found on a door', 'name': 'knob'}, {'frequency': 'r', 'id': 629, 'synset': 'knocker.n.05', 'synonyms': ['knocker_(on_a_door)', 'doorknocker'], 'def': 'a device (usually metal and ornamental) attached by a hinge to a door', 'name': 'knocker_(on_a_door)'}, {'frequency': 'r', 'id': 630, 'synset': 'koala.n.01', 'synonyms': ['koala', 'koala_bear'], 'def': 'sluggish tailless Australian marsupial with grey furry ears and coat', 'name': 'koala'}, {'frequency': 'r', 'id': 631, 'synset': 'lab_coat.n.01', 'synonyms': ['lab_coat', 'laboratory_coat'], 'def': 'a light coat worn to protect clothing from substances used while working in a laboratory', 'name': 'lab_coat'}, {'frequency': 'f', 'id': 632, 'synset': 'ladder.n.01', 'synonyms': ['ladder'], 'def': 'steps consisting of two parallel members connected by rungs', 'name': 'ladder'}, {'frequency': 'c', 'id': 633, 'synset': 'ladle.n.01', 'synonyms': ['ladle'], 'def': 'a spoon-shaped vessel with a long handle frequently used to transfer liquids', 'name': 'ladle'}, {'frequency': 'r', 'id': 634, 'synset': 'ladybug.n.01', 'synonyms': ['ladybug', 'ladybeetle', 'ladybird_beetle'], 'def': 'small round bright-colored and spotted beetle, typically red and black', 'name': 'ladybug'}, {'frequency': 'c', 'id': 635, 'synset': 'lamb.n.01', 'synonyms': ['lamb_(animal)'], 'def': 'young sheep', 'name': 'lamb_(animal)'}, {'frequency': 'r', 'id': 636, 'synset': 'lamb_chop.n.01', 'synonyms': ['lamb-chop', 'lambchop'], 'def': 'chop cut from a lamb', 'name': 'lamb-chop'}, {'frequency': 'f', 'id': 637, 'synset': 
'lamp.n.02', 'synonyms': ['lamp'], 'def': 'a piece of furniture holding one or more electric light bulbs', 'name': 'lamp'}, {'frequency': 'f', 'id': 638, 'synset': 'lamppost.n.01', 'synonyms': ['lamppost'], 'def': 'a metal post supporting an outdoor lamp (such as a streetlight)', 'name': 'lamppost'}, {'frequency': 'f', 'id': 639, 'synset': 'lampshade.n.01', 'synonyms': ['lampshade'], 'def': 'a protective ornamental shade used to screen a light bulb from direct view', 'name': 'lampshade'}, {'frequency': 'c', 'id': 640, 'synset': 'lantern.n.01', 'synonyms': ['lantern'], 'def': 'light in a transparent protective case', 'name': 'lantern'}, {'frequency': 'f', 'id': 641, 'synset': 'lanyard.n.02', 'synonyms': ['lanyard', 'laniard'], 'def': 'a cord worn around the neck to hold a knife or whistle, etc.', 'name': 'lanyard'}, {'frequency': 'f', 'id': 642, 'synset': 'laptop.n.01', 'synonyms': ['laptop_computer', 'notebook_computer'], 'def': 'a portable computer small enough to use in your lap', 'name': 'laptop_computer'}, {'frequency': 'r', 'id': 643, 'synset': 'lasagna.n.01', 'synonyms': ['lasagna', 'lasagne'], 'def': 'baked dish of layers of lasagna pasta with sauce and cheese and meat or vegetables', 'name': 'lasagna'}, {'frequency': 'c', 'id': 644, 'synset': 'latch.n.02', 'synonyms': ['latch'], 'def': 'a bar that can be lowered or slid into a groove to fasten a door or gate', 'name': 'latch'}, {'frequency': 'r', 'id': 645, 'synset': 'lawn_mower.n.01', 'synonyms': ['lawn_mower'], 'def': 'garden tool for mowing grass on lawns', 'name': 'lawn_mower'}, {'frequency': 'r', 'id': 646, 'synset': 'leather.n.01', 'synonyms': ['leather'], 'def': 'an animal skin made smooth and flexible by removing the hair and then tanning', 'name': 'leather'}, {'frequency': 'c', 'id': 647, 'synset': 'legging.n.01', 'synonyms': ['legging_(clothing)', 'leging_(clothing)', 'leg_covering'], 'def': 'a garment covering the leg (usually extending from the knee to the ankle)', 'name': 'legging_(clothing)'}, 
{'frequency': 'c', 'id': 648, 'synset': 'lego.n.01', 'synonyms': ['Lego', 'Lego_set'], 'def': \"a child's plastic construction set for making models from blocks\", 'name': 'Lego'}, {'frequency': 'f', 'id': 649, 'synset': 'lemon.n.01', 'synonyms': ['lemon'], 'def': 'yellow oval fruit with juicy acidic flesh', 'name': 'lemon'}, {'frequency': 'r', 'id': 650, 'synset': 'lemonade.n.01', 'synonyms': ['lemonade'], 'def': 'sweetened beverage of diluted lemon juice', 'name': 'lemonade'}, {'frequency': 'f', 'id': 651, 'synset': 'lettuce.n.02', 'synonyms': ['lettuce'], 'def': 'leafy plant commonly eaten in salad or on sandwiches', 'name': 'lettuce'}, {'frequency': 'f', 'id': 652, 'synset': 'license_plate.n.01', 'synonyms': ['license_plate', 'numberplate'], 'def': \"a plate mounted on the front and back of car and bearing the car's registration number\", 'name': 'license_plate'}, {'frequency': 'f', 'id': 653, 'synset': 'life_buoy.n.01', 'synonyms': ['life_buoy', 'lifesaver', 'life_belt', 'life_ring'], 'def': 'a ring-shaped life preserver used to prevent drowning (NOT a life-jacket or vest)', 'name': 'life_buoy'}, {'frequency': 'f', 'id': 654, 'synset': 'life_jacket.n.01', 'synonyms': ['life_jacket', 'life_vest'], 'def': 'life preserver consisting of a sleeveless jacket of buoyant or inflatable design', 'name': 'life_jacket'}, {'frequency': 'f', 'id': 655, 'synset': 'light_bulb.n.01', 'synonyms': ['lightbulb'], 'def': 'glass bulb or tube shaped electric device that emits light (DO NOT MARK LAMPS AS A WHOLE)', 'name': 'lightbulb'}, {'frequency': 'r', 'id': 656, 'synset': 'lightning_rod.n.02', 'synonyms': ['lightning_rod', 'lightning_conductor'], 'def': 'a metallic conductor that is attached to a high point and leads to the ground', 'name': 'lightning_rod'}, {'frequency': 'c', 'id': 657, 'synset': 'lime.n.06', 'synonyms': ['lime'], 'def': 'the green acidic fruit of any of various lime trees', 'name': 'lime'}, {'frequency': 'r', 'id': 658, 'synset': 'limousine.n.01', 'synonyms': 
['limousine'], 'def': 'long luxurious car; usually driven by a chauffeur', 'name': 'limousine'}, {'frequency': 'r', 'id': 659, 'synset': 'linen.n.02', 'synonyms': ['linen_paper'], 'def': 'a high-quality paper made of linen fibers or with a linen finish', 'name': 'linen_paper'}, {'frequency': 'c', 'id': 660, 'synset': 'lion.n.01', 'synonyms': ['lion'], 'def': 'large gregarious predatory cat of Africa and India', 'name': 'lion'}, {'frequency': 'c', 'id': 661, 'synset': 'lip_balm.n.01', 'synonyms': ['lip_balm'], 'def': 'a balm applied to the lips', 'name': 'lip_balm'}, {'frequency': 'c', 'id': 662, 'synset': 'lipstick.n.01', 'synonyms': ['lipstick', 'lip_rouge'], 'def': 'makeup that is used to color the lips', 'name': 'lipstick'}, {'frequency': 'r', 'id': 663, 'synset': 'liquor.n.01', 'synonyms': ['liquor', 'spirits', 'hard_liquor', 'liqueur', 'cordial'], 'def': 'an alcoholic beverage that is distilled rather than fermented', 'name': 'liquor'}, {'frequency': 'r', 'id': 664, 'synset': 'lizard.n.01', 'synonyms': ['lizard'], 'def': 'a reptile with usually two pairs of legs and a tapering tail', 'name': 'lizard'}, {'frequency': 'r', 'id': 665, 'synset': 'loafer.n.02', 'synonyms': ['Loafer_(type_of_shoe)'], 'def': 'a low leather step-in shoe', 'name': 'Loafer_(type_of_shoe)'}, {'frequency': 'f', 'id': 666, 'synset': 'log.n.01', 'synonyms': ['log'], 'def': 'a segment of the trunk of a tree when stripped of branches', 'name': 'log'}, {'frequency': 'c', 'id': 667, 'synset': 'lollipop.n.02', 'synonyms': ['lollipop'], 'def': 'hard candy on a stick', 'name': 'lollipop'}, {'frequency': 'c', 'id': 668, 'synset': 'lotion.n.01', 'synonyms': ['lotion'], 'def': 'any of various cosmetic preparations that are applied to the skin', 'name': 'lotion'}, {'frequency': 'f', 'id': 669, 'synset': 'loudspeaker.n.01', 'synonyms': ['speaker_(stero_equipment)'], 'def': 'electronic device that produces sound often as part of a stereo system', 'name': 'speaker_(stero_equipment)'}, {'frequency': 'c', 
'id': 670, 'synset': 'love_seat.n.01', 'synonyms': ['loveseat'], 'def': 'small sofa that seats two people', 'name': 'loveseat'}, {'frequency': 'r', 'id': 671, 'synset': 'machine_gun.n.01', 'synonyms': ['machine_gun'], 'def': 'a rapidly firing automatic gun', 'name': 'machine_gun'}, {'frequency': 'f', 'id': 672, 'synset': 'magazine.n.02', 'synonyms': ['magazine'], 'def': 'a paperback periodic publication', 'name': 'magazine'}, {'frequency': 'f', 'id': 673, 'synset': 'magnet.n.01', 'synonyms': ['magnet'], 'def': 'a device that attracts iron and produces a magnetic field', 'name': 'magnet'}, {'frequency': 'r', 'id': 674, 'synset': 'mail_slot.n.01', 'synonyms': ['mail_slot'], 'def': 'a slot (usually in a door) through which mail can be delivered', 'name': 'mail_slot'}, {'frequency': 'c', 'id': 675, 'synset': 'mailbox.n.01', 'synonyms': ['mailbox_(at_home)', 'letter_box_(at_home)'], 'def': 'a private box for delivery of mail', 'name': 'mailbox_(at_home)'}, {'frequency': 'r', 'id': 676, 'synset': 'mallet.n.01', 'synonyms': ['mallet'], 'def': 'a sports implement with a long handle and a hammer-like head used to hit a ball', 'name': 'mallet'}, {'frequency': 'r', 'id': 677, 'synset': 'mammoth.n.01', 'synonyms': ['mammoth'], 'def': 'any of numerous extinct elephants widely distributed in the Pleistocene', 'name': 'mammoth'}, {'frequency': 'c', 'id': 678, 'synset': 'mandarin.n.05', 'synonyms': ['mandarin_orange'], 'def': 'a somewhat flat reddish-orange loose skinned citrus of China', 'name': 'mandarin_orange'}, {'frequency': 'c', 'id': 679, 'synset': 'manger.n.01', 'synonyms': ['manger', 'trough'], 'def': 'a container (usually in a barn or stable) from which cattle or horses feed', 'name': 'manger'}, {'frequency': 'f', 'id': 680, 'synset': 'manhole.n.01', 'synonyms': ['manhole'], 'def': 'a hole (usually with a flush cover) through which a person can gain access to an underground structure', 'name': 'manhole'}, {'frequency': 'c', 'id': 681, 'synset': 'map.n.01', 'synonyms': 
['map'], 'def': \"a diagrammatic representation of the earth's surface (or part of it)\", 'name': 'map'}, {'frequency': 'c', 'id': 682, 'synset': 'marker.n.03', 'synonyms': ['marker'], 'def': 'a writing implement for making a mark', 'name': 'marker'}, {'frequency': 'r', 'id': 683, 'synset': 'martini.n.01', 'synonyms': ['martini'], 'def': 'a cocktail made of gin (or vodka) with dry vermouth', 'name': 'martini'}, {'frequency': 'r', 'id': 684, 'synset': 'mascot.n.01', 'synonyms': ['mascot'], 'def': 'a person or animal that is adopted by a team or other group as a symbolic figure', 'name': 'mascot'}, {'frequency': 'c', 'id': 685, 'synset': 'mashed_potato.n.01', 'synonyms': ['mashed_potato'], 'def': 'potato that has been peeled and boiled and then mashed', 'name': 'mashed_potato'}, {'frequency': 'r', 'id': 686, 'synset': 'masher.n.02', 'synonyms': ['masher'], 'def': 'a kitchen utensil used for mashing (e.g. potatoes)', 'name': 'masher'}, {'frequency': 'f', 'id': 687, 'synset': 'mask.n.04', 'synonyms': ['mask', 'facemask'], 'def': 'a protective covering worn over the face', 'name': 'mask'}, {'frequency': 'f', 'id': 688, 'synset': 'mast.n.01', 'synonyms': ['mast'], 'def': 'a vertical spar for supporting sails', 'name': 'mast'}, {'frequency': 'c', 'id': 689, 'synset': 'mat.n.03', 'synonyms': ['mat_(gym_equipment)', 'gym_mat'], 'def': 'sports equipment consisting of a piece of thick padding on the floor for gymnastics', 'name': 'mat_(gym_equipment)'}, {'frequency': 'r', 'id': 690, 'synset': 'matchbox.n.01', 'synonyms': ['matchbox'], 'def': 'a box for holding matches', 'name': 'matchbox'}, {'frequency': 'f', 'id': 691, 'synset': 'mattress.n.01', 'synonyms': ['mattress'], 'def': 'a thick pad filled with resilient material used as a bed or part of a bed', 'name': 'mattress'}, {'frequency': 'c', 'id': 692, 'synset': 'measuring_cup.n.01', 'synonyms': ['measuring_cup'], 'def': 'graduated cup used to measure liquid or granular ingredients', 'name': 'measuring_cup'}, {'frequency': 
'c', 'id': 693, 'synset': 'measuring_stick.n.01', 'synonyms': ['measuring_stick', 'ruler_(measuring_stick)', 'measuring_rod'], 'def': 'measuring instrument having a sequence of marks at regular intervals', 'name': 'measuring_stick'}, {'frequency': 'c', 'id': 694, 'synset': 'meatball.n.01', 'synonyms': ['meatball'], 'def': 'ground meat formed into a ball and fried or simmered in broth', 'name': 'meatball'}, {'frequency': 'c', 'id': 695, 'synset': 'medicine.n.02', 'synonyms': ['medicine'], 'def': 'something that treats or prevents or alleviates the symptoms of disease', 'name': 'medicine'}, {'frequency': 'r', 'id': 696, 'synset': 'melon.n.01', 'synonyms': ['melon'], 'def': 'fruit of the gourd family having a hard rind and sweet juicy flesh', 'name': 'melon'}, {'frequency': 'f', 'id': 697, 'synset': 'microphone.n.01', 'synonyms': ['microphone'], 'def': 'device for converting sound waves into electrical energy', 'name': 'microphone'}, {'frequency': 'r', 'id': 698, 'synset': 'microscope.n.01', 'synonyms': ['microscope'], 'def': 'magnifier of the image of small objects', 'name': 'microscope'}, {'frequency': 'f', 'id': 699, 'synset': 'microwave.n.02', 'synonyms': ['microwave_oven'], 'def': 'kitchen appliance that cooks food by passing an electromagnetic wave through it', 'name': 'microwave_oven'}, {'frequency': 'r', 'id': 700, 'synset': 'milestone.n.01', 'synonyms': ['milestone', 'milepost'], 'def': 'stone post at side of a road to show distances', 'name': 'milestone'}, {'frequency': 'c', 'id': 701, 'synset': 'milk.n.01', 'synonyms': ['milk'], 'def': 'a white nutritious liquid secreted by mammals and used as food by human beings', 'name': 'milk'}, {'frequency': 'f', 'id': 702, 'synset': 'minivan.n.01', 'synonyms': ['minivan'], 'def': 'a small box-shaped passenger van', 'name': 'minivan'}, {'frequency': 'r', 'id': 703, 'synset': 'mint.n.05', 'synonyms': ['mint_candy'], 'def': 'a candy that is flavored with a mint oil', 'name': 'mint_candy'}, {'frequency': 'f', 'id': 704, 
'synset': 'mirror.n.01', 'synonyms': ['mirror'], 'def': 'polished surface that forms images by reflecting light', 'name': 'mirror'}, {'frequency': 'c', 'id': 705, 'synset': 'mitten.n.01', 'synonyms': ['mitten'], 'def': 'glove that encases the thumb separately and the other four fingers together', 'name': 'mitten'}, {'frequency': 'c', 'id': 706, 'synset': 'mixer.n.04', 'synonyms': ['mixer_(kitchen_tool)', 'stand_mixer'], 'def': 'a kitchen utensil that is used for mixing foods', 'name': 'mixer_(kitchen_tool)'}, {'frequency': 'c', 'id': 707, 'synset': 'money.n.03', 'synonyms': ['money'], 'def': 'the official currency issued by a government or national bank', 'name': 'money'}, {'frequency': 'f', 'id': 708, 'synset': 'monitor.n.04', 'synonyms': ['monitor_(computer_equipment) computer_monitor'], 'def': 'a computer monitor', 'name': 'monitor_(computer_equipment) computer_monitor'}, {'frequency': 'c', 'id': 709, 'synset': 'monkey.n.01', 'synonyms': ['monkey'], 'def': 'any of various long-tailed primates', 'name': 'monkey'}, {'frequency': 'f', 'id': 710, 'synset': 'motor.n.01', 'synonyms': ['motor'], 'def': 'machine that converts other forms of energy into mechanical energy and so imparts motion', 'name': 'motor'}, {'frequency': 'f', 'id': 711, 'synset': 'motor_scooter.n.01', 'synonyms': ['motor_scooter', 'scooter'], 'def': 'a wheeled vehicle with small wheels and a low-powered engine', 'name': 'motor_scooter'}, {'frequency': 'r', 'id': 712, 'synset': 'motor_vehicle.n.01', 'synonyms': ['motor_vehicle', 'automotive_vehicle'], 'def': 'a self-propelled wheeled vehicle that does not run on rails', 'name': 'motor_vehicle'}, {'frequency': 'r', 'id': 713, 'synset': 'motorboat.n.01', 'synonyms': ['motorboat', 'powerboat'], 'def': 'a boat propelled by an internal-combustion engine', 'name': 'motorboat'}, {'frequency': 'f', 'id': 714, 'synset': 'motorcycle.n.01', 'synonyms': ['motorcycle'], 'def': 'a motor vehicle with two wheels and a strong frame', 'name': 'motorcycle'}, 
{'frequency': 'f', 'id': 715, 'synset': 'mound.n.01', 'synonyms': ['mound_(baseball)', \"pitcher's_mound\"], 'def': '(baseball) the slight elevation on which the pitcher stands', 'name': 'mound_(baseball)'}, {'frequency': 'r', 'id': 716, 'synset': 'mouse.n.01', 'synonyms': ['mouse_(animal_rodent)'], 'def': 'a small rodent with pointed snouts and small ears on elongated bodies with slender usually hairless tails', 'name': 'mouse_(animal_rodent)'}, {'frequency': 'f', 'id': 717, 'synset': 'mouse.n.04', 'synonyms': ['mouse_(computer_equipment)', 'computer_mouse'], 'def': 'a computer input device that controls an on-screen pointer', 'name': 'mouse_(computer_equipment)'}, {'frequency': 'f', 'id': 718, 'synset': 'mousepad.n.01', 'synonyms': ['mousepad'], 'def': 'a small portable pad that provides an operating surface for a computer mouse', 'name': 'mousepad'}, {'frequency': 'c', 'id': 719, 'synset': 'muffin.n.01', 'synonyms': ['muffin'], 'def': 'a sweet quick bread baked in a cup-shaped pan', 'name': 'muffin'}, {'frequency': 'f', 'id': 720, 'synset': 'mug.n.04', 'synonyms': ['mug'], 'def': 'with handle and usually cylindrical', 'name': 'mug'}, {'frequency': 'f', 'id': 721, 'synset': 'mushroom.n.02', 'synonyms': ['mushroom'], 'def': 'a common mushroom', 'name': 'mushroom'}, {'frequency': 'r', 'id': 722, 'synset': 'music_stool.n.01', 'synonyms': ['music_stool', 'piano_stool'], 'def': 'a stool for piano players; usually adjustable in height', 'name': 'music_stool'}, {'frequency': 'r', 'id': 723, 'synset': 'musical_instrument.n.01', 'synonyms': ['musical_instrument', 'instrument_(musical)'], 'def': 'any of various devices or contrivances that can be used to produce musical tones or sounds', 'name': 'musical_instrument'}, {'frequency': 'r', 'id': 724, 'synset': 'nailfile.n.01', 'synonyms': ['nailfile'], 'def': 'a small flat file for shaping the nails', 'name': 'nailfile'}, {'frequency': 'r', 'id': 725, 'synset': 'nameplate.n.01', 'synonyms': ['nameplate'], 'def': 'a plate 
bearing a name', 'name': 'nameplate'}, {'frequency': 'f', 'id': 726, 'synset': 'napkin.n.01', 'synonyms': ['napkin', 'table_napkin', 'serviette'], 'def': 'a small piece of table linen or paper that is used to wipe the mouth and to cover the lap in order to protect clothing', 'name': 'napkin'}, {'frequency': 'r', 'id': 727, 'synset': 'neckerchief.n.01', 'synonyms': ['neckerchief'], 'def': 'a kerchief worn around the neck', 'name': 'neckerchief'}, {'frequency': 'f', 'id': 728, 'synset': 'necklace.n.01', 'synonyms': ['necklace'], 'def': 'jewelry consisting of a cord or chain (often bearing gems) worn about the neck as an ornament', 'name': 'necklace'}, {'frequency': 'f', 'id': 729, 'synset': 'necktie.n.01', 'synonyms': ['necktie', 'tie_(necktie)'], 'def': 'neckwear consisting of a long narrow piece of material worn under a collar and tied in knot at the front', 'name': 'necktie'}, {'frequency': 'r', 'id': 730, 'synset': 'needle.n.03', 'synonyms': ['needle'], 'def': 'a sharp pointed implement (usually metal)', 'name': 'needle'}, {'frequency': 'c', 'id': 731, 'synset': 'nest.n.01', 'synonyms': ['nest'], 'def': 'a structure in which animals lay eggs or give birth to their young', 'name': 'nest'}, {'frequency': 'r', 'id': 732, 'synset': 'newsstand.n.01', 'synonyms': ['newsstand'], 'def': 'a stall where newspapers and other periodicals are sold', 'name': 'newsstand'}, {'frequency': 'c', 'id': 733, 'synset': 'nightwear.n.01', 'synonyms': ['nightshirt', 'nightwear', 'sleepwear', 'nightclothes'], 'def': 'garments designed to be worn in bed', 'name': 'nightshirt'}, {'frequency': 'r', 'id': 734, 'synset': 'nosebag.n.01', 'synonyms': ['nosebag_(for_animals)', 'feedbag'], 'def': 'a canvas bag that is used to feed an animal (such as a horse); covers the muzzle and fastens at the top of the head', 'name': 'nosebag_(for_animals)'}, {'frequency': 'r', 'id': 735, 'synset': 'noseband.n.01', 'synonyms': ['noseband_(for_animals)', 'nosepiece_(for_animals)'], 'def': \"a strap that is the 
part of a bridle that goes over the animal's nose\", 'name': 'noseband_(for_animals)'}, {'frequency': 'f', 'id': 736, 'synset': 'notebook.n.01', 'synonyms': ['notebook'], 'def': 'a book with blank pages for recording notes or memoranda', 'name': 'notebook'}, {'frequency': 'c', 'id': 737, 'synset': 'notepad.n.01', 'synonyms': ['notepad'], 'def': 'a pad of paper for keeping notes', 'name': 'notepad'}, {'frequency': 'c', 'id': 738, 'synset': 'nut.n.03', 'synonyms': ['nut'], 'def': 'a small metal block (usually square or hexagonal) with internal screw thread to be fitted onto a bolt', 'name': 'nut'}, {'frequency': 'r', 'id': 739, 'synset': 'nutcracker.n.01', 'synonyms': ['nutcracker'], 'def': 'a hand tool used to crack nuts open', 'name': 'nutcracker'}, {'frequency': 'c', 'id': 740, 'synset': 'oar.n.01', 'synonyms': ['oar'], 'def': 'an implement used to propel or steer a boat', 'name': 'oar'}, {'frequency': 'r', 'id': 741, 'synset': 'octopus.n.01', 'synonyms': ['octopus_(food)'], 'def': 'tentacles of octopus prepared as food', 'name': 'octopus_(food)'}, {'frequency': 'r', 'id': 742, 'synset': 'octopus.n.02', 'synonyms': ['octopus_(animal)'], 'def': 'bottom-living cephalopod having a soft oval body with eight long tentacles', 'name': 'octopus_(animal)'}, {'frequency': 'c', 'id': 743, 'synset': 'oil_lamp.n.01', 'synonyms': ['oil_lamp', 'kerosene_lamp', 'kerosine_lamp'], 'def': 'a lamp that burns oil (as kerosine) for light', 'name': 'oil_lamp'}, {'frequency': 'c', 'id': 744, 'synset': 'olive_oil.n.01', 'synonyms': ['olive_oil'], 'def': 'oil from olives', 'name': 'olive_oil'}, {'frequency': 'r', 'id': 745, 'synset': 'omelet.n.01', 'synonyms': ['omelet', 'omelette'], 'def': 'beaten eggs cooked until just set; may be folded around e.g. 
ham or cheese or jelly', 'name': 'omelet'}, {'frequency': 'f', 'id': 746, 'synset': 'onion.n.01', 'synonyms': ['onion'], 'def': 'the bulb of an onion plant', 'name': 'onion'}, {'frequency': 'f', 'id': 747, 'synset': 'orange.n.01', 'synonyms': ['orange_(fruit)'], 'def': 'orange (FRUIT of an orange tree)', 'name': 'orange_(fruit)'}, {'frequency': 'c', 'id': 748, 'synset': 'orange_juice.n.01', 'synonyms': ['orange_juice'], 'def': 'bottled or freshly squeezed juice of oranges', 'name': 'orange_juice'}, {'frequency': 'r', 'id': 749, 'synset': 'oregano.n.01', 'synonyms': ['oregano', 'marjoram'], 'def': 'aromatic Eurasian perennial herb used in cooking and baking', 'name': 'oregano'}, {'frequency': 'c', 'id': 750, 'synset': 'ostrich.n.02', 'synonyms': ['ostrich'], 'def': 'fast-running African flightless bird with two-toed feet; largest living bird', 'name': 'ostrich'}, {'frequency': 'c', 'id': 751, 'synset': 'ottoman.n.03', 'synonyms': ['ottoman', 'pouf', 'pouffe', 'hassock'], 'def': 'thick cushion used as a seat', 'name': 'ottoman'}, {'frequency': 'c', 'id': 752, 'synset': 'overall.n.01', 'synonyms': ['overalls_(clothing)'], 'def': 'work clothing consisting of denim trousers usually with a bib and shoulder straps', 'name': 'overalls_(clothing)'}, {'frequency': 'c', 'id': 753, 'synset': 'owl.n.01', 'synonyms': ['owl'], 'def': 'nocturnal bird of prey with hawk-like beak and claws and large head with front-facing eyes', 'name': 'owl'}, {'frequency': 'c', 'id': 754, 'synset': 'packet.n.03', 'synonyms': ['packet'], 'def': 'a small package or bundle', 'name': 'packet'}, {'frequency': 'r', 'id': 755, 'synset': 'pad.n.03', 'synonyms': ['inkpad', 'inking_pad', 'stamp_pad'], 'def': 'absorbent material saturated with ink used to transfer ink evenly to a rubber stamp', 'name': 'inkpad'}, {'frequency': 'c', 'id': 756, 'synset': 'pad.n.04', 'synonyms': ['pad'], 'def': 'a flat mass of soft material used for protection, stuffing, or comfort', 'name': 'pad'}, {'frequency': 'c', 'id': 
757, 'synset': 'paddle.n.04', 'synonyms': ['paddle', 'boat_paddle'], 'def': 'a short light oar used without an oarlock to propel a canoe or small boat', 'name': 'paddle'}, {'frequency': 'c', 'id': 758, 'synset': 'padlock.n.01', 'synonyms': ['padlock'], 'def': 'a detachable, portable lock', 'name': 'padlock'}, {'frequency': 'r', 'id': 759, 'synset': 'paintbox.n.01', 'synonyms': ['paintbox'], 'def': \"a box containing a collection of cubes or tubes of artists' paint\", 'name': 'paintbox'}, {'frequency': 'c', 'id': 760, 'synset': 'paintbrush.n.01', 'synonyms': ['paintbrush'], 'def': 'a brush used as an applicator to apply paint', 'name': 'paintbrush'}, {'frequency': 'f', 'id': 761, 'synset': 'painting.n.01', 'synonyms': ['painting'], 'def': 'graphic art consisting of an artistic composition made by applying paints to a surface', 'name': 'painting'}, {'frequency': 'c', 'id': 762, 'synset': 'pajama.n.02', 'synonyms': ['pajamas', 'pyjamas'], 'def': 'loose-fitting nightclothes worn for sleeping or lounging', 'name': 'pajamas'}, {'frequency': 'c', 'id': 763, 'synset': 'palette.n.02', 'synonyms': ['palette', 'pallet'], 'def': 'board that provides a flat surface on which artists mix paints and the range of colors used', 'name': 'palette'}, {'frequency': 'f', 'id': 764, 'synset': 'pan.n.01', 'synonyms': ['pan_(for_cooking)', 'cooking_pan'], 'def': 'cooking utensil consisting of a wide metal vessel', 'name': 'pan_(for_cooking)'}, {'frequency': 'r', 'id': 765, 'synset': 'pan.n.03', 'synonyms': ['pan_(metal_container)'], 'def': 'shallow container made of metal', 'name': 'pan_(metal_container)'}, {'frequency': 'c', 'id': 766, 'synset': 'pancake.n.01', 'synonyms': ['pancake'], 'def': 'a flat cake of thin batter fried on both sides on a griddle', 'name': 'pancake'}, {'frequency': 'r', 'id': 767, 'synset': 'pantyhose.n.01', 'synonyms': ['pantyhose'], 'def': \"a woman's tights consisting of underpants and stockings\", 'name': 'pantyhose'}, {'frequency': 'r', 'id': 768, 'synset': 
'papaya.n.02', 'synonyms': ['papaya'], 'def': 'large oval melon-like tropical fruit with yellowish flesh', 'name': 'papaya'}, {'frequency': 'r', 'id': 769, 'synset': 'paper_clip.n.01', 'synonyms': ['paperclip'], 'def': 'a wire or plastic clip for holding sheets of paper together', 'name': 'paperclip'}, {'frequency': 'f', 'id': 770, 'synset': 'paper_plate.n.01', 'synonyms': ['paper_plate'], 'def': 'a disposable plate made of cardboard', 'name': 'paper_plate'}, {'frequency': 'f', 'id': 771, 'synset': 'paper_towel.n.01', 'synonyms': ['paper_towel'], 'def': 'a disposable towel made of absorbent paper', 'name': 'paper_towel'}, {'frequency': 'r', 'id': 772, 'synset': 'paperback_book.n.01', 'synonyms': ['paperback_book', 'paper-back_book', 'softback_book', 'soft-cover_book'], 'def': 'a book with paper covers', 'name': 'paperback_book'}, {'frequency': 'r', 'id': 773, 'synset': 'paperweight.n.01', 'synonyms': ['paperweight'], 'def': 'a weight used to hold down a stack of papers', 'name': 'paperweight'}, {'frequency': 'c', 'id': 774, 'synset': 'parachute.n.01', 'synonyms': ['parachute'], 'def': 'rescue equipment consisting of a device that fills with air and retards your fall', 'name': 'parachute'}, {'frequency': 'r', 'id': 775, 'synset': 'parakeet.n.01', 'synonyms': ['parakeet', 'parrakeet', 'parroket', 'paraquet', 'paroquet', 'parroquet'], 'def': 'any of numerous small slender long-tailed parrots', 'name': 'parakeet'}, {'frequency': 'c', 'id': 776, 'synset': 'parasail.n.01', 'synonyms': ['parasail_(sports)'], 'def': 'parachute that will lift a person up into the air when it is towed by a motorboat or a car', 'name': 'parasail_(sports)'}, {'frequency': 'r', 'id': 777, 'synset': 'parchment.n.01', 'synonyms': ['parchment'], 'def': 'a superior paper resembling sheepskin', 'name': 'parchment'}, {'frequency': 'r', 'id': 778, 'synset': 'parka.n.01', 'synonyms': ['parka', 'anorak'], 'def': \"a kind of heavy jacket (`windcheater' is a British term)\", 'name': 'parka'}, 
{'frequency': 'f', 'id': 779, 'synset': 'parking_meter.n.01', 'synonyms': ['parking_meter'], 'def': 'a coin-operated timer located next to a parking space', 'name': 'parking_meter'}, {'frequency': 'c', 'id': 780, 'synset': 'parrot.n.01', 'synonyms': ['parrot'], 'def': 'usually brightly colored tropical birds with short hooked beaks and the ability to mimic sounds', 'name': 'parrot'}, {'frequency': 'c', 'id': 781, 'synset': 'passenger_car.n.01', 'synonyms': ['passenger_car_(part_of_a_train)', 'coach_(part_of_a_train)'], 'def': 'a railcar where passengers ride', 'name': 'passenger_car_(part_of_a_train)'}, {'frequency': 'r', 'id': 782, 'synset': 'passenger_ship.n.01', 'synonyms': ['passenger_ship'], 'def': 'a ship built to carry passengers', 'name': 'passenger_ship'}, {'frequency': 'r', 'id': 783, 'synset': 'passport.n.02', 'synonyms': ['passport'], 'def': 'a document issued by a country to a citizen allowing that person to travel abroad and re-enter the home country', 'name': 'passport'}, {'frequency': 'f', 'id': 784, 'synset': 'pastry.n.02', 'synonyms': ['pastry'], 'def': 'any of various baked foods made of dough or batter', 'name': 'pastry'}, {'frequency': 'r', 'id': 785, 'synset': 'patty.n.01', 'synonyms': ['patty_(food)'], 'def': 'small flat mass of chopped food', 'name': 'patty_(food)'}, {'frequency': 'c', 'id': 786, 'synset': 'pea.n.01', 'synonyms': ['pea_(food)'], 'def': 'seed of a pea plant used for food', 'name': 'pea_(food)'}, {'frequency': 'c', 'id': 787, 'synset': 'peach.n.03', 'synonyms': ['peach'], 'def': 'downy juicy fruit with sweet yellowish or whitish flesh', 'name': 'peach'}, {'frequency': 'c', 'id': 788, 'synset': 'peanut_butter.n.01', 'synonyms': ['peanut_butter'], 'def': 'a spread made from ground peanuts', 'name': 'peanut_butter'}, {'frequency': 'c', 'id': 789, 'synset': 'pear.n.01', 'synonyms': ['pear'], 'def': 'sweet juicy gritty-textured fruit available in many varieties', 'name': 'pear'}, {'frequency': 'r', 'id': 790, 'synset': 
'peeler.n.03', 'synonyms': ['peeler_(tool_for_fruit_and_vegetables)'], 'def': 'a device for peeling vegetables or fruits', 'name': 'peeler_(tool_for_fruit_and_vegetables)'}, {'frequency': 'r', 'id': 791, 'synset': 'pegboard.n.01', 'synonyms': ['pegboard'], 'def': 'a board perforated with regularly spaced holes into which pegs can be fitted', 'name': 'pegboard'}, {'frequency': 'c', 'id': 792, 'synset': 'pelican.n.01', 'synonyms': ['pelican'], 'def': 'large long-winged warm-water seabird having a large bill with a distensible pouch for fish', 'name': 'pelican'}, {'frequency': 'f', 'id': 793, 'synset': 'pen.n.01', 'synonyms': ['pen'], 'def': 'a writing implement with a point from which ink flows', 'name': 'pen'}, {'frequency': 'c', 'id': 794, 'synset': 'pencil.n.01', 'synonyms': ['pencil'], 'def': 'a thin cylindrical pointed writing implement made of wood and graphite', 'name': 'pencil'}, {'frequency': 'r', 'id': 795, 'synset': 'pencil_box.n.01', 'synonyms': ['pencil_box', 'pencil_case'], 'def': 'a box for holding pencils', 'name': 'pencil_box'}, {'frequency': 'r', 'id': 796, 'synset': 'pencil_sharpener.n.01', 'synonyms': ['pencil_sharpener'], 'def': 'a rotary implement for sharpening the point on pencils', 'name': 'pencil_sharpener'}, {'frequency': 'r', 'id': 797, 'synset': 'pendulum.n.01', 'synonyms': ['pendulum'], 'def': 'an apparatus consisting of an object mounted so that it swings freely under the influence of gravity', 'name': 'pendulum'}, {'frequency': 'c', 'id': 798, 'synset': 'penguin.n.01', 'synonyms': ['penguin'], 'def': 'short-legged flightless birds of cold southern regions having webbed feet and wings modified as flippers', 'name': 'penguin'}, {'frequency': 'r', 'id': 799, 'synset': 'pennant.n.02', 'synonyms': ['pennant'], 'def': 'a flag longer than it is wide (and often tapering)', 'name': 'pennant'}, {'frequency': 'r', 'id': 800, 'synset': 'penny.n.02', 'synonyms': ['penny_(coin)'], 'def': 'a coin worth one-hundredth of the value of the basic unit', 
'name': 'penny_(coin)'}, {'frequency': 'c', 'id': 801, 'synset': 'pepper.n.03', 'synonyms': ['pepper', 'peppercorn'], 'def': 'pungent seasoning from the berry of the common pepper plant; whole or ground', 'name': 'pepper'}, {'frequency': 'c', 'id': 802, 'synset': 'pepper_mill.n.01', 'synonyms': ['pepper_mill', 'pepper_grinder'], 'def': 'a mill for grinding pepper', 'name': 'pepper_mill'}, {'frequency': 'c', 'id': 803, 'synset': 'perfume.n.02', 'synonyms': ['perfume'], 'def': 'a toiletry that emits and diffuses a fragrant odor', 'name': 'perfume'}, {'frequency': 'r', 'id': 804, 'synset': 'persimmon.n.02', 'synonyms': ['persimmon'], 'def': 'orange fruit resembling a plum; edible when fully ripe', 'name': 'persimmon'}, {'frequency': 'f', 'id': 805, 'synset': 'person.n.01', 'synonyms': ['baby', 'child', 'boy', 'girl', 'man', 'woman', 'person', 'human'], 'def': 'a human being', 'name': 'baby'}, {'frequency': 'r', 'id': 806, 'synset': 'pet.n.01', 'synonyms': ['pet'], 'def': 'a domesticated animal kept for companionship or amusement', 'name': 'pet'}, {'frequency': 'r', 'id': 807, 'synset': 'petfood.n.01', 'synonyms': ['petfood', 'pet-food'], 'def': 'food prepared for animal pets', 'name': 'petfood'}, {'frequency': 'r', 'id': 808, 'synset': 'pew.n.01', 'synonyms': ['pew_(church_bench)', 'church_bench'], 'def': 'long bench with backs; used in church by the congregation', 'name': 'pew_(church_bench)'}, {'frequency': 'r', 'id': 809, 'synset': 'phonebook.n.01', 'synonyms': ['phonebook', 'telephone_book', 'telephone_directory'], 'def': 'a directory containing an alphabetical list of telephone subscribers and their telephone numbers', 'name': 'phonebook'}, {'frequency': 'c', 'id': 810, 'synset': 'phonograph_record.n.01', 'synonyms': ['phonograph_record', 'phonograph_recording', 'record_(phonograph_recording)'], 'def': 'sound recording consisting of a typically black disk with a continuous groove', 'name': 'phonograph_record'}, {'frequency': 'c', 'id': 811, 'synset': 
'piano.n.01', 'synonyms': ['piano'], 'def': 'a keyboard instrument that is played by depressing keys that cause hammers to strike tuned strings and produce sounds', 'name': 'piano'}, {'frequency': 'f', 'id': 812, 'synset': 'pickle.n.01', 'synonyms': ['pickle'], 'def': 'vegetables (especially cucumbers) preserved in brine or vinegar', 'name': 'pickle'}, {'frequency': 'f', 'id': 813, 'synset': 'pickup.n.01', 'synonyms': ['pickup_truck'], 'def': 'a light truck with an open body and low sides and a tailboard', 'name': 'pickup_truck'}, {'frequency': 'c', 'id': 814, 'synset': 'pie.n.01', 'synonyms': ['pie'], 'def': 'dish baked in pastry-lined pan often with a pastry top', 'name': 'pie'}, {'frequency': 'c', 'id': 815, 'synset': 'pigeon.n.01', 'synonyms': ['pigeon'], 'def': 'wild and domesticated birds having a heavy body and short legs', 'name': 'pigeon'}, {'frequency': 'r', 'id': 816, 'synset': 'piggy_bank.n.01', 'synonyms': ['piggy_bank', 'penny_bank'], 'def': \"a child's coin bank (often shaped like a pig)\", 'name': 'piggy_bank'}, {'frequency': 'f', 'id': 817, 'synset': 'pillow.n.01', 'synonyms': ['pillow'], 'def': 'a cushion to support the head of a sleeping person', 'name': 'pillow'}, {'frequency': 'r', 'id': 818, 'synset': 'pin.n.09', 'synonyms': ['pin_(non_jewelry)'], 'def': 'a small slender (often pointed) piece of wood or metal used to support or fasten or attach things', 'name': 'pin_(non_jewelry)'}, {'frequency': 'f', 'id': 819, 'synset': 'pineapple.n.02', 'synonyms': ['pineapple'], 'def': 'large sweet fleshy tropical fruit with a tuft of stiff leaves', 'name': 'pineapple'}, {'frequency': 'c', 'id': 820, 'synset': 'pinecone.n.01', 'synonyms': ['pinecone'], 'def': 'the seed-producing cone of a pine tree', 'name': 'pinecone'}, {'frequency': 'r', 'id': 821, 'synset': 'ping-pong_ball.n.01', 'synonyms': ['ping-pong_ball'], 'def': 'light hollow ball used in playing table tennis', 'name': 'ping-pong_ball'}, {'frequency': 'r', 'id': 822, 'synset': 'pinwheel.n.03', 
'synonyms': ['pinwheel'], 'def': 'a toy consisting of vanes of colored paper or plastic that is pinned to a stick and spins when it is pointed into the wind', 'name': 'pinwheel'}, {'frequency': 'r', 'id': 823, 'synset': 'pipe.n.01', 'synonyms': ['tobacco_pipe'], 'def': 'a tube with a small bowl at one end; used for smoking tobacco', 'name': 'tobacco_pipe'}, {'frequency': 'f', 'id': 824, 'synset': 'pipe.n.02', 'synonyms': ['pipe', 'piping'], 'def': 'a long tube made of metal or plastic that is used to carry water or oil or gas etc.', 'name': 'pipe'}, {'frequency': 'r', 'id': 825, 'synset': 'pistol.n.01', 'synonyms': ['pistol', 'handgun'], 'def': 'a firearm that is held and fired with one hand', 'name': 'pistol'}, {'frequency': 'r', 'id': 826, 'synset': 'pita.n.01', 'synonyms': ['pita_(bread)', 'pocket_bread'], 'def': 'usually small round bread that can open into a pocket for filling', 'name': 'pita_(bread)'}, {'frequency': 'f', 'id': 827, 'synset': 'pitcher.n.02', 'synonyms': ['pitcher_(vessel_for_liquid)', 'ewer'], 'def': 'an open vessel with a handle and a spout for pouring', 'name': 'pitcher_(vessel_for_liquid)'}, {'frequency': 'r', 'id': 828, 'synset': 'pitchfork.n.01', 'synonyms': ['pitchfork'], 'def': 'a long-handled hand tool with sharp widely spaced prongs for lifting and pitching hay', 'name': 'pitchfork'}, {'frequency': 'f', 'id': 829, 'synset': 'pizza.n.01', 'synonyms': ['pizza'], 'def': 'Italian open pie made of thin bread dough spread with a spiced mixture of e.g. 
tomato sauce and cheese', 'name': 'pizza'}, {'frequency': 'f', 'id': 830, 'synset': 'place_mat.n.01', 'synonyms': ['place_mat'], 'def': 'a mat placed on a table for an individual place setting', 'name': 'place_mat'}, {'frequency': 'f', 'id': 831, 'synset': 'plate.n.04', 'synonyms': ['plate'], 'def': 'dish on which food is served or from which food is eaten', 'name': 'plate'}, {'frequency': 'c', 'id': 832, 'synset': 'platter.n.01', 'synonyms': ['platter'], 'def': 'a large shallow dish used for serving food', 'name': 'platter'}, {'frequency': 'r', 'id': 833, 'synset': 'playing_card.n.01', 'synonyms': ['playing_card'], 'def': 'one of a pack of cards that are used to play card games', 'name': 'playing_card'}, {'frequency': 'r', 'id': 834, 'synset': 'playpen.n.01', 'synonyms': ['playpen'], 'def': 'a portable enclosure in which babies may be left to play', 'name': 'playpen'}, {'frequency': 'c', 'id': 835, 'synset': 'pliers.n.01', 'synonyms': ['pliers', 'plyers'], 'def': 'a gripping hand tool with two hinged arms and (usually) serrated jaws', 'name': 'pliers'}, {'frequency': 'r', 'id': 836, 'synset': 'plow.n.01', 'synonyms': ['plow_(farm_equipment)', 'plough_(farm_equipment)'], 'def': 'a farm tool having one or more heavy blades to break the soil and cut a furrow prior to sowing', 'name': 'plow_(farm_equipment)'}, {'frequency': 'r', 'id': 837, 'synset': 'pocket_watch.n.01', 'synonyms': ['pocket_watch'], 'def': 'a watch that is carried in a small watch pocket', 'name': 'pocket_watch'}, {'frequency': 'c', 'id': 838, 'synset': 'pocketknife.n.01', 'synonyms': ['pocketknife'], 'def': 'a knife with a blade that folds into the handle; suitable for carrying in the pocket', 'name': 'pocketknife'}, {'frequency': 'c', 'id': 839, 'synset': 'poker.n.01', 'synonyms': ['poker_(fire_stirring_tool)', 'stove_poker', 'fire_hook'], 'def': 'fire iron consisting of a metal rod with a handle; used to stir a fire', 'name': 'poker_(fire_stirring_tool)'}, {'frequency': 'f', 'id': 840, 'synset': 
'pole.n.01', 'synonyms': ['pole', 'post'], 'def': 'a long (usually round) rod of wood or metal or plastic', 'name': 'pole'}, {'frequency': 'r', 'id': 841, 'synset': 'police_van.n.01', 'synonyms': ['police_van', 'police_wagon', 'paddy_wagon', 'patrol_wagon'], 'def': 'van used by police to transport prisoners', 'name': 'police_van'}, {'frequency': 'f', 'id': 842, 'synset': 'polo_shirt.n.01', 'synonyms': ['polo_shirt', 'sport_shirt'], 'def': 'a shirt with short sleeves designed for comfort and casual wear', 'name': 'polo_shirt'}, {'frequency': 'r', 'id': 843, 'synset': 'poncho.n.01', 'synonyms': ['poncho'], 'def': 'a blanket-like cloak with a hole in the center for the head', 'name': 'poncho'}, {'frequency': 'c', 'id': 844, 'synset': 'pony.n.05', 'synonyms': ['pony'], 'def': 'any of various breeds of small gentle horses usually less than five feet high at the shoulder', 'name': 'pony'}, {'frequency': 'r', 'id': 845, 'synset': 'pool_table.n.01', 'synonyms': ['pool_table', 'billiard_table', 'snooker_table'], 'def': 'game equipment consisting of a heavy table on which pool is played', 'name': 'pool_table'}, {'frequency': 'f', 'id': 846, 'synset': 'pop.n.02', 'synonyms': ['pop_(soda)', 'soda_(pop)', 'tonic', 'soft_drink'], 'def': 'a sweet drink containing carbonated water and flavoring', 'name': 'pop_(soda)'}, {'frequency': 'r', 'id': 847, 'synset': 'portrait.n.02', 'synonyms': ['portrait', 'portrayal'], 'def': 'any likeness of a person, in any medium', 'name': 'portrait'}, {'frequency': 'c', 'id': 848, 'synset': 'postbox.n.01', 'synonyms': ['postbox_(public)', 'mailbox_(public)'], 'def': 'public box for deposit of mail', 'name': 'postbox_(public)'}, {'frequency': 'c', 'id': 849, 'synset': 'postcard.n.01', 'synonyms': ['postcard', 'postal_card', 'mailing-card'], 'def': 'a card for sending messages by post without an envelope', 'name': 'postcard'}, {'frequency': 'f', 'id': 850, 'synset': 'poster.n.01', 'synonyms': ['poster', 'placard'], 'def': 'a sign posted in a public 
place as an advertisement', 'name': 'poster'}, {'frequency': 'f', 'id': 851, 'synset': 'pot.n.01', 'synonyms': ['pot'], 'def': 'metal or earthenware cooking vessel that is usually round and deep; often has a handle and lid', 'name': 'pot'}, {'frequency': 'f', 'id': 852, 'synset': 'pot.n.04', 'synonyms': ['flowerpot'], 'def': 'a container in which plants are cultivated', 'name': 'flowerpot'}, {'frequency': 'f', 'id': 853, 'synset': 'potato.n.01', 'synonyms': ['potato'], 'def': 'an edible tuber native to South America', 'name': 'potato'}, {'frequency': 'c', 'id': 854, 'synset': 'potholder.n.01', 'synonyms': ['potholder'], 'def': 'an insulated pad for holding hot pots', 'name': 'potholder'}, {'frequency': 'c', 'id': 855, 'synset': 'pottery.n.01', 'synonyms': ['pottery', 'clayware'], 'def': 'ceramic ware made from clay and baked in a kiln', 'name': 'pottery'}, {'frequency': 'c', 'id': 856, 'synset': 'pouch.n.01', 'synonyms': ['pouch'], 'def': 'a small or medium size container for holding or carrying things', 'name': 'pouch'}, {'frequency': 'r', 'id': 857, 'synset': 'power_shovel.n.01', 'synonyms': ['power_shovel', 'excavator', 'digger'], 'def': 'a machine for excavating', 'name': 'power_shovel'}, {'frequency': 'c', 'id': 858, 'synset': 'prawn.n.01', 'synonyms': ['prawn', 'shrimp'], 'def': 'any of various edible decapod crustaceans', 'name': 'prawn'}, {'frequency': 'f', 'id': 859, 'synset': 'printer.n.03', 'synonyms': ['printer', 'printing_machine'], 'def': 'a machine that prints', 'name': 'printer'}, {'frequency': 'c', 'id': 860, 'synset': 'projectile.n.01', 'synonyms': ['projectile_(weapon)', 'missile'], 'def': 'a weapon that is forcibly thrown or projected at a targets', 'name': 'projectile_(weapon)'}, {'frequency': 'c', 'id': 861, 'synset': 'projector.n.02', 'synonyms': ['projector'], 'def': 'an optical instrument that projects an enlarged image onto a screen', 'name': 'projector'}, {'frequency': 'f', 'id': 862, 'synset': 'propeller.n.01', 'synonyms': ['propeller', 
'propellor'], 'def': 'a mechanical device that rotates to push against air or water', 'name': 'propeller'}, {'frequency': 'r', 'id': 863, 'synset': 'prune.n.01', 'synonyms': ['prune'], 'def': 'dried plum', 'name': 'prune'}, {'frequency': 'r', 'id': 864, 'synset': 'pudding.n.01', 'synonyms': ['pudding'], 'def': 'any of various soft thick unsweetened baked dishes', 'name': 'pudding'}, {'frequency': 'r', 'id': 865, 'synset': 'puffer.n.02', 'synonyms': ['puffer_(fish)', 'pufferfish', 'blowfish', 'globefish'], 'def': 'fishes whose elongated spiny body can inflate itself with water or air to form a globe', 'name': 'puffer_(fish)'}, {'frequency': 'r', 'id': 866, 'synset': 'puffin.n.01', 'synonyms': ['puffin'], 'def': 'seabirds having short necks and brightly colored compressed bills', 'name': 'puffin'}, {'frequency': 'r', 'id': 867, 'synset': 'pug.n.01', 'synonyms': ['pug-dog'], 'def': 'small compact smooth-coated breed of Asiatic origin having a tightly curled tail and broad flat wrinkled muzzle', 'name': 'pug-dog'}, {'frequency': 'c', 'id': 868, 'synset': 'pumpkin.n.02', 'synonyms': ['pumpkin'], 'def': 'usually large pulpy deep-yellow round fruit of the squash family maturing in late summer or early autumn', 'name': 'pumpkin'}, {'frequency': 'r', 'id': 869, 'synset': 'punch.n.03', 'synonyms': ['puncher'], 'def': 'a tool for making holes or indentations', 'name': 'puncher'}, {'frequency': 'r', 'id': 870, 'synset': 'puppet.n.01', 'synonyms': ['puppet', 'marionette'], 'def': 'a small figure of a person operated from above with strings by a puppeteer', 'name': 'puppet'}, {'frequency': 'r', 'id': 871, 'synset': 'puppy.n.01', 'synonyms': ['puppy'], 'def': 'a young dog', 'name': 'puppy'}, {'frequency': 'r', 'id': 872, 'synset': 'quesadilla.n.01', 'synonyms': ['quesadilla'], 'def': 'a tortilla that is filled with cheese and heated', 'name': 'quesadilla'}, {'frequency': 'r', 'id': 873, 'synset': 'quiche.n.02', 'synonyms': ['quiche'], 'def': 'a tart filled with rich unsweetened 
custard; often contains other ingredients (as cheese or ham or seafood or vegetables)', 'name': 'quiche'}, {'frequency': 'f', 'id': 874, 'synset': 'quilt.n.01', 'synonyms': ['quilt', 'comforter'], 'def': 'bedding made of two layers of cloth filled with stuffing and stitched together', 'name': 'quilt'}, {'frequency': 'c', 'id': 875, 'synset': 'rabbit.n.01', 'synonyms': ['rabbit'], 'def': 'any of various burrowing animals of the family Leporidae having long ears and short tails', 'name': 'rabbit'}, {'frequency': 'r', 'id': 876, 'synset': 'racer.n.02', 'synonyms': ['race_car', 'racing_car'], 'def': 'a fast car that competes in races', 'name': 'race_car'}, {'frequency': 'c', 'id': 877, 'synset': 'racket.n.04', 'synonyms': ['racket', 'racquet'], 'def': 'a sports implement used to strike a ball in various games', 'name': 'racket'}, {'frequency': 'r', 'id': 878, 'synset': 'radar.n.01', 'synonyms': ['radar'], 'def': 'measuring instrument in which the echo of a pulse of microwave radiation is used to detect and locate distant objects', 'name': 'radar'}, {'frequency': 'c', 'id': 879, 'synset': 'radiator.n.03', 'synonyms': ['radiator'], 'def': 'a mechanism consisting of a metal honeycomb through which hot fluids circulate', 'name': 'radiator'}, {'frequency': 'c', 'id': 880, 'synset': 'radio_receiver.n.01', 'synonyms': ['radio_receiver', 'radio_set', 'radio', 'tuner_(radio)'], 'def': 'an electronic receiver that detects and demodulates and amplifies transmitted radio signals', 'name': 'radio_receiver'}, {'frequency': 'c', 'id': 881, 'synset': 'radish.n.03', 'synonyms': ['radish', 'daikon'], 'def': 'pungent edible root of any of various cultivated radish plants', 'name': 'radish'}, {'frequency': 'c', 'id': 882, 'synset': 'raft.n.01', 'synonyms': ['raft'], 'def': 'a flat float (usually made of logs or planks) that can be used for transport or as a platform for swimmers', 'name': 'raft'}, {'frequency': 'r', 'id': 883, 'synset': 'rag_doll.n.01', 'synonyms': ['rag_doll'], 'def': 'a 
cloth doll that is stuffed and (usually) painted', 'name': 'rag_doll'}, {'frequency': 'c', 'id': 884, 'synset': 'raincoat.n.01', 'synonyms': ['raincoat', 'waterproof_jacket'], 'def': 'a water-resistant coat', 'name': 'raincoat'}, {'frequency': 'c', 'id': 885, 'synset': 'ram.n.05', 'synonyms': ['ram_(animal)'], 'def': 'uncastrated adult male sheep', 'name': 'ram_(animal)'}, {'frequency': 'c', 'id': 886, 'synset': 'raspberry.n.02', 'synonyms': ['raspberry'], 'def': 'red or black edible aggregate berries usually smaller than the related blackberries', 'name': 'raspberry'}, {'frequency': 'r', 'id': 887, 'synset': 'rat.n.01', 'synonyms': ['rat'], 'def': 'any of various long-tailed rodents similar to but larger than a mouse', 'name': 'rat'}, {'frequency': 'c', 'id': 888, 'synset': 'razorblade.n.01', 'synonyms': ['razorblade'], 'def': 'a blade that has very sharp edge', 'name': 'razorblade'}, {'frequency': 'c', 'id': 889, 'synset': 'reamer.n.01', 'synonyms': ['reamer_(juicer)', 'juicer', 'juice_reamer'], 'def': 'a squeezer with a conical ridged center that is used for squeezing juice from citrus fruit', 'name': 'reamer_(juicer)'}, {'frequency': 'f', 'id': 890, 'synset': 'rearview_mirror.n.01', 'synonyms': ['rearview_mirror'], 'def': 'car mirror that reflects the view out of the rear window', 'name': 'rearview_mirror'}, {'frequency': 'c', 'id': 891, 'synset': 'receipt.n.02', 'synonyms': ['receipt'], 'def': 'an acknowledgment (usually tangible) that payment has been made', 'name': 'receipt'}, {'frequency': 'c', 'id': 892, 'synset': 'recliner.n.01', 'synonyms': ['recliner', 'reclining_chair', 'lounger_(chair)'], 'def': 'an armchair whose back can be lowered and foot can be raised to allow the sitter to recline in it', 'name': 'recliner'}, {'frequency': 'r', 'id': 893, 'synset': 'record_player.n.01', 'synonyms': ['record_player', 'phonograph_(record_player)', 'turntable'], 'def': 'machine in which rotating records cause a stylus to vibrate and the vibrations are amplified 
acoustically or electronically', 'name': 'record_player'}, {'frequency': 'r', 'id': 894, 'synset': 'red_cabbage.n.02', 'synonyms': ['red_cabbage'], 'def': 'compact head of purplish-red leaves', 'name': 'red_cabbage'}, {'frequency': 'f', 'id': 895, 'synset': 'reflector.n.01', 'synonyms': ['reflector'], 'def': 'device that reflects light, radiation, etc.', 'name': 'reflector'}, {'frequency': 'f', 'id': 896, 'synset': 'remote_control.n.01', 'synonyms': ['remote_control'], 'def': 'a device that can be used to control a machine or apparatus from a distance', 'name': 'remote_control'}, {'frequency': 'c', 'id': 897, 'synset': 'rhinoceros.n.01', 'synonyms': ['rhinoceros'], 'def': 'massive powerful herbivorous odd-toed ungulate of southeast Asia and Africa having very thick skin and one or two horns on the snout', 'name': 'rhinoceros'}, {'frequency': 'r', 'id': 898, 'synset': 'rib.n.03', 'synonyms': ['rib_(food)'], 'def': 'cut of meat including one or more ribs', 'name': 'rib_(food)'}, {'frequency': 'r', 'id': 899, 'synset': 'rifle.n.01', 'synonyms': ['rifle'], 'def': 'a shoulder firearm with a long barrel', 'name': 'rifle'}, {'frequency': 'f', 'id': 900, 'synset': 'ring.n.08', 'synonyms': ['ring'], 'def': 'jewelry consisting of a circlet of precious metal (often set with jewels) worn on the finger', 'name': 'ring'}, {'frequency': 'r', 'id': 901, 'synset': 'river_boat.n.01', 'synonyms': ['river_boat'], 'def': 'a boat used on rivers or to ply a river', 'name': 'river_boat'}, {'frequency': 'r', 'id': 902, 'synset': 'road_map.n.02', 'synonyms': ['road_map'], 'def': '(NOT A ROAD) a MAP showing roads (for automobile travel)', 'name': 'road_map'}, {'frequency': 'c', 'id': 903, 'synset': 'robe.n.01', 'synonyms': ['robe'], 'def': 'any loose flowing garment', 'name': 'robe'}, {'frequency': 'c', 'id': 904, 'synset': 'rocking_chair.n.01', 'synonyms': ['rocking_chair'], 'def': 'a chair mounted on rockers', 'name': 'rocking_chair'}, {'frequency': 'r', 'id': 905, 'synset': 
'roller_skate.n.01', 'synonyms': ['roller_skate'], 'def': 'a shoe with pairs of rollers (small hard wheels) fixed to the sole', 'name': 'roller_skate'}, {'frequency': 'r', 'id': 906, 'synset': 'rollerblade.n.01', 'synonyms': ['Rollerblade'], 'def': 'an in-line variant of a roller skate', 'name': 'Rollerblade'}, {'frequency': 'c', 'id': 907, 'synset': 'rolling_pin.n.01', 'synonyms': ['rolling_pin'], 'def': 'utensil consisting of a cylinder (usually of wood) with a handle at each end; used to roll out dough', 'name': 'rolling_pin'}, {'frequency': 'r', 'id': 908, 'synset': 'root_beer.n.01', 'synonyms': ['root_beer'], 'def': 'carbonated drink containing extracts of roots and herbs', 'name': 'root_beer'}, {'frequency': 'c', 'id': 909, 'synset': 'router.n.02', 'synonyms': ['router_(computer_equipment)'], 'def': 'a device that forwards data packets between computer networks', 'name': 'router_(computer_equipment)'}, {'frequency': 'f', 'id': 910, 'synset': 'rubber_band.n.01', 'synonyms': ['rubber_band', 'elastic_band'], 'def': 'a narrow band of elastic rubber used to hold things (such as papers) together', 'name': 'rubber_band'}, {'frequency': 'c', 'id': 911, 'synset': 'runner.n.08', 'synonyms': ['runner_(carpet)'], 'def': 'a long narrow carpet', 'name': 'runner_(carpet)'}, {'frequency': 'f', 'id': 912, 'synset': 'sack.n.01', 'synonyms': ['plastic_bag', 'paper_bag'], 'def': \"a bag made of paper or plastic for holding customer's purchases\", 'name': 'plastic_bag'}, {'frequency': 'f', 'id': 913, 'synset': 'saddle.n.01', 'synonyms': ['saddle_(on_an_animal)'], 'def': 'a seat for the rider of a horse or camel', 'name': 'saddle_(on_an_animal)'}, {'frequency': 'f', 'id': 914, 'synset': 'saddle_blanket.n.01', 'synonyms': ['saddle_blanket', 'saddlecloth', 'horse_blanket'], 'def': 'stable gear consisting of a blanket placed under the saddle', 'name': 'saddle_blanket'}, {'frequency': 'c', 'id': 915, 'synset': 'saddlebag.n.01', 'synonyms': ['saddlebag'], 'def': 'a large bag (or pair 
of bags) hung over a saddle', 'name': 'saddlebag'}, {'frequency': 'r', 'id': 916, 'synset': 'safety_pin.n.01', 'synonyms': ['safety_pin'], 'def': 'a pin in the form of a clasp; has a guard so the point of the pin will not stick the user', 'name': 'safety_pin'}, {'frequency': 'c', 'id': 917, 'synset': 'sail.n.01', 'synonyms': ['sail'], 'def': 'a large piece of fabric by means of which wind is used to propel a sailing vessel', 'name': 'sail'}, {'frequency': 'c', 'id': 918, 'synset': 'salad.n.01', 'synonyms': ['salad'], 'def': 'food mixtures either arranged on a plate or tossed and served with a moist dressing; usually consisting of or including greens', 'name': 'salad'}, {'frequency': 'r', 'id': 919, 'synset': 'salad_plate.n.01', 'synonyms': ['salad_plate', 'salad_bowl'], 'def': 'a plate or bowl for individual servings of salad', 'name': 'salad_plate'}, {'frequency': 'r', 'id': 920, 'synset': 'salami.n.01', 'synonyms': ['salami'], 'def': 'highly seasoned fatty sausage of pork and beef usually dried', 'name': 'salami'}, {'frequency': 'r', 'id': 921, 'synset': 'salmon.n.01', 'synonyms': ['salmon_(fish)'], 'def': 'any of various large food and game fishes of northern waters', 'name': 'salmon_(fish)'}, {'frequency': 'r', 'id': 922, 'synset': 'salmon.n.03', 'synonyms': ['salmon_(food)'], 'def': 'flesh of any of various marine or freshwater fish of the family Salmonidae', 'name': 'salmon_(food)'}, {'frequency': 'r', 'id': 923, 'synset': 'salsa.n.01', 'synonyms': ['salsa'], 'def': 'spicy sauce of tomatoes and onions and chili peppers to accompany Mexican foods', 'name': 'salsa'}, {'frequency': 'f', 'id': 924, 'synset': 'saltshaker.n.01', 'synonyms': ['saltshaker'], 'def': 'a shaker with a perforated top for sprinkling salt', 'name': 'saltshaker'}, {'frequency': 'f', 'id': 925, 'synset': 'sandal.n.01', 'synonyms': ['sandal_(type_of_shoe)'], 'def': 'a shoe consisting of a sole fastened by straps to the foot', 'name': 'sandal_(type_of_shoe)'}, {'frequency': 'f', 'id': 926, 
'synset': 'sandwich.n.01', 'synonyms': ['sandwich'], 'def': 'two (or more) slices of bread with a filling between them', 'name': 'sandwich'}, {'frequency': 'r', 'id': 927, 'synset': 'satchel.n.01', 'synonyms': ['satchel'], 'def': 'luggage consisting of a small case with a flat bottom and (usually) a shoulder strap', 'name': 'satchel'}, {'frequency': 'r', 'id': 928, 'synset': 'saucepan.n.01', 'synonyms': ['saucepan'], 'def': 'a deep pan with a handle; used for stewing or boiling', 'name': 'saucepan'}, {'frequency': 'f', 'id': 929, 'synset': 'saucer.n.02', 'synonyms': ['saucer'], 'def': 'a small shallow dish for holding a cup at the table', 'name': 'saucer'}, {'frequency': 'f', 'id': 930, 'synset': 'sausage.n.01', 'synonyms': ['sausage'], 'def': 'highly seasoned minced meat stuffed in casings', 'name': 'sausage'}, {'frequency': 'r', 'id': 931, 'synset': 'sawhorse.n.01', 'synonyms': ['sawhorse', 'sawbuck'], 'def': 'a framework for holding wood that is being sawed', 'name': 'sawhorse'}, {'frequency': 'r', 'id': 932, 'synset': 'sax.n.02', 'synonyms': ['saxophone'], 'def': \"a wind instrument with a `J'-shaped form typically made of brass\", 'name': 'saxophone'}, {'frequency': 'f', 'id': 933, 'synset': 'scale.n.07', 'synonyms': ['scale_(measuring_instrument)'], 'def': 'a measuring instrument for weighing; shows amount of mass', 'name': 'scale_(measuring_instrument)'}, {'frequency': 'r', 'id': 934, 'synset': 'scarecrow.n.01', 'synonyms': ['scarecrow', 'strawman'], 'def': 'an effigy in the shape of a man to frighten birds away from seeds', 'name': 'scarecrow'}, {'frequency': 'f', 'id': 935, 'synset': 'scarf.n.01', 'synonyms': ['scarf'], 'def': 'a garment worn around the head or neck or shoulders for warmth or decoration', 'name': 'scarf'}, {'frequency': 'c', 'id': 936, 'synset': 'school_bus.n.01', 'synonyms': ['school_bus'], 'def': 'a bus used to transport children to or from school', 'name': 'school_bus'}, {'frequency': 'f', 'id': 937, 'synset': 'scissors.n.01', 
'synonyms': ['scissors'], 'def': 'a tool having two crossed pivoting blades with looped handles', 'name': 'scissors'}, {'frequency': 'c', 'id': 938, 'synset': 'scoreboard.n.01', 'synonyms': ['scoreboard'], 'def': 'a large board for displaying the score of a contest (and some other information)', 'name': 'scoreboard'}, {'frequency': 'c', 'id': 939, 'synset': 'scrambled_eggs.n.01', 'synonyms': ['scrambled_eggs'], 'def': 'eggs beaten and cooked to a soft firm consistency while stirring', 'name': 'scrambled_eggs'}, {'frequency': 'r', 'id': 940, 'synset': 'scraper.n.01', 'synonyms': ['scraper'], 'def': 'any of various hand tools for scraping', 'name': 'scraper'}, {'frequency': 'r', 'id': 941, 'synset': 'scratcher.n.03', 'synonyms': ['scratcher'], 'def': 'a device used for scratching', 'name': 'scratcher'}, {'frequency': 'c', 'id': 942, 'synset': 'screwdriver.n.01', 'synonyms': ['screwdriver'], 'def': 'a hand tool for driving screws; has a tip that fits into the head of a screw', 'name': 'screwdriver'}, {'frequency': 'c', 'id': 943, 'synset': 'scrub_brush.n.01', 'synonyms': ['scrubbing_brush'], 'def': 'a brush with short stiff bristles for heavy cleaning', 'name': 'scrubbing_brush'}, {'frequency': 'c', 'id': 944, 'synset': 'sculpture.n.01', 'synonyms': ['sculpture'], 'def': 'a three-dimensional work of art', 'name': 'sculpture'}, {'frequency': 'r', 'id': 945, 'synset': 'seabird.n.01', 'synonyms': ['seabird', 'seafowl'], 'def': 'a bird that frequents coastal waters and the open ocean: gulls; pelicans; gannets; cormorants; albatrosses; petrels; etc.', 'name': 'seabird'}, {'frequency': 'r', 'id': 946, 'synset': 'seahorse.n.02', 'synonyms': ['seahorse'], 'def': 'small fish with horse-like heads bent sharply downward and curled tails', 'name': 'seahorse'}, {'frequency': 'r', 'id': 947, 'synset': 'seaplane.n.01', 'synonyms': ['seaplane', 'hydroplane'], 'def': 'an airplane that can land on or take off from water', 'name': 'seaplane'}, {'frequency': 'c', 'id': 948, 'synset': 
'seashell.n.01', 'synonyms': ['seashell'], 'def': 'the shell of a marine organism', 'name': 'seashell'}, {'frequency': 'r', 'id': 949, 'synset': 'seedling.n.01', 'synonyms': ['seedling'], 'def': 'young plant or tree grown from a seed', 'name': 'seedling'}, {'frequency': 'c', 'id': 950, 'synset': 'serving_dish.n.01', 'synonyms': ['serving_dish'], 'def': 'a dish used for serving food', 'name': 'serving_dish'}, {'frequency': 'r', 'id': 951, 'synset': 'sewing_machine.n.01', 'synonyms': ['sewing_machine'], 'def': 'a textile machine used as a home appliance for sewing', 'name': 'sewing_machine'}, {'frequency': 'r', 'id': 952, 'synset': 'shaker.n.03', 'synonyms': ['shaker'], 'def': 'a container in which something can be shaken', 'name': 'shaker'}, {'frequency': 'c', 'id': 953, 'synset': 'shampoo.n.01', 'synonyms': ['shampoo'], 'def': 'cleansing agent consisting of soaps or detergents used for washing the hair', 'name': 'shampoo'}, {'frequency': 'r', 'id': 954, 'synset': 'shark.n.01', 'synonyms': ['shark'], 'def': 'typically large carnivorous fishes with sharpe teeth', 'name': 'shark'}, {'frequency': 'r', 'id': 955, 'synset': 'sharpener.n.01', 'synonyms': ['sharpener'], 'def': 'any implement that is used to make something (an edge or a point) sharper', 'name': 'sharpener'}, {'frequency': 'r', 'id': 956, 'synset': 'sharpie.n.03', 'synonyms': ['Sharpie'], 'def': 'a pen with indelible ink that will write on any surface', 'name': 'Sharpie'}, {'frequency': 'r', 'id': 957, 'synset': 'shaver.n.03', 'synonyms': ['shaver_(electric)', 'electric_shaver', 'electric_razor'], 'def': 'a razor powered by an electric motor', 'name': 'shaver_(electric)'}, {'frequency': 'c', 'id': 958, 'synset': 'shaving_cream.n.01', 'synonyms': ['shaving_cream', 'shaving_soap'], 'def': 'toiletry consisting that forms a rich lather for softening the beard before shaving', 'name': 'shaving_cream'}, {'frequency': 'r', 'id': 959, 'synset': 'shawl.n.01', 'synonyms': ['shawl'], 'def': 'cloak consisting of an 
oblong piece of cloth used to cover the head and shoulders', 'name': 'shawl'}, {'frequency': 'r', 'id': 960, 'synset': 'shears.n.01', 'synonyms': ['shears'], 'def': 'large scissors with strong blades', 'name': 'shears'}, {'frequency': 'f', 'id': 961, 'synset': 'sheep.n.01', 'synonyms': ['sheep'], 'def': 'woolly usually horned ruminant mammal related to the goat', 'name': 'sheep'}, {'frequency': 'r', 'id': 962, 'synset': 'shepherd_dog.n.01', 'synonyms': ['shepherd_dog', 'sheepdog'], 'def': 'any of various usually long-haired breeds of dog reared to herd and guard sheep', 'name': 'shepherd_dog'}, {'frequency': 'r', 'id': 963, 'synset': 'sherbert.n.01', 'synonyms': ['sherbert', 'sherbet'], 'def': 'a frozen dessert made primarily of fruit juice and sugar', 'name': 'sherbert'}, {'frequency': 'r', 'id': 964, 'synset': 'shield.n.02', 'synonyms': ['shield'], 'def': 'armor carried on the arm to intercept blows', 'name': 'shield'}, {'frequency': 'f', 'id': 965, 'synset': 'shirt.n.01', 'synonyms': ['shirt'], 'def': 'a garment worn on the upper half of the body', 'name': 'shirt'}, {'frequency': 'f', 'id': 966, 'synset': 'shoe.n.01', 'synonyms': ['shoe', 'sneaker_(type_of_shoe)', 'tennis_shoe'], 'def': 'common footwear covering the foot', 'name': 'shoe'}, {'frequency': 'c', 'id': 967, 'synset': 'shopping_bag.n.01', 'synonyms': ['shopping_bag'], 'def': 'a bag made of plastic or strong paper (often with handles); used to transport goods after shopping', 'name': 'shopping_bag'}, {'frequency': 'c', 'id': 968, 'synset': 'shopping_cart.n.01', 'synonyms': ['shopping_cart'], 'def': 'a handcart that holds groceries or other goods while shopping', 'name': 'shopping_cart'}, {'frequency': 'f', 'id': 969, 'synset': 'short_pants.n.01', 'synonyms': ['short_pants', 'shorts_(clothing)', 'trunks_(clothing)'], 'def': 'trousers that end at or above the knee', 'name': 'short_pants'}, {'frequency': 'r', 'id': 970, 'synset': 'shot_glass.n.01', 'synonyms': ['shot_glass'], 'def': 'a small glass 
adequate to hold a single swallow of whiskey', 'name': 'shot_glass'}, {'frequency': 'c', 'id': 971, 'synset': 'shoulder_bag.n.01', 'synonyms': ['shoulder_bag'], 'def': 'a large handbag that can be carried by a strap looped over the shoulder', 'name': 'shoulder_bag'}, {'frequency': 'c', 'id': 972, 'synset': 'shovel.n.01', 'synonyms': ['shovel'], 'def': 'a hand tool for lifting loose material such as snow, dirt, etc.', 'name': 'shovel'}, {'frequency': 'f', 'id': 973, 'synset': 'shower.n.01', 'synonyms': ['shower_head'], 'def': 'a plumbing fixture that sprays water over you', 'name': 'shower_head'}, {'frequency': 'f', 'id': 974, 'synset': 'shower_curtain.n.01', 'synonyms': ['shower_curtain'], 'def': 'a curtain that keeps water from splashing out of the shower area', 'name': 'shower_curtain'}, {'frequency': 'r', 'id': 975, 'synset': 'shredder.n.01', 'synonyms': ['shredder_(for_paper)'], 'def': 'a device that shreds documents', 'name': 'shredder_(for_paper)'}, {'frequency': 'r', 'id': 976, 'synset': 'sieve.n.01', 'synonyms': ['sieve', 'screen_(sieve)'], 'def': 'a strainer for separating lumps from powdered material or grading particles', 'name': 'sieve'}, {'frequency': 'f', 'id': 977, 'synset': 'signboard.n.01', 'synonyms': ['signboard'], 'def': 'structure displaying a board on which advertisements can be posted', 'name': 'signboard'}, {'frequency': 'c', 'id': 978, 'synset': 'silo.n.01', 'synonyms': ['silo'], 'def': 'a cylindrical tower used for storing goods', 'name': 'silo'}, {'frequency': 'f', 'id': 979, 'synset': 'sink.n.01', 'synonyms': ['sink'], 'def': 'plumbing fixture consisting of a water basin fixed to a wall or floor and having a drainpipe', 'name': 'sink'}, {'frequency': 'f', 'id': 980, 'synset': 'skateboard.n.01', 'synonyms': ['skateboard'], 'def': 'a board with wheels that is ridden in a standing or crouching position and propelled by foot', 'name': 'skateboard'}, {'frequency': 'c', 'id': 981, 'synset': 'skewer.n.01', 'synonyms': ['skewer'], 'def': 'a long 
pin for holding meat in position while it is being roasted', 'name': 'skewer'}, {'frequency': 'f', 'id': 982, 'synset': 'ski.n.01', 'synonyms': ['ski'], 'def': 'sports equipment for skiing on snow', 'name': 'ski'}, {'frequency': 'f', 'id': 983, 'synset': 'ski_boot.n.01', 'synonyms': ['ski_boot'], 'def': 'a stiff boot that is fastened to a ski with a ski binding', 'name': 'ski_boot'}, {'frequency': 'f', 'id': 984, 'synset': 'ski_parka.n.01', 'synonyms': ['ski_parka', 'ski_jacket'], 'def': 'a parka to be worn while skiing', 'name': 'ski_parka'}, {'frequency': 'f', 'id': 985, 'synset': 'ski_pole.n.01', 'synonyms': ['ski_pole'], 'def': 'a pole with metal points used as an aid in skiing', 'name': 'ski_pole'}, {'frequency': 'f', 'id': 986, 'synset': 'skirt.n.02', 'synonyms': ['skirt'], 'def': 'a garment hanging from the waist; worn mainly by girls and women', 'name': 'skirt'}, {'frequency': 'c', 'id': 987, 'synset': 'sled.n.01', 'synonyms': ['sled', 'sledge', 'sleigh'], 'def': 'a vehicle or flat object for transportation over snow by sliding or pulled by dogs, etc.', 'name': 'sled'}, {'frequency': 'c', 'id': 988, 'synset': 'sleeping_bag.n.01', 'synonyms': ['sleeping_bag'], 'def': 'large padded bag designed to be slept in outdoors', 'name': 'sleeping_bag'}, {'frequency': 'r', 'id': 989, 'synset': 'sling.n.05', 'synonyms': ['sling_(bandage)', 'triangular_bandage'], 'def': 'bandage to support an injured forearm; slung over the shoulder or neck', 'name': 'sling_(bandage)'}, {'frequency': 'c', 'id': 990, 'synset': 'slipper.n.01', 'synonyms': ['slipper_(footwear)', 'carpet_slipper_(footwear)'], 'def': 'low footwear that can be slipped on and off easily; usually worn indoors', 'name': 'slipper_(footwear)'}, {'frequency': 'r', 'id': 991, 'synset': 'smoothie.n.02', 'synonyms': ['smoothie'], 'def': 'a thick smooth drink consisting of fresh fruit pureed with ice cream or yoghurt or milk', 'name': 'smoothie'}, {'frequency': 'r', 'id': 992, 'synset': 'snake.n.01', 'synonyms': 
['snake', 'serpent'], 'def': 'limbless scaly elongate reptile; some are venomous', 'name': 'snake'}, {'frequency': 'f', 'id': 993, 'synset': 'snowboard.n.01', 'synonyms': ['snowboard'], 'def': 'a board that resembles a broad ski or a small surfboard; used in a standing position to slide down snow-covered slopes', 'name': 'snowboard'}, {'frequency': 'c', 'id': 994, 'synset': 'snowman.n.01', 'synonyms': ['snowman'], 'def': 'a figure of a person made of packed snow', 'name': 'snowman'}, {'frequency': 'c', 'id': 995, 'synset': 'snowmobile.n.01', 'synonyms': ['snowmobile'], 'def': 'tracked vehicle for travel on snow having skis in front', 'name': 'snowmobile'}, {'frequency': 'f', 'id': 996, 'synset': 'soap.n.01', 'synonyms': ['soap'], 'def': 'a cleansing agent made from the salts of vegetable or animal fats', 'name': 'soap'}, {'frequency': 'f', 'id': 997, 'synset': 'soccer_ball.n.01', 'synonyms': ['soccer_ball'], 'def': \"an inflated ball used in playing soccer (called `football' outside of the United States)\", 'name': 'soccer_ball'}, {'frequency': 'f', 'id': 998, 'synset': 'sock.n.01', 'synonyms': ['sock'], 'def': 'cloth covering for the foot; worn inside the shoe; reaches to between the ankle and the knee', 'name': 'sock'}, {'frequency': 'r', 'id': 999, 'synset': 'soda_fountain.n.02', 'synonyms': ['soda_fountain'], 'def': 'an apparatus for dispensing soda water', 'name': 'soda_fountain'}, {'frequency': 'r', 'id': 1000, 'synset': 'soda_water.n.01', 'synonyms': ['carbonated_water', 'club_soda', 'seltzer', 'sparkling_water'], 'def': 'effervescent beverage artificially charged with carbon dioxide', 'name': 'carbonated_water'}, {'frequency': 'f', 'id': 1001, 'synset': 'sofa.n.01', 'synonyms': ['sofa', 'couch', 'lounge'], 'def': 'an upholstered seat for more than one person', 'name': 'sofa'}, {'frequency': 'r', 'id': 1002, 'synset': 'softball.n.01', 'synonyms': ['softball'], 'def': 'ball used in playing softball', 'name': 'softball'}, {'frequency': 'c', 'id': 1003, 
'synset': 'solar_array.n.01', 'synonyms': ['solar_array', 'solar_battery', 'solar_panel'], 'def': 'electrical device consisting of a large array of connected solar cells', 'name': 'solar_array'}, {'frequency': 'r', 'id': 1004, 'synset': 'sombrero.n.02', 'synonyms': ['sombrero'], 'def': 'a straw hat with a tall crown and broad brim; worn in American southwest and in Mexico', 'name': 'sombrero'}, {'frequency': 'c', 'id': 1005, 'synset': 'soup.n.01', 'synonyms': ['soup'], 'def': 'liquid food especially of meat or fish or vegetable stock often containing pieces of solid food', 'name': 'soup'}, {'frequency': 'r', 'id': 1006, 'synset': 'soup_bowl.n.01', 'synonyms': ['soup_bowl'], 'def': 'a bowl for serving soup', 'name': 'soup_bowl'}, {'frequency': 'c', 'id': 1007, 'synset': 'soupspoon.n.01', 'synonyms': ['soupspoon'], 'def': 'a spoon with a rounded bowl for eating soup', 'name': 'soupspoon'}, {'frequency': 'c', 'id': 1008, 'synset': 'sour_cream.n.01', 'synonyms': ['sour_cream', 'soured_cream'], 'def': 'soured light cream', 'name': 'sour_cream'}, {'frequency': 'r', 'id': 1009, 'synset': 'soya_milk.n.01', 'synonyms': ['soya_milk', 'soybean_milk', 'soymilk'], 'def': 'a milk substitute containing soybean flour and water; used in some infant formulas and in making tofu', 'name': 'soya_milk'}, {'frequency': 'r', 'id': 1010, 'synset': 'space_shuttle.n.01', 'synonyms': ['space_shuttle'], 'def': \"a reusable spacecraft with wings for a controlled descent through the Earth's atmosphere\", 'name': 'space_shuttle'}, {'frequency': 'r', 'id': 1011, 'synset': 'sparkler.n.02', 'synonyms': ['sparkler_(fireworks)'], 'def': 'a firework that burns slowly and throws out a shower of sparks', 'name': 'sparkler_(fireworks)'}, {'frequency': 'f', 'id': 1012, 'synset': 'spatula.n.02', 'synonyms': ['spatula'], 'def': 'a hand tool with a thin flexible blade used to mix or spread soft substances', 'name': 'spatula'}, {'frequency': 'r', 'id': 1013, 'synset': 'spear.n.01', 'synonyms': ['spear', 
'lance'], 'def': 'a long pointed rod used as a tool or weapon', 'name': 'spear'}, {'frequency': 'f', 'id': 1014, 'synset': 'spectacles.n.01', 'synonyms': ['spectacles', 'specs', 'eyeglasses', 'glasses'], 'def': 'optical instrument consisting of a frame that holds a pair of lenses for correcting defective vision', 'name': 'spectacles'}, {'frequency': 'c', 'id': 1015, 'synset': 'spice_rack.n.01', 'synonyms': ['spice_rack'], 'def': 'a rack for displaying containers filled with spices', 'name': 'spice_rack'}, {'frequency': 'r', 'id': 1016, 'synset': 'spider.n.01', 'synonyms': ['spider'], 'def': 'predatory arachnid with eight legs, two poison fangs, two feelers, and usually two silk-spinning organs at the back end of the body', 'name': 'spider'}, {'frequency': 'c', 'id': 1017, 'synset': 'sponge.n.01', 'synonyms': ['sponge'], 'def': 'a porous mass usable to absorb water typically used for cleaning', 'name': 'sponge'}, {'frequency': 'f', 'id': 1018, 'synset': 'spoon.n.01', 'synonyms': ['spoon'], 'def': 'a piece of cutlery with a shallow bowl-shaped container and a handle', 'name': 'spoon'}, {'frequency': 'c', 'id': 1019, 'synset': 'sportswear.n.01', 'synonyms': ['sportswear', 'athletic_wear', 'activewear'], 'def': 'attire worn for sport or for casual wear', 'name': 'sportswear'}, {'frequency': 'c', 'id': 1020, 'synset': 'spotlight.n.02', 'synonyms': ['spotlight'], 'def': 'a lamp that produces a strong beam of light to illuminate a restricted area; used to focus attention of a stage performer', 'name': 'spotlight'}, {'frequency': 'r', 'id': 1021, 'synset': 'squirrel.n.01', 'synonyms': ['squirrel'], 'def': 'a kind of arboreal rodent having a long bushy tail', 'name': 'squirrel'}, {'frequency': 'c', 'id': 1022, 'synset': 'stapler.n.01', 'synonyms': ['stapler_(stapling_machine)'], 'def': 'a machine that inserts staples into sheets of paper in order to fasten them together', 'name': 'stapler_(stapling_machine)'}, {'frequency': 'r', 'id': 1023, 'synset': 'starfish.n.01', 
'synonyms': ['starfish', 'sea_star'], 'def': 'echinoderms characterized by five arms extending from a central disk', 'name': 'starfish'}, {'frequency': 'f', 'id': 1024, 'synset': 'statue.n.01', 'synonyms': ['statue_(sculpture)'], 'def': 'a sculpture representing a human or animal', 'name': 'statue_(sculpture)'}, {'frequency': 'c', 'id': 1025, 'synset': 'steak.n.01', 'synonyms': ['steak_(food)'], 'def': 'a slice of meat cut from the fleshy part of an animal or large fish', 'name': 'steak_(food)'}, {'frequency': 'r', 'id': 1026, 'synset': 'steak_knife.n.01', 'synonyms': ['steak_knife'], 'def': 'a sharp table knife used in eating steak', 'name': 'steak_knife'}, {'frequency': 'r', 'id': 1027, 'synset': 'steamer.n.02', 'synonyms': ['steamer_(kitchen_appliance)'], 'def': 'a cooking utensil that can be used to cook food by steaming it', 'name': 'steamer_(kitchen_appliance)'}, {'frequency': 'f', 'id': 1028, 'synset': 'steering_wheel.n.01', 'synonyms': ['steering_wheel'], 'def': 'a handwheel that is used for steering', 'name': 'steering_wheel'}, {'frequency': 'r', 'id': 1029, 'synset': 'stencil.n.01', 'synonyms': ['stencil'], 'def': 'a sheet of material (metal, plastic, etc.) 
that has been perforated with a pattern; ink or paint can pass through the perforations to create the printed pattern on the surface below', 'name': 'stencil'}, {'frequency': 'r', 'id': 1030, 'synset': 'step_ladder.n.01', 'synonyms': ['stepladder'], 'def': 'a folding portable ladder hinged at the top', 'name': 'stepladder'}, {'frequency': 'c', 'id': 1031, 'synset': 'step_stool.n.01', 'synonyms': ['step_stool'], 'def': 'a stool that has one or two steps that fold under the seat', 'name': 'step_stool'}, {'frequency': 'c', 'id': 1032, 'synset': 'stereo.n.01', 'synonyms': ['stereo_(sound_system)'], 'def': 'electronic device for playing audio', 'name': 'stereo_(sound_system)'}, {'frequency': 'r', 'id': 1033, 'synset': 'stew.n.02', 'synonyms': ['stew'], 'def': 'food prepared by stewing especially meat or fish with vegetables', 'name': 'stew'}, {'frequency': 'r', 'id': 1034, 'synset': 'stirrer.n.02', 'synonyms': ['stirrer'], 'def': 'an implement used for stirring', 'name': 'stirrer'}, {'frequency': 'f', 'id': 1035, 'synset': 'stirrup.n.01', 'synonyms': ['stirrup'], 'def': \"support consisting of metal loops into which rider's feet go\", 'name': 'stirrup'}, {'frequency': 'c', 'id': 1036, 'synset': 'stocking.n.01', 'synonyms': ['stockings_(leg_wear)'], 'def': 'close-fitting hosiery to cover the foot and leg; come in matched pairs', 'name': 'stockings_(leg_wear)'}, {'frequency': 'f', 'id': 1037, 'synset': 'stool.n.01', 'synonyms': ['stool'], 'def': 'a simple seat without a back or arms', 'name': 'stool'}, {'frequency': 'f', 'id': 1038, 'synset': 'stop_sign.n.01', 'synonyms': ['stop_sign'], 'def': 'a traffic sign to notify drivers that they must come to a complete stop', 'name': 'stop_sign'}, {'frequency': 'f', 'id': 1039, 'synset': 'stoplight.n.01', 'synonyms': ['brake_light'], 'def': 'a red light on the rear of a motor vehicle that signals when the brakes are applied', 'name': 'brake_light'}, {'frequency': 'f', 'id': 1040, 'synset': 'stove.n.01', 'synonyms': ['stove', 
'kitchen_stove', 'range_(kitchen_appliance)', 'kitchen_range', 'cooking_stove'], 'def': 'a kitchen appliance used for cooking food', 'name': 'stove'}, {'frequency': 'c', 'id': 1041, 'synset': 'strainer.n.01', 'synonyms': ['strainer'], 'def': 'a filter to retain larger pieces while smaller pieces and liquids pass through', 'name': 'strainer'}, {'frequency': 'f', 'id': 1042, 'synset': 'strap.n.01', 'synonyms': ['strap'], 'def': 'an elongated strip of material for binding things together or holding', 'name': 'strap'}, {'frequency': 'f', 'id': 1043, 'synset': 'straw.n.04', 'synonyms': ['straw_(for_drinking)', 'drinking_straw'], 'def': 'a thin paper or plastic tube used to suck liquids into the mouth', 'name': 'straw_(for_drinking)'}, {'frequency': 'f', 'id': 1044, 'synset': 'strawberry.n.01', 'synonyms': ['strawberry'], 'def': 'sweet fleshy red fruit', 'name': 'strawberry'}, {'frequency': 'f', 'id': 1045, 'synset': 'street_sign.n.01', 'synonyms': ['street_sign'], 'def': 'a sign visible from the street', 'name': 'street_sign'}, {'frequency': 'f', 'id': 1046, 'synset': 'streetlight.n.01', 'synonyms': ['streetlight', 'street_lamp'], 'def': 'a lamp supported on a lamppost; for illuminating a street', 'name': 'streetlight'}, {'frequency': 'r', 'id': 1047, 'synset': 'string_cheese.n.01', 'synonyms': ['string_cheese'], 'def': 'cheese formed in long strings twisted together', 'name': 'string_cheese'}, {'frequency': 'r', 'id': 1048, 'synset': 'stylus.n.02', 'synonyms': ['stylus'], 'def': 'a pointed tool for writing or drawing or engraving', 'name': 'stylus'}, {'frequency': 'r', 'id': 1049, 'synset': 'subwoofer.n.01', 'synonyms': ['subwoofer'], 'def': 'a loudspeaker that is designed to reproduce very low bass frequencies', 'name': 'subwoofer'}, {'frequency': 'r', 'id': 1050, 'synset': 'sugar_bowl.n.01', 'synonyms': ['sugar_bowl'], 'def': 'a dish in which sugar is served', 'name': 'sugar_bowl'}, {'frequency': 'r', 'id': 1051, 'synset': 'sugarcane.n.01', 'synonyms': 
['sugarcane_(plant)'], 'def': 'juicy canes whose sap is a source of molasses and commercial sugar; fresh canes are sometimes chewed for the juice', 'name': 'sugarcane_(plant)'}, {'frequency': 'c', 'id': 1052, 'synset': 'suit.n.01', 'synonyms': ['suit_(clothing)'], 'def': 'a set of garments (usually including a jacket and trousers or skirt) for outerwear all of the same fabric and color', 'name': 'suit_(clothing)'}, {'frequency': 'c', 'id': 1053, 'synset': 'sunflower.n.01', 'synonyms': ['sunflower'], 'def': 'any plant of the genus Helianthus having large flower heads with dark disk florets and showy yellow rays', 'name': 'sunflower'}, {'frequency': 'f', 'id': 1054, 'synset': 'sunglasses.n.01', 'synonyms': ['sunglasses'], 'def': 'spectacles that are darkened or polarized to protect the eyes from the glare of the sun', 'name': 'sunglasses'}, {'frequency': 'c', 'id': 1055, 'synset': 'sunhat.n.01', 'synonyms': ['sunhat'], 'def': 'a hat with a broad brim that protects the face from direct exposure to the sun', 'name': 'sunhat'}, {'frequency': 'r', 'id': 1056, 'synset': 'sunscreen.n.01', 'synonyms': ['sunscreen', 'sunblock'], 'def': 'a cream spread on the skin; contains a chemical to filter out ultraviolet light and so protect from sunburn', 'name': 'sunscreen'}, {'frequency': 'f', 'id': 1057, 'synset': 'surfboard.n.01', 'synonyms': ['surfboard'], 'def': 'a narrow buoyant board for riding surf', 'name': 'surfboard'}, {'frequency': 'c', 'id': 1058, 'synset': 'sushi.n.01', 'synonyms': ['sushi'], 'def': 'rice (with raw fish) wrapped in seaweed', 'name': 'sushi'}, {'frequency': 'c', 'id': 1059, 'synset': 'swab.n.02', 'synonyms': ['mop'], 'def': 'cleaning implement consisting of absorbent material fastened to a handle; for cleaning floors', 'name': 'mop'}, {'frequency': 'c', 'id': 1060, 'synset': 'sweat_pants.n.01', 'synonyms': ['sweat_pants'], 'def': 'loose-fitting trousers with elastic cuffs; worn by athletes', 'name': 'sweat_pants'}, {'frequency': 'c', 'id': 1061, 'synset': 
'sweatband.n.02', 'synonyms': ['sweatband'], 'def': 'a band of material tied around the forehead or wrist to absorb sweat', 'name': 'sweatband'}, {'frequency': 'f', 'id': 1062, 'synset': 'sweater.n.01', 'synonyms': ['sweater'], 'def': 'a crocheted or knitted garment covering the upper part of the body', 'name': 'sweater'}, {'frequency': 'f', 'id': 1063, 'synset': 'sweatshirt.n.01', 'synonyms': ['sweatshirt'], 'def': 'cotton knit pullover with long sleeves worn during athletic activity', 'name': 'sweatshirt'}, {'frequency': 'c', 'id': 1064, 'synset': 'sweet_potato.n.02', 'synonyms': ['sweet_potato'], 'def': 'the edible tuberous root of the sweet potato vine', 'name': 'sweet_potato'}, {'frequency': 'f', 'id': 1065, 'synset': 'swimsuit.n.01', 'synonyms': ['swimsuit', 'swimwear', 'bathing_suit', 'swimming_costume', 'bathing_costume', 'swimming_trunks', 'bathing_trunks'], 'def': 'garment worn for swimming', 'name': 'swimsuit'}, {'frequency': 'c', 'id': 1066, 'synset': 'sword.n.01', 'synonyms': ['sword'], 'def': 'a cutting or thrusting weapon that has a long metal blade', 'name': 'sword'}, {'frequency': 'r', 'id': 1067, 'synset': 'syringe.n.01', 'synonyms': ['syringe'], 'def': 'a medical instrument used to inject or withdraw fluids', 'name': 'syringe'}, {'frequency': 'r', 'id': 1068, 'synset': 'tabasco.n.02', 'synonyms': ['Tabasco_sauce'], 'def': 'very spicy sauce (trade name Tabasco) made from fully-aged red peppers', 'name': 'Tabasco_sauce'}, {'frequency': 'r', 'id': 1069, 'synset': 'table-tennis_table.n.01', 'synonyms': ['table-tennis_table', 'ping-pong_table'], 'def': 'a table used for playing table tennis', 'name': 'table-tennis_table'}, {'frequency': 'f', 'id': 1070, 'synset': 'table.n.02', 'synonyms': ['table'], 'def': 'a piece of furniture having a smooth flat top that is usually supported by one or more vertical legs', 'name': 'table'}, {'frequency': 'c', 'id': 1071, 'synset': 'table_lamp.n.01', 'synonyms': ['table_lamp'], 'def': 'a lamp that sits on a table', 
'name': 'table_lamp'}, {'frequency': 'f', 'id': 1072, 'synset': 'tablecloth.n.01', 'synonyms': ['tablecloth'], 'def': 'a covering spread over a dining table', 'name': 'tablecloth'}, {'frequency': 'r', 'id': 1073, 'synset': 'tachometer.n.01', 'synonyms': ['tachometer'], 'def': 'measuring instrument for indicating speed of rotation', 'name': 'tachometer'}, {'frequency': 'r', 'id': 1074, 'synset': 'taco.n.02', 'synonyms': ['taco'], 'def': 'a small tortilla cupped around a filling', 'name': 'taco'}, {'frequency': 'f', 'id': 1075, 'synset': 'tag.n.02', 'synonyms': ['tag'], 'def': 'a label associated with something for the purpose of identification or information', 'name': 'tag'}, {'frequency': 'f', 'id': 1076, 'synset': 'taillight.n.01', 'synonyms': ['taillight', 'rear_light'], 'def': 'lamp (usually red) mounted at the rear of a motor vehicle', 'name': 'taillight'}, {'frequency': 'r', 'id': 1077, 'synset': 'tambourine.n.01', 'synonyms': ['tambourine'], 'def': 'a shallow drum with a single drumhead and with metallic disks in the sides', 'name': 'tambourine'}, {'frequency': 'r', 'id': 1078, 'synset': 'tank.n.01', 'synonyms': ['army_tank', 'armored_combat_vehicle', 'armoured_combat_vehicle'], 'def': 'an enclosed armored military vehicle; has a cannon and moves on caterpillar treads', 'name': 'army_tank'}, {'frequency': 'c', 'id': 1079, 'synset': 'tank.n.02', 'synonyms': ['tank_(storage_vessel)', 'storage_tank'], 'def': 'a large (usually metallic) vessel for holding gases or liquids', 'name': 'tank_(storage_vessel)'}, {'frequency': 'f', 'id': 1080, 'synset': 'tank_top.n.01', 'synonyms': ['tank_top_(clothing)'], 'def': 'a tight-fitting sleeveless shirt with wide shoulder straps and low neck and no front opening', 'name': 'tank_top_(clothing)'}, {'frequency': 'c', 'id': 1081, 'synset': 'tape.n.01', 'synonyms': ['tape_(sticky_cloth_or_paper)'], 'def': 'a long thin piece of cloth or paper as used for binding or fastening', 'name': 'tape_(sticky_cloth_or_paper)'}, {'frequency': 
'c', 'id': 1082, 'synset': 'tape.n.04', 'synonyms': ['tape_measure', 'measuring_tape'], 'def': 'measuring instrument consisting of a narrow strip (cloth or metal) marked in inches or centimeters and used for measuring lengths', 'name': 'tape_measure'}, {'frequency': 'c', 'id': 1083, 'synset': 'tapestry.n.02', 'synonyms': ['tapestry'], 'def': 'a heavy textile with a woven design; used for curtains and upholstery', 'name': 'tapestry'}, {'frequency': 'f', 'id': 1084, 'synset': 'tarpaulin.n.01', 'synonyms': ['tarp'], 'def': 'waterproofed canvas', 'name': 'tarp'}, {'frequency': 'c', 'id': 1085, 'synset': 'tartan.n.01', 'synonyms': ['tartan', 'plaid'], 'def': 'a cloth having a crisscross design', 'name': 'tartan'}, {'frequency': 'c', 'id': 1086, 'synset': 'tassel.n.01', 'synonyms': ['tassel'], 'def': 'adornment consisting of a bunch of cords fastened at one end', 'name': 'tassel'}, {'frequency': 'r', 'id': 1087, 'synset': 'tea_bag.n.01', 'synonyms': ['tea_bag'], 'def': 'a measured amount of tea in a bag for an individual serving of tea', 'name': 'tea_bag'}, {'frequency': 'c', 'id': 1088, 'synset': 'teacup.n.02', 'synonyms': ['teacup'], 'def': 'a cup from which tea is drunk', 'name': 'teacup'}, {'frequency': 'c', 'id': 1089, 'synset': 'teakettle.n.01', 'synonyms': ['teakettle'], 'def': 'kettle for boiling water to make tea', 'name': 'teakettle'}, {'frequency': 'c', 'id': 1090, 'synset': 'teapot.n.01', 'synonyms': ['teapot'], 'def': 'pot for brewing tea; usually has a spout and handle', 'name': 'teapot'}, {'frequency': 'f', 'id': 1091, 'synset': 'teddy.n.01', 'synonyms': ['teddy_bear'], 'def': \"plaything consisting of a child's toy bear (usually plush and stuffed with soft materials)\", 'name': 'teddy_bear'}, {'frequency': 'f', 'id': 1092, 'synset': 'telephone.n.01', 'synonyms': ['telephone', 'phone', 'telephone_set'], 'def': 'electronic device for communicating by voice over long distances', 'name': 'telephone'}, {'frequency': 'c', 'id': 1093, 'synset': 
'telephone_booth.n.01', 'synonyms': ['telephone_booth', 'phone_booth', 'call_box', 'telephone_box', 'telephone_kiosk'], 'def': 'booth for using a telephone', 'name': 'telephone_booth'}, {'frequency': 'f', 'id': 1094, 'synset': 'telephone_pole.n.01', 'synonyms': ['telephone_pole', 'telegraph_pole', 'telegraph_post'], 'def': 'tall pole supporting telephone wires', 'name': 'telephone_pole'}, {'frequency': 'r', 'id': 1095, 'synset': 'telephoto_lens.n.01', 'synonyms': ['telephoto_lens', 'zoom_lens'], 'def': 'a camera lens that magnifies the image', 'name': 'telephoto_lens'}, {'frequency': 'c', 'id': 1096, 'synset': 'television_camera.n.01', 'synonyms': ['television_camera', 'tv_camera'], 'def': 'television equipment for capturing and recording video', 'name': 'television_camera'}, {'frequency': 'f', 'id': 1097, 'synset': 'television_receiver.n.01', 'synonyms': ['television_set', 'tv', 'tv_set'], 'def': 'an electronic device that receives television signals and displays them on a screen', 'name': 'television_set'}, {'frequency': 'f', 'id': 1098, 'synset': 'tennis_ball.n.01', 'synonyms': ['tennis_ball'], 'def': 'ball about the size of a fist used in playing tennis', 'name': 'tennis_ball'}, {'frequency': 'f', 'id': 1099, 'synset': 'tennis_racket.n.01', 'synonyms': ['tennis_racket'], 'def': 'a racket used to play tennis', 'name': 'tennis_racket'}, {'frequency': 'r', 'id': 1100, 'synset': 'tequila.n.01', 'synonyms': ['tequila'], 'def': 'Mexican liquor made from fermented juices of an agave plant', 'name': 'tequila'}, {'frequency': 'c', 'id': 1101, 'synset': 'thermometer.n.01', 'synonyms': ['thermometer'], 'def': 'measuring instrument for measuring temperature', 'name': 'thermometer'}, {'frequency': 'c', 'id': 1102, 'synset': 'thermos.n.01', 'synonyms': ['thermos_bottle'], 'def': 'vacuum flask that preserves temperature of hot or cold drinks', 'name': 'thermos_bottle'}, {'frequency': 'c', 'id': 1103, 'synset': 'thermostat.n.01', 'synonyms': ['thermostat'], 'def': 'a regulator 
for automatically regulating temperature by starting or stopping the supply of heat', 'name': 'thermostat'}, {'frequency': 'r', 'id': 1104, 'synset': 'thimble.n.02', 'synonyms': ['thimble'], 'def': 'a small metal cap to protect the finger while sewing; can be used as a small container', 'name': 'thimble'}, {'frequency': 'c', 'id': 1105, 'synset': 'thread.n.01', 'synonyms': ['thread', 'yarn'], 'def': 'a fine cord of twisted fibers (of cotton or silk or wool or nylon etc.) used in sewing and weaving', 'name': 'thread'}, {'frequency': 'c', 'id': 1106, 'synset': 'thumbtack.n.01', 'synonyms': ['thumbtack', 'drawing_pin', 'pushpin'], 'def': 'a tack for attaching papers to a bulletin board or drawing board', 'name': 'thumbtack'}, {'frequency': 'c', 'id': 1107, 'synset': 'tiara.n.01', 'synonyms': ['tiara'], 'def': 'a jeweled headdress worn by women on formal occasions', 'name': 'tiara'}, {'frequency': 'c', 'id': 1108, 'synset': 'tiger.n.02', 'synonyms': ['tiger'], 'def': 'large feline of forests in most of Asia having a tawny coat with black stripes', 'name': 'tiger'}, {'frequency': 'c', 'id': 1109, 'synset': 'tights.n.01', 'synonyms': ['tights_(clothing)', 'leotards'], 'def': 'skintight knit hose covering the body from the waist to the feet worn by acrobats and dancers and as stockings by women and girls', 'name': 'tights_(clothing)'}, {'frequency': 'c', 'id': 1110, 'synset': 'timer.n.01', 'synonyms': ['timer', 'stopwatch'], 'def': 'a timepiece that measures a time interval and signals its end', 'name': 'timer'}, {'frequency': 'f', 'id': 1111, 'synset': 'tinfoil.n.01', 'synonyms': ['tinfoil'], 'def': 'foil made of tin or an alloy of tin and lead', 'name': 'tinfoil'}, {'frequency': 'r', 'id': 1112, 'synset': 'tinsel.n.01', 'synonyms': ['tinsel'], 'def': 'a showy decoration that is basically valueless', 'name': 'tinsel'}, {'frequency': 'f', 'id': 1113, 'synset': 'tissue.n.02', 'synonyms': ['tissue_paper'], 'def': 'a soft thin (usually translucent) paper', 'name': 
'tissue_paper'}, {'frequency': 'c', 'id': 1114, 'synset': 'toast.n.01', 'synonyms': ['toast_(food)'], 'def': 'slice of bread that has been toasted', 'name': 'toast_(food)'}, {'frequency': 'f', 'id': 1115, 'synset': 'toaster.n.02', 'synonyms': ['toaster'], 'def': 'a kitchen appliance (usually electric) for toasting bread', 'name': 'toaster'}, {'frequency': 'c', 'id': 1116, 'synset': 'toaster_oven.n.01', 'synonyms': ['toaster_oven'], 'def': 'kitchen appliance consisting of a small electric oven for toasting or warming food', 'name': 'toaster_oven'}, {'frequency': 'f', 'id': 1117, 'synset': 'toilet.n.02', 'synonyms': ['toilet'], 'def': 'a plumbing fixture for defecation and urination', 'name': 'toilet'}, {'frequency': 'f', 'id': 1118, 'synset': 'toilet_tissue.n.01', 'synonyms': ['toilet_tissue', 'toilet_paper', 'bathroom_tissue'], 'def': 'a soft thin absorbent paper for use in toilets', 'name': 'toilet_tissue'}, {'frequency': 'f', 'id': 1119, 'synset': 'tomato.n.01', 'synonyms': ['tomato'], 'def': 'mildly acid red or yellow pulpy fruit eaten as a vegetable', 'name': 'tomato'}, {'frequency': 'c', 'id': 1120, 'synset': 'tongs.n.01', 'synonyms': ['tongs'], 'def': 'any of various devices for taking hold of objects; usually have two hinged legs with handles above and pointed hooks below', 'name': 'tongs'}, {'frequency': 'c', 'id': 1121, 'synset': 'toolbox.n.01', 'synonyms': ['toolbox'], 'def': 'a box or chest or cabinet for holding hand tools', 'name': 'toolbox'}, {'frequency': 'f', 'id': 1122, 'synset': 'toothbrush.n.01', 'synonyms': ['toothbrush'], 'def': 'small brush; has long handle; used to clean teeth', 'name': 'toothbrush'}, {'frequency': 'f', 'id': 1123, 'synset': 'toothpaste.n.01', 'synonyms': ['toothpaste'], 'def': 'a dentifrice in the form of a paste', 'name': 'toothpaste'}, {'frequency': 'c', 'id': 1124, 'synset': 'toothpick.n.01', 'synonyms': ['toothpick'], 'def': 'pick consisting of a small strip of wood or plastic; used to pick food from between the teeth', 
'name': 'toothpick'}, {'frequency': 'c', 'id': 1125, 'synset': 'top.n.09', 'synonyms': ['cover'], 'def': 'covering for a hole (especially a hole in the top of a container)', 'name': 'cover'}, {'frequency': 'c', 'id': 1126, 'synset': 'tortilla.n.01', 'synonyms': ['tortilla'], 'def': 'thin unleavened pancake made from cornmeal or wheat flour', 'name': 'tortilla'}, {'frequency': 'c', 'id': 1127, 'synset': 'tow_truck.n.01', 'synonyms': ['tow_truck'], 'def': 'a truck equipped to hoist and pull wrecked cars (or to remove cars from no-parking zones)', 'name': 'tow_truck'}, {'frequency': 'f', 'id': 1128, 'synset': 'towel.n.01', 'synonyms': ['towel'], 'def': 'a rectangular piece of absorbent cloth (or paper) for drying or wiping', 'name': 'towel'}, {'frequency': 'f', 'id': 1129, 'synset': 'towel_rack.n.01', 'synonyms': ['towel_rack', 'towel_rail', 'towel_bar'], 'def': 'a rack consisting of one or more bars on which towels can be hung', 'name': 'towel_rack'}, {'frequency': 'f', 'id': 1130, 'synset': 'toy.n.03', 'synonyms': ['toy'], 'def': 'a device regarded as providing amusement', 'name': 'toy'}, {'frequency': 'c', 'id': 1131, 'synset': 'tractor.n.01', 'synonyms': ['tractor_(farm_equipment)'], 'def': 'a wheeled vehicle with large wheels; used in farming and other applications', 'name': 'tractor_(farm_equipment)'}, {'frequency': 'f', 'id': 1132, 'synset': 'traffic_light.n.01', 'synonyms': ['traffic_light'], 'def': 'a device to control vehicle traffic often consisting of three or more lights', 'name': 'traffic_light'}, {'frequency': 'r', 'id': 1133, 'synset': 'trail_bike.n.01', 'synonyms': ['dirt_bike'], 'def': 'a lightweight motorcycle equipped with rugged tires and suspension for off-road use', 'name': 'dirt_bike'}, {'frequency': 'c', 'id': 1134, 'synset': 'trailer_truck.n.01', 'synonyms': ['trailer_truck', 'tractor_trailer', 'trucking_rig', 'articulated_lorry', 'semi_truck'], 'def': 'a truck consisting of a tractor and trailer together', 'name': 'trailer_truck'}, 
{'frequency': 'f', 'id': 1135, 'synset': 'train.n.01', 'synonyms': ['train_(railroad_vehicle)', 'railroad_train'], 'def': 'public or private transport provided by a line of railway cars coupled together and drawn by a locomotive', 'name': 'train_(railroad_vehicle)'}, {'frequency': 'r', 'id': 1136, 'synset': 'trampoline.n.01', 'synonyms': ['trampoline'], 'def': 'gymnastic apparatus consisting of a strong canvas sheet attached with springs to a metal frame', 'name': 'trampoline'}, {'frequency': 'f', 'id': 1137, 'synset': 'tray.n.01', 'synonyms': ['tray'], 'def': 'an open receptacle for holding or displaying or serving articles or food', 'name': 'tray'}, {'frequency': 'r', 'id': 1138, 'synset': 'tree_house.n.01', 'synonyms': ['tree_house'], 'def': '(NOT A TREE) a PLAYHOUSE built in the branches of a tree', 'name': 'tree_house'}, {'frequency': 'r', 'id': 1139, 'synset': 'trench_coat.n.01', 'synonyms': ['trench_coat'], 'def': 'a military style raincoat; belted with deep pockets', 'name': 'trench_coat'}, {'frequency': 'r', 'id': 1140, 'synset': 'triangle.n.05', 'synonyms': ['triangle_(musical_instrument)'], 'def': 'a percussion instrument consisting of a metal bar bent in the shape of an open triangle', 'name': 'triangle_(musical_instrument)'}, {'frequency': 'r', 'id': 1141, 'synset': 'tricycle.n.01', 'synonyms': ['tricycle'], 'def': 'a vehicle with three wheels that is moved by foot pedals', 'name': 'tricycle'}, {'frequency': 'c', 'id': 1142, 'synset': 'tripod.n.01', 'synonyms': ['tripod'], 'def': 'a three-legged rack used for support', 'name': 'tripod'}, {'frequency': 'f', 'id': 1143, 'synset': 'trouser.n.01', 'synonyms': ['trousers', 'pants_(clothing)'], 'def': 'a garment extending from the waist to the knee or ankle, covering each leg separately', 'name': 'trousers'}, {'frequency': 'f', 'id': 1144, 'synset': 'truck.n.01', 'synonyms': ['truck'], 'def': 'an automotive vehicle suitable for hauling', 'name': 'truck'}, {'frequency': 'r', 'id': 1145, 'synset': 
'truffle.n.03', 'synonyms': ['truffle_(chocolate)', 'chocolate_truffle'], 'def': 'creamy chocolate candy', 'name': 'truffle_(chocolate)'}, {'frequency': 'c', 'id': 1146, 'synset': 'trunk.n.02', 'synonyms': ['trunk'], 'def': 'luggage consisting of a large strong case used when traveling or for storage', 'name': 'trunk'}, {'frequency': 'r', 'id': 1147, 'synset': 'tub.n.02', 'synonyms': ['vat'], 'def': 'a large open vessel for holding or storing liquids', 'name': 'vat'}, {'frequency': 'c', 'id': 1148, 'synset': 'turban.n.01', 'synonyms': ['turban'], 'def': 'a traditional headdress consisting of a long scarf wrapped around the head', 'name': 'turban'}, {'frequency': 'r', 'id': 1149, 'synset': 'turkey.n.01', 'synonyms': ['turkey_(bird)'], 'def': 'large gallinaceous bird with fan-shaped tail; widely domesticated for food', 'name': 'turkey_(bird)'}, {'frequency': 'c', 'id': 1150, 'synset': 'turkey.n.04', 'synonyms': ['turkey_(food)'], 'def': 'flesh of large domesticated fowl usually roasted', 'name': 'turkey_(food)'}, {'frequency': 'r', 'id': 1151, 'synset': 'turnip.n.01', 'synonyms': ['turnip'], 'def': 'widely cultivated plant having a large fleshy edible white or yellow root', 'name': 'turnip'}, {'frequency': 'c', 'id': 1152, 'synset': 'turtle.n.02', 'synonyms': ['turtle'], 'def': 'any of various aquatic and land reptiles having a bony shell and flipper-like limbs for swimming', 'name': 'turtle'}, {'frequency': 'r', 'id': 1153, 'synset': 'turtleneck.n.01', 'synonyms': ['turtleneck_(clothing)', 'polo-neck'], 'def': 'a sweater or jersey with a high close-fitting collar', 'name': 'turtleneck_(clothing)'}, {'frequency': 'r', 'id': 1154, 'synset': 'typewriter.n.01', 'synonyms': ['typewriter'], 'def': 'hand-operated character printer for printing written messages one character at a time', 'name': 'typewriter'}, {'frequency': 'f', 'id': 1155, 'synset': 'umbrella.n.01', 'synonyms': ['umbrella'], 'def': 'a lightweight handheld collapsible canopy', 'name': 'umbrella'}, 
{'frequency': 'c', 'id': 1156, 'synset': 'underwear.n.01', 'synonyms': ['underwear', 'underclothes', 'underclothing', 'underpants'], 'def': 'undergarment worn next to the skin and under the outer garments', 'name': 'underwear'}, {'frequency': 'r', 'id': 1157, 'synset': 'unicycle.n.01', 'synonyms': ['unicycle'], 'def': 'a vehicle with a single wheel that is driven by pedals', 'name': 'unicycle'}, {'frequency': 'c', 'id': 1158, 'synset': 'urinal.n.01', 'synonyms': ['urinal'], 'def': 'a plumbing fixture (usually attached to the wall) used by men to urinate', 'name': 'urinal'}, {'frequency': 'r', 'id': 1159, 'synset': 'urn.n.01', 'synonyms': ['urn'], 'def': 'a large vase that usually has a pedestal or feet', 'name': 'urn'}, {'frequency': 'c', 'id': 1160, 'synset': 'vacuum.n.04', 'synonyms': ['vacuum_cleaner'], 'def': 'an electrical home appliance that cleans by suction', 'name': 'vacuum_cleaner'}, {'frequency': 'c', 'id': 1161, 'synset': 'valve.n.03', 'synonyms': ['valve'], 'def': 'control consisting of a mechanical device for controlling the flow of a fluid', 'name': 'valve'}, {'frequency': 'f', 'id': 1162, 'synset': 'vase.n.01', 'synonyms': ['vase'], 'def': 'an open jar of glass or porcelain used as an ornament or to hold flowers', 'name': 'vase'}, {'frequency': 'c', 'id': 1163, 'synset': 'vending_machine.n.01', 'synonyms': ['vending_machine'], 'def': 'a slot machine for selling goods', 'name': 'vending_machine'}, {'frequency': 'f', 'id': 1164, 'synset': 'vent.n.01', 'synonyms': ['vent', 'blowhole', 'air_vent'], 'def': 'a hole for the escape of gas or air', 'name': 'vent'}, {'frequency': 'c', 'id': 1165, 'synset': 'videotape.n.01', 'synonyms': ['videotape'], 'def': 'a video recording made on magnetic tape', 'name': 'videotape'}, {'frequency': 'r', 'id': 1166, 'synset': 'vinegar.n.01', 'synonyms': ['vinegar'], 'def': 'sour-tasting liquid produced usually by oxidation of the alcohol in wine or cider and used as a condiment or food preservative', 'name': 'vinegar'}, 
{'frequency': 'r', 'id': 1167, 'synset': 'violin.n.01', 'synonyms': ['violin', 'fiddle'], 'def': 'bowed stringed instrument that is the highest member of the violin family', 'name': 'violin'}, {'frequency': 'r', 'id': 1168, 'synset': 'vodka.n.01', 'synonyms': ['vodka'], 'def': 'unaged colorless liquor originating in Russia', 'name': 'vodka'}, {'frequency': 'r', 'id': 1169, 'synset': 'volleyball.n.02', 'synonyms': ['volleyball'], 'def': 'an inflated ball used in playing volleyball', 'name': 'volleyball'}, {'frequency': 'r', 'id': 1170, 'synset': 'vulture.n.01', 'synonyms': ['vulture'], 'def': 'any of various large birds of prey having naked heads and weak claws and feeding chiefly on carrion', 'name': 'vulture'}, {'frequency': 'c', 'id': 1171, 'synset': 'waffle.n.01', 'synonyms': ['waffle'], 'def': 'pancake batter baked in a waffle iron', 'name': 'waffle'}, {'frequency': 'r', 'id': 1172, 'synset': 'waffle_iron.n.01', 'synonyms': ['waffle_iron'], 'def': 'a kitchen appliance for baking waffles', 'name': 'waffle_iron'}, {'frequency': 'c', 'id': 1173, 'synset': 'wagon.n.01', 'synonyms': ['wagon'], 'def': 'any of various kinds of wheeled vehicles drawn by an animal or a tractor', 'name': 'wagon'}, {'frequency': 'c', 'id': 1174, 'synset': 'wagon_wheel.n.01', 'synonyms': ['wagon_wheel'], 'def': 'a wheel of a wagon', 'name': 'wagon_wheel'}, {'frequency': 'c', 'id': 1175, 'synset': 'walking_stick.n.01', 'synonyms': ['walking_stick'], 'def': 'a stick carried in the hand for support in walking', 'name': 'walking_stick'}, {'frequency': 'c', 'id': 1176, 'synset': 'wall_clock.n.01', 'synonyms': ['wall_clock'], 'def': 'a clock mounted on a wall', 'name': 'wall_clock'}, {'frequency': 'f', 'id': 1177, 'synset': 'wall_socket.n.01', 'synonyms': ['wall_socket', 'wall_plug', 'electric_outlet', 'electrical_outlet', 'outlet', 'electric_receptacle'], 'def': 'receptacle providing a place in a wiring system where current can be taken to run electrical devices', 'name': 'wall_socket'}, 
{'frequency': 'c', 'id': 1178, 'synset': 'wallet.n.01', 'synonyms': ['wallet', 'billfold'], 'def': 'a pocket-size case for holding papers and paper money', 'name': 'wallet'}, {'frequency': 'r', 'id': 1179, 'synset': 'walrus.n.01', 'synonyms': ['walrus'], 'def': 'either of two large northern marine mammals having ivory tusks and tough hide over thick blubber', 'name': 'walrus'}, {'frequency': 'r', 'id': 1180, 'synset': 'wardrobe.n.01', 'synonyms': ['wardrobe'], 'def': 'a tall piece of furniture that provides storage space for clothes; has a door and rails or hooks for hanging clothes', 'name': 'wardrobe'}, {'frequency': 'r', 'id': 1181, 'synset': 'wasabi.n.02', 'synonyms': ['wasabi'], 'def': 'the thick green root of the wasabi plant that the Japanese use in cooking and that tastes like strong horseradish', 'name': 'wasabi'}, {'frequency': 'c', 'id': 1182, 'synset': 'washer.n.03', 'synonyms': ['automatic_washer', 'washing_machine'], 'def': 'a home appliance for washing clothes and linens automatically', 'name': 'automatic_washer'}, {'frequency': 'f', 'id': 1183, 'synset': 'watch.n.01', 'synonyms': ['watch', 'wristwatch'], 'def': 'a small, portable timepiece', 'name': 'watch'}, {'frequency': 'f', 'id': 1184, 'synset': 'water_bottle.n.01', 'synonyms': ['water_bottle'], 'def': 'a bottle for holding water', 'name': 'water_bottle'}, {'frequency': 'c', 'id': 1185, 'synset': 'water_cooler.n.01', 'synonyms': ['water_cooler'], 'def': 'a device for cooling and dispensing drinking water', 'name': 'water_cooler'}, {'frequency': 'c', 'id': 1186, 'synset': 'water_faucet.n.01', 'synonyms': ['water_faucet', 'water_tap', 'tap_(water_faucet)'], 'def': 'a faucet for drawing water from a pipe or cask', 'name': 'water_faucet'}, {'frequency': 'r', 'id': 1187, 'synset': 'water_filter.n.01', 'synonyms': ['water_filter'], 'def': 'a filter to remove impurities from the water supply', 'name': 'water_filter'}, {'frequency': 'r', 'id': 1188, 'synset': 'water_heater.n.01', 'synonyms': 
['water_heater', 'hot-water_heater'], 'def': 'a heater and storage tank to supply heated water', 'name': 'water_heater'}, {'frequency': 'r', 'id': 1189, 'synset': 'water_jug.n.01', 'synonyms': ['water_jug'], 'def': 'a jug that holds water', 'name': 'water_jug'}, {'frequency': 'r', 'id': 1190, 'synset': 'water_pistol.n.01', 'synonyms': ['water_gun', 'squirt_gun'], 'def': 'plaything consisting of a toy pistol that squirts water', 'name': 'water_gun'}, {'frequency': 'c', 'id': 1191, 'synset': 'water_scooter.n.01', 'synonyms': ['water_scooter', 'sea_scooter', 'jet_ski'], 'def': 'a motorboat resembling a motor scooter (NOT A SURFBOARD OR WATER SKI)', 'name': 'water_scooter'}, {'frequency': 'c', 'id': 1192, 'synset': 'water_ski.n.01', 'synonyms': ['water_ski'], 'def': 'broad ski for skimming over water towed by a speedboat (DO NOT MARK WATER)', 'name': 'water_ski'}, {'frequency': 'c', 'id': 1193, 'synset': 'water_tower.n.01', 'synonyms': ['water_tower'], 'def': 'a large reservoir for water', 'name': 'water_tower'}, {'frequency': 'c', 'id': 1194, 'synset': 'watering_can.n.01', 'synonyms': ['watering_can'], 'def': 'a container with a handle and a spout with a perforated nozzle; used to sprinkle water over plants', 'name': 'watering_can'}, {'frequency': 'c', 'id': 1195, 'synset': 'watermelon.n.02', 'synonyms': ['watermelon'], 'def': 'large oblong or roundish melon with a hard green rind and sweet watery red or occasionally yellowish pulp', 'name': 'watermelon'}, {'frequency': 'f', 'id': 1196, 'synset': 'weathervane.n.01', 'synonyms': ['weathervane', 'vane_(weathervane)', 'wind_vane'], 'def': 'mechanical device attached to an elevated structure; rotates freely to show the direction of the wind', 'name': 'weathervane'}, {'frequency': 'c', 'id': 1197, 'synset': 'webcam.n.01', 'synonyms': ['webcam'], 'def': 'a digital camera designed to take digital photographs and transmit them over the internet', 'name': 'webcam'}, {'frequency': 'c', 'id': 1198, 'synset': 'wedding_cake.n.01', 
'synonyms': ['wedding_cake', 'bridecake'], 'def': 'a rich cake with two or more tiers and covered with frosting and decorations; served at a wedding reception', 'name': 'wedding_cake'}, {'frequency': 'c', 'id': 1199, 'synset': 'wedding_ring.n.01', 'synonyms': ['wedding_ring', 'wedding_band'], 'def': 'a ring given to the bride and/or groom at the wedding', 'name': 'wedding_ring'}, {'frequency': 'f', 'id': 1200, 'synset': 'wet_suit.n.01', 'synonyms': ['wet_suit'], 'def': 'a close-fitting garment made of a permeable material; worn in cold water to retain body heat', 'name': 'wet_suit'}, {'frequency': 'f', 'id': 1201, 'synset': 'wheel.n.01', 'synonyms': ['wheel'], 'def': 'a circular frame with spokes (or a solid disc) that can rotate on a shaft or axle', 'name': 'wheel'}, {'frequency': 'c', 'id': 1202, 'synset': 'wheelchair.n.01', 'synonyms': ['wheelchair'], 'def': 'a movable chair mounted on large wheels', 'name': 'wheelchair'}, {'frequency': 'c', 'id': 1203, 'synset': 'whipped_cream.n.01', 'synonyms': ['whipped_cream'], 'def': 'cream that has been beaten until light and fluffy', 'name': 'whipped_cream'}, {'frequency': 'r', 'id': 1204, 'synset': 'whiskey.n.01', 'synonyms': ['whiskey'], 'def': 'a liquor made from fermented mash of grain', 'name': 'whiskey'}, {'frequency': 'r', 'id': 1205, 'synset': 'whistle.n.03', 'synonyms': ['whistle'], 'def': 'a small wind instrument that produces a whistling sound by blowing into it', 'name': 'whistle'}, {'frequency': 'r', 'id': 1206, 'synset': 'wick.n.02', 'synonyms': ['wick'], 'def': 'a loosely woven cord in a candle or oil lamp that is lit on fire', 'name': 'wick'}, {'frequency': 'c', 'id': 1207, 'synset': 'wig.n.01', 'synonyms': ['wig'], 'def': 'hairpiece covering the head and made of real or synthetic hair', 'name': 'wig'}, {'frequency': 'c', 'id': 1208, 'synset': 'wind_chime.n.01', 'synonyms': ['wind_chime'], 'def': 'a decorative arrangement of pieces of metal or glass or pottery that hang together loosely so the wind can 
cause them to tinkle', 'name': 'wind_chime'}, {'frequency': 'c', 'id': 1209, 'synset': 'windmill.n.01', 'synonyms': ['windmill'], 'def': 'a mill that is powered by the wind', 'name': 'windmill'}, {'frequency': 'c', 'id': 1210, 'synset': 'window_box.n.01', 'synonyms': ['window_box_(for_plants)'], 'def': 'a container for growing plants on a windowsill', 'name': 'window_box_(for_plants)'}, {'frequency': 'f', 'id': 1211, 'synset': 'windshield_wiper.n.01', 'synonyms': ['windshield_wiper', 'windscreen_wiper', 'wiper_(for_windshield/screen)'], 'def': 'a mechanical device that cleans the windshield', 'name': 'windshield_wiper'}, {'frequency': 'c', 'id': 1212, 'synset': 'windsock.n.01', 'synonyms': ['windsock', 'air_sock', 'air-sleeve', 'wind_sleeve', 'wind_cone'], 'def': 'a truncated cloth cone mounted on a mast/pole; shows wind direction', 'name': 'windsock'}, {'frequency': 'f', 'id': 1213, 'synset': 'wine_bottle.n.01', 'synonyms': ['wine_bottle'], 'def': 'a bottle for holding wine', 'name': 'wine_bottle'}, {'frequency': 'r', 'id': 1214, 'synset': 'wine_bucket.n.01', 'synonyms': ['wine_bucket', 'wine_cooler'], 'def': 'a bucket of ice used to chill a bottle of wine', 'name': 'wine_bucket'}, {'frequency': 'f', 'id': 1215, 'synset': 'wineglass.n.01', 'synonyms': ['wineglass'], 'def': 'a glass that has a stem and in which wine is served', 'name': 'wineglass'}, {'frequency': 'r', 'id': 1216, 'synset': 'wing_chair.n.01', 'synonyms': ['wing_chair'], 'def': 'easy chair having wings on each side of a high back', 'name': 'wing_chair'}, {'frequency': 'c', 'id': 1217, 'synset': 'winker.n.02', 'synonyms': ['blinder_(for_horses)'], 'def': 'blinds that prevent a horse from seeing something on either side', 'name': 'blinder_(for_horses)'}, {'frequency': 'c', 'id': 1218, 'synset': 'wok.n.01', 'synonyms': ['wok'], 'def': 'pan with a convex bottom; used for frying in Chinese cooking', 'name': 'wok'}, {'frequency': 'r', 'id': 1219, 'synset': 'wolf.n.01', 'synonyms': ['wolf'], 'def': 'a wild 
carnivorous mammal of the dog family, living and hunting in packs', 'name': 'wolf'}, {'frequency': 'c', 'id': 1220, 'synset': 'wooden_spoon.n.02', 'synonyms': ['wooden_spoon'], 'def': 'a spoon made of wood', 'name': 'wooden_spoon'}, {'frequency': 'c', 'id': 1221, 'synset': 'wreath.n.01', 'synonyms': ['wreath'], 'def': 'an arrangement of flowers, leaves, or stems fastened in a ring', 'name': 'wreath'}, {'frequency': 'c', 'id': 1222, 'synset': 'wrench.n.03', 'synonyms': ['wrench', 'spanner'], 'def': 'a hand tool that is used to hold or twist a nut or bolt', 'name': 'wrench'}, {'frequency': 'c', 'id': 1223, 'synset': 'wristband.n.01', 'synonyms': ['wristband'], 'def': 'band consisting of a part of a sleeve that covers the wrist', 'name': 'wristband'}, {'frequency': 'f', 'id': 1224, 'synset': 'wristlet.n.01', 'synonyms': ['wristlet', 'wrist_band'], 'def': 'a band or bracelet worn around the wrist', 'name': 'wristlet'}, {'frequency': 'r', 'id': 1225, 'synset': 'yacht.n.01', 'synonyms': ['yacht'], 'def': 'an expensive vessel propelled by sail or power and used for cruising or racing', 'name': 'yacht'}, {'frequency': 'r', 'id': 1226, 'synset': 'yak.n.02', 'synonyms': ['yak'], 'def': 'large long-haired wild ox of Tibet often domesticated', 'name': 'yak'}, {'frequency': 'c', 'id': 1227, 'synset': 'yogurt.n.01', 'synonyms': ['yogurt', 'yoghurt', 'yoghourt'], 'def': 'a custard-like food made from curdled milk', 'name': 'yogurt'}, {'frequency': 'r', 'id': 1228, 'synset': 'yoke.n.07', 'synonyms': ['yoke_(animal_equipment)'], 'def': 'gear joining two animals at the neck; NOT egg yolk', 'name': 'yoke_(animal_equipment)'}, {'frequency': 'f', 'id': 1229, 'synset': 'zebra.n.01', 'synonyms': ['zebra'], 'def': 'any of several fleet black-and-white striped African equines', 'name': 'zebra'}, {'frequency': 'c', 'id': 1230, 'synset': 'zucchini.n.02', 'synonyms': ['zucchini', 'courgette'], 'def': 'small cucumber-shaped vegetable marrow; typically dark green', 'name': 'zucchini'}]  # 
noqa\n# fmt: on\n"
  },
  {
    "path": "detectron2/data/datasets/pascal_voc.py",
    "content": "# -*- coding: utf-8 -*-\n# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\n\nfrom fvcore.common.file_io import PathManager\nimport os\nimport numpy as np\nimport xml.etree.ElementTree as ET\n\nfrom detectron2.structures import BoxMode\nfrom detectron2.data import DatasetCatalog, MetadataCatalog\n\n\n__all__ = [\"register_pascal_voc\"]\n\n\n# fmt: off\nCLASS_NAMES = [\n    \"aeroplane\", \"bicycle\", \"bird\", \"boat\", \"bottle\", \"bus\", \"car\", \"cat\",\n    \"chair\", \"cow\", \"diningtable\", \"dog\", \"horse\", \"motorbike\", \"person\",\n    \"pottedplant\", \"sheep\", \"sofa\", \"train\", \"tvmonitor\",\n]\n# fmt: on\n\n\ndef load_voc_instances(dirname: str, split: str):\n    \"\"\"\n    Load Pascal VOC detection annotations to Detectron2 format.\n\n    Args:\n        dirname: Contain \"Annotations\", \"ImageSets\", \"JPEGImages\"\n        split (str): one of \"train\", \"test\", \"val\", \"trainval\"\n    \"\"\"\n    with PathManager.open(os.path.join(dirname, \"ImageSets\", \"Main\", split + \".txt\")) as f:\n        fileids = np.loadtxt(f, dtype=np.str)\n\n    dicts = []\n    for fileid in fileids:\n        anno_file = os.path.join(dirname, \"Annotations\", fileid + \".xml\")\n        jpeg_file = os.path.join(dirname, \"JPEGImages\", fileid + \".jpg\")\n\n        tree = ET.parse(anno_file)\n\n        r = {\n            \"file_name\": jpeg_file,\n            \"image_id\": fileid,\n            \"height\": int(tree.findall(\"./size/height\")[0].text),\n            \"width\": int(tree.findall(\"./size/width\")[0].text),\n        }\n        instances = []\n\n        for obj in tree.findall(\"object\"):\n            cls = obj.find(\"name\").text\n            # We include \"difficult\" samples in training.\n            # Based on limited experiments, they don't hurt accuracy.\n            # difficult = int(obj.find(\"difficult\").text)\n            # if difficult == 1:\n            # continue\n            bbox = 
obj.find(\"bndbox\")\n            bbox = [float(bbox.find(x).text) for x in [\"xmin\", \"ymin\", \"xmax\", \"ymax\"]]\n            # Original annotations are integers in the range [1, W or H]\n            # Assuming they mean 1-based pixel indices (inclusive),\n            # a box with annotation (xmin=1, xmax=W) covers the whole image.\n            # In coordinate space this is represented by (xmin=0, xmax=W)\n            bbox[0] -= 1.0\n            bbox[1] -= 1.0\n            instances.append(\n                {\"category_id\": CLASS_NAMES.index(cls), \"bbox\": bbox, \"bbox_mode\": BoxMode.XYXY_ABS}\n            )\n        r[\"annotations\"] = instances\n        dicts.append(r)\n    return dicts\n\n\ndef register_pascal_voc(name, dirname, split, year):\n    DatasetCatalog.register(name, lambda: load_voc_instances(dirname, split))\n    MetadataCatalog.get(name).set(\n        thing_classes=CLASS_NAMES, dirname=dirname, year=year, split=split\n    )\n"
  },
  {
    "path": "detectron2/data/datasets/register_coco.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport copy\n\nfrom detectron2.data import DatasetCatalog, MetadataCatalog\nfrom .coco import load_coco_json, load_sem_seg\n\n\"\"\"\nThis file contains functions to register a COCO-format dataset to the DatasetCatalog.\n\"\"\"\n\n__all__ = [\"register_coco_instances\", \"register_coco_panoptic_separated\"]\n\n\ndef register_coco_instances(name, metadata, json_file, image_root):\n    \"\"\"\n    Register a dataset in COCO's json annotation format for\n    instance detection, instance segmentation and keypoint detection.\n    (i.e., Type 1 and 2 in http://cocodataset.org/#format-data.\n    `instances*.json` and `person_keypoints*.json` in the dataset).\n\n    This is an example of how to register a new dataset.\n    You can do something similar to this function, to register new datasets.\n\n    Args:\n        name (str): the name that identifies a dataset, e.g. \"coco_2014_train\".\n        metadata (dict): extra metadata associated with this dataset.  You can\n            leave it as an empty dict.\n        json_file (str): path to the json instance annotation file.\n        image_root (str): directory which contains all the images.\n    \"\"\"\n    # 1. register a function which returns dicts\n    DatasetCatalog.register(name, lambda: load_coco_json(json_file, image_root, name))\n\n    # 2. 
Optionally, add metadata about this dataset,\n    # since they might be useful in evaluation, visualization or logging\n    MetadataCatalog.get(name).set(\n        json_file=json_file, image_root=image_root, evaluator_type=\"coco\", **metadata\n    )\n\n\ndef register_coco_panoptic_separated(\n    name, metadata, image_root, panoptic_root, panoptic_json, sem_seg_root, instances_json\n):\n    \"\"\"\n    Register a COCO panoptic segmentation dataset named `name`.\n    The annotations in this registered dataset will contain both instance annotations and\n    semantic annotations, each with its own contiguous ids. Hence it's called \"separated\".\n\n    It follows the setting used by the PanopticFPN paper:\n\n    1. The instance annotations directly come from polygons in the COCO\n       instances annotation task, rather than from the masks in the COCO panoptic annotations.\n\n       The two format have small differences:\n       Polygons in the instance annotations may have overlaps.\n       The mask annotations are produced by labeling the overlapped polygons\n       with depth ordering.\n\n    2. The semantic annotations are converted from panoptic annotations, where\n       all \"things\" are assigned a semantic id of 0.\n       All semantic categories will therefore have ids in contiguous\n       range [1, #stuff_categories].\n\n    This function will also register a pure semantic segmentation dataset\n    named ``name + '_stuffonly'``.\n\n    Args:\n        name (str): the name that identifies a dataset,\n            e.g. 
\"coco_2017_train_panoptic\"\n        metadata (str): extra metadata associated with this dataset.\n        image_root (str): directory which contains all the images\n        panoptic_root (str): directory which contains panoptic annotation images\n        panoptic_json (str): path to the json panoptic annotation file\n        sem_seg_root (str): directory which contains all the ground truth segmentation annotations.\n        instances_json (str): path to the json instance annotation file\n    \"\"\"\n    panoptic_name = name + \"_separated\"\n    DatasetCatalog.register(\n        panoptic_name,\n        lambda: merge_to_panoptic(\n            load_coco_json(instances_json, image_root, panoptic_name),\n            load_sem_seg(sem_seg_root, image_root),\n        ),\n    )\n    MetadataCatalog.get(panoptic_name).set(\n        panoptic_root=panoptic_root,\n        image_root=image_root,\n        panoptic_json=panoptic_json,\n        sem_seg_root=sem_seg_root,\n        json_file=instances_json,  # TODO rename\n        evaluator_type=\"coco_panoptic_seg\",\n        **metadata\n    )\n\n    semantic_name = name + \"_stuffonly\"\n    DatasetCatalog.register(semantic_name, lambda: load_sem_seg(sem_seg_root, image_root))\n    MetadataCatalog.get(semantic_name).set(\n        sem_seg_root=sem_seg_root, image_root=image_root, evaluator_type=\"sem_seg\", **metadata\n    )\n\n\ndef merge_to_panoptic(detection_dicts, sem_seg_dicts):\n    \"\"\"\n    Create dataset dicts for panoptic segmentation, by\n    merging two dicts using \"file_name\" field to match their entries.\n\n    Args:\n        detection_dicts (list[dict]): lists of dicts for object detection or instance segmentation.\n        sem_seg_dicts (list[dict]): lists of dicts for semantic segmentation.\n\n    Returns:\n        list[dict] (one per input image): Each dict contains all (key, value) pairs from dicts in\n            both detection_dicts and sem_seg_dicts that correspond to the same image.\n            The 
function assumes that the same key in different dicts has the same value.\n    \"\"\"\n    results = []\n    sem_seg_file_to_entry = {x[\"file_name\"]: x for x in sem_seg_dicts}\n    assert len(sem_seg_file_to_entry) > 0\n\n    for det_dict in detection_dicts:\n        dic = copy.copy(det_dict)\n        dic.update(sem_seg_file_to_entry[dic[\"file_name\"]])\n        results.append(dic)\n    return results\n"
  },
  {
    "path": "detectron2/data/datasets/register_soba.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport copy\n\nfrom detectron2.data import DatasetCatalog, MetadataCatalog\nfrom .soba import load_soba_json, load_sem_seg\n\n\"\"\"\nThis file contains functions to register a soba-format dataset to the DatasetCatalog.\n\"\"\"\n\n__all__ = [\"register_soba_instances\", \"register_soba_panoptic_separated\"]\n\n\ndef register_soba_instances(name, metadata, json_file, image_root):\n    \"\"\"\n    Register a dataset in soba's json annotation format for\n    instance detection, instance segmentation and keypoint detection.\n    (i.e., Type 1 and 2 in http://sobadataset.org/#format-data.\n    `instances*.json` and `person_keypoints*.json` in the dataset).\n\n    This is an example of how to register a new dataset.\n    You can do something similar to this function, to register new datasets.\n\n    Args:\n        name (str): the name that identifies a dataset, e.g. \"soba_2014_train\".\n        metadata (dict): extra metadata associated with this dataset.  You can\n            leave it as an empty dict.\n        json_file (str): path to the json instance annotation file.\n        image_root (str): directory which contains all the images.\n    \"\"\"\n    # 1. register a function which returns dicts\n    DatasetCatalog.register(name, lambda: load_soba_json(json_file, image_root, name))\n\n    # 2. 
Optionally, add metadata about this dataset,\n    # since they might be useful in evaluation, visualization or logging\n    MetadataCatalog.get(name).set(\n        json_file=json_file, image_root=image_root, evaluator_type=\"soba\", **metadata\n    )\n\n\ndef register_soba_panoptic_separated(\n    name, metadata, image_root, panoptic_root, panoptic_json, sem_seg_root, instances_json\n):\n    \"\"\"\n    Register a soba panoptic segmentation dataset named `name`.\n    The annotations in this registered dataset will contain both instance annotations and\n    semantic annotations, each with its own contiguous ids. Hence it's called \"separated\".\n\n    It follows the setting used by the PanopticFPN paper:\n\n    1. The instance annotations directly come from polygons in the soba\n       instances annotation task, rather than from the masks in the soba panoptic annotations.\n\n       The two format have small differences:\n       Polygons in the instance annotations may have overlaps.\n       The mask annotations are produced by labeling the overlapped polygons\n       with depth ordering.\n\n    2. The semantic annotations are converted from panoptic annotations, where\n       all \"things\" are assigned a semantic id of 0.\n       All semantic categories will therefore have ids in contiguous\n       range [1, #stuff_categories].\n\n    This function will also register a pure semantic segmentation dataset\n    named ``name + '_stuffonly'``.\n\n    Args:\n        name (str): the name that identifies a dataset,\n            e.g. 
\"soba_2017_train_panoptic\"\n        metadata (str): extra metadata associated with this dataset.\n        image_root (str): directory which contains all the images\n        panoptic_root (str): directory which contains panoptic annotation images\n        panoptic_json (str): path to the json panoptic annotation file\n        sem_seg_root (str): directory which contains all the ground truth segmentation annotations.\n        instances_json (str): path to the json instance annotation file\n    \"\"\"\n    panoptic_name = name + \"_separated\"\n    DatasetCatalog.register(\n        panoptic_name,\n        lambda: merge_to_panoptic(\n            load_soba_json(instances_json, image_root, panoptic_name),\n            load_sem_seg(sem_seg_root, image_root),\n        ),\n    )\n    MetadataCatalog.get(panoptic_name).set(\n        panoptic_root=panoptic_root,\n        image_root=image_root,\n        panoptic_json=panoptic_json,\n        sem_seg_root=sem_seg_root,\n        json_file=instances_json,  # TODO rename\n        evaluator_type=\"soba_panoptic_seg\",\n        **metadata\n    )\n\n    semantic_name = name + \"_stuffonly\"\n    DatasetCatalog.register(semantic_name, lambda: load_sem_seg(sem_seg_root, image_root))\n    MetadataCatalog.get(semantic_name).set(\n        sem_seg_root=sem_seg_root, image_root=image_root, evaluator_type=\"sem_seg\", **metadata\n    )\n\n\ndef merge_to_panoptic(detection_dicts, sem_seg_dicts):\n    \"\"\"\n    Create dataset dicts for panoptic segmentation, by\n    merging two dicts using \"file_name\" field to match their entries.\n\n    Args:\n        detection_dicts (list[dict]): lists of dicts for object detection or instance segmentation.\n        sem_seg_dicts (list[dict]): lists of dicts for semantic segmentation.\n\n    Returns:\n        list[dict] (one per input image): Each dict contains all (key, value) pairs from dicts in\n            both detection_dicts and sem_seg_dicts that correspond to the same image.\n            The 
function assumes that the same key in different dicts has the same value.\n    \"\"\"\n    results = []\n    sem_seg_file_to_entry = {x[\"file_name\"]: x for x in sem_seg_dicts}\n    assert len(sem_seg_file_to_entry) > 0\n\n    for det_dict in detection_dicts:\n        dic = copy.copy(det_dict)\n        dic.update(sem_seg_file_to_entry[dic[\"file_name\"]])\n        results.append(dic)\n    return results\n"
  },
  {
    "path": "detectron2/data/datasets/soba.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport io\nimport logging\nimport contextlib\nimport os\nfrom PIL import Image\n\nfrom fvcore.common.timer import Timer\nfrom detectron2.structures import BoxMode\nfrom fvcore.common.file_io import PathManager\n\nfrom .. import MetadataCatalog, DatasetCatalog\n\n\"\"\"\nThis file contains functions to parse SOBA-format annotations into dicts in \"Detectron2 format\".\n\"\"\"\n\n\nlogger = logging.getLogger(__name__)\n\n__all__ = [\"load_soba_json\", \"load_sem_seg\"]\n\n\ndef load_soba_json(json_file, image_root, dataset_name=None):\n    \"\"\"\n    Load a json file with SOBA's instances annotation format.\n    Currently supports instance detection, instance segmentation,\n    person keypoints and densepose annotations.\n\n    Args:\n        json_file (str): full path to the json file in SOBA instances annotation format.\n        image_root (str): the directory where the images in this json file exists.\n        dataset_name (str): the name of the dataset (e.g., soba_2017_train).\n            If provided, this function will also put \"thing_classes\" into\n            the metadata associated with this dataset.\n\n    Returns:\n        list[dict]: a list of dicts in Detectron2 standard format. (See\n        `Using Custom Datasets </tutorials/datasets.html>`_ )\n\n    Notes:\n        1. 
This function does not read the image files.\n           The results do not have the \"image\" field.\n    \"\"\"\n    from pysobatools.soba import SOBA\n\n    timer = Timer()\n    json_file = PathManager.get_local_path(json_file)\n    with contextlib.redirect_stdout(io.StringIO()):\n        soba_api = SOBA(json_file)\n    if timer.seconds() > 1:\n        logger.info(\"Loading {} takes {:.2f} seconds.\".format(json_file, timer.seconds()))\n\n    id_map = None\n    if dataset_name is not None:\n        meta = MetadataCatalog.get(dataset_name)\n        cat_ids = sorted(soba_api.getCatIds())\n        association_ids = soba_api.getAssoIds()\n        cats = soba_api.loadCats(cat_ids)\n        # association_cats = soba_api.loadAsso(association_ids)\n        association = soba_api.loadAsso(association_ids)\n        # The categories in a custom json file may not be sorted.\n        thing_classes = [c[\"name\"] for c in sorted(cats, key=lambda x: x[\"id\"])]\n        association_classes = [c[\"name\"] for c in sorted(association, key=lambda x: x[\"id\"])]\n        meta.association_classes = association_classes\n        meta.thing_classes = thing_classes\n        meta.keypoint_names = ['Object','Shadow']\n        meta.keypoint_flip_map = ({'Object':'Shadow'})\n        meta.keypoint_connection_rules = [('Object','Shadow',(255,255,255))]\n        # meta\n\n        # In SOBA, certain category ids are artificially removed,\n        # and by convention they are always ignored.\n        # We deal with SOBA's id issue and translate\n        # the category ids to contiguous ids in [0, 80).\n\n        # It works by looking at the \"categories\" field in the json, therefore\n        # if users' own json also have incontiguous ids, we'll\n        # apply this mapping as well but print a warning.\n        if not (min(cat_ids) == 1 and max(cat_ids) == len(cat_ids)):\n            if \"soba\" not in dataset_name:\n                logger.warning(\n                    \"\"\"\nCategory ids in 
annotations are not in [1, #categories]! We'll apply a mapping for you.\n\"\"\"\n                )\n        id_map = {v: i for i, v in enumerate(cat_ids)}\n        association_id_map = {v:i for i,v in enumerate(association_ids)}\n        meta.association_dataset_id_to_contiguous_id = association_id_map\n        meta.thing_dataset_id_to_contiguous_id = id_map\n\n    # sort indices for reproducible results\n    img_ids = sorted(list(soba_api.imgs.keys()))\n    # imgs is a list of dicts, each looks something like:\n    # {'license': 4,\n    #  'url': 'http://farm6.staticflickr.com/5454/9413846304_881d5e5c3b_z.jpg',\n    #  'file_name': 'SOBA_val2014_000000001268.jpg',\n    #  'height': 427,\n    #  'width': 640,\n    #  'date_captured': '2013-11-17 05:57:24',\n    #  'id': 1268}\n    imgs = soba_api.loadImgs(img_ids)\n    # anns is a list[list[dict]], where each dict is an annotation\n    # record for an object. The inner list enumerates the objects in an image\n    # and the outer list enumerates over images. 
Example of anns[0]:\n    # [{'segmentation': [[192.81,\n    #     247.09,\n    #     ...\n    #     219.03,\n    #     249.06]],\n    #   'area': 1035.749,\n    #   'iscrowd': 0,\n    #   'image_id': 1268,\n    #   'bbox': [192.81, 224.8, 74.73, 33.43],\n    #   'category_id': 16,\n    #   'id': 42986},\n    #  ...]\n    anns = [soba_api.imgToAnns[img_id] for img_id in img_ids]\n    assoAnns = [soba_api.imgToAssoAnns[img_id] for img_id in img_ids]\n\n    if \"minival\" not in json_file:\n        # The popular valminusminival & minival annotations for SOBA2014 contain this bug.\n        # However the ratio of buggy annotations there is tiny and does not affect accuracy.\n        # Therefore we explicitly white-list them.\n        ann_ids = [ann[\"id\"] for anns_per_image in anns for ann in anns_per_image]\n        asso_ann_ids = [assoAnn[\"id\"] for anns_per_image in assoAnns for assoAnn in anns_per_image ]\n        assert len(set(ann_ids)) == len(ann_ids), \"Annotation ids in '{}' are not unique!\".format(\n            json_file\n        )\n\n    imgs_anns = list(zip(imgs, anns))\n    imgs_asso_anns = list(zip(imgs,assoAnns))\n    logger.info(\"Loaded {} images in SOBA format from {}\".format(len(imgs_anns), json_file))\n\n    dataset_dicts = []\n\n    # TODO: refactoring candidate, one should not have to alter DB reader\n    # every time new data type is added\n    DENSEPOSE_KEYS = [\"dp_x\", \"dp_y\", \"dp_I\", \"dp_U\", \"dp_V\", \"dp_masks\"]\n\n    num_instances_without_valid_segmentation = 0\n\n    for (img_dict, anno_dict_list),(_,asso_anno_dict_list) in zip(imgs_anns,imgs_asso_anns):\n        record = {}\n        record[\"file_name\"] = os.path.join(image_root, img_dict[\"file_name\"])\n        record[\"height\"] = img_dict[\"height\"]\n        record[\"width\"] = img_dict[\"width\"]\n        image_id = record[\"image_id\"] = img_dict[\"id\"]\n\n        objs = []\n        for anno in anno_dict_list:\n            # Check that the image_id in this annotation 
is the same as\n            # the image_id we're looking at.\n            # This fails only when the data parsing logic or the annotation file is buggy.\n\n            # The original SOBA valminusminival2014 & minival2014 annotation files\n            # actually contains bugs that, together with certain ways of using SOBA API,\n            # can trigger this assertion.\n            assert anno[\"image_id\"] == image_id\n\n            assert anno.get(\"ignore\", 0) == 0\n\n            obj = {\n                field: anno[field]\n                for field in [\"iscrowd\", \"bbox\",\"keypoints\", \"category_id\"] + DENSEPOSE_KEYS\n                if field in anno\n            }\n\n            segm = anno.get(\"segmentation\", None)\n            if segm:  # either list[list[float]] or dict(RLE)\n                if not isinstance(segm, dict):\n                    # filter out invalid polygons (< 3 points)\n                    segm = [poly for poly in segm if len(poly) % 2 == 0 and len(poly) >= 6]\n                    if len(segm) == 0:\n                        num_instances_without_valid_segmentation += 1\n                        continue  # ignore this instance\n                obj[\"segmentation\"] = segm\n\n            keypts = anno.get(\"keypoints\", None)\n            if keypts:  # list[int]\n                for idx, v in enumerate(keypts):\n                    if idx % 3 != 2:\n                        # SOBA's segmentation coordinates are floating points in [0, H or W],\n                        # but keypoint coordinates are integers in [0, H-1 or W-1]\n                        # Therefore we assume the coordinates are \"pixel indices\" and\n                        # add 0.5 to convert to floating point coordinates.\n                        keypts[idx] = v + 0.5\n                obj[\"keypoints\"] = keypts\n\n            obj[\"bbox_mode\"] = BoxMode.XYWH_ABS\n            if id_map:\n                obj[\"category_id\"] = id_map[obj[\"category_id\"]]\n            
objs.append(obj)\n        record[\"annotations\"] = objs\n        objs = []\n        for anno in asso_anno_dict_list:\n            # Check that the image_id in this annotation is the same as\n            # the image_id we're looking at.\n            # This fails only when the data parsing logic or the annotation file is buggy.\n\n            # The original SOBA valminusminival2014 & minival2014 annotation files\n            # actually contains bugs that, together with certain ways of using SOBA API,\n            # can trigger this assertion.\n            assert anno[\"image_id\"] == image_id\n\n            assert anno.get(\"ignore\", 0) == 0\n\n            obj = {\n                field: anno[field]\n                for field in [\"iscrowd\", \"bbox\",'light', \"keypoints\", \"category_id\"] + DENSEPOSE_KEYS\n                if field in anno\n            }\n\n            segm = anno.get(\"segmentation\", None)\n            if segm:  # either list[list[float]] or dict(RLE)\n                if not isinstance(segm, dict):\n                    # filter out invalid polygons (< 3 points)\n                    segm = [poly for poly in segm if len(poly) % 2 == 0 and len(poly) >= 6]\n                    if len(segm) == 0:\n                        num_instances_without_valid_segmentation += 1\n                        continue  # ignore this instance\n                obj[\"segmentation\"] = segm\n\n            keypts = anno.get(\"keypoints\", None)\n            if keypts:  # list[int]\n                for idx, v in enumerate(keypts):\n                    if idx % 3 != 2:\n                        # SOBA's segmentation coordinates are floating points in [0, H or W],\n                        # but keypoint coordinates are integers in [0, H-1 or W-1]\n                        # Therefore we assume the coordinates are \"pixel indices\" and\n                        # add 0.5 to convert to floating point coordinates.\n                        keypts[idx] = v + 0.5\n                
obj[\"keypoints\"] = keypts\n\n            obj[\"bbox_mode\"] = BoxMode.XYWH_ABS\n            if id_map:\n                obj[\"category_id\"] = id_map[obj[\"category_id\"]]\n            objs.append(obj)\n        record[\"association_anno\"] = objs\n        \n\n        dataset_dicts.append(record)\n\n    if num_instances_without_valid_segmentation > 0:\n        logger.warn(\n            \"Filtered out {} instances without valid segmentation. \"\n            \"There might be issues in your dataset generation process.\".format(\n                num_instances_without_valid_segmentation\n            )\n        )\n    return dataset_dicts\n\n\n# TODO this function is not specific to SOBA, except for the \"image_id\" logic.\ndef load_sem_seg(gt_root, image_root, gt_ext=\"png\", image_ext=\"jpg\"):\n    \"\"\"\n    Load semantic segmentation datasets. All files under \"gt_root\" with \"gt_ext\" extension are\n    treated as ground truth annotations and all files under \"image_root\" with \"image_ext\" extension\n    as input images. Ground truth and input images are matched using file paths relative to\n    \"gt_root\" and \"image_root\" respectively without taking into account file extensions.\n\n    Args:\n        gt_root (str): full path to ground truth semantic segmentation files. Semantic segmentation\n            annotations are stored as images with integer values in pixels that represent\n            corresponding semantic labels.\n        image_root (str): the directory where the input images are.\n        gt_ext (str): file extension for ground truth annotations.\n        image_ext (str): file extension for input images.\n\n    Returns:\n        list[dict]:\n            a list of dicts in detectron2 standard format without instance-level\n            annotation.\n\n    Notes:\n        1. 
This function does not read the image and ground truth files.\n           The results do not have the \"image\" and \"sem_seg\" fields.\n    \"\"\"\n\n    # We match input images with ground truth based on their relative filepaths (without file\n    # extensions) starting from 'image_root' and 'gt_root' respectively. SOBA API works with integer\n    # IDs, hence, we try to convert these paths to int if possible.\n    def file2id(folder_path, file_path):\n        # TODO id is not used.\n        # extract relative path starting from `folder_path`\n        image_id = os.path.normpath(os.path.relpath(file_path, start=folder_path))\n        # remove file extension\n        image_id = os.path.splitext(image_id)[0]\n        try:\n            image_id = int(image_id)\n        except ValueError:\n            pass\n        return image_id\n\n    input_files = sorted(\n        (os.path.join(image_root, f) for f in PathManager.ls(image_root) if f.endswith(image_ext)),\n        key=lambda file_path: file2id(image_root, file_path),\n    )\n    gt_files = sorted(\n        (os.path.join(gt_root, f) for f in PathManager.ls(gt_root) if f.endswith(gt_ext)),\n        key=lambda file_path: file2id(gt_root, file_path),\n    )\n\n    assert len(gt_files) > 0, \"No annotations found in {}.\".format(gt_root)\n\n    # Use the intersection, so that val2017_100 annotations can run smoothly with val2017 images\n    if len(input_files) != len(gt_files):\n        logger.warn(\n            \"Directory {} and {} has {} and {} files, respectively.\".format(\n                image_root, gt_root, len(input_files), len(gt_files)\n            )\n        )\n        input_basenames = [os.path.basename(f)[: -len(image_ext)] for f in input_files]\n        gt_basenames = [os.path.basename(f)[: -len(gt_ext)] for f in gt_files]\n        intersect = list(set(input_basenames) & set(gt_basenames))\n        # sort, otherwise each worker may obtain a list[dict] in different order\n        intersect = 
sorted(intersect)\n        logger.warn(\"Will use their intersection of {} files.\".format(len(intersect)))\n        input_files = [os.path.join(image_root, f + image_ext) for f in intersect]\n        gt_files = [os.path.join(gt_root, f + gt_ext) for f in intersect]\n\n    logger.info(\n        \"Loaded {} images with semantic segmentation from {}\".format(len(input_files), image_root)\n    )\n\n    dataset_dicts = []\n    for (img_path, gt_path) in zip(input_files, gt_files):\n        record = {}\n        record[\"file_name\"] = img_path\n        record[\"sem_seg_file_name\"] = gt_path\n        record[\"image_id\"] = file2id(image_root, img_path)\n        assert record[\"image_id\"] == file2id(\n            gt_root, gt_path\n        ), \"there is no ground truth for {}\".format(img_path)\n        with PathManager.open(gt_path, \"rb\") as f:\n            img = Image.open(f)\n            w, h = img.size\n        record[\"height\"] = h\n        record[\"width\"] = w\n        dataset_dicts.append(record)\n\n    return dataset_dicts\n\n\nif __name__ == \"__main__\":\n    \"\"\"\n    Test the SOBA json dataset loader.\n\n    Usage:\n        python -m detectron2.data.datasets.soba \\\n            path/to/json path/to/image_root dataset_name\n\n        \"dataset_name\" can be \"soba_2014_minival_100\", or other\n        pre-registered ones\n    \"\"\"\n    import numpy as np\n    from detectron2.utils.logger import setup_logger\n    from detectron2.utils.visualizer import Visualizer\n    import detectron2.data.datasets  # noqa # add pre-defined metadata\n    import sys\n\n    logger = setup_logger(name=__name__)\n    assert sys.argv[3] in DatasetCatalog.list()\n    meta = MetadataCatalog.get(sys.argv[3])\n\n    dicts = load_soba_json(sys.argv[1], sys.argv[2], sys.argv[3])\n    logger.info(\"Done loading {} samples.\".format(len(dicts)))\n\n    dirname = \"soba-data-vis\"\n    os.makedirs(dirname, exist_ok=True)\n    for d in dicts:\n        img = 
np.array(Image.open(d[\"file_name\"]))\n        visualizer = Visualizer(img, metadata=meta)\n        vis = visualizer.draw_dataset_dict(d)\n        fpath = os.path.join(dirname, os.path.basename(d[\"file_name\"]))\n        vis.save(fpath)\n"
  },
  {
    "path": "detectron2/data/detection_utils.py",
    "content": "# -*- coding: utf-8 -*-\n# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\n\n\"\"\"\nCommon data processing utilities that are used in a\ntypical object detection data pipeline.\n\"\"\"\nimport logging\nimport numpy as np\nimport torch\nfrom fvcore.common.file_io import PathManager\nfrom PIL import Image\nimport pysobatools.mask as mask_util\n\nfrom detectron2.structures import (\n    BitMasks,\n    Boxes,\n    BoxMode,\n    Instances,\n    Keypoints,\n    PolygonMasks,\n    RotatedBoxes,\n)\n\nfrom . import transforms as T\nfrom .catalog import MetadataCatalog\n\n\nclass SizeMismatchError(ValueError):\n    \"\"\"\n    When loaded image has difference width/height compared with annotation.\n    \"\"\"\n\n\ndef read_image(file_name, format=None):\n    \"\"\"\n    Read an image into the given format.\n\n    Args:\n        dataset_dict (dict): Metadata of one image, in Detectron2 Dataset format.\n        format (dict): one of the supported image modes in PIL, or \"BGR\"\n\n    Returns:\n        image (np.ndarray): an HWC image\n    \"\"\"\n    with PathManager.open(file_name, \"rb\") as f:\n        image = Image.open(f)\n\n        if format is not None:\n            # PIL only supports RGB, so convert to RGB and flip channels over below\n            conversion_format = format\n            if format == \"BGR\":\n                conversion_format = \"RGB\"\n            image = image.convert(conversion_format)\n        image = np.asarray(image)\n        if format == \"BGR\":\n            # flip channels if needed\n            image = image[:, :, ::-1]\n        # PIL squeezes out the channel dimension for \"L\", so make it HWC\n        if format == \"L\":\n            image = np.expand_dims(image, -1)\n        return image\n\n\ndef check_image_size(dataset_dict, image):\n    \"\"\"\n    Raise an error if the image does not match the size specified in the dict.\n    \"\"\"\n    if \"width\" in dataset_dict or \"height\" in 
dataset_dict:\n        image_wh = (image.shape[1], image.shape[0])\n        expected_wh = (dataset_dict[\"width\"], dataset_dict[\"height\"])\n        if not image_wh == expected_wh:\n            raise SizeMismatchError(\n                \"mismatch (W,H), got {}, expect {}, name {}\".format(image_wh, expected_wh,dataset_dict[\"file_name\"])\n            )\n\n\ndef transform_proposals(dataset_dict, image_shape, transforms, min_box_side_len, proposal_topk):\n    \"\"\"\n    Apply transformations to the proposals in dataset_dict, if any.\n\n    Args:\n        dataset_dict (dict): a dict read from the dataset, possibly\n            contains fields \"proposal_boxes\", \"proposal_objectness_logits\", \"proposal_bbox_mode\"\n        image_shape (tuple): height, width\n        transforms (TransformList):\n        min_box_side_len (int): keep proposals with at least this size\n        proposal_topk (int): only keep top-K scoring proposals\n\n    The input dict is modified in-place, with abovementioned keys removed. A new\n    key \"proposals\" will be added. 
Its value is an `Instances`\n    object which contains the transformed proposals in its field\n    \"proposal_boxes\" and \"objectness_logits\".\n    \"\"\"\n    if \"proposal_boxes\" in dataset_dict:\n        # Transform proposal boxes\n        boxes = transforms.apply_box(\n            BoxMode.convert(\n                dataset_dict.pop(\"proposal_boxes\"),\n                dataset_dict.pop(\"proposal_bbox_mode\"),\n                BoxMode.XYXY_ABS,\n            )\n        )\n        boxes = Boxes(boxes)\n        objectness_logits = torch.as_tensor(\n            dataset_dict.pop(\"proposal_objectness_logits\").astype(\"float32\")\n        )\n\n        boxes.clip(image_shape)\n        keep = boxes.nonempty(threshold=min_box_side_len)\n        boxes = boxes[keep]\n        objectness_logits = objectness_logits[keep]\n\n        proposals = Instances(image_shape)\n        proposals.proposal_boxes = boxes[:proposal_topk]\n        proposals.objectness_logits = objectness_logits[:proposal_topk]\n        dataset_dict[\"proposals\"] = proposals\n\n\ndef transform_instance_annotations(\n    annotation, transforms, image_size, *, keypoint_hflip_indices=None\n):\n    \"\"\"\n    Apply transforms to box, segmentation and keypoints of annotations of a single instance.\n\n    It will use `transforms.apply_box` for the box, and\n    `transforms.apply_coords` for segmentation polygons & keypoints.\n    If you need anything more specially designed for each data structure,\n    you'll need to implement your own version of this function or the transforms.\n\n    Args:\n        annotation (dict): dict of instance annotations for a single instance.\n        transforms (TransformList):\n        image_size (tuple): the height, width of the transformed image\n        keypoint_hflip_indices (ndarray[int]): see `create_keypoint_hflip_indices`.\n\n    Returns:\n        dict:\n            the same input dict with fields \"bbox\", \"segmentation\", \"keypoints\"\n            transformed 
according to `transforms`.\n            The \"bbox_mode\" field will be set to XYXY_ABS.\n    \"\"\"\n    bbox = BoxMode.convert(annotation[\"bbox\"], annotation[\"bbox_mode\"], BoxMode.XYXY_ABS)\n    if 'light' in annotation.keys():\n        light = BoxMode.convert(annotation['light'], annotation[\"bbox_mode\"],BoxMode.XYXY_ABS)\n        annotation[\"light\"] = transforms.apply_box([light])[0]\n    # Note that bbox is 1d (per-instance bounding box)\n    annotation[\"bbox\"] = transforms.apply_box([bbox])[0]\n    annotation[\"bbox_mode\"] = BoxMode.XYXY_ABS\n\n    # annotation[\"light\"] = transforms.apply_box([light])[0]\n\n    if \"segmentation\" in annotation:\n        # each instance contains 1 or more polygons\n        # print(annotation[\"segmentation\"])\n        # polygons = [np.asarray(p).reshape(-1, 2) for p in annotation[\"segmentation\"]]\n        # annotation[\"segmentation\"] = [p.reshape(-1) for p in transforms.apply_polygons(polygons)]\n        segm = annotation[\"segmentation\"]\n        if isinstance(segm, list):\n            # polygons\n            polygons = [np.asarray(p).reshape(-1, 2) for p in segm]\n            annotation[\"segmentation\"] = [\n                p.reshape(-1) for p in transforms.apply_polygons(polygons)\n            ]\n        elif isinstance(segm, dict):\n            # RLE\n            mask = mask_util.decode(segm)\n            mask = transforms.apply_segmentation(mask)\n            assert tuple(mask.shape[:2]) == image_size\n            annotation[\"segmentation\"] = mask\n        else:\n            raise ValueError(\n                \"Cannot transform segmentation of type '{}'!\"\n                \"Supported types are: polygons as list[list[float] or ndarray],\"\n                \" COCO-style RLE as a dict.\".format(type(segm))\n            )\n\n    if \"keypoints\" in annotation:\n        keypoints = transform_keypoint_annotations(\n            annotation[\"keypoints\"], transforms, image_size, keypoint_hflip_indices\n     
   )\n        annotation[\"keypoints\"] = keypoints\n\n    return annotation\n\n\ndef transform_keypoint_annotations(keypoints, transforms, image_size, keypoint_hflip_indices=None):\n    \"\"\"\n    Transform keypoint annotations of an image.\n\n    Args:\n        keypoints (list[float]): Nx3 float in Detectron2 Dataset format.\n        transforms (TransformList):\n        image_size (tuple): the height, width of the transformed image\n        keypoint_hflip_indices (ndarray[int]): see `create_keypoint_hflip_indices`.\n    \"\"\"\n    # (N*3,) -> (N, 3)\n    keypoints = np.asarray(keypoints, dtype=\"float64\").reshape(-1, 3)\n    keypoints[:, :2] = transforms.apply_coords(keypoints[:, :2])\n\n    # This assumes that HorizFlipTransform is the only one that does flip\n    do_hflip = sum(isinstance(t, T.HFlipTransform) for t in transforms.transforms) % 2 == 1\n\n    # Alternative way: check if probe points was horizontally flipped.\n    # probe = np.asarray([[0.0, 0.0], [image_width, 0.0]])\n    # probe_aug = transforms.apply_coords(probe.copy())\n    # do_hflip = np.sign(probe[1][0] - probe[0][0]) != np.sign(probe_aug[1][0] - probe_aug[0][0])  # noqa\n\n    # If flipped, swap each keypoint with its opposite-handed equivalent\n    if do_hflip:\n        assert keypoint_hflip_indices is not None\n        keypoints = keypoints[keypoint_hflip_indices, :]\n\n    # Maintain COCO convention that if visibility == 0, then x, y = 0\n    # TODO may need to reset visibility for cropped keypoints,\n    # but it does not matter for our existing algorithms\n    keypoints[keypoints[:, 2] == 0] = 0\n    return keypoints\n\n\ndef annotations_to_instances(annos, image_size, mask_format=\"polygon\"):\n    \"\"\"\n    Create an :class:`Instances` object used by the models,\n    from instance annotations in the dataset dict.\n\n    Args:\n        annos (list[dict]): a list of instance annotations in one image, each\n            element for one instance.\n        image_size (tuple): height, 
width\n\n    Returns:\n        Instances:\n            It will contain fields \"gt_boxes\", \"gt_classes\",\n            \"gt_masks\", \"gt_keypoints\", if they can be obtained from `annos`.\n            This is the format that builtin models expect.\n    \"\"\"\n    boxes = [BoxMode.convert(obj[\"bbox\"], obj[\"bbox_mode\"], BoxMode.XYXY_ABS) for obj in annos]\n    \n    target = Instances(image_size)\n    boxes = target.gt_boxes = Boxes(boxes)\n    if 'light' in annos[0].keys():\n        light = [BoxMode.convert(obj['light'],obj[\"bbox_mode\"], BoxMode.XYXY_ABS) for obj in annos]\n        light = target.gt_light = Boxes(light)\n        light.clip(image_size)\n    boxes.clip(image_size)\n    \n\n    classes = [obj[\"category_id\"] for obj in annos]\n    classes = torch.tensor(classes, dtype=torch.int64)\n    target.gt_classes = classes\n\n    if len(annos) and \"segmentation\" in annos[0]:\n        polygons = [obj[\"segmentation\"] for obj in annos]\n        if mask_format == \"polygon\":\n            masks = PolygonMasks(polygons)\n        else:\n            assert mask_format == \"bitmask\", mask_format\n            masks = []\n            for segm in polygons:\n                if isinstance(segm, list):\n                    # polygon\n                    masks.append(BitMasks.from_polygon_masks(segm, *image_size))\n                elif isinstance(segm, dict):\n                    # COCO RLE\n                    masks.append(mask_util.decode(segm))\n                elif isinstance(segm, np.ndarray):\n                    assert segm.ndim == 2, \"Expect segmentation of 2 dimensions, got {}.\".format(\n                        segm.ndim\n                    )\n                    # mask array\n                    masks.append(segm)\n                else:\n                    raise ValueError(\n                        \"Cannot convert segmentation of type '{}' to BitMasks!\"\n                        \"Supported types are: polygons as list[list[float] or ndarray],\"\n 
                       \" COCO-style RLE as a dict, or a binary segmentation mask \"\n                        \" in a 2D numpy array of shape HxW.\".format(type(segm))\n                    )\n            # torch.from_numpy does not support array with negative stride.\n            masks = BitMasks(\n                torch.stack([torch.from_numpy(np.ascontiguousarray(x)) for x in masks])\n            )\n        target.gt_masks = masks\n\n    if len(annos) and \"keypoints\" in annos[0]:\n        kpts = [obj.get(\"keypoints\", []) for obj in annos]\n        target.gt_keypoints = Keypoints(kpts)\n\n    return target\n\n\ndef annotations_to_instances_rotated(annos, image_size):\n    \"\"\"\n    Create an :class:`Instances` object used by the models,\n    from instance annotations in the dataset dict.\n    Compared to `annotations_to_instances`, this function is for rotated boxes only\n\n    Args:\n        annos (list[dict]): a list of instance annotations in one image, each\n            element for one instance.\n        image_size (tuple): height, width\n\n    Returns:\n        Instances:\n            Containing fields \"gt_boxes\", \"gt_classes\",\n            if they can be obtained from `annos`.\n            This is the format that builtin models expect.\n    \"\"\"\n    boxes = [obj[\"bbox\"] for obj in annos]\n    target = Instances(image_size)\n    boxes = target.gt_boxes = RotatedBoxes(boxes)\n    boxes.clip(image_size)\n\n    classes = [obj[\"category_id\"] for obj in annos]\n    classes = torch.tensor(classes, dtype=torch.int64)\n    target.gt_classes = classes\n\n    return target\n\n\ndef filter_empty_instances(instances, by_box=True, by_mask=True):\n    \"\"\"\n    Filter out empty instances in an `Instances` object.\n\n    Args:\n        instances (Instances):\n        by_box (bool): whether to filter out instances with empty boxes\n        by_mask (bool): whether to filter out instances with empty masks\n\n    Returns:\n        Instances: the filtered 
instances.\n    \"\"\"\n    assert by_box or by_mask\n    r = []\n    if by_box:\n        r.append(instances.gt_boxes.nonempty())\n    if instances.has(\"gt_masks\") and by_mask:\n        r.append(instances.gt_masks.nonempty() & (instances.gt_masks.tensor.sum(1).sum(1) > 20.0))\n\n    # TODO: can also filter visible keypoints\n\n    if not r:\n        return instances\n    m = r[0]\n    for x in r[1:]:\n        m = m & x\n    return instances[m]\n\n\ndef create_keypoint_hflip_indices(dataset_names):\n    \"\"\"\n    Args:\n        dataset_names (list[str]): list of dataset names\n    Returns:\n        ndarray[int]: a vector of size=#keypoints, storing the\n        horizontally-flipped keypoint indices.\n    \"\"\"\n\n    check_metadata_consistency(\"keypoint_names\", dataset_names)\n    check_metadata_consistency(\"keypoint_flip_map\", dataset_names)\n\n    meta = MetadataCatalog.get(dataset_names[0])\n    names = meta.keypoint_names\n    # print(names)\n    # TODO flip -> hflip\n    flip_map = dict(meta.keypoint_flip_map)\n    flip_map.update({v: k for k, v in flip_map.items()})\n    flipped_names = [i if i not in flip_map else flip_map[i] for i in names]\n    flip_indices = [names.index(i) for i in flipped_names]\n    return np.asarray(flip_indices)\n\n\ndef gen_crop_transform_with_instance(crop_size, image_size, instance):\n    \"\"\"\n    Generate a CropTransform so that the cropping region contains\n    the center of the given instance.\n\n    Args:\n        crop_size (tuple): h, w in pixels\n        image_size (tuple): h, w\n        instance (dict): an annotation dict of one instance, in Detectron2's\n            dataset format.\n    \"\"\"\n    crop_size = np.asarray(crop_size, dtype=np.int32)\n    bbox = BoxMode.convert(instance[\"bbox\"], instance[\"bbox_mode\"], BoxMode.XYXY_ABS)\n    center_yx = (bbox[1] + bbox[3]) * 0.5, (bbox[0] + bbox[2]) * 0.5\n\n    min_yx = np.maximum(np.floor(center_yx).astype(np.int32) - crop_size, 0)\n    max_yx = 
np.maximum(np.asarray(image_size, dtype=np.int32) - crop_size, 0)\n    max_yx = np.minimum(max_yx, np.ceil(center_yx).astype(np.int32))\n\n    y0 = np.random.randint(min_yx[0], max_yx[0] + 1)\n    x0 = np.random.randint(min_yx[1], max_yx[1] + 1)\n    return T.CropTransform(x0, y0, crop_size[1], crop_size[0])\n\n\ndef check_metadata_consistency(key, dataset_names):\n    \"\"\"\n    Check that the datasets have consistent metadata.\n\n    Args:\n        key (str): a metadata key\n        dataset_names (list[str]): a list of dataset names\n\n    Raises:\n        AttributeError: if the key does not exist in the metadata\n        ValueError: if the given datasets do not have the same metadata values defined by key\n    \"\"\"\n    if len(dataset_names) == 0:\n        return\n    logger = logging.getLogger(__name__)\n    entries_per_dataset = [getattr(MetadataCatalog.get(d), key) for d in dataset_names]\n    for idx, entry in enumerate(entries_per_dataset):\n        if entry != entries_per_dataset[0]:\n            logger.error(\n                \"Metadata '{}' for dataset '{}' is '{}'\".format(key, dataset_names[idx], str(entry))\n            )\n            logger.error(\n                \"Metadata '{}' for dataset '{}' is '{}'\".format(\n                    key, dataset_names[0], str(entries_per_dataset[0])\n                )\n            )\n            raise ValueError(\"Datasets have different metadata '{}'!\".format(key))\n\n\ndef build_transform_gen(cfg, is_train):\n    \"\"\"\n    Create a list of :class:`TransformGen` from config.\n    Now it includes resizing and flipping.\n\n    Returns:\n        list[TransformGen]\n    \"\"\"\n    if is_train:\n        min_size = cfg.INPUT.MIN_SIZE_TRAIN\n        max_size = cfg.INPUT.MAX_SIZE_TRAIN\n        sample_style = cfg.INPUT.MIN_SIZE_TRAIN_SAMPLING\n    else:\n        min_size = cfg.INPUT.MIN_SIZE_TEST\n        max_size = cfg.INPUT.MAX_SIZE_TEST\n        sample_style = \"choice\"\n    if sample_style == \"range\":\n      
  assert len(min_size) == 2, \"more than 2 ({}) min_size(s) are provided for ranges\".format(\n            len(min_size)\n        )\n\n    logger = logging.getLogger(__name__)\n    tfm_gens = []\n    if not min_size == 0:  # set to zero to disable resize\n        tfm_gens.append(T.ResizeShortestEdge(min_size, max_size, sample_style))\n    if is_train:\n        tfm_gens.append(T.RandomFlip())\n        logger.info(\"TransformGens used in training: \" + str(tfm_gens))\n    return tfm_gens\n"
  },
  {
    "path": "detectron2/data/samplers/__init__.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nfrom .distributed_sampler import InferenceSampler, RepeatFactorTrainingSampler, TrainingSampler\nfrom .grouped_batch_sampler import GroupedBatchSampler\n\n__all__ = [\n    \"GroupedBatchSampler\",\n    \"TrainingSampler\",\n    \"InferenceSampler\",\n    \"RepeatFactorTrainingSampler\",\n]\n"
  },
  {
    "path": "detectron2/data/samplers/distributed_sampler.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport itertools\nimport math\nfrom collections import defaultdict\nfrom typing import Optional\nimport torch\nfrom torch.utils.data.sampler import Sampler\n\nfrom detectron2.utils import comm\n\n\nclass TrainingSampler(Sampler):\n    \"\"\"\n    In training, we only care about the \"infinite stream\" of training data.\n    So this sampler produces an infinite stream of indices and\n    all workers cooperate to correctly shuffle the indices and sample different indices.\n\n    The samplers in each worker effectively produces `indices[worker_id::num_workers]`\n    where `indices` is an infinite stream of indices consisting of\n    `shuffle(range(size)) + shuffle(range(size)) + ...` (if shuffle is True)\n    or `range(size) + range(size) + ...` (if shuffle is False)\n    \"\"\"\n\n    def __init__(self, size: int, shuffle: bool = True, seed: Optional[int] = None):\n        \"\"\"\n        Args:\n            size (int): the total number of data of the underlying dataset to sample from\n            shuffle (bool): whether to shuffle the indices or not\n            seed (int): the initial seed of the shuffle. Must be the same\n                across all workers. 
If None, will use a random seed shared\n                among workers (require synchronization among all workers).\n        \"\"\"\n        self._size = size\n        assert size > 0\n        self._shuffle = shuffle\n        if seed is None:\n            seed = comm.shared_random_seed()\n        self._seed = int(seed)\n\n        self._rank = comm.get_rank()\n        self._world_size = comm.get_world_size()\n\n    def __iter__(self):\n        start = self._rank\n        yield from itertools.islice(self._infinite_indices(), start, None, self._world_size)\n\n    def _infinite_indices(self):\n        g = torch.Generator()\n        g.manual_seed(self._seed)\n        while True:\n            if self._shuffle:\n                yield from torch.randperm(self._size, generator=g)\n            else:\n                yield from torch.arange(self._size)\n\n\nclass RepeatFactorTrainingSampler(Sampler):\n    \"\"\"\n    Similar to TrainingSampler, but suitable for training on class imbalanced datasets\n    like LVIS. In each epoch, an image may appear multiple times based on its \"repeat\n    factor\". The repeat factor for an image is a function of the frequency the rarest\n    category labeled in that image. The \"frequency of category c\" in [0, 1] is defined\n    as the fraction of images in the training set (without repeats) in which category c\n    appears.\n\n    See https://arxiv.org/abs/1908.03195 (>= v2) Appendix B.2.\n    \"\"\"\n\n    def __init__(self, dataset_dicts, repeat_thresh, shuffle=True, seed=None):\n        \"\"\"\n        Args:\n            dataset_dicts (list[dict]): annotations in Detectron2 dataset format.\n            repeat_thresh (float): frequency threshold below which data is repeated.\n            shuffle (bool): whether to shuffle the indices or not\n            seed (int): the initial seed of the shuffle. Must be the same\n                across all workers. 
If None, will use a random seed shared\n                among workers (require synchronization among all workers).\n        \"\"\"\n        self._shuffle = shuffle\n        if seed is None:\n            seed = comm.shared_random_seed()\n        self._seed = int(seed)\n\n        self._rank = comm.get_rank()\n        self._world_size = comm.get_world_size()\n\n        # Get fractional repeat factors and split into whole number (_int_part)\n        # and fractional (_frac_part) parts.\n        rep_factors = self._get_repeat_factors(dataset_dicts, repeat_thresh)\n        self._int_part = torch.trunc(rep_factors)\n        self._frac_part = rep_factors - self._int_part\n\n    def _get_repeat_factors(self, dataset_dicts, repeat_thresh):\n        \"\"\"\n        Compute (fractional) per-image repeat factors.\n\n        Args:\n            See __init__.\n\n        Returns:\n            torch.Tensor: the i-th element is the repeat factor for the dataset image\n                at index i.\n        \"\"\"\n        # 1. For each category c, compute the fraction of images that contain it: f(c)\n        category_freq = defaultdict(int)\n        for dataset_dict in dataset_dicts:  # For each image (without repeats)\n            cat_ids = {ann[\"category_id\"] for ann in dataset_dict[\"annotations\"]}\n            for cat_id in cat_ids:\n                category_freq[cat_id] += 1\n        num_images = len(dataset_dicts)\n        for k, v in category_freq.items():\n            category_freq[k] = v / num_images\n\n        # 2. For each category c, compute the category-level repeat factor:\n        #    r(c) = max(1, sqrt(t / f(c)))\n        category_rep = {\n            cat_id: max(1.0, math.sqrt(repeat_thresh / cat_freq))\n            for cat_id, cat_freq in category_freq.items()\n        }\n\n        # 3. 
For each image I, compute the image-level repeat factor:\n        #    r(I) = max_{c in I} r(c)\n        rep_factors = []\n        for dataset_dict in dataset_dicts:\n            cat_ids = {ann[\"category_id\"] for ann in dataset_dict[\"annotations\"]}\n            rep_factor = max({category_rep[cat_id] for cat_id in cat_ids})\n            rep_factors.append(rep_factor)\n\n        return torch.tensor(rep_factors, dtype=torch.float32)\n\n    def _get_epoch_indices(self, generator):\n        \"\"\"\n        Create a list of dataset indices (with repeats) to use for one epoch.\n\n        Args:\n            generator (torch.Generator): pseudo random number generator used for\n                stochastic rounding.\n\n        Returns:\n            torch.Tensor: list of dataset indices to use in one epoch. Each index\n                is repeated based on its calculated repeat factor.\n        \"\"\"\n        # Since repeat factors are fractional, we use stochastic rounding so\n        # that the target repeat factor is achieved in expectation over the\n        # course of training\n        rands = torch.rand(len(self._frac_part), generator=generator)\n        rep_factors = self._int_part + (rands < self._frac_part).float()\n        # Construct a list of indices in which we repeat images as specified\n        indices = []\n        for dataset_index, rep_factor in enumerate(rep_factors):\n            indices.extend([dataset_index] * int(rep_factor.item()))\n        return torch.tensor(indices, dtype=torch.int64)\n\n    def __iter__(self):\n        start = self._rank\n        yield from itertools.islice(self._infinite_indices(), start, None, self._world_size)\n\n    def _infinite_indices(self):\n        g = torch.Generator()\n        g.manual_seed(self._seed)\n        while True:\n            # Sample indices with repeats determined by stochastic rounding; each\n            # \"epoch\" may have a slightly different size due to the rounding.\n            indices = 
self._get_epoch_indices(g)\n            if self._shuffle:\n                randperm = torch.randperm(len(indices), generator=g)\n                yield from indices[randperm]\n            else:\n                yield from indices\n\n\nclass InferenceSampler(Sampler):\n    \"\"\"\n    Produce indices for inference.\n    Inference needs to run on the __exact__ set of samples,\n    therefore when the total number of samples is not divisible by the number of workers,\n    this sampler produces different number of samples on different workers.\n    \"\"\"\n\n    def __init__(self, size: int):\n        \"\"\"\n        Args:\n            size (int): the total number of data of the underlying dataset to sample from\n        \"\"\"\n        self._size = size\n        assert size > 0\n        self._rank = comm.get_rank()\n        self._world_size = comm.get_world_size()\n\n        shard_size = (self._size - 1) // self._world_size + 1\n        begin = shard_size * self._rank\n        end = min(shard_size * (self._rank + 1), self._size)\n        self._local_indices = range(begin, end)\n\n    def __iter__(self):\n        yield from self._local_indices\n\n    def __len__(self):\n        return len(self._local_indices)\n"
  },
  {
    "path": "detectron2/data/samplers/grouped_batch_sampler.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport torch\nfrom torch.utils.data.sampler import BatchSampler, Sampler\n\n\nclass GroupedBatchSampler(BatchSampler):\n    \"\"\"\n    Wraps another sampler to yield a mini-batch of indices.\n    It enforces that the batch only contain elements from the same group.\n    It also tries to provide mini-batches which follows an ordering which is\n    as close as possible to the ordering from the original sampler.\n\n    Arguments:\n        sampler (Sampler): Base sampler.\n        group_ids (list[int]): If the sampler produces indices in range [0, N),\n            `group_ids` must be a list of `N` ints which contains the group id of each sample.\n            The group ids must be a continuous set of integers starting from\n            0, i.e. they must be in the range [0, num_groups).\n        batch_size (int): Size of mini-batch.\n    \"\"\"\n\n    def __init__(self, sampler, group_ids, batch_size):\n        if not isinstance(sampler, Sampler):\n            raise ValueError(\n                \"sampler should be an instance of \"\n                \"torch.utils.data.Sampler, but got sampler={}\".format(sampler)\n            )\n        self.sampler = sampler\n        self.group_ids = torch.as_tensor(group_ids)\n        assert self.group_ids.dim() == 1\n        self.batch_size = batch_size\n        self.groups = torch.unique(self.group_ids).sort(0)[0]\n        # group ids must range in [0, #group)\n        assert self.groups[0].item() == 0 and self.groups[-1].item() == len(self.groups) - 1\n\n        # buffer the indices of each group until batch size is reached\n        self.buffer_per_group = [[] for k in self.groups]\n\n    def __iter__(self):\n        for idx in self.sampler:\n            group_id = self.group_ids[idx]\n            group_buffer = self.buffer_per_group[group_id]\n            group_buffer.append(idx)\n            if len(group_buffer) == self.batch_size:\n            
    yield group_buffer[:]  # yield a copy of the list\n                del group_buffer[:]\n"
  },
  {
    "path": "detectron2/data/transforms/__init__.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nfrom .transform import *\nfrom fvcore.transforms.transform import *\nfrom .transform_gen import *\n\n__all__ = [k for k in globals().keys() if not k.startswith(\"_\")]\n"
  },
  {
    "path": "detectron2/data/transforms/transform.py",
    "content": "# -*- coding: utf-8 -*-\n# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\n# File: transform.py\n\nimport numpy as np\nfrom fvcore.transforms.transform import HFlipTransform, NoOpTransform, Transform\nfrom PIL import Image\n\n__all__ = [\"ExtentTransform\", \"ResizeTransform\"]\n\n\nclass ExtentTransform(Transform):\n    \"\"\"\n    Extracts a subregion from the source image and scales it to the output size.\n\n    The fill color is used to map pixels from the source rect that fall outside\n    the source image.\n\n    See: https://pillow.readthedocs.io/en/latest/PIL.html#PIL.ImageTransform.ExtentTransform\n    \"\"\"\n\n    def __init__(self, src_rect, output_size, interp=Image.LINEAR, fill=0):\n        \"\"\"\n        Args:\n            src_rect (x0, y0, x1, y1): src coordinates\n            output_size (h, w): dst image size\n            interp: PIL interpolation methods\n            fill: Fill color used when src_rect extends outside image\n        \"\"\"\n        super().__init__()\n        self._set_attributes(locals())\n\n    def apply_image(self, img, interp=None):\n        h, w = self.output_size\n        ret = Image.fromarray(img).transform(\n            size=(w, h),\n            method=Image.EXTENT,\n            data=self.src_rect,\n            resample=interp if interp else self.interp,\n            fill=self.fill,\n        )\n        return np.asarray(ret)\n\n    def apply_coords(self, coords):\n        # Transform image center from source coordinates into output coordinates\n        # and then map the new origin to the corner of the output image.\n        h, w = self.output_size\n        x0, y0, x1, y1 = self.src_rect\n        new_coords = coords.astype(np.float32)\n        new_coords[:, 0] -= 0.5 * (x0 + x1)\n        new_coords[:, 1] -= 0.5 * (y0 + y1)\n        new_coords[:, 0] *= w / (x1 - x0)\n        new_coords[:, 1] *= h / (y1 - y0)\n        new_coords[:, 0] += 0.5 * w\n        new_coords[:, 1] += 0.5 * h\n   
     return new_coords\n\n    def apply_segmentation(self, segmentation):\n        segmentation = self.apply_image(segmentation, interp=Image.NEAREST)\n        return segmentation\n\n\nclass ResizeTransform(Transform):\n    \"\"\"\n    Resize the image to a target size.\n    \"\"\"\n\n    def __init__(self, h, w, new_h, new_w, interp):\n        \"\"\"\n        Args:\n            h, w (int): original image size\n            new_h, new_w (int): new image size\n            interp: PIL interpolation methods\n        \"\"\"\n        # TODO decide on PIL vs opencv\n        super().__init__()\n        self._set_attributes(locals())\n\n    def apply_image(self, img, interp=None):\n        assert img.shape[:2] == (self.h, self.w)\n        pil_image = Image.fromarray(img)\n        interp_method = interp if interp is not None else self.interp\n        pil_image = pil_image.resize((self.new_w, self.new_h), interp_method)\n        ret = np.asarray(pil_image)\n        return ret\n\n    def apply_coords(self, coords):\n        coords[:, 0] = coords[:, 0] * (self.new_w * 1.0 / self.w)\n        coords[:, 1] = coords[:, 1] * (self.new_h * 1.0 / self.h)\n        return coords\n\n    def apply_segmentation(self, segmentation):\n        segmentation = self.apply_image(segmentation, interp=Image.NEAREST)\n        return segmentation\n\n\ndef HFlip_rotated_box(transform, rotated_boxes):\n    \"\"\"\n    Apply the horizontal flip transform on an rotated boxes.\n\n    Args:\n        rotated_boxes (ndarray): Nx5 floating point array of\n            (x_center, y_center, width, height, angle_degrees) format\n            in absolute coordinates.\n    \"\"\"\n    # Transform x_center\n    rotated_boxes[:, 0] = transform.width - rotated_boxes[:, 0]\n    # Transform angle\n    rotated_boxes[:, 4] = -rotated_boxes[:, 4]\n    return rotated_boxes\n\n\ndef Resize_rotated_box(transform, rotated_boxes):\n    # Note: when scale_factor_x != scale_factor_y,\n    # the rotated box does not preserve the 
rectangular shape when the angle\n    # is not a multiple of 90 degrees under resize transformation.\n    # Instead, the shape is a parallelogram (that has skew)\n    # Here we make an approximation by fitting a rotated rectangle to the\n    # parallelogram that shares the same midpoints on the left and right edge\n    scale_factor_x = transform.new_w * 1.0 / transform.w\n    scale_factor_y = transform.new_h * 1.0 / transform.h\n    rotated_boxes[:, 0] *= scale_factor_x\n    rotated_boxes[:, 1] *= scale_factor_y\n    theta = rotated_boxes[:, 4] * np.pi / 180.0\n    c = np.cos(theta)\n    s = np.sin(theta)\n\n    # In image space, y is top->down and x is left->right\n    # Consider the local coordinate system for the rotated box,\n    # where the box center is located at (0, 0), and the four vertices ABCD are\n    # A(-w / 2, -h / 2), B(w / 2, -h / 2), C(w / 2, h / 2), D(-w / 2, h / 2)\n    # the midpoint of the left edge AD of the rotated box E is:\n    # E = (A+D)/2 = (-w / 2, 0)\n    # the midpoint of the top edge AB of the rotated box F is:\n    # F(0, -h / 2)\n    # To get the old coordinates in the global system, apply the rotation transformation\n    # (Note: the right-handed coordinate system for image space is yOx):\n    # (old_x, old_y) = (s * y + c * x, c * y - s * x)\n    # E(old) = (s * 0 + c * (-w/2), c * 0 - s * (-w/2)) = (-c * w / 2, s * w / 2)\n    # F(old) = (s * (-h / 2) + c * 0, c * (-h / 2) - s * 0) = (-s * h / 2, -c * h / 2)\n    # After applying the scaling factor (sfx, sfy):\n    # E(new) = (-sfx * c * w / 2, sfy * s * w / 2)\n    # F(new) = (-sfx * s * h / 2, -sfy * c * h / 2)\n    # The new width after scaling transformation becomes:\n\n    # w(new) = |E(new) - O| * 2\n    #        = sqrt[(sfx * c * w / 2)^2 + (sfy * s * w / 2)^2] * 2\n    #        = sqrt[(sfx * c)^2 + (sfy * s)^2] * w\n    # i.e., scale_factor_w = sqrt[(sfx * c)^2 + (sfy * s)^2]\n    #\n    # For example,\n    # when angle = 0 or 180, |c| = 1, s = 0, scale_factor_w == 
scale_factor_x;\n    # when |angle| = 90, c = 0, |s| = 1, scale_factor_w == scale_factor_y\n    rotated_boxes[:, 2] *= np.sqrt(np.square(scale_factor_x * c) + np.square(scale_factor_y * s))\n\n    # h(new) = |F(new) - O| * 2\n    #        = sqrt[(sfx * s * h / 2)^2 + (sfy * c * h / 2)^2] * 2\n    #        = sqrt[(sfx * s)^2 + (sfy * c)^2] * h\n    # i.e., scale_factor_h = sqrt[(sfx * s)^2 + (sfy * c)^2]\n    #\n    # For example,\n    # when angle = 0 or 180, |c| = 1, s = 0, scale_factor_h == scale_factor_y;\n    # when |angle| = 90, c = 0, |s| = 1, scale_factor_h == scale_factor_x\n    rotated_boxes[:, 3] *= np.sqrt(np.square(scale_factor_x * s) + np.square(scale_factor_y * c))\n\n    # The angle is the rotation angle from y-axis in image space to the height\n    # vector (top->down in the box's local coordinate system) of the box in CCW.\n    #\n    # angle(new) = angle_yOx(O - F(new))\n    #            = angle_yOx( (sfx * s * h / 2, sfy * c * h / 2) )\n    #            = atan2(sfx * s * h / 2, sfy * c * h / 2)\n    #            = atan2(sfx * s, sfy * c)\n    #\n    # For example,\n    # when sfx == sfy, angle(new) == atan2(s, c) == angle(old)\n    rotated_boxes[:, 4] = np.arctan2(scale_factor_x * s, scale_factor_y * c) * 180 / np.pi\n\n    return rotated_boxes\n\n\nHFlipTransform.register_type(\"rotated_box\", HFlip_rotated_box)\nNoOpTransform.register_type(\"rotated_box\", lambda t, x: x)\nResizeTransform.register_type(\"rotated_box\", Resize_rotated_box)\n"
  },
  {
    "path": "detectron2/data/transforms/transform_gen.py",
    "content": "# -*- coding: utf-8 -*-\n# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\n# File: transformer.py\n\nimport inspect\nimport numpy as np\nimport pprint\nimport sys\nfrom abc import ABCMeta, abstractmethod\nfrom fvcore.transforms.transform import (\n    BlendTransform,\n    CropTransform,\n    HFlipTransform,\n    NoOpTransform,\n    Transform,\n    TransformList,\n)\nfrom PIL import Image\n\nfrom .transform import ExtentTransform, ResizeTransform\n\n__all__ = [\n    \"RandomBrightness\",\n    \"RandomContrast\",\n    \"RandomCrop\",\n    \"RandomExtent\",\n    \"RandomFlip\",\n    \"RandomSaturation\",\n    \"RandomLighting\",\n    \"Resize\",\n    \"ResizeShortestEdge\",\n    \"TransformGen\",\n    \"apply_transform_gens\",\n]\n\n\ndef check_dtype(img):\n    assert isinstance(img, np.ndarray), \"[TransformGen] Needs an numpy array, but got a {}!\".format(\n        type(img)\n    )\n    assert not isinstance(img.dtype, np.integer) or (\n        img.dtype == np.uint8\n    ), \"[TransformGen] Got image of type {}, use uint8 or floating points instead!\".format(\n        img.dtype\n    )\n    assert img.ndim in [2, 3], img.ndim\n\n\nclass TransformGen(metaclass=ABCMeta):\n    \"\"\"\n    TransformGen takes an image of type uint8 in range [0, 255], or\n    floating point in range [0, 1] or [0, 255] as input.\n\n    It creates a :class:`Transform` based on the given image, sometimes with randomness.\n    The transform can then be used to transform images\n    or other data (boxes, points, annotations, etc.) 
associated with it.\n\n    The assumption made in this class\n    is that the image itself is sufficient to instantiate a transform.\n    When this assumption is not true, you need to create the transforms by your own.\n\n    A list of `TransformGen` can be applied with :func:`apply_transform_gens`.\n    \"\"\"\n\n    def _init(self, params=None):\n        if params:\n            for k, v in params.items():\n                if k != \"self\" and not k.startswith(\"_\"):\n                    setattr(self, k, v)\n\n    @abstractmethod\n    def get_transform(self, img):\n        pass\n\n    def _rand_range(self, low=1.0, high=None, size=None):\n        \"\"\"\n        Uniform float random number between low and high.\n        \"\"\"\n        if high is None:\n            low, high = 0, low\n        if size is None:\n            size = []\n        return np.random.uniform(low, high, size)\n\n    def __repr__(self):\n        \"\"\"\n        Produce something like:\n        \"MyTransformGen(field1={self.field1}, field2={self.field2})\"\n        \"\"\"\n        try:\n            argspec = inspect.getargspec(self.__init__)\n            assert argspec.varargs is None, \"The default __repr__ doesn't work for varargs!\"\n            assert argspec.keywords is None, \"The default __repr__ doesn't work for kwargs!\"\n            fields = argspec.args[1:]\n            index_field_has_default = len(fields) - (\n                0 if argspec.defaults is None else len(argspec.defaults)\n            )\n\n            classname = type(self).__name__\n            argstr = []\n            for idx, f in enumerate(fields):\n                assert hasattr(self, f), (\n                    \"Attribute {} not found! 
\"\n                    \"Default __repr__ only works if attributes match the constructor.\".format(f)\n                )\n                attr = getattr(self, f)\n                if idx >= index_field_has_default:\n                    if attr is argspec.defaults[idx - index_field_has_default]:\n                        continue\n                argstr.append(\"{}={}\".format(f, pprint.pformat(attr)))\n            return \"{}({})\".format(classname, \", \".join(argstr))\n        except AssertionError:\n            return super().__repr__()\n\n    __str__ = __repr__\n\n\nclass RandomFlip(TransformGen):\n    \"\"\"\n    Flip the image horizontally with the given probability.\n\n    TODO Vertical flip to be implemented.\n    \"\"\"\n\n    def __init__(self, prob=0.5):\n        \"\"\"\n        Args:\n            prob (float): probability of flip.\n        \"\"\"\n        horiz, vert = True, False\n        # TODO implement vertical flip when we need it\n        super().__init__()\n\n        if horiz and vert:\n            raise ValueError(\"Cannot do both horiz and vert. 
Please use two Flip instead.\")\n        if not horiz and not vert:\n            raise ValueError(\"At least one of horiz or vert has to be True!\")\n        self._init(locals())\n\n    def get_transform(self, img):\n        _, w = img.shape[:2]\n        do = self._rand_range() < self.prob\n        if do:\n            return HFlipTransform(w)\n        else:\n            return NoOpTransform()\n\n\nclass Resize(TransformGen):\n    \"\"\" Resize image to a target size\"\"\"\n\n    def __init__(self, shape, interp=Image.BILINEAR):\n        \"\"\"\n        Args:\n            shape: (h, w) tuple or a int\n            interp: PIL interpolation method\n        \"\"\"\n        if isinstance(shape, int):\n            shape = (shape, shape)\n        shape = tuple(shape)\n        self._init(locals())\n\n    def get_transform(self, img):\n        return ResizeTransform(\n            img.shape[0], img.shape[1], self.shape[0], self.shape[1], self.interp\n        )\n\n\nclass ResizeShortestEdge(TransformGen):\n    \"\"\"\n    Scale the shorter edge to the given size, with a limit of `max_size` on the longer edge.\n    If `max_size` is reached, then downscale so that the longer edge does not exceed max_size.\n    \"\"\"\n\n    def __init__(\n        self, short_edge_length, max_size=sys.maxsize, sample_style=\"range\", interp=Image.BILINEAR\n    ):\n        \"\"\"\n        Args:\n            short_edge_length (list[int]): If ``sample_style==\"range\"``,\n                a [min, max] interval from which to sample the shortest edge length.\n                If ``sample_style==\"choice\"``, a list of shortest edge lengths to sample from.\n            max_size (int): maximum allowed longest edge length.\n            sample_style (str): either \"range\" or \"choice\".\n        \"\"\"\n        super().__init__()\n        assert sample_style in [\"range\", \"choice\"], sample_style\n\n        self.is_range = sample_style == \"range\"\n        if isinstance(short_edge_length, int):\n       
     short_edge_length = (short_edge_length, short_edge_length)\n        self._init(locals())\n\n    def get_transform(self, img):\n        h, w = img.shape[:2]\n\n        if self.is_range:\n            size = np.random.randint(self.short_edge_length[0], self.short_edge_length[1] + 1)\n        else:\n            size = np.random.choice(self.short_edge_length)\n\n        scale = size * 1.0 / min(h, w)\n        if h < w:\n            newh, neww = size, scale * w\n        else:\n            newh, neww = scale * h, size\n        if max(newh, neww) > self.max_size:\n            scale = self.max_size * 1.0 / max(newh, neww)\n            newh = newh * scale\n            neww = neww * scale\n        neww = int(neww + 0.5)\n        newh = int(newh + 0.5)\n        return ResizeTransform(h, w, newh, neww, self.interp)\n\n\nclass RandomCrop(TransformGen):\n    \"\"\"\n    Randomly crop a subimage out of an image.\n    \"\"\"\n\n    def __init__(self, crop_type: str, crop_size):\n        \"\"\"\n        Args:\n            crop_type (str): one of \"relative_range\", \"relative\", \"absolute\".\n                See `config/defaults.py` for explanation.\n            crop_size (tuple[float]): the relative ratio or absolute pixels of\n                height and width\n        \"\"\"\n        super().__init__()\n        assert crop_type in [\"relative_range\", \"relative\", \"absolute\"]\n        self._init(locals())\n\n    def get_transform(self, img):\n        h, w = img.shape[:2]\n        croph, cropw = self.get_crop_size((h, w))\n        assert h >= croph and w >= cropw, \"Shape computation in {} has bugs.\".format(self)\n        h0 = np.random.randint(h - croph + 1)\n        w0 = np.random.randint(w - cropw + 1)\n        return CropTransform(w0, h0, cropw, croph)\n\n    def get_crop_size(self, image_size):\n        \"\"\"\n        Args:\n            image_size (tuple): height, width\n\n        Returns:\n            crop_size (tuple): height, width in absolute pixels\n        
\"\"\"\n        h, w = image_size\n        if self.crop_type == \"relative\":\n            ch, cw = self.crop_size\n            return int(h * ch + 0.5), int(w * cw + 0.5)\n        elif self.crop_type == \"relative_range\":\n            crop_size = np.asarray(self.crop_size, dtype=np.float32)\n            ch, cw = crop_size + np.random.rand(2) * (1 - crop_size)\n            return int(h * ch + 0.5), int(w * cw + 0.5)\n        elif self.crop_type == \"absolute\":\n            return self.crop_size\n        else:\n            NotImplementedError(\"Unknown crop type {}\".format(self.crop_type))\n\n\nclass RandomExtent(TransformGen):\n    \"\"\"\n    Outputs an image by cropping a random \"subrect\" of the source image.\n\n    The subrect can be parameterized to include pixels outside the source image,\n    in which case they will be set to zeros (i.e. black). The size of the output\n    image will vary with the size of the random subrect.\n    \"\"\"\n\n    def __init__(self, scale_range, shift_range):\n        \"\"\"\n        Args:\n            output_size (h, w): Dimensions of output image\n            scale_range (l, h): Range of input-to-output size scaling factor\n            shift_range (x, y): Range of shifts of the cropped subrect. The rect\n                is shifted by [w / 2 * Uniform(-x, x), h / 2 * Uniform(-y, y)],\n                where (w, h) is the (width, height) of the input image. 
Set each\n                component to zero to crop at the image's center.\n        \"\"\"\n        super().__init__()\n        self._init(locals())\n\n    def get_transform(self, img):\n        img_h, img_w = img.shape[:2]\n\n        # Initialize src_rect to fit the input image.\n        src_rect = np.array([-0.5 * img_w, -0.5 * img_h, 0.5 * img_w, 0.5 * img_h])\n\n        # Apply a random scaling to the src_rect.\n        src_rect *= np.random.uniform(self.scale_range[0], self.scale_range[1])\n\n        # Apply a random shift to the coordinates origin.\n        src_rect[0::2] += self.shift_range[0] * img_w * (np.random.rand() - 0.5)\n        src_rect[1::2] += self.shift_range[1] * img_h * (np.random.rand() - 0.5)\n\n        # Map src_rect coordinates into image coordinates (center at corner).\n        src_rect[0::2] += 0.5 * img_w\n        src_rect[1::2] += 0.5 * img_h\n\n        return ExtentTransform(\n            src_rect=(src_rect[0], src_rect[1], src_rect[2], src_rect[3]),\n            output_size=(int(src_rect[3] - src_rect[1]), int(src_rect[2] - src_rect[0])),\n        )\n\n\nclass RandomContrast(TransformGen):\n    \"\"\"\n    Randomly transforms image contrast.\n\n    Contrast intensity is uniformly sampled in (intensity_min, intensity_max).\n    - intensity < 1 will reduce contrast\n    - intensity = 1 will preserve the input image\n    - intensity > 1 will increase contrast\n\n    See: https://pillow.readthedocs.io/en/3.0.x/reference/ImageEnhance.html\n    \"\"\"\n\n    def __init__(self, intensity_min, intensity_max):\n        \"\"\"\n        Args:\n            intensity_min (float): Minimum augmentation\n            intensity_max (float): Maximum augmentation\n        \"\"\"\n        super().__init__()\n        self._init(locals())\n\n    def get_transform(self, img):\n        w = np.random.uniform(self.intensity_min, self.intensity_max)\n        return BlendTransform(src_image=img.mean(), src_weight=1 - w, dst_weight=w)\n\n\nclass 
RandomBrightness(TransformGen):\n    \"\"\"\n    Randomly transforms image brightness.\n\n    Brightness intensity is uniformly sampled in (intensity_min, intensity_max).\n    - intensity < 1 will reduce brightness\n    - intensity = 1 will preserve the input image\n    - intensity > 1 will increase brightness\n\n    See: https://pillow.readthedocs.io/en/3.0.x/reference/ImageEnhance.html\n    \"\"\"\n\n    def __init__(self, intensity_min, intensity_max):\n        \"\"\"\n        Args:\n            intensity_min (float): Minimum augmentation\n            intensity_max (float): Maximum augmentation\n        \"\"\"\n        super().__init__()\n        self._init(locals())\n\n    def get_transform(self, img):\n        w = np.random.uniform(self.intensity_min, self.intensity_max)\n        return BlendTransform(src_image=0, src_weight=1 - w, dst_weight=w)\n\n\nclass RandomSaturation(TransformGen):\n    \"\"\"\n    Randomly transforms image saturation.\n\n    Saturation intensity is uniformly sampled in (intensity_min, intensity_max).\n    - intensity < 1 will reduce saturation (make the image more grayscale)\n    - intensity = 1 will preserve the input image\n    - intensity > 1 will increase saturation\n\n    See: https://pillow.readthedocs.io/en/3.0.x/reference/ImageEnhance.html\n    \"\"\"\n\n    def __init__(self, intensity_min, intensity_max):\n        \"\"\"\n        Args:\n            intensity_min (float): Minimum augmentation (1 preserves input).\n            intensity_max (float): Maximum augmentation (1 preserves input).\n        \"\"\"\n        super().__init__()\n        self._init(locals())\n\n    def get_transform(self, img):\n        assert img.shape[-1] == 3, \"Saturation only works on RGB images\"\n        w = np.random.uniform(self.intensity_min, self.intensity_max)\n        grayscale = img.dot([0.299, 0.587, 0.114])[:, :, np.newaxis]\n        return BlendTransform(src_image=grayscale, src_weight=1 - w, dst_weight=w)\n\n\nclass 
RandomLighting(TransformGen):\n    \"\"\"\n    Randomly transforms image color using fixed PCA over ImageNet.\n\n    The degree of color jittering is randomly sampled via a normal distribution,\n    with standard deviation given by the scale parameter.\n    \"\"\"\n\n    def __init__(self, scale):\n        \"\"\"\n        Args:\n            scale (float): Standard deviation of principal component weighting.\n        \"\"\"\n        super().__init__()\n        self._init(locals())\n        self.eigen_vecs = np.array(\n            [[-0.5675, 0.7192, 0.4009], [-0.5808, -0.0045, -0.8140], [-0.5836, -0.6948, 0.4203]]\n        )\n        self.eigen_vals = np.array([0.2175, 0.0188, 0.0045])\n\n    def get_transform(self, img):\n        assert img.shape[-1] == 3, \"Saturation only works on RGB images\"\n        weights = np.random.normal(scale=self.scale, size=3)\n        return BlendTransform(\n            src_image=self.eigen_vecs.dot(weights * self.eigen_vals), src_weight=1.0, dst_weight=1.0\n        )\n\n\ndef apply_transform_gens(transform_gens, img):\n    \"\"\"\n    Apply a list of :class:`TransformGen` on the input image, and\n    returns the transformed image and a list of transforms.\n\n    We cannot simply create and return all transforms without\n    applying it to the image, because a subsequent transform may\n    need the output of the previous one.\n\n    Args:\n        transform_gens (list): list of :class:`TransformGen` instance to\n            be applied.\n        img (ndarray): uint8 or floating point images with 1 or 3 channels.\n\n    Returns:\n        ndarray: the transformed image\n        TransformList: contain the transforms that's used.\n    \"\"\"\n    for g in transform_gens:\n        assert isinstance(g, TransformGen), g\n\n    check_dtype(img)\n\n    tfms = []\n    for g in transform_gens:\n        tfm = g.get_transform(img)\n        assert isinstance(\n            tfm, Transform\n        ), \"TransformGen {} must return an instance of 
Transform! Got {} instead\".format(g, tfm)\n        img = tfm.apply_image(img)\n        tfms.append(tfm)\n    return img, TransformList(tfms)\n"
  },
  {
    "path": "detectron2/engine/__init__.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.\n\nfrom .launch import *\nfrom .train_loop import *\n\n__all__ = [k for k in globals().keys() if not k.startswith(\"_\")]\n\n\n# prefer to let hooks and defaults live in separate namespaces (therefore not in __all__)\n# but still make them available here\nfrom .hooks import *\nfrom .defaults import *\n"
  },
  {
    "path": "detectron2/engine/defaults.py",
    "content": "# -*- coding: utf-8 -*-\n# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\n\n\"\"\"\nThis file contains components with some default boilerplate logic user may need\nin training / testing. They will not work for everyone, but many users may find them useful.\n\nThe behavior of functions/classes in this file is subject to change,\nsince they are meant to represent the \"common default behavior\" people need in their projects.\n\"\"\"\n\nimport argparse\nimport logging\nimport os\nfrom collections import OrderedDict\nimport torch\nfrom fvcore.common.file_io import PathManager\nfrom fvcore.nn.precise_bn import get_bn_modules\nfrom torch.nn.parallel import DistributedDataParallel\n\nimport detectron2.data.transforms as T\nfrom detectron2.checkpoint import DetectionCheckpointer\nfrom detectron2.data import (\n    MetadataCatalog,\n    build_detection_test_loader,\n    build_detection_train_loader,\n)\nfrom detectron2.evaluation import (\n    DatasetEvaluator,\n    inference_on_dataset,\n    print_csv_format,\n    verify_results,\n)\nfrom detectron2.modeling import build_model\nfrom detectron2.solver import build_lr_scheduler, build_optimizer\nfrom detectron2.utils import comm\nfrom detectron2.utils.collect_env import collect_env_info\nfrom detectron2.utils.env import seed_all_rng\nfrom detectron2.utils.events import CommonMetricPrinter, JSONWriter, TensorboardXWriter\nfrom detectron2.utils.logger import setup_logger\n\nfrom . 
import hooks\nfrom .train_loop import SimpleTrainer\n\n__all__ = [\"default_argument_parser\", \"default_setup\", \"DefaultPredictor\", \"DefaultTrainer\"]\n\n\ndef default_argument_parser():\n    \"\"\"\n    Create a parser with some common arguments used by detectron2 users.\n\n    Returns:\n        argparse.ArgumentParser:\n    \"\"\"\n    parser = argparse.ArgumentParser(description=\"Detectron2 Training\")\n    parser.add_argument(\"--config-file\", default=\"\", metavar=\"FILE\", help=\"path to config file\")\n    parser.add_argument(\n        \"--resume\",\n        action=\"store_true\",\n        help=\"whether to attempt to resume from the checkpoint directory\",\n    )\n    parser.add_argument(\"--eval-only\", action=\"store_true\", help=\"perform evaluation only\")\n    parser.add_argument(\"--num-gpus\", type=int, default=1, help=\"number of gpus *per machine*\")\n    parser.add_argument(\"--num-machines\", type=int, default=1)\n    parser.add_argument(\n        \"--machine-rank\", type=int, default=0, help=\"the rank of this machine (unique per machine)\"\n    )\n\n    # PyTorch still may leave orphan processes in multi-gpu training.\n    # Therefore we use a deterministic way to obtain port,\n    # so that users are aware of orphan processes by seeing the port occupied.\n    port = 2 ** 15 + 2 ** 14 + hash(os.getuid()) % 2 ** 14\n    parser.add_argument(\"--dist-url\", default=\"tcp://127.0.0.1:{}\".format(port))\n    parser.add_argument(\n        \"opts\",\n        help=\"Modify config options using the command-line\",\n        default=None,\n        nargs=argparse.REMAINDER,\n    )\n    return parser\n\n\ndef default_setup(cfg, args):\n    \"\"\"\n    Perform some basic common setups at the beginning of a job, including:\n\n    1. Set up the detectron2 logger\n    2. Log basic information about environment, cmdline arguments, and config\n    3. 
Backup the config to the output directory\n\n    Args:\n        cfg (CfgNode): the full config to be used\n        args (argparse.NameSpace): the command line arguments to be logged\n    \"\"\"\n    output_dir = cfg.OUTPUT_DIR\n    if comm.is_main_process() and output_dir:\n        PathManager.mkdirs(output_dir)\n\n    rank = comm.get_rank()\n    setup_logger(output_dir, distributed_rank=rank, name=\"fvcore\")\n    logger = setup_logger(output_dir, distributed_rank=rank)\n\n    logger.info(\"Rank of current process: {}. World size: {}\".format(rank, comm.get_world_size()))\n    logger.info(\"Environment info:\\n\" + collect_env_info())\n\n    logger.info(\"Command line arguments: \" + str(args))\n    if hasattr(args, \"config_file\"):\n        logger.info(\n            \"Contents of args.config_file={}:\\n{}\".format(\n                args.config_file, PathManager.open(args.config_file, \"r\").read()\n            )\n        )\n\n    logger.info(\"Running with full config:\\n{}\".format(cfg))\n    if comm.is_main_process() and output_dir:\n        # Note: some of our scripts may expect the existence of\n        # config.yaml in output directory\n        path = os.path.join(output_dir, \"config.yaml\")\n        with PathManager.open(path, \"w\") as f:\n            f.write(cfg.dump())\n        logger.info(\"Full config saved to {}\".format(os.path.abspath(path)))\n\n    # make sure each worker has a different, yet deterministic seed if specified\n    seed_all_rng(None if cfg.SEED < 0 else cfg.SEED + rank)\n\n    # cudnn benchmark has large overhead. 
It shouldn't be used considering the small size of\n    # typical validation set.\n    if not (hasattr(args, \"eval_only\") and args.eval_only):\n        torch.backends.cudnn.benchmark = cfg.CUDNN_BENCHMARK\n\n\nclass DefaultPredictor:\n    \"\"\"\n    Create a simple end-to-end predictor with the given config.\n    The predictor takes an BGR image and produce a dict of predictions.\n\n    Attributes:\n        metadata (Metadata): the metadata of the underlying dataset, obtained from\n            cfg.DATASETS.TEST.\n    \"\"\"\n\n    def __init__(self, cfg):\n        self.cfg = cfg.clone()  # cfg can be modified by model\n        self.model = build_model(self.cfg)\n        self.model.eval()\n        self.metadata = MetadataCatalog.get(cfg.DATASETS.TEST[0])\n\n        checkpointer = DetectionCheckpointer(self.model)\n        checkpointer.load(cfg.MODEL.WEIGHTS)\n\n        self.transform_gen = T.ResizeShortestEdge(\n            [cfg.INPUT.MIN_SIZE_TEST, cfg.INPUT.MIN_SIZE_TEST], cfg.INPUT.MAX_SIZE_TEST\n        )\n\n        self.input_format = cfg.INPUT.FORMAT\n        assert self.input_format in [\"RGB\", \"BGR\"], self.input_format\n\n    @torch.no_grad()\n    def __call__(self, original_image):\n        \"\"\"\n        Args:\n            original_image (np.ndarray): an image of shape (H, W, C) (in BGR order).\n\n        Returns:\n            predictions (dict): the output of the model\n        \"\"\"\n        # Apply pre-processing to image.\n        if self.input_format == \"RGB\":\n            # whether the model expects BGR inputs or RGB\n            original_image = original_image[:, :, ::-1]\n        height, width = original_image.shape[:2]\n        image = self.transform_gen.get_transform(original_image).apply_image(original_image)\n        image = torch.as_tensor(image.astype(\"float32\").transpose(2, 0, 1))\n\n        inputs = {\"image\": image, \"height\": height, \"width\": width}\n        predictions = self.model([inputs])\n        return 
predictions\n\n\nclass DefaultTrainer(SimpleTrainer):\n    \"\"\"\n    A trainer with default training logic. Compared to `SimpleTrainer`, it\n    contains the following logic in addition:\n\n    1. Create model, optimizer, scheduler, dataloader from the given config.\n    2. Load a checkpoint or `cfg.MODEL.WEIGHTS`, if exists.\n    3. Register a few common hooks.\n\n    It is created to simplify the **standard model training workflow** and reduce code boilerplate\n    for users who only need the standard training workflow, with standard features.\n    It means this class makes *many assumptions* about your training logic that\n    may easily become invalid in a new research. In fact, any assumptions beyond those made in the\n    :class:`SimpleTrainer` are too much for research.\n\n    The code of this class has been annotated about restrictive assumptions it mades.\n    When they do not work for you, you're encouraged to write your own training logic.\n\n    Also note that the behavior of this class, like other functions/classes in\n    this file, is not stable, since it is meant to represent the \"common default behavior\".\n    It is only guaranteed to work well with the standard models and training workflow in detectron2.\n    To obtain more stable behavior, write your own training logic with other public APIs.\n\n    Attributes:\n        scheduler:\n        checkpointer (DetectionCheckpointer):\n        cfg (CfgNode):\n    \"\"\"\n\n    def __init__(self, cfg):\n        \"\"\"\n        Args:\n            cfg (CfgNode):\n        \"\"\"\n        # Assume these objects must be constructed in this order.\n        model = self.build_model(cfg)\n        optimizer = self.build_optimizer(cfg, model)\n        data_loader = self.build_train_loader(cfg)\n\n        # For training, wrap with DDP. 
But don't need this for inference.\n        if comm.get_world_size() > 1:\n            model = DistributedDataParallel(\n                model, device_ids=[comm.get_local_rank()], broadcast_buffers=False\n            )\n        super().__init__(model, data_loader, optimizer)\n\n        self.scheduler = self.build_lr_scheduler(cfg, optimizer)\n        # Assume no other objects need to be checkpointed.\n        # We can later make it checkpoint the stateful hooks\n        self.checkpointer = DetectionCheckpointer(\n            # Assume you want to save checkpoints together with logs/statistics\n            model,\n            cfg.OUTPUT_DIR,\n            optimizer=optimizer,\n            scheduler=self.scheduler,\n        )\n        self.start_iter = 0\n        self.max_iter = cfg.SOLVER.MAX_ITER\n        self.cfg = cfg\n\n        self.register_hooks(self.build_hooks())\n\n    def resume_or_load(self, resume=True):\n        \"\"\"\n        If `resume==True`, and last checkpoint exists, resume from it.\n\n        Otherwise, load a model specified by the config.\n\n        Args:\n            resume (bool): whether to do resume or not\n        \"\"\"\n        # The checkpoint stores the training iteration that just finished, thus we start\n        # at the next iteration (or iter zero if there's no checkpoint).\n        self.start_iter = (\n            self.checkpointer.resume_or_load(self.cfg.MODEL.WEIGHTS, resume=resume).get(\n                \"iteration\", -1\n            )\n            + 1\n        )\n\n    def build_hooks(self):\n        \"\"\"\n        Build a list of default hooks.\n\n        Returns:\n            list[HookBase]:\n        \"\"\"\n        cfg = self.cfg.clone()\n        cfg.defrost()\n        cfg.DATALOADER.NUM_WORKERS = 0  # save some memory and time for PreciseBN\n\n        ret = [\n            hooks.IterationTimer(),\n            hooks.LRScheduler(self.optimizer, self.scheduler),\n            hooks.PreciseBN(\n                # Run at the same 
freq as (but before) evaluation.\n                cfg.TEST.EVAL_PERIOD,\n                self.model,\n                # Build a new data loader to not affect training\n                self.build_train_loader(cfg),\n                cfg.TEST.PRECISE_BN.NUM_ITER,\n            )\n            if cfg.TEST.PRECISE_BN.ENABLED and get_bn_modules(self.model)\n            else None,\n        ]\n\n        # Do PreciseBN before checkpointer, because it updates the model and need to\n        # be saved by checkpointer.\n        # This is not always the best: if checkpointing has a different frequency,\n        # some checkpoints may have more precise statistics than others.\n        if comm.is_main_process():\n            ret.append(hooks.PeriodicCheckpointer(self.checkpointer, cfg.SOLVER.CHECKPOINT_PERIOD))\n\n        def test_and_save_results():\n            self._last_eval_results = self.test(self.cfg, self.model)\n            return self._last_eval_results\n\n        # Do evaluation after checkpointer, because then if it fails,\n        # we can use the saved checkpoint to debug.\n        ret.append(hooks.EvalHook(cfg.TEST.EVAL_PERIOD, test_and_save_results))\n\n        if comm.is_main_process():\n            # run writers in the end, so that evaluation metrics are written\n            ret.append(hooks.PeriodicWriter(self.build_writers()))\n        return ret\n\n    def build_writers(self):\n        \"\"\"\n        Build a list of default writers, that write metrics to the screen,\n        a json file, and a tensorboard event file respectively.\n\n        Returns:\n            list[Writer]: a list of objects that have a ``.write`` method.\n        \"\"\"\n        # Assume the default print/log frequency.\n        return [\n            # It may not always print what you want to see, since it prints \"common\" metrics only.\n            CommonMetricPrinter(self.max_iter),\n            JSONWriter(os.path.join(self.cfg.OUTPUT_DIR, \"metrics.json\")),\n            
TensorboardXWriter(self.cfg.OUTPUT_DIR),\n        ]\n\n    def train(self):\n        \"\"\"\n        Run training.\n\n        Returns:\n            OrderedDict of results, if evaluation is enabled. Otherwise None.\n        \"\"\"\n        super().train(self.start_iter, self.max_iter)\n        if hasattr(self, \"_last_eval_results\") and comm.is_main_process():\n            verify_results(self.cfg, self._last_eval_results)\n            return self._last_eval_results\n\n    @classmethod\n    def build_model(cls, cfg):\n        \"\"\"\n        Returns:\n            torch.nn.Module:\n        \"\"\"\n        model = build_model(cfg)\n        logger = logging.getLogger(__name__)\n        logger.info(\"Model:\\n{}\".format(model))\n        return model\n\n    @classmethod\n    def build_optimizer(cls, cfg, model):\n        \"\"\"\n        Returns:\n            torch.optim.Optimizer:\n        \"\"\"\n        return build_optimizer(cfg, model)\n\n    @classmethod\n    def build_lr_scheduler(cls, cfg, optimizer):\n        return build_lr_scheduler(cfg, optimizer)\n\n    @classmethod\n    def build_train_loader(cls, cfg):\n        \"\"\"\n        Returns:\n            iterable\n        \"\"\"\n        return build_detection_train_loader(cfg)\n\n    @classmethod\n    def build_test_loader(cls, cfg, dataset_name):\n        \"\"\"\n        Returns:\n            iterable\n        \"\"\"\n        return build_detection_test_loader(cfg, dataset_name)\n\n    @classmethod\n    def build_evaluator(cls, cfg, dataset_name):\n        \"\"\"\n        Returns:\n            DatasetEvaluator\n        \"\"\"\n        raise NotImplementedError\n\n    @classmethod\n    def test(cls, cfg, model, evaluators=None):\n        \"\"\"\n        Args:\n            cfg (CfgNode):\n            model (nn.Module):\n            evaluators (list[DatasetEvaluator] or None): if None, will call\n                :meth:`build_evaluator`. 
Otherwise, must have the same length as\n                `cfg.DATASETS.TEST`.\n\n        Returns:\n            dict: a dict of result metrics\n        \"\"\"\n        logger = logging.getLogger(__name__)\n        if isinstance(evaluators, DatasetEvaluator):\n            evaluators = [evaluators]\n        if evaluators is not None:\n            assert len(cfg.DATASETS.TEST) == len(evaluators), \"{} != {}\".format(\n                len(cfg.DATASETS.TEST), len(evaluators)\n            )\n        # print(evaluators)\n\n        results = OrderedDict()\n        relations =OrderedDict()\n        for idx, dataset_name in enumerate(cfg.DATASETS.TEST):\n            data_loader = cls.build_test_loader(cfg, dataset_name)\n            # When evaluators are passed in as arguments,\n            # implicitly assume that evaluators can be created before data_loader.\n            evaluator = (\n                evaluators[idx]\n                if evaluators is not None\n                else cls.build_evaluator(cfg, dataset_name)\n            )\n            results_i,relations_i = inference_on_dataset(model, data_loader, evaluator)\n            results[dataset_name] = results_i\n            relations[dataset_name] = relations_i\n            if comm.is_main_process():\n                assert isinstance(\n                    results_i, dict\n                ), \"Evaluator must return a dict on the main process. Got {} instead.\".format(\n                    results_i\n                )\n                logger.info(\"Evaluation results for {} in csv format:\".format(dataset_name))\n                print_csv_format(results_i)\n                # print_csv_format(relations_i)\n\n        if len(results) == 1 and len(relations):\n            results = list(results.values())[0]\n            relations = list(relations.values())[0]\n        return results,relations\n"
  },
  {
    "path": "detectron2/engine/hooks.py",
    "content": "# -*- coding: utf-8 -*-\n# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\n\nimport datetime\nimport logging\nimport os\nimport tempfile\nimport time\nfrom collections import Counter\nimport torch\nfrom fvcore.common.checkpoint import PeriodicCheckpointer as _PeriodicCheckpointer\nfrom fvcore.common.file_io import PathManager\nfrom fvcore.common.timer import Timer\nfrom fvcore.nn.precise_bn import get_bn_modules, update_bn_stats\n\nimport detectron2.utils.comm as comm\nfrom detectron2.evaluation.testing import flatten_results_dict\nfrom detectron2.utils.events import EventStorage\n\nfrom .train_loop import HookBase\n\n__all__ = [\n    \"CallbackHook\",\n    \"IterationTimer\",\n    \"PeriodicWriter\",\n    \"PeriodicCheckpointer\",\n    \"LRScheduler\",\n    \"AutogradProfiler\",\n    \"EvalHook\",\n    \"PreciseBN\",\n]\n\n\n\"\"\"\nImplement some common hooks.\n\"\"\"\n\n\nclass CallbackHook(HookBase):\n    \"\"\"\n    Create a hook using callback functions provided by the user.\n    \"\"\"\n\n    def __init__(self, *, before_train=None, after_train=None, before_step=None, after_step=None):\n        \"\"\"\n        Each argument is a function that takes one argument: the trainer.\n        \"\"\"\n        self._before_train = before_train\n        self._before_step = before_step\n        self._after_step = after_step\n        self._after_train = after_train\n\n    def before_train(self):\n        if self._before_train:\n            self._before_train(self.trainer)\n\n    def after_train(self):\n        if self._after_train:\n            self._after_train(self.trainer)\n        # The functions may be closures that hold reference to the trainer\n        # Therefore, delete them to avoid circular reference.\n        del self._before_train, self._after_train\n        del self._before_step, self._after_step\n\n    def before_step(self):\n        if self._before_step:\n            self._before_step(self.trainer)\n\n    def 
after_step(self):\n        if self._after_step:\n            self._after_step(self.trainer)\n\n\nclass IterationTimer(HookBase):\n    \"\"\"\n    Track the time spent for each iteration (each run_step call in the trainer).\n    Print a summary in the end of training.\n\n    This hook uses the time between the call to its :meth:`before_step`\n    and :meth:`after_step` methods.\n    Under the convention that :meth:`before_step` of all hooks should only\n    take negligible amount of time, the :class:`IterationTimer` hook should be\n    placed at the beginning of the list of hooks to obtain accurate timing.\n    \"\"\"\n\n    def __init__(self, warmup_iter=3):\n        \"\"\"\n        Args:\n            warmup_iter (int): the number of iterations at the beginning to exclude\n                from timing.\n        \"\"\"\n        self._warmup_iter = warmup_iter\n        self._step_timer = Timer()\n\n    def before_train(self):\n        self._start_time = time.perf_counter()\n        self._total_timer = Timer()\n        self._total_timer.pause()\n\n    def after_train(self):\n        logger = logging.getLogger(__name__)\n        total_time = time.perf_counter() - self._start_time\n        total_time_minus_hooks = self._total_timer.seconds()\n        hook_time = total_time - total_time_minus_hooks\n\n        num_iter = self.trainer.iter + 1 - self.trainer.start_iter - self._warmup_iter\n\n        if num_iter > 0 and total_time_minus_hooks > 0:\n            # Speed is meaningful only after warmup\n            # NOTE this format is parsed by grep in some scripts\n            logger.info(\n                \"Overall training speed: {} iterations in {} ({:.4f} s / it)\".format(\n                    num_iter,\n                    str(datetime.timedelta(seconds=int(total_time_minus_hooks))),\n                    total_time_minus_hooks / num_iter,\n                )\n            )\n\n        logger.info(\n            \"Total training time: {} ({} on hooks)\".format(\n            
    str(datetime.timedelta(seconds=int(total_time))),\n                str(datetime.timedelta(seconds=int(hook_time))),\n            )\n        )\n\n    def before_step(self):\n        self._step_timer.reset()\n        self._total_timer.resume()\n\n    def after_step(self):\n        # +1 because we're in after_step\n        iter_done = self.trainer.iter - self.trainer.start_iter + 1\n        if iter_done >= self._warmup_iter:\n            sec = self._step_timer.seconds()\n            self.trainer.storage.put_scalars(time=sec)\n        else:\n            self._start_time = time.perf_counter()\n            self._total_timer.reset()\n\n        self._total_timer.pause()\n\n\nclass PeriodicWriter(HookBase):\n    \"\"\"\n    Write events to EventStorage periodically.\n\n    It is executed every ``period`` iterations and after the last iteration.\n    \"\"\"\n\n    def __init__(self, writers, period=20):\n        \"\"\"\n        Args:\n            writers (list): a list of objects with a \"write\" method.\n            period (int):\n        \"\"\"\n        self._writers = writers\n        self._period = period\n\n    def after_step(self):\n        if (self.trainer.iter + 1) % self._period == 0 or (\n            self.trainer.iter == self.trainer.max_iter - 1\n        ):\n            for writer in self._writers:\n                writer.write()\n\n\nclass PeriodicCheckpointer(_PeriodicCheckpointer, HookBase):\n    \"\"\"\n    Same as :class:`detectron2.checkpoint.PeriodicCheckpointer`, but as a hook.\n\n    Note that when used as a hook,\n    it is unable to save additional data other than what's defined\n    by the given `checkpointer`.\n\n    It is executed every ``period`` iterations and after the last iteration.\n    \"\"\"\n\n    def before_train(self):\n        self.max_iter = self.trainer.max_iter\n\n    def after_step(self):\n        # No way to use **kwargs\n        self.step(self.trainer.iter)\n\n\nclass LRScheduler(HookBase):\n    \"\"\"\n    A hook which executes 
a torch builtin LR scheduler and summarizes the LR.\n    It is executed after every iteration.\n    \"\"\"\n\n    def __init__(self, optimizer, scheduler):\n        \"\"\"\n        Args:\n            optimizer (torch.optim.Optimizer):\n            scheduler (torch.optim._LRScheduler)\n        \"\"\"\n        self._optimizer = optimizer\n        self._scheduler = scheduler\n\n        # NOTE: some heuristics on what LR to summarize\n        # summarize the param group with most parameters\n        largest_group = max(len(g[\"params\"]) for g in optimizer.param_groups)\n\n        if largest_group == 1:\n            # If all groups have one parameter,\n            # then find the most common initial LR, and use it for summary\n            lr_count = Counter([g[\"lr\"] for g in optimizer.param_groups])\n            lr = lr_count.most_common()[0][0]\n            for i, g in enumerate(optimizer.param_groups):\n                if g[\"lr\"] == lr:\n                    self._best_param_group_id = i\n                    break\n        else:\n            for i, g in enumerate(optimizer.param_groups):\n                if len(g[\"params\"]) == largest_group:\n                    self._best_param_group_id = i\n                    break\n\n    def after_step(self):\n        lr = self._optimizer.param_groups[self._best_param_group_id][\"lr\"]\n        self.trainer.storage.put_scalar(\"lr\", lr, smoothing_hint=False)\n        self._scheduler.step()\n\n\nclass AutogradProfiler(HookBase):\n    \"\"\"\n    A hook which runs `torch.autograd.profiler.profile`.\n\n    Note:\n        When used together with NCCL on older version of GPUs,\n        autograd profiler may cause deadlock because it unnecessarily allocates\n        memory on every device it sees. 
The memory management calls, if\n        interleaved with NCCL calls, lead to deadlock on GPUs that do not\n        support `cudaLaunchCooperativeKernelMultiDevice`.\n    \"\"\"\n\n    def __init__(self, enable_predicate, output_dir, *, use_cuda=True):\n        \"\"\"\n        Args:\n            enable_predicate (callable[trainer -> bool]): a function which takes a trainer,\n                and returns whether to enable the profiler.\n                It will be called once every step, and can be used to select which steps to profile.\n            output_dir (str): the output directory to dump tracing files.\n            use_cuda (bool): same as in `torch.autograd.profiler.profile`.\n        \"\"\"\n        self._enable_predicate = enable_predicate\n        self._use_cuda = use_cuda\n        self._output_dir = output_dir\n\n    def before_step(self):\n        if self._enable_predicate(self.trainer):\n            self._profiler = torch.autograd.profiler.profile(use_cuda=self._use_cuda)\n            self._profiler.__enter__()\n        else:\n            self._profiler = None\n\n    def after_step(self):\n        if self._profiler is None:\n            return\n        self._profiler.__exit__(None, None, None)\n        out_file = os.path.join(\n            self._output_dir, \"profiler-trace-iter{}.json\".format(self.trainer.iter)\n        )\n        if \"://\" not in out_file:\n            self._profiler.export_chrome_trace(out_file)\n        else:\n            # Support non-posix filesystems\n            with tempfile.TemporaryDirectory(prefix=\"detectron2_profiler\") as d:\n                tmp_file = os.path.join(d, \"tmp.json\")\n                self._profiler.export_chrome_trace(tmp_file)\n                with open(tmp_file) as f:\n                    content = f.read()\n            with PathManager.open(out_file, \"w\") as f:\n                f.write(content)\n\n\nclass EvalHook(HookBase):\n    \"\"\"\n    Run an evaluation function periodically, and at the end of 
training.\n\n    It is executed every ``eval_period`` iterations and after the last iteration.\n    \"\"\"\n\n    def __init__(self, eval_period, eval_function):\n        \"\"\"\n        Args:\n            eval_period (int): the period to run `eval_function`.\n            eval_function (callable): a function which takes no arguments, and\n                returns a nested dict of evaluation metrics.\n\n        Note:\n            This hook must be enabled in all or none workers.\n            If you would like only certain workers to perform evaluation,\n            give other workers a no-op function (`eval_function=lambda: None`).\n        \"\"\"\n        self._period = eval_period\n        self._func = eval_function\n\n    def after_step(self):\n        next_iter = self.trainer.iter + 1\n        is_final = next_iter == self.trainer.max_iter\n        if is_final or (self._period > 0 and next_iter % self._period == 0):\n            results = self._func()\n\n            if results:\n                assert isinstance(\n                    results, dict\n                ), \"Eval function must return a dict. Got {} instead.\".format(results)\n\n                flattened_results = flatten_results_dict(results)\n                for k, v in flattened_results.items():\n                    try:\n                        v = float(v)\n                    except Exception:\n                        raise ValueError(\n                            \"[EvalHook] eval_function should return a nested dict of float. 
\"\n                            \"Got '{}: {}' instead.\".format(k, v)\n                        )\n                self.trainer.storage.put_scalars(**flattened_results, smoothing_hint=False)\n\n            # Evaluation may take different time among workers.\n            # A barrier make them start the next iteration together.\n            comm.synchronize()\n\n    def after_train(self):\n        # func is likely a closure that holds reference to the trainer\n        # therefore we clean it to avoid circular reference in the end\n        del self._func\n\n\nclass PreciseBN(HookBase):\n    \"\"\"\n    The standard implementation of BatchNorm uses EMA in inference, which is\n    sometimes suboptimal.\n    This class computes the true average of statistics rather than the moving average,\n    and put true averages to every BN layer in the given model.\n\n    It is executed every ``period`` iterations and after the last iteration.\n    \"\"\"\n\n    def __init__(self, period, model, data_loader, num_iter):\n        \"\"\"\n        Args:\n            period (int): the period this hook is run, or 0 to not run during training.\n                The hook will always run in the end of training.\n            model (nn.Module): a module whose all BN layers in training mode will be\n                updated by precise BN.\n                Note that user is responsible for ensuring the BN layers to be\n                updated are in training mode when this hook is triggered.\n            data_loader (iterable): it will produce data to be run by `model(data)`.\n            num_iter (int): number of iterations used to compute the precise\n                statistics.\n        \"\"\"\n        self._logger = logging.getLogger(__name__)\n        if len(get_bn_modules(model)) == 0:\n            self._logger.info(\n                \"PreciseBN is disabled because model does not contain BN layers in training mode.\"\n            )\n            self._disabled = True\n            return\n\n   
     self._model = model\n        self._data_loader = data_loader\n        self._num_iter = num_iter\n        self._period = period\n        self._disabled = False\n\n        self._data_iter = None\n\n    def after_step(self):\n        next_iter = self.trainer.iter + 1\n        is_final = next_iter == self.trainer.max_iter\n        if is_final or (self._period > 0 and next_iter % self._period == 0):\n            self.update_stats()\n\n    def update_stats(self):\n        \"\"\"\n        Update the model with precise statistics. Users can manually call this method.\n        \"\"\"\n        if self._disabled:\n            return\n\n        if self._data_iter is None:\n            self._data_iter = iter(self._data_loader)\n\n        num_iter = 0\n\n        def data_loader():\n            nonlocal num_iter\n            num_iter += 1\n            if num_iter % 100 == 0:\n                self._logger.info(\n                    \"Running precise-BN ... {}/{} iterations.\".format(num_iter, self._num_iter)\n                )\n            # This way we can reuse the same iterator\n            yield next(self._data_iter)\n\n        with EventStorage():  # capture events in a new storage to discard them\n            self._logger.info(\n                \"Running precise-BN for {} iterations...  \".format(self._num_iter)\n                + \"Note that this could produce different statistics every time.\"\n            )\n            update_bn_stats(self._model, data_loader(), self._num_iter)\n"
  },
  {
    "path": "detectron2/engine/launch.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport logging\nimport torch\nimport torch.distributed as dist\nimport torch.multiprocessing as mp\n\nfrom detectron2.utils import comm\n\n__all__ = [\"launch\"]\n\n\ndef _find_free_port():\n    import socket\n\n    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n    # Binding to port 0 will cause the OS to find an available port for us\n    sock.bind((\"\", 0))\n    port = sock.getsockname()[1]\n    sock.close()\n    # NOTE: there is still a chance the port could be taken by other processes.\n    return port\n\n\ndef launch(main_func, num_gpus_per_machine, num_machines=1, machine_rank=0, dist_url=None, args=()):\n    \"\"\"\n    Args:\n        main_func: a function that will be called by `main_func(*args)`\n        num_machines (int): the total number of machines\n        machine_rank (int): the rank of this machine (one per machine)\n        dist_url (str): url to connect to for distributed training, including protocol\n                       e.g. 
\"tcp://127.0.0.1:8686\".\n                       Can be set to auto to automatically select a free port on localhost\n        args (tuple): arguments passed to main_func\n    \"\"\"\n    world_size = num_machines * num_gpus_per_machine\n    if world_size > 1:\n        # https://github.com/pytorch/pytorch/pull/14391\n        # TODO prctl in spawned processes\n\n        if dist_url == \"auto\":\n            assert num_machines == 1, \"dist_url=auto cannot work with distributed training.\"\n            port = _find_free_port()\n            dist_url = f\"tcp://127.0.0.1:{port}\"\n\n        mp.spawn(\n            _distributed_worker,\n            nprocs=num_gpus_per_machine,\n            args=(main_func, world_size, num_gpus_per_machine, machine_rank, dist_url, args),\n            daemon=False,\n        )\n    else:\n        main_func(*args)\n\n\ndef _distributed_worker(\n    local_rank, main_func, world_size, num_gpus_per_machine, machine_rank, dist_url, args\n):\n    assert torch.cuda.is_available(), \"cuda is not available. 
Please check your installation.\"\n    global_rank = machine_rank * num_gpus_per_machine + local_rank\n    try:\n        dist.init_process_group(\n            backend=\"NCCL\", init_method=dist_url, world_size=world_size, rank=global_rank\n        )\n    except Exception as e:\n        logger = logging.getLogger(__name__)\n        logger.error(\"Process group URL: {}\".format(dist_url))\n        raise e\n    # synchronize is needed here to prevent a possible timeout after calling init_process_group\n    # See: https://github.com/facebookresearch/maskrcnn-benchmark/issues/172\n    comm.synchronize()\n\n    assert num_gpus_per_machine <= torch.cuda.device_count()\n    torch.cuda.set_device(local_rank)\n\n    # Setup the local process group (which contains ranks within the same machine)\n    assert comm._LOCAL_PROCESS_GROUP is None\n    num_machines = world_size // num_gpus_per_machine\n    for i in range(num_machines):\n        ranks_on_i = list(range(i * num_gpus_per_machine, (i + 1) * num_gpus_per_machine))\n        pg = dist.new_group(ranks_on_i)\n        if i == machine_rank:\n            comm._LOCAL_PROCESS_GROUP = pg\n\n    main_func(*args)\n"
  },
  {
    "path": "detectron2/engine/train_loop.py",
    "content": "# -*- coding: utf-8 -*-\n# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\n\nimport logging\nimport numpy as np\nimport time\nimport weakref\nimport torch\n\nimport detectron2.utils.comm as comm\nfrom detectron2.utils.events import EventStorage\n\n__all__ = [\"HookBase\", \"TrainerBase\", \"SimpleTrainer\"]\n\n\nclass HookBase:\n    \"\"\"\n    Base class for hooks that can be registered with :class:`TrainerBase`.\n\n    Each hook can implement 4 methods. The way they are called is demonstrated\n    in the following snippet:\n\n    .. code-block:: python\n\n        hook.before_train()\n        for iter in range(start_iter, max_iter):\n            hook.before_step()\n            trainer.run_step()\n            hook.after_step()\n        hook.after_train()\n\n    Notes:\n        1. In the hook method, users can access `self.trainer` to access more\n           properties about the context (e.g., current iteration).\n\n        2. A hook that does something in :meth:`before_step` can often be\n           implemented equivalently in :meth:`after_step`.\n           If the hook takes non-trivial time, it is strongly recommended to\n           implement the hook in :meth:`after_step` instead of :meth:`before_step`.\n           The convention is that :meth:`before_step` should only take negligible time.\n\n           Following this convention will allow hooks that do care about the difference\n           between :meth:`before_step` and :meth:`after_step` (e.g., timer) to\n           function properly.\n\n    Attributes:\n        trainer: A weak reference to the trainer object. 
Set by the trainer when the hook is\n            registered.\n    \"\"\"\n\n    def before_train(self):\n        \"\"\"\n        Called before the first iteration.\n        \"\"\"\n        pass\n\n    def after_train(self):\n        \"\"\"\n        Called after the last iteration.\n        \"\"\"\n        pass\n\n    def before_step(self):\n        \"\"\"\n        Called before each iteration.\n        \"\"\"\n        pass\n\n    def after_step(self):\n        \"\"\"\n        Called after each iteration.\n        \"\"\"\n        pass\n\n\nclass TrainerBase:\n    \"\"\"\n    Base class for iterative trainer with hooks.\n\n    The only assumption we made here is: the training runs in a loop.\n    A subclass can implement what the loop is.\n    We made no assumptions about the existence of dataloader, optimizer, model, etc.\n\n    Attributes:\n        iter(int): the current iteration.\n\n        start_iter(int): The iteration to start with.\n            By convention the minimum possible value is 0.\n\n        max_iter(int): The iteration to end training.\n\n        storage(EventStorage): An EventStorage that's opened during the course of training.\n    \"\"\"\n\n    def __init__(self):\n        self._hooks = []\n\n    def register_hooks(self, hooks):\n        \"\"\"\n        Register hooks to the trainer. 
The hooks are executed in the order\n        they are registered.\n\n        Args:\n            hooks (list[Optional[HookBase]]): list of hooks\n        \"\"\"\n        hooks = [h for h in hooks if h is not None]\n        for h in hooks:\n            assert isinstance(h, HookBase)\n            # To avoid circular reference, hooks and trainer cannot own each other.\n            # This normally does not matter, but will cause memory leak if the\n            # involved objects contain __del__:\n            # See http://engineering.hearsaysocial.com/2013/06/16/circular-references-in-python/\n            h.trainer = weakref.proxy(self)\n        self._hooks.extend(hooks)\n\n    def train(self, start_iter: int, max_iter: int):\n        \"\"\"\n        Args:\n            start_iter, max_iter (int): See docs above\n        \"\"\"\n        logger = logging.getLogger(__name__)\n        logger.info(\"Starting training from iteration {}\".format(start_iter))\n\n        self.iter = self.start_iter = start_iter\n        self.max_iter = max_iter\n\n        with EventStorage(start_iter) as self.storage:\n            try:\n                self.before_train()\n                for self.iter in range(start_iter, max_iter):\n                    self.before_step()\n                    self.run_step()\n                    self.after_step()\n            finally:\n                self.after_train()\n\n    def before_train(self):\n        for h in self._hooks:\n            h.before_train()\n\n    def after_train(self):\n        for h in self._hooks:\n            h.after_train()\n\n    def before_step(self):\n        for h in self._hooks:\n            h.before_step()\n\n    def after_step(self):\n        for h in self._hooks:\n            h.after_step()\n        # this guarantees, that in each hook's after_step, storage.iter == trainer.iter\n        self.storage.step()\n\n    def run_step(self):\n        raise NotImplementedError\n\n\nclass SimpleTrainer(TrainerBase):\n    \"\"\"\n    A 
simple trainer for the most common type of task:\n    single-cost single-optimizer single-data-source iterative optimization.\n    It assumes that every step, you:\n\n    1. Compute the loss with a data from the data_loader.\n    2. Compute the gradients with the above loss.\n    3. Update the model with the optimizer.\n\n    If you want to do anything fancier than this,\n    either subclass TrainerBase and implement your own `run_step`,\n    or write your own training loop.\n    \"\"\"\n\n    def __init__(self, model, data_loader, optimizer):\n        \"\"\"\n        Args:\n            model: a torch Module. Takes a data from data_loader and returns a\n                dict of losses.\n            data_loader: an iterable. Contains data to be used to call model.\n            optimizer: a torch optimizer.\n        \"\"\"\n        super().__init__()\n\n        \"\"\"\n        We set the model to training mode in the trainer.\n        However it's valid to train a model that's in eval mode.\n        If you want your model (or a submodule of it) to behave\n        like evaluation during training, you can overwrite its train() method.\n        \"\"\"\n        model.train()\n\n        self.model = model\n        self.data_loader = data_loader\n        self._data_loader_iter = iter(data_loader)\n        self.optimizer = optimizer\n\n    def run_step(self):\n        \"\"\"\n        Implement the standard training logic described above.\n        \"\"\"\n        assert self.model.training, \"[SimpleTrainer] model was changed to eval mode!\"\n        start = time.perf_counter()\n        \"\"\"\n        If your want to do something with the data, you can wrap the dataloader.\n        \"\"\"\n        data = next(self._data_loader_iter)\n        data_time = time.perf_counter() - start\n\n        \"\"\"\n        If your want to do something with the losses, you can wrap the model.\n        \"\"\"\n        loss_dict = self.model(data)\n        losses = sum(loss for loss in 
loss_dict.values())\n        self._detect_anomaly(losses, loss_dict)\n\n        metrics_dict = loss_dict\n        metrics_dict[\"data_time\"] = data_time\n        self._write_metrics(metrics_dict)\n\n        \"\"\"\n        If you need accumulate gradients or something similar, you can\n        wrap the optimizer with your custom `zero_grad()` method.\n        \"\"\"\n        self.optimizer.zero_grad()\n        losses.backward()\n\n        \"\"\"\n        If you need gradient clipping/scaling or other processing, you can\n        wrap the optimizer with your custom `step()` method.\n        \"\"\"\n        self.optimizer.step()\n\n    def _detect_anomaly(self, losses, loss_dict):\n        if not torch.isfinite(losses).all():\n            raise FloatingPointError(\n                \"Loss became infinite or NaN at iteration={}!\\nloss_dict = {}\".format(\n                    self.iter, loss_dict\n                )\n            )\n\n    def _write_metrics(self, metrics_dict: dict):\n        \"\"\"\n        Args:\n            metrics_dict (dict): dict of scalar metrics\n        \"\"\"\n        metrics_dict = {\n            k: v.detach().cpu().item() if isinstance(v, torch.Tensor) else float(v)\n            for k, v in metrics_dict.items()\n        }\n        # gather metrics among all workers for logging\n        # This assumes we do DDP-style training, which is currently the only\n        # supported method in detectron2.\n        all_metrics_dict = comm.gather(metrics_dict)\n\n        if comm.is_main_process():\n            if \"data_time\" in all_metrics_dict[0]:\n                # data_time among workers can have high variance. 
The actual latency\n                # caused by data_time is the maximum among workers.\n                data_time = np.max([x.pop(\"data_time\") for x in all_metrics_dict])\n                self.storage.put_scalar(\"data_time\", data_time)\n\n            # average the rest metrics\n            metrics_dict = {\n                k: np.mean([x[k] for x in all_metrics_dict]) for k in all_metrics_dict[0].keys()\n            }\n            total_losses_reduced = sum(loss for loss in metrics_dict.values())\n\n            self.storage.put_scalar(\"total_loss\", total_losses_reduced)\n            if len(metrics_dict) > 1:\n                self.storage.put_scalars(**metrics_dict)\n"
  },
  {
    "path": "detectron2/evaluation/__init__.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nfrom .cityscapes_evaluation import CityscapesEvaluator\nfrom .soba_evaluation import SOBAEvaluator\nfrom .evaluator import DatasetEvaluator, DatasetEvaluators, inference_context, inference_on_dataset\nfrom .lvis_evaluation import LVISEvaluator\nfrom .panoptic_evaluation import COCOPanopticEvaluator\nfrom .pascal_voc_evaluation import PascalVOCDetectionEvaluator\nfrom .sem_seg_evaluation import SemSegEvaluator\nfrom .testing import print_csv_format, verify_results\n\n__all__ = [k for k in globals().keys() if not k.startswith(\"_\")]\n"
  },
  {
    "path": "detectron2/evaluation/cityscapes_evaluation.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport glob\nimport logging\nimport os\nimport tempfile\nfrom collections import OrderedDict\nimport torch\nfrom PIL import Image\n\nfrom detectron2.data import MetadataCatalog\nfrom detectron2.utils import comm\n\nfrom .evaluator import DatasetEvaluator\n\n\nclass CityscapesEvaluator(DatasetEvaluator):\n    \"\"\"\n    Evaluate instance segmentation results using cityscapes API.\n\n    Note:\n        * It does not work in multi-machine distributed training.\n        * It contains a synchronization, therefore has to be used on all ranks.\n    \"\"\"\n\n    def __init__(self, dataset_name):\n        \"\"\"\n        Args:\n            dataset_name (str): the name of the dataset.\n                It must have the following metadata associated with it:\n                \"thing_classes\", \"gt_dir\".\n        \"\"\"\n        self._metadata = MetadataCatalog.get(dataset_name)\n        self._cpu_device = torch.device(\"cpu\")\n        self._logger = logging.getLogger(__name__)\n\n    def reset(self):\n        self._working_dir = tempfile.TemporaryDirectory(prefix=\"cityscapes_eval_\")\n        self._temp_dir = self._working_dir.name\n        # All workers will write to the same results directory\n        # TODO this does not work in distributed training\n        self._temp_dir = comm.all_gather(self._temp_dir)[0]\n        if self._temp_dir != self._working_dir.name:\n            self._working_dir.cleanup()\n        self._logger.info(\n            \"Writing cityscapes results to temporary directory {} ...\".format(self._temp_dir)\n        )\n\n    def process(self, inputs, outputs):\n        from cityscapesscripts.helpers.labels import name2label\n\n        for input, output in zip(inputs, outputs):\n            file_name = input[\"file_name\"]\n            basename = os.path.splitext(os.path.basename(file_name))[0]\n            pred_txt = os.path.join(self._temp_dir, basename + 
\"_pred.txt\")\n\n            output = output[\"instances\"].to(self._cpu_device)\n            num_instances = len(output)\n            with open(pred_txt, \"w\") as fout:\n                for i in range(num_instances):\n                    pred_class = output.pred_classes[i]\n                    classes = self._metadata.thing_classes[pred_class]\n                    class_id = name2label[classes].id\n                    score = output.scores[i]\n                    mask = output.pred_masks[i].numpy().astype(\"uint8\")\n                    png_filename = os.path.join(\n                        self._temp_dir, basename + \"_{}_{}.png\".format(i, classes)\n                    )\n\n                    Image.fromarray(mask * 255).save(png_filename)\n                    fout.write(\"{} {} {}\\n\".format(os.path.basename(png_filename), class_id, score))\n\n    def evaluate(self):\n        \"\"\"\n        Returns:\n            dict: has a key \"segm\", whose value is a dict of \"AP\" and \"AP50\".\n        \"\"\"\n        comm.synchronize()\n        if comm.get_rank() > 0:\n            return\n        os.environ[\"CITYSCAPES_DATASET\"] = os.path.abspath(\n            os.path.join(self._metadata.gt_dir, \"..\", \"..\")\n        )\n        # Load the Cityscapes eval script *after* setting the required env var,\n        # since the script reads CITYSCAPES_DATASET into global variables at load time.\n        import cityscapesscripts.evaluation.evalInstanceLevelSemanticLabeling as cityscapes_eval\n\n        self._logger.info(\"Evaluating results under {} ...\".format(self._temp_dir))\n\n        # set some global states in cityscapes evaluation API, before evaluating\n        cityscapes_eval.args.predictionPath = os.path.abspath(self._temp_dir)\n        cityscapes_eval.args.predictionWalk = None\n        cityscapes_eval.args.JSONOutput = False\n        cityscapes_eval.args.colorized = False\n        cityscapes_eval.args.gtInstancesFile = os.path.join(self._temp_dir, 
\"gtInstances.json\")\n\n        # These lines are adopted from\n        # https://github.com/mcordts/cityscapesScripts/blob/master/cityscapesscripts/evaluation/evalInstanceLevelSemanticLabeling.py # noqa\n        groundTruthImgList = glob.glob(cityscapes_eval.args.groundTruthSearch)\n        assert len(\n            groundTruthImgList\n        ), \"Cannot find any ground truth images to use for evaluation. Searched for: {}\".format(\n            cityscapes_eval.args.groundTruthSearch\n        )\n        predictionImgList = []\n        for gt in groundTruthImgList:\n            predictionImgList.append(cityscapes_eval.getPrediction(gt, cityscapes_eval.args))\n        results = cityscapes_eval.evaluateImgLists(\n            predictionImgList, groundTruthImgList, cityscapes_eval.args\n        )[\"averages\"]\n\n        ret = OrderedDict()\n        ret[\"segm\"] = {\"AP\": results[\"allAp\"] * 100, \"AP50\": results[\"allAp50%\"] * 100}\n        self._working_dir.cleanup()\n        return ret\n"
  },
  {
    "path": "detectron2/evaluation/coco_evaluation.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport contextlib\nimport copy\nimport io\nimport itertools\nimport json\nimport logging\nimport numpy as np\nimport os\nimport pickle\nfrom collections import OrderedDict\nimport pycocotools.mask as mask_util\nimport torch\nfrom fvcore.common.file_io import PathManager\nfrom pycocotools.coco import COCO\nfrom pycocotools.cocoeval import COCOeval\nfrom tabulate import tabulate\n\nimport detectron2.utils.comm as comm\nfrom detectron2.data import MetadataCatalog\nfrom detectron2.structures import Boxes, BoxMode, pairwise_iou\nfrom detectron2.utils.logger import create_small_table\n\nfrom .evaluator import DatasetEvaluator\n\n\nclass COCOEvaluator(DatasetEvaluator):\n    \"\"\"\n    Evaluate object proposal, instance detection/segmentation, keypoint detection\n    outputs using COCO's metrics and APIs.\n    \"\"\"\n\n    def __init__(self, dataset_name, cfg, distributed, output_dir=None):\n        \"\"\"\n        Args:\n            dataset_name (str): name of the dataset to be evaluated.\n                It must have the following corresponding metadata:\n                    \"json_file\": the path to the COCO format annotation\n            cfg (CfgNode): config instance\n            distributed (True): if True, will collect results from all ranks for evaluation.\n                Otherwise, will evaluate the results in the current process.\n            output_dir (str): optional, an output directory to dump results.\n        \"\"\"\n        self._tasks = self._tasks_from_config(cfg)\n        self._distributed = distributed\n        self._output_dir = output_dir\n\n        self._cpu_device = torch.device(\"cpu\")\n        self._logger = logging.getLogger(__name__)\n\n        self._metadata = MetadataCatalog.get(dataset_name)\n        json_file = PathManager.get_local_path(self._metadata.json_file)\n        with contextlib.redirect_stdout(io.StringIO()):\n            self._coco_api = 
COCO(json_file)\n\n        self._kpt_oks_sigmas = cfg.TEST.KEYPOINT_OKS_SIGMAS\n        # Test set json files do not contain annotations (evaluation must be\n        # performed using the COCO evaluation server).\n        self._do_evaluation = len(self._coco_api.getAnnIds()) > 0\n\n    def reset(self):\n        self._predictions = []\n        self._coco_results = []\n\n    def _tasks_from_config(self, cfg):\n        \"\"\"\n        Returns:\n            tuple[str]: tasks that can be evaluated under the given configuration.\n        \"\"\"\n        tasks = (\"bbox\",)\n        if cfg.MODEL.MASK_ON:\n            tasks = tasks + (\"segm\",)\n        if cfg.MODEL.KEYPOINT_ON:\n            tasks = tasks + (\"keypoints\",)\n        return tasks\n\n    def process(self, inputs, outputs):\n        \"\"\"\n        Args:\n            inputs: the inputs to a COCO model (e.g., GeneralizedRCNN).\n                It is a list of dict. Each dict corresponds to an image and\n                contains keys like \"height\", \"width\", \"file_name\", \"image_id\".\n            outputs: the outputs of a COCO model. 
It is a list of dicts with key\n                \"instances\" that contains :class:`Instances`.\n        \"\"\"\n        results,associations = outputs\n        for input, result, association in zip(inputs, results, associations):\n            prediction = {\"image_id\": input[\"image_id\"]}\n            # TODO this is ugly\n            if \"instances\" in result:\n                instances = result[\"instances\"].to(self._cpu_device)\n\n                if instances.has(\"pred_masks\"):\n                    # use RLE to encode the masks, because they are too large and takes memory\n                    # since this evaluator stores results of the entire dataset\n                    # Our model may predict bool array, but cocoapi expects uint8\n                    rles = [\n                        mask_util.encode(np.array(mask[:, :, None], order=\"F\", dtype=\"uint8\"))[0]\n                        for mask in instances.pred_masks\n                    ]\n                    for rle in rles:\n                        # \"counts\" is an array encoded by mask_util as a byte-stream. Python3's\n                        # json writer which always produces strings cannot serialize a bytestream\n                        # unless you decode it. 
Thankfully, utf-8 works out (which is also what\n                        # the pycocotools/_mask.pyx does).\n                        rle[\"counts\"] = rle[\"counts\"].decode(\"utf-8\")\n                    instances.pred_masks_rle = rles\n                    instances.remove(\"pred_masks\")\n\n                prediction[\"instances\"] = instances_to_json(instances, input[\"image_id\"])\n            if \"instances\" in association:\n                instances = association[\"instances\"].to(self._cpu_device)\n                if instances.has(\"pred_masks\"):\n\n                    rles = [\n                        mask_util.encode(np.array(mask[:,:],order='F',dtype='uint8'))[0]\n                        for mask in instances.pred_masks\n                    ]\n                    for rle in rles:\n                        rle[\"counts\"] = rle[\"counts\"].decode(\"utf-8\")\n                    instances.pred_masks_rle = rles\n                    instances.remove(\"pred_masks\")\n                prediction[\"associations\"] = instances_to_json(instances, input[\"image_id\"])\n            if \"proposals\" in result:\n                prediction[\"proposals\"] = result[\"proposals\"].to(self._cpu_device)\n            if \"rela_proposals\" in association:\n                prediction[\"rela_proposals\"] = association[\"proposals\"].to(self._cpu_device)\n            self._predictions.append(prediction)\n\n    def evaluate(self): \n        if self._distributed:\n            comm.synchronize()\n            self._predictions = comm.gather(self._predictions, dst=0)\n            self._predictions = list(itertools.chain(*self._predictions))\n\n            if not comm.is_main_process():\n                return {},{}\n        \n        if len(self._predictions) == 0:\n            self._logger.warning(\"[COCOEvaluator] Did not receive valid predictions.\")\n            return {}\n\n        if self._output_dir:\n            PathManager.mkdirs(self._output_dir)\n            file_path = 
os.path.join(self._output_dir, \"instances_predictions.pth\")\n            with PathManager.open(file_path, \"wb\") as f:\n                torch.save(self._predictions, f)\n\n        self._results = OrderedDict()\n        self._association_results = OrderedDict()\n\n        if \"proposals\" in self._predictions[0]: #and \"rela_proposals\" in self._predictions[0]:\n            self._eval_box_proposals()\n\n        if \"instances\" in self._predictions[0]: #and \"associations\" in self._predictions[0]:\n            self._eval_predictions(set(self._tasks))\n        # Copy so the caller can do whatever with results\n        return copy.deepcopy(self._results),copy.deepcopy(self._association_results)\n\n    def _eval_predictions(self, tasks):\n        \"\"\"\n        Evaluate self._predictions on the given tasks.\n        Fill self._results with the metrics of the tasks.\n        \"\"\"\n        self._logger.info(\"Preparing results for COCO format ...\")\n        self._coco_results = list(itertools.chain(*[x[\"instances\"] for x in self._predictions]))\n        self._rela_results = list(itertools.chain(*[x[\"associations\"] for x in self._predictions]))\n        # unmap the category ids for COCO\n        if hasattr(self._metadata, \"thing_dataset_id_to_contiguous_id\"):\n            reverse_id_mapping = {\n                v: k for k, v in self._metadata.thing_dataset_id_to_contiguous_id.items()\n            }\n            for result in self._coco_results:\n                result[\"category_id\"] = reverse_id_mapping[result[\"category_id\"]]\n\n        if hasattr(self._metadata, \"association_dataset_id_to_contiguous_id\"):\n            reverse_rela_if_mapping = {\n                v: k for k,v in self._metadata.association_dataset_id_to_contiguous_id.items()\n            }\n            for result in self._rela_results:\n                result[\"category_id\"] = reverse_rela_if_mapping[result[\"category_id\"]]\n        if self._output_dir:\n            file_path = 
os.path.join(self._output_dir, \"coco_instances_results.json\")\n            self._logger.info(\"Saving results to {}\".format(file_path))\n            with PathManager.open(file_path, \"w\") as f:\n                f.write(json.dumps(self._coco_results))\n                f.flush()\n            file_path = os.path.join(self._output_dir, \"coco_association_results.json\")\n            self._logger.info(\"Saving association results to {}\".format(file_path))\n            with PathManager.open(file_path, \"w\") as f:\n                f.write(json.dumps(self._rela_results))\n                f.flush()\n        if not self._do_evaluation:\n            self._logger.info(\"Annotations are not available for evaluation.\")\n            return\n        self._logger.info(\"Evaluating predictions ...\")\n        tasks = ('bbox','segm')\n        for task in sorted(tasks):\n            coco_eval = (\n                _evaluate_predictions_on_coco(\n                    self._coco_api, self._coco_results, task, kpt_oks_sigmas=self._kpt_oks_sigmas\n                )\n                if len(self._coco_results) > 0\n                else None  # cocoapi does not handle empty results very well\n            )\n\n            res = self._derive_coco_results(\n                coco_eval, task, class_names=self._metadata.get(\"thing_classes\")\n            )\n            self._results[task] = res\n        self._logger.info(\"Evaluating association predictions ...\")\n        tasks = ('bbox','segm')\n        for task in sorted(tasks):\n            coco_eval =(\n                _evaluate_predictions_on_coco(\n                    self._coco_api, self._rela_results, task, kpt_oks_sigmas=self._kpt_oks_sigmas,is_rela=True\n                )\n                if len(self._rela_results)>0\n                else None\n            )\n            res = self._derive_coco_results(\n                coco_eval, task, class_names=self._metadata.get(\"association_classes\")\n            )\n            
self._association_results[task] = res\n    def _eval_box_proposals(self):\n        \"\"\"\n        Evaluate the box proposals in self._predictions.\n        Fill self._results with the metrics for \"box_proposals\" task.\n        \"\"\"\n        if self._output_dir:\n            # Saving generated box proposals to file.\n            # Predicted box_proposals are in XYXY_ABS mode.\n            bbox_mode = BoxMode.XYXY_ABS.value\n            ids, boxes, objectness_logits = [], [], []\n            rela_boxes, rela_objectness_logits = [], []\n            for prediction in self._predictions:\n                ids.append(prediction[\"image_id\"])\n                boxes.append(prediction[\"proposals\"].proposal_boxes.tensor.numpy())\n                objectness_logits.append(prediction[\"proposals\"].objectness_logits.numpy())\n                rela_boxes.append(prediction[\"rela_proposals\"].proposal_boxes.tensor.numpy())\n                rela_objectness_logits.append(prediction[\"rela_proposals\"].objectness_logits.numpy())\n\n            proposal_data = {\n                \"boxes\": boxes,\n                \"objectness_logits\": objectness_logits,\n                \"ids\": ids,\n                \"bbox_mode\": bbox_mode,\n            }\n            rela_proposal_data = {\n                \"boxes\" : boxes,\n                \"objectness_logits\": objectness_logits,\n                \"ids\": ids,\n                \"bbox_mode\": bbox_mode,\n            }\n            with PathManager.open(os.path.join(self._output_dir, \"box_proposals.pkl\"), \"wb\") as f:\n                pickle.dump(proposal_data, f)\n            with PathManager.open(os.path.join(self._output_dir, \"rela_box_proposals.pkl\"), \"wb\") as f:\n                pickle.dump(rela_proposal_data, f)\n        if not self._do_evaluation:\n            self._logger.info(\"Annotations are not available for evaluation.\")\n            return\n\n        self._logger.info(\"Evaluating bbox proposals ...\")\n        res = 
{}\n        rela_res = {}\n        areas = {\"all\": \"\", \"small\": \"s\", \"medium\": \"m\", \"large\": \"l\"}\n        for limit in [100, 1000]:\n            for area, suffix in areas.items():\n                stats = _evaluate_box_proposals(\n                    self._predictions, self._coco_api, area=area, limit=limit\n                )\n                key = \"AR{}@{:d}\".format(suffix, limit)\n                res[key] = float(stats[\"ar\"].item() * 100)\n                stats = _evaluate_box_proposals(\n                    self._predictions, self._coco_api, area = area, limit =limit\n                )\n                key = \"AR{}@{:d}\".format(suffix, limit)\n                rela_res[key] = float(stats[\"ar\"].item() * 100)\n\n        self._logger.info(\"Proposal metrics: \\n\" + create_small_table(res))\n        self._results[\"box_proposals\"] = res\n        self._association_results[\"box_proposals\"] = rela_res\n\n    def _derive_coco_results(self, coco_eval, iou_type, class_names=None):\n        \"\"\"\n        Derive the desired score numbers from summarized COCOeval.\n\n        Args:\n            coco_eval (None or COCOEval): None represents no predictions from model.\n            iou_type (str):\n            class_names (None or list[str]): if provided, will use it to predict\n                per-category AP.\n\n        Returns:\n            a dict of {metric name: score}\n        \"\"\"\n\n        metrics = {\n            \"bbox\": [\"AP\", \"AP50\", \"AP75\", \"APs\", \"APm\", \"APl\"],\n            \"segm\": [\"AP\", \"AP50\", \"AP75\", \"APs\", \"APm\", \"APl\"],\n            \"keypoints\": [\"AP\", \"AP50\", \"AP75\", \"APm\", \"APl\"],\n        }[iou_type]\n\n        if coco_eval is None:\n            self._logger.warn(\"No predictions from the model! 
Set scores to -1\")\n            return {metric: -1 for metric in metrics}\n\n        # the standard metrics\n        results = {metric: float(coco_eval.stats[idx] * 100) for idx, metric in enumerate(metrics)}\n        self._logger.info(\n            \"Evaluation results for {}: \\n\".format(iou_type) + create_small_table(results)\n        )\n\n        if class_names is None or len(class_names) <= 1:\n            return results\n        # Compute per-category AP\n        # from https://github.com/facebookresearch/Detectron/blob/a6a835f5b8208c45d0dce217ce9bbda915f44df7/detectron/datasets/json_dataset_evaluator.py#L222-L252 # noqa\n        precisions = coco_eval.eval[\"precision\"]\n        # precision has dims (iou, recall, cls, area range, max dets)\n        assert len(class_names) == precisions.shape[2]\n\n        results_per_category = []\n        for idx, name in enumerate(class_names):\n            # area range index 0: all area ranges\n            # max dets index -1: typically 100 per image\n            precision = precisions[:, :, idx, 0, -1]\n            precision = precision[precision > -1]\n            ap = np.mean(precision) if precision.size else float(\"nan\")\n            results_per_category.append((\"{}\".format(name), float(ap * 100)))\n\n        # tabulate it\n        N_COLS = min(6, len(results_per_category) * 2)\n        results_flatten = list(itertools.chain(*results_per_category))\n        results_2d = itertools.zip_longest(*[results_flatten[i::N_COLS] for i in range(N_COLS)])\n        table = tabulate(\n            results_2d,\n            tablefmt=\"pipe\",\n            floatfmt=\".3f\",\n            headers=[\"category\", \"AP\"] * (N_COLS // 2),\n            numalign=\"left\",\n        )\n        self._logger.info(\"Per-category {} AP: \\n\".format(iou_type) + table)\n\n        results.update({\"AP-\" + name: ap for name, ap in results_per_category})\n        return results\n\n\ndef instances_to_json(instances, img_id):\n    num_instance = 
len(instances)\n    has_light = instances.has(\"pred_light\")\n    if num_instance == 0:\n        return []\n    try:\n        boxes = instances.pred_boxes.tensor.numpy()\n        boxes = BoxMode.convert(boxes, BoxMode.XYXY_ABS, BoxMode.XYWH_ABS)\n        boxes = boxes.tolist()\n        scores = instances.scores.tolist()\n        classes = instances.pred_classes.tolist()\n        association = instances.pred_associations\n        if has_light:\n            light = instances.pred_light\n    except:\n        boxes = instances.pred_boxes\n        boxes =BoxMode.convert(np.array(boxes,dtype='float'),BoxMode.XYXY_ABS,BoxMode.XYWH_ABS)\n        boxes = boxes.tolist()\n        scores = instances.scores\n        association = instances.pred_associations\n        classes = instances.pred_classes\n        if has_light:\n            light = instances.pred_light\n\n    has_mask = instances.has(\"pred_masks_rle\")\n    \n    if has_mask:\n        rles = instances.pred_masks_rle\n\n    has_keypoints = instances.has(\"pred_keypoints\")\n    if has_keypoints:\n        keypoints = instances.pred_keypoints\n\n    results = []\n    for k in range(num_instance):\n        result = {\n            \"image_id\": img_id,\n            \"category_id\": classes[k],\n            \"bbox\": boxes[k],\n            \"score\": scores[k],\n            'association_id':association[k]\n        }\n        if has_light:\n            result[\"light\"] = light[k]\n        if has_mask:\n            result[\"segmentation\"] = rles[k]\n        if has_keypoints:\n            # In COCO annotations,\n            # keypoints coordinates are pixel indices.\n            # However our predictions are floating point coordinates.\n            # Therefore we subtract 0.5 to be consistent with the annotation format.\n            # This is the inverse of data loading logic in `datasets/coco.py`.\n            keypoints[k][:,:2] -= 0.5\n            result[\"keypoints\"] = keypoints[k].flatten().tolist()\n        
results.append(result)\n    return results\n\n\n# inspired from Detectron:\n# https://github.com/facebookresearch/Detectron/blob/a6a835f5b8208c45d0dce217ce9bbda915f44df7/detectron/datasets/json_dataset_evaluator.py#L255 # noqa\ndef _evaluate_box_proposals(dataset_predictions, coco_api, thresholds=None, area=\"all\", limit=None):\n    \"\"\"\n    Evaluate detection proposal recall metrics. This function is a much\n    faster alternative to the official COCO API recall evaluation code. However,\n    it produces slightly different results.\n    \"\"\"\n    # Record max overlap value for each gt box\n    # Return vector of overlap values\n    areas = {\n        \"all\": 0,\n        \"small\": 1,\n        \"medium\": 2,\n        \"large\": 3,\n        \"96-128\": 4,\n        \"128-256\": 5,\n        \"256-512\": 6,\n        \"512-inf\": 7,\n    }\n    area_ranges = [\n        [0 ** 2, 1e5 ** 2],  # all\n        [0 ** 2, 32 ** 2],  # small\n        [32 ** 2, 96 ** 2],  # medium\n        [96 ** 2, 1e5 ** 2],  # large\n        [96 ** 2, 128 ** 2],  # 96-128\n        [128 ** 2, 256 ** 2],  # 128-256\n        [256 ** 2, 512 ** 2],  # 256-512\n        [512 ** 2, 1e5 ** 2],\n    ]  # 512-inf\n    assert area in areas, \"Unknown area range: {}\".format(area)\n    area_range = area_ranges[areas[area]]\n    gt_overlaps = []\n    num_pos = 0\n\n    for prediction_dict in dataset_predictions:\n        predictions = prediction_dict[\"proposals\"]\n\n        # sort predictions in descending order\n        # TODO maybe remove this and make it explicit in the documentation\n        inds = predictions.objectness_logits.sort(descending=True)[1]\n        predictions = predictions[inds]\n\n        ann_ids = coco_api.getAnnIds(imgIds=prediction_dict[\"image_id\"])\n        anno = coco_api.loadAnns(ann_ids)\n        gt_boxes = [\n            BoxMode.convert(obj[\"bbox\"], BoxMode.XYWH_ABS, BoxMode.XYXY_ABS)\n            for obj in anno\n            if obj[\"iscrowd\"] == 0\n        ]\n       
 gt_boxes = torch.as_tensor(gt_boxes).reshape(-1, 4)  # guard against no boxes\n        gt_boxes = Boxes(gt_boxes)\n        gt_areas = torch.as_tensor([obj[\"area\"] for obj in anno if obj[\"iscrowd\"] == 0])\n\n        if len(gt_boxes) == 0 or len(predictions) == 0:\n            continue\n\n        valid_gt_inds = (gt_areas >= area_range[0]) & (gt_areas <= area_range[1])\n        gt_boxes = gt_boxes[valid_gt_inds]\n\n        num_pos += len(gt_boxes)\n\n        if len(gt_boxes) == 0:\n            continue\n\n        if limit is not None and len(predictions) > limit:\n            predictions = predictions[:limit]\n\n        overlaps = pairwise_iou(predictions.proposal_boxes, gt_boxes)\n\n        _gt_overlaps = torch.zeros(len(gt_boxes))\n        for j in range(min(len(predictions), len(gt_boxes))):\n            # find which proposal box maximally covers each gt box\n            # and get the iou amount of coverage for each gt box\n            max_overlaps, argmax_overlaps = overlaps.max(dim=0)\n\n            # find which gt box is 'best' covered (i.e. 
'best' = most iou)\n            gt_ovr, gt_ind = max_overlaps.max(dim=0)\n            assert gt_ovr >= 0\n            # find the proposal box that covers the best covered gt box\n            box_ind = argmax_overlaps[gt_ind]\n            # record the iou coverage of this gt box\n            _gt_overlaps[j] = overlaps[box_ind, gt_ind]\n            assert _gt_overlaps[j] == gt_ovr\n            # mark the proposal box and the gt box as used\n            overlaps[box_ind, :] = -1\n            overlaps[:, gt_ind] = -1\n\n        # append recorded iou coverage level\n        gt_overlaps.append(_gt_overlaps)\n    gt_overlaps = torch.cat(gt_overlaps, dim=0)\n    gt_overlaps, _ = torch.sort(gt_overlaps)\n\n    if thresholds is None:\n        step = 0.05\n        thresholds = torch.arange(0.5, 0.95 + 1e-5, step, dtype=torch.float32)\n    recalls = torch.zeros_like(thresholds)\n    # compute recall for each iou threshold\n    for i, t in enumerate(thresholds):\n        recalls[i] = (gt_overlaps >= t).float().sum() / float(num_pos)\n    # ar = 2 * np.trapz(recalls, thresholds)\n    ar = recalls.mean()\n    return {\n        \"ar\": ar,\n        \"recalls\": recalls,\n        \"thresholds\": thresholds,\n        \"gt_overlaps\": gt_overlaps,\n        \"num_pos\": num_pos,\n    }\n\n\ndef _evaluate_predictions_on_coco(coco_gt, coco_results, iou_type, kpt_oks_sigmas=None,is_rela= False):\n    \"\"\"\n    Evaluate the coco results using COCOEval API.\n    \"\"\"\n    assert len(coco_results) > 0\n\n    if iou_type == \"segm\":\n        coco_results = copy.deepcopy(coco_results)\n        # When evaluating mask AP, if the results contain bbox, cocoapi will\n        # use the box area as the area of the instance, instead of the mask area.\n        # This leads to a different definition of small/medium/large.\n        # We remove the bbox field to let mask AP use mask area.\n        for c in coco_results:\n            c.pop(\"bbox\", None)\n    if is_rela:\n        coco_dt = 
coco_gt.loadRes_rela(coco_results)\n    else:\n        coco_dt = coco_gt.loadRes(coco_results)\n\n    coco_eval = COCOeval(coco_gt, coco_dt, iou_type)\n    # Use the COCO default keypoint OKS sigmas unless overrides are specified\n    if kpt_oks_sigmas:\n        coco_eval.params.kpt_oks_sigmas = np.array(kpt_oks_sigmas)\n    if is_rela:\n        coco_eval.evaluate_rela()\n    else:\n        coco_eval.evaluate()\n    coco_eval.accumulate()\n    coco_eval.summarize()\n\n    return coco_eval\n"
  },
  {
    "path": "detectron2/evaluation/evaluation/__init__.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nfrom .cityscapes_evaluation import CityscapesEvaluator\nfrom .coco_evaluation import COCOEvaluator\nfrom .evaluator import DatasetEvaluator, DatasetEvaluators, inference_context, inference_on_dataset\nfrom .lvis_evaluation import LVISEvaluator\nfrom .panoptic_evaluation import COCOPanopticEvaluator\nfrom .pascal_voc_evaluation import PascalVOCDetectionEvaluator\nfrom .sem_seg_evaluation import SemSegEvaluator\nfrom .testing import print_csv_format, verify_results\n\n__all__ = [k for k in globals().keys() if not k.startswith(\"_\")]\n"
  },
  {
    "path": "detectron2/evaluation/evaluation/cityscapes_evaluation.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport glob\nimport logging\nimport os\nimport tempfile\nfrom collections import OrderedDict\nimport torch\nfrom PIL import Image\n\nfrom detectron2.data import MetadataCatalog\nfrom detectron2.utils import comm\n\nfrom .evaluator import DatasetEvaluator\n\n\nclass CityscapesEvaluator(DatasetEvaluator):\n    \"\"\"\n    Evaluate instance segmentation results using cityscapes API.\n\n    Note:\n        * It does not work in multi-machine distributed training.\n        * It contains a synchronization, therefore has to be used on all ranks.\n    \"\"\"\n\n    def __init__(self, dataset_name):\n        \"\"\"\n        Args:\n            dataset_name (str): the name of the dataset.\n                It must have the following metadata associated with it:\n                \"thing_classes\", \"gt_dir\".\n        \"\"\"\n        self._metadata = MetadataCatalog.get(dataset_name)\n        self._cpu_device = torch.device(\"cpu\")\n        self._logger = logging.getLogger(__name__)\n\n    def reset(self):\n        self._working_dir = tempfile.TemporaryDirectory(prefix=\"cityscapes_eval_\")\n        self._temp_dir = self._working_dir.name\n        # All workers will write to the same results directory\n        # TODO this does not work in distributed training\n        self._temp_dir = comm.all_gather(self._temp_dir)[0]\n        if self._temp_dir != self._working_dir.name:\n            self._working_dir.cleanup()\n        self._logger.info(\n            \"Writing cityscapes results to temporary directory {} ...\".format(self._temp_dir)\n        )\n\n    def process(self, inputs, outputs):\n        from cityscapesscripts.helpers.labels import name2label\n\n        for input, output in zip(inputs, outputs):\n            file_name = input[\"file_name\"]\n            basename = os.path.splitext(os.path.basename(file_name))[0]\n            pred_txt = os.path.join(self._temp_dir, basename + 
\"_pred.txt\")\n\n            output = output[\"instances\"].to(self._cpu_device)\n            num_instances = len(output)\n            with open(pred_txt, \"w\") as fout:\n                for i in range(num_instances):\n                    pred_class = output.pred_classes[i]\n                    classes = self._metadata.thing_classes[pred_class]\n                    class_id = name2label[classes].id\n                    score = output.scores[i]\n                    mask = output.pred_masks[i].numpy().astype(\"uint8\")\n                    png_filename = os.path.join(\n                        self._temp_dir, basename + \"_{}_{}.png\".format(i, classes)\n                    )\n\n                    Image.fromarray(mask * 255).save(png_filename)\n                    fout.write(\"{} {} {}\\n\".format(os.path.basename(png_filename), class_id, score))\n\n    def evaluate(self):\n        \"\"\"\n        Returns:\n            dict: has a key \"segm\", whose value is a dict of \"AP\" and \"AP50\".\n        \"\"\"\n        comm.synchronize()\n        if comm.get_rank() > 0:\n            return\n        os.environ[\"CITYSCAPES_DATASET\"] = os.path.abspath(\n            os.path.join(self._metadata.gt_dir, \"..\", \"..\")\n        )\n        # Load the Cityscapes eval script *after* setting the required env var,\n        # since the script reads CITYSCAPES_DATASET into global variables at load time.\n        import cityscapesscripts.evaluation.evalInstanceLevelSemanticLabeling as cityscapes_eval\n\n        self._logger.info(\"Evaluating results under {} ...\".format(self._temp_dir))\n\n        # set some global states in cityscapes evaluation API, before evaluating\n        cityscapes_eval.args.predictionPath = os.path.abspath(self._temp_dir)\n        cityscapes_eval.args.predictionWalk = None\n        cityscapes_eval.args.JSONOutput = False\n        cityscapes_eval.args.colorized = False\n        cityscapes_eval.args.gtInstancesFile = os.path.join(self._temp_dir, 
\"gtInstances.json\")\n\n        # These lines are adopted from\n        # https://github.com/mcordts/cityscapesScripts/blob/master/cityscapesscripts/evaluation/evalInstanceLevelSemanticLabeling.py # noqa\n        groundTruthImgList = glob.glob(cityscapes_eval.args.groundTruthSearch)\n        assert len(\n            groundTruthImgList\n        ), \"Cannot find any ground truth images to use for evaluation. Searched for: {}\".format(\n            cityscapes_eval.args.groundTruthSearch\n        )\n        predictionImgList = []\n        for gt in groundTruthImgList:\n            predictionImgList.append(cityscapes_eval.getPrediction(gt, cityscapes_eval.args))\n        results = cityscapes_eval.evaluateImgLists(\n            predictionImgList, groundTruthImgList, cityscapes_eval.args\n        )[\"averages\"]\n\n        ret = OrderedDict()\n        ret[\"segm\"] = {\"AP\": results[\"allAp\"] * 100, \"AP50\": results[\"allAp50%\"] * 100}\n        self._working_dir.cleanup()\n        return ret\n"
  },
  {
    "path": "detectron2/evaluation/evaluation/coco_evaluation.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport contextlib\nimport copy\nimport io\nimport itertools\nimport json\nimport logging\nimport numpy as np\nimport os\nimport pickle\nfrom collections import OrderedDict\nimport pycocotools.mask as mask_util\nimport torch\nfrom fvcore.common.file_io import PathManager\nfrom pycocotools.coco import COCO\nfrom pycocotools.cocoeval import COCOeval\nfrom tabulate import tabulate\n\nimport detectron2.utils.comm as comm\nfrom detectron2.data import MetadataCatalog\nfrom detectron2.structures import Boxes, BoxMode, pairwise_iou\nfrom detectron2.utils.logger import create_small_table\n\nfrom .evaluator import DatasetEvaluator\n\n\nclass COCOEvaluator(DatasetEvaluator):\n    \"\"\"\n    Evaluate object proposal, instance detection/segmentation, keypoint detection\n    outputs using COCO's metrics and APIs.\n    \"\"\"\n\n    def __init__(self, dataset_name, cfg, distributed, output_dir=None):\n        \"\"\"\n        Args:\n            dataset_name (str): name of the dataset to be evaluated.\n                It must have the following corresponding metadata:\n                    \"json_file\": the path to the COCO format annotation\n            cfg (CfgNode): config instance\n            distributed (True): if True, will collect results from all ranks for evaluation.\n                Otherwise, will evaluate the results in the current process.\n            output_dir (str): optional, an output directory to dump results.\n        \"\"\"\n        self._tasks = self._tasks_from_config(cfg)\n        self._distributed = distributed\n        self._output_dir = output_dir\n\n        self._cpu_device = torch.device(\"cpu\")\n        self._logger = logging.getLogger(__name__)\n\n        self._metadata = MetadataCatalog.get(dataset_name)\n        json_file = PathManager.get_local_path(self._metadata.json_file)\n        with contextlib.redirect_stdout(io.StringIO()):\n            self._coco_api = 
COCO(json_file)\n\n        self._kpt_oks_sigmas = cfg.TEST.KEYPOINT_OKS_SIGMAS\n        # Test set json files do not contain annotations (evaluation must be\n        # performed using the COCO evaluation server).\n        self._do_evaluation = len(self._coco_api.getAnnIds()) > 0\n\n    def reset(self):\n        self._predictions = []\n        self._coco_results = []\n\n    def _tasks_from_config(self, cfg):\n        \"\"\"\n        Returns:\n            tuple[str]: tasks that can be evaluated under the given configuration.\n        \"\"\"\n        tasks = (\"bbox\",)\n        if cfg.MODEL.MASK_ON:\n            tasks = tasks + (\"segm\",)\n        if cfg.MODEL.KEYPOINT_ON:\n            tasks = tasks + (\"keypoints\",)\n        return tasks\n\n    def process(self, inputs, outputs):\n        \"\"\"\n        Args:\n            inputs: the inputs to a COCO model (e.g., GeneralizedRCNN).\n                It is a list of dict. Each dict corresponds to an image and\n                contains keys like \"height\", \"width\", \"file_name\", \"image_id\".\n            outputs: the outputs of a COCO model. 
It is a list of dicts with key\n                \"instances\" that contains :class:`Instances`.\n        \"\"\"\n        results,associations = outputs\n        for input, result, association in zip(inputs, results, associations):\n            prediction = {\"image_id\": input[\"image_id\"]}\n            # TODO this is ugly\n            if \"instances\" in result:\n                instances = result[\"instances\"].to(self._cpu_device)\n\n                if instances.has(\"pred_masks\"):\n                    # use RLE to encode the masks, because they are too large and takes memory\n                    # since this evaluator stores results of the entire dataset\n                    # Our model may predict bool array, but cocoapi expects uint8\n                    rles = [\n                        mask_util.encode(np.array(mask[:, :, None], order=\"F\", dtype=\"uint8\"))[0]\n                        for mask in instances.pred_masks\n                    ]\n                    for rle in rles:\n                        # \"counts\" is an array encoded by mask_util as a byte-stream. Python3's\n                        # json writer which always produces strings cannot serialize a bytestream\n                        # unless you decode it. 
Thankfully, utf-8 works out (which is also what\n                        # the pycocotools/_mask.pyx does).\n                        rle[\"counts\"] = rle[\"counts\"].decode(\"utf-8\")\n                    instances.pred_masks_rle = rles\n                    instances.remove(\"pred_masks\")\n\n                prediction[\"instances\"] = instances_to_json(instances, input[\"image_id\"])\n            if \"instances\" in association:\n                instances = association[\"instances\"].to(self._cpu_device)\n                if instances.has(\"pred_masks\"):\n\n                    rles = [\n                        mask_util.encode(np.array(mask[:,:],order='F',dtype='uint8'))[0]\n                        for mask in instances.pred_masks\n                    ]\n                    for rle in rles:\n                        rle[\"counts\"] = rle[\"counts\"].decode(\"utf-8\")\n                    instances.pred_masks_rle = rles\n                    instances.remove(\"pred_masks\")\n                prediction[\"associations\"] = instances_to_json(instances, input[\"image_id\"])\n            if \"proposals\" in result:\n                prediction[\"proposals\"] = result[\"proposals\"].to(self._cpu_device)\n            if \"rela_proposals\" in association:\n                prediction[\"rela_proposals\"] = association[\"proposals\"].to(self._cpu_device)\n            self._predictions.append(prediction)\n\n    def evaluate(self): \n        if self._distributed:\n            comm.synchronize()\n            self._predictions = comm.gather(self._predictions, dst=0)\n            self._predictions = list(itertools.chain(*self._predictions))\n\n            if not comm.is_main_process():\n                return {},{}\n        \n        if len(self._predictions) == 0:\n            self._logger.warning(\"[COCOEvaluator] Did not receive valid predictions.\")\n            return {}\n\n        if self._output_dir:\n            PathManager.mkdirs(self._output_dir)\n            file_path = 
os.path.join(self._output_dir, \"instances_predictions.pth\")\n            with PathManager.open(file_path, \"wb\") as f:\n                torch.save(self._predictions, f)\n\n        self._results = OrderedDict()\n        self._association_results = OrderedDict()\n\n        if \"proposals\" in self._predictions[0]: #and \"rela_proposals\" in self._predictions[0]:\n            self._eval_box_proposals()\n\n        if \"instances\" in self._predictions[0]: #and \"associations\" in self._predictions[0]:\n            self._eval_predictions(set(self._tasks))\n        # Copy so the caller can do whatever with results\n        return copy.deepcopy(self._results),copy.deepcopy(self._association_results)\n\n    def _eval_predictions(self, tasks):\n        \"\"\"\n        Evaluate self._predictions on the given tasks.\n        Fill self._results with the metrics of the tasks.\n        \"\"\"\n        self._logger.info(\"Preparing results for COCO format ...\")\n        self._coco_results = list(itertools.chain(*[x[\"instances\"] for x in self._predictions]))\n        self._rela_results = list(itertools.chain(*[x[\"associations\"] for x in self._predictions]))\n        # unmap the category ids for COCO\n        if hasattr(self._metadata, \"thing_dataset_id_to_contiguous_id\"):\n            reverse_id_mapping = {\n                v: k for k, v in self._metadata.thing_dataset_id_to_contiguous_id.items()\n            }\n            for result in self._coco_results:\n                result[\"category_id\"] = reverse_id_mapping[result[\"category_id\"]]\n\n        if hasattr(self._metadata, \"association_dataset_id_to_contiguous_id\"):\n            reverse_rela_if_mapping = {\n                v: k for k,v in self._metadata.association_dataset_id_to_contiguous_id.items()\n            }\n            for result in self._rela_results:\n                result[\"category_id\"] = reverse_rela_if_mapping[result[\"category_id\"]]\n        if self._output_dir:\n            file_path = 
os.path.join(self._output_dir, \"coco_instances_results.json\")\n            self._logger.info(\"Saving results to {}\".format(file_path))\n            with PathManager.open(file_path, \"w\") as f:\n                f.write(json.dumps(self._coco_results))\n                f.flush()\n            file_path = os.path.join(self._output_dir, \"coco_association_results.json\")\n            self._logger.info(\"Saving association results to {}\".format(file_path))\n            with PathManager.open(file_path, \"w\") as f:\n                f.write(json.dumps(self._rela_results))\n                f.flush()\n        if not self._do_evaluation:\n            self._logger.info(\"Annotations are not available for evaluation.\")\n            return\n        self._logger.info(\"Evaluating predictions ...\")\n        tasks = ('bbox','segm')\n        for task in sorted(tasks):\n            coco_eval = (\n                _evaluate_predictions_on_coco(\n                    self._coco_api, self._coco_results, task, kpt_oks_sigmas=self._kpt_oks_sigmas\n                )\n                if len(self._coco_results) > 0\n                else None  # cocoapi does not handle empty results very well\n            )\n\n            res = self._derive_coco_results(\n                coco_eval, task, class_names=self._metadata.get(\"thing_classes\")\n            )\n            self._results[task] = res\n        self._logger.info(\"Evaluating association predictions ...\")\n        tasks = ('bbox','segm')\n        for task in sorted(tasks):\n            coco_eval =(\n                _evaluate_predictions_on_coco(\n                    self._coco_api, self._rela_results, task, kpt_oks_sigmas=self._kpt_oks_sigmas,is_rela=True\n                )\n                if len(self._rela_results)>0\n                else None\n            )\n            res = self._derive_coco_results(\n                coco_eval, task, class_names=self._metadata.get(\"association_classes\")\n            )\n            
self._association_results[task] = res\n    def _eval_box_proposals(self):\n        \"\"\"\n        Evaluate the box proposals in self._predictions.\n        Fill self._results with the metrics for \"box_proposals\" task.\n        \"\"\"\n        if self._output_dir:\n            # Saving generated box proposals to file.\n            # Predicted box_proposals are in XYXY_ABS mode.\n            bbox_mode = BoxMode.XYXY_ABS.value\n            ids, boxes, objectness_logits = [], [], []\n            rela_boxes, rela_objectness_logits = [], []\n            for prediction in self._predictions:\n                ids.append(prediction[\"image_id\"])\n                boxes.append(prediction[\"proposals\"].proposal_boxes.tensor.numpy())\n                objectness_logits.append(prediction[\"proposals\"].objectness_logits.numpy())\n                rela_boxes.append(prediction[\"rela_proposals\"].proposal_boxes.tensor.numpy())\n                rela_objectness_logits.append(prediction[\"rela_proposals\"].objectness_logits.numpy())\n\n            proposal_data = {\n                \"boxes\": boxes,\n                \"objectness_logits\": objectness_logits,\n                \"ids\": ids,\n                \"bbox_mode\": bbox_mode,\n            }\n            rela_proposal_data = {\n                \"boxes\" : boxes,\n                \"objectness_logits\": objectness_logits,\n                \"ids\": ids,\n                \"bbox_mode\": bbox_mode,\n            }\n            with PathManager.open(os.path.join(self._output_dir, \"box_proposals.pkl\"), \"wb\") as f:\n                pickle.dump(proposal_data, f)\n            with PathManager.open(os.path.join(self._output_dir, \"rela_box_proposals.pkl\"), \"wb\") as f:\n                pickle.dump(rela_proposal_data, f)\n        if not self._do_evaluation:\n            self._logger.info(\"Annotations are not available for evaluation.\")\n            return\n\n        self._logger.info(\"Evaluating bbox proposals ...\")\n        res = 
{}\n        rela_res = {}\n        areas = {\"all\": \"\", \"small\": \"s\", \"medium\": \"m\", \"large\": \"l\"}\n        for limit in [100, 1000]:\n            for area, suffix in areas.items():\n                stats = _evaluate_box_proposals(\n                    self._predictions, self._coco_api, area=area, limit=limit\n                )\n                key = \"AR{}@{:d}\".format(suffix, limit)\n                res[key] = float(stats[\"ar\"].item() * 100)\n                stats = _evaluate_box_proposals(\n                    self._predictions, self._coco_api, area = area, limit =limit\n                )\n                key = \"AR{}@{:d}\".format(suffix, limit)\n                rela_res[key] = float(stats[\"ar\"].item() * 100)\n\n        self._logger.info(\"Proposal metrics: \\n\" + create_small_table(res))\n        self._results[\"box_proposals\"] = res\n        self._association_results[\"box_proposals\"] = rela_res\n\n    def _derive_coco_results(self, coco_eval, iou_type, class_names=None):\n        \"\"\"\n        Derive the desired score numbers from summarized COCOeval.\n\n        Args:\n            coco_eval (None or COCOEval): None represents no predictions from model.\n            iou_type (str):\n            class_names (None or list[str]): if provided, will use it to predict\n                per-category AP.\n\n        Returns:\n            a dict of {metric name: score}\n        \"\"\"\n\n        metrics = {\n            \"bbox\": [\"AP\", \"AP50\", \"AP75\", \"APs\", \"APm\", \"APl\"],\n            \"segm\": [\"AP\", \"AP50\", \"AP75\", \"APs\", \"APm\", \"APl\"],\n            \"keypoints\": [\"AP\", \"AP50\", \"AP75\", \"APm\", \"APl\"],\n        }[iou_type]\n\n        if coco_eval is None:\n            self._logger.warn(\"No predictions from the model! 
Set scores to -1\")\n            return {metric: -1 for metric in metrics}\n\n        # the standard metrics\n        results = {metric: float(coco_eval.stats[idx] * 100) for idx, metric in enumerate(metrics)}\n        self._logger.info(\n            \"Evaluation results for {}: \\n\".format(iou_type) + create_small_table(results)\n        )\n\n        if class_names is None or len(class_names) <= 1:\n            return results\n        # Compute per-category AP\n        # from https://github.com/facebookresearch/Detectron/blob/a6a835f5b8208c45d0dce217ce9bbda915f44df7/detectron/datasets/json_dataset_evaluator.py#L222-L252 # noqa\n        precisions = coco_eval.eval[\"precision\"]\n        # precision has dims (iou, recall, cls, area range, max dets)\n        assert len(class_names) == precisions.shape[2]\n\n        results_per_category = []\n        for idx, name in enumerate(class_names):\n            # area range index 0: all area ranges\n            # max dets index -1: typically 100 per image\n            precision = precisions[:, :, idx, 0, -1]\n            precision = precision[precision > -1]\n            ap = np.mean(precision) if precision.size else float(\"nan\")\n            results_per_category.append((\"{}\".format(name), float(ap * 100)))\n\n        # tabulate it\n        N_COLS = min(6, len(results_per_category) * 2)\n        results_flatten = list(itertools.chain(*results_per_category))\n        results_2d = itertools.zip_longest(*[results_flatten[i::N_COLS] for i in range(N_COLS)])\n        table = tabulate(\n            results_2d,\n            tablefmt=\"pipe\",\n            floatfmt=\".3f\",\n            headers=[\"category\", \"AP\"] * (N_COLS // 2),\n            numalign=\"left\",\n        )\n        self._logger.info(\"Per-category {} AP: \\n\".format(iou_type) + table)\n\n        results.update({\"AP-\" + name: ap for name, ap in results_per_category})\n        return results\n\n\ndef instances_to_json(instances, img_id):\n    num_instance = 
len(instances)\n    has_light = instances.has(\"pred_light\")\n    if num_instance == 0:\n        return []\n    try:\n        boxes = instances.pred_boxes.tensor.numpy()\n        boxes = BoxMode.convert(boxes, BoxMode.XYXY_ABS, BoxMode.XYWH_ABS)\n        boxes = boxes.tolist()\n        scores = instances.scores.tolist()\n        classes = instances.pred_classes.tolist()\n        association = instances.pred_associations\n        if has_light:\n            light = instances.pred_light\n    except:\n        boxes = instances.pred_boxes\n        boxes =BoxMode.convert(np.array(boxes,dtype='float'),BoxMode.XYXY_ABS,BoxMode.XYWH_ABS)\n        boxes = boxes.tolist()\n        scores = instances.scores\n        association = instances.pred_associations\n        classes = instances.pred_classes\n        if has_light:\n            light = instances.pred_light\n\n    has_mask = instances.has(\"pred_masks_rle\")\n    \n    if has_mask:\n        rles = instances.pred_masks_rle\n\n    has_keypoints = instances.has(\"pred_keypoints\")\n    if has_keypoints:\n        keypoints = instances.pred_keypoints\n\n    results = []\n    for k in range(num_instance):\n        result = {\n            \"image_id\": img_id,\n            \"category_id\": classes[k],\n            \"bbox\": boxes[k],\n            \"score\": scores[k],\n            'association_id':association[k]\n        }\n        if has_light:\n            result[\"light\"] = light[k]\n        if has_mask:\n            result[\"segmentation\"] = rles[k]\n        if has_keypoints:\n            # In COCO annotations,\n            # keypoints coordinates are pixel indices.\n            # However our predictions are floating point coordinates.\n            # Therefore we subtract 0.5 to be consistent with the annotation format.\n            # This is the inverse of data loading logic in `datasets/coco.py`.\n            keypoints[k][:,:2] -= 0.5\n            result[\"keypoints\"] = keypoints[k].flatten().tolist()\n        
results.append(result)\n    return results\n\n\n# inspired from Detectron:\n# https://github.com/facebookresearch/Detectron/blob/a6a835f5b8208c45d0dce217ce9bbda915f44df7/detectron/datasets/json_dataset_evaluator.py#L255 # noqa\ndef _evaluate_box_proposals(dataset_predictions, coco_api, thresholds=None, area=\"all\", limit=None):\n    \"\"\"\n    Evaluate detection proposal recall metrics. This function is a much\n    faster alternative to the official COCO API recall evaluation code. However,\n    it produces slightly different results.\n    \"\"\"\n    # Record max overlap value for each gt box\n    # Return vector of overlap values\n    areas = {\n        \"all\": 0,\n        \"small\": 1,\n        \"medium\": 2,\n        \"large\": 3,\n        \"96-128\": 4,\n        \"128-256\": 5,\n        \"256-512\": 6,\n        \"512-inf\": 7,\n    }\n    area_ranges = [\n        [0 ** 2, 1e5 ** 2],  # all\n        [0 ** 2, 32 ** 2],  # small\n        [32 ** 2, 96 ** 2],  # medium\n        [96 ** 2, 1e5 ** 2],  # large\n        [96 ** 2, 128 ** 2],  # 96-128\n        [128 ** 2, 256 ** 2],  # 128-256\n        [256 ** 2, 512 ** 2],  # 256-512\n        [512 ** 2, 1e5 ** 2],\n    ]  # 512-inf\n    assert area in areas, \"Unknown area range: {}\".format(area)\n    area_range = area_ranges[areas[area]]\n    gt_overlaps = []\n    num_pos = 0\n\n    for prediction_dict in dataset_predictions:\n        predictions = prediction_dict[\"proposals\"]\n\n        # sort predictions in descending order\n        # TODO maybe remove this and make it explicit in the documentation\n        inds = predictions.objectness_logits.sort(descending=True)[1]\n        predictions = predictions[inds]\n\n        ann_ids = coco_api.getAnnIds(imgIds=prediction_dict[\"image_id\"])\n        anno = coco_api.loadAnns(ann_ids)\n        gt_boxes = [\n            BoxMode.convert(obj[\"bbox\"], BoxMode.XYWH_ABS, BoxMode.XYXY_ABS)\n            for obj in anno\n            if obj[\"iscrowd\"] == 0\n        ]\n       
 gt_boxes = torch.as_tensor(gt_boxes).reshape(-1, 4)  # guard against no boxes\n        gt_boxes = Boxes(gt_boxes)\n        gt_areas = torch.as_tensor([obj[\"area\"] for obj in anno if obj[\"iscrowd\"] == 0])\n\n        if len(gt_boxes) == 0 or len(predictions) == 0:\n            continue\n\n        valid_gt_inds = (gt_areas >= area_range[0]) & (gt_areas <= area_range[1])\n        gt_boxes = gt_boxes[valid_gt_inds]\n\n        num_pos += len(gt_boxes)\n\n        if len(gt_boxes) == 0:\n            continue\n\n        if limit is not None and len(predictions) > limit:\n            predictions = predictions[:limit]\n\n        overlaps = pairwise_iou(predictions.proposal_boxes, gt_boxes)\n\n        _gt_overlaps = torch.zeros(len(gt_boxes))\n        for j in range(min(len(predictions), len(gt_boxes))):\n            # find which proposal box maximally covers each gt box\n            # and get the iou amount of coverage for each gt box\n            max_overlaps, argmax_overlaps = overlaps.max(dim=0)\n\n            # find which gt box is 'best' covered (i.e. 
'best' = most iou)\n            gt_ovr, gt_ind = max_overlaps.max(dim=0)\n            assert gt_ovr >= 0\n            # find the proposal box that covers the best covered gt box\n            box_ind = argmax_overlaps[gt_ind]\n            # record the iou coverage of this gt box\n            _gt_overlaps[j] = overlaps[box_ind, gt_ind]\n            assert _gt_overlaps[j] == gt_ovr\n            # mark the proposal box and the gt box as used\n            overlaps[box_ind, :] = -1\n            overlaps[:, gt_ind] = -1\n\n        # append recorded iou coverage level\n        gt_overlaps.append(_gt_overlaps)\n    gt_overlaps = torch.cat(gt_overlaps, dim=0)\n    gt_overlaps, _ = torch.sort(gt_overlaps)\n\n    if thresholds is None:\n        step = 0.05\n        thresholds = torch.arange(0.5, 0.95 + 1e-5, step, dtype=torch.float32)\n    recalls = torch.zeros_like(thresholds)\n    # compute recall for each iou threshold\n    for i, t in enumerate(thresholds):\n        recalls[i] = (gt_overlaps >= t).float().sum() / float(num_pos)\n    # ar = 2 * np.trapz(recalls, thresholds)\n    ar = recalls.mean()\n    return {\n        \"ar\": ar,\n        \"recalls\": recalls,\n        \"thresholds\": thresholds,\n        \"gt_overlaps\": gt_overlaps,\n        \"num_pos\": num_pos,\n    }\n\n\ndef _evaluate_predictions_on_coco(coco_gt, coco_results, iou_type, kpt_oks_sigmas=None,is_rela= False):\n    \"\"\"\n    Evaluate the coco results using COCOEval API.\n    \"\"\"\n    assert len(coco_results) > 0\n\n    if iou_type == \"segm\":\n        coco_results = copy.deepcopy(coco_results)\n        # When evaluating mask AP, if the results contain bbox, cocoapi will\n        # use the box area as the area of the instance, instead of the mask area.\n        # This leads to a different definition of small/medium/large.\n        # We remove the bbox field to let mask AP use mask area.\n        for c in coco_results:\n            c.pop(\"bbox\", None)\n    if is_rela:\n        coco_dt = 
coco_gt.loadRes_rela(coco_results)\n    else:\n        coco_dt = coco_gt.loadRes(coco_results)\n\n    coco_eval = COCOeval(coco_gt, coco_dt, iou_type)\n    # Use the COCO default keypoint OKS sigmas unless overrides are specified\n    if kpt_oks_sigmas:\n        coco_eval.params.kpt_oks_sigmas = np.array(kpt_oks_sigmas)\n    if is_rela:\n        coco_eval.evaluate_rela()\n    else:\n        coco_eval.evaluate()\n    coco_eval.accumulate()\n    coco_eval.summarize()\n\n    return coco_eval\n"
  },
  {
    "path": "detectron2/evaluation/evaluation/evaluator.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport datetime\nimport logging\nimport time\nfrom collections import OrderedDict\nfrom contextlib import contextmanager\nimport torch\n\nfrom detectron2.utils.comm import is_main_process\n\n\nclass DatasetEvaluator:\n    \"\"\"\n    Base class for a dataset evaluator.\n\n    The function :func:`inference_on_dataset` runs the model over\n    all samples in the dataset, and have a DatasetEvaluator to process the inputs/outputs.\n\n    This class will accumulate information of the inputs/outputs (by :meth:`process`),\n    and produce evaluation results in the end (by :meth:`evaluate`).\n    \"\"\"\n\n    def reset(self):\n        \"\"\"\n        Preparation for a new round of evaluation.\n        Should be called before starting a round of evaluation.\n        \"\"\"\n        pass\n\n    def process(self, input, output):\n        \"\"\"\n        Process an input/output pair.\n\n        Args:\n            input: the input that's used to call the model.\n            output: the return value of `model(output)`\n        \"\"\"\n        pass\n\n    def evaluate(self):\n        \"\"\"\n        Evaluate/summarize the performance, after processing all input/output pairs.\n\n        Returns:\n            dict:\n                A new evaluator class can return a dict of arbitrary format\n                as long as the user can process the results.\n                In our train_net.py, we expect the following format:\n\n                * key: the name of the task (e.g., bbox)\n                * value: a dict of {metric name: score}, e.g.: {\"AP50\": 80}\n        \"\"\"\n        pass\n\n\nclass DatasetEvaluators(DatasetEvaluator):\n    def __init__(self, evaluators):\n        assert len(evaluators)\n        super().__init__()\n        self._evaluators = evaluators\n\n    def reset(self):\n        for evaluator in self._evaluators:\n            evaluator.reset()\n\n    def process(self, input, 
output):\n        for evaluator in self._evaluators:\n            evaluator.process(input, output)\n\n    def evaluate(self):\n        results = OrderedDict()\n        for evaluator in self._evaluators:\n            result = evaluator.evaluate()\n            if is_main_process():\n                for k, v in result.items():\n                    assert (\n                        k not in results\n                    ), \"Different evaluators produce results with the same key {}\".format(k)\n                    results[k] = v\n        return results\n\n\ndef inference_on_dataset(model, data_loader, evaluator):\n    \"\"\"\n    Run model (in eval mode) on the data_loader and evaluate the metrics with evaluator.\n\n    Args:\n        model (nn.Module): a module which accepts an object from\n            `data_loader` and returns some outputs. It will be temporarily set to `eval` mode.\n\n            If you wish to evaluate a model in `training` mode instead, you can\n            wrap the given model and override its behavior of `.eval()` and `.train()`.\n        data_loader: an iterable object with a length.\n            The elements it generates will be the inputs to the model.\n        evaluator (DatasetEvaluator): the evaluator to run\n\n    Returns:\n        The return value of `evaluator.evaluate()`\n    \"\"\"\n    num_devices = torch.distributed.get_world_size() if torch.distributed.is_initialized() else 1\n    logger = logging.getLogger(__name__)\n    logger.info(\"Start inference on {} images\".format(len(data_loader)))\n\n    total = len(data_loader)  # inference data loader must have a fixed length\n    evaluator.reset()\n\n    logging_interval = 50\n    num_warmup = min(5, logging_interval - 1, total - 1)\n    start_time = time.time()\n    with inference_context(model), torch.no_grad():\n        for idx, inputs in enumerate(data_loader):\n            if idx == num_warmup:\n                start_time = time.time()\n\n            results,relations = 
model(inputs)\n            outputs = (results,relations)\n            evaluator.process(inputs, outputs)\n\n            if (idx + 1) % logging_interval == 0:\n                duration = time.time() - start_time\n                seconds_per_img = duration / (idx + 1 - num_warmup)\n                eta = datetime.timedelta(\n                    seconds=int(seconds_per_img * (total - num_warmup) - duration)\n                )\n                logger.info(\n                    \"Inference done {}/{}. {:.4f} s / img. ETA={}\".format(\n                        idx + 1, total, seconds_per_img, str(eta)\n                    )\n                )\n\n    # Measure the time only for this worker (before the synchronization barrier)\n    total_time = int(time.time() - start_time)\n    total_time_str = str(datetime.timedelta(seconds=total_time))\n    # NOTE this format is parsed by grep\n    logger.info(\n        \"Total inference time: {} ({:.6f} s / img per device, on {} devices)\".format(\n            total_time_str, total_time / (total - num_warmup), num_devices\n        )\n    )\n\n    results,relations = evaluator.evaluate()\n    # An evaluator may return None when not in main process.\n    # Replace it by an empty dict instead to make it easier for downstream code to handle\n    if results is None:\n        results = {}\n    return results,relations\n\n\n@contextmanager\ndef inference_context(model):\n    \"\"\"\n    A context where the model is temporarily changed to eval mode,\n    and restored to previous mode afterwards.\n\n    Args:\n        model: a torch Module\n    \"\"\"\n    training_mode = model.training\n    model.eval()\n    yield\n    model.train(training_mode)\n"
  },
  {
    "path": "detectron2/evaluation/evaluation/lvis_evaluation.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport copy\nimport itertools\nimport json\nimport logging\nimport numpy as np\nimport os\nimport pickle\nfrom collections import OrderedDict\nimport pycocotools.mask as mask_util\nimport torch\nfrom fvcore.common.file_io import PathManager\n\nimport detectron2.utils.comm as comm\nfrom detectron2.data import MetadataCatalog\nfrom detectron2.structures import Boxes, BoxMode, pairwise_iou\nfrom detectron2.utils.logger import create_small_table\n\nfrom .coco_evaluation import instances_to_json\nfrom .evaluator import DatasetEvaluator\n\n\nclass LVISEvaluator(DatasetEvaluator):\n    \"\"\"\n    Evaluate object proposal and instance detection/segmentation outputs using\n    LVIS's metrics and evaluation API.\n    \"\"\"\n\n    def __init__(self, dataset_name, cfg, distributed, output_dir=None):\n        \"\"\"\n        Args:\n            dataset_name (str): name of the dataset to be evaluated.\n                It must have the following corresponding metadata:\n                    \"json_file\": the path to the LVIS format annotation\n            cfg (CfgNode): config instance\n            distributed (True): if True, will collect results from all ranks for evaluation.\n                Otherwise, will evaluate the results in the current process.\n            output_dir (str): optional, an output directory to dump results.\n        \"\"\"\n        from lvis import LVIS\n\n        self._tasks = self._tasks_from_config(cfg)\n        self._distributed = distributed\n        self._output_dir = output_dir\n\n        self._cpu_device = torch.device(\"cpu\")\n        self._logger = logging.getLogger(__name__)\n\n        self._metadata = MetadataCatalog.get(dataset_name)\n        json_file = PathManager.get_local_path(self._metadata.json_file)\n        self._lvis_api = LVIS(json_file)\n        # Test set json files do not contain annotations (evaluation must be\n        # performed using the 
LVIS evaluation server).\n        self._do_evaluation = len(self._lvis_api.get_ann_ids()) > 0\n\n    def reset(self):\n        self._predictions = []\n        self._lvis_results = []\n\n    def _tasks_from_config(self, cfg):\n        \"\"\"\n        Returns:\n            tuple[str]: tasks that can be evaluated under the given configuration.\n        \"\"\"\n        tasks = (\"bbox\",)\n        if cfg.MODEL.MASK_ON:\n            tasks = tasks + (\"segm\",)\n        return tasks\n\n    def process(self, inputs, outputs):\n        \"\"\"\n        Args:\n            inputs: the inputs to a LVIS model (e.g., GeneralizedRCNN).\n                It is a list of dict. Each dict corresponds to an image and\n                contains keys like \"height\", \"width\", \"file_name\", \"image_id\".\n            outputs: the outputs of a LVIS model. It is a list of dicts with key\n                \"instances\" that contains :class:`Instances`.\n        \"\"\"\n        for input, output in zip(inputs, outputs):\n            prediction = {\"image_id\": input[\"image_id\"]}\n\n            # TODO this is ugly\n            if \"instances\" in output:\n                instances = output[\"instances\"].to(self._cpu_device)\n\n                if instances.has(\"pred_masks\"):\n                    # use RLE to encode the masks, because they are too large and takes memory\n                    # since this evaluator stores outputs of the entire dataset\n                    rles = [\n                        mask_util.encode(np.array(mask[:, :, None], order=\"F\", dtype=\"uint8\"))[0]\n                        for mask in instances.pred_masks\n                    ]\n                    for rle in rles:\n                        # \"counts\" is an array encoded by mask_util as a byte-stream. Python3's\n                        # json writer which always produces strings cannot serialize a bytestream\n                        # unless you decode it. 
Thankfully, utf-8 works out (which is also what\n                        # the pycocotools/_mask.pyx does).\n                        rle[\"counts\"] = rle[\"counts\"].decode(\"utf-8\")\n                    instances.pred_masks_rle = rles\n                    instances.remove(\"pred_masks\")\n\n                prediction[\"instances\"] = instances_to_json(instances, input[\"image_id\"])\n            if \"proposals\" in output:\n                prediction[\"proposals\"] = output[\"proposals\"].to(self._cpu_device)\n            self._predictions.append(prediction)\n\n    def evaluate(self):\n        if self._distributed:\n            comm.synchronize()\n            self._predictions = comm.gather(self._predictions, dst=0)\n            self._predictions = list(itertools.chain(*self._predictions))\n\n            if not comm.is_main_process():\n                return\n\n        if len(self._predictions) == 0:\n            self._logger.warning(\"[LVISEvaluator] Did not receive valid predictions.\")\n            return {}\n\n        if self._output_dir:\n            PathManager.mkdirs(self._output_dir)\n            file_path = os.path.join(self._output_dir, \"instances_predictions.pth\")\n            with PathManager.open(file_path, \"wb\") as f:\n                torch.save(self._predictions, f)\n\n        self._results = OrderedDict()\n        if \"proposals\" in self._predictions[0]:\n            self._eval_box_proposals()\n        if \"instances\" in self._predictions[0]:\n            self._eval_predictions(set(self._tasks))\n        # Copy so the caller can do whatever with results\n        return copy.deepcopy(self._results)\n\n    def _eval_predictions(self, tasks):\n        \"\"\"\n        Evaluate self._predictions on the given tasks.\n        Fill self._results with the metrics of the tasks.\n        \"\"\"\n        self._logger.info(\"Preparing results in the LVIS format ...\")\n        self._lvis_results = list(itertools.chain(*[x[\"instances\"] for x in 
self._predictions]))\n\n        # unmap the category ids for LVIS (from 0-indexed to 1-indexed)\n        for result in self._lvis_results:\n            result[\"category_id\"] += 1\n\n        if self._output_dir:\n            file_path = os.path.join(self._output_dir, \"lvis_instances_results.json\")\n            self._logger.info(\"Saving results to {}\".format(file_path))\n            with PathManager.open(file_path, \"w\") as f:\n                f.write(json.dumps(self._lvis_results))\n                f.flush()\n\n        if not self._do_evaluation:\n            self._logger.info(\"Annotations are not available for evaluation.\")\n            return\n\n        self._logger.info(\"Evaluating predictions ...\")\n        for task in sorted(tasks):\n            res = _evaluate_predictions_on_lvis(\n                self._lvis_api,\n                self._lvis_results,\n                task,\n                class_names=self._metadata.get(\"thing_classes\"),\n            )\n            self._results[task] = res\n\n    def _eval_box_proposals(self):\n        \"\"\"\n        Evaluate the box proposals in self._predictions.\n        Fill self._results with the metrics for \"box_proposals\" task.\n        \"\"\"\n        if self._output_dir:\n            # Saving generated box proposals to file.\n            # Predicted box_proposals are in XYXY_ABS mode.\n            bbox_mode = BoxMode.XYXY_ABS.value\n            ids, boxes, objectness_logits = [], [], []\n            for prediction in self._predictions:\n                ids.append(prediction[\"image_id\"])\n                boxes.append(prediction[\"proposals\"].proposal_boxes.tensor.numpy())\n                objectness_logits.append(prediction[\"proposals\"].objectness_logits.numpy())\n\n            proposal_data = {\n                \"boxes\": boxes,\n                \"objectness_logits\": objectness_logits,\n                \"ids\": ids,\n                \"bbox_mode\": bbox_mode,\n            }\n            with 
PathManager.open(os.path.join(self._output_dir, \"box_proposals.pkl\"), \"wb\") as f:\n                pickle.dump(proposal_data, f)\n\n        if not self._do_evaluation:\n            self._logger.info(\"Annotations are not available for evaluation.\")\n            return\n\n        self._logger.info(\"Evaluating bbox proposals ...\")\n        res = {}\n        areas = {\"all\": \"\", \"small\": \"s\", \"medium\": \"m\", \"large\": \"l\"}\n        for limit in [100, 1000]:\n            for area, suffix in areas.items():\n                stats = _evaluate_box_proposals(\n                    self._predictions, self._lvis_api, area=area, limit=limit\n                )\n                key = \"AR{}@{:d}\".format(suffix, limit)\n                res[key] = float(stats[\"ar\"].item() * 100)\n        self._logger.info(\"Proposal metrics: \\n\" + create_small_table(res))\n        self._results[\"box_proposals\"] = res\n\n\n# inspired from Detectron:\n# https://github.com/facebookresearch/Detectron/blob/a6a835f5b8208c45d0dce217ce9bbda915f44df7/detectron/datasets/json_dataset_evaluator.py#L255 # noqa\ndef _evaluate_box_proposals(dataset_predictions, lvis_api, thresholds=None, area=\"all\", limit=None):\n    \"\"\"\n    Evaluate detection proposal recall metrics. This function is a much\n    faster alternative to the official LVIS API recall evaluation code. 
However,\n    it produces slightly different results.\n    \"\"\"\n    # Record max overlap value for each gt box\n    # Return vector of overlap values\n    areas = {\n        \"all\": 0,\n        \"small\": 1,\n        \"medium\": 2,\n        \"large\": 3,\n        \"96-128\": 4,\n        \"128-256\": 5,\n        \"256-512\": 6,\n        \"512-inf\": 7,\n    }\n    area_ranges = [\n        [0 ** 2, 1e5 ** 2],  # all\n        [0 ** 2, 32 ** 2],  # small\n        [32 ** 2, 96 ** 2],  # medium\n        [96 ** 2, 1e5 ** 2],  # large\n        [96 ** 2, 128 ** 2],  # 96-128\n        [128 ** 2, 256 ** 2],  # 128-256\n        [256 ** 2, 512 ** 2],  # 256-512\n        [512 ** 2, 1e5 ** 2],\n    ]  # 512-inf\n    assert area in areas, \"Unknown area range: {}\".format(area)\n    area_range = area_ranges[areas[area]]\n    gt_overlaps = []\n    num_pos = 0\n\n    for prediction_dict in dataset_predictions:\n        predictions = prediction_dict[\"proposals\"]\n\n        # sort predictions in descending order\n        # TODO maybe remove this and make it explicit in the documentation\n        inds = predictions.objectness_logits.sort(descending=True)[1]\n        predictions = predictions[inds]\n\n        ann_ids = lvis_api.get_ann_ids(img_ids=[prediction_dict[\"image_id\"]])\n        anno = lvis_api.load_anns(ann_ids)\n        gt_boxes = [\n            BoxMode.convert(obj[\"bbox\"], BoxMode.XYWH_ABS, BoxMode.XYXY_ABS) for obj in anno\n        ]\n        gt_boxes = torch.as_tensor(gt_boxes).reshape(-1, 4)  # guard against no boxes\n        gt_boxes = Boxes(gt_boxes)\n        gt_areas = torch.as_tensor([obj[\"area\"] for obj in anno])\n\n        if len(gt_boxes) == 0 or len(predictions) == 0:\n            continue\n\n        valid_gt_inds = (gt_areas >= area_range[0]) & (gt_areas <= area_range[1])\n        gt_boxes = gt_boxes[valid_gt_inds]\n\n        num_pos += len(gt_boxes)\n\n        if len(gt_boxes) == 0:\n            continue\n\n        if limit is not None and 
len(predictions) > limit:\n            predictions = predictions[:limit]\n\n        overlaps = pairwise_iou(predictions.proposal_boxes, gt_boxes)\n\n        _gt_overlaps = torch.zeros(len(gt_boxes))\n        for j in range(min(len(predictions), len(gt_boxes))):\n            # find which proposal box maximally covers each gt box\n            # and get the iou amount of coverage for each gt box\n            max_overlaps, argmax_overlaps = overlaps.max(dim=0)\n\n            # find which gt box is 'best' covered (i.e. 'best' = most iou)\n            gt_ovr, gt_ind = max_overlaps.max(dim=0)\n            assert gt_ovr >= 0\n            # find the proposal box that covers the best covered gt box\n            box_ind = argmax_overlaps[gt_ind]\n            # record the iou coverage of this gt box\n            _gt_overlaps[j] = overlaps[box_ind, gt_ind]\n            assert _gt_overlaps[j] == gt_ovr\n            # mark the proposal box and the gt box as used\n            overlaps[box_ind, :] = -1\n            overlaps[:, gt_ind] = -1\n\n        # append recorded iou coverage level\n        gt_overlaps.append(_gt_overlaps)\n    gt_overlaps = torch.cat(gt_overlaps, dim=0)\n    gt_overlaps, _ = torch.sort(gt_overlaps)\n\n    if thresholds is None:\n        step = 0.05\n        thresholds = torch.arange(0.5, 0.95 + 1e-5, step, dtype=torch.float32)\n    recalls = torch.zeros_like(thresholds)\n    # compute recall for each iou threshold\n    for i, t in enumerate(thresholds):\n        recalls[i] = (gt_overlaps >= t).float().sum() / float(num_pos)\n    # ar = 2 * np.trapz(recalls, thresholds)\n    ar = recalls.mean()\n    return {\n        \"ar\": ar,\n        \"recalls\": recalls,\n        \"thresholds\": thresholds,\n        \"gt_overlaps\": gt_overlaps,\n        \"num_pos\": num_pos,\n    }\n\n\ndef _evaluate_predictions_on_lvis(lvis_gt, lvis_results, iou_type, class_names=None):\n    \"\"\"\n    Args:\n        iou_type (str):\n        kpt_oks_sigmas (list[float]):\n        
class_names (None or list[str]): if provided, will use it to predict\n            per-category AP.\n\n    Returns:\n        a dict of {metric name: score}\n    \"\"\"\n    metrics = {\n        \"bbox\": [\"AP\", \"AP50\", \"AP75\", \"APs\", \"APm\", \"APl\", \"APr\", \"APc\", \"APf\"],\n        \"segm\": [\"AP\", \"AP50\", \"AP75\", \"APs\", \"APm\", \"APl\", \"APr\", \"APc\", \"APf\"],\n    }[iou_type]\n\n    logger = logging.getLogger(__name__)\n\n    if len(lvis_results) == 0:  # TODO: check if needed\n        logger.warn(\"No predictions from the model! Set scores to -1\")\n        return {metric: -1 for metric in metrics}\n\n    if iou_type == \"segm\":\n        lvis_results = copy.deepcopy(lvis_results)\n        # When evaluating mask AP, if the results contain bbox, LVIS API will\n        # use the box area as the area of the instance, instead of the mask area.\n        # This leads to a different definition of small/medium/large.\n        # We remove the bbox field to let mask AP use mask area.\n        for c in lvis_results:\n            c.pop(\"bbox\", None)\n\n    from lvis import LVISEval, LVISResults\n\n    lvis_results = LVISResults(lvis_gt, lvis_results)\n    lvis_eval = LVISEval(lvis_gt, lvis_results, iou_type)\n    lvis_eval.run()\n    lvis_eval.print_results()\n\n    # Pull the standard metrics from the LVIS results\n    results = lvis_eval.get_results()\n    results = {metric: float(results[metric] * 100) for metric in metrics}\n    logger.info(\"Evaluation results for {}: \\n\".format(iou_type) + create_small_table(results))\n    return results\n"
  },
  {
    "path": "detectron2/evaluation/evaluation/panoptic_evaluation.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport contextlib\nimport io\nimport itertools\nimport json\nimport logging\nimport os\nimport tempfile\nfrom collections import OrderedDict\nfrom fvcore.common.file_io import PathManager\nfrom PIL import Image\nfrom tabulate import tabulate\n\nfrom detectron2.data import MetadataCatalog\nfrom detectron2.utils import comm\n\nfrom .evaluator import DatasetEvaluator\n\nlogger = logging.getLogger(__name__)\n\n\nclass COCOPanopticEvaluator(DatasetEvaluator):\n    \"\"\"\n    Evaluate Panoptic Quality metrics on COCO using PanopticAPI.\n    It saves panoptic segmentation prediction in `output_dir`\n\n    It contains a synchronize call and has to be called from all workers.\n    \"\"\"\n\n    def __init__(self, dataset_name, output_dir):\n        \"\"\"\n        Args:\n            dataset_name (str): name of the dataset\n            output_dir (str): output directory to save results for evaluation\n        \"\"\"\n        self._metadata = MetadataCatalog.get(dataset_name)\n        self._thing_contiguous_id_to_dataset_id = {\n            v: k for k, v in self._metadata.thing_dataset_id_to_contiguous_id.items()\n        }\n        self._stuff_contiguous_id_to_dataset_id = {\n            v: k for k, v in self._metadata.stuff_dataset_id_to_contiguous_id.items()\n        }\n\n        self._predictions_json = os.path.join(output_dir, \"predictions.json\")\n        self._predictions_dir = os.path.join(output_dir, \"predictions\")\n\n    def reset(self):\n        self._predictions = []\n\n    def _convert_category_id(self, segment_info):\n        isthing = segment_info.pop(\"isthing\", None)\n        if isthing is None:\n            # the model produces panoptic category id directly. 
No more conversion needed\n            return segment_info\n        if isthing is True:\n            segment_info[\"category_id\"] = self._thing_contiguous_id_to_dataset_id[\n                segment_info[\"category_id\"]\n            ]\n        else:\n            segment_info[\"category_id\"] = self._stuff_contiguous_id_to_dataset_id[\n                segment_info[\"category_id\"]\n            ]\n        return segment_info\n\n    def process(self, inputs, outputs):\n        from panopticapi.utils import id2rgb\n\n        for input, output in zip(inputs, outputs):\n            panoptic_img, segments_info = output[\"panoptic_seg\"]\n            panoptic_img = panoptic_img.cpu().numpy()\n\n            file_name = os.path.basename(input[\"file_name\"])\n            file_name_png = os.path.splitext(file_name)[0] + \".png\"\n            with io.BytesIO() as out:\n                Image.fromarray(id2rgb(panoptic_img)).save(out, format=\"PNG\")\n                segments_info = [self._convert_category_id(x) for x in segments_info]\n                self._predictions.append(\n                    {\n                        \"image_id\": input[\"image_id\"],\n                        \"file_name\": file_name_png,\n                        \"png_string\": out.getvalue(),\n                        \"segments_info\": segments_info,\n                    }\n                )\n\n    def evaluate(self):\n        comm.synchronize()\n\n        self._predictions = comm.gather(self._predictions)\n        self._predictions = list(itertools.chain(*self._predictions))\n        if not comm.is_main_process():\n            return\n\n        gt_json = PathManager.get_local_path(self._metadata.panoptic_json)\n        gt_folder = self._metadata.panoptic_root\n\n        with tempfile.TemporaryDirectory(prefix=\"panoptic_eval\") as pred_dir:\n            if \"://\" not in self._predictions_dir:\n                pred_dir = self._predictions_dir\n                os.makedirs(pred_dir, exist_ok=True)\n\n   
         logger.info(\"Writing all panoptic predictions to {} ...\".format(pred_dir))\n            for p in self._predictions:\n                with open(os.path.join(pred_dir, p[\"file_name\"]), \"wb\") as f:\n                    f.write(p.pop(\"png_string\"))\n\n            with open(gt_json, \"r\") as f:\n                json_data = json.load(f)\n            json_data[\"annotations\"] = self._predictions\n            with PathManager.open(self._predictions_json, \"w\") as f:\n                f.write(json.dumps(json_data))\n\n            from panopticapi.evaluation import pq_compute\n\n            with contextlib.redirect_stdout(io.StringIO()):\n                pq_res = pq_compute(\n                    gt_json,\n                    PathManager.get_local_path(self._predictions_json),\n                    gt_folder=gt_folder,\n                    pred_folder=pred_dir,\n                )\n\n        res = {}\n        res[\"PQ\"] = 100 * pq_res[\"All\"][\"pq\"]\n        res[\"SQ\"] = 100 * pq_res[\"All\"][\"sq\"]\n        res[\"RQ\"] = 100 * pq_res[\"All\"][\"rq\"]\n        res[\"PQ_th\"] = 100 * pq_res[\"Things\"][\"pq\"]\n        res[\"SQ_th\"] = 100 * pq_res[\"Things\"][\"sq\"]\n        res[\"RQ_th\"] = 100 * pq_res[\"Things\"][\"rq\"]\n        res[\"PQ_st\"] = 100 * pq_res[\"Stuff\"][\"pq\"]\n        res[\"SQ_st\"] = 100 * pq_res[\"Stuff\"][\"sq\"]\n        res[\"RQ_st\"] = 100 * pq_res[\"Stuff\"][\"rq\"]\n\n        results = OrderedDict({\"panoptic_seg\": res})\n        _print_panoptic_results(pq_res)\n\n        return results\n\n\ndef _print_panoptic_results(pq_res):\n    headers = [\"\", \"PQ\", \"SQ\", \"RQ\", \"#categories\"]\n    data = []\n    for name in [\"All\", \"Things\", \"Stuff\"]:\n        row = [name] + [pq_res[name][k] * 100 for k in [\"pq\", \"sq\", \"rq\"]] + [pq_res[name][\"n\"]]\n        data.append(row)\n    table = tabulate(\n        data, headers=headers, tablefmt=\"pipe\", floatfmt=\".3f\", stralign=\"center\", numalign=\"center\"\n    )\n 
   logger.info(\"Panoptic Evaluation Results:\\n\" + table)\n\n\nif __name__ == \"__main__\":\n    from detectron2.utils.logger import setup_logger\n\n    logger = setup_logger()\n    import argparse\n\n    parser = argparse.ArgumentParser()\n    parser.add_argument(\"--gt-json\")\n    parser.add_argument(\"--gt-dir\")\n    parser.add_argument(\"--pred-json\")\n    parser.add_argument(\"--pred-dir\")\n    args = parser.parse_args()\n\n    from panopticapi.evaluation import pq_compute\n\n    with contextlib.redirect_stdout(io.StringIO()):\n        pq_res = pq_compute(\n            args.gt_json, args.pred_json, gt_folder=args.gt_dir, pred_folder=args.pred_dir\n        )\n        _print_panoptic_results(pq_res)\n"
  },
  {
    "path": "detectron2/evaluation/evaluation/pascal_voc_evaluation.py",
    "content": "# -*- coding: utf-8 -*-\n# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\n\nimport logging\nimport numpy as np\nimport os\nimport tempfile\nimport xml.etree.ElementTree as ET\nfrom collections import OrderedDict, defaultdict\nfrom functools import lru_cache\nimport torch\n\nfrom detectron2.data import MetadataCatalog\nfrom detectron2.utils import comm\n\nfrom .evaluator import DatasetEvaluator\n\n\nclass PascalVOCDetectionEvaluator(DatasetEvaluator):\n    \"\"\"\n    Evaluate Pascal VOC AP.\n    It contains a synchronization, therefore has to be called from all ranks.\n\n    Note that this is a rewrite of the official Matlab API.\n    The results should be similar, but not identical to the one produced by\n    the official API.\n    \"\"\"\n\n    def __init__(self, dataset_name):\n        \"\"\"\n        Args:\n            dataset_name (str): name of the dataset, e.g., \"voc_2007_test\"\n        \"\"\"\n        self._dataset_name = dataset_name\n        meta = MetadataCatalog.get(dataset_name)\n        self._anno_file_template = os.path.join(meta.dirname, \"Annotations\", \"{}.xml\")\n        self._image_set_path = os.path.join(meta.dirname, \"ImageSets\", \"Main\", meta.split + \".txt\")\n        self._class_names = meta.thing_classes\n        assert meta.year in [2007, 2012], meta.year\n        self._is_2007 = meta.year == 2007\n        self._cpu_device = torch.device(\"cpu\")\n        self._logger = logging.getLogger(__name__)\n\n    def reset(self):\n        self._predictions = defaultdict(list)  # class name -> list of prediction strings\n\n    def process(self, inputs, outputs):\n        for input, output in zip(inputs, outputs):\n            image_id = input[\"image_id\"]\n            instances = output[\"instances\"].to(self._cpu_device)\n            boxes = instances.pred_boxes.tensor.numpy()\n            scores = instances.scores.tolist()\n            classes = instances.pred_classes.tolist()\n            for box, 
score, cls in zip(boxes, scores, classes):\n                xmin, ymin, xmax, ymax = box\n                # The inverse of data loading logic in `datasets/pascal_voc.py`\n                xmin += 1\n                ymin += 1\n                self._predictions[cls].append(\n                    f\"{image_id} {score:.3f} {xmin:.1f} {ymin:.1f} {xmax:.1f} {ymax:.1f}\"\n                )\n\n    def evaluate(self):\n        \"\"\"\n        Returns:\n            dict: has a key \"segm\", whose value is a dict of \"AP\", \"AP50\", and \"AP75\".\n        \"\"\"\n        all_predictions = comm.gather(self._predictions, dst=0)\n        if not comm.is_main_process():\n            return\n        predictions = defaultdict(list)\n        for predictions_per_rank in all_predictions:\n            for clsid, lines in predictions_per_rank.items():\n                predictions[clsid].extend(lines)\n        del all_predictions\n\n        self._logger.info(\n            \"Evaluating {} using {} metric. \"\n            \"Note that results do not use the official Matlab API.\".format(\n                self._dataset_name, 2007 if self._is_2007 else 2012\n            )\n        )\n\n        with tempfile.TemporaryDirectory(prefix=\"pascal_voc_eval_\") as dirname:\n            res_file_template = os.path.join(dirname, \"{}.txt\")\n\n            aps = defaultdict(list)  # iou -> ap per class\n            for cls_id, cls_name in enumerate(self._class_names):\n                lines = predictions.get(cls_id, [\"\"])\n\n                with open(res_file_template.format(cls_name), \"w\") as f:\n                    f.write(\"\\n\".join(lines))\n\n                for thresh in range(50, 100, 5):\n                    rec, prec, ap = voc_eval(\n                        res_file_template,\n                        self._anno_file_template,\n                        self._image_set_path,\n                        cls_name,\n                        ovthresh=thresh / 100.0,\n                        
use_07_metric=self._is_2007,\n                    )\n                    aps[thresh].append(ap * 100)\n\n        ret = OrderedDict()\n        mAP = {iou: np.mean(x) for iou, x in aps.items()}\n        ret[\"bbox\"] = {\"AP\": np.mean(list(mAP.values())), \"AP50\": mAP[50], \"AP75\": mAP[75]}\n        return ret\n\n\n##############################################################################\n#\n# Below code is modified from\n# https://github.com/rbgirshick/py-faster-rcnn/blob/master/lib/datasets/voc_eval.py\n# --------------------------------------------------------\n# Fast/er R-CNN\n# Licensed under The MIT License [see LICENSE for details]\n# Written by Bharath Hariharan\n# --------------------------------------------------------\n\n\"\"\"Python implementation of the PASCAL VOC devkit's AP evaluation code.\"\"\"\n\n\n@lru_cache(maxsize=None)\ndef parse_rec(filename):\n    \"\"\"Parse a PASCAL VOC xml file.\"\"\"\n    tree = ET.parse(filename)\n    objects = []\n    for obj in tree.findall(\"object\"):\n        obj_struct = {}\n        obj_struct[\"name\"] = obj.find(\"name\").text\n        obj_struct[\"pose\"] = obj.find(\"pose\").text\n        obj_struct[\"truncated\"] = int(obj.find(\"truncated\").text)\n        obj_struct[\"difficult\"] = int(obj.find(\"difficult\").text)\n        bbox = obj.find(\"bndbox\")\n        obj_struct[\"bbox\"] = [\n            int(bbox.find(\"xmin\").text),\n            int(bbox.find(\"ymin\").text),\n            int(bbox.find(\"xmax\").text),\n            int(bbox.find(\"ymax\").text),\n        ]\n        objects.append(obj_struct)\n\n    return objects\n\n\ndef voc_ap(rec, prec, use_07_metric=False):\n    \"\"\"Compute VOC AP given precision and recall. 
If use_07_metric is true, uses\n    the VOC 07 11-point method (default:False).\n    \"\"\"\n    if use_07_metric:\n        # 11 point metric\n        ap = 0.0\n        for t in np.arange(0.0, 1.1, 0.1):\n            if np.sum(rec >= t) == 0:\n                p = 0\n            else:\n                p = np.max(prec[rec >= t])\n            ap = ap + p / 11.0\n    else:\n        # correct AP calculation\n        # first append sentinel values at the end\n        mrec = np.concatenate(([0.0], rec, [1.0]))\n        mpre = np.concatenate(([0.0], prec, [0.0]))\n\n        # compute the precision envelope\n        for i in range(mpre.size - 1, 0, -1):\n            mpre[i - 1] = np.maximum(mpre[i - 1], mpre[i])\n\n        # to calculate area under PR curve, look for points\n        # where X axis (recall) changes value\n        i = np.where(mrec[1:] != mrec[:-1])[0]\n\n        # and sum (\\Delta recall) * prec\n        ap = np.sum((mrec[i + 1] - mrec[i]) * mpre[i + 1])\n    return ap\n\n\ndef voc_eval(detpath, annopath, imagesetfile, classname, ovthresh=0.5, use_07_metric=False):\n    \"\"\"rec, prec, ap = voc_eval(detpath,\n                                annopath,\n                                imagesetfile,\n                                classname,\n                                [ovthresh],\n                                [use_07_metric])\n\n    Top level function that does the PASCAL VOC evaluation.\n\n    detpath: Path to detections\n        detpath.format(classname) should produce the detection results file.\n    annopath: Path to annotations\n        annopath.format(imagename) should be the xml annotations file.\n    imagesetfile: Text file containing the list of images, one image per line.\n    classname: Category name (duh)\n    [ovthresh]: Overlap threshold (default = 0.5)\n    [use_07_metric]: Whether to use VOC07's 11 point AP computation\n        (default False)\n    \"\"\"\n    # assumes detections are in detpath.format(classname)\n    # assumes 
annotations are in annopath.format(imagename)\n    # assumes imagesetfile is a text file with each line an image name\n\n    # first load gt\n    # read list of images\n    with open(imagesetfile, \"r\") as f:\n        lines = f.readlines()\n    imagenames = [x.strip() for x in lines]\n\n    # load annots\n    recs = {}\n    for imagename in imagenames:\n        recs[imagename] = parse_rec(annopath.format(imagename))\n\n    # extract gt objects for this class\n    class_recs = {}\n    npos = 0\n    for imagename in imagenames:\n        R = [obj for obj in recs[imagename] if obj[\"name\"] == classname]\n        bbox = np.array([x[\"bbox\"] for x in R])\n        difficult = np.array([x[\"difficult\"] for x in R]).astype(np.bool)\n        # difficult = np.array([False for x in R]).astype(np.bool)  # treat all \"difficult\" as GT\n        det = [False] * len(R)\n        npos = npos + sum(~difficult)\n        class_recs[imagename] = {\"bbox\": bbox, \"difficult\": difficult, \"det\": det}\n\n    # read dets\n    detfile = detpath.format(classname)\n    with open(detfile, \"r\") as f:\n        lines = f.readlines()\n\n    splitlines = [x.strip().split(\" \") for x in lines]\n    image_ids = [x[0] for x in splitlines]\n    confidence = np.array([float(x[1]) for x in splitlines])\n    BB = np.array([[float(z) for z in x[2:]] for x in splitlines]).reshape(-1, 4)\n\n    # sort by confidence\n    sorted_ind = np.argsort(-confidence)\n    BB = BB[sorted_ind, :]\n    image_ids = [image_ids[x] for x in sorted_ind]\n\n    # go down dets and mark TPs and FPs\n    nd = len(image_ids)\n    tp = np.zeros(nd)\n    fp = np.zeros(nd)\n    for d in range(nd):\n        R = class_recs[image_ids[d]]\n        bb = BB[d, :].astype(float)\n        ovmax = -np.inf\n        BBGT = R[\"bbox\"].astype(float)\n\n        if BBGT.size > 0:\n            # compute overlaps\n            # intersection\n            ixmin = np.maximum(BBGT[:, 0], bb[0])\n            iymin = np.maximum(BBGT[:, 1], bb[1])\n 
           ixmax = np.minimum(BBGT[:, 2], bb[2])\n            iymax = np.minimum(BBGT[:, 3], bb[3])\n            iw = np.maximum(ixmax - ixmin + 1.0, 0.0)\n            ih = np.maximum(iymax - iymin + 1.0, 0.0)\n            inters = iw * ih\n\n            # union\n            uni = (\n                (bb[2] - bb[0] + 1.0) * (bb[3] - bb[1] + 1.0)\n                + (BBGT[:, 2] - BBGT[:, 0] + 1.0) * (BBGT[:, 3] - BBGT[:, 1] + 1.0)\n                - inters\n            )\n\n            overlaps = inters / uni\n            ovmax = np.max(overlaps)\n            jmax = np.argmax(overlaps)\n\n        if ovmax > ovthresh:\n            if not R[\"difficult\"][jmax]:\n                if not R[\"det\"][jmax]:\n                    tp[d] = 1.0\n                    R[\"det\"][jmax] = 1\n                else:\n                    fp[d] = 1.0\n        else:\n            fp[d] = 1.0\n\n    # compute precision recall\n    fp = np.cumsum(fp)\n    tp = np.cumsum(tp)\n    rec = tp / float(npos)\n    # avoid divide by zero in case the first detection matches a difficult\n    # ground truth\n    prec = tp / np.maximum(tp + fp, np.finfo(np.float64).eps)\n    ap = voc_ap(rec, prec, use_07_metric)\n\n    return rec, prec, ap\n"
  },
  {
    "path": "detectron2/evaluation/evaluation/sem_seg_evaluation.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport itertools\nimport json\nimport logging\nimport numpy as np\nimport os\nfrom collections import OrderedDict\nimport PIL.Image as Image\nimport pycocotools.mask as mask_util\nimport torch\nfrom fvcore.common.file_io import PathManager\n\nfrom detectron2.data import DatasetCatalog, MetadataCatalog\nfrom detectron2.utils.comm import all_gather, is_main_process, synchronize\n\nfrom .evaluator import DatasetEvaluator\n\n\nclass SemSegEvaluator(DatasetEvaluator):\n    \"\"\"\n    Evaluate semantic segmentation\n    \"\"\"\n\n    def __init__(self, dataset_name, distributed, num_classes, ignore_label=255, output_dir=None):\n        \"\"\"\n        Args:\n            dataset_name (str): name of the dataset to be evaluated.\n            distributed (True): if True, will collect results from all ranks for evaluation.\n                Otherwise, will evaluate the results in the current process.\n            num_classes (int): number of classes\n            ignore_label (int): value in semantic segmentation ground truth. 
Predictions for the\n            corresponding pixels should be ignored.\n            output_dir (str): an output directory to dump results.\n        \"\"\"\n        self._dataset_name = dataset_name\n        self._distributed = distributed\n        self._output_dir = output_dir\n        self._num_classes = num_classes\n        self._ignore_label = ignore_label\n        self._N = num_classes + 1\n\n        self._cpu_device = torch.device(\"cpu\")\n        self._logger = logging.getLogger(__name__)\n\n        self.image_id_to_gt_file = {\n            dataset_record[\"image_id\"]: dataset_record[\"sem_seg_file_name\"]\n            for dataset_record in DatasetCatalog.get(dataset_name)\n        }\n\n        meta = MetadataCatalog.get(dataset_name)\n        # Dict that maps contiguous training ids to COCO category ids\n        try:\n            c2d = meta.stuff_dataset_id_to_contiguous_id\n            self._contiguous_id_to_dataset_id = {v: k for k, v in c2d.items()}\n        except AttributeError:\n            self._contiguous_id_to_dataset_id = None\n\n    def reset(self):\n        self._conf_matrix = np.zeros((self._N, self._N), dtype=np.int64)\n        self._predictions = []\n\n    def process(self, inputs, outputs):\n        \"\"\"\n        Args:\n            inputs: the inputs to a model.\n                It is a list of dicts. Each dict corresponds to an image and\n                contains keys like \"height\", \"width\", \"file_name\", \"image_id\".\n            outputs: the outputs of a model. 
It is either list of semantic segmentation predictions\n                (Tensor [H, W]) or list of dicts with key \"sem_seg\" that contains semantic\n                segmentation prediction in the same format.\n        \"\"\"\n        for input, output in zip(inputs, outputs):\n            output = output[\"sem_seg\"].argmax(dim=0).to(self._cpu_device)\n            pred = np.array(output, dtype=np.int)\n            with PathManager.open(self.image_id_to_gt_file[input[\"image_id\"]], \"rb\") as f:\n                gt = np.array(Image.open(f), dtype=np.int)\n\n            gt[gt == self._ignore_label] = self._num_classes\n\n            self._conf_matrix += np.bincount(\n                self._N * pred.reshape(-1) + gt.reshape(-1), minlength=self._N ** 2\n            ).reshape(self._N, self._N)\n\n            self._predictions.extend(self.encode_json_sem_seg(pred, input[\"image_id\"]))\n\n    def evaluate(self):\n        \"\"\"\n        Evaluates standard semantic segmentation metrics (http://cocodataset.org/#stuff-eval):\n\n        * Mean intersection-over-union averaged across classes (mIoU)\n        * Frequency Weighted IoU (fwIoU)\n        * Mean pixel accuracy averaged across classes (mACC)\n        * Pixel Accuracy (pACC)\n        \"\"\"\n        if self._distributed:\n            synchronize()\n            conf_matrix_list = all_gather(self._conf_matrix)\n            self._predictions = all_gather(self._predictions)\n            self._predictions = list(itertools.chain(*self._predictions))\n            if not is_main_process():\n                return\n\n            self._conf_matrix = np.zeros_like(self._conf_matrix)\n            for conf_matrix in conf_matrix_list:\n                self._conf_matrix += conf_matrix\n\n        if self._output_dir:\n            PathManager.mkdirs(self._output_dir)\n            file_path = os.path.join(self._output_dir, \"sem_seg_predictions.json\")\n            with PathManager.open(file_path, \"w\") as f:\n                
f.write(json.dumps(self._predictions))\n\n        acc = np.zeros(self._num_classes, dtype=np.float)\n        iou = np.zeros(self._num_classes, dtype=np.float)\n        tp = self._conf_matrix.diagonal()[:-1].astype(np.float)\n        pos_gt = np.sum(self._conf_matrix[:-1, :-1], axis=0).astype(np.float)\n        class_weights = pos_gt / np.sum(pos_gt)\n        pos_pred = np.sum(self._conf_matrix[:-1, :-1], axis=1).astype(np.float)\n        acc_valid = pos_gt > 0\n        acc[acc_valid] = tp[acc_valid] / pos_gt[acc_valid]\n        iou_valid = (pos_gt + pos_pred) > 0\n        union = pos_gt + pos_pred - tp\n        iou[acc_valid] = tp[acc_valid] / union[acc_valid]\n        macc = np.sum(acc) / np.sum(acc_valid)\n        miou = np.sum(iou) / np.sum(iou_valid)\n        fiou = np.sum(iou * class_weights)\n        pacc = np.sum(tp) / np.sum(pos_gt)\n\n        res = {}\n        res[\"mIoU\"] = 100 * miou\n        res[\"fwIoU\"] = 100 * fiou\n        res[\"mACC\"] = 100 * macc\n        res[\"pACC\"] = 100 * pacc\n\n        if self._output_dir:\n            file_path = os.path.join(self._output_dir, \"sem_seg_evaluation.pth\")\n            with PathManager.open(file_path, \"wb\") as f:\n                torch.save(res, f)\n        results = OrderedDict({\"sem_seg\": res})\n        self._logger.info(results)\n        return results\n\n    def encode_json_sem_seg(self, sem_seg, image_id):\n        \"\"\"\n        Convert semantic segmentation to COCO stuff format with segments encoded as RLEs.\n        See http://cocodataset.org/#format-results\n        \"\"\"\n        json_list = []\n        for label in np.unique(sem_seg):\n            if self._contiguous_id_to_dataset_id is not None:\n                assert (\n                    label in self._contiguous_id_to_dataset_id\n                ), \"Label {} is not in the metadata info for {}\".format(label, self._dataset_name)\n                dataset_id = self._contiguous_id_to_dataset_id[label]\n            else:\n               
 dataset_id = int(label)\n            mask = (sem_seg == label).astype(np.uint8)\n            mask_rle = mask_util.encode(np.array(mask[:, :, None], order=\"F\"))[0]\n            mask_rle[\"counts\"] = mask_rle[\"counts\"].decode(\"utf-8\")\n            json_list.append(\n                {\"image_id\": image_id, \"category_id\": dataset_id, \"segmentation\": mask_rle}\n            )\n        return json_list\n"
  },
  {
    "path": "detectron2/evaluation/evaluation/testing.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport logging\nimport numpy as np\nimport pprint\nimport sys\nfrom collections import Mapping, OrderedDict\n\n\ndef print_csv_format(results):\n    \"\"\"\n    Print main metrics in a format similar to Detectron,\n    so that they are easy to copypaste into a spreadsheet.\n\n    Args:\n        results (OrderedDict[dict]): task_name -> {metric -> score}\n    \"\"\"\n    assert isinstance(results, OrderedDict), results  # unordered results cannot be properly printed\n    logger = logging.getLogger(__name__)\n    for task, res in results.items():\n        # Don't print \"AP-category\" metrics since they are usually not tracked.\n        important_res = [(k, v) for k, v in res.items() if \"-\" not in k]\n        logger.info(\"copypaste: Task: {}\".format(task))\n        logger.info(\"copypaste: \" + \",\".join([k[0] for k in important_res]))\n        logger.info(\"copypaste: \" + \",\".join([\"{0:.4f}\".format(k[1]) for k in important_res]))\n\n\ndef verify_results(cfg, results):\n    \"\"\"\n    Args:\n        results (OrderedDict[dict]): task_name -> {metric -> score}\n\n    Returns:\n        bool: whether the verification succeeds or not\n    \"\"\"\n    expected_results = cfg.TEST.EXPECTED_RESULTS\n    if not len(expected_results):\n        return True\n\n    ok = True\n    for task, metric, expected, tolerance in expected_results:\n        actual = results[task][metric]\n        if not np.isfinite(actual):\n            ok = False\n        diff = abs(actual - expected)\n        if diff > tolerance:\n            ok = False\n\n    logger = logging.getLogger(__name__)\n    if not ok:\n        logger.error(\"Result verification failed!\")\n        logger.error(\"Expected Results: \" + str(expected_results))\n        logger.error(\"Actual Results: \" + pprint.pformat(results))\n\n        sys.exit(1)\n    else:\n        logger.info(\"Results verification passed.\")\n    return 
ok\n\n\ndef flatten_results_dict(results):\n    \"\"\"\n    Expand a hierarchical dict of scalars into a flat dict of scalars.\n    If results[k1][k2][k3] = v, the returned dict will have the entry\n    {\"k1/k2/k3\": v}.\n\n    Args:\n        results (dict):\n    \"\"\"\n    r = {}\n    for k, v in results.items():\n        if isinstance(v, Mapping):\n            v = flatten_results_dict(v)\n            for kk, vv in v.items():\n                r[k + \"/\" + kk] = vv\n        else:\n            r[k] = v\n    return r\n"
  },
  {
    "path": "detectron2/evaluation/evaluator.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport datetime\nimport logging\nimport time\nfrom collections import OrderedDict\nfrom contextlib import contextmanager\nimport torch\n\nfrom detectron2.utils.comm import is_main_process\n\n\nclass DatasetEvaluator:\n    \"\"\"\n    Base class for a dataset evaluator.\n\n    The function :func:`inference_on_dataset` runs the model over\n    all samples in the dataset, and have a DatasetEvaluator to process the inputs/outputs.\n\n    This class will accumulate information of the inputs/outputs (by :meth:`process`),\n    and produce evaluation results in the end (by :meth:`evaluate`).\n    \"\"\"\n\n    def reset(self):\n        \"\"\"\n        Preparation for a new round of evaluation.\n        Should be called before starting a round of evaluation.\n        \"\"\"\n        pass\n\n    def process(self, input, output):\n        \"\"\"\n        Process an input/output pair.\n\n        Args:\n            input: the input that's used to call the model.\n            output: the return value of `model(output)`\n        \"\"\"\n        pass\n\n    def evaluate(self):\n        \"\"\"\n        Evaluate/summarize the performance, after processing all input/output pairs.\n\n        Returns:\n            dict:\n                A new evaluator class can return a dict of arbitrary format\n                as long as the user can process the results.\n                In our train_net.py, we expect the following format:\n\n                * key: the name of the task (e.g., bbox)\n                * value: a dict of {metric name: score}, e.g.: {\"AP50\": 80}\n        \"\"\"\n        pass\n\n\nclass DatasetEvaluators(DatasetEvaluator):\n    def __init__(self, evaluators):\n        assert len(evaluators)\n        super().__init__()\n        self._evaluators = evaluators\n\n    def reset(self):\n        for evaluator in self._evaluators:\n            evaluator.reset()\n\n    def process(self, input, 
output):\n        for evaluator in self._evaluators:\n            evaluator.process(input, output)\n\n    def evaluate(self):\n        results = OrderedDict()\n        for evaluator in self._evaluators:\n            result = evaluator.evaluate()\n            if is_main_process():\n                for k, v in result.items():\n                    assert (\n                        k not in results\n                    ), \"Different evaluators produce results with the same key {}\".format(k)\n                    results[k] = v\n        return results\n\n\ndef inference_on_dataset(model, data_loader, evaluator):\n    \"\"\"\n    Run model (in eval mode) on the data_loader and evaluate the metrics with evaluator.\n\n    Args:\n        model (nn.Module): a module which accepts an object from\n            `data_loader` and returns some outputs. It will be temporarily set to `eval` mode.\n\n            If you wish to evaluate a model in `training` mode instead, you can\n            wrap the given model and override its behavior of `.eval()` and `.train()`.\n        data_loader: an iterable object with a length.\n            The elements it generates will be the inputs to the model.\n        evaluator (DatasetEvaluator): the evaluator to run\n\n    Returns:\n        The return value of `evaluator.evaluate()`\n    \"\"\"\n    num_devices = torch.distributed.get_world_size() if torch.distributed.is_initialized() else 1\n    logger = logging.getLogger(__name__)\n    logger.info(\"Start inference on {} images\".format(len(data_loader)))\n\n    total = len(data_loader)  # inference data loader must have a fixed length\n    evaluator.reset()\n\n    logging_interval = 50\n    num_warmup = min(5, logging_interval - 1, total - 1)\n    start_time = time.time()\n    with inference_context(model), torch.no_grad():\n        for idx, inputs in enumerate(data_loader):\n            if idx == num_warmup:\n                start_time = time.time()\n\n            results,associations = 
model(inputs)\n            outputs = (results,associations)\n            evaluator.process(inputs, outputs)\n\n            if (idx + 1) % logging_interval == 0:\n                duration = time.time() - start_time\n                seconds_per_img = duration / (idx + 1 - num_warmup)\n                eta = datetime.timedelta(\n                    seconds=int(seconds_per_img * (total - num_warmup) - duration)\n                )\n                logger.info(\n                    \"Inference done {}/{}. {:.4f} s / img. ETA={}\".format(\n                        idx + 1, total, seconds_per_img, str(eta)\n                    )\n                )\n\n    # Measure the time only for this worker (before the synchronization barrier)\n    total_time = int(time.time() - start_time)\n    total_time_str = str(datetime.timedelta(seconds=total_time))\n    # NOTE this format is parsed by grep\n    logger.info(\n        \"Total inference time: {} ({:.6f} s / img per device, on {} devices)\".format(\n            total_time_str, total_time / (total - num_warmup), num_devices\n        )\n    )\n\n    results,associations = evaluator.evaluate()\n    # An evaluator may return None when not in main process.\n    # Replace it by an empty dict instead to make it easier for downstream code to handle\n    if results is None:\n        results = {}\n    return results,associations\n\n\n@contextmanager\ndef inference_context(model):\n    \"\"\"\n    A context where the model is temporarily changed to eval mode,\n    and restored to previous mode afterwards.\n\n    Args:\n        model: a torch Module\n    \"\"\"\n    training_mode = model.training\n    model.eval()\n    yield\n    model.train(training_mode)\n"
  },
  {
    "path": "detectron2/evaluation/lvis_evaluation.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport copy\nimport itertools\nimport json\nimport logging\nimport numpy as np\nimport os\nimport pickle\nfrom collections import OrderedDict\nimport pycocotools.mask as mask_util\nimport torch\nfrom fvcore.common.file_io import PathManager\n\nimport detectron2.utils.comm as comm\nfrom detectron2.data import MetadataCatalog\nfrom detectron2.structures import Boxes, BoxMode, pairwise_iou\nfrom detectron2.utils.logger import create_small_table\n\nfrom .coco_evaluation import instances_to_json\nfrom .evaluator import DatasetEvaluator\n\n\nclass LVISEvaluator(DatasetEvaluator):\n    \"\"\"\n    Evaluate object proposal and instance detection/segmentation outputs using\n    LVIS's metrics and evaluation API.\n    \"\"\"\n\n    def __init__(self, dataset_name, cfg, distributed, output_dir=None):\n        \"\"\"\n        Args:\n            dataset_name (str): name of the dataset to be evaluated.\n                It must have the following corresponding metadata:\n                    \"json_file\": the path to the LVIS format annotation\n            cfg (CfgNode): config instance\n            distributed (True): if True, will collect results from all ranks for evaluation.\n                Otherwise, will evaluate the results in the current process.\n            output_dir (str): optional, an output directory to dump results.\n        \"\"\"\n        from lvis import LVIS\n\n        self._tasks = self._tasks_from_config(cfg)\n        self._distributed = distributed\n        self._output_dir = output_dir\n\n        self._cpu_device = torch.device(\"cpu\")\n        self._logger = logging.getLogger(__name__)\n\n        self._metadata = MetadataCatalog.get(dataset_name)\n        json_file = PathManager.get_local_path(self._metadata.json_file)\n        self._lvis_api = LVIS(json_file)\n        # Test set json files do not contain annotations (evaluation must be\n        # performed using the 
LVIS evaluation server).\n        self._do_evaluation = len(self._lvis_api.get_ann_ids()) > 0\n\n    def reset(self):\n        self._predictions = []\n        self._lvis_results = []\n\n    def _tasks_from_config(self, cfg):\n        \"\"\"\n        Returns:\n            tuple[str]: tasks that can be evaluated under the given configuration.\n        \"\"\"\n        tasks = (\"bbox\",)\n        if cfg.MODEL.MASK_ON:\n            tasks = tasks + (\"segm\",)\n        return tasks\n\n    def process(self, inputs, outputs):\n        \"\"\"\n        Args:\n            inputs: the inputs to a LVIS model (e.g., GeneralizedRCNN).\n                It is a list of dict. Each dict corresponds to an image and\n                contains keys like \"height\", \"width\", \"file_name\", \"image_id\".\n            outputs: the outputs of a LVIS model. It is a list of dicts with key\n                \"instances\" that contains :class:`Instances`.\n        \"\"\"\n        for input, output in zip(inputs, outputs):\n            prediction = {\"image_id\": input[\"image_id\"]}\n\n            # TODO this is ugly\n            if \"instances\" in output:\n                instances = output[\"instances\"].to(self._cpu_device)\n\n                if instances.has(\"pred_masks\"):\n                    # use RLE to encode the masks, because they are too large and takes memory\n                    # since this evaluator stores outputs of the entire dataset\n                    rles = [\n                        mask_util.encode(np.array(mask[:, :, None], order=\"F\", dtype=\"uint8\"))[0]\n                        for mask in instances.pred_masks\n                    ]\n                    for rle in rles:\n                        # \"counts\" is an array encoded by mask_util as a byte-stream. Python3's\n                        # json writer which always produces strings cannot serialize a bytestream\n                        # unless you decode it. 
Thankfully, utf-8 works out (which is also what\n                        # the pycocotools/_mask.pyx does).\n                        rle[\"counts\"] = rle[\"counts\"].decode(\"utf-8\")\n                    instances.pred_masks_rle = rles\n                    instances.remove(\"pred_masks\")\n\n                prediction[\"instances\"] = instances_to_json(instances, input[\"image_id\"])\n            if \"proposals\" in output:\n                prediction[\"proposals\"] = output[\"proposals\"].to(self._cpu_device)\n            self._predictions.append(prediction)\n\n    def evaluate(self):\n        if self._distributed:\n            comm.synchronize()\n            self._predictions = comm.gather(self._predictions, dst=0)\n            self._predictions = list(itertools.chain(*self._predictions))\n\n            if not comm.is_main_process():\n                return\n\n        if len(self._predictions) == 0:\n            self._logger.warning(\"[LVISEvaluator] Did not receive valid predictions.\")\n            return {}\n\n        if self._output_dir:\n            PathManager.mkdirs(self._output_dir)\n            file_path = os.path.join(self._output_dir, \"instances_predictions.pth\")\n            with PathManager.open(file_path, \"wb\") as f:\n                torch.save(self._predictions, f)\n\n        self._results = OrderedDict()\n        if \"proposals\" in self._predictions[0]:\n            self._eval_box_proposals()\n        if \"instances\" in self._predictions[0]:\n            self._eval_predictions(set(self._tasks))\n        # Copy so the caller can do whatever with results\n        return copy.deepcopy(self._results)\n\n    def _eval_predictions(self, tasks):\n        \"\"\"\n        Evaluate self._predictions on the given tasks.\n        Fill self._results with the metrics of the tasks.\n        \"\"\"\n        self._logger.info(\"Preparing results in the LVIS format ...\")\n        self._lvis_results = list(itertools.chain(*[x[\"instances\"] for x in 
self._predictions]))\n\n        # unmap the category ids for LVIS (from 0-indexed to 1-indexed)\n        for result in self._lvis_results:\n            result[\"category_id\"] += 1\n\n        if self._output_dir:\n            file_path = os.path.join(self._output_dir, \"lvis_instances_results.json\")\n            self._logger.info(\"Saving results to {}\".format(file_path))\n            with PathManager.open(file_path, \"w\") as f:\n                f.write(json.dumps(self._lvis_results))\n                f.flush()\n\n        if not self._do_evaluation:\n            self._logger.info(\"Annotations are not available for evaluation.\")\n            return\n\n        self._logger.info(\"Evaluating predictions ...\")\n        for task in sorted(tasks):\n            res = _evaluate_predictions_on_lvis(\n                self._lvis_api,\n                self._lvis_results,\n                task,\n                class_names=self._metadata.get(\"thing_classes\"),\n            )\n            self._results[task] = res\n\n    def _eval_box_proposals(self):\n        \"\"\"\n        Evaluate the box proposals in self._predictions.\n        Fill self._results with the metrics for \"box_proposals\" task.\n        \"\"\"\n        if self._output_dir:\n            # Saving generated box proposals to file.\n            # Predicted box_proposals are in XYXY_ABS mode.\n            bbox_mode = BoxMode.XYXY_ABS.value\n            ids, boxes, objectness_logits = [], [], []\n            for prediction in self._predictions:\n                ids.append(prediction[\"image_id\"])\n                boxes.append(prediction[\"proposals\"].proposal_boxes.tensor.numpy())\n                objectness_logits.append(prediction[\"proposals\"].objectness_logits.numpy())\n\n            proposal_data = {\n                \"boxes\": boxes,\n                \"objectness_logits\": objectness_logits,\n                \"ids\": ids,\n                \"bbox_mode\": bbox_mode,\n            }\n            with 
PathManager.open(os.path.join(self._output_dir, \"box_proposals.pkl\"), \"wb\") as f:\n                pickle.dump(proposal_data, f)\n\n        if not self._do_evaluation:\n            self._logger.info(\"Annotations are not available for evaluation.\")\n            return\n\n        self._logger.info(\"Evaluating bbox proposals ...\")\n        res = {}\n        areas = {\"all\": \"\", \"small\": \"s\", \"medium\": \"m\", \"large\": \"l\"}\n        for limit in [100, 1000]:\n            for area, suffix in areas.items():\n                stats = _evaluate_box_proposals(\n                    self._predictions, self._lvis_api, area=area, limit=limit\n                )\n                key = \"AR{}@{:d}\".format(suffix, limit)\n                res[key] = float(stats[\"ar\"].item() * 100)\n        self._logger.info(\"Proposal metrics: \\n\" + create_small_table(res))\n        self._results[\"box_proposals\"] = res\n\n\n# inspired from Detectron:\n# https://github.com/facebookresearch/Detectron/blob/a6a835f5b8208c45d0dce217ce9bbda915f44df7/detectron/datasets/json_dataset_evaluator.py#L255 # noqa\ndef _evaluate_box_proposals(dataset_predictions, lvis_api, thresholds=None, area=\"all\", limit=None):\n    \"\"\"\n    Evaluate detection proposal recall metrics. This function is a much\n    faster alternative to the official LVIS API recall evaluation code. 
However,\n    it produces slightly different results.\n    \"\"\"\n    # Record max overlap value for each gt box\n    # Return vector of overlap values\n    areas = {\n        \"all\": 0,\n        \"small\": 1,\n        \"medium\": 2,\n        \"large\": 3,\n        \"96-128\": 4,\n        \"128-256\": 5,\n        \"256-512\": 6,\n        \"512-inf\": 7,\n    }\n    area_ranges = [\n        [0 ** 2, 1e5 ** 2],  # all\n        [0 ** 2, 32 ** 2],  # small\n        [32 ** 2, 96 ** 2],  # medium\n        [96 ** 2, 1e5 ** 2],  # large\n        [96 ** 2, 128 ** 2],  # 96-128\n        [128 ** 2, 256 ** 2],  # 128-256\n        [256 ** 2, 512 ** 2],  # 256-512\n        [512 ** 2, 1e5 ** 2],\n    ]  # 512-inf\n    assert area in areas, \"Unknown area range: {}\".format(area)\n    area_range = area_ranges[areas[area]]\n    gt_overlaps = []\n    num_pos = 0\n\n    for prediction_dict in dataset_predictions:\n        predictions = prediction_dict[\"proposals\"]\n\n        # sort predictions in descending order\n        # TODO maybe remove this and make it explicit in the documentation\n        inds = predictions.objectness_logits.sort(descending=True)[1]\n        predictions = predictions[inds]\n\n        ann_ids = lvis_api.get_ann_ids(img_ids=[prediction_dict[\"image_id\"]])\n        anno = lvis_api.load_anns(ann_ids)\n        gt_boxes = [\n            BoxMode.convert(obj[\"bbox\"], BoxMode.XYWH_ABS, BoxMode.XYXY_ABS) for obj in anno\n        ]\n        gt_boxes = torch.as_tensor(gt_boxes).reshape(-1, 4)  # guard against no boxes\n        gt_boxes = Boxes(gt_boxes)\n        gt_areas = torch.as_tensor([obj[\"area\"] for obj in anno])\n\n        if len(gt_boxes) == 0 or len(predictions) == 0:\n            continue\n\n        valid_gt_inds = (gt_areas >= area_range[0]) & (gt_areas <= area_range[1])\n        gt_boxes = gt_boxes[valid_gt_inds]\n\n        num_pos += len(gt_boxes)\n\n        if len(gt_boxes) == 0:\n            continue\n\n        if limit is not None and 
len(predictions) > limit:\n            predictions = predictions[:limit]\n\n        overlaps = pairwise_iou(predictions.proposal_boxes, gt_boxes)\n\n        _gt_overlaps = torch.zeros(len(gt_boxes))\n        for j in range(min(len(predictions), len(gt_boxes))):\n            # find which proposal box maximally covers each gt box\n            # and get the iou amount of coverage for each gt box\n            max_overlaps, argmax_overlaps = overlaps.max(dim=0)\n\n            # find which gt box is 'best' covered (i.e. 'best' = most iou)\n            gt_ovr, gt_ind = max_overlaps.max(dim=0)\n            assert gt_ovr >= 0\n            # find the proposal box that covers the best covered gt box\n            box_ind = argmax_overlaps[gt_ind]\n            # record the iou coverage of this gt box\n            _gt_overlaps[j] = overlaps[box_ind, gt_ind]\n            assert _gt_overlaps[j] == gt_ovr\n            # mark the proposal box and the gt box as used\n            overlaps[box_ind, :] = -1\n            overlaps[:, gt_ind] = -1\n\n        # append recorded iou coverage level\n        gt_overlaps.append(_gt_overlaps)\n    gt_overlaps = torch.cat(gt_overlaps, dim=0)\n    gt_overlaps, _ = torch.sort(gt_overlaps)\n\n    if thresholds is None:\n        step = 0.05\n        thresholds = torch.arange(0.5, 0.95 + 1e-5, step, dtype=torch.float32)\n    recalls = torch.zeros_like(thresholds)\n    # compute recall for each iou threshold\n    for i, t in enumerate(thresholds):\n        recalls[i] = (gt_overlaps >= t).float().sum() / float(num_pos)\n    # ar = 2 * np.trapz(recalls, thresholds)\n    ar = recalls.mean()\n    return {\n        \"ar\": ar,\n        \"recalls\": recalls,\n        \"thresholds\": thresholds,\n        \"gt_overlaps\": gt_overlaps,\n        \"num_pos\": num_pos,\n    }\n\n\ndef _evaluate_predictions_on_lvis(lvis_gt, lvis_results, iou_type, class_names=None):\n    \"\"\"\n    Args:\n        iou_type (str):\n        kpt_oks_sigmas (list[float]):\n        
class_names (None or list[str]): if provided, will use it to predict\n            per-category AP.\n\n    Returns:\n        a dict of {metric name: score}\n    \"\"\"\n    metrics = {\n        \"bbox\": [\"AP\", \"AP50\", \"AP75\", \"APs\", \"APm\", \"APl\", \"APr\", \"APc\", \"APf\"],\n        \"segm\": [\"AP\", \"AP50\", \"AP75\", \"APs\", \"APm\", \"APl\", \"APr\", \"APc\", \"APf\"],\n    }[iou_type]\n\n    logger = logging.getLogger(__name__)\n\n    if len(lvis_results) == 0:  # TODO: check if needed\n        logger.warn(\"No predictions from the model! Set scores to -1\")\n        return {metric: -1 for metric in metrics}\n\n    if iou_type == \"segm\":\n        lvis_results = copy.deepcopy(lvis_results)\n        # When evaluating mask AP, if the results contain bbox, LVIS API will\n        # use the box area as the area of the instance, instead of the mask area.\n        # This leads to a different definition of small/medium/large.\n        # We remove the bbox field to let mask AP use mask area.\n        for c in lvis_results:\n            c.pop(\"bbox\", None)\n\n    from lvis import LVISEval, LVISResults\n\n    lvis_results = LVISResults(lvis_gt, lvis_results)\n    lvis_eval = LVISEval(lvis_gt, lvis_results, iou_type)\n    lvis_eval.run()\n    lvis_eval.print_results()\n\n    # Pull the standard metrics from the LVIS results\n    results = lvis_eval.get_results()\n    results = {metric: float(results[metric] * 100) for metric in metrics}\n    logger.info(\"Evaluation results for {}: \\n\".format(iou_type) + create_small_table(results))\n    return results\n"
  },
  {
    "path": "detectron2/evaluation/panoptic_evaluation.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport contextlib\nimport io\nimport itertools\nimport json\nimport logging\nimport os\nimport tempfile\nfrom collections import OrderedDict\nfrom fvcore.common.file_io import PathManager\nfrom PIL import Image\nfrom tabulate import tabulate\n\nfrom detectron2.data import MetadataCatalog\nfrom detectron2.utils import comm\n\nfrom .evaluator import DatasetEvaluator\n\nlogger = logging.getLogger(__name__)\n\n\nclass COCOPanopticEvaluator(DatasetEvaluator):\n    \"\"\"\n    Evaluate Panoptic Quality metrics on COCO using PanopticAPI.\n    It saves panoptic segmentation prediction in `output_dir`\n\n    It contains a synchronize call and has to be called from all workers.\n    \"\"\"\n\n    def __init__(self, dataset_name, output_dir):\n        \"\"\"\n        Args:\n            dataset_name (str): name of the dataset\n            output_dir (str): output directory to save results for evaluation\n        \"\"\"\n        self._metadata = MetadataCatalog.get(dataset_name)\n        self._thing_contiguous_id_to_dataset_id = {\n            v: k for k, v in self._metadata.thing_dataset_id_to_contiguous_id.items()\n        }\n        self._stuff_contiguous_id_to_dataset_id = {\n            v: k for k, v in self._metadata.stuff_dataset_id_to_contiguous_id.items()\n        }\n\n        self._predictions_json = os.path.join(output_dir, \"predictions.json\")\n        self._predictions_dir = os.path.join(output_dir, \"predictions\")\n\n    def reset(self):\n        self._predictions = []\n\n    def _convert_category_id(self, segment_info):\n        isthing = segment_info.pop(\"isthing\", None)\n        if isthing is None:\n            # the model produces panoptic category id directly. 
No more conversion needed\n            return segment_info\n        if isthing is True:\n            segment_info[\"category_id\"] = self._thing_contiguous_id_to_dataset_id[\n                segment_info[\"category_id\"]\n            ]\n        else:\n            segment_info[\"category_id\"] = self._stuff_contiguous_id_to_dataset_id[\n                segment_info[\"category_id\"]\n            ]\n        return segment_info\n\n    def process(self, inputs, outputs):\n        from panopticapi.utils import id2rgb\n\n        for input, output in zip(inputs, outputs):\n            panoptic_img, segments_info = output[\"panoptic_seg\"]\n            panoptic_img = panoptic_img.cpu().numpy()\n\n            file_name = os.path.basename(input[\"file_name\"])\n            file_name_png = os.path.splitext(file_name)[0] + \".png\"\n            with io.BytesIO() as out:\n                Image.fromarray(id2rgb(panoptic_img)).save(out, format=\"PNG\")\n                segments_info = [self._convert_category_id(x) for x in segments_info]\n                self._predictions.append(\n                    {\n                        \"image_id\": input[\"image_id\"],\n                        \"file_name\": file_name_png,\n                        \"png_string\": out.getvalue(),\n                        \"segments_info\": segments_info,\n                    }\n                )\n\n    def evaluate(self):\n        comm.synchronize()\n\n        self._predictions = comm.gather(self._predictions)\n        self._predictions = list(itertools.chain(*self._predictions))\n        if not comm.is_main_process():\n            return\n\n        gt_json = PathManager.get_local_path(self._metadata.panoptic_json)\n        gt_folder = self._metadata.panoptic_root\n\n        with tempfile.TemporaryDirectory(prefix=\"panoptic_eval\") as pred_dir:\n            if \"://\" not in self._predictions_dir:\n                pred_dir = self._predictions_dir\n                os.makedirs(pred_dir, exist_ok=True)\n\n   
         logger.info(\"Writing all panoptic predictions to {} ...\".format(pred_dir))\n            for p in self._predictions:\n                with open(os.path.join(pred_dir, p[\"file_name\"]), \"wb\") as f:\n                    f.write(p.pop(\"png_string\"))\n\n            with open(gt_json, \"r\") as f:\n                json_data = json.load(f)\n            json_data[\"annotations\"] = self._predictions\n            with PathManager.open(self._predictions_json, \"w\") as f:\n                f.write(json.dumps(json_data))\n\n            from panopticapi.evaluation import pq_compute\n\n            with contextlib.redirect_stdout(io.StringIO()):\n                pq_res = pq_compute(\n                    gt_json,\n                    PathManager.get_local_path(self._predictions_json),\n                    gt_folder=gt_folder,\n                    pred_folder=pred_dir,\n                )\n\n        res = {}\n        res[\"PQ\"] = 100 * pq_res[\"All\"][\"pq\"]\n        res[\"SQ\"] = 100 * pq_res[\"All\"][\"sq\"]\n        res[\"RQ\"] = 100 * pq_res[\"All\"][\"rq\"]\n        res[\"PQ_th\"] = 100 * pq_res[\"Things\"][\"pq\"]\n        res[\"SQ_th\"] = 100 * pq_res[\"Things\"][\"sq\"]\n        res[\"RQ_th\"] = 100 * pq_res[\"Things\"][\"rq\"]\n        res[\"PQ_st\"] = 100 * pq_res[\"Stuff\"][\"pq\"]\n        res[\"SQ_st\"] = 100 * pq_res[\"Stuff\"][\"sq\"]\n        res[\"RQ_st\"] = 100 * pq_res[\"Stuff\"][\"rq\"]\n\n        results = OrderedDict({\"panoptic_seg\": res})\n        _print_panoptic_results(pq_res)\n\n        return results\n\n\ndef _print_panoptic_results(pq_res):\n    headers = [\"\", \"PQ\", \"SQ\", \"RQ\", \"#categories\"]\n    data = []\n    for name in [\"All\", \"Things\", \"Stuff\"]:\n        row = [name] + [pq_res[name][k] * 100 for k in [\"pq\", \"sq\", \"rq\"]] + [pq_res[name][\"n\"]]\n        data.append(row)\n    table = tabulate(\n        data, headers=headers, tablefmt=\"pipe\", floatfmt=\".3f\", stralign=\"center\", numalign=\"center\"\n    )\n 
   logger.info(\"Panoptic Evaluation Results:\\n\" + table)\n\n\nif __name__ == \"__main__\":\n    from detectron2.utils.logger import setup_logger\n\n    logger = setup_logger()\n    import argparse\n\n    parser = argparse.ArgumentParser()\n    parser.add_argument(\"--gt-json\")\n    parser.add_argument(\"--gt-dir\")\n    parser.add_argument(\"--pred-json\")\n    parser.add_argument(\"--pred-dir\")\n    args = parser.parse_args()\n\n    from panopticapi.evaluation import pq_compute\n\n    with contextlib.redirect_stdout(io.StringIO()):\n        pq_res = pq_compute(\n            args.gt_json, args.pred_json, gt_folder=args.gt_dir, pred_folder=args.pred_dir\n        )\n        _print_panoptic_results(pq_res)\n"
  },
  {
    "path": "detectron2/evaluation/pascal_voc_evaluation.py",
    "content": "# -*- coding: utf-8 -*-\n# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\n\nimport logging\nimport numpy as np\nimport os\nimport tempfile\nimport xml.etree.ElementTree as ET\nfrom collections import OrderedDict, defaultdict\nfrom functools import lru_cache\nimport torch\n\nfrom detectron2.data import MetadataCatalog\nfrom detectron2.utils import comm\n\nfrom .evaluator import DatasetEvaluator\n\n\nclass PascalVOCDetectionEvaluator(DatasetEvaluator):\n    \"\"\"\n    Evaluate Pascal VOC AP.\n    It contains a synchronization, therefore has to be called from all ranks.\n\n    Note that this is a rewrite of the official Matlab API.\n    The results should be similar, but not identical to the one produced by\n    the official API.\n    \"\"\"\n\n    def __init__(self, dataset_name):\n        \"\"\"\n        Args:\n            dataset_name (str): name of the dataset, e.g., \"voc_2007_test\"\n        \"\"\"\n        self._dataset_name = dataset_name\n        meta = MetadataCatalog.get(dataset_name)\n        self._anno_file_template = os.path.join(meta.dirname, \"Annotations\", \"{}.xml\")\n        self._image_set_path = os.path.join(meta.dirname, \"ImageSets\", \"Main\", meta.split + \".txt\")\n        self._class_names = meta.thing_classes\n        assert meta.year in [2007, 2012], meta.year\n        self._is_2007 = meta.year == 2007\n        self._cpu_device = torch.device(\"cpu\")\n        self._logger = logging.getLogger(__name__)\n\n    def reset(self):\n        self._predictions = defaultdict(list)  # class name -> list of prediction strings\n\n    def process(self, inputs, outputs):\n        for input, output in zip(inputs, outputs):\n            image_id = input[\"image_id\"]\n            instances = output[\"instances\"].to(self._cpu_device)\n            boxes = instances.pred_boxes.tensor.numpy()\n            scores = instances.scores.tolist()\n            classes = instances.pred_classes.tolist()\n            for box, 
score, cls in zip(boxes, scores, classes):\n                xmin, ymin, xmax, ymax = box\n                # The inverse of data loading logic in `datasets/pascal_voc.py`\n                xmin += 1\n                ymin += 1\n                self._predictions[cls].append(\n                    f\"{image_id} {score:.3f} {xmin:.1f} {ymin:.1f} {xmax:.1f} {ymax:.1f}\"\n                )\n\n    def evaluate(self):\n        \"\"\"\n        Returns:\n            dict: has a key \"segm\", whose value is a dict of \"AP\", \"AP50\", and \"AP75\".\n        \"\"\"\n        all_predictions = comm.gather(self._predictions, dst=0)\n        if not comm.is_main_process():\n            return\n        predictions = defaultdict(list)\n        for predictions_per_rank in all_predictions:\n            for clsid, lines in predictions_per_rank.items():\n                predictions[clsid].extend(lines)\n        del all_predictions\n\n        self._logger.info(\n            \"Evaluating {} using {} metric. \"\n            \"Note that results do not use the official Matlab API.\".format(\n                self._dataset_name, 2007 if self._is_2007 else 2012\n            )\n        )\n\n        with tempfile.TemporaryDirectory(prefix=\"pascal_voc_eval_\") as dirname:\n            res_file_template = os.path.join(dirname, \"{}.txt\")\n\n            aps = defaultdict(list)  # iou -> ap per class\n            for cls_id, cls_name in enumerate(self._class_names):\n                lines = predictions.get(cls_id, [\"\"])\n\n                with open(res_file_template.format(cls_name), \"w\") as f:\n                    f.write(\"\\n\".join(lines))\n\n                for thresh in range(50, 100, 5):\n                    rec, prec, ap = voc_eval(\n                        res_file_template,\n                        self._anno_file_template,\n                        self._image_set_path,\n                        cls_name,\n                        ovthresh=thresh / 100.0,\n                        
use_07_metric=self._is_2007,\n                    )\n                    aps[thresh].append(ap * 100)\n\n        ret = OrderedDict()\n        mAP = {iou: np.mean(x) for iou, x in aps.items()}\n        ret[\"bbox\"] = {\"AP\": np.mean(list(mAP.values())), \"AP50\": mAP[50], \"AP75\": mAP[75]}\n        return ret\n\n\n##############################################################################\n#\n# Below code is modified from\n# https://github.com/rbgirshick/py-faster-rcnn/blob/master/lib/datasets/voc_eval.py\n# --------------------------------------------------------\n# Fast/er R-CNN\n# Licensed under The MIT License [see LICENSE for details]\n# Written by Bharath Hariharan\n# --------------------------------------------------------\n\n\"\"\"Python implementation of the PASCAL VOC devkit's AP evaluation code.\"\"\"\n\n\n@lru_cache(maxsize=None)\ndef parse_rec(filename):\n    \"\"\"Parse a PASCAL VOC xml file.\"\"\"\n    tree = ET.parse(filename)\n    objects = []\n    for obj in tree.findall(\"object\"):\n        obj_struct = {}\n        obj_struct[\"name\"] = obj.find(\"name\").text\n        obj_struct[\"pose\"] = obj.find(\"pose\").text\n        obj_struct[\"truncated\"] = int(obj.find(\"truncated\").text)\n        obj_struct[\"difficult\"] = int(obj.find(\"difficult\").text)\n        bbox = obj.find(\"bndbox\")\n        obj_struct[\"bbox\"] = [\n            int(bbox.find(\"xmin\").text),\n            int(bbox.find(\"ymin\").text),\n            int(bbox.find(\"xmax\").text),\n            int(bbox.find(\"ymax\").text),\n        ]\n        objects.append(obj_struct)\n\n    return objects\n\n\ndef voc_ap(rec, prec, use_07_metric=False):\n    \"\"\"Compute VOC AP given precision and recall. 
If use_07_metric is true, uses\n    the VOC 07 11-point method (default:False).\n    \"\"\"\n    if use_07_metric:\n        # 11 point metric\n        ap = 0.0\n        for t in np.arange(0.0, 1.1, 0.1):\n            if np.sum(rec >= t) == 0:\n                p = 0\n            else:\n                p = np.max(prec[rec >= t])\n            ap = ap + p / 11.0\n    else:\n        # correct AP calculation\n        # first append sentinel values at the end\n        mrec = np.concatenate(([0.0], rec, [1.0]))\n        mpre = np.concatenate(([0.0], prec, [0.0]))\n\n        # compute the precision envelope\n        for i in range(mpre.size - 1, 0, -1):\n            mpre[i - 1] = np.maximum(mpre[i - 1], mpre[i])\n\n        # to calculate area under PR curve, look for points\n        # where X axis (recall) changes value\n        i = np.where(mrec[1:] != mrec[:-1])[0]\n\n        # and sum (\\Delta recall) * prec\n        ap = np.sum((mrec[i + 1] - mrec[i]) * mpre[i + 1])\n    return ap\n\n\ndef voc_eval(detpath, annopath, imagesetfile, classname, ovthresh=0.5, use_07_metric=False):\n    \"\"\"rec, prec, ap = voc_eval(detpath,\n                                annopath,\n                                imagesetfile,\n                                classname,\n                                [ovthresh],\n                                [use_07_metric])\n\n    Top level function that does the PASCAL VOC evaluation.\n\n    detpath: Path to detections\n        detpath.format(classname) should produce the detection results file.\n    annopath: Path to annotations\n        annopath.format(imagename) should be the xml annotations file.\n    imagesetfile: Text file containing the list of images, one image per line.\n    classname: Category name (duh)\n    [ovthresh]: Overlap threshold (default = 0.5)\n    [use_07_metric]: Whether to use VOC07's 11 point AP computation\n        (default False)\n    \"\"\"\n    # assumes detections are in detpath.format(classname)\n    # assumes 
annotations are in annopath.format(imagename)\n    # assumes imagesetfile is a text file with each line an image name\n\n    # first load gt\n    # read list of images\n    with open(imagesetfile, \"r\") as f:\n        lines = f.readlines()\n    imagenames = [x.strip() for x in lines]\n\n    # load annots\n    recs = {}\n    for imagename in imagenames:\n        recs[imagename] = parse_rec(annopath.format(imagename))\n\n    # extract gt objects for this class\n    class_recs = {}\n    npos = 0\n    for imagename in imagenames:\n        R = [obj for obj in recs[imagename] if obj[\"name\"] == classname]\n        bbox = np.array([x[\"bbox\"] for x in R])\n        difficult = np.array([x[\"difficult\"] for x in R]).astype(np.bool)\n        # difficult = np.array([False for x in R]).astype(np.bool)  # treat all \"difficult\" as GT\n        det = [False] * len(R)\n        npos = npos + sum(~difficult)\n        class_recs[imagename] = {\"bbox\": bbox, \"difficult\": difficult, \"det\": det}\n\n    # read dets\n    detfile = detpath.format(classname)\n    with open(detfile, \"r\") as f:\n        lines = f.readlines()\n\n    splitlines = [x.strip().split(\" \") for x in lines]\n    image_ids = [x[0] for x in splitlines]\n    confidence = np.array([float(x[1]) for x in splitlines])\n    BB = np.array([[float(z) for z in x[2:]] for x in splitlines]).reshape(-1, 4)\n\n    # sort by confidence\n    sorted_ind = np.argsort(-confidence)\n    BB = BB[sorted_ind, :]\n    image_ids = [image_ids[x] for x in sorted_ind]\n\n    # go down dets and mark TPs and FPs\n    nd = len(image_ids)\n    tp = np.zeros(nd)\n    fp = np.zeros(nd)\n    for d in range(nd):\n        R = class_recs[image_ids[d]]\n        bb = BB[d, :].astype(float)\n        ovmax = -np.inf\n        BBGT = R[\"bbox\"].astype(float)\n\n        if BBGT.size > 0:\n            # compute overlaps\n            # intersection\n            ixmin = np.maximum(BBGT[:, 0], bb[0])\n            iymin = np.maximum(BBGT[:, 1], bb[1])\n 
           ixmax = np.minimum(BBGT[:, 2], bb[2])\n            iymax = np.minimum(BBGT[:, 3], bb[3])\n            iw = np.maximum(ixmax - ixmin + 1.0, 0.0)\n            ih = np.maximum(iymax - iymin + 1.0, 0.0)\n            inters = iw * ih\n\n            # union\n            uni = (\n                (bb[2] - bb[0] + 1.0) * (bb[3] - bb[1] + 1.0)\n                + (BBGT[:, 2] - BBGT[:, 0] + 1.0) * (BBGT[:, 3] - BBGT[:, 1] + 1.0)\n                - inters\n            )\n\n            overlaps = inters / uni\n            ovmax = np.max(overlaps)\n            jmax = np.argmax(overlaps)\n\n        if ovmax > ovthresh:\n            if not R[\"difficult\"][jmax]:\n                if not R[\"det\"][jmax]:\n                    tp[d] = 1.0\n                    R[\"det\"][jmax] = 1\n                else:\n                    fp[d] = 1.0\n        else:\n            fp[d] = 1.0\n\n    # compute precision recall\n    fp = np.cumsum(fp)\n    tp = np.cumsum(tp)\n    rec = tp / float(npos)\n    # avoid divide by zero in case the first detection matches a difficult\n    # ground truth\n    prec = tp / np.maximum(tp + fp, np.finfo(np.float64).eps)\n    ap = voc_ap(rec, prec, use_07_metric)\n\n    return rec, prec, ap\n"
  },
  {
    "path": "detectron2/evaluation/rotated_coco_evaluation.py",
    "content": "import itertools\nimport json\nimport numpy as np\nimport os\nimport torch\nfrom fvcore.common.file_io import PathManager\nfrom pycocotools.cocoeval import COCOeval, maskUtils\n\nfrom detectron2.structures import BoxMode, RotatedBoxes, pairwise_iou_rotated\n\nfrom .coco_evaluation import COCOEvaluator\n\n\nclass RotatedCOCOeval(COCOeval):\n    @staticmethod\n    def is_rotated(box_list):\n        if type(box_list) == np.ndarray:\n            return box_list.shape[1] == 5\n        elif type(box_list) == list:\n            if box_list == []:  # cannot decide the box_dim\n                return False\n            return np.all(\n                np.array(\n                    [\n                        (len(obj) == 5) and ((type(obj) == list) or (type(obj) == np.ndarray))\n                        for obj in box_list\n                    ]\n                )\n            )\n        return False\n\n    @staticmethod\n    def boxlist_to_tensor(boxlist, output_box_dim):\n        if type(boxlist) == np.ndarray:\n            box_tensor = torch.from_numpy(boxlist)\n        elif type(boxlist) == list:\n            if boxlist == []:\n                return torch.zeros((0, output_box_dim), dtype=torch.float32)\n            else:\n                box_tensor = torch.FloatTensor(boxlist)\n        else:\n            raise Exception(\"Unrecognized boxlist type\")\n\n        input_box_dim = box_tensor.shape[1]\n        if input_box_dim != output_box_dim:\n            if input_box_dim == 4 and output_box_dim == 5:\n                box_tensor = BoxMode.convert(box_tensor, BoxMode.XYWH_ABS, BoxMode.XYWHA_ABS)\n            else:\n                raise Exception(\n                    \"Unable to convert from {}-dim box to {}-dim box\".format(\n                        input_box_dim, output_box_dim\n                    )\n                )\n        return box_tensor\n\n    def compute_iou_dt_gt(self, dt, gt, is_crowd):\n        if self.is_rotated(dt) or self.is_rotated(gt):\n 
           # TODO: take is_crowd into consideration\n            assert all(c == 0 for c in is_crowd)\n            dt = RotatedBoxes(self.boxlist_to_tensor(dt, output_box_dim=5))\n            gt = RotatedBoxes(self.boxlist_to_tensor(gt, output_box_dim=5))\n            return pairwise_iou_rotated(dt, gt)\n        else:\n            # This is the same as the classical COCO evaluation\n            return maskUtils.iou(dt, gt, is_crowd)\n\n    def computeIoU(self, imgId, catId):\n        p = self.params\n        if p.useCats:\n            gt = self._gts[imgId, catId]\n            dt = self._dts[imgId, catId]\n        else:\n            gt = [_ for cId in p.catIds for _ in self._gts[imgId, cId]]\n            dt = [_ for cId in p.catIds for _ in self._dts[imgId, cId]]\n        if len(gt) == 0 and len(dt) == 0:\n            return []\n        inds = np.argsort([-d[\"score\"] for d in dt], kind=\"mergesort\")\n        dt = [dt[i] for i in inds]\n        if len(dt) > p.maxDets[-1]:\n            dt = dt[0 : p.maxDets[-1]]\n\n        assert p.iouType == \"bbox\", \"unsupported iouType for iou computation\"\n\n        g = [g[\"bbox\"] for g in gt]\n        d = [d[\"bbox\"] for d in dt]\n\n        # compute iou between each dt and gt region\n        iscrowd = [int(o[\"iscrowd\"]) for o in gt]\n\n        # Note: this function is copied from cocoeval.py in cocoapi\n        # and the major difference is here.\n        ious = self.compute_iou_dt_gt(d, g, iscrowd)\n        return ious\n\n\nclass RotatedCOCOEvaluator(COCOEvaluator):\n    \"\"\"\n    Evaluate object proposal/instance detection outputs using COCO-like metrics and APIs,\n    with rotated boxes support.\n    Note: this uses IOU only and does not consider angle differences.\n    \"\"\"\n\n    def process(self, inputs, outputs):\n        \"\"\"\n        Args:\n            inputs: the inputs to a COCO model (e.g., GeneralizedRCNN).\n                It is a list of dict. 
Each dict corresponds to an image and\n                contains keys like \"height\", \"width\", \"file_name\", \"image_id\".\n            outputs: the outputs of a COCO model. It is a list of dicts with key\n                \"instances\" that contains :class:`Instances`.\n        \"\"\"\n        for input, output in zip(inputs, outputs):\n            prediction = {\"image_id\": input[\"image_id\"]}\n\n            if \"instances\" in output:\n                instances = output[\"instances\"].to(self._cpu_device)\n\n                prediction[\"instances\"] = self.instances_to_json(instances, input[\"image_id\"])\n            if \"proposals\" in output:\n                prediction[\"proposals\"] = output[\"proposals\"].to(self._cpu_device)\n            self._predictions.append(prediction)\n\n    def instances_to_json(self, instances, img_id):\n        num_instance = len(instances)\n        if num_instance == 0:\n            return []\n\n        boxes = instances.pred_boxes.tensor.numpy()\n        if boxes.shape[1] == 4:\n            boxes = BoxMode.convert(boxes, BoxMode.XYXY_ABS, BoxMode.XYWH_ABS)\n        boxes = boxes.tolist()\n        scores = instances.scores.tolist()\n        classes = instances.pred_classes.tolist()\n\n        results = []\n        for k in range(num_instance):\n            result = {\n                \"image_id\": img_id,\n                \"category_id\": classes[k],\n                \"bbox\": boxes[k],\n                \"score\": scores[k],\n            }\n\n            results.append(result)\n        return results\n\n    def _eval_predictions(self, tasks):\n        \"\"\"\n        Evaluate self._predictions on the given tasks.\n        Fill self._results with the metrics of the tasks.\n        \"\"\"\n        self._logger.info(\"Preparing results for COCO format ...\")\n        self._coco_results = list(itertools.chain(*[x[\"instances\"] for x in self._predictions]))\n\n        # unmap the category ids for COCO\n        if 
hasattr(self._metadata, \"thing_dataset_id_to_contiguous_id\"):\n            reverse_id_mapping = {\n                v: k for k, v in self._metadata.thing_dataset_id_to_contiguous_id.items()\n            }\n            for result in self._coco_results:\n                result[\"category_id\"] = reverse_id_mapping[result[\"category_id\"]]\n\n        if self._output_dir:\n            file_path = os.path.join(self._output_dir, \"coco_instances_results.json\")\n            self._logger.info(\"Saving results to {}\".format(file_path))\n            with PathManager.open(file_path, \"w\") as f:\n                f.write(json.dumps(self._coco_results))\n                f.flush()\n\n        if not self._do_evaluation:\n            self._logger.info(\"Annotations are not available for evaluation.\")\n            return\n\n        self._logger.info(\"Evaluating predictions ...\")\n        for task in sorted(tasks):\n            assert task == \"bbox\", \"Task {} is not supported\".format(task)\n            coco_eval = (\n                self._evaluate_predictions_on_coco(self._coco_api, self._coco_results)\n                if len(self._coco_results) > 0\n                else None  # cocoapi does not handle empty results very well\n            )\n\n            res = self._derive_coco_results(\n                coco_eval, task, class_names=self._metadata.get(\"thing_classes\")\n            )\n            self._results[task] = res\n\n    def _evaluate_predictions_on_coco(self, coco_gt, coco_results):\n        \"\"\"\n        Evaluate the coco results using COCOEval API.\n        \"\"\"\n        assert len(coco_results) > 0\n\n        coco_dt = coco_gt.loadRes(coco_results)\n\n        # Only bbox is supported for now\n        coco_eval = RotatedCOCOeval(coco_gt, coco_dt, iouType=\"bbox\")\n\n        coco_eval.evaluate()\n        coco_eval.accumulate()\n        coco_eval.summarize()\n\n        return coco_eval\n"
  },
  {
    "path": "detectron2/evaluation/sem_seg_evaluation.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport itertools\nimport json\nimport logging\nimport numpy as np\nimport os\nfrom collections import OrderedDict\nimport PIL.Image as Image\nimport pycocotools.mask as mask_util\nimport torch\nfrom fvcore.common.file_io import PathManager\n\nfrom detectron2.data import DatasetCatalog, MetadataCatalog\nfrom detectron2.utils.comm import all_gather, is_main_process, synchronize\n\nfrom .evaluator import DatasetEvaluator\n\n\nclass SemSegEvaluator(DatasetEvaluator):\n    \"\"\"\n    Evaluate semantic segmentation\n    \"\"\"\n\n    def __init__(self, dataset_name, distributed, num_classes, ignore_label=255, output_dir=None):\n        \"\"\"\n        Args:\n            dataset_name (str): name of the dataset to be evaluated.\n            distributed (True): if True, will collect results from all ranks for evaluation.\n                Otherwise, will evaluate the results in the current process.\n            num_classes (int): number of classes\n            ignore_label (int): value in semantic segmentation ground truth. 
Predictions for the\n            corresponding pixels should be ignored.\n            output_dir (str): an output directory to dump results.\n        \"\"\"\n        self._dataset_name = dataset_name\n        self._distributed = distributed\n        self._output_dir = output_dir\n        self._num_classes = num_classes\n        self._ignore_label = ignore_label\n        self._N = num_classes + 1\n\n        self._cpu_device = torch.device(\"cpu\")\n        self._logger = logging.getLogger(__name__)\n\n        self.image_id_to_gt_file = {\n            dataset_record[\"image_id\"]: dataset_record[\"sem_seg_file_name\"]\n            for dataset_record in DatasetCatalog.get(dataset_name)\n        }\n\n        meta = MetadataCatalog.get(dataset_name)\n        # Dict that maps contiguous training ids to COCO category ids\n        try:\n            c2d = meta.stuff_dataset_id_to_contiguous_id\n            self._contiguous_id_to_dataset_id = {v: k for k, v in c2d.items()}\n        except AttributeError:\n            self._contiguous_id_to_dataset_id = None\n\n    def reset(self):\n        self._conf_matrix = np.zeros((self._N, self._N), dtype=np.int64)\n        self._predictions = []\n\n    def process(self, inputs, outputs):\n        \"\"\"\n        Args:\n            inputs: the inputs to a model.\n                It is a list of dicts. Each dict corresponds to an image and\n                contains keys like \"height\", \"width\", \"file_name\", \"image_id\".\n            outputs: the outputs of a model. 
It is either list of semantic segmentation predictions\n                (Tensor [H, W]) or list of dicts with key \"sem_seg\" that contains semantic\n                segmentation prediction in the same format.\n        \"\"\"\n        for input, output in zip(inputs, outputs):\n            output = output[\"sem_seg\"].argmax(dim=0).to(self._cpu_device)\n            pred = np.array(output, dtype=int)\n            with PathManager.open(self.image_id_to_gt_file[input[\"image_id\"]], \"rb\") as f:\n                gt = np.array(Image.open(f), dtype=int)\n\n            gt[gt == self._ignore_label] = self._num_classes\n\n            self._conf_matrix += np.bincount(\n                self._N * pred.reshape(-1) + gt.reshape(-1), minlength=self._N ** 2\n            ).reshape(self._N, self._N)\n\n            self._predictions.extend(self.encode_json_sem_seg(pred, input[\"image_id\"]))\n\n    def evaluate(self):\n        \"\"\"\n        Evaluates standard semantic segmentation metrics (http://cocodataset.org/#stuff-eval):\n\n        * Mean intersection-over-union averaged across classes (mIoU)\n        * Frequency Weighted IoU (fwIoU)\n        * Mean pixel accuracy averaged across classes (mACC)\n        * Pixel Accuracy (pACC)\n        \"\"\"\n        if self._distributed:\n            synchronize()\n            conf_matrix_list = all_gather(self._conf_matrix)\n            self._predictions = all_gather(self._predictions)\n            self._predictions = list(itertools.chain(*self._predictions))\n            if not is_main_process():\n                return\n\n            self._conf_matrix = np.zeros_like(self._conf_matrix)\n            for conf_matrix in conf_matrix_list:\n                self._conf_matrix += conf_matrix\n\n        if self._output_dir:\n            PathManager.mkdirs(self._output_dir)\n            file_path = os.path.join(self._output_dir, \"sem_seg_predictions.json\")\n            with PathManager.open(file_path, \"w\") as f:\n                
f.write(json.dumps(self._predictions))\n\n        acc = np.zeros(self._num_classes, dtype=float)\n        iou = np.zeros(self._num_classes, dtype=float)\n        tp = self._conf_matrix.diagonal()[:-1].astype(float)\n        pos_gt = np.sum(self._conf_matrix[:-1, :-1], axis=0).astype(float)\n        class_weights = pos_gt / np.sum(pos_gt)\n        pos_pred = np.sum(self._conf_matrix[:-1, :-1], axis=1).astype(float)\n        acc_valid = pos_gt > 0\n        acc[acc_valid] = tp[acc_valid] / pos_gt[acc_valid]\n        iou_valid = (pos_gt + pos_pred) > 0\n        union = pos_gt + pos_pred - tp\n        iou[iou_valid] = tp[iou_valid] / union[iou_valid]\n        macc = np.sum(acc) / np.sum(acc_valid)\n        miou = np.sum(iou) / np.sum(iou_valid)\n        fiou = np.sum(iou * class_weights)\n        pacc = np.sum(tp) / np.sum(pos_gt)\n\n        res = {}\n        res[\"mIoU\"] = 100 * miou\n        res[\"fwIoU\"] = 100 * fiou\n        res[\"mACC\"] = 100 * macc\n        res[\"pACC\"] = 100 * pacc\n\n        if self._output_dir:\n            file_path = os.path.join(self._output_dir, \"sem_seg_evaluation.pth\")\n            with PathManager.open(file_path, \"wb\") as f:\n                torch.save(res, f)\n        results = OrderedDict({\"sem_seg\": res})\n        self._logger.info(results)\n        return results\n\n    def encode_json_sem_seg(self, sem_seg, image_id):\n        \"\"\"\n        Convert semantic segmentation to COCO stuff format with segments encoded as RLEs.\n        See http://cocodataset.org/#format-results\n        \"\"\"\n        json_list = []\n        for label in np.unique(sem_seg):\n            if self._contiguous_id_to_dataset_id is not None:\n                assert (\n                    label in self._contiguous_id_to_dataset_id\n                ), \"Label {} is not in the metadata info for {}\".format(label, self._dataset_name)\n                dataset_id = self._contiguous_id_to_dataset_id[label]\n            else:\n               
 dataset_id = int(label)\n            mask = (sem_seg == label).astype(np.uint8)\n            mask_rle = mask_util.encode(np.array(mask[:, :, None], order=\"F\"))[0]\n            mask_rle[\"counts\"] = mask_rle[\"counts\"].decode(\"utf-8\")\n            json_list.append(\n                {\"image_id\": image_id, \"category_id\": dataset_id, \"segmentation\": mask_rle}\n            )\n        return json_list\n"
  },
  {
    "path": "detectron2/evaluation/soba_evaluation.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport contextlib\nimport copy\nimport io\nimport itertools\nimport json\nimport logging\nimport numpy as np\nimport os\nimport pickle\nfrom collections import OrderedDict\nimport pysobatools.mask as mask_util\nimport torch\nfrom fvcore.common.file_io import PathManager\nfrom pysobatools.soba import SOBA\nfrom pysobatools.cocoeval import COCOeval\nfrom tabulate import tabulate\n\nimport detectron2.utils.comm as comm\nfrom detectron2.data import MetadataCatalog\nfrom detectron2.structures import Boxes, BoxMode, pairwise_iou\nfrom detectron2.utils.logger import create_small_table\n\nfrom .evaluator import DatasetEvaluator\n\n\nclass SOBAEvaluator(DatasetEvaluator):\n    \"\"\"\n    Evaluate object proposal, instance detection/segmentation, keypoint detection\n    outputs using SOBA's metrics and APIs.\n    \"\"\"\n\n    def __init__(self, dataset_name, cfg, distributed, output_dir=None):\n        \"\"\"\n        Args:\n            dataset_name (str): name of the dataset to be evaluated.\n                It must have the following corresponding metadata:\n                    \"json_file\": the path to the SOBA format annotation\n            cfg (CfgNode): config instance\n            distributed (True): if True, will collect results from all ranks for evaluation.\n                Otherwise, will evaluate the results in the current process.\n            output_dir (str): optional, an output directory to dump results.\n        \"\"\"\n        self._tasks = self._tasks_from_config(cfg)\n        self._distributed = distributed\n        self._output_dir = output_dir\n\n        self._cpu_device = torch.device(\"cpu\")\n        self._logger = logging.getLogger(__name__)\n\n        self._metadata = MetadataCatalog.get(dataset_name)\n        json_file = PathManager.get_local_path(self._metadata.json_file)\n        with contextlib.redirect_stdout(io.StringIO()):\n            self._soba_api = 
SOBA(json_file)\n\n        self._kpt_oks_sigmas = cfg.TEST.KEYPOINT_OKS_SIGMAS\n        # Test set json files do not contain annotations (evaluation must be\n        # performed using the SOBA evaluation server).\n        self._do_evaluation = len(self._soba_api.getAnnIds()) > 0\n\n    def reset(self):\n        self._predictions = []\n        self._soba_results = []\n\n    def _tasks_from_config(self, cfg):\n        \"\"\"\n        Returns:\n            tuple[str]: tasks that can be evaluated under the given configuration.\n        \"\"\"\n        tasks = (\"bbox\",)\n        if cfg.MODEL.MASK_ON:\n            tasks = tasks + (\"segm\",)\n        if cfg.MODEL.KEYPOINT_ON:\n            tasks = tasks + (\"keypoints\",)\n        return tasks\n\n    def process(self, inputs, outputs):\n        \"\"\"\n        Args:\n            inputs: the inputs to a SOBA model (e.g., GeneralizedRCNN).\n                It is a list of dict. Each dict corresponds to an image and\n                contains keys like \"height\", \"width\", \"file_name\", \"image_id\".\n            outputs: the outputs of a SOBA model. 
It is a list of dicts with key\n                \"instances\" that contains :class:`Instances`.\n        \"\"\"\n        results,associations = outputs\n        for input, result, association in zip(inputs, results, associations):\n            prediction = {\"image_id\": input[\"image_id\"]}\n            # TODO this is ugly\n            if \"instances\" in result:\n                instances = result[\"instances\"].to(self._cpu_device)\n\n                if instances.has(\"pred_masks\"):\n                    # use RLE to encode the masks, because they are too large and takes memory\n                    # since this evaluator stores results of the entire dataset\n                    # Our model may predict bool array, but sobaapi expects uint8\n                    rles = [\n                        mask_util.encode(np.array(mask[:, :, None], order=\"F\", dtype=\"uint8\"))[0]\n                        for mask in instances.pred_masks\n                    ]\n                    for rle in rles:\n                        # \"counts\" is an array encoded by mask_util as a byte-stream. Python3's\n                        # json writer which always produces strings cannot serialize a bytestream\n                        # unless you decode it. 
Thankfully, utf-8 works out (which is also what\n                        # the pysobatools/_mask.pyx does).\n                        rle[\"counts\"] = rle[\"counts\"].decode(\"utf-8\")\n                    instances.pred_masks_rle = rles\n                    instances.remove(\"pred_masks\")\n\n                prediction[\"instances\"] = instances_to_json(instances, input[\"image_id\"])\n            if \"instances\" in association:\n                instances = association[\"instances\"].to(self._cpu_device)\n                if instances.has(\"pred_masks\"):\n\n                    rles = [\n                        mask_util.encode(np.array(mask[:,:],order='F',dtype='uint8'))[0]\n                        for mask in instances.pred_masks\n                    ]\n                    for rle in rles:\n                        rle[\"counts\"] = rle[\"counts\"].decode(\"utf-8\")\n                    instances.pred_masks_rle = rles\n                    instances.remove(\"pred_masks\")\n                prediction[\"associations\"] = instances_to_json(instances, input[\"image_id\"])\n            if \"proposals\" in result:\n                prediction[\"proposals\"] = result[\"proposals\"].to(self._cpu_device)\n            if \"asso_proposals\" in association:\n                prediction[\"asso_proposals\"] = association[\"proposals\"].to(self._cpu_device)\n            self._predictions.append(prediction)\n\n    def evaluate(self): \n        if self._distributed:\n            comm.synchronize()\n            self._predictions = comm.gather(self._predictions, dst=0)\n            self._predictions = list(itertools.chain(*self._predictions))\n\n            if not comm.is_main_process():\n                return {},{}\n        \n        if len(self._predictions) == 0:\n            self._logger.warning(\"[SOBAEvaluator] Did not receive valid predictions.\")\n            return {}\n\n        if self._output_dir:\n            PathManager.mkdirs(self._output_dir)\n            file_path = 
os.path.join(self._output_dir, \"instances_predictions.pth\")\n            with PathManager.open(file_path, \"wb\") as f:\n                torch.save(self._predictions, f)\n\n        self._results = OrderedDict()\n        self._association_results = OrderedDict()\n\n        if \"proposals\" in self._predictions[0]: #and \"asso_proposals\" in self._predictions[0]:\n            self._eval_box_proposals()\n\n        if \"instances\" in self._predictions[0]: #and \"associations\" in self._predictions[0]:\n            self._eval_predictions(set(self._tasks))\n        # Copy so the caller can do whatever with results\n        return copy.deepcopy(self._results),copy.deepcopy(self._association_results)\n\n    def _eval_predictions(self, tasks):\n        \"\"\"\n        Evaluate self._predictions on the given tasks.\n        Fill self._results with the metrics of the tasks.\n        \"\"\"\n        self._logger.info(\"Preparing results for SOBA format ...\")\n        self._soba_results = list(itertools.chain(*[x[\"instances\"] for x in self._predictions]))\n        self._asso_results = list(itertools.chain(*[x[\"associations\"] for x in self._predictions]))\n        # unmap the category ids for SOBA\n        if hasattr(self._metadata, \"thing_dataset_id_to_contiguous_id\"):\n            reverse_id_mapping = {\n                v: k for k, v in self._metadata.thing_dataset_id_to_contiguous_id.items()\n            }\n            for result in self._soba_results:\n                result[\"category_id\"] = reverse_id_mapping[result[\"category_id\"]]\n\n        if hasattr(self._metadata, \"association_dataset_id_to_contiguous_id\"):\n            reverse_asso_if_mapping = {\n                v: k for k,v in self._metadata.association_dataset_id_to_contiguous_id.items()\n            }\n            for result in self._asso_results:\n                result[\"category_id\"] = reverse_asso_if_mapping[result[\"category_id\"]]\n        if self._output_dir:\n            file_path = 
os.path.join(self._output_dir, \"soba_instances_results.json\")\n            self._logger.info(\"Saving results to {}\".format(file_path))\n            with PathManager.open(file_path, \"w\") as f:\n                f.write(json.dumps(self._soba_results))\n                f.flush()\n            file_path = os.path.join(self._output_dir, \"soba_association_results.json\")\n            self._logger.info(\"Saving association results to {}\".format(file_path))\n            with PathManager.open(file_path, \"w\") as f:\n                f.write(json.dumps(self._asso_results))\n                f.flush()\n        if not self._do_evaluation:\n            self._logger.info(\"Annotations are not available for evaluation.\")\n            return\n        self._logger.info(\"Evaluating predictions ...\")\n        tasks = ('bbox','segm')\n        for task in sorted(tasks):\n            soba_eval = (\n                _evaluate_predictions_on_soba(\n                    self._soba_api, self._soba_results, task, kpt_oks_sigmas=self._kpt_oks_sigmas\n                )\n                if len(self._soba_results) > 0\n                else None  # sobaapi does not handle empty results very well\n            )\n\n            res = self._derive_soba_results(\n                soba_eval, task, class_names=self._metadata.get(\"thing_classes\")\n            )\n            self._results[task] = res\n        self._logger.info(\"Evaluating association predictions ...\")\n        tasks = ('bbox','segm')\n        for task in sorted(tasks):\n            soba_eval =(\n                _evaluate_predictions_on_soba(\n                    self._soba_api, self._asso_results, task, kpt_oks_sigmas=self._kpt_oks_sigmas,is_asso=True\n                )\n                if len(self._asso_results)>0\n                else None\n            )\n            res = self._derive_soba_results(\n                soba_eval, task, class_names=self._metadata.get(\"association_classes\")\n            )\n            
self._association_results[task] = res\n    def _eval_box_proposals(self):\n        \"\"\"\n        Evaluate the box proposals in self._predictions.\n        Fill self._results with the metrics for \"box_proposals\" task.\n        \"\"\"\n        if self._output_dir:\n            # Saving generated box proposals to file.\n            # Predicted box_proposals are in XYXY_ABS mode.\n            bbox_mode = BoxMode.XYXY_ABS.value\n            ids, boxes, objectness_logits = [], [], []\n            asso_boxes, asso_objectness_logits = [], []\n            for prediction in self._predictions:\n                ids.append(prediction[\"image_id\"])\n                boxes.append(prediction[\"proposals\"].proposal_boxes.tensor.numpy())\n                objectness_logits.append(prediction[\"proposals\"].objectness_logits.numpy())\n                asso_boxes.append(prediction[\"asso_proposals\"].proposal_boxes.tensor.numpy())\n                asso_objectness_logits.append(prediction[\"asso_proposals\"].objectness_logits.numpy())\n\n            proposal_data = {\n                \"boxes\": boxes,\n                \"objectness_logits\": objectness_logits,\n                \"ids\": ids,\n                \"bbox_mode\": bbox_mode,\n            }\n            asso_proposal_data = {\n                \"boxes\": asso_boxes,\n                \"objectness_logits\": asso_objectness_logits,\n                \"ids\": ids,\n                \"bbox_mode\": bbox_mode,\n            }\n            with PathManager.open(os.path.join(self._output_dir, \"box_proposals.pkl\"), \"wb\") as f:\n                pickle.dump(proposal_data, f)\n            with PathManager.open(os.path.join(self._output_dir, \"asso_box_proposals.pkl\"), \"wb\") as f:\n                pickle.dump(asso_proposal_data, f)\n        if not self._do_evaluation:\n            self._logger.info(\"Annotations are not available for evaluation.\")\n            return\n\n        self._logger.info(\"Evaluating bbox proposals ...\")\n        res = 
{}\n        asso_res = {}\n        areas = {\"all\": \"\", \"small\": \"s\", \"medium\": \"m\", \"large\": \"l\"}\n        for limit in [100, 1000]:\n            for area, suffix in areas.items():\n                stats = _evaluate_box_proposals(\n                    self._predictions, self._soba_api, area=area, limit=limit\n                )\n                key = \"AR{}@{:d}\".format(suffix, limit)\n                res[key] = float(stats[\"ar\"].item() * 100)\n                stats = _evaluate_box_proposals(\n                    self._predictions, self._soba_api, area = area, limit =limit\n                )\n                key = \"AR{}@{:d}\".format(suffix, limit)\n                asso_res[key] = float(stats[\"ar\"].item() * 100)\n\n        self._logger.info(\"Proposal metrics: \\n\" + create_small_table(res))\n        self._results[\"box_proposals\"] = res\n        self._association_results[\"box_proposals\"] = asso_res\n\n    def _derive_soba_results(self, soba_eval, iou_type, class_names=None):\n        \"\"\"\n        Derive the desired score numbers from summarized SOBAeval.\n\n        Args:\n            soba_eval (None or SOBAEval): None represents no predictions from model.\n            iou_type (str):\n            class_names (None or list[str]): if provided, will use it to predict\n                per-category AP.\n\n        Returns:\n            a dict of {metric name: score}\n        \"\"\"\n\n        metrics = {\n            \"bbox\": [\"AP\", \"AP50\", \"AP75\", \"APs\", \"APm\", \"APl\"],\n            \"segm\": [\"AP\", \"AP50\", \"AP75\", \"APs\", \"APm\", \"APl\"],\n            \"keypoints\": [\"AP\", \"AP50\", \"AP75\", \"APm\", \"APl\"],\n        }[iou_type]\n\n        if soba_eval is None:\n            self._logger.warn(\"No predictions from the model! 
Set scores to -1\")\n            return {metric: -1 for metric in metrics}\n\n        # the standard metrics\n        results = {metric: float(soba_eval.stats[idx] * 100) for idx, metric in enumerate(metrics)}\n        self._logger.info(\n            \"Evaluation results for {}: \\n\".format(iou_type) + create_small_table(results)\n        )\n\n        if class_names is None or len(class_names) <= 1:\n            return results\n        # Compute per-category AP\n        # from https://github.com/facebookresearch/Detectron/blob/a6a835f5b8208c45d0dce217ce9bbda915f44df7/detectron/datasets/json_dataset_evaluator.py#L222-L252 # noqa\n        precisions = soba_eval.eval[\"precision\"]\n        # precision has dims (iou, recall, cls, area range, max dets)\n        assert len(class_names) == precisions.shape[2]\n\n        results_per_category = []\n        for idx, name in enumerate(class_names):\n            # area range index 0: all area ranges\n            # max dets index -1: typically 100 per image\n            precision = precisions[:, :, idx, 0, -1]\n            precision = precision[precision > -1]\n            ap = np.mean(precision) if precision.size else float(\"nan\")\n            results_per_category.append((\"{}\".format(name), float(ap * 100)))\n\n        # tabulate it\n        N_COLS = min(6, len(results_per_category) * 2)\n        results_flatten = list(itertools.chain(*results_per_category))\n        results_2d = itertools.zip_longest(*[results_flatten[i::N_COLS] for i in range(N_COLS)])\n        table = tabulate(\n            results_2d,\n            tablefmt=\"pipe\",\n            floatfmt=\".3f\",\n            headers=[\"category\", \"AP\"] * (N_COLS // 2),\n            numalign=\"left\",\n        )\n        self._logger.info(\"Per-category {} AP: \\n\".format(iou_type) + table)\n\n        results.update({\"AP-\" + name: ap for name, ap in results_per_category})\n        return results\n\n\ndef instances_to_json(instances, img_id):\n    num_instance = 
len(instances)\n    has_light = instances.has(\"pred_light\")\n    if num_instance == 0:\n        return []\n    try:\n        boxes = instances.pred_boxes.tensor.numpy()\n        boxes = BoxMode.convert(boxes, BoxMode.XYXY_ABS, BoxMode.XYWH_ABS)\n        boxes = boxes.tolist()\n        scores = instances.scores.tolist()\n        classes = instances.pred_classes.tolist()\n        association = instances.pred_associations\n        if has_light:\n            light = instances.pred_light\n    except:\n        boxes = instances.pred_boxes\n        boxes =BoxMode.convert(np.array(boxes,dtype='float'),BoxMode.XYXY_ABS,BoxMode.XYWH_ABS)\n        boxes = boxes.tolist()\n        scores = instances.scores\n        association = instances.pred_associations\n        classes = instances.pred_classes\n        if has_light:\n            light = instances.pred_light\n\n    has_mask = instances.has(\"pred_masks_rle\")\n    \n    if has_mask:\n        rles = instances.pred_masks_rle\n\n    has_keypoints = instances.has(\"pred_keypoints\")\n    if has_keypoints:\n        keypoints = instances.pred_keypoints\n\n    results = []\n    for k in range(num_instance):\n        result = {\n            \"image_id\": img_id,\n            \"category_id\": classes[k],\n            \"bbox\": boxes[k],\n            \"score\": scores[k],\n            'association_id':association[k]\n        }\n        if has_light:\n            result[\"light\"] = light[k]\n        if has_mask:\n            result[\"segmentation\"] = rles[k]\n        if has_keypoints:\n            # In SOBA annotations,\n            # keypoints coordinates are pixel indices.\n            # However our predictions are floating point coordinates.\n            # Therefore we subtract 0.5 to be consistent with the annotation format.\n            # This is the inverse of data loading logic in `datasets/soba.py`.\n            keypoints[k][:,:2] -= 0.5\n            result[\"keypoints\"] = keypoints[k].flatten().tolist()\n        
results.append(result)\n    return results\n\n\n# inspired from Detectron:\n# https://github.com/facebookresearch/Detectron/blob/a6a835f5b8208c45d0dce217ce9bbda915f44df7/detectron/datasets/json_dataset_evaluator.py#L255 # noqa\ndef _evaluate_box_proposals(dataset_predictions, soba_api, thresholds=None, area=\"all\", limit=None):\n    \"\"\"\n    Evaluate detection proposal recall metrics. This function is a much\n    faster alternative to the official SOBA API recall evaluation code. However,\n    it produces slightly different results.\n    \"\"\"\n    # Record max overlap value for each gt box\n    # Return vector of overlap values\n    areas = {\n        \"all\": 0,\n        \"small\": 1,\n        \"medium\": 2,\n        \"large\": 3,\n        \"96-128\": 4,\n        \"128-256\": 5,\n        \"256-512\": 6,\n        \"512-inf\": 7,\n    }\n    area_ranges = [\n        [0 ** 2, 1e5 ** 2],  # all\n        [0 ** 2, 32 ** 2],  # small\n        [32 ** 2, 96 ** 2],  # medium\n        [96 ** 2, 1e5 ** 2],  # large\n        [96 ** 2, 128 ** 2],  # 96-128\n        [128 ** 2, 256 ** 2],  # 128-256\n        [256 ** 2, 512 ** 2],  # 256-512\n        [512 ** 2, 1e5 ** 2],\n    ]  # 512-inf\n    assert area in areas, \"Unknown area range: {}\".format(area)\n    area_range = area_ranges[areas[area]]\n    gt_overlaps = []\n    num_pos = 0\n\n    for prediction_dict in dataset_predictions:\n        predictions = prediction_dict[\"proposals\"]\n\n        # sort predictions in descending order\n        # TODO maybe remove this and make it explicit in the documentation\n        inds = predictions.objectness_logits.sort(descending=True)[1]\n        predictions = predictions[inds]\n\n        ann_ids = soba_api.getAnnIds(imgIds=prediction_dict[\"image_id\"])\n        anno = soba_api.loadAnns(ann_ids)\n        gt_boxes = [\n            BoxMode.convert(obj[\"bbox\"], BoxMode.XYWH_ABS, BoxMode.XYXY_ABS)\n            for obj in anno\n            if obj[\"iscrowd\"] == 0\n        ]\n       
 gt_boxes = torch.as_tensor(gt_boxes).reshape(-1, 4)  # guard against no boxes\n        gt_boxes = Boxes(gt_boxes)\n        gt_areas = torch.as_tensor([obj[\"area\"] for obj in anno if obj[\"iscrowd\"] == 0])\n\n        if len(gt_boxes) == 0 or len(predictions) == 0:\n            continue\n\n        valid_gt_inds = (gt_areas >= area_range[0]) & (gt_areas <= area_range[1])\n        gt_boxes = gt_boxes[valid_gt_inds]\n\n        num_pos += len(gt_boxes)\n\n        if len(gt_boxes) == 0:\n            continue\n\n        if limit is not None and len(predictions) > limit:\n            predictions = predictions[:limit]\n\n        overlaps = pairwise_iou(predictions.proposal_boxes, gt_boxes)\n\n        _gt_overlaps = torch.zeros(len(gt_boxes))\n        for j in range(min(len(predictions), len(gt_boxes))):\n            # find which proposal box maximally covers each gt box\n            # and get the iou amount of coverage for each gt box\n            max_overlaps, argmax_overlaps = overlaps.max(dim=0)\n\n            # find which gt box is 'best' covered (i.e. 
'best' = most iou)\n            gt_ovr, gt_ind = max_overlaps.max(dim=0)\n            assert gt_ovr >= 0\n            # find the proposal box that covers the best covered gt box\n            box_ind = argmax_overlaps[gt_ind]\n            # record the iou coverage of this gt box\n            _gt_overlaps[j] = overlaps[box_ind, gt_ind]\n            assert _gt_overlaps[j] == gt_ovr\n            # mark the proposal box and the gt box as used\n            overlaps[box_ind, :] = -1\n            overlaps[:, gt_ind] = -1\n\n        # append recorded iou coverage level\n        gt_overlaps.append(_gt_overlaps)\n    gt_overlaps = torch.cat(gt_overlaps, dim=0)\n    gt_overlaps, _ = torch.sort(gt_overlaps)\n\n    if thresholds is None:\n        step = 0.05\n        thresholds = torch.arange(0.5, 0.95 + 1e-5, step, dtype=torch.float32)\n    recalls = torch.zeros_like(thresholds)\n    # compute recall for each iou threshold\n    for i, t in enumerate(thresholds):\n        recalls[i] = (gt_overlaps >= t).float().sum() / float(num_pos)\n    # ar = 2 * np.trapz(recalls, thresholds)\n    ar = recalls.mean()\n    return {\n        \"ar\": ar,\n        \"recalls\": recalls,\n        \"thresholds\": thresholds,\n        \"gt_overlaps\": gt_overlaps,\n        \"num_pos\": num_pos,\n    }\n\n\ndef _evaluate_predictions_on_soba(soba_gt, soba_results, iou_type, kpt_oks_sigmas=None,is_asso= False):\n    \"\"\"\n    Evaluate the soba results using SOBAEval API.\n    \"\"\"\n    assert len(soba_results) > 0\n\n    if iou_type == \"segm\":\n        soba_results = copy.deepcopy(soba_results)\n        # When evaluating mask AP, if the results contain bbox, sobaapi will\n        # use the box area as the area of the instance, instead of the mask area.\n        # This leads to a different definition of small/medium/large.\n        # We remove the bbox field to let mask AP use mask area.\n        for c in soba_results:\n            c.pop(\"bbox\", None)\n    if is_asso:\n        soba_dt = 
soba_gt.loadRes_asso(soba_results)\n    else:\n        soba_dt = soba_gt.loadRes(soba_results)\n\n    soba_eval = COCOeval(soba_gt, soba_dt, iou_type)\n    # Use the SOBA default keypoint OKS sigmas unless overrides are specified\n    if kpt_oks_sigmas:\n        soba_eval.params.kpt_oks_sigmas = np.array(kpt_oks_sigmas)\n    if is_asso:\n        soba_eval.evaluate_asso()\n    else:\n        soba_eval.evaluate()\n    soba_eval.accumulate()\n    soba_eval.summarize()\n\n    return soba_eval\n"
  },
  {
    "path": "detectron2/evaluation/testing.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport logging\nimport numpy as np\nimport pprint\nimport sys\nfrom collections import Mapping, OrderedDict\n\n\ndef print_csv_format(results):\n    \"\"\"\n    Print main metrics in a format similar to Detectron,\n    so that they are easy to copypaste into a spreadsheet.\n\n    Args:\n        results (OrderedDict[dict]): task_name -> {metric -> score}\n    \"\"\"\n    assert isinstance(results, OrderedDict), results  # unordered results cannot be properly printed\n    logger = logging.getLogger(__name__)\n    for task, res in results.items():\n        # Don't print \"AP-category\" metrics since they are usually not tracked.\n        important_res = [(k, v) for k, v in res.items() if \"-\" not in k]\n        logger.info(\"copypaste: Task: {}\".format(task))\n        logger.info(\"copypaste: \" + \",\".join([k[0] for k in important_res]))\n        logger.info(\"copypaste: \" + \",\".join([\"{0:.4f}\".format(k[1]) for k in important_res]))\n\n\ndef verify_results(cfg, results):\n    \"\"\"\n    Args:\n        results (OrderedDict[dict]): task_name -> {metric -> score}\n\n    Returns:\n        bool: whether the verification succeeds or not\n    \"\"\"\n    expected_results = cfg.TEST.EXPECTED_RESULTS\n    if not len(expected_results):\n        return True\n\n    ok = True\n    for task, metric, expected, tolerance in expected_results:\n        actual = results[task][metric]\n        if not np.isfinite(actual):\n            ok = False\n        diff = abs(actual - expected)\n        if diff > tolerance:\n            ok = False\n\n    logger = logging.getLogger(__name__)\n    if not ok:\n        logger.error(\"Result verification failed!\")\n        logger.error(\"Expected Results: \" + str(expected_results))\n        logger.error(\"Actual Results: \" + pprint.pformat(results))\n\n        sys.exit(1)\n    else:\n        logger.info(\"Results verification passed.\")\n    return 
ok\n\n\ndef flatten_results_dict(results):\n    \"\"\"\n    Expand a hierarchical dict of scalars into a flat dict of scalars.\n    If results[k1][k2][k3] = v, the returned dict will have the entry\n    {\"k1/k2/k3\": v}.\n\n    Args:\n        results (dict):\n    \"\"\"\n    r = {}\n    for k, v in results.items():\n        if isinstance(v, Mapping):\n            v = flatten_results_dict(v)\n            for kk, vv in v.items():\n                r[k + \"/\" + kk] = vv\n        else:\n            r[k] = v\n    return r\n"
  },
  {
    "path": "detectron2/export/README.md",
    "content": "\nThis directory contains code to prepare a detectron2 model for deployment.\nCurrently it supports exporting a detectron2 model to Caffe2 format through ONNX.\n\nPlease see [documentation](https://detectron2.readthedocs.io/tutorials/deployment.html) for its usage.\n\n\n### Acknowledgements\n\nThanks to Mobile Vision team at Facebook for developing the conversion tools.\n"
  },
  {
    "path": "detectron2/export/__init__.py",
    "content": "# -*- coding: utf-8 -*-\n\nfrom .api import *\n\n__all__ = [k for k in globals().keys() if not k.startswith(\"_\")]\n"
  },
  {
    "path": "detectron2/export/api.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.\nimport logging\nimport os\nfrom caffe2.proto import caffe2_pb2\nfrom torch import nn\n\nfrom detectron2.config import CfgNode as CN\n\nfrom .caffe2_export import export_caffe2_detection_model, run_and_save_graph\nfrom .caffe2_inference import ProtobufDetectionModel\nfrom .caffe2_modeling import META_ARCH_CAFFE2_EXPORT_TYPE_MAP, convert_batched_inputs_to_c2_format\nfrom .shared import get_pb_arg_vali, get_pb_arg_vals, save_graph\n\n__all__ = [\"add_export_config\", \"export_caffe2_model\", \"Caffe2Model\"]\n\n\ndef add_export_config(cfg):\n    \"\"\"\n    Args:\n        cfg (CfgNode): a detectron2 config\n\n    Returns:\n        CfgNode: an updated config with new options that :func:`export_caffe2_model` will need.\n    \"\"\"\n    is_frozen = cfg.is_frozen()\n    cfg.defrost()\n    cfg.EXPORT_CAFFE2 = CN()\n    cfg.EXPORT_CAFFE2.USE_HEATMAP_MAX_KEYPOINT = False\n    if is_frozen:\n        cfg.freeze()\n    return cfg\n\n\ndef export_caffe2_model(cfg, model, inputs):\n    \"\"\"\n    Export a detectron2 model to caffe2 format.\n\n    Args:\n        cfg (CfgNode): a detectron2 config, with extra export-related options\n            added by :func:`add_export_config`.\n        model (nn.Module): a model built by\n            :func:`detectron2.modeling.build_model`.\n            It will be modified by this function.\n        inputs: sample inputs that the given model takes for inference.\n            Will be used to trace the model.\n\n    Returns:\n        Caffe2Model\n    \"\"\"\n    assert isinstance(cfg, CN), cfg\n    C2MetaArch = META_ARCH_CAFFE2_EXPORT_TYPE_MAP[cfg.MODEL.META_ARCHITECTURE]\n    c2_compatible_model = C2MetaArch(cfg, model)\n    c2_format_input = c2_compatible_model.get_caffe2_inputs(inputs)\n    predict_net, init_net = export_caffe2_detection_model(c2_compatible_model, c2_format_input)\n    return Caffe2Model(predict_net, init_net)\n\n\nclass 
Caffe2Model(nn.Module):\n    def __init__(self, predict_net, init_net):\n        super().__init__()\n        self.eval()  # always in eval mode\n        self._predict_net = predict_net\n        self._init_net = init_net\n        self._predictor = None\n\n    @property\n    def predict_net(self):\n        \"\"\"\n        Returns:\n            core.Net: the underlying caffe2 predict net\n        \"\"\"\n        return self._predict_net\n\n    @property\n    def init_net(self):\n        \"\"\"\n        Returns:\n            core.Net: the underlying caffe2 init net\n        \"\"\"\n        return self._init_net\n\n    __init__.__HIDE_SPHINX_DOC__ = True\n\n    def save_protobuf(self, output_dir):\n        \"\"\"\n        Save the model as caffe2's protobuf format.\n\n        Args:\n            output_dir (str): the output directory to save protobuf files.\n        \"\"\"\n        logger = logging.getLogger(__name__)\n        logger.info(\"Saving model to {} ...\".format(output_dir))\n        os.makedirs(output_dir, exist_ok=True)\n\n        with open(os.path.join(output_dir, \"model.pb\"), \"wb\") as f:\n            f.write(self._predict_net.SerializeToString())\n        with open(os.path.join(output_dir, \"model.pbtxt\"), \"w\") as f:\n            f.write(str(self._predict_net))\n        with open(os.path.join(output_dir, \"model_init.pb\"), \"wb\") as f:\n            f.write(self._init_net.SerializeToString())\n\n    def save_graph(self, output_file, inputs=None):\n        \"\"\"\n        Save the graph as SVG format.\n\n        Args:\n            output_file (str): a SVG file\n            inputs: optional inputs given to the model.\n                If given, the inputs will be used to run the graph to record\n                shape of every tensor. 
The shape information will be\n                saved together with the graph.\n        \"\"\"\n        if inputs is None:\n            save_graph(self._predict_net, output_file, op_only=False)\n        else:\n            size_divisibility = get_pb_arg_vali(self._predict_net, \"size_divisibility\", 0)\n            device = get_pb_arg_vals(self._predict_net, \"device\", b\"cpu\").decode(\"ascii\")\n            inputs = convert_batched_inputs_to_c2_format(inputs, size_divisibility, device)\n            inputs = [x.numpy() for x in inputs]\n            run_and_save_graph(self._predict_net, self._init_net, inputs, output_file)\n\n    @staticmethod\n    def load_protobuf(dir):\n        \"\"\"\n        Args:\n            dir (str): a directory used to save Caffe2Model with\n                :meth:`save_protobuf`.\n                The files \"model.pb\" and \"model_init.pb\" are needed.\n\n        Returns:\n            Caffe2Model: the caffe2 model loaded from this directory.\n        \"\"\"\n        predict_net = caffe2_pb2.NetDef()\n        with open(os.path.join(dir, \"model.pb\"), \"rb\") as f:\n            predict_net.ParseFromString(f.read())\n\n        init_net = caffe2_pb2.NetDef()\n        with open(os.path.join(dir, \"model_init.pb\"), \"rb\") as f:\n            init_net.ParseFromString(f.read())\n\n        return Caffe2Model(predict_net, init_net)\n\n    def __call__(self, inputs):\n        \"\"\"\n        An interface that wraps around a caffe2 model and mimics detectron2's models'\n        input & output format. This is used to compare the caffe2 model\n        with its original torch model.\n        \"\"\"\n        if self._predictor is None:\n            self._predictor = ProtobufDetectionModel(self._predict_net, self._init_net)\n        return self._predictor(inputs)\n"
  },
  {
    "path": "detectron2/export/c10.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.\n\nimport math\nimport torch\nimport torch.nn.functional as F\n\nfrom detectron2.layers import cat\nfrom detectron2.modeling import poolers\nfrom detectron2.modeling.proposal_generator import rpn\nfrom detectron2.modeling.roi_heads.mask_head import mask_rcnn_inference\nfrom detectron2.structures import Boxes, ImageList, Instances, Keypoints\n\nfrom .shared import alias, to_device\n\n\n\"\"\"\nThis file contains caffe2-compatible implementation of several detectron2 components.\n\"\"\"\n\n\nclass Boxes4or5(Boxes):\n    \"\"\"\n    Representing a list of detectron2.structures.Boxes from minibatch, each box\n    is represented by a 5d vector (batch index + 4 coordinates).\n    \"\"\"\n\n    def __init__(self, tensor):\n        assert isinstance(tensor, torch.Tensor)\n        assert tensor.dim() == 2 and tensor.size(-1) in [4, 5], tensor.size()\n        # TODO: make tensor immutable when dim is Nx5?\n        self.tensor = tensor\n\n\n# TODO clean up this class, maybe just extend Instances\nclass InstancesList(object):\n    \"\"\"\n    Tensor representation of a list of Instances object for a batch of images.\n\n    When dealing with a batch of images with Caffe2 ops, a list of bboxes\n    (instances) are usually represented by single Tensor with size\n    (sigma(Ni), 5) or (sigma(Ni), 4) plus a batch split Tensor. 
This class is\n    for providing common functions to convert between these two representations.\n    \"\"\"\n\n    def __init__(self, im_info, indices, extra_fields=None):\n        # [N, 3] -> (H, W, Scale)\n        self.im_info = im_info\n        # [N,] -> indice of batch to which the instance belongs\n        self.indices = indices\n        # [N, ...]\n        self.batch_extra_fields = extra_fields or {}\n\n        self.image_size = self.im_info\n\n    def get_fields(self):\n        \"\"\" like `get_fields` in the Instances object,\n        but return each field in tensor representations \"\"\"\n        ret = {}\n        for k, v in self.batch_extra_fields.items():\n            # if isinstance(v, torch.Tensor):\n            #     tensor_rep = v\n            # elif isinstance(v, (Boxes, Keypoints)):\n            #     tensor_rep = v.tensor\n            # else:\n            #     raise ValueError(\"Can't find tensor representation for: {}\".format())\n            ret[k] = v\n        return ret\n\n    def has(self, name):\n        return name in self.batch_extra_fields\n\n    def set(self, name, value):\n        data_len = len(value)\n        if len(self.batch_extra_fields):\n            assert (\n                len(self) == data_len\n            ), \"Adding a field of length {} to a Instances of length {}\".format(data_len, len(self))\n        self.batch_extra_fields[name] = value\n\n    def __setattr__(self, name, val):\n        if name in [\"im_info\", \"indices\", \"batch_extra_fields\", \"image_size\"]:\n            super().__setattr__(name, val)\n        else:\n            self.set(name, val)\n\n    def __getattr__(self, name):\n        if name not in self.batch_extra_fields:\n            raise AttributeError(\"Cannot find field '{}' in the given Instances!\".format(name))\n        return self.batch_extra_fields[name]\n\n    def __len__(self):\n        return len(self.indices)\n\n    def flatten(self):\n        ret = []\n        for _, v in 
self.batch_extra_fields.items():\n            if isinstance(v, (Boxes, Keypoints)):\n                ret.append(v.tensor)\n            else:\n                ret.append(v)\n        return ret\n\n    @staticmethod\n    def to_d2_instances_list(instances_list):\n        \"\"\"\n        Convert InstancesList to List[Instances]. The input `instances_list` can\n        also be a List[Instances], in this case this method is a non-op.\n        \"\"\"\n        if not isinstance(instances_list, InstancesList):\n            assert all(isinstance(x, Instances) for x in instances_list)\n            return instances_list\n\n        ret = []\n        for i, info in enumerate(instances_list.im_info):\n            instances = Instances(torch.Size([int(info[0].item()), int(info[1].item())]))\n\n            ids = instances_list.indices == i\n            for k, v in instances_list.batch_extra_fields.items():\n                if isinstance(v, torch.Tensor):\n                    instances.set(k, v[ids])\n                    continue\n                elif isinstance(v, Boxes):\n                    instances.set(k, v[ids, -4:])\n                    continue\n\n                target_type, tensor_source = v\n                assert isinstance(tensor_source, torch.Tensor)\n                assert tensor_source.shape[0] == instances_list.indices.shape[0]\n                tensor_source = tensor_source[ids]\n\n                if issubclass(target_type, Boxes):\n                    instances.set(k, Boxes(tensor_source[:, -4:]))\n                elif issubclass(target_type, Keypoints):\n                    instances.set(k, Keypoints(tensor_source))\n                elif issubclass(target_type, torch.Tensor):\n                    instances.set(k, tensor_source)\n                else:\n                    raise ValueError(\"Can't handle targe type: {}\".format(target_type))\n\n            ret.append(instances)\n        return ret\n\n\nclass Caffe2Compatible(object):\n    def 
_get_tensor_mode(self):\n        return self._tensor_mode\n\n    def _set_tensor_mode(self, v):\n        self._tensor_mode = v\n\n    tensor_mode = property(_get_tensor_mode, _set_tensor_mode)\n    \"\"\"\n    If true, the model expects C2-style tensor only inputs/outputs format.\n    \"\"\"\n\n\nclass Caffe2RPN(Caffe2Compatible, rpn.RPN):\n    def forward(self, images, features, gt_instances=None):\n        assert not self.training\n\n        features = [features[f] for f in self.in_features]\n        objectness_logits_pred, anchor_deltas_pred = self.rpn_head(features)\n\n        # TODO is this needed?\n        # objectness_logits_pred = [t.sigmoid() for t in objectness_logits_pred]\n\n        assert isinstance(images, ImageList)\n        if self.tensor_mode:\n            im_info = images.image_sizes\n        else:\n            im_info = torch.Tensor(\n                [[im_sz[0], im_sz[1], torch.Tensor([1.0])] for im_sz in images.image_sizes]\n            ).to(images.tensor.device)\n        assert isinstance(im_info, torch.Tensor)\n\n        rpn_rois_list = []\n        rpn_roi_probs_list = []\n        for scores, bbox_deltas, cell_anchors_tensor, feat_stride in zip(\n            objectness_logits_pred,\n            anchor_deltas_pred,\n            iter(self.anchor_generator.cell_anchors),\n            self.anchor_generator.strides,\n        ):\n            scores = scores.detach()\n            bbox_deltas = bbox_deltas.detach()\n\n            rpn_rois, rpn_roi_probs = torch.ops._caffe2.GenerateProposals(\n                scores,\n                bbox_deltas,\n                im_info,\n                cell_anchors_tensor,\n                spatial_scale=1.0 / feat_stride,\n                pre_nms_topN=self.pre_nms_topk[self.training],\n                post_nms_topN=self.post_nms_topk[self.training],\n                nms_thresh=self.nms_thresh,\n                min_size=self.min_box_side_len,\n                # correct_transform_coords=True,  # deprecated argument\n   
             angle_bound_on=True,  # Default\n                angle_bound_lo=-90,  # Default\n                angle_bound_hi=90,  # Default\n                clip_angle_thresh=1.0,  # Default\n                legacy_plus_one=False,\n            )\n            rpn_rois_list.append(rpn_rois)\n            rpn_roi_probs_list.append(rpn_roi_probs)\n\n        # For FPN in D2, in RPN all proposals from different levels are concatenated\n        # together, ranked and picked by top post_nms_topk. Then in ROIPooler\n        # it calculates level_assignments and calls the RoIAlign from\n        # the corresponding level.\n\n        if len(objectness_logits_pred) == 1:\n            rpn_rois = rpn_rois_list[0]\n            rpn_roi_probs = rpn_roi_probs_list[0]\n        else:\n            assert len(rpn_rois_list) == len(rpn_roi_probs_list)\n            rpn_post_nms_topN = self.post_nms_topk[self.training]\n\n            device = rpn_rois_list[0].device\n            input_list = [to_device(x, \"cpu\") for x in (rpn_rois_list + rpn_roi_probs_list)]\n\n            # TODO remove this after confirming rpn_max_level/rpn_min_level\n            # is not needed in CollectRpnProposals.\n            feature_strides = list(self.anchor_generator.strides)\n            rpn_min_level = int(math.log2(feature_strides[0]))\n            rpn_max_level = int(math.log2(feature_strides[-1]))\n            assert (rpn_max_level - rpn_min_level + 1) == len(\n                rpn_rois_list\n            ), \"CollectRpnProposals requires continuous levels\"\n\n            rpn_rois = torch.ops._caffe2.CollectRpnProposals(\n                input_list,\n                # NOTE: in current implementation, rpn_max_level and rpn_min_level\n                # are not needed, only the subtraction of two matters and it\n                # can be inferred from the number of inputs. 
Keep them now for\n                # consistency.\n                rpn_max_level=2 + len(rpn_rois_list) - 1,\n                rpn_min_level=2,\n                rpn_post_nms_topN=rpn_post_nms_topN,\n            )\n            rpn_rois = to_device(rpn_rois, device)\n            rpn_roi_probs = []\n\n        proposals = self.c2_postprocess(im_info, rpn_rois, rpn_roi_probs, self.tensor_mode)\n        return proposals, {}\n\n    @staticmethod\n    def c2_postprocess(im_info, rpn_rois, rpn_roi_probs, tensor_mode):\n        proposals = InstancesList(\n            im_info=im_info,\n            indices=rpn_rois[:, 0],\n            extra_fields={\n                \"proposal_boxes\": Boxes4or5(rpn_rois),\n                \"objectness_logits\": (torch.Tensor, rpn_roi_probs),\n            },\n        )\n        if not tensor_mode:\n            proposals = InstancesList.to_d2_instances_list(proposals)\n        else:\n            proposals = [proposals]\n        return proposals\n\n\nclass Caffe2ROIPooler(Caffe2Compatible, poolers.ROIPooler):\n    @staticmethod\n    def c2_preprocess(box_lists):\n        assert all(isinstance(x, Boxes) for x in box_lists)\n        if all(isinstance(x, Boxes4or5) for x in box_lists):\n            # input is pure-tensor based\n            assert len(box_lists) == 1\n            pooler_fmt_boxes = box_lists[0].tensor\n        else:\n            pooler_fmt_boxes = poolers.convert_boxes_to_pooler_format(box_lists)\n        return pooler_fmt_boxes\n\n    def forward(self, x, box_lists):\n        assert not self.training\n\n        pooler_fmt_boxes = self.c2_preprocess(box_lists)\n        num_level_assignments = len(self.level_poolers)\n\n        if num_level_assignments == 1:\n            out = torch.ops._caffe2.RoIAlign(\n                x[0],\n                pooler_fmt_boxes,\n                order=\"NCHW\",\n                spatial_scale=float(self.level_poolers[0].spatial_scale),\n                pooled_h=int(self.output_size[0]),\n                
pooled_w=int(self.output_size[1]),\n                sampling_ratio=int(self.level_poolers[0].sampling_ratio),\n                aligned=bool(self.level_poolers[0].aligned),\n            )\n            return out\n\n        device = pooler_fmt_boxes.device\n        assert (\n            self.max_level - self.min_level + 1 == 4\n        ), \"Currently DistributeFpnProposals only support 4 levels\"\n        fpn_outputs = torch.ops._caffe2.DistributeFpnProposals(\n            to_device(pooler_fmt_boxes, \"cpu\"),\n            roi_canonical_scale=self.canonical_box_size,\n            roi_canonical_level=self.canonical_level,\n            roi_max_level=self.max_level,\n            roi_min_level=self.min_level,\n            legacy_plus_one=False,\n        )\n        fpn_outputs = [to_device(x, device) for x in fpn_outputs]\n\n        rois_fpn_list = fpn_outputs[:-1]\n        rois_idx_restore_int32 = fpn_outputs[-1]\n\n        roi_feat_fpn_list = []\n        for roi_fpn, x_level, pooler in zip(rois_fpn_list, x, self.level_poolers):\n            roi_feat_fpn = torch.ops._caffe2.RoIAlign(\n                x_level,\n                roi_fpn,\n                order=\"NCHW\",\n                spatial_scale=float(pooler.spatial_scale),\n                pooled_h=int(self.output_size[0]),\n                pooled_w=int(self.output_size[1]),\n                sampling_ratio=int(pooler.sampling_ratio),\n                aligned=bool(pooler.aligned),\n            )\n            roi_feat_fpn_list.append(roi_feat_fpn)\n\n        roi_feat_shuffled = cat(roi_feat_fpn_list, dim=0)\n        roi_feat = torch.ops._caffe2.BatchPermutation(roi_feat_shuffled, rois_idx_restore_int32)\n        return roi_feat\n\n\nclass Caffe2FastRCNNOutputsInference:\n    def __init__(self, tensor_mode):\n        self.tensor_mode = tensor_mode\n\n    def __call__(self, fastrcnn_outputs, score_thresh, nms_thresh, topk_per_image):\n        \"\"\" equivalent to FastRCNNOutputs.inference \"\"\"\n        assert 
isinstance(fastrcnn_outputs.proposals, Boxes)\n        input_tensor_mode = fastrcnn_outputs.proposals.tensor.shape[1] == 5\n\n        class_logits = fastrcnn_outputs.pred_class_logits\n        box_regression = fastrcnn_outputs.pred_proposal_deltas\n        class_prob = F.softmax(class_logits, -1)\n\n        assert box_regression.shape[1] % 4 == 0\n        cls_agnostic_bbox_reg = box_regression.shape[1] // 4 == 1\n\n        device = class_logits.device\n\n        im_info = (\n            torch.Tensor(\n                [[sz[0], sz[1], torch.Tensor([1.0])] for sz in fastrcnn_outputs.image_shapes]\n            ).to(device)\n            if not input_tensor_mode\n            else fastrcnn_outputs.image_shapes[0]\n        )\n\n        rois_n4 = fastrcnn_outputs.proposals.tensor\n        device, dtype = rois_n4.device, rois_n4.dtype\n        if not input_tensor_mode:\n            batch_ids = cat(\n                [\n                    torch.full((b, 1), i, dtype=dtype, device=device)\n                    for i, b in enumerate(fastrcnn_outputs.num_preds_per_image)\n                ],\n                dim=0,\n            )\n            rois = torch.cat([batch_ids, rois_n4], dim=1)\n        else:\n            rois = fastrcnn_outputs.proposals.tensor\n\n        roi_pred_bbox, roi_batch_splits = torch.ops._caffe2.BBoxTransform(\n            to_device(rois, \"cpu\"),\n            to_device(box_regression, \"cpu\"),\n            to_device(im_info, \"cpu\"),\n            weights=fastrcnn_outputs.box2box_transform.weights,\n            apply_scale=True,\n            rotated=False,\n            angle_bound_on=True,\n            angle_bound_lo=-90,\n            angle_bound_hi=90,\n            clip_angle_thresh=1.0,\n            legacy_plus_one=False,\n        )\n        roi_pred_bbox = to_device(roi_pred_bbox, device)\n        roi_batch_splits = to_device(roi_batch_splits, device)\n\n        nms_outputs = torch.ops._caffe2.BoxWithNMSLimit(\n            to_device(class_prob, 
\"cpu\"),\n            to_device(roi_pred_bbox, \"cpu\"),\n            to_device(roi_batch_splits, \"cpu\"),\n            score_thresh=float(score_thresh),\n            nms=float(nms_thresh),\n            detections_per_im=int(topk_per_image),\n            soft_nms_enabled=False,\n            soft_nms_method=\"linear\",\n            soft_nms_sigma=0.5,\n            soft_nms_min_score_thres=0.001,\n            rotated=False,\n            cls_agnostic_bbox_reg=cls_agnostic_bbox_reg,\n            input_boxes_include_bg_cls=False,\n            output_classes_include_bg_cls=False,\n            legacy_plus_one=False,\n        )\n        roi_score_nms = to_device(nms_outputs[0], device)\n        roi_bbox_nms = to_device(nms_outputs[1], device)\n        roi_class_nms = to_device(nms_outputs[2], device)\n        roi_batch_splits_nms = to_device(nms_outputs[3], device)\n        roi_keeps_nms = to_device(nms_outputs[4], device)\n        roi_keeps_size_nms = to_device(nms_outputs[5], device)\n        if not self.tensor_mode:\n            roi_class_nms = roi_class_nms.to(torch.int64)\n\n        roi_batch_ids = cat(\n            [\n                torch.full((b, 1), i, dtype=dtype, device=device)\n                for i, b in enumerate(int(x.item()) for x in roi_batch_splits_nms)\n            ],\n            dim=0,\n        )\n\n        roi_class_nms = alias(roi_class_nms, \"class_nms\")\n        roi_score_nms = alias(roi_score_nms, \"score_nms\")\n        roi_bbox_nms = alias(roi_bbox_nms, \"bbox_nms\")\n        roi_batch_splits_nms = alias(roi_batch_splits_nms, \"batch_splits_nms\")\n        roi_keeps_nms = alias(roi_keeps_nms, \"keeps_nms\")\n        roi_keeps_size_nms = alias(roi_keeps_size_nms, \"keeps_size_nms\")\n\n        results = InstancesList(\n            im_info=im_info,\n            indices=roi_batch_ids[:, 0],\n            extra_fields={\n                \"pred_boxes\": Boxes4or5(roi_bbox_nms),\n                \"scores\": roi_score_nms,\n                
\"pred_classes\": roi_class_nms,\n            },\n        )\n\n        if not self.tensor_mode:\n            results = InstancesList.to_d2_instances_list(results)\n            batch_splits = roi_batch_splits_nms.int().tolist()\n            kept_indices = list(roi_keeps_nms.to(torch.int64).split(batch_splits))\n        else:\n            results = [results]\n            kept_indices = [roi_keeps_nms]\n\n        return results, kept_indices\n\n\nclass Caffe2MaskRCNNInference:\n    def __call__(self, pred_mask_logits, pred_instances):\n        \"\"\" equivalent to mask_head.mask_rcnn_inference \"\"\"\n        if all(isinstance(x, InstancesList) for x in pred_instances):\n            assert len(pred_instances) == 1\n            mask_probs_pred = pred_mask_logits.sigmoid()\n            mask_probs_pred = alias(mask_probs_pred, \"mask_fcn_probs\")\n            pred_instances[0].pred_masks = mask_probs_pred\n        else:\n            mask_rcnn_inference(pred_mask_logits, pred_instances)\n\n\nclass Caffe2KeypointRCNNInference:\n    def __init__(self, use_heatmap_max_keypoint):\n        self.use_heatmap_max_keypoint = use_heatmap_max_keypoint\n\n    def __call__(self, pred_keypoint_logits, pred_instances):\n        # just return the keypoint heatmap for now,\n        # there will be option to call HeatmapMaxKeypointOp\n        output = alias(pred_keypoint_logits, \"kps_score\")\n        if all(isinstance(x, InstancesList) for x in pred_instances):\n            assert len(pred_instances) == 1\n            if self.use_heatmap_max_keypoint:\n                device = output.device\n                output = torch.ops._caffe2.HeatmapMaxKeypoint(\n                    to_device(output, \"cpu\"),\n                    pred_instances[0].pred_boxes.tensor,\n                    should_output_softmax=True,  # worth make it configerable?\n                )\n                output = to_device(output, device)\n                output = alias(output, \"keypoints_out\")\n            
pred_instances[0].pred_keypoints = output\n        return pred_keypoint_logits\n"
  },
  {
    "path": "detectron2/export/caffe2_export.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\n\nimport copy\nimport io\nimport logging\nimport numpy as np\nfrom typing import List\nimport onnx\nimport torch\nfrom caffe2.proto import caffe2_pb2\nfrom caffe2.python import core\nfrom caffe2.python.onnx.backend import Caffe2Backend\nfrom tabulate import tabulate\nfrom termcolor import colored\nfrom torch.onnx import OperatorExportTypes\n\nfrom .shared import (\n    ScopedWS,\n    construct_init_net_from_params,\n    fuse_alias_placeholder,\n    fuse_copy_between_cpu_and_gpu,\n    get_params_from_init_net,\n    group_norm_replace_aten_with_caffe2,\n    infer_device_type,\n    remove_dead_end_ops,\n    remove_reshape_for_fc,\n    save_graph,\n)\n\nlogger = logging.getLogger(__name__)\n\n\ndef _export_via_onnx(model, inputs):\n    # make sure all modules are in eval mode, onnx may change the training state\n    #  of the module if the states are not consistent\n    def _check_eval(module):\n        assert not module.training\n\n    model.apply(_check_eval)\n\n    # Export the model to ONNX\n    with torch.no_grad():\n        with io.BytesIO() as f:\n            torch.onnx.export(\n                model,\n                inputs,\n                f,\n                operator_export_type=OperatorExportTypes.ONNX_ATEN_FALLBACK,\n                # verbose=True,  # NOTE: uncomment this for debugging\n                # export_params=True,\n            )\n            onnx_model = onnx.load_from_string(f.getvalue())\n\n    # Apply ONNX's Optimization\n    all_passes = onnx.optimizer.get_available_passes()\n    passes = [\"fuse_bn_into_conv\"]\n    assert all(p in all_passes for p in passes)\n    onnx_model = onnx.optimizer.optimize(onnx_model, passes)\n\n    # Convert ONNX model to Caffe2 protobuf\n    init_net, predict_net = Caffe2Backend.onnx_graph_to_caffe2_net(onnx_model)\n\n    return predict_net, init_net\n\n\ndef _op_stats(net_def):\n    type_count = {}\n    for t in [op.type 
for op in net_def.op]:\n        type_count[t] = type_count.get(t, 0) + 1\n    type_count_list = sorted(type_count.items(), key=lambda kv: kv[0])  # alphabet\n    type_count_list = sorted(type_count_list, key=lambda kv: -kv[1])  # count\n    return \"\\n\".join(\"{:>4}x {}\".format(count, name) for name, count in type_count_list)\n\n\ndef _assign_device_option(\n    predict_net: caffe2_pb2.NetDef, init_net: caffe2_pb2.NetDef, tensor_inputs: List[torch.Tensor]\n):\n    \"\"\"\n    ONNX exported network doesn't have concept of device, assign necessary\n    device option for each op in order to make it runable on GPU runtime.\n    \"\"\"\n\n    def _get_device_type(torch_tensor):\n        assert torch_tensor.device.type in [\"cpu\", \"cuda\"]\n        assert torch_tensor.device.index == 0\n        return torch_tensor.device.type\n\n    def _assign_op_device_option(net_proto, net_ssa, blob_device_types):\n        for op, ssa_i in zip(net_proto.op, net_ssa):\n            if op.type in [\"CopyCPUToGPU\", \"CopyGPUToCPU\"]:\n                op.device_option.CopyFrom(core.DeviceOption(caffe2_pb2.CUDA, 0))\n            else:\n                devices = [blob_device_types[b] for b in ssa_i[0] + ssa_i[1]]\n                assert all(d == devices[0] for d in devices)\n                if devices[0] == \"cuda\":\n                    op.device_option.CopyFrom(core.DeviceOption(caffe2_pb2.CUDA, 0))\n\n    # update ops in predict_net\n    predict_net_input_device_types = {\n        (name, 0): _get_device_type(tensor)\n        for name, tensor in zip(predict_net.external_input, tensor_inputs)\n    }\n    predict_net_device_types = infer_device_type(\n        predict_net, known_status=predict_net_input_device_types, device_name_style=\"pytorch\"\n    )\n    predict_net_ssa, _ = core.get_ssa(predict_net)\n    _assign_op_device_option(predict_net, predict_net_ssa, predict_net_device_types)\n\n    # update ops in init_net\n    init_net_ssa, versions = core.get_ssa(init_net)\n    
init_net_output_device_types = {\n        (name, versions[name]): predict_net_device_types[(name, 0)]\n        for name in init_net.external_output\n    }\n    init_net_device_types = infer_device_type(\n        init_net, known_status=init_net_output_device_types, device_name_style=\"pytorch\"\n    )\n    _assign_op_device_option(init_net, init_net_ssa, init_net_device_types)\n\n\ndef export_caffe2_detection_model(model: torch.nn.Module, tensor_inputs: List[torch.Tensor]):\n    \"\"\"\n    Export a Detectron2 model via ONNX.\n\n    Arg:\n        model: a caffe2-compatible version of detectron2 model, defined in caffe2_modeling.py\n        tensor_inputs: a list of tensors that caffe2 model takes as input.\n    \"\"\"\n    model = copy.deepcopy(model)\n    assert isinstance(model, torch.nn.Module)\n    assert hasattr(model, \"encode_additional_info\")\n\n    # Export via ONNX\n    logger.info(\"Exporting a {} model via ONNX ...\".format(type(model).__name__))\n    predict_net, init_net = _export_via_onnx(model, (tensor_inputs,))\n    ops_table = [[op.type, op.input, op.output] for op in predict_net.op]\n    table = tabulate(ops_table, headers=[\"type\", \"input\", \"output\"], tablefmt=\"pipe\")\n    logger.info(\n        \"ONNX export Done. 
Exported predict_net (before optimizations):\\n\" + colored(table, \"cyan\")\n    )\n\n    # Apply protobuf optimization\n    fuse_alias_placeholder(predict_net, init_net)\n    if any(t.device.type != \"cpu\" for t in tensor_inputs):\n        fuse_copy_between_cpu_and_gpu(predict_net)\n        remove_dead_end_ops(init_net)\n        _assign_device_option(predict_net, init_net, tensor_inputs)\n    params, device_options = get_params_from_init_net(init_net)\n    predict_net, params = remove_reshape_for_fc(predict_net, params)\n    init_net = construct_init_net_from_params(params, device_options)\n    group_norm_replace_aten_with_caffe2(predict_net)\n\n    # Record necessary information for running the pb model in Detectron2 system.\n    model.encode_additional_info(predict_net, init_net)\n\n    logger.info(\"Operators used in predict_net: \\n{}\".format(_op_stats(predict_net)))\n    logger.info(\"Operators used in init_net: \\n{}\".format(_op_stats(init_net)))\n\n    return predict_net, init_net\n\n\ndef run_and_save_graph(predict_net, init_net, tensor_inputs, graph_save_path):\n    \"\"\"\n    Run the caffe2 model on given inputs, recording the shape and draw the graph.\n\n    predict_net/init_net: caffe2 model.\n    tensor_inputs: a list of tensors that caffe2 model takes as input.\n    graph_save_path: path for saving graph of exported model.\n    \"\"\"\n\n    logger.info(\"Saving graph of ONNX exported model to {} ...\".format(graph_save_path))\n    save_graph(predict_net, graph_save_path, op_only=False)\n\n    # Run the exported Caffe2 net\n    logger.info(\"Running ONNX exported model ...\")\n    with ScopedWS(\"__ws_tmp__\", True) as ws:\n        ws.RunNetOnce(init_net)\n        initialized_blobs = set(ws.Blobs())\n        uninitialized = [inp for inp in predict_net.external_input if inp not in initialized_blobs]\n        for name, blob in zip(uninitialized, tensor_inputs):\n            ws.FeedBlob(name, blob)\n\n        try:\n            
ws.RunNetOnce(predict_net)\n        except RuntimeError as e:\n            logger.warning(\"Encountered RuntimeError: \\n{}\".format(str(e)))\n\n        ws_blobs = {b: ws.FetchBlob(b) for b in ws.Blobs()}\n        blob_sizes = {b: ws_blobs[b].shape for b in ws_blobs if isinstance(ws_blobs[b], np.ndarray)}\n\n        logger.info(\"Saving graph with blob shapes to {} ...\".format(graph_save_path))\n        save_graph(predict_net, graph_save_path, op_only=False, blob_sizes=blob_sizes)\n\n        return ws_blobs\n"
  },
  {
    "path": "detectron2/export/caffe2_inference.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\n\nimport collections\nimport logging\nimport numpy as np\nimport torch\nfrom caffe2.proto import caffe2_pb2\nfrom caffe2.python import core\n\nfrom .caffe2_modeling import META_ARCH_CAFFE2_EXPORT_TYPE_MAP, convert_batched_inputs_to_c2_format\nfrom .shared import ScopedWS, get_pb_arg_vali, get_pb_arg_vals, infer_device_type\n\nlogger = logging.getLogger(__name__)\n\n\nclass ProtobufModel(torch.nn.Module):\n    \"\"\"\n    A class works just like nn.Module in terms of inference, but running\n    caffe2 model under the hood. Input/Output are Dict[str, tensor] whose keys\n    are in external_input/output.\n    \"\"\"\n\n    def __init__(self, predict_net, init_net):\n        logger.info(\"Initializing ProtobufModel ...\")\n        super().__init__()\n        assert isinstance(predict_net, caffe2_pb2.NetDef)\n        assert isinstance(init_net, caffe2_pb2.NetDef)\n        self.ws_name = \"__ws_tmp__\"\n        self.net = core.Net(predict_net)\n\n        with ScopedWS(self.ws_name, is_reset=True, is_cleanup=False) as ws:\n            ws.RunNetOnce(init_net)\n            for blob in self.net.Proto().external_input:\n                if blob not in ws.Blobs():\n                    ws.CreateBlob(blob)\n            ws.CreateNet(self.net)\n\n        self._error_msgs = set()\n\n    def forward(self, inputs_dict):\n        assert all(inp in self.net.Proto().external_input for inp in inputs_dict)\n        with ScopedWS(self.ws_name, is_reset=False, is_cleanup=False) as ws:\n            for b, tensor in inputs_dict.items():\n                ws.FeedBlob(b, tensor)\n            try:\n                ws.RunNet(self.net.Proto().name)\n            except RuntimeError as e:\n                if not str(e) in self._error_msgs:\n                    self._error_msgs.add(str(e))\n                    logger.warning(\"Encountered new RuntimeError: \\n{}\".format(str(e)))\n                
logger.warning(\"Catch the error and use partial results.\")\n\n            outputs_dict = collections.OrderedDict(\n                [(b, ws.FetchBlob(b)) for b in self.net.Proto().external_output]\n            )\n            # Remove outputs of current run, this is necessary in order to\n            # prevent fetching the result from previous run if the model fails\n            # in the middle.\n            for b in self.net.Proto().external_output:\n                # Needs to create uninitialized blob to make the net runable.\n                # This is \"equivalent\" to: ws.RemoveBlob(b) then ws.CreateBlob(b),\n                # but there'no such API.\n                ws.FeedBlob(b, \"{}, a C++ native class of type nullptr (uninitialized).\".format(b))\n\n        return outputs_dict\n\n\nclass ProtobufDetectionModel(torch.nn.Module):\n    \"\"\"\n    A class works just like a pytorch meta arch in terms of inference, but running\n    caffe2 model under the hood.\n    \"\"\"\n\n    def __init__(self, predict_net, init_net, *, convert_outputs=None):\n        \"\"\"\n        Args:\n            predict_net, init_net (core.Net): caffe2 nets\n            convert_outptus (callable): a function that converts caffe2\n                outputs to the same format of the original pytorch model.\n                By default, use the one defined in the caffe2 meta_arch.\n        \"\"\"\n        super().__init__()\n        self.protobuf_model = ProtobufModel(predict_net, init_net)\n        self.size_divisibility = get_pb_arg_vali(predict_net, \"size_divisibility\", 0)\n        self.device = get_pb_arg_vals(predict_net, \"device\", b\"cpu\").decode(\"ascii\")\n\n        if convert_outputs is None:\n            meta_arch = get_pb_arg_vals(predict_net, \"meta_architecture\", b\"GeneralizedRCNN\")\n            meta_arch = META_ARCH_CAFFE2_EXPORT_TYPE_MAP[meta_arch.decode(\"ascii\")]\n            self._convert_outputs = meta_arch.get_outputs_converter(predict_net, init_net)\n        
else:\n            self._convert_outputs = convert_outputs\n\n    def _infer_output_devices(self, inputs_dict):\n        def _get_device_type(torch_tensor):\n            assert torch_tensor.device.type in [\"cpu\", \"cuda\"]\n            assert torch_tensor.device.index == 0\n            return torch_tensor.device.type\n\n        predict_net = self.protobuf_model.net.Proto()\n        input_device_types = {\n            (name, 0): _get_device_type(tensor) for name, tensor in inputs_dict.items()\n        }\n        device_type_map = infer_device_type(\n            predict_net, known_status=input_device_types, device_name_style=\"pytorch\"\n        )\n        ssa, versions = core.get_ssa(predict_net)\n        versioned_outputs = [(name, versions[name]) for name in predict_net.external_output]\n        output_devices = [device_type_map[outp] for outp in versioned_outputs]\n        return output_devices\n\n    def _convert_inputs(self, batched_inputs):\n        # currently all models convert inputs in the same way\n        data, im_info = convert_batched_inputs_to_c2_format(\n            batched_inputs, self.size_divisibility, self.device\n        )\n        return {\"data\": data, \"im_info\": im_info}\n\n    def forward(self, batched_inputs):\n        c2_inputs = self._convert_inputs(batched_inputs)\n        c2_results = self.protobuf_model(c2_inputs)\n\n        if any(t.device.type != \"cpu\" for _, t in c2_inputs.items()):\n            output_devices = self._infer_output_devices(c2_inputs)\n        else:\n            output_devices = [\"cpu\" for _ in self.protobuf_model.net.Proto().external_output]\n\n        def _cast_caffe2_blob_to_torch_tensor(blob, device):\n            return torch.Tensor(blob).to(device) if isinstance(blob, np.ndarray) else None\n\n        c2_results = {\n            name: _cast_caffe2_blob_to_torch_tensor(c2_results[name], device)\n            for name, device in zip(self.protobuf_model.net.Proto().external_output, output_devices)\n        
}\n\n        return self._convert_outputs(batched_inputs, c2_inputs, c2_results)\n"
  },
  {
    "path": "detectron2/export/caffe2_modeling.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\n\nimport functools\nimport io\nimport struct\nimport types\nimport torch\n\nfrom detectron2.modeling import meta_arch\nfrom detectron2.modeling.box_regression import Box2BoxTransform\nfrom detectron2.modeling.meta_arch.panoptic_fpn import combine_semantic_and_instance_outputs\nfrom detectron2.modeling.postprocessing import detector_postprocess, sem_seg_postprocess\nfrom detectron2.modeling.roi_heads import keypoint_head\nfrom detectron2.structures import Boxes, ImageList, Instances\n\nfrom .c10 import Caffe2Compatible\nfrom .patcher import ROIHeadsPatcher, patch_generalized_rcnn\nfrom .shared import (\n    alias,\n    check_set_pb_arg,\n    get_pb_arg_floats,\n    get_pb_arg_valf,\n    get_pb_arg_vali,\n    get_pb_arg_vals,\n    mock_torch_nn_functional_interpolate,\n)\n\n\ndef assemble_rcnn_outputs_by_name(image_sizes, tensor_outputs, force_mask_on=False):\n    \"\"\"\n    A function to assemble caffe2 model's outputs (i.e. Dict[str, Tensor])\n    to detectron2's format (i.e. 
list of Instances instance).\n    This only works when the model follows the Caffe2 detectron's naming convention.\n\n    Args:\n        image_sizes (List[List[int, int]]): [H, W] of every image.\n        tensor_outputs (Dict[str, Tensor]): external_output to its tensor.\n\n        force_mask_on (Bool): if true, the it make sure there'll be pred_masks even\n            if the mask is not found from tensor_outputs (usually due to model crash)\n    \"\"\"\n\n    results = [Instances(image_size) for image_size in image_sizes]\n\n    batch_splits = tensor_outputs.get(\"batch_splits\", None)\n    if batch_splits:\n        raise NotImplementedError()\n    assert len(image_sizes) == 1\n    result = results[0]\n\n    bbox_nms = tensor_outputs[\"bbox_nms\"]\n    score_nms = tensor_outputs[\"score_nms\"]\n    class_nms = tensor_outputs[\"class_nms\"]\n    # Detection will always success because Conv support 0-batch\n    assert bbox_nms is not None\n    assert score_nms is not None\n    assert class_nms is not None\n    result.pred_boxes = Boxes(bbox_nms)\n    result.scores = score_nms\n    result.pred_classes = class_nms.to(torch.int64)\n\n    mask_fcn_probs = tensor_outputs.get(\"mask_fcn_probs\", None)\n    if mask_fcn_probs is not None:\n        # finish the mask pred\n        mask_probs_pred = mask_fcn_probs\n        num_masks = mask_probs_pred.shape[0]\n        class_pred = result.pred_classes\n        indices = torch.arange(num_masks, device=class_pred.device)\n        mask_probs_pred = mask_probs_pred[indices, class_pred][:, None]\n        result.pred_masks = mask_probs_pred\n    elif force_mask_on:\n        # NOTE: there's no way to know the height/width of mask here, it won't be\n        # used anyway when batch size is 0, so just set them to 0.\n        result.pred_masks = torch.zeros([0, 1, 0, 0], dtype=torch.uint8)\n\n    keypoints_out = tensor_outputs.get(\"keypoints_out\", None)\n    kps_score = tensor_outputs.get(\"kps_score\", None)\n    if keypoints_out is 
not None:\n        # keypoints_out: [N, 4, #kypoints], where 4 is in order of (x, y, score, prob)\n        keypoints_tensor = keypoints_out\n        # NOTE: it's possible that prob is not calculated if \"should_output_softmax\"\n        # is set to False in HeatmapMaxKeypoint, so just using raw score, seems\n        # it doesn't affect mAP. TODO: check more carefully.\n        keypoint_xyp = keypoints_tensor.transpose(1, 2)[:, :, [0, 1, 2]]\n        result.pred_keypoints = keypoint_xyp\n    elif kps_score is not None:\n        # keypoint heatmap to sparse data structure\n        pred_keypoint_logits = kps_score\n        keypoint_head.keypoint_rcnn_inference(pred_keypoint_logits, [result])\n\n    return results\n\n\ndef _cast_to_f32(f64):\n    return struct.unpack(\"f\", struct.pack(\"f\", f64))[0]\n\n\ndef set_caffe2_compatible_tensor_mode(model, enable=True):\n    def _fn(m):\n        if isinstance(m, Caffe2Compatible):\n            m.tensor_mode = enable\n\n    model.apply(_fn)\n\n\ndef convert_batched_inputs_to_c2_format(batched_inputs, size_divisibility, device):\n    \"\"\"\n    See get_caffe2_inputs() below.\n    \"\"\"\n    assert all(isinstance(x, dict) for x in batched_inputs)\n    assert all(x[\"image\"].dim() == 3 for x in batched_inputs)\n\n    images = [x[\"image\"] for x in batched_inputs]\n    images = ImageList.from_tensors(images, size_divisibility)\n\n    im_info = []\n    for input_per_image, image_size in zip(batched_inputs, images.image_sizes):\n        target_height = input_per_image.get(\"height\", image_size[0])\n        target_width = input_per_image.get(\"width\", image_size[1])  # noqa\n        # NOTE: The scale inside im_info is kept as convention and for providing\n        # post-processing information if further processing is needed. 
For\n        # current Caffe2 model definitions that don't include post-processing inside\n        # the model, this number is not used.\n        # NOTE: There can be a slight difference between width and height\n        # scales, using a single number can results in numerical difference\n        # compared with D2's post-processing.\n        scale = target_height / image_size[0]\n        im_info.append([image_size[0], image_size[1], scale])\n    im_info = torch.Tensor(im_info)\n\n    return images.tensor.to(device), im_info.to(device)\n\n\nclass Caffe2MetaArch(Caffe2Compatible, torch.nn.Module):\n    \"\"\"\n    Base class for caffe2-compatible implementation of a meta architecture.\n    The forward is traceable and its traced graph can be converted to caffe2\n    graph through ONNX.\n    \"\"\"\n\n    def __init__(self, cfg, torch_model):\n        \"\"\"\n        Args:\n            cfg (CfgNode):\n            torch_model (nn.Module): the detectron2 model (meta_arch) to be\n                converted.\n        \"\"\"\n        super().__init__()\n        self._wrapped_model = torch_model\n        self.eval()\n        set_caffe2_compatible_tensor_mode(self, True)\n\n    def get_caffe2_inputs(self, batched_inputs):\n        \"\"\"\n        Convert pytorch-style structured inputs to caffe2-style inputs that\n        are tuples of tensors.\n\n        Args:\n            batched_inputs (list[dict]): inputs to a detectron2 model\n                in its standard format. Each dict has \"image\" (CHW tensor), and optionally\n                \"height\" and \"width\".\n\n        Returns:\n            tuple[Tensor]:\n                tuple of tensors that will be the inputs to the\n                :meth:`forward` method. 
For existing models, the first\n                is an NCHW tensor (padded and batched); the second is\n                a im_info Nx3 tensor, where the rows are\n                (height, width, unused legacy parameter)\n        \"\"\"\n        return convert_batched_inputs_to_c2_format(\n            batched_inputs,\n            self._wrapped_model.backbone.size_divisibility,\n            self._wrapped_model.device,\n        )\n\n    def encode_additional_info(self, predict_net, init_net):\n        \"\"\"\n        Save extra metadata that will be used by inference in the output protobuf.\n        \"\"\"\n        pass\n\n    def forward(self, inputs):\n        \"\"\"\n        Run the forward in caffe2-style. It has to use caffe2-compatible ops\n        and the method will be used for tracing.\n\n        Args:\n            inputs (tuple[Tensor]): inputs defined by :meth:`get_caffe2_input`.\n                They will be the inputs of the converted caffe2 graph.\n\n        Returns:\n            tuple[Tensor]: output tensors. 
They will be the outputs of the\n                converted caffe2 graph.\n        \"\"\"\n        raise NotImplementedError\n\n    def _caffe2_preprocess_image(self, inputs):\n        \"\"\"\n        Caffe2 implementation of preprocess_image, which is called inside each MetaArch's forward.\n        It normalizes the input images, and the final caffe2 graph assumes the\n        inputs have been batched already.\n        \"\"\"\n        data, im_info = inputs\n        data = alias(data, \"data\")\n        im_info = alias(im_info, \"im_info\")\n        normalized_data = self._wrapped_model.normalizer(data)\n        normalized_data = alias(normalized_data, \"normalized_data\")\n\n        # Pack (data, im_info) into ImageList which is recognized by self.inference.\n        images = ImageList(tensor=normalized_data, image_sizes=im_info)\n        return images\n\n    @staticmethod\n    def get_outputs_converter(predict_net, init_net):\n        \"\"\"\n        Creates a function that converts outputs of the caffe2 model to\n        detectron2's standard format.\n        The function uses information in `predict_net` and `init_net` that are\n        available at inferene time. 
Therefore the function logic can be used in inference.\n\n        The returned function has the following signature:\n\n            def convert(batched_inputs, c2_inputs, c2_results) -> detectron2_outputs\n\n        Where\n\n            * batched_inputs (list[dict]): the original input format of the meta arch\n            * c2_inputs (dict[str, Tensor]): the caffe2 inputs.\n            * c2_results (dict[str, Tensor]): the caffe2 output format,\n                corresponding to the outputs of the :meth:`forward` function.\n            * detectron2_outputs: the original output format of the meta arch.\n\n        This function can be used to compare the outputs of the original meta arch and\n        the converted caffe2 graph.\n\n        Returns:\n            callable: a callable of the above signature.\n        \"\"\"\n        raise NotImplementedError\n\n\nclass Caffe2GeneralizedRCNN(Caffe2MetaArch):\n    def __init__(self, cfg, torch_model):\n        assert isinstance(torch_model, meta_arch.GeneralizedRCNN)\n        torch_model = patch_generalized_rcnn(torch_model)\n        super().__init__(cfg, torch_model)\n\n        self.roi_heads_patcher = ROIHeadsPatcher(cfg, self._wrapped_model.roi_heads)\n\n    def encode_additional_info(self, predict_net, init_net):\n        size_divisibility = self._wrapped_model.backbone.size_divisibility\n        check_set_pb_arg(predict_net, \"size_divisibility\", \"i\", size_divisibility)\n        check_set_pb_arg(\n            predict_net, \"device\", \"s\", str.encode(str(self._wrapped_model.device), \"ascii\")\n        )\n        check_set_pb_arg(predict_net, \"meta_architecture\", \"s\", b\"GeneralizedRCNN\")\n\n    @mock_torch_nn_functional_interpolate()\n    def forward(self, inputs):\n        if not self.tensor_mode:\n            return self._wrapped_model.inference(inputs)\n        images = self._caffe2_preprocess_image(inputs)\n        features = self._wrapped_model.backbone(images.tensor)\n        proposals, _ = 
self._wrapped_model.proposal_generator(images, features)\n        with self.roi_heads_patcher.mock_roi_heads():\n            detector_results, _ = self._wrapped_model.roi_heads(images, features, proposals)\n        return tuple(detector_results[0].flatten())\n\n    @staticmethod\n    def get_outputs_converter(predict_net, init_net):\n        def f(batched_inputs, c2_inputs, c2_results):\n            image_sizes = [[int(im[0]), int(im[1])] for im in c2_inputs[\"im_info\"]]\n            results = assemble_rcnn_outputs_by_name(image_sizes, c2_results)\n            return meta_arch.GeneralizedRCNN._postprocess(results, batched_inputs, image_sizes)\n\n        return f\n\n\nclass Caffe2PanopticFPN(Caffe2MetaArch):\n    def __init__(self, cfg, torch_model):\n        assert isinstance(torch_model, meta_arch.PanopticFPN)\n        torch_model = patch_generalized_rcnn(torch_model)\n        super().__init__(cfg, torch_model)\n\n        self.roi_heads_patcher = ROIHeadsPatcher(cfg, self._wrapped_model.roi_heads)\n\n    @mock_torch_nn_functional_interpolate()\n    def forward(self, inputs):\n        assert self.tensor_mode\n        images = self._caffe2_preprocess_image(inputs)\n        features = self._wrapped_model.backbone(images.tensor)\n\n        sem_seg_results, _ = self._wrapped_model.sem_seg_head(features)\n        sem_seg_results = alias(sem_seg_results, \"sem_seg\")\n\n        proposals, _ = self._wrapped_model.proposal_generator(images, features)\n\n        with self.roi_heads_patcher.mock_roi_heads(self.tensor_mode):\n            detector_results, _ = self._wrapped_model.roi_heads(images, features, proposals)\n\n        return tuple(detector_results[0].flatten()) + (sem_seg_results,)\n\n    def encode_additional_info(self, predict_net, init_net):\n        size_divisibility = self._wrapped_model.backbone.size_divisibility\n        check_set_pb_arg(predict_net, \"size_divisibility\", \"i\", size_divisibility)\n        check_set_pb_arg(\n            predict_net, 
\"device\", \"s\", str.encode(str(self._wrapped_model.device), \"ascii\")\n        )\n        check_set_pb_arg(predict_net, \"meta_architecture\", \"s\", b\"PanopticFPN\")\n\n        # Inference parameters:\n        check_set_pb_arg(predict_net, \"combine_on\", \"i\", self._wrapped_model.combine_on)\n        check_set_pb_arg(\n            predict_net,\n            \"combine_overlap_threshold\",\n            \"f\",\n            _cast_to_f32(self._wrapped_model.combine_overlap_threshold),\n        )\n        check_set_pb_arg(\n            predict_net,\n            \"combine_stuff_area_limit\",\n            \"i\",\n            self._wrapped_model.combine_stuff_area_limit,\n        )\n        check_set_pb_arg(\n            predict_net,\n            \"combine_instances_confidence_threshold\",\n            \"f\",\n            _cast_to_f32(self._wrapped_model.combine_instances_confidence_threshold),\n        )\n\n    @staticmethod\n    def get_outputs_converter(predict_net, init_net):\n        combine_on = get_pb_arg_vali(predict_net, \"combine_on\", None)\n        combine_overlap_threshold = get_pb_arg_valf(predict_net, \"combine_overlap_threshold\", None)\n        combine_stuff_area_limit = get_pb_arg_vali(predict_net, \"combine_stuff_area_limit\", None)\n        combine_instances_confidence_threshold = get_pb_arg_valf(\n            predict_net, \"combine_instances_confidence_threshold\", None\n        )\n\n        def f(batched_inputs, c2_inputs, c2_results):\n            image_sizes = [[int(im[0]), int(im[1])] for im in c2_inputs[\"im_info\"]]\n            detector_results = assemble_rcnn_outputs_by_name(\n                image_sizes, c2_results, force_mask_on=True\n            )\n            sem_seg_results = c2_results[\"sem_seg\"]\n\n            # copied from meta_arch/panoptic_fpn.py ...\n            processed_results = []\n            for sem_seg_result, detector_result, input_per_image, image_size in zip(\n                sem_seg_results, detector_results, 
batched_inputs, image_sizes\n            ):\n                height = input_per_image.get(\"height\", image_size[0])\n                width = input_per_image.get(\"width\", image_size[1])\n                sem_seg_r = sem_seg_postprocess(sem_seg_result, image_size, height, width)\n                detector_r = detector_postprocess(detector_result, height, width)\n\n                processed_results.append({\"sem_seg\": sem_seg_r, \"instances\": detector_r})\n\n                if combine_on:\n                    panoptic_r = combine_semantic_and_instance_outputs(\n                        detector_r,\n                        sem_seg_r.argmax(dim=0),\n                        combine_overlap_threshold,\n                        combine_stuff_area_limit,\n                        combine_instances_confidence_threshold,\n                    )\n                    processed_results[-1][\"panoptic_seg\"] = panoptic_r\n            return processed_results\n\n        return f\n\n\nclass Caffe2RetinaNet(Caffe2MetaArch):\n    def __init__(self, cfg, torch_model):\n        assert isinstance(torch_model, meta_arch.RetinaNet)\n        super().__init__(cfg, torch_model)\n\n    @mock_torch_nn_functional_interpolate()\n    def forward(self, inputs):\n        assert self.tensor_mode\n        images = self._caffe2_preprocess_image(inputs)\n\n        # explicitly return the images sizes to avoid removing \"im_info\" by ONNX\n        # since it's not used in the forward path\n        return_tensors = [images.image_sizes]\n\n        features = self._wrapped_model.backbone(images.tensor)\n        features = [features[f] for f in self._wrapped_model.in_features]\n        for i, feature_i in enumerate(features):\n            features[i] = alias(feature_i, \"feature_{}\".format(i), is_backward=True)\n            return_tensors.append(features[i])\n\n        box_cls, box_delta = self._wrapped_model.head(features)\n        for i, (box_cls_i, box_delta_i) in enumerate(zip(box_cls, box_delta)):\n    
        return_tensors.append(alias(box_cls_i, \"box_cls_{}\".format(i)))\n            return_tensors.append(alias(box_delta_i, \"box_delta_{}\".format(i)))\n\n        return tuple(return_tensors)\n\n    def encode_additional_info(self, predict_net, init_net):\n        size_divisibility = self._wrapped_model.backbone.size_divisibility\n        check_set_pb_arg(predict_net, \"size_divisibility\", \"i\", size_divisibility)\n        check_set_pb_arg(\n            predict_net, \"device\", \"s\", str.encode(str(self._wrapped_model.device), \"ascii\")\n        )\n        check_set_pb_arg(predict_net, \"meta_architecture\", \"s\", b\"RetinaNet\")\n\n        # Inference parameters:\n        check_set_pb_arg(\n            predict_net, \"score_threshold\", \"f\", _cast_to_f32(self._wrapped_model.score_threshold)\n        )\n        check_set_pb_arg(predict_net, \"topk_candidates\", \"i\", self._wrapped_model.topk_candidates)\n        check_set_pb_arg(\n            predict_net, \"nms_threshold\", \"f\", _cast_to_f32(self._wrapped_model.nms_threshold)\n        )\n        check_set_pb_arg(\n            predict_net,\n            \"max_detections_per_image\",\n            \"i\",\n            self._wrapped_model.max_detections_per_image,\n        )\n\n        check_set_pb_arg(\n            predict_net,\n            \"bbox_reg_weights\",\n            \"floats\",\n            [_cast_to_f32(w) for w in self._wrapped_model.box2box_transform.weights],\n        )\n        self._encode_anchor_generator_cfg(predict_net)\n\n    def _encode_anchor_generator_cfg(self, predict_net):\n        # serialize anchor_generator for future use\n        serialized_anchor_generator = io.BytesIO()\n        torch.save(self._wrapped_model.anchor_generator, serialized_anchor_generator)\n        # Ideally we can put anchor generating inside the model, then we don't\n        # need to store this information.\n        bytes = serialized_anchor_generator.getvalue()\n        check_set_pb_arg(predict_net, 
\"serialized_anchor_generator\", \"s\", bytes)\n\n    @staticmethod\n    def get_outputs_converter(predict_net, init_net):\n        self = types.SimpleNamespace()\n        serialized_anchor_generator = io.BytesIO(\n            get_pb_arg_vals(predict_net, \"serialized_anchor_generator\", None)\n        )\n        self.anchor_generator = torch.load(serialized_anchor_generator)\n        bbox_reg_weights = get_pb_arg_floats(predict_net, \"bbox_reg_weights\", None)\n        self.box2box_transform = Box2BoxTransform(weights=tuple(bbox_reg_weights))\n        self.score_threshold = get_pb_arg_valf(predict_net, \"score_threshold\", None)\n        self.topk_candidates = get_pb_arg_vali(predict_net, \"topk_candidates\", None)\n        self.nms_threshold = get_pb_arg_valf(predict_net, \"nms_threshold\", None)\n        self.max_detections_per_image = get_pb_arg_vali(\n            predict_net, \"max_detections_per_image\", None\n        )\n\n        # hack to reuse inference code from RetinaNet\n        self.inference = functools.partial(meta_arch.RetinaNet.inference, self)\n        self.inference_single_image = functools.partial(\n            meta_arch.RetinaNet.inference_single_image, self\n        )\n\n        def f(batched_inputs, c2_inputs, c2_results):\n            image_sizes = [[int(im[0]), int(im[1])] for im in c2_inputs[\"im_info\"]]\n\n            num_features = len([x for x in c2_results.keys() if x.startswith(\"box_cls_\")])\n            box_cls = [c2_results[\"box_cls_{}\".format(i)] for i in range(num_features)]\n            box_delta = [c2_results[\"box_delta_{}\".format(i)] for i in range(num_features)]\n\n            # For each feature level, feature should have the same batch size and\n            # spatial dimension as the box_cls and box_delta.\n            dummy_features = [box_delta[i].clone()[:, 0:0, :, :] for i in range(num_features)]\n            anchors = self.anchor_generator(dummy_features)\n\n            # self.num_classess can be inferred\n        
    self.num_classes = box_cls[0].shape[1] // (box_delta[0].shape[1] // 4)\n\n            results = self.inference(box_cls, box_delta, anchors, image_sizes)\n            return meta_arch.GeneralizedRCNN._postprocess(results, batched_inputs, image_sizes)\n\n        return f\n\n\nMETA_ARCH_CAFFE2_EXPORT_TYPE_MAP = {\n    \"GeneralizedRCNN\": Caffe2GeneralizedRCNN,\n    \"PanopticFPN\": Caffe2PanopticFPN,\n    \"RetinaNet\": Caffe2RetinaNet,\n}\n"
  },
  {
    "path": "detectron2/export/patcher.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\n\nimport contextlib\nimport mock\nimport torch\n\nfrom detectron2.modeling import poolers\nfrom detectron2.modeling.proposal_generator import rpn\nfrom detectron2.modeling.roi_heads import roi_heads\nfrom detectron2.modeling.roi_heads.fast_rcnn import FastRCNNOutputs\n\nfrom .c10 import (\n    Caffe2Compatible,\n    Caffe2FastRCNNOutputsInference,\n    Caffe2KeypointRCNNInference,\n    Caffe2MaskRCNNInference,\n    Caffe2ROIPooler,\n    Caffe2RPN,\n)\n\n\nclass GenericMixin(object):\n    pass\n\n\nclass Caffe2CompatibleConverter(object):\n    \"\"\"\n    A GenericUpdater which implements the `create_from` interface, by modifying\n    module object and assign it with another class replaceCls.\n    \"\"\"\n\n    def __init__(self, replaceCls):\n        self.replaceCls = replaceCls\n\n    def create_from(self, module):\n        # update module's class to the new class\n        assert isinstance(module, torch.nn.Module)\n        if issubclass(self.replaceCls, GenericMixin):\n            # replaceCls should act as mixin, create a new class on-the-fly\n            new_class = type(\n                \"{}MixedWith{}\".format(self.replaceCls.__name__, module.__class__.__name__),\n                (self.replaceCls, module.__class__),\n                {},  # {\"new_method\": lambda self: ...},\n            )\n            module.__class__ = new_class\n        else:\n            # replaceCls is complete class, this allow arbitrary class swap\n            module.__class__ = self.replaceCls\n\n        # initialize Caffe2Compatible\n        if isinstance(module, Caffe2Compatible):\n            module.tensor_mode = False\n\n        return module\n\n\ndef patch(model, target, updater, *args, **kwargs):\n    \"\"\"\n    recursively (post-order) update all modules with the target type and its\n    subclasses, make a initialization/composition/inheritance/... 
via the\n    updater.create_from.\n    \"\"\"\n    for name, module in model.named_children():\n        model._modules[name] = patch(module, target, updater, *args, **kwargs)\n    if isinstance(model, target):\n        return updater.create_from(model, *args, **kwargs)\n    return model\n\n\ndef patch_generalized_rcnn(model):\n    ccc = Caffe2CompatibleConverter\n    model = patch(model, rpn.RPN, ccc(Caffe2RPN))\n    model = patch(model, poolers.ROIPooler, ccc(Caffe2ROIPooler))\n\n    return model\n\n\n@contextlib.contextmanager\ndef mock_fastrcnn_outputs_inference(tensor_mode, check=True):\n    with mock.patch.object(\n        FastRCNNOutputs,\n        \"inference\",\n        autospec=True,\n        side_effect=Caffe2FastRCNNOutputsInference(tensor_mode),\n    ) as mocked_func:\n        yield\n    if check:\n        assert mocked_func.call_count > 0\n\n\n@contextlib.contextmanager\ndef mock_mask_rcnn_inference(tensor_mode, patched_module, check=True):\n    with mock.patch(\n        \"{}.mask_rcnn_inference\".format(patched_module), side_effect=Caffe2MaskRCNNInference()\n    ) as mocked_func:\n        yield\n    if check:\n        assert mocked_func.call_count > 0\n\n\n@contextlib.contextmanager\ndef mock_keypoint_rcnn_inference(tensor_mode, patched_module, use_heatmap_max_keypoint, check=True):\n    with mock.patch(\n        \"{}.keypoint_rcnn_inference\".format(patched_module),\n        side_effect=Caffe2KeypointRCNNInference(use_heatmap_max_keypoint),\n    ) as mocked_func:\n        yield\n    if check:\n        assert mocked_func.call_count > 0\n\n\nclass ROIHeadsPatcher:\n    def __init__(self, cfg, heads):\n        self.heads = heads\n\n        self.use_heatmap_max_keypoint = cfg.EXPORT_CAFFE2.USE_HEATMAP_MAX_KEYPOINT\n\n    @contextlib.contextmanager\n    def mock_roi_heads(self, tensor_mode=True):\n        \"\"\"\n        Patching several inference functions inside ROIHeads and its subclasses\n\n        Args:\n            tensor_mode (bool): whether the 
inputs/outputs are caffe2's tensor\n                format or not. Default to True.\n        \"\"\"\n        # NOTE: this requries the `keypoint_rcnn_inference` and `mask_rcnn_inference`\n        # are called inside the same file as ROIHeads due to using mock.patch.\n        module = roi_heads.ROIHeads.__module__\n\n        mock_ctx_managers = [mock_fastrcnn_outputs_inference(tensor_mode)]\n        if getattr(self.heads, \"keypoint_on\", False):\n            mock_ctx_managers += [\n                mock_keypoint_rcnn_inference(tensor_mode, module, self.use_heatmap_max_keypoint)\n            ]\n        if getattr(self.heads, \"mask_on\", False):\n            mock_ctx_managers += [mock_mask_rcnn_inference(tensor_mode, module)]\n\n        with contextlib.ExitStack() as stack:  # python 3.3+\n            for mgr in mock_ctx_managers:\n                stack.enter_context(mgr)\n            yield\n"
  },
  {
    "path": "detectron2/export/shared.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\n\nimport collections\nimport contextlib\nimport copy\nimport functools\nimport logging\nimport mock\nimport numpy as np\nimport os\nfrom typing import Any, Callable, Dict, List, Optional, Tuple, Union\nimport caffe2.python.utils as putils\nimport torch\nimport torch.nn.functional as F\nfrom caffe2.proto import caffe2_pb2\nfrom caffe2.python import core, net_drawer, workspace\nfrom torch.nn.functional import interpolate as interp\n\nlogger = logging.getLogger(__name__)\n\n\n# ==== torch/utils_toffee/cast.py =======================================\n\n\ndef to_device(t, device_str):\n    \"\"\"\n    This function is a replacement of .to(another_device) such that it allows the\n    casting to be traced properly by explicitly calling the underlying copy ops.\n    It also avoids introducing unncessary op when casting to the same device.\n    \"\"\"\n    src = t.device\n    dst = torch.device(device_str)\n\n    if src == dst:\n        return t\n    elif src.type == \"cuda\" and dst.type == \"cpu\":\n        return torch.ops._caffe2.CopyGPUToCPU(t)\n    elif src.type == \"cpu\" and dst.type == \"cuda\":\n        return torch.ops._caffe2.CopyCPUToGPU(t)\n    else:\n        raise RuntimeError(\"Can't cast tensor from device {} to device {}\".format(src, dst))\n\n\n# ==== torch/utils_toffee/interpolate.py =======================================\n\n\n# Note: borrowed from vision/detection/fair/detectron/detectron/modeling/detector.py\ndef BilinearInterpolation(tensor_in, up_scale):\n    assert up_scale % 2 == 0, \"Scale should be even\"\n\n    def upsample_filt(size):\n        factor = (size + 1) // 2\n        if size % 2 == 1:\n            center = factor - 1\n        else:\n            center = factor - 0.5\n\n        og = np.ogrid[:size, :size]\n        return (1 - abs(og[0] - center) / factor) * (1 - abs(og[1] - center) / factor)\n\n    kernel_size = int(up_scale) * 2\n    bil_filt = 
upsample_filt(kernel_size)\n\n    dim = int(tensor_in.shape[1])\n    kernel = np.zeros((dim, dim, kernel_size, kernel_size), dtype=np.float32)\n    kernel[range(dim), range(dim), :, :] = bil_filt\n\n    tensor_out = F.conv_transpose2d(\n        tensor_in,\n        weight=to_device(torch.Tensor(kernel), tensor_in.device),\n        bias=None,\n        stride=int(up_scale),\n        padding=int(up_scale / 2),\n    )\n\n    return tensor_out\n\n\n# NOTE: ONNX is incompatible with traced torch.nn.functional.interpolate if\n# using dynamic `scale_factor` rather than static `size`. (T43166860)\n# NOTE: Caffe2 Int8 conversion might not be able to quantize `size` properly.\ndef onnx_compatibale_interpolate(\n    input, size=None, scale_factor=None, mode=\"nearest\", align_corners=None\n):\n    # NOTE: The input dimensions are interpreted in the form:\n    # `mini-batch x channels x [optional depth] x [optional height] x width`.\n    if size is None and scale_factor is not None:\n        if input.dim() == 4:\n            if isinstance(scale_factor, (int, float)):\n                height_scale, width_scale = (scale_factor, scale_factor)\n            else:\n                assert isinstance(scale_factor, (tuple, list))\n                assert len(scale_factor) == 2\n                height_scale, width_scale = scale_factor\n\n            assert not align_corners, \"No matching C2 op for align_corners == True\"\n            if mode == \"nearest\":\n                return torch.ops._caffe2.ResizeNearest(\n                    input, order=\"NCHW\", width_scale=width_scale, height_scale=height_scale\n                )\n            elif mode == \"bilinear\":\n                logger.warning(\n                    \"Use F.conv_transpose2d for bilinear interpolate\"\n                    \" because there's no such C2 op, this may cause significant\"\n                    \" slowdown and the boundary pixels won't be as same as\"\n                    \" using F.interpolate due to 
padding.\"\n                )\n                assert height_scale == width_scale\n                return BilinearInterpolation(input, up_scale=height_scale)\n        logger.warning(\"Output size is not static, it might cause ONNX conversion issue\")\n\n    return interp(input, size, scale_factor, mode, align_corners)\n\n\n@contextlib.contextmanager\ndef mock_torch_nn_functional_interpolate():\n    if torch.onnx.is_in_onnx_export():\n        with mock.patch(\n            \"torch.nn.functional.interpolate\", side_effect=onnx_compatibale_interpolate\n        ):\n            yield\n    else:\n        yield\n\n\n# ==== torch/utils_caffe2/ws_utils.py ==========================================\n\n\nclass ScopedWS(object):\n    def __init__(self, ws_name, is_reset, is_cleanup=False):\n        self.ws_name = ws_name\n        self.is_reset = is_reset\n        self.is_cleanup = is_cleanup\n        self.org_ws = \"\"\n\n    def __enter__(self):\n        self.org_ws = workspace.CurrentWorkspace()\n        if self.ws_name is not None:\n            workspace.SwitchWorkspace(self.ws_name, True)\n        if self.is_reset:\n            workspace.ResetWorkspace()\n\n        return workspace\n\n    def __exit__(self, *args):\n        if self.is_cleanup:\n            workspace.ResetWorkspace()\n        if self.ws_name is not None:\n            workspace.SwitchWorkspace(self.org_ws)\n\n\ndef fetch_any_blob(name):\n    bb = None\n    try:\n        bb = workspace.FetchBlob(name)\n    except TypeError:\n        bb = workspace.FetchInt8Blob(name)\n    except Exception as e:\n        logger.error(\"Get blob {} error: {}\".format(name, e))\n\n    return bb\n\n\n# ==== torch/utils_caffe2/protobuf.py ==========================================\n\n\ndef get_pb_arg(pb, arg_name):\n    for x in pb.arg:\n        if x.name == arg_name:\n            return x\n    return None\n\n\ndef get_pb_arg_valf(pb, arg_name, default_val):\n    arg = get_pb_arg(pb, arg_name)\n    return arg.f if arg is not None 
else default_val\n\n\ndef get_pb_arg_floats(pb, arg_name, default_val):\n    arg = get_pb_arg(pb, arg_name)\n    return list(map(float, arg.floats)) if arg is not None else default_val\n\n\ndef get_pb_arg_ints(pb, arg_name, default_val):\n    arg = get_pb_arg(pb, arg_name)\n    return list(map(int, arg.ints)) if arg is not None else default_val\n\n\ndef get_pb_arg_vali(pb, arg_name, default_val):\n    arg = get_pb_arg(pb, arg_name)\n    return arg.i if arg is not None else default_val\n\n\ndef get_pb_arg_vals(pb, arg_name, default_val):\n    arg = get_pb_arg(pb, arg_name)\n    return arg.s if arg is not None else default_val\n\n\ndef get_pb_arg_valstrings(pb, arg_name, default_val):\n    arg = get_pb_arg(pb, arg_name)\n    return list(arg.strings) if arg is not None else default_val\n\n\ndef check_set_pb_arg(pb, arg_name, arg_attr, arg_value, allow_override=False):\n    arg = get_pb_arg(pb, arg_name)\n    if arg is None:\n        arg = putils.MakeArgument(arg_name, arg_value)\n        assert hasattr(arg, arg_attr)\n        pb.arg.extend([arg])\n    if allow_override and getattr(arg, arg_attr) != arg_value:\n        logger.warning(\n            \"Override argument {}: {} -> {}\".format(arg_name, getattr(arg, arg_attr), arg_value)\n        )\n        setattr(arg, arg_attr, arg_value)\n    else:\n        assert arg is not None\n        assert getattr(arg, arg_attr) == arg_value, \"Existing value {}, new value {}\".format(\n            getattr(arg, arg_attr), arg_value\n        )\n\n\ndef _create_const_fill_op_from_numpy(name, tensor, device_option=None):\n    assert type(tensor) == np.ndarray\n    kTypeNameMapper = {\n        np.dtype(\"float32\"): \"GivenTensorFill\",\n        np.dtype(\"int32\"): \"GivenTensorIntFill\",\n        np.dtype(\"int64\"): \"GivenTensorInt64Fill\",\n        np.dtype(\"uint8\"): \"GivenTensorStringFill\",\n    }\n\n    args_dict = {}\n    if tensor.dtype == np.dtype(\"uint8\"):\n        args_dict.update({\"values\": [str(tensor.data)], 
\"shape\": [1]})\n    else:\n        args_dict.update({\"values\": tensor, \"shape\": tensor.shape})\n\n    if device_option is not None:\n        args_dict[\"device_option\"] = device_option\n\n    return core.CreateOperator(kTypeNameMapper[tensor.dtype], [], [name], **args_dict)\n\n\ndef _create_const_fill_op_from_c2_int8_tensor(name, int8_tensor):\n    assert type(int8_tensor) == workspace.Int8Tensor\n    kTypeNameMapper = {\n        np.dtype(\"int32\"): \"Int8GivenIntTensorFill\",\n        np.dtype(\"uint8\"): \"Int8GivenTensorFill\",\n    }\n\n    tensor = int8_tensor.data\n    assert tensor.dtype in [np.dtype(\"uint8\"), np.dtype(\"int32\")]\n    values = tensor.tobytes() if tensor.dtype == np.dtype(\"uint8\") else tensor\n\n    return core.CreateOperator(\n        kTypeNameMapper[tensor.dtype],\n        [],\n        [name],\n        values=values,\n        shape=tensor.shape,\n        Y_scale=int8_tensor.scale,\n        Y_zero_point=int8_tensor.zero_point,\n    )\n\n\ndef create_const_fill_op(\n    name: str,\n    blob: Union[np.ndarray, workspace.Int8Tensor],\n    device_option: Optional[caffe2_pb2.DeviceOption] = None,\n) -> caffe2_pb2.OperatorDef:\n    \"\"\"\n    Given a blob object, return the Caffe2 operator that creates this blob\n    as constant. 
Currently support NumPy tensor and Caffe2 Int8Tensor.\n    \"\"\"\n\n    tensor_type = type(blob)\n    assert tensor_type in [np.ndarray, workspace.Int8Tensor], (\n        'Error when creating const fill op for \"{}\", unsupported blob type: {}'\n    ).format(name, type(blob))\n\n    if tensor_type == np.ndarray:\n        return _create_const_fill_op_from_numpy(name, blob, device_option)\n    elif tensor_type == workspace.Int8Tensor:\n        assert device_option is None\n        return _create_const_fill_op_from_c2_int8_tensor(name, blob)\n\n\ndef construct_init_net_from_params(\n    params: Dict[str, Any], device_options: Optional[Dict[str, caffe2_pb2.DeviceOption]] = None\n) -> caffe2_pb2.NetDef:\n    \"\"\"\n    Construct the init_net from params dictionary\n    \"\"\"\n    init_net = caffe2_pb2.NetDef()\n    device_options = device_options or {}\n    for name, blob in params.items():\n        if isinstance(blob, str):\n            logger.warning(\n                (\n                    \"Blob {} with type {} is not supported in generating init net,\"\n                    \" skipped.\".format(name, type(blob))\n                )\n            )\n            continue\n        init_net.op.extend(\n            [create_const_fill_op(name, blob, device_option=device_options.get(name, None))]\n        )\n        init_net.external_output.append(name)\n    return init_net\n\n\ndef get_producer_map(ssa):\n    \"\"\"\n    Return dict from versioned blob to (i, j),\n        where i is index of producer op, j is the index of output of that op.\n    \"\"\"\n    producer_map = {}\n    for i in range(len(ssa)):\n        outputs = ssa[i][1]\n        for j, outp in enumerate(outputs):\n            producer_map[outp] = (i, j)\n    return producer_map\n\n\ndef get_consumer_map(ssa):\n    \"\"\"\n    Return dict from versioned blob to list of (i, j),\n        where i is index of consumer op, j is the index of input of that op.\n    \"\"\"\n    consumer_map = 
collections.defaultdict(list)\n    for i in range(len(ssa)):\n        inputs = ssa[i][0]\n        for j, inp in enumerate(inputs):\n            consumer_map[inp].append((i, j))\n    return consumer_map\n\n\ndef get_params_from_init_net(\n    init_net: caffe2_pb2.NetDef\n) -> [Dict[str, Any], Dict[str, caffe2_pb2.DeviceOption]]:\n    \"\"\"\n    Take the output blobs from init_net by running it.\n    Outputs:\n        params: dict from blob name to numpy array\n        device_options: dict from blob name to the device option of its creating op\n    \"\"\"\n    # NOTE: this assumes that the params is determined by producer op with the\n    # only exception be CopyGPUToCPU which is CUDA op but returns CPU tensor.\n    def _get_device_option(producer_op):\n        if producer_op.type == \"CopyGPUToCPU\":\n            return caffe2_pb2.DeviceOption()\n        else:\n            return producer_op.device_option\n\n    with ScopedWS(\"__get_params_from_init_net__\", is_reset=True, is_cleanup=True) as ws:\n        ws.RunNetOnce(init_net)\n        params = {b: fetch_any_blob(b) for b in init_net.external_output}\n    ssa, versions = core.get_ssa(init_net)\n    producer_map = get_producer_map(ssa)\n    device_options = {\n        b: _get_device_option(init_net.op[producer_map[(b, versions[b])][0]])\n        for b in init_net.external_output\n    }\n    return params, device_options\n\n\ndef _updater_raise(op, input_types, output_types):\n    raise RuntimeError(\n        \"Failed to apply updater for op {} given input_types {} and\"\n        \" output_types {}\".format(op, input_types, output_types)\n    )\n\n\ndef _generic_status_identifier(\n    predict_net: caffe2_pb2.NetDef,\n    status_updater: Callable,\n    known_status: Dict[Tuple[str, int], Any],\n) -> Dict[Tuple[str, int], Any]:\n    \"\"\"\n    Statically infer the status of each blob, the status can be such as device type\n        (CPU/GPU), layout (NCHW/NHWC), data type (float32/int8), etc. 
\"Blob\" here\n        is versioned blob (Tuple[str, int]) in the format compatible with ssa.\n    Inputs:\n        predict_net: the caffe2 network\n        status_updater: a callable, given an op and the status of its input/output,\n            it returns the updated status of input/output. `None` is used for\n            representing unknown status.\n        known_status: a dict containing known status, used as initialization.\n    Outputs:\n        A dict mapping from versioned blob to its status\n    \"\"\"\n    ssa, versions = core.get_ssa(predict_net)\n    versioned_ext_input = [(b, 0) for b in predict_net.external_input]\n    versioned_ext_output = [(b, versions[b]) for b in predict_net.external_output]\n    all_versioned_blobs = set().union(*[set(x[0] + x[1]) for x in ssa])\n\n    allowed_vbs = all_versioned_blobs.union(versioned_ext_input).union(versioned_ext_output)\n    assert all(k in allowed_vbs for k in known_status)\n    assert all(v is not None for v in known_status.values())\n    _known_status = copy.deepcopy(known_status)\n\n    def _check_and_update(key, value):\n        assert value is not None\n        if key in _known_status:\n            if not _known_status[key] == value:\n                raise RuntimeError(\n                    \"Confilict status for {}, existing status {}, new status {}\".format(\n                        key, _known_status[key], value\n                    )\n                )\n        _known_status[key] = value\n\n    def _update_i(op, ssa_i):\n        versioned_inputs = ssa_i[0]\n        versioned_outputs = ssa_i[1]\n\n        inputs_status = [_known_status.get(b, None) for b in versioned_inputs]\n        outputs_status = [_known_status.get(b, None) for b in versioned_outputs]\n\n        new_inputs_status, new_outputs_status = status_updater(op, inputs_status, outputs_status)\n\n        for versioned_blob, status in zip(\n            versioned_inputs + versioned_outputs, new_inputs_status + new_outputs_status\n        
):\n            if status is not None:\n                _check_and_update(versioned_blob, status)\n\n    for op, ssa_i in zip(predict_net.op, ssa):\n        _update_i(op, ssa_i)\n    for op, ssa_i in zip(reversed(predict_net.op), reversed(ssa)):\n        _update_i(op, ssa_i)\n\n    # NOTE: This strictly checks all the blob from predict_net must be assgined\n    # a known status. However sometimes it's impossible (eg. having deadend op),\n    # we may relax this constraint if\n    for k in all_versioned_blobs:\n        if k not in _known_status:\n            raise NotImplementedError(\n                \"Can not infer the status for {}. Currently only support the case where\"\n                \" a single forward and backward pass can identify status for all blobs.\".format(k)\n            )\n\n    return _known_status\n\n\ndef infer_device_type(\n    predict_net: caffe2_pb2.NetDef,\n    known_status: Dict[Tuple[str, int], Any],\n    device_name_style: str = \"caffe2\",\n) -> Dict[Tuple[str, int], str]:\n    \"\"\" Return the device type (\"cpu\" or \"gpu\"/\"cuda\") of each (versioned) blob \"\"\"\n\n    assert device_name_style in [\"caffe2\", \"pytorch\"]\n    _CPU_STR = \"cpu\"\n    _GPU_STR = \"gpu\" if device_name_style == \"caffe2\" else \"cuda\"\n\n    def _copy_cpu_to_gpu_updater(op, input_types, output_types):\n        if input_types[0] == _GPU_STR or output_types[0] == _CPU_STR:\n            _updater_raise(op, input_types, output_types)\n        return ([_CPU_STR], [_GPU_STR])\n\n    def _copy_gpu_to_cpu_updater(op, input_types, output_types):\n        if input_types[0] == _CPU_STR or output_types[0] == _GPU_STR:\n            _updater_raise(op, input_types, output_types)\n        return ([_GPU_STR], [_CPU_STR])\n\n    def _other_ops_updater(op, input_types, output_types):\n        non_none_types = [x for x in input_types + output_types if x is not None]\n        if len(non_none_types) > 0:\n            the_type = non_none_types[0]\n            if not all(x 
== the_type for x in non_none_types):\n                _updater_raise(op, input_types, output_types)\n        else:\n            the_type = None\n        return ([the_type for _ in op.input], [the_type for _ in op.output])\n\n    def _device_updater(op, *args, **kwargs):\n        return {\n            \"CopyCPUToGPU\": _copy_cpu_to_gpu_updater,\n            \"CopyGPUToCPU\": _copy_gpu_to_cpu_updater,\n        }.get(op.type, _other_ops_updater)(op, *args, **kwargs)\n\n    return _generic_status_identifier(predict_net, _device_updater, known_status)\n\n\n# ==== torch/utils_caffe2/vis.py ===============================================\n\n\ndef _modify_blob_names(ops, blob_rename_f):\n    ret = []\n\n    def _replace_list(blob_list, replaced_list):\n        del blob_list[:]\n        blob_list.extend(replaced_list)\n\n    for x in ops:\n        cur = copy.deepcopy(x)\n        _replace_list(cur.input, list(map(blob_rename_f, cur.input)))\n        _replace_list(cur.output, list(map(blob_rename_f, cur.output)))\n        ret.append(cur)\n\n    return ret\n\n\ndef _rename_blob(name, blob_sizes, blob_ranges):\n    def _list_to_str(bsize):\n        ret = \", \".join([str(x) for x in bsize])\n        ret = \"[\" + ret + \"]\"\n        return ret\n\n    ret = name\n    if blob_sizes is not None and name in blob_sizes:\n        ret += \"\\n\" + _list_to_str(blob_sizes[name])\n    if blob_ranges is not None and name in blob_ranges:\n        ret += \"\\n\" + _list_to_str(blob_ranges[name])\n\n    return ret\n\n\n# graph_name could not contain word 'graph'\ndef save_graph(net, file_name, graph_name=\"net\", op_only=True, blob_sizes=None, blob_ranges=None):\n    blob_rename_f = functools.partial(_rename_blob, blob_sizes=blob_sizes, blob_ranges=blob_ranges)\n    return save_graph_base(net, file_name, graph_name, op_only, blob_rename_f)\n\n\ndef save_graph_base(net, file_name, graph_name=\"net\", op_only=True, blob_rename_func=None):\n    graph = None\n    ops = net.op\n    if 
blob_rename_func is not None:\n        ops = _modify_blob_names(ops, blob_rename_func)\n    if not op_only:\n        graph = net_drawer.GetPydotGraph(ops, graph_name, rankdir=\"TB\")\n    else:\n        graph = net_drawer.GetPydotGraphMinimal(\n            ops, graph_name, rankdir=\"TB\", minimal_dependency=True\n        )\n\n    try:\n        par_dir = os.path.dirname(file_name)\n        if not os.path.exists(par_dir):\n            os.makedirs(par_dir)\n\n        format = os.path.splitext(os.path.basename(file_name))[-1]\n        if format == \".png\":\n            graph.write_png(file_name)\n        elif format == \".pdf\":\n            graph.write_pdf(file_name)\n        elif format == \".svg\":\n            graph.write_svg(file_name)\n        else:\n            print(\"Incorrect format {}\".format(format))\n    except Exception as e:\n        print(\"Error when writing graph to image {}\".format(e))\n\n    return graph\n\n\n# ==== torch/utils_toffee/aten_to_caffe2.py ====================================\n\n\ndef group_norm_replace_aten_with_caffe2(predict_net: caffe2_pb2.NetDef):\n    \"\"\"\n    For ONNX exported model, GroupNorm will be represented as ATen op,\n        this can be a drop in replacement from ATen to GroupNorm\n    \"\"\"\n    count = 0\n    for op in predict_net.op:\n        if op.type == \"ATen\":\n            op_name = get_pb_arg_vals(op, \"operator\", None)  # return byte in py3\n            if op_name and op_name.decode() == \"group_norm\":\n                op.arg.remove(get_pb_arg(op, \"operator\"))\n\n                if get_pb_arg_vali(op, \"cudnn_enabled\", None):\n                    op.arg.remove(get_pb_arg(op, \"cudnn_enabled\"))\n\n                num_groups = get_pb_arg_vali(op, \"num_groups\", None)\n                if num_groups is not None:\n                    op.arg.remove(get_pb_arg(op, \"num_groups\"))\n                    check_set_pb_arg(op, \"group\", \"i\", num_groups)\n\n                op.type = \"GroupNorm\"\n         
       count += 1\n    if count > 1:\n        logger.info(\"Replaced {} ATen operator to GroupNormOp\".format(count))\n\n\n# ==== torch/utils_toffee/alias.py =============================================\n\n\ndef alias(x, name, is_backward=False):\n    if not torch.onnx.is_in_onnx_export():\n        return x\n    assert isinstance(x, torch.Tensor)\n    return torch.ops._caffe2.AliasWithName(x, name, is_backward=is_backward)\n\n\ndef fuse_alias_placeholder(predict_net, init_net):\n    \"\"\" Remove AliasWithName placeholder and rename the input/output of it \"\"\"\n    # First we finish all the re-naming\n    for i, op in enumerate(predict_net.op):\n        if op.type == \"AliasWithName\":\n            assert len(op.input) == 1\n            assert len(op.output) == 1\n            name = get_pb_arg_vals(op, \"name\", None).decode()\n            is_backward = bool(get_pb_arg_vali(op, \"is_backward\", 0))\n            rename_op_input(predict_net, init_net, i, 0, name, from_producer=is_backward)\n            rename_op_output(predict_net, i, 0, name)\n\n    # Remove AliasWithName, should be very safe since it's a non-op\n    new_ops = []\n    for op in predict_net.op:\n        if op.type != \"AliasWithName\":\n            new_ops.append(op)\n        else:\n            # safety check\n            assert op.input == op.output\n            assert op.input[0] == op.arg[0].s.decode()\n    del predict_net.op[:]\n    predict_net.op.extend(new_ops)\n\n\n# ==== torch/utils_caffe2/graph_transform.py ===================================\n\n\nclass IllegalGraphTransformError(ValueError):\n    \"\"\" When a graph transform function call can't be executed. 
\"\"\"\n\n\ndef _rename_versioned_blob_in_proto(\n    proto: caffe2_pb2.NetDef,\n    old_name: str,\n    new_name: str,\n    version: int,\n    ssa: List[Tuple[List[Tuple[str, int]], List[Tuple[str, int]]]],\n    start_versions: Dict[str, int],\n    end_versions: Dict[str, int],\n):\n    \"\"\" In given proto, rename all blobs with matched version \"\"\"\n    # Operater list\n    for op, i_th_ssa in zip(proto.op, ssa):\n        versioned_inputs, versioned_outputs = i_th_ssa\n        for i in range(len(op.input)):\n            if versioned_inputs[i] == (old_name, version):\n                op.input[i] = new_name\n        for i in range(len(op.output)):\n            if versioned_outputs[i] == (old_name, version):\n                op.output[i] = new_name\n    # external_input\n    if start_versions.get(old_name, 0) == version:\n        for i in range(len(proto.external_input)):\n            if proto.external_input[i] == old_name:\n                proto.external_input[i] = new_name\n    # external_output\n    if end_versions.get(old_name, 0) == version:\n        for i in range(len(proto.external_output)):\n            if proto.external_output[i] == old_name:\n                proto.external_output[i] = new_name\n\n\ndef rename_op_input(\n    predict_net: caffe2_pb2.NetDef,\n    init_net: caffe2_pb2.NetDef,\n    op_id: int,\n    input_id: int,\n    new_name: str,\n    from_producer: bool = False,\n):\n    \"\"\"\n    Rename the op_id-th operator in predict_net, change it's input_id-th input's\n        name to the new_name. It also does automatic re-route and change\n        external_input and init_net if necessary.\n    - It requires the input is only consumed by this op.\n    - This function modifies predict_net and init_net in-place.\n    - When from_producer is enable, this also updates other operators that consumes\n        the same input. 
Be cautious because may trigger unintended behaviour.\n    \"\"\"\n    assert isinstance(predict_net, caffe2_pb2.NetDef)\n    assert isinstance(init_net, caffe2_pb2.NetDef)\n\n    init_net_ssa, init_net_versions = core.get_ssa(init_net)\n    predict_net_ssa, predict_net_versions = core.get_ssa(\n        predict_net, copy.deepcopy(init_net_versions)\n    )\n\n    versioned_inputs, versioned_outputs = predict_net_ssa[op_id]\n    old_name, version = versioned_inputs[input_id]\n\n    if from_producer:\n        producer_map = get_producer_map(predict_net_ssa)\n        if not (old_name, version) in producer_map:\n            raise NotImplementedError(\n                \"Can't find producer, the input {} is probably from\"\n                \" init_net, this is not supported yet.\".format(old_name)\n            )\n        producer = producer_map[(old_name, version)]\n        rename_op_output(predict_net, producer[0], producer[1], new_name)\n        return\n\n    def contain_targets(op_ssa):\n        return (old_name, version) in op_ssa[0]\n\n    is_consumer = [contain_targets(op_ssa) for op_ssa in predict_net_ssa]\n    if sum(is_consumer) > 1:\n        raise IllegalGraphTransformError(\n            (\n                \"Input '{}' of operator(#{}) are consumed by other ops, please use\"\n                + \" rename_op_output on the producer instead. 
Offending op: \\n{}\"\n            ).format(old_name, op_id, predict_net.op[op_id])\n        )\n\n    # update init_net\n    _rename_versioned_blob_in_proto(\n        init_net, old_name, new_name, version, init_net_ssa, {}, init_net_versions\n    )\n    # update predict_net\n    _rename_versioned_blob_in_proto(\n        predict_net,\n        old_name,\n        new_name,\n        version,\n        predict_net_ssa,\n        init_net_versions,\n        predict_net_versions,\n    )\n\n\ndef rename_op_output(predict_net: caffe2_pb2.NetDef, op_id: int, output_id: int, new_name: str):\n    \"\"\"\n    Rename the op_id-th operator in predict_net, change it's output_id-th input's\n        name to the new_name. It also does automatic re-route and change\n        external_output and if necessary.\n    - It allows multiple consumers of its output.\n    - This function modifies predict_net in-place, doesn't need init_net.\n    \"\"\"\n    assert isinstance(predict_net, caffe2_pb2.NetDef)\n\n    ssa, blob_versions = core.get_ssa(predict_net)\n\n    versioned_inputs, versioned_outputs = ssa[op_id]\n    old_name, version = versioned_outputs[output_id]\n\n    # update predict_net\n    _rename_versioned_blob_in_proto(\n        predict_net, old_name, new_name, version, ssa, {}, blob_versions\n    )\n\n\ndef get_sub_graph_external_input_output(\n    predict_net: caffe2_pb2.NetDef, sub_graph_op_indices: List[int]\n) -> Tuple[List[Tuple[str, int]], List[Tuple[str, int]]]:\n    \"\"\"\n    Return the list of external input/output of sub-graph,\n    each element is tuple of the name and corresponding version in predict_net.\n\n    external input/output is defined the same way as caffe2 NetDef.\n    \"\"\"\n    ssa, versions = core.get_ssa(predict_net)\n\n    all_inputs = []\n    all_outputs = []\n    for op_id in sub_graph_op_indices:\n        all_inputs += [inp for inp in ssa[op_id][0] if inp not in all_inputs]\n        all_outputs += list(ssa[op_id][1])  # ssa output won't repeat\n\n    
# for versioned blobs, external inputs are just those blob in all_inputs\n    # but not in all_outputs\n    ext_inputs = [inp for inp in all_inputs if inp not in all_outputs]\n\n    # external outputs are essentially outputs of this subgraph that are used\n    # outside of this sub-graph (including predict_net.external_output)\n    all_other_inputs = sum(\n        (ssa[i][0] for i in range(len(ssa)) if i not in sub_graph_op_indices),\n        [(outp, versions[outp]) for outp in predict_net.external_output],\n    )\n    ext_outputs = [outp for outp in all_outputs if outp in set(all_other_inputs)]\n\n    return ext_inputs, ext_outputs\n\n\nclass DiGraph:\n    \"\"\" A DAG representation of caffe2 graph, each vertice is a versioned blob. \"\"\"\n\n    def __init__(self):\n        self.vertices = set()\n        self.graph = collections.defaultdict(list)\n\n    def add_edge(self, u, v):\n        self.graph[u].append(v)\n        self.vertices.add(u)\n        self.vertices.add(v)\n\n    # grab from https://www.geeksforgeeks.org/find-paths-given-source-destination/\n    def get_all_paths(self, s, d):\n        visited = {k: False for k in self.vertices}\n        path = []\n        all_paths = []\n\n        def _get_all_paths_util(graph, u, d, visited, path):\n            visited[u] = True\n            path.append(u)\n            if u == d:\n                all_paths.append(copy.deepcopy(path))\n            else:\n                for i in graph[u]:\n                    if not visited[i]:\n                        _get_all_paths_util(graph, i, d, visited, path)\n            path.pop()\n            visited[u] = False\n\n        _get_all_paths_util(self.graph, s, d, visited, path)\n        return all_paths\n\n    @staticmethod\n    def from_ssa(ssa):\n        graph = DiGraph()\n        for op_id in range(len(ssa)):\n            for inp in ssa[op_id][0]:\n                for outp in ssa[op_id][1]:\n                    graph.add_edge(inp, outp)\n        return graph\n\n\ndef 
_get_dependency_chain(ssa, versioned_target, versioned_source):\n    \"\"\"\n    Return the index list of relevant operator to produce target blob from source blob,\n        if there's no dependency, return empty list.\n    \"\"\"\n\n    # finding all paths between nodes can be O(N!), thus we can only search\n    # in the subgraph using the op starting from the first consumer of source blob\n    # to the producer of the target blob.\n    consumer_map = get_consumer_map(ssa)\n    producer_map = get_producer_map(ssa)\n    start_op = min(x[0] for x in consumer_map[versioned_source]) - 15\n    end_op = (\n        producer_map[versioned_target][0] + 15 if versioned_target in producer_map else start_op\n    )\n    sub_graph_ssa = ssa[start_op : end_op + 1]\n    if len(sub_graph_ssa) > 30:\n        logger.warning(\n            \"Subgraph bebetween {} and {} is large (from op#{} to op#{}), it\"\n            \" might take non-trival time to find all paths between them.\".format(\n                versioned_source, versioned_target, start_op, end_op\n            )\n        )\n\n    dag = DiGraph.from_ssa(sub_graph_ssa)\n    paths = dag.get_all_paths(versioned_source, versioned_target)  # include two ends\n    ops_in_paths = [[producer_map[blob][0] for blob in path[1:]] for path in paths]\n    return sorted(set().union(*[set(ops) for ops in ops_in_paths]))\n\n\ndef identify_reshape_sub_graph(predict_net: caffe2_pb2.NetDef,) -> List[List[int]]:\n    \"\"\"\n    Idenfity the reshape sub-graph in a protobuf.\n    The reshape sub-graph is defined as matching the following pattern:\n\n    (input_blob) -> Op_1 -> ... 
-> Op_N -> (new_shape) -─┐\n        └-------------------------------------------> Reshape -> (output_blob)\n\n    Return:\n        List of sub-graphs, each sub-graph is represented as a list of indices\n        of the relavent ops, [Op_1, Op_2, ..., Op_N, Reshape]\n    \"\"\"\n\n    ssa, _ = core.get_ssa(predict_net)\n\n    ret = []\n    for i, op in enumerate(predict_net.op):\n        if op.type == \"Reshape\":\n            assert len(op.input) == 2\n            input_ssa = ssa[i][0]\n            data_source = input_ssa[0]\n            shape_source = input_ssa[1]\n            op_indices = _get_dependency_chain(ssa, shape_source, data_source)\n            ret.append(op_indices + [i])\n    return ret\n\n\ndef remove_reshape_for_fc(predict_net, params):\n    \"\"\"\n    In PyTorch nn.Linear has to take 2D tensor, this often leads to reshape\n        a 4D tensor to 2D by calling .view(). However this (dynamic) reshaping\n        doesn't work well with ONNX and Int8 tools, and cause using extra\n        ops (eg. 
ExpandDims) that might not be available on mobile.\n    Luckily Caffe2 supports 4D tensor for FC, so we can remove those reshape\n        after exporting ONNX model.\n    \"\"\"\n    from caffe2.python import core\n\n    # find all reshape sub-graph that can be removed, which is now all Reshape\n    # sub-graph whose output is only consumed by FC.\n    # TODO: to make it safer, we may need the actually value to better determine\n    # if a Reshape before FC is removable.\n    reshape_sub_graphs = identify_reshape_sub_graph(predict_net)\n    sub_graphs_to_remove = []\n    for reshape_sub_graph in reshape_sub_graphs:\n        reshape_op_id = reshape_sub_graph[-1]\n        assert predict_net.op[reshape_op_id].type == \"Reshape\"\n        ssa, _ = core.get_ssa(predict_net)\n        reshape_output = ssa[reshape_op_id][1][0]\n        consumers = [i for i in range(len(ssa)) if reshape_output in ssa[i][0]]\n        if all(predict_net.op[consumer].type == \"FC\" for consumer in consumers):\n            # safety check if the sub-graph is isolated, for this reshape sub-graph,\n            # it means it has one non-param external input and one external output.\n            ext_inputs, ext_outputs = get_sub_graph_external_input_output(\n                predict_net, reshape_sub_graph\n            )\n            non_params_ext_inputs = [inp for inp in ext_inputs if inp[1] != 0]\n            if len(non_params_ext_inputs) == 1 and len(ext_outputs) == 1:\n                sub_graphs_to_remove.append(reshape_sub_graph)\n\n    # perform removing subgraph by:\n    # 1: rename the Reshape's output to its input, then the graph can be\n    #   seen as in-place itentify, meaning whose external input/output are the same.\n    # 2: simply remove those ops.\n    remove_op_ids = []\n    params_to_remove = []\n    for sub_graph in sub_graphs_to_remove:\n        logger.info(\n            \"Remove Reshape sub-graph:\\n{}\".format(\n                \"\".join([\"(#{:>4})\\n{}\".format(i, 
predict_net.op[i]) for i in sub_graph])\n            )\n        )\n        reshape_op_id = sub_graph[-1]\n        new_reshap_output = predict_net.op[reshape_op_id].input[0]\n        rename_op_output(predict_net, reshape_op_id, 0, new_reshap_output)\n        ext_inputs, ext_outputs = get_sub_graph_external_input_output(predict_net, sub_graph)\n        non_params_ext_inputs = [inp for inp in ext_inputs if inp[1] != 0]\n        params_ext_inputs = [inp for inp in ext_inputs if inp[1] == 0]\n        assert len(non_params_ext_inputs) == 1 and len(ext_outputs) == 1\n        assert ext_outputs[0][0] == non_params_ext_inputs[0][0]\n        assert ext_outputs[0][1] == non_params_ext_inputs[0][1] + 1\n        remove_op_ids.extend(sub_graph)\n        params_to_remove.extend(params_ext_inputs)\n\n    predict_net = copy.deepcopy(predict_net)\n    new_ops = [op for i, op in enumerate(predict_net.op) if i not in remove_op_ids]\n    del predict_net.op[:]\n    predict_net.op.extend(new_ops)\n    for versioned_params in params_to_remove:\n        name = versioned_params[0]\n        logger.info(\"Remove params: {} from init_net and predict_net.external_input\".format(name))\n        del params[name]\n        predict_net.external_input.remove(name)\n\n    return predict_net, params\n\n\ndef fuse_copy_between_cpu_and_gpu(predict_net: caffe2_pb2.NetDef):\n    \"\"\"\n    In-place fuse extra copy ops between cpu/gpu for the following case:\n        a -CopyAToB-> b -CopyBToA> c1 -NextOp1-> d1\n                        -CopyBToA> c2 -NextOp2-> d2\n    The fused network will look like:\n        a -NextOp1-> d1\n          -NextOp2-> d2\n    \"\"\"\n\n    _COPY_OPS = [\"CopyCPUToGPU\", \"CopyGPUToCPU\"]\n\n    def _fuse_once(predict_net):\n        ssa, blob_versions = core.get_ssa(predict_net)\n        consumer_map = get_consumer_map(ssa)\n        versioned_external_output = [\n            (name, blob_versions[name]) for name in predict_net.external_output\n        ]\n\n        for op_id, op 
in enumerate(predict_net.op):\n            if op.type in _COPY_OPS:\n                fw_copy_versioned_output = ssa[op_id][1][0]\n                consumer_ids = [x[0] for x in consumer_map[fw_copy_versioned_output]]\n                reverse_op_type = _COPY_OPS[1 - _COPY_OPS.index(op.type)]\n\n                is_fusable = (\n                    len(consumer_ids) > 0\n                    and fw_copy_versioned_output not in versioned_external_output\n                    and all(\n                        predict_net.op[_op_id].type == reverse_op_type\n                        and ssa[_op_id][1][0] not in versioned_external_output\n                        for _op_id in consumer_ids\n                    )\n                )\n\n                if is_fusable:\n                    for rv_copy_op_id in consumer_ids:\n                        # making each NextOp uses \"a\" directly and removing Copy ops\n                        rs_copy_versioned_output = ssa[rv_copy_op_id][1][0]\n                        next_op_id, inp_id = consumer_map[rs_copy_versioned_output][0]\n                        predict_net.op[next_op_id].input[inp_id] = op.input[0]\n                    # remove CopyOps\n                    new_ops = [\n                        op\n                        for i, op in enumerate(predict_net.op)\n                        if i != op_id and i not in consumer_ids\n                    ]\n                    del predict_net.op[:]\n                    predict_net.op.extend(new_ops)\n                    return True\n\n        return False\n\n    # _fuse_once returns False is nothing can be fused\n    while _fuse_once(predict_net):\n        pass\n\n\ndef remove_dead_end_ops(net_def: caffe2_pb2.NetDef):\n    \"\"\" remove ops if its output is not used or not in external_output \"\"\"\n    ssa, versions = core.get_ssa(net_def)\n    versioned_external_output = [(name, versions[name]) for name in net_def.external_output]\n    consumer_map = get_consumer_map(ssa)\n    removed_op_ids 
= set()\n\n    def _is_dead_end(versioned_blob):\n        return not (\n            versioned_blob in versioned_external_output\n            or (\n                len(consumer_map[versioned_blob]) > 0\n                and all(x[0] not in removed_op_ids for x in consumer_map[versioned_blob])\n            )\n        )\n\n    for i, ssa_i in reversed(list(enumerate(ssa))):\n        versioned_outputs = ssa_i[1]\n        if all(_is_dead_end(outp) for outp in versioned_outputs):\n            removed_op_ids.add(i)\n\n    # simply removing those deadend ops should have no effect to external_output\n    new_ops = [op for i, op in enumerate(net_def.op) if i not in removed_op_ids]\n    del net_def.op[:]\n    net_def.op.extend(new_ops)\n"
  },
  {
    "path": "detectron2/layers/__init__.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates.\nfrom .batch_norm import FrozenBatchNorm2d, get_norm, NaiveSyncBatchNorm, CycleBatchNormList\nfrom .deform_conv import DeformConv, ModulatedDeformConv\nfrom .mask_ops import paste_masks_in_image\nfrom .nms import batched_nms, batched_nms_rotated, nms, nms_rotated\nfrom .roi_align import ROIAlign, roi_align\nfrom .roi_align_rotated import ROIAlignRotated, roi_align_rotated\nfrom .shape_spec import ShapeSpec\nfrom .wrappers import (\n    BatchNorm2d,\n    Conv2d,\n    ConvTranspose2d,\n    cat,\n    interpolate,\n    Linear,\n    nonzero_tuple,\n    cross_entropy,\n    empty_input_loss_func_wrapper,\n    shapes_to_tensor,\n    move_device_like,\n)\nfrom .blocks import CNNBlockBase, DepthwiseSeparableConv2d\nfrom .aspp import ASPP\nfrom .losses import ciou_loss, diou_loss\n\n__all__ = [k for k in globals().keys() if not k.startswith(\"_\")]\n"
  },
  {
    "path": "detectron2/layers/aspp.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates.\n\nfrom copy import deepcopy\nimport fvcore.nn.weight_init as weight_init\nimport torch\nfrom torch import nn\nfrom torch.nn import functional as F\n\nfrom .batch_norm import get_norm\nfrom .blocks import DepthwiseSeparableConv2d\nfrom .wrappers import Conv2d\n\n\nclass ASPP(nn.Module):\n    \"\"\"\n    Atrous Spatial Pyramid Pooling (ASPP).\n    \"\"\"\n\n    def __init__(\n        self,\n        in_channels,\n        out_channels,\n        dilations,\n        *,\n        norm,\n        activation,\n        pool_kernel_size=None,\n        dropout: float = 0.0,\n        use_depthwise_separable_conv=False,\n    ):\n        \"\"\"\n        Args:\n            in_channels (int): number of input channels for ASPP.\n            out_channels (int): number of output channels.\n            dilations (list): a list of 3 dilations in ASPP.\n            norm (str or callable): normalization for all conv layers.\n                See :func:`layers.get_norm` for supported format. norm is\n                applied to all conv layers except the conv following\n                global average pooling.\n            activation (callable): activation function.\n            pool_kernel_size (tuple, list): the average pooling size (kh, kw)\n                for image pooling layer in ASPP. If set to None, it always\n                performs global average pooling. If not None, it must be\n                divisible by the shape of inputs in forward(). It is recommended\n                to use a fixed input feature size in training, and set this\n                option to match this size, so that it performs global average\n                pooling in training, and the size of the pooling window stays\n                consistent in inference.\n            dropout (float): apply dropout on the output of ASPP. 
It is used in\n                the official DeepLab implementation with a rate of 0.1:\n                https://github.com/tensorflow/models/blob/21b73d22f3ed05b650e85ac50849408dd36de32e/research/deeplab/model.py#L532  # noqa\n            use_depthwise_separable_conv (bool): use DepthwiseSeparableConv2d\n                for 3x3 convs in ASPP, proposed in :paper:`DeepLabV3+`.\n        \"\"\"\n        super(ASPP, self).__init__()\n        assert len(dilations) == 3, \"ASPP expects 3 dilations, got {}\".format(len(dilations))\n        self.pool_kernel_size = pool_kernel_size\n        self.dropout = dropout\n        use_bias = norm == \"\"\n        self.convs = nn.ModuleList()\n        # conv 1x1\n        self.convs.append(\n            Conv2d(\n                in_channels,\n                out_channels,\n                kernel_size=1,\n                bias=use_bias,\n                norm=get_norm(norm, out_channels),\n                activation=deepcopy(activation),\n            )\n        )\n        weight_init.c2_xavier_fill(self.convs[-1])\n        # atrous convs\n        for dilation in dilations:\n            if use_depthwise_separable_conv:\n                self.convs.append(\n                    DepthwiseSeparableConv2d(\n                        in_channels,\n                        out_channels,\n                        kernel_size=3,\n                        padding=dilation,\n                        dilation=dilation,\n                        norm1=norm,\n                        activation1=deepcopy(activation),\n                        norm2=norm,\n                        activation2=deepcopy(activation),\n                    )\n                )\n            else:\n                self.convs.append(\n                    Conv2d(\n                        in_channels,\n                        out_channels,\n                        kernel_size=3,\n                        padding=dilation,\n                        dilation=dilation,\n                        
bias=use_bias,\n                        norm=get_norm(norm, out_channels),\n                        activation=deepcopy(activation),\n                    )\n                )\n                weight_init.c2_xavier_fill(self.convs[-1])\n        # image pooling\n        # We do not add BatchNorm because the spatial resolution is 1x1,\n        # the original TF implementation has BatchNorm.\n        if pool_kernel_size is None:\n            image_pooling = nn.Sequential(\n                nn.AdaptiveAvgPool2d(1),\n                Conv2d(in_channels, out_channels, 1, bias=True, activation=deepcopy(activation)),\n            )\n        else:\n            image_pooling = nn.Sequential(\n                nn.AvgPool2d(kernel_size=pool_kernel_size, stride=1),\n                Conv2d(in_channels, out_channels, 1, bias=True, activation=deepcopy(activation)),\n            )\n        weight_init.c2_xavier_fill(image_pooling[1])\n        self.convs.append(image_pooling)\n\n        self.project = Conv2d(\n            5 * out_channels,\n            out_channels,\n            kernel_size=1,\n            bias=use_bias,\n            norm=get_norm(norm, out_channels),\n            activation=deepcopy(activation),\n        )\n        weight_init.c2_xavier_fill(self.project)\n\n    def forward(self, x):\n        size = x.shape[-2:]\n        if self.pool_kernel_size is not None:\n            if size[0] % self.pool_kernel_size[0] or size[1] % self.pool_kernel_size[1]:\n                raise ValueError(\n                    \"`pool_kernel_size` must be divisible by the shape of inputs. 
\"\n                    \"Input size: {} `pool_kernel_size`: {}\".format(size, self.pool_kernel_size)\n                )\n        res = []\n        for conv in self.convs:\n            res.append(conv(x))\n        res[-1] = F.interpolate(res[-1], size=size, mode=\"bilinear\", align_corners=False)\n        res = torch.cat(res, dim=1)\n        res = self.project(res)\n        res = F.dropout(res, self.dropout, training=self.training) if self.dropout > 0 else res\n        return res\n"
  },
  {
    "path": "detectron2/layers/batch_norm.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates.\nimport torch\nimport torch.distributed as dist\nfrom fvcore.nn.distributed import differentiable_all_reduce\nfrom torch import nn\nfrom torch.nn import functional as F\n\nfrom detectron2.utils import comm, env\n\nfrom .wrappers import BatchNorm2d\n\n\nclass FrozenBatchNorm2d(nn.Module):\n    \"\"\"\n    BatchNorm2d where the batch statistics and the affine parameters are fixed.\n\n    It contains non-trainable buffers called\n    \"weight\" and \"bias\", \"running_mean\", \"running_var\",\n    initialized to perform identity transformation.\n\n    The pre-trained backbone models from Caffe2 only contain \"weight\" and \"bias\",\n    which are computed from the original four parameters of BN.\n    The affine transform `x * weight + bias` will perform the equivalent\n    computation of `(x - running_mean) / sqrt(running_var) * weight + bias`.\n    When loading a backbone model from Caffe2, \"running_mean\" and \"running_var\"\n    will be left unchanged as identity transformation.\n\n    Other pre-trained backbone models may contain all 4 parameters.\n\n    The forward is implemented by `F.batch_norm(..., training=False)`.\n    \"\"\"\n\n    _version = 3\n\n    def __init__(self, num_features, eps=1e-5):\n        super().__init__()\n        self.num_features = num_features\n        self.eps = eps\n        self.register_buffer(\"weight\", torch.ones(num_features))\n        self.register_buffer(\"bias\", torch.zeros(num_features))\n        self.register_buffer(\"running_mean\", torch.zeros(num_features))\n        self.register_buffer(\"running_var\", torch.ones(num_features) - eps)\n\n    def forward(self, x):\n        if x.requires_grad:\n            # When gradients are needed, F.batch_norm will use extra memory\n            # because its backward op computes gradients for weight/bias as well.\n            scale = self.weight * (self.running_var + self.eps).rsqrt()\n            bias = self.bias - 
self.running_mean * scale\n            scale = scale.reshape(1, -1, 1, 1)\n            bias = bias.reshape(1, -1, 1, 1)\n            out_dtype = x.dtype  # may be half\n            return x * scale.to(out_dtype) + bias.to(out_dtype)\n        else:\n            # When gradients are not needed, F.batch_norm is a single fused op\n            # and provide more optimization opportunities.\n            return F.batch_norm(\n                x,\n                self.running_mean,\n                self.running_var,\n                self.weight,\n                self.bias,\n                training=False,\n                eps=self.eps,\n            )\n\n    def _load_from_state_dict(\n        self, state_dict, prefix, local_metadata, strict, missing_keys, unexpected_keys, error_msgs\n    ):\n        version = local_metadata.get(\"version\", None)\n\n        if version is None or version < 2:\n            # No running_mean/var in early versions\n            # This will silent the warnings\n            if prefix + \"running_mean\" not in state_dict:\n                state_dict[prefix + \"running_mean\"] = torch.zeros_like(self.running_mean)\n            if prefix + \"running_var\" not in state_dict:\n                state_dict[prefix + \"running_var\"] = torch.ones_like(self.running_var)\n\n        super()._load_from_state_dict(\n            state_dict, prefix, local_metadata, strict, missing_keys, unexpected_keys, error_msgs\n        )\n\n    def __repr__(self):\n        return \"FrozenBatchNorm2d(num_features={}, eps={})\".format(self.num_features, self.eps)\n\n    @classmethod\n    def convert_frozen_batchnorm(cls, module):\n        \"\"\"\n        Convert all BatchNorm/SyncBatchNorm in module into FrozenBatchNorm.\n\n        Args:\n            module (torch.nn.Module):\n\n        Returns:\n            If module is BatchNorm/SyncBatchNorm, returns a new module.\n            Otherwise, in-place convert module and return it.\n\n        Similar to convert_sync_batchnorm in\n  
      https://github.com/pytorch/pytorch/blob/master/torch/nn/modules/batchnorm.py\n        \"\"\"\n        bn_module = nn.modules.batchnorm\n        bn_module = (bn_module.BatchNorm2d, bn_module.SyncBatchNorm)\n        res = module\n        if isinstance(module, bn_module):\n            res = cls(module.num_features)\n            if module.affine:\n                res.weight.data = module.weight.data.clone().detach()\n                res.bias.data = module.bias.data.clone().detach()\n            res.running_mean.data = module.running_mean.data\n            res.running_var.data = module.running_var.data\n            res.eps = module.eps\n        else:\n            for name, child in module.named_children():\n                new_child = cls.convert_frozen_batchnorm(child)\n                if new_child is not child:\n                    res.add_module(name, new_child)\n        return res\n\n\ndef get_norm(norm, out_channels):\n    \"\"\"\n    Args:\n        norm (str or callable): either one of BN, SyncBN, FrozenBN, GN;\n            or a callable that takes a channel number and returns\n            the normalization layer as a nn.Module.\n\n    Returns:\n        nn.Module or None: the normalization layer\n    \"\"\"\n    if norm is None:\n        return None\n    if isinstance(norm, str):\n        if len(norm) == 0:\n            return None\n        norm = {\n            \"BN\": BatchNorm2d,\n            # Fixed in https://github.com/pytorch/pytorch/pull/36382\n            \"SyncBN\": NaiveSyncBatchNorm if env.TORCH_VERSION <= (1, 5) else nn.SyncBatchNorm,\n            \"FrozenBN\": FrozenBatchNorm2d,\n            \"GN\": lambda channels: nn.GroupNorm(32, channels),\n            # for debugging:\n            \"nnSyncBN\": nn.SyncBatchNorm,\n            \"naiveSyncBN\": NaiveSyncBatchNorm,\n            # expose stats_mode N as an option to caller, required for zero-len inputs\n            \"naiveSyncBN_N\": lambda channels: NaiveSyncBatchNorm(channels, 
stats_mode=\"N\"),\n            \"LN\": lambda channels: LayerNorm(channels),\n        }[norm]\n    return norm(out_channels)\n\n\nclass NaiveSyncBatchNorm(BatchNorm2d):\n    \"\"\"\n    In PyTorch<=1.5, ``nn.SyncBatchNorm`` has incorrect gradient\n    when the batch size on each worker is different.\n    (e.g., when scale augmentation is used, or when it is applied to mask head).\n\n    This is a slower but correct alternative to `nn.SyncBatchNorm`.\n\n    Note:\n        There isn't a single definition of Sync BatchNorm.\n\n        When ``stats_mode==\"\"``, this module computes overall statistics by using\n        statistics of each worker with equal weight.  The result is true statistics\n        of all samples (as if they are all on one worker) only when all workers\n        have the same (N, H, W). This mode does not support inputs with zero batch size.\n\n        When ``stats_mode==\"N\"``, this module computes overall statistics by weighting\n        the statistics of each worker by their ``N``. The result is true statistics\n        of all samples (as if they are all on one worker) only when all workers\n        have the same (H, W). It is slower than ``stats_mode==\"\"``.\n\n        Even though the result of this module may not be the true statistics of all samples,\n        it may still be reasonable because it might be preferrable to assign equal weights\n        to all workers, regardless of their (H, W) dimension, instead of putting larger weight\n        on larger images. 
From preliminary experiments, little difference is found between such\n        a simplified implementation and an accurate computation of overall mean & variance.\n    \"\"\"\n\n    def __init__(self, *args, stats_mode=\"\", **kwargs):\n        super().__init__(*args, **kwargs)\n        assert stats_mode in [\"\", \"N\"]\n        self._stats_mode = stats_mode\n\n    def forward(self, input):\n        if comm.get_world_size() == 1 or not self.training:\n            return super().forward(input)\n\n        B, C = input.shape[0], input.shape[1]\n\n        half_input = input.dtype == torch.float16\n        if half_input:\n            # fp16 does not have good enough numerics for the reduction here\n            input = input.float()\n        mean = torch.mean(input, dim=[0, 2, 3])\n        meansqr = torch.mean(input * input, dim=[0, 2, 3])\n\n        if self._stats_mode == \"\":\n            assert B > 0, 'SyncBatchNorm(stats_mode=\"\") does not support zero batch size.'\n            vec = torch.cat([mean, meansqr], dim=0)\n            vec = differentiable_all_reduce(vec) * (1.0 / dist.get_world_size())\n            mean, meansqr = torch.split(vec, C)\n            momentum = self.momentum\n        else:\n            if B == 0:\n                vec = torch.zeros([2 * C + 1], device=mean.device, dtype=mean.dtype)\n                vec = vec + input.sum()  # make sure there is gradient w.r.t input\n            else:\n                vec = torch.cat(\n                    [mean, meansqr, torch.ones([1], device=mean.device, dtype=mean.dtype)], dim=0\n                )\n            vec = differentiable_all_reduce(vec * B)\n\n            total_batch = vec[-1].detach()\n            momentum = total_batch.clamp(max=1) * self.momentum  # no update if total_batch is 0\n            mean, meansqr, _ = torch.split(vec / total_batch.clamp(min=1), C)  # avoid div-by-zero\n\n        var = meansqr - mean * mean\n        invstd = torch.rsqrt(var + self.eps)\n        scale = self.weight * 
invstd\n        bias = self.bias - mean * scale\n        scale = scale.reshape(1, -1, 1, 1)\n        bias = bias.reshape(1, -1, 1, 1)\n\n        self.running_mean += momentum * (mean.detach() - self.running_mean)\n        self.running_var += momentum * (var.detach() - self.running_var)\n        ret = input * scale + bias\n        if half_input:\n            ret = ret.half()\n        return ret\n\n\nclass CycleBatchNormList(nn.ModuleList):\n    \"\"\"\n    Implement domain-specific BatchNorm by cycling.\n\n    When a BatchNorm layer is used for multiple input domains or input\n    features, it might need to maintain a separate test-time statistics\n    for each domain. See Sec 5.2 in :paper:`rethinking-batchnorm`.\n\n    This module implements it by using N separate BN layers\n    and it cycles through them every time a forward() is called.\n\n    NOTE: The caller of this module MUST guarantee to always call\n    this module by multiple of N times. Otherwise its test-time statistics\n    will be incorrect.\n    \"\"\"\n\n    def __init__(self, length: int, bn_class=nn.BatchNorm2d, **kwargs):\n        \"\"\"\n        Args:\n            length: number of BatchNorm layers to cycle.\n            bn_class: the BatchNorm class to use\n            kwargs: arguments of the BatchNorm class, such as num_features.\n        \"\"\"\n        self._affine = kwargs.pop(\"affine\", True)\n        super().__init__([bn_class(**kwargs, affine=False) for k in range(length)])\n        if self._affine:\n            # shared affine, domain-specific BN\n            channels = self[0].num_features\n            self.weight = nn.Parameter(torch.ones(channels))\n            self.bias = nn.Parameter(torch.zeros(channels))\n        self._pos = 0\n\n    def forward(self, x):\n        ret = self[self._pos](x)\n        self._pos = (self._pos + 1) % len(self)\n\n        if self._affine:\n            w = self.weight.reshape(1, -1, 1, 1)\n            b = self.bias.reshape(1, -1, 1, 1)\n            
return ret * w + b\n        else:\n            return ret\n\n    def extra_repr(self):\n        return f\"affine={self._affine}\"\n\n\nclass LayerNorm(nn.Module):\n    \"\"\"\n    A LayerNorm variant, popularized by Transformers, that performs point-wise mean and\n    variance normalization over the channel dimension for inputs that have shape\n    (batch_size, channels, height, width).\n    https://github.com/facebookresearch/ConvNeXt/blob/d1fa8f6fef0a165b27399986cc2bdacc92777e40/models/convnext.py#L119  # noqa B950\n    \"\"\"\n\n    def __init__(self, normalized_shape, eps=1e-6):\n        super().__init__()\n        self.weight = nn.Parameter(torch.ones(normalized_shape))\n        self.bias = nn.Parameter(torch.zeros(normalized_shape))\n        self.eps = eps\n        self.normalized_shape = (normalized_shape,)\n\n    def forward(self, x):\n        u = x.mean(1, keepdim=True)\n        s = (x - u).pow(2).mean(1, keepdim=True)\n        x = (x - u) / torch.sqrt(s + self.eps)\n        x = self.weight[:, None, None] * x + self.bias[:, None, None]\n        return x\n"
  },
  {
    "path": "detectron2/layers/blocks.py",
    "content": "# -*- coding: utf-8 -*-\n# Copyright (c) Facebook, Inc. and its affiliates.\n\nimport fvcore.nn.weight_init as weight_init\nfrom torch import nn\n\nfrom .batch_norm import FrozenBatchNorm2d, get_norm\nfrom .wrappers import Conv2d\n\n\n\"\"\"\nCNN building blocks.\n\"\"\"\n\n\nclass CNNBlockBase(nn.Module):\n    \"\"\"\n    A CNN block is assumed to have input channels, output channels and a stride.\n    The input and output of `forward()` method must be NCHW tensors.\n    The method can perform arbitrary computation but must match the given\n    channels and stride specification.\n\n    Attribute:\n        in_channels (int):\n        out_channels (int):\n        stride (int):\n    \"\"\"\n\n    def __init__(self, in_channels, out_channels, stride):\n        \"\"\"\n        The `__init__` method of any subclass should also contain these arguments.\n\n        Args:\n            in_channels (int):\n            out_channels (int):\n            stride (int):\n        \"\"\"\n        super().__init__()\n        self.in_channels = in_channels\n        self.out_channels = out_channels\n        self.stride = stride\n\n    def freeze(self):\n        \"\"\"\n        Make this block not trainable.\n        This method sets all parameters to `requires_grad=False`,\n        and convert all BatchNorm layers to FrozenBatchNorm\n\n        Returns:\n            the block itself\n        \"\"\"\n        for p in self.parameters():\n            p.requires_grad = False\n        FrozenBatchNorm2d.convert_frozen_batchnorm(self)\n        return self\n\n\nclass DepthwiseSeparableConv2d(nn.Module):\n    \"\"\"\n    A kxk depthwise convolution + a 1x1 convolution.\n\n    In :paper:`xception`, norm & activation are applied on the second conv.\n    :paper:`mobilenet` uses norm & activation on both convs.\n    \"\"\"\n\n    def __init__(\n        self,\n        in_channels,\n        out_channels,\n        kernel_size=3,\n        padding=1,\n        dilation=1,\n        *,\n      
  norm1=None,\n        activation1=None,\n        norm2=None,\n        activation2=None,\n    ):\n        \"\"\"\n        Args:\n            norm1, norm2 (str or callable): normalization for the two conv layers.\n            activation1, activation2 (callable(Tensor) -> Tensor): activation\n                function for the two conv layers.\n        \"\"\"\n        super().__init__()\n        self.depthwise = Conv2d(\n            in_channels,\n            in_channels,\n            kernel_size=kernel_size,\n            padding=padding,\n            dilation=dilation,\n            groups=in_channels,\n            bias=not norm1,\n            norm=get_norm(norm1, in_channels),\n            activation=activation1,\n        )\n        self.pointwise = Conv2d(\n            in_channels,\n            out_channels,\n            kernel_size=1,\n            bias=not norm2,\n            norm=get_norm(norm2, out_channels),\n            activation=activation2,\n        )\n\n        # default initialization\n        weight_init.c2_msra_fill(self.depthwise)\n        weight_init.c2_msra_fill(self.pointwise)\n\n    def forward(self, x):\n        return self.pointwise(self.depthwise(x))\n"
  },
  {
    "path": "detectron2/layers/csrc/README.md",
    "content": "\n\nTo add a new Op:\n\n1. Create a new directory\n2. Implement new ops there\n3. Delcare its Python interface in `vision.cpp`.\n"
  },
  {
    "path": "detectron2/layers/csrc/ROIAlignRotated/ROIAlignRotated.h",
    "content": "// Copyright (c) Facebook, Inc. and its affiliates.\n#pragma once\n#include <torch/types.h>\n\nnamespace detectron2 {\n\nat::Tensor ROIAlignRotated_forward_cpu(\n    const at::Tensor& input,\n    const at::Tensor& rois,\n    const float spatial_scale,\n    const int pooled_height,\n    const int pooled_width,\n    const int sampling_ratio);\n\nat::Tensor ROIAlignRotated_backward_cpu(\n    const at::Tensor& grad,\n    const at::Tensor& rois,\n    const float spatial_scale,\n    const int pooled_height,\n    const int pooled_width,\n    const int batch_size,\n    const int channels,\n    const int height,\n    const int width,\n    const int sampling_ratio);\n\n#if defined(WITH_CUDA) || defined(WITH_HIP)\nat::Tensor ROIAlignRotated_forward_cuda(\n    const at::Tensor& input,\n    const at::Tensor& rois,\n    const float spatial_scale,\n    const int pooled_height,\n    const int pooled_width,\n    const int sampling_ratio);\n\nat::Tensor ROIAlignRotated_backward_cuda(\n    const at::Tensor& grad,\n    const at::Tensor& rois,\n    const float spatial_scale,\n    const int pooled_height,\n    const int pooled_width,\n    const int batch_size,\n    const int channels,\n    const int height,\n    const int width,\n    const int sampling_ratio);\n#endif\n\n// Interface for Python\ninline at::Tensor ROIAlignRotated_forward(\n    const at::Tensor& input,\n    const at::Tensor& rois,\n    const double spatial_scale,\n    const int64_t pooled_height,\n    const int64_t pooled_width,\n    const int64_t sampling_ratio) {\n  if (input.is_cuda()) {\n#if defined(WITH_CUDA) || defined(WITH_HIP)\n    return ROIAlignRotated_forward_cuda(\n        input,\n        rois,\n        spatial_scale,\n        pooled_height,\n        pooled_width,\n        sampling_ratio);\n#else\n    AT_ERROR(\"Detectron2 is not compiled with GPU support!\");\n#endif\n  }\n  return ROIAlignRotated_forward_cpu(\n      input, rois, spatial_scale, pooled_height, pooled_width, 
sampling_ratio);\n}\n\ninline at::Tensor ROIAlignRotated_backward(\n    const at::Tensor& grad,\n    const at::Tensor& rois,\n    const double spatial_scale,\n    const int64_t pooled_height,\n    const int64_t pooled_width,\n    const int64_t batch_size,\n    const int64_t channels,\n    const int64_t height,\n    const int64_t width,\n    const int64_t sampling_ratio) {\n  if (grad.is_cuda()) {\n#if defined(WITH_CUDA) || defined(WITH_HIP)\n    return ROIAlignRotated_backward_cuda(\n        grad,\n        rois,\n        spatial_scale,\n        pooled_height,\n        pooled_width,\n        batch_size,\n        channels,\n        height,\n        width,\n        sampling_ratio);\n#else\n    AT_ERROR(\"Detectron2 is not compiled with GPU support!\");\n#endif\n  }\n  return ROIAlignRotated_backward_cpu(\n      grad,\n      rois,\n      spatial_scale,\n      pooled_height,\n      pooled_width,\n      batch_size,\n      channels,\n      height,\n      width,\n      sampling_ratio);\n}\n\n} // namespace detectron2\n"
  },
  {
    "path": "detectron2/layers/csrc/ROIAlignRotated/ROIAlignRotated_cpu.cpp",
    "content": "// Copyright (c) Facebook, Inc. and its affiliates.\n#include <ATen/TensorUtils.h>\n#include \"ROIAlignRotated.h\"\n\n// Note: this implementation originates from the Caffe2 ROIAlignRotated Op\n// and PyTorch ROIAlign (non-rotated) Op implementations.\n// The key difference between this implementation and those ones is\n// we don't do \"legacy offset\" in this version, as there aren't many previous\n// works, if any, using the \"legacy\" ROIAlignRotated Op.\n// This would make the interface a bit cleaner.\n\nnamespace detectron2 {\n\nnamespace {\ntemplate <typename T>\nstruct PreCalc {\n  int pos1;\n  int pos2;\n  int pos3;\n  int pos4;\n  T w1;\n  T w2;\n  T w3;\n  T w4;\n};\n\ntemplate <typename T>\nvoid pre_calc_for_bilinear_interpolate(\n    const int height,\n    const int width,\n    const int pooled_height,\n    const int pooled_width,\n    const int iy_upper,\n    const int ix_upper,\n    T roi_start_h,\n    T roi_start_w,\n    T bin_size_h,\n    T bin_size_w,\n    int roi_bin_grid_h,\n    int roi_bin_grid_w,\n    T roi_center_h,\n    T roi_center_w,\n    T cos_theta,\n    T sin_theta,\n    std::vector<PreCalc<T>>& pre_calc) {\n  int pre_calc_index = 0;\n  for (int ph = 0; ph < pooled_height; ph++) {\n    for (int pw = 0; pw < pooled_width; pw++) {\n      for (int iy = 0; iy < iy_upper; iy++) {\n        const T yy = roi_start_h + ph * bin_size_h +\n            static_cast<T>(iy + .5f) * bin_size_h /\n                static_cast<T>(roi_bin_grid_h); // e.g., 0.5, 1.5\n        for (int ix = 0; ix < ix_upper; ix++) {\n          const T xx = roi_start_w + pw * bin_size_w +\n              static_cast<T>(ix + .5f) * bin_size_w /\n                  static_cast<T>(roi_bin_grid_w);\n\n          // Rotate by theta around the center and translate\n          // In image space, (y, x) is the order for Right Handed System,\n          // and this is essentially multiplying the point by a rotation matrix\n          // to rotate it counterclockwise through 
angle theta.\n          T y = yy * cos_theta - xx * sin_theta + roi_center_h;\n          T x = yy * sin_theta + xx * cos_theta + roi_center_w;\n          // deal with: inverse elements are out of feature map boundary\n          if (y < -1.0 || y > height || x < -1.0 || x > width) {\n            // empty\n            PreCalc<T> pc;\n            pc.pos1 = 0;\n            pc.pos2 = 0;\n            pc.pos3 = 0;\n            pc.pos4 = 0;\n            pc.w1 = 0;\n            pc.w2 = 0;\n            pc.w3 = 0;\n            pc.w4 = 0;\n            pre_calc[pre_calc_index] = pc;\n            pre_calc_index += 1;\n            continue;\n          }\n\n          if (y < 0) {\n            y = 0;\n          }\n          if (x < 0) {\n            x = 0;\n          }\n\n          int y_low = (int)y;\n          int x_low = (int)x;\n          int y_high;\n          int x_high;\n\n          if (y_low >= height - 1) {\n            y_high = y_low = height - 1;\n            y = (T)y_low;\n          } else {\n            y_high = y_low + 1;\n          }\n\n          if (x_low >= width - 1) {\n            x_high = x_low = width - 1;\n            x = (T)x_low;\n          } else {\n            x_high = x_low + 1;\n          }\n\n          T ly = y - y_low;\n          T lx = x - x_low;\n          T hy = 1. - ly, hx = 1. 
- lx;\n          T w1 = hy * hx, w2 = hy * lx, w3 = ly * hx, w4 = ly * lx;\n\n          // save weights and indices\n          PreCalc<T> pc;\n          pc.pos1 = y_low * width + x_low;\n          pc.pos2 = y_low * width + x_high;\n          pc.pos3 = y_high * width + x_low;\n          pc.pos4 = y_high * width + x_high;\n          pc.w1 = w1;\n          pc.w2 = w2;\n          pc.w3 = w3;\n          pc.w4 = w4;\n          pre_calc[pre_calc_index] = pc;\n\n          pre_calc_index += 1;\n        }\n      }\n    }\n  }\n}\n\ntemplate <typename T>\nvoid bilinear_interpolate_gradient(\n    const int height,\n    const int width,\n    T y,\n    T x,\n    T& w1,\n    T& w2,\n    T& w3,\n    T& w4,\n    int& x_low,\n    int& x_high,\n    int& y_low,\n    int& y_high) {\n  // deal with cases that inverse elements are out of feature map boundary\n  if (y < -1.0 || y > height || x < -1.0 || x > width) {\n    // empty\n    w1 = w2 = w3 = w4 = 0.;\n    x_low = x_high = y_low = y_high = -1;\n    return;\n  }\n\n  if (y < 0) {\n    y = 0;\n  }\n\n  if (x < 0) {\n    x = 0;\n  }\n\n  y_low = (int)y;\n  x_low = (int)x;\n\n  if (y_low >= height - 1) {\n    y_high = y_low = height - 1;\n    y = (T)y_low;\n  } else {\n    y_high = y_low + 1;\n  }\n\n  if (x_low >= width - 1) {\n    x_high = x_low = width - 1;\n    x = (T)x_low;\n  } else {\n    x_high = x_low + 1;\n  }\n\n  T ly = y - y_low;\n  T lx = x - x_low;\n  T hy = 1. - ly, hx = 1. 
- lx;\n\n  // reference in forward\n  // T v1 = input[y_low * width + x_low];\n  // T v2 = input[y_low * width + x_high];\n  // T v3 = input[y_high * width + x_low];\n  // T v4 = input[y_high * width + x_high];\n  // T val = (w1 * v1 + w2 * v2 + w3 * v3 + w4 * v4);\n\n  w1 = hy * hx, w2 = hy * lx, w3 = ly * hx, w4 = ly * lx;\n\n  return;\n}\n\ntemplate <class T>\ninline void add(T* address, const T& val) {\n  *address += val;\n}\n\n} // namespace\n\ntemplate <typename T>\nvoid ROIAlignRotatedForward(\n    const int nthreads,\n    const T* input,\n    const T& spatial_scale,\n    const int channels,\n    const int height,\n    const int width,\n    const int pooled_height,\n    const int pooled_width,\n    const int sampling_ratio,\n    const T* rois,\n    T* output) {\n  int n_rois = nthreads / channels / pooled_width / pooled_height;\n  // (n, c, ph, pw) is an element in the pooled output\n  // can be parallelized using omp\n  // #pragma omp parallel for num_threads(32)\n  for (int n = 0; n < n_rois; n++) {\n    int index_n = n * channels * pooled_width * pooled_height;\n\n    const T* current_roi = rois + n * 6;\n    int roi_batch_ind = current_roi[0];\n\n    // Do not use rounding; this implementation detail is critical\n    // ROIAlignRotated supports align == true, i.e., continuous coordinate\n    // by default, thus the 0.5 offset\n    T offset = (T)0.5;\n    T roi_center_w = current_roi[1] * spatial_scale - offset;\n    T roi_center_h = current_roi[2] * spatial_scale - offset;\n    T roi_width = current_roi[3] * spatial_scale;\n    T roi_height = current_roi[4] * spatial_scale;\n    T theta = current_roi[5] * M_PI / 180.0;\n    T cos_theta = cos(theta);\n    T sin_theta = sin(theta);\n\n    AT_ASSERTM(\n        roi_width >= 0 && roi_height >= 0,\n        \"ROIs in ROIAlignRotated do not have non-negative size!\");\n\n    T bin_size_h = static_cast<T>(roi_height) / static_cast<T>(pooled_height);\n    T bin_size_w = static_cast<T>(roi_width) / 
static_cast<T>(pooled_width);\n\n    // We use roi_bin_grid to sample the grid and mimic integral\n    int roi_bin_grid_h = (sampling_ratio > 0)\n        ? sampling_ratio\n        : ceil(roi_height / pooled_height); // e.g., = 2\n    int roi_bin_grid_w =\n        (sampling_ratio > 0) ? sampling_ratio : ceil(roi_width / pooled_width);\n\n    // We do average (integral) pooling inside a bin\n    const T count = std::max(roi_bin_grid_h * roi_bin_grid_w, 1); // e.g. = 4\n\n    // we want to precalculate indices and weights shared by all channels,\n    // this is the key point of optimization\n    std::vector<PreCalc<T>> pre_calc(\n        roi_bin_grid_h * roi_bin_grid_w * pooled_width * pooled_height);\n\n    // roi_start_h and roi_start_w are computed wrt the center of RoI (x, y).\n    // Appropriate translation needs to be applied after.\n    T roi_start_h = -roi_height / 2.0;\n    T roi_start_w = -roi_width / 2.0;\n\n    pre_calc_for_bilinear_interpolate(\n        height,\n        width,\n        pooled_height,\n        pooled_width,\n        roi_bin_grid_h,\n        roi_bin_grid_w,\n        roi_start_h,\n        roi_start_w,\n        bin_size_h,\n        bin_size_w,\n        roi_bin_grid_h,\n        roi_bin_grid_w,\n        roi_center_h,\n        roi_center_w,\n        cos_theta,\n        sin_theta,\n        pre_calc);\n\n    for (int c = 0; c < channels; c++) {\n      int index_n_c = index_n + c * pooled_width * pooled_height;\n      const T* offset_input =\n          input + (roi_batch_ind * channels + c) * height * width;\n      int pre_calc_index = 0;\n\n      for (int ph = 0; ph < pooled_height; ph++) {\n        for (int pw = 0; pw < pooled_width; pw++) {\n          int index = index_n_c + ph * pooled_width + pw;\n\n          T output_val = 0.;\n          for (int iy = 0; iy < roi_bin_grid_h; iy++) {\n            for (int ix = 0; ix < roi_bin_grid_w; ix++) {\n              PreCalc<T> pc = pre_calc[pre_calc_index];\n              output_val += pc.w1 * 
offset_input[pc.pos1] +\n                  pc.w2 * offset_input[pc.pos2] +\n                  pc.w3 * offset_input[pc.pos3] + pc.w4 * offset_input[pc.pos4];\n\n              pre_calc_index += 1;\n            }\n          }\n          output_val /= count;\n\n          output[index] = output_val;\n        } // for pw\n      } // for ph\n    } // for c\n  } // for n\n}\n\ntemplate <typename T>\nvoid ROIAlignRotatedBackward(\n    const int nthreads,\n    // may not be contiguous. should index using n_stride, etc\n    const T* grad_output,\n    const T& spatial_scale,\n    const int channels,\n    const int height,\n    const int width,\n    const int pooled_height,\n    const int pooled_width,\n    const int sampling_ratio,\n    T* grad_input,\n    const T* rois,\n    const int n_stride,\n    const int c_stride,\n    const int h_stride,\n    const int w_stride) {\n  for (int index = 0; index < nthreads; index++) {\n    // (n, c, ph, pw) is an element in the pooled output\n    int pw = index % pooled_width;\n    int ph = (index / pooled_width) % pooled_height;\n    int c = (index / pooled_width / pooled_height) % channels;\n    int n = index / pooled_width / pooled_height / channels;\n\n    const T* current_roi = rois + n * 6;\n    int roi_batch_ind = current_roi[0];\n\n    // Do not use rounding; this implementation detail is critical\n    // ROIAlignRotated supports align == true, i.e., continuous coordinate\n    // by default, thus the 0.5 offset\n    T offset = (T)0.5;\n    T roi_center_w = current_roi[1] * spatial_scale - offset;\n    T roi_center_h = current_roi[2] * spatial_scale - offset;\n    T roi_width = current_roi[3] * spatial_scale;\n    T roi_height = current_roi[4] * spatial_scale;\n    T theta = current_roi[5] * M_PI / 180.0;\n    T cos_theta = cos(theta);\n    T sin_theta = sin(theta);\n\n    AT_ASSERTM(\n        roi_width >= 0 && roi_height >= 0,\n        \"ROIs in ROIAlignRotated do not have non-negative size!\");\n\n    T bin_size_h = 
static_cast<T>(roi_height) / static_cast<T>(pooled_height);\n    T bin_size_w = static_cast<T>(roi_width) / static_cast<T>(pooled_width);\n\n    T* offset_grad_input =\n        grad_input + ((roi_batch_ind * channels + c) * height * width);\n\n    int output_offset = n * n_stride + c * c_stride;\n    const T* offset_grad_output = grad_output + output_offset;\n    const T grad_output_this_bin =\n        offset_grad_output[ph * h_stride + pw * w_stride];\n\n    // We use roi_bin_grid to sample the grid and mimic integral\n    int roi_bin_grid_h = (sampling_ratio > 0)\n        ? sampling_ratio\n        : ceil(roi_height / pooled_height); // e.g., = 2\n    int roi_bin_grid_w =\n        (sampling_ratio > 0) ? sampling_ratio : ceil(roi_width / pooled_width);\n\n    // roi_start_h and roi_start_w are computed wrt the center of RoI (x, y).\n    // Appropriate translation needs to be applied after.\n    T roi_start_h = -roi_height / 2.0;\n    T roi_start_w = -roi_width / 2.0;\n\n    // We do average (integral) pooling inside a bin\n    const T count = roi_bin_grid_h * roi_bin_grid_w; // e.g. 
= 4\n\n    for (int iy = 0; iy < roi_bin_grid_h; iy++) {\n      const T yy = roi_start_h + ph * bin_size_h +\n          static_cast<T>(iy + .5f) * bin_size_h /\n              static_cast<T>(roi_bin_grid_h); // e.g., 0.5, 1.5\n      for (int ix = 0; ix < roi_bin_grid_w; ix++) {\n        const T xx = roi_start_w + pw * bin_size_w +\n            static_cast<T>(ix + .5f) * bin_size_w /\n                static_cast<T>(roi_bin_grid_w);\n\n        // Rotate by theta around the center and translate\n        T y = yy * cos_theta - xx * sin_theta + roi_center_h;\n        T x = yy * sin_theta + xx * cos_theta + roi_center_w;\n\n        T w1, w2, w3, w4;\n        int x_low, x_high, y_low, y_high;\n\n        bilinear_interpolate_gradient(\n            height, width, y, x, w1, w2, w3, w4, x_low, x_high, y_low, y_high);\n\n        T g1 = grad_output_this_bin * w1 / count;\n        T g2 = grad_output_this_bin * w2 / count;\n        T g3 = grad_output_this_bin * w3 / count;\n        T g4 = grad_output_this_bin * w4 / count;\n\n        if (x_low >= 0 && x_high >= 0 && y_low >= 0 && y_high >= 0) {\n          // atomic add is not needed for now since it is single threaded\n          add(offset_grad_input + y_low * width + x_low, static_cast<T>(g1));\n          add(offset_grad_input + y_low * width + x_high, static_cast<T>(g2));\n          add(offset_grad_input + y_high * width + x_low, static_cast<T>(g3));\n          add(offset_grad_input + y_high * width + x_high, static_cast<T>(g4));\n        } // if\n      } // ix\n    } // iy\n  } // for\n} // ROIAlignRotatedBackward\n\nat::Tensor ROIAlignRotated_forward_cpu(\n    const at::Tensor& input,\n    const at::Tensor& rois,\n    const float spatial_scale,\n    const int pooled_height,\n    const int pooled_width,\n    const int sampling_ratio) {\n  AT_ASSERTM(input.device().is_cpu(), \"input must be a CPU tensor\");\n  AT_ASSERTM(rois.device().is_cpu(), \"rois must be a CPU tensor\");\n\n  at::TensorArg input_t{input, \"input\", 1}, 
rois_t{rois, \"rois\", 2};\n\n  at::CheckedFrom c = \"ROIAlign_forward_cpu\";\n  at::checkAllSameType(c, {input_t, rois_t});\n\n  auto num_rois = rois.size(0);\n  auto channels = input.size(1);\n  auto height = input.size(2);\n  auto width = input.size(3);\n\n  at::Tensor output = at::zeros(\n      {num_rois, channels, pooled_height, pooled_width}, input.options());\n\n  auto output_size = num_rois * pooled_height * pooled_width * channels;\n\n  if (output.numel() == 0) {\n    return output;\n  }\n\n  auto input_ = input.contiguous(), rois_ = rois.contiguous();\n  AT_DISPATCH_FLOATING_TYPES_AND_HALF(\n      input.scalar_type(), \"ROIAlignRotated_forward\", [&] {\n        ROIAlignRotatedForward<scalar_t>(\n            output_size,\n            input_.data_ptr<scalar_t>(),\n            spatial_scale,\n            channels,\n            height,\n            width,\n            pooled_height,\n            pooled_width,\n            sampling_ratio,\n            rois_.data_ptr<scalar_t>(),\n            output.data_ptr<scalar_t>());\n      });\n  return output;\n}\n\nat::Tensor ROIAlignRotated_backward_cpu(\n    const at::Tensor& grad,\n    const at::Tensor& rois,\n    const float spatial_scale,\n    const int pooled_height,\n    const int pooled_width,\n    const int batch_size,\n    const int channels,\n    const int height,\n    const int width,\n    const int sampling_ratio) {\n  AT_ASSERTM(grad.device().is_cpu(), \"grad must be a CPU tensor\");\n  AT_ASSERTM(rois.device().is_cpu(), \"rois must be a CPU tensor\");\n\n  at::TensorArg grad_t{grad, \"grad\", 1}, rois_t{rois, \"rois\", 2};\n\n  at::CheckedFrom c = \"ROIAlignRotated_backward_cpu\";\n  at::checkAllSameType(c, {grad_t, rois_t});\n\n  at::Tensor grad_input =\n      at::zeros({batch_size, channels, height, width}, grad.options());\n\n  // handle possibly empty gradients\n  if (grad.numel() == 0) {\n    return grad_input;\n  }\n\n  // get stride values to ensure indexing into gradients is correct.\n  int 
n_stride = grad.stride(0);\n  int c_stride = grad.stride(1);\n  int h_stride = grad.stride(2);\n  int w_stride = grad.stride(3);\n\n  auto rois_ = rois.contiguous();\n  AT_DISPATCH_FLOATING_TYPES_AND_HALF(\n      grad.scalar_type(), \"ROIAlignRotated_forward\", [&] {\n        ROIAlignRotatedBackward<scalar_t>(\n            grad.numel(),\n            grad.data_ptr<scalar_t>(),\n            spatial_scale,\n            channels,\n            height,\n            width,\n            pooled_height,\n            pooled_width,\n            sampling_ratio,\n            grad_input.data_ptr<scalar_t>(),\n            rois_.data_ptr<scalar_t>(),\n            n_stride,\n            c_stride,\n            h_stride,\n            w_stride);\n      });\n  return grad_input;\n}\n\n} // namespace detectron2\n"
  },
  {
    "path": "detectron2/layers/csrc/ROIAlignRotated/ROIAlignRotated_cuda.cu",
    "content": "// Copyright (c) Facebook, Inc. and its affiliates.\n#include <ATen/ATen.h>\n#include <ATen/cuda/CUDAContext.h>\n#include <c10/cuda/CUDAGuard.h>\n#include <ATen/cuda/CUDAApplyUtils.cuh>\n\n// TODO make it in a common file\n#define CUDA_1D_KERNEL_LOOP(i, n)                            \\\n  for (int i = blockIdx.x * blockDim.x + threadIdx.x; i < n; \\\n       i += blockDim.x * gridDim.x)\n\n// Note: this implementation originates from the Caffe2 ROIAlignRotated Op\n// and PyTorch ROIAlign (non-rotated) Op implementations.\n// The key difference between this implementation and those ones is\n// we don't do \"legacy offset\" in this version, as there aren't many previous\n// works, if any, using the \"legacy\" ROIAlignRotated Op.\n// This would make the interface a bit cleaner.\n\nnamespace detectron2 {\n\nnamespace {\n\ntemplate <typename T>\n__device__ T bilinear_interpolate(\n    const T* input,\n    const int height,\n    const int width,\n    T y,\n    T x) {\n  // deal with cases that inverse elements are out of feature map boundary\n  if (y < -1.0 || y > height || x < -1.0 || x > width) {\n    // empty\n    return 0;\n  }\n\n  if (y < 0) {\n    y = 0;\n  }\n\n  if (x < 0) {\n    x = 0;\n  }\n\n  int y_low = (int)y;\n  int x_low = (int)x;\n  int y_high;\n  int x_high;\n\n  if (y_low >= height - 1) {\n    y_high = y_low = height - 1;\n    y = (T)y_low;\n  } else {\n    y_high = y_low + 1;\n  }\n\n  if (x_low >= width - 1) {\n    x_high = x_low = width - 1;\n    x = (T)x_low;\n  } else {\n    x_high = x_low + 1;\n  }\n\n  T ly = y - y_low;\n  T lx = x - x_low;\n  T hy = 1. - ly, hx = 1. 
- lx;\n  // do bilinear interpolation\n  T v1 = input[y_low * width + x_low];\n  T v2 = input[y_low * width + x_high];\n  T v3 = input[y_high * width + x_low];\n  T v4 = input[y_high * width + x_high];\n  T w1 = hy * hx, w2 = hy * lx, w3 = ly * hx, w4 = ly * lx;\n\n  T val = (w1 * v1 + w2 * v2 + w3 * v3 + w4 * v4);\n\n  return val;\n}\n\ntemplate <typename T>\n__device__ void bilinear_interpolate_gradient(\n    const int height,\n    const int width,\n    T y,\n    T x,\n    T& w1,\n    T& w2,\n    T& w3,\n    T& w4,\n    int& x_low,\n    int& x_high,\n    int& y_low,\n    int& y_high) {\n  // deal with cases that inverse elements are out of feature map boundary\n  if (y < -1.0 || y > height || x < -1.0 || x > width) {\n    // empty\n    w1 = w2 = w3 = w4 = 0.;\n    x_low = x_high = y_low = y_high = -1;\n    return;\n  }\n\n  if (y < 0) {\n    y = 0;\n  }\n\n  if (x < 0) {\n    x = 0;\n  }\n\n  y_low = (int)y;\n  x_low = (int)x;\n\n  if (y_low >= height - 1) {\n    y_high = y_low = height - 1;\n    y = (T)y_low;\n  } else {\n    y_high = y_low + 1;\n  }\n\n  if (x_low >= width - 1) {\n    x_high = x_low = width - 1;\n    x = (T)x_low;\n  } else {\n    x_high = x_low + 1;\n  }\n\n  T ly = y - y_low;\n  T lx = x - x_low;\n  T hy = 1. - ly, hx = 1. 
- lx;\n\n  // reference in forward\n  // T v1 = input[y_low * width + x_low];\n  // T v2 = input[y_low * width + x_high];\n  // T v3 = input[y_high * width + x_low];\n  // T v4 = input[y_high * width + x_high];\n  // T val = (w1 * v1 + w2 * v2 + w3 * v3 + w4 * v4);\n\n  w1 = hy * hx, w2 = hy * lx, w3 = ly * hx, w4 = ly * lx;\n\n  return;\n}\n\n} // namespace\n\ntemplate <typename T>\n__global__ void RoIAlignRotatedForward(\n    const int nthreads,\n    const T* input,\n    const T spatial_scale,\n    const int channels,\n    const int height,\n    const int width,\n    const int pooled_height,\n    const int pooled_width,\n    const int sampling_ratio,\n    const T* rois,\n    T* top_data) {\n  CUDA_1D_KERNEL_LOOP(index, nthreads) {\n    // (n, c, ph, pw) is an element in the pooled output\n    int pw = index % pooled_width;\n    int ph = (index / pooled_width) % pooled_height;\n    int c = (index / pooled_width / pooled_height) % channels;\n    int n = index / pooled_width / pooled_height / channels;\n\n    const T* current_roi = rois + n * 6;\n    int roi_batch_ind = current_roi[0];\n\n    // Do not use rounding; this implementation detail is critical\n    // ROIAlignRotated supports align == true, i.e., continuous coordinate\n    // by default, thus the 0.5 offset\n    T offset = (T)0.5;\n    T roi_center_w = current_roi[1] * spatial_scale - offset;\n    T roi_center_h = current_roi[2] * spatial_scale - offset;\n    T roi_width = current_roi[3] * spatial_scale;\n    T roi_height = current_roi[4] * spatial_scale;\n    T theta = current_roi[5] * M_PI / 180.0;\n    T cos_theta = cos(theta);\n    T sin_theta = sin(theta);\n\n    T bin_size_h = static_cast<T>(roi_height) / static_cast<T>(pooled_height);\n    T bin_size_w = static_cast<T>(roi_width) / static_cast<T>(pooled_width);\n\n    const T* offset_input =\n        input + (roi_batch_ind * channels + c) * height * width;\n\n    // We use roi_bin_grid to sample the grid and mimic integral\n    int roi_bin_grid_h = 
(sampling_ratio > 0)\n        ? sampling_ratio\n        : ceil(roi_height / pooled_height); // e.g., = 2\n    int roi_bin_grid_w =\n        (sampling_ratio > 0) ? sampling_ratio : ceil(roi_width / pooled_width);\n\n    // roi_start_h and roi_start_w are computed wrt the center of RoI (x, y).\n    // Appropriate translation needs to be applied after.\n    T roi_start_h = -roi_height / 2.0;\n    T roi_start_w = -roi_width / 2.0;\n\n    // We do average (integral) pooling inside a bin\n    const T count = max(roi_bin_grid_h * roi_bin_grid_w, 1); // e.g. = 4\n\n    T output_val = 0.;\n    for (int iy = 0; iy < roi_bin_grid_h; iy++) // e.g., iy = 0, 1\n    {\n      const T yy = roi_start_h + ph * bin_size_h +\n          static_cast<T>(iy + .5f) * bin_size_h /\n              static_cast<T>(roi_bin_grid_h); // e.g., 0.5, 1.5\n      for (int ix = 0; ix < roi_bin_grid_w; ix++) {\n        const T xx = roi_start_w + pw * bin_size_w +\n            static_cast<T>(ix + .5f) * bin_size_w /\n                static_cast<T>(roi_bin_grid_w);\n\n        // Rotate by theta around the center and translate\n        T y = yy * cos_theta - xx * sin_theta + roi_center_h;\n        T x = yy * sin_theta + xx * cos_theta + roi_center_w;\n\n        T val = bilinear_interpolate(offset_input, height, width, y, x);\n        output_val += val;\n      }\n    }\n    output_val /= count;\n\n    top_data[index] = output_val;\n  }\n}\n\ntemplate <typename T>\n__global__ void RoIAlignRotatedBackwardFeature(\n    const int nthreads,\n    const T* top_diff,\n    const int num_rois,\n    const T spatial_scale,\n    const int channels,\n    const int height,\n    const int width,\n    const int pooled_height,\n    const int pooled_width,\n    const int sampling_ratio,\n    T* bottom_diff,\n    const T* rois) {\n  CUDA_1D_KERNEL_LOOP(index, nthreads) {\n    // (n, c, ph, pw) is an element in the pooled output\n    int pw = index % pooled_width;\n    int ph = (index / pooled_width) % pooled_height;\n    int c 
= (index / pooled_width / pooled_height) % channels;\n    int n = index / pooled_width / pooled_height / channels;\n\n    const T* current_roi = rois + n * 6;\n    int roi_batch_ind = current_roi[0];\n\n    // Do not use rounding; this implementation detail is critical\n    // ROIAlignRotated supports align == true, i.e., continuous coordinate\n    // by default, thus the 0.5 offset\n    T offset = (T)0.5;\n    T roi_center_w = current_roi[1] * spatial_scale - offset;\n    T roi_center_h = current_roi[2] * spatial_scale - offset;\n    T roi_width = current_roi[3] * spatial_scale;\n    T roi_height = current_roi[4] * spatial_scale;\n    T theta = current_roi[5] * M_PI / 180.0;\n    T cos_theta = cos(theta);\n    T sin_theta = sin(theta);\n\n    T bin_size_h = static_cast<T>(roi_height) / static_cast<T>(pooled_height);\n    T bin_size_w = static_cast<T>(roi_width) / static_cast<T>(pooled_width);\n\n    T* offset_bottom_diff =\n        bottom_diff + (roi_batch_ind * channels + c) * height * width;\n\n    int top_offset = (n * channels + c) * pooled_height * pooled_width;\n    const T* offset_top_diff = top_diff + top_offset;\n    const T top_diff_this_bin = offset_top_diff[ph * pooled_width + pw];\n\n    // We use roi_bin_grid to sample the grid and mimic integral\n    int roi_bin_grid_h = (sampling_ratio > 0)\n        ? sampling_ratio\n        : ceil(roi_height / pooled_height); // e.g., = 2\n    int roi_bin_grid_w =\n        (sampling_ratio > 0) ? sampling_ratio : ceil(roi_width / pooled_width);\n\n    // roi_start_h and roi_start_w are computed wrt the center of RoI (x, y).\n    // Appropriate translation needs to be applied after.\n    T roi_start_h = -roi_height / 2.0;\n    T roi_start_w = -roi_width / 2.0;\n\n    // We do average (integral) pooling inside a bin\n    const T count = roi_bin_grid_h * roi_bin_grid_w; // e.g. 
= 4\n\n    for (int iy = 0; iy < roi_bin_grid_h; iy++) // e.g., iy = 0, 1\n    {\n      const T yy = roi_start_h + ph * bin_size_h +\n          static_cast<T>(iy + .5f) * bin_size_h /\n              static_cast<T>(roi_bin_grid_h); // e.g., 0.5, 1.5\n      for (int ix = 0; ix < roi_bin_grid_w; ix++) {\n        const T xx = roi_start_w + pw * bin_size_w +\n            static_cast<T>(ix + .5f) * bin_size_w /\n                static_cast<T>(roi_bin_grid_w);\n\n        // Rotate by theta around the center and translate\n        T y = yy * cos_theta - xx * sin_theta + roi_center_h;\n        T x = yy * sin_theta + xx * cos_theta + roi_center_w;\n\n        T w1, w2, w3, w4;\n        int x_low, x_high, y_low, y_high;\n\n        bilinear_interpolate_gradient(\n            height, width, y, x, w1, w2, w3, w4, x_low, x_high, y_low, y_high);\n\n        T g1 = top_diff_this_bin * w1 / count;\n        T g2 = top_diff_this_bin * w2 / count;\n        T g3 = top_diff_this_bin * w3 / count;\n        T g4 = top_diff_this_bin * w4 / count;\n\n        if (x_low >= 0 && x_high >= 0 && y_low >= 0 && y_high >= 0) {\n          atomicAdd(\n              offset_bottom_diff + y_low * width + x_low, static_cast<T>(g1));\n          atomicAdd(\n              offset_bottom_diff + y_low * width + x_high, static_cast<T>(g2));\n          atomicAdd(\n              offset_bottom_diff + y_high * width + x_low, static_cast<T>(g3));\n          atomicAdd(\n              offset_bottom_diff + y_high * width + x_high, static_cast<T>(g4));\n        } // if\n      } // ix\n    } // iy\n  } // CUDA_1D_KERNEL_LOOP\n} // RoIAlignRotatedBackward\n\nat::Tensor ROIAlignRotated_forward_cuda(\n    const at::Tensor& input,\n    const at::Tensor& rois,\n    const float spatial_scale,\n    const int pooled_height,\n    const int pooled_width,\n    const int sampling_ratio) {\n  AT_ASSERTM(input.device().is_cuda(), \"input must be a CUDA tensor\");\n  AT_ASSERTM(rois.device().is_cuda(), \"rois must be a CUDA tensor\");\n  
at::TensorArg input_t{input, \"input\", 1}, rois_t{rois, \"rois\", 2};\n\n  at::CheckedFrom c = \"ROIAlignRotated_forward_cuda\";\n  at::checkAllSameGPU(c, {input_t, rois_t});\n  at::checkAllSameType(c, {input_t, rois_t});\n  at::cuda::CUDAGuard device_guard(input.device());\n\n  auto num_rois = rois.size(0);\n  auto channels = input.size(1);\n  auto height = input.size(2);\n  auto width = input.size(3);\n\n  auto output = at::empty(\n      {num_rois, channels, pooled_height, pooled_width}, input.options());\n  auto output_size = num_rois * pooled_height * pooled_width * channels;\n  cudaStream_t stream = at::cuda::getCurrentCUDAStream();\n\n  dim3 grid(std::min(\n      at::cuda::ATenCeilDiv(\n          static_cast<int64_t>(output_size), static_cast<int64_t>(512)),\n      static_cast<int64_t>(4096)));\n  dim3 block(512);\n\n  if (output.numel() == 0) {\n    AT_CUDA_CHECK(cudaGetLastError());\n    return output;\n  }\n\n  auto input_ = input.contiguous(), rois_ = rois.contiguous();\n  AT_DISPATCH_FLOATING_TYPES(\n      input.scalar_type(), \"ROIAlignRotated_forward\", [&] {\n        RoIAlignRotatedForward<scalar_t><<<grid, block, 0, stream>>>(\n            output_size,\n            input_.data_ptr<scalar_t>(),\n            spatial_scale,\n            channels,\n            height,\n            width,\n            pooled_height,\n            pooled_width,\n            sampling_ratio,\n            rois_.data_ptr<scalar_t>(),\n            output.data_ptr<scalar_t>());\n      });\n  cudaDeviceSynchronize();\n  AT_CUDA_CHECK(cudaGetLastError());\n  return output;\n}\n\n// TODO remove the dependency on input and use instead its sizes -> save memory\nat::Tensor ROIAlignRotated_backward_cuda(\n    const at::Tensor& grad,\n    const at::Tensor& rois,\n    const float spatial_scale,\n    const int pooled_height,\n    const int pooled_width,\n    const int batch_size,\n    const int channels,\n    const int height,\n    const int width,\n    const int sampling_ratio) {\n  
AT_ASSERTM(grad.device().is_cuda(), \"grad must be a CUDA tensor\");\n  AT_ASSERTM(rois.device().is_cuda(), \"rois must be a CUDA tensor\");\n\n  at::TensorArg grad_t{grad, \"grad\", 1}, rois_t{rois, \"rois\", 2};\n  at::CheckedFrom c = \"ROIAlign_backward_cuda\";\n  at::checkAllSameGPU(c, {grad_t, rois_t});\n  at::checkAllSameType(c, {grad_t, rois_t});\n  at::cuda::CUDAGuard device_guard(grad.device());\n\n  auto num_rois = rois.size(0);\n  auto grad_input =\n      at::zeros({batch_size, channels, height, width}, grad.options());\n\n  cudaStream_t stream = at::cuda::getCurrentCUDAStream();\n\n  dim3 grid(std::min(\n      at::cuda::ATenCeilDiv(\n          static_cast<int64_t>(grad.numel()), static_cast<int64_t>(512)),\n      static_cast<int64_t>(4096)));\n  dim3 block(512);\n\n  // handle possibly empty gradients\n  if (grad.numel() == 0) {\n    AT_CUDA_CHECK(cudaGetLastError());\n    return grad_input;\n  }\n\n  auto grad_ = grad.contiguous(), rois_ = rois.contiguous();\n  AT_DISPATCH_FLOATING_TYPES(\n      grad.scalar_type(), \"ROIAlignRotated_backward\", [&] {\n        RoIAlignRotatedBackwardFeature<scalar_t><<<grid, block, 0, stream>>>(\n            grad.numel(),\n            grad_.data_ptr<scalar_t>(),\n            num_rois,\n            spatial_scale,\n            channels,\n            height,\n            width,\n            pooled_height,\n            pooled_width,\n            sampling_ratio,\n            grad_input.data_ptr<scalar_t>(),\n            rois_.data_ptr<scalar_t>());\n      });\n  AT_CUDA_CHECK(cudaGetLastError());\n  return grad_input;\n}\n\n} // namespace detectron2\n"
  },
  {
    "path": "detectron2/layers/csrc/box_iou_rotated/box_iou_rotated.h",
    "content": "// Copyright (c) Facebook, Inc. and its affiliates.\n#pragma once\n#include <torch/types.h>\n\nnamespace detectron2 {\n\nat::Tensor box_iou_rotated_cpu(\n    const at::Tensor& boxes1,\n    const at::Tensor& boxes2);\n\n#if defined(WITH_CUDA) || defined(WITH_HIP)\nat::Tensor box_iou_rotated_cuda(\n    const at::Tensor& boxes1,\n    const at::Tensor& boxes2);\n#endif\n\n// Interface for Python\n// inline is needed to prevent multiple function definitions when this header is\n// included by different cpps\ninline at::Tensor box_iou_rotated(\n    const at::Tensor& boxes1,\n    const at::Tensor& boxes2) {\n  assert(boxes1.device().is_cuda() == boxes2.device().is_cuda());\n  if (boxes1.device().is_cuda()) {\n#if defined(WITH_CUDA) || defined(WITH_HIP)\n    return box_iou_rotated_cuda(boxes1.contiguous(), boxes2.contiguous());\n#else\n    AT_ERROR(\"Detectron2 is not compiled with GPU support!\");\n#endif\n  }\n\n  return box_iou_rotated_cpu(boxes1.contiguous(), boxes2.contiguous());\n}\n\n} // namespace detectron2\n"
  },
  {
    "path": "detectron2/layers/csrc/box_iou_rotated/box_iou_rotated_cpu.cpp",
    "content": "// Copyright (c) Facebook, Inc. and its affiliates.\n#include \"box_iou_rotated.h\"\n#include \"box_iou_rotated_utils.h\"\n\nnamespace detectron2 {\n\ntemplate <typename T>\nvoid box_iou_rotated_cpu_kernel(\n    const at::Tensor& boxes1,\n    const at::Tensor& boxes2,\n    at::Tensor& ious) {\n  auto num_boxes1 = boxes1.size(0);\n  auto num_boxes2 = boxes2.size(0);\n\n  for (int i = 0; i < num_boxes1; i++) {\n    for (int j = 0; j < num_boxes2; j++) {\n      ious[i * num_boxes2 + j] = single_box_iou_rotated<T>(\n          boxes1[i].data_ptr<T>(), boxes2[j].data_ptr<T>());\n    }\n  }\n}\n\nat::Tensor box_iou_rotated_cpu(\n    // input must be contiguous:\n    const at::Tensor& boxes1,\n    const at::Tensor& boxes2) {\n  auto num_boxes1 = boxes1.size(0);\n  auto num_boxes2 = boxes2.size(0);\n  at::Tensor ious =\n      at::empty({num_boxes1 * num_boxes2}, boxes1.options().dtype(at::kFloat));\n\n  box_iou_rotated_cpu_kernel<float>(boxes1, boxes2, ious);\n\n  // reshape from 1d array to 2d array\n  auto shape = std::vector<int64_t>{num_boxes1, num_boxes2};\n  return ious.reshape(shape);\n}\n\n} // namespace detectron2\n"
  },
  {
    "path": "detectron2/layers/csrc/box_iou_rotated/box_iou_rotated_cuda.cu",
    "content": "// Copyright (c) Facebook, Inc. and its affiliates.\n#include <ATen/ATen.h>\n#include <ATen/cuda/CUDAContext.h>\n#include <c10/cuda/CUDAGuard.h>\n#include <ATen/cuda/CUDAApplyUtils.cuh>\n#include \"box_iou_rotated_utils.h\"\n\nnamespace detectron2 {\n\n// 2D block with 32 * 16 = 512 threads per block\nconst int BLOCK_DIM_X = 32;\nconst int BLOCK_DIM_Y = 16;\n\ntemplate <typename T>\n__global__ void box_iou_rotated_cuda_kernel(\n    const int n_boxes1,\n    const int n_boxes2,\n    const T* dev_boxes1,\n    const T* dev_boxes2,\n    T* dev_ious) {\n  const int row_start = blockIdx.x * blockDim.x;\n  const int col_start = blockIdx.y * blockDim.y;\n\n  const int row_size = min(n_boxes1 - row_start, blockDim.x);\n  const int col_size = min(n_boxes2 - col_start, blockDim.y);\n\n  __shared__ float block_boxes1[BLOCK_DIM_X * 5];\n  __shared__ float block_boxes2[BLOCK_DIM_Y * 5];\n\n  // It's safe to copy using threadIdx.x since BLOCK_DIM_X >= BLOCK_DIM_Y\n  if (threadIdx.x < row_size && threadIdx.y == 0) {\n    block_boxes1[threadIdx.x * 5 + 0] =\n        dev_boxes1[(row_start + threadIdx.x) * 5 + 0];\n    block_boxes1[threadIdx.x * 5 + 1] =\n        dev_boxes1[(row_start + threadIdx.x) * 5 + 1];\n    block_boxes1[threadIdx.x * 5 + 2] =\n        dev_boxes1[(row_start + threadIdx.x) * 5 + 2];\n    block_boxes1[threadIdx.x * 5 + 3] =\n        dev_boxes1[(row_start + threadIdx.x) * 5 + 3];\n    block_boxes1[threadIdx.x * 5 + 4] =\n        dev_boxes1[(row_start + threadIdx.x) * 5 + 4];\n  }\n\n  if (threadIdx.x < col_size && threadIdx.y == 0) {\n    block_boxes2[threadIdx.x * 5 + 0] =\n        dev_boxes2[(col_start + threadIdx.x) * 5 + 0];\n    block_boxes2[threadIdx.x * 5 + 1] =\n        dev_boxes2[(col_start + threadIdx.x) * 5 + 1];\n    block_boxes2[threadIdx.x * 5 + 2] =\n        dev_boxes2[(col_start + threadIdx.x) * 5 + 2];\n    block_boxes2[threadIdx.x * 5 + 3] =\n        dev_boxes2[(col_start + threadIdx.x) * 5 + 3];\n    block_boxes2[threadIdx.x * 5 + 
4] =\n        dev_boxes2[(col_start + threadIdx.x) * 5 + 4];\n  }\n  __syncthreads();\n\n  if (threadIdx.x < row_size && threadIdx.y < col_size) {\n    int offset = (row_start + threadIdx.x) * n_boxes2 + col_start + threadIdx.y;\n    dev_ious[offset] = single_box_iou_rotated<T>(\n        block_boxes1 + threadIdx.x * 5, block_boxes2 + threadIdx.y * 5);\n  }\n}\n\nat::Tensor box_iou_rotated_cuda(\n    // input must be contiguous\n    const at::Tensor& boxes1,\n    const at::Tensor& boxes2) {\n  using scalar_t = float;\n  AT_ASSERTM(\n      boxes1.scalar_type() == at::kFloat, \"boxes1 must be a float tensor\");\n  AT_ASSERTM(\n      boxes2.scalar_type() == at::kFloat, \"boxes2 must be a float tensor\");\n  AT_ASSERTM(boxes1.is_cuda(), \"boxes1 must be a CUDA tensor\");\n  AT_ASSERTM(boxes2.is_cuda(), \"boxes2 must be a CUDA tensor\");\n  at::cuda::CUDAGuard device_guard(boxes1.device());\n\n  auto num_boxes1 = boxes1.size(0);\n  auto num_boxes2 = boxes2.size(0);\n\n  at::Tensor ious =\n      at::empty({num_boxes1 * num_boxes2}, boxes1.options().dtype(at::kFloat));\n\n  bool transpose = false;\n  if (num_boxes1 > 0 && num_boxes2 > 0) {\n    scalar_t *data1 = boxes1.data_ptr<scalar_t>(),\n             *data2 = boxes2.data_ptr<scalar_t>();\n\n    if (num_boxes2 > 65535 * BLOCK_DIM_Y) {\n      AT_ASSERTM(\n          num_boxes1 <= 65535 * BLOCK_DIM_Y,\n          \"Too many boxes for box_iou_rotated_cuda!\");\n      // x dim is allowed to be large, but y dim cannot,\n      // so we transpose the two to avoid \"invalid configuration argument\"\n      // error. We assume one of them is small. 
Otherwise the result is hard to\n      // fit in memory anyway.\n      std::swap(num_boxes1, num_boxes2);\n      std::swap(data1, data2);\n      transpose = true;\n    }\n\n    const int blocks_x =\n        at::cuda::ATenCeilDiv(static_cast<int>(num_boxes1), BLOCK_DIM_X);\n    const int blocks_y =\n        at::cuda::ATenCeilDiv(static_cast<int>(num_boxes2), BLOCK_DIM_Y);\n\n    dim3 blocks(blocks_x, blocks_y);\n    dim3 threads(BLOCK_DIM_X, BLOCK_DIM_Y);\n    cudaStream_t stream = at::cuda::getCurrentCUDAStream();\n\n    box_iou_rotated_cuda_kernel<scalar_t><<<blocks, threads, 0, stream>>>(\n        num_boxes1,\n        num_boxes2,\n        data1,\n        data2,\n        (scalar_t*)ious.data_ptr<scalar_t>());\n\n    AT_CUDA_CHECK(cudaGetLastError());\n  }\n\n  // reshape from 1d array to 2d array\n  auto shape = std::vector<int64_t>{num_boxes1, num_boxes2};\n  if (transpose) {\n    return ious.view(shape).t();\n  } else {\n    return ious.view(shape);\n  }\n}\n\n} // namespace detectron2\n"
  },
  {
    "path": "detectron2/layers/csrc/box_iou_rotated/box_iou_rotated_utils.h",
    "content": "// Copyright (c) Facebook, Inc. and its affiliates.\n#pragma once\n\n#include <cassert>\n#include <cmath>\n\n#if defined(__CUDACC__) || __HCC__ == 1 || __HIP__ == 1\n// Designates functions callable from the host (CPU) and the device (GPU)\n#define HOST_DEVICE __host__ __device__\n#define HOST_DEVICE_INLINE HOST_DEVICE __forceinline__\n#else\n#include <algorithm>\n#define HOST_DEVICE\n#define HOST_DEVICE_INLINE HOST_DEVICE inline\n#endif\n\nnamespace detectron2 {\n\nnamespace {\n\ntemplate <typename T>\nstruct RotatedBox {\n  T x_ctr, y_ctr, w, h, a;\n};\n\ntemplate <typename T>\nstruct Point {\n  T x, y;\n  HOST_DEVICE_INLINE Point(const T& px = 0, const T& py = 0) : x(px), y(py) {}\n  HOST_DEVICE_INLINE Point operator+(const Point& p) const {\n    return Point(x + p.x, y + p.y);\n  }\n  HOST_DEVICE_INLINE Point& operator+=(const Point& p) {\n    x += p.x;\n    y += p.y;\n    return *this;\n  }\n  HOST_DEVICE_INLINE Point operator-(const Point& p) const {\n    return Point(x - p.x, y - p.y);\n  }\n  HOST_DEVICE_INLINE Point operator*(const T coeff) const {\n    return Point(x * coeff, y * coeff);\n  }\n};\n\ntemplate <typename T>\nHOST_DEVICE_INLINE T dot_2d(const Point<T>& A, const Point<T>& B) {\n  return A.x * B.x + A.y * B.y;\n}\n\n// R: result type. can be different from input type\ntemplate <typename T, typename R = T>\nHOST_DEVICE_INLINE R cross_2d(const Point<T>& A, const Point<T>& B) {\n  return static_cast<R>(A.x) * static_cast<R>(B.y) -\n      static_cast<R>(B.x) * static_cast<R>(A.y);\n}\n\ntemplate <typename T>\nHOST_DEVICE_INLINE void get_rotated_vertices(\n    const RotatedBox<T>& box,\n    Point<T> (&pts)[4]) {\n  // M_PI / 180. 
== 0.01745329251\n  double theta = box.a * 0.01745329251;\n  T cosTheta2 = (T)cos(theta) * 0.5f;\n  T sinTheta2 = (T)sin(theta) * 0.5f;\n\n  // y: top --> down; x: left --> right\n  pts[0].x = box.x_ctr + sinTheta2 * box.h + cosTheta2 * box.w;\n  pts[0].y = box.y_ctr + cosTheta2 * box.h - sinTheta2 * box.w;\n  pts[1].x = box.x_ctr - sinTheta2 * box.h + cosTheta2 * box.w;\n  pts[1].y = box.y_ctr - cosTheta2 * box.h - sinTheta2 * box.w;\n  pts[2].x = 2 * box.x_ctr - pts[0].x;\n  pts[2].y = 2 * box.y_ctr - pts[0].y;\n  pts[3].x = 2 * box.x_ctr - pts[1].x;\n  pts[3].y = 2 * box.y_ctr - pts[1].y;\n}\n\ntemplate <typename T>\nHOST_DEVICE_INLINE int get_intersection_points(\n    const Point<T> (&pts1)[4],\n    const Point<T> (&pts2)[4],\n    Point<T> (&intersections)[24]) {\n  // Line vector\n  // A line from p1 to p2 is: p1 + (p2-p1)*t, t=[0,1]\n  Point<T> vec1[4], vec2[4];\n  for (int i = 0; i < 4; i++) {\n    vec1[i] = pts1[(i + 1) % 4] - pts1[i];\n    vec2[i] = pts2[(i + 1) % 4] - pts2[i];\n  }\n\n  // When computing the intersection area, it doesn't hurt if we have\n  // more (duplicated/approximate) intersections/vertices than needed,\n  // while it can cause drastic difference if we miss an intersection/vertex.\n  // Therefore, we add an epsilon to relax the comparisons between\n  // the float point numbers that decide the intersection points.\n  double EPS = 1e-5;\n\n  // Line test - test all line combos for intersection\n  int num = 0; // number of intersections\n  for (int i = 0; i < 4; i++) {\n    for (int j = 0; j < 4; j++) {\n      // Solve for 2x2 Ax=b\n      T det = cross_2d<T>(vec2[j], vec1[i]);\n\n      // This takes care of parallel lines\n      if (fabs(det) <= 1e-14) {\n        continue;\n      }\n\n      auto vec12 = pts2[j] - pts1[i];\n\n      T t1 = cross_2d<T>(vec2[j], vec12) / det;\n      T t2 = cross_2d<T>(vec1[i], vec12) / det;\n\n      if (t1 > -EPS && t1 < 1.0f + EPS && t2 > -EPS && t2 < 1.0f + EPS) {\n        intersections[num++] = pts1[i] + 
vec1[i] * t1;\n      }\n    }\n  }\n\n  // Check for vertices of rect1 inside rect2\n  {\n    const auto& AB = vec2[0];\n    const auto& DA = vec2[3];\n    auto ABdotAB = dot_2d<T>(AB, AB);\n    auto ADdotAD = dot_2d<T>(DA, DA);\n    for (int i = 0; i < 4; i++) {\n      // assume ABCD is the rectangle, and P is the point to be judged\n      // P is inside ABCD iff. P's projection on AB lies within AB\n      // and P's projection on AD lies within AD\n\n      auto AP = pts1[i] - pts2[0];\n\n      auto APdotAB = dot_2d<T>(AP, AB);\n      auto APdotAD = -dot_2d<T>(AP, DA);\n\n      if ((APdotAB > -EPS) && (APdotAD > -EPS) && (APdotAB < ABdotAB + EPS) &&\n          (APdotAD < ADdotAD + EPS)) {\n        intersections[num++] = pts1[i];\n      }\n    }\n  }\n\n  // Reverse the check - check for vertices of rect2 inside rect1\n  {\n    const auto& AB = vec1[0];\n    const auto& DA = vec1[3];\n    auto ABdotAB = dot_2d<T>(AB, AB);\n    auto ADdotAD = dot_2d<T>(DA, DA);\n    for (int i = 0; i < 4; i++) {\n      auto AP = pts2[i] - pts1[0];\n\n      auto APdotAB = dot_2d<T>(AP, AB);\n      auto APdotAD = -dot_2d<T>(AP, DA);\n\n      if ((APdotAB > -EPS) && (APdotAD > -EPS) && (APdotAB < ABdotAB + EPS) &&\n          (APdotAD < ADdotAD + EPS)) {\n        intersections[num++] = pts2[i];\n      }\n    }\n  }\n\n  return num;\n}\n\ntemplate <typename T>\nHOST_DEVICE_INLINE int convex_hull_graham(\n    const Point<T> (&p)[24],\n    const int& num_in,\n    Point<T> (&q)[24],\n    bool shift_to_zero = false) {\n  assert(num_in >= 2);\n\n  // Step 1:\n  // Find point with minimum y\n  // if more than 1 points have the same minimum y,\n  // pick the one with the minimum x.\n  int t = 0;\n  for (int i = 1; i < num_in; i++) {\n    if (p[i].y < p[t].y || (p[i].y == p[t].y && p[i].x < p[t].x)) {\n      t = i;\n    }\n  }\n  auto& start = p[t]; // starting point\n\n  // Step 2:\n  // Subtract starting point from every points (for sorting in the next step)\n  for (int i = 0; i < num_in; i++) 
{\n    q[i] = p[i] - start;\n  }\n\n  // Swap the starting point to position 0\n  auto tmp = q[0];\n  q[0] = q[t];\n  q[t] = tmp;\n\n  // Step 3:\n  // Sort point 1 ~ num_in according to their relative cross-product values\n  // (essentially sorting according to angles)\n  // If the angles are the same, sort according to their distance to origin\n  T dist[24];\n#if defined(__CUDACC__) || __HCC__ == 1 || __HIP__ == 1\n  // compute distance to origin before sort, and sort them together with the\n  // points\n  for (int i = 0; i < num_in; i++) {\n    dist[i] = dot_2d<T>(q[i], q[i]);\n  }\n\n  // CUDA version\n  // In the future, we can potentially use thrust\n  // for sorting here to improve speed (though not guaranteed)\n  for (int i = 1; i < num_in - 1; i++) {\n    for (int j = i + 1; j < num_in; j++) {\n      T crossProduct = cross_2d<T>(q[i], q[j]);\n      if ((crossProduct < -1e-6) ||\n          (fabs(crossProduct) < 1e-6 && dist[i] > dist[j])) {\n        auto q_tmp = q[i];\n        q[i] = q[j];\n        q[j] = q_tmp;\n        auto dist_tmp = dist[i];\n        dist[i] = dist[j];\n        dist[j] = dist_tmp;\n      }\n    }\n  }\n#else\n  // CPU version\n  std::sort(\n      q + 1, q + num_in, [](const Point<T>& A, const Point<T>& B) -> bool {\n        T temp = cross_2d<T>(A, B);\n        if (fabs(temp) < 1e-6) {\n          return dot_2d<T>(A, A) < dot_2d<T>(B, B);\n        } else {\n          return temp > 0;\n        }\n      });\n  // compute distance to origin after sort, since the points are now different.\n  for (int i = 0; i < num_in; i++) {\n    dist[i] = dot_2d<T>(q[i], q[i]);\n  }\n#endif\n\n  // Step 4:\n  // Make sure there are at least 2 points (that don't overlap with each other)\n  // in the stack\n  int k; // index of the non-overlapped second point\n  for (k = 1; k < num_in; k++) {\n    if (dist[k] > 1e-8) {\n      break;\n    }\n  }\n  if (k == num_in) {\n    // We reach the end, which means the convex hull is just one point\n    q[0] = p[t];\n    
return 1;\n  }\n  q[1] = q[k];\n  int m = 2; // 2 points in the stack\n  // Step 5:\n  // Finally we can start the scanning process.\n  // When a non-convex relationship between the 3 points is found\n  // (either concave shape or duplicated points),\n  // we pop the previous point from the stack\n  // until the 3-point relationship is convex again, or\n  // until the stack only contains two points\n  for (int i = k + 1; i < num_in; i++) {\n    while (m > 1) {\n      auto q1 = q[i] - q[m - 2], q2 = q[m - 1] - q[m - 2];\n      // cross_2d() uses FMA and therefore computes round(round(q1.x*q2.y) -\n      // q2.x*q1.y) So it may not return 0 even when q1==q2. Therefore we\n      // compare round(q1.x*q2.y) and round(q2.x*q1.y) directly. (round means\n      // round to nearest floating point).\n      if (q1.x * q2.y >= q2.x * q1.y)\n        m--;\n      else\n        break;\n    }\n    // Using double also helps, but float can solve the issue for now.\n    // while (m > 1 && cross_2d<T, double>(q[i] - q[m - 2], q[m - 1] - q[m - 2])\n    // >= 0) {\n    //     m--;\n    // }\n    q[m++] = q[i];\n  }\n\n  // Step 6 (Optional):\n  // In general sense we need the original coordinates, so we\n  // need to shift the points back (reverting Step 2)\n  // But if we're only interested in getting the area/perimeter of the shape\n  // We can simply return.\n  if (!shift_to_zero) {\n    for (int i = 0; i < m; i++) {\n      q[i] += start;\n    }\n  }\n\n  return m;\n}\n\ntemplate <typename T>\nHOST_DEVICE_INLINE T polygon_area(const Point<T> (&q)[24], const int& m) {\n  if (m <= 2) {\n    return 0;\n  }\n\n  T area = 0;\n  for (int i = 1; i < m - 1; i++) {\n    area += fabs(cross_2d<T>(q[i] - q[0], q[i + 1] - q[0]));\n  }\n\n  return area / 2.0;\n}\n\ntemplate <typename T>\nHOST_DEVICE_INLINE T rotated_boxes_intersection(\n    const RotatedBox<T>& box1,\n    const RotatedBox<T>& box2) {\n  // There are up to 4 x 4 + 4 + 4 = 24 intersections (including dups) returned\n  // from 
rotated_rect_intersection_pts\n  Point<T> intersectPts[24], orderedPts[24];\n\n  Point<T> pts1[4];\n  Point<T> pts2[4];\n  get_rotated_vertices<T>(box1, pts1);\n  get_rotated_vertices<T>(box2, pts2);\n\n  int num = get_intersection_points<T>(pts1, pts2, intersectPts);\n\n  if (num <= 2) {\n    return 0.0;\n  }\n\n  // Convex Hull to order the intersection points in clockwise order and find\n  // the contour area.\n  int num_convex = convex_hull_graham<T>(intersectPts, num, orderedPts, true);\n  return polygon_area<T>(orderedPts, num_convex);\n}\n\n} // namespace\n\ntemplate <typename T>\nHOST_DEVICE_INLINE T\nsingle_box_iou_rotated(T const* const box1_raw, T const* const box2_raw) {\n  // shift center to the middle point to achieve higher precision in result\n  RotatedBox<T> box1, box2;\n  auto center_shift_x = (box1_raw[0] + box2_raw[0]) / 2.0;\n  auto center_shift_y = (box1_raw[1] + box2_raw[1]) / 2.0;\n  box1.x_ctr = box1_raw[0] - center_shift_x;\n  box1.y_ctr = box1_raw[1] - center_shift_y;\n  box1.w = box1_raw[2];\n  box1.h = box1_raw[3];\n  box1.a = box1_raw[4];\n  box2.x_ctr = box2_raw[0] - center_shift_x;\n  box2.y_ctr = box2_raw[1] - center_shift_y;\n  box2.w = box2_raw[2];\n  box2.h = box2_raw[3];\n  box2.a = box2_raw[4];\n\n  T area1 = box1.w * box1.h;\n  T area2 = box2.w * box2.h;\n  if (area1 < 1e-14 || area2 < 1e-14) {\n    return 0.f;\n  }\n\n  T intersection = rotated_boxes_intersection<T>(box1, box2);\n  T iou = intersection / (area1 + area2 - intersection);\n  return iou;\n}\n\n} // namespace detectron2\n"
  },
  {
    "path": "detectron2/layers/csrc/cocoeval/cocoeval.cpp",
    "content": "// Copyright (c) Facebook, Inc. and its affiliates.\n#include \"cocoeval.h\"\n#include <time.h>\n#include <algorithm>\n#include <cstdint>\n#include <numeric>\n\nusing namespace pybind11::literals;\n\nnamespace detectron2 {\n\nnamespace COCOeval {\n\n// Sort detections from highest score to lowest, such that\n// detection_instances[detection_sorted_indices[t]] >=\n// detection_instances[detection_sorted_indices[t+1]].  Use stable_sort to match\n// original COCO API\nvoid SortInstancesByDetectionScore(\n    const std::vector<InstanceAnnotation>& detection_instances,\n    std::vector<uint64_t>* detection_sorted_indices) {\n  detection_sorted_indices->resize(detection_instances.size());\n  std::iota(\n      detection_sorted_indices->begin(), detection_sorted_indices->end(), 0);\n  std::stable_sort(\n      detection_sorted_indices->begin(),\n      detection_sorted_indices->end(),\n      [&detection_instances](size_t j1, size_t j2) {\n        return detection_instances[j1].score > detection_instances[j2].score;\n      });\n}\n\n// Partition the ground truth objects based on whether or not to ignore them\n// based on area\nvoid SortInstancesByIgnore(\n    const std::array<double, 2>& area_range,\n    const std::vector<InstanceAnnotation>& ground_truth_instances,\n    std::vector<uint64_t>* ground_truth_sorted_indices,\n    std::vector<bool>* ignores) {\n  ignores->clear();\n  ignores->reserve(ground_truth_instances.size());\n  for (auto o : ground_truth_instances) {\n    ignores->push_back(\n        o.ignore || o.area < area_range[0] || o.area > area_range[1]);\n  }\n\n  ground_truth_sorted_indices->resize(ground_truth_instances.size());\n  std::iota(\n      ground_truth_sorted_indices->begin(),\n      ground_truth_sorted_indices->end(),\n      0);\n  std::stable_sort(\n      ground_truth_sorted_indices->begin(),\n      ground_truth_sorted_indices->end(),\n      [&ignores](size_t j1, size_t j2) {\n        return (int)(*ignores)[j1] < (int)(*ignores)[j2];\n 
     });\n}\n\n// For each IOU threshold, greedily match each detected instance to a ground\n// truth instance (if possible) and store the results\nvoid MatchDetectionsToGroundTruth(\n    const std::vector<InstanceAnnotation>& detection_instances,\n    const std::vector<uint64_t>& detection_sorted_indices,\n    const std::vector<InstanceAnnotation>& ground_truth_instances,\n    const std::vector<uint64_t>& ground_truth_sorted_indices,\n    const std::vector<bool>& ignores,\n    const std::vector<std::vector<double>>& ious,\n    const std::vector<double>& iou_thresholds,\n    const std::array<double, 2>& area_range,\n    ImageEvaluation* results) {\n  // Initialize memory to store return data matches and ignore\n  const int num_iou_thresholds = iou_thresholds.size();\n  const int num_ground_truth = ground_truth_sorted_indices.size();\n  const int num_detections = detection_sorted_indices.size();\n  std::vector<uint64_t> ground_truth_matches(\n      num_iou_thresholds * num_ground_truth, 0);\n  std::vector<uint64_t>& detection_matches = results->detection_matches;\n  std::vector<bool>& detection_ignores = results->detection_ignores;\n  std::vector<bool>& ground_truth_ignores = results->ground_truth_ignores;\n  detection_matches.resize(num_iou_thresholds * num_detections, 0);\n  detection_ignores.resize(num_iou_thresholds * num_detections, false);\n  ground_truth_ignores.resize(num_ground_truth);\n  for (auto g = 0; g < num_ground_truth; ++g) {\n    ground_truth_ignores[g] = ignores[ground_truth_sorted_indices[g]];\n  }\n\n  for (auto t = 0; t < num_iou_thresholds; ++t) {\n    for (auto d = 0; d < num_detections; ++d) {\n      // information about best match so far (match=-1 -> unmatched)\n      double best_iou = std::min(iou_thresholds[t], 1 - 1e-10);\n      int match = -1;\n      for (auto g = 0; g < num_ground_truth; ++g) {\n        // if this ground truth instance is already matched and not a\n        // crowd, it cannot be matched to another detection\n        if 
(ground_truth_matches[t * num_ground_truth + g] > 0 &&\n            !ground_truth_instances[ground_truth_sorted_indices[g]].is_crowd) {\n          continue;\n        }\n\n        // if detected instance matched to a regular ground truth\n        // instance, we can break on the first ground truth instance\n        // tagged as ignore (because they are sorted by the ignore tag)\n        if (match >= 0 && !ground_truth_ignores[match] &&\n            ground_truth_ignores[g]) {\n          break;\n        }\n\n        // if IOU overlap is the best so far, store the match appropriately\n        if (ious[d][ground_truth_sorted_indices[g]] >= best_iou) {\n          best_iou = ious[d][ground_truth_sorted_indices[g]];\n          match = g;\n        }\n      }\n      // if match was made, store id of match for both detection and\n      // ground truth\n      if (match >= 0) {\n        detection_ignores[t * num_detections + d] = ground_truth_ignores[match];\n        detection_matches[t * num_detections + d] =\n            ground_truth_instances[ground_truth_sorted_indices[match]].id;\n        ground_truth_matches[t * num_ground_truth + match] =\n            detection_instances[detection_sorted_indices[d]].id;\n      }\n\n      // set unmatched detections outside of area range to ignore\n      const InstanceAnnotation& detection =\n          detection_instances[detection_sorted_indices[d]];\n      detection_ignores[t * num_detections + d] =\n          detection_ignores[t * num_detections + d] ||\n          (detection_matches[t * num_detections + d] == 0 &&\n           (detection.area < area_range[0] || detection.area > area_range[1]));\n    }\n  }\n\n  // store detection score results\n  results->detection_scores.resize(detection_sorted_indices.size());\n  for (size_t d = 0; d < detection_sorted_indices.size(); ++d) {\n    results->detection_scores[d] =\n        detection_instances[detection_sorted_indices[d]].score;\n  }\n}\n\nstd::vector<ImageEvaluation> EvaluateImages(\n    
const std::vector<std::array<double, 2>>& area_ranges,\n    int max_detections,\n    const std::vector<double>& iou_thresholds,\n    const ImageCategoryInstances<std::vector<double>>& image_category_ious,\n    const ImageCategoryInstances<InstanceAnnotation>&\n        image_category_ground_truth_instances,\n    const ImageCategoryInstances<InstanceAnnotation>&\n        image_category_detection_instances) {\n  const int num_area_ranges = area_ranges.size();\n  const int num_images = image_category_ground_truth_instances.size();\n  const int num_categories =\n      image_category_ious.size() > 0 ? image_category_ious[0].size() : 0;\n  std::vector<uint64_t> detection_sorted_indices;\n  std::vector<uint64_t> ground_truth_sorted_indices;\n  std::vector<bool> ignores;\n  std::vector<ImageEvaluation> results_all(\n      num_images * num_area_ranges * num_categories);\n\n  // Store results for each image, category, and area range combination. Results\n  // for each IOU threshold are packed into the same ImageEvaluation object\n  for (auto i = 0; i < num_images; ++i) {\n    for (auto c = 0; c < num_categories; ++c) {\n      const std::vector<InstanceAnnotation>& ground_truth_instances =\n          image_category_ground_truth_instances[i][c];\n      const std::vector<InstanceAnnotation>& detection_instances =\n          image_category_detection_instances[i][c];\n\n      SortInstancesByDetectionScore(\n          detection_instances, &detection_sorted_indices);\n      if ((int)detection_sorted_indices.size() > max_detections) {\n        detection_sorted_indices.resize(max_detections);\n      }\n\n      for (size_t a = 0; a < area_ranges.size(); ++a) {\n        SortInstancesByIgnore(\n            area_ranges[a],\n            ground_truth_instances,\n            &ground_truth_sorted_indices,\n            &ignores);\n\n        MatchDetectionsToGroundTruth(\n            detection_instances,\n            detection_sorted_indices,\n            ground_truth_instances,\n            
ground_truth_sorted_indices,\n            ignores,\n            image_category_ious[i][c],\n            iou_thresholds,\n            area_ranges[a],\n            &results_all\n                [c * num_area_ranges * num_images + a * num_images + i]);\n      }\n    }\n  }\n\n  return results_all;\n}\n\n// Convert a python list to a vector\ntemplate <typename T>\nstd::vector<T> list_to_vec(const py::list& l) {\n  std::vector<T> v(py::len(l));\n  for (int i = 0; i < (int)py::len(l); ++i) {\n    v[i] = l[i].cast<T>();\n  }\n  return v;\n}\n\n// Helper function to Accumulate()\n// Considers the evaluation results applicable to a particular category, area\n// range, and max_detections parameter setting, which begin at\n// evaluations[evaluation_index].  Extracts a sorted list of length n of all\n// applicable detection instances concatenated across all images in the dataset,\n// which are represented by the outputs evaluation_indices, detection_scores,\n// image_detection_indices, and detection_sorted_indices--all of which are\n// length n. evaluation_indices[i] stores the applicable index into\n// evaluations[] for instance i, which has detection score detection_score[i],\n// and is the image_detection_indices[i]'th of the list of detections\n// for the image containing i.  
detection_sorted_indices[] defines a sorted\n// permutation of the 3 other outputs\nint BuildSortedDetectionList(\n    const std::vector<ImageEvaluation>& evaluations,\n    const int64_t evaluation_index,\n    const int64_t num_images,\n    const int max_detections,\n    std::vector<uint64_t>* evaluation_indices,\n    std::vector<double>* detection_scores,\n    std::vector<uint64_t>* detection_sorted_indices,\n    std::vector<uint64_t>* image_detection_indices) {\n  assert(evaluations.size() >= evaluation_index + num_images);\n\n  // Extract a list of object instances of the applicable category, area\n  // range, and max detections requirements such that they can be sorted\n  image_detection_indices->clear();\n  evaluation_indices->clear();\n  detection_scores->clear();\n  image_detection_indices->reserve(num_images * max_detections);\n  evaluation_indices->reserve(num_images * max_detections);\n  detection_scores->reserve(num_images * max_detections);\n  int num_valid_ground_truth = 0;\n  for (auto i = 0; i < num_images; ++i) {\n    const ImageEvaluation& evaluation = evaluations[evaluation_index + i];\n\n    for (int d = 0;\n         d < (int)evaluation.detection_scores.size() && d < max_detections;\n         ++d) { // detected instances\n      evaluation_indices->push_back(evaluation_index + i);\n      image_detection_indices->push_back(d);\n      detection_scores->push_back(evaluation.detection_scores[d]);\n    }\n    for (auto ground_truth_ignore : evaluation.ground_truth_ignores) {\n      if (!ground_truth_ignore) {\n        ++num_valid_ground_truth;\n      }\n    }\n  }\n\n  // Sort detections by decreasing score, using stable sort to match\n  // python implementation\n  detection_sorted_indices->resize(detection_scores->size());\n  std::iota(\n      detection_sorted_indices->begin(), detection_sorted_indices->end(), 0);\n  std::stable_sort(\n      detection_sorted_indices->begin(),\n      detection_sorted_indices->end(),\n      [&detection_scores](size_t 
j1, size_t j2) {\n        return (*detection_scores)[j1] > (*detection_scores)[j2];\n      });\n\n  return num_valid_ground_truth;\n}\n\n// Helper function to Accumulate()\n// Compute a precision recall curve given a sorted list of detected instances\n// encoded in evaluations, evaluation_indices, detection_scores,\n// detection_sorted_indices, image_detection_indices (see\n// BuildSortedDetectionList()). Using vectors precisions and recalls\n// and temporary storage, output the results into precisions_out, recalls_out,\n// and scores_out, which are large buffers containing many precion/recall curves\n// for all possible parameter settings, with precisions_out_index and\n// recalls_out_index defining the applicable indices to store results.\nvoid ComputePrecisionRecallCurve(\n    const int64_t precisions_out_index,\n    const int64_t precisions_out_stride,\n    const int64_t recalls_out_index,\n    const std::vector<double>& recall_thresholds,\n    const int iou_threshold_index,\n    const int num_iou_thresholds,\n    const int num_valid_ground_truth,\n    const std::vector<ImageEvaluation>& evaluations,\n    const std::vector<uint64_t>& evaluation_indices,\n    const std::vector<double>& detection_scores,\n    const std::vector<uint64_t>& detection_sorted_indices,\n    const std::vector<uint64_t>& image_detection_indices,\n    std::vector<double>* precisions,\n    std::vector<double>* recalls,\n    std::vector<double>* precisions_out,\n    std::vector<double>* scores_out,\n    std::vector<double>* recalls_out) {\n  assert(recalls_out->size() > recalls_out_index);\n\n  // Compute precision/recall for each instance in the sorted list of detections\n  int64_t true_positives_sum = 0, false_positives_sum = 0;\n  precisions->clear();\n  recalls->clear();\n  precisions->reserve(detection_sorted_indices.size());\n  recalls->reserve(detection_sorted_indices.size());\n  assert(!evaluations.empty() || detection_sorted_indices.empty());\n  for (auto detection_sorted_index : 
detection_sorted_indices) {\n    const ImageEvaluation& evaluation =\n        evaluations[evaluation_indices[detection_sorted_index]];\n    const auto num_detections =\n        evaluation.detection_matches.size() / num_iou_thresholds;\n    const auto detection_index = iou_threshold_index * num_detections +\n        image_detection_indices[detection_sorted_index];\n    assert(evaluation.detection_matches.size() > detection_index);\n    assert(evaluation.detection_ignores.size() > detection_index);\n    const int64_t detection_match =\n        evaluation.detection_matches[detection_index];\n    const bool detection_ignores =\n        evaluation.detection_ignores[detection_index];\n    const auto true_positive = detection_match > 0 && !detection_ignores;\n    const auto false_positive = detection_match == 0 && !detection_ignores;\n    if (true_positive) {\n      ++true_positives_sum;\n    }\n    if (false_positive) {\n      ++false_positives_sum;\n    }\n\n    const double recall =\n        static_cast<double>(true_positives_sum) / num_valid_ground_truth;\n    recalls->push_back(recall);\n    const int64_t num_valid_detections =\n        true_positives_sum + false_positives_sum;\n    const double precision = num_valid_detections > 0\n        ? static_cast<double>(true_positives_sum) / num_valid_detections\n        : 0.0;\n    precisions->push_back(precision);\n  }\n\n  (*recalls_out)[recalls_out_index] = !recalls->empty() ? 
recalls->back() : 0;\n\n  for (int64_t i = static_cast<int64_t>(precisions->size()) - 1; i > 0; --i) {\n    if ((*precisions)[i] > (*precisions)[i - 1]) {\n      (*precisions)[i - 1] = (*precisions)[i];\n    }\n  }\n\n  // Sample the per instance precision/recall list at each recall threshold\n  for (size_t r = 0; r < recall_thresholds.size(); ++r) {\n    // first index in recalls >= recall_thresholds[r]\n    std::vector<double>::iterator low = std::lower_bound(\n        recalls->begin(), recalls->end(), recall_thresholds[r]);\n    size_t precisions_index = low - recalls->begin();\n\n    const auto results_ind = precisions_out_index + r * precisions_out_stride;\n    assert(results_ind < precisions_out->size());\n    assert(results_ind < scores_out->size());\n    if (precisions_index < precisions->size()) {\n      (*precisions_out)[results_ind] = (*precisions)[precisions_index];\n      (*scores_out)[results_ind] =\n          detection_scores[detection_sorted_indices[precisions_index]];\n    } else {\n      (*precisions_out)[results_ind] = 0;\n      (*scores_out)[results_ind] = 0;\n    }\n  }\n}\npy::dict Accumulate(\n    const py::object& params,\n    const std::vector<ImageEvaluation>& evaluations) {\n  const std::vector<double> recall_thresholds =\n      list_to_vec<double>(params.attr(\"recThrs\"));\n  const std::vector<int> max_detections =\n      list_to_vec<int>(params.attr(\"maxDets\"));\n  const int num_iou_thresholds = py::len(params.attr(\"iouThrs\"));\n  const int num_recall_thresholds = py::len(params.attr(\"recThrs\"));\n  const int num_categories = params.attr(\"useCats\").cast<int>() == 1\n      ? 
py::len(params.attr(\"catIds\"))\n      : 1;\n  const int num_area_ranges = py::len(params.attr(\"areaRng\"));\n  const int num_max_detections = py::len(params.attr(\"maxDets\"));\n  const int num_images = py::len(params.attr(\"imgIds\"));\n\n  std::vector<double> precisions_out(\n      num_iou_thresholds * num_recall_thresholds * num_categories *\n          num_area_ranges * num_max_detections,\n      -1);\n  std::vector<double> recalls_out(\n      num_iou_thresholds * num_categories * num_area_ranges *\n          num_max_detections,\n      -1);\n  std::vector<double> scores_out(\n      num_iou_thresholds * num_recall_thresholds * num_categories *\n          num_area_ranges * num_max_detections,\n      -1);\n\n  // Consider the list of all detected instances in the entire dataset in one\n  // large list.  evaluation_indices, detection_scores,\n  // image_detection_indices, and detection_sorted_indices all have the same\n  // length as this list, such that each entry corresponds to one detected\n  // instance\n  std::vector<uint64_t> evaluation_indices; // indices into evaluations[]\n  std::vector<double> detection_scores; // detection scores of each instance\n  std::vector<uint64_t> detection_sorted_indices; // sorted indices of all\n                                                  // instances in the dataset\n  std::vector<uint64_t>\n      image_detection_indices; // indices into the list of detected instances in\n                               // the same image as each instance\n  std::vector<double> precisions, recalls;\n\n  for (auto c = 0; c < num_categories; ++c) {\n    for (auto a = 0; a < num_area_ranges; ++a) {\n      for (auto m = 0; m < num_max_detections; ++m) {\n        // The COCO PythonAPI assumes evaluations[] (the return value of\n        // COCOeval::EvaluateImages() is one long list storing results for each\n        // combination of category, area range, and image id, with categories in\n        // the outermost loop and images in the 
innermost loop.\n        const int64_t evaluations_index =\n            c * num_area_ranges * num_images + a * num_images;\n        int num_valid_ground_truth = BuildSortedDetectionList(\n            evaluations,\n            evaluations_index,\n            num_images,\n            max_detections[m],\n            &evaluation_indices,\n            &detection_scores,\n            &detection_sorted_indices,\n            &image_detection_indices);\n\n        if (num_valid_ground_truth == 0) {\n          continue;\n        }\n\n        for (auto t = 0; t < num_iou_thresholds; ++t) {\n          // recalls_out is a flattened vectors representing a\n          // num_iou_thresholds X num_categories X num_area_ranges X\n          // num_max_detections matrix\n          const int64_t recalls_out_index =\n              t * num_categories * num_area_ranges * num_max_detections +\n              c * num_area_ranges * num_max_detections +\n              a * num_max_detections + m;\n\n          // precisions_out and scores_out are flattened vectors\n          // representing a num_iou_thresholds X num_recall_thresholds X\n          // num_categories X num_area_ranges X num_max_detections matrix\n          const int64_t precisions_out_stride =\n              num_categories * num_area_ranges * num_max_detections;\n          const int64_t precisions_out_index = t * num_recall_thresholds *\n                  num_categories * num_area_ranges * num_max_detections +\n              c * num_area_ranges * num_max_detections +\n              a * num_max_detections + m;\n\n          ComputePrecisionRecallCurve(\n              precisions_out_index,\n              precisions_out_stride,\n              recalls_out_index,\n              recall_thresholds,\n              t,\n              num_iou_thresholds,\n              num_valid_ground_truth,\n              evaluations,\n              evaluation_indices,\n              detection_scores,\n              detection_sorted_indices,\n              
image_detection_indices,\n              &precisions,\n              &recalls,\n              &precisions_out,\n              &scores_out,\n              &recalls_out);\n        }\n      }\n    }\n  }\n\n  time_t rawtime;\n  struct tm local_time;\n  std::array<char, 200> buffer;\n  time(&rawtime);\n#ifdef _WIN32\n  localtime_s(&local_time, &rawtime);\n#else\n  localtime_r(&rawtime, &local_time);\n#endif\n  strftime(\n      buffer.data(), 200, \"%Y-%m-%d %H:%num_max_detections:%S\", &local_time);\n  return py::dict(\n      \"params\"_a = params,\n      \"counts\"_a = std::vector<int64_t>(\n          {num_iou_thresholds,\n           num_recall_thresholds,\n           num_categories,\n           num_area_ranges,\n           num_max_detections}),\n      \"date\"_a = buffer,\n      \"precision\"_a = precisions_out,\n      \"recall\"_a = recalls_out,\n      \"scores\"_a = scores_out);\n}\n\n} // namespace COCOeval\n\n} // namespace detectron2\n"
  },
  {
    "path": "detectron2/layers/csrc/cocoeval/cocoeval.h",
    "content": "// Copyright (c) Facebook, Inc. and its affiliates.\n#pragma once\n\n#include <pybind11/numpy.h>\n#include <pybind11/pybind11.h>\n#include <pybind11/stl.h>\n#include <pybind11/stl_bind.h>\n#include <vector>\n\nnamespace py = pybind11;\n\nnamespace detectron2 {\n\nnamespace COCOeval {\n\n// Annotation data for a single object instance in an image\nstruct InstanceAnnotation {\n  InstanceAnnotation(\n      uint64_t id,\n      double score,\n      double area,\n      bool is_crowd,\n      bool ignore)\n      : id{id}, score{score}, area{area}, is_crowd{is_crowd}, ignore{ignore} {}\n  uint64_t id;\n  double score = 0.;\n  double area = 0.;\n  bool is_crowd = false;\n  bool ignore = false;\n};\n\n// Stores intermediate results for evaluating detection results for a single\n// image that has D detected instances and G ground truth instances. This stores\n// matches between detected and ground truth instances\nstruct ImageEvaluation {\n  // For each of the D detected instances, the id of the matched ground truth\n  // instance, or 0 if unmatched\n  std::vector<uint64_t> detection_matches;\n\n  // The detection score of each of the D detected instances\n  std::vector<double> detection_scores;\n\n  // Marks whether or not each of G instances was ignored from evaluation (e.g.,\n  // because it's outside area_range)\n  std::vector<bool> ground_truth_ignores;\n\n  // Marks whether or not each of D instances was ignored from evaluation (e.g.,\n  // because it's outside aRng)\n  std::vector<bool> detection_ignores;\n};\n\ntemplate <class T>\nusing ImageCategoryInstances = std::vector<std::vector<std::vector<T>>>;\n\n// C++ implementation of COCO API cocoeval.py::COCOeval.evaluateImg().  
For each\n// combination of image, category, area range settings, and IOU thresholds to\n// evaluate, it matches detected instances to ground truth instances and stores\n// the results into a vector of ImageEvaluation results, which will be\n// interpreted by the COCOeval::Accumulate() function to produce precion-recall\n// curves.  The parameters of nested vectors have the following semantics:\n//   image_category_ious[i][c][d][g] is the intersection over union of the d'th\n//     detected instance and g'th ground truth instance of\n//     category category_ids[c] in image image_ids[i]\n//   image_category_ground_truth_instances[i][c] is a vector of ground truth\n//     instances in image image_ids[i] of category category_ids[c]\n//   image_category_detection_instances[i][c] is a vector of detected\n//     instances in image image_ids[i] of category category_ids[c]\nstd::vector<ImageEvaluation> EvaluateImages(\n    const std::vector<std::array<double, 2>>& area_ranges, // vector of 2-tuples\n    int max_detections,\n    const std::vector<double>& iou_thresholds,\n    const ImageCategoryInstances<std::vector<double>>& image_category_ious,\n    const ImageCategoryInstances<InstanceAnnotation>&\n        image_category_ground_truth_instances,\n    const ImageCategoryInstances<InstanceAnnotation>&\n        image_category_detection_instances);\n\n// C++ implementation of COCOeval.accumulate(), which generates precision\n// recall curves for each set of category, IOU threshold, detection area range,\n// and max number of detections parameters.  It is assumed that the parameter\n// evaluations is the return value of the functon COCOeval::EvaluateImages(),\n// which was called with the same parameter settings params\npy::dict Accumulate(\n    const py::object& params,\n    const std::vector<ImageEvaluation>& evalutations);\n\n} // namespace COCOeval\n} // namespace detectron2\n"
  },
  {
    "path": "detectron2/layers/csrc/cuda_version.cu",
    "content": "// Copyright (c) Facebook, Inc. and its affiliates.\n\n#include <cuda_runtime_api.h>\n\nnamespace detectron2 {\nint get_cudart_version() {\n// Not a ROCM platform: Either HIP is not used, or\n// it is used, but platform is not ROCM (i.e. it is CUDA)\n#if !defined(__HIP_PLATFORM_HCC__)\n  return CUDART_VERSION;\n#else\n  int version = 0;\n\n#if HIP_VERSION_MAJOR != 0\n  // Create a convention similar to that of CUDA, as assumed by other\n  // parts of the code.\n\n  version = HIP_VERSION_MINOR;\n  version += (HIP_VERSION_MAJOR * 100);\n#else\n  hipRuntimeGetVersion(&version);\n#endif\n  return version;\n#endif\n}\n} // namespace detectron2\n"
  },
  {
    "path": "detectron2/layers/csrc/deformable/deform_conv.h",
    "content": "// Copyright (c) Facebook, Inc. and its affiliates.\n#pragma once\n#include <torch/types.h>\n\nnamespace detectron2 {\n\n#if defined(WITH_CUDA) || defined(WITH_HIP)\nint deform_conv_forward_cuda(\n    at::Tensor input,\n    at::Tensor weight,\n    at::Tensor offset,\n    at::Tensor output,\n    at::Tensor columns,\n    at::Tensor ones,\n    int kW,\n    int kH,\n    int dW,\n    int dH,\n    int padW,\n    int padH,\n    int dilationW,\n    int dilationH,\n    int group,\n    int deformable_group,\n    int im2col_step);\n\nint deform_conv_backward_input_cuda(\n    at::Tensor input,\n    at::Tensor offset,\n    at::Tensor gradOutput,\n    at::Tensor gradInput,\n    at::Tensor gradOffset,\n    at::Tensor weight,\n    at::Tensor columns,\n    int kW,\n    int kH,\n    int dW,\n    int dH,\n    int padW,\n    int padH,\n    int dilationW,\n    int dilationH,\n    int group,\n    int deformable_group,\n    int im2col_step);\n\nint deform_conv_backward_parameters_cuda(\n    at::Tensor input,\n    at::Tensor offset,\n    at::Tensor gradOutput,\n    at::Tensor gradWeight, // at::Tensor gradBias,\n    at::Tensor columns,\n    at::Tensor ones,\n    int kW,\n    int kH,\n    int dW,\n    int dH,\n    int padW,\n    int padH,\n    int dilationW,\n    int dilationH,\n    int group,\n    int deformable_group,\n    float scale,\n    int im2col_step);\n\nvoid modulated_deform_conv_cuda_forward(\n    at::Tensor input,\n    at::Tensor weight,\n    at::Tensor bias,\n    at::Tensor ones,\n    at::Tensor offset,\n    at::Tensor mask,\n    at::Tensor output,\n    at::Tensor columns,\n    int kernel_h,\n    int kernel_w,\n    const int stride_h,\n    const int stride_w,\n    const int pad_h,\n    const int pad_w,\n    const int dilation_h,\n    const int dilation_w,\n    const int group,\n    const int deformable_group,\n    const bool with_bias);\n\nvoid modulated_deform_conv_cuda_backward(\n    at::Tensor input,\n    at::Tensor weight,\n    at::Tensor bias,\n    
at::Tensor ones,\n    at::Tensor offset,\n    at::Tensor mask,\n    at::Tensor columns,\n    at::Tensor grad_input,\n    at::Tensor grad_weight,\n    at::Tensor grad_bias,\n    at::Tensor grad_offset,\n    at::Tensor grad_mask,\n    at::Tensor grad_output,\n    int kernel_h,\n    int kernel_w,\n    int stride_h,\n    int stride_w,\n    int pad_h,\n    int pad_w,\n    int dilation_h,\n    int dilation_w,\n    int group,\n    int deformable_group,\n    const bool with_bias);\n\n#endif\n\ninline int deform_conv_forward(\n    at::Tensor input,\n    at::Tensor weight,\n    at::Tensor offset,\n    at::Tensor output,\n    at::Tensor columns,\n    at::Tensor ones,\n    int kW,\n    int kH,\n    int dW,\n    int dH,\n    int padW,\n    int padH,\n    int dilationW,\n    int dilationH,\n    int group,\n    int deformable_group,\n    int im2col_step) {\n  if (input.is_cuda()) {\n#if defined(WITH_CUDA) || defined(WITH_HIP)\n    TORCH_CHECK(weight.is_cuda(), \"weight tensor is not on GPU!\");\n    TORCH_CHECK(offset.is_cuda(), \"offset tensor is not on GPU!\");\n    return deform_conv_forward_cuda(\n        input,\n        weight,\n        offset,\n        output,\n        columns,\n        ones,\n        kW,\n        kH,\n        dW,\n        dH,\n        padW,\n        padH,\n        dilationW,\n        dilationH,\n        group,\n        deformable_group,\n        im2col_step);\n#else\n    AT_ERROR(\"Detectron2 is not compiled with GPU support!\");\n#endif\n  }\n  AT_ERROR(\"This operator is not implemented on CPU\");\n}\n\ninline int deform_conv_backward_input(\n    at::Tensor input,\n    at::Tensor offset,\n    at::Tensor gradOutput,\n    at::Tensor gradInput,\n    at::Tensor gradOffset,\n    at::Tensor weight,\n    at::Tensor columns,\n    int kW,\n    int kH,\n    int dW,\n    int dH,\n    int padW,\n    int padH,\n    int dilationW,\n    int dilationH,\n    int group,\n    int deformable_group,\n    int im2col_step) {\n  if (gradOutput.is_cuda()) {\n#if 
defined(WITH_CUDA) || defined(WITH_HIP)\n    TORCH_CHECK(input.is_cuda(), \"input tensor is not on GPU!\");\n    TORCH_CHECK(weight.is_cuda(), \"weight tensor is not on GPU!\");\n    TORCH_CHECK(offset.is_cuda(), \"offset tensor is not on GPU!\");\n    return deform_conv_backward_input_cuda(\n        input,\n        offset,\n        gradOutput,\n        gradInput,\n        gradOffset,\n        weight,\n        columns,\n        kW,\n        kH,\n        dW,\n        dH,\n        padW,\n        padH,\n        dilationW,\n        dilationH,\n        group,\n        deformable_group,\n        im2col_step);\n#else\n    AT_ERROR(\"Detectron2 is not compiled with GPU support!\");\n#endif\n  }\n  AT_ERROR(\"This operator is not implemented on CPU\");\n}\n\ninline int deform_conv_backward_filter(\n    at::Tensor input,\n    at::Tensor offset,\n    at::Tensor gradOutput,\n    at::Tensor gradWeight, // at::Tensor gradBias,\n    at::Tensor columns,\n    at::Tensor ones,\n    int kW,\n    int kH,\n    int dW,\n    int dH,\n    int padW,\n    int padH,\n    int dilationW,\n    int dilationH,\n    int group,\n    int deformable_group,\n    float scale,\n    int im2col_step) {\n  if (gradOutput.is_cuda()) {\n#if defined(WITH_CUDA) || defined(WITH_HIP)\n    TORCH_CHECK(input.is_cuda(), \"input tensor is not on GPU!\");\n    TORCH_CHECK(offset.is_cuda(), \"offset tensor is not on GPU!\");\n    return deform_conv_backward_parameters_cuda(\n        input,\n        offset,\n        gradOutput,\n        gradWeight,\n        columns,\n        ones,\n        kW,\n        kH,\n        dW,\n        dH,\n        padW,\n        padH,\n        dilationW,\n        dilationH,\n        group,\n        deformable_group,\n        scale,\n        im2col_step);\n#else\n    AT_ERROR(\"Detectron2 is not compiled with GPU support!\");\n#endif\n  }\n  AT_ERROR(\"This operator is not implemented on CPU\");\n}\n\ninline void modulated_deform_conv_forward(\n    at::Tensor input,\n    at::Tensor weight,\n   
 at::Tensor bias,\n    at::Tensor ones,\n    at::Tensor offset,\n    at::Tensor mask,\n    at::Tensor output,\n    at::Tensor columns,\n    int kernel_h,\n    int kernel_w,\n    const int stride_h,\n    const int stride_w,\n    const int pad_h,\n    const int pad_w,\n    const int dilation_h,\n    const int dilation_w,\n    const int group,\n    const int deformable_group,\n    const bool with_bias) {\n  if (input.is_cuda()) {\n#if defined(WITH_CUDA) || defined(WITH_HIP)\n    TORCH_CHECK(weight.is_cuda(), \"weight tensor is not on GPU!\");\n    TORCH_CHECK(bias.is_cuda(), \"bias tensor is not on GPU!\");\n    TORCH_CHECK(offset.is_cuda(), \"offset tensor is not on GPU!\");\n    return modulated_deform_conv_cuda_forward(\n        input,\n        weight,\n        bias,\n        ones,\n        offset,\n        mask,\n        output,\n        columns,\n        kernel_h,\n        kernel_w,\n        stride_h,\n        stride_w,\n        pad_h,\n        pad_w,\n        dilation_h,\n        dilation_w,\n        group,\n        deformable_group,\n        with_bias);\n#else\n    AT_ERROR(\"Detectron2 is not compiled with GPU support!\");\n#endif\n  }\n  AT_ERROR(\"This operator is not implemented on CPU\");\n}\n\ninline void modulated_deform_conv_backward(\n    at::Tensor input,\n    at::Tensor weight,\n    at::Tensor bias,\n    at::Tensor ones,\n    at::Tensor offset,\n    at::Tensor mask,\n    at::Tensor columns,\n    at::Tensor grad_input,\n    at::Tensor grad_weight,\n    at::Tensor grad_bias,\n    at::Tensor grad_offset,\n    at::Tensor grad_mask,\n    at::Tensor grad_output,\n    int kernel_h,\n    int kernel_w,\n    int stride_h,\n    int stride_w,\n    int pad_h,\n    int pad_w,\n    int dilation_h,\n    int dilation_w,\n    int group,\n    int deformable_group,\n    const bool with_bias) {\n  if (grad_output.is_cuda()) {\n#if defined(WITH_CUDA) || defined(WITH_HIP)\n    TORCH_CHECK(input.is_cuda(), \"input tensor is not on GPU!\");\n    TORCH_CHECK(weight.is_cuda(), 
\"weight tensor is not on GPU!\");\n    TORCH_CHECK(bias.is_cuda(), \"bias tensor is not on GPU!\");\n    TORCH_CHECK(offset.is_cuda(), \"offset tensor is not on GPU!\");\n    return modulated_deform_conv_cuda_backward(\n        input,\n        weight,\n        bias,\n        ones,\n        offset,\n        mask,\n        columns,\n        grad_input,\n        grad_weight,\n        grad_bias,\n        grad_offset,\n        grad_mask,\n        grad_output,\n        kernel_h,\n        kernel_w,\n        stride_h,\n        stride_w,\n        pad_h,\n        pad_w,\n        dilation_h,\n        dilation_w,\n        group,\n        deformable_group,\n        with_bias);\n#else\n    AT_ERROR(\"Detectron2 is not compiled with GPU support!\");\n#endif\n  }\n  AT_ERROR(\"This operator is not implemented on CPU\");\n}\n\n} // namespace detectron2\n"
  },
  {
    "path": "detectron2/layers/csrc/deformable/deform_conv_cuda.cu",
    "content": "// Copyright (c) Facebook, Inc. and its affiliates.\n\n// modified from\n// https://github.com/open-mmlab/mmdetection/blob/master/mmdet/ops/dcn/src/deform_conv_cuda.cpp\n// Original license: Apache 2.0\n\n// modify from\n// https://github.com/chengdazhi/Deformable-Convolution-V2-PyTorch/blob/mmdetection/mmdet/ops/dcn/src/deform_conv_cuda.c\n// Original license: Apache 2.0\n\n#include <torch/types.h>\n\n#include \"deform_conv.h\"\n\n#include <cmath>\n#include <vector>\n\nnamespace detectron2 {\n\nvoid deformable_im2col(\n    const at::Tensor data_im,\n    const at::Tensor data_offset,\n    const int channels,\n    const int height,\n    const int width,\n    const int ksize_h,\n    const int ksize_w,\n    const int pad_h,\n    const int pad_w,\n    const int stride_h,\n    const int stride_w,\n    const int dilation_h,\n    const int dilation_w,\n    const int parallel_imgs,\n    const int deformable_group,\n    at::Tensor data_col);\n\nvoid deformable_col2im(\n    const at::Tensor data_col,\n    const at::Tensor data_offset,\n    const int channels,\n    const int height,\n    const int width,\n    const int ksize_h,\n    const int ksize_w,\n    const int pad_h,\n    const int pad_w,\n    const int stride_h,\n    const int stride_w,\n    const int dilation_h,\n    const int dilation_w,\n    const int parallel_imgs,\n    const int deformable_group,\n    at::Tensor grad_im);\n\nvoid deformable_col2im_coord(\n    const at::Tensor data_col,\n    const at::Tensor data_im,\n    const at::Tensor data_offset,\n    const int channels,\n    const int height,\n    const int width,\n    const int ksize_h,\n    const int ksize_w,\n    const int pad_h,\n    const int pad_w,\n    const int stride_h,\n    const int stride_w,\n    const int dilation_h,\n    const int dilation_w,\n    const int parallel_imgs,\n    const int deformable_group,\n    at::Tensor grad_offset);\n\nvoid modulated_deformable_im2col_cuda(\n    const at::Tensor data_im,\n    const at::Tensor 
data_offset,\n    const at::Tensor data_mask,\n    const int batch_size,\n    const int channels,\n    const int height_im,\n    const int width_im,\n    const int height_col,\n    const int width_col,\n    const int kernel_h,\n    const int kenerl_w,\n    const int pad_h,\n    const int pad_w,\n    const int stride_h,\n    const int stride_w,\n    const int dilation_h,\n    const int dilation_w,\n    const int deformable_group,\n    at::Tensor data_col);\n\nvoid modulated_deformable_col2im_cuda(\n    const at::Tensor data_col,\n    const at::Tensor data_offset,\n    const at::Tensor data_mask,\n    const int batch_size,\n    const int channels,\n    const int height_im,\n    const int width_im,\n    const int height_col,\n    const int width_col,\n    const int kernel_h,\n    const int kenerl_w,\n    const int pad_h,\n    const int pad_w,\n    const int stride_h,\n    const int stride_w,\n    const int dilation_h,\n    const int dilation_w,\n    const int deformable_group,\n    at::Tensor grad_im);\n\nvoid modulated_deformable_col2im_coord_cuda(\n    const at::Tensor data_col,\n    const at::Tensor data_im,\n    const at::Tensor data_offset,\n    const at::Tensor data_mask,\n    const int batch_size,\n    const int channels,\n    const int height_im,\n    const int width_im,\n    const int height_col,\n    const int width_col,\n    const int kernel_h,\n    const int kenerl_w,\n    const int pad_h,\n    const int pad_w,\n    const int stride_h,\n    const int stride_w,\n    const int dilation_h,\n    const int dilation_w,\n    const int deformable_group,\n    at::Tensor grad_offset,\n    at::Tensor grad_mask);\n\nvoid shape_check(\n    at::Tensor input,\n    at::Tensor offset,\n    at::Tensor* gradOutput,\n    at::Tensor weight,\n    int kH,\n    int kW,\n    int dH,\n    int dW,\n    int padH,\n    int padW,\n    int dilationH,\n    int dilationW,\n    int group,\n    int deformable_group) {\n  TORCH_CHECK(\n      weight.ndimension() == 4,\n      \"4D weight 
tensor (nOutputPlane,nInputPlane,kH,kW) expected, \"\n      \"but got: %s\",\n      weight.ndimension());\n\n  TORCH_CHECK(weight.is_contiguous(), \"weight tensor has to be contiguous\");\n\n  TORCH_CHECK(\n      kW > 0 && kH > 0,\n      \"kernel size should be greater than zero, but got kH: %d kW: %d\",\n      kH,\n      kW);\n\n  TORCH_CHECK(\n      (weight.size(2) == kH && weight.size(3) == kW),\n      \"kernel size should be consistent with weight, \",\n      \"but got kH: %d kW: %d weight.size(2): %d, weight.size(3): %d\",\n      kH,\n      kW,\n      weight.size(2),\n      weight.size(3));\n\n  TORCH_CHECK(\n      dW > 0 && dH > 0,\n      \"stride should be greater than zero, but got dH: %d dW: %d\",\n      dH,\n      dW);\n\n  TORCH_CHECK(\n      dilationW > 0 && dilationH > 0,\n      \"dilation should be greater than 0, but got dilationH: %d dilationW: %d\",\n      dilationH,\n      dilationW);\n\n  int ndim = input.ndimension();\n  int dimf = 0;\n  int dimh = 1;\n  int dimw = 2;\n\n  if (ndim == 4) {\n    dimf++;\n    dimh++;\n    dimw++;\n  }\n\n  TORCH_CHECK(\n      ndim == 3 || ndim == 4,\n      \"3D or 4D input tensor expected but got: %s\",\n      ndim);\n\n  long nInputPlane = weight.size(1) * group;\n  long inputHeight = input.size(dimh);\n  long inputWidth = input.size(dimw);\n  long nOutputPlane = weight.size(0);\n  long outputHeight =\n      (inputHeight + 2 * padH - (dilationH * (kH - 1) + 1)) / dH + 1;\n  long outputWidth =\n      (inputWidth + 2 * padW - (dilationW * (kW - 1) + 1)) / dW + 1;\n\n  TORCH_CHECK(\n      nInputPlane % deformable_group == 0,\n      \"input channels must divide deformable group size\");\n\n  if (outputWidth < 1 || outputHeight < 1)\n    AT_ERROR(\n        \"Given input size: (%ld x %ld x %ld). \"\n        \"Calculated output size: (%ld x %ld x %ld). 
Output size is too small\",\n        nInputPlane,\n        inputHeight,\n        inputWidth,\n        nOutputPlane,\n        outputHeight,\n        outputWidth);\n\n  TORCH_CHECK(\n      input.size(1) == nInputPlane,\n      \"invalid number of input planes, expected: %d, but got: %d\",\n      nInputPlane,\n      input.size(1));\n\n  TORCH_CHECK(\n      (inputHeight + 2 * padH >= kH && inputWidth + 2 * padW >= kW),\n      \"input image is smaller than kernel\");\n\n  TORCH_CHECK(\n      (offset.size(2) == outputHeight && offset.size(3) == outputWidth),\n      \"invalid spatial size of offset, expected height: %d width: %d, but \"\n      \"got height: %d width: %d\",\n      outputHeight,\n      outputWidth,\n      offset.size(2),\n      offset.size(3));\n\n  TORCH_CHECK(\n      (offset.size(1) == deformable_group * 2 * kH * kW),\n      \"invalid number of channels of offset\");\n\n  if (gradOutput != NULL) {\n    TORCH_CHECK(\n        gradOutput->size(dimf) == nOutputPlane,\n        \"invalid number of gradOutput planes, expected: %d, but got: %d\",\n        nOutputPlane,\n        gradOutput->size(dimf));\n\n    TORCH_CHECK(\n        (gradOutput->size(dimh) == outputHeight &&\n         gradOutput->size(dimw) == outputWidth),\n        \"invalid size of gradOutput, expected height: %d width: %d , but \"\n        \"got height: %d width: %d\",\n        outputHeight,\n        outputWidth,\n        gradOutput->size(dimh),\n        gradOutput->size(dimw));\n  }\n}\n\nint deform_conv_forward_cuda(\n    at::Tensor input,\n    at::Tensor weight,\n    at::Tensor offset,\n    at::Tensor output,\n    at::Tensor columns,\n    at::Tensor ones,\n    int kW,\n    int kH,\n    int dW,\n    int dH,\n    int padW,\n    int padH,\n    int dilationW,\n    int dilationH,\n    int group,\n    int deformable_group,\n    int im2col_step) {\n  // todo: resize columns to include im2col: done\n  // todo: add im2col_step as input\n  // todo: add new output buffer and transpose it to output (or 
directly\n  // transpose output) todo: possibly change data indexing because of\n  // parallel_imgs\n\n  shape_check(\n      input,\n      offset,\n      NULL,\n      weight,\n      kH,\n      kW,\n      dH,\n      dW,\n      padH,\n      padW,\n      dilationH,\n      dilationW,\n      group,\n      deformable_group);\n\n  input = input.contiguous();\n  offset = offset.contiguous();\n  weight = weight.contiguous();\n\n  int batch = 1;\n  if (input.ndimension() == 3) {\n    // Force batch\n    batch = 0;\n    input.unsqueeze_(0);\n    offset.unsqueeze_(0);\n  }\n\n  // todo: assert batchsize dividable by im2col_step\n\n  long batchSize = input.size(0);\n  long nInputPlane = input.size(1);\n  long inputHeight = input.size(2);\n  long inputWidth = input.size(3);\n\n  long nOutputPlane = weight.size(0);\n\n  long outputWidth =\n      (inputWidth + 2 * padW - (dilationW * (kW - 1) + 1)) / dW + 1;\n  long outputHeight =\n      (inputHeight + 2 * padH - (dilationH * (kH - 1) + 1)) / dH + 1;\n\n  TORCH_CHECK((offset.size(0) == batchSize), \"invalid batch size of offset\");\n\n  output = output.view(\n      {batchSize / im2col_step,\n       im2col_step,\n       nOutputPlane,\n       outputHeight,\n       outputWidth});\n  columns = at::zeros(\n      {nInputPlane * kW * kH, im2col_step * outputHeight * outputWidth},\n      input.options());\n\n  if (ones.ndimension() != 2 ||\n      ones.size(0) * ones.size(1) < outputHeight * outputWidth) {\n    ones = at::ones({outputHeight, outputWidth}, input.options());\n  }\n\n  input = input.view(\n      {batchSize / im2col_step,\n       im2col_step,\n       nInputPlane,\n       inputHeight,\n       inputWidth});\n  offset = offset.view(\n      {batchSize / im2col_step,\n       im2col_step,\n       deformable_group * 2 * kH * kW,\n       outputHeight,\n       outputWidth});\n\n  at::Tensor output_buffer = at::zeros(\n      {batchSize / im2col_step,\n       nOutputPlane,\n       im2col_step * outputHeight,\n       outputWidth},\n      
output.options());\n\n  output_buffer = output_buffer.view(\n      {output_buffer.size(0),\n       group,\n       output_buffer.size(1) / group,\n       output_buffer.size(2),\n       output_buffer.size(3)});\n\n  for (int elt = 0; elt < batchSize / im2col_step; elt++) {\n    deformable_im2col(\n        input[elt],\n        offset[elt],\n        nInputPlane,\n        inputHeight,\n        inputWidth,\n        kH,\n        kW,\n        padH,\n        padW,\n        dH,\n        dW,\n        dilationH,\n        dilationW,\n        im2col_step,\n        deformable_group,\n        columns);\n\n    columns = columns.view({group, columns.size(0) / group, columns.size(1)});\n    weight = weight.view(\n        {group,\n         weight.size(0) / group,\n         weight.size(1),\n         weight.size(2),\n         weight.size(3)});\n\n    for (int g = 0; g < group; g++) {\n      output_buffer[elt][g] = output_buffer[elt][g]\n                                  .flatten(1)\n                                  .addmm_(weight[g].flatten(1), columns[g])\n                                  .view_as(output_buffer[elt][g]);\n    }\n  }\n\n  output_buffer = output_buffer.view(\n      {output_buffer.size(0),\n       output_buffer.size(1) * output_buffer.size(2),\n       output_buffer.size(3),\n       output_buffer.size(4)});\n\n  output_buffer = output_buffer.view(\n      {batchSize / im2col_step,\n       nOutputPlane,\n       im2col_step,\n       outputHeight,\n       outputWidth});\n  output_buffer.transpose_(1, 2);\n  output.copy_(output_buffer);\n  output = output.view({batchSize, nOutputPlane, outputHeight, outputWidth});\n\n  input = input.view({batchSize, nInputPlane, inputHeight, inputWidth});\n  offset = offset.view(\n      {batchSize, deformable_group * 2 * kH * kW, outputHeight, outputWidth});\n\n  if (batch == 0) {\n    output = output.view({nOutputPlane, outputHeight, outputWidth});\n    input = input.view({nInputPlane, inputHeight, inputWidth});\n    offset = 
offset.view({offset.size(1), offset.size(2), offset.size(3)});\n  }\n\n  return 1;\n}\n\nint deform_conv_backward_input_cuda(\n    at::Tensor input,\n    at::Tensor offset,\n    at::Tensor gradOutput,\n    at::Tensor gradInput,\n    at::Tensor gradOffset,\n    at::Tensor weight,\n    at::Tensor columns,\n    int kW,\n    int kH,\n    int dW,\n    int dH,\n    int padW,\n    int padH,\n    int dilationW,\n    int dilationH,\n    int group,\n    int deformable_group,\n    int im2col_step) {\n  shape_check(\n      input,\n      offset,\n      &gradOutput,\n      weight,\n      kH,\n      kW,\n      dH,\n      dW,\n      padH,\n      padW,\n      dilationH,\n      dilationW,\n      group,\n      deformable_group);\n\n  input = input.contiguous();\n  offset = offset.contiguous();\n  gradOutput = gradOutput.contiguous();\n  weight = weight.contiguous();\n\n  int batch = 1;\n\n  if (input.ndimension() == 3) {\n    // Force batch\n    batch = 0;\n    input = input.view({1, input.size(0), input.size(1), input.size(2)});\n    offset = offset.view({1, offset.size(0), offset.size(1), offset.size(2)});\n    gradOutput = gradOutput.view(\n        {1, gradOutput.size(0), gradOutput.size(1), gradOutput.size(2)});\n  }\n\n  long batchSize = input.size(0);\n  long nInputPlane = input.size(1);\n  long inputHeight = input.size(2);\n  long inputWidth = input.size(3);\n\n  long nOutputPlane = weight.size(0);\n\n  long outputWidth =\n      (inputWidth + 2 * padW - (dilationW * (kW - 1) + 1)) / dW + 1;\n  long outputHeight =\n      (inputHeight + 2 * padH - (dilationH * (kH - 1) + 1)) / dH + 1;\n\n  TORCH_CHECK((offset.size(0) == batchSize), \"invalid batch size of offset\");\n  gradInput = gradInput.view({batchSize, nInputPlane, inputHeight, inputWidth});\n  columns = at::zeros(\n      {nInputPlane * kW * kH, im2col_step * outputHeight * outputWidth},\n      input.options());\n\n  // change order of grad output\n  gradOutput = gradOutput.view(\n      {batchSize / im2col_step,\n       
im2col_step,\n       nOutputPlane,\n       outputHeight,\n       outputWidth});\n  gradOutput.transpose_(1, 2);\n\n  gradInput = gradInput.view(\n      {batchSize / im2col_step,\n       im2col_step,\n       nInputPlane,\n       inputHeight,\n       inputWidth});\n  input = input.view(\n      {batchSize / im2col_step,\n       im2col_step,\n       nInputPlane,\n       inputHeight,\n       inputWidth});\n  gradOffset = gradOffset.view(\n      {batchSize / im2col_step,\n       im2col_step,\n       deformable_group * 2 * kH * kW,\n       outputHeight,\n       outputWidth});\n  offset = offset.view(\n      {batchSize / im2col_step,\n       im2col_step,\n       deformable_group * 2 * kH * kW,\n       outputHeight,\n       outputWidth});\n\n  for (int elt = 0; elt < batchSize / im2col_step; elt++) {\n    // divide into groups\n    columns = columns.view({group, columns.size(0) / group, columns.size(1)});\n    weight = weight.view(\n        {group,\n         weight.size(0) / group,\n         weight.size(1),\n         weight.size(2),\n         weight.size(3)});\n    gradOutput = gradOutput.view(\n        {gradOutput.size(0),\n         group,\n         gradOutput.size(1) / group,\n         gradOutput.size(2),\n         gradOutput.size(3),\n         gradOutput.size(4)});\n\n    for (int g = 0; g < group; g++) {\n      columns[g] = columns[g].addmm_(\n          weight[g].flatten(1).transpose(0, 1),\n          gradOutput[elt][g].flatten(1),\n          0.0f,\n          1.0f);\n    }\n\n    columns =\n        columns.view({columns.size(0) * columns.size(1), columns.size(2)});\n    gradOutput = gradOutput.view(\n        {gradOutput.size(0),\n         gradOutput.size(1) * gradOutput.size(2),\n         gradOutput.size(3),\n         gradOutput.size(4),\n         gradOutput.size(5)});\n\n    deformable_col2im_coord(\n        columns,\n        input[elt],\n        offset[elt],\n        nInputPlane,\n        inputHeight,\n        inputWidth,\n        kH,\n        kW,\n        padH,\n     
   padW,\n        dH,\n        dW,\n        dilationH,\n        dilationW,\n        im2col_step,\n        deformable_group,\n        gradOffset[elt]);\n\n    deformable_col2im(\n        columns,\n        offset[elt],\n        nInputPlane,\n        inputHeight,\n        inputWidth,\n        kH,\n        kW,\n        padH,\n        padW,\n        dH,\n        dW,\n        dilationH,\n        dilationW,\n        im2col_step,\n        deformable_group,\n        gradInput[elt]);\n  }\n\n  gradOutput.transpose_(1, 2);\n  gradOutput =\n      gradOutput.view({batchSize, nOutputPlane, outputHeight, outputWidth});\n\n  gradInput = gradInput.view({batchSize, nInputPlane, inputHeight, inputWidth});\n  input = input.view({batchSize, nInputPlane, inputHeight, inputWidth});\n  gradOffset = gradOffset.view(\n      {batchSize, deformable_group * 2 * kH * kW, outputHeight, outputWidth});\n  offset = offset.view(\n      {batchSize, deformable_group * 2 * kH * kW, outputHeight, outputWidth});\n\n  if (batch == 0) {\n    gradOutput = gradOutput.view({nOutputPlane, outputHeight, outputWidth});\n    input = input.view({nInputPlane, inputHeight, inputWidth});\n    gradInput = gradInput.view({nInputPlane, inputHeight, inputWidth});\n    offset = offset.view({offset.size(1), offset.size(2), offset.size(3)});\n    gradOffset =\n        gradOffset.view({offset.size(1), offset.size(2), offset.size(3)});\n  }\n\n  return 1;\n}\n\nint deform_conv_backward_parameters_cuda(\n    at::Tensor input,\n    at::Tensor offset,\n    at::Tensor gradOutput,\n    at::Tensor gradWeight, // at::Tensor gradBias,\n    at::Tensor columns,\n    at::Tensor ones,\n    int kW,\n    int kH,\n    int dW,\n    int dH,\n    int padW,\n    int padH,\n    int dilationW,\n    int dilationH,\n    int group,\n    int deformable_group,\n    float scale,\n    int im2col_step) {\n  // todo: transpose and reshape outGrad\n  // todo: reshape columns\n  // todo: add im2col_step as input\n\n  shape_check(\n      input,\n      
offset,\n      &gradOutput,\n      gradWeight,\n      kH,\n      kW,\n      dH,\n      dW,\n      padH,\n      padW,\n      dilationH,\n      dilationW,\n      group,\n      deformable_group);\n\n  input = input.contiguous();\n  offset = offset.contiguous();\n  gradOutput = gradOutput.contiguous();\n\n  int batch = 1;\n\n  if (input.ndimension() == 3) {\n    // Force batch\n    batch = 0;\n    input = input.view(\n        at::IntList({1, input.size(0), input.size(1), input.size(2)}));\n    gradOutput = gradOutput.view(\n        {1, gradOutput.size(0), gradOutput.size(1), gradOutput.size(2)});\n  }\n\n  long batchSize = input.size(0);\n  long nInputPlane = input.size(1);\n  long inputHeight = input.size(2);\n  long inputWidth = input.size(3);\n\n  long nOutputPlane = gradWeight.size(0);\n\n  long outputWidth =\n      (inputWidth + 2 * padW - (dilationW * (kW - 1) + 1)) / dW + 1;\n  long outputHeight =\n      (inputHeight + 2 * padH - (dilationH * (kH - 1) + 1)) / dH + 1;\n\n  TORCH_CHECK((offset.size(0) == batchSize), \"invalid batch size of offset\");\n\n  columns = at::zeros(\n      {nInputPlane * kW * kH, im2col_step * outputHeight * outputWidth},\n      input.options());\n\n  gradOutput = gradOutput.view(\n      {batchSize / im2col_step,\n       im2col_step,\n       nOutputPlane,\n       outputHeight,\n       outputWidth});\n  gradOutput.transpose_(1, 2);\n\n  at::Tensor gradOutputBuffer = at::zeros_like(gradOutput);\n  gradOutputBuffer = gradOutputBuffer.view(\n      {batchSize / im2col_step,\n       nOutputPlane,\n       im2col_step,\n       outputHeight,\n       outputWidth});\n  gradOutputBuffer.copy_(gradOutput);\n  // gradOutput is not contiguous, so we do reshape (instead of view) next\n  gradOutputBuffer = gradOutputBuffer.reshape(\n      {batchSize / im2col_step,\n       nOutputPlane,\n       im2col_step * outputHeight,\n       outputWidth});\n\n  gradOutput.transpose_(1, 2);\n  gradOutput =\n      gradOutput.view({batchSize, nOutputPlane, outputHeight, 
outputWidth});\n\n  input = input.view(\n      {batchSize / im2col_step,\n       im2col_step,\n       nInputPlane,\n       inputHeight,\n       inputWidth});\n  offset = offset.view(\n      {batchSize / im2col_step,\n       im2col_step,\n       deformable_group * 2 * kH * kW,\n       outputHeight,\n       outputWidth});\n\n  for (int elt = 0; elt < batchSize / im2col_step; elt++) {\n    deformable_im2col(\n        input[elt],\n        offset[elt],\n        nInputPlane,\n        inputHeight,\n        inputWidth,\n        kH,\n        kW,\n        padH,\n        padW,\n        dH,\n        dW,\n        dilationH,\n        dilationW,\n        im2col_step,\n        deformable_group,\n        columns);\n\n    // divide into group\n    gradOutputBuffer = gradOutputBuffer.view(\n        {gradOutputBuffer.size(0),\n         group,\n         gradOutputBuffer.size(1) / group,\n         gradOutputBuffer.size(2),\n         gradOutputBuffer.size(3)});\n    columns = columns.view({group, columns.size(0) / group, columns.size(1)});\n    gradWeight = gradWeight.view(\n        {group,\n         gradWeight.size(0) / group,\n         gradWeight.size(1),\n         gradWeight.size(2),\n         gradWeight.size(3)});\n\n    for (int g = 0; g < group; g++) {\n      gradWeight[g] = gradWeight[g]\n                          .flatten(1)\n                          .addmm_(\n                              gradOutputBuffer[elt][g].flatten(1),\n                              columns[g].transpose(1, 0),\n                              1.0,\n                              scale)\n                          .view_as(gradWeight[g]);\n    }\n    gradOutputBuffer = gradOutputBuffer.view(\n        {gradOutputBuffer.size(0),\n         gradOutputBuffer.size(1) * gradOutputBuffer.size(2),\n         gradOutputBuffer.size(3),\n         gradOutputBuffer.size(4)});\n    columns =\n        columns.view({columns.size(0) * columns.size(1), columns.size(2)});\n    gradWeight = gradWeight.view(\n        
{gradWeight.size(0) * gradWeight.size(1),\n         gradWeight.size(2),\n         gradWeight.size(3),\n         gradWeight.size(4)});\n  }\n\n  input = input.view({batchSize, nInputPlane, inputHeight, inputWidth});\n  offset = offset.view(\n      {batchSize, deformable_group * 2 * kH * kW, outputHeight, outputWidth});\n\n  if (batch == 0) {\n    gradOutput = gradOutput.view({nOutputPlane, outputHeight, outputWidth});\n    input = input.view({nInputPlane, inputHeight, inputWidth});\n  }\n\n  return 1;\n}\n\nvoid modulated_deform_conv_cuda_forward(\n    at::Tensor input,\n    at::Tensor weight,\n    at::Tensor bias,\n    at::Tensor ones,\n    at::Tensor offset,\n    at::Tensor mask,\n    at::Tensor output,\n    at::Tensor columns,\n    int kernel_h,\n    int kernel_w,\n    const int stride_h,\n    const int stride_w,\n    const int pad_h,\n    const int pad_w,\n    const int dilation_h,\n    const int dilation_w,\n    const int group,\n    const int deformable_group,\n    const bool with_bias) {\n  shape_check(\n      input,\n      offset,\n      NULL,\n      weight,\n      kernel_h,\n      kernel_w,\n      stride_h,\n      stride_w,\n      pad_h,\n      pad_w,\n      dilation_h,\n      dilation_w,\n      group,\n      deformable_group);\n\n  TORCH_CHECK(input.is_contiguous(), \"input tensor has to be contiguous\");\n  TORCH_CHECK(weight.is_contiguous(), \"weight tensor has to be contiguous\");\n\n  const int batch = input.size(0);\n  const int channels = input.size(1);\n  const int height = input.size(2);\n  const int width = input.size(3);\n\n  const int channels_out = weight.size(0);\n  const int channels_kernel = weight.size(1);\n  const int kernel_h_ = weight.size(2);\n  const int kernel_w_ = weight.size(3);\n\n  if (kernel_h_ != kernel_h || kernel_w_ != kernel_w)\n    AT_ERROR(\n        \"Input shape and kernel shape wont match: (%d x %d vs %d x %d).\",\n        kernel_h,\n        kernel_w,\n        kernel_h_,\n        kernel_w_);\n  if (channels != 
channels_kernel * group)\n    AT_ERROR(\n        \"Input shape and kernel channels wont match: (%d vs %d).\",\n        channels,\n        channels_kernel * group);\n\n  const int height_out =\n      (height + 2 * pad_h - (dilation_h * (kernel_h - 1) + 1)) / stride_h + 1;\n  const int width_out =\n      (width + 2 * pad_w - (dilation_w * (kernel_w - 1) + 1)) / stride_w + 1;\n\n  // mask shape check\n  TORCH_CHECK(\n      (mask.size(2) == height_out && mask.size(3) == width_out),\n      \"invalid spatial size of mask, expected height: %d width: %d, but \"\n      \"got height: %d width: %d\",\n      height_out,\n      width_out,\n      mask.size(2),\n      mask.size(3));\n\n  TORCH_CHECK(\n      (mask.size(1) == deformable_group * kernel_h * kernel_w),\n      \"invalid number of channels of mask\");\n\n  if (ones.ndimension() != 2 ||\n      ones.size(0) * ones.size(1) < height_out * width_out) {\n    // Resize plane and fill with ones...\n    ones = at::ones({height_out, width_out}, input.options());\n  }\n\n  // resize output\n  output = output.view({batch, channels_out, height_out, width_out}).zero_();\n  // resize temporary columns\n  columns = at::zeros(\n      {channels * kernel_h * kernel_w, 1 * height_out * width_out},\n      input.options());\n\n  output = output.view(\n      {output.size(0),\n       group,\n       output.size(1) / group,\n       output.size(2),\n       output.size(3)});\n\n  for (int b = 0; b < batch; b++) {\n    modulated_deformable_im2col_cuda(\n        input[b],\n        offset[b],\n        mask[b],\n        1,\n        channels,\n        height,\n        width,\n        height_out,\n        width_out,\n        kernel_h,\n        kernel_w,\n        pad_h,\n        pad_w,\n        stride_h,\n        stride_w,\n        dilation_h,\n        dilation_w,\n        deformable_group,\n        columns);\n\n    // divide into group\n    weight = weight.view(\n        {group,\n         weight.size(0) / group,\n         weight.size(1),\n         
weight.size(2),\n         weight.size(3)});\n    columns = columns.view({group, columns.size(0) / group, columns.size(1)});\n\n    for (int g = 0; g < group; g++) {\n      output[b][g] = output[b][g]\n                         .flatten(1)\n                         .addmm_(weight[g].flatten(1), columns[g])\n                         .view_as(output[b][g]);\n    }\n\n    weight = weight.view(\n        {weight.size(0) * weight.size(1),\n         weight.size(2),\n         weight.size(3),\n         weight.size(4)});\n    columns =\n        columns.view({columns.size(0) * columns.size(1), columns.size(2)});\n  }\n\n  output = output.view(\n      {output.size(0),\n       output.size(1) * output.size(2),\n       output.size(3),\n       output.size(4)});\n\n  if (with_bias) {\n    output += bias.view({1, bias.size(0), 1, 1});\n  }\n}\n\nvoid modulated_deform_conv_cuda_backward(\n    at::Tensor input,\n    at::Tensor weight,\n    at::Tensor bias,\n    at::Tensor ones,\n    at::Tensor offset,\n    at::Tensor mask,\n    at::Tensor columns,\n    at::Tensor grad_input,\n    at::Tensor grad_weight,\n    at::Tensor grad_bias,\n    at::Tensor grad_offset,\n    at::Tensor grad_mask,\n    at::Tensor grad_output,\n    int kernel_h,\n    int kernel_w,\n    int stride_h,\n    int stride_w,\n    int pad_h,\n    int pad_w,\n    int dilation_h,\n    int dilation_w,\n    int group,\n    int deformable_group,\n    const bool with_bias) {\n  shape_check(\n      input,\n      offset,\n      &grad_output,\n      weight,\n      kernel_h,\n      kernel_w,\n      stride_h,\n      stride_w,\n      pad_h,\n      pad_w,\n      dilation_h,\n      dilation_w,\n      group,\n      deformable_group);\n\n  TORCH_CHECK(input.is_contiguous(), \"input tensor has to be contiguous\");\n  TORCH_CHECK(weight.is_contiguous(), \"weight tensor has to be contiguous\");\n\n  const int batch = input.size(0);\n  const int channels = input.size(1);\n  const int height = input.size(2);\n  const int width = 
input.size(3);\n\n  const int channels_kernel = weight.size(1);\n  const int kernel_h_ = weight.size(2);\n  const int kernel_w_ = weight.size(3);\n  if (kernel_h_ != kernel_h || kernel_w_ != kernel_w)\n    AT_ERROR(\n        \"Input shape and kernel shape wont match: (%d x %d vs %d x %d).\",\n        kernel_h,\n        kernel_w,\n        kernel_h_,\n        kernel_w_);\n  if (channels != channels_kernel * group)\n    AT_ERROR(\n        \"Input shape and kernel channels wont match: (%d vs %d).\",\n        channels,\n        channels_kernel * group);\n\n  const int height_out =\n      (height + 2 * pad_h - (dilation_h * (kernel_h - 1) + 1)) / stride_h + 1;\n  const int width_out =\n      (width + 2 * pad_w - (dilation_w * (kernel_w - 1) + 1)) / stride_w + 1;\n\n  // mask shape check\n  TORCH_CHECK(\n      (mask.size(2) == height_out && mask.size(3) == width_out),\n      \"invalid spatial size of mask, expected height: %d width: %d, but \"\n      \"got height: %d width: %d\",\n      height_out,\n      width_out,\n      mask.size(2),\n      mask.size(3));\n\n  TORCH_CHECK(\n      (mask.size(1) == deformable_group * kernel_h * kernel_w),\n      \"invalid number of channels of mask\");\n\n  if (ones.ndimension() != 2 ||\n      ones.size(0) * ones.size(1) < height_out * width_out) {\n    // Resize plane and fill with ones...\n    ones = at::ones({height_out, width_out}, input.options());\n  }\n\n  grad_input = grad_input.view({batch, channels, height, width});\n  columns = at::zeros(\n      {channels * kernel_h * kernel_w, height_out * width_out},\n      input.options());\n\n  grad_output = grad_output.view(\n      {grad_output.size(0),\n       group,\n       grad_output.size(1) / group,\n       grad_output.size(2),\n       grad_output.size(3)});\n\n  for (int b = 0; b < batch; b++) {\n    // divide int group\n    columns = columns.view({group, columns.size(0) / group, columns.size(1)});\n    weight = weight.view(\n        {group,\n         weight.size(0) / group,\n      
   weight.size(1),\n         weight.size(2),\n         weight.size(3)});\n\n    for (int g = 0; g < group; g++) {\n      columns[g].addmm_(\n          weight[g].flatten(1).transpose(0, 1),\n          grad_output[b][g].flatten(1),\n          0.0f,\n          1.0f);\n    }\n\n    columns =\n        columns.view({columns.size(0) * columns.size(1), columns.size(2)});\n    weight = weight.view(\n        {weight.size(0) * weight.size(1),\n         weight.size(2),\n         weight.size(3),\n         weight.size(4)});\n\n    // gradient w.r.t. input coordinate data\n    modulated_deformable_col2im_coord_cuda(\n        columns,\n        input[b],\n        offset[b],\n        mask[b],\n        1,\n        channels,\n        height,\n        width,\n        height_out,\n        width_out,\n        kernel_h,\n        kernel_w,\n        pad_h,\n        pad_w,\n        stride_h,\n        stride_w,\n        dilation_h,\n        dilation_w,\n        deformable_group,\n        grad_offset[b],\n        grad_mask[b]);\n    // gradient w.r.t. input data\n    modulated_deformable_col2im_cuda(\n        columns,\n        offset[b],\n        mask[b],\n        1,\n        channels,\n        height,\n        width,\n        height_out,\n        width_out,\n        kernel_h,\n        kernel_w,\n        pad_h,\n        pad_w,\n        stride_h,\n        stride_w,\n        dilation_h,\n        dilation_w,\n        deformable_group,\n        grad_input[b]);\n\n    // gradient w.r.t. 
weight, dWeight should accumulate across the batch and\n    // group\n    modulated_deformable_im2col_cuda(\n        input[b],\n        offset[b],\n        mask[b],\n        1,\n        channels,\n        height,\n        width,\n        height_out,\n        width_out,\n        kernel_h,\n        kernel_w,\n        pad_h,\n        pad_w,\n        stride_h,\n        stride_w,\n        dilation_h,\n        dilation_w,\n        deformable_group,\n        columns);\n\n    columns = columns.view({group, columns.size(0) / group, columns.size(1)});\n    grad_weight = grad_weight.view(\n        {group,\n         grad_weight.size(0) / group,\n         grad_weight.size(1),\n         grad_weight.size(2),\n         grad_weight.size(3)});\n    if (with_bias)\n      grad_bias = grad_bias.view({group, grad_bias.size(0) / group});\n\n    for (int g = 0; g < group; g++) {\n      grad_weight[g] =\n          grad_weight[g]\n              .flatten(1)\n              .addmm_(grad_output[b][g].flatten(1), columns[g].transpose(0, 1))\n              .view_as(grad_weight[g]);\n      if (with_bias) {\n        grad_bias[g] =\n            grad_bias[g]\n                .view({-1, 1})\n                .addmm_(grad_output[b][g].flatten(1), ones.view({-1, 1}))\n                .view(-1);\n      }\n    }\n\n    columns =\n        columns.view({columns.size(0) * columns.size(1), columns.size(2)});\n    grad_weight = grad_weight.view(\n        {grad_weight.size(0) * grad_weight.size(1),\n         grad_weight.size(2),\n         grad_weight.size(3),\n         grad_weight.size(4)});\n    if (with_bias)\n      grad_bias = grad_bias.view({grad_bias.size(0) * grad_bias.size(1)});\n  }\n  grad_output = grad_output.view(\n      {grad_output.size(0) * grad_output.size(1),\n       grad_output.size(2),\n       grad_output.size(3),\n       grad_output.size(4)});\n}\n\n} // namespace detectron2\n"
  },
  {
    "path": "detectron2/layers/csrc/deformable/deform_conv_cuda_kernel.cu",
    "content": "// Copyright (c) Facebook, Inc. and its affiliates.\n\n// modified from\n// https://github.com/open-mmlab/mmdetection/blob/master/mmdet/ops/dcn/src/deform_conv_cuda_kernel.cu\n// Original license: Apache 2.0\n// clang-format off\n\n// modify from\n// https://github.com/chengdazhi/Deformable-Convolution-V2-PyTorch/blob/mmdetection/mmdet/ops/dcn/src/deform_conv_cuda_kernel.cu\n\n/*!\n ******************* BEGIN Caffe Copyright Notice and Disclaimer *****************\n *\n * COPYRIGHT\n *\n * All contributions by the University of California:\n * Copyright (c) 2014-2017 The Regents of the University of California (Regents)\n * All rights reserved.\n *\n * All other contributions:\n * Copyright (c) 2014-2017, the respective contributors\n * All rights reserved.\n *\n * Caffe uses a shared copyright model: each contributor holds copyright over\n * their contributions to Caffe. The project versioning records all such\n * contribution and copyright details. If a contributor wants to further mark\n * their specific copyright on a particular contribution, they should indicate\n * their copyright solely in the commit message of the change when it is\n * committed.\n *\n * LICENSE\n *\n * Redistribution and use in source and binary forms, with or without\n * modification, are permitted provided that the following conditions are met:\n *\n * 1. Redistributions of source code must retain the above copyright notice, this\n * list of conditions and the following disclaimer.\n * 2. Redistributions in binary form must reproduce the above copyright notice,\n * this list of conditions and the following disclaimer in the documentation\n * and/or other materials provided with the distribution.\n *\n * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\"\n *AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\n *IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE\n * DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE\n *FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL\n *DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR\n *SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER\n *CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,\n *OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n *OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n *\n * CONTRIBUTION AGREEMENT\n *\n * By contributing to the BVLC/caffe repository through pull-request, comment,\n * or otherwise, the contributor releases their content to the\n * license and copyright terms herein.\n *\n ***************** END Caffe Copyright Notice and Disclaimer *********************\n *\n * Copyright (c) 2018 Microsoft\n * Licensed under The MIT License [see LICENSE for details]\n * \\file modulated_deformable_im2col.cuh\n * \\brief Function definitions of converting an image to\n * column matrix based on kernel, padding, dilation, and offset.\n * These functions are mainly used in deformable convolution operators.\n * \\ref: https://arxiv.org/abs/1703.06211\n * \\author Yuwen Xiong, Haozhi Qi, Jifeng Dai, Xizhou Zhu, Han Hu, Dazhi Cheng\n */\n\n#include <ATen/ATen.h>\n#include <c10/cuda/CUDAGuard.h>\n#include <float.h>\n#include <math.h>\n#include <stdio.h>\n#include <THC/THCAtomics.cuh>\n\nusing namespace at;\n\n#define CUDA_KERNEL_LOOP(i, n)                                 \\\n  for (int i = blockIdx.x * blockDim.x + threadIdx.x; i < (n); \\\n       i += blockDim.x * gridDim.x)\n\n\nnamespace {\n\nconst int CUDA_NUM_THREADS = 1024;\nconst int kMaxGridNum = 65535;\n\ninline int GET_BLOCKS(const int N) {\n  return std::min(kMaxGridNum, (N + CUDA_NUM_THREADS - 1) / CUDA_NUM_THREADS);\n}\n\n}\n\ntemplate <typename scalar_t>\n__device__ scalar_t deformable_im2col_bilinear(\n    const scalar_t* bottom_data,\n    const int 
data_width,\n    const int height,\n    const int width,\n    scalar_t h,\n    scalar_t w) {\n  int h_low = floor(h);\n  int w_low = floor(w);\n  int h_high = h_low + 1;\n  int w_high = w_low + 1;\n\n  scalar_t lh = h - h_low;\n  scalar_t lw = w - w_low;\n  scalar_t hh = 1 - lh, hw = 1 - lw;\n\n  scalar_t v1 = 0;\n  if (h_low >= 0 && w_low >= 0)\n    v1 = bottom_data[h_low * data_width + w_low];\n  scalar_t v2 = 0;\n  if (h_low >= 0 && w_high <= width - 1)\n    v2 = bottom_data[h_low * data_width + w_high];\n  scalar_t v3 = 0;\n  if (h_high <= height - 1 && w_low >= 0)\n    v3 = bottom_data[h_high * data_width + w_low];\n  scalar_t v4 = 0;\n  if (h_high <= height - 1 && w_high <= width - 1)\n    v4 = bottom_data[h_high * data_width + w_high];\n\n  scalar_t w1 = hh * hw, w2 = hh * lw, w3 = lh * hw, w4 = lh * lw;\n\n  scalar_t val = (w1 * v1 + w2 * v2 + w3 * v3 + w4 * v4);\n  return val;\n}\n\ntemplate <typename scalar_t>\n__device__ scalar_t get_gradient_weight(\n    scalar_t argmax_h,\n    scalar_t argmax_w,\n    const int h,\n    const int w,\n    const int height,\n    const int width) {\n  if (argmax_h <= -1 || argmax_h >= height || argmax_w <= -1 ||\n      argmax_w >= width) {\n    // empty\n    return 0;\n  }\n\n  int argmax_h_low = floor(argmax_h);\n  int argmax_w_low = floor(argmax_w);\n  int argmax_h_high = argmax_h_low + 1;\n  int argmax_w_high = argmax_w_low + 1;\n\n  scalar_t weight = 0;\n  if (h == argmax_h_low && w == argmax_w_low)\n    weight = (h + 1 - argmax_h) * (w + 1 - argmax_w);\n  if (h == argmax_h_low && w == argmax_w_high)\n    weight = (h + 1 - argmax_h) * (argmax_w + 1 - w);\n  if (h == argmax_h_high && w == argmax_w_low)\n    weight = (argmax_h + 1 - h) * (w + 1 - argmax_w);\n  if (h == argmax_h_high && w == argmax_w_high)\n    weight = (argmax_h + 1 - h) * (argmax_w + 1 - w);\n  return weight;\n}\n\ntemplate <typename scalar_t>\n__device__ scalar_t get_coordinate_weight(\n    scalar_t argmax_h,\n    scalar_t argmax_w,\n    const int 
height,\n    const int width,\n    const scalar_t* im_data,\n    const int data_width,\n    const int bp_dir) {\n  if (argmax_h <= -1 || argmax_h >= height || argmax_w <= -1 ||\n      argmax_w >= width) {\n    // empty\n    return 0;\n  }\n\n  int argmax_h_low = floor(argmax_h);\n  int argmax_w_low = floor(argmax_w);\n  int argmax_h_high = argmax_h_low + 1;\n  int argmax_w_high = argmax_w_low + 1;\n\n  scalar_t weight = 0;\n\n  if (bp_dir == 0) {\n    if (argmax_h_low >= 0 && argmax_w_low >= 0)\n      weight += -1 * (argmax_w_low + 1 - argmax_w) *\n          im_data[argmax_h_low * data_width + argmax_w_low];\n    if (argmax_h_low >= 0 && argmax_w_high <= width - 1)\n      weight += -1 * (argmax_w - argmax_w_low) *\n          im_data[argmax_h_low * data_width + argmax_w_high];\n    if (argmax_h_high <= height - 1 && argmax_w_low >= 0)\n      weight += (argmax_w_low + 1 - argmax_w) *\n          im_data[argmax_h_high * data_width + argmax_w_low];\n    if (argmax_h_high <= height - 1 && argmax_w_high <= width - 1)\n      weight += (argmax_w - argmax_w_low) *\n          im_data[argmax_h_high * data_width + argmax_w_high];\n  } else if (bp_dir == 1) {\n    if (argmax_h_low >= 0 && argmax_w_low >= 0)\n      weight += -1 * (argmax_h_low + 1 - argmax_h) *\n          im_data[argmax_h_low * data_width + argmax_w_low];\n    if (argmax_h_low >= 0 && argmax_w_high <= width - 1)\n      weight += (argmax_h_low + 1 - argmax_h) *\n          im_data[argmax_h_low * data_width + argmax_w_high];\n    if (argmax_h_high <= height - 1 && argmax_w_low >= 0)\n      weight += -1 * (argmax_h - argmax_h_low) *\n          im_data[argmax_h_high * data_width + argmax_w_low];\n    if (argmax_h_high <= height - 1 && argmax_w_high <= width - 1)\n      weight += (argmax_h - argmax_h_low) *\n          im_data[argmax_h_high * data_width + argmax_w_high];\n  }\n\n  return weight;\n}\n\ntemplate <typename scalar_t>\n__global__ void deformable_im2col_gpu_kernel(\n    const int n,\n    const scalar_t* 
data_im,\n    const scalar_t* data_offset,\n    const int height,\n    const int width,\n    const int kernel_h,\n    const int kernel_w,\n    const int pad_h,\n    const int pad_w,\n    const int stride_h,\n    const int stride_w,\n    const int dilation_h,\n    const int dilation_w,\n    const int channel_per_deformable_group,\n    const int batch_size,\n    const int num_channels,\n    const int deformable_group,\n    const int height_col,\n    const int width_col,\n    scalar_t* data_col) {\n  CUDA_KERNEL_LOOP(index, n) {\n    // index index of output matrix\n    const int w_col = index % width_col;\n    const int h_col = (index / width_col) % height_col;\n    const int b_col = (index / width_col / height_col) % batch_size;\n    const int c_im = (index / width_col / height_col) / batch_size;\n    const int c_col = c_im * kernel_h * kernel_w;\n\n    // compute deformable group index\n    const int deformable_group_index = c_im / channel_per_deformable_group;\n\n    const int h_in = h_col * stride_h - pad_h;\n    const int w_in = w_col * stride_w - pad_w;\n    scalar_t* data_col_ptr = data_col +\n        ((c_col * batch_size + b_col) * height_col + h_col) * width_col + w_col;\n    // const scalar_t* data_im_ptr = data_im + ((b_col * num_channels + c_im) *\n    // height + h_in) * width + w_in;\n    const scalar_t* data_im_ptr =\n        data_im + (b_col * num_channels + c_im) * height * width;\n    const scalar_t* data_offset_ptr = data_offset +\n        (b_col * deformable_group + deformable_group_index) * 2 * kernel_h *\n            kernel_w * height_col * width_col;\n\n    for (int i = 0; i < kernel_h; ++i) {\n      for (int j = 0; j < kernel_w; ++j) {\n        const int data_offset_h_ptr =\n            ((2 * (i * kernel_w + j)) * height_col + h_col) * width_col + w_col;\n        const int data_offset_w_ptr =\n            ((2 * (i * kernel_w + j) + 1) * height_col + h_col) * width_col +\n            w_col;\n        const scalar_t offset_h = 
data_offset_ptr[data_offset_h_ptr];\n        const scalar_t offset_w = data_offset_ptr[data_offset_w_ptr];\n        scalar_t val = static_cast<scalar_t>(0);\n        const scalar_t h_im = h_in + i * dilation_h + offset_h;\n        const scalar_t w_im = w_in + j * dilation_w + offset_w;\n        if (h_im > -1 && w_im > -1 && h_im < height && w_im < width) {\n          // const scalar_t map_h = i * dilation_h + offset_h;\n          // const scalar_t map_w = j * dilation_w + offset_w;\n          // const int cur_height = height - h_in;\n          // const int cur_width = width - w_in;\n          // val = deformable_im2col_bilinear(data_im_ptr, width, cur_height,\n          // cur_width, map_h, map_w);\n          val = deformable_im2col_bilinear(\n              data_im_ptr, width, height, width, h_im, w_im);\n        }\n        *data_col_ptr = val;\n        data_col_ptr += batch_size * height_col * width_col;\n      }\n    }\n  }\n}\n\n\ntemplate <typename scalar_t>\n__global__ void deformable_col2im_gpu_kernel(\n    const int n,\n    const scalar_t* data_col,\n    const scalar_t* data_offset,\n    const int channels,\n    const int height,\n    const int width,\n    const int kernel_h,\n    const int kernel_w,\n    const int pad_h,\n    const int pad_w,\n    const int stride_h,\n    const int stride_w,\n    const int dilation_h,\n    const int dilation_w,\n    const int channel_per_deformable_group,\n    const int batch_size,\n    const int deformable_group,\n    const int height_col,\n    const int width_col,\n    scalar_t* grad_im) {\n  CUDA_KERNEL_LOOP(index, n) {\n    const int j = (index / width_col / height_col / batch_size) % kernel_w;\n    const int i =\n        (index / width_col / height_col / batch_size / kernel_w) % kernel_h;\n    const int c =\n        index / width_col / height_col / batch_size / kernel_w / kernel_h;\n    // compute the start and end of the output\n\n    const int deformable_group_index = c / channel_per_deformable_group;\n\n    int 
w_out = index % width_col;\n    int h_out = (index / width_col) % height_col;\n    int b = (index / width_col / height_col) % batch_size;\n    int w_in = w_out * stride_w - pad_w;\n    int h_in = h_out * stride_h - pad_h;\n\n    const scalar_t* data_offset_ptr = data_offset +\n        (b * deformable_group + deformable_group_index) * 2 * kernel_h *\n            kernel_w * height_col * width_col;\n    const int data_offset_h_ptr =\n        ((2 * (i * kernel_w + j)) * height_col + h_out) * width_col + w_out;\n    const int data_offset_w_ptr =\n        ((2 * (i * kernel_w + j) + 1) * height_col + h_out) * width_col + w_out;\n    const scalar_t offset_h = data_offset_ptr[data_offset_h_ptr];\n    const scalar_t offset_w = data_offset_ptr[data_offset_w_ptr];\n    const scalar_t cur_inv_h_data = h_in + i * dilation_h + offset_h;\n    const scalar_t cur_inv_w_data = w_in + j * dilation_w + offset_w;\n\n    const scalar_t cur_top_grad = data_col[index];\n    const int cur_h = (int)cur_inv_h_data;\n    const int cur_w = (int)cur_inv_w_data;\n    for (int dy = -2; dy <= 2; dy++) {\n      for (int dx = -2; dx <= 2; dx++) {\n        if (cur_h + dy >= 0 && cur_h + dy < height && cur_w + dx >= 0 &&\n            cur_w + dx < width && abs(cur_inv_h_data - (cur_h + dy)) < 1 &&\n            abs(cur_inv_w_data - (cur_w + dx)) < 1) {\n          int cur_bottom_grad_pos =\n              ((b * channels + c) * height + cur_h + dy) * width + cur_w + dx;\n          scalar_t weight = get_gradient_weight(\n              cur_inv_h_data,\n              cur_inv_w_data,\n              cur_h + dy,\n              cur_w + dx,\n              height,\n              width);\n          atomicAdd(grad_im + cur_bottom_grad_pos, weight * cur_top_grad);\n        }\n      }\n    }\n  }\n}\n\n\ntemplate <typename scalar_t>\n__global__ void deformable_col2im_coord_gpu_kernel(\n    const int n,\n    const scalar_t* data_col,\n    const scalar_t* data_im,\n    const scalar_t* data_offset,\n    const int 
channels,\n    const int height,\n    const int width,\n    const int kernel_h,\n    const int kernel_w,\n    const int pad_h,\n    const int pad_w,\n    const int stride_h,\n    const int stride_w,\n    const int dilation_h,\n    const int dilation_w,\n    const int channel_per_deformable_group,\n    const int batch_size,\n    const int offset_channels,\n    const int deformable_group,\n    const int height_col,\n    const int width_col,\n    scalar_t* grad_offset) {\n  CUDA_KERNEL_LOOP(index, n) {\n    scalar_t val = 0;\n    int w = index % width_col;\n    int h = (index / width_col) % height_col;\n    int c = (index / width_col / height_col) % offset_channels;\n    int b = (index / width_col / height_col) / offset_channels;\n    // compute the start and end of the output\n\n    const int deformable_group_index = c / (2 * kernel_h * kernel_w);\n    const int col_step = kernel_h * kernel_w;\n    int cnt = 0;\n    const scalar_t* data_col_ptr = data_col +\n        deformable_group_index * channel_per_deformable_group * batch_size *\n            width_col * height_col;\n    const scalar_t* data_im_ptr = data_im +\n        (b * deformable_group + deformable_group_index) *\n            channel_per_deformable_group / kernel_h / kernel_w * height * width;\n    const scalar_t* data_offset_ptr = data_offset +\n        (b * deformable_group + deformable_group_index) * 2 * kernel_h *\n            kernel_w * height_col * width_col;\n\n    const int offset_c = c - deformable_group_index * 2 * kernel_h * kernel_w;\n\n    for (int col_c = (offset_c / 2); col_c < channel_per_deformable_group;\n         col_c += col_step) {\n      const int col_pos =\n          (((col_c * batch_size + b) * height_col) + h) * width_col + w;\n      const int bp_dir = offset_c % 2;\n\n      int j = (col_pos / width_col / height_col / batch_size) % kernel_w;\n      int i =\n          (col_pos / width_col / height_col / batch_size / kernel_w) % kernel_h;\n      int w_out = col_pos % width_col;\n      
int h_out = (col_pos / width_col) % height_col;\n      int w_in = w_out * stride_w - pad_w;\n      int h_in = h_out * stride_h - pad_h;\n      const int data_offset_h_ptr =\n          (((2 * (i * kernel_w + j)) * height_col + h_out) * width_col + w_out);\n      const int data_offset_w_ptr =\n          (((2 * (i * kernel_w + j) + 1) * height_col + h_out) * width_col +\n           w_out);\n      const scalar_t offset_h = data_offset_ptr[data_offset_h_ptr];\n      const scalar_t offset_w = data_offset_ptr[data_offset_w_ptr];\n      scalar_t inv_h = h_in + i * dilation_h + offset_h;\n      scalar_t inv_w = w_in + j * dilation_w + offset_w;\n      if (inv_h <= -1 || inv_w <= -1 || inv_h >= height || inv_w >= width) {\n        inv_h = inv_w = -2;\n      }\n      const scalar_t weight = get_coordinate_weight(\n          inv_h,\n          inv_w,\n          height,\n          width,\n          data_im_ptr + cnt * height * width,\n          width,\n          bp_dir);\n      val += weight * data_col_ptr[col_pos];\n      cnt += 1;\n    }\n\n    grad_offset[index] = val;\n  }\n}\n\n\nnamespace detectron2 {\n\nvoid deformable_im2col(\n    const at::Tensor data_im,\n    const at::Tensor data_offset,\n    const int channels,\n    const int height,\n    const int width,\n    const int ksize_h,\n    const int ksize_w,\n    const int pad_h,\n    const int pad_w,\n    const int stride_h,\n    const int stride_w,\n    const int dilation_h,\n    const int dilation_w,\n    const int parallel_imgs,\n    const int deformable_group,\n    at::Tensor data_col) {\n  // num_axes should be smaller than block size\n  // todo: check parallel_imgs is correctly passed in\n  int height_col =\n      (height + 2 * pad_h - (dilation_h * (ksize_h - 1) + 1)) / stride_h + 1;\n  int width_col =\n      (width + 2 * pad_w - (dilation_w * (ksize_w - 1) + 1)) / stride_w + 1;\n  int num_kernels = channels * height_col * width_col * parallel_imgs;\n  int channel_per_deformable_group = channels / 
deformable_group;\n\n  at::cuda::CUDAGuard device_guard(data_im.device());\n  cudaStream_t stream = at::cuda::getCurrentCUDAStream();\n\n  AT_DISPATCH_FLOATING_TYPES_AND_HALF(\n      data_im.scalar_type(), \"deformable_im2col_gpu\", ([&] {\n        const scalar_t* data_im_ = data_im.data_ptr<scalar_t>();\n        const scalar_t* data_offset_ = data_offset.data_ptr<scalar_t>();\n        scalar_t* data_col_ = data_col.data_ptr<scalar_t>();\n\n        deformable_im2col_gpu_kernel<<<\n            GET_BLOCKS(num_kernels),\n            CUDA_NUM_THREADS,\n            0,\n            stream>>>(\n            num_kernels,\n            data_im_,\n            data_offset_,\n            height,\n            width,\n            ksize_h,\n            ksize_w,\n            pad_h,\n            pad_w,\n            stride_h,\n            stride_w,\n            dilation_h,\n            dilation_w,\n            channel_per_deformable_group,\n            parallel_imgs,\n            channels,\n            deformable_group,\n            height_col,\n            width_col,\n            data_col_);\n      }));\n\n  cudaError_t err = cudaGetLastError();\n  if (err != cudaSuccess) {\n    printf(\"error in deformable_im2col: %s\\n\", cudaGetErrorString(err));\n  }\n}\n\n\nvoid deformable_col2im(\n    const at::Tensor data_col,\n    const at::Tensor data_offset,\n    const int channels,\n    const int height,\n    const int width,\n    const int ksize_h,\n    const int ksize_w,\n    const int pad_h,\n    const int pad_w,\n    const int stride_h,\n    const int stride_w,\n    const int dilation_h,\n    const int dilation_w,\n    const int parallel_imgs,\n    const int deformable_group,\n    at::Tensor grad_im) {\n  // todo: make sure parallel_imgs is passed in correctly\n  int height_col =\n      (height + 2 * pad_h - (dilation_h * (ksize_h - 1) + 1)) / stride_h + 1;\n  int width_col =\n      (width + 2 * pad_w - (dilation_w * (ksize_w - 1) + 1)) / stride_w + 1;\n  int num_kernels =\n      
channels * ksize_h * ksize_w * height_col * width_col * parallel_imgs;\n  int channel_per_deformable_group = channels / deformable_group;\n\n  at::cuda::CUDAGuard device_guard(data_col.device());\n  cudaStream_t stream = at::cuda::getCurrentCUDAStream();\n\n  AT_DISPATCH_FLOATING_TYPES_AND_HALF(\n      data_col.scalar_type(), \"deformable_col2im_gpu\", ([&] {\n        const scalar_t* data_col_ = data_col.data_ptr<scalar_t>();\n        const scalar_t* data_offset_ = data_offset.data_ptr<scalar_t>();\n        scalar_t* grad_im_ = grad_im.data_ptr<scalar_t>();\n\n        deformable_col2im_gpu_kernel<<<\n            GET_BLOCKS(num_kernels),\n            CUDA_NUM_THREADS,\n            0,\n            stream>>>(\n            num_kernels,\n            data_col_,\n            data_offset_,\n            channels,\n            height,\n            width,\n            ksize_h,\n            ksize_w,\n            pad_h,\n            pad_w,\n            stride_h,\n            stride_w,\n            dilation_h,\n            dilation_w,\n            channel_per_deformable_group,\n            parallel_imgs,\n            deformable_group,\n            height_col,\n            width_col,\n            grad_im_);\n      }));\n\n  cudaError_t err = cudaGetLastError();\n  if (err != cudaSuccess) {\n    printf(\"error in deformable_col2im: %s\\n\", cudaGetErrorString(err));\n  }\n}\n\n\nvoid deformable_col2im_coord(\n    const at::Tensor data_col,\n    const at::Tensor data_im,\n    const at::Tensor data_offset,\n    const int channels,\n    const int height,\n    const int width,\n    const int ksize_h,\n    const int ksize_w,\n    const int pad_h,\n    const int pad_w,\n    const int stride_h,\n    const int stride_w,\n    const int dilation_h,\n    const int dilation_w,\n    const int parallel_imgs,\n    const int deformable_group,\n    at::Tensor grad_offset) {\n  int height_col =\n      (height + 2 * pad_h - (dilation_h * (ksize_h - 1) + 1)) / stride_h + 1;\n  int width_col =\n      
(width + 2 * pad_w - (dilation_w * (ksize_w - 1) + 1)) / stride_w + 1;\n  int num_kernels = height_col * width_col * 2 * ksize_h * ksize_w *\n      deformable_group * parallel_imgs;\n  int channel_per_deformable_group =\n      channels * ksize_h * ksize_w / deformable_group;\n\n  at::cuda::CUDAGuard device_guard(data_col.device());\n  cudaStream_t stream = at::cuda::getCurrentCUDAStream();\n\n  AT_DISPATCH_FLOATING_TYPES_AND_HALF(\n      data_col.scalar_type(), \"deformable_col2im_coord_gpu\", ([&] {\n        const scalar_t* data_col_ = data_col.data_ptr<scalar_t>();\n        const scalar_t* data_im_ = data_im.data_ptr<scalar_t>();\n        const scalar_t* data_offset_ = data_offset.data_ptr<scalar_t>();\n        scalar_t* grad_offset_ = grad_offset.data_ptr<scalar_t>();\n\n        deformable_col2im_coord_gpu_kernel<<<\n            GET_BLOCKS(num_kernels),\n            CUDA_NUM_THREADS,\n            0,\n            stream>>>(\n            num_kernels,\n            data_col_,\n            data_im_,\n            data_offset_,\n            channels,\n            height,\n            width,\n            ksize_h,\n            ksize_w,\n            pad_h,\n            pad_w,\n            stride_h,\n            stride_w,\n            dilation_h,\n            dilation_w,\n            channel_per_deformable_group,\n            parallel_imgs,\n            2 * ksize_h * ksize_w * deformable_group,\n            deformable_group,\n            height_col,\n            width_col,\n            grad_offset_);\n      }));\n}\n\n} // namespace detectron2\n\n\ntemplate <typename scalar_t>\n__device__ scalar_t dmcn_im2col_bilinear(\n    const scalar_t* bottom_data,\n    const int data_width,\n    const int height,\n    const int width,\n    scalar_t h,\n    scalar_t w) {\n  int h_low = floor(h);\n  int w_low = floor(w);\n  int h_high = h_low + 1;\n  int w_high = w_low + 1;\n\n  scalar_t lh = h - h_low;\n  scalar_t lw = w - w_low;\n  scalar_t hh = 1 - lh, hw = 1 - lw;\n\n  scalar_t v1 = 
0;\n  if (h_low >= 0 && w_low >= 0)\n    v1 = bottom_data[h_low * data_width + w_low];\n  scalar_t v2 = 0;\n  if (h_low >= 0 && w_high <= width - 1)\n    v2 = bottom_data[h_low * data_width + w_high];\n  scalar_t v3 = 0;\n  if (h_high <= height - 1 && w_low >= 0)\n    v3 = bottom_data[h_high * data_width + w_low];\n  scalar_t v4 = 0;\n  if (h_high <= height - 1 && w_high <= width - 1)\n    v4 = bottom_data[h_high * data_width + w_high];\n\n  scalar_t w1 = hh * hw, w2 = hh * lw, w3 = lh * hw, w4 = lh * lw;\n\n  scalar_t val = (w1 * v1 + w2 * v2 + w3 * v3 + w4 * v4);\n  return val;\n}\n\ntemplate <typename scalar_t>\n__device__ scalar_t dmcn_get_gradient_weight(\n    scalar_t argmax_h,\n    scalar_t argmax_w,\n    const int h,\n    const int w,\n    const int height,\n    const int width) {\n  if (argmax_h <= -1 || argmax_h >= height || argmax_w <= -1 ||\n      argmax_w >= width) {\n    // empty\n    return 0;\n  }\n\n  int argmax_h_low = floor(argmax_h);\n  int argmax_w_low = floor(argmax_w);\n  int argmax_h_high = argmax_h_low + 1;\n  int argmax_w_high = argmax_w_low + 1;\n\n  scalar_t weight = 0;\n  if (h == argmax_h_low && w == argmax_w_low)\n    weight = (h + 1 - argmax_h) * (w + 1 - argmax_w);\n  if (h == argmax_h_low && w == argmax_w_high)\n    weight = (h + 1 - argmax_h) * (argmax_w + 1 - w);\n  if (h == argmax_h_high && w == argmax_w_low)\n    weight = (argmax_h + 1 - h) * (w + 1 - argmax_w);\n  if (h == argmax_h_high && w == argmax_w_high)\n    weight = (argmax_h + 1 - h) * (argmax_w + 1 - w);\n  return weight;\n}\n\ntemplate <typename scalar_t>\n__device__ scalar_t dmcn_get_coordinate_weight(\n    scalar_t argmax_h,\n    scalar_t argmax_w,\n    const int height,\n    const int width,\n    const scalar_t* im_data,\n    const int data_width,\n    const int bp_dir) {\n  if (argmax_h <= -1 || argmax_h >= height || argmax_w <= -1 ||\n      argmax_w >= width) {\n    // empty\n    return 0;\n  }\n\n  int argmax_h_low = floor(argmax_h);\n  int argmax_w_low = 
floor(argmax_w);\n  int argmax_h_high = argmax_h_low + 1;\n  int argmax_w_high = argmax_w_low + 1;\n\n  scalar_t weight = 0;\n\n  if (bp_dir == 0) {\n    if (argmax_h_low >= 0 && argmax_w_low >= 0)\n      weight += -1 * (argmax_w_low + 1 - argmax_w) *\n          im_data[argmax_h_low * data_width + argmax_w_low];\n    if (argmax_h_low >= 0 && argmax_w_high <= width - 1)\n      weight += -1 * (argmax_w - argmax_w_low) *\n          im_data[argmax_h_low * data_width + argmax_w_high];\n    if (argmax_h_high <= height - 1 && argmax_w_low >= 0)\n      weight += (argmax_w_low + 1 - argmax_w) *\n          im_data[argmax_h_high * data_width + argmax_w_low];\n    if (argmax_h_high <= height - 1 && argmax_w_high <= width - 1)\n      weight += (argmax_w - argmax_w_low) *\n          im_data[argmax_h_high * data_width + argmax_w_high];\n  } else if (bp_dir == 1) {\n    if (argmax_h_low >= 0 && argmax_w_low >= 0)\n      weight += -1 * (argmax_h_low + 1 - argmax_h) *\n          im_data[argmax_h_low * data_width + argmax_w_low];\n    if (argmax_h_low >= 0 && argmax_w_high <= width - 1)\n      weight += (argmax_h_low + 1 - argmax_h) *\n          im_data[argmax_h_low * data_width + argmax_w_high];\n    if (argmax_h_high <= height - 1 && argmax_w_low >= 0)\n      weight += -1 * (argmax_h - argmax_h_low) *\n          im_data[argmax_h_high * data_width + argmax_w_low];\n    if (argmax_h_high <= height - 1 && argmax_w_high <= width - 1)\n      weight += (argmax_h - argmax_h_low) *\n          im_data[argmax_h_high * data_width + argmax_w_high];\n  }\n\n  return weight;\n}\n\ntemplate <typename scalar_t>\n__global__ void modulated_deformable_im2col_gpu_kernel(\n    const int n,\n    const scalar_t* data_im,\n    const scalar_t* data_offset,\n    const scalar_t* data_mask,\n    const int height,\n    const int width,\n    const int kernel_h,\n    const int kernel_w,\n    const int pad_h,\n    const int pad_w,\n    const int stride_h,\n    const int stride_w,\n    const int dilation_h,\n    
const int dilation_w,\n    const int channel_per_deformable_group,\n    const int batch_size,\n    const int num_channels,\n    const int deformable_group,\n    const int height_col,\n    const int width_col,\n    scalar_t* data_col) {\n  CUDA_KERNEL_LOOP(index, n) {\n    // index index of output matrix\n    const int w_col = index % width_col;\n    const int h_col = (index / width_col) % height_col;\n    const int b_col = (index / width_col / height_col) % batch_size;\n    const int c_im = (index / width_col / height_col) / batch_size;\n    const int c_col = c_im * kernel_h * kernel_w;\n\n    // compute deformable group index\n    const int deformable_group_index = c_im / channel_per_deformable_group;\n\n    const int h_in = h_col * stride_h - pad_h;\n    const int w_in = w_col * stride_w - pad_w;\n\n    scalar_t* data_col_ptr = data_col +\n        ((c_col * batch_size + b_col) * height_col + h_col) * width_col + w_col;\n    // const float* data_im_ptr = data_im + ((b_col * num_channels + c_im) *\n    // height + h_in) * width + w_in;\n    const scalar_t* data_im_ptr =\n        data_im + (b_col * num_channels + c_im) * height * width;\n    const scalar_t* data_offset_ptr = data_offset +\n        (b_col * deformable_group + deformable_group_index) * 2 * kernel_h *\n            kernel_w * height_col * width_col;\n\n    const scalar_t* data_mask_ptr = data_mask +\n        (b_col * deformable_group + deformable_group_index) * kernel_h *\n            kernel_w * height_col * width_col;\n\n    for (int i = 0; i < kernel_h; ++i) {\n      for (int j = 0; j < kernel_w; ++j) {\n        const int data_offset_h_ptr =\n            ((2 * (i * kernel_w + j)) * height_col + h_col) * width_col + w_col;\n        const int data_offset_w_ptr =\n            ((2 * (i * kernel_w + j) + 1) * height_col + h_col) * width_col +\n            w_col;\n        const int data_mask_hw_ptr =\n            ((i * kernel_w + j) * height_col + h_col) * width_col + w_col;\n        const scalar_t offset_h 
= data_offset_ptr[data_offset_h_ptr];\n        const scalar_t offset_w = data_offset_ptr[data_offset_w_ptr];\n        const scalar_t mask = data_mask_ptr[data_mask_hw_ptr];\n        scalar_t val = static_cast<scalar_t>(0);\n        const scalar_t h_im = h_in + i * dilation_h + offset_h;\n        const scalar_t w_im = w_in + j * dilation_w + offset_w;\n        // if (h_im >= 0 && w_im >= 0 && h_im < height && w_im < width) {\n        if (h_im > -1 && w_im > -1 && h_im < height && w_im < width) {\n          // const float map_h = i * dilation_h + offset_h;\n          // const float map_w = j * dilation_w + offset_w;\n          // const int cur_height = height - h_in;\n          // const int cur_width = width - w_in;\n          // val = dmcn_im2col_bilinear(data_im_ptr, width, cur_height,\n          // cur_width, map_h, map_w);\n          val = dmcn_im2col_bilinear(\n              data_im_ptr, width, height, width, h_im, w_im);\n        }\n        *data_col_ptr = val * mask;\n        data_col_ptr += batch_size * height_col * width_col;\n        // data_col_ptr += height_col * width_col;\n      }\n    }\n  }\n}\n\ntemplate <typename scalar_t>\n__global__ void modulated_deformable_col2im_gpu_kernel(\n    const int n,\n    const scalar_t* data_col,\n    const scalar_t* data_offset,\n    const scalar_t* data_mask,\n    const int channels,\n    const int height,\n    const int width,\n    const int kernel_h,\n    const int kernel_w,\n    const int pad_h,\n    const int pad_w,\n    const int stride_h,\n    const int stride_w,\n    const int dilation_h,\n    const int dilation_w,\n    const int channel_per_deformable_group,\n    const int batch_size,\n    const int deformable_group,\n    const int height_col,\n    const int width_col,\n    scalar_t* grad_im) {\n  CUDA_KERNEL_LOOP(index, n) {\n    const int j = (index / width_col / height_col / batch_size) % kernel_w;\n    const int i =\n        (index / width_col / height_col / batch_size / kernel_w) % kernel_h;\n    const 
int c =\n        index / width_col / height_col / batch_size / kernel_w / kernel_h;\n    // compute the start and end of the output\n\n    const int deformable_group_index = c / channel_per_deformable_group;\n\n    int w_out = index % width_col;\n    int h_out = (index / width_col) % height_col;\n    int b = (index / width_col / height_col) % batch_size;\n    int w_in = w_out * stride_w - pad_w;\n    int h_in = h_out * stride_h - pad_h;\n\n    const scalar_t* data_offset_ptr = data_offset +\n        (b * deformable_group + deformable_group_index) * 2 * kernel_h *\n            kernel_w * height_col * width_col;\n    const scalar_t* data_mask_ptr = data_mask +\n        (b * deformable_group + deformable_group_index) * kernel_h * kernel_w *\n            height_col * width_col;\n    const int data_offset_h_ptr =\n        ((2 * (i * kernel_w + j)) * height_col + h_out) * width_col + w_out;\n    const int data_offset_w_ptr =\n        ((2 * (i * kernel_w + j) + 1) * height_col + h_out) * width_col + w_out;\n    const int data_mask_hw_ptr =\n        ((i * kernel_w + j) * height_col + h_out) * width_col + w_out;\n    const scalar_t offset_h = data_offset_ptr[data_offset_h_ptr];\n    const scalar_t offset_w = data_offset_ptr[data_offset_w_ptr];\n    const scalar_t mask = data_mask_ptr[data_mask_hw_ptr];\n    const scalar_t cur_inv_h_data = h_in + i * dilation_h + offset_h;\n    const scalar_t cur_inv_w_data = w_in + j * dilation_w + offset_w;\n\n    const scalar_t cur_top_grad = data_col[index] * mask;\n    const int cur_h = (int)cur_inv_h_data;\n    const int cur_w = (int)cur_inv_w_data;\n    for (int dy = -2; dy <= 2; dy++) {\n      for (int dx = -2; dx <= 2; dx++) {\n        if (cur_h + dy >= 0 && cur_h + dy < height && cur_w + dx >= 0 &&\n            cur_w + dx < width && abs(cur_inv_h_data - (cur_h + dy)) < 1 &&\n            abs(cur_inv_w_data - (cur_w + dx)) < 1) {\n          int cur_bottom_grad_pos =\n              ((b * channels + c) * height + cur_h + dy) * width + 
cur_w + dx;\n          scalar_t weight = dmcn_get_gradient_weight(\n              cur_inv_h_data,\n              cur_inv_w_data,\n              cur_h + dy,\n              cur_w + dx,\n              height,\n              width);\n          atomicAdd(grad_im + cur_bottom_grad_pos, weight * cur_top_grad);\n        }\n      }\n    }\n  }\n}\n\ntemplate <typename scalar_t>\n__global__ void modulated_deformable_col2im_coord_gpu_kernel(\n    const int n,\n    const scalar_t* data_col,\n    const scalar_t* data_im,\n    const scalar_t* data_offset,\n    const scalar_t* data_mask,\n    const int channels,\n    const int height,\n    const int width,\n    const int kernel_h,\n    const int kernel_w,\n    const int pad_h,\n    const int pad_w,\n    const int stride_h,\n    const int stride_w,\n    const int dilation_h,\n    const int dilation_w,\n    const int channel_per_deformable_group,\n    const int batch_size,\n    const int offset_channels,\n    const int deformable_group,\n    const int height_col,\n    const int width_col,\n    scalar_t* grad_offset,\n    scalar_t* grad_mask) {\n  CUDA_KERNEL_LOOP(index, n) {\n    scalar_t val = 0, mval = 0;\n    int w = index % width_col;\n    int h = (index / width_col) % height_col;\n    int c = (index / width_col / height_col) % offset_channels;\n    int b = (index / width_col / height_col) / offset_channels;\n    // compute the start and end of the output\n\n    const int deformable_group_index = c / (2 * kernel_h * kernel_w);\n    const int col_step = kernel_h * kernel_w;\n    int cnt = 0;\n    const scalar_t* data_col_ptr = data_col +\n        deformable_group_index * channel_per_deformable_group * batch_size *\n            width_col * height_col;\n    const scalar_t* data_im_ptr = data_im +\n        (b * deformable_group + deformable_group_index) *\n            channel_per_deformable_group / kernel_h / kernel_w * height * width;\n    const scalar_t* data_offset_ptr = data_offset +\n        (b * deformable_group + 
deformable_group_index) * 2 * kernel_h *\n            kernel_w * height_col * width_col;\n    const scalar_t* data_mask_ptr = data_mask +\n        (b * deformable_group + deformable_group_index) * kernel_h * kernel_w *\n            height_col * width_col;\n\n    const int offset_c = c - deformable_group_index * 2 * kernel_h * kernel_w;\n\n    for (int col_c = (offset_c / 2); col_c < channel_per_deformable_group;\n         col_c += col_step) {\n      const int col_pos =\n          (((col_c * batch_size + b) * height_col) + h) * width_col + w;\n      const int bp_dir = offset_c % 2;\n\n      int j = (col_pos / width_col / height_col / batch_size) % kernel_w;\n      int i =\n          (col_pos / width_col / height_col / batch_size / kernel_w) % kernel_h;\n      int w_out = col_pos % width_col;\n      int h_out = (col_pos / width_col) % height_col;\n      int w_in = w_out * stride_w - pad_w;\n      int h_in = h_out * stride_h - pad_h;\n      const int data_offset_h_ptr =\n          (((2 * (i * kernel_w + j)) * height_col + h_out) * width_col + w_out);\n      const int data_offset_w_ptr =\n          (((2 * (i * kernel_w + j) + 1) * height_col + h_out) * width_col +\n           w_out);\n      const int data_mask_hw_ptr =\n          (((i * kernel_w + j) * height_col + h_out) * width_col + w_out);\n      const scalar_t offset_h = data_offset_ptr[data_offset_h_ptr];\n      const scalar_t offset_w = data_offset_ptr[data_offset_w_ptr];\n      const scalar_t mask = data_mask_ptr[data_mask_hw_ptr];\n      scalar_t inv_h = h_in + i * dilation_h + offset_h;\n      scalar_t inv_w = w_in + j * dilation_w + offset_w;\n      if (inv_h <= -1 || inv_w <= -1 || inv_h >= height || inv_w >= width) {\n        inv_h = inv_w = -2;\n      } else {\n        mval += data_col_ptr[col_pos] *\n            dmcn_im2col_bilinear(\n                    data_im_ptr + cnt * height * width,\n                    width,\n                    height,\n                    width,\n                    inv_h,\n   
                 inv_w);\n      }\n      const scalar_t weight = dmcn_get_coordinate_weight(\n          inv_h,\n          inv_w,\n          height,\n          width,\n          data_im_ptr + cnt * height * width,\n          width,\n          bp_dir);\n      val += weight * data_col_ptr[col_pos] * mask;\n      cnt += 1;\n    }\n    // KERNEL_ASSIGN(grad_offset[index], offset_req, val);\n    grad_offset[index] = val;\n    if (offset_c % 2 == 0)\n      // KERNEL_ASSIGN(grad_mask[(((b * deformable_group +\n      // deformable_group_index) * kernel_h * kernel_w + offset_c / 2) *\n      // height_col + h) * width_col + w], mask_req, mval);\n      grad_mask\n          [(((b * deformable_group + deformable_group_index) * kernel_h *\n                 kernel_w +\n             offset_c / 2) *\n                height_col +\n            h) *\n               width_col +\n           w] = mval;\n  }\n}\n\n\nnamespace detectron2 {\n\nvoid modulated_deformable_im2col_cuda(\n    const at::Tensor data_im,\n    const at::Tensor data_offset,\n    const at::Tensor data_mask,\n    const int batch_size,\n    const int channels,\n    const int height_im,\n    const int width_im,\n    const int height_col,\n    const int width_col,\n    const int kernel_h,\n    const int kenerl_w,\n    const int pad_h,\n    const int pad_w,\n    const int stride_h,\n    const int stride_w,\n    const int dilation_h,\n    const int dilation_w,\n    const int deformable_group,\n    at::Tensor data_col) {\n  // num_axes should be smaller than block size\n  const int channel_per_deformable_group = channels / deformable_group;\n  const int num_kernels = channels * batch_size * height_col * width_col;\n\n  at::cuda::CUDAGuard device_guard(data_im.device());\n  cudaStream_t stream = at::cuda::getCurrentCUDAStream();\n\n  AT_DISPATCH_FLOATING_TYPES_AND_HALF(\n      data_im.scalar_type(), \"modulated_deformable_im2col_gpu\", ([&] {\n        const scalar_t* data_im_ = data_im.data_ptr<scalar_t>();\n        const 
scalar_t* data_offset_ = data_offset.data_ptr<scalar_t>();\n        const scalar_t* data_mask_ = data_mask.data_ptr<scalar_t>();\n        scalar_t* data_col_ = data_col.data_ptr<scalar_t>();\n\n        modulated_deformable_im2col_gpu_kernel<<<\n            GET_BLOCKS(num_kernels),\n            CUDA_NUM_THREADS,\n            0,\n            stream>>>(\n            num_kernels,\n            data_im_,\n            data_offset_,\n            data_mask_,\n            height_im,\n            width_im,\n            kernel_h,\n            kenerl_w,\n            pad_h,\n            pad_w,\n            stride_h,\n            stride_w,\n            dilation_h,\n            dilation_w,\n            channel_per_deformable_group,\n            batch_size,\n            channels,\n            deformable_group,\n            height_col,\n            width_col,\n            data_col_);\n      }));\n\n  cudaError_t err = cudaGetLastError();\n  if (err != cudaSuccess) {\n    printf(\n        \"error in modulated_deformable_im2col_cuda: %s\\n\",\n        cudaGetErrorString(err));\n  }\n}\n\nvoid modulated_deformable_col2im_cuda(\n    const at::Tensor data_col,\n    const at::Tensor data_offset,\n    const at::Tensor data_mask,\n    const int batch_size,\n    const int channels,\n    const int height_im,\n    const int width_im,\n    const int height_col,\n    const int width_col,\n    const int kernel_h,\n    const int kernel_w,\n    const int pad_h,\n    const int pad_w,\n    const int stride_h,\n    const int stride_w,\n    const int dilation_h,\n    const int dilation_w,\n    const int deformable_group,\n    at::Tensor grad_im) {\n  const int channel_per_deformable_group = channels / deformable_group;\n  const int num_kernels =\n      channels * kernel_h * kernel_w * batch_size * height_col * width_col;\n\n  at::cuda::CUDAGuard device_guard(data_col.device());\n  cudaStream_t stream = at::cuda::getCurrentCUDAStream();\n\n  AT_DISPATCH_FLOATING_TYPES_AND_HALF(\n      
data_col.scalar_type(), \"modulated_deformable_col2im_gpu\", ([&] {\n        const scalar_t* data_col_ = data_col.data_ptr<scalar_t>();\n        const scalar_t* data_offset_ = data_offset.data_ptr<scalar_t>();\n        const scalar_t* data_mask_ = data_mask.data_ptr<scalar_t>();\n        scalar_t* grad_im_ = grad_im.data_ptr<scalar_t>();\n\n        modulated_deformable_col2im_gpu_kernel<<<\n            GET_BLOCKS(num_kernels),\n            CUDA_NUM_THREADS,\n            0,\n            stream>>>(\n            num_kernels,\n            data_col_,\n            data_offset_,\n            data_mask_,\n            channels,\n            height_im,\n            width_im,\n            kernel_h,\n            kernel_w,\n            pad_h,\n            pad_w,\n            stride_h,\n            stride_w,\n            dilation_h,\n            dilation_w,\n            channel_per_deformable_group,\n            batch_size,\n            deformable_group,\n            height_col,\n            width_col,\n            grad_im_);\n      }));\n\n  cudaError_t err = cudaGetLastError();\n  if (err != cudaSuccess) {\n    printf(\n        \"error in modulated_deformable_col2im_cuda: %s\\n\",\n        cudaGetErrorString(err));\n  }\n}\n\nvoid modulated_deformable_col2im_coord_cuda(\n    const at::Tensor data_col,\n    const at::Tensor data_im,\n    const at::Tensor data_offset,\n    const at::Tensor data_mask,\n    const int batch_size,\n    const int channels,\n    const int height_im,\n    const int width_im,\n    const int height_col,\n    const int width_col,\n    const int kernel_h,\n    const int kernel_w,\n    const int pad_h,\n    const int pad_w,\n    const int stride_h,\n    const int stride_w,\n    const int dilation_h,\n    const int dilation_w,\n    const int deformable_group,\n    at::Tensor grad_offset,\n    at::Tensor grad_mask) {\n  const int num_kernels = batch_size * height_col * width_col * 2 * kernel_h *\n      kernel_w * deformable_group;\n  const int 
channel_per_deformable_group =\n      channels * kernel_h * kernel_w / deformable_group;\n\n  at::cuda::CUDAGuard device_guard(data_col.device());\n  cudaStream_t stream = at::cuda::getCurrentCUDAStream();\n\n  AT_DISPATCH_FLOATING_TYPES_AND_HALF(\n      data_col.scalar_type(), \"modulated_deformable_col2im_coord_gpu\", ([&] {\n        const scalar_t* data_col_ = data_col.data_ptr<scalar_t>();\n        const scalar_t* data_im_ = data_im.data_ptr<scalar_t>();\n        const scalar_t* data_offset_ = data_offset.data_ptr<scalar_t>();\n        const scalar_t* data_mask_ = data_mask.data_ptr<scalar_t>();\n        scalar_t* grad_offset_ = grad_offset.data_ptr<scalar_t>();\n        scalar_t* grad_mask_ = grad_mask.data_ptr<scalar_t>();\n\n        modulated_deformable_col2im_coord_gpu_kernel<<<\n            GET_BLOCKS(num_kernels),\n            CUDA_NUM_THREADS,\n            0,\n            stream>>>(\n            num_kernels,\n            data_col_,\n            data_im_,\n            data_offset_,\n            data_mask_,\n            channels,\n            height_im,\n            width_im,\n            kernel_h,\n            kernel_w,\n            pad_h,\n            pad_w,\n            stride_h,\n            stride_w,\n            dilation_h,\n            dilation_w,\n            channel_per_deformable_group,\n            batch_size,\n            2 * kernel_h * kernel_w * deformable_group,\n            deformable_group,\n            height_col,\n            width_col,\n            grad_offset_,\n            grad_mask_);\n      }));\n  cudaError_t err = cudaGetLastError();\n  if (err != cudaSuccess) {\n    printf(\n        \"error in modulated_deformable_col2im_coord_cuda: %s\\n\",\n        cudaGetErrorString(err));\n  }\n}\n\n} // namespace detectron2\n"
  },
  {
    "path": "detectron2/layers/csrc/nms_rotated/nms_rotated.h",
    "content": "// Copyright (c) Facebook, Inc. and its affiliates.\n#pragma once\n#include <torch/types.h>\n\nnamespace detectron2 {\n\nat::Tensor nms_rotated_cpu(\n    const at::Tensor& dets,\n    const at::Tensor& scores,\n    const double iou_threshold);\n\n#if defined(WITH_CUDA) || defined(WITH_HIP)\nat::Tensor nms_rotated_cuda(\n    const at::Tensor& dets,\n    const at::Tensor& scores,\n    const double iou_threshold);\n#endif\n\n// Interface for Python\n// inline is needed to prevent multiple function definitions when this header is\n// included by different cpps\ninline at::Tensor nms_rotated(\n    const at::Tensor& dets,\n    const at::Tensor& scores,\n    const double iou_threshold) {\n  assert(dets.device().is_cuda() == scores.device().is_cuda());\n  if (dets.device().is_cuda()) {\n#if defined(WITH_CUDA) || defined(WITH_HIP)\n    return nms_rotated_cuda(\n        dets.contiguous(), scores.contiguous(), iou_threshold);\n#else\n    AT_ERROR(\"Detectron2 is not compiled with GPU support!\");\n#endif\n  }\n\n  return nms_rotated_cpu(dets.contiguous(), scores.contiguous(), iou_threshold);\n}\n\n} // namespace detectron2\n"
  },
  {
    "path": "detectron2/layers/csrc/nms_rotated/nms_rotated_cpu.cpp",
    "content": "// Copyright (c) Facebook, Inc. and its affiliates.\n#include \"../box_iou_rotated/box_iou_rotated_utils.h\"\n#include \"nms_rotated.h\"\n\nnamespace detectron2 {\n\ntemplate <typename scalar_t>\nat::Tensor nms_rotated_cpu_kernel(\n    const at::Tensor& dets,\n    const at::Tensor& scores,\n    const double iou_threshold) {\n  // nms_rotated_cpu_kernel is modified from torchvision's nms_cpu_kernel,\n  // however, the code in this function is much shorter because\n  // we delegate the IoU computation for rotated boxes to\n  // the single_box_iou_rotated function in box_iou_rotated_utils.h\n  AT_ASSERTM(dets.device().is_cpu(), \"dets must be a CPU tensor\");\n  AT_ASSERTM(scores.device().is_cpu(), \"scores must be a CPU tensor\");\n  AT_ASSERTM(\n      dets.scalar_type() == scores.scalar_type(),\n      \"dets should have the same type as scores\");\n\n  if (dets.numel() == 0) {\n    return at::empty({0}, dets.options().dtype(at::kLong));\n  }\n\n  auto order_t = std::get<1>(scores.sort(0, /* descending=*/true));\n\n  auto ndets = dets.size(0);\n  at::Tensor suppressed_t = at::zeros({ndets}, dets.options().dtype(at::kByte));\n  at::Tensor keep_t = at::zeros({ndets}, dets.options().dtype(at::kLong));\n\n  auto suppressed = suppressed_t.data_ptr<uint8_t>();\n  auto keep = keep_t.data_ptr<int64_t>();\n  auto order = order_t.data_ptr<int64_t>();\n\n  int64_t num_to_keep = 0;\n\n  for (int64_t _i = 0; _i < ndets; _i++) {\n    auto i = order[_i];\n    if (suppressed[i] == 1) {\n      continue;\n    }\n\n    keep[num_to_keep++] = i;\n\n    for (int64_t _j = _i + 1; _j < ndets; _j++) {\n      auto j = order[_j];\n      if (suppressed[j] == 1) {\n        continue;\n      }\n\n      auto ovr = single_box_iou_rotated<scalar_t>(\n          dets[i].data_ptr<scalar_t>(), dets[j].data_ptr<scalar_t>());\n      if (ovr >= iou_threshold) {\n        suppressed[j] = 1;\n      }\n    }\n  }\n  return keep_t.narrow(/*dim=*/0, /*start=*/0, 
/*length=*/num_to_keep);\n}\n\nat::Tensor nms_rotated_cpu(\n    // input must be contiguous\n    const at::Tensor& dets,\n    const at::Tensor& scores,\n    const double iou_threshold) {\n  auto result = at::empty({0}, dets.options());\n\n  AT_DISPATCH_FLOATING_TYPES(dets.scalar_type(), \"nms_rotated\", [&] {\n    result = nms_rotated_cpu_kernel<scalar_t>(dets, scores, iou_threshold);\n  });\n  return result;\n}\n\n} // namespace detectron2\n"
  },
  {
    "path": "detectron2/layers/csrc/nms_rotated/nms_rotated_cuda.cu",
    "content": "// Copyright (c) Facebook, Inc. and its affiliates.\n#include <ATen/ATen.h>\n#include <ATen/cuda/CUDAContext.h>\n#include <c10/cuda/CUDAGuard.h>\n#include <ATen/cuda/CUDAApplyUtils.cuh>\n#ifdef WITH_CUDA\n#include \"../box_iou_rotated/box_iou_rotated_utils.h\"\n#endif\n// TODO avoid this when pytorch supports \"same directory\" hipification\n#ifdef WITH_HIP\n#include \"box_iou_rotated/box_iou_rotated_utils.h\"\n#endif\n\nusing namespace detectron2;\n\nnamespace {\nint const threadsPerBlock = sizeof(unsigned long long) * 8;\n}\n\ntemplate <typename T>\n__global__ void nms_rotated_cuda_kernel(\n    const int n_boxes,\n    const double iou_threshold,\n    const T* dev_boxes,\n    unsigned long long* dev_mask) {\n  // nms_rotated_cuda_kernel is modified from torchvision's nms_cuda_kernel\n\n  const int row_start = blockIdx.y;\n  const int col_start = blockIdx.x;\n\n  // if (row_start > col_start) return;\n\n  const int row_size =\n      min(n_boxes - row_start * threadsPerBlock, threadsPerBlock);\n  const int col_size =\n      min(n_boxes - col_start * threadsPerBlock, threadsPerBlock);\n\n  // Compared to nms_cuda_kernel, where each box is represented with 4 values\n  // (x1, y1, x2, y2), each rotated box is represented with 5 values\n  // (x_center, y_center, width, height, angle_degrees) here.\n  __shared__ T block_boxes[threadsPerBlock * 5];\n  if (threadIdx.x < col_size) {\n    block_boxes[threadIdx.x * 5 + 0] =\n        dev_boxes[(threadsPerBlock * col_start + threadIdx.x) * 5 + 0];\n    block_boxes[threadIdx.x * 5 + 1] =\n        dev_boxes[(threadsPerBlock * col_start + threadIdx.x) * 5 + 1];\n    block_boxes[threadIdx.x * 5 + 2] =\n        dev_boxes[(threadsPerBlock * col_start + threadIdx.x) * 5 + 2];\n    block_boxes[threadIdx.x * 5 + 3] =\n        dev_boxes[(threadsPerBlock * col_start + threadIdx.x) * 5 + 3];\n    block_boxes[threadIdx.x * 5 + 4] =\n        dev_boxes[(threadsPerBlock * col_start + threadIdx.x) * 5 + 4];\n  }\n  
__syncthreads();\n\n  if (threadIdx.x < row_size) {\n    const int cur_box_idx = threadsPerBlock * row_start + threadIdx.x;\n    const T* cur_box = dev_boxes + cur_box_idx * 5;\n    int i = 0;\n    unsigned long long t = 0;\n    int start = 0;\n    if (row_start == col_start) {\n      start = threadIdx.x + 1;\n    }\n    for (i = start; i < col_size; i++) {\n      // Instead of devIoU used by original horizontal nms, here\n      // we use the single_box_iou_rotated function from box_iou_rotated_utils.h\n      if (single_box_iou_rotated<T>(cur_box, block_boxes + i * 5) >\n          iou_threshold) {\n        t |= 1ULL << i;\n      }\n    }\n    const int col_blocks = at::cuda::ATenCeilDiv(n_boxes, threadsPerBlock);\n    dev_mask[cur_box_idx * col_blocks + col_start] = t;\n  }\n}\n\nnamespace detectron2 {\n\nat::Tensor nms_rotated_cuda(\n    // input must be contiguous\n    const at::Tensor& dets,\n    const at::Tensor& scores,\n    double iou_threshold) {\n  // using scalar_t = float;\n  AT_ASSERTM(dets.is_cuda(), \"dets must be a CUDA tensor\");\n  AT_ASSERTM(scores.is_cuda(), \"scores must be a CUDA tensor\");\n  at::cuda::CUDAGuard device_guard(dets.device());\n\n  auto order_t = std::get<1>(scores.sort(0, /* descending=*/true));\n  auto dets_sorted = dets.index_select(0, order_t);\n\n  auto dets_num = dets.size(0);\n\n  const int col_blocks =\n      at::cuda::ATenCeilDiv(static_cast<int>(dets_num), threadsPerBlock);\n\n  at::Tensor mask =\n      at::empty({dets_num * col_blocks}, dets.options().dtype(at::kLong));\n\n  dim3 blocks(col_blocks, col_blocks);\n  dim3 threads(threadsPerBlock);\n  cudaStream_t stream = at::cuda::getCurrentCUDAStream();\n\n  AT_DISPATCH_FLOATING_TYPES(\n      dets_sorted.scalar_type(), \"nms_rotated_kernel_cuda\", [&] {\n        nms_rotated_cuda_kernel<scalar_t><<<blocks, threads, 0, stream>>>(\n            dets_num,\n            iou_threshold,\n            dets_sorted.data_ptr<scalar_t>(),\n            (unsigned long 
long*)mask.data_ptr<int64_t>());\n      });\n\n  at::Tensor mask_cpu = mask.to(at::kCPU);\n  unsigned long long* mask_host =\n      (unsigned long long*)mask_cpu.data_ptr<int64_t>();\n\n  std::vector<unsigned long long> remv(col_blocks);\n  memset(&remv[0], 0, sizeof(unsigned long long) * col_blocks);\n\n  at::Tensor keep =\n      at::empty({dets_num}, dets.options().dtype(at::kLong).device(at::kCPU));\n  int64_t* keep_out = keep.data_ptr<int64_t>();\n\n  int num_to_keep = 0;\n  for (int i = 0; i < dets_num; i++) {\n    int nblock = i / threadsPerBlock;\n    int inblock = i % threadsPerBlock;\n\n    if (!(remv[nblock] & (1ULL << inblock))) {\n      keep_out[num_to_keep++] = i;\n      unsigned long long* p = mask_host + i * col_blocks;\n      for (int j = nblock; j < col_blocks; j++) {\n        remv[j] |= p[j];\n      }\n    }\n  }\n\n  AT_CUDA_CHECK(cudaGetLastError());\n  return order_t.index(\n      {keep.narrow(/*dim=*/0, /*start=*/0, /*length=*/num_to_keep)\n           .to(order_t.device(), keep.scalar_type())});\n}\n\n} // namespace detectron2\n"
  },
  {
    "path": "detectron2/layers/csrc/vision.cpp",
    "content": "// Copyright (c) Facebook, Inc. and its affiliates.\n\n#include <torch/extension.h>\n#include \"ROIAlignRotated/ROIAlignRotated.h\"\n#include \"box_iou_rotated/box_iou_rotated.h\"\n#include \"cocoeval/cocoeval.h\"\n#include \"deformable/deform_conv.h\"\n#include \"nms_rotated/nms_rotated.h\"\n\nnamespace detectron2 {\n\n#if defined(WITH_CUDA) || defined(WITH_HIP)\nextern int get_cudart_version();\n#endif\n\nstd::string get_cuda_version() {\n#if defined(WITH_CUDA) || defined(WITH_HIP)\n  std::ostringstream oss;\n\n#if defined(WITH_CUDA)\n  oss << \"CUDA \";\n#else\n  oss << \"HIP \";\n#endif\n\n  // copied from\n  // https://github.com/pytorch/pytorch/blob/master/aten/src/ATen/cuda/detail/CUDAHooks.cpp#L231\n  auto printCudaStyleVersion = [&](int v) {\n    oss << (v / 1000) << \".\" << (v / 10 % 100);\n    if (v % 10 != 0) {\n      oss << \".\" << (v % 10);\n    }\n  };\n  printCudaStyleVersion(get_cudart_version());\n  return oss.str();\n#else // neither CUDA nor HIP\n  return std::string(\"not available\");\n#endif\n}\n\nbool has_cuda() {\n#if defined(WITH_CUDA)\n  return true;\n#else\n  return false;\n#endif\n}\n\n// similar to\n// https://github.com/pytorch/pytorch/blob/master/aten/src/ATen/Version.cpp\nstd::string get_compiler_version() {\n  std::ostringstream ss;\n#if defined(__GNUC__)\n#ifndef __clang__\n\n#if ((__GNUC__ <= 4) && (__GNUC_MINOR__ <= 8))\n#error \"GCC >= 4.9 is required!\"\n#endif\n\n  { ss << \"GCC \" << __GNUC__ << \".\" << __GNUC_MINOR__; }\n#endif\n#endif\n\n#if defined(__clang_major__)\n  {\n    ss << \"clang \" << __clang_major__ << \".\" << __clang_minor__ << \".\"\n       << __clang_patchlevel__;\n  }\n#endif\n\n#if defined(_MSC_VER)\n  { ss << \"MSVC \" << _MSC_FULL_VER; }\n#endif\n  return ss.str();\n}\n\nPYBIND11_MODULE(TORCH_EXTENSION_NAME, m) {\n  m.def(\"get_compiler_version\", &get_compiler_version, \"get_compiler_version\");\n  m.def(\"get_cuda_version\", &get_cuda_version, \"get_cuda_version\");\n  
m.def(\"has_cuda\", &has_cuda, \"has_cuda\");\n\n  m.def(\"deform_conv_forward\", &deform_conv_forward, \"deform_conv_forward\");\n  m.def(\n      \"deform_conv_backward_input\",\n      &deform_conv_backward_input,\n      \"deform_conv_backward_input\");\n  m.def(\n      \"deform_conv_backward_filter\",\n      &deform_conv_backward_filter,\n      \"deform_conv_backward_filter\");\n  m.def(\n      \"modulated_deform_conv_forward\",\n      &modulated_deform_conv_forward,\n      \"modulated_deform_conv_forward\");\n  m.def(\n      \"modulated_deform_conv_backward\",\n      &modulated_deform_conv_backward,\n      \"modulated_deform_conv_backward\");\n\n  m.def(\"COCOevalAccumulate\", &COCOeval::Accumulate, \"COCOeval::Accumulate\");\n  m.def(\n      \"COCOevalEvaluateImages\",\n      &COCOeval::EvaluateImages,\n      \"COCOeval::EvaluateImages\");\n  pybind11::class_<COCOeval::InstanceAnnotation>(m, \"InstanceAnnotation\")\n      .def(pybind11::init<uint64_t, double, double, bool, bool>());\n  pybind11::class_<COCOeval::ImageEvaluation>(m, \"ImageEvaluation\")\n      .def(pybind11::init<>());\n}\n\nTORCH_LIBRARY(detectron2, m) {\n  m.def(\"nms_rotated\", &nms_rotated);\n  m.def(\"box_iou_rotated\", &box_iou_rotated);\n  m.def(\"roi_align_rotated_forward\", &ROIAlignRotated_forward);\n  m.def(\"roi_align_rotated_backward\", &ROIAlignRotated_backward);\n}\n} // namespace detectron2\n"
  },
  {
    "path": "detectron2/layers/deform_conv.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates.\nimport math\nfrom functools import lru_cache\nimport torch\nfrom torch import nn\nfrom torch.autograd import Function\nfrom torch.autograd.function import once_differentiable\nfrom torch.nn.modules.utils import _pair\nfrom torchvision.ops import deform_conv2d\n\nfrom detectron2.utils.develop import create_dummy_class, create_dummy_func\n\nfrom .wrappers import _NewEmptyTensorOp\n\n\nclass _DeformConv(Function):\n    @staticmethod\n    def forward(\n        ctx,\n        input,\n        offset,\n        weight,\n        stride=1,\n        padding=0,\n        dilation=1,\n        groups=1,\n        deformable_groups=1,\n        im2col_step=64,\n    ):\n        if input is not None and input.dim() != 4:\n            raise ValueError(\n                \"Expected 4D tensor as input, got {}D tensor instead.\".format(input.dim())\n            )\n        ctx.stride = _pair(stride)\n        ctx.padding = _pair(padding)\n        ctx.dilation = _pair(dilation)\n        ctx.groups = groups\n        ctx.deformable_groups = deformable_groups\n        ctx.im2col_step = im2col_step\n\n        ctx.save_for_backward(input, offset, weight)\n\n        output = input.new_empty(\n            _DeformConv._output_size(input, weight, ctx.padding, ctx.dilation, ctx.stride)\n        )\n\n        ctx.bufs_ = [input.new_empty(0), input.new_empty(0)]  # columns, ones\n\n        if not input.is_cuda:\n            # TODO: let torchvision support full features of our deformconv.\n            if deformable_groups != 1:\n                raise NotImplementedError(\n                    \"Deformable Conv with deformable_groups != 1 is not supported on CPUs!\"\n                )\n            return deform_conv2d(\n                input, offset, weight, stride=stride, padding=padding, dilation=dilation\n            )\n        else:\n            cur_im2col_step = _DeformConv._cal_im2col_step(input.shape[0], ctx.im2col_step)\n            
assert (input.shape[0] % cur_im2col_step) == 0, \"im2col step must divide batchsize\"\n\n            _C.deform_conv_forward(\n                input,\n                weight,\n                offset,\n                output,\n                ctx.bufs_[0],\n                ctx.bufs_[1],\n                weight.size(3),\n                weight.size(2),\n                ctx.stride[1],\n                ctx.stride[0],\n                ctx.padding[1],\n                ctx.padding[0],\n                ctx.dilation[1],\n                ctx.dilation[0],\n                ctx.groups,\n                ctx.deformable_groups,\n                cur_im2col_step,\n            )\n        return output\n\n    @staticmethod\n    @once_differentiable\n    def backward(ctx, grad_output):\n        input, offset, weight = ctx.saved_tensors\n\n        grad_input = grad_offset = grad_weight = None\n\n        if not grad_output.is_cuda:\n            raise NotImplementedError(\"Deformable Conv is not supported on CPUs!\")\n        else:\n            cur_im2col_step = _DeformConv._cal_im2col_step(input.shape[0], ctx.im2col_step)\n            assert (input.shape[0] % cur_im2col_step) == 0, \"im2col step must divide batchsize\"\n\n            if ctx.needs_input_grad[0] or ctx.needs_input_grad[1]:\n                grad_input = torch.zeros_like(input)\n                grad_offset = torch.zeros_like(offset)\n                _C.deform_conv_backward_input(\n                    input,\n                    offset,\n                    grad_output,\n                    grad_input,\n                    grad_offset,\n                    weight,\n                    ctx.bufs_[0],\n                    weight.size(3),\n                    weight.size(2),\n                    ctx.stride[1],\n                    ctx.stride[0],\n                    ctx.padding[1],\n                    ctx.padding[0],\n                    ctx.dilation[1],\n                    ctx.dilation[0],\n                    ctx.groups,\n     
               ctx.deformable_groups,\n                    cur_im2col_step,\n                )\n\n            if ctx.needs_input_grad[2]:\n                grad_weight = torch.zeros_like(weight)\n                _C.deform_conv_backward_filter(\n                    input,\n                    offset,\n                    grad_output,\n                    grad_weight,\n                    ctx.bufs_[0],\n                    ctx.bufs_[1],\n                    weight.size(3),\n                    weight.size(2),\n                    ctx.stride[1],\n                    ctx.stride[0],\n                    ctx.padding[1],\n                    ctx.padding[0],\n                    ctx.dilation[1],\n                    ctx.dilation[0],\n                    ctx.groups,\n                    ctx.deformable_groups,\n                    1,\n                    cur_im2col_step,\n                )\n\n        return grad_input, grad_offset, grad_weight, None, None, None, None, None, None\n\n    @staticmethod\n    def _output_size(input, weight, padding, dilation, stride):\n        channels = weight.size(0)\n        output_size = (input.size(0), channels)\n        for d in range(input.dim() - 2):\n            in_size = input.size(d + 2)\n            pad = padding[d]\n            kernel = dilation[d] * (weight.size(d + 2) - 1) + 1\n            stride_ = stride[d]\n            output_size += ((in_size + (2 * pad) - kernel) // stride_ + 1,)\n        if not all(map(lambda s: s > 0, output_size)):\n            raise ValueError(\n                \"convolution input is too small (output would be {})\".format(\n                    \"x\".join(map(str, output_size))\n                )\n            )\n        return output_size\n\n    @staticmethod\n    @lru_cache(maxsize=128)\n    def _cal_im2col_step(input_size, default_size):\n        \"\"\"\n        Calculate proper im2col step size, which should be divisible by input_size and not larger\n        than prefer_size. 
Meanwhile the step size should be as large as possible to be more\n        efficient. So we choose the largest one among all divisors of input_size which are smaller\n        than prefer_size.\n        :param input_size: input batch size .\n        :param default_size: default preferred im2col step size.\n        :return: the largest proper step size.\n        \"\"\"\n        if input_size <= default_size:\n            return input_size\n        best_step = 1\n        for step in range(2, min(int(math.sqrt(input_size)) + 1, default_size)):\n            if input_size % step == 0:\n                if input_size // step <= default_size:\n                    return input_size // step\n                best_step = step\n\n        return best_step\n\n\nclass _ModulatedDeformConv(Function):\n    @staticmethod\n    def forward(\n        ctx,\n        input,\n        offset,\n        mask,\n        weight,\n        bias=None,\n        stride=1,\n        padding=0,\n        dilation=1,\n        groups=1,\n        deformable_groups=1,\n    ):\n        ctx.stride = stride\n        ctx.padding = padding\n        ctx.dilation = dilation\n        ctx.groups = groups\n        ctx.deformable_groups = deformable_groups\n        ctx.with_bias = bias is not None\n        if not ctx.with_bias:\n            bias = input.new_empty(1)  # fake tensor\n        if not input.is_cuda:\n            raise NotImplementedError(\"Deformable Conv is not supported on CPUs!\")\n        if (\n            weight.requires_grad\n            or mask.requires_grad\n            or offset.requires_grad\n            or input.requires_grad\n        ):\n            ctx.save_for_backward(input, offset, mask, weight, bias)\n        output = input.new_empty(_ModulatedDeformConv._infer_shape(ctx, input, weight))\n        ctx._bufs = [input.new_empty(0), input.new_empty(0)]\n        _C.modulated_deform_conv_forward(\n            input,\n            weight,\n            bias,\n            ctx._bufs[0],\n            
offset,\n            mask,\n            output,\n            ctx._bufs[1],\n            weight.shape[2],\n            weight.shape[3],\n            ctx.stride,\n            ctx.stride,\n            ctx.padding,\n            ctx.padding,\n            ctx.dilation,\n            ctx.dilation,\n            ctx.groups,\n            ctx.deformable_groups,\n            ctx.with_bias,\n        )\n        return output\n\n    @staticmethod\n    @once_differentiable\n    def backward(ctx, grad_output):\n        if not grad_output.is_cuda:\n            raise NotImplementedError(\"Deformable Conv is not supported on CPUs!\")\n        input, offset, mask, weight, bias = ctx.saved_tensors\n        grad_input = torch.zeros_like(input)\n        grad_offset = torch.zeros_like(offset)\n        grad_mask = torch.zeros_like(mask)\n        grad_weight = torch.zeros_like(weight)\n        grad_bias = torch.zeros_like(bias)\n        _C.modulated_deform_conv_backward(\n            input,\n            weight,\n            bias,\n            ctx._bufs[0],\n            offset,\n            mask,\n            ctx._bufs[1],\n            grad_input,\n            grad_weight,\n            grad_bias,\n            grad_offset,\n            grad_mask,\n            grad_output,\n            weight.shape[2],\n            weight.shape[3],\n            ctx.stride,\n            ctx.stride,\n            ctx.padding,\n            ctx.padding,\n            ctx.dilation,\n            ctx.dilation,\n            ctx.groups,\n            ctx.deformable_groups,\n            ctx.with_bias,\n        )\n        if not ctx.with_bias:\n            grad_bias = None\n\n        return (\n            grad_input,\n            grad_offset,\n            grad_mask,\n            grad_weight,\n            grad_bias,\n            None,\n            None,\n            None,\n            None,\n            None,\n        )\n\n    @staticmethod\n    def _infer_shape(ctx, input, weight):\n        n = input.size(0)\n        
channels_out = weight.size(0)\n        height, width = input.shape[2:4]\n        kernel_h, kernel_w = weight.shape[2:4]\n        height_out = (\n            height + 2 * ctx.padding - (ctx.dilation * (kernel_h - 1) + 1)\n        ) // ctx.stride + 1\n        width_out = (\n            width + 2 * ctx.padding - (ctx.dilation * (kernel_w - 1) + 1)\n        ) // ctx.stride + 1\n        return n, channels_out, height_out, width_out\n\n\ndeform_conv = _DeformConv.apply\nmodulated_deform_conv = _ModulatedDeformConv.apply\n\n\nclass DeformConv(nn.Module):\n    def __init__(\n        self,\n        in_channels,\n        out_channels,\n        kernel_size,\n        stride=1,\n        padding=0,\n        dilation=1,\n        groups=1,\n        deformable_groups=1,\n        bias=False,\n        norm=None,\n        activation=None,\n    ):\n        \"\"\"\n        Deformable convolution from :paper:`deformconv`.\n\n        Arguments are similar to :class:`Conv2D`. Extra arguments:\n\n        Args:\n            deformable_groups (int): number of groups used in deformable convolution.\n            norm (nn.Module, optional): a normalization layer\n            activation (callable(Tensor) -> Tensor): a callable activation function\n        \"\"\"\n        super(DeformConv, self).__init__()\n\n        assert not bias\n        assert in_channels % groups == 0, \"in_channels {} cannot be divisible by groups {}\".format(\n            in_channels, groups\n        )\n        assert (\n            out_channels % groups == 0\n        ), \"out_channels {} cannot be divisible by groups {}\".format(out_channels, groups)\n\n        self.in_channels = in_channels\n        self.out_channels = out_channels\n        self.kernel_size = _pair(kernel_size)\n        self.stride = _pair(stride)\n        self.padding = _pair(padding)\n        self.dilation = _pair(dilation)\n        self.groups = groups\n        self.deformable_groups = deformable_groups\n        self.norm = norm\n        
self.activation = activation\n\n        self.weight = nn.Parameter(\n            torch.Tensor(out_channels, in_channels // self.groups, *self.kernel_size)\n        )\n        self.bias = None\n\n        nn.init.kaiming_uniform_(self.weight, nonlinearity=\"relu\")\n\n    def forward(self, x, offset):\n        if x.numel() == 0:\n            # When input is empty, we want to return a empty tensor with \"correct\" shape,\n            # So that the following operations will not panic\n            # if they check for the shape of the tensor.\n            # This computes the height and width of the output tensor\n            output_shape = [\n                (i + 2 * p - (di * (k - 1) + 1)) // s + 1\n                for i, p, di, k, s in zip(\n                    x.shape[-2:], self.padding, self.dilation, self.kernel_size, self.stride\n                )\n            ]\n            output_shape = [x.shape[0], self.weight.shape[0]] + output_shape\n            return _NewEmptyTensorOp.apply(x, output_shape)\n\n        x = deform_conv(\n            x,\n            offset,\n            self.weight,\n            self.stride,\n            self.padding,\n            self.dilation,\n            self.groups,\n            self.deformable_groups,\n        )\n        if self.norm is not None:\n            x = self.norm(x)\n        if self.activation is not None:\n            x = self.activation(x)\n        return x\n\n    def extra_repr(self):\n        tmpstr = \"in_channels=\" + str(self.in_channels)\n        tmpstr += \", out_channels=\" + str(self.out_channels)\n        tmpstr += \", kernel_size=\" + str(self.kernel_size)\n        tmpstr += \", stride=\" + str(self.stride)\n        tmpstr += \", padding=\" + str(self.padding)\n        tmpstr += \", dilation=\" + str(self.dilation)\n        tmpstr += \", groups=\" + str(self.groups)\n        tmpstr += \", deformable_groups=\" + str(self.deformable_groups)\n        tmpstr += \", bias=False\"\n        return tmpstr\n\n\nclass 
ModulatedDeformConv(nn.Module):\n    def __init__(\n        self,\n        in_channels,\n        out_channels,\n        kernel_size,\n        stride=1,\n        padding=0,\n        dilation=1,\n        groups=1,\n        deformable_groups=1,\n        bias=True,\n        norm=None,\n        activation=None,\n    ):\n        \"\"\"\n        Modulated deformable convolution from :paper:`deformconv2`.\n\n        Arguments are similar to :class:`Conv2D`. Extra arguments:\n\n        Args:\n            deformable_groups (int): number of groups used in deformable convolution.\n            norm (nn.Module, optional): a normalization layer\n            activation (callable(Tensor) -> Tensor): a callable activation function\n        \"\"\"\n        super(ModulatedDeformConv, self).__init__()\n        self.in_channels = in_channels\n        self.out_channels = out_channels\n        self.kernel_size = _pair(kernel_size)\n        self.stride = stride\n        self.padding = padding\n        self.dilation = dilation\n        self.groups = groups\n        self.deformable_groups = deformable_groups\n        self.with_bias = bias\n        self.norm = norm\n        self.activation = activation\n\n        self.weight = nn.Parameter(\n            torch.Tensor(out_channels, in_channels // groups, *self.kernel_size)\n        )\n        if bias:\n            self.bias = nn.Parameter(torch.Tensor(out_channels))\n        else:\n            self.bias = None\n\n        nn.init.kaiming_uniform_(self.weight, nonlinearity=\"relu\")\n        if self.bias is not None:\n            nn.init.constant_(self.bias, 0)\n\n    def forward(self, x, offset, mask):\n        if x.numel() == 0:\n            output_shape = [\n                (i + 2 * p - (di * (k - 1) + 1)) // s + 1\n                for i, p, di, k, s in zip(\n                    x.shape[-2:], self.padding, self.dilation, self.kernel_size, self.stride\n                )\n            ]\n            output_shape = [x.shape[0], 
self.weight.shape[0]] + output_shape\n            return _NewEmptyTensorOp.apply(x, output_shape)\n\n        x = modulated_deform_conv(\n            x,\n            offset,\n            mask,\n            self.weight,\n            self.bias,\n            self.stride,\n            self.padding,\n            self.dilation,\n            self.groups,\n            self.deformable_groups,\n        )\n        if self.norm is not None:\n            x = self.norm(x)\n        if self.activation is not None:\n            x = self.activation(x)\n        return x\n\n    def extra_repr(self):\n        tmpstr = \"in_channels=\" + str(self.in_channels)\n        tmpstr += \", out_channels=\" + str(self.out_channels)\n        tmpstr += \", kernel_size=\" + str(self.kernel_size)\n        tmpstr += \", stride=\" + str(self.stride)\n        tmpstr += \", padding=\" + str(self.padding)\n        tmpstr += \", dilation=\" + str(self.dilation)\n        tmpstr += \", groups=\" + str(self.groups)\n        tmpstr += \", deformable_groups=\" + str(self.deformable_groups)\n        tmpstr += \", bias=\" + str(self.with_bias)\n        return tmpstr\n\n\ntry:\n    from detectron2 import _C\nexcept ImportError:\n    # TODO: register ops natively so there is no need to import _C.\n    _msg = \"detectron2 is not compiled successfully, please build following the instructions!\"\n    _args = (\"detectron2._C\", _msg)\n    DeformConv = create_dummy_class(\"DeformConv\", *_args)\n    ModulatedDeformConv = create_dummy_class(\"ModulatedDeformConv\", *_args)\n    deform_conv = create_dummy_func(\"deform_conv\", *_args)\n    modulated_deform_conv = create_dummy_func(\"modulated_deform_conv\", *_args)\n"
  },
  {
    "path": "detectron2/layers/losses.py",
    "content": "import math\nimport torch\n\n\ndef diou_loss(\n    boxes1: torch.Tensor,\n    boxes2: torch.Tensor,\n    reduction: str = \"none\",\n    eps: float = 1e-7,\n) -> torch.Tensor:\n    \"\"\"\n    Distance Intersection over Union Loss (Zhaohui Zheng et. al)\n    https://arxiv.org/abs/1911.08287\n    Args:\n        boxes1, boxes2 (Tensor): box locations in XYXY format, shape (N, 4) or (4,).\n        reduction: 'none' | 'mean' | 'sum'\n                 'none': No reduction will be applied to the output.\n                 'mean': The output will be averaged.\n                 'sum': The output will be summed.\n        eps (float): small number to prevent division by zero\n    \"\"\"\n\n    x1, y1, x2, y2 = boxes1.unbind(dim=-1)\n    x1g, y1g, x2g, y2g = boxes2.unbind(dim=-1)\n\n    # TODO: use torch._assert_async() when pytorch 1.8 support is dropped\n    assert (x2 >= x1).all(), \"bad box: x1 larger than x2\"\n    assert (y2 >= y1).all(), \"bad box: y1 larger than y2\"\n\n    # Intersection keypoints\n    xkis1 = torch.max(x1, x1g)\n    ykis1 = torch.max(y1, y1g)\n    xkis2 = torch.min(x2, x2g)\n    ykis2 = torch.min(y2, y2g)\n\n    intsct = torch.zeros_like(x1)\n    mask = (ykis2 > ykis1) & (xkis2 > xkis1)\n    intsct[mask] = (xkis2[mask] - xkis1[mask]) * (ykis2[mask] - ykis1[mask])\n    union = (x2 - x1) * (y2 - y1) + (x2g - x1g) * (y2g - y1g) - intsct + eps\n    iou = intsct / union\n\n    # smallest enclosing box\n    xc1 = torch.min(x1, x1g)\n    yc1 = torch.min(y1, y1g)\n    xc2 = torch.max(x2, x2g)\n    yc2 = torch.max(y2, y2g)\n    diag_len = ((xc2 - xc1) ** 2) + ((yc2 - yc1) ** 2) + eps\n\n    # centers of boxes\n    x_p = (x2 + x1) / 2\n    y_p = (y2 + y1) / 2\n    x_g = (x1g + x2g) / 2\n    y_g = (y1g + y2g) / 2\n    distance = ((x_p - x_g) ** 2) + ((y_p - y_g) ** 2)\n\n    # Eqn. 
(7)\n    loss = 1 - iou + (distance / diag_len)\n    if reduction == \"mean\":\n        loss = loss.mean() if loss.numel() > 0 else 0.0 * loss.sum()\n    elif reduction == \"sum\":\n        loss = loss.sum()\n\n    return loss\n\n\ndef ciou_loss(\n    boxes1: torch.Tensor,\n    boxes2: torch.Tensor,\n    reduction: str = \"none\",\n    eps: float = 1e-7,\n) -> torch.Tensor:\n    \"\"\"\n    Complete Intersection over Union Loss (Zhaohui Zheng et. al)\n    https://arxiv.org/abs/1911.08287\n    Args:\n        boxes1, boxes2 (Tensor): box locations in XYXY format, shape (N, 4) or (4,).\n        reduction: 'none' | 'mean' | 'sum'\n                 'none': No reduction will be applied to the output.\n                 'mean': The output will be averaged.\n                 'sum': The output will be summed.\n        eps (float): small number to prevent division by zero\n    \"\"\"\n\n    x1, y1, x2, y2 = boxes1.unbind(dim=-1)\n    x1g, y1g, x2g, y2g = boxes2.unbind(dim=-1)\n\n    # TODO: use torch._assert_async() when pytorch 1.8 support is dropped\n    assert (x2 >= x1).all(), \"bad box: x1 larger than x2\"\n    assert (y2 >= y1).all(), \"bad box: y1 larger than y2\"\n\n    # Intersection keypoints\n    xkis1 = torch.max(x1, x1g)\n    ykis1 = torch.max(y1, y1g)\n    xkis2 = torch.min(x2, x2g)\n    ykis2 = torch.min(y2, y2g)\n\n    intsct = torch.zeros_like(x1)\n    mask = (ykis2 > ykis1) & (xkis2 > xkis1)\n    intsct[mask] = (xkis2[mask] - xkis1[mask]) * (ykis2[mask] - ykis1[mask])\n    union = (x2 - x1) * (y2 - y1) + (x2g - x1g) * (y2g - y1g) - intsct + eps\n    iou = intsct / union\n\n    # smallest enclosing box\n    xc1 = torch.min(x1, x1g)\n    yc1 = torch.min(y1, y1g)\n    xc2 = torch.max(x2, x2g)\n    yc2 = torch.max(y2, y2g)\n    diag_len = ((xc2 - xc1) ** 2) + ((yc2 - yc1) ** 2) + eps\n\n    # centers of boxes\n    x_p = (x2 + x1) / 2\n    y_p = (y2 + y1) / 2\n    x_g = (x1g + x2g) / 2\n    y_g = (y1g + y2g) / 2\n    distance = ((x_p - x_g) ** 2) + ((y_p - y_g) 
** 2)\n\n    # width and height of boxes\n    w_pred = x2 - x1\n    h_pred = y2 - y1\n    w_gt = x2g - x1g\n    h_gt = y2g - y1g\n    v = (4 / (math.pi**2)) * torch.pow((torch.atan(w_gt / h_gt) - torch.atan(w_pred / h_pred)), 2)\n    with torch.no_grad():\n        alpha = v / (1 - iou + v + eps)\n\n    # Eqn. (10)\n    loss = 1 - iou + (distance / diag_len) + alpha * v\n    if reduction == \"mean\":\n        loss = loss.mean() if loss.numel() > 0 else 0.0 * loss.sum()\n    elif reduction == \"sum\":\n        loss = loss.sum()\n\n    return loss\n"
  },
  {
    "path": "detectron2/layers/mask_ops.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates.\nimport numpy as np\nfrom typing import Tuple\nimport torch\nfrom PIL import Image\nfrom torch.nn import functional as F\n\n__all__ = [\"paste_masks_in_image\"]\n\n\nBYTES_PER_FLOAT = 4\n# TODO: This memory limit may be too much or too little. It would be better to\n# determine it based on available resources.\nGPU_MEM_LIMIT = 1024**3  # 1 GB memory limit\n\n\ndef _do_paste_mask(masks, boxes, img_h: int, img_w: int, skip_empty: bool = True):\n    \"\"\"\n    Args:\n        masks: N, 1, H, W\n        boxes: N, 4\n        img_h, img_w (int):\n        skip_empty (bool): only paste masks within the region that\n            tightly bound all boxes, and returns the results this region only.\n            An important optimization for CPU.\n\n    Returns:\n        if skip_empty == False, a mask of shape (N, img_h, img_w)\n        if skip_empty == True, a mask of shape (N, h', w'), and the slice\n            object for the corresponding region.\n    \"\"\"\n    # On GPU, paste all masks together (up to chunk size)\n    # by using the entire image to sample the masks\n    # Compared to pasting them one by one,\n    # this has more operations but is faster on COCO-scale dataset.\n    device = masks.device\n\n    if skip_empty and not torch.jit.is_scripting():\n        x0_int, y0_int = torch.clamp(boxes.min(dim=0).values.floor()[:2] - 1, min=0).to(\n            dtype=torch.int32\n        )\n        x1_int = torch.clamp(boxes[:, 2].max().ceil() + 1, max=img_w).to(dtype=torch.int32)\n        y1_int = torch.clamp(boxes[:, 3].max().ceil() + 1, max=img_h).to(dtype=torch.int32)\n    else:\n        x0_int, y0_int = 0, 0\n        x1_int, y1_int = img_w, img_h\n    x0, y0, x1, y1 = torch.split(boxes, 1, dim=1)  # each is Nx1\n\n    N = masks.shape[0]\n\n    img_y = torch.arange(y0_int, y1_int, device=device, dtype=torch.float32) + 0.5\n    img_x = torch.arange(x0_int, x1_int, device=device, dtype=torch.float32) + 0.5\n  
  img_y = (img_y - y0) / (y1 - y0) * 2 - 1\n    img_x = (img_x - x0) / (x1 - x0) * 2 - 1\n    # img_x, img_y have shapes (N, w), (N, h)\n\n    gx = img_x[:, None, :].expand(N, img_y.size(1), img_x.size(1))\n    gy = img_y[:, :, None].expand(N, img_y.size(1), img_x.size(1))\n    grid = torch.stack([gx, gy], dim=3)\n\n    if not torch.jit.is_scripting():\n        if not masks.dtype.is_floating_point:\n            masks = masks.float()\n    img_masks = F.grid_sample(masks, grid.to(masks.dtype), align_corners=False)\n\n    if skip_empty and not torch.jit.is_scripting():\n        return img_masks[:, 0], (slice(y0_int, y1_int), slice(x0_int, x1_int))\n    else:\n        return img_masks[:, 0], ()\n\n\n# Annotate boxes as Tensor (but not Boxes) in order to use scripting\n@torch.jit.script_if_tracing\ndef paste_masks_in_image(\n    masks: torch.Tensor, boxes: torch.Tensor, image_shape: Tuple[int, int], threshold: float = 0.5\n):\n    \"\"\"\n    Paste a set of masks that are of a fixed resolution (e.g., 28 x 28) into an image.\n    The location, height, and width for pasting each mask is determined by their\n    corresponding bounding boxes in boxes.\n\n    Note:\n        This is a complicated but more accurate implementation. In actual deployment, it is\n        often enough to use a faster but less accurate implementation.\n        See :func:`paste_mask_in_image_old` in this file for an alternative implementation.\n\n    Args:\n        masks (tensor): Tensor of shape (Bimg, Hmask, Wmask), where Bimg is the number of\n            detected object instances in the image and Hmask, Wmask are the mask width and mask\n            height of the predicted mask (e.g., Hmask = Wmask = 28). 
Values are in [0, 1].\n        boxes (Boxes or Tensor): A Boxes of length Bimg or Tensor of shape (Bimg, 4).\n            boxes[i] and masks[i] correspond to the same object instance.\n        image_shape (tuple): height, width\n        threshold (float): A threshold in [0, 1] for converting the (soft) masks to\n            binary masks.\n\n    Returns:\n        img_masks (Tensor): A tensor of shape (Bimg, Himage, Wimage), where Bimg is the\n        number of detected object instances and Himage, Wimage are the image width\n        and height. img_masks[i] is a binary mask for object instance i.\n    \"\"\"\n\n    assert masks.shape[-1] == masks.shape[-2], \"Only square mask predictions are supported\"\n    N = len(masks)\n    if N == 0:\n        return masks.new_empty((0,) + image_shape, dtype=torch.uint8)\n    if not isinstance(boxes, torch.Tensor):\n        boxes = boxes.tensor\n    device = boxes.device\n    assert len(boxes) == N, boxes.shape\n\n    img_h, img_w = image_shape\n\n    # The actual implementation split the input into chunks,\n    # and paste them chunk by chunk.\n    if device.type == \"cpu\" or torch.jit.is_scripting():\n        # CPU is most efficient when they are pasted one by one with skip_empty=True\n        # so that it performs minimal number of operations.\n        num_chunks = N\n    else:\n        # GPU benefits from parallelism for larger chunks, but may have memory issue\n        # int(img_h) because shape may be tensors in tracing\n        num_chunks = int(np.ceil(N * int(img_h) * int(img_w) * BYTES_PER_FLOAT / GPU_MEM_LIMIT))\n        assert (\n            num_chunks <= N\n        ), \"Default GPU_MEM_LIMIT in mask_ops.py is too small; try increasing it\"\n    chunks = torch.chunk(torch.arange(N, device=device), num_chunks)\n\n    img_masks = torch.zeros(\n        N, img_h, img_w, device=device, dtype=torch.bool if threshold >= 0 else torch.uint8\n    )\n    for inds in chunks:\n        masks_chunk, spatial_inds = _do_paste_mask(\n 
           masks[inds, None, :, :], boxes[inds], img_h, img_w, skip_empty=device.type == \"cpu\"\n        )\n\n        if threshold >= 0:\n            masks_chunk = (masks_chunk >= threshold).to(dtype=torch.bool)\n        else:\n            # for visualization and debugging\n            masks_chunk = (masks_chunk * 255).to(dtype=torch.uint8)\n\n        if torch.jit.is_scripting():  # Scripting does not use the optimized codepath\n            img_masks[inds] = masks_chunk\n        else:\n            img_masks[(inds,) + spatial_inds] = masks_chunk\n    return img_masks\n\n\n# The below are the original paste function (from Detectron1) which has\n# larger quantization error.\n# It is faster on CPU, while the aligned one is faster on GPU thanks to grid_sample.\n\n\ndef paste_mask_in_image_old(mask, box, img_h, img_w, threshold):\n    \"\"\"\n    Paste a single mask in an image.\n    This is a per-box implementation of :func:`paste_masks_in_image`.\n    This function has larger quantization error due to incorrect pixel\n    modeling and is not used any more.\n\n    Args:\n        mask (Tensor): A tensor of shape (Hmask, Wmask) storing the mask of a single\n            object instance. Values are in [0, 1].\n        box (Tensor): A tensor of shape (4, ) storing the x0, y0, x1, y1 box corners\n            of the object instance.\n        img_h, img_w (int): Image height and width.\n        threshold (float): Mask binarization threshold in [0, 1].\n\n    Returns:\n        im_mask (Tensor):\n            The resized and binarized object mask pasted into the original\n            image plane (a tensor of shape (img_h, img_w)).\n    \"\"\"\n    # Conversion from continuous box coordinates to discrete pixel coordinates\n    # via truncation (cast to int32). 
This determines which pixels to paste the\n    # mask onto.\n    box = box.to(dtype=torch.int32)  # Continuous to discrete coordinate conversion\n    # An example (1D) box with continuous coordinates (x0=0.7, x1=4.3) will map to\n    # a discrete coordinates (x0=0, x1=4). Note that box is mapped to 5 = x1 - x0 + 1\n    # pixels (not x1 - x0 pixels).\n    samples_w = box[2] - box[0] + 1  # Number of pixel samples, *not* geometric width\n    samples_h = box[3] - box[1] + 1  # Number of pixel samples, *not* geometric height\n\n    # Resample the mask from it's original grid to the new samples_w x samples_h grid\n    mask = Image.fromarray(mask.cpu().numpy())\n    mask = mask.resize((samples_w, samples_h), resample=Image.BILINEAR)\n    mask = np.array(mask, copy=False)\n\n    if threshold >= 0:\n        mask = np.array(mask > threshold, dtype=np.uint8)\n        mask = torch.from_numpy(mask)\n    else:\n        # for visualization and debugging, we also\n        # allow it to return an unmodified mask\n        mask = torch.from_numpy(mask * 255).to(torch.uint8)\n\n    im_mask = torch.zeros((img_h, img_w), dtype=torch.uint8)\n    x_0 = max(box[0], 0)\n    x_1 = min(box[2] + 1, img_w)\n    y_0 = max(box[1], 0)\n    y_1 = min(box[3] + 1, img_h)\n\n    im_mask[y_0:y_1, x_0:x_1] = mask[\n        (y_0 - box[1]) : (y_1 - box[1]), (x_0 - box[0]) : (x_1 - box[0])\n    ]\n    return im_mask\n\n\n# Our pixel modeling requires extrapolation for any continuous\n# coordinate < 0.5 or > length - 0.5. 
When sampling pixels on the masks,\n# we would like this extrapolation to be an interpolation between boundary values and zero,\n# instead of using absolute zero or boundary values.\n# Therefore `paste_mask_in_image_old` is often used with zero padding around the masks like this:\n# masks, scale = pad_masks(masks[:, 0, :, :], 1)\n# boxes = scale_boxes(boxes.tensor, scale)\n\n\ndef pad_masks(masks, padding):\n    \"\"\"\n    Args:\n        masks (tensor): A tensor of shape (B, M, M) representing B masks.\n        padding (int): Number of cells to pad on all sides.\n\n    Returns:\n        The padded masks and the scale factor of the padding size / original size.\n    \"\"\"\n    B = masks.shape[0]\n    M = masks.shape[-1]\n    pad2 = 2 * padding\n    scale = float(M + pad2) / M\n    padded_masks = masks.new_zeros((B, M + pad2, M + pad2))\n    padded_masks[:, padding:-padding, padding:-padding] = masks\n    return padded_masks, scale\n\n\ndef scale_boxes(boxes, scale):\n    \"\"\"\n    Args:\n        boxes (tensor): A tensor of shape (B, 4) representing B boxes with 4\n            coords representing the corners x0, y0, x1, y1,\n        scale (float): The box scaling factor.\n\n    Returns:\n        Scaled boxes.\n    \"\"\"\n    w_half = (boxes[:, 2] - boxes[:, 0]) * 0.5\n    h_half = (boxes[:, 3] - boxes[:, 1]) * 0.5\n    x_c = (boxes[:, 2] + boxes[:, 0]) * 0.5\n    y_c = (boxes[:, 3] + boxes[:, 1]) * 0.5\n\n    w_half *= scale\n    h_half *= scale\n\n    scaled_boxes = torch.zeros_like(boxes)\n    scaled_boxes[:, 0] = x_c - w_half\n    scaled_boxes[:, 2] = x_c + w_half\n    scaled_boxes[:, 1] = y_c - h_half\n    scaled_boxes[:, 3] = y_c + h_half\n    return scaled_boxes\n\n\n@torch.jit.script_if_tracing\ndef _paste_masks_tensor_shape(\n    masks: torch.Tensor,\n    boxes: torch.Tensor,\n    image_shape: Tuple[torch.Tensor, torch.Tensor],\n    threshold: float = 0.5,\n):\n    \"\"\"\n    A wrapper of paste_masks_in_image where image_shape is Tensor.\n    During 
tracing, shapes might be tensors instead of ints. The Tensor->int\n    conversion should be scripted rather than traced.\n    \"\"\"\n    return paste_masks_in_image(masks, boxes, (int(image_shape[0]), int(image_shape[1])), threshold)\n"
  },
  {
    "path": "detectron2/layers/nms.py",
    "content": "# -*- coding: utf-8 -*-\n# Copyright (c) Facebook, Inc. and its affiliates.\n\nimport torch\nfrom torchvision.ops import boxes as box_ops\nfrom torchvision.ops import nms  # noqa . for compatibility\n\n\ndef batched_nms(\n    boxes: torch.Tensor, scores: torch.Tensor, idxs: torch.Tensor, iou_threshold: float\n):\n    \"\"\"\n    Same as torchvision.ops.boxes.batched_nms, but with float().\n    \"\"\"\n    assert boxes.shape[-1] == 4\n    # Note: Torchvision already has a strategy (https://github.com/pytorch/vision/issues/1311)\n    # to decide whether to use coordinate trick or for loop to implement batched_nms. So we\n    # just call it directly.\n    # Fp16 does not have enough range for batched NMS, so adding float().\n    return box_ops.batched_nms(boxes.float(), scores, idxs, iou_threshold)\n\n\n# Note: this function (nms_rotated) might be moved into\n# torchvision/ops/boxes.py in the future\ndef nms_rotated(boxes, scores, iou_threshold):\n    \"\"\"\n    Performs non-maximum suppression (NMS) on the rotated boxes according\n    to their intersection-over-union (IoU).\n\n    Rotated NMS iteratively removes lower scoring rotated boxes which have an\n    IoU greater than iou_threshold with another (higher scoring) rotated box.\n\n    Note that RotatedBox (5, 3, 4, 2, -90) covers exactly the same region as\n    RotatedBox (5, 3, 4, 2, 90) does, and their IoU will be 1. 
However, they\n    can be representing completely different objects in certain tasks, e.g., OCR.\n\n    As for the question of whether rotated-NMS should treat them as faraway boxes\n    even though their IOU is 1, it depends on the application and/or ground truth annotation.\n\n    As an extreme example, consider a single character v and the square box around it.\n\n    If the angle is 0 degree, the object (text) would be read as 'v';\n\n    If the angle is 90 degrees, the object (text) would become '>';\n\n    If the angle is 180 degrees, the object (text) would become '^';\n\n    If the angle is 270/-90 degrees, the object (text) would become '<'\n\n    All of these cases have IoU of 1 to each other, and rotated NMS that only\n    uses IoU as criterion would only keep one of them with the highest score -\n    which, practically, still makes sense in most cases because typically\n    only one of theses orientations is the correct one. Also, it does not matter\n    as much if the box is only used to classify the object (instead of transcribing\n    them with a sequential OCR recognition model) later.\n\n    On the other hand, when we use IoU to filter proposals that are close to the\n    ground truth during training, we should definitely take the angle into account if\n    we know the ground truth is labeled with the strictly correct orientation (as in,\n    upside-down words are annotated with -180 degrees even though they can be covered\n    with a 0/90/-90 degree box, etc.)\n\n    The way the original dataset is annotated also matters. For example, if the dataset\n    is a 4-point polygon dataset that does not enforce ordering of vertices/orientation,\n    we can estimate a minimum rotated bounding box to this polygon, but there's no way\n    we can tell the correct angle with 100% confidence (as shown above, there could be 4 different\n    rotated boxes, with angles differed by 90 degrees to each other, covering the exactly\n    same region). 
In that case we have to just use IoU to determine the box\n    proximity (as many detection benchmarks (even for text) do) unless there're other\n    assumptions we can make (like width is always larger than height, or the object is not\n    rotated by more than 90 degrees CCW/CW, etc.)\n\n    In summary, not considering angles in rotated NMS seems to be a good option for now,\n    but we should be aware of its implications.\n\n    Args:\n        boxes (Tensor[N, 5]): Rotated boxes to perform NMS on. They are expected to be in\n           (x_center, y_center, width, height, angle_degrees) format.\n        scores (Tensor[N]): Scores for each one of the rotated boxes\n        iou_threshold (float): Discards all overlapping rotated boxes with IoU < iou_threshold\n\n    Returns:\n        keep (Tensor): int64 tensor with the indices of the elements that have been kept\n        by Rotated NMS, sorted in decreasing order of scores\n    \"\"\"\n    return torch.ops.detectron2.nms_rotated(boxes, scores, iou_threshold)\n\n\n# Note: this function (batched_nms_rotated) might be moved into\n# torchvision/ops/boxes.py in the future\ndef batched_nms_rotated(boxes, scores, idxs, iou_threshold):\n    \"\"\"\n    Performs non-maximum suppression in a batched fashion.\n\n    Each index value correspond to a category, and NMS\n    will not be applied between elements of different categories.\n\n    Args:\n        boxes (Tensor[N, 5]):\n           boxes where NMS will be performed. 
They\n           are expected to be in (x_ctr, y_ctr, width, height, angle_degrees) format\n        scores (Tensor[N]):\n           scores for each one of the boxes\n        idxs (Tensor[N]):\n           indices of the categories for each one of the boxes.\n        iou_threshold (float):\n           discards all overlapping boxes\n           with IoU < iou_threshold\n\n    Returns:\n        Tensor:\n            int64 tensor with the indices of the elements that have been kept\n            by NMS, sorted in decreasing order of scores\n    \"\"\"\n    assert boxes.shape[-1] == 5\n\n    if boxes.numel() == 0:\n        return torch.empty((0,), dtype=torch.int64, device=boxes.device)\n    boxes = boxes.float()  # fp16 does not have enough range for batched NMS\n    # Strategy: in order to perform NMS independently per class,\n    # we add an offset to all the boxes. The offset is dependent\n    # only on the class idx, and is large enough so that boxes\n    # from different classes do not overlap\n\n    # Note that batched_nms in torchvision/ops/boxes.py only uses max_coordinate,\n    # which won't handle negative coordinates correctly.\n    # Here by using min_coordinate we can make sure the negative coordinates are\n    # correctly handled.\n    max_coordinate = (\n        torch.max(boxes[:, 0], boxes[:, 1]) + torch.max(boxes[:, 2], boxes[:, 3]) / 2\n    ).max()\n    min_coordinate = (\n        torch.min(boxes[:, 0], boxes[:, 1]) - torch.max(boxes[:, 2], boxes[:, 3]) / 2\n    ).min()\n    offsets = idxs.to(boxes) * (max_coordinate - min_coordinate + 1)\n    boxes_for_nms = boxes.clone()  # avoid modifying the original values in boxes\n    boxes_for_nms[:, :2] += offsets[:, None]\n    keep = nms_rotated(boxes_for_nms, scores, iou_threshold)\n    return keep\n"
  },
  {
    "path": "detectron2/layers/roi_align.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates.\nfrom torch import nn\nfrom torchvision.ops import roi_align\n\n\n# NOTE: torchvision's RoIAlign has a different default aligned=False\nclass ROIAlign(nn.Module):\n    def __init__(self, output_size, spatial_scale, sampling_ratio, aligned=True):\n        \"\"\"\n        Args:\n            output_size (tuple): h, w\n            spatial_scale (float): scale the input boxes by this number\n            sampling_ratio (int): number of inputs samples to take for each output\n                sample. 0 to take samples densely.\n            aligned (bool): if False, use the legacy implementation in\n                Detectron. If True, align the results more perfectly.\n\n        Note:\n            The meaning of aligned=True:\n\n            Given a continuous coordinate c, its two neighboring pixel indices (in our\n            pixel model) are computed by floor(c - 0.5) and ceil(c - 0.5). For example,\n            c=1.3 has pixel neighbors with discrete indices [0] and [1] (which are sampled\n            from the underlying signal at continuous coordinates 0.5 and 1.5). But the original\n            roi_align (aligned=False) does not subtract the 0.5 when computing neighboring\n            pixel indices and therefore it uses pixels with a slightly incorrect alignment\n            (relative to our pixel model) when performing bilinear interpolation.\n\n            With `aligned=True`,\n            we first appropriately scale the ROI and then shift it by -0.5\n            prior to calling roi_align. 
This produces the correct neighbors; see\n            detectron2/tests/test_roi_align.py for verification.\n\n            The difference does not make a difference to the model's performance if\n            ROIAlign is used together with conv layers.\n        \"\"\"\n        super().__init__()\n        self.output_size = output_size\n        self.spatial_scale = spatial_scale\n        self.sampling_ratio = sampling_ratio\n        self.aligned = aligned\n\n        from torchvision import __version__\n\n        version = tuple(int(x) for x in __version__.split(\".\")[:2])\n        # https://github.com/pytorch/vision/pull/2438\n        assert version >= (0, 7), \"Require torchvision >= 0.7\"\n\n    def forward(self, input, rois):\n        \"\"\"\n        Args:\n            input: NCHW images\n            rois: Bx5 boxes. First column is the index into N. The other 4 columns are xyxy.\n        \"\"\"\n        assert rois.dim() == 2 and rois.size(1) == 5\n        if input.is_quantized:\n            input = input.dequantize()\n        return roi_align(\n            input,\n            rois.to(dtype=input.dtype),\n            self.output_size,\n            self.spatial_scale,\n            self.sampling_ratio,\n            self.aligned,\n        )\n\n    def __repr__(self):\n        tmpstr = self.__class__.__name__ + \"(\"\n        tmpstr += \"output_size=\" + str(self.output_size)\n        tmpstr += \", spatial_scale=\" + str(self.spatial_scale)\n        tmpstr += \", sampling_ratio=\" + str(self.sampling_ratio)\n        tmpstr += \", aligned=\" + str(self.aligned)\n        tmpstr += \")\"\n        return tmpstr\n"
  },
  {
    "path": "detectron2/layers/roi_align_rotated.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates.\nimport torch\nfrom torch import nn\nfrom torch.autograd import Function\nfrom torch.autograd.function import once_differentiable\nfrom torch.nn.modules.utils import _pair\n\n\nclass _ROIAlignRotated(Function):\n    @staticmethod\n    def forward(ctx, input, roi, output_size, spatial_scale, sampling_ratio):\n        ctx.save_for_backward(roi)\n        ctx.output_size = _pair(output_size)\n        ctx.spatial_scale = spatial_scale\n        ctx.sampling_ratio = sampling_ratio\n        ctx.input_shape = input.size()\n        output = torch.ops.detectron2.roi_align_rotated_forward(\n            input, roi, spatial_scale, output_size[0], output_size[1], sampling_ratio\n        )\n        return output\n\n    @staticmethod\n    @once_differentiable\n    def backward(ctx, grad_output):\n        (rois,) = ctx.saved_tensors\n        output_size = ctx.output_size\n        spatial_scale = ctx.spatial_scale\n        sampling_ratio = ctx.sampling_ratio\n        bs, ch, h, w = ctx.input_shape\n        grad_input = torch.ops.detectron2.roi_align_rotated_backward(\n            grad_output,\n            rois,\n            spatial_scale,\n            output_size[0],\n            output_size[1],\n            bs,\n            ch,\n            h,\n            w,\n            sampling_ratio,\n        )\n        return grad_input, None, None, None, None, None\n\n\nroi_align_rotated = _ROIAlignRotated.apply\n\n\nclass ROIAlignRotated(nn.Module):\n    def __init__(self, output_size, spatial_scale, sampling_ratio):\n        \"\"\"\n        Args:\n            output_size (tuple): h, w\n            spatial_scale (float): scale the input boxes by this number\n            sampling_ratio (int): number of inputs samples to take for each output\n                sample. 
0 to take samples densely.\n\n        Note:\n            ROIAlignRotated supports continuous coordinate by default:\n            Given a continuous coordinate c, its two neighboring pixel indices (in our\n            pixel model) are computed by floor(c - 0.5) and ceil(c - 0.5). For example,\n            c=1.3 has pixel neighbors with discrete indices [0] and [1] (which are sampled\n            from the underlying signal at continuous coordinates 0.5 and 1.5).\n        \"\"\"\n        super(ROIAlignRotated, self).__init__()\n        self.output_size = output_size\n        self.spatial_scale = spatial_scale\n        self.sampling_ratio = sampling_ratio\n\n    def forward(self, input, rois):\n        \"\"\"\n        Args:\n            input: NCHW images\n            rois: Bx6 boxes. First column is the index into N.\n                The other 5 columns are (x_ctr, y_ctr, width, height, angle_degrees).\n        \"\"\"\n        assert rois.dim() == 2 and rois.size(1) == 6\n        orig_dtype = input.dtype\n        if orig_dtype == torch.float16:\n            input = input.float()\n            rois = rois.float()\n        return roi_align_rotated(\n            input, rois, self.output_size, self.spatial_scale, self.sampling_ratio\n        ).to(dtype=orig_dtype)\n\n    def __repr__(self):\n        tmpstr = self.__class__.__name__ + \"(\"\n        tmpstr += \"output_size=\" + str(self.output_size)\n        tmpstr += \", spatial_scale=\" + str(self.spatial_scale)\n        tmpstr += \", sampling_ratio=\" + str(self.sampling_ratio)\n        tmpstr += \")\"\n        return tmpstr\n"
  },
  {
    "path": "detectron2/layers/rotated_boxes.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates.\nfrom __future__ import absolute_import, division, print_function, unicode_literals\nimport torch\n\n\ndef pairwise_iou_rotated(boxes1, boxes2):\n    \"\"\"\n    Return intersection-over-union (Jaccard index) of boxes.\n\n    Both sets of boxes are expected to be in\n    (x_center, y_center, width, height, angle) format.\n\n    Arguments:\n        boxes1 (Tensor[N, 5])\n        boxes2 (Tensor[M, 5])\n\n    Returns:\n        iou (Tensor[N, M]): the NxM matrix containing the pairwise\n            IoU values for every element in boxes1 and boxes2\n    \"\"\"\n    return torch.ops.detectron2.box_iou_rotated(boxes1, boxes2)\n"
  },
  {
    "path": "detectron2/layers/shape_spec.py",
    "content": "# -*- coding: utf-8 -*-\n# Copyright (c) Facebook, Inc. and its affiliates.\nfrom dataclasses import dataclass\nfrom typing import Optional\n\n\n@dataclass\nclass ShapeSpec:\n    \"\"\"\n    A simple structure that contains basic shape specification about a tensor.\n    It is often used as the auxiliary inputs/outputs of models,\n    to complement the lack of shape inference ability among pytorch modules.\n    \"\"\"\n\n    channels: Optional[int] = None\n    height: Optional[int] = None\n    width: Optional[int] = None\n    stride: Optional[int] = None\n"
  },
  {
    "path": "detectron2/layers/wrappers.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates.\n\"\"\"\nWrappers around on some nn functions, mainly to support empty tensors.\n\nIdeally, add support directly in PyTorch to empty tensors in those functions.\n\nThese can be removed once https://github.com/pytorch/pytorch/issues/12013\nis implemented\n\"\"\"\n\nimport warnings\nfrom typing import List, Optional\nimport torch\nfrom torch.nn import functional as F\n\n\ndef shapes_to_tensor(x: List[int], device: Optional[torch.device] = None) -> torch.Tensor:\n    \"\"\"\n    Turn a list of integer scalars or integer Tensor scalars into a vector,\n    in a way that's both traceable and scriptable.\n\n    In tracing, `x` should be a list of scalar Tensor, so the output can trace to the inputs.\n    In scripting or eager, `x` should be a list of int.\n    \"\"\"\n    if torch.jit.is_scripting():\n        return torch.as_tensor(x, device=device)\n    if torch.jit.is_tracing():\n        assert all(\n            [isinstance(t, torch.Tensor) for t in x]\n        ), \"Shape should be tensor during tracing!\"\n        # as_tensor should not be used in tracing because it records a constant\n        ret = torch.stack(x)\n        if ret.device != device:  # avoid recording a hard-coded device if not necessary\n            ret = ret.to(device=device)\n        return ret\n    return torch.as_tensor(x, device=device)\n\n\ndef cat(tensors: List[torch.Tensor], dim: int = 0):\n    \"\"\"\n    Efficient version of torch.cat that avoids a copy if there is only a single element in a list\n    \"\"\"\n    assert isinstance(tensors, (list, tuple))\n    if len(tensors) == 1:\n        return tensors[0]\n    return torch.cat(tensors, dim)\n\n\ndef empty_input_loss_func_wrapper(loss_func):\n    def wrapped_loss_func(input, target, *, reduction=\"mean\", **kwargs):\n        \"\"\"\n        Same as `loss_func`, but returns 0 (instead of nan) for empty inputs.\n        \"\"\"\n        if target.numel() == 0 and reduction == 
\"mean\":\n            return input.sum() * 0.0  # connect the gradient\n        return loss_func(input, target, reduction=reduction, **kwargs)\n\n    return wrapped_loss_func\n\n\ncross_entropy = empty_input_loss_func_wrapper(F.cross_entropy)\n\n\nclass _NewEmptyTensorOp(torch.autograd.Function):\n    @staticmethod\n    def forward(ctx, x, new_shape):\n        ctx.shape = x.shape\n        return x.new_empty(new_shape)\n\n    @staticmethod\n    def backward(ctx, grad):\n        shape = ctx.shape\n        return _NewEmptyTensorOp.apply(grad, shape), None\n\n\nclass Conv2d(torch.nn.Conv2d):\n    \"\"\"\n    A wrapper around :class:`torch.nn.Conv2d` to support empty inputs and more features.\n    \"\"\"\n\n    def __init__(self, *args, **kwargs):\n        \"\"\"\n        Extra keyword arguments supported in addition to those in `torch.nn.Conv2d`:\n\n        Args:\n            norm (nn.Module, optional): a normalization layer\n            activation (callable(Tensor) -> Tensor): a callable activation function\n\n        It assumes that norm layer is used before activation.\n        \"\"\"\n        norm = kwargs.pop(\"norm\", None)\n        activation = kwargs.pop(\"activation\", None)\n        super().__init__(*args, **kwargs)\n\n        self.norm = norm\n        self.activation = activation\n\n    def forward(self, x):\n        # torchscript does not support SyncBatchNorm yet\n        # https://github.com/pytorch/pytorch/issues/40507\n        # and we skip these codes in torchscript since:\n        # 1. currently we only support torchscript in evaluation mode\n        # 2. 
features needed by exporting module to torchscript are added in PyTorch 1.6 or\n        # later version, `Conv2d` in these PyTorch versions has already supported empty inputs.\n        if not torch.jit.is_scripting():\n            with warnings.catch_warnings(record=True):\n                if x.numel() == 0 and self.training:\n                    # https://github.com/pytorch/pytorch/issues/12013\n                    assert not isinstance(\n                        self.norm, torch.nn.SyncBatchNorm\n                    ), \"SyncBatchNorm does not support empty inputs!\"\n\n        x = F.conv2d(\n            x, self.weight, self.bias, self.stride, self.padding, self.dilation, self.groups\n        )\n        if self.norm is not None:\n            x = self.norm(x)\n        if self.activation is not None:\n            x = self.activation(x)\n        return x\n\n\nConvTranspose2d = torch.nn.ConvTranspose2d\nBatchNorm2d = torch.nn.BatchNorm2d\ninterpolate = F.interpolate\nLinear = torch.nn.Linear\n\n\ndef nonzero_tuple(x):\n    \"\"\"\n    A 'as_tuple=True' version of torch.nonzero to support torchscript.\n    because of https://github.com/pytorch/pytorch/issues/38718\n    \"\"\"\n    if torch.jit.is_scripting():\n        if x.dim() == 0:\n            return x.unsqueeze(0).nonzero().unbind(1)\n        return x.nonzero().unbind(1)\n    else:\n        return x.nonzero(as_tuple=True)\n\n\n@torch.jit.script_if_tracing\ndef move_device_like(src: torch.Tensor, dst: torch.Tensor) -> torch.Tensor:\n    \"\"\"\n    Tracing friendly way to cast tensor to another tensor's device. Device will be treated\n    as constant during tracing, scripting the casting process as whole can workaround this issue.\n    \"\"\"\n    return src.to(dst.device)\n"
  },
  {
    "path": "detectron2/modeling/__init__.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport torch\n\nfrom detectron2.layers import ShapeSpec\n\nfrom .anchor_generator import build_anchor_generator, ANCHOR_GENERATOR_REGISTRY\nfrom .backbone import (\n    BACKBONE_REGISTRY,\n    FPN,\n    Backbone,\n    ResNet,\n    ResNetBlockBase,\n    build_backbone,\n    build_resnet_backbone,\n    make_stage,\n)\nfrom .meta_arch import (\n    META_ARCH_REGISTRY,\n    SEM_SEG_HEADS_REGISTRY,\n    GeneralizedRCNN,\n    PanopticFPN,\n    ProposalNetwork,\n    RetinaNet,\n    SemanticSegmentor,\n    build_model,\n    build_sem_seg_head,\n)\nfrom .postprocessing import detector_postprocess\nfrom .proposal_generator import (\n    PROPOSAL_GENERATOR_REGISTRY,\n    build_proposal_generator,\n    RPN_HEAD_REGISTRY,\n    build_rpn_head,\n)\nfrom .roi_heads import (\n    ROI_BOX_HEAD_REGISTRY,\n    ROI_HEADS_REGISTRY,\n    ROI_KEYPOINT_HEAD_REGISTRY,\n    ROI_MASK_HEAD_REGISTRY,\n    ROIHeads,\n    StandardROIHeads,\n    build_box_head,\n    build_keypoint_head,\n    build_mask_head,\n    build_roi_heads,\n)\nfrom .test_time_augmentation import DatasetMapperTTA, GeneralizedRCNNWithTTA\n\n_EXCLUDE = {\"torch\", \"ShapeSpec\"}\n__all__ = [k for k in globals().keys() if k not in _EXCLUDE and not k.startswith(\"_\")]\n\nassert (\n    torch.Tensor([1]) == torch.Tensor([2])\n).dtype == torch.bool, \"Your Pytorch is too old. Please update to contain https://github.com/pytorch/pytorch/pull/21113\"\n"
  },
  {
    "path": "detectron2/modeling/anchor_generator.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport copy\nimport math\nfrom typing import List\nimport torch\nfrom torch import nn\n\nfrom detectron2.layers import ShapeSpec\nfrom detectron2.structures import Boxes, RotatedBoxes\nfrom detectron2.utils.registry import Registry\n\nANCHOR_GENERATOR_REGISTRY = Registry(\"ANCHOR_GENERATOR\")\n\"\"\"\nRegistry for modules that creates object detection anchors for feature maps.\n\"\"\"\n\n\nclass BufferList(nn.Module):\n    \"\"\"\n    Similar to nn.ParameterList, but for buffers\n    \"\"\"\n\n    def __init__(self, buffers=None):\n        super(BufferList, self).__init__()\n        if buffers is not None:\n            self.extend(buffers)\n\n    def extend(self, buffers):\n        offset = len(self)\n        for i, buffer in enumerate(buffers):\n            self.register_buffer(str(offset + i), buffer)\n        return self\n\n    def __len__(self):\n        return len(self._buffers)\n\n    def __iter__(self):\n        return iter(self._buffers.values())\n\n\ndef _create_grid_offsets(size, stride, device):\n    grid_height, grid_width = size\n    shifts_x = torch.arange(0, grid_width * stride, step=stride, dtype=torch.float32, device=device)\n    shifts_y = torch.arange(\n        0, grid_height * stride, step=stride, dtype=torch.float32, device=device\n    )\n    shift_y, shift_x = torch.meshgrid(shifts_y, shifts_x)\n    shift_x = shift_x.reshape(-1)\n    shift_y = shift_y.reshape(-1)\n    return shift_x, shift_y\n\n\n@ANCHOR_GENERATOR_REGISTRY.register()\nclass DefaultAnchorGenerator(nn.Module):\n    \"\"\"\n    For a set of image sizes and feature maps, computes a set of anchors.\n    \"\"\"\n\n    def __init__(self, cfg, input_shape: List[ShapeSpec]):\n        super().__init__()\n        # fmt: off\n        sizes         = cfg.MODEL.ANCHOR_GENERATOR.SIZES\n        aspect_ratios = cfg.MODEL.ANCHOR_GENERATOR.ASPECT_RATIOS\n        self.strides  = [x.stride for x in 
input_shape]\n        # fmt: on\n        \"\"\"\n        sizes (list[list[int]]): sizes[i] is the list of anchor sizes to use\n            for the i-th feature map. If len(sizes) == 1, then the same list of\n            anchor sizes, given by sizes[0], is used for all feature maps. Anchor\n            sizes are given in absolute lengths in units of the input image;\n            they do not dynamically scale if the input image size changes.\n        aspect_ratios (list[list[float]]): aspect_ratios[i] is the list of\n            anchor aspect ratios to use for the i-th feature map. If\n            len(aspect_ratios) == 1, then the same list of anchor aspect ratios,\n            given by aspect_ratios[0], is used for all feature maps.\n        strides (list[int]): stride of each input feature.\n        \"\"\"\n\n        self.num_features = len(self.strides)\n        self.cell_anchors = self._calculate_anchors(sizes, aspect_ratios)\n\n    def _calculate_anchors(self, sizes, aspect_ratios):\n        # If one size (or aspect ratio) is specified and there are multiple feature\n        # maps, then we \"broadcast\" anchors of that single size (or aspect ratio)\n        # over all feature maps.\n        if len(sizes) == 1:\n            sizes *= self.num_features\n        if len(aspect_ratios) == 1:\n            aspect_ratios *= self.num_features\n        assert self.num_features == len(sizes)\n        assert self.num_features == len(aspect_ratios)\n\n        cell_anchors = [\n            self.generate_cell_anchors(s, a).float() for s, a in zip(sizes, aspect_ratios)\n        ]\n\n        return BufferList(cell_anchors)\n\n    @property\n    def box_dim(self):\n        \"\"\"\n        Returns:\n            int: the dimension of each anchor box.\n        \"\"\"\n        return 4\n\n    @property\n    def num_cell_anchors(self):\n        \"\"\"\n        Returns:\n            list[int]: Each int is the number of anchors at every pixel\n                location, on that feature 
map.\n                For example, if at every pixel we use anchors of 3 aspect\n                ratios and 5 sizes, the number of anchors is 15.\n                (See also ANCHOR_GENERATOR.SIZES and ANCHOR_GENERATOR.ASPECT_RATIOS in config)\n\n                In standard RPN models, `num_cell_anchors` on every feature map is the same.\n        \"\"\"\n        return [len(cell_anchors) for cell_anchors in self.cell_anchors]\n\n    def grid_anchors(self, grid_sizes):\n        anchors = []\n        for size, stride, base_anchors in zip(grid_sizes, self.strides, self.cell_anchors):\n            shift_x, shift_y = _create_grid_offsets(size, stride, base_anchors.device)\n            shifts = torch.stack((shift_x, shift_y, shift_x, shift_y), dim=1)\n\n            anchors.append((shifts.view(-1, 1, 4) + base_anchors.view(1, -1, 4)).reshape(-1, 4))\n\n        return anchors\n\n    def generate_cell_anchors(self, sizes=(32, 64, 128, 256, 512), aspect_ratios=(0.5, 1, 2)):\n        \"\"\"\n        Generate a tensor storing anchor boxes, which are continuous geometric rectangles\n        centered on one feature map point sample. We can later build the set of anchors\n        for the entire feature map by tiling these tensors; see `meth:grid_anchors`.\n\n        Args:\n            sizes (tuple[float]): Absolute size of the anchors in the units of the input\n                image (the input received by the network, after undergoing necessary scaling).\n                The absolute size is given as the side length of a box.\n            aspect_ratios (tuple[float]]): Aspect ratios of the boxes computed as box\n                height / width.\n\n        Returns:\n            Tensor of shape (len(sizes) * len(aspect_ratios), 4) storing anchor boxes\n                in XYXY format.\n        \"\"\"\n\n        # This is different from the anchor generator defined in the original Faster R-CNN\n        # code or Detectron. 
They yield the same AP, however the old version defines cell\n        # anchors in a less natural way with a shift relative to the feature grid and\n        # quantization that results in slightly different sizes for different aspect ratios.\n        # See also https://github.com/facebookresearch/Detectron/issues/227\n\n        anchors = []\n        for size in sizes:\n            area = size ** 2.0\n            for aspect_ratio in aspect_ratios:\n                # ... some algebra ...\n                # w = sqrt(s * s / a)\n                # h = a * w\n                w = math.sqrt(area / aspect_ratio)\n                h = aspect_ratio * w\n                x0, y0, x1, y1 = -w / 2.0, -h / 2.0, w / 2.0, h / 2.0\n                anchors.append([x0, y0, x1, y1])\n        return torch.tensor(anchors)\n\n    def forward(self, features):\n        \"\"\"\n        Args:\n            features (list[Tensor]): list of backbone feature maps on which to generate anchors.\n\n        Returns:\n            list[list[Boxes]]: a list of #image elements. 
Each is a list of #feature level Boxes.\n                The Boxes contains anchors of this image on the specific feature level.\n        \"\"\"\n        num_images = len(features[0])\n        grid_sizes = [feature_map.shape[-2:] for feature_map in features]\n        anchors_over_all_feature_maps = self.grid_anchors(grid_sizes)\n\n        anchors_in_image = []\n        for anchors_per_feature_map in anchors_over_all_feature_maps:\n            boxes = Boxes(anchors_per_feature_map)\n            anchors_in_image.append(boxes)\n\n        anchors = [copy.deepcopy(anchors_in_image) for _ in range(num_images)]\n        return anchors\n\n\n@ANCHOR_GENERATOR_REGISTRY.register()\nclass RotatedAnchorGenerator(nn.Module):\n    \"\"\"\n    The anchor generator used by Rotated RPN (RRPN).\n    \"\"\"\n\n    def __init__(self, cfg, input_shape: List[ShapeSpec]):\n        super().__init__()\n        # fmt: off\n        sizes         = cfg.MODEL.ANCHOR_GENERATOR.SIZES\n        aspect_ratios = cfg.MODEL.ANCHOR_GENERATOR.ASPECT_RATIOS\n        angles        = cfg.MODEL.ANCHOR_GENERATOR.ANGLES\n        self.strides  = [x.stride for x in input_shape]\n        # fmt: on\n\n        self.num_features = len(self.strides)\n        self.cell_anchors = self._calculate_anchors(sizes, aspect_ratios, angles, self.strides)\n\n    def _calculate_anchors(self, sizes, aspect_ratios, angles, feature_strides):\n        \"\"\"\n        Args:\n            sizes (list[list[int]]): sizes[i] is the list of anchor sizes to use\n                for the i-th feature map. If len(sizes) == 1, then the same list of\n                anchor sizes, given by sizes[0], is used for all feature maps. Anchor\n                sizes are given in absolute lengths in units of the input image;\n                they do not dynamically scale if the input image size changes.\n            aspect_ratios (list[list[float]]): aspect_ratios[i] is the list of\n                anchor aspect ratios to use for the i-th feature map. 
If\n                len(aspect_ratios) == 1, then the same list of anchor aspect ratios,\n                given by aspect_ratios[0], is used for all feature maps.\n            angles (list[list[float]]): angles[i] is the list of\n                anchor angles to use for the i-th feature map. If\n                len(angles) == 1, then the same list of anchor angles,\n                given by angles[0], is used for all feature maps.\n            feature_strides (list[number]): list of feature map strides (with respect\n                to the input image) for each input feature map.\n        \"\"\"\n\n        # If one size (or aspect ratio) is specified and there are multiple feature\n        # maps, then we \"broadcast\" anchors of that single size\n        # (or aspect ratio/angle) over all feature maps.\n\n        if len(sizes) == 1:\n            sizes *= self.num_features\n        if len(aspect_ratios) == 1:\n            aspect_ratios *= self.num_features\n        if len(angles) == 1:\n            angles *= self.num_features\n        assert self.num_features == len(sizes)\n        assert self.num_features == len(aspect_ratios)\n        assert self.num_features == len(angles)\n\n        cell_anchors = [\n            self.generate_cell_anchors(size, aspect_ratio, angle).float()\n            for size, aspect_ratio, angle in zip(sizes, aspect_ratios, angles)\n        ]\n\n        return BufferList(cell_anchors)\n\n    @property\n    def box_dim(self):\n        \"\"\"\n        Returns:\n            int: the dimension of each anchor box.\n        \"\"\"\n        return 5\n\n    @property\n    def num_cell_anchors(self):\n        \"\"\"\n        Returns:\n            list[int]: Each int is the number of anchors at every pixel\n                location, on that feature map.\n                For example, if at every pixel we use anchors of 3 aspect\n                ratios, 2 sizes and 5 angles, the number of anchors is 30.\n                (See also ANCHOR_GENERATOR.SIZES, 
ANCHOR_GENERATOR.ASPECT_RATIOS\n                and ANCHOR_GENERATOR.ANGLES in config)\n\n                In standard RRPN models, `num_cell_anchors` on every feature map is the same.\n        \"\"\"\n        return [len(cell_anchors) for cell_anchors in self.cell_anchors]\n\n    def grid_anchors(self, grid_sizes):\n        anchors = []\n        for size, stride, base_anchors in zip(grid_sizes, self.strides, self.cell_anchors):\n            shift_x, shift_y = _create_grid_offsets(size, stride, base_anchors.device)\n            zeros = torch.zeros_like(shift_x)\n            shifts = torch.stack((shift_x, shift_y, zeros, zeros, zeros), dim=1)\n\n            anchors.append((shifts.view(-1, 1, 5) + base_anchors.view(1, -1, 5)).reshape(-1, 5))\n\n        return anchors\n\n    def generate_cell_anchors(\n        self,\n        sizes=(32, 64, 128, 256, 512),\n        aspect_ratios=(0.5, 1, 2),\n        angles=(-90, -60, -30, 0, 30, 60, 90),\n    ):\n        \"\"\"\n        Generate a tensor storing anchor boxes, which are continuous geometric rectangles\n        centered on one feature map point sample. 
We can later build the set of anchors\n        for the entire feature map by tiling these tensors; see `meth:grid_anchors`.\n\n        Args:\n            sizes (tuple[float]): Absolute size of the anchors in the units of the input\n                image (the input received by the network, after undergoing necessary scaling).\n                The absolute size is given as the side length of a box.\n            aspect_ratios (tuple[float]]): Aspect ratios of the boxes computed as box\n                height / width.\n            angles (tuple[float]]): Angles of boxes indicating how many degrees\n                the boxes are rotated counter-clockwise.\n\n        Returns:\n            Tensor of shape (len(sizes) * len(aspect_ratios) * len(angles), 5)\n                storing anchor boxes in (x_ctr, y_ctr, w, h, angle) format.\n        \"\"\"\n        anchors = []\n        for size in sizes:\n            area = size ** 2.0\n            for aspect_ratio in aspect_ratios:\n                # s * s = w * h\n                # a = h / w\n                # ... some algebra ...\n                # w = sqrt(s * s / a)\n                # h = a * w\n                w = math.sqrt(area / aspect_ratio)\n                h = aspect_ratio * w\n                anchors.extend([0, 0, w, h, a] for a in angles)\n\n        return torch.tensor(anchors)\n\n    def forward(self, features):\n        \"\"\"\n        Args:\n            features (list[Tensor]): list of backbone feature maps on which to generate anchors.\n\n        Returns:\n            list[list[RotatedBoxes]]:\n                a list of #image elements. 
Each is a list of #feature level RotatedBoxes.\n                The RotatedBoxes contains anchors of this image on the specific feature level.\n        \"\"\"\n        num_images = len(features[0])\n        grid_sizes = [feature_map.shape[-2:] for feature_map in features]\n        anchors_over_all_feature_maps = self.grid_anchors(grid_sizes)\n\n        anchors_in_image = []\n        for anchors_per_feature_map in anchors_over_all_feature_maps:\n            boxes = RotatedBoxes(anchors_per_feature_map)\n            anchors_in_image.append(boxes)\n\n        anchors = [copy.deepcopy(anchors_in_image) for _ in range(num_images)]\n        return anchors\n\n\ndef build_anchor_generator(cfg, input_shape):\n    \"\"\"\n    Built an anchor generator from `cfg.MODEL.ANCHOR_GENERATOR.NAME`.\n    \"\"\"\n    anchor_generator = cfg.MODEL.ANCHOR_GENERATOR.NAME\n    return ANCHOR_GENERATOR_REGISTRY.get(anchor_generator)(cfg, input_shape)\n"
  },
  {
    "path": "detectron2/modeling/backbone/__init__.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nfrom .build import build_backbone, BACKBONE_REGISTRY  # noqa F401 isort:skip\n\nfrom .backbone import Backbone\nfrom .fpn import FPN\nfrom .resnet import ResNet, ResNetBlockBase, build_resnet_backbone, make_stage\n\n# TODO can expose more resnet blocks after careful consideration\n"
  },
  {
    "path": "detectron2/modeling/backbone/backbone.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nfrom abc import ABCMeta, abstractmethod\nimport torch.nn as nn\n\nfrom detectron2.layers import ShapeSpec\n\n__all__ = [\"Backbone\"]\n\n\nclass Backbone(nn.Module, metaclass=ABCMeta):\n    \"\"\"\n    Abstract base class for network backbones.\n    \"\"\"\n\n    def __init__(self):\n        \"\"\"\n        The `__init__` method of any subclass can specify its own set of arguments.\n        \"\"\"\n        super().__init__()\n\n    @abstractmethod\n    def forward(self):\n        \"\"\"\n        Subclasses must override this method, but adhere to the same return type.\n\n        Returns:\n            dict[str: Tensor]: mapping from feature name (e.g., \"res2\") to tensor\n        \"\"\"\n        pass\n\n    @property\n    def size_divisibility(self):\n        \"\"\"\n        Some backbones require the input height and width to be divisible by a\n        specific integer. This is typically true for encoder / decoder type networks\n        with lateral connection (e.g., FPN) for which feature maps need to match\n        dimension in the \"bottom up\" and \"top down\" paths. 
Set to 0 if no specific\n        input size divisibility is required.\n        \"\"\"\n        return 0\n\n    def output_shape(self):\n        \"\"\"\n        Returns:\n            dict[str->ShapeSpec]\n        \"\"\"\n        # this is a backward-compatible default\n        return {\n            name: ShapeSpec(\n                channels=self._out_feature_channels[name], stride=self._out_feature_strides[name]\n            )\n            for name in self._out_features\n        }\n\n    # the properties below are not used any more\n\n    @property\n    def out_features(self):\n        \"\"\"deprecated\"\"\"\n        return self._out_features\n\n    @property\n    def out_feature_strides(self):\n        \"\"\"deprecated\"\"\"\n        return {f: self._out_feature_strides[f] for f in self._out_features}\n\n    @property\n    def out_feature_channels(self):\n        \"\"\"deprecated\"\"\"\n        return {f: self._out_feature_channels[f] for f in self._out_features}\n"
  },
  {
    "path": "detectron2/modeling/backbone/build.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nfrom detectron2.layers import ShapeSpec\nfrom detectron2.utils.registry import Registry\n\nfrom .backbone import Backbone\n\nBACKBONE_REGISTRY = Registry(\"BACKBONE\")\nBACKBONE_REGISTRY.__doc__ = \"\"\"\nRegistry for backbones, which extract feature maps from images\n\nThe registered object must be a callable that accepts two arguments:\n\n1. A :class:`detectron2.config.CfgNode`\n2. A :class:`detectron2.layers.ShapeSpec`, which contains the input shape specification.\n\nIt must return an instance of :class:`Backbone`.\n\"\"\"\n\n\ndef build_backbone(cfg, input_shape=None):\n    \"\"\"\n    Build a backbone from `cfg.MODEL.BACKBONE.NAME`.\n\n    Returns:\n        an instance of :class:`Backbone`\n    \"\"\"\n    if input_shape is None:\n        input_shape = ShapeSpec(channels=len(cfg.MODEL.PIXEL_MEAN))\n\n    backbone_name = cfg.MODEL.BACKBONE.NAME\n    backbone = BACKBONE_REGISTRY.get(backbone_name)(cfg, input_shape)\n    assert isinstance(backbone, Backbone)\n    return backbone\n"
  },
  {
    "path": "detectron2/modeling/backbone/fpn.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport math\nimport fvcore.nn.weight_init as weight_init\nimport torch.nn.functional as F\nfrom torch import nn\n\nfrom detectron2.layers import Conv2d, ShapeSpec, get_norm\n\nfrom .backbone import Backbone\nfrom .build import BACKBONE_REGISTRY\nfrom .resnet import build_resnet_backbone\n\n__all__ = [\"build_resnet_fpn_backbone\", \"build_retinanet_resnet_fpn_backbone\", \"FPN\"]\n\n\nclass FPN(Backbone):\n    \"\"\"\n    This module implements Feature Pyramid Network.\n    It creates pyramid features built on top of some input feature maps.\n    \"\"\"\n\n    def __init__(\n        self, bottom_up, in_features, out_channels, norm=\"\", top_block=None, fuse_type=\"sum\"\n    ):\n        \"\"\"\n        Args:\n            bottom_up (Backbone): module representing the bottom up subnetwork.\n                Must be a subclass of :class:`Backbone`. The multi-scale feature\n                maps generated by the bottom up network, and listed in `in_features`,\n                are used to generate FPN levels.\n            in_features (list[str]): names of the input feature maps coming\n                from the backbone to which FPN is attached. For example, if the\n                backbone produces [\"res2\", \"res3\", \"res4\"], any *contiguous* sublist\n                of these may be used; order must be from high to low resolution.\n            out_channels (int): number of channels in the output feature maps.\n            norm (str): the normalization to use.\n            top_block (nn.Module or None): if provided, an extra operation will\n                be performed on the output of the last (smallest resolution)\n                FPN output, and the result will extend the result list. The top_block\n                further downsamples the feature map. 
It must have an attribute\n                \"num_levels\", meaning the number of extra FPN levels added by\n                this block, and \"in_feature\", which is a string representing\n                its input feature (e.g., p5).\n            fuse_type (str): types for fusing the top down features and the lateral\n                ones. It can be \"sum\" (default), which sums up element-wise; or \"avg\",\n                which takes the element-wise mean of the two.\n        \"\"\"\n        super(FPN, self).__init__()\n        assert isinstance(bottom_up, Backbone)\n\n        # Feature map strides and channels from the bottom up network (e.g. ResNet)\n        in_strides = [bottom_up.out_feature_strides[f] for f in in_features]\n        in_channels = [bottom_up.out_feature_channels[f] for f in in_features]\n\n        _assert_strides_are_log2_contiguous(in_strides)\n        lateral_convs = []\n        output_convs = []\n\n        use_bias = norm == \"\"\n        for idx, in_channels in enumerate(in_channels):\n            lateral_norm = get_norm(norm, out_channels)\n            output_norm = get_norm(norm, out_channels)\n\n            lateral_conv = Conv2d(\n                in_channels, out_channels, kernel_size=1, bias=use_bias, norm=lateral_norm\n            )\n            output_conv = Conv2d(\n                out_channels,\n                out_channels,\n                kernel_size=3,\n                stride=1,\n                padding=1,\n                bias=use_bias,\n                norm=output_norm,\n            )\n            weight_init.c2_xavier_fill(lateral_conv)\n            weight_init.c2_xavier_fill(output_conv)\n            stage = int(math.log2(in_strides[idx]))\n            self.add_module(\"fpn_lateral{}\".format(stage), lateral_conv)\n            self.add_module(\"fpn_output{}\".format(stage), output_conv)\n\n            lateral_convs.append(lateral_conv)\n            output_convs.append(output_conv)\n        # Place convs into top-down order 
(from low to high resolution)\n        # to make the top-down computation in forward clearer.\n        self.lateral_convs = lateral_convs[::-1]\n        self.output_convs = output_convs[::-1]\n        self.top_block = top_block\n        self.in_features = in_features\n        self.bottom_up = bottom_up\n        # Return feature names are \"p<stage>\", like [\"p2\", \"p3\", ..., \"p6\"]\n        self._out_feature_strides = {\"p{}\".format(int(math.log2(s))): s for s in in_strides}\n        # top block output feature maps.\n        if self.top_block is not None:\n            for s in range(stage, stage + self.top_block.num_levels):\n                self._out_feature_strides[\"p{}\".format(s + 1)] = 2 ** (s + 1)\n\n        self._out_features = list(self._out_feature_strides.keys())\n        self._out_feature_channels = {k: out_channels for k in self._out_features}\n        self._size_divisibility = in_strides[-1]\n        assert fuse_type in {\"avg\", \"sum\"}\n        self._fuse_type = fuse_type\n\n    @property\n    def size_divisibility(self):\n        return self._size_divisibility\n\n    def forward(self, x):\n        \"\"\"\n        Args:\n            input (dict[str: Tensor]): mapping feature map name (e.g., \"res5\") to\n                feature map tensor for each feature level in high to low resolution order.\n\n        Returns:\n            dict[str: Tensor]:\n                mapping from feature map name to FPN feature map tensor\n                in high to low resolution order. 
Returned feature names follow the FPN\n                paper convention: \"p<stage>\", where stage has stride = 2 ** stage e.g.,\n                [\"p2\", \"p3\", ..., \"p6\"].\n        \"\"\"\n        # Reverse feature maps into top-down order (from low to high resolution)\n        bottom_up_features = self.bottom_up(x)\n        x = [bottom_up_features[f] for f in self.in_features[::-1]]\n        results = []\n        prev_features = self.lateral_convs[0](x[0])\n        results.append(self.output_convs[0](prev_features))\n        for features, lateral_conv, output_conv in zip(\n            x[1:], self.lateral_convs[1:], self.output_convs[1:]\n        ):\n            top_down_features = F.interpolate(prev_features, scale_factor=2, mode=\"nearest\")\n            lateral_features = lateral_conv(features)\n            prev_features = lateral_features + top_down_features\n            if self._fuse_type == \"avg\":\n                prev_features /= 2\n            results.insert(0, output_conv(prev_features))\n\n        if self.top_block is not None:\n            top_block_in_feature = bottom_up_features.get(self.top_block.in_feature, None)\n            if top_block_in_feature is None:\n                top_block_in_feature = results[self._out_features.index(self.top_block.in_feature)]\n            results.extend(self.top_block(top_block_in_feature))\n        assert len(self._out_features) == len(results)\n        return dict(zip(self._out_features, results))\n\n    def output_shape(self):\n        return {\n            name: ShapeSpec(\n                channels=self._out_feature_channels[name], stride=self._out_feature_strides[name]\n            )\n            for name in self._out_features\n        }\n\n\ndef _assert_strides_are_log2_contiguous(strides):\n    \"\"\"\n    Assert that each stride is 2x times its preceding stride, i.e. 
\"contiguous in log2\".\n    \"\"\"\n    for i, stride in enumerate(strides[1:], 1):\n        assert stride == 2 * strides[i - 1], \"Strides {} {} are not log2 contiguous\".format(\n            stride, strides[i - 1]\n        )\n\n\nclass LastLevelMaxPool(nn.Module):\n    \"\"\"\n    This module is used in the original FPN to generate a downsampled\n    P6 feature from P5.\n    \"\"\"\n\n    def __init__(self):\n        super().__init__()\n        self.num_levels = 1\n        self.in_feature = \"p5\"\n\n    def forward(self, x):\n        return [F.max_pool2d(x, kernel_size=1, stride=2, padding=0)]\n\n\nclass LastLevelP6P7(nn.Module):\n    \"\"\"\n    This module is used in RetinaNet to generate extra layers, P6 and P7 from\n    C5 feature.\n    \"\"\"\n\n    def __init__(self, in_channels, out_channels):\n        super().__init__()\n        self.num_levels = 2\n        self.in_feature = \"res5\"\n        self.p6 = nn.Conv2d(in_channels, out_channels, 3, 2, 1)\n        self.p7 = nn.Conv2d(out_channels, out_channels, 3, 2, 1)\n        for module in [self.p6, self.p7]:\n            weight_init.c2_xavier_fill(module)\n\n    def forward(self, c5):\n        p6 = self.p6(c5)\n        p7 = self.p7(F.relu(p6))\n        return [p6, p7]\n\n\n@BACKBONE_REGISTRY.register()\ndef build_resnet_fpn_backbone(cfg, input_shape: ShapeSpec):\n    \"\"\"\n    Args:\n        cfg: a detectron2 CfgNode\n\n    Returns:\n        backbone (Backbone): backbone module, must be a subclass of :class:`Backbone`.\n    \"\"\"\n    bottom_up = build_resnet_backbone(cfg, input_shape)\n    in_features = cfg.MODEL.FPN.IN_FEATURES\n    out_channels = cfg.MODEL.FPN.OUT_CHANNELS\n    backbone = FPN(\n        bottom_up=bottom_up,\n        in_features=in_features,\n        out_channels=out_channels,\n        norm=cfg.MODEL.FPN.NORM,\n        top_block=LastLevelMaxPool(),\n        fuse_type=cfg.MODEL.FPN.FUSE_TYPE,\n    )\n    return backbone\n\n\n@BACKBONE_REGISTRY.register()\ndef 
build_retinanet_resnet_fpn_backbone(cfg, input_shape: ShapeSpec):\n    \"\"\"\n    Args:\n        cfg: a detectron2 CfgNode\n\n    Returns:\n        backbone (Backbone): backbone module, must be a subclass of :class:`Backbone`.\n    \"\"\"\n    bottom_up = build_resnet_backbone(cfg, input_shape)\n    in_features = cfg.MODEL.FPN.IN_FEATURES\n    out_channels = cfg.MODEL.FPN.OUT_CHANNELS\n    in_channels_p6p7 = bottom_up.out_feature_channels[\"res5\"]\n    backbone = FPN(\n        bottom_up=bottom_up,\n        in_features=in_features,\n        out_channels=out_channels,\n        norm=cfg.MODEL.FPN.NORM,\n        top_block=LastLevelP6P7(in_channels_p6p7, out_channels),\n        fuse_type=cfg.MODEL.FPN.FUSE_TYPE,\n    )\n    return backbone\n"
  },
  {
    "path": "detectron2/modeling/backbone/resnet.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport numpy as np\nimport fvcore.nn.weight_init as weight_init\nimport torch\nimport torch.nn.functional as F\nfrom torch import nn\n\nfrom detectron2.layers import (\n    Conv2d,\n    DeformConv,\n    FrozenBatchNorm2d,\n    ModulatedDeformConv,\n    ShapeSpec,\n    get_norm,\n)\n\nfrom .backbone import Backbone\nfrom .build import BACKBONE_REGISTRY\n\n__all__ = [\n    \"ResNetBlockBase\",\n    \"BottleneckBlock\",\n    \"DeformBottleneckBlock\",\n    \"BasicStem\",\n    \"ResNet\",\n    \"make_stage\",\n    \"build_resnet_backbone\",\n]\n\n\nclass ResNetBlockBase(nn.Module):\n    def __init__(self, in_channels, out_channels, stride):\n        \"\"\"\n        The `__init__` method of any subclass should also contain these arguments.\n\n        Args:\n            in_channels (int):\n            out_channels (int):\n            stride (int):\n        \"\"\"\n        super().__init__()\n        self.in_channels = in_channels\n        self.out_channels = out_channels\n        self.stride = stride\n\n    def freeze(self):\n        for p in self.parameters():\n            p.requires_grad = False\n        FrozenBatchNorm2d.convert_frozen_batchnorm(self)\n        return self\n\n\nclass BottleneckBlock(ResNetBlockBase):\n    def __init__(\n        self,\n        in_channels,\n        out_channels,\n        *,\n        bottleneck_channels,\n        stride=1,\n        num_groups=1,\n        norm=\"BN\",\n        stride_in_1x1=False,\n        dilation=1,\n    ):\n        \"\"\"\n        Args:\n            norm (str or callable): a callable that takes the number of\n                channels and return a `nn.Module`, or a pre-defined string\n                (one of {\"FrozenBN\", \"BN\", \"GN\"}).\n            stride_in_1x1 (bool): when stride==2, whether to put stride in the\n                first 1x1 convolution or the bottleneck 3x3 convolution.\n        \"\"\"\n        
super().__init__(in_channels, out_channels, stride)\n\n        if in_channels != out_channels:\n            self.shortcut = Conv2d(\n                in_channels,\n                out_channels,\n                kernel_size=1,\n                stride=stride,\n                bias=False,\n                norm=get_norm(norm, out_channels),\n            )\n        else:\n            self.shortcut = None\n\n        # The original MSRA ResNet models have stride in the first 1x1 conv\n        # The subsequent fb.torch.resnet and Caffe2 ResNe[X]t implementations have\n        # stride in the 3x3 conv\n        stride_1x1, stride_3x3 = (stride, 1) if stride_in_1x1 else (1, stride)\n\n        self.conv1 = Conv2d(\n            in_channels,\n            bottleneck_channels,\n            kernel_size=1,\n            stride=stride_1x1,\n            bias=False,\n            norm=get_norm(norm, bottleneck_channels),\n        )\n\n        self.conv2 = Conv2d(\n            bottleneck_channels,\n            bottleneck_channels,\n            kernel_size=3,\n            stride=stride_3x3,\n            padding=1 * dilation,\n            bias=False,\n            groups=num_groups,\n            dilation=dilation,\n            norm=get_norm(norm, bottleneck_channels),\n        )\n\n        self.conv3 = Conv2d(\n            bottleneck_channels,\n            out_channels,\n            kernel_size=1,\n            bias=False,\n            norm=get_norm(norm, out_channels),\n        )\n\n        for layer in [self.conv1, self.conv2, self.conv3, self.shortcut]:\n            if layer is not None:  # shortcut can be None\n                weight_init.c2_msra_fill(layer)\n\n        # Zero-initialize the last normalization in each residual branch,\n        # so that at the beginning, the residual branch starts with zeros,\n        # and each residual block behaves like an identity.\n        # See Sec 5.1 in \"Accurate, Large Minibatch SGD: Training ImageNet in 1 Hour\":\n        # \"For BN layers, the 
learnable scaling coefficient γ is initialized\n        # to be 1, except for each residual block's last BN\n        # where γ is initialized to be 0.\"\n\n        # nn.init.constant_(self.conv3.norm.weight, 0)\n        # TODO this somehow hurts performance when training GN models from scratch.\n        # Add it as an option when we need to use this code to train a backbone.\n\n    def forward(self, x):\n        out = self.conv1(x)\n        out = F.relu_(out)\n\n        out = self.conv2(out)\n        out = F.relu_(out)\n\n        out = self.conv3(out)\n\n        if self.shortcut is not None:\n            shortcut = self.shortcut(x)\n        else:\n            shortcut = x\n\n        out += shortcut\n        out = F.relu_(out)\n        return out\n\n\nclass DeformBottleneckBlock(ResNetBlockBase):\n    def __init__(\n        self,\n        in_channels,\n        out_channels,\n        *,\n        bottleneck_channels,\n        stride=1,\n        num_groups=1,\n        norm=\"BN\",\n        stride_in_1x1=False,\n        dilation=1,\n        deform_modulated=False,\n        deform_num_groups=1,\n    ):\n        \"\"\"\n        Similar to :class:`BottleneckBlock`, but with deformable conv in the 3x3 convolution.\n        \"\"\"\n        super().__init__(in_channels, out_channels, stride)\n        self.deform_modulated = deform_modulated\n\n        if in_channels != out_channels:\n            self.shortcut = Conv2d(\n                in_channels,\n                out_channels,\n                kernel_size=1,\n                stride=stride,\n                bias=False,\n                norm=get_norm(norm, out_channels),\n            )\n        else:\n            self.shortcut = None\n\n        stride_1x1, stride_3x3 = (stride, 1) if stride_in_1x1 else (1, stride)\n\n        self.conv1 = Conv2d(\n            in_channels,\n            bottleneck_channels,\n            kernel_size=1,\n            stride=stride_1x1,\n            bias=False,\n            norm=get_norm(norm, 
bottleneck_channels),\n        )\n\n        if deform_modulated:\n            deform_conv_op = ModulatedDeformConv\n            # offset channels are 2 or 3 (if with modulated) * kernel_size * kernel_size\n            offset_channels = 27\n        else:\n            deform_conv_op = DeformConv\n            offset_channels = 18\n\n        self.conv2_offset = Conv2d(\n            bottleneck_channels,\n            offset_channels * deform_num_groups,\n            kernel_size=3,\n            stride=stride_3x3,\n            padding=1 * dilation,\n            dilation=dilation,\n        )\n        self.conv2 = deform_conv_op(\n            bottleneck_channels,\n            bottleneck_channels,\n            kernel_size=3,\n            stride=stride_3x3,\n            padding=1 * dilation,\n            bias=False,\n            groups=num_groups,\n            dilation=dilation,\n            deformable_groups=deform_num_groups,\n            norm=get_norm(norm, bottleneck_channels),\n        )\n\n        self.conv3 = Conv2d(\n            bottleneck_channels,\n            out_channels,\n            kernel_size=1,\n            bias=False,\n            norm=get_norm(norm, out_channels),\n        )\n\n        for layer in [self.conv1, self.conv2, self.conv3, self.shortcut]:\n            if layer is not None:  # shortcut can be None\n                weight_init.c2_msra_fill(layer)\n\n        nn.init.constant_(self.conv2_offset.weight, 0)\n        nn.init.constant_(self.conv2_offset.bias, 0)\n\n    def forward(self, x):\n        out = self.conv1(x)\n        out = F.relu_(out)\n\n        if self.deform_modulated:\n            offset_mask = self.conv2_offset(out)\n            offset_x, offset_y, mask = torch.chunk(offset_mask, 3, dim=1)\n            offset = torch.cat((offset_x, offset_y), dim=1)\n            mask = mask.sigmoid()\n            out = self.conv2(out, offset, mask)\n        else:\n            offset = self.conv2_offset(out)\n            out = self.conv2(out, offset)\n     
   out = F.relu_(out)\n\n        out = self.conv3(out)\n\n        if self.shortcut is not None:\n            shortcut = self.shortcut(x)\n        else:\n            shortcut = x\n\n        out += shortcut\n        out = F.relu_(out)\n        return out\n\n\ndef make_stage(block_class, num_blocks, first_stride, **kwargs):\n    \"\"\"\n    Create a resnet stage by creating many blocks.\n    Args:\n        block_class (class): a subclass of ResNetBlockBase\n        num_blocks (int):\n        first_stride (int): the stride of the first block. The other blocks will have stride=1.\n            A `stride` argument will be passed to the block constructor.\n        kwargs: other arguments passed to the block constructor.\n\n    Returns:\n        list[nn.Module]: a list of block module.\n    \"\"\"\n    blocks = []\n    for i in range(num_blocks):\n        blocks.append(block_class(stride=first_stride if i == 0 else 1, **kwargs))\n        kwargs[\"in_channels\"] = kwargs[\"out_channels\"]\n    return blocks\n\n\nclass BasicStem(nn.Module):\n    def __init__(self, in_channels=3, out_channels=64, norm=\"BN\"):\n        \"\"\"\n        Args:\n            norm (str or callable): a callable that takes the number of\n                channels and return a `nn.Module`, or a pre-defined string\n                (one of {\"FrozenBN\", \"BN\", \"GN\"}).\n        \"\"\"\n        super().__init__()\n        self.conv1 = Conv2d(\n            in_channels,\n            out_channels,\n            kernel_size=7,\n            stride=2,\n            padding=3,\n            bias=False,\n            norm=get_norm(norm, out_channels),\n        )\n        weight_init.c2_msra_fill(self.conv1)\n\n    def forward(self, x):\n        x = self.conv1(x)\n        x = F.relu_(x)\n        x = F.max_pool2d(x, kernel_size=3, stride=2, padding=1)\n        return x\n\n    @property\n    def out_channels(self):\n        return self.conv1.out_channels\n\n    @property\n    def stride(self):\n        return 4  # = 
stride 2 conv -> stride 2 max pool\n\n\nclass ResNet(Backbone):\n    def __init__(self, stem, stages, num_classes=None, out_features=None):\n        \"\"\"\n        Args:\n            stem (nn.Module): a stem module\n            stages (list[list[ResNetBlock]]): several (typically 4) stages,\n                each contains multiple :class:`ResNetBlockBase`.\n            num_classes (None or int): if None, will not perform classification.\n            out_features (list[str]): name of the layers whose outputs should\n                be returned in forward. Can be anything in \"stem\", \"linear\", or \"res2\" ...\n                If None, will return the output of the last layer.\n        \"\"\"\n        super(ResNet, self).__init__()\n        self.stem = stem\n        self.num_classes = num_classes\n\n        current_stride = self.stem.stride\n        self._out_feature_strides = {\"stem\": current_stride}\n        self._out_feature_channels = {\"stem\": self.stem.out_channels}\n\n        self.stages_and_names = []\n        for i, blocks in enumerate(stages):\n            for block in blocks:\n                assert isinstance(block, ResNetBlockBase), block\n                curr_channels = block.out_channels\n            stage = nn.Sequential(*blocks)\n            name = \"res\" + str(i + 2)\n            self.add_module(name, stage)\n            self.stages_and_names.append((stage, name))\n            self._out_feature_strides[name] = current_stride = int(\n                current_stride * np.prod([k.stride for k in blocks])\n            )\n            self._out_feature_channels[name] = blocks[-1].out_channels\n\n        if num_classes is not None:\n            self.avgpool = nn.AdaptiveAvgPool2d((1, 1))\n            self.linear = nn.Linear(curr_channels, num_classes)\n\n            # Sec 5.1 in \"Accurate, Large Minibatch SGD: Training ImageNet in 1 Hour\":\n            # \"The 1000-way fully-connected layer is initialized by\n            # drawing weights from a 
zero-mean Gaussian with standard deviation of 0.01.\"\n            # NOTE(review): torch.nn.init.normal_ takes `std`, not `stddev`;\n            # the original `stddev=0.01` raised TypeError when num_classes was set.\n            nn.init.normal_(self.linear.weight, std=0.01)\n            name = \"linear\"\n\n        if out_features is None:\n            out_features = [name]\n        self._out_features = out_features\n        assert len(self._out_features)\n        children = [x[0] for x in self.named_children()]\n        for out_feature in self._out_features:\n            assert out_feature in children, \"Available children: {}\".format(\", \".join(children))\n\n    def forward(self, x):\n        outputs = {}\n        x = self.stem(x)\n        if \"stem\" in self._out_features:\n            outputs[\"stem\"] = x\n        for stage, name in self.stages_and_names:\n            x = stage(x)\n            if name in self._out_features:\n                outputs[name] = x\n        if self.num_classes is not None:\n            x = self.avgpool(x)\n            # flatten (N, C, 1, 1) -> (N, C) before the linear classifier;\n            # without this, nn.Linear fails on the 4-d pooled tensor.\n            x = torch.flatten(x, 1)\n            x = self.linear(x)\n            if \"linear\" in self._out_features:\n                outputs[\"linear\"] = x\n        return outputs\n\n    def output_shape(self):\n        return {\n            name: ShapeSpec(\n                channels=self._out_feature_channels[name], stride=self._out_feature_strides[name]\n            )\n            for name in self._out_features\n        }\n\n\n@BACKBONE_REGISTRY.register()\ndef build_resnet_backbone(cfg, input_shape):\n    \"\"\"\n    Create a ResNet instance from config.\n\n    Returns:\n        ResNet: a :class:`ResNet` instance.\n    \"\"\"\n    # need registration of new blocks/stems?\n    norm = cfg.MODEL.RESNETS.NORM\n    stem = BasicStem(\n        in_channels=input_shape.channels,\n        out_channels=cfg.MODEL.RESNETS.STEM_OUT_CHANNELS,\n        norm=norm,\n    )\n    freeze_at = cfg.MODEL.BACKBONE.FREEZE_AT\n\n    if freeze_at >= 1:\n        for p in stem.parameters():\n            p.requires_grad = False\n        stem = FrozenBatchNorm2d.convert_frozen_batchnorm(stem)\n\n    # fmt: off\n    out_features        = 
cfg.MODEL.RESNETS.OUT_FEATURES\n    depth               = cfg.MODEL.RESNETS.DEPTH\n    num_groups          = cfg.MODEL.RESNETS.NUM_GROUPS\n    width_per_group     = cfg.MODEL.RESNETS.WIDTH_PER_GROUP\n    bottleneck_channels = num_groups * width_per_group\n    in_channels         = cfg.MODEL.RESNETS.STEM_OUT_CHANNELS\n    out_channels        = cfg.MODEL.RESNETS.RES2_OUT_CHANNELS\n    stride_in_1x1       = cfg.MODEL.RESNETS.STRIDE_IN_1X1\n    res5_dilation       = cfg.MODEL.RESNETS.RES5_DILATION\n    deform_on_per_stage = cfg.MODEL.RESNETS.DEFORM_ON_PER_STAGE\n    deform_modulated    = cfg.MODEL.RESNETS.DEFORM_MODULATED\n    deform_num_groups   = cfg.MODEL.RESNETS.DEFORM_NUM_GROUPS\n    # fmt: on\n    assert res5_dilation in {1, 2}, \"res5_dilation cannot be {}.\".format(res5_dilation)\n\n    num_blocks_per_stage = {50: [3, 4, 6, 3], 101: [3, 4, 23, 3], 152: [3, 8, 36, 3]}[depth]\n\n    stages = []\n\n    # Avoid creating variables without gradients\n    # It consumes extra memory and may cause allreduce to fail\n    out_stage_idx = [{\"res2\": 2, \"res3\": 3, \"res4\": 4, \"res5\": 5}[f] for f in out_features]\n    max_stage_idx = max(out_stage_idx)\n    for idx, stage_idx in enumerate(range(2, max_stage_idx + 1)):\n        dilation = res5_dilation if stage_idx == 5 else 1\n        first_stride = 1 if idx == 0 or (stage_idx == 5 and dilation == 2) else 2\n        stage_kargs = {\n            \"num_blocks\": num_blocks_per_stage[idx],\n            \"first_stride\": first_stride,\n            \"in_channels\": in_channels,\n            \"bottleneck_channels\": bottleneck_channels,\n            \"out_channels\": out_channels,\n            \"num_groups\": num_groups,\n            \"norm\": norm,\n            \"stride_in_1x1\": stride_in_1x1,\n            \"dilation\": dilation,\n        }\n        if deform_on_per_stage[idx]:\n            stage_kargs[\"block_class\"] = DeformBottleneckBlock\n            stage_kargs[\"deform_modulated\"] = deform_modulated\n            
stage_kargs[\"deform_num_groups\"] = deform_num_groups\n        else:\n            stage_kargs[\"block_class\"] = BottleneckBlock\n        blocks = make_stage(**stage_kargs)\n        in_channels = out_channels\n        out_channels *= 2\n        bottleneck_channels *= 2\n\n        if freeze_at >= stage_idx:\n            for block in blocks:\n                block.freeze()\n        stages.append(blocks)\n    return ResNet(stem, stages, out_features=out_features)\n"
  },
  {
    "path": "detectron2/modeling/box_regression.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport math\nimport torch\n\n# Value for clamping large dw and dh predictions. The heuristic is that we clamp\n# such that dw and dh are no larger than what would transform a 16px box into a\n# 1000px box (based on a small anchor, 16px, and a typical image size, 1000px).\n_DEFAULT_SCALE_CLAMP = math.log(1000.0 / 16)\n\n\n__all__ = [\"Box2BoxTransform\", \"Box2BoxTransformRotated\"]\n\n\nclass Box2BoxTransform(object):\n    \"\"\"\n    The box-to-box transform defined in R-CNN. The transformation is parameterized\n    by 4 deltas: (dx, dy, dw, dh). The transformation scales the box's width and height\n    by exp(dw), exp(dh) and shifts a box's center by the offset (dx * width, dy * height).\n    \"\"\"\n\n    def __init__(self, weights, scale_clamp=_DEFAULT_SCALE_CLAMP):\n        \"\"\"\n        Args:\n            weights (4-element tuple): Scaling factors that are applied to the\n                (dx, dy, dw, dh) deltas. In Fast R-CNN, these were originally set\n                such that the deltas have unit variance; now they are treated as\n                hyperparameters of the system.\n            scale_clamp (float): When predicting deltas, the predicted box scaling\n                factors (dw and dh) are clamped such that they are <= scale_clamp.\n        \"\"\"\n        self.weights = weights\n        self.scale_clamp = scale_clamp\n\n    def get_deltas(self, src_boxes, target_boxes):\n        \"\"\"\n        Get box regression transformation deltas (dx, dy, dw, dh) that can be used\n        to transform the `src_boxes` into the `target_boxes`. 
That is, the relation\n        ``target_boxes == self.apply_deltas(deltas, src_boxes)`` is true (unless\n        any delta is too large and is clamped).\n\n        Args:\n            src_boxes (Tensor): source boxes, e.g., object proposals\n            target_boxes (Tensor): target of the transformation, e.g., ground-truth\n                boxes.\n        \"\"\"\n        assert isinstance(src_boxes, torch.Tensor), type(src_boxes)\n        assert isinstance(target_boxes, torch.Tensor), type(target_boxes)\n\n        src_widths = src_boxes[:, 2] - src_boxes[:, 0]\n        src_heights = src_boxes[:, 3] - src_boxes[:, 1]\n        src_ctr_x = src_boxes[:, 0] + 0.5 * src_widths\n        src_ctr_y = src_boxes[:, 1] + 0.5 * src_heights\n\n        target_widths  = target_boxes[:, 2] - target_boxes[:, 0]\n        target_heights = target_boxes[:, 3] - target_boxes[:, 1]\n        target_ctr_x   = target_boxes[:, 0] + 0.5 * target_widths\n        target_ctr_y   = target_boxes[:, 1] + 0.5 * target_heights\n\n        wx, wy, ww, wh = self.weights\n        dx = wx * (target_ctr_x - src_ctr_x) / src_widths\n        dy = wy * (target_ctr_y - src_ctr_y) / src_heights\n        dw = ww * torch.log(target_widths / src_widths)\n        dh = wh * torch.log(target_heights / src_heights)\n\n        deltas = torch.stack((dx, dy, dw, dh), dim=1)\n        assert (src_widths > 0).all().item(), \"Input boxes to Box2BoxTransform are not valid!\"\n        return deltas\n    def trans_light(self, src_boxes):\n        x1 = src_boxes[:, 0]\n        y1 = src_boxes[:, 1]\n        x2 = src_boxes[:, 2]\n        y2 = src_boxes[:, 3]\n        return torch.stack((x1,y1,x2,y2),dim=1)\n    def apply_deltas(self, deltas, boxes):\n        \"\"\"\n        Apply transformation `deltas` (dx, dy, dw, dh) to `boxes`.\n\n        Args:\n            deltas (Tensor): transformation deltas of shape (N, k*4), where k >= 1.\n                deltas[i] represents k potentially different class-specific\n                box 
transformations for the single box boxes[i].\n            boxes (Tensor): boxes to transform, of shape (N, 4)\n        \"\"\"\n        assert torch.isfinite(deltas).all().item()\n        boxes = boxes.to(deltas.dtype)\n\n        widths = boxes[:, 2] - boxes[:, 0]\n        heights = boxes[:, 3] - boxes[:, 1]\n        ctr_x = boxes[:, 0] + 0.5 * widths\n        ctr_y = boxes[:, 1] + 0.5 * heights\n\n        wx, wy, ww, wh = self.weights\n        dx = deltas[:, 0::4] / wx\n        dy = deltas[:, 1::4] / wy\n        dw = deltas[:, 2::4] / ww\n        dh = deltas[:, 3::4] / wh\n\n        # Prevent sending too large values into torch.exp()\n        dw = torch.clamp(dw, max=self.scale_clamp)\n        dh = torch.clamp(dh, max=self.scale_clamp)\n\n        pred_ctr_x = dx * widths[:, None] + ctr_x[:, None]\n        pred_ctr_y = dy * heights[:, None] + ctr_y[:, None]\n        pred_w = torch.exp(dw) * widths[:, None]\n        pred_h = torch.exp(dh) * heights[:, None]\n\n        pred_boxes = torch.zeros_like(deltas)\n        pred_boxes[:, 0::4] = pred_ctr_x - 0.5 * pred_w  # x1\n        pred_boxes[:, 1::4] = pred_ctr_y - 0.5 * pred_h  # y1\n        pred_boxes[:, 2::4] = pred_ctr_x + 0.5 * pred_w  # x2\n        pred_boxes[:, 3::4] = pred_ctr_y + 0.5 * pred_h  # y2\n        return pred_boxes\n\n\nclass Box2BoxTransformRotated(object):\n    \"\"\"\n    The box-to-box transform defined in Rotated R-CNN. The transformation is parameterized\n    by 5 deltas: (dx, dy, dw, dh, da). The transformation scales the box's width and height\n    by exp(dw), exp(dh), shifts a box's center by the offset (dx * width, dy * height),\n    and rotate a box's angle by da (radians).\n    Note: angles of deltas are in radians while angles of boxes are in degrees.\n    \"\"\"\n\n    def __init__(self, weights, scale_clamp=_DEFAULT_SCALE_CLAMP):\n        \"\"\"\n        Args:\n            weights (5-element tuple): Scaling factors that are applied to the\n                (dx, dy, dw, dh, da) deltas. 
These are treated as\n                hyperparameters of the system.\n            scale_clamp (float): When predicting deltas, the predicted box scaling\n                factors (dw and dh) are clamped such that they are <= scale_clamp.\n        \"\"\"\n        self.weights = weights\n        self.scale_clamp = scale_clamp\n\n    def get_deltas(self, src_boxes, target_boxes):\n        \"\"\"\n        Get box regression transformation deltas (dx, dy, dw, dh, da) that can be used\n        to transform the `src_boxes` into the `target_boxes`. That is, the relation\n        ``target_boxes == self.apply_deltas(deltas, src_boxes)`` is true (unless\n        any delta is too large and is clamped).\n\n        Args:\n            src_boxes (Tensor): Nx5 source boxes, e.g., object proposals\n            target_boxes (Tensor): Nx5 target of the transformation, e.g., ground-truth\n                boxes.\n        \"\"\"\n        assert isinstance(src_boxes, torch.Tensor), type(src_boxes)\n        assert isinstance(target_boxes, torch.Tensor), type(target_boxes)\n\n        src_ctr_x, src_ctr_y, src_widths, src_heights, src_angles = torch.unbind(src_boxes, dim=1)\n\n        target_ctr_x, target_ctr_y, target_widths, target_heights, target_angles = torch.unbind(\n            target_boxes, dim=1\n        )\n\n        wx, wy, ww, wh, wa = self.weights\n        dx = wx * (target_ctr_x - src_ctr_x) / src_widths\n        dy = wy * (target_ctr_y - src_ctr_y) / src_heights\n        dw = ww * torch.log(target_widths / src_widths)\n        dh = wh * torch.log(target_heights / src_heights)\n        # Angles of deltas are in radians while angles of boxes are in degrees.\n        # the conversion to radians serve as a way to normalize the values\n        da = target_angles - src_angles\n        while len(torch.where(da < -180.0)[0]) > 0:\n            da[torch.where(da < -180.0)] += 360.0\n        while len(torch.where(da > 180.0)[0]) > 0:\n            da[torch.where(da > 180.0)] -= 360.0\n      
  da *= wa * math.pi / 180.0\n\n        deltas = torch.stack((dx, dy, dw, dh, da), dim=1)\n        assert (\n            (src_widths > 0).all().item()\n        ), \"Input boxes to Box2BoxTransformRotated are not valid!\"\n        return deltas\n\n    def apply_deltas(self, deltas, boxes):\n        \"\"\"\n        Apply transformation `deltas` (dx, dy, dw, dh, da) to `boxes`.\n\n        Args:\n            deltas (Tensor): transformation deltas of shape (N, 5).\n                deltas[i] represents box transformation for the single box boxes[i].\n            boxes (Tensor): boxes to transform, of shape (N, 5)\n        \"\"\"\n        assert deltas.shape[1] == 5 and boxes.shape[1] == 5\n        assert torch.isfinite(deltas).all().item()\n\n        boxes = boxes.to(deltas.dtype)\n\n        ctr_x, ctr_y, widths, heights, angles = torch.unbind(boxes, dim=1)\n        wx, wy, ww, wh, wa = self.weights\n        dx, dy, dw, dh, da = torch.unbind(deltas, dim=1)\n\n        dx.div_(wx)\n        dy.div_(wy)\n        dw.div_(ww)\n        dh.div_(wh)\n        da.div_(wa)\n\n        # Prevent sending too large values into torch.exp()\n        dw = torch.clamp(dw, max=self.scale_clamp)\n        dh = torch.clamp(dh, max=self.scale_clamp)\n\n        pred_boxes = torch.zeros_like(deltas)\n        pred_boxes[:, 0] = dx * widths + ctr_x  # x_ctr\n        pred_boxes[:, 1] = dy * heights + ctr_y  # y_ctr\n        pred_boxes[:, 2] = torch.exp(dw) * widths  # width\n        pred_boxes[:, 3] = torch.exp(dh) * heights  # height\n\n        # Following original RRPN implementation,\n        # angles of deltas are in radians while angles of boxes are in degrees.\n        pred_angle = da * 180.0 / math.pi + angles\n\n        while len(torch.where(pred_angle < -180.0)[0]) > 0:\n            pred_angle[torch.where(pred_angle < -180.0)] += 360.0\n        while len(torch.where(pred_angle > 180.0)[0]) > 0:\n            pred_angle[torch.where(pred_angle > 180.0)] -= 360.0\n\n        pred_boxes[:, 4] = 
pred_angle\n\n        return pred_boxes\n"
  },
  {
    "path": "detectron2/modeling/matcher.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport torch\n\n\nclass Matcher(object):\n    \"\"\"\n    This class assigns to each predicted \"element\" (e.g., a box) a ground-truth\n    element. Each predicted element will have exactly zero or one matches; each\n    ground-truth element may be matched to zero or more predicted elements.\n\n    The matching is determined by the MxN match_quality_matrix, that characterizes\n    how well each (ground-truth, prediction)-pair match each other. For example,\n    if the elements are boxes, this matrix may contain box intersection-over-union\n    overlap values.\n\n    The matcher returns (a) a vector of length N containing the index of the\n    ground-truth element m in [0, M) that matches to prediction n in [0, N).\n    (b) a vector of length N containing the labels for each prediction.\n    \"\"\"\n\n    def __init__(self, thresholds, labels, allow_low_quality_matches=False):\n        \"\"\"\n        Args:\n            thresholds (list): a list of thresholds used to stratify predictions\n                into levels.\n            labels (list): a list of values to label predictions belonging at\n                each level. 
A label can be one of {-1, 0, 1} signifying\n                {ignore, negative class, positive class}, respectively.\n            allow_low_quality_matches (bool): if True, produce additional matches\n                for predictions with maximum match quality lower than high_threshold.\n                See set_low_quality_matches_ for more details.\n\n            For example,\n                thresholds = [0.3, 0.5]\n                labels = [0, -1, 1]\n                All predictions with iou < 0.3 will be marked with 0 and\n                thus will be considered as false positives while training.\n                All predictions with 0.3 <= iou < 0.5 will be marked with -1 and\n                thus will be ignored.\n                All predictions with 0.5 <= iou will be marked with 1 and\n                thus will be considered as true positives.\n        \"\"\"\n        # Add -inf and +inf to first and last position in thresholds\n        thresholds = thresholds[:]\n        thresholds.insert(0, -float(\"inf\"))\n        thresholds.append(float(\"inf\"))\n        assert all(low <= high for (low, high) in zip(thresholds[:-1], thresholds[1:]))\n        assert all(l in [-1, 0, 1] for l in labels)\n        assert len(labels) == len(thresholds) - 1\n        self.thresholds = thresholds\n        self.labels = labels\n        self.allow_low_quality_matches = allow_low_quality_matches\n\n    def __call__(self, match_quality_matrix):\n        \"\"\"\n        Args:\n            match_quality_matrix (Tensor[float]): an MxN tensor, containing the\n                pairwise quality between M ground-truth elements and N predicted\n                elements. 
All elements must be >= 0 (due to the us of `torch.nonzero`\n                for selecting indices in :meth:`set_low_quality_matches_`).\n\n        Returns:\n            matches (Tensor[int64]): a vector of length N, where matches[i] is a matched\n                ground-truth index in [0, M)\n            match_labels (Tensor[int8]): a vector of length N, where pred_labels[i] indicates\n                whether a prediction is a true or false positive or ignored\n        \"\"\"\n        assert match_quality_matrix.dim() == 2\n        if match_quality_matrix.numel() == 0:\n            return (\n                match_quality_matrix.new_full(\n                    (match_quality_matrix.size(1),), 0, dtype=torch.int64\n                ),\n                match_quality_matrix.new_full(\n                    (match_quality_matrix.size(1),), -1, dtype=torch.int8\n                ),\n            )\n        assert torch.all(match_quality_matrix >= 0)\n\n        # match_quality_matrix is M (gt) x N (predicted)\n        # Max over gt elements (dim 0) to find best gt candidate for each prediction\n        matched_vals, matches = match_quality_matrix.max(dim=0)\n\n        match_labels = matches.new_full(matches.size(), 1, dtype=torch.int8)\n\n        for (l, low, high) in zip(self.labels, self.thresholds[:-1], self.thresholds[1:]):\n            low_high = (matched_vals >= low) & (matched_vals < high)\n            match_labels[low_high] = l\n\n        if self.allow_low_quality_matches:\n            self.set_low_quality_matches_(match_labels, match_quality_matrix)\n\n        return matches, match_labels\n\n    def set_low_quality_matches_(self, match_labels, match_quality_matrix):\n        \"\"\"\n        Produce additional matches for predictions that have only low-quality matches.\n        Specifically, for each ground-truth G find the set of predictions that have\n        maximum overlap with it (including ties); for each prediction in that set, if\n        it is unmatched, then 
match it to the ground-truth G.\n\n        This function implements the RPN assignment case (i) in Sec. 3.1.2 of the\n        Faster R-CNN paper: https://arxiv.org/pdf/1506.01497v3.pdf.\n        \"\"\"\n        # For each gt, find the prediction with which it has highest quality\n        highest_quality_foreach_gt, _ = match_quality_matrix.max(dim=1)\n        # Find the highest quality match available, even if it is low, including ties.\n        # Note that the matches qualities must be positive due to the use of\n        # `torch.nonzero`.\n        gt_pred_pairs_of_highest_quality = torch.nonzero(\n            match_quality_matrix == highest_quality_foreach_gt[:, None]\n        )\n        # Example gt_pred_pairs_of_highest_quality:\n        #   tensor([[    0, 39796],\n        #           [    1, 32055],\n        #           [    1, 32070],\n        #           [    2, 39190],\n        #           [    2, 40255],\n        #           [    3, 40390],\n        #           [    3, 41455],\n        #           [    4, 45470],\n        #           [    5, 45325],\n        #           [    5, 46390]])\n        # Each row is a (gt index, prediction index)\n        # Note how gt items 1, 2, 3, and 5 each have two ties\n\n        pred_inds_to_update = gt_pred_pairs_of_highest_quality[:, 1]\n        match_labels[pred_inds_to_update] = 1\n"
  },
  {
    "path": "detectron2/modeling/meta_arch/LISA_meta_arch.py",
    "content": "#  Copyright (c) Tianyu Wang. All Rights Reserved.\nimport logging\nimport torch\nfrom torch import nn\n\nfrom detectron2.structures import ImageList\nfrom detectron2.utils.logger import log_first_n\n\nfrom detectron2.modeling.backbone import build_backbone\nfrom detectron2.modeling.postprocessing import detector_postprocess, matchor, combine_association\nfrom .LISA_rpn  import build_proposal_generator\nfrom detectron2.modeling.roi_heads import build_roi_heads\nfrom detectron2.modeling.meta_arch.build import META_ARCH_REGISTRY\nfrom detectron2.modeling.meta_arch.rcnn import __all__, GeneralizedRCNN\nfrom detectron2.utils.registry import Registry\n__all__.append(\"LISARCNN\")\n\n@META_ARCH_REGISTRY.register()\nclass LISARCNN(GeneralizedRCNN):\n\n    def __init__(self,cfg):\n        super(LISARCNN, self).__init__(cfg)\n        self.association_proposal_generator = build_proposal_generator(cfg, self.backbone.output_shape(), shadow_object_part= False)\n        self.proposal_generator = build_proposal_generator(cfg, self.backbone.output_shape(), shadow_object_part= True)\n        self.to(self.device)\n    def forward(self, batched_inputs):\n\n        if not self.training:\n            return self.inference(batched_inputs)\n\n        images = self.preprocess_image(batched_inputs)\n        # print(batched_inputs[1])\n        if \"instances\" in batched_inputs[0]:\n            gt_instances = [x[\"instances\"].to(self.device) for x in batched_inputs]\n        if \"associations\" in batched_inputs[0]:\n            gt_associations = [x[\"associations\"].to(self.device) for x in batched_inputs]\n        elif \"targets\" in batched_inputs[0]:\n            log_first_n(\n                logging.WARN, \"'targets' in the model inputs is now renamed to 'instances'!\", n=10\n            )\n            gt_instances = [x[\"targets\"].to(self.device) for x in batched_inputs]\n        else:\n            gt_instances = None\n\n        features = 
self.backbone(images.tensor)\n\n        if self.association_proposal_generator:\n            association_proposals, association_losses, pre_features, pre_proposals = self.association_proposal_generator(images, features, gt_associations)\n        \n        if self.proposal_generator:\n            # concat_features = {}\n            # for pre_feature, (k,v) in zip(pre_features,features.items()):\n            #     concat_features[k] = torch.cat([v,pre_feature],1)\n            proposals, proposal_losses = self.proposal_generator(images,features,gt_instances,pre_proposals)\n\n        _, detector_losses = self.roi_heads(images, features, association_proposals, proposals, gt_associations, gt_instances)\n\n        losses = {}\n        losses.update(detector_losses)\n        losses.update(proposal_losses)\n        losses.update(association_losses)\n        return losses\n\n    def inference(self, batched_inputs, detected_instances=None, do_postprocess=True):\n        assert not self.training\n        images = self.preprocess_image(batched_inputs)\n        features = self.backbone(images.tensor)\n\n        if detected_instances is None:\n            if self.association_proposal_generator:\n                association_proposals, _, pre_features, pre_proposals = self.association_proposal_generator(images, features)\n            else:\n                assert \"associations\" in batched_inputs[0]\n                proposals = [x[\"associations\"].to(self.device) for x in batched_inputs]\n            if self.proposal_generator:\n                # concat_features = {}\n                # for pre_features,(k,v) in zip(pre_features,features.items()):\n                #     concat_features[k] = torch.cat([v,pre_features],1)\n                proposals, _ = self.proposal_generator(images,features,pre_proposals = pre_proposals)\n            else:\n                assert \"proposals\" in batched_inputs[0]\n                proposals = [x[\"proposals\"].to(self.device) for x in 
batched_inputs]\n\n            results,associations, _ = self.roi_heads(images, features, association_proposals, proposals, None, None)\n        \n        if do_postprocess:\n            processed_results = []\n            for results_per_image, input_per_image, image_size in zip(\n                results, batched_inputs, images.image_sizes\n            ):\n                height = input_per_image.get(\"height\", image_size[0])\n                width = input_per_image.get(\"width\", image_size[1])\n                # print(results_per_image)\n                r = detector_postprocess(results_per_image, height, width)\n                processed_results.append({\"instances\": r.to(torch.device('cpu'))})\n\n            processed_associations = []\n            for results_per_image, input_per_image, image_size in zip(\n                associations, batched_inputs, images.image_sizes\n            ):\n                height = input_per_image.get(\"height\", image_size[0])\n                width = input_per_image.get(\"width\", image_size[1])\n                r = detector_postprocess(results_per_image, height, width)\n                processed_associations.append({\"instances\": r.to(torch.device('cpu'))})\n            \n            for instances, associations in zip(processed_results, processed_associations):\n                _instances, _associations = matchor(instances[\"instances\"],associations[\"instances\"])\n                _associations,_instances = combine_association(_instances,_associations)\n                associations[\"instances\"] = _associations\n                instances[\"instances\"] = _instances\n                    \n\n            return processed_results,processed_associations\n        else:\n            return results,associations\n\n\n\n\n"
  },
  {
    "path": "detectron2/modeling/meta_arch/__init__.py",
    "content": "# -*- coding: utf-8 -*-\n# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\n\nfrom .build import META_ARCH_REGISTRY, build_model  # isort:skip\n\nfrom .panoptic_fpn import PanopticFPN\n\n# import all the meta_arch, so they will be registered\nfrom .rcnn import GeneralizedRCNN, ProposalNetwork\nfrom .retinanet import RetinaNet\nfrom .semantic_seg import SEM_SEG_HEADS_REGISTRY, SemanticSegmentor, build_sem_seg_head\n"
  },
  {
    "path": "detectron2/modeling/meta_arch/build.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nfrom detectron2.utils.registry import Registry\n\nMETA_ARCH_REGISTRY = Registry(\"META_ARCH\")  # noqa F401 isort:skip\nMETA_ARCH_REGISTRY.__doc__ = \"\"\"\nRegistry for meta-architectures, i.e. the whole model.\n\nThe registered object will be called with `obj(cfg)`\nand expected to return a `nn.Module` object.\n\"\"\"\n\n\ndef build_model(cfg):\n    \"\"\"\n    Built the whole model, defined by `cfg.MODEL.META_ARCHITECTURE`.\n    \"\"\"\n    meta_arch = cfg.MODEL.META_ARCHITECTURE\n    return META_ARCH_REGISTRY.get(meta_arch)(cfg)\n"
  },
  {
    "path": "detectron2/modeling/meta_arch/panoptic_fpn.py",
    "content": "# -*- coding: utf-8 -*-\n# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\n\nimport torch\nfrom torch import nn\n\nfrom detectron2.structures import ImageList\n\nfrom ..backbone import build_backbone\nfrom ..postprocessing import detector_postprocess, sem_seg_postprocess\nfrom ..proposal_generator import build_proposal_generator\nfrom ..roi_heads import build_roi_heads\nfrom .build import META_ARCH_REGISTRY\nfrom .semantic_seg import build_sem_seg_head\n\n__all__ = [\"PanopticFPN\"]\n\n\n@META_ARCH_REGISTRY.register()\nclass PanopticFPN(nn.Module):\n    \"\"\"\n    Main class for Panoptic FPN architectures (see https://arxiv.org/abd/1901.02446).\n    \"\"\"\n\n    def __init__(self, cfg):\n        super().__init__()\n\n        self.device = torch.device(cfg.MODEL.DEVICE)\n\n        self.instance_loss_weight = cfg.MODEL.PANOPTIC_FPN.INSTANCE_LOSS_WEIGHT\n\n        # options when combining instance & semantic outputs\n        self.combine_on = cfg.MODEL.PANOPTIC_FPN.COMBINE.ENABLED\n        self.combine_overlap_threshold = cfg.MODEL.PANOPTIC_FPN.COMBINE.OVERLAP_THRESH\n        self.combine_stuff_area_limit = cfg.MODEL.PANOPTIC_FPN.COMBINE.STUFF_AREA_LIMIT\n        self.combine_instances_confidence_threshold = (\n            cfg.MODEL.PANOPTIC_FPN.COMBINE.INSTANCES_CONFIDENCE_THRESH\n        )\n\n        self.backbone = build_backbone(cfg)\n        self.proposal_generator = build_proposal_generator(cfg, self.backbone.output_shape())\n        self.roi_heads = build_roi_heads(cfg, self.backbone.output_shape())\n        self.sem_seg_head = build_sem_seg_head(cfg, self.backbone.output_shape())\n\n        pixel_mean = torch.Tensor(cfg.MODEL.PIXEL_MEAN).to(self.device).view(3, 1, 1)\n        pixel_std = torch.Tensor(cfg.MODEL.PIXEL_STD).to(self.device).view(3, 1, 1)\n        self.normalizer = lambda x: (x - pixel_mean) / pixel_std\n        self.to(self.device)\n\n    def forward(self, batched_inputs):\n        \"\"\"\n        Args:\n    
        batched_inputs: a list, batched outputs of :class:`DatasetMapper`.\n                Each item in the list contains the inputs for one image.\n\n        For now, each item in the list is a dict that contains:\n            image: Tensor, image in (C, H, W) format.\n            instances: Instances\n            sem_seg: semantic segmentation ground truth.\n            Other information that's included in the original dicts, such as:\n                \"height\", \"width\" (int): the output resolution of the model, used in inference.\n                    See :meth:`postprocess` for details.\n\n        Returns:\n            list[dict]: each dict is the results for one image. The dict\n                contains the following keys:\n                \"instances\": see :meth:`GeneralizedRCNN.forward` for its format.\n                \"sem_seg\": see :meth:`SemanticSegmentor.forward` for its format.\n                \"panoptic_seg\": available when `PANOPTIC_FPN.COMBINE.ENABLED`.\n                    See the return value of\n                    :func:`combine_semantic_and_instance_outputs` for its format.\n        \"\"\"\n        images = [x[\"image\"].to(self.device) for x in batched_inputs]\n        images = [self.normalizer(x) for x in images]\n        images = ImageList.from_tensors(images, self.backbone.size_divisibility)\n        features = self.backbone(images.tensor)\n\n        if \"proposals\" in batched_inputs[0]:\n            proposals = [x[\"proposals\"].to(self.device) for x in batched_inputs]\n            proposal_losses = {}\n\n        if \"sem_seg\" in batched_inputs[0]:\n            gt_sem_seg = [x[\"sem_seg\"].to(self.device) for x in batched_inputs]\n            gt_sem_seg = ImageList.from_tensors(\n                gt_sem_seg, self.backbone.size_divisibility, self.sem_seg_head.ignore_value\n            ).tensor\n        else:\n            gt_sem_seg = None\n        sem_seg_results, sem_seg_losses = self.sem_seg_head(features, gt_sem_seg)\n\n        
if \"instances\" in batched_inputs[0]:\n            gt_instances = [x[\"instances\"].to(self.device) for x in batched_inputs]\n        else:\n            gt_instances = None\n        if self.proposal_generator:\n            proposals, proposal_losses = self.proposal_generator(images, features, gt_instances)\n        detector_results, detector_losses = self.roi_heads(\n            images, features, proposals, gt_instances\n        )\n\n        if self.training:\n            losses = {}\n            losses.update(sem_seg_losses)\n            losses.update({k: v * self.instance_loss_weight for k, v in detector_losses.items()})\n            losses.update(proposal_losses)\n            return losses\n\n        processed_results = []\n        for sem_seg_result, detector_result, input_per_image, image_size in zip(\n            sem_seg_results, detector_results, batched_inputs, images.image_sizes\n        ):\n            height = input_per_image.get(\"height\")\n            width = input_per_image.get(\"width\")\n            sem_seg_r = sem_seg_postprocess(sem_seg_result, image_size, height, width)\n            detector_r = detector_postprocess(detector_result, height, width)\n\n            processed_results.append({\"sem_seg\": sem_seg_r, \"instances\": detector_r})\n\n            if self.combine_on:\n                panoptic_r = combine_semantic_and_instance_outputs(\n                    detector_r,\n                    sem_seg_r.argmax(dim=0),\n                    self.combine_overlap_threshold,\n                    self.combine_stuff_area_limit,\n                    self.combine_instances_confidence_threshold,\n                )\n                processed_results[-1][\"panoptic_seg\"] = panoptic_r\n        return processed_results\n\n\ndef combine_semantic_and_instance_outputs(\n    instance_results,\n    semantic_results,\n    overlap_threshold,\n    stuff_area_limit,\n    instances_confidence_threshold,\n):\n    \"\"\"\n    Implement a simple combining logic 
following\n    \"combine_semantic_and_instance_predictions.py\" in panopticapi\n    to produce panoptic segmentation outputs.\n\n    Args:\n        instance_results: output of :func:`detector_postprocess`.\n        semantic_results: an (H, W) tensor, each is the contiguous semantic\n            category id\n\n    Returns:\n        panoptic_seg (Tensor): of shape (height, width) where the values are ids for each segment.\n        segments_info (list[dict]): Describe each segment in `panoptic_seg`.\n            Each dict contains keys \"id\", \"category_id\", \"isthing\".\n    \"\"\"\n    panoptic_seg = torch.zeros_like(semantic_results, dtype=torch.int32)\n\n    # sort instance outputs by scores\n    sorted_inds = torch.argsort(-instance_results.scores)\n\n    current_segment_id = 0\n    segments_info = []\n\n    instance_masks = instance_results.pred_masks.to(dtype=torch.bool, device=panoptic_seg.device)\n\n    # Add instances one-by-one, check for overlaps with existing ones\n    for inst_id in sorted_inds:\n        score = instance_results.scores[inst_id].item()\n        if score < instances_confidence_threshold:\n            break\n        mask = instance_masks[inst_id]  # H,W\n        mask_area = mask.sum().item()\n\n        if mask_area == 0:\n            continue\n\n        intersect = (mask > 0) & (panoptic_seg > 0)\n        intersect_area = intersect.sum().item()\n\n        if intersect_area * 1.0 / mask_area > overlap_threshold:\n            continue\n\n        if intersect_area > 0:\n            mask = mask & (panoptic_seg == 0)\n\n        current_segment_id += 1\n        panoptic_seg[mask] = current_segment_id\n        segments_info.append(\n            {\n                \"id\": current_segment_id,\n                \"isthing\": True,\n                \"score\": score,\n                \"category_id\": instance_results.pred_classes[inst_id].item(),\n                \"instance_id\": inst_id.item(),\n            }\n        )\n\n    # Add semantic results 
to remaining empty areas\n    semantic_labels = torch.unique(semantic_results).cpu().tolist()\n    for semantic_label in semantic_labels:\n        if semantic_label == 0:  # 0 is a special \"thing\" class\n            continue\n        mask = (semantic_results == semantic_label) & (panoptic_seg == 0)\n        mask_area = mask.sum().item()\n        if mask_area < stuff_area_limit:\n            continue\n\n        current_segment_id += 1\n        panoptic_seg[mask] = current_segment_id\n        segments_info.append(\n            {\n                \"id\": current_segment_id,\n                \"isthing\": False,\n                \"category_id\": semantic_label,\n                \"area\": mask_area,\n            }\n        )\n\n    return panoptic_seg, segments_info\n"
  },
  {
    "path": "detectron2/modeling/meta_arch/rcnn.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport logging\nimport torch\nfrom torch import nn\n\nfrom detectron2.structures import ImageList\nfrom detectron2.utils.logger import log_first_n\n\nfrom ..backbone import build_backbone\nfrom ..postprocessing import detector_postprocess\nfrom ..proposal_generator import build_proposal_generator\nfrom ..roi_heads import build_roi_heads\nfrom .build import META_ARCH_REGISTRY\n\n__all__ = [\"GeneralizedRCNN\", \"ProposalNetwork\"]\n\n\n@META_ARCH_REGISTRY.register()\nclass GeneralizedRCNN(nn.Module):\n    \"\"\"\n    Generalized R-CNN. Any models that contains the following three components:\n    1. Per-image feature extraction (aka backbone)\n    2. Region proposal generation\n    3. Per-region feature extraction and prediction\n    \"\"\"\n\n    def __init__(self, cfg):\n        super().__init__()\n\n        self.device = torch.device(cfg.MODEL.DEVICE)\n        self.backbone = build_backbone(cfg)\n        self.proposal_generator = build_proposal_generator(cfg, self.backbone.output_shape())\n        self.roi_heads = build_roi_heads(cfg, self.backbone.output_shape())\n\n        assert len(cfg.MODEL.PIXEL_MEAN) == len(cfg.MODEL.PIXEL_STD)\n        num_channels = len(cfg.MODEL.PIXEL_MEAN)\n        pixel_mean = torch.Tensor(cfg.MODEL.PIXEL_MEAN).to(self.device).view(num_channels, 1, 1)\n        pixel_std = torch.Tensor(cfg.MODEL.PIXEL_STD).to(self.device).view(num_channels, 1, 1)\n        self.normalizer = lambda x: (x - pixel_mean) / pixel_std\n        self.to(self.device)\n\n    def forward(self, batched_inputs):\n        \"\"\"\n        Args:\n            batched_inputs: a list, batched outputs of :class:`DatasetMapper` .\n                Each item in the list contains the inputs for one image.\n                For now, each item in the list is a dict that contains:\n\n                * image: Tensor, image in (C, H, W) format.\n                * instances (optional): groundtruth 
:class:`Instances`\n                * proposals (optional): :class:`Instances`, precomputed proposals.\n\n                Other information that's included in the original dicts, such as:\n\n                * \"height\", \"width\" (int): the output resolution of the model, used in inference.\n                    See :meth:`postprocess` for details.\n\n        Returns:\n            list[dict]:\n                Each dict is the output for one input image.\n                The dict contains one key \"instances\" whose value is a :class:`Instances`.\n                The :class:`Instances` object has the following keys:\n                    \"pred_boxes\", \"pred_classes\", \"scores\", \"pred_masks\", \"pred_keypoints\"\n        \"\"\"\n        if not self.training:\n            return self.inference(batched_inputs)\n\n        images = self.preprocess_image(batched_inputs)\n        if \"instances\" in batched_inputs[0]:\n            gt_instances = [x[\"instances\"].to(self.device) for x in batched_inputs]\n        elif \"targets\" in batched_inputs[0]:\n            log_first_n(\n                logging.WARN, \"'targets' in the model inputs is now renamed to 'instances'!\", n=10\n            )\n            gt_instances = [x[\"targets\"].to(self.device) for x in batched_inputs]\n        else:\n            gt_instances = None\n\n        features = self.backbone(images.tensor)\n\n        if self.proposal_generator:\n            proposals, proposal_losses = self.proposal_generator(images, features, gt_instances)\n        else:\n            assert \"proposals\" in batched_inputs[0]\n            proposals = [x[\"proposals\"].to(self.device) for x in batched_inputs]\n            proposal_losses = {}\n\n        _, detector_losses = self.roi_heads(images, features, proposals, gt_instances)\n\n        losses = {}\n        losses.update(detector_losses)\n        losses.update(proposal_losses)\n        return losses\n\n    def inference(self, batched_inputs, detected_instances=None, 
do_postprocess=True):\n        \"\"\"\n        Run inference on the given inputs.\n\n        Args:\n            batched_inputs (list[dict]): same as in :meth:`forward`\n            detected_instances (None or list[Instances]): if not None, it\n                contains an `Instances` object per image. The `Instances`\n                object contains \"pred_boxes\" and \"pred_classes\" which are\n                known boxes in the image.\n                The inference will then skip the detection of bounding boxes,\n                and only predict other per-ROI outputs.\n            do_postprocess (bool): whether to apply post-processing on the outputs.\n\n        Returns:\n            same as in :meth:`forward`.\n        \"\"\"\n        assert not self.training\n\n        images = self.preprocess_image(batched_inputs)\n        features = self.backbone(images.tensor)\n\n        if detected_instances is None:\n            if self.proposal_generator:\n                proposals, _ = self.proposal_generator(images, features, None)\n            else:\n                assert \"proposals\" in batched_inputs[0]\n                proposals = [x[\"proposals\"].to(self.device) for x in batched_inputs]\n\n            results, _ = self.roi_heads(images, features, proposals, None)\n        else:\n            detected_instances = [x.to(self.device) for x in detected_instances]\n            results = self.roi_heads.forward_with_given_boxes(features, detected_instances)\n\n        if do_postprocess:\n            processed_results = []\n            for results_per_image, input_per_image, image_size in zip(\n                results, batched_inputs, images.image_sizes\n            ):\n                height = input_per_image.get(\"height\", image_size[0])\n                width = input_per_image.get(\"width\", image_size[1])\n                r = detector_postprocess(results_per_image, height, width)\n                processed_results.append({\"instances\": r})\n            return 
processed_results\n        else:\n            return results\n\n    def preprocess_image(self, batched_inputs):\n        \"\"\"\n        Normalize, pad and batch the input images.\n        \"\"\"\n        images = [x[\"image\"].to(self.device) for x in batched_inputs]\n        images = [self.normalizer(x) for x in images]\n        images = ImageList.from_tensors(images, self.backbone.size_divisibility)\n        return images\n\n\n@META_ARCH_REGISTRY.register()\nclass ProposalNetwork(nn.Module):\n    def __init__(self, cfg):\n        super().__init__()\n        self.device = torch.device(cfg.MODEL.DEVICE)\n\n        self.backbone = build_backbone(cfg)\n        self.proposal_generator = build_proposal_generator(cfg, self.backbone.output_shape())\n\n        pixel_mean = torch.Tensor(cfg.MODEL.PIXEL_MEAN).to(self.device).view(-1, 1, 1)\n        pixel_std = torch.Tensor(cfg.MODEL.PIXEL_STD).to(self.device).view(-1, 1, 1)\n        self.normalizer = lambda x: (x - pixel_mean) / pixel_std\n        self.to(self.device)\n\n    def forward(self, batched_inputs):\n        \"\"\"\n        Args:\n            Same as in :class:`GeneralizedRCNN.forward`\n\n        Returns:\n            list[dict]: Each dict is the output for one input image.\n                The dict contains one key \"proposals\" whose value is a\n                :class:`Instances` with keys \"proposal_boxes\" and \"objectness_logits\".\n        \"\"\"\n        images = [x[\"image\"].to(self.device) for x in batched_inputs]\n        images = [self.normalizer(x) for x in images]\n        images = ImageList.from_tensors(images, self.backbone.size_divisibility)\n        features = self.backbone(images.tensor)\n\n        if \"instances\" in batched_inputs[0]:\n            gt_instances = [x[\"instances\"].to(self.device) for x in batched_inputs]\n        elif \"targets\" in batched_inputs[0]:\n            log_first_n(\n                logging.WARN, \"'targets' in the model inputs is now renamed to 'instances'!\", 
n=10\n            )\n            gt_instances = [x[\"targets\"].to(self.device) for x in batched_inputs]\n        else:\n            gt_instances = None\n        proposals, proposal_losses = self.proposal_generator(images, features, gt_instances)\n        # In training, the proposals are not useful at all but we generate them anyway.\n        # This makes RPN-only models about 5% slower.\n        if self.training:\n            return proposal_losses\n\n        processed_results = []\n        for results_per_image, input_per_image, image_size in zip(\n            proposals, batched_inputs, images.image_sizes\n        ):\n            height = input_per_image.get(\"height\", image_size[0])\n            width = input_per_image.get(\"width\", image_size[1])\n            r = detector_postprocess(results_per_image, height, width)\n            processed_results.append({\"proposals\": r})\n        return processed_results\n"
  },
  {
    "path": "detectron2/modeling/meta_arch/retinanet.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport logging\nimport math\nfrom typing import List\nimport torch\nfrom fvcore.nn import sigmoid_focal_loss_jit, smooth_l1_loss\nfrom torch import nn\n\nfrom detectron2.layers import ShapeSpec, batched_nms, cat\nfrom detectron2.structures import Boxes, ImageList, Instances, pairwise_iou\nfrom detectron2.utils.logger import log_first_n\n\nfrom ..anchor_generator import build_anchor_generator\nfrom ..backbone import build_backbone\nfrom ..box_regression import Box2BoxTransform\nfrom ..matcher import Matcher\nfrom ..postprocessing import detector_postprocess\nfrom .build import META_ARCH_REGISTRY\n\n__all__ = [\"RetinaNet\"]\n\n\ndef permute_to_N_HWA_K(tensor, K):\n    \"\"\"\n    Transpose/reshape a tensor from (N, (A x K), H, W) to (N, (HxWxA), K)\n    \"\"\"\n    assert tensor.dim() == 4, tensor.shape\n    N, _, H, W = tensor.shape\n    tensor = tensor.view(N, -1, K, H, W)\n    tensor = tensor.permute(0, 3, 4, 1, 2)\n    tensor = tensor.reshape(N, -1, K)  # Size=(N,HWA,K)\n    return tensor\n\n\ndef permute_all_cls_and_box_to_N_HWA_K_and_concat(box_cls, box_delta, num_classes=80):\n    \"\"\"\n    Rearrange the tensor layout from the network output, i.e.:\n    list[Tensor]: #lvl tensors of shape (N, A x K, Hi, Wi)\n    to per-image predictions, i.e.:\n    Tensor: of shape (N x sum(Hi x Wi x A), K)\n    \"\"\"\n    # for each feature level, permute the outputs to make them be in the\n    # same format as the labels. 
Note that the labels are computed for\n    # all feature levels concatenated, so we keep the same representation\n    # for the objectness and the box_delta\n    box_cls_flattened = [permute_to_N_HWA_K(x, num_classes) for x in box_cls]\n    box_delta_flattened = [permute_to_N_HWA_K(x, 4) for x in box_delta]\n    # concatenate on the first dimension (representing the feature levels), to\n    # take into account the way the labels were generated (with all feature maps\n    # being concatenated as well)\n    box_cls = cat(box_cls_flattened, dim=1).view(-1, num_classes)\n    box_delta = cat(box_delta_flattened, dim=1).view(-1, 4)\n    return box_cls, box_delta\n\n\n@META_ARCH_REGISTRY.register()\nclass RetinaNet(nn.Module):\n    \"\"\"\n    Implement RetinaNet (https://arxiv.org/abs/1708.02002).\n    \"\"\"\n\n    def __init__(self, cfg):\n        super().__init__()\n\n        self.device = torch.device(cfg.MODEL.DEVICE)\n\n        # fmt: off\n        self.num_classes              = cfg.MODEL.RETINANET.NUM_CLASSES\n        self.in_features              = cfg.MODEL.RETINANET.IN_FEATURES\n        # Loss parameters:\n        self.focal_loss_alpha         = cfg.MODEL.RETINANET.FOCAL_LOSS_ALPHA\n        self.focal_loss_gamma         = cfg.MODEL.RETINANET.FOCAL_LOSS_GAMMA\n        self.smooth_l1_loss_beta      = cfg.MODEL.RETINANET.SMOOTH_L1_LOSS_BETA\n        # Inference parameters:\n        self.score_threshold          = cfg.MODEL.RETINANET.SCORE_THRESH_TEST\n        self.topk_candidates          = cfg.MODEL.RETINANET.TOPK_CANDIDATES_TEST\n        self.nms_threshold            = cfg.MODEL.RETINANET.NMS_THRESH_TEST\n        self.max_detections_per_image = cfg.TEST.DETECTIONS_PER_IMAGE\n        # fmt: on\n\n        self.backbone = build_backbone(cfg)\n\n        backbone_shape = self.backbone.output_shape()\n        feature_shapes = [backbone_shape[f] for f in self.in_features]\n        self.head = RetinaNetHead(cfg, feature_shapes)\n        self.anchor_generator = 
build_anchor_generator(cfg, feature_shapes)\n\n        # Matching and loss\n        self.box2box_transform = Box2BoxTransform(weights=cfg.MODEL.RPN.BBOX_REG_WEIGHTS)\n        self.matcher = Matcher(\n            cfg.MODEL.RETINANET.IOU_THRESHOLDS,\n            cfg.MODEL.RETINANET.IOU_LABELS,\n            allow_low_quality_matches=True,\n        )\n\n        pixel_mean = torch.Tensor(cfg.MODEL.PIXEL_MEAN).to(self.device).view(3, 1, 1)\n        pixel_std = torch.Tensor(cfg.MODEL.PIXEL_STD).to(self.device).view(3, 1, 1)\n        self.normalizer = lambda x: (x - pixel_mean) / pixel_std\n        self.to(self.device)\n\n    def forward(self, batched_inputs):\n        \"\"\"\n        Args:\n            batched_inputs: a list, batched outputs of :class:`DatasetMapper` .\n                Each item in the list contains the inputs for one image.\n                For now, each item in the list is a dict that contains:\n\n                * image: Tensor, image in (C, H, W) format.\n                * instances: Instances\n\n                Other information that's included in the original dicts, such as:\n\n                * \"height\", \"width\" (int): the output resolution of the model, used in inference.\n                    See :meth:`postprocess` for details.\n        Returns:\n            dict[str: Tensor]:\n                mapping from a named loss to a tensor storing the loss. 
Used during training only.\n        \"\"\"\n        images = self.preprocess_image(batched_inputs)\n        if \"instances\" in batched_inputs[0]:\n            gt_instances = [x[\"instances\"].to(self.device) for x in batched_inputs]\n        elif \"targets\" in batched_inputs[0]:\n            log_first_n(\n                logging.WARN, \"'targets' in the model inputs is now renamed to 'instances'!\", n=10\n            )\n            gt_instances = [x[\"targets\"].to(self.device) for x in batched_inputs]\n        else:\n            gt_instances = None\n\n        features = self.backbone(images.tensor)\n        features = [features[f] for f in self.in_features]\n        box_cls, box_delta = self.head(features)\n        anchors = self.anchor_generator(features)\n\n        if self.training:\n            gt_classes, gt_anchors_reg_deltas = self.get_ground_truth(anchors, gt_instances)\n            return self.losses(gt_classes, gt_anchors_reg_deltas, box_cls, box_delta)\n        else:\n            results = self.inference(box_cls, box_delta, anchors, images)\n            processed_results = []\n            for results_per_image, input_per_image, image_size in zip(\n                results, batched_inputs, images.image_sizes\n            ):\n                height = input_per_image.get(\"height\", image_size[0])\n                width = input_per_image.get(\"width\", image_size[1])\n                r = detector_postprocess(results_per_image, height, width)\n                processed_results.append({\"instances\": r})\n            return processed_results\n\n    def losses(self, gt_classes, gt_anchors_deltas, pred_class_logits, pred_anchor_deltas):\n        \"\"\"\n        Args:\n            For `gt_classes` and `gt_anchors_deltas` parameters, see\n                :meth:`RetinaNet.get_ground_truth`.\n            Their shapes are (N, R) and (N, R, 4), respectively, where R is\n            the total number of anchors across levels, i.e. 
sum(Hi x Wi x A)\n            For `pred_class_logits` and `pred_anchor_deltas`, see\n                :meth:`RetinaNetHead.forward`.\n\n        Returns:\n            dict[str: Tensor]:\n                mapping from a named loss to a scalar tensor\n                storing the loss. Used during training only. The dict keys are:\n                \"loss_cls\" and \"loss_box_reg\"\n        \"\"\"\n        pred_class_logits, pred_anchor_deltas = permute_all_cls_and_box_to_N_HWA_K_and_concat(\n            pred_class_logits, pred_anchor_deltas, self.num_classes\n        )  # Shapes: (N x R, K) and (N x R, 4), respectively.\n\n        gt_classes = gt_classes.flatten()\n        gt_anchors_deltas = gt_anchors_deltas.view(-1, 4)\n\n        valid_idxs = gt_classes >= 0\n        foreground_idxs = (gt_classes >= 0) & (gt_classes != self.num_classes)\n        num_foreground = foreground_idxs.sum()\n\n        gt_classes_target = torch.zeros_like(pred_class_logits)\n        gt_classes_target[foreground_idxs, gt_classes[foreground_idxs]] = 1\n\n        # logits loss\n        loss_cls = sigmoid_focal_loss_jit(\n            pred_class_logits[valid_idxs],\n            gt_classes_target[valid_idxs],\n            alpha=self.focal_loss_alpha,\n            gamma=self.focal_loss_gamma,\n            reduction=\"sum\",\n        ) / max(1, num_foreground)\n\n        # regression loss\n        loss_box_reg = smooth_l1_loss(\n            pred_anchor_deltas[foreground_idxs],\n            gt_anchors_deltas[foreground_idxs],\n            beta=self.smooth_l1_loss_beta,\n            reduction=\"sum\",\n        ) / max(1, num_foreground)\n\n        return {\"loss_cls\": loss_cls, \"loss_box_reg\": loss_box_reg}\n\n    @torch.no_grad()\n    def get_ground_truth(self, anchors, targets):\n        \"\"\"\n        Args:\n            anchors (list[list[Boxes]]): a list of N=#image elements. Each is a\n                list of #feature level Boxes. 
The Boxes contains anchors of\n                this image on the specific feature level.\n            targets (list[Instances]): a list of N `Instances`s. The i-th\n                `Instances` contains the ground-truth per-instance annotations\n                for the i-th input image.  Specify `targets` during training only.\n\n        Returns:\n            gt_classes (Tensor):\n                An integer tensor of shape (N, R) storing ground-truth\n                labels for each anchor.\n                R is the total number of anchors, i.e. the sum of Hi x Wi x A for all levels.\n                Anchors with an IoU with some target higher than the foreground threshold\n                are assigned their corresponding label in the [0, K-1] range.\n                Anchors whose IoU are below the background threshold are assigned\n                the label \"K\". Anchors whose IoU are between the foreground and background\n                thresholds are assigned a label \"-1\", i.e. ignore.\n            gt_anchors_deltas (Tensor):\n                Shape (N, R, 4).\n                The last dimension represents ground-truth box2box transform\n                targets (dx, dy, dw, dh) that map each anchor to its matched ground-truth box.\n                The values in the tensor are meaningful only when the corresponding\n                anchor is labeled as foreground.\n        \"\"\"\n        gt_classes = []\n        gt_anchors_deltas = []\n        anchors = [Boxes.cat(anchors_i) for anchors_i in anchors]\n        # list[Tensor(R, 4)], one for each image\n\n        for anchors_per_image, targets_per_image in zip(anchors, targets):\n            match_quality_matrix = pairwise_iou(targets_per_image.gt_boxes, anchors_per_image)\n            gt_matched_idxs, anchor_labels = self.matcher(match_quality_matrix)\n\n            # ground truth box regression\n            matched_gt_boxes = targets_per_image[gt_matched_idxs].gt_boxes\n            gt_anchors_reg_deltas_i = 
self.box2box_transform.get_deltas(\n                anchors_per_image.tensor, matched_gt_boxes.tensor\n            )\n\n            # ground truth classes\n            has_gt = len(targets_per_image) > 0\n            if has_gt:\n                gt_classes_i = targets_per_image.gt_classes[gt_matched_idxs]\n                # Anchors with label 0 are treated as background.\n                gt_classes_i[anchor_labels == 0] = self.num_classes\n                # Anchors with label -1 are ignored.\n                gt_classes_i[anchor_labels == -1] = -1\n            else:\n                gt_classes_i = torch.zeros_like(gt_matched_idxs) + self.num_classes\n\n            gt_classes.append(gt_classes_i)\n            gt_anchors_deltas.append(gt_anchors_reg_deltas_i)\n\n        return torch.stack(gt_classes), torch.stack(gt_anchors_deltas)\n\n    def inference(self, box_cls, box_delta, anchors, images):\n        \"\"\"\n        Arguments:\n            box_cls, box_delta: Same as the output of :meth:`RetinaNetHead.forward`\n            anchors (list[list[Boxes]]): a list of #images elements. Each is a\n                list of #feature level Boxes. 
The Boxes contain anchors of this\n                image on the specific feature level.\n            images (ImageList): the input images\n\n        Returns:\n            results (List[Instances]): a list of #images elements.\n        \"\"\"\n        assert len(anchors) == len(images)\n        results = []\n\n        box_cls = [permute_to_N_HWA_K(x, self.num_classes) for x in box_cls]\n        box_delta = [permute_to_N_HWA_K(x, 4) for x in box_delta]\n        # list[Tensor], one per level, each has shape (N, Hi x Wi x A, K or 4)\n\n        for img_idx, anchors_per_image in enumerate(anchors):\n            image_size = images.image_sizes[img_idx]\n            box_cls_per_image = [box_cls_per_level[img_idx] for box_cls_per_level in box_cls]\n            box_reg_per_image = [box_reg_per_level[img_idx] for box_reg_per_level in box_delta]\n            results_per_image = self.inference_single_image(\n                box_cls_per_image, box_reg_per_image, anchors_per_image, tuple(image_size)\n            )\n            results.append(results_per_image)\n        return results\n\n    def inference_single_image(self, box_cls, box_delta, anchors, image_size):\n        \"\"\"\n        Single-image inference. Return bounding-box detection results by thresholding\n        on scores and applying non-maximum suppression (NMS).\n\n        Arguments:\n            box_cls (list[Tensor]): list of #feature levels. Each entry contains\n                tensor of size (H x W x A, K)\n            box_delta (list[Tensor]): Same shape as 'box_cls' except that K becomes 4.\n            anchors (list[Boxes]): list of #feature levels. 
Each entry contains\n                a Boxes object, which contains all the anchors for that\n                image in that feature level.\n            image_size (tuple(H, W)): a tuple of the image height and width.\n\n        Returns:\n            Same as `inference`, but for only one image.\n        \"\"\"\n        boxes_all = []\n        scores_all = []\n        class_idxs_all = []\n\n        # Iterate over every feature level\n        for box_cls_i, box_reg_i, anchors_i in zip(box_cls, box_delta, anchors):\n            # (HxWxAxK,)\n            box_cls_i = box_cls_i.flatten().sigmoid_()\n\n            # Keep top k top scoring indices only.\n            num_topk = min(self.topk_candidates, box_reg_i.size(0))\n            # torch.sort is actually faster than .topk (at least on GPUs)\n            predicted_prob, topk_idxs = box_cls_i.sort(descending=True)\n            predicted_prob = predicted_prob[:num_topk]\n            topk_idxs = topk_idxs[:num_topk]\n\n            # filter out the proposals with low confidence score\n            keep_idxs = predicted_prob > self.score_threshold\n            predicted_prob = predicted_prob[keep_idxs]\n            topk_idxs = topk_idxs[keep_idxs]\n\n            anchor_idxs = topk_idxs // self.num_classes\n            classes_idxs = topk_idxs % self.num_classes\n\n            box_reg_i = box_reg_i[anchor_idxs]\n            anchors_i = anchors_i[anchor_idxs]\n            # predict boxes\n            predicted_boxes = self.box2box_transform.apply_deltas(box_reg_i, anchors_i.tensor)\n\n            boxes_all.append(predicted_boxes)\n            scores_all.append(predicted_prob)\n            class_idxs_all.append(classes_idxs)\n\n        boxes_all, scores_all, class_idxs_all = [\n            cat(x) for x in [boxes_all, scores_all, class_idxs_all]\n        ]\n        keep = batched_nms(boxes_all, scores_all, class_idxs_all, self.nms_threshold)\n        keep = keep[: self.max_detections_per_image]\n\n        result = 
Instances(image_size)\n        result.pred_boxes = Boxes(boxes_all[keep])\n        result.scores = scores_all[keep]\n        result.pred_classes = class_idxs_all[keep]\n        return result\n\n    def preprocess_image(self, batched_inputs):\n        \"\"\"\n        Normalize, pad and batch the input images.\n        \"\"\"\n        images = [x[\"image\"].to(self.device) for x in batched_inputs]\n        images = [self.normalizer(x) for x in images]\n        images = ImageList.from_tensors(images, self.backbone.size_divisibility)\n        return images\n\n\nclass RetinaNetHead(nn.Module):\n    \"\"\"\n    The head used in RetinaNet for object classification and box regression.\n    It has two subnets for the two tasks, with a common structure but separate parameters.\n    \"\"\"\n\n    def __init__(self, cfg, input_shape: List[ShapeSpec]):\n        super().__init__()\n        # fmt: off\n        in_channels      = input_shape[0].channels\n        num_classes      = cfg.MODEL.RETINANET.NUM_CLASSES\n        num_convs        = cfg.MODEL.RETINANET.NUM_CONVS\n        prior_prob       = cfg.MODEL.RETINANET.PRIOR_PROB\n        num_anchors      = build_anchor_generator(cfg, input_shape).num_cell_anchors\n        # fmt: on\n        assert (\n            len(set(num_anchors)) == 1\n        ), \"Using different number of anchors between levels is not currently supported!\"\n        num_anchors = num_anchors[0]\n\n        cls_subnet = []\n        bbox_subnet = []\n        for _ in range(num_convs):\n            cls_subnet.append(\n                nn.Conv2d(in_channels, in_channels, kernel_size=3, stride=1, padding=1)\n            )\n            cls_subnet.append(nn.ReLU())\n            bbox_subnet.append(\n                nn.Conv2d(in_channels, in_channels, kernel_size=3, stride=1, padding=1)\n            )\n            bbox_subnet.append(nn.ReLU())\n\n        self.cls_subnet = nn.Sequential(*cls_subnet)\n        self.bbox_subnet = nn.Sequential(*bbox_subnet)\n        
self.cls_score = nn.Conv2d(\n            in_channels, num_anchors * num_classes, kernel_size=3, stride=1, padding=1\n        )\n        self.bbox_pred = nn.Conv2d(in_channels, num_anchors * 4, kernel_size=3, stride=1, padding=1)\n\n        # Initialization\n        for modules in [self.cls_subnet, self.bbox_subnet, self.cls_score, self.bbox_pred]:\n            for layer in modules.modules():\n                if isinstance(layer, nn.Conv2d):\n                    torch.nn.init.normal_(layer.weight, mean=0, std=0.01)\n                    torch.nn.init.constant_(layer.bias, 0)\n\n        # Use prior in model initialization to improve stability\n        bias_value = -math.log((1 - prior_prob) / prior_prob)\n        torch.nn.init.constant_(self.cls_score.bias, bias_value)\n\n    def forward(self, features):\n        \"\"\"\n        Arguments:\n            features (list[Tensor]): FPN feature map tensors in high to low resolution.\n                Each tensor in the list correspond to different feature levels.\n\n        Returns:\n            logits (list[Tensor]): #lvl tensors, each has shape (N, AxK, Hi, Wi).\n                The tensor predicts the classification probability\n                at each spatial position for each of the A anchors and K object\n                classes.\n            bbox_reg (list[Tensor]): #lvl tensors, each has shape (N, Ax4, Hi, Wi).\n                The tensor predicts 4-vector (dx,dy,dw,dh) box\n                regression values for every anchor. These values are the\n                relative offset between the anchor and the ground truth box.\n        \"\"\"\n        logits = []\n        bbox_reg = []\n        for feature in features:\n            logits.append(self.cls_score(self.cls_subnet(feature)))\n            bbox_reg.append(self.bbox_pred(self.bbox_subnet(feature)))\n        return logits, bbox_reg\n"
  },
  {
    "path": "detectron2/modeling/meta_arch/semantic_seg.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport numpy as np\nfrom typing import Dict\nimport fvcore.nn.weight_init as weight_init\nimport torch\nfrom torch import nn\nfrom torch.nn import functional as F\n\nfrom detectron2.layers import Conv2d, ShapeSpec\nfrom detectron2.structures import ImageList\nfrom detectron2.utils.registry import Registry\n\nfrom ..backbone import build_backbone\nfrom ..postprocessing import sem_seg_postprocess\nfrom .build import META_ARCH_REGISTRY\n\n__all__ = [\"SemanticSegmentor\", \"SEM_SEG_HEADS_REGISTRY\", \"SemSegFPNHead\", \"build_sem_seg_head\"]\n\n\nSEM_SEG_HEADS_REGISTRY = Registry(\"SEM_SEG_HEADS\")\n\"\"\"\nRegistry for semantic segmentation heads, which make semantic segmentation predictions\nfrom feature maps.\n\"\"\"\n\n\n@META_ARCH_REGISTRY.register()\nclass SemanticSegmentor(nn.Module):\n    \"\"\"\n    Main class for semantic segmentation architectures.\n    \"\"\"\n\n    def __init__(self, cfg):\n        super().__init__()\n\n        self.device = torch.device(cfg.MODEL.DEVICE)\n\n        self.backbone = build_backbone(cfg)\n        self.sem_seg_head = build_sem_seg_head(cfg, self.backbone.output_shape())\n\n        pixel_mean = torch.Tensor(cfg.MODEL.PIXEL_MEAN).to(self.device).view(-1, 1, 1)\n        pixel_std = torch.Tensor(cfg.MODEL.PIXEL_STD).to(self.device).view(-1, 1, 1)\n        self.normalizer = lambda x: (x - pixel_mean) / pixel_std\n\n        self.to(self.device)\n\n    def forward(self, batched_inputs):\n        \"\"\"\n        Args:\n            batched_inputs: a list, batched outputs of :class:`DatasetMapper` .\n                Each item in the list contains the inputs for one image.\n\n        For now, each item in the list is a dict that contains:\n            image: Tensor, image in (C, H, W) format.\n            sem_seg: semantic segmentation ground truth\n            Other information that's included in the original dicts, such as:\n                
\"height\", \"width\" (int): the output resolution of the model, used in inference.\n                    See :meth:`postprocess` for details.\n\n        Returns:\n            list[dict]: Each dict is the output for one input image.\n                The dict contains one key \"sem_seg\" whose value is a\n                Tensor of the output resolution that represents the\n                per-pixel segmentation prediction.\n        \"\"\"\n        images = [x[\"image\"].to(self.device) for x in batched_inputs]\n        images = [self.normalizer(x) for x in images]\n        images = ImageList.from_tensors(images, self.backbone.size_divisibility)\n\n        features = self.backbone(images.tensor)\n\n        if \"sem_seg\" in batched_inputs[0]:\n            targets = [x[\"sem_seg\"].to(self.device) for x in batched_inputs]\n            targets = ImageList.from_tensors(\n                targets, self.backbone.size_divisibility, self.sem_seg_head.ignore_value\n            ).tensor\n        else:\n            targets = None\n        results, losses = self.sem_seg_head(features, targets)\n\n        if self.training:\n            return losses\n\n        processed_results = []\n        for result, input_per_image, image_size in zip(results, batched_inputs, images.image_sizes):\n            height = input_per_image.get(\"height\")\n            width = input_per_image.get(\"width\")\n            r = sem_seg_postprocess(result, image_size, height, width)\n            processed_results.append({\"sem_seg\": r})\n        return processed_results\n\n\ndef build_sem_seg_head(cfg, input_shape):\n    \"\"\"\n    Build a semantic segmentation head from `cfg.MODEL.SEM_SEG_HEAD.NAME`.\n    \"\"\"\n    name = cfg.MODEL.SEM_SEG_HEAD.NAME\n    return SEM_SEG_HEADS_REGISTRY.get(name)(cfg, input_shape)\n\n\n@SEM_SEG_HEADS_REGISTRY.register()\nclass SemSegFPNHead(nn.Module):\n    \"\"\"\n    A semantic segmentation head described in detail in the Panoptic Feature Pyramid Networks paper\n    
(https://arxiv.org/abs/1901.02446). It takes FPN features as input and merges information from\n    all levels of the FPN into single output.\n    \"\"\"\n\n    def __init__(self, cfg, input_shape: Dict[str, ShapeSpec]):\n        super().__init__()\n\n        # fmt: off\n        self.in_features      = cfg.MODEL.SEM_SEG_HEAD.IN_FEATURES\n        feature_strides       = {k: v.stride for k, v in input_shape.items()}\n        feature_channels      = {k: v.channels for k, v in input_shape.items()}\n        self.ignore_value     = cfg.MODEL.SEM_SEG_HEAD.IGNORE_VALUE\n        num_classes           = cfg.MODEL.SEM_SEG_HEAD.NUM_CLASSES\n        conv_dims             = cfg.MODEL.SEM_SEG_HEAD.CONVS_DIM\n        self.common_stride    = cfg.MODEL.SEM_SEG_HEAD.COMMON_STRIDE\n        norm                  = cfg.MODEL.SEM_SEG_HEAD.NORM\n        self.loss_weight      = cfg.MODEL.SEM_SEG_HEAD.LOSS_WEIGHT\n        # fmt: on\n\n        self.scale_heads = []\n        for in_feature in self.in_features:\n            head_ops = []\n            head_length = max(\n                1, int(np.log2(feature_strides[in_feature]) - np.log2(self.common_stride))\n            )\n            for k in range(head_length):\n                norm_module = nn.GroupNorm(32, conv_dims) if norm == \"GN\" else None\n                conv = Conv2d(\n                    feature_channels[in_feature] if k == 0 else conv_dims,\n                    conv_dims,\n                    kernel_size=3,\n                    stride=1,\n                    padding=1,\n                    bias=not norm,\n                    norm=norm_module,\n                    activation=F.relu,\n                )\n                weight_init.c2_msra_fill(conv)\n                head_ops.append(conv)\n                if feature_strides[in_feature] != self.common_stride:\n                    head_ops.append(\n                        nn.Upsample(scale_factor=2, mode=\"bilinear\", align_corners=False)\n                    )\n            
self.scale_heads.append(nn.Sequential(*head_ops))\n            self.add_module(in_feature, self.scale_heads[-1])\n        self.predictor = Conv2d(conv_dims, num_classes, kernel_size=1, stride=1, padding=0)\n        weight_init.c2_msra_fill(self.predictor)\n\n    def forward(self, features, targets=None):\n        for i, f in enumerate(self.in_features):\n            if i == 0:\n                x = self.scale_heads[i](features[f])\n            else:\n                x = x + self.scale_heads[i](features[f])\n        x = self.predictor(x)\n        x = F.interpolate(x, scale_factor=self.common_stride, mode=\"bilinear\", align_corners=False)\n\n        if self.training:\n            losses = {}\n            losses[\"loss_sem_seg\"] = (\n                F.cross_entropy(x, targets, reduction=\"mean\", ignore_index=self.ignore_value)\n                * self.loss_weight\n            )\n            return [], losses\n        else:\n            return x, {}\n"
  },
  {
    "path": "detectron2/modeling/poolers.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.\nimport math\nimport sys\nimport torch\nfrom torch import nn\nfrom torchvision.ops import RoIPool\n\nfrom detectron2.layers import ROIAlign, ROIAlignRotated, cat\n\n__all__ = [\"ROIPooler\"]\n\n\ndef assign_boxes_to_levels(box_lists, min_level, max_level, canonical_box_size, canonical_level):\n    \"\"\"\n    Map each box in `box_lists` to a feature map level index and return the assignment\n    vector.\n\n    Args:\n        box_lists (list[Boxes] | list[RotatedBoxes]): A list of N Boxes or N RotatedBoxes,\n            where N is the number of images in the batch.\n        min_level (int): Smallest feature map level index. The input is considered index 0,\n            the output of stage 1 is index 1, and so.\n        max_level (int): Largest feature map level index.\n        canonical_box_size (int): A canonical box size in pixels (sqrt(box area)).\n        canonical_level (int): The feature map level index on which a canonically-sized box\n            should be placed.\n\n    Returns:\n        A tensor of length M, where M is the total number of boxes aggregated over all\n            N batch images. The memory layout corresponds to the concatenation of boxes\n            from all images. 
Each element is the feature map index, as an offset from\n            `self.min_level`, for the corresponding box (so value i means the box is at\n            `self.min_level + i`).\n    \"\"\"\n    eps = sys.float_info.epsilon\n    box_sizes = torch.sqrt(cat([boxes.area() for boxes in box_lists]))\n    # Eqn.(1) in FPN paper\n    level_assignments = torch.floor(\n        canonical_level + torch.log2(box_sizes / canonical_box_size + eps)\n    )\n    level_assignments = torch.clamp(level_assignments, min=min_level, max=max_level)\n    return level_assignments.to(torch.int64) - min_level\n\n\ndef convert_boxes_to_pooler_format(box_lists):\n    \"\"\"\n    Convert all boxes in `box_lists` to the low-level format used by ROI pooling ops\n    (see description under Returns).\n\n    Args:\n        box_lists (list[Boxes] | list[RotatedBoxes]):\n            A list of N Boxes or N RotatedBoxes, where N is the number of images in the batch.\n\n    Returns:\n        When input is list[Boxes]:\n            A tensor of shape (M, 5), where M is the total number of boxes aggregated over all\n            N batch images.\n            The 5 columns are (batch index, x0, y0, x1, y1), where batch index\n            is the index in [0, N) identifying which batch image the box with corners at\n            (x0, y0, x1, y1) comes from.\n        When input is list[RotatedBoxes]:\n            A tensor of shape (M, 6), where M is the total number of boxes aggregated over all\n            N batch images.\n            The 6 columns are (batch index, x_ctr, y_ctr, width, height, angle_degrees),\n            where batch index is the index in [0, N) identifying which batch image the\n            rotated box (x_ctr, y_ctr, width, height, angle_degrees) comes from.\n    \"\"\"\n\n    def fmt_box_list(box_tensor, batch_index):\n        repeated_index = torch.full(\n            (len(box_tensor), 1), batch_index, dtype=box_tensor.dtype, device=box_tensor.device\n        )\n        return 
cat((repeated_index, box_tensor), dim=1)\n\n    pooler_fmt_boxes = cat(\n        [fmt_box_list(box_list.tensor, i) for i, box_list in enumerate(box_lists)], dim=0\n    )\n\n    return pooler_fmt_boxes\n\n\nclass ROIPooler(nn.Module):\n    \"\"\"\n    Region of interest feature map pooler that supports pooling from one or more\n    feature maps.\n    \"\"\"\n\n    def __init__(\n        self,\n        output_size,\n        scales,\n        sampling_ratio,\n        pooler_type,\n        canonical_box_size=224,\n        canonical_level=4,\n    ):\n        \"\"\"\n        Args:\n            output_size (int, tuple[int] or list[int]): output size of the pooled region,\n                e.g., 14 x 14. If tuple or list is given, the length must be 2.\n            scales (list[float]): The scale for each low-level pooling op relative to\n                the input image. For a feature map with stride s relative to the input\n                image, scale is defined as a 1 / s.\n            sampling_ratio (int): The `sampling_ratio` parameter for the ROIAlign op.\n            pooler_type (string): Name of the type of pooling operation that should be applied.\n                For instance, \"ROIPool\" or \"ROIAlignV2\".\n            canonical_box_size (int): A canonical box size in pixels (sqrt(box area)). The default\n                is heuristically defined as 224 pixels in the FPN paper (based on ImageNet\n                pre-training).\n            canonical_level (int): The feature map level index on which a canonically-sized box\n                should be placed. 
The default is defined as level 4 in the FPN paper.\n        \"\"\"\n        super().__init__()\n\n        if isinstance(output_size, int):\n            output_size = (output_size, output_size)\n        assert len(output_size) == 2\n        assert isinstance(output_size[0], int) and isinstance(output_size[1], int)\n        self.output_size = output_size\n\n        if pooler_type == \"ROIAlign\":\n            self.level_poolers = nn.ModuleList(\n                ROIAlign(\n                    output_size, spatial_scale=scale, sampling_ratio=sampling_ratio, aligned=False\n                )\n                for scale in scales\n            )\n        elif pooler_type == \"ROIAlignV2\":\n            self.level_poolers = nn.ModuleList(\n                ROIAlign(\n                    output_size, spatial_scale=scale, sampling_ratio=sampling_ratio, aligned=True\n                )\n                for scale in scales\n            )\n        elif pooler_type == \"ROIPool\":\n            self.level_poolers = nn.ModuleList(\n                RoIPool(output_size, spatial_scale=scale) for scale in scales\n            )\n        elif pooler_type == \"ROIAlignRotated\":\n            self.level_poolers = nn.ModuleList(\n                ROIAlignRotated(output_size, spatial_scale=scale, sampling_ratio=sampling_ratio)\n                for scale in scales\n            )\n        else:\n            raise ValueError(\"Unknown pooler type: {}\".format(pooler_type))\n\n        # Map scale (defined as 1 / stride) to its feature map level under the\n        # assumption that stride is a power of 2.\n        min_level = -math.log2(scales[0])\n        max_level = -math.log2(scales[-1])\n        assert math.isclose(min_level, int(min_level)) and math.isclose(max_level, int(max_level))\n        self.min_level = int(min_level)\n        self.max_level = int(max_level)\n        assert 0 < self.min_level and self.min_level <= self.max_level\n        assert self.min_level <= canonical_level and 
canonical_level <= self.max_level\n        self.canonical_level = canonical_level\n        assert canonical_box_size > 0\n        self.canonical_box_size = canonical_box_size\n\n    def forward(self, x, box_lists):\n        \"\"\"\n        Args:\n            x (list[Tensor]): A list of feature maps with scales matching those used to\n                construct this module.\n            box_lists (list[Boxes] | list[RotatedBoxes]):\n                A list of N Boxes or N RotatedBoxes, where N is the number of images in the batch.\n\n        Returns:\n            Tensor:\n                A tensor of shape (M, C, output_size, output_size) where M is the total number of\n                boxes aggregated over all N batch images and C is the number of channels in `x`.\n        \"\"\"\n        num_level_assignments = len(self.level_poolers)\n        pooler_fmt_boxes = convert_boxes_to_pooler_format(box_lists)\n\n        if num_level_assignments == 1:\n            return self.level_poolers[0](x[0], pooler_fmt_boxes)\n\n        level_assignments = assign_boxes_to_levels(\n            box_lists, self.min_level, self.max_level, self.canonical_box_size, self.canonical_level\n        )\n\n        num_boxes = len(pooler_fmt_boxes)\n        num_channels = x[0].shape[1]\n        output_size = self.output_size[0]\n\n        dtype, device = x[0].dtype, x[0].device\n        output = torch.zeros(\n            (num_boxes, num_channels, output_size, output_size), dtype=dtype, device=device\n        )\n\n        for level, (x_level, pooler) in enumerate(zip(x, self.level_poolers)):\n            inds = torch.nonzero(level_assignments == level).squeeze(1)\n            pooler_fmt_boxes_level = pooler_fmt_boxes[inds]\n            output[inds] = pooler(x_level, pooler_fmt_boxes_level)\n\n        return output\n"
  },
  {
    "path": "detectron2/modeling/postprocessing.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nfrom torch.nn import functional as F\nimport math\nimport numpy as np\n\nfrom detectron2.layers import paste_masks_in_image\nfrom detectron2.structures import Boxes, BoxMode, Instances\nimport pysobatools.sobaeval as eval\n\n\ndef decode(segm):\n    return eval.maskUtils.decode(segm).astype('uint8')\n\n\ndef encode(segm):\n    return eval.maskUtils.encode(segm)\n\n\ndef detector_postprocess(results, output_height, output_width, mask_threshold=0.5):\n    \"\"\"\n    Resize the output instances.\n    The input images are often resized when entering an object detector.\n    As a result, we often need the outputs of the detector in a different\n    resolution from its inputs.\n\n    This function will resize the raw outputs of an R-CNN detector\n    to produce outputs according to the desired output resolution.\n\n    Args:\n        results (Instances): the raw outputs from the detector.\n            `results.image_size` contains the input image resolution the detector sees.\n            This object might be modified in-place.\n        output_height, output_width: the desired output resolution.\n\n    Returns:\n        Instances: the resized output from the model, based on the output resolution\n    \"\"\"\n    scale_x, scale_y = (output_width / results.image_size[1], output_height / results.image_size[0])\n    results = Instances((output_height, output_width), **results.get_fields())\n\n    if results.has(\"pred_boxes\"):\n        output_boxes = results.pred_boxes\n    elif results.has(\"proposal_boxes\"):\n        output_boxes = results.proposal_boxes\n\n    output_boxes.tensor[:, 0::2] *= scale_x\n    output_boxes.tensor[:, 1::2] *= scale_y\n    output_boxes.clip(results.image_size)\n\n    results = results[output_boxes.nonempty()]\n\n    if results.has(\"pred_masks\"):\n        results.pred_masks = paste_masks_in_image(\n            results.pred_masks[:, 0, :, :],  # N, 1, M, 
M\n            results.pred_boxes,\n            results.image_size,\n            threshold=mask_threshold,\n        )\n\n    if results.has(\"pred_keypoints\"):\n        results.pred_keypoints[:, :, 0] *= scale_x\n        results.pred_keypoints[:, :, 1] *= scale_y\n\n    return results\n\n\ndef sem_seg_postprocess(result, img_size, output_height, output_width):\n    \"\"\"\n    Return semantic segmentation predictions in the original resolution.\n\n    The input images are often resized when entering semantic segmentor. Moreover, in same\n    cases, they also padded inside segmentor to be divisible by maximum network stride.\n    As a result, we often need the predictions of the segmentor in a different\n    resolution from its inputs.\n\n    Args:\n        result (Tensor): semantic segmentation prediction logits. A tensor of shape (C, H, W),\n            where C is the number of classes, and H, W are the height and width of the prediction.\n        img_size (tuple): image size that segmentor is taking as input.\n        output_height, output_width: the desired output resolution.\n\n    Returns:\n        semantic segmentation prediction (Tensor): A tensor of the shape\n            (C, output_height, output_width) that contains per-pixel soft predictions.\n    \"\"\"\n    result = result[:, : img_size[0], : img_size[1]].expand(1, -1, -1, -1)\n    result = F.interpolate(\n        result, size=(output_height, output_width), mode=\"bilinear\", align_corners=False\n    )[0]\n    return result\n\ndef takeTwo(elm):\n    return elm[1]\n#\n#     \"\"\"\n#     param:\n#     rec1: (x0, y0, w, h)\n#     rec2: (x0, y0, w, h)\n#     x0, y0: the upper left point of rec.\n#     w, h:  the length and width of rec.\n#     \"\"\"\n\n\ndef compute_iou(rec1, rec2):\n    left_x = max(rec1[0], rec2[0])\n    left_y = max(rec1[1], rec2[1])\n    right_x = min(rec1[0] + rec1[2], rec2[0] + rec2[2])\n    right_y = min(rec1[1] + rec1[3], rec2[1] + rec2[3])\n    if left_x >= right_x or left_y >= 
right_y:\n        return 0\n    else:\n        S_mid = (right_y - left_y) * (right_x - left_x)\n        S_total = (rec1[2] * rec1[3]) + (rec2[2] * rec2[3]) - S_mid\n        return S_mid / S_total\n\n\ndef box_combine(o, s, box1, box2):\n    \"\"\"\n    args:\n        box1 : (x1_0,  y1_0,  x1_1, y1_1)\n        box2: (x2_0, y2_0, x2_1, y2_1)\n    return:\n        dict[\"1_2\":(min(x1_0,x2_0),min(y1_0,y2_0),max(x1_1,x2-1),max(y2_1,y2_2))]\n    \"\"\"\n    name = '{}_{}'.format(o, s)\n    combine = (min(box1[0], box2[0]), min(box1[1], box2[1]),\n               max(box1[2], box2[2]), max(box1[3], box2[3]))\n    combine = (combine[0], combine[1], combine[2] - combine[0],\n               combine[3] - combine[1])  # XYXY to XYWH\n    return [name, combine]\n\n\ndef compute_direction(box1,box2):\n    pass\n\ndef rect_distance(a, b):\n    x1, y1, x1b, y1b = a\n    x2, y2, x2b, y2b = b\n    left = x2b < x1\n    right = x1b < x2\n    bottom = y2b < y1\n    top = y1b < y2\n    if top and left:\n        return dist((x1, y1b), (x2b, y2))\n    elif left and bottom:\n        return dist((x1, y1), (x2b, y2b))\n    elif bottom and right:\n        return dist((x1b, y1), (x2, y2b))\n    elif right and top:\n        return dist((x1b, y1b), (x2, y2))\n    elif left:\n        return x1 - x2b\n    elif right:\n        return x2 - x1b\n    elif bottom:\n        return y1 - y2b\n    elif top:\n        return y2 - y1b\n    else:\n        return 0\n\n\ndef dist(a, b):\n    return math.sqrt((a[0] - b[0])**2 + (a[1] - b[1])**2)\n\n\ndef matchor(instance, association):\n    results = []\n    objects = [i for i, v in enumerate(instance.pred_classes) if v == 0]\n    shadows = [i for i, v in enumerate(instance.pred_classes) if v == 1]\n    boxes = []\n    for o in objects:\n        if instance.scores[o] < 0.5:\n            continue\n        for s in shadows:\n            if instance.scores[s] < 0.5:\n                continue\n            o_box = instance.pred_boxes[o].tensor[0].numpy()\n            
s_box = instance.pred_boxes[s].tensor[0].numpy()\n            o_area = (o_box[2] - o_box[0]) * (o_box[3] - o_box[1])\n            s_area = (s_box[2] - s_box[0]) * (s_box[3] - s_box[1])\n            if compute_iou((o_box[0], o_box[1], o_box[2] - o_box[0], o_box[3] - o_box[1]), (s_box[0], s_box[1], s_box[2] - s_box[0], s_box[3] - s_box[1])) == 0:\n                if rect_distance(o_box, s_box) >= s_box[3] -  s_box[1]:\n                    continue\n            boxes.append(box_combine(o, s, o_box, s_box))\n    ass_boxes = association.pred_boxes.tensor.numpy()\n    pair = []\n\n    for i, ass_box in enumerate(ass_boxes):\n        scores = []\n        ass_box = [ass_box[0], ass_box[1], ass_box[2] - ass_box[0], ass_box[3] - ass_box[1]]\n        for box in boxes:\n            k, v = box\n\n            scores.append([str(i) + '_' + k, compute_iou(ass_box, v)])\n\n        if len(ass_boxes) == 1:\n            pair.append(sorted(scores, key=takeTwo, reverse=True)[:1])\n        else:\n            pair.append(sorted(scores, key=takeTwo, reverse=True)[:1])\n            if not sum([sc[1] > 0.5 for sc in pair[i]]):\n                pair[i] = [[0, 0]]\n    O = {}\n    S = {}\n    for k, v in enumerate(pair):\n        if v != [[0, 0]] and v != []:\n            r, o, s = v[0][0].split('_')\n            if o in O:\n                if s in S:\n                    if v[0][1] > O[o][1] and v[0][1] > S[s][1]:\n                        O[o] = v[0]\n                        S[s] = v[0]\n                else:\n                    if v[0][1] > O[o][1]:\n                        O[o] = v[0]\n            elif s in S:\n                if v[0][1] > S[s][1]:\n                    S[s] = v[0]                    \n            else:\n                O[o] = v[0]\n                S[s] = v[0]\n    for k, v in S.items():\n        r, o, s = v[0].split('_')\n        results.append((int(o), int(s), int(r)))\n\n    ins_association = instance.pred_classes * 0\n    ret_association = association.pred_classes * 0\n 
   if results == []:\n        instance.pred_associations = ins_association\n        association.pred_associations = ret_association\n        return instance, association\n    association_id = 1\n\n    for i in results:\n        if ins_association[i[0]]+ins_association[i[1]] == 0:\n            ins_association[i[0]] = association_id\n            ins_association[i[1]] = association_id\n            ret_association[i[2]] = association_id\n            association_id += 1\n\n    instance.pred_associations = ins_association\n    association.pred_associations = ret_association\n    return instance, association\n\n\ndef combine_association(instance, association):\n    pred_masks = [mask.numpy() for mask in instance.pred_masks]\n    pred_scores = instance.scores.numpy()\n    pred_boxes = instance.pred_boxes.tensor.numpy().tolist()\n    pred_classes = instance.pred_classes.numpy()\n    h, w = pred_masks[0].shape\n    pred_associations = instance.pred_associations.numpy()\n    pred_light = association.pred_light.tensor.numpy()\n    ret = Instances((h,w))\n    ins = Instances((h,w))\n\n    if np.sum(pred_associations) == 0:\n        ret.pred_boxes = association.pred_boxes\n        ret.scores = association.scores\n        ret.pred_classes = association.pred_classes\n        ret.pred_light = association.pred_light.tensor.numpy().tolist()\n        segm = np.zeros((h,w,1),order='F',dtype='uint8')\n        ret.pred_masks = [segm] * len(association.pred_boxes)\n        ret.pred_associations = association.pred_associations.numpy().astype('int').tolist()\n        instance.pred_associations = pred_associations.astype('int').tolist()\n        return ret,instance\n\n    mask_map = {}\n    for i, ass in enumerate(pred_associations):\n        if ass != 0:\n            if ass in mask_map:\n                if pred_classes[i] == 1:\n                    mask_map[ass].append((pred_masks[i], pred_scores[i],pred_classes[i],pred_boxes[i]))\n                else:\n                    mask_map[ass] = 
[(pred_masks[i], pred_scores[i],pred_classes[i],pred_boxes[i]),mask_map[ass][0]]\n            else:\n                \n                mask_map[ass] = [(pred_masks[i], pred_scores[i],pred_classes[i],pred_boxes[i])]\n\n    results = []\n    boxes = []\n    scores = []\n    classes = []\n    associations = []\n    light = []\n\n    for i,ass in enumerate(association.pred_associations):\n        if ass != 0:\n            light.append(pred_light[i].tolist())\n\n    for k, v in mask_map.items():\n        associations.append(int(k))\n        s, o = v\n        avg_score = float((s[1]+ o[1])/2)\n        _s = s[0].reshape(h,w,1)\n        _o = o[0].reshape(h,w,1)\n\n        comb = _s + _o\n        classes.append(0)\n        segm = encode(np.array(comb,order='F',dtype='uint8'))[0]\n        boxes.append(BoxMode.convert(eval.maskUtils.toBbox(segm), BoxMode.XYWH_ABS, BoxMode.XYXY_ABS))\n        results.append(comb)\n        scores.append(avg_score)\n\n    ret.pred_masks = results\n    ret.pred_boxes = boxes\n    ret.scores = scores\n    ret.pred_classes = classes\n    ret.pred_associations = associations\n    ret.pred_light= light\n\n    instance.pred_associations = instance.pred_associations.numpy().astype('int').tolist()\n\n    return ret,instance\n"
  },
  {
    "path": "detectron2/modeling/proposal_generator/LISA_rpn.py",
    "content": "#  Copyright (c) Tianyu Wang. All Rights Reserved.\nfrom typing import Dict, List\nimport torch\nimport torch.nn.functional as F\nfrom torch import nn\n\nfrom detectron2.layers import ShapeSpec\nfrom detectron2.utils.registry import Registry\n\nfrom detectron2.modeling.anchor_generator import build_anchor_generator\nfrom detectron2.modeling.box_regression import Box2BoxTransform\nfrom detectron2.modeling.matcher import Matcher\nfrom detectron2.modeling import PROPOSAL_GENERATOR_REGISTRY, RPN_HEAD_REGISTRY\nfrom detectron2.modeling.proposal_generator.rpn_outputs import RPNOutputs, find_top_rpn_proposals\nfrom detectron2.modeling.proposal_generator.rpn import StandardRPNHead, RPN\nfrom detectron2.structures import BoxMode,Boxes\n\n\"\"\"\nRegistry for LISA RPN heads, which take CNN feature maps and perform\nobjectness classification and bounding box regression for anchors.\n\"\"\"\n\n# NOTE: `cfg.MODEL.RPN.HEAD_NAME` should be \"LISARPNHead\".\n\ndef build_rpn_head(cfg, input_shape,shadow_object_part=False):\n    \"\"\"\n    Build an RPN head defined by `cfg.MODEL.RPN.HEAD_NAME`.\n    \"\"\"\n    name = cfg.MODEL.RPN.HEAD_NAME\n    return RPN_HEAD_REGISTRY.get(name)(cfg, input_shape,shadow_object_part)\n\n\n@RPN_HEAD_REGISTRY.register()\nclass LISARPNHead(StandardRPNHead):\n    def __init__(self, cfg, input_shape: List[ShapeSpec], shadow_object_part= False):\n        super(LISARPNHead, self).__init__(cfg,input_shape)\n        self.shadow_object_part = shadow_object_part\n        if self.shadow_object_part:\n            in_channels = [s.channels for s in input_shape]\n            assert len(set(in_channels)) == 1, \"Each level must have the same channel!\"\n            in_channels = in_channels[0]\n            self.conv = nn.Conv2d(in_channels , in_channels, kernel_size=3, stride=1, padding=1)\n            for l in [self.conv]:\n                nn.init.normal_(l.weight, std=0.01)\n                nn.init.constant_(l.bias, 0)\n\n    def forward(self, 
features):\n        \"\"\"\n        Args:\n            features (list[Tensor]): list of feature maps\n        \"\"\"\n        \n        pred_objectness_logits = []\n        pred_anchor_deltas = []\n        if self.shadow_object_part == False:\n            pre_features = []\n        for i,x in enumerate(features):\n\n            t = F.relu(self.conv(x))\n\n            # if self.shadow_object_part == False:\n                # pre_features.append(t)\n            \n            pred_objectness_logits.append(self.objectness_logits(t))\n            pred_anchor_deltas.append(self.anchor_deltas(t))\n        \n        if self.shadow_object_part == False:\n            return pred_objectness_logits, pred_anchor_deltas, None\n        else:\n            return pred_objectness_logits, pred_anchor_deltas\n\n\ndef build_proposal_generator(cfg, input_shape, **args):\n    \"\"\"\n    Build a proposal generator from `cfg.MODEL.PROPOSAL_GENERATOR.NAME`.\n    The name can be \"PrecomputedProposals\" to use no proposal generator.\n    \"\"\"\n    name = cfg.MODEL.PROPOSAL_GENERATOR.NAME\n    if name == \"PrecomputedProposals\":\n        return None\n\n    return PROPOSAL_GENERATOR_REGISTRY.get(name)(cfg, input_shape,**args)\n    \n@PROPOSAL_GENERATOR_REGISTRY.register()\nclass LISARPN(RPN):\n\n    def __init__(self, cfg, input_shape: Dict[str, ShapeSpec], shadow_object_part= False):\n        super(LISARPN, self).__init__(cfg, input_shape)\n        self.shadow_object_part = shadow_object_part\n        if self.shadow_object_part:\n            self.rpn_head = build_rpn_head(cfg, [input_shape[f] for f in self.in_features], self.shadow_object_part)\n    \n    def forward(self, images, features, gt_instances=None, pre_proposals=None):\n        gt_boxes = [x.gt_boxes for x in gt_instances] if gt_instances is not None else None\n        del gt_instances\n\n        if self.shadow_object_part == False:\n            features = [features[f] for f in self.in_features]\n            
pred_objectness_logits, pred_anchor_deltas, pre_features = self.rpn_head(features)\n            anchors = self.anchor_generator(features)\n        else:\n            features = [features[f] for f in self.in_features]\n            pred_objectness_logits, pred_anchor_deltas = self.rpn_head(features)\n            anchors = self.anchor_generator(features)\n            assert len(anchors[0]) == len(pre_proposals), \"number of pre_proposals {} and pre_anchors {} should be same.\".format(len(anchors[0]),len(pre_proposals))\n\n        outputs = RPNOutputs(\n            self.box2box_transform,\n            self.anchor_matcher,\n            self.batch_size_per_image,\n            self.positive_fraction,\n            images,\n            pred_objectness_logits,\n            pred_anchor_deltas,\n            anchors,\n            self.boundary_threshold,\n            gt_boxes,\n            self.smooth_l1_beta,\n        )\n        \n        if self.training:\n            if self.shadow_object_part == False:\n                losses = {k+'_rela': v * self.loss_weight for k, v in outputs.losses().items()}\n            else:\n                losses = {k: v * self.loss_weight for k, v in outputs.losses().items()}\n        else:\n            losses = {}\n\n        with torch.no_grad():\n\n            \n            pre_proposals = outputs.predict_proposals()\n            # Find the top proposals by applying NMS and removing boxes that\n            # are too small. The proposals are treated as fixed for approximate\n            # joint training with roi heads. This approach ignores the derivative\n            # w.r.t. 
the proposal boxes’ coordinates that are also network\n            # responses, so is approximate.\n            proposals = find_top_rpn_proposals(\n                pre_proposals,\n                outputs.predict_objectness_logits(),\n                images,\n                self.nms_thresh,\n                self.pre_nms_topk[self.training],\n                self.post_nms_topk[self.training],\n                self.min_box_side_len,\n                self.training,\n            )\n            # For RPN-only models, the proposals are the final output and we return them in\n            # high-to-low confidence order.\n            # For end-to-end models, the RPN proposals are an intermediate state\n            # and this sorting is actually not needed. But the cost is negligible.\n            inds = [p.objectness_logits.sort(descending=True)[1] for p in proposals]\n            proposals = [p[ind] for p, ind in zip(proposals, inds)]\n        if self.shadow_object_part == False:\n            return proposals, losses, pre_features, pre_proposals\n        else:\n            return proposals, losses\n    \n\n\n"
  },
  {
    "path": "detectron2/modeling/proposal_generator/__init__.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nfrom .build import PROPOSAL_GENERATOR_REGISTRY, build_proposal_generator\nfrom .rpn import RPN_HEAD_REGISTRY, build_rpn_head\n"
  },
  {
    "path": "detectron2/modeling/proposal_generator/build.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nfrom detectron2.utils.registry import Registry\n\nPROPOSAL_GENERATOR_REGISTRY = Registry(\"PROPOSAL_GENERATOR\")\nPROPOSAL_GENERATOR_REGISTRY.__doc__ = \"\"\"\nRegistry for proposal generator, which produces object proposals from feature maps.\n\nThe registered object will be called with `obj(cfg, input_shape)`.\nThe call should return a `nn.Module` object.\n\"\"\"\n\nfrom . import rpn, rrpn  # noqa F401 isort:skip\n\n\ndef build_proposal_generator(cfg, input_shape):\n    \"\"\"\n    Build a proposal generator from `cfg.MODEL.PROPOSAL_GENERATOR.NAME`.\n    The name can be \"PrecomputedProposals\" to use no proposal generator.\n    \"\"\"\n    name = cfg.MODEL.PROPOSAL_GENERATOR.NAME\n    if name == \"PrecomputedProposals\":\n        return None\n\n    return PROPOSAL_GENERATOR_REGISTRY.get(name)(cfg, input_shape)\n"
  },
  {
    "path": "detectron2/modeling/proposal_generator/proposal_utils.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport math\nimport torch\n\nfrom detectron2.structures import Instances\n\n\ndef add_ground_truth_to_proposals(gt_boxes, proposals, gt_light =None):\n    \"\"\"\n    Call `add_ground_truth_to_proposals_single_image` for all images.\n\n    Args:\n        gt_boxes(list[Boxes]): list of N elements. Element i is a Boxes\n            representing the gound-truth for image i.\n        proposals (list[Instances]): list of N elements. Element i is a Instances\n            representing the proposals for image i.\n\n    Returns:\n        list[Instances]: list of N Instances. Each is the proposals for the image,\n            with field \"proposal_boxes\" and \"objectness_logits\".\n    \"\"\"\n    assert gt_boxes is not None\n\n    assert len(proposals) == len(gt_boxes)\n    if len(proposals) == 0:\n        return proposals\n    if gt_light == None:\n        gt_light = [None] * len(gt_boxes)\n    return [\n        add_ground_truth_to_proposals_single_image(gt_boxes_i, proposals_i, gt_light_i)\n        for gt_boxes_i, proposals_i,gt_light_i in zip(gt_boxes, proposals,gt_light)\n    ]\n\n\ndef add_ground_truth_to_proposals_single_image(gt_boxes, proposals,gt_light):\n    \"\"\"\n    Augment `proposals` with ground-truth boxes from `gt_boxes`.\n\n    Args:\n        Same as `add_ground_truth_to_proposals`, but with gt_boxes and proposals\n        per image.\n\n    Returns:\n        Same as `add_ground_truth_to_proposals`, but for only one image.\n    \"\"\"\n    device = proposals.objectness_logits.device\n    # Concatenating gt_boxes with proposals requires them to have the same fields\n    # Assign all ground-truth boxes an objectness logit corresponding to P(object) \\approx 1.\n    gt_logit_value = math.log((1.0 - 1e-10) / (1 - (1.0 - 1e-10)))\n\n    gt_logits = gt_logit_value * torch.ones(len(gt_boxes), device=device)\n    gt_proposal = Instances(proposals.image_size)\n\n    
gt_proposal.proposal_boxes = gt_boxes\n    gt_proposal.objectness_logits = gt_logits\n    if gt_light != None:\n        gt_proposal.proposal_light = gt_light\n    new_proposals = Instances.cat([proposals, gt_proposal])\n\n    return new_proposals\n\n\n"
  },
  {
    "path": "detectron2/modeling/proposal_generator/rpn.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nfrom typing import Dict, List\nimport torch\nimport torch.nn.functional as F\nfrom torch import nn\n\nfrom detectron2.layers import ShapeSpec\nfrom detectron2.utils.registry import Registry\n\nfrom ..anchor_generator import build_anchor_generator\nfrom ..box_regression import Box2BoxTransform\nfrom ..matcher import Matcher\nfrom .build import PROPOSAL_GENERATOR_REGISTRY\nfrom .rpn_outputs import RPNOutputs, find_top_rpn_proposals\n\nRPN_HEAD_REGISTRY = Registry(\"RPN_HEAD\")\n\"\"\"\nRegistry for RPN heads, which take feature maps and perform\nobjectness classification and bounding box regression for anchors.\n\"\"\"\n\n\ndef build_rpn_head(cfg, input_shape):\n    \"\"\"\n    Build an RPN head defined by `cfg.MODEL.RPN.HEAD_NAME`.\n    \"\"\"\n    name = cfg.MODEL.RPN.HEAD_NAME\n    return RPN_HEAD_REGISTRY.get(name)(cfg, input_shape)\n\n\n@RPN_HEAD_REGISTRY.register()\nclass StandardRPNHead(nn.Module):\n    \"\"\"\n    RPN classification and regression heads. 
Uses a 3x3 conv to produce a shared\n    hidden state from which one 1x1 conv predicts objectness logits for each anchor\n    and a second 1x1 conv predicts bounding-box deltas specifying how to deform\n    each anchor into an object proposal.\n    \"\"\"\n\n    def __init__(self, cfg, input_shape: List[ShapeSpec]):\n        super().__init__()\n\n        # Standard RPN is shared across levels:\n        in_channels = [s.channels for s in input_shape]\n        assert len(set(in_channels)) == 1, \"Each level must have the same channel!\"\n        in_channels = in_channels[0]\n\n        # RPNHead should take the same input as anchor generator\n        # NOTE: it assumes that creating an anchor generator does not have unwanted side effect.\n        anchor_generator = build_anchor_generator(cfg, input_shape)\n        num_cell_anchors = anchor_generator.num_cell_anchors\n        box_dim = anchor_generator.box_dim\n        assert (\n            len(set(num_cell_anchors)) == 1\n        ), \"Each level must have the same number of cell anchors\"\n        num_cell_anchors = num_cell_anchors[0]\n\n        # 3x3 conv for the hidden representation\n        self.conv = nn.Conv2d(in_channels, in_channels, kernel_size=3, stride=1, padding=1)\n        # 1x1 conv for predicting objectness logits\n        self.objectness_logits = nn.Conv2d(in_channels, num_cell_anchors, kernel_size=1, stride=1)\n        # 1x1 conv for predicting box2box transform deltas\n        self.anchor_deltas = nn.Conv2d(\n            in_channels, num_cell_anchors * box_dim, kernel_size=1, stride=1\n        )\n\n        for l in [self.conv, self.objectness_logits, self.anchor_deltas]:\n            nn.init.normal_(l.weight, std=0.01)\n            nn.init.constant_(l.bias, 0)\n\n    def forward(self, features):\n        \"\"\"\n        Args:\n            features (list[Tensor]): list of feature maps\n        \"\"\"\n        pred_objectness_logits = []\n        pred_anchor_deltas = []\n        for x in features:\n   
         t = F.relu(self.conv(x))\n            pred_objectness_logits.append(self.objectness_logits(t))\n            pred_anchor_deltas.append(self.anchor_deltas(t))\n        return pred_objectness_logits, pred_anchor_deltas\n\n\n@PROPOSAL_GENERATOR_REGISTRY.register()\nclass RPN(nn.Module):\n    \"\"\"\n    Region Proposal Network, introduced by the Faster R-CNN paper.\n    \"\"\"\n\n    def __init__(self, cfg, input_shape: Dict[str, ShapeSpec]):\n        super().__init__()\n\n        # fmt: off\n        self.min_box_side_len        = cfg.MODEL.PROPOSAL_GENERATOR.MIN_SIZE\n        self.in_features             = cfg.MODEL.RPN.IN_FEATURES\n        self.nms_thresh              = cfg.MODEL.RPN.NMS_THRESH\n        self.batch_size_per_image    = cfg.MODEL.RPN.BATCH_SIZE_PER_IMAGE\n        self.positive_fraction       = cfg.MODEL.RPN.POSITIVE_FRACTION\n        self.smooth_l1_beta          = cfg.MODEL.RPN.SMOOTH_L1_BETA\n        self.loss_weight             = cfg.MODEL.RPN.LOSS_WEIGHT\n        # fmt: on\n\n        # Map from self.training state to train/test settings\n        self.pre_nms_topk = {\n            True: cfg.MODEL.RPN.PRE_NMS_TOPK_TRAIN,\n            False: cfg.MODEL.RPN.PRE_NMS_TOPK_TEST,\n        }\n        self.post_nms_topk = {\n            True: cfg.MODEL.RPN.POST_NMS_TOPK_TRAIN,\n            False: cfg.MODEL.RPN.POST_NMS_TOPK_TEST,\n        }\n        self.boundary_threshold = cfg.MODEL.RPN.BOUNDARY_THRESH\n\n        self.anchor_generator = build_anchor_generator(\n            cfg, [input_shape[f] for f in self.in_features]\n        )\n        self.box2box_transform = Box2BoxTransform(weights=cfg.MODEL.RPN.BBOX_REG_WEIGHTS)\n        self.anchor_matcher = Matcher(\n            cfg.MODEL.RPN.IOU_THRESHOLDS, cfg.MODEL.RPN.IOU_LABELS, allow_low_quality_matches=True\n        )\n        self.rpn_head = build_rpn_head(cfg, [input_shape[f] for f in self.in_features])\n\n    def forward(self, images, features, gt_instances=None):\n        \"\"\"\n        Args:\n  
          images (ImageList): input images of length `N`\n            features (dict[str: Tensor]): input data as a mapping from feature\n                map name to tensor. Axis 0 represents the number of images `N` in\n                the input data; axes 1-3 are channels, height, and width, which may\n                vary between feature maps (e.g., if a feature pyramid is used).\n            gt_instances (list[Instances], optional): a length `N` list of `Instances`s.\n                Each `Instances` stores ground-truth instances for the corresponding image.\n\n        Returns:\n            proposals: list[Instances] or None\n            loss: dict[Tensor]\n        \"\"\"\n        gt_boxes = [x.gt_boxes for x in gt_instances] if gt_instances is not None else None\n        del gt_instances\n        features = [features[f] for f in self.in_features]\n        pred_objectness_logits, pred_anchor_deltas = self.rpn_head(features)\n        anchors = self.anchor_generator(features) # Real anchors\n        # TODO: The anchors only depend on the feature map shape; there's probably\n        # an opportunity for some optimizations (e.g., caching anchors).\n        outputs = RPNOutputs(\n            self.box2box_transform,\n            self.anchor_matcher,\n            self.batch_size_per_image,\n            self.positive_fraction,\n            images,\n            pred_objectness_logits,\n            pred_anchor_deltas,\n            anchors,\n            self.boundary_threshold,\n            gt_boxes,\n            self.smooth_l1_beta,\n        )\n\n        if self.training:\n            losses = {k: v * self.loss_weight for k, v in outputs.losses().items()}\n        else:\n            losses = {}\n\n        with torch.no_grad():\n            # Find the top proposals by applying NMS and removing boxes that\n            # are too small. The proposals are treated as fixed for approximate\n            # joint training with roi heads. 
This approach ignores the derivative\n            # w.r.t. the proposal boxes’ coordinates that are also network\n            # responses, so is approximate.\n            proposals = find_top_rpn_proposals(\n                outputs.predict_proposals(),\n                outputs.predict_objectness_logits(),\n                images,\n                self.nms_thresh,\n                self.pre_nms_topk[self.training],\n                self.post_nms_topk[self.training],\n                self.min_box_side_len,\n                self.training,\n            )\n            # For RPN-only models, the proposals are the final output and we return them in\n            # high-to-low confidence order.\n            # For end-to-end models, the RPN proposals are an intermediate state\n            # and this sorting is actually not needed. But the cost is negligible.\n            inds = [p.objectness_logits.sort(descending=True)[1] for p in proposals]\n            proposals = [p[ind] for p, ind in zip(proposals, inds)]\n\n        return proposals, losses\n"
  },
  {
    "path": "detectron2/modeling/proposal_generator/rpn_outputs.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport itertools\nimport logging\nimport numpy as np\nimport torch\nimport torch.nn.functional as F\nfrom fvcore.nn import smooth_l1_loss\n\nfrom detectron2.layers import batched_nms, cat\nfrom detectron2.structures import Boxes, Instances, pairwise_iou\nfrom detectron2.utils.events import get_event_storage\n\nfrom ..sampling import subsample_labels\n\nlogger = logging.getLogger(__name__)\n\n# TODO: comments for future refactoring of this module\n#\n# From @rbg:\n# This code involves a significant amount of tensor reshaping and permuting. Look for\n# ways to simplify this.\n\n\"\"\"\nShape shorthand in this module:\n\n    N: number of images in the minibatch\n    L: number of feature maps per image on which RPN is run\n    A: number of cell anchors (must be the same for all feature maps)\n    Hi, Wi: height and width of the i-th feature map\n    4: size of the box parameterization\n\nNaming convention:\n\n    objectness: refers to the binary classification of an anchor as object vs. not\n    object.\n\n    deltas: refers to the 4-d (dx, dy, dw, dh) deltas that parameterize the box2box\n    transform (see :class:`box_regression.Box2BoxTransform`).\n\n    pred_objectness_logits: predicted objectness scores in [-inf, +inf]; use\n        sigmoid(pred_objectness_logits) to estimate P(object).\n\n    gt_objectness_logits: ground-truth binary classification labels for objectness\n\n    pred_anchor_deltas: predicted box2box transform deltas\n\n    gt_anchor_deltas: ground-truth box2box transform deltas\n\"\"\"\n\n\ndef find_top_rpn_proposals(\n    proposals,\n    pred_objectness_logits,\n    images,\n    nms_thresh,\n    pre_nms_topk,\n    post_nms_topk,\n    min_box_side_len,\n    training,\n):\n    \"\"\"\n    For each feature map, select the `pre_nms_topk` highest scoring proposals,\n    apply NMS, clip proposals, and remove small boxes. 
Return the `post_nms_topk`\n    highest scoring proposals among all the feature maps if `training` is True,\n    otherwise, returns the highest `post_nms_topk` scoring proposals for each\n    feature map.\n\n    Args:\n        proposals (list[Tensor]): A list of L tensors. Tensor i has shape (N, Hi*Wi*A, 4).\n            All proposal predictions on the feature maps.\n        pred_objectness_logits (list[Tensor]): A list of L tensors. Tensor i has shape (N, Hi*Wi*A).\n        images (ImageList): Input images as an :class:`ImageList`.\n        nms_thresh (float): IoU threshold to use for NMS\n        pre_nms_topk (int): number of top k scoring proposals to keep before applying NMS.\n            When RPN is run on multiple feature maps (as in FPN) this number is per\n            feature map.\n        post_nms_topk (int): number of top k scoring proposals to keep after applying NMS.\n            When RPN is run on multiple feature maps (as in FPN) this number is total,\n            over all feature maps.\n        min_box_side_len (float): minimum proposal box side length in pixels (absolute units\n            wrt input images).\n        training (bool): True if proposals are to be used in training, otherwise False.\n            This arg exists only to support a legacy bug; look for the \"NB: Legacy bug ...\"\n            comment.\n\n    Returns:\n        proposals (list[Instances]): list of N Instances. The i-th Instances\n            stores post_nms_topk object proposals for image i.\n    \"\"\"\n    image_sizes = images.image_sizes  # in (h, w) order\n    num_images = len(image_sizes)\n    device = proposals[0].device\n\n    # 1. 
Select top-k anchor for every level and every image\n    topk_scores = []  # #lvl Tensor, each of shape N x topk\n    topk_proposals = []\n    level_ids = []  # #lvl Tensor, each of shape (topk,)\n    batch_idx = torch.arange(num_images, device=device)\n    for level_id, proposals_i, logits_i in zip(\n        itertools.count(), proposals, pred_objectness_logits\n    ):\n        Hi_Wi_A = logits_i.shape[1]\n        num_proposals_i = min(pre_nms_topk, Hi_Wi_A)\n\n        # sort is faster than topk (https://github.com/pytorch/pytorch/issues/22812)\n        # topk_scores_i, topk_idx = logits_i.topk(num_proposals_i, dim=1)\n        logits_i, idx = logits_i.sort(descending=True, dim=1)\n        topk_scores_i = logits_i[batch_idx, :num_proposals_i]\n        topk_idx = idx[batch_idx, :num_proposals_i]\n\n        # each is N x topk\n        topk_proposals_i = proposals_i[batch_idx[:, None], topk_idx]  # N x topk x 4\n\n        topk_proposals.append(topk_proposals_i)\n        topk_scores.append(topk_scores_i)\n        level_ids.append(torch.full((num_proposals_i,), level_id, dtype=torch.int64, device=device))\n\n    # 2. Concat all levels together\n    topk_scores = cat(topk_scores, dim=1)\n    topk_proposals = cat(topk_proposals, dim=1)\n    level_ids = cat(level_ids, dim=0)\n\n    # 3. For each image, run a per-level NMS, and choose topk results.\n    results = []\n    for n, image_size in enumerate(image_sizes):\n        boxes = Boxes(topk_proposals[n])\n        scores_per_img = topk_scores[n]\n        boxes.clip(image_size)\n\n        # filter empty boxes\n        keep = boxes.nonempty(threshold=min_box_side_len)\n        lvl = level_ids\n        if keep.sum().item() != len(boxes):\n            boxes, scores_per_img, lvl = boxes[keep], scores_per_img[keep], level_ids[keep]\n\n        keep = batched_nms(boxes.tensor, scores_per_img, lvl, nms_thresh)\n        # In Detectron1, there was different behavior during training vs. 
testing.\n        # (https://github.com/facebookresearch/Detectron/issues/459)\n        # During training, topk is over the proposals from *all* images in the training batch.\n        # During testing, it is over the proposals for each image separately.\n        # As a result, the training behavior becomes batch-dependent,\n        # and the configuration \"POST_NMS_TOPK_TRAIN\" end up relying on the batch size.\n        # This bug is addressed in Detectron2 to make the behavior independent of batch size.\n        keep = keep[:post_nms_topk]\n\n        res = Instances(image_size)\n        res.proposal_boxes = boxes[keep]\n        res.objectness_logits = scores_per_img[keep]\n        results.append(res)\n    return results\n\n\ndef rpn_losses(\n    gt_objectness_logits,\n    gt_anchor_deltas,\n    pred_objectness_logits,\n    pred_anchor_deltas,\n    smooth_l1_beta,\n):\n    \"\"\"\n    Args:\n        gt_objectness_logits (Tensor): shape (N,), each element in {-1, 0, 1} representing\n            ground-truth objectness labels with: -1 = ignore; 0 = not object; 1 = object.\n        gt_anchor_deltas (Tensor): shape (N, box_dim), row i represents ground-truth\n            box2box transform targets (dx, dy, dw, dh) or (dx, dy, dw, dh, da) that map anchor i to\n            its matched ground-truth box.\n        pred_objectness_logits (Tensor): shape (N,), each element is a predicted objectness\n            logit.\n        pred_anchor_deltas (Tensor): shape (N, box_dim), each row is a predicted box2box\n            transform (dx, dy, dw, dh) or (dx, dy, dw, dh, da)\n        smooth_l1_beta (float): The transition point between L1 and L2 loss in\n            the smooth L1 loss function. When set to 0, the loss becomes L1. 
When\n            set to +inf, the loss becomes constant 0.\n\n    Returns:\n        objectness_loss, localization_loss, both unnormalized (summed over samples).\n    \"\"\"\n    pos_masks = gt_objectness_logits == 1\n    localization_loss = smooth_l1_loss(\n        pred_anchor_deltas[pos_masks], gt_anchor_deltas[pos_masks], smooth_l1_beta, reduction=\"sum\"\n    )\n\n    valid_masks = gt_objectness_logits >= 0\n    objectness_loss = F.binary_cross_entropy_with_logits(\n        pred_objectness_logits[valid_masks],\n        gt_objectness_logits[valid_masks].to(torch.float32),\n        reduction=\"sum\",\n    )\n    return objectness_loss, localization_loss\n\n\nclass RPNOutputs(object):\n    def __init__(\n        self,\n        box2box_transform,\n        anchor_matcher,\n        batch_size_per_image,\n        positive_fraction,\n        images,\n        pred_objectness_logits,\n        pred_anchor_deltas,\n        anchors,\n        boundary_threshold=0,\n        gt_boxes=None,\n        smooth_l1_beta=0.0,\n    ):\n        \"\"\"\n        Args:\n            box2box_transform (Box2BoxTransform): :class:`Box2BoxTransform` instance for\n                anchor-proposal transformations.\n            anchor_matcher (Matcher): :class:`Matcher` instance for matching anchors to\n                ground-truth boxes; used to determine training labels.\n            batch_size_per_image (int): number of proposals to sample when training\n            positive_fraction (float): target fraction of sampled proposals that should be positive\n            images (ImageList): :class:`ImageList` instance representing N input images\n            pred_objectness_logits (list[Tensor]): A list of L elements.\n                Element i is a tensor of shape (N, A, Hi, Wi) representing\n                the predicted objectness logits for anchors.\n            pred_anchor_deltas (list[Tensor]): A list of L elements. 
Element i is a tensor of shape\n                (N, A*4, Hi, Wi) representing the predicted \"deltas\" used to transform anchors\n                to proposals.\n            anchors (list[list[Boxes]]): A list of N elements. Each element is a list of L\n                Boxes. The Boxes at (n, l) stores the entire anchor array for feature map l in image\n                n (i.e. the cell anchors repeated over all locations in feature map (n, l)).\n            boundary_threshold (int): if >= 0, then anchors that extend beyond the image\n                boundary by more than boundary_thresh are not used in training. Set to a very large\n                number or < 0 to disable this behavior. Only needed in training.\n            gt_boxes (list[Boxes], optional): A list of N elements. Element i a Boxes storing\n                the ground-truth (\"gt\") boxes for image i.\n            smooth_l1_beta (float): The transition point between L1 and L2 loss in\n                the smooth L1 loss function. When set to 0, the loss becomes L1. When\n                set to +inf, the loss becomes constant 0.\n        \"\"\"\n        self.box2box_transform = box2box_transform\n        self.anchor_matcher = anchor_matcher\n        self.batch_size_per_image = batch_size_per_image\n        self.positive_fraction = positive_fraction\n        self.pred_objectness_logits = pred_objectness_logits\n        self.pred_anchor_deltas = pred_anchor_deltas\n\n        self.anchors = anchors\n        self.gt_boxes = gt_boxes\n        self.num_feature_maps = len(pred_objectness_logits)\n        self.num_images = len(images)\n        self.image_sizes = images.image_sizes\n        self.boundary_threshold = boundary_threshold\n        self.smooth_l1_beta = smooth_l1_beta\n\n    def _get_ground_truth(self):\n        \"\"\"\n        Returns:\n            gt_objectness_logits: list of N tensors. 
Tensor i is a vector whose length is the\n                total number of anchors in image i (i.e., len(anchors[i])). Label values are\n                in {-1, 0, 1}, with meanings: -1 = ignore; 0 = negative class; 1 = positive class.\n            gt_anchor_deltas: list of N tensors. Tensor i has shape (len(anchors[i]), 4).\n        \"\"\"\n        gt_objectness_logits = []\n        gt_anchor_deltas = []\n        # Concatenate anchors from all feature maps into a single Boxes per image\n        anchors = [Boxes.cat(anchors_i) for anchors_i in self.anchors]\n        for image_size_i, anchors_i, gt_boxes_i in zip(self.image_sizes, anchors, self.gt_boxes):\n            \"\"\"\n            image_size_i: (h, w) for the i-th image\n            anchors_i: anchors for i-th image\n            gt_boxes_i: ground-truth boxes for i-th image\n            \"\"\"\n            match_quality_matrix = pairwise_iou(gt_boxes_i, anchors_i)\n            matched_idxs, gt_objectness_logits_i = self.anchor_matcher(match_quality_matrix)\n\n            if self.boundary_threshold >= 0:\n                # Discard anchors that go out of the boundaries of the image\n                # NOTE: This is legacy functionality that is turned off by default in Detectron2\n                anchors_inside_image = anchors_i.inside_box(image_size_i, self.boundary_threshold)\n                gt_objectness_logits_i[~anchors_inside_image] = -1\n\n            if len(gt_boxes_i) == 0:\n                # These values won't be used anyway since the anchor is labeled as background\n                gt_anchor_deltas_i = torch.zeros_like(anchors_i.tensor)\n            else:\n                # TODO wasted computation for ignored boxes\n                matched_gt_boxes = gt_boxes_i[matched_idxs]\n                gt_anchor_deltas_i = self.box2box_transform.get_deltas(\n                    anchors_i.tensor, matched_gt_boxes.tensor\n                )\n\n            gt_objectness_logits.append(gt_objectness_logits_i)\n         
   gt_anchor_deltas.append(gt_anchor_deltas_i)\n\n        return gt_objectness_logits, gt_anchor_deltas\n\n    def losses(self):\n        \"\"\"\n        Return the losses from a set of RPN predictions and their associated ground-truth.\n\n        Returns:\n            dict[loss name -> loss value]: A dict mapping from loss name to loss value.\n                Loss names are: `loss_rpn_cls` for objectness classification and\n                `loss_rpn_loc` for proposal localization.\n        \"\"\"\n\n        def resample(label):\n            \"\"\"\n            Randomly sample a subset of positive and negative examples by overwriting\n            the label vector to the ignore value (-1) for all elements that are not\n            included in the sample.\n            \"\"\"\n            pos_idx, neg_idx = subsample_labels(\n                label, self.batch_size_per_image, self.positive_fraction, 0\n            )\n            # Fill with the ignore label (-1), then set positive and negative labels\n            label.fill_(-1)\n            label.scatter_(0, pos_idx, 1)\n            label.scatter_(0, neg_idx, 0)\n            return label\n\n        gt_objectness_logits, gt_anchor_deltas = self._get_ground_truth()\n        \"\"\"\n        gt_objectness_logits: list of N tensors. Tensor i is a vector whose length is the\n            total number of anchors in image i (i.e., len(anchors[i]))\n        gt_anchor_deltas: list of N tensors. 
Tensor i has shape (len(anchors[i]), B),\n            where B is the box dimension\n        \"\"\"\n        # Collect all objectness labels and delta targets over feature maps and images\n        # The final ordering is L, N, H, W, A from slowest to fastest axis.\n        num_anchors_per_map = [np.prod(x.shape[1:]) for x in self.pred_objectness_logits]\n        num_anchors_per_image = sum(num_anchors_per_map)\n\n        # Stack to: (N, num_anchors_per_image)\n        gt_objectness_logits = torch.stack(\n            [resample(label) for label in gt_objectness_logits], dim=0\n        )\n\n        # Log the number of positive/negative anchors per-image that's used in training\n        num_pos_anchors = (gt_objectness_logits == 1).sum().item()\n        num_neg_anchors = (gt_objectness_logits == 0).sum().item()\n        storage = get_event_storage()\n        storage.put_scalar(\"rpn/num_pos_anchors\", num_pos_anchors / self.num_images)\n        storage.put_scalar(\"rpn/num_neg_anchors\", num_neg_anchors / self.num_images)\n\n        assert gt_objectness_logits.shape[1] == num_anchors_per_image\n        # Split to tuple of L tensors, each with shape (N, num_anchors_per_map)\n        gt_objectness_logits = torch.split(gt_objectness_logits, num_anchors_per_map, dim=1)\n        # Concat from all feature maps\n        gt_objectness_logits = cat([x.flatten() for x in gt_objectness_logits], dim=0)\n\n        # Stack to: (N, num_anchors_per_image, B)\n        gt_anchor_deltas = torch.stack(gt_anchor_deltas, dim=0)\n        assert gt_anchor_deltas.shape[1] == num_anchors_per_image\n        B = gt_anchor_deltas.shape[2]  # box dimension (4 or 5)\n\n        # Split to tuple of L tensors, each with shape (N, num_anchors_per_image)\n        gt_anchor_deltas = torch.split(gt_anchor_deltas, num_anchors_per_map, dim=1)\n        # Concat from all feature maps\n        gt_anchor_deltas = cat([x.reshape(-1, B) for x in gt_anchor_deltas], dim=0)\n\n        # Collect all objectness logits 
and delta predictions over feature maps\n        # and images to arrive at the same shape as the labels and targets\n        # The final ordering is L, N, H, W, A from slowest to fastest axis.\n        pred_objectness_logits = cat(\n            [\n                # Reshape: (N, A, Hi, Wi) -> (N, Hi, Wi, A) -> (N*Hi*Wi*A, )\n                x.permute(0, 2, 3, 1).flatten()\n                for x in self.pred_objectness_logits\n            ],\n            dim=0,\n        )\n        pred_anchor_deltas = cat(\n            [\n                # Reshape: (N, A*B, Hi, Wi) -> (N, A, B, Hi, Wi) -> (N, Hi, Wi, A, B)\n                #          -> (N*Hi*Wi*A, B)\n                x.view(x.shape[0], -1, B, x.shape[-2], x.shape[-1])\n                .permute(0, 3, 4, 1, 2)\n                .reshape(-1, B)\n                for x in self.pred_anchor_deltas\n            ],\n            dim=0,\n        )\n\n        objectness_loss, localization_loss = rpn_losses(\n            gt_objectness_logits,\n            gt_anchor_deltas,\n            pred_objectness_logits,\n            pred_anchor_deltas,\n            self.smooth_l1_beta,\n        )\n        normalizer = 1.0 / (self.batch_size_per_image * self.num_images)\n        loss_cls = objectness_loss * normalizer  # cls: classification loss\n        loss_loc = localization_loss * normalizer  # loc: localization loss\n        losses = {\"loss_rpn_cls\": loss_cls, \"loss_rpn_loc\": loss_loc}\n\n        return losses\n\n    def predict_proposals(self):\n        \"\"\"\n        Transform anchors into proposals by applying the predicted anchor deltas.\n        \n        anchors (list[list[Boxes]]): A list of N elements. Each element is a list of L\n                Boxes. The Boxes at (n, l) stores the entire anchor array for feature map l in image\n                n (i.e. the cell anchors repeated over all locations in feature map (n, l)).\n\n        Returns:\n            proposals (list[Tensor]): A list of L tensors. 
Tensor i has shape\n                (N, Hi*Wi*A, B), where B is box dimension (4 or 5).\n        \"\"\"\n        proposals = []\n        # Transpose anchors from images-by-feature-maps (N, L) to feature-maps-by-images (L, N)\n        anchors = list(zip(*self.anchors))\n        # For each feature map\n        for anchors_i, pred_anchor_deltas_i in zip(anchors, self.pred_anchor_deltas):\n            B = anchors_i[0].tensor.size(1)\n            N, _, Hi, Wi = pred_anchor_deltas_i.shape\n            # Reshape: (N, A*B, Hi, Wi) -> (N, A, B, Hi, Wi) -> (N, Hi, Wi, A, B) -> (N*Hi*Wi*A, B)\n            pred_anchor_deltas_i = (\n                pred_anchor_deltas_i.view(N, -1, B, Hi, Wi).permute(0, 3, 4, 1, 2).reshape(-1, B)\n            )\n            # Concatenate all anchors to shape (N*Hi*Wi*A, B)\n            # type(anchors_i[0]) is Boxes (B = 4) or RotatedBoxes (B = 5)\n            anchors_i = type(anchors_i[0]).cat(anchors_i)\n            proposals_i = self.box2box_transform.apply_deltas(\n                pred_anchor_deltas_i, anchors_i.tensor\n            )\n            # Append feature map proposals with shape (N, Hi*Wi*A, B)\n            proposals.append(proposals_i.view(N, -1, B))\n        return proposals\n\n    def predict_objectness_logits(self):\n        \"\"\"\n        Return objectness logits in the same format as the proposals returned by\n        :meth:`predict_proposals`.\n\n        Returns:\n            pred_objectness_logits (list[Tensor]): A list of L tensors. Tensor i has shape\n                (N, Hi*Wi*A).\n        \"\"\"\n        pred_objectness_logits = [\n            # Reshape: (N, A, Hi, Wi) -> (N, Hi, Wi, A) -> (N, Hi*Wi*A)\n            score.permute(0, 2, 3, 1).reshape(self.num_images, -1)\n            for score in self.pred_objectness_logits\n        ]\n        return pred_objectness_logits\n"
  },
  {
    "path": "detectron2/modeling/proposal_generator/rrpn.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport logging\nfrom typing import Dict\nimport torch\n\nfrom detectron2.layers import ShapeSpec\n\nfrom ..box_regression import Box2BoxTransformRotated\nfrom .build import PROPOSAL_GENERATOR_REGISTRY\nfrom .rpn import RPN\nfrom .rrpn_outputs import RRPNOutputs, find_top_rrpn_proposals\n\nlogger = logging.getLogger(__name__)\n\n\n@PROPOSAL_GENERATOR_REGISTRY.register()\nclass RRPN(RPN):\n    \"\"\"\n    Rotated RPN subnetwork.\n    Please refer to https://arxiv.org/pdf/1703.01086.pdf for the original RRPN paper:\n    Ma, J., Shao, W., Ye, H., Wang, L., Wang, H., Zheng, Y., & Xue, X. (2018).\n    Arbitrary-oriented scene text detection via rotation proposals.\n    IEEE Transactions on Multimedia, 20(11), 3111-3122.\n    \"\"\"\n\n    def __init__(self, cfg, input_shape: Dict[str, ShapeSpec]):\n        super().__init__(cfg, input_shape)\n        self.box2box_transform = Box2BoxTransformRotated(weights=cfg.MODEL.RPN.BBOX_REG_WEIGHTS)\n\n    def forward(self, images, features, gt_instances=None):\n        \"\"\"\n        Args:\n            images (ImageList): input images of length `N`\n            features (dict[str: Tensor]): input data as a mapping from feature\n                map name to tensor. 
Axis 0 represents the number of images `N` in\n                the input data; axes 1-3 are channels, height, and width, which may\n                vary between feature maps (e.g., if a feature pyramid is used).\n            gt_instances (list[Instances], optional): a length `N` list of `Instances`s.\n                Each `Instances` stores ground-truth instances for the corresponding image.\n\n        Returns:\n            proposals: list[Instances] or None\n            loss: dict[Tensor]\n        \"\"\"\n        gt_boxes = [x.gt_boxes for x in gt_instances] if gt_instances is not None else None\n        del gt_instances\n        features = [features[f] for f in self.in_features]\n        pred_objectness_logits, pred_anchor_deltas = self.rpn_head(features)\n        anchors = self.anchor_generator(features)\n\n        outputs = RRPNOutputs(\n            self.box2box_transform,\n            self.anchor_matcher,\n            self.batch_size_per_image,\n            self.positive_fraction,\n            images,\n            pred_objectness_logits,\n            pred_anchor_deltas,\n            anchors,\n            self.boundary_threshold,\n            gt_boxes,\n            self.smooth_l1_beta,\n        )\n\n        if self.training:\n            losses = outputs.losses()\n        else:\n            losses = {}\n\n        with torch.no_grad():\n            # Find the top proposals by applying NMS and removing boxes that\n            # are too small. The proposals are treated as fixed for approximate\n            # joint training with roi heads. This approach ignores the derivative\n            # w.r.t. 
the proposal boxes’ coordinates that are also network\n            # responses, so is approximate.\n            proposals = find_top_rrpn_proposals(\n                outputs.predict_proposals(),\n                outputs.predict_objectness_logits(),\n                images,\n                self.nms_thresh,\n                self.pre_nms_topk[self.training],\n                self.post_nms_topk[self.training],\n                self.min_box_side_len,\n                self.training,\n            )\n\n        return proposals, losses\n"
  },
  {
    "path": "detectron2/modeling/proposal_generator/rrpn_outputs.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport itertools\nimport logging\nimport torch\n\nfrom detectron2.layers import batched_nms_rotated, cat\nfrom detectron2.structures import Instances, RotatedBoxes, pairwise_iou_rotated\n\nfrom .rpn_outputs import RPNOutputs\n\nlogger = logging.getLogger(__name__)\n\n\"\"\"\nShape shorthand in this module:\n\n    N: number of images in the minibatch\n    L: number of feature maps per image on which RRPN is run\n    A: number of cell anchors (must be the same for all feature maps)\n    Hi, Wi: height and width of the i-th feature map\n    5: size of the box parameterization\n\nNaming convention:\n\n    objectness: refers to the binary classification of an anchor as object vs. not\n    object.\n\n    deltas: refers to the 5-d (dx, dy, dw, dh, da) deltas that parameterize the rotated box2box\n    transform (see :class:`box_regression.Box2BoxTransformRotated`).\n\n    pred_objectness_logits: predicted objectness scores in [-inf, +inf]; use\n        sigmoid(pred_objectness_logits) to estimate P(object).\n\n    gt_objectness_logits: ground-truth binary classification labels for objectness\n\n    pred_anchor_deltas: predicted rotated box2box transform deltas\n\n    gt_anchor_deltas: ground-truth rotated box2box transform deltas\n\"\"\"\n\n\ndef find_top_rrpn_proposals(\n    proposals,\n    pred_objectness_logits,\n    images,\n    nms_thresh,\n    pre_nms_topk,\n    post_nms_topk,\n    min_box_side_len,\n    training,\n):\n    \"\"\"\n    For each feature map, select the `pre_nms_topk` highest scoring proposals,\n    apply NMS, clip proposals, and remove small boxes. Return the `post_nms_topk`\n    highest scoring proposals among all the feature maps if `training` is True,\n    otherwise, returns the highest `post_nms_topk` scoring proposals for each\n    feature map.\n\n    Args:\n        proposals (list[Tensor]): A list of L tensors. 
Tensor i has shape (N, Hi*Wi*A, 5).\n            All proposal predictions on the feature maps.\n        pred_objectness_logits (list[Tensor]): A list of L tensors. Tensor i has shape (N, Hi*Wi*A).\n        images (ImageList): Input images as an :class:`ImageList`.\n        nms_thresh (float): IoU threshold to use for NMS\n        pre_nms_topk (int): number of top k scoring proposals to keep before applying NMS.\n            When RRPN is run on multiple feature maps (as in FPN) this number is per\n            feature map.\n        post_nms_topk (int): number of top k scoring proposals to keep after applying NMS.\n            When RRPN is run on multiple feature maps (as in FPN) this number is total,\n            over all feature maps.\n        min_box_side_len (float): minimum proposal box side length in pixels (absolute units\n            wrt input images).\n        training (bool): True if proposals are to be used in training, otherwise False.\n            This arg exists only to support a legacy bug; look for the \"NB: Legacy bug ...\"\n            comment.\n\n    Returns:\n        proposals (list[Instances]): list of N Instances. The i-th Instances\n            stores post_nms_topk object proposals for image i.\n    \"\"\"\n    image_sizes = images.image_sizes  # in (h, w) order\n    num_images = len(image_sizes)\n    device = proposals[0].device\n\n    # 1. 
Select top-k anchor for every level and every image\n    topk_scores = []  # #lvl Tensor, each of shape N x topk\n    topk_proposals = []\n    level_ids = []  # #lvl Tensor, each of shape (topk,)\n    batch_idx = torch.arange(num_images, device=device)\n    for level_id, proposals_i, logits_i in zip(\n        itertools.count(), proposals, pred_objectness_logits\n    ):\n        Hi_Wi_A = logits_i.shape[1]\n        num_proposals_i = min(pre_nms_topk, Hi_Wi_A)\n\n        # sort is faster than topk (https://github.com/pytorch/pytorch/issues/22812)\n        # topk_scores_i, topk_idx = logits_i.topk(num_proposals_i, dim=1)\n        logits_i, idx = logits_i.sort(descending=True, dim=1)\n        topk_scores_i = logits_i[batch_idx, :num_proposals_i]\n        topk_idx = idx[batch_idx, :num_proposals_i]\n\n        # each is N x topk\n        topk_proposals_i = proposals_i[batch_idx[:, None], topk_idx]  # N x topk x 5\n\n        topk_proposals.append(topk_proposals_i)\n        topk_scores.append(topk_scores_i)\n        level_ids.append(torch.full((num_proposals_i,), level_id, dtype=torch.int64, device=device))\n\n    # 2. Concat all levels together\n    topk_scores = cat(topk_scores, dim=1)\n    topk_proposals = cat(topk_proposals, dim=1)\n    level_ids = cat(level_ids, dim=0)\n\n    # 3. For each image, run a per-level NMS, and choose topk results.\n    results = []\n    for n, image_size in enumerate(image_sizes):\n        boxes = RotatedBoxes(topk_proposals[n])\n        scores_per_img = topk_scores[n]\n        boxes.clip(image_size)\n\n        # filter empty boxes\n        keep = boxes.nonempty(threshold=min_box_side_len)\n        lvl = level_ids\n        if keep.sum().item() != len(boxes):\n            boxes, scores_per_img, lvl = (boxes[keep], scores_per_img[keep], level_ids[keep])\n\n        keep = batched_nms_rotated(boxes.tensor, scores_per_img, lvl, nms_thresh)\n        # In Detectron1, there was different behavior during training vs. 
testing.\n        # (https://github.com/facebookresearch/Detectron/issues/459)\n        # During training, topk is over the proposals from *all* images in the training batch.\n        # During testing, it is over the proposals for each image separately.\n        # As a result, the training behavior becomes batch-dependent,\n        # and the configuration \"POST_NMS_TOPK_TRAIN\" end up relying on the batch size.\n        # This bug is addressed in Detectron2 to make the behavior independent of batch size.\n        keep = keep[:post_nms_topk]\n\n        res = Instances(image_size)\n        res.proposal_boxes = boxes[keep]\n        res.objectness_logits = scores_per_img[keep]\n        results.append(res)\n    return results\n\n\nclass RRPNOutputs(RPNOutputs):\n    def __init__(\n        self,\n        box2box_transform,\n        anchor_matcher,\n        batch_size_per_image,\n        positive_fraction,\n        images,\n        pred_objectness_logits,\n        pred_anchor_deltas,\n        anchors,\n        boundary_threshold=0,\n        gt_boxes=None,\n        smooth_l1_beta=0.0,\n    ):\n        \"\"\"\n        Args:\n            box2box_transform (Box2BoxTransformRotated): :class:`Box2BoxTransformRotated`\n                instance for anchor-proposal transformations.\n            anchor_matcher (Matcher): :class:`Matcher` instance for matching anchors to\n                ground-truth boxes; used to determine training labels.\n            batch_size_per_image (int): number of proposals to sample when training\n            positive_fraction (float): target fraction of sampled proposals that should be positive\n            images (ImageList): :class:`ImageList` instance representing N input images\n            pred_objectness_logits (list[Tensor]): A list of L elements.\n                Element i is a tensor of shape (N, A, Hi, Wi) representing\n                the predicted objectness logits for anchors.\n            pred_anchor_deltas (list[Tensor]): A list of L 
elements. Element i is a tensor of shape\n                (N, A*5, Hi, Wi) representing the predicted \"deltas\" used to transform anchors\n                to proposals.\n            anchors (list[list[RotatedBoxes]]): A list of N elements. Each element is a list of L\n                RotatedBoxes. The RotatedBoxes at (n, l) stores the entire anchor array for\n                feature map l in image n (i.e. the cell anchors repeated over all locations in\n                feature map (n, l)).\n            boundary_threshold (int): if >= 0, then anchors that extend beyond the image\n                boundary by more than boundary_thresh are not used in training. Set to a very large\n                number or < 0 to disable this behavior. Only needed in training.\n            gt_boxes (list[RotatedBoxes], optional): A list of N elements. Element i a RotatedBoxes\n                storing the ground-truth (\"gt\") rotated boxes for image i.\n            smooth_l1_beta (float): The transition point between L1 and L2 loss in\n                the smooth L1 loss function. When set to 0, the loss becomes L1. When\n                set to +inf, the loss becomes constant 0.\n        \"\"\"\n        super(RRPNOutputs, self).__init__(\n            box2box_transform,\n            anchor_matcher,\n            batch_size_per_image,\n            positive_fraction,\n            images,\n            pred_objectness_logits,\n            pred_anchor_deltas,\n            anchors,\n            boundary_threshold,\n            gt_boxes,\n            smooth_l1_beta,\n        )\n\n    def _get_ground_truth(self):\n        \"\"\"\n        Returns:\n            gt_objectness_logits: list of N tensors. Tensor i is a vector whose length is the\n                total number of anchors in image i (i.e., len(anchors[i])). Label values are\n                in {-1, 0, 1}, with meanings: -1 = ignore; 0 = negative class; 1 = positive class.\n            gt_anchor_deltas: list of N tensors. 
Tensor i has shape (len(anchors[i]), 5).\n        \"\"\"\n        gt_objectness_logits = []\n        gt_anchor_deltas = []\n        # Concatenate anchors from all feature maps into a single RotatedBoxes per image\n        anchors = [RotatedBoxes.cat(anchors_i) for anchors_i in self.anchors]\n        for image_size_i, anchors_i, gt_boxes_i in zip(self.image_sizes, anchors, self.gt_boxes):\n            \"\"\"\n            image_size_i: (h, w) for the i-th image\n            anchors_i: anchors for i-th image\n            gt_boxes_i: ground-truth boxes for i-th image\n            \"\"\"\n            match_quality_matrix = pairwise_iou_rotated(gt_boxes_i, anchors_i)\n            matched_idxs, gt_objectness_logits_i = self.anchor_matcher(match_quality_matrix)\n\n            if self.boundary_threshold >= 0:\n                # Discard anchors that go out of the boundaries of the image\n                # NOTE: This is legacy functionality that is turned off by default in Detectron2\n                anchors_inside_image = anchors_i.inside_box(image_size_i, self.boundary_threshold)\n                gt_objectness_logits_i[~anchors_inside_image] = -1\n\n            if len(gt_boxes_i) == 0:\n                # These values won't be used anyway since the anchor is labeled as background\n                gt_anchor_deltas_i = torch.zeros_like(anchors_i.tensor)\n            else:\n                # TODO wasted computation for ignored boxes\n                matched_gt_boxes = gt_boxes_i[matched_idxs]\n                gt_anchor_deltas_i = self.box2box_transform.get_deltas(\n                    anchors_i.tensor, matched_gt_boxes.tensor\n                )\n\n            gt_objectness_logits.append(gt_objectness_logits_i)\n            gt_anchor_deltas.append(gt_anchor_deltas_i)\n\n        return gt_objectness_logits, gt_anchor_deltas\n"
  },
  {
    "path": "detectron2/modeling/roi_heads/LISA_rcnn.py",
    "content": "#  Copyright (c) Tianyu Wang. All Rights Reserved.\nimport torch\nfrom torch import nn\nfrom torch.autograd.function import Function\n\nfrom detectron2.layers import ShapeSpec\nfrom detectron2.structures import Boxes, Instances, RotatedBoxes, pairwise_iou, pairwise_iou_rotated\nfrom detectron2.utils.events import get_event_storage\nfrom detectron2.utils.registry import Registry\n\nfrom detectron2.modeling.matcher import Matcher\nfrom detectron2.modeling.poolers import ROIPooler\nfrom detectron2.modeling import ROI_HEADS_REGISTRY, StandardROIHeads\nfrom detectron2.modeling.roi_heads.roi_heads import Res5ROIHeads\nfrom detectron2.modeling.roi_heads.box_head import build_box_head\nfrom detectron2.modeling.box_regression import Box2BoxTransform\nfrom detectron2.modeling.roi_heads.fast_rcnn import FastRCNNOutputLayers, FastRCNNOutputs, fast_rcnn_inference,LightdirectionOutputLayer\nfrom detectron2.modeling.proposal_generator.proposal_utils import add_ground_truth_to_proposals\nfrom detectron2.modeling.sampling import subsample_labels\nimport numpy as np\n\n# class\n@ROI_HEADS_REGISTRY.register()\nclass RelationROIHeads(StandardROIHeads):\n    pass\n    \"\"\"\n    This class is used by Relation RPN.\n    \"\"\"\n\n    # def __init__(self, cfg, input_shape: Dict[str, ShapeSpec]):\n    #     super(RelationROIHeads, self).__init__(cfg,input_shape)\n    #     pass\n        \n\n    def _init_box_head(self,cfg):\n        \n        pooler_resolution        = cfg.MODEL.ROI_BOX_HEAD.POOLER_RESOLUTION\n        pooler_scales            = tuple(1.0 / self.feature_strides[k] for k in self.in_features)\n        sampling_ratio           = cfg.MODEL.ROI_BOX_HEAD.POOLER_SAMPLING_RATIO\n        pooler_type              = cfg.MODEL.ROI_BOX_HEAD.POOLER_TYPE\n\n        in_channels = [self.feature_channels[f] for f in self.in_features]\n        # Check all channel counts are equal\n        assert len(set(in_channels)) == 1, in_channels\n        in_channels = 
in_channels[0]\n\n        self.box_pooler = ROIPooler(\n            output_size=pooler_resolution,\n            scales=pooler_scales,\n            sampling_ratio=sampling_ratio,\n            pooler_type=pooler_type,\n        )\n\n        pooled_shape = ShapeSpec(\n            channels=in_channels, width=pooler_resolution, height=pooler_resolution\n        )\n        self.relation_box_head = build_box_head(cfg,pooled_shape)\n        self.relation_box_predictor = FastRCNNOutputLayers(\n                    self.relation_box_head.output_size, self.num_classes -1 , cls_agnostic_bbox_reg = False\n                )\n        self.box_head = build_box_head(cfg,pooled_shape)\n\n        self.box_predictor = FastRCNNOutputLayers(\n                    self.box_head.output_size, self.num_classes, cls_agnostic_bbox_reg = False\n                )\n        self.light_direction_head = build_box_head(cfg,pooled_shape)\n\n        self.light_direction_predictor = LightdirectionOutputLayer(\n            self.light_direction_head.output_size)\n\n    def forward(self,images, features, relation_proposals, proposals, relation_targets=None, targets=None):\n        del images\n        if self.training:\n            relation_proposals = self.label_and_sample_proposals(relation_proposals, relation_targets, True)\n            proposals = self.label_and_sample_proposals(proposals,targets)\n        del targets\n        del relation_targets\n\n        features_list = [features[f] for f in self.in_features]\n        if self.training:\n            losses = self._forward_relation_box(features_list,relation_proposals)\n            losses.update(self._forward_box(features_list, proposals))\n            # During training the proposals used by the box head are\n            # used by the mask, keypoint (and densepose) heads.\n            losses.update(self._forward_mask(features_list, proposals))\n            losses.update(self._forward_keypoint(features_list, relation_proposals))\n            return 
proposals, losses\n        else:\n            \n            pred_instances = self._forward_box(features_list, proposals)\n            pred_instances = self.forward_with_given_boxes(features, pred_instances)\n            pred_relations = self._forward_relation_box(features_list,proposals)\n            # During inference cascaded prediction is used: the mask and keypoints heads are only\n            # applied to the top scoring box detections.\n            \n            # pred_relations = self.forward_with_given_relation_boxes(features,pred_relations)\n            return pred_instances, pred_relations ,{}\n    \n    \n    \n    # def forward_with_given_relation_boxes(self,feature, instances):\n\n    #     assert not self.training\n    #     assert instances[0].has(\"pred_boxes\") and instances[0].has(\"pred_classes\")\n    #     features = [feature[f] for f in self.in_features]\n\n    #     # instances = self._forward_mask(features, instances)\n    #     instances = self._forward_keypoint(features, instances)\n    #     return instances\n\n\n\n    def _forward_relation_box(self, features, relation_proposals):\n        box_features = self.box_pooler(features, [x.proposal_boxes for x in relation_proposals])\n        # light_features = self.box_pooler(s)\n        light_features = self.light_direction_head(box_features)\n        box_features = self.relation_box_head(box_features)\n        pred_light_direction = self.light_direction_predictor(light_features)\n        pred_class_logits, pred_proposal_deltas = self.relation_box_predictor(box_features)\n        del box_features, light_features\n\n        outputs = FastRCNNOutputs(\n            self.box2box_transform,\n            pred_class_logits,\n            pred_proposal_deltas,\n            relation_proposals,\n            self.smooth_l1_beta,\n            pred_light_direction\n        )\n        if self.training:\n            return {k+'_rela': v for k, v in outputs.losses().items()}\n        else:\n            
pred_instances, _ = outputs.inference(\n                self.test_score_thresh, self.test_nms_thresh, self.test_detections_per_img\n            )\n            # print(pred_instances)\n            return pred_instances\n        \n    @torch.no_grad()\n    def label_and_sample_proposals(self, proposals, targets, isrelation = False):\n        \"\"\"\n        Prepare some proposals to be used to train the ROI heads.\n        It performs box matching between `proposals` and `targets`, and assigns\n        training labels to the proposals.\n        It returns `self.batch_size_per_image` random samples from proposals and groundtruth boxes,\n        with a fraction of positives that is no larger than `self.positive_sample_fraction.\n\n        Args:\n            See :meth:`ROIHeads.forward`\n\n        Returns:\n            list[Instances]:\n                length `N` list of `Instances`s containing the proposals\n                sampled for training. Each `Instances` has the following fields:\n                - proposal_boxes: the proposal boxes\n                - gt_boxes: the ground-truth box that the proposal is assigned to\n                  (this is only meaningful if the proposal has a label > 0; if label = 0\n                   then the ground-truth box is random)\n                Other fields such as \"gt_classes\", \"gt_masks\", that's included in `targets`.\n        \"\"\"\n        gt_boxes = [x.gt_boxes for x in targets]\n        if targets[0].has('gt_light'):\n            gt_light = [x.gt_light for x in targets]\n        # Augment proposals with ground-truth boxes.\n        # In the case of learned proposals (e.g., RPN), when training starts\n        # the proposals will be low quality due to random initialization.\n        # It's possible that none of these initial\n        # proposals have high enough overlap with the gt objects to be used\n        # as positive examples for the second stage components (box head,\n        # cls head, mask head). 
Adding the gt boxes to the set of proposals\n        # ensures that the second stage components will have some positive\n        # examples from the start of training. For RPN, this augmentation improves\n        # convergence and empirically improves box AP on COCO by about 0.5\n        # points (under one tested configuration).\n            if self.proposal_append_gt:\n                proposals = add_ground_truth_to_proposals(gt_boxes, proposals,gt_light)\n        else:\n            gt_light = None\n            if self.proposal_append_gt:\n                proposals = add_ground_truth_to_proposals(gt_boxes,proposals)\n\n\n        proposals_with_gt = []\n\n        num_fg_samples = []\n        num_bg_samples = []\n        for proposals_per_image, targets_per_image in zip(proposals, targets):\n            has_gt = len(targets_per_image) > 0\n            match_quality_matrix = pairwise_iou(\n                targets_per_image.gt_boxes, proposals_per_image.proposal_boxes\n            )\n            matched_idxs, proposals_labels = self.proposal_matcher(match_quality_matrix)\n            if isrelation:\n                num_classes = self.num_classes - 1\n            else:\n                num_classes = self.num_classes\n            # Get the corresponding GT for each proposal\n            if has_gt:\n                gt_classes = targets_per_image.gt_classes[matched_idxs]\n                # print(gt_classes)\n                # Label unmatched proposals (0 label from matcher) as background (label=num_classes)\n                \n                gt_classes[proposals_labels == 0] = num_classes\n                # Label ignore proposals (-1 label)\n                gt_classes[proposals_labels == -1] = -1\n            else:\n                gt_classes = torch.zeros_like(matched_idxs) + num_classes\n\n            sampled_fg_inds, sampled_bg_inds = subsample_labels(\n                gt_classes,\n                self.batch_size_per_image,\n                
self.positive_sample_fraction,\n                num_classes,\n            )\n\n            sampled_inds = torch.cat([sampled_fg_inds, sampled_bg_inds], dim=0)\n\n            proposals_per_image = proposals_per_image[sampled_inds]\n            proposals_per_image.gt_classes = gt_classes[sampled_inds]\n\n            # We index all the attributes of targets that start with \"gt_\"\n            # and have not been added to proposals yet (=\"gt_classes\").\n            if has_gt:\n                sampled_targets = matched_idxs[sampled_inds]\n                # NOTE: here the indexing waste some compute, because heads\n                # like masks, keypoints, etc, will filter the proposals again,\n                # (by foreground/background, or number of keypoints in the image, etc)\n                # so we essentially index the data twice.\n                for (trg_name, trg_value) in targets_per_image.get_fields().items():\n                    if trg_name.startswith(\"gt_\") and not proposals_per_image.has(trg_name):\n                        proposals_per_image.set(trg_name, trg_value[sampled_targets])\n            else:\n                gt_boxes = Boxes(\n                    targets_per_image.gt_boxes.tensor.new_zeros((len(sampled_inds), 4))\n                )\n                proposals_per_image.gt_boxes = gt_boxes\n                if gt_light != None:\n                    gt_light = Boxes(\n                        targets_per_image.gt_light.tensor.new_zeros((len(sampled_inds), 4))\n                    )\n                    proposals_per_image.gt_light = gt_light\n\n            num_fg_samples.append(sampled_fg_inds.numel())\n            num_bg_samples.append(sampled_bg_inds.numel())\n            proposals_with_gt.append(proposals_per_image)\n\n        # Log the number of fg/bg samples that are selected for training ROI heads\n        storage = get_event_storage()\n        storage.put_scalar(\"roi_head/num_fg_samples\", np.mean(num_fg_samples))\n        
storage.put_scalar(\"roi_head/num_bg_samples\", np.mean(num_bg_samples))\n\n        return proposals_with_gt"
  },
  {
    "path": "detectron2/modeling/roi_heads/__init__.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nfrom .box_head import ROI_BOX_HEAD_REGISTRY, build_box_head\nfrom .keypoint_head import ROI_KEYPOINT_HEAD_REGISTRY, build_keypoint_head\nfrom .mask_head import ROI_MASK_HEAD_REGISTRY, build_mask_head\nfrom .roi_heads import ROI_HEADS_REGISTRY, ROIHeads, StandardROIHeads, build_roi_heads\n\nfrom . import cascade_rcnn  # isort:skip\n"
  },
  {
    "path": "detectron2/modeling/roi_heads/box_head.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport numpy as np\nimport fvcore.nn.weight_init as weight_init\nimport torch\nfrom torch import nn\nfrom torch.nn import functional as F\n\nfrom detectron2.layers import Conv2d, ShapeSpec, get_norm\nfrom detectron2.utils.registry import Registry\n\nROI_BOX_HEAD_REGISTRY = Registry(\"ROI_BOX_HEAD\")\nROI_BOX_HEAD_REGISTRY.__doc__ = \"\"\"\nRegistry for box heads, which make box predictions from per-region features.\n\nThe registered object will be called with `obj(cfg, input_shape)`.\n\"\"\"\n\n\n@ROI_BOX_HEAD_REGISTRY.register()\nclass FastRCNNConvFCHead(nn.Module):\n    \"\"\"\n    A head with several 3x3 conv layers (each followed by norm & relu) and\n    several fc layers (each followed by relu).\n    \"\"\"\n\n    def __init__(self, cfg, input_shape: ShapeSpec):\n        \"\"\"\n        The following attributes are parsed from config:\n            num_conv, num_fc: the number of conv/fc layers\n            conv_dim/fc_dim: the dimension of the conv/fc layers\n            norm: normalization for the conv layers\n        \"\"\"\n        super().__init__()\n\n        # fmt: off\n        num_conv   = cfg.MODEL.ROI_BOX_HEAD.NUM_CONV\n        conv_dim   = cfg.MODEL.ROI_BOX_HEAD.CONV_DIM\n        num_fc     = cfg.MODEL.ROI_BOX_HEAD.NUM_FC\n        fc_dim     = cfg.MODEL.ROI_BOX_HEAD.FC_DIM\n        norm       = cfg.MODEL.ROI_BOX_HEAD.NORM\n        # fmt: on\n        assert num_conv + num_fc > 0\n\n        self._output_size = (input_shape.channels, input_shape.height, input_shape.width)\n\n        self.conv_norm_relus = []\n        for k in range(num_conv):\n            conv = Conv2d(\n                self._output_size[0],\n                conv_dim,\n                kernel_size=3,\n                padding=1,\n                bias=not norm,\n                norm=get_norm(norm, conv_dim),\n                activation=F.relu,\n            )\n            
self.add_module(\"conv{}\".format(k + 1), conv)\n            self.conv_norm_relus.append(conv)\n            self._output_size = (conv_dim, self._output_size[1], self._output_size[2])\n\n        self.fcs = []\n        for k in range(num_fc):\n            fc = nn.Linear(np.prod(self._output_size), fc_dim)\n            self.add_module(\"fc{}\".format(k + 1), fc)\n            self.fcs.append(fc)\n            self._output_size = fc_dim\n\n        for layer in self.conv_norm_relus:\n            weight_init.c2_msra_fill(layer)\n        for layer in self.fcs:\n            weight_init.c2_xavier_fill(layer)\n\n    def forward(self, x):\n        for layer in self.conv_norm_relus:\n            x = layer(x)\n        if len(self.fcs):\n            if x.dim() > 2:\n                x = torch.flatten(x, start_dim=1)\n            for layer in self.fcs:\n                x = F.relu(layer(x))\n        return x\n\n    @property\n    def output_size(self):\n        return self._output_size\n\n\ndef build_box_head(cfg, input_shape):\n    \"\"\"\n    Build a box head defined by `cfg.MODEL.ROI_BOX_HEAD.NAME`.\n    \"\"\"\n    name = cfg.MODEL.ROI_BOX_HEAD.NAME\n    return ROI_BOX_HEAD_REGISTRY.get(name)(cfg, input_shape)\n"
  },
  {
    "path": "detectron2/modeling/roi_heads/cascade_rcnn.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport torch\nfrom torch import nn\nfrom torch.autograd.function import Function\n\nfrom detectron2.layers import ShapeSpec\nfrom detectron2.structures import Boxes, Instances, pairwise_iou\nfrom detectron2.utils.events import get_event_storage\n\nfrom ..box_regression import Box2BoxTransform\nfrom ..matcher import Matcher\nfrom ..poolers import ROIPooler\nfrom .box_head import build_box_head\nfrom .fast_rcnn import FastRCNNOutputLayers, FastRCNNOutputs, fast_rcnn_inference\nfrom .roi_heads import ROI_HEADS_REGISTRY, StandardROIHeads\n\n\nclass _ScaleGradient(Function):\n    @staticmethod\n    def forward(ctx, input, scale):\n        ctx.scale = scale\n        return input\n\n    @staticmethod\n    def backward(ctx, grad_output):\n        return grad_output * ctx.scale, None\n\n\n@ROI_HEADS_REGISTRY.register()\nclass CascadeROIHeads(StandardROIHeads):\n    def _init_box_head(self, cfg):\n        # fmt: off\n        pooler_resolution        = cfg.MODEL.ROI_BOX_HEAD.POOLER_RESOLUTION\n        pooler_scales            = tuple(1.0 / self.feature_strides[k] for k in self.in_features)\n        sampling_ratio           = cfg.MODEL.ROI_BOX_HEAD.POOLER_SAMPLING_RATIO\n        pooler_type              = cfg.MODEL.ROI_BOX_HEAD.POOLER_TYPE\n        cascade_bbox_reg_weights = cfg.MODEL.ROI_BOX_CASCADE_HEAD.BBOX_REG_WEIGHTS\n        cascade_ious             = cfg.MODEL.ROI_BOX_CASCADE_HEAD.IOUS\n        self.num_cascade_stages  = len(cascade_ious)\n        assert len(cascade_bbox_reg_weights) == self.num_cascade_stages\n        assert cfg.MODEL.ROI_BOX_HEAD.CLS_AGNOSTIC_BBOX_REG,  \\\n            \"CascadeROIHeads only support class-agnostic regression now!\"\n        assert cascade_ious[0] == cfg.MODEL.ROI_HEADS.IOU_THRESHOLDS[0]\n        # fmt: on\n\n        in_channels = [self.feature_channels[f] for f in self.in_features]\n        # Check all channel counts are equal\n        assert 
len(set(in_channels)) == 1, in_channels\n        in_channels = in_channels[0]\n\n        self.box_pooler = ROIPooler(\n            output_size=pooler_resolution,\n            scales=pooler_scales,\n            sampling_ratio=sampling_ratio,\n            pooler_type=pooler_type,\n        )\n        pooled_shape = ShapeSpec(\n            channels=in_channels, width=pooler_resolution, height=pooler_resolution\n        )\n\n        self.box_head = nn.ModuleList()\n        self.box_predictor = nn.ModuleList()\n        self.box2box_transform = []\n        self.proposal_matchers = []\n        for k in range(self.num_cascade_stages):\n            box_head = build_box_head(cfg, pooled_shape)\n            self.box_head.append(box_head)\n            self.box_predictor.append(\n                FastRCNNOutputLayers(\n                    box_head.output_size, self.num_classes, cls_agnostic_bbox_reg=True\n                )\n            )\n            self.box2box_transform.append(Box2BoxTransform(weights=cascade_bbox_reg_weights[k]))\n\n            if k == 0:\n                # The first matching is done by the matcher of ROIHeads (self.proposal_matcher).\n                self.proposal_matchers.append(None)\n            else:\n                self.proposal_matchers.append(\n                    Matcher([cascade_ious[k]], [0, 1], allow_low_quality_matches=False)\n                )\n\n    def forward(self, images, features, proposals, targets=None):\n        del images\n        if self.training:\n            proposals = self.label_and_sample_proposals(proposals, targets)\n\n        features_list = [features[f] for f in self.in_features]\n\n        if self.training:\n            # Need targets to box head\n            losses = self._forward_box(features_list, proposals, targets)\n            losses.update(self._forward_mask(features_list, proposals))\n            losses.update(self._forward_keypoint(features_list, proposals))\n            return proposals, losses\n        else:\n     
       pred_instances = self._forward_box(features_list, proposals)\n            pred_instances = self.forward_with_given_boxes(features, pred_instances)\n            return pred_instances, {}\n\n    def _forward_box(self, features, proposals, targets=None):\n        head_outputs = []\n        image_sizes = [x.image_size for x in proposals]\n        for k in range(self.num_cascade_stages):\n            if k > 0:\n                # The output boxes of the previous stage are the input proposals of the next stage\n                proposals = self._create_proposals_from_boxes(\n                    head_outputs[-1].predict_boxes(), image_sizes\n                )\n                if self.training:\n                    proposals = self._match_and_label_boxes(proposals, k, targets)\n            head_outputs.append(self._run_stage(features, proposals, k))\n\n        if self.training:\n            losses = {}\n            storage = get_event_storage()\n            for stage, output in enumerate(head_outputs):\n                with storage.name_scope(\"stage{}\".format(stage)):\n                    stage_losses = output.losses()\n                losses.update({k + \"_stage{}\".format(stage): v for k, v in stage_losses.items()})\n            return losses\n        else:\n            # Each is a list[Tensor] of length #image. 
Each tensor is Ri x (K+1)\n            scores_per_stage = [h.predict_probs() for h in head_outputs]\n\n            # Average the scores across heads\n            scores = [\n                sum(list(scores_per_image)) * (1.0 / self.num_cascade_stages)\n                for scores_per_image in zip(*scores_per_stage)\n            ]\n            # Use the boxes of the last head\n            boxes = head_outputs[-1].predict_boxes()\n            pred_instances, _ = fast_rcnn_inference(\n                boxes,\n                scores,\n                image_sizes,\n                self.test_score_thresh,\n                self.test_nms_thresh,\n                self.test_detections_per_img,\n            )\n            return pred_instances\n\n    @torch.no_grad()\n    def _match_and_label_boxes(self, proposals, stage, targets):\n        \"\"\"\n        Match proposals with groundtruth using the matcher at the given stage.\n        Label the proposals as foreground or background based on the match.\n\n        Args:\n            proposals (list[Instances]): One Instances for each image, with\n                the field \"proposal_boxes\".\n            stage (int): the current stage\n            targets (list[Instances]): the ground truth instances\n\n        Returns:\n            list[Instances]: the same proposals, but with fields \"gt_classes\" and \"gt_boxes\"\n        \"\"\"\n        num_fg_samples, num_bg_samples = [], []\n        for proposals_per_image, targets_per_image in zip(proposals, targets):\n            match_quality_matrix = pairwise_iou(\n                targets_per_image.gt_boxes, proposals_per_image.proposal_boxes\n            )\n            # proposal_labels are 0 or 1\n            matched_idxs, proposal_labels = self.proposal_matchers[stage](match_quality_matrix)\n            if len(targets_per_image) > 0:\n                gt_classes = targets_per_image.gt_classes[matched_idxs]\n                # Label unmatched proposals (0 label from matcher) as 
background (label=num_classes)\n                gt_classes[proposal_labels == 0] = self.num_classes\n                gt_boxes = targets_per_image.gt_boxes[matched_idxs]\n            else:\n                gt_classes = torch.zeros_like(matched_idxs) + self.num_classes\n                gt_boxes = Boxes(\n                    targets_per_image.gt_boxes.tensor.new_zeros((len(proposals_per_image), 4))\n                )\n            proposals_per_image.gt_classes = gt_classes\n            proposals_per_image.gt_boxes = gt_boxes\n\n            num_fg_samples.append((proposal_labels == 1).sum().item())\n            num_bg_samples.append(proposal_labels.numel() - num_fg_samples[-1])\n\n        # Log the number of fg/bg samples in each stage\n        storage = get_event_storage()\n        storage.put_scalar(\n            \"stage{}/roi_head/num_fg_samples\".format(stage),\n            sum(num_fg_samples) / len(num_fg_samples),\n        )\n        storage.put_scalar(\n            \"stage{}/roi_head/num_bg_samples\".format(stage),\n            sum(num_bg_samples) / len(num_bg_samples),\n        )\n        return proposals\n\n    def _run_stage(self, features, proposals, stage):\n        \"\"\"\n        Args:\n            features (list[Tensor]): #lvl input features to ROIHeads\n            proposals (list[Instances]): #image Instances, with the field \"proposal_boxes\"\n            stage (int): the current stage\n\n        Returns:\n            FastRCNNOutputs: the output of this stage\n        \"\"\"\n        box_features = self.box_pooler(features, [x.proposal_boxes for x in proposals])\n        # The original implementation averages the losses among heads,\n        # but scale up the parameter gradients of the heads.\n        # This is equivalent to adding the losses among heads,\n        # but scale down the gradients on features.\n        box_features = _ScaleGradient.apply(box_features, 1.0 / self.num_cascade_stages)\n        box_features = 
self.box_head[stage](box_features)\n        pred_class_logits, pred_proposal_deltas = self.box_predictor[stage](box_features)\n        del box_features\n\n        outputs = FastRCNNOutputs(\n            self.box2box_transform[stage],\n            pred_class_logits,\n            pred_proposal_deltas,\n            proposals,\n            self.smooth_l1_beta,\n        )\n        return outputs\n\n    def _create_proposals_from_boxes(self, boxes, image_sizes):\n        \"\"\"\n        Args:\n            boxes (list[Tensor]): per-image predicted boxes, each of shape Ri x 4\n            image_sizes (list[tuple]): list of image shapes in (h, w)\n\n        Returns:\n            list[Instances]: per-image proposals with the given boxes.\n        \"\"\"\n        # Just like RPN, the proposals should not have gradients\n        boxes = [Boxes(b.detach()) for b in boxes]\n        proposals = []\n        for boxes_per_image, image_size in zip(boxes, image_sizes):\n            boxes_per_image.clip(image_size)\n            if self.training:\n                # do not filter empty boxes at inference time,\n                # because the scores from each stage need to be aligned and added later\n                boxes_per_image = boxes_per_image[boxes_per_image.nonempty()]\n            prop = Instances(image_size)\n            prop.proposal_boxes = boxes_per_image\n            proposals.append(prop)\n        return proposals\n"
  },
  {
    "path": "detectron2/modeling/roi_heads/fast_rcnn.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport logging\nimport numpy as np\nimport torch\nfrom fvcore.nn import smooth_l1_loss\nfrom torch import nn\nfrom torch.nn import functional as F\n\nfrom detectron2.layers import batched_nms, cat\nfrom detectron2.structures import Boxes, Instances\nfrom detectron2.utils.events import get_event_storage\n\nlogger = logging.getLogger(__name__)\n\n\"\"\"\nShape shorthand in this module:\n\n    N: number of images in the minibatch\n    R: number of ROIs, combined over all images, in the minibatch\n    Ri: number of ROIs in image i\n    K: number of foreground classes. E.g.,there are 80 foreground classes in COCO.\n\nNaming convention:\n\n    deltas: refers to the 4-d (dx, dy, dw, dh) deltas that parameterize the box2box\n    transform (see :class:`box_regression.Box2BoxTransform`).\n\n    pred_class_logits: predicted class scores in [-inf, +inf]; use\n        softmax(pred_class_logits) to estimate P(class).\n\n    gt_classes: ground-truth classification labels in [0, K], where [0, K) represent\n        foreground object classes and K represents the background class.\n\n    pred_proposal_deltas: predicted box2box transform deltas for transforming proposals\n        to detection box predictions.\n\n    gt_proposal_deltas: ground-truth box2box transform deltas\n\"\"\"\n\n\ndef fast_rcnn_losses(\n    gt_classes, gt_proposal_deltas, pred_class_logits, pred_proposal_deltas,smooth_l1_beta,gt_light_direction= None,pred_light_direction=None\n):\n    \"\"\"\n    When box dimension is 4:\n        Computes the classification and box delta losses defined in the Fast R-CNN paper.\n    When box dimension is 5:\n        Computes the same losses for Fast R-CNN with rotated boxes.\n\n    Args:\n        gt_classes (Tensor): A tensor of shape (R,) storing ground-truth classification\n            labels in [0, K], including K fg class and 1 bg class.\n        gt_proposal_deltas (Tensor):\n            
Shape (R, box_dim), row i represents ground-truth box2box transform targets\n            (dx, dy, dw, dh) or (dx, dy, dw, dh, da) that map object instance i to\n            its matched ground-truth box.\n        pred_class_logits (Tensor): A tensor for shape (R, K + 1) storing predicted classification\n            logits for the K+1-way classification problem. Each row corresponds to a predicted\n            object instance.\n        pred_proposal_deltas (Tensor): shape depends on whether we are doing\n            cls-agnostic or cls-specific regression, and the box dimensions.\n            When box_dim is 4:\n            1. cls-specific: Shape (R, 4 * K), each row stores a list of class-specific\n            predicted box2box transform [dx_0, dy_0, dw_0, dh_0, ..., dx_k, dy_k, dw_k, dh_k, ...]\n            for each class k in [0, K). (No predictions for the background class.)\n            2. cls-agnostic: Shape (R, 4), the second row stores the class-agnostic (foreground)\n            predicted box2box transform.\n            When box_dim is 5:\n            1. cls-specific: Shape (R, 5 * K), each row stores a list of class-specific\n            predicted rotated box2box transform\n            [dx_0, dy_0, dw_0, dh_0, da_0, ..., dx_k, dy_k, dw_k, dh_k, da_k, ...]\n            for each class k in [0, K). (No predictions for the background class.)\n            2. cls-agnostic: Shape (R, 5), the second row stores the class-agnostic (foreground)\n            predicted rotated box2box transform.\n        smooth_l1_beta (float): The transition point between L1 and L2 loss in\n            the smooth L1 loss function. When set to 0, the loss becomes L1. 
When\n            set to +inf, the loss becomes constant 0.\n\n    Returns:\n        loss_cls, loss_box_reg (Tensor): Scalar loss values.\n    \"\"\"\n    box_dim = gt_proposal_deltas.size(1)\n    cls_agnostic_bbox_reg = pred_proposal_deltas.size(1) == box_dim\n    device = pred_class_logits.device\n\n    loss_cls = F.cross_entropy(pred_class_logits, gt_classes, reduction=\"mean\")\n\n    bg_class_ind = pred_class_logits.shape[1] - 1\n\n    # Box delta loss is only computed between the prediction for the gt class k\n    # (if 0 <= k < bg_class_ind) and the target; there is no loss defined on predictions\n    # for non-gt classes and background.\n    # Empty fg_inds produces a valid loss of zero as long as the size_average\n    # arg to smooth_l1_loss is False (otherwise it uses torch.mean internally\n    # and would produce a nan loss).\n    fg_inds = torch.nonzero((gt_classes >= 0) & (gt_classes < bg_class_ind)).squeeze(1)\n    if cls_agnostic_bbox_reg:\n        # pred_proposal_deltas only corresponds to foreground class for agnostic\n        gt_class_cols = torch.arange(box_dim, device=device)\n    else:\n        fg_gt_classes = gt_classes[fg_inds]\n        # pred_proposal_deltas for class k are located in columns [b * k : b * k + b],\n        # where b is the dimension of box representation (4 or 5)\n        # Note that compared to Detectron1,\n        # we do not perform bounding box regression for background classes.\n        gt_class_cols = box_dim * fg_gt_classes[:, None] + torch.arange(box_dim, device=device)\n\n    loss_box_reg = smooth_l1_loss(\n        pred_proposal_deltas[fg_inds[:, None], gt_class_cols],\n        gt_proposal_deltas[fg_inds],\n        smooth_l1_beta,\n        reduction=\"sum\",\n    )\n\n    if type(pred_light_direction) == type(pred_proposal_deltas):\n        \n        pred_light_direction_ = pred_light_direction[fg_inds[:,None],gt_class_cols]\n        gt_light_direction_ = gt_light_direction[fg_inds]\n\n        # gt_gradient = \n      
  x2 = pred_light_direction_[:,0]\n        y2 = pred_light_direction_[:,1]\n        x1 = pred_light_direction_[:,2]\n        y1 = pred_light_direction_[:,3]\n\n        pred_angle = torch.atan2(y2-y1,x2-x1)\n        gt_angle = torch.atan2(gt_light_direction_[:,3]-gt_light_direction_[:,1],gt_light_direction_[:,2]-gt_light_direction_[:,0])\n\n        loss_light_reg = smooth_l1_loss(\n            pred_angle,\n            gt_angle,\n            smooth_l1_beta,\n            reduction='sum',\n\n        )\n        loss_light_reg =loss_light_reg/gt_classes.numel()\n    else: \n        loss_light_reg = None\n    # The loss is normalized using the total number of regions (R), not the number\n    # of foreground regions even though the box regression loss is only defined on\n    # foreground regions. Why? Because doing so gives equal training influence to\n    # each foreground example. To see how, consider two different minibatches:\n    #  (1) Contains a single foreground region\n    #  (2) Contains 100 foreground regions\n    # If we normalize by the number of foreground regions, the single example in\n    # minibatch (1) will be given 100 times as much influence as each foreground\n    # example in minibatch (2). Normalizing by the total number of regions, R,\n    # means that the single example in minibatch (1) and each of the 100 examples\n    # in minibatch (2) are given equal influence.\n    loss_box_reg = loss_box_reg / gt_classes.numel()\n\n    return loss_cls, loss_box_reg,loss_light_reg\n\n\ndef fast_rcnn_inference(boxes, scores, image_shapes, score_thresh, nms_thresh, topk_per_image,light= None):\n    \"\"\"\n    Call `fast_rcnn_inference_single_image` for all images.\n\n    Args:\n        boxes (list[Tensor]): A list of Tensors of predicted class-specific or class-agnostic\n            boxes for each image. 
Element i has shape (Ri, K * 4) if doing\n            class-specific regression, or (Ri, 4) if doing class-agnostic\n            regression, where Ri is the number of predicted objects for image i.\n            This is compatible with the output of :meth:`FastRCNNOutputs.predict_boxes`.\n        scores (list[Tensor]): A list of Tensors of predicted class scores for each image.\n            Element i has shape (Ri, K + 1), where Ri is the number of predicted objects\n            for image i. Compatible with the output of :meth:`FastRCNNOutputs.predict_probs`.\n        image_shapes (list[tuple]): A list of (width, height) tuples for each image in the batch.\n        score_thresh (float): Only return detections with a confidence score exceeding this\n            threshold.\n        nms_thresh (float):  The threshold to use for box non-maximum suppression. Value in [0, 1].\n        topk_per_image (int): The number of top scoring detections to return. Set < 0 to return\n            all detections.\n\n    Returns:\n        instances: (list[Instances]): A list of N instances, one for each image in the batch,\n            that stores the topk most confidence detections.\n        kept_indices: (list[Tensor]): A list of 1D tensor of length of N, each element indicates\n            the corresponding boxes/scores index in [0, Ri) from the input, for image i.\n    \"\"\"\n    if type(light) == type(boxes):\n        result_per_image = [\n            fast_rcnn_inference_single_image(\n                boxes_per_image, scores_per_image, image_shape, score_thresh, nms_thresh, topk_per_image,light_per_image\n            )\n            for scores_per_image, boxes_per_image, image_shape,light_per_image in zip(scores, boxes, image_shapes,light)\n        ]\n    else:\n        result_per_image = [\n            fast_rcnn_inference_single_image(\n                boxes_per_image, scores_per_image, image_shape, score_thresh, nms_thresh, topk_per_image\n            )\n            for 
scores_per_image, boxes_per_image, image_shape in zip(scores,boxes,image_shapes)\n        ]\n\n    return tuple(list(x) for x in zip(*result_per_image))\n\n\ndef fast_rcnn_inference_single_image(\n    boxes, scores, image_shape, score_thresh, nms_thresh, topk_per_image,light=None\n):\n    \"\"\"\n    Single-image inference. Return bounding-box detection results by thresholding\n    on scores and applying non-maximum suppression (NMS).\n\n    Args:\n        Same as `fast_rcnn_inference`, but with boxes, scores, and image shapes\n        per image.\n\n    Returns:\n        Same as `fast_rcnn_inference`, but for only one image.\n    \"\"\"\n    scores = scores[:, :-1]\n    num_bbox_reg_classes = boxes.shape[1] // 4\n    # Convert to Boxes to use the `clip` function ...\n    boxes = Boxes(boxes.reshape(-1, 4))\n    boxes.clip(image_shape)\n    boxes = boxes.tensor.view(-1, num_bbox_reg_classes, 4)  # R x C x 4\n\n    if type(light) == type(boxes):\n        # print(light)\n        light = Boxes(light.reshape(-1,4))\n        # light.clip(image_shape)\n        light = light.tensor.view(-1,num_bbox_reg_classes,4)\n\n    # Filter results based on detection scores\n    filter_mask = scores > score_thresh  # R x K\n    # R' x 2. 
First column contains indices of the R predictions;\n    # Second column contains indices of classes.\n    filter_inds = filter_mask.nonzero()\n    if num_bbox_reg_classes == 1:\n        boxes = boxes[filter_inds[:, 0], 0]\n        if type(light) == type(boxes):\n            light = light[filter_inds[:, 0], 0]\n    else:\n        boxes = boxes[filter_mask]\n        if type(light) == type(boxes):\n            light = light[filter_mask]\n    scores = scores[filter_mask]\n\n    # Apply per-class NMS\n    keep = batched_nms(boxes, scores, filter_inds[:, 1], nms_thresh)\n    keep = keep[:topk_per_image]\n    boxes, scores, filter_inds = boxes[keep], scores[keep], filter_inds[keep]\n    if type(light) ==  type(boxes):\n        light = light[keep]\n    result = Instances(image_shape)\n    result.pred_boxes = Boxes(boxes)\n    result.scores = scores\n    result.pred_classes = filter_inds[:, 1]\n    if type(light) == type(boxes):\n        result.pred_light = Boxes(light)\n    return result, filter_inds[:, 0]\n\n\nclass FastRCNNOutputs(object):\n    \"\"\"\n    A class that stores information about outputs of a Fast R-CNN head.\n    \"\"\"\n\n    def __init__(\n        self, box2box_transform, pred_class_logits, pred_proposal_deltas, proposals, smooth_l1_beta,pred_light_direction=None\n    ):\n        \"\"\"\n        Args:\n            box2box_transform (Box2BoxTransform/Box2BoxTransformRotated):\n                box2box transform instance for proposal-to-detection transformations.\n            pred_class_logits (Tensor): A tensor of shape (R, K + 1) storing the predicted class\n                logits for all R predicted object instances.\n            pred_proposal_deltas (Tensor): A tensor of shape (R, K * B) or (R, B) for\n                class-specific or class-agnostic storing the predicted deltas that\n                transform proposals into final box detections, where B is the box dimension (4 or 5)\n            proposals (list[Instances]): A list of N Instances, 
where Instances i stores the\n                proposals for image i, in the field \"proposal_boxes\".\n                When training, each Instances must have ground-truth labels\n                stored in the field \"gt_classes\" and \"gt_boxes\".\n            smooth_l1_beta (float): The transition point between L1 and L2 loss in\n                the smooth L1 loss function. When set to 0, the loss becomes L1. When\n                set to +inf, the loss becomes constant 0.\n        \"\"\"\n        self.box2box_transform = box2box_transform\n        self.num_preds_per_image = [len(p) for p in proposals]\n        self.pred_class_logits = pred_class_logits\n        self.pred_proposal_deltas = pred_proposal_deltas\n        self.smooth_l1_beta = smooth_l1_beta\n        self.pred_light_direction = pred_light_direction\n        box_type = type(proposals[0].proposal_boxes)\n        # cat(..., dim=0) concatenates over all images in the batch\n        self.proposals = box_type.cat([p.proposal_boxes for p in proposals])\n        assert not self.proposals.tensor.requires_grad, \"Proposals should not require gradients!\"\n        self.image_shapes = [x.image_size for x in proposals]\n\n        # The following fields should exist only when training.\n        if proposals[0].has(\"gt_boxes\"):\n            self.gt_boxes = box_type.cat([p.gt_boxes for p in proposals])\n            assert proposals[0].has(\"gt_classes\")\n            self.gt_classes = cat([p.gt_classes for p in proposals], dim=0)\n        if proposals[0].has(\"gt_light\"):\n            self.gt_light_direction = box_type.cat([p.gt_light for p in proposals])\n        else:\n            self.gt_light_direction = None\n\n    def _log_accuracy(self):\n        \"\"\"\n        Log the accuracy metrics to EventStorage.\n        \"\"\"\n        num_instances = self.gt_classes.numel()\n        pred_classes = self.pred_class_logits.argmax(dim=1)\n        bg_class_ind = self.pred_class_logits.shape[1] - 1\n\n        fg_inds = 
(self.gt_classes >= 0) & (self.gt_classes < bg_class_ind)\n        num_fg = fg_inds.nonzero().numel()\n        fg_gt_classes = self.gt_classes[fg_inds]\n        fg_pred_classes = pred_classes[fg_inds]\n\n        num_false_negative = (fg_pred_classes == bg_class_ind).nonzero().numel()\n        num_accurate = (pred_classes == self.gt_classes).nonzero().numel()\n        fg_num_accurate = (fg_pred_classes == fg_gt_classes).nonzero().numel()\n\n        storage = get_event_storage()\n        storage.put_scalar(\"fast_rcnn/cls_accuracy\", num_accurate / num_instances)\n        if num_fg > 0:\n            storage.put_scalar(\"fast_rcnn/fg_cls_accuracy\", fg_num_accurate / num_fg)\n            storage.put_scalar(\"fast_rcnn/false_negative\", num_false_negative / num_fg)\n\n    def losses(self):\n        \"\"\"\n        Returns:\n            A dict of losses (scalar tensors) containing keys \"loss_cls\" and \"loss_box_reg\".\n        \"\"\"\n        self._log_accuracy()\n        gt_proposal_deltas = self.box2box_transform.get_deltas(\n            self.proposals.tensor, self.gt_boxes.tensor\n        )\n        if self.gt_light_direction != None:\n            gt_light_direction = self.box2box_transform.trans_light(\n                self.gt_light_direction.tensor\n            )\n            # gt_light_direction = self.gt_light_direction\n        else:\n            gt_light_direction = None\n\n        loss_cls, loss_box_reg,loss_light_reg = fast_rcnn_losses(\n            self.gt_classes,\n            gt_proposal_deltas,\n            self.pred_class_logits,\n            self.pred_proposal_deltas,\n            self.smooth_l1_beta,\n            gt_light_direction,\n            self.pred_light_direction\n        )\n        if type(loss_light_reg) == type(loss_box_reg):\n            return {\"loss_cls\": loss_cls, \"loss_box_reg\": loss_box_reg,'loss_light_reg': loss_light_reg}\n        else:\n            return{\"loss_cls\":loss_cls, \"loss_box_reg\": loss_box_reg}\n\n    def 
predict_boxes(self):\n        \"\"\"\n        Returns:\n            list[Tensor]: A list of Tensors of predicted class-specific or class-agnostic boxes\n                for each image. Element i has shape (Ri, K * B) or (Ri, B), where Ri is\n                the number of predicted objects for image i and B is the box dimension (4 or 5)\n        \"\"\"\n        boxes = self.box2box_transform.apply_deltas(\n            self.pred_proposal_deltas, self.proposals.tensor\n        )\n        return boxes.split(self.num_preds_per_image, dim=0)\n    def predict_light(self,light):\n        return light.split(self.num_preds_per_image,dim=0)\n\n    def predict_probs(self):\n        \"\"\"\n        Returns:\n            list[Tensor]: A list of Tensors of predicted class probabilities for each image.\n                Element i has shape (Ri, K + 1), where Ri is the number of predicted objects\n                for image i.\n        \"\"\"\n        probs = F.softmax(self.pred_class_logits, dim=-1)\n        return probs.split(self.num_preds_per_image, dim=0)\n\n\n    def inference(self, score_thresh, nms_thresh, topk_per_image):\n        \"\"\"\n        Args:\n            score_thresh (float): same as fast_rcnn_inference.\n            nms_thresh (float): same as fast_rcnn_inference.\n            topk_per_image (int): same as fast_rcnn_inference.\n        Returns:\n            list[Instances]: same as fast_rcnn_inference.\n            list[Tensor]: same as fast_rcnn_inference.\n        \"\"\"\n        boxes = self.predict_boxes()\n        scores = self.predict_probs()\n        if type(self.pred_proposal_deltas) == type(self.pred_light_direction):\n            light = self.predict_light(self.pred_light_direction)\n        else:\n            light = None\n        image_shapes = self.image_shapes\n        return fast_rcnn_inference(\n            boxes, scores, image_shapes, score_thresh, nms_thresh, topk_per_image,light\n        )\n\n\nclass FastRCNNOutputLayers(nn.Module):\n    
\"\"\"\n    Two linear layers for predicting Fast R-CNN outputs:\n      (1) proposal-to-detection box regression deltas\n      (2) classification scores\n    \"\"\"\n\n    def __init__(self, input_size, num_classes, cls_agnostic_bbox_reg, box_dim=4):\n        \"\"\"\n        Args:\n            input_size (int): channels, or (channels, height, width)\n            num_classes (int): number of foreground classes\n            cls_agnostic_bbox_reg (bool): whether to use class agnostic for bbox regression\n            box_dim (int): the dimension of bounding boxes.\n                Example box dimensions: 4 for regular XYXY boxes and 5 for rotated XYWHA boxes\n        \"\"\"\n        super(FastRCNNOutputLayers, self).__init__()\n\n        if not isinstance(input_size, int):\n            input_size = np.prod(input_size)\n\n        # The prediction layer for num_classes foreground classes and one background class\n        # (hence + 1)\n        self.cls_score = nn.Linear(input_size, num_classes + 1)\n        num_bbox_reg_classes = 1 if cls_agnostic_bbox_reg else num_classes\n        self.bbox_pred = nn.Linear(input_size, num_bbox_reg_classes * box_dim)\n\n        nn.init.normal_(self.cls_score.weight, std=0.01)\n        nn.init.normal_(self.bbox_pred.weight, std=0.001)\n        for l in [self.cls_score, self.bbox_pred]:\n            nn.init.constant_(l.bias, 0)\n\n    def forward(self, x):\n        if x.dim() > 2:\n            x = torch.flatten(x, start_dim=1)\n        scores = self.cls_score(x)\n        proposal_deltas = self.bbox_pred(x)\n        return scores, proposal_deltas\n\nclass LightdirectionOutputLayer(nn.Module):\n\n    def __init__(self, input_size, box_dim =4):\n        super(LightdirectionOutputLayer, self).__init__()\n\n        if not isinstance(input_size, int):\n            input_size = np.prod(input_size)\n        \n        self.bbox_pred = nn.Linear(input_size, box_dim)\n        nn.init.normal_(self.bbox_pred.weight, std=0.001)\n        for l in 
[self.bbox_pred]:\n            nn.init.constant_(l.bias,0)\n    \n    def forward(self,x):\n        if x.dim() > 2:\n            x = torch.flatten(x, start_dim=1)\n        proposal_light_direction = self.bbox_pred(x)\n        return proposal_light_direction\n"
  },
  {
    "path": "detectron2/modeling/roi_heads/keypoint_head.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport torch\nfrom torch import nn\nfrom torch.nn import functional as F\n\nfrom detectron2.layers import Conv2d, ConvTranspose2d, ShapeSpec, cat, interpolate\nfrom detectron2.structures import heatmaps_to_keypoints\nfrom detectron2.utils.events import get_event_storage\nfrom detectron2.utils.registry import Registry\n\n_TOTAL_SKIPPED = 0\n\nROI_KEYPOINT_HEAD_REGISTRY = Registry(\"ROI_KEYPOINT_HEAD\")\nROI_KEYPOINT_HEAD_REGISTRY.__doc__ = \"\"\"\nRegistry for keypoint heads, which make keypoint predictions from per-region features.\n\nThe registered object will be called with `obj(cfg, input_shape)`.\n\"\"\"\n\n\ndef build_keypoint_head(cfg, input_shape):\n    \"\"\"\n    Build a keypoint head from `cfg.MODEL.ROI_KEYPOINT_HEAD.NAME`.\n    \"\"\"\n    name = cfg.MODEL.ROI_KEYPOINT_HEAD.NAME\n    return ROI_KEYPOINT_HEAD_REGISTRY.get(name)(cfg, input_shape)\n\n\ndef keypoint_rcnn_loss(pred_keypoint_logits, instances, normalizer):\n    \"\"\"\n    Arguments:\n        pred_keypoint_logits (Tensor): A tensor of shape (N, K, S, S) where N is the total number\n            of instances in the batch, K is the number of keypoints, and S is the side length\n            of the keypoint heatmap. 
The values are spatial logits.\n        instances (list[Instances]): A list of M Instances, where M is the batch size.\n            These instances are predictions from the model\n            that are in 1:1 correspondence with pred_keypoint_logits.\n            Each Instances should contain a `gt_keypoints` field containing a `structures.Keypoint`\n            instance.\n        normalizer (float): Normalize the loss by this amount.\n            If not specified, we normalize by the number of visible keypoints in the minibatch.\n\n    Returns a scalar tensor containing the loss.\n    \"\"\"\n    heatmaps = []\n    valid = []\n\n    keypoint_side_len = pred_keypoint_logits.shape[2]\n    for instances_per_image in instances:\n        if len(instances_per_image) == 0:\n            continue\n        keypoints = instances_per_image.gt_keypoints\n        heatmaps_per_image, valid_per_image = keypoints.to_heatmap(\n            instances_per_image.proposal_boxes.tensor, keypoint_side_len\n        )\n        heatmaps.append(heatmaps_per_image.view(-1))\n        valid.append(valid_per_image.view(-1))\n\n    if len(heatmaps):\n        keypoint_targets = cat(heatmaps, dim=0)\n        valid = cat(valid, dim=0).to(dtype=torch.uint8)\n        valid = torch.nonzero(valid).squeeze(1)\n\n    # torch.mean (in binary_cross_entropy_with_logits) doesn't\n    # accept empty tensors, so handle it separately\n    if len(heatmaps) == 0 or valid.numel() == 0:\n        global _TOTAL_SKIPPED\n        _TOTAL_SKIPPED += 1\n        storage = get_event_storage()\n        storage.put_scalar(\"kpts_num_skipped_batches\", _TOTAL_SKIPPED, smoothing_hint=False)\n        return pred_keypoint_logits.sum() * 0\n\n    N, K, H, W = pred_keypoint_logits.shape\n    pred_keypoint_logits = pred_keypoint_logits.view(N * K, H * W)\n\n    keypoint_loss = F.cross_entropy(\n        pred_keypoint_logits[valid], keypoint_targets[valid], reduction=\"sum\"\n    )\n\n    # If a normalizer isn't specified, normalize by 
the number of visible keypoints in the minibatch\n    # print(valid.shape,'<-- target')\n    # print(pred_keypoint_logits[valid].shape, \"<-- pred keypoint\")\n    # print(keypoint_targets[valid].shape,'<-- target')\n    # assert 1 != 1\n    if normalizer is None:\n        normalizer = valid.numel()\n    keypoint_loss /= normalizer\n\n    return keypoint_loss\n\n\ndef keypoint_rcnn_inference(pred_keypoint_logits, pred_instances):\n    \"\"\"\n    Post process each predicted keypoint heatmap in `pred_keypoint_logits` into (x, y, score, prob)\n        and add it to the `pred_instances` as a `pred_keypoints` field.\n\n    Args:\n        pred_keypoint_logits (Tensor): A tensor of shape (N, K, S, S) where N is the total number\n           of instances in the batch, K is the number of keypoints, and S is the side length of\n           the keypoint heatmap. The values are spatial logits.\n        pred_instances (list[Instances]): A list of M Instances, where M is the batch size.\n\n    Returns:\n        None. 
boxes will contain an extra \"pred_keypoints\" field.\n            The field is a tensor of shape (#instance, K, 3) where the last\n            dimension corresponds to (x, y, probability).\n    \"\"\"\n    # flatten all bboxes from all images together (list[Boxes] -> Nx4 tensor)\n    bboxes_flat = cat([b.pred_boxes.tensor for b in pred_instances], dim=0)\n\n    keypoint_results = heatmaps_to_keypoints(pred_keypoint_logits.detach(), bboxes_flat.detach())\n    num_instances_per_image = [len(i) for i in pred_instances]\n    keypoint_results = keypoint_results.split(num_instances_per_image, dim=0)\n\n    for keypoint_results_per_image, instances_per_image in zip(keypoint_results, pred_instances):\n        # keypoint_results_per_image is (num instances)x(num keypoints)x(x, y, score, prob)\n        keypoint_xyp = keypoint_results_per_image[:, :, [0, 1, 3]]\n        instances_per_image.pred_keypoints = keypoint_xyp\n\n\n@ROI_KEYPOINT_HEAD_REGISTRY.register()\nclass KRCNNConvDeconvUpsampleHead(nn.Module):\n    \"\"\"\n    A standard keypoint head containing a series of 3x3 convs, followed by\n    a transpose convolution and bilinear interpolation for upsampling.\n    \"\"\"\n\n    def __init__(self, cfg, input_shape: ShapeSpec):\n        \"\"\"\n        The following attributes are parsed from config:\n            conv_dims: an iterable of output channel counts for each conv in the head\n                         e.g. 
(512, 512, 512) for three convs outputting 512 channels.\n            num_keypoints: number of keypoint heatmaps to predicts, determines the number of\n                           channels in the final output.\n        \"\"\"\n        super(KRCNNConvDeconvUpsampleHead, self).__init__()\n\n        # fmt: off\n        # default up_scale to 2 (this can eventually be moved to config)\n        up_scale      = 2\n        conv_dims     = cfg.MODEL.ROI_KEYPOINT_HEAD.CONV_DIMS\n        num_keypoints = cfg.MODEL.ROI_KEYPOINT_HEAD.NUM_KEYPOINTS\n        in_channels   = input_shape.channels\n        # fmt: on\n\n        self.blocks = []\n        for idx, layer_channels in enumerate(conv_dims, 1):\n            module = Conv2d(in_channels, layer_channels, 3, stride=1, padding=1)\n            self.add_module(\"conv_fcn{}\".format(idx), module)\n            self.blocks.append(module)\n            in_channels = layer_channels\n\n        deconv_kernel = 4\n        self.score_lowres = ConvTranspose2d(\n            in_channels, num_keypoints, deconv_kernel, stride=2, padding=deconv_kernel // 2 - 1\n        )\n        self.up_scale = up_scale\n\n        for name, param in self.named_parameters():\n            if \"bias\" in name:\n                nn.init.constant_(param, 0)\n            elif \"weight\" in name:\n                # Caffe2 implementation uses MSRAFill, which in fact\n                # corresponds to kaiming_normal_ in PyTorch\n                nn.init.kaiming_normal_(param, mode=\"fan_out\", nonlinearity=\"relu\")\n\n    def forward(self, x):\n        for layer in self.blocks:\n            x = F.relu(layer(x))\n        x = self.score_lowres(x)\n        x = interpolate(x, scale_factor=self.up_scale, mode=\"bilinear\", align_corners=False)\n        return x\n"
  },
  {
    "path": "detectron2/modeling/roi_heads/mask_head.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport fvcore.nn.weight_init as weight_init\nimport torch\nfrom torch import nn\nfrom torch.nn import functional as F\n\nfrom detectron2.layers import Conv2d, ConvTranspose2d, ShapeSpec, cat, get_norm\nfrom detectron2.utils.events import get_event_storage\nfrom detectron2.utils.registry import Registry\n\nROI_MASK_HEAD_REGISTRY = Registry(\"ROI_MASK_HEAD\")\nROI_MASK_HEAD_REGISTRY.__doc__ = \"\"\"\nRegistry for mask heads, which predicts instance masks given\nper-region features.\n\nThe registered object will be called with `obj(cfg, input_shape)`.\n\"\"\"\n\n\ndef mask_rcnn_loss(pred_mask_logits, instances):\n    \"\"\"\n    Compute the mask prediction loss defined in the Mask R-CNN paper.\n\n    Args:\n        pred_mask_logits (Tensor): A tensor of shape (B, C, Hmask, Wmask) or (B, 1, Hmask, Wmask)\n            for class-specific or class-agnostic, where B is the total number of predicted masks\n            in all images, C is the number of foreground classes, and Hmask, Wmask are the height\n            and width of the mask predictions. The values are logits.\n        instances (list[Instances]): A list of N Instances, where N is the number of images\n            in the batch. These instances are in 1:1\n            correspondence with the pred_mask_logits. The ground-truth labels (class, box, mask,\n            ...) 
associated with each instance are stored in fields.\n\n    Returns:\n        mask_loss (Tensor): A scalar tensor containing the loss.\n    \"\"\"\n    cls_agnostic_mask = pred_mask_logits.size(1) == 1\n    total_num_masks = pred_mask_logits.size(0)\n    mask_side_len = pred_mask_logits.size(2)\n    assert pred_mask_logits.size(2) == pred_mask_logits.size(3), \"Mask prediction must be square!\"\n\n    gt_classes = []\n    gt_masks = []\n    for instances_per_image in instances:\n        if len(instances_per_image) == 0:\n            continue\n        if not cls_agnostic_mask:\n            gt_classes_per_image = instances_per_image.gt_classes.to(dtype=torch.int64)\n            gt_classes.append(gt_classes_per_image)\n\n        gt_masks_per_image = instances_per_image.gt_masks.crop_and_resize(\n            instances_per_image.proposal_boxes.tensor, mask_side_len\n        ).to(device=pred_mask_logits.device)\n        # A tensor of shape (N, M, M), N=#instances in the image; M=mask_side_len\n        gt_masks.append(gt_masks_per_image)\n\n    if len(gt_masks) == 0:\n        return pred_mask_logits.sum() * 0\n\n    gt_masks = cat(gt_masks, dim=0)\n\n    if cls_agnostic_mask:\n        pred_mask_logits = pred_mask_logits[:, 0]\n    else:\n        indices = torch.arange(total_num_masks)\n        gt_classes = cat(gt_classes, dim=0)\n        pred_mask_logits = pred_mask_logits[indices, gt_classes]\n\n    if gt_masks.dtype == torch.bool:\n        gt_masks_bool = gt_masks\n    else:\n        # Here we allow gt_masks to be float as well (depend on the implementation of rasterize())\n        gt_masks_bool = gt_masks > 0.5\n\n    # Log the training accuracy (using gt classes and 0.5 threshold)\n    mask_incorrect = (pred_mask_logits > 0.0) != gt_masks_bool\n    mask_accuracy = 1 - (mask_incorrect.sum().item() / max(mask_incorrect.numel(), 1.0))\n    num_positive = gt_masks_bool.sum().item()\n    false_positive = (mask_incorrect & ~gt_masks_bool).sum().item() / max(\n        
gt_masks_bool.numel() - num_positive, 1.0\n    )\n    false_negative = (mask_incorrect & gt_masks_bool).sum().item() / max(num_positive, 1.0)\n\n    storage = get_event_storage()\n    storage.put_scalar(\"mask_rcnn/accuracy\", mask_accuracy)\n    storage.put_scalar(\"mask_rcnn/false_positive\", false_positive)\n    storage.put_scalar(\"mask_rcnn/false_negative\", false_negative)\n\n    mask_loss = F.binary_cross_entropy_with_logits(\n        pred_mask_logits, gt_masks.to(dtype=torch.float32), reduction=\"mean\"\n    )\n    return mask_loss\n\n\ndef mask_rcnn_inference(pred_mask_logits, pred_instances):\n    \"\"\"\n    Convert pred_mask_logits to estimated foreground probability masks while also\n    extracting only the masks for the predicted classes in pred_instances. For each\n    predicted box, the mask of the same class is attached to the instance by adding a\n    new \"pred_masks\" field to pred_instances.\n\n    Args:\n        pred_mask_logits (Tensor): A tensor of shape (B, C, Hmask, Wmask) or (B, 1, Hmask, Wmask)\n            for class-specific or class-agnostic, where B is the total number of predicted masks\n            in all images, C is the number of foreground classes, and Hmask, Wmask are the height\n            and width of the mask predictions. The values are logits.\n        pred_instances (list[Instances]): A list of N Instances, where N is the number of images\n            in the batch. Each Instances must have field \"pred_classes\".\n\n    Returns:\n        None. pred_instances will contain an extra \"pred_masks\" field storing a mask of size (Hmask,\n            Wmask) for predicted class. 
Note that the masks are returned as a soft (non-quantized)\n            masks the resolution predicted by the network; post-processing steps, such as resizing\n            the predicted masks to the original image resolution and/or binarizing them, is left\n            to the caller.\n    \"\"\"\n    cls_agnostic_mask = pred_mask_logits.size(1) == 1\n\n    if cls_agnostic_mask:\n        mask_probs_pred = pred_mask_logits.sigmoid()\n    else:\n        # Select masks corresponding to the predicted classes\n        num_masks = pred_mask_logits.shape[0]\n        class_pred = cat([i.pred_classes for i in pred_instances])\n        indices = torch.arange(num_masks, device=class_pred.device)\n        mask_probs_pred = pred_mask_logits[indices, class_pred][:, None].sigmoid()\n    # mask_probs_pred.shape: (B, 1, Hmask, Wmask)\n\n    num_boxes_per_image = [len(i) for i in pred_instances]\n    mask_probs_pred = mask_probs_pred.split(num_boxes_per_image, dim=0)\n\n    for prob, instances in zip(mask_probs_pred, pred_instances):\n        instances.pred_masks = prob  # (1, Hmask, Wmask)\n\n\n@ROI_MASK_HEAD_REGISTRY.register()\nclass MaskRCNNConvUpsampleHead(nn.Module):\n    \"\"\"\n    A mask head with several conv layers, plus an upsample layer (with `ConvTranspose2d`).\n    \"\"\"\n\n    def __init__(self, cfg, input_shape: ShapeSpec):\n        \"\"\"\n        The following attributes are parsed from config:\n            num_conv: the number of conv layers\n            conv_dim: the dimension of the conv layers\n            norm: normalization for the conv layers\n        \"\"\"\n        super(MaskRCNNConvUpsampleHead, self).__init__()\n\n        # fmt: off\n        num_classes       = cfg.MODEL.ROI_HEADS.NUM_CLASSES\n        conv_dims         = cfg.MODEL.ROI_MASK_HEAD.CONV_DIM\n        self.norm         = cfg.MODEL.ROI_MASK_HEAD.NORM\n        num_conv          = cfg.MODEL.ROI_MASK_HEAD.NUM_CONV\n        input_channels    = input_shape.channels\n        cls_agnostic_mask = 
cfg.MODEL.ROI_MASK_HEAD.CLS_AGNOSTIC_MASK\n        # fmt: on\n\n        self.conv_norm_relus = []\n\n        for k in range(num_conv):\n            conv = Conv2d(\n                input_channels if k == 0 else conv_dims,\n                conv_dims,\n                kernel_size=3,\n                stride=1,\n                padding=1,\n                bias=not self.norm,\n                norm=get_norm(self.norm, conv_dims),\n                activation=F.relu,\n            )\n            self.add_module(\"mask_fcn{}\".format(k + 1), conv)\n            self.conv_norm_relus.append(conv)\n\n        self.deconv = ConvTranspose2d(\n            conv_dims if num_conv > 0 else input_channels,\n            conv_dims,\n            kernel_size=2,\n            stride=2,\n            padding=0,\n        )\n\n        num_mask_classes = 1 if cls_agnostic_mask else num_classes\n        self.predictor = Conv2d(conv_dims, num_mask_classes, kernel_size=1, stride=1, padding=0)\n\n        for layer in self.conv_norm_relus + [self.deconv]:\n            weight_init.c2_msra_fill(layer)\n        # use normal distribution initialization for mask prediction layer\n        nn.init.normal_(self.predictor.weight, std=0.001)\n        if self.predictor.bias is not None:\n            nn.init.constant_(self.predictor.bias, 0)\n\n    def forward(self, x):\n        for layer in self.conv_norm_relus:\n            x = layer(x)\n        x = F.relu(self.deconv(x))\n        return self.predictor(x)\n\n\ndef build_mask_head(cfg, input_shape):\n    \"\"\"\n    Build a mask head defined by `cfg.MODEL.ROI_MASK_HEAD.NAME`.\n    \"\"\"\n    name = cfg.MODEL.ROI_MASK_HEAD.NAME\n    return ROI_MASK_HEAD_REGISTRY.get(name)(cfg, input_shape)\n"
  },
  {
    "path": "detectron2/modeling/roi_heads/roi_heads.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport logging\nimport numpy as np\nfrom typing import Dict\nimport torch\nfrom torch import nn\n\nfrom detectron2.layers import ShapeSpec\nfrom detectron2.structures import Boxes, Instances, RotatedBoxes, pairwise_iou, pairwise_iou_rotated\nfrom detectron2.utils.events import get_event_storage\nfrom detectron2.utils.registry import Registry\n\nfrom ..backbone.resnet import BottleneckBlock, make_stage\nfrom ..box_regression import Box2BoxTransform, Box2BoxTransformRotated\nfrom ..matcher import Matcher\nfrom ..poolers import ROIPooler\nfrom ..proposal_generator.proposal_utils import add_ground_truth_to_proposals\nfrom ..sampling import subsample_labels\nfrom .box_head import build_box_head\nfrom .fast_rcnn import FastRCNNOutputLayers, FastRCNNOutputs\nfrom .keypoint_head import build_keypoint_head, keypoint_rcnn_inference, keypoint_rcnn_loss\nfrom .mask_head import build_mask_head, mask_rcnn_inference, mask_rcnn_loss\n\nROI_HEADS_REGISTRY = Registry(\"ROI_HEADS\")\nROI_HEADS_REGISTRY.__doc__ = \"\"\"\nRegistry for ROI heads in a generalized R-CNN model.\nROIHeads take feature maps and region proposals, and\nperform per-region computation.\n\nThe registered object will be called with `obj(cfg, input_shape)`.\nThe call is expected to return an :class:`ROIHeads`.\n\"\"\"\n\nlogger = logging.getLogger(__name__)\n\n\ndef build_roi_heads(cfg, input_shape):\n    \"\"\"\n    Build ROIHeads defined by `cfg.MODEL.ROI_HEADS.NAME`.\n    \"\"\"\n    name = cfg.MODEL.ROI_HEADS.NAME\n    return ROI_HEADS_REGISTRY.get(name)(cfg, input_shape)\n\n\ndef select_foreground_proposals(proposals, bg_label):\n    \"\"\"\n    Given a list of N Instances (for N images), each containing a `gt_classes` field,\n    return a list of Instances that contain only instances with `gt_classes != -1 &&\n    gt_classes != bg_label`.\n\n    Args:\n        proposals (list[Instances]): A list of N Instances, where N is 
the number of\n            images in the batch.\n        bg_label: label index of background class.\n\n    Returns:\n        list[Instances]: N Instances, each contains only the selected foreground instances.\n        list[Tensor]: N boolean vector, correspond to the selection mask of\n            each instance. True for selected instances.\n    \"\"\"\n    assert isinstance(proposals, (list, tuple))\n    assert isinstance(proposals[0], Instances)\n    assert proposals[0].has(\"gt_classes\")\n    fg_proposals = []\n    fg_selection_masks = []\n    for proposals_per_image in proposals:\n        gt_classes = proposals_per_image.gt_classes\n        fg_selection_mask = (gt_classes != -1) & (gt_classes != bg_label)\n        fg_inds = fg_selection_mask.nonzero().squeeze(1)\n        fg_proposals.append(proposals_per_image[fg_inds])\n        fg_selection_masks.append(fg_selection_mask)\n    return fg_proposals, fg_selection_masks\n\n\ndef select_proposals_with_visible_keypoints(proposals):\n    \"\"\"\n    Args:\n        proposals (list[Instances]): a list of N Instances, where N is the\n            number of images.\n\n    Returns:\n        proposals: only contains proposals with at least one visible keypoint.\n\n    Note that this is still slightly different from Detectron.\n    In Detectron, proposals for training keypoint head are re-sampled from\n    all the proposals with IOU>threshold & >=1 visible keypoint.\n\n    Here, the proposals are first sampled from all proposals with\n    IOU>threshold, then proposals with no visible keypoint are filtered out.\n    This strategy seems to make no difference on Detectron and is easier to implement.\n    \"\"\"\n    ret = []\n    all_num_fg = []\n    for proposals_per_image in proposals:\n        gt_keypoints = proposals_per_image.gt_keypoints.tensor\n        # #fg x K x 3\n        vis_mask = gt_keypoints[:, :, 2] >= 1\n        xs, ys = gt_keypoints[:, :, 0], gt_keypoints[:, :, 1]\n        proposal_boxes = 
proposals_per_image.proposal_boxes.tensor.unsqueeze(dim=1)  # #fg x 1 x 4\n        kp_in_box = (\n            (xs >= proposal_boxes[:, :, 0])\n            & (xs <= proposal_boxes[:, :, 2])\n            & (ys >= proposal_boxes[:, :, 1])\n            & (ys <= proposal_boxes[:, :, 3])\n        )\n        selection = (kp_in_box & vis_mask).any(dim=1)\n        selection_idxs = torch.nonzero(selection).squeeze(1)\n        all_num_fg.append(selection_idxs.numel())\n        ret.append(proposals_per_image[selection_idxs])\n\n    storage = get_event_storage()\n    storage.put_scalar(\"keypoint_head/num_fg_samples\", np.mean(all_num_fg))\n    return ret\n\n\nclass ROIHeads(torch.nn.Module):\n    \"\"\"\n    ROIHeads perform all per-region computation in an R-CNN.\n\n    It contains logic of cropping the regions, extract per-region features,\n    and make per-region predictions.\n\n    It can have many variants, implemented as subclasses of this class.\n    \"\"\"\n\n    def __init__(self, cfg, input_shape: Dict[str, ShapeSpec]):\n        super(ROIHeads, self).__init__()\n\n        # fmt: off\n        self.batch_size_per_image     = cfg.MODEL.ROI_HEADS.BATCH_SIZE_PER_IMAGE\n        self.positive_sample_fraction = cfg.MODEL.ROI_HEADS.POSITIVE_FRACTION\n        self.test_score_thresh        = cfg.MODEL.ROI_HEADS.SCORE_THRESH_TEST\n        self.test_nms_thresh          = cfg.MODEL.ROI_HEADS.NMS_THRESH_TEST\n        self.test_detections_per_img  = cfg.TEST.DETECTIONS_PER_IMAGE\n        self.in_features              = cfg.MODEL.ROI_HEADS.IN_FEATURES\n        self.num_classes              = cfg.MODEL.ROI_HEADS.NUM_CLASSES\n        self.proposal_append_gt       = cfg.MODEL.ROI_HEADS.PROPOSAL_APPEND_GT\n        self.feature_strides          = {k: v.stride for k, v in input_shape.items()}\n        self.feature_channels         = {k: v.channels for k, v in input_shape.items()}\n        self.cls_agnostic_bbox_reg    = cfg.MODEL.ROI_BOX_HEAD.CLS_AGNOSTIC_BBOX_REG\n        
self.smooth_l1_beta           = cfg.MODEL.ROI_BOX_HEAD.SMOOTH_L1_BETA\n        # fmt: on\n\n        # Matcher to assign box proposals to gt boxes\n        self.proposal_matcher = Matcher(\n            cfg.MODEL.ROI_HEADS.IOU_THRESHOLDS,\n            cfg.MODEL.ROI_HEADS.IOU_LABELS,\n            allow_low_quality_matches=False,\n        )\n\n        # Box2BoxTransform for bounding box regression\n        self.box2box_transform = Box2BoxTransform(weights=cfg.MODEL.ROI_BOX_HEAD.BBOX_REG_WEIGHTS)\n\n    @torch.no_grad()\n    def label_and_sample_proposals(self, proposals, targets):\n        \"\"\"\n        Prepare some proposals to be used to train the ROI heads.\n        It performs box matching between `proposals` and `targets`, and assigns\n        training labels to the proposals.\n        It returns `self.batch_size_per_image` random samples from proposals and groundtruth boxes,\n        with a fraction of positives that is no larger than `self.positive_sample_fraction.\n\n        Args:\n            See :meth:`ROIHeads.forward`\n\n        Returns:\n            list[Instances]:\n                length `N` list of `Instances`s containing the proposals\n                sampled for training. 
Each `Instances` has the following fields:\n                - proposal_boxes: the proposal boxes\n                - gt_boxes: the ground-truth box that the proposal is assigned to\n                  (this is only meaningful if the proposal has a label > 0; if label = 0\n                   then the ground-truth box is random)\n                Other fields such as \"gt_classes\", \"gt_masks\", that's included in `targets`.\n        \"\"\"\n        gt_boxes = [x.gt_boxes for x in targets]\n        # Augment proposals with ground-truth boxes.\n        # In the case of learned proposals (e.g., RPN), when training starts\n        # the proposals will be low quality due to random initialization.\n        # It's possible that none of these initial\n        # proposals have high enough overlap with the gt objects to be used\n        # as positive examples for the second stage components (box head,\n        # cls head, mask head). Adding the gt boxes to the set of proposals\n        # ensures that the second stage components will have some positive\n        # examples from the start of training. 
For RPN, this augmentation improves\n        # convergence and empirically improves box AP on COCO by about 0.5\n        # points (under one tested configuration).\n        if self.proposal_append_gt:\n            proposals = add_ground_truth_to_proposals(gt_boxes, proposals)\n\n        proposals_with_gt = []\n\n        num_fg_samples = []\n        num_bg_samples = []\n        for proposals_per_image, targets_per_image in zip(proposals, targets):\n            has_gt = len(targets_per_image) > 0\n            match_quality_matrix = pairwise_iou(\n                targets_per_image.gt_boxes, proposals_per_image.proposal_boxes\n            )\n            matched_idxs, proposals_labels = self.proposal_matcher(match_quality_matrix)\n\n            # Get the corresponding GT for each proposal\n            if has_gt:\n                gt_classes = targets_per_image.gt_classes[matched_idxs]\n                # print(gt_classes)\n                # Label unmatched proposals (0 label from matcher) as background (label=num_classes)\n                gt_classes[proposals_labels == 0] = self.num_classes\n                # Label ignore proposals (-1 label)\n                gt_classes[proposals_labels == -1] = -1\n            else:\n                gt_classes = torch.zeros_like(matched_idxs) + self.num_classes\n\n            sampled_fg_inds, sampled_bg_inds = subsample_labels(\n                gt_classes,\n                self.batch_size_per_image,\n                self.positive_sample_fraction,\n                self.num_classes,\n            )\n\n            sampled_inds = torch.cat([sampled_fg_inds, sampled_bg_inds], dim=0)\n\n            proposals_per_image = proposals_per_image[sampled_inds]\n            proposals_per_image.gt_classes = gt_classes[sampled_inds]\n\n            # We index all the attributes of targets that start with \"gt_\"\n            # and have not been added to proposals yet (=\"gt_classes\").\n            if has_gt:\n                sampled_targets = 
matched_idxs[sampled_inds]\n                # NOTE: here the indexing waste some compute, because heads\n                # like masks, keypoints, etc, will filter the proposals again,\n                # (by foreground/background, or number of keypoints in the image, etc)\n                # so we essentially index the data twice.\n                for (trg_name, trg_value) in targets_per_image.get_fields().items():\n                    if trg_name.startswith(\"gt_\") and not proposals_per_image.has(trg_name):\n                        proposals_per_image.set(trg_name, trg_value[sampled_targets])\n            else:\n                gt_boxes = Boxes(\n                    targets_per_image.gt_boxes.tensor.new_zeros((len(sampled_inds), 4))\n                )\n                proposals_per_image.gt_boxes = gt_boxes\n\n            num_fg_samples.append(sampled_fg_inds.numel())\n            num_bg_samples.append(sampled_bg_inds.numel())\n            proposals_with_gt.append(proposals_per_image)\n\n        # Log the number of fg/bg samples that are selected for training ROI heads\n        storage = get_event_storage()\n        storage.put_scalar(\"roi_head/num_fg_samples\", np.mean(num_fg_samples))\n        storage.put_scalar(\"roi_head/num_bg_samples\", np.mean(num_bg_samples))\n\n        return proposals_with_gt\n\n    def forward(self, images, features, proposals, targets=None):\n        \"\"\"\n        Args:\n            images (ImageList):\n            features (dict[str: Tensor]): input data as a mapping from feature\n                map name to tensor. Axis 0 represents the number of images `N` in\n                the input data; axes 1-3 are channels, height, and width, which may\n                vary between feature maps (e.g., if a feature pyramid is used).\n            proposals (list[Instances]): length `N` list of `Instances`s. 
The i-th\n                `Instances` contains object proposals for the i-th input image,\n                with fields \"proposal_boxes\" and \"objectness_logits\".\n            targets (list[Instances], optional): length `N` list of `Instances`s. The i-th\n                `Instances` contains the ground-truth per-instance annotations\n                for the i-th input image.  Specify `targets` during training only.\n                It may have the following fields:\n                - gt_boxes: the bounding box of each instance.\n                - gt_classes: the label for each instance with a category ranging in [0, #class].\n                - gt_masks: the ground-truth mask of the instance.\n\n        Returns:\n            results (list[Instances]): length `N` list of `Instances`s containing the\n                detected instances. Returned during inference only; may be []\n                during training.\n            losses (dict[str: Tensor]): mapping from a named loss to a tensor\n                storing the loss. 
Used during training only.\n        \"\"\"\n        raise NotImplementedError()\n\n\n@ROI_HEADS_REGISTRY.register()\nclass Res5ROIHeads(ROIHeads):\n    \"\"\"\n    The ROIHeads in a typical \"C4\" R-CNN model, where\n    the box and mask head share the cropping and\n    the per-region feature computation by a Res5 block.\n    \"\"\"\n\n    def __init__(self, cfg, input_shape):\n        super().__init__(cfg, input_shape)\n\n        assert len(self.in_features) == 1\n\n        # fmt: off\n        pooler_resolution = cfg.MODEL.ROI_BOX_HEAD.POOLER_RESOLUTION\n        pooler_type   = cfg.MODEL.ROI_BOX_HEAD.POOLER_TYPE\n        pooler_scales     = (1.0 / self.feature_strides[self.in_features[0]], )\n        sampling_ratio    = cfg.MODEL.ROI_BOX_HEAD.POOLER_SAMPLING_RATIO\n        self.mask_on      = cfg.MODEL.MASK_ON\n        # fmt: on\n        assert not cfg.MODEL.KEYPOINT_ON\n\n        self.pooler = ROIPooler(\n            output_size=pooler_resolution,\n            scales=pooler_scales,\n            sampling_ratio=sampling_ratio,\n            pooler_type=pooler_type,\n        )\n\n        self.res5, out_channels = self._build_res5_block(cfg)\n        self.box_predictor = FastRCNNOutputLayers(\n            out_channels, self.num_classes, self.cls_agnostic_bbox_reg\n        )\n\n        if self.mask_on:\n            self.mask_head = build_mask_head(\n                cfg,\n                ShapeSpec(channels=out_channels, width=pooler_resolution, height=pooler_resolution),\n            )\n\n    def _build_res5_block(self, cfg):\n        # fmt: off\n        stage_channel_factor = 2 ** 3  # res5 is 8x res2\n        num_groups           = cfg.MODEL.RESNETS.NUM_GROUPS\n        width_per_group      = cfg.MODEL.RESNETS.WIDTH_PER_GROUP\n        bottleneck_channels  = num_groups * width_per_group * stage_channel_factor\n        out_channels         = cfg.MODEL.RESNETS.RES2_OUT_CHANNELS * stage_channel_factor\n        stride_in_1x1        = cfg.MODEL.RESNETS.STRIDE_IN_1X1\n        
norm                 = cfg.MODEL.RESNETS.NORM\n        assert not cfg.MODEL.RESNETS.DEFORM_ON_PER_STAGE[-1], \\\n            \"Deformable conv is not yet supported in res5 head.\"\n        # fmt: on\n\n        blocks = make_stage(\n            BottleneckBlock,\n            3,\n            first_stride=2,\n            in_channels=out_channels // 2,\n            bottleneck_channels=bottleneck_channels,\n            out_channels=out_channels,\n            num_groups=num_groups,\n            norm=norm,\n            stride_in_1x1=stride_in_1x1,\n        )\n        return nn.Sequential(*blocks), out_channels\n\n    def _shared_roi_transform(self, features, boxes):\n        x = self.pooler(features, boxes)\n        return self.res5(x)\n\n    def forward(self, images, features, proposals, targets=None):\n        \"\"\"\n        See :class:`ROIHeads.forward`.\n        \"\"\"\n        del images\n\n        if self.training:\n            proposals = self.label_and_sample_proposals(proposals, targets)\n        del targets\n\n        proposal_boxes = [x.proposal_boxes for x in proposals]\n        box_features = self._shared_roi_transform(\n            [features[f] for f in self.in_features], proposal_boxes\n        )\n        feature_pooled = box_features.mean(dim=[2, 3])  # pooled to 1x1\n        pred_class_logits, pred_proposal_deltas = self.box_predictor(feature_pooled)\n        del feature_pooled\n\n        outputs = FastRCNNOutputs(\n            self.box2box_transform,\n            pred_class_logits,\n            pred_proposal_deltas,\n            proposals,\n            self.smooth_l1_beta,\n        )\n\n        if self.training:\n            del features\n            losses = outputs.losses()\n            if self.mask_on:\n                proposals, fg_selection_masks = select_foreground_proposals(\n                    proposals, self.num_classes\n                )\n                # Since the ROI feature transform is shared between boxes and masks,\n                # we 
don't need to recompute features. The mask loss is only defined\n                # on foreground proposals, so we need to select out the foreground\n                # features.\n                mask_features = box_features[torch.cat(fg_selection_masks, dim=0)]\n                del box_features\n                mask_logits = self.mask_head(mask_features)\n                losses[\"loss_mask\"] = mask_rcnn_loss(mask_logits, proposals)\n            return [], losses\n        else:\n            pred_instances, _ = outputs.inference(\n                self.test_score_thresh, self.test_nms_thresh, self.test_detections_per_img\n            )\n            pred_instances = self.forward_with_given_boxes(features, pred_instances)\n            return pred_instances, {}\n\n    def forward_with_given_boxes(self, features, instances):\n        \"\"\"\n        Use the given boxes in `instances` to produce other (non-box) per-ROI outputs.\n\n        Args:\n            features: same as in `forward()`\n            instances (list[Instances]): instances to predict other outputs. 
Expect the keys\n                \"pred_boxes\" and \"pred_classes\" to exist.\n\n        Returns:\n            instances (Instances):\n                the same `Instances` object, with extra\n                fields such as `pred_masks` or `pred_keypoints`.\n        \"\"\"\n        assert not self.training\n        assert instances[0].has(\"pred_boxes\") and instances[0].has(\"pred_classes\")\n\n        if self.mask_on:\n            features = [features[f] for f in self.in_features]\n            x = self._shared_roi_transform(features, [x.pred_boxes for x in instances])\n            mask_logits = self.mask_head(x)\n            mask_rcnn_inference(mask_logits, instances)\n        return instances\n\n\n@ROI_HEADS_REGISTRY.register()\nclass StandardROIHeads(ROIHeads):\n    \"\"\"\n    It's \"standard\" in a sense that there is no ROI transform sharing\n    or feature sharing between tasks.\n    The cropped rois go to separate branches (boxes and masks) directly.\n    This way, it is easier to make separate abstractions for different branches.\n\n    This class is used by most models, such as FPN and C5.\n    To implement more models, you can subclass it and implement a different\n    :meth:`forward()` or a head.\n    \"\"\"\n\n    def __init__(self, cfg, input_shape):\n        super(StandardROIHeads, self).__init__(cfg, input_shape)\n        self._init_box_head(cfg)\n        self._init_mask_head(cfg)\n        self._init_keypoint_head(cfg)\n\n    def _init_box_head(self, cfg):\n        # fmt: off\n        pooler_resolution = cfg.MODEL.ROI_BOX_HEAD.POOLER_RESOLUTION\n        pooler_scales     = tuple(1.0 / self.feature_strides[k] for k in self.in_features)\n        sampling_ratio    = cfg.MODEL.ROI_BOX_HEAD.POOLER_SAMPLING_RATIO\n        pooler_type       = cfg.MODEL.ROI_BOX_HEAD.POOLER_TYPE\n        # fmt: on\n\n        # If StandardROIHeads is applied on multiple feature maps (as in FPN),\n        # then we share the same predictors and therefore the channel counts 
must be the same\n        in_channels = [self.feature_channels[f] for f in self.in_features]\n        # Check all channel counts are equal\n        assert len(set(in_channels)) == 1, in_channels\n        in_channels = in_channels[0]\n\n        self.box_pooler = ROIPooler(\n            output_size=pooler_resolution,\n            scales=pooler_scales,\n            sampling_ratio=sampling_ratio,\n            pooler_type=pooler_type,\n        )\n        # Here we split \"box head\" and \"box predictor\", which is mainly due to historical reasons.\n        # They are used together so the \"box predictor\" layers should be part of the \"box head\".\n        # New subclasses of ROIHeads do not need \"box predictor\"s.\n        self.box_head = build_box_head(\n            cfg, ShapeSpec(channels=in_channels, height=pooler_resolution, width=pooler_resolution)\n        )\n        self.box_predictor = FastRCNNOutputLayers(\n            self.box_head.output_size, self.num_classes, self.cls_agnostic_bbox_reg\n        )\n\n    def _init_mask_head(self, cfg):\n        # fmt: off\n        self.mask_on           = cfg.MODEL.MASK_ON\n        if not self.mask_on:\n            return\n        pooler_resolution = cfg.MODEL.ROI_MASK_HEAD.POOLER_RESOLUTION\n        pooler_scales     = tuple(1.0 / self.feature_strides[k] for k in self.in_features)\n        sampling_ratio    = cfg.MODEL.ROI_MASK_HEAD.POOLER_SAMPLING_RATIO\n        pooler_type       = cfg.MODEL.ROI_MASK_HEAD.POOLER_TYPE\n        # fmt: on\n\n        in_channels = [self.feature_channels[f] for f in self.in_features][0]\n\n        self.mask_pooler = ROIPooler(\n            output_size=pooler_resolution,\n            scales=pooler_scales,\n            sampling_ratio=sampling_ratio,\n            pooler_type=pooler_type,\n        )\n        self.mask_head = build_mask_head(\n            cfg, ShapeSpec(channels=in_channels, width=pooler_resolution, height=pooler_resolution)\n        )\n\n    def _init_keypoint_head(self, cfg):\n  
      # fmt: off\n        self.keypoint_on                         = cfg.MODEL.KEYPOINT_ON\n        if not self.keypoint_on:\n            return\n        pooler_resolution                        = cfg.MODEL.ROI_KEYPOINT_HEAD.POOLER_RESOLUTION\n        pooler_scales                            = tuple(1.0 / self.feature_strides[k] for k in self.in_features)  # noqa\n        sampling_ratio                           = cfg.MODEL.ROI_KEYPOINT_HEAD.POOLER_SAMPLING_RATIO\n        pooler_type                              = cfg.MODEL.ROI_KEYPOINT_HEAD.POOLER_TYPE\n        self.normalize_loss_by_visible_keypoints = cfg.MODEL.ROI_KEYPOINT_HEAD.NORMALIZE_LOSS_BY_VISIBLE_KEYPOINTS  # noqa\n        self.keypoint_loss_weight                = cfg.MODEL.ROI_KEYPOINT_HEAD.LOSS_WEIGHT\n        # fmt: on\n\n        in_channels = [self.feature_channels[f] for f in self.in_features][0]\n\n        self.keypoint_pooler = ROIPooler(\n            output_size=pooler_resolution,\n            scales=pooler_scales,\n            sampling_ratio=sampling_ratio,\n            pooler_type=pooler_type,\n        )\n        self.keypoint_head = build_keypoint_head(\n            cfg, ShapeSpec(channels=in_channels, width=pooler_resolution, height=pooler_resolution)\n        )\n\n    def forward(self, images, features, proposals, targets=None):\n        \"\"\"\n        See :class:`ROIHeads.forward`.\n        \"\"\"\n        del images\n        if self.training:\n            proposals = self.label_and_sample_proposals(proposals, targets)\n        del targets\n\n        features_list = [features[f] for f in self.in_features]\n\n        if self.training:\n            losses = self._forward_box(features_list, proposals)\n            # During training the proposals used by the box head are\n            # used by the mask, keypoint (and densepose) heads.\n            losses.update(self._forward_mask(features_list, proposals))\n            losses.update(self._forward_keypoint(features_list, proposals))\n          
  return proposals, losses\n        else:\n            pred_instances = self._forward_box(features_list, proposals)\n            # During inference cascaded prediction is used: the mask and keypoints heads are only\n            # applied to the top scoring box detections.\n            pred_instances = self.forward_with_given_boxes(features, pred_instances)\n            return pred_instances, {}\n\n    def forward_with_given_boxes(self, features, instances):\n        \"\"\"\n        Use the given boxes in `instances` to produce other (non-box) per-ROI outputs.\n\n        This is useful for downstream tasks where a box is known, but need to obtain\n        other attributes (outputs of other heads).\n        Test-time augmentation also uses this.\n\n        Args:\n            features: same as in `forward()`\n            instances (list[Instances]): instances to predict other outputs. Expect the keys\n                \"pred_boxes\" and \"pred_classes\" to exist.\n\n        Returns:\n            instances (Instances):\n                the same `Instances` object, with extra\n                fields such as `pred_masks` or `pred_keypoints`.\n        \"\"\"\n        assert not self.training\n        assert instances[0].has(\"pred_boxes\") and instances[0].has(\"pred_classes\")\n        features = [features[f] for f in self.in_features]\n\n        instances = self._forward_mask(features, instances)\n        instances = self._forward_keypoint(features, instances)\n        return instances\n\n    def _forward_box(self, features, proposals):\n        \"\"\"\n        Forward logic of the box prediction branch.\n\n        Args:\n            features (list[Tensor]): #level input features for box prediction\n            proposals (list[Instances]): the per-image object proposals with\n                their matching ground truth.\n                Each has fields \"proposal_boxes\", and \"objectness_logits\",\n                \"gt_classes\", \"gt_boxes\".\n\n        Returns:\n      
      In training, a dict of losses.\n            In inference, a list of `Instances`, the predicted instances.\n        \"\"\"\n        box_features = self.box_pooler(features, [x.proposal_boxes for x in proposals])\n        box_features = self.box_head(box_features)\n        pred_class_logits, pred_proposal_deltas = self.box_predictor(box_features)\n        del box_features\n\n        outputs = FastRCNNOutputs(\n            self.box2box_transform,\n            pred_class_logits,\n            pred_proposal_deltas,\n            proposals,\n            self.smooth_l1_beta,\n        )\n        if self.training:\n            return outputs.losses()\n        else:\n            pred_instances, _ = outputs.inference(\n                self.test_score_thresh, self.test_nms_thresh, self.test_detections_per_img\n            )\n            return pred_instances\n\n    def _forward_mask(self, features, instances):\n        \"\"\"\n        Forward logic of the mask prediction branch.\n\n        Args:\n            features (list[Tensor]): #level input features for mask prediction\n            instances (list[Instances]): the per-image instances to train/predict masks.\n                In training, they can be the proposals.\n                In inference, they can be the predicted boxes.\n\n        Returns:\n            In training, a dict of losses.\n            In inference, update `instances` with new fields \"pred_masks\" and return it.\n        \"\"\"\n        if not self.mask_on:\n            return {} if self.training else instances\n\n        if self.training:\n            # The loss is only defined on positive proposals.\n            proposals, _ = select_foreground_proposals(instances, self.num_classes)\n            proposal_boxes = [x.proposal_boxes for x in proposals]\n            mask_features = self.mask_pooler(features, proposal_boxes)\n            mask_logits = self.mask_head(mask_features)\n            return {\"loss_mask\": mask_rcnn_loss(mask_logits, 
proposals)}\n        else:\n            pred_boxes = [x.pred_boxes for x in instances]\n            mask_features = self.mask_pooler(features, pred_boxes)\n            mask_logits = self.mask_head(mask_features)\n            mask_rcnn_inference(mask_logits, instances)\n            return instances\n\n    def _forward_keypoint(self, features, instances):\n        \"\"\"\n        Forward logic of the keypoint prediction branch.\n\n        Args:\n            features (list[Tensor]): #level input features for keypoint prediction\n            instances (list[Instances]): the per-image instances to train/predict keypoints.\n                In training, they can be the proposals.\n                In inference, they can be the predicted boxes.\n\n        Returns:\n            In training, a dict of losses.\n            In inference, update `instances` with new fields \"pred_keypoints\" and return it.\n        \"\"\"\n        if not self.keypoint_on:\n            return {} if self.training else instances\n\n        num_images = len(instances)\n\n        if self.training:\n            # The loss is defined on positive proposals with at >=1 visible keypoints.\n            proposals, _ = select_foreground_proposals(instances, self.num_classes)\n            proposals = select_proposals_with_visible_keypoints(proposals)\n            proposal_boxes = [x.proposal_boxes for x in proposals]\n\n            keypoint_features = self.keypoint_pooler(features, proposal_boxes)\n            keypoint_logits = self.keypoint_head(keypoint_features)\n\n            normalizer = (\n                num_images\n                * self.batch_size_per_image\n                * self.positive_sample_fraction\n                * keypoint_logits.shape[1]\n            )\n            loss = keypoint_rcnn_loss(\n                keypoint_logits,\n                proposals,\n                normalizer=None if self.normalize_loss_by_visible_keypoints else normalizer,\n            )\n            return 
{\"loss_keypoint\": loss * self.keypoint_loss_weight}\n        else:\n            pred_boxes = [x.pred_boxes for x in instances]\n            keypoint_features = self.keypoint_pooler(features, pred_boxes)\n            keypoint_logits = self.keypoint_head(keypoint_features)\n            keypoint_rcnn_inference(keypoint_logits, instances)\n            return instances\n\n\n@ROI_HEADS_REGISTRY.register()\nclass RROIHeads(StandardROIHeads):\n    \"\"\"\n    This class is used by Rotated RPN (RRPN).\n    For now, it just supports box_head but not mask or keypoints.\n    \"\"\"\n\n    def __init__(self, cfg, input_shape: Dict[str, ShapeSpec]):\n        super(RROIHeads, self).__init__(cfg, input_shape)\n        self.box2box_transform = Box2BoxTransformRotated(\n            weights=cfg.MODEL.ROI_BOX_HEAD.BBOX_REG_WEIGHTS\n        )\n\n    def _init_box_head(self, cfg):\n        # fmt: off\n        pooler_resolution = cfg.MODEL.ROI_BOX_HEAD.POOLER_RESOLUTION\n        pooler_scales     = tuple(1.0 / self.feature_strides[k] for k in self.in_features)\n        sampling_ratio    = cfg.MODEL.ROI_BOX_HEAD.POOLER_SAMPLING_RATIO\n        pooler_type       = cfg.MODEL.ROI_BOX_HEAD.POOLER_TYPE\n        # fmt: on\n\n        # If StandardROIHeads is applied on multiple feature maps (as in FPN),\n        # then we share the same predictors and therefore the channel counts must be the same\n        in_channels = [self.feature_channels[f] for f in self.in_features]\n        # Check all channel counts are equal\n        assert len(set(in_channels)) == 1, in_channels\n        in_channels = in_channels[0]\n\n        self.box_pooler = ROIPooler(\n            output_size=pooler_resolution,\n            scales=pooler_scales,\n            sampling_ratio=sampling_ratio,\n            pooler_type=pooler_type,\n        )\n        self.box_head = build_box_head(\n            cfg, ShapeSpec(channels=in_channels, height=pooler_resolution, width=pooler_resolution)\n        )\n\n        
self.box_predictor = FastRCNNOutputLayers(\n            input_size=self.box_head.output_size,\n            num_classes=self.num_classes,\n            cls_agnostic_bbox_reg=self.cls_agnostic_bbox_reg,\n            box_dim=5,\n        )\n\n    @torch.no_grad()\n    def label_and_sample_proposals(self, proposals, targets):\n        \"\"\"\n        Prepare some proposals to be used to train the RROI heads.\n        It performs box matching between `proposals` and `targets`, and assigns\n        training labels to the proposals.\n        It returns `self.batch_size_per_image` random samples from proposals and groundtruth boxes,\n        with a fraction of positives that is no larger than `self.positive_sample_fraction.\n\n        Args:\n            See :meth:`StandardROIHeads.forward`\n\n        Returns:\n            list[Instances]: length `N` list of `Instances`s containing the proposals\n                sampled for training. Each `Instances` has the following fields:\n                - proposal_boxes: the proposal rotated boxes\n                - gt_boxes: the ground-truth rotated boxes that the proposal is assigned to\n                  (this is only meaningful if the proposal has a label > 0; if label = 0\n                   then the ground-truth box is random)\n                - other fields such as \"gt_classes\" and \"gt_masks\" that are included in `targets`.\n        \"\"\"\n        gt_boxes = [x.gt_boxes for x in targets]\n        # Augment proposals with ground-truth boxes.\n        # In the case of learned proposals (e.g., RPN), in the beginning of training\n        # the proposals are of low quality due to random initialization.\n        # It's possible that none of these initial\n        # proposals have high enough overlap with the gt objects to be used\n        # as positive examples for the second stage components (box head,\n        # cls head, mask head). 
Adding the gt boxes to the set of proposals\n        # ensures that the second stage components will have some positive\n        # examples from the start of training. For RPN, this augmentation improves\n        # convergence and empirically improves box AP on COCO by about 0.5\n        # points (under one tested configuration).\n        proposals = add_ground_truth_to_proposals(gt_boxes, proposals)\n\n        proposals_with_gt = []\n\n        num_fg_samples = []\n        num_bg_samples = []\n        for proposals_per_image, targets_per_image in zip(proposals, targets):\n            has_gt = len(targets_per_image) > 0\n            match_quality_matrix = pairwise_iou_rotated(\n                targets_per_image.gt_boxes, proposals_per_image.proposal_boxes\n            )\n            matched_idxs, proposals_labels = self.proposal_matcher(match_quality_matrix)\n\n            # Get the corresponding GT for each proposal\n            if has_gt:\n                gt_classes = targets_per_image.gt_classes[matched_idxs]\n                # Label unmatched proposals (0 label from matcher) as background (label=num_classes)\n                gt_classes[proposals_labels == 0] = self.num_classes\n                # Label ignore proposals (-1 label)\n                gt_classes[proposals_labels == -1] = -1\n            else:\n                gt_classes = torch.zeros_like(matched_idxs) + self.num_classes\n\n            sampled_fg_inds, sampled_bg_inds = subsample_labels(\n                gt_classes,\n                self.batch_size_per_image,\n                self.positive_sample_fraction,\n                self.num_classes,\n            )\n\n            sampled_inds = torch.cat([sampled_fg_inds, sampled_bg_inds], dim=0)\n\n            proposals_per_image = proposals_per_image[sampled_inds]\n            proposals_per_image.gt_classes = gt_classes[sampled_inds]\n\n            if has_gt:\n                sampled_targets = matched_idxs[sampled_inds]\n                
proposals_per_image.gt_boxes = targets_per_image.gt_boxes[sampled_targets]\n            else:\n                gt_boxes = RotatedBoxes(\n                    targets_per_image.gt_boxes.tensor.new_zeros((len(sampled_inds), 5))\n                )\n                proposals_per_image.gt_boxes = gt_boxes\n\n            num_fg_samples.append(sampled_fg_inds.numel())\n            num_bg_samples.append(sampled_bg_inds.numel())\n            proposals_with_gt.append(proposals_per_image)\n\n        # Log the number of fg/bg samples that are selected for training ROI heads\n        storage = get_event_storage()\n        storage.put_scalar(\"roi_head/num_fg_samples\", np.mean(num_fg_samples))\n        storage.put_scalar(\"roi_head/num_bg_samples\", np.mean(num_bg_samples))\n\n        return proposals_with_gt\n"
  },
  {
    "path": "detectron2/modeling/roi_heads/rotated_fast_rcnn.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport logging\nimport numpy as np\nfrom typing import Dict\nimport torch\n\nfrom detectron2.layers import ShapeSpec, batched_nms_rotated\nfrom detectron2.structures import Instances, RotatedBoxes, pairwise_iou_rotated\nfrom detectron2.utils.events import get_event_storage\n\nfrom ..box_regression import Box2BoxTransformRotated\nfrom ..poolers import ROIPooler\nfrom ..proposal_generator.proposal_utils import add_ground_truth_to_proposals\nfrom .box_head import build_box_head\nfrom .fast_rcnn import FastRCNNOutputLayers, FastRCNNOutputs\nfrom .roi_heads import ROI_HEADS_REGISTRY, StandardROIHeads\n\nlogger = logging.getLogger(__name__)\n\n\"\"\"\nShape shorthand in this module:\n\n    N: number of images in the minibatch\n    R: number of ROIs, combined over all images, in the minibatch\n    Ri: number of ROIs in image i\n    K: number of foreground classes. E.g.,there are 80 foreground classes in COCO.\n\nNaming convention:\n\n    deltas: refers to the 5-d (dx, dy, dw, dh, da) deltas that parameterize the box2box\n    transform (see :class:`box_regression.Box2BoxTransformRotated`).\n\n    pred_class_logits: predicted class scores in [-inf, +inf]; use\n        softmax(pred_class_logits) to estimate P(class).\n\n    gt_classes: ground-truth classification labels in [0, K], where [0, K) represent\n        foreground object classes and K represents the background class.\n\n    pred_proposal_deltas: predicted rotated box2box transform deltas for transforming proposals\n        to detection box predictions.\n\n    gt_proposal_deltas: ground-truth rotated box2box transform deltas\n\"\"\"\n\n\ndef fast_rcnn_inference_rotated(\n    boxes, scores, image_shapes, score_thresh, nms_thresh, topk_per_image\n):\n    \"\"\"\n    Call `fast_rcnn_inference_single_image_rotated` for all images.\n\n    Args:\n        boxes (list[Tensor]): A list of Tensors of predicted class-specific or 
class-agnostic\n            boxes for each image. Element i has shape (Ri, K * 5) if doing\n            class-specific regression, or (Ri, 5) if doing class-agnostic\n            regression, where Ri is the number of predicted objects for image i.\n            This is compatible with the output of :meth:`FastRCNNOutputs.predict_boxes`.\n        scores (list[Tensor]): A list of Tensors of predicted class scores for each image.\n            Element i has shape (Ri, K + 1), where Ri is the number of predicted objects\n            for image i. Compatible with the output of :meth:`FastRCNNOutputs.predict_probs`.\n        image_shapes (list[tuple]): A list of (width, height) tuples for each image in the batch.\n        score_thresh (float): Only return detections with a confidence score exceeding this\n            threshold.\n        nms_thresh (float):  The threshold to use for box non-maximum suppression. Value in [0, 1].\n        topk_per_image (int): The number of top scoring detections to return. Set < 0 to return\n            all detections.\n\n    Returns:\n        instances: (list[Instances]): A list of N instances, one for each image in the batch,\n            that stores the topk most confidence detections.\n        kept_indices: (list[Tensor]): A list of 1D tensor of length of N, each element indicates\n            the corresponding boxes/scores index in [0, Ri) from the input, for image i.\n    \"\"\"\n    result_per_image = [\n        fast_rcnn_inference_single_image_rotated(\n            boxes_per_image, scores_per_image, image_shape, score_thresh, nms_thresh, topk_per_image\n        )\n        for scores_per_image, boxes_per_image, image_shape in zip(scores, boxes, image_shapes)\n    ]\n    return tuple(list(x) for x in zip(*result_per_image))\n\n\ndef fast_rcnn_inference_single_image_rotated(\n    boxes, scores, image_shape, score_thresh, nms_thresh, topk_per_image\n):\n    \"\"\"\n    Single-image inference. 
Return rotated bounding-box detection results by thresholding\n    on scores and applying rotated non-maximum suppression (Rotated NMS).\n\n    Args:\n        Same as `fast_rcnn_inference_rotated`, but with rotated boxes, scores, and image shapes\n        per image.\n\n    Returns:\n        Same as `fast_rcnn_inference_rotated`, but for only one image.\n    \"\"\"\n    B = 5  # box dimension\n    scores = scores[:, :-1]\n    num_bbox_reg_classes = boxes.shape[1] // B\n    # Convert to Boxes to use the `clip` function ...\n    boxes = RotatedBoxes(boxes.reshape(-1, B))\n    boxes.clip(image_shape)\n    boxes = boxes.tensor.view(-1, num_bbox_reg_classes, B)  # R x C x B\n    # Filter results based on detection scores\n    filter_mask = scores > score_thresh  # R x K\n    # R' x 2. First column contains indices of the R predictions;\n    # Second column contains indices of classes.\n    filter_inds = filter_mask.nonzero()\n    if num_bbox_reg_classes == 1:\n        boxes = boxes[filter_inds[:, 0], 0]\n    else:\n        boxes = boxes[filter_mask]\n    scores = scores[filter_mask]\n\n    # Apply per-class Rotated NMS\n    keep = batched_nms_rotated(boxes, scores, filter_inds[:, 1], nms_thresh)\n    if topk_per_image >= 0:\n        keep = keep[:topk_per_image]\n    boxes, scores, filter_inds = boxes[keep], scores[keep], filter_inds[keep]\n\n    result = Instances(image_shape)\n    result.pred_boxes = RotatedBoxes(boxes)\n    result.scores = scores\n    result.pred_classes = filter_inds[:, 1]\n\n    return result, filter_inds[:, 0]\n\n\nclass RotatedFastRCNNOutputs(FastRCNNOutputs):\n    \"\"\"\n    A class that stores information about outputs of a Fast R-CNN head with RotatedBoxes.\n    \"\"\"\n\n    def inference(self, score_thresh, nms_thresh, topk_per_image):\n        \"\"\"\n        Args:\n            score_thresh (float): same as `fast_rcnn_inference_rotated`.\n            nms_thresh (float): same as `fast_rcnn_inference_rotated`.\n            topk_per_image 
(int): same as `fast_rcnn_inference_rotated`.\n        Returns:\n            list[Instances]: same as `fast_rcnn_inference_rotated`.\n            list[Tensor]: same as `fast_rcnn_inference_rotated`.\n        \"\"\"\n        boxes = self.predict_boxes()\n        scores = self.predict_probs()\n        image_shapes = self.image_shapes\n\n        return fast_rcnn_inference_rotated(\n            boxes, scores, image_shapes, score_thresh, nms_thresh, topk_per_image\n        )\n\n\n@ROI_HEADS_REGISTRY.register()\nclass RROIHeads(StandardROIHeads):\n    \"\"\"\n    This class is used by Rotated RPN (RRPN).\n    For now, it just supports box head but not mask or keypoints.\n    \"\"\"\n\n    def __init__(self, cfg, input_shape: Dict[str, ShapeSpec]):\n        super().__init__(cfg, input_shape)\n        self.box2box_transform = Box2BoxTransformRotated(\n            weights=cfg.MODEL.ROI_BOX_HEAD.BBOX_REG_WEIGHTS\n        )\n        assert (\n            not self.mask_on and not self.keypoint_on\n        ), \"Mask/Keypoints not supported in Rotated ROIHeads.\"\n\n    def _init_box_head(self, cfg):\n        # fmt: off\n        pooler_resolution = cfg.MODEL.ROI_BOX_HEAD.POOLER_RESOLUTION\n        pooler_scales     = tuple(1.0 / self.feature_strides[k] for k in self.in_features)\n        sampling_ratio    = cfg.MODEL.ROI_BOX_HEAD.POOLER_SAMPLING_RATIO\n        pooler_type       = cfg.MODEL.ROI_BOX_HEAD.POOLER_TYPE\n        # fmt: on\n\n        # If StandardROIHeads is applied on multiple feature maps (as in FPN),\n        # then we share the same predictors and therefore the channel counts must be the same\n        in_channels = [self.feature_channels[f] for f in self.in_features]\n        # Check all channel counts are equal\n        assert len(set(in_channels)) == 1, in_channels\n        in_channels = in_channels[0]\n\n        assert pooler_type in [\"ROIAlignRotated\"]\n\n        self.box_pooler = ROIPooler(\n            output_size=pooler_resolution,\n            
scales=pooler_scales,\n            sampling_ratio=sampling_ratio,\n            pooler_type=pooler_type,\n        )\n        self.box_head = build_box_head(\n            cfg, ShapeSpec(channels=in_channels, height=pooler_resolution, width=pooler_resolution)\n        )\n\n        self.box_predictor = FastRCNNOutputLayers(\n            input_size=self.box_head.output_size,\n            num_classes=self.num_classes,\n            cls_agnostic_bbox_reg=self.cls_agnostic_bbox_reg,\n            box_dim=5,\n        )\n\n    @torch.no_grad()\n    def label_and_sample_proposals(self, proposals, targets):\n        \"\"\"\n        Prepare some proposals to be used to train the RROI heads.\n        It performs box matching between `proposals` and `targets`, and assigns\n        training labels to the proposals.\n        It returns `self.batch_size_per_image` random samples from proposals and groundtruth boxes,\n        with a fraction of positives that is no larger than `self.positive_sample_fraction.\n\n        Args:\n            See :meth:`StandardROIHeads.forward`\n\n        Returns:\n            list[Instances]: length `N` list of `Instances`s containing the proposals\n                sampled for training. 
Each `Instances` has the following fields:\n                - proposal_boxes: the rotated proposal boxes\n                - gt_boxes: the ground-truth rotated boxes that the proposal is assigned to\n                  (this is only meaningful if the proposal has a label > 0; if label = 0\n                   then the ground-truth box is random)\n                - gt_classes: the ground-truth classification lable for each proposal\n        \"\"\"\n        gt_boxes = [x.gt_boxes for x in targets]\n        if self.proposal_append_gt:\n            proposals = add_ground_truth_to_proposals(gt_boxes, proposals)\n\n        proposals_with_gt = []\n\n        num_fg_samples = []\n        num_bg_samples = []\n        for proposals_per_image, targets_per_image in zip(proposals, targets):\n            has_gt = len(targets_per_image) > 0\n            match_quality_matrix = pairwise_iou_rotated(\n                targets_per_image.gt_boxes, proposals_per_image.proposal_boxes\n            )\n            matched_idxs, matched_labels = self.proposal_matcher(match_quality_matrix)\n            sampled_idxs, gt_classes = self._sample_proposals(\n                matched_idxs, matched_labels, targets_per_image.gt_classes\n            )\n\n            proposals_per_image = proposals_per_image[sampled_idxs]\n            proposals_per_image.gt_classes = gt_classes\n\n            if has_gt:\n                sampled_targets = matched_idxs[sampled_idxs]\n                proposals_per_image.gt_boxes = targets_per_image.gt_boxes[sampled_targets]\n            else:\n                gt_boxes = RotatedBoxes(\n                    targets_per_image.gt_boxes.tensor.new_zeros((len(sampled_idxs), 5))\n                )\n                proposals_per_image.gt_boxes = gt_boxes\n\n            num_bg_samples.append((gt_classes == self.num_classes).sum().item())\n            num_fg_samples.append(gt_classes.numel() - num_bg_samples[-1])\n            proposals_with_gt.append(proposals_per_image)\n\n        # Log 
the number of fg/bg samples that are selected for training ROI heads\n        storage = get_event_storage()\n        storage.put_scalar(\"roi_head/num_fg_samples\", np.mean(num_fg_samples))\n        storage.put_scalar(\"roi_head/num_bg_samples\", np.mean(num_bg_samples))\n\n        return proposals_with_gt\n\n    def _forward_box(self, features, proposals):\n        \"\"\"\n        Forward logic of the box prediction branch.\n\n        Args:\n            features (list[Tensor]): #level input features for box prediction\n            proposals (list[Instances]): the per-image object proposals with\n                their matching ground truth.\n                Each has fields \"proposal_boxes\", and \"objectness_logits\",\n                \"gt_classes\", \"gt_boxes\".\n\n        Returns:\n            In training, a dict of losses.\n            In inference, a list of `Instances`, the predicted instances.\n        \"\"\"\n        box_features = self.box_pooler(features, [x.proposal_boxes for x in proposals])\n        box_features = self.box_head(box_features)\n        pred_class_logits, pred_proposal_deltas = self.box_predictor(box_features)\n        del box_features\n\n        outputs = RotatedFastRCNNOutputs(\n            self.box2box_transform,\n            pred_class_logits,\n            pred_proposal_deltas,\n            proposals,\n            self.smooth_l1_beta,\n        )\n        if self.training:\n            return outputs.losses()\n        else:\n            pred_instances, _ = outputs.inference(\n                self.test_score_thresh, self.test_nms_thresh, self.test_detections_per_img\n            )\n            return pred_instances\n"
  },
  {
    "path": "detectron2/modeling/sampling.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport torch\n\n__all__ = [\"subsample_labels\"]\n\n\ndef subsample_labels(labels, num_samples, positive_fraction, bg_label):\n    \"\"\"\n    Return `num_samples` random samples from `labels`, with a fraction of\n    positives no larger than `positive_fraction`.\n\n    Args:\n        labels (Tensor): (N, ) label vector with values:\n            * -1: ignore\n            * bg_label: background (\"negative\") class\n            * otherwise: one or more foreground (\"positive\") classes\n        num_samples (int): The total number of labels with value >= 0 to return.\n            Values that are not sampled will be filled with -1 (ignore).\n        positive_fraction (float): The number of subsampled labels with values > 0\n            is `min(num_positives, int(positive_fraction * num_samples))`. The number\n            of negatives sampled is `min(num_negatives, num_samples - num_positives_sampled)`.\n            In order words, if there are not enough positives, the sample is filled with\n            negatives. If there are also not enough negatives, then as many elements are\n            sampled as is possible.\n        bg_label (int): label index of background (\"negative\") class.\n\n    Returns:\n        pos_idx, neg_idx (Tensor):\n            1D indices. 
The total number of indices is `num_samples` if possible.\n            The fraction of positive indices is `positive_fraction` if possible.\n    \"\"\"\n    positive = torch.nonzero((labels != -1) & (labels != bg_label)).squeeze(1)\n    negative = torch.nonzero(labels == bg_label).squeeze(1)\n\n    num_pos = int(num_samples * positive_fraction)\n    # protect against not enough positive examples\n    num_pos = min(positive.numel(), num_pos)\n    num_neg = num_samples - num_pos\n    # protect against not enough negative examples\n    num_neg = min(negative.numel(), num_neg)\n\n    # randomly select positive and negative examples\n    perm1 = torch.randperm(positive.numel(), device=positive.device)[:num_pos]\n    perm2 = torch.randperm(negative.numel(), device=negative.device)[:num_neg]\n\n    pos_idx = positive[perm1]\n    neg_idx = negative[perm2]\n    return pos_idx, neg_idx\n"
  },
  {
    "path": "detectron2/modeling/test_time_augmentation.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport copy\nimport numpy as np\nfrom contextlib import contextmanager\nfrom itertools import count\nimport torch\nfrom torch import nn\n\nfrom detectron2.data.detection_utils import read_image\nfrom detectron2.data.transforms import ResizeShortestEdge\nfrom detectron2.structures import Instances\n\nfrom .meta_arch import GeneralizedRCNN\nfrom .postprocessing import detector_postprocess\nfrom .roi_heads.fast_rcnn import fast_rcnn_inference_single_image\n\n__all__ = [\"DatasetMapperTTA\", \"GeneralizedRCNNWithTTA\"]\n\n\nclass DatasetMapperTTA:\n    \"\"\"\n    Implement test-time augmentation for detection data.\n    It is a callable which takes a dataset dict from a detection dataset,\n    and returns a list of dataset dicts where the images\n    are augmented from the input image by the transformations defined in the config.\n    This is used for test-time augmentation.\n    \"\"\"\n\n    def __init__(self, cfg):\n        self.min_sizes = cfg.TEST.AUG.MIN_SIZES\n        self.max_size = cfg.TEST.AUG.MAX_SIZE\n        self.flip = cfg.TEST.AUG.FLIP\n        self.image_format = cfg.INPUT.FORMAT\n\n    def __call__(self, dataset_dict):\n        \"\"\"\n        Args:\n            dict: a detection dataset dict\n\n        Returns:\n            list[dict]:\n                a list of dataset dicts, which contain augmented version of the input image.\n                The total number of dicts is ``len(min_sizes) * (2 if flip else 1)``.\n        \"\"\"\n        ret = []\n        if \"image\" not in dataset_dict:\n            numpy_image = read_image(dataset_dict[\"file_name\"], self.image_format)\n        else:\n            numpy_image = dataset_dict[\"image\"].permute(1, 2, 0).numpy().astype(\"uint8\")\n        for min_size in self.min_sizes:\n            image = np.copy(numpy_image)\n            tfm = ResizeShortestEdge(min_size, self.max_size).get_transform(image)\n            resized 
= tfm.apply_image(image)\n            resized = torch.as_tensor(resized.transpose(2, 0, 1).astype(\"float32\"))\n\n            dic = copy.deepcopy(dataset_dict)\n            dic[\"horiz_flip\"] = False\n            dic[\"image\"] = resized\n            ret.append(dic)\n\n            if self.flip:\n                dic = copy.deepcopy(dataset_dict)\n                dic[\"horiz_flip\"] = True\n                dic[\"image\"] = torch.flip(resized, dims=[2])\n                ret.append(dic)\n        return ret\n\n\nclass GeneralizedRCNNWithTTA(nn.Module):\n    \"\"\"\n    A GeneralizedRCNN with test-time augmentation enabled.\n    Its :meth:`__call__` method has the same interface as :meth:`GeneralizedRCNN.forward`.\n    \"\"\"\n\n    def __init__(self, cfg, model, tta_mapper=None, batch_size=3):\n        \"\"\"\n        Args:\n            cfg (CfgNode):\n            model (GeneralizedRCNN): a GeneralizedRCNN to apply TTA on.\n            tta_mapper (callable): takes a dataset dict and returns a list of\n                augmented versions of the dataset dict. Defaults to\n                `DatasetMapperTTA(cfg)`.\n            batch_size (int): batch the augmented images into this batch size for inference.\n        \"\"\"\n        super().__init__()\n        assert isinstance(\n            model, GeneralizedRCNN\n        ), \"TTA is only supported on GeneralizedRCNN. 
Got a model of type {}\".format(type(model))\n        self.cfg = cfg.clone()\n        assert not self.cfg.MODEL.KEYPOINT_ON, \"TTA for keypoint is not supported yet\"\n        assert (\n            not self.cfg.MODEL.LOAD_PROPOSALS\n        ), \"TTA for pre-computed proposals is not supported yet\"\n\n        self.model = model\n\n        if tta_mapper is None:\n            tta_mapper = DatasetMapperTTA(cfg)\n        self.tta_mapper = tta_mapper\n        self.batch_size = batch_size\n\n    @contextmanager\n    def _turn_off_roi_head(self, attr):\n        \"\"\"\n        Open a context where one head in `model.roi_heads` is temporarily turned off.\n        Args:\n            attr (str): the attribute in `model.roi_heads` which can be used\n                to turn off a specific head, e.g., \"mask_on\", \"keypoint_on\".\n        \"\"\"\n        roi_heads = self.model.roi_heads\n        try:\n            old = getattr(roi_heads, attr)\n        except AttributeError:\n            # The head may not be implemented in certain ROIHeads\n            old = None\n\n        if old is None:\n            yield\n        else:\n            setattr(roi_heads, attr, False)\n            yield\n            setattr(roi_heads, attr, old)\n\n    def _batch_inference(self, batched_inputs, detected_instances=None, do_postprocess=True):\n        \"\"\"\n        Execute inference on a list of inputs,\n        using batch size = self.batch_size, instead of the length of the list.\n\n        Inputs & outputs have the same format as :meth:`GeneralizedRCNN.inference`\n        \"\"\"\n        if detected_instances is None:\n            detected_instances = [None] * len(batched_inputs)\n\n        outputs = []\n        inputs, instances = [], []\n        for idx, input, instance in zip(count(), batched_inputs, detected_instances):\n            inputs.append(input)\n            instances.append(instance)\n            if len(inputs) == self.batch_size or idx == len(batched_inputs) - 1:\n             
   outputs.extend(\n                    self.model.inference(\n                        inputs,\n                        instances if instances[0] is not None else None,\n                        do_postprocess=do_postprocess,\n                    )\n                )\n                inputs, instances = [], []\n        return outputs\n\n    def __call__(self, batched_inputs):\n        \"\"\"\n        Same input/output format as :meth:`GeneralizedRCNN.forward`\n        \"\"\"\n        return [self._inference_one_image(x) for x in batched_inputs]\n\n    def _inference_one_image(self, input):\n        \"\"\"\n        Args:\n            input (dict): one dataset dict\n\n        Returns:\n            dict: one output dict\n        \"\"\"\n        augmented_inputs = self.tta_mapper(input)\n\n        do_hflip = [k.pop(\"horiz_flip\", False) for k in augmented_inputs]\n        heights = [k[\"height\"] for k in augmented_inputs]\n        widths = [k[\"width\"] for k in augmented_inputs]\n        assert (\n            len(set(heights)) == 1 and len(set(widths)) == 1\n        ), \"Augmented version of the inputs should have the same original resolution!\"\n        height = heights[0]\n        width = widths[0]\n\n        # 1. 
Detect boxes from all augmented versions\n        # 1.1: forward with all augmented images\n        with self._turn_off_roi_head(\"mask_on\"), self._turn_off_roi_head(\"keypoint_on\"):\n            # temporarily disable mask/keypoint head\n            outputs = self._batch_inference(augmented_inputs, do_postprocess=False)\n        # 1.2: union the results\n        all_boxes = []\n        all_scores = []\n        all_classes = []\n        for idx, output in enumerate(outputs):\n            rescaled_output = detector_postprocess(output, height, width)\n            pred_boxes = rescaled_output.pred_boxes.tensor\n            if do_hflip[idx]:\n                pred_boxes[:, [0, 2]] = width - pred_boxes[:, [2, 0]]\n            all_boxes.append(pred_boxes)\n            all_scores.extend(rescaled_output.scores)\n            all_classes.extend(rescaled_output.pred_classes)\n        all_boxes = torch.cat(all_boxes, dim=0).cpu()\n        num_boxes = len(all_boxes)\n\n        # 1.3: select from the union of all results\n        num_classes = self.cfg.MODEL.ROI_HEADS.NUM_CLASSES\n        all_scores_2d = torch.zeros(num_boxes, num_classes, device=all_boxes.device)\n        for idx, cls, score in zip(count(), all_classes, all_scores):\n            all_scores_2d[idx, cls] = score\n\n        merged_instances, _ = fast_rcnn_inference_single_image(\n            all_boxes,\n            all_scores_2d,\n            (height, width),\n            1e-8,\n            self.cfg.MODEL.ROI_HEADS.NMS_THRESH_TEST,\n            self.cfg.TEST.DETECTIONS_PER_IMAGE,\n        )\n\n        if not self.cfg.MODEL.MASK_ON:\n            return {\"instances\": merged_instances}\n\n        # 2. 
Use the detected boxes to obtain masks\n        # 2.1: rescale the detected boxes\n        augmented_instances = []\n        for idx, input in enumerate(augmented_inputs):\n            actual_height, actual_width = input[\"image\"].shape[1:3]\n            scale_x = actual_width * 1.0 / width\n            scale_y = actual_height * 1.0 / height\n            pred_boxes = merged_instances.pred_boxes.clone()\n            pred_boxes.tensor[:, 0::2] *= scale_x\n            pred_boxes.tensor[:, 1::2] *= scale_y\n            if do_hflip[idx]:\n                pred_boxes.tensor[:, [0, 2]] = actual_width - pred_boxes.tensor[:, [2, 0]]\n\n            aug_instances = Instances(\n                image_size=(actual_height, actual_width),\n                pred_boxes=pred_boxes,\n                pred_classes=merged_instances.pred_classes,\n                scores=merged_instances.scores,\n            )\n            augmented_instances.append(aug_instances)\n        # 2.2: run forward on the detected boxes\n        outputs = self._batch_inference(augmented_inputs, augmented_instances, do_postprocess=False)\n        for idx, output in enumerate(outputs):\n            if do_hflip[idx]:\n                output.pred_masks = output.pred_masks.flip(dims=[3])\n        # 2.3: average the predictions\n        all_pred_masks = torch.stack([o.pred_masks for o in outputs], dim=0)\n        avg_pred_masks = torch.mean(all_pred_masks, dim=0)\n        output = outputs[0]\n        output.pred_masks = avg_pred_masks\n        output = detector_postprocess(output, height, width)\n        return {\"instances\": output}\n"
  },
  {
    "path": "detectron2/solver/__init__.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nfrom .build import build_lr_scheduler, build_optimizer\nfrom .lr_scheduler import WarmupCosineLR, WarmupMultiStepLR\n\n__all__ = [k for k in globals().keys() if not k.startswith(\"_\")]\n"
  },
  {
    "path": "detectron2/solver/build.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nfrom typing import Any, Dict, List\nimport torch\n\nfrom detectron2.config import CfgNode\n\nfrom .lr_scheduler import WarmupCosineLR, WarmupMultiStepLR\n\n\ndef build_optimizer(cfg: CfgNode, model: torch.nn.Module) -> torch.optim.Optimizer:\n    \"\"\"\n    Build an optimizer from config.\n    \"\"\"\n    params: List[Dict[str, Any]] = []\n    for key, value in model.named_parameters():\n        if not value.requires_grad:\n            continue\n        lr = cfg.SOLVER.BASE_LR\n        weight_decay = cfg.SOLVER.WEIGHT_DECAY\n        if key.endswith(\"norm.weight\") or key.endswith(\"norm.bias\"):\n            weight_decay = cfg.SOLVER.WEIGHT_DECAY_NORM\n        elif key.endswith(\".bias\"):\n            # NOTE: unlike Detectron v1, we now default BIAS_LR_FACTOR to 1.0\n            # and WEIGHT_DECAY_BIAS to WEIGHT_DECAY so that bias optimizer\n            # hyperparameters are by default exactly the same as for regular\n            # weights.\n            lr = cfg.SOLVER.BASE_LR * cfg.SOLVER.BIAS_LR_FACTOR\n            weight_decay = cfg.SOLVER.WEIGHT_DECAY_BIAS\n        params += [{\"params\": [value], \"lr\": lr, \"weight_decay\": weight_decay}]\n\n    optimizer = torch.optim.SGD(params, lr, momentum=cfg.SOLVER.MOMENTUM)\n    return optimizer\n\n\ndef build_lr_scheduler(\n    cfg: CfgNode, optimizer: torch.optim.Optimizer\n) -> torch.optim.lr_scheduler._LRScheduler:\n    \"\"\"\n    Build a LR scheduler from config.\n    \"\"\"\n    name = cfg.SOLVER.LR_SCHEDULER_NAME\n    if name == \"WarmupMultiStepLR\":\n        return WarmupMultiStepLR(\n            optimizer,\n            cfg.SOLVER.STEPS,\n            cfg.SOLVER.GAMMA,\n            warmup_factor=cfg.SOLVER.WARMUP_FACTOR,\n            warmup_iters=cfg.SOLVER.WARMUP_ITERS,\n            warmup_method=cfg.SOLVER.WARMUP_METHOD,\n        )\n    elif name == \"WarmupCosineLR\":\n        return WarmupCosineLR(\n            
optimizer,\n            cfg.SOLVER.MAX_ITER,\n            warmup_factor=cfg.SOLVER.WARMUP_FACTOR,\n            warmup_iters=cfg.SOLVER.WARMUP_ITERS,\n            warmup_method=cfg.SOLVER.WARMUP_METHOD,\n        )\n    else:\n        raise ValueError(\"Unknown LR scheduler: {}\".format(name))\n"
  },
  {
    "path": "detectron2/solver/lr_scheduler.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport math\nfrom bisect import bisect_right\nfrom typing import List\nimport torch\n\n# NOTE: PyTorch's LR scheduler interface uses names that assume the LR changes\n# only on epoch boundaries. We typically use iteration based schedules instead.\n# As a result, \"epoch\" (e.g., as in self.last_epoch) should be understood to mean\n# \"iteration\" instead.\n\n# FIXME: ideally this would be achieved with a CombinedLRScheduler, separating\n# MultiStepLR with WarmupLR but the current LRScheduler design doesn't allow it.\n\n\nclass WarmupMultiStepLR(torch.optim.lr_scheduler._LRScheduler):\n    def __init__(\n        self,\n        optimizer: torch.optim.Optimizer,\n        milestones: List[int],\n        gamma: float = 0.1,\n        warmup_factor: float = 0.001,\n        warmup_iters: int = 1000,\n        warmup_method: str = \"linear\",\n        last_epoch: int = -1,\n    ):\n        if not list(milestones) == sorted(milestones):\n            raise ValueError(\n                \"Milestones should be a list of\" \" increasing integers. 
Got {}\", milestones\n            )\n        self.milestones = milestones\n        self.gamma = gamma\n        self.warmup_factor = warmup_factor\n        self.warmup_iters = warmup_iters\n        self.warmup_method = warmup_method\n        super().__init__(optimizer, last_epoch)\n\n    def get_lr(self) -> List[float]:\n        warmup_factor = _get_warmup_factor_at_iter(\n            self.warmup_method, self.last_epoch, self.warmup_iters, self.warmup_factor\n        )\n        return [\n            base_lr * warmup_factor * self.gamma ** bisect_right(self.milestones, self.last_epoch)\n            for base_lr in self.base_lrs\n        ]\n\n    def _compute_values(self) -> List[float]:\n        # The new interface\n        return self.get_lr()\n\n\nclass WarmupCosineLR(torch.optim.lr_scheduler._LRScheduler):\n    def __init__(\n        self,\n        optimizer: torch.optim.Optimizer,\n        max_iters: int,\n        warmup_factor: float = 0.001,\n        warmup_iters: int = 1000,\n        warmup_method: str = \"linear\",\n        last_epoch: int = -1,\n    ):\n        self.max_iters = max_iters\n        self.warmup_factor = warmup_factor\n        self.warmup_iters = warmup_iters\n        self.warmup_method = warmup_method\n        super().__init__(optimizer, last_epoch)\n\n    def get_lr(self) -> List[float]:\n        warmup_factor = _get_warmup_factor_at_iter(\n            self.warmup_method, self.last_epoch, self.warmup_iters, self.warmup_factor\n        )\n        # Different definitions of half-cosine with warmup are possible. For\n        # simplicity we multiply the standard half-cosine schedule by the warmup\n        # factor. An alternative is to start the period of the cosine at warmup_iters\n        # instead of at 0. 
In the case that warmup_iters << max_iters the two are\n        # very close to each other.\n        return [\n            base_lr\n            * warmup_factor\n            * 0.5\n            * (1.0 + math.cos(math.pi * self.last_epoch / self.max_iters))\n            for base_lr in self.base_lrs\n        ]\n\n    def _compute_values(self) -> List[float]:\n        # The new interface\n        return self.get_lr()\n\n\ndef _get_warmup_factor_at_iter(\n    method: str, iter: int, warmup_iters: int, warmup_factor: float\n) -> float:\n    \"\"\"\n    Return the learning rate warmup factor at a specific iteration.\n    See https://arxiv.org/abs/1706.02677 for more details.\n\n    Args:\n        method (str): warmup method; either \"constant\" or \"linear\".\n        iter (int): iteration at which to calculate the warmup factor.\n        warmup_iters (int): the number of warmup iterations.\n        warmup_factor (float): the base warmup factor (the meaning changes according\n            to the method used).\n\n    Returns:\n        float: the effective warmup factor at the given iteration.\n    \"\"\"\n    if iter >= warmup_iters:\n        return 1.0\n\n    if method == \"constant\":\n        return warmup_factor\n    elif method == \"linear\":\n        alpha = iter / warmup_iters\n        return warmup_factor * (1 - alpha) + alpha\n    else:\n        raise ValueError(\"Unknown warmup method: {}\".format(method))\n"
  },
  {
    "path": "detectron2/structures/__init__.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nfrom .boxes import Boxes, BoxMode, pairwise_iou\nfrom .image_list import ImageList\nfrom .instances import Instances\nfrom .keypoints import Keypoints, heatmaps_to_keypoints\nfrom .masks import BitMasks, PolygonMasks, rasterize_polygons_within_box\nfrom .rotated_boxes import RotatedBoxes\nfrom .rotated_boxes import pairwise_iou as pairwise_iou_rotated\n\n__all__ = [k for k in globals().keys() if not k.startswith(\"_\")]\n"
  },
  {
    "path": "detectron2/structures/boxes.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport numpy as np\nfrom enum import Enum, unique\nfrom typing import Iterator, List, Tuple, Union\nimport torch\n\nfrom detectron2.layers import cat\n\n_RawBoxType = Union[List[float], Tuple[float, ...], torch.Tensor, np.ndarray]\n\n\n@unique\nclass BoxMode(Enum):\n    \"\"\"\n    Enum of different ways to represent a box.\n\n    Attributes:\n\n        XYXY_ABS: (x0, y0, x1, y1) in absolute floating points coordinates.\n            The coordinates in range [0, width or height].\n        XYWH_ABS: (x0, y0, w, h) in absolute floating points coordinates.\n        XYXY_REL: (x0, y0, x1, y1) in range [0, 1]. They are relative to the size of the image.\n        XYWH_REL: (x0, y0, w, h) in range [0, 1]. They are relative to the size of the image.\n    \"\"\"\n\n    XYXY_ABS = 0\n    XYWH_ABS = 1\n    XYXY_REL = 2\n    XYWH_REL = 3\n\n    @staticmethod\n    def convert(box: _RawBoxType, from_mode: \"BoxMode\", to_mode: \"BoxMode\") -> _RawBoxType:\n        \"\"\"\n        Args:\n            box: can be a 4-tuple, 4-list or a Nx4 array/tensor.\n            from_mode, to_mode (BoxMode)\n\n        Returns:\n            The converted box of the same type.\n        \"\"\"\n        if from_mode == to_mode:\n            return box\n\n        original_type = type(box)\n        single_box = isinstance(box, (list, tuple))\n        if single_box:\n            arr = np.array(box)\n            assert arr.shape == (\n                4,\n            ), \"BoxMode.convert takes either a 4-tuple/list or a Nx4 array/tensor\"\n        else:\n            arr = box\n\n        assert to_mode.value < 2 and from_mode.value < 2, \"Relative mode not yet supported!\"\n\n        original_shape = arr.shape\n        arr = arr.reshape(-1, 4)\n        if to_mode == BoxMode.XYXY_ABS and from_mode == BoxMode.XYWH_ABS:\n            arr[:, 2] += arr[:, 0]\n            arr[:, 3] += arr[:, 1]\n        elif from_mode == 
BoxMode.XYXY_ABS and to_mode == BoxMode.XYWH_ABS:\n            arr[:, 2] -= arr[:, 0]\n            arr[:, 3] -= arr[:, 1]\n        else:\n            raise RuntimeError(\"Cannot be here!\")\n        if single_box:\n            return original_type(arr.flatten())\n        return arr.reshape(*original_shape)\n\n\nclass Boxes:\n    \"\"\"\n    This structure stores a list of boxes as a Nx4 torch.Tensor.\n    It supports some common methods about boxes\n    (`area`, `clip`, `nonempty`, etc),\n    and also behaves like a Tensor\n    (support indexing, `to(device)`, `.device`, and iteration over all boxes)\n\n    Attributes:\n        tensor: float matrix of Nx4.\n    \"\"\"\n\n    BoxSizeType = Union[List[int], Tuple[int, int]]\n\n    def __init__(self, tensor: torch.Tensor):\n        \"\"\"\n        Args:\n            tensor (Tensor[float]): a Nx4 matrix.  Each row is (x1, y1, x2, y2).\n        \"\"\"\n        device = tensor.device if isinstance(tensor, torch.Tensor) else torch.device(\"cpu\")\n        tensor = torch.as_tensor(tensor, dtype=torch.float32, device=device)\n        if tensor.numel() == 0:\n            tensor = torch.zeros(0, 4, dtype=torch.float32, device=device)\n        assert tensor.dim() == 2 and tensor.size(-1) == 4, tensor.size()\n\n        self.tensor = tensor\n\n    def clone(self) -> \"Boxes\":\n        \"\"\"\n        Clone the Boxes.\n\n        Returns:\n            Boxes\n        \"\"\"\n        return Boxes(self.tensor.clone())\n\n    def to(self, device: str) -> \"Boxes\":\n        return Boxes(self.tensor.to(device))\n\n    def area(self) -> torch.Tensor:\n        \"\"\"\n        Computes the area of all the boxes.\n\n        Returns:\n            torch.Tensor: a vector with areas of each box.\n        \"\"\"\n        box = self.tensor\n        area = (box[:, 2] - box[:, 0]) * (box[:, 3] - box[:, 1])\n        return area\n\n    def clip(self, box_size: BoxSizeType) -> None:\n        \"\"\"\n        Clip (in place) the boxes by limiting x 
coordinates to the range [0, width]\n        and y coordinates to the range [0, height].\n\n        Args:\n            box_size (height, width): The clipping box's size.\n        \"\"\"\n        assert torch.isfinite(self.tensor).all()\n        h, w = box_size\n        self.tensor[:, 0].clamp_(min=0, max=w)\n        self.tensor[:, 1].clamp_(min=0, max=h)\n        self.tensor[:, 2].clamp_(min=0, max=w)\n        self.tensor[:, 3].clamp_(min=0, max=h)\n\n    def nonempty(self, threshold: int = 0) -> torch.Tensor:\n        \"\"\"\n        Find boxes that are non-empty.\n        A box is considered empty, if either of its side is no larger than threshold.\n\n        Returns:\n            Tensor:\n                a binary vector which represents whether each box is empty\n                (False) or non-empty (True).\n        \"\"\"\n        box = self.tensor\n        widths = box[:, 2] - box[:, 0]\n        heights = box[:, 3] - box[:, 1]\n        keep = (widths > threshold) & (heights > threshold)\n        return keep\n\n    def __getitem__(self, item: Union[int, slice, torch.BoolTensor]) -> \"Boxes\":\n        \"\"\"\n        Returns:\n            Boxes: Create a new :class:`Boxes` by indexing.\n\n        The following usage are allowed:\n        1. `new_boxes = boxes[3]`: return a `Boxes` which contains only one box.\n        2. `new_boxes = boxes[2:10]`: return a slice of boxes.\n        3. `new_boxes = boxes[vector]`, where vector is a torch.BoolTensor\n           with `length = len(boxes)`. 
Nonzero elements in the vector will be selected.\n\n        Note that the returned Boxes might share storage with this Boxes,\n        subject to Pytorch's indexing semantics.\n        \"\"\"\n        if isinstance(item, int):\n            return Boxes(self.tensor[item].view(1, -1))\n        b = self.tensor[item]\n        assert b.dim() == 2, \"Indexing on Boxes with {} failed to return a matrix!\".format(item)\n        return Boxes(b)\n\n    def __len__(self) -> int:\n        return self.tensor.shape[0]\n\n    def __repr__(self) -> str:\n        return \"Boxes(\" + str(self.tensor) + \")\"\n\n    def inside_box(self, box_size: BoxSizeType, boundary_threshold: int = 0) -> torch.Tensor:\n        \"\"\"\n        Args:\n            box_size (height, width): Size of the reference box.\n            boundary_threshold (int): Boxes that extend beyond the reference box\n                boundary by more than boundary_threshold are considered \"outside\".\n\n        Returns:\n            a binary vector, indicating whether each box is inside the reference box.\n        \"\"\"\n        height, width = box_size\n        inds_inside = (\n            (self.tensor[..., 0] >= -boundary_threshold)\n            & (self.tensor[..., 1] >= -boundary_threshold)\n            & (self.tensor[..., 2] < width + boundary_threshold)\n            & (self.tensor[..., 3] < height + boundary_threshold)\n        )\n        return inds_inside\n\n    def get_centers(self) -> torch.Tensor:\n        \"\"\"\n        Returns:\n            The box centers in a Nx2 array of (x, y).\n        \"\"\"\n        return (self.tensor[:, :2] + self.tensor[:, 2:]) / 2\n\n    @staticmethod\n    def cat(boxes_list: List[\"Boxes\"]) -> \"Boxes\":\n        \"\"\"\n        Concatenates a list of Boxes into a single Boxes\n\n        Arguments:\n            boxes_list (list[Boxes])\n\n        Returns:\n            Boxes: the concatenated Boxes\n        \"\"\"\n        assert isinstance(boxes_list, (list, tuple))\n        
assert len(boxes_list) > 0\n        assert all(isinstance(box, Boxes) for box in boxes_list)\n\n        cat_boxes = type(boxes_list[0])(cat([b.tensor for b in boxes_list], dim=0))\n        return cat_boxes\n\n    @property\n    def device(self) -> str:\n        return self.tensor.device\n\n    def __iter__(self) -> Iterator[torch.Tensor]:\n        \"\"\"\n        Yield a box as a Tensor of shape (4,) at a time.\n        \"\"\"\n        yield from self.tensor\n\n\n# implementation from https://github.com/kuangliu/torchcv/blob/master/torchcv/utils/box.py\n# with slight modifications\ndef pairwise_iou(boxes1: Boxes, boxes2: Boxes) -> torch.Tensor:\n    \"\"\"\n    Given two lists of boxes of size N and M,\n    compute the IoU (intersection over union)\n    between __all__ N x M pairs of boxes.\n    The box order must be (xmin, ymin, xmax, ymax).\n\n    Args:\n        boxes1,boxes2 (Boxes): two `Boxes`. Contains N & M boxes, respectively.\n\n    Returns:\n        Tensor: IoU, sized [N,M].\n    \"\"\"\n    area1 = boxes1.area()\n    area2 = boxes2.area()\n\n    boxes1, boxes2 = boxes1.tensor, boxes2.tensor\n\n    lt = torch.max(boxes1[:, None, :2], boxes2[:, :2])  # [N,M,2]\n    rb = torch.min(boxes1[:, None, 2:], boxes2[:, 2:])  # [N,M,2]\n\n    wh = (rb - lt).clamp(min=0)  # [N,M,2]\n    inter = wh[:, :, 0] * wh[:, :, 1]  # [N,M]\n\n    # handle empty boxes\n    iou = torch.where(\n        inter > 0,\n        inter / (area1[:, None] + area2 - inter),\n        torch.zeros(1, dtype=inter.dtype, device=inter.device),\n    )\n    return iou\n\n\ndef matched_boxlist_iou(boxes1: Boxes, boxes2: Boxes) -> torch.Tensor:\n    \"\"\"\n    Compute pairwise intersection over union (IOU) of two sets of matched\n    boxes. 
The box order must be (xmin, ymin, xmax, ymax).\n    Similar to boxlist_iou, but computes only diagonal elements of the matrix\n    Arguments:\n        boxes1: (Boxes) bounding boxes, sized [N,4].\n        boxes2: (Boxes) bounding boxes, sized [N,4].\n    Returns:\n        (tensor) iou, sized [N].\n    \"\"\"\n    assert len(boxes1) == len(boxes2), (\n        \"boxlists should have the same\"\n        \"number of entries, got {}, {}\".format(len(boxes1), len(boxes2))\n    )\n    area1 = boxes1.area()  # [N]\n    area2 = boxes2.area()  # [N]\n    box1, box2 = boxes1.tensor, boxes2.tensor\n    lt = torch.max(box1[:, :2], box2[:, :2])  # [N,2]\n    rb = torch.min(box1[:, 2:], box2[:, 2:])  # [N,2]\n    wh = (rb - lt).clamp(min=0)  # [N,2]\n    inter = wh[:, 0] * wh[:, 1]  # [N]\n    iou = inter / (area1 + area2 - inter)  # [N]\n    return iou\n"
  },
  {
    "path": "detectron2/structures/image_list.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.\nfrom __future__ import division\nfrom typing import Any, List, Sequence, Tuple, Union\nimport torch\nfrom torch.nn import functional as F\n\n\nclass ImageList(object):\n    \"\"\"\n    Structure that holds a list of images (of possibly\n    varying sizes) as a single tensor.\n    This works by padding the images to the same size,\n    and storing in a field the original sizes of each image\n\n    Attributes:\n        image_sizes (list[tuple[int, int]]): each tuple is (h, w)\n    \"\"\"\n\n    def __init__(self, tensor: torch.Tensor, image_sizes: List[Tuple[int, int]]):\n        \"\"\"\n        Arguments:\n            tensor (Tensor): of shape (N, H, W) or (N, C_1, ..., C_K, H, W) where K >= 1\n            image_sizes (list[tuple[int, int]]): Each tuple is (h, w).\n        \"\"\"\n        self.tensor = tensor\n        self.image_sizes = image_sizes\n\n    def __len__(self) -> int:\n        return len(self.image_sizes)\n\n    def __getitem__(self, idx: Union[int, slice]) -> torch.Tensor:\n        \"\"\"\n        Access the individual image in its original size.\n\n        Returns:\n            Tensor: an image of shape (H, W) or (C_1, ..., C_K, H, W) where K >= 1\n        \"\"\"\n        size = self.image_sizes[idx]\n        return self.tensor[idx, ..., : size[0], : size[1]]  # type: ignore\n\n    def to(self, *args: Any, **kwargs: Any) -> \"ImageList\":\n        cast_tensor = self.tensor.to(*args, **kwargs)\n        return ImageList(cast_tensor, self.image_sizes)\n\n    @staticmethod\n    def from_tensors(\n        tensors: Sequence[torch.Tensor], size_divisibility: int = 0, pad_value: float = 0.0\n    ) -> \"ImageList\":\n        \"\"\"\n        Args:\n            tensors: a tuple or list of `torch.Tensors`, each of shape (Hi, Wi) or\n                (C_1, ..., C_K, Hi, Wi) where K >= 1. 
The Tensors will be padded with `pad_value`\n                so that they will have the same shape.\n            size_divisibility (int): If `size_divisibility > 0`, also adds padding to ensure\n                the common height and width is divisible by `size_divisibility`\n            pad_value (float): value to pad\n\n        Returns:\n            an `ImageList`.\n        \"\"\"\n        assert len(tensors) > 0\n        assert isinstance(tensors, (tuple, list))\n        for t in tensors:\n            assert isinstance(t, torch.Tensor), type(t)\n            assert t.shape[1:-2] == tensors[0].shape[1:-2], t.shape\n        # per dimension maximum (H, W) or (C_1, ..., C_K, H, W) where K >= 1 among all tensors\n        max_size = tuple(max(s) for s in zip(*[img.shape for img in tensors]))\n\n        if size_divisibility > 0:\n            import math\n\n            stride = size_divisibility\n            max_size = list(max_size)  # type: ignore\n            max_size[-2] = int(math.ceil(max_size[-2] / stride) * stride)  # type: ignore\n            max_size[-1] = int(math.ceil(max_size[-1] / stride) * stride)  # type: ignore\n            max_size = tuple(max_size)\n\n        image_sizes = [im.shape[-2:] for im in tensors]\n\n        if len(tensors) == 1:\n            # This seems slightly (2%) faster.\n            # TODO: check whether it's faster for multiple images as well\n            image_size = image_sizes[0]\n            padded = F.pad(\n                tensors[0],\n                [0, max_size[-1] - image_size[1], 0, max_size[-2] - image_size[0]],\n                value=pad_value,\n            )\n            batched_imgs = padded.unsqueeze_(0)\n        else:\n            batch_shape = (len(tensors),) + max_size\n            batched_imgs = tensors[0].new_full(batch_shape, pad_value)\n            for img, pad_img in zip(tensors, batched_imgs):\n                pad_img[..., : img.shape[-2], : img.shape[-1]].copy_(img)\n\n        return 
ImageList(batched_imgs.contiguous(), image_sizes)\n"
  },
  {
    "path": "detectron2/structures/instances.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport itertools\nfrom typing import Any, Dict, List, Tuple, Union\nimport torch\n\nfrom detectron2.layers import cat\n\n\nclass Instances:\n    \"\"\"\n    This class represents a list of instances in an image.\n    It stores the attributes of instances (e.g., boxes, masks, labels, scores) as \"fields\".\n    All fields must have the same `__len__` which is the number of instances.\n\n    All other (non-field) attributes of this class are considered private:\n    they must start with '_' and are not modifiable by a user.\n\n    Some basic usage:\n\n    1. Set/Get a field:\n       instances.gt_boxes = Boxes(...)\n       print(instances.pred_masks)\n       print('gt_masks' in instances)\n    2. `len(instances)` returns the number of instances\n    3. Indexing: `instances[indices]` will apply the indexing on all the fields\n       and returns a new `Instances`.\n       Typically, `indices` is a binary vector of length num_instances,\n       or a vector of integer indices.\n    \"\"\"\n\n    def __init__(self, image_size: Tuple[int, int], **kwargs: Any):\n        \"\"\"\n        Args:\n            image_size (height, width): the spatial size of the image.\n            kwargs: fields to add to this `Instances`.\n        \"\"\"\n        self._image_size = image_size\n        self._fields: Dict[str, Any] = {}\n        for k, v in kwargs.items():\n            self.set(k, v)\n\n    @property\n    def image_size(self) -> Tuple[int, int]:\n        \"\"\"\n        Returns:\n            tuple: height, width\n        \"\"\"\n        return self._image_size\n\n    def __setattr__(self, name: str, val: Any) -> None:\n        if name.startswith(\"_\"):\n            super().__setattr__(name, val)\n        else:\n            self.set(name, val)\n\n    def __getattr__(self, name: str) -> Any:\n        if name == \"_fields\" or name not in self._fields:\n            raise AttributeError(\"Cannot 
find field '{}' in the given Instances!\".format(name))\n        return self._fields[name]\n\n    def set(self, name: str, value: Any) -> None:\n        \"\"\"\n        Set the field named `name` to `value`.\n        The length of `value` must be the number of instances,\n        and must agree with other existing fields in this object.\n        \"\"\"\n        data_len = len(value)\n        if len(self._fields):\n            assert (\n                len(self) == data_len\n            ), \"Adding a field of length {} to a Instances of length {}\".format(data_len, len(self))\n        self._fields[name] = value\n\n    def has(self, name: str) -> bool:\n        \"\"\"\n        Returns:\n            bool: whether the field called `name` exists.\n        \"\"\"\n        return name in self._fields\n\n    def remove(self, name: str) -> None:\n        \"\"\"\n        Remove the field called `name`.\n        \"\"\"\n        del self._fields[name]\n\n    def get(self, name: str) -> Any:\n        \"\"\"\n        Returns the field called `name`.\n        \"\"\"\n        return self._fields[name]\n\n    def get_fields(self) -> Dict[str, Any]:\n        \"\"\"\n        Returns:\n            dict: a dict which maps names (str) to data of the fields\n\n        Modifying the returned dict will modify this instance.\n        \"\"\"\n        return self._fields\n\n    # Tensor-like methods\n    def to(self, device: str) -> \"Instances\":\n        \"\"\"\n        Returns:\n            Instances: all fields are called with a `to(device)`, if the field has this method.\n        \"\"\"\n        ret = Instances(self._image_size)\n        for k, v in self._fields.items():\n            if hasattr(v, \"to\"):\n                v = v.to(device)\n            ret.set(k, v)\n        return ret\n\n    def __getitem__(self, item: Union[int, slice, torch.BoolTensor]) -> \"Instances\":\n        \"\"\"\n        Args:\n            item: an index-like object and will be used to index all the 
fields.\n\n        Returns:\n            If `item` is a string, return the data in the corresponding field.\n            Otherwise, returns an `Instances` where all fields are indexed by `item`.\n        \"\"\"\n        ret = Instances(self._image_size)\n        for k, v in self._fields.items():\n            ret.set(k, v[item])\n        return ret\n\n    def __len__(self) -> int:\n        for v in self._fields.values():\n            return len(v)\n        raise NotImplementedError(\"Empty Instances does not support __len__!\")\n\n    @staticmethod\n    def cat(instance_lists: List[\"Instances\"]) -> \"Instances\":\n        \"\"\"\n        Args:\n            instance_lists (list[Instances])\n\n        Returns:\n            Instances\n        \"\"\"\n        assert all(isinstance(i, Instances) for i in instance_lists)\n        assert len(instance_lists) > 0\n        if len(instance_lists) == 1:\n            return instance_lists[0]\n\n        image_size = instance_lists[0].image_size\n        for i in instance_lists[1:]:\n            assert i.image_size == image_size\n        ret = Instances(image_size)\n        for k in instance_lists[0]._fields.keys():\n            values = [i.get(k) for i in instance_lists]\n            v0 = values[0]\n            if isinstance(v0, torch.Tensor):\n                values = cat(values, dim=0)\n            elif isinstance(v0, list):\n                values = list(itertools.chain(*values))\n            elif hasattr(type(v0), \"cat\"):\n                values = type(v0).cat(values)\n            else:\n                raise ValueError(\"Unsupported type {} for concatenation\".format(type(v0)))\n            ret.set(k, values)\n        return ret\n\n    def __str__(self) -> str:\n        s = self.__class__.__name__ + \"(\"\n        s += \"num_instances={}, \".format(len(self))\n        s += \"image_height={}, \".format(self._image_size[0])\n        s += \"image_width={}, \".format(self._image_size[1])\n        s += 
\"fields=[{}])\".format(\", \".join(self._fields.keys()))\n        return s\n\n    def __repr__(self) -> str:\n        s = self.__class__.__name__ + \"(\"\n        s += \"num_instances={}, \".format(len(self))\n        s += \"image_height={}, \".format(self._image_size[0])\n        s += \"image_width={}, \".format(self._image_size[1])\n        s += \"fields=[\"\n        for k, v in self._fields.items():\n            s += \"{} = {}, \".format(k, v)\n        s += \"])\"\n        return s\n"
  },
  {
    "path": "detectron2/structures/keypoints.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport numpy as np\nfrom typing import Any, List, Tuple, Union\nimport torch\n\nfrom detectron2.layers import interpolate\n\n\nclass Keypoints:\n    \"\"\"\n    Stores keypoint annotation data. GT Instances have a `gt_keypoints` property\n    containing the x,y location and visibility flag of each keypoint. This tensor has shape\n    (N, K, 3) where N is the number of instances and K is the number of keypoints per instance.\n\n    The visibility flag follows the COCO format and must be one of three integers:\n    * v=0: not labeled (in which case x=y=0)\n    * v=1: labeled but not visible\n    * v=2: labeled and visible\n    \"\"\"\n\n    def __init__(self, keypoints: Union[torch.Tensor, np.ndarray, List[List[float]]]):\n        \"\"\"\n        Arguments:\n            keypoints: A Tensor, numpy array, or list of the x, y, and visibility of each keypoint.\n                The shape should be (N, K, 3) where N is the number of\n                instances, and K is the number of keypoints per instance.\n        \"\"\"\n        device = keypoints.device if isinstance(keypoints, torch.Tensor) else torch.device(\"cpu\")\n        keypoints = torch.as_tensor(keypoints, dtype=torch.float32, device=device)\n        assert keypoints.dim() == 3 and keypoints.shape[2] == 3, keypoints.shape\n        self.tensor = keypoints\n\n    def __len__(self) -> int:\n        return self.tensor.size(0)\n\n    def to(self, *args: Any, **kwargs: Any) -> \"Keypoints\":\n        return type(self)(self.tensor.to(*args, **kwargs))\n\n    def to_heatmap(self, boxes: torch.Tensor, heatmap_size: int) -> torch.Tensor:\n        \"\"\"\n        Arguments:\n            boxes: Nx4 tensor, the boxes to draw the keypoints to\n\n        Returns:\n            heatmaps:\n                A tensor of shape (N, K) containing an integer spatial label\n                in the range [0, heatmap_size**2 - 1] for each keypoint in 
the input.\n            valid:\n                A tensor of shape (N, K) containing whether each keypoint is in the roi or not.\n        \"\"\"\n        return _keypoints_to_heatmap(self.tensor, boxes, heatmap_size)\n\n    def __getitem__(self, item: Union[int, slice, torch.BoolTensor]) -> \"Keypoints\":\n        \"\"\"\n        Create a new `Keypoints` by indexing on this `Keypoints`.\n\n        The following usage are allowed:\n\n        1. `new_kpts = kpts[3]`: return a `Keypoints` which contains only one instance.\n        2. `new_kpts = kpts[2:10]`: return a slice of key points.\n        3. `new_kpts = kpts[vector]`, where vector is a torch.ByteTensor\n           with `length = len(kpts)`. Nonzero elements in the vector will be selected.\n\n        Note that the returned Keypoints might share storage with this Keypoints,\n        subject to Pytorch's indexing semantics.\n        \"\"\"\n        if isinstance(item, int):\n            return Keypoints([self.tensor[item]])\n        return Keypoints(self.tensor[item])\n\n    def __repr__(self) -> str:\n        s = self.__class__.__name__ + \"(\"\n        s += \"num_instances={})\".format(len(self.tensor))\n        return s\n\n\n# TODO make this nicer, this is a direct translation from C2 (but removing the inner loop)\ndef _keypoints_to_heatmap(\n    keypoints: torch.Tensor, rois: torch.Tensor, heatmap_size: int\n) -> Tuple[torch.Tensor, torch.Tensor]:\n    \"\"\"\n    Encode keypoint locations into a target heatmap for use in SoftmaxWithLoss across space.\n\n    Maps keypoints from the half-open interval [x1, x2) on continuous image coordinates to the\n    closed interval [0, heatmap_size - 1] on discrete image coordinates. 
We use the\n    continuous-discrete conversion from Heckbert 1990 (\"What is the coordinate of a pixel?\"):\n    d = floor(c) and c = d + 0.5, where d is a discrete coordinate and c is a continuous coordinate.\n\n    Arguments:\n        keypoints: tensor of keypoint locations in of shape (N, K, 3).\n        rois: Nx4 tensor of rois in xyxy format\n        heatmap_size: integer side length of square heatmap.\n\n    Returns:\n        heatmaps: A tensor of shape (N, K) containing an integer spatial label\n            in the range [0, heatmap_size**2 - 1] for each keypoint in the input.\n        valid: A tensor of shape (N, K) containing whether each keypoint is in\n            the roi or not.\n    \"\"\"\n\n    if rois.numel() == 0:\n        return rois.new().long(), rois.new().long()\n    offset_x = rois[:, 0]\n    offset_y = rois[:, 1]\n    scale_x = heatmap_size / (rois[:, 2] - rois[:, 0])\n    scale_y = heatmap_size / (rois[:, 3] - rois[:, 1])\n\n    offset_x = offset_x[:, None]\n    offset_y = offset_y[:, None]\n    scale_x = scale_x[:, None]\n    scale_y = scale_y[:, None]\n\n    x = keypoints[..., 0]\n    y = keypoints[..., 1]\n\n    x_boundary_inds = x == rois[:, 2][:, None]\n    y_boundary_inds = y == rois[:, 3][:, None]\n\n    x = (x - offset_x) * scale_x\n    x = x.floor().long()\n    y = (y - offset_y) * scale_y\n    y = y.floor().long()\n\n    x[x_boundary_inds] = heatmap_size - 1\n    y[y_boundary_inds] = heatmap_size - 1\n\n    valid_loc = (x >= 0) & (y >= 0) & (x < heatmap_size) & (y < heatmap_size)\n    vis = keypoints[..., 2] > 0\n    valid = (valid_loc & vis).long()\n\n    lin_ind = y * heatmap_size + x\n    heatmaps = lin_ind * valid\n\n    return heatmaps, valid\n\n\n@torch.no_grad()\ndef heatmaps_to_keypoints(maps: torch.Tensor, rois: torch.Tensor) -> torch.Tensor:\n    \"\"\"\n    Args:\n        maps (Tensor): (#ROIs, #keypoints, POOL_H, POOL_W)\n        rois (Tensor): (#ROIs, 4)\n\n    Extract predicted keypoint locations from heatmaps. 
Output has shape\n    (#rois, #keypoints, 4) with the last dimension corresponding to (x, y, logit, prob)\n    for each keypoint.\n\n    Converts a discrete image coordinate in an NxN image to a continuous keypoint coordinate. We\n    maintain consistency with keypoints_to_heatmap by using the conversion from Heckbert 1990:\n    c = d + 0.5, where d is a discrete coordinate and c is a continuous coordinate.\n    \"\"\"\n    offset_x = rois[:, 0]\n    offset_y = rois[:, 1]\n\n    widths = (rois[:, 2] - rois[:, 0]).clamp(min=1)\n    heights = (rois[:, 3] - rois[:, 1]).clamp(min=1)\n    widths_ceil = widths.ceil()\n    heights_ceil = heights.ceil()\n\n    num_rois, num_keypoints = maps.shape[:2]\n    xy_preds = maps.new_zeros(rois.shape[0], num_keypoints, 4)\n\n    width_corrections = widths / widths_ceil\n    height_corrections = heights / heights_ceil\n\n    keypoints_idx = torch.arange(num_keypoints, device=maps.device)\n\n    for i in range(num_rois):\n        outsize = (int(heights_ceil[i]), int(widths_ceil[i]))\n        roi_map = interpolate(maps[[i]], size=outsize, mode=\"bicubic\", align_corners=False).squeeze(\n            0\n        )  # #keypoints x H x W\n\n        # softmax over the spatial region\n        max_score, _ = roi_map.view(num_keypoints, -1).max(1)\n        max_score = max_score.view(num_keypoints, 1, 1)\n        tmp_full_resolution = (roi_map - max_score).exp_()\n        tmp_pool_resolution = (maps[i] - max_score).exp_()\n        # Produce scores over the region H x W, but normalize with POOL_H x POOL_W\n        # So that the scores of objects of different absolute sizes will be more comparable\n        roi_map_probs = tmp_full_resolution / tmp_pool_resolution.sum((1, 2), keepdim=True)\n\n        w = roi_map.shape[2]\n        pos = roi_map.view(num_keypoints, -1).argmax(1)\n\n        x_int = pos % w\n        y_int = (pos - x_int) // w\n\n        assert (\n            roi_map_probs[keypoints_idx, y_int, x_int]\n            == 
roi_map_probs.view(num_keypoints, -1).max(1)[0]\n        ).all()\n\n        x = (x_int.float() + 0.5) * width_corrections[i]\n        y = (y_int.float() + 0.5) * height_corrections[i]\n\n        xy_preds[i, :, 0] = x + offset_x[i]\n        xy_preds[i, :, 1] = y + offset_y[i]\n        xy_preds[i, :, 2] = roi_map[keypoints_idx, y_int, x_int]\n        xy_preds[i, :, 3] = roi_map_probs[keypoints_idx, y_int, x_int]\n\n    return xy_preds\n"
  },
  {
    "path": "detectron2/structures/masks.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates.\nimport copy\nimport itertools\nimport numpy as np\nfrom typing import Any, Iterator, List, Union\nimport pycocotools.mask as mask_util\nimport torch\nfrom torch import device\n\nfrom detectron2.layers.roi_align import ROIAlign\nfrom detectron2.utils.memory import retry_if_cuda_oom\n\nfrom .boxes import Boxes\n\n\ndef polygon_area(x, y):\n    # Using the shoelace formula\n    # https://stackoverflow.com/questions/24467972/calculate-area-of-polygon-given-x-y-coordinates\n    return 0.5 * np.abs(np.dot(x, np.roll(y, 1)) - np.dot(y, np.roll(x, 1)))\n\n\ndef polygons_to_bitmask(polygons: List[np.ndarray], height: int, width: int) -> np.ndarray:\n    \"\"\"\n    Args:\n        polygons (list[ndarray]): each array has shape (Nx2,)\n        height, width (int)\n\n    Returns:\n        ndarray: a bool mask of shape (height, width)\n    \"\"\"\n    if len(polygons) == 0:\n        # COCOAPI does not support empty polygons\n        return np.zeros((height, width)).astype(np.bool)\n    rles = mask_util.frPyObjects(polygons, height, width)\n    rle = mask_util.merge(rles)\n    return mask_util.decode(rle).astype(np.bool)\n\n\ndef rasterize_polygons_within_box(\n    polygons: List[np.ndarray], box: np.ndarray, mask_size: int\n) -> torch.Tensor:\n    \"\"\"\n    Rasterize the polygons into a mask image and\n    crop the mask content in the given box.\n    The cropped mask is resized to (mask_size, mask_size).\n\n    This function is used when generating training targets for mask head in Mask R-CNN.\n    Given original ground-truth masks for an image, new ground-truth mask\n    training targets in the size of `mask_size x mask_size`\n    must be provided for each predicted box. 
This function will be called to\n    produce such targets.\n\n    Args:\n        polygons (list[ndarray[float]]): a list of polygons, which represents an instance.\n        box: 4-element numpy array\n        mask_size (int):\n\n    Returns:\n        Tensor: BoolTensor of shape (mask_size, mask_size)\n    \"\"\"\n    # 1. Shift the polygons w.r.t the boxes\n    w, h = box[2] - box[0], box[3] - box[1]\n\n    polygons = copy.deepcopy(polygons)\n    for p in polygons:\n        p[0::2] = p[0::2] - box[0]\n        p[1::2] = p[1::2] - box[1]\n\n    # 2. Rescale the polygons to the new box size\n    # max() to avoid division by small number\n    ratio_h = mask_size / max(h, 0.1)\n    ratio_w = mask_size / max(w, 0.1)\n\n    if ratio_h == ratio_w:\n        for p in polygons:\n            p *= ratio_h\n    else:\n        for p in polygons:\n            p[0::2] *= ratio_w\n            p[1::2] *= ratio_h\n\n    # 3. Rasterize the polygons with coco api\n    mask = polygons_to_bitmask(polygons, mask_size, mask_size)\n    mask = torch.from_numpy(mask)\n    return mask\n\n\nclass BitMasks:\n    \"\"\"\n    This class stores the segmentation masks for all objects in one image, in\n    the form of bitmaps.\n\n    Attributes:\n        tensor: bool Tensor of N,H,W, representing N instances in the image.\n    \"\"\"\n\n    def __init__(self, tensor: Union[torch.Tensor, np.ndarray]):\n        \"\"\"\n        Args:\n            tensor: bool Tensor of N,H,W, representing N instances in the image.\n        \"\"\"\n        if isinstance(tensor, torch.Tensor):\n            tensor = tensor.to(torch.bool)\n        else:\n            tensor = torch.as_tensor(tensor, dtype=torch.bool, device=torch.device(\"cpu\"))\n        assert tensor.dim() == 3, tensor.size()\n        self.image_size = tensor.shape[1:]\n        self.tensor = tensor\n\n    @torch.jit.unused\n    def to(self, *args: Any, **kwargs: Any) -> \"BitMasks\":\n        return BitMasks(self.tensor.to(*args, **kwargs))\n\n    
@property\n    def device(self) -> torch.device:\n        return self.tensor.device\n\n    @torch.jit.unused\n    def __getitem__(self, item: Union[int, slice, torch.BoolTensor]) -> \"BitMasks\":\n        \"\"\"\n        Returns:\n            BitMasks: Create a new :class:`BitMasks` by indexing.\n\n        The following usage are allowed:\n\n        1. `new_masks = masks[3]`: return a `BitMasks` which contains only one mask.\n        2. `new_masks = masks[2:10]`: return a slice of masks.\n        3. `new_masks = masks[vector]`, where vector is a torch.BoolTensor\n           with `length = len(masks)`. Nonzero elements in the vector will be selected.\n\n        Note that the returned object might share storage with this object,\n        subject to Pytorch's indexing semantics.\n        \"\"\"\n        if isinstance(item, int):\n            return BitMasks(self.tensor[item].unsqueeze(0))\n        m = self.tensor[item]\n        assert m.dim() == 3, \"Indexing on BitMasks with {} returns a tensor with shape {}!\".format(\n            item, m.shape\n        )\n        return BitMasks(m)\n\n    @torch.jit.unused\n    def __iter__(self) -> torch.Tensor:\n        yield from self.tensor\n\n    @torch.jit.unused\n    def __repr__(self) -> str:\n        s = self.__class__.__name__ + \"(\"\n        s += \"num_instances={})\".format(len(self.tensor))\n        return s\n\n    def __len__(self) -> int:\n        return self.tensor.shape[0]\n\n    def nonempty(self) -> torch.Tensor:\n        \"\"\"\n        Find masks that are non-empty.\n\n        Returns:\n            Tensor: a BoolTensor which represents\n                whether each mask is empty (False) or non-empty (True).\n        \"\"\"\n        return self.tensor.flatten(1).any(dim=1)\n\n    @staticmethod\n    def from_polygon_masks(\n        polygon_masks: Union[\"PolygonMasks\", List[List[np.ndarray]]], height: int, width: int\n    ) -> \"BitMasks\":\n        \"\"\"\n        Args:\n            polygon_masks 
(list[list[ndarray]] or PolygonMasks)\n            height, width (int)\n        \"\"\"\n        if isinstance(polygon_masks, PolygonMasks):\n            polygon_masks = polygon_masks.polygons\n        masks = [polygons_to_bitmask(p, height, width) for p in polygon_masks]\n        if len(masks):\n            return BitMasks(torch.stack([torch.from_numpy(x) for x in masks]))\n        else:\n            return BitMasks(torch.empty(0, height, width, dtype=torch.bool))\n\n    @staticmethod\n    def from_roi_masks(roi_masks: \"ROIMasks\", height: int, width: int) -> \"BitMasks\":\n        \"\"\"\n        Args:\n            roi_masks:\n            height, width (int):\n        \"\"\"\n        return roi_masks.to_bitmasks(height, width)\n\n    def crop_and_resize(self, boxes: torch.Tensor, mask_size: int) -> torch.Tensor:\n        \"\"\"\n        Crop each bitmask by the given box, and resize results to (mask_size, mask_size).\n        This can be used to prepare training targets for Mask R-CNN.\n        It has less reconstruction error compared to rasterization with polygons.\n        However we observe no difference in accuracy,\n        but BitMasks requires more memory to store all the masks.\n\n        Args:\n            boxes (Tensor): Nx4 tensor storing the boxes for each mask\n            mask_size (int): the size of the rasterized mask.\n\n        Returns:\n            Tensor:\n                A bool tensor of shape (N, mask_size, mask_size), where\n                N is the number of predicted boxes for this image.\n        \"\"\"\n        assert len(boxes) == len(self), \"{} != {}\".format(len(boxes), len(self))\n        device = self.tensor.device\n\n        batch_inds = torch.arange(len(boxes), device=device).to(dtype=boxes.dtype)[:, None]\n        rois = torch.cat([batch_inds, boxes], dim=1)  # Nx5\n\n        bit_masks = self.tensor.to(dtype=torch.float32)\n        rois = rois.to(device=device)\n        output = (\n            ROIAlign((mask_size, mask_size), 
1.0, 0, aligned=True)\n            .forward(bit_masks[:, None, :, :], rois)\n            .squeeze(1)\n        )\n        output = output >= 0.5\n        return output\n\n    def get_bounding_boxes(self) -> Boxes:\n        \"\"\"\n        Returns:\n            Boxes: tight bounding boxes around bitmasks.\n            If a mask is empty, it's bounding box will be all zero.\n        \"\"\"\n        boxes = torch.zeros(self.tensor.shape[0], 4, dtype=torch.float32)\n        x_any = torch.any(self.tensor, dim=1)\n        y_any = torch.any(self.tensor, dim=2)\n        for idx in range(self.tensor.shape[0]):\n            x = torch.where(x_any[idx, :])[0]\n            y = torch.where(y_any[idx, :])[0]\n            if len(x) > 0 and len(y) > 0:\n                boxes[idx, :] = torch.as_tensor(\n                    [x[0], y[0], x[-1] + 1, y[-1] + 1], dtype=torch.float32\n                )\n        return Boxes(boxes)\n\n    @staticmethod\n    def cat(bitmasks_list: List[\"BitMasks\"]) -> \"BitMasks\":\n        \"\"\"\n        Concatenates a list of BitMasks into a single BitMasks\n\n        Arguments:\n            bitmasks_list (list[BitMasks])\n\n        Returns:\n            BitMasks: the concatenated BitMasks\n        \"\"\"\n        assert isinstance(bitmasks_list, (list, tuple))\n        assert len(bitmasks_list) > 0\n        assert all(isinstance(bitmask, BitMasks) for bitmask in bitmasks_list)\n\n        cat_bitmasks = type(bitmasks_list[0])(torch.cat([bm.tensor for bm in bitmasks_list], dim=0))\n        return cat_bitmasks\n\n\nclass PolygonMasks:\n    \"\"\"\n    This class stores the segmentation masks for all objects in one image, in the form of polygons.\n\n    Attributes:\n        polygons: list[list[ndarray]]. 
Each ndarray is a float64 vector representing a polygon.\n    \"\"\"\n\n    def __init__(self, polygons: List[List[Union[torch.Tensor, np.ndarray]]]):\n        \"\"\"\n        Arguments:\n            polygons (list[list[np.ndarray]]): The first\n                level of the list correspond to individual instances,\n                the second level to all the polygons that compose the\n                instance, and the third level to the polygon coordinates.\n                The third level array should have the format of\n                [x0, y0, x1, y1, ..., xn, yn] (n >= 3).\n        \"\"\"\n        if not isinstance(polygons, list):\n            raise ValueError(\n                \"Cannot create PolygonMasks: Expect a list of list of polygons per image. \"\n                \"Got '{}' instead.\".format(type(polygons))\n            )\n\n        def _make_array(t: Union[torch.Tensor, np.ndarray]) -> np.ndarray:\n            # Use float64 for higher precision, because why not?\n            # Always put polygons on CPU (self.to is a no-op) since they\n            # are supposed to be small tensors.\n            # May need to change this assumption if GPU placement becomes useful\n            if isinstance(t, torch.Tensor):\n                t = t.cpu().numpy()\n            return np.asarray(t).astype(\"float64\")\n\n        def process_polygons(\n            polygons_per_instance: List[Union[torch.Tensor, np.ndarray]]\n        ) -> List[np.ndarray]:\n            if not isinstance(polygons_per_instance, list):\n                raise ValueError(\n                    \"Cannot create polygons: Expect a list of polygons per instance. 
\"\n                    \"Got '{}' instead.\".format(type(polygons_per_instance))\n                )\n            # transform each polygon to a numpy array\n            polygons_per_instance = [_make_array(p) for p in polygons_per_instance]\n            for polygon in polygons_per_instance:\n                if len(polygon) % 2 != 0 or len(polygon) < 6:\n                    raise ValueError(f\"Cannot create a polygon from {len(polygon)} coordinates.\")\n            return polygons_per_instance\n\n        self.polygons: List[List[np.ndarray]] = [\n            process_polygons(polygons_per_instance) for polygons_per_instance in polygons\n        ]\n\n    def to(self, *args: Any, **kwargs: Any) -> \"PolygonMasks\":\n        return self\n\n    @property\n    def device(self) -> torch.device:\n        return torch.device(\"cpu\")\n\n    def get_bounding_boxes(self) -> Boxes:\n        \"\"\"\n        Returns:\n            Boxes: tight bounding boxes around polygon masks.\n        \"\"\"\n        boxes = torch.zeros(len(self.polygons), 4, dtype=torch.float32)\n        for idx, polygons_per_instance in enumerate(self.polygons):\n            minxy = torch.as_tensor([float(\"inf\"), float(\"inf\")], dtype=torch.float32)\n            maxxy = torch.zeros(2, dtype=torch.float32)\n            for polygon in polygons_per_instance:\n                coords = torch.from_numpy(polygon).view(-1, 2).to(dtype=torch.float32)\n                minxy = torch.min(minxy, torch.min(coords, dim=0).values)\n                maxxy = torch.max(maxxy, torch.max(coords, dim=0).values)\n            boxes[idx, :2] = minxy\n            boxes[idx, 2:] = maxxy\n        return Boxes(boxes)\n\n    def nonempty(self) -> torch.Tensor:\n        \"\"\"\n        Find masks that are non-empty.\n\n        Returns:\n            Tensor:\n                a BoolTensor which represents whether each mask is empty (False) or not (True).\n        \"\"\"\n        keep = [1 if len(polygon) > 0 else 0 for polygon in 
self.polygons]\n        return torch.from_numpy(np.asarray(keep, dtype=np.bool))\n\n    def __getitem__(self, item: Union[int, slice, List[int], torch.BoolTensor]) -> \"PolygonMasks\":\n        \"\"\"\n        Support indexing over the instances and return a `PolygonMasks` object.\n        `item` can be:\n\n        1. An integer. It will return an object with only one instance.\n        2. A slice. It will return an object with the selected instances.\n        3. A list[int]. It will return an object with the selected instances,\n           correpsonding to the indices in the list.\n        4. A vector mask of type BoolTensor, whose length is num_instances.\n           It will return an object with the instances whose mask is nonzero.\n        \"\"\"\n        if isinstance(item, int):\n            selected_polygons = [self.polygons[item]]\n        elif isinstance(item, slice):\n            selected_polygons = self.polygons[item]\n        elif isinstance(item, list):\n            selected_polygons = [self.polygons[i] for i in item]\n        elif isinstance(item, torch.Tensor):\n            # Polygons is a list, so we have to move the indices back to CPU.\n            if item.dtype == torch.bool:\n                assert item.dim() == 1, item.shape\n                item = item.nonzero().squeeze(1).cpu().numpy().tolist()\n            elif item.dtype in [torch.int32, torch.int64]:\n                item = item.cpu().numpy().tolist()\n            else:\n                raise ValueError(\"Unsupported tensor dtype={} for indexing!\".format(item.dtype))\n            selected_polygons = [self.polygons[i] for i in item]\n        return PolygonMasks(selected_polygons)\n\n    def __iter__(self) -> Iterator[List[np.ndarray]]:\n        \"\"\"\n        Yields:\n            list[ndarray]: the polygons for one instance.\n            Each Tensor is a float64 vector representing a polygon.\n        \"\"\"\n        return iter(self.polygons)\n\n    def __repr__(self) -> str:\n        s 
= self.__class__.__name__ + \"(\"\n        s += \"num_instances={})\".format(len(self.polygons))\n        return s\n\n    def __len__(self) -> int:\n        return len(self.polygons)\n\n    def crop_and_resize(self, boxes: torch.Tensor, mask_size: int) -> torch.Tensor:\n        \"\"\"\n        Crop each mask by the given box, and resize results to (mask_size, mask_size).\n        This can be used to prepare training targets for Mask R-CNN.\n\n        Args:\n            boxes (Tensor): Nx4 tensor storing the boxes for each mask\n            mask_size (int): the size of the rasterized mask.\n\n        Returns:\n            Tensor: A bool tensor of shape (N, mask_size, mask_size), where\n            N is the number of predicted boxes for this image.\n        \"\"\"\n        assert len(boxes) == len(self), \"{} != {}\".format(len(boxes), len(self))\n\n        device = boxes.device\n        # Put boxes on the CPU, as the polygon representation is not efficient GPU-wise\n        # (several small tensors for representing a single instance mask)\n        boxes = boxes.to(torch.device(\"cpu\"))\n\n        results = [\n            rasterize_polygons_within_box(poly, box.numpy(), mask_size)\n            for poly, box in zip(self.polygons, boxes)\n        ]\n        \"\"\"\n        poly: list[list[float]], the polygons for one instance\n        box: a tensor of shape (4,)\n        \"\"\"\n        if len(results) == 0:\n            return torch.empty(0, mask_size, mask_size, dtype=torch.bool, device=device)\n        return torch.stack(results, dim=0).to(device=device)\n\n    def area(self):\n        \"\"\"\n        Computes area of the mask.\n        Only works with Polygons, using the shoelace formula:\n        https://stackoverflow.com/questions/24467972/calculate-area-of-polygon-given-x-y-coordinates\n\n        Returns:\n            Tensor: a vector, area for each instance\n        \"\"\"\n\n        area = []\n        for polygons_per_instance in self.polygons:\n            
area_per_instance = 0\n            for p in polygons_per_instance:\n                area_per_instance += polygon_area(p[0::2], p[1::2])\n            area.append(area_per_instance)\n\n        return torch.tensor(area)\n\n    @staticmethod\n    def cat(polymasks_list: List[\"PolygonMasks\"]) -> \"PolygonMasks\":\n        \"\"\"\n        Concatenates a list of PolygonMasks into a single PolygonMasks\n\n        Arguments:\n            polymasks_list (list[PolygonMasks])\n\n        Returns:\n            PolygonMasks: the concatenated PolygonMasks\n        \"\"\"\n        assert isinstance(polymasks_list, (list, tuple))\n        assert len(polymasks_list) > 0\n        assert all(isinstance(polymask, PolygonMasks) for polymask in polymasks_list)\n\n        cat_polymasks = type(polymasks_list[0])(\n            list(itertools.chain.from_iterable(pm.polygons for pm in polymasks_list))\n        )\n        return cat_polymasks\n\n\nclass ROIMasks:\n    \"\"\"\n    Represent masks by N smaller masks defined in some ROIs. Once ROI boxes are given,\n    full-image bitmask can be obtained by \"pasting\" the mask on the region defined\n    by the corresponding ROI box.\n    \"\"\"\n\n    def __init__(self, tensor: torch.Tensor):\n        \"\"\"\n        Args:\n            tensor: (N, M, M) mask tensor that defines the mask within each ROI.\n        \"\"\"\n        if tensor.dim() != 3:\n            raise ValueError(\"ROIMasks must take a masks of 3 dimension.\")\n        self.tensor = tensor\n\n    def to(self, device: torch.device) -> \"ROIMasks\":\n        return ROIMasks(self.tensor.to(device))\n\n    @property\n    def device(self) -> device:\n        return self.tensor.device\n\n    def __len__(self):\n        return self.tensor.shape[0]\n\n    def __getitem__(self, item) -> \"ROIMasks\":\n        \"\"\"\n        Returns:\n            ROIMasks: Create a new :class:`ROIMasks` by indexing.\n\n        The following usage are allowed:\n\n        1. 
`new_masks = masks[2:10]`: return a slice of masks.\n        2. `new_masks = masks[vector]`, where vector is a torch.BoolTensor\n           with `length = len(masks)`. Nonzero elements in the vector will be selected.\n\n        Note that the returned object might share storage with this object,\n        subject to Pytorch's indexing semantics.\n        \"\"\"\n        t = self.tensor[item]\n        if t.dim() != 3:\n            raise ValueError(\n                f\"Indexing on ROIMasks with {item} returns a tensor with shape {t.shape}!\"\n            )\n        return ROIMasks(t)\n\n    @torch.jit.unused\n    def __repr__(self) -> str:\n        s = self.__class__.__name__ + \"(\"\n        s += \"num_instances={})\".format(len(self.tensor))\n        return s\n\n    @torch.jit.unused\n    def to_bitmasks(self, boxes: torch.Tensor, height, width, threshold=0.5):\n        \"\"\"\n        Args: see documentation of :func:`paste_masks_in_image`.\n        \"\"\"\n        from detectron2.layers.mask_ops import paste_masks_in_image, _paste_masks_tensor_shape\n\n        if torch.jit.is_tracing():\n            if isinstance(height, torch.Tensor):\n                paste_func = _paste_masks_tensor_shape\n            else:\n                paste_func = paste_masks_in_image\n        else:\n            paste_func = retry_if_cuda_oom(paste_masks_in_image)\n        bitmasks = paste_func(self.tensor, boxes.tensor, (height, width), threshold=threshold)\n        return BitMasks(bitmasks)\n"
  },
  {
    "path": "detectron2/structures/rotated_boxes.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nfrom typing import Iterator, List, Union\nimport torch\n\nfrom detectron2.layers import cat\nfrom detectron2.layers.rotated_boxes import pairwise_iou_rotated\n\nfrom .boxes import Boxes\n\n\nclass RotatedBoxes(Boxes):\n    \"\"\"\n    This structure stores a list of rotated boxes as a Nx5 torch.Tensor.\n    It supports some common methods about boxes\n    (`area`, `clip`, `nonempty`, etc),\n    and also behaves like a Tensor\n    (support indexing, `to(device)`, `.device`, and iteration over all boxes)\n    \"\"\"\n\n    def __init__(self, tensor: torch.Tensor):\n        \"\"\"\n        Args:\n            tensor (Tensor[float]): a Nx5 matrix.  Each row is\n                (x_center, y_center, width, height, angle),\n                in which angle is represented in degrees.\n                While there's no strict range restriction for it,\n                the recommended principal range is between (-180, 180] degrees.\n\n        Assume we have a horizontal box B = (x_center, y_center, width, height),\n        where width is along the x-axis and height is along the y-axis.\n        The rotated box B_rot (x_center, y_center, width, height, angle)\n        can be seen as:\n\n        1. When angle == 0:\n           B_rot == B\n        2. When angle > 0:\n           B_rot is obtained by rotating B w.r.t its center by :math:`|angle|` degrees CCW;\n        3. 
When angle < 0:\n           B_rot is obtained by rotating B w.r.t its center by :math:`|angle|` degrees CW.\n\n        Mathematically, since the right-handed coordinate system for image space\n        is (y, x), where y is top->down and x is left->right, the 4 vertices of the\n        rotated rectangle :math:`(yr_i, xr_i)` (i = 1, 2, 3, 4) can be obtained from\n        the vertices of the horizontal rectangle (y_i, x_i) (i = 1, 2, 3, 4)\n        in the following way (:math:`\\\\theta = angle*\\\\pi/180` is the angle in radians,\n        (y_c, x_c) is the center of the rectangle):\n\n        .. math::\n\n            yr_i = \\\\cos(\\\\theta) (y_i - y_c) - \\\\sin(\\\\theta) (x_i - x_c) + y_c,\n\n            xr_i = \\\\sin(\\\\theta) (y_i - y_c) + \\\\cos(\\\\theta) (x_i - x_c) + x_c,\n\n        which is the standard rigid-body rotation transformation.\n\n        Intuitively, the angle is\n        (1) the rotation angle from y-axis in image space\n        to the height vector (top->down in the box's local coordinate system)\n        of the box in CCW, and\n        (2) the rotation angle from x-axis in image space\n        to the width vector (left->right in the box's local coordinate system)\n        of the box in CCW.\n\n        More intuitively, consider the following horizontal box ABCD represented\n        in (x1, y1, x2, y2): (3, 2, 7, 4),\n        covering the [3, 7] x [2, 4] region of the continuous coordinate system\n        which looks like this:\n\n        .. code:: none\n\n            O--------> x\n            |\n            |  A---B\n            |  |   |\n            |  D---C\n            |\n            v y\n\n        Note that each capital letter represents one 0-dimensional geometric point\n        instead of a 'square pixel' here.\n\n        In the example above, using (x, y) to represent a point we have:\n\n        .. 
math::\n\n            O = (0, 0), A = (3, 2), B = (7, 2), C = (7, 4), D = (3, 4)\n\n        We name vector AB = vector DC as the width vector in box's local coordinate system, and\n        vector AD = vector BC as the height vector in box's local coordinate system. Initially,\n        when angle = 0 degree, they're aligned with the positive directions of x-axis and y-axis\n        in the image space, respectively.\n\n        For better illustration, we denote the center of the box as E,\n\n        .. code:: none\n\n            O--------> x\n            |\n            |  A---B\n            |  | E |\n            |  D---C\n            |\n            v y\n\n        where the center E = ((3+7)/2, (2+4)/2) = (5, 3).\n\n        Also,\n\n        .. math::\n\n            width = |AB| = |CD| = 7 - 3 = 4,\n            height = |AD| = |BC| = 4 - 2 = 2.\n\n        Therefore, the corresponding representation for the same shape in rotated box in\n        (x_center, y_center, width, height, angle) format is:\n\n        (5, 3, 4, 2, 0),\n\n        Now, let's consider (5, 3, 4, 2, 90), which is rotated by 90 degrees\n        CCW (counter-clockwise) by definition. It looks like this:\n\n        .. code:: none\n\n            O--------> x\n            |   B-C\n            |   | |\n            |   |E|\n            |   | |\n            |   A-D\n            v y\n\n        The center E is still located at the same point (5, 3), while the vertices\n        ABCD are rotated by 90 degrees CCW with regard to E:\n        A = (4, 5), B = (4, 1), C = (6, 1), D = (6, 5)\n\n        Here, 90 degrees can be seen as the CCW angle to rotate from y-axis to\n        vector AD or vector BC (the top->down height vector in box's local coordinate system),\n        or the CCW angle to rotate from x-axis to vector AB or vector DC (the left->right\n        width vector in box's local coordinate system).\n\n        .. 
math::\n\n            width = |AB| = |CD| = 5 - 1 = 4,\n            height = |AD| = |BC| = 6 - 4 = 2.\n\n        Next, how about (5, 3, 4, 2, -90), which is rotated by 90 degrees CW (clockwise)\n        by definition? It looks like this:\n\n        .. code:: none\n\n            O--------> x\n            |   D-A\n            |   | |\n            |   |E|\n            |   | |\n            |   C-B\n            v y\n\n        The center E is still located at the same point (5, 3), while the vertices\n        ABCD are rotated by 90 degrees CW with regard to E:\n        A = (6, 1), B = (6, 5), C = (4, 5), D = (4, 1)\n\n        .. math::\n\n            width = |AB| = |CD| = 5 - 1 = 4,\n            height = |AD| = |BC| = 6 - 4 = 2.\n\n        This covers exactly the same region as (5, 3, 4, 2, 90) does, and their IoU\n        will be 1. However, these two will generate different RoI Pooling results and\n        should not be treated as an identical box.\n\n        On the other hand, it's easy to see that (X, Y, W, H, A) is identical to\n        (X, Y, W, H, A+360N), for any integer N. For example (5, 3, 4, 2, 270) would be\n        identical to (5, 3, 4, 2, -90), because rotating the shape 270 degrees CCW is\n        equivalent to rotating the same shape 90 degrees CW.\n\n        We could rotate further to get (5, 3, 4, 2, 180), or (5, 3, 4, 2, -180):\n\n        .. code:: none\n\n            O--------> x\n            |\n            |  C---D\n            |  | E |\n            |  B---A\n            |\n            v y\n\n        .. math::\n\n            A = (7, 4), B = (3, 4), C = (3, 2), D = (7, 2),\n\n            width = |AB| = |CD| = 7 - 3 = 4,\n            height = |AD| = |BC| = 4 - 2 = 2.\n\n        Finally, this is a very inaccurate (heavily quantized) illustration of\n        how (5, 3, 4, 2, 60) looks like in case anyone wonders:\n\n        .. 
code:: none\n\n            O--------> x\n            |     B\\\n            |    /  C\n            |   /E /\n            |  A  /\n            |   `D\n            v y\n\n        It's still a rectangle with center of (5, 3), width of 4 and height of 2,\n        but its angle (and thus orientation) is somewhere between\n        (5, 3, 4, 2, 0) and (5, 3, 4, 2, 90).\n        \"\"\"\n        device = tensor.device if isinstance(tensor, torch.Tensor) else torch.device(\"cpu\")\n        tensor = torch.as_tensor(tensor, dtype=torch.float32, device=device)\n        if tensor.numel() == 0:\n            tensor = torch.zeros(0, 5, dtype=torch.float32, device=device)\n        assert tensor.dim() == 2 and tensor.size(-1) == 5, tensor.size()\n\n        self.tensor = tensor\n\n    def clone(self) -> \"RotatedBoxes\":\n        \"\"\"\n        Clone the RotatedBoxes.\n\n        Returns:\n            RotatedBoxes\n        \"\"\"\n        return RotatedBoxes(self.tensor.clone())\n\n    def to(self, device: str) -> \"RotatedBoxes\":\n        return RotatedBoxes(self.tensor.to(device))\n\n    def area(self) -> torch.Tensor:\n        \"\"\"\n        Computes the area of all the boxes.\n\n        Returns:\n            torch.Tensor: a vector with areas of each box.\n        \"\"\"\n        box = self.tensor\n        area = box[:, 2] * box[:, 3]\n        return area\n\n    def normalize_angles(self) -> None:\n        \"\"\"\n        Restrict angles to the range of (-180, 180] degrees\n        \"\"\"\n        self.tensor[:, 4] = self.tensor[:, 4] % 360\n        self.tensor[:, 4][torch.where(self.tensor[:, 4] > 180)] -= 360\n\n    def clip(self, box_size: Boxes.BoxSizeType, clip_angle_threshold: float = 1.0) -> None:\n        \"\"\"\n        Clip (in place) the boxes by limiting x coordinates to the range [0, width]\n        and y coordinates to the range [0, height].\n\n        For RRPN:\n        Only clip boxes that are almost horizontal with a tolerance of\n        clip_angle_threshold to 
maintain backward compatibility.\n\n        Rotated boxes beyond this threshold are not clipped for two reasons:\n\n        1. There are potentially multiple ways to clip a rotated box to make it\n           fit within the image.\n        2. It's tricky to make the entire rectangular box fit within the image\n           and still be able to not leave out pixels of interest.\n\n        Therefore we rely on ops like RoIAlignRotated to safely handle this.\n\n        Args:\n            box_size (height, width): The clipping box's size.\n            clip_angle_threshold:\n                Iff. abs(normalized(angle)) <= clip_angle_threshold (in degrees),\n                we do the clipping as horizontal boxes.\n        \"\"\"\n        h, w = box_size\n\n        # normalize angles to be within (-180, 180] degrees\n        self.normalize_angles()\n\n        idx = torch.where(torch.abs(self.tensor[:, 4]) <= clip_angle_threshold)[0]\n\n        # convert to (x1, y1, x2, y2)\n        x1 = self.tensor[idx, 0] - self.tensor[idx, 2] / 2.0\n        y1 = self.tensor[idx, 1] - self.tensor[idx, 3] / 2.0\n        x2 = self.tensor[idx, 0] + self.tensor[idx, 2] / 2.0\n        y2 = self.tensor[idx, 1] + self.tensor[idx, 3] / 2.0\n\n        # clip\n        x1.clamp_(min=0, max=w)\n        y1.clamp_(min=0, max=h)\n        x2.clamp_(min=0, max=w)\n        y2.clamp_(min=0, max=h)\n\n        # convert back to (xc, yc, w, h)\n        self.tensor[idx, 0] = (x1 + x2) / 2.0\n        self.tensor[idx, 1] = (y1 + y2) / 2.0\n        # make sure widths and heights do not increase due to numerical errors\n        self.tensor[idx, 2] = torch.min(self.tensor[idx, 2], x2 - x1)\n        self.tensor[idx, 3] = torch.min(self.tensor[idx, 3], y2 - y1)\n\n    def nonempty(self, threshold: int = 0) -> torch.Tensor:\n        \"\"\"\n        Find boxes that are non-empty.\n        A box is considered empty, if either of its side is no larger than threshold.\n\n        Returns:\n            Tensor: a binary vector 
which represents\n            whether each box is empty (False) or non-empty (True).\n        \"\"\"\n        box = self.tensor\n        widths = box[:, 2]\n        heights = box[:, 3]\n        keep = (widths > threshold) & (heights > threshold)\n        return keep\n\n    def __getitem__(self, item: Union[int, slice, torch.BoolTensor]) -> \"RotatedBoxes\":\n        \"\"\"\n        Returns:\n            RotatedBoxes: Create a new :class:`RotatedBoxes` by indexing.\n\n        The following usage are allowed:\n\n        1. `new_boxes = boxes[3]`: return a `RotatedBoxes` which contains only one box.\n        2. `new_boxes = boxes[2:10]`: return a slice of boxes.\n        3. `new_boxes = boxes[vector]`, where vector is a torch.ByteTensor\n           with `length = len(boxes)`. Nonzero elements in the vector will be selected.\n\n        Note that the returned RotatedBoxes might share storage with this RotatedBoxes,\n        subject to Pytorch's indexing semantics.\n        \"\"\"\n        if isinstance(item, int):\n            return RotatedBoxes(self.tensor[item].view(1, -1))\n        b = self.tensor[item]\n        assert b.dim() == 2, \"Indexing on RotatedBoxes with {} failed to return a matrix!\".format(\n            item\n        )\n        return RotatedBoxes(b)\n\n    def __len__(self) -> int:\n        return self.tensor.shape[0]\n\n    def __repr__(self) -> str:\n        return \"RotatedBoxes(\" + str(self.tensor) + \")\"\n\n    def inside_box(self, box_size: Boxes.BoxSizeType, boundary_threshold: int = 0) -> torch.Tensor:\n        \"\"\"\n        Args:\n            box_size (height, width): Size of the reference box covering\n                [0, width] x [0, height]\n            boundary_threshold (int): Boxes that extend beyond the reference box\n                boundary by more than boundary_threshold are considered \"outside\".\n\n        For RRPN, it might not be necessary to call this function since it's common\n        for rotated box to extend to outside 
of the image boundaries\n        (the clip function only clips the near-horizontal boxes)\n\n        Returns:\n            a binary vector, indicating whether each box is inside the reference box.\n        \"\"\"\n        height, width = box_size\n\n        cnt_x = self.tensor[..., 0]\n        cnt_y = self.tensor[..., 1]\n        half_w = self.tensor[..., 2] / 2.0\n        half_h = self.tensor[..., 3] / 2.0\n        a = self.tensor[..., 4]\n        c = torch.abs(torch.cos(a * torch.pi / 180.0))\n        s = torch.abs(torch.sin(a * torch.pi / 180.0))\n        # This basically computes the horizontal bounding rectangle of the rotated box\n        max_rect_dx = c * half_w + s * half_h\n        max_rect_dy = c * half_h + s * half_w\n\n        inds_inside = (\n            (cnt_x - max_rect_dx >= -boundary_threshold)\n            & (cnt_y - max_rect_dy >= -boundary_threshold)\n            & (cnt_x + max_rect_dx < width + boundary_threshold)\n            & (cnt_y + max_rect_dy < height + boundary_threshold)\n        )\n\n        return inds_inside\n\n    def get_centers(self) -> torch.Tensor:\n        \"\"\"\n        Returns:\n            The box centers in a Nx2 array of (x, y).\n        \"\"\"\n        return self.tensor[:, :2]\n\n    @staticmethod\n    def cat(boxes_list: List[\"RotatedBoxes\"]) -> \"RotatedBoxes\":  # type: ignore\n        \"\"\"\n        Concatenates a list of RotatedBoxes into a single RotatedBoxes\n\n        Arguments:\n            boxes_list (list[RotatedBoxes])\n\n        Returns:\n            RotatedBoxes: the concatenated RotatedBoxes\n        \"\"\"\n        assert isinstance(boxes_list, (list, tuple))\n        assert len(boxes_list) > 0\n        assert all(isinstance(box, RotatedBoxes) for box in boxes_list)\n\n        cat_boxes = type(boxes_list[0])(cat([b.tensor for b in boxes_list], dim=0))\n        return cat_boxes\n\n    @property\n    def device(self) -> str:\n        return self.tensor.device\n\n    def __iter__(self) -> 
Iterator[torch.Tensor]:\n        \"\"\"\n        Yield a box as a Tensor of shape (5,) at a time.\n        \"\"\"\n        yield from self.tensor\n\n\ndef pairwise_iou(boxes1: RotatedBoxes, boxes2: RotatedBoxes) -> None:\n    \"\"\"\n    Given two lists of rotated boxes of size N and M,\n    compute the IoU (intersection over union)\n    between __all__ N x M pairs of boxes.\n    The box order must be (x_center, y_center, width, height, angle).\n\n    Args:\n        boxes1, boxes2 (RotatedBoxes):\n            two `RotatedBoxes`. Contains N & M rotated boxes, respectively.\n\n    Returns:\n        Tensor: IoU, sized [N,M].\n    \"\"\"\n\n    return pairwise_iou_rotated(boxes1.tensor, boxes2.tensor)\n"
  },
  {
    "path": "detectron2/utils/README.md",
    "content": "# Utility functions\n\nThis folder contains utility functions that are not used in the\ncore library, but are useful for building models or training\ncode using the config system.\n"
  },
  {
    "path": "detectron2/utils/__init__.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\n"
  },
  {
    "path": "detectron2/utils/collect_env.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport os\nimport sys\nfrom collections import defaultdict\nimport PIL\nimport torch\nfrom tabulate import tabulate\n\n__all__ = [\"collect_env_info\"]\n\n\ndef collect_torch_env():\n    try:\n        import torch.__config__\n\n        return torch.__config__.show()\n    except ImportError:\n        # compatible with older versions of pytorch\n        from torch.utils.collect_env import get_pretty_env_info\n\n        return get_pretty_env_info()\n\n\ndef get_env_module():\n    var_name = \"DETECTRON2_ENV_MODULE\"\n    return var_name, os.environ.get(var_name, \"<not set>\")\n\n\ndef collect_env_info():\n    data = []\n    data.append((\"Python\", sys.version.replace(\"\\n\", \"\")))\n    try:\n        from detectron2 import _C\n    except ImportError:\n        pass\n    else:\n        data.append((\"Detectron2 Compiler\", _C.get_compiler_version()))\n\n    data.append(get_env_module())\n    data.append((\"PyTorch\", torch.__version__))\n    data.append((\"PyTorch Debug Build\", torch.version.debug))\n\n    has_cuda = torch.cuda.is_available()\n    data.append((\"CUDA available\", has_cuda))\n    if has_cuda:\n        devices = defaultdict(list)\n        for k in range(torch.cuda.device_count()):\n            devices[torch.cuda.get_device_name(k)].append(str(k))\n        for name, devids in devices.items():\n            data.append((\"GPU \" + \",\".join(devids), name))\n    data.append((\"Pillow\", PIL.__version__))\n\n    try:\n        import cv2\n\n        data.append((\"cv2\", cv2.__version__))\n    except ImportError:\n        pass\n    env_str = tabulate(data) + \"\\n\"\n    env_str += collect_torch_env()\n    return env_str\n\n\nif __name__ == \"__main__\":\n    print(collect_env_info())\n"
  },
  {
    "path": "detectron2/utils/colormap.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.\n\n\"\"\"\nAn awesome colormap for really neat visualizations.\nCopied from Detectron, and removed gray colors.\n\"\"\"\n\nimport numpy as np\n\n__all__ = [\"colormap\", \"random_color\"]\n\n# fmt: off\n# RGB:\n_COLORS = np.array(\n    [\n        0.125, 0.212, 0.263,\n        0.816, 0.314, 0.365,\n        0.224, 0.082, 0.443,\n        0.494, 0.184, 0.556,\n        0.466, 0.674, 0.188,\n        0.301, 0.745, 0.933,\n        0.635, 0.078, 0.184,\n        0.300, 0.300, 0.300,\n        0.600, 0.600, 0.600,\n        1.000, 0.000, 0.000,\n        1.000, 0.500, 0.000,\n        0.749, 0.749, 0.000,\n        0.000, 1.000, 0.000,\n        0.000, 0.000, 1.000,\n        0.667, 0.000, 1.000,\n        0.333, 0.333, 0.000,\n        0.333, 0.667, 0.000,\n        0.333, 1.000, 0.000,\n        0.667, 0.333, 0.000,\n        0.667, 0.667, 0.000,\n        0.667, 1.000, 0.000,\n        1.000, 0.333, 0.000,\n        1.000, 0.667, 0.000,\n        1.000, 1.000, 0.000,\n        0.000, 0.333, 0.500,\n        0.000, 0.667, 0.500,\n        0.000, 1.000, 0.500,\n        0.333, 0.000, 0.500,\n        0.333, 0.333, 0.500,\n        0.333, 0.667, 0.500,\n        0.333, 1.000, 0.500,\n        0.667, 0.000, 0.500,\n        0.667, 0.333, 0.500,\n        0.667, 0.667, 0.500,\n        0.667, 1.000, 0.500,\n        1.000, 0.000, 0.500,\n        1.000, 0.333, 0.500,\n        1.000, 0.667, 0.500,\n        1.000, 1.000, 0.500,\n        0.000, 0.333, 1.000,\n        0.000, 0.667, 1.000,\n        0.000, 1.000, 1.000,\n        0.333, 0.000, 1.000,\n        0.333, 0.333, 1.000,\n        0.333, 0.667, 1.000,\n        0.333, 1.000, 1.000,\n        0.667, 0.000, 1.000,\n        0.667, 0.333, 1.000,\n        0.667, 0.667, 1.000,\n        0.667, 1.000, 1.000,\n        1.000, 0.000, 1.000,\n        1.000, 0.333, 1.000,\n        1.000, 0.667, 1.000,\n        0.333, 0.000, 0.000,\n        0.500, 0.000, 0.000,\n        0.667, 0.000, 
0.000,\n        0.833, 0.000, 0.000,\n        1.000, 0.000, 0.000,\n        0.000, 0.167, 0.000,\n        0.000, 0.333, 0.000,\n        0.000, 0.500, 0.000,\n        0.000, 0.667, 0.000,\n        0.000, 0.833, 0.000,\n        0.000, 1.000, 0.000,\n        0.000, 0.000, 0.167,\n        0.000, 0.000, 0.333,\n        0.000, 0.000, 0.500,\n        0.000, 0.000, 0.667,\n        0.000, 0.000, 0.833,\n        0.000, 0.000, 1.000,\n        0.000, 0.000, 0.000,\n        0.143, 0.143, 0.143,\n        0.857, 0.857, 0.857,\n        1.000, 1.000, 1.000\n    ]\n).astype(np.float32).reshape(-1, 3)\n# fmt: on\n\n\ndef colormap(rgb=False, maximum=255):\n    \"\"\"\n    Args:\n        rgb (bool): whether to return RGB colors or BGR colors.\n        maximum (int): either 255 or 1\n\n    Returns:\n        ndarray: a float32 array of Nx3 colors, in range [0, 255] or [0, 1]\n    \"\"\"\n    assert maximum in [255, 1], maximum\n    c = _COLORS * maximum\n    if not rgb:\n        c = c[:, ::-1]\n    return c\n\n\ndef random_color(rgb=False, maximum=255):\n    \"\"\"\n    Args:\n        rgb (bool): whether to return RGB colors or BGR colors.\n        maximum (int): either 255 or 1\n\n    Returns:\n        ndarray: a vector of 3 numbers\n    \"\"\"\n    idx = np.random.randint(0, len(_COLORS))\n    ret = _COLORS[idx] * maximum\n    if not rgb:\n        ret = ret[::-1]\n    return ret\n\n\nif __name__ == \"__main__\":\n    import cv2\n\n    size = 100\n    H, W = 10, 10\n    canvas = np.random.rand(H * size, W * size, 3).astype(\"float32\")\n    for h in range(H):\n        for w in range(W):\n            idx = h * W + w\n            if idx >= len(_COLORS):\n                break\n            canvas[h * size : (h + 1) * size, w * size : (w + 1) * size] = _COLORS[idx]\n    cv2.imshow(\"a\", canvas)\n    cv2.waitKey(0)\n"
  },
  {
    "path": "detectron2/utils/comm.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\n\"\"\"\nThis file contains primitives for multi-gpu communication.\nThis is useful when doing distributed training.\n\"\"\"\n\nimport functools\nimport logging\nimport numpy as np\nimport pickle\nimport torch\nimport torch.distributed as dist\n\n_LOCAL_PROCESS_GROUP = None\n\"\"\"\nA torch process group which only includes processes that on the same machine as the current process.\nThis variable is set when processes are spawned by `launch()` in \"engine/launch.py\".\n\"\"\"\n\n\ndef get_world_size() -> int:\n    if not dist.is_available():\n        return 1\n    if not dist.is_initialized():\n        return 1\n    return dist.get_world_size()\n\n\ndef get_rank() -> int:\n    if not dist.is_available():\n        return 0\n    if not dist.is_initialized():\n        return 0\n    return dist.get_rank()\n\n\ndef get_local_rank() -> int:\n    \"\"\"\n    Returns:\n        The rank of the current process within the local (per-machine) process group.\n    \"\"\"\n    if not dist.is_available():\n        return 0\n    if not dist.is_initialized():\n        return 0\n    assert _LOCAL_PROCESS_GROUP is not None\n    return dist.get_rank(group=_LOCAL_PROCESS_GROUP)\n\n\ndef get_local_size() -> int:\n    \"\"\"\n    Returns:\n        The size of the per-machine process group,\n        i.e. 
the number of processes per machine.\n    \"\"\"\n    if not dist.is_available():\n        return 1\n    if not dist.is_initialized():\n        return 1\n    return dist.get_world_size(group=_LOCAL_PROCESS_GROUP)\n\n\ndef is_main_process() -> bool:\n    return get_rank() == 0\n\n\ndef synchronize():\n    \"\"\"\n    Helper function to synchronize (barrier) among all processes when\n    using distributed training\n    \"\"\"\n    if not dist.is_available():\n        return\n    if not dist.is_initialized():\n        return\n    world_size = dist.get_world_size()\n    if world_size == 1:\n        return\n    dist.barrier()\n\n\n@functools.lru_cache()\ndef _get_global_gloo_group():\n    \"\"\"\n    Return a process group based on gloo backend, containing all the ranks\n    The result is cached.\n    \"\"\"\n    if dist.get_backend() == \"nccl\":\n        return dist.new_group(backend=\"gloo\")\n    else:\n        return dist.group.WORLD\n\n\ndef _serialize_to_tensor(data, group):\n    backend = dist.get_backend(group)\n    assert backend in [\"gloo\", \"nccl\"]\n    device = torch.device(\"cpu\" if backend == \"gloo\" else \"cuda\")\n\n    buffer = pickle.dumps(data)\n    if len(buffer) > 1024 ** 3:\n        logger = logging.getLogger(__name__)\n        logger.warning(\n            \"Rank {} trying to all-gather {:.2f} GB of data on device {}\".format(\n                get_rank(), len(buffer) / (1024 ** 3), device\n            )\n        )\n    storage = torch.ByteStorage.from_buffer(buffer)\n    tensor = torch.ByteTensor(storage).to(device=device)\n    return tensor\n\n\ndef _pad_to_largest_tensor(tensor, group):\n    \"\"\"\n    Returns:\n        list[int]: size of the tensor, on each rank\n        Tensor: padded tensor that has the max size\n    \"\"\"\n    world_size = dist.get_world_size(group=group)\n    assert (\n        world_size >= 1\n    ), \"comm.gather/all_gather must be called from ranks within the given group!\"\n    local_size = 
torch.tensor([tensor.numel()], dtype=torch.int64, device=tensor.device)\n    size_list = [\n        torch.zeros([1], dtype=torch.int64, device=tensor.device) for _ in range(world_size)\n    ]\n    dist.all_gather(size_list, local_size, group=group)\n    size_list = [int(size.item()) for size in size_list]\n\n    max_size = max(size_list)\n\n    # we pad the tensor because torch all_gather does not support\n    # gathering tensors of different shapes\n    if local_size != max_size:\n        padding = torch.zeros((max_size - local_size,), dtype=torch.uint8, device=tensor.device)\n        tensor = torch.cat((tensor, padding), dim=0)\n    return size_list, tensor\n\n\ndef all_gather(data, group=None):\n    \"\"\"\n    Run all_gather on arbitrary picklable data (not necessarily tensors).\n\n    Args:\n        data: any picklable object\n        group: a torch process group. By default, will use a group which\n            contains all ranks on gloo backend.\n\n    Returns:\n        list[data]: list of data gathered from each rank\n    \"\"\"\n    if get_world_size() == 1:\n        return [data]\n    if group is None:\n        group = _get_global_gloo_group()\n    if dist.get_world_size(group) == 1:\n        return [data]\n\n    tensor = _serialize_to_tensor(data, group)\n\n    size_list, tensor = _pad_to_largest_tensor(tensor, group)\n    max_size = max(size_list)\n\n    # receiving Tensor from all ranks\n    tensor_list = [\n        torch.empty((max_size,), dtype=torch.uint8, device=tensor.device) for _ in size_list\n    ]\n    dist.all_gather(tensor_list, tensor, group=group)\n\n    data_list = []\n    for size, tensor in zip(size_list, tensor_list):\n        buffer = tensor.cpu().numpy().tobytes()[:size]\n        data_list.append(pickle.loads(buffer))\n\n    return data_list\n\n\ndef gather(data, dst=0, group=None):\n    \"\"\"\n    Run gather on arbitrary picklable data (not necessarily tensors).\n\n    Args:\n        data: any picklable object\n        dst (int): 
destination rank\n        group: a torch process group. By default, will use a group which\n            contains all ranks on gloo backend.\n\n    Returns:\n        list[data]: on dst, a list of data gathered from each rank. Otherwise,\n            an empty list.\n    \"\"\"\n    if get_world_size() == 1:\n        return [data]\n    if group is None:\n        group = _get_global_gloo_group()\n    if dist.get_world_size(group=group) == 1:\n        return [data]\n    rank = dist.get_rank(group=group)\n\n    tensor = _serialize_to_tensor(data, group)\n    size_list, tensor = _pad_to_largest_tensor(tensor, group)\n\n    # receiving Tensor from all ranks\n    if rank == dst:\n        max_size = max(size_list)\n        tensor_list = [\n            torch.empty((max_size,), dtype=torch.uint8, device=tensor.device) for _ in size_list\n        ]\n        dist.gather(tensor, tensor_list, dst=dst, group=group)\n\n        data_list = []\n        for size, tensor in zip(size_list, tensor_list):\n            buffer = tensor.cpu().numpy().tobytes()[:size]\n            data_list.append(pickle.loads(buffer))\n        return data_list\n    else:\n        dist.gather(tensor, [], dst=dst, group=group)\n        return []\n\n\ndef shared_random_seed():\n    \"\"\"\n    Returns:\n        int: a random number that is the same across all workers.\n            If workers need a shared RNG, they can use this shared seed to\n            create one.\n\n    All workers must call this function, otherwise it will deadlock.\n    \"\"\"\n    ints = np.random.randint(2 ** 31)\n    all_ints = all_gather(ints)\n    return all_ints[0]\n\n\ndef reduce_dict(input_dict, average=True):\n    \"\"\"\n    Reduce the values in the dictionary from all processes so that process with rank\n    0 has the reduced results.\n\n    Args:\n        input_dict (dict): inputs to be reduced. 
All the values must be scalar CUDA Tensor.\n        average (bool): whether to do average or sum\n\n    Returns:\n        a dict with the same keys as input_dict, after reduction.\n    \"\"\"\n    world_size = get_world_size()\n    if world_size < 2:\n        return input_dict\n    with torch.no_grad():\n        names = []\n        values = []\n        # sort the keys so that they are consistent across processes\n        for k in sorted(input_dict.keys()):\n            names.append(k)\n            values.append(input_dict[k])\n        values = torch.stack(values, dim=0)\n        dist.reduce(values, dst=0)\n        if dist.get_rank() == 0 and average:\n            # only main process gets accumulated, so only divide by\n            # world_size in this case\n            values /= world_size\n        reduced_dict = {k: v for k, v in zip(names, values)}\n    return reduced_dict\n"
  },
  {
    "path": "detectron2/utils/develop.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates.\n\"\"\" Utilities for developers only.\nThese are not visible to users (not automatically imported). And should not\nappeared in docs.\"\"\"\n# adapted from https://github.com/tensorpack/tensorpack/blob/master/tensorpack/utils/develop.py\n\n\ndef create_dummy_class(klass, dependency, message=\"\"):\n    \"\"\"\n    When a dependency of a class is not available, create a dummy class which throws ImportError\n    when used.\n\n    Args:\n        klass (str): name of the class.\n        dependency (str): name of the dependency.\n        message: extra message to print\n    Returns:\n        class: a class object\n    \"\"\"\n    err = \"Cannot import '{}', therefore '{}' is not available.\".format(dependency, klass)\n    if message:\n        err = err + \" \" + message\n\n    class _DummyMetaClass(type):\n        # throw error on class attribute access\n        def __getattr__(_, __):  # noqa: B902\n            raise ImportError(err)\n\n    class _Dummy(object, metaclass=_DummyMetaClass):\n        # throw error on constructor\n        def __init__(self, *args, **kwargs):\n            raise ImportError(err)\n\n    return _Dummy\n\n\ndef create_dummy_func(func, dependency, message=\"\"):\n    \"\"\"\n    When a dependency of a function is not available, create a dummy function which throws\n    ImportError when used.\n\n    Args:\n        func (str): name of the function.\n        dependency (str or list[str]): name(s) of the dependency.\n        message: extra message to print\n    Returns:\n        function: a function object\n    \"\"\"\n    err = \"Cannot import '{}', therefore '{}' is not available.\".format(dependency, func)\n    if message:\n        err = err + \" \" + message\n\n    if isinstance(dependency, (list, tuple)):\n        dependency = \",\".join(dependency)\n\n    def _dummy(*args, **kwargs):\n        raise ImportError(err)\n\n    return _dummy\n"
  },
  {
    "path": "detectron2/utils/env.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates.\nimport importlib\nimport importlib.util\nimport logging\nimport numpy as np\nimport os\nimport random\nimport sys\nfrom datetime import datetime\nimport torch\n\n__all__ = [\"seed_all_rng\"]\n\n\nTORCH_VERSION = tuple(int(x) for x in torch.__version__.split(\".\")[:2])\n\"\"\"\nPyTorch version as a tuple of 2 ints. Useful for comparison.\n\"\"\"\n\n\nDOC_BUILDING = os.getenv(\"_DOC_BUILDING\", False)  # set in docs/conf.py\n\"\"\"\nWhether we're building documentation.\n\"\"\"\n\n\ndef seed_all_rng(seed=None):\n    \"\"\"\n    Set the random seed for the RNG in torch, numpy and python.\n\n    Args:\n        seed (int): if None, will use a strong random seed.\n    \"\"\"\n    if seed is None:\n        seed = (\n            os.getpid()\n            + int(datetime.now().strftime(\"%S%f\"))\n            + int.from_bytes(os.urandom(2), \"big\")\n        )\n        logger = logging.getLogger(__name__)\n        logger.info(\"Using a generated random seed {}\".format(seed))\n    np.random.seed(seed)\n    torch.manual_seed(seed)\n    random.seed(seed)\n    os.environ[\"PYTHONHASHSEED\"] = str(seed)\n\n\n# from https://stackoverflow.com/questions/67631/how-to-import-a-module-given-the-full-path\ndef _import_file(module_name, file_path, make_importable=False):\n    spec = importlib.util.spec_from_file_location(module_name, file_path)\n    module = importlib.util.module_from_spec(spec)\n    spec.loader.exec_module(module)\n    if make_importable:\n        sys.modules[module_name] = module\n    return module\n\n\ndef _configure_libraries():\n    \"\"\"\n    Configurations for some libraries.\n    \"\"\"\n    # An environment option to disable `import cv2` globally,\n    # in case it leads to negative performance impact\n    disable_cv2 = int(os.environ.get(\"DETECTRON2_DISABLE_CV2\", False))\n    if disable_cv2:\n        sys.modules[\"cv2\"] = None\n    else:\n        # Disable opencl in opencv since its 
interaction with cuda often has negative effects\n        # This envvar is supported after OpenCV 3.4.0\n        os.environ[\"OPENCV_OPENCL_RUNTIME\"] = \"disabled\"\n        try:\n            import cv2\n\n            if int(cv2.__version__.split(\".\")[0]) >= 3:\n                cv2.ocl.setUseOpenCL(False)\n        except ModuleNotFoundError:\n            # Other types of ImportError, if happened, should not be ignored.\n            # Because a failed opencv import could mess up address space\n            # https://github.com/skvark/opencv-python/issues/381\n            pass\n\n    def get_version(module, digit=2):\n        return tuple(map(int, module.__version__.split(\".\")[:digit]))\n\n    # fmt: off\n    assert get_version(torch) >= (1, 4), \"Requires torch>=1.4\"\n    import fvcore\n    assert get_version(fvcore, 3) >= (0, 1, 2), \"Requires fvcore>=0.1.2\"\n    import yaml\n    assert get_version(yaml) >= (5, 1), \"Requires pyyaml>=5.1\"\n    # fmt: on\n\n\n_ENV_SETUP_DONE = False\n\n\ndef setup_environment():\n    \"\"\"Perform environment setup work. 
The default setup is a no-op, but this\n    function allows the user to specify a Python source file or a module in\n    the $DETECTRON2_ENV_MODULE environment variable, that performs\n    custom setup work that may be necessary to their computing environment.\n    \"\"\"\n    global _ENV_SETUP_DONE\n    if _ENV_SETUP_DONE:\n        return\n    _ENV_SETUP_DONE = True\n\n    _configure_libraries()\n\n    custom_module_path = os.environ.get(\"DETECTRON2_ENV_MODULE\")\n\n    if custom_module_path:\n        setup_custom_environment(custom_module_path)\n    else:\n        # The default setup is a no-op\n        pass\n\n\ndef setup_custom_environment(custom_module):\n    \"\"\"\n    Load custom environment setup by importing a Python source file or a\n    module, and run the setup function.\n    \"\"\"\n    if custom_module.endswith(\".py\"):\n        module = _import_file(\"detectron2.utils.env.custom_module\", custom_module)\n    else:\n        module = importlib.import_module(custom_module)\n    assert hasattr(module, \"setup_environment\") and callable(module.setup_environment), (\n        \"Custom environment module defined in {} does not have the \"\n        \"required callable attribute 'setup_environment'.\"\n    ).format(custom_module)\n    module.setup_environment()\n\n\ndef fixup_module_metadata(module_name, namespace, keys=None):\n    \"\"\"\n    Fix the __qualname__ of module members to be their exported api name, so\n    when they are referenced in docs, sphinx can find them. 
Reference:\n    https://github.com/python-trio/trio/blob/6754c74eacfad9cc5c92d5c24727a2f3b620624e/trio/_util.py#L216-L241\n    \"\"\"\n    if not DOC_BUILDING:\n        return\n    seen_ids = set()\n\n    def fix_one(qualname, name, obj):\n        # avoid infinite recursion (relevant when using\n        # typing.Generic, for example)\n        if id(obj) in seen_ids:\n            return\n        seen_ids.add(id(obj))\n\n        mod = getattr(obj, \"__module__\", None)\n        if mod is not None and (mod.startswith(module_name) or mod.startswith(\"fvcore.\")):\n            obj.__module__ = module_name\n            # Modules, unlike everything else in Python, put fully-qualified\n            # names into their __name__ attribute. We check for \".\" to avoid\n            # rewriting these.\n            if hasattr(obj, \"__name__\") and \".\" not in obj.__name__:\n                obj.__name__ = name\n                obj.__qualname__ = qualname\n            if isinstance(obj, type):\n                for attr_name, attr_value in obj.__dict__.items():\n                    fix_one(objname + \".\" + attr_name, attr_name, attr_value)\n\n    if keys is None:\n        keys = namespace.keys()\n    for objname in keys:\n        if not objname.startswith(\"_\"):\n            obj = namespace[objname]\n            fix_one(objname, objname, obj)\n"
  },
  {
    "path": "detectron2/utils/events.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport datetime\nimport json\nimport logging\nimport os\nfrom collections import defaultdict\nfrom contextlib import contextmanager\nimport torch\nfrom fvcore.common.file_io import PathManager\nfrom fvcore.common.history_buffer import HistoryBuffer\n\n_CURRENT_STORAGE_STACK = []\n\n\ndef get_event_storage():\n    assert len(\n        _CURRENT_STORAGE_STACK\n    ), \"get_event_storage() has to be called inside a 'with EventStorage(...)' context!\"\n    return _CURRENT_STORAGE_STACK[-1]\n\n\nclass JSONWriter:\n    \"\"\"\n    Write scalars to a json file.\n\n    It saves scalars as one json per line (instead of a big json) for easy parsing.\n\n    Examples parsing such a json file:\n\n    .. code-block:: none\n\n        $ cat metrics.json | jq -s '.[0:2]'\n        [\n          {\n            \"data_time\": 0.008433341979980469,\n            \"iteration\": 20,\n            \"loss\": 1.9228371381759644,\n            \"loss_box_reg\": 0.050025828182697296,\n            \"loss_classifier\": 0.5316952466964722,\n            \"loss_mask\": 0.7236229181289673,\n            \"loss_rpn_box\": 0.0856662318110466,\n            \"loss_rpn_cls\": 0.48198649287223816,\n            \"lr\": 0.007173333333333333,\n            \"time\": 0.25401854515075684\n          },\n          {\n            \"data_time\": 0.007216215133666992,\n            \"iteration\": 40,\n            \"loss\": 1.282649278640747,\n            \"loss_box_reg\": 0.06222952902317047,\n            \"loss_classifier\": 0.30682939291000366,\n            \"loss_mask\": 0.6970193982124329,\n            \"loss_rpn_box\": 0.038663312792778015,\n            \"loss_rpn_cls\": 0.1471673548221588,\n            \"lr\": 0.007706666666666667,\n            \"time\": 0.2490077018737793\n          }\n        ]\n\n        $ cat metrics.json | jq '.loss_mask'\n        0.7126231789588928\n        0.689423680305481\n        0.6776131987571716\n    
    ...\n\n    \"\"\"\n\n    def __init__(self, json_file, window_size=20):\n        \"\"\"\n        Args:\n            json_file (str): path to the json file. New data will be appended if the file exists.\n            window_size (int): the window size of median smoothing for the scalars whose\n                `smoothing_hint` are True.\n        \"\"\"\n        self._file_handle = PathManager.open(json_file, \"a\")\n        self._window_size = window_size\n\n    def write(self):\n        storage = get_event_storage()\n        to_save = {\"iteration\": storage.iter}\n        to_save.update(storage.latest_with_smoothing_hint(self._window_size))\n        self._file_handle.write(json.dumps(to_save, sort_keys=True) + \"\\n\")\n        self._file_handle.flush()\n        try:\n            os.fsync(self._file_handle.fileno())\n        except AttributeError:\n            pass\n\n    def __del__(self):\n        # not guaranteed to be called at exit, but probably fine\n        self._file_handle.close()\n\n\nclass TensorboardXWriter:\n    \"\"\"\n    Write all scalars to a tensorboard file.\n    \"\"\"\n\n    def __init__(self, log_dir: str, window_size: int = 20, **kwargs):\n        \"\"\"\n        Args:\n            log_dir (str): The directory to save the output events\n            window_size (int): the scalars will be median-smoothed by this window size\n            kwargs: other arguments passed to `torch.utils.tensorboard.SummaryWriter(...)`\n        \"\"\"\n        self._window_size = window_size\n        from torch.utils.tensorboard import SummaryWriter\n\n        self._writer = SummaryWriter(log_dir, **kwargs)\n\n    def write(self):\n        storage = get_event_storage()\n        for k, v in storage.latest_with_smoothing_hint(self._window_size).items():\n            self._writer.add_scalar(k, v, storage.iter)\n\n    def __del__(self):\n        if hasattr(self, \"_writer\"):  # doesn't exist when the code fails at import\n            self._writer.close()\n\n\nclass 
CommonMetricPrinter:\n    \"\"\"\n    Print __common__ metrics to the terminal, including\n    iteration time, ETA, memory, all losses, and the learning rate.\n\n    To print something different, please implement a similar printer by yourself.\n    \"\"\"\n\n    def __init__(self, max_iter):\n        \"\"\"\n        Args:\n            max_iter (int): the maximum number of iterations to train.\n                Used to compute ETA.\n        \"\"\"\n        self.logger = logging.getLogger(__name__)\n        self._max_iter = max_iter\n\n    def write(self):\n        storage = get_event_storage()\n        iteration = storage.iter\n\n        data_time, time = None, None\n        eta_string = \"N/A\"\n        try:\n            data_time = storage.history(\"data_time\").avg(20)\n            time = storage.history(\"time\").global_avg()\n            eta_seconds = storage.history(\"time\").median(1000) * (self._max_iter - iteration)\n            eta_string = str(datetime.timedelta(seconds=int(eta_seconds)))\n        except KeyError:  # they may not exist in the first few iterations (due to warmup)\n            pass\n\n        try:\n            lr = \"{:.6f}\".format(storage.history(\"lr\").latest())\n        except KeyError:\n            lr = \"N/A\"\n\n        if torch.cuda.is_available():\n            max_mem_mb = torch.cuda.max_memory_allocated() / 1024.0 / 1024.0\n        else:\n            max_mem_mb = None\n\n        # NOTE: max_mem is parsed by grep in \"dev/parse_results.sh\"\n        self.logger.info(\n            \"\"\"\\\neta: {eta}  iter: {iter}  {losses}  \\\n{time}  {data_time}  \\\nlr: {lr}  {memory}\\\n\"\"\".format(\n                eta=eta_string,\n                iter=iteration,\n                losses=\"  \".join(\n                    [\n                        \"{}: {:.3f}\".format(k, v.median(20))\n                        for k, v in storage.histories().items()\n                        if \"loss\" in k\n                    ]\n                ),\n        
        time=\"time: {:.4f}\".format(time) if time is not None else \"\",\n                data_time=\"data_time: {:.4f}\".format(data_time) if data_time is not None else \"\",\n                lr=lr,\n                memory=\"max_mem: {:.0f}M\".format(max_mem_mb) if max_mem_mb is not None else \"\",\n            )\n        )\n\n\nclass EventStorage:\n    \"\"\"\n    The user-facing class that provides metric storage functionalities.\n\n    In the future we may add support for storing / logging other types of data if needed.\n    \"\"\"\n\n    def __init__(self, start_iter=0):\n        \"\"\"\n        Args:\n            start_iter (int): the iteration number to start with\n        \"\"\"\n        self._history = defaultdict(HistoryBuffer)\n        self._smoothing_hints = {}\n        self._latest_scalars = {}\n        self._iter = start_iter\n        self._current_prefix = \"\"\n\n    def put_scalar(self, name, value, smoothing_hint=True):\n        \"\"\"\n        Add a scalar `value` to the `HistoryBuffer` associated with `name`.\n\n        Args:\n            smoothing_hint (bool): a 'hint' on whether this scalar is noisy and should be\n                smoothed when logged. The hint will be accessible through\n                :meth:`EventStorage.smoothing_hints`.  
A writer may ignore the hint\n                and apply custom smoothing rule.\n\n                It defaults to True because most scalars we save need to be smoothed to\n                provide any useful signal.\n        \"\"\"\n        name = self._current_prefix + name\n        history = self._history[name]\n        value = float(value)\n        history.update(value, self._iter)\n        self._latest_scalars[name] = value\n\n        existing_hint = self._smoothing_hints.get(name)\n        if existing_hint is not None:\n            assert (\n                existing_hint == smoothing_hint\n            ), \"Scalar {} was put with a different smoothing_hint!\".format(name)\n        else:\n            self._smoothing_hints[name] = smoothing_hint\n\n    def put_scalars(self, *, smoothing_hint=True, **kwargs):\n        \"\"\"\n        Put multiple scalars from keyword arguments.\n\n        Examples:\n\n            storage.put_scalars(loss=my_loss, accuracy=my_accuracy, smoothing_hint=True)\n        \"\"\"\n        for k, v in kwargs.items():\n            self.put_scalar(k, v, smoothing_hint=smoothing_hint)\n\n    def history(self, name):\n        \"\"\"\n        Returns:\n            HistoryBuffer: the scalar history for name\n        \"\"\"\n        ret = self._history.get(name, None)\n        if ret is None:\n            raise KeyError(\"No history metric available for {}!\".format(name))\n        return ret\n\n    def histories(self):\n        \"\"\"\n        Returns:\n            dict[name -> HistoryBuffer]: the HistoryBuffer for all scalars\n        \"\"\"\n        return self._history\n\n    def latest(self):\n        \"\"\"\n        Returns:\n            dict[name -> number]: the scalars that's added in the current iteration.\n        \"\"\"\n        return self._latest_scalars\n\n    def latest_with_smoothing_hint(self, window_size=20):\n        \"\"\"\n        Similar to :meth:`latest`, but the returned values\n        are either the un-smoothed original 
latest value,\n        or a median of the given window_size,\n        depend on whether the smoothing_hint is True.\n\n        This provides a default behavior that other writers can use.\n        \"\"\"\n        result = {}\n        for k, v in self._latest_scalars.items():\n            result[k] = self._history[k].median(window_size) if self._smoothing_hints[k] else v\n        return result\n\n    def smoothing_hints(self):\n        \"\"\"\n        Returns:\n            dict[name -> bool]: the user-provided hint on whether the scalar\n                is noisy and needs smoothing.\n        \"\"\"\n        return self._smoothing_hints\n\n    def step(self):\n        \"\"\"\n        User should call this function at the beginning of each iteration, to\n        notify the storage of the start of a new iteration.\n        The storage will then be able to associate the new data with the\n        correct iteration number.\n        \"\"\"\n        self._iter += 1\n        self._latest_scalars = {}\n\n    @property\n    def iter(self):\n        return self._iter\n\n    @property\n    def iteration(self):\n        # for backward compatibility\n        return self._iter\n\n    def __enter__(self):\n        _CURRENT_STORAGE_STACK.append(self)\n        return self\n\n    def __exit__(self, exc_type, exc_val, exc_tb):\n        assert _CURRENT_STORAGE_STACK[-1] == self\n        _CURRENT_STORAGE_STACK.pop()\n\n    @contextmanager\n    def name_scope(self, name):\n        \"\"\"\n        Yields:\n            A context within which all the events added to this storage\n            will be prefixed by the name scope.\n        \"\"\"\n        old_prefix = self._current_prefix\n        self._current_prefix = name.rstrip(\"/\") + \"/\"\n        yield\n        self._current_prefix = old_prefix\n"
  },
  {
    "path": "detectron2/utils/logger.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport functools\nimport logging\nimport os\nimport sys\nfrom collections import Counter\nfrom fvcore.common.file_io import PathManager\nfrom tabulate import tabulate\nfrom termcolor import colored\n\n\nclass _ColorfulFormatter(logging.Formatter):\n    def __init__(self, *args, **kwargs):\n        self._root_name = kwargs.pop(\"root_name\") + \".\"\n        self._abbrev_name = kwargs.pop(\"abbrev_name\", \"\")\n        if len(self._abbrev_name):\n            self._abbrev_name = self._abbrev_name + \".\"\n        super(_ColorfulFormatter, self).__init__(*args, **kwargs)\n\n    def formatMessage(self, record):\n        record.name = record.name.replace(self._root_name, self._abbrev_name)\n        log = super(_ColorfulFormatter, self).formatMessage(record)\n        if record.levelno == logging.WARNING:\n            prefix = colored(\"WARNING\", \"red\", attrs=[\"blink\"])\n        elif record.levelno == logging.ERROR or record.levelno == logging.CRITICAL:\n            prefix = colored(\"ERROR\", \"red\", attrs=[\"blink\", \"underline\"])\n        else:\n            return log\n        return prefix + \" \" + log\n\n\n@functools.lru_cache()  # so that calling setup_logger multiple times won't add many handlers\ndef setup_logger(\n    output=None, distributed_rank=0, *, color=True, name=\"detectron2\", abbrev_name=None\n):\n    \"\"\"\n    Args:\n        output (str): a file name or a directory to save log. 
If None, will not save log file.\n            If ends with \".txt\" or \".log\", assumed to be a file name.\n            Otherwise, logs will be saved to `output/log.txt`.\n        name (str): the root module name of this logger\n        abbrev_name (str): an abbreviation of the module, to avoid long names in logs.\n            Set to \"\" to not log the root module in logs.\n            By default, will abbreviate \"detectron2\" to \"d2\" and leave other\n            modules unchanged.\n    \"\"\"\n    logger = logging.getLogger(name)\n    logger.setLevel(logging.DEBUG)\n    logger.propagate = False\n\n    if abbrev_name is None:\n        abbrev_name = \"d2\" if name == \"detectron2\" else name\n\n    plain_formatter = logging.Formatter(\n        \"[%(asctime)s] %(name)s %(levelname)s: %(message)s\", datefmt=\"%m/%d %H:%M:%S\"\n    )\n    # stdout logging: master only\n    if distributed_rank == 0:\n        ch = logging.StreamHandler(stream=sys.stdout)\n        ch.setLevel(logging.DEBUG)\n        if color:\n            formatter = _ColorfulFormatter(\n                colored(\"[%(asctime)s %(name)s]: \", \"green\") + \"%(message)s\",\n                datefmt=\"%m/%d %H:%M:%S\",\n                root_name=name,\n                abbrev_name=str(abbrev_name),\n            )\n        else:\n            formatter = plain_formatter\n        ch.setFormatter(formatter)\n        logger.addHandler(ch)\n\n    # file logging: all workers\n    if output is not None:\n        if output.endswith(\".txt\") or output.endswith(\".log\"):\n            filename = output\n        else:\n            filename = os.path.join(output, \"log.txt\")\n        if distributed_rank > 0:\n            filename = filename + \".rank{}\".format(distributed_rank)\n        PathManager.mkdirs(os.path.dirname(filename))\n\n        fh = logging.StreamHandler(_cached_log_stream(filename))\n        fh.setLevel(logging.DEBUG)\n        fh.setFormatter(plain_formatter)\n        logger.addHandler(fh)\n\n    
return logger\n\n\n# cache the opened file object, so that different calls to `setup_logger`\n# with the same file name can safely write to the same file.\n@functools.lru_cache(maxsize=None)\ndef _cached_log_stream(filename):\n    return PathManager.open(filename, \"a\")\n\n\n\"\"\"\nBelow are some other convenient logging methods.\nThey are mainly adopted from\nhttps://github.com/abseil/abseil-py/blob/master/absl/logging/__init__.py\n\"\"\"\n\n\ndef _find_caller():\n    \"\"\"\n    Returns:\n        str: module name of the caller\n        tuple: a hashable key to be used to identify different callers\n    \"\"\"\n    frame = sys._getframe(2)\n    while frame:\n        code = frame.f_code\n        if os.path.join(\"utils\", \"logger.\") not in code.co_filename:\n            mod_name = frame.f_globals[\"__name__\"]\n            if mod_name == \"__main__\":\n                mod_name = \"detectron2\"\n            return mod_name, (code.co_filename, frame.f_lineno, code.co_name)\n        frame = frame.f_back\n\n\n_LOG_COUNTER = Counter()\n\n\ndef log_first_n(lvl, msg, n=1, *, name=None, key=\"caller\"):\n    \"\"\"\n    Log only for the first n times.\n\n    Args:\n        lvl (int): the logging level\n        msg (str):\n        n (int):\n        name (str): name of the logger to use. 
Will use the caller's module by default.\n        key (str or tuple[str]): the string(s) can be one of \"caller\" or\n            \"message\", which defines how to identify duplicated logs.\n            For example, if called with `n=1, key=\"caller\"`, this function\n            will only log the first call from the same caller, regardless of\n            the message content.\n            If called with `n=1, key=\"message\"`, this function will log the\n            same content only once, even if they are called from different places.\n            If called with `n=1, key=(\"caller\", \"message\")`, this function\n            will not log only if the same caller has logged the same message before.\n    \"\"\"\n    if isinstance(key, str):\n        key = (key,)\n    assert len(key) > 0\n\n    caller_module, caller_key = _find_caller()\n    hash_key = ()\n    if \"caller\" in key:\n        hash_key = hash_key + caller_key\n    if \"message\" in key:\n        hash_key = hash_key + (msg,)\n\n    _LOG_COUNTER[hash_key] += 1\n    if _LOG_COUNTER[hash_key] <= n:\n        logging.getLogger(name or caller_module).log(lvl, msg)\n\n\ndef log_every_n(lvl, msg, n=1, *, name=None):\n    \"\"\"\n    Log once per n times.\n\n    Args:\n        lvl (int): the logging level\n        msg (str):\n        n (int):\n        name (str): name of the logger to use. Will use the caller's module by default.\n    \"\"\"\n    caller_module, key = _find_caller()\n    _LOG_COUNTER[key] += 1\n    if n == 1 or _LOG_COUNTER[key] % n == 1:\n        logging.getLogger(name or caller_module).log(lvl, msg)\n\n\ndef create_small_table(small_dict):\n    \"\"\"\n    Create a small table using the keys of small_dict as headers. 
This is only\n    suitable for small dictionaries.\n\n    Args:\n        small_dict (dict): a result dictionary of only a few items.\n\n    Returns:\n        str: the table as a string.\n    \"\"\"\n    keys, values = tuple(zip(*small_dict.items()))\n    table = tabulate(\n        [values],\n        headers=keys,\n        tablefmt=\"pipe\",\n        floatfmt=\".3f\",\n        stralign=\"center\",\n        numalign=\"center\",\n    )\n    return table\n"
  },
  {
    "path": "detectron2/utils/memory.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.\n\nimport logging\nfrom contextlib import contextmanager\nfrom functools import wraps\nimport torch\n\n__all__ = [\"retry_if_cuda_oom\"]\n\n\n@contextmanager\ndef _ignore_torch_cuda_oom():\n    \"\"\"\n    A context which ignores CUDA OOM exception from pytorch.\n    \"\"\"\n    try:\n        yield\n    except RuntimeError as e:\n        # NOTE: the string may change?\n        if \"CUDA out of memory. \" in str(e):\n            pass\n        else:\n            raise\n\n\ndef retry_if_cuda_oom(func):\n    \"\"\"\n    Makes a function retry itself after encountering\n    pytorch's CUDA OOM error.\n    It will first retry after calling `torch.cuda.empty_cache()`.\n\n    If that still fails, it will then retry by trying to convert inputs to CPUs.\n    In this case, it expects the function to dispatch to CPU implementation.\n    The return values may become CPU tensors as well and it's user's\n    responsibility to convert it back to CUDA tensor if needed.\n\n    Args:\n        func: a stateless callable that takes tensor-like objects as arguments\n\n    Returns:\n        a callable which retries `func` if OOM is encountered.\n\n    Examples:\n\n    .. code-block:: python\n\n        output = retry_if_cuda_oom(some_torch_function)(input1, input2)\n        # output may be on CPU even if inputs are on GPU\n\n    Note:\n        1. When converting inputs to CPU, it will only look at each argument and check\n           if it has `.device` and `.to` for conversion. Nested structures of tensors\n           are not supported.\n\n        2. 
Since the function might be called more than once, it has to be\n           stateless.\n    \"\"\"\n\n    def maybe_to_cpu(x):\n        try:\n            like_gpu_tensor = x.device.type == \"cuda\" and hasattr(x, \"to\")\n        except AttributeError:\n            like_gpu_tensor = False\n        if like_gpu_tensor:\n            return x.to(device=\"cpu\")\n        else:\n            return x\n\n    @wraps(func)\n    def wrapped(*args, **kwargs):\n        with _ignore_torch_cuda_oom():\n            return func(*args, **kwargs)\n\n        # Clear cache and retry\n        torch.cuda.empty_cache()\n        with _ignore_torch_cuda_oom():\n            return func(*args, **kwargs)\n\n        # Try on CPU. This slows down the code significantly, therefore print a notice.\n        logger = logging.getLogger(__name__)\n        logger.info(\"Attempting to copy inputs of {} to CPU due to CUDA OOM\".format(str(func)))\n        new_args = (maybe_to_cpu(x) for x in args)\n        new_kwargs = {k: maybe_to_cpu(v) for k, v in kwargs.items()}\n        return func(*new_args, **new_kwargs)\n\n    return wrapped\n"
  },
  {
    "path": "detectron2/utils/registry.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nclass Registry(object):\n    \"\"\"\n    The registry that provides name -> object mapping, to support third-party users' custom modules.\n\n    To create a registry (inside detectron2):\n\n    .. code-block:: python\n\n        BACKBONE_REGISTRY = Registry('BACKBONE')\n\n    To register an object:\n\n    .. code-block:: python\n\n        @BACKBONE_REGISTRY.register()\n        class MyBackbone():\n            ...\n\n    Or:\n\n    .. code-block:: python\n\n        BACKBONE_REGISTRY.register(MyBackbone)\n    \"\"\"\n\n    def __init__(self, name):\n        \"\"\"\n        Args:\n            name (str): the name of this registry\n        \"\"\"\n        self._name = name\n\n        self._obj_map = {}\n\n    def _do_register(self, name, obj):\n        assert (\n            name not in self._obj_map\n        ), \"An object named '{}' was already registered in '{}' registry!\".format(name, self._name)\n        self._obj_map[name] = obj\n\n    def register(self, obj=None):\n        \"\"\"\n        Register the given object under the the name `obj.__name__`.\n        Can be used as either a decorator or not. See docstring of this class for usage.\n        \"\"\"\n        if obj is None:\n            # used as a decorator\n            def deco(func_or_class):\n                name = func_or_class.__name__\n                self._do_register(name, func_or_class)\n                return func_or_class\n\n            return deco\n\n        # used as a function call\n        name = obj.__name__\n        self._do_register(name, obj)\n\n    def get(self, name):\n        ret = self._obj_map.get(name)\n        if ret is None:\n            raise KeyError(\"No object named '{}' found in '{}' registry!\".format(name, self._name))\n        return ret\n"
  },
  {
    "path": "detectron2/utils/serialize.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport cloudpickle\n\n\nclass PicklableWrapper(object):\n    \"\"\"\n    Wrap an object to make it more picklable, note that it uses\n    heavy weight serialization libraries that are slower than pickle.\n    It's best to use it only on closures (which are usually not picklable).\n\n    This is a simplified version of\n    https://github.com/joblib/joblib/blob/master/joblib/externals/loky/cloudpickle_wrapper.py\n    \"\"\"\n\n    def __init__(self, obj):\n        self._obj = obj\n\n    def __reduce__(self):\n        s = cloudpickle.dumps(self._obj)\n        return cloudpickle.loads, (s,)\n\n    def __call__(self, *args, **kwargs):\n        return self._obj(*args, **kwargs)\n\n    def __getattr__(self, attr):\n        # Ensure that the wrapped object can be used seamlessly as the previous object.\n        if attr not in [\"_obj\"]:\n            return getattr(self._obj, attr)\n        return getattr(self, attr)\n"
  },
  {
    "path": "detectron2/utils/video_visualizer.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport numpy as np\nimport pycocotools.mask as mask_util\n\nfrom detectron2.utils.visualizer import (\n    ColorMode,\n    Visualizer,\n    _create_text_labels,\n    _PanopticPrediction,\n)\n\nfrom .colormap import random_color\n\n\nclass _DetectedInstance:\n    \"\"\"\n    Used to store data about detected objects in video frame,\n    in order to transfer color to objects in the future frames.\n\n    Attributes:\n        label (int):\n        bbox (tuple[float]):\n        mask_rle (dict):\n        color (tuple[float]): RGB colors in range (0, 1)\n        ttl (int): time-to-live for the instance. For example, if ttl=2,\n            the instance color can be transferred to objects in the next two frames.\n    \"\"\"\n\n    __slots__ = [\"label\", \"bbox\", \"mask_rle\", \"color\", \"ttl\"]\n\n    def __init__(self, label, bbox, mask_rle, color, ttl):\n        self.label = label\n        self.bbox = bbox\n        self.mask_rle = mask_rle\n        self.color = color\n        self.ttl = ttl\n\n\nclass VideoVisualizer:\n    def __init__(self, metadata, instance_mode=ColorMode.IMAGE):\n        \"\"\"\n        Args:\n            metadata (MetadataCatalog): image metadata.\n        \"\"\"\n        self.metadata = metadata\n        self._old_instances = []\n        assert instance_mode in [\n            ColorMode.IMAGE,\n            ColorMode.IMAGE_BW,\n        ], \"Other mode not supported yet.\"\n        self._instance_mode = instance_mode\n\n    def draw_instance_predictions(self, frame, predictions):\n        \"\"\"\n        Draw instance-level prediction results on an image.\n\n        Args:\n            frame (ndarray): an RGB image of shape (H, W, C), in the range [0, 255].\n            predictions (Instances): the output of an instance detection/segmentation\n                model. 
Following fields will be used to draw:\n                \"pred_boxes\", \"pred_classes\", \"scores\", \"pred_masks\" (or \"pred_masks_rle\").\n\n        Returns:\n            output (VisImage): image object with visualizations.\n        \"\"\"\n        frame_visualizer = Visualizer(frame, self.metadata)\n        num_instances = len(predictions)\n        if num_instances == 0:\n            return frame_visualizer.output\n\n        boxes = predictions.pred_boxes.tensor.numpy() if predictions.has(\"pred_boxes\") else None\n        scores = predictions.scores if predictions.has(\"scores\") else None\n        classes = predictions.pred_classes.numpy() if predictions.has(\"pred_classes\") else None\n        keypoints = predictions.pred_keypoints if predictions.has(\"pred_keypoints\") else None\n\n        if predictions.has(\"pred_masks\"):\n            masks = predictions.pred_masks\n            # mask IOU is not yet enabled\n            # masks_rles = mask_util.encode(np.asarray(masks.permute(1, 2, 0), order=\"F\"))\n            # assert len(masks_rles) == num_instances\n        else:\n            masks = None\n\n        detected = [\n            _DetectedInstance(classes[i], boxes[i], mask_rle=None, color=None, ttl=8)\n            for i in range(num_instances)\n        ]\n        colors = self._assign_colors(detected)\n\n        labels = _create_text_labels(classes, scores, self.metadata.thing_classes)\n\n        if self._instance_mode == ColorMode.IMAGE_BW:\n            # any() returns uint8 tensor\n            frame_visualizer.output.img = frame_visualizer._create_grayscale_image(\n                (masks.any(dim=0) > 0).numpy() if masks is not None else None\n            )\n            alpha = 0.3\n        else:\n            alpha = 0.5\n\n        frame_visualizer.overlay_instances(\n            boxes=None if masks is not None else boxes,  # boxes are a bit distracting\n            masks=masks,\n            labels=labels,\n            keypoints=keypoints,\n           
 assigned_colors=colors,\n            alpha=alpha,\n        )\n\n        return frame_visualizer.output\n\n    def draw_sem_seg(self, frame, sem_seg, area_threshold=None):\n        \"\"\"\n        Args:\n            sem_seg (ndarray or Tensor): semantic segmentation of shape (H, W),\n                each value is the integer label.\n            area_threshold (Optional[int]): only draw segmentations larger than the threshold\n        \"\"\"\n        # don't need to do anything special\n        frame_visualizer = Visualizer(frame, self.metadata)\n        frame_visualizer.draw_sem_seg(sem_seg, area_threshold=None)\n        return frame_visualizer.output\n\n    def draw_panoptic_seg_predictions(\n        self, frame, panoptic_seg, segments_info, area_threshold=None, alpha=0.5\n    ):\n        frame_visualizer = Visualizer(frame, self.metadata)\n        pred = _PanopticPrediction(panoptic_seg, segments_info)\n\n        if self._instance_mode == ColorMode.IMAGE_BW:\n            frame_visualizer.output.img = frame_visualizer._create_grayscale_image(\n                pred.non_empty_mask()\n            )\n\n        # draw mask for all semantic segments first i.e. 
\"stuff\"\n        for mask, sinfo in pred.semantic_masks():\n            category_idx = sinfo[\"category_id\"]\n            try:\n                mask_color = [x / 255 for x in self.metadata.stuff_colors[category_idx]]\n            except AttributeError:\n                mask_color = None\n\n            frame_visualizer.draw_binary_mask(\n                mask,\n                color=mask_color,\n                text=self.metadata.stuff_classes[category_idx],\n                alpha=alpha,\n                area_threshold=area_threshold,\n            )\n\n        all_instances = list(pred.instance_masks())\n        if len(all_instances) == 0:\n            return frame_visualizer.output\n        # draw mask for all instances second\n        masks, sinfo = list(zip(*all_instances))\n        num_instances = len(masks)\n        masks_rles = mask_util.encode(\n            np.asarray(np.asarray(masks).transpose(1, 2, 0), dtype=np.uint8, order=\"F\")\n        )\n        assert len(masks_rles) == num_instances\n\n        category_ids = [x[\"category_id\"] for x in sinfo]\n        detected = [\n            _DetectedInstance(category_ids[i], bbox=None, mask_rle=masks_rles[i], color=None, ttl=8)\n            for i in range(num_instances)\n        ]\n        colors = self._assign_colors(detected)\n        labels = [self.metadata.thing_classes[k] for k in category_ids]\n\n        frame_visualizer.overlay_instances(\n            boxes=None,\n            masks=masks,\n            labels=labels,\n            keypoints=None,\n            assigned_colors=colors,\n            alpha=alpha,\n        )\n        return frame_visualizer.output\n\n    def _assign_colors(self, instances):\n        \"\"\"\n        Naive tracking heuristics to assign same color to the same instance,\n        will update the internal state of tracked instances.\n\n        Returns:\n            list[tuple[float]]: list of colors.\n        \"\"\"\n\n        # Compute iou with either boxes or masks:\n        
is_crowd = np.zeros((len(instances),), dtype=np.bool)\n        if instances[0].bbox is None:\n            assert instances[0].mask_rle is not None\n            # use mask iou only when box iou is None\n            # because box seems good enough\n            rles_old = [x.mask_rle for x in self._old_instances]\n            rles_new = [x.mask_rle for x in instances]\n            ious = mask_util.iou(rles_old, rles_new, is_crowd)\n            threshold = 0.5\n        else:\n            boxes_old = [x.bbox for x in self._old_instances]\n            boxes_new = [x.bbox for x in instances]\n            ious = mask_util.iou(boxes_old, boxes_new, is_crowd)\n            threshold = 0.6\n        if len(ious) == 0:\n            ious = np.zeros((len(self._old_instances), len(instances)), dtype=\"float32\")\n\n        # Only allow matching instances of the same label:\n        for old_idx, old in enumerate(self._old_instances):\n            for new_idx, new in enumerate(instances):\n                if old.label != new.label:\n                    ious[old_idx, new_idx] = 0\n\n        matched_new_per_old = np.asarray(ious).argmax(axis=1)\n        max_iou_per_old = np.asarray(ious).max(axis=1)\n\n        # Try to find match for each old instance:\n        extra_instances = []\n        for idx, inst in enumerate(self._old_instances):\n            if max_iou_per_old[idx] > threshold:\n                newidx = matched_new_per_old[idx]\n                if instances[newidx].color is None:\n                    instances[newidx].color = inst.color\n                    continue\n            # If an old instance does not match any new instances,\n            # keep it for the next frame in case it is just missed by the detector\n            inst.ttl -= 1\n            if inst.ttl > 0:\n                extra_instances.append(inst)\n\n        # Assign random color to newly-detected instances:\n        for inst in instances:\n            if inst.color is None:\n                inst.color = 
random_color(rgb=True, maximum=1)\n        self._old_instances = instances[:] + extra_instances\n        return [d.color for d in instances]\n"
  },
  {
    "path": "detectron2/utils/visualizer.py",
    "content": "#Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport colorsys\nimport numpy as np\nfrom enum import Enum, unique\nimport cv2\nimport matplotlib as mpl\nimport matplotlib.colors as mplc\nimport matplotlib.figure as mplfigure\nimport pysobatools.mask as mask_util\nimport torch\nfrom matplotlib.backends.backend_agg import FigureCanvasAgg\n\nfrom detectron2.structures import BitMasks, Boxes, BoxMode, Keypoints, PolygonMasks\n\nfrom .colormap import random_color,colormap\n\n__all__ = [\"ColorMode\", \"VisImage\", \"Visualizer\"]\n\n\n_SMALL_OBJECT_AREA_THRESH = 1000\n_LARGE_MASK_AREA_THRESH = 120000\n_OFF_WHITE = (1.0, 1.0, 240.0 / 255)\n_BLACK = (0, 0, 0)\n_RED = (1.0, 0, 0)\n\n_KEYPOINT_THRESHOLD = 0.05\n\n\n@unique\nclass ColorMode(Enum):\n    \"\"\"\n    Enum of different color modes to use for instance visualizations.\n\n    Attributes:\n        IMAGE: Picks a random color for every instance and overlay segmentations with low opacity.\n        SEGMENTATION: Let instances of the same category have similar colors, and overlay them with\n            high opacity. 
This provides more attention on the quality of segmentation.\n        IMAGE_BW: same as IMAGE, but convert all areas without masks to gray-scale.\n    \"\"\"\n\n    IMAGE = 0\n    SEGMENTATION = 1\n    IMAGE_BW = 2\n\n\nclass GenericMask:\n    \"\"\"\n    Attribute:\n        polygons (list[ndarray]): list[ndarray]: polygons for this mask.\n            Each ndarray has format [x, y, x, y, ...]\n        mask (ndarray): a binary mask\n    \"\"\"\n\n    def __init__(self, mask_or_polygons, height, width):\n        self._mask = self._polygons = self._has_holes = None\n        self.height = height\n        self.width = width\n\n        m = mask_or_polygons\n        if isinstance(m, dict):\n            # RLEs\n            assert \"counts\" in m and \"size\" in m\n            if isinstance(m[\"counts\"], list):  # uncompressed RLEs\n                h, w = m[\"size\"]\n                assert h == height and w == width\n                m = mask_util.frPyObjects(m, h, w)\n            self._mask = mask_util.decode(m)[:, :]\n            return\n\n        if isinstance(m, list):  # list[ndarray]\n            self._polygons = [np.asarray(x).reshape(-1) for x in m]\n            return\n\n        if isinstance(m, np.ndarray):  # assumed to be a binary mask\n            assert m.shape[1] != 2, m.shape\n            assert m.shape == (height, width), m.shape\n            self._mask = m.astype(\"uint8\")\n            return\n\n        raise ValueError(\"GenericMask cannot handle object {} of type '{}'\".format(m, type(m)))\n\n    @property\n    def mask(self):\n        if self._mask is None:\n            self._mask = self.polygons_to_mask(self._polygons)\n        return self._mask\n\n    @property\n    def polygons(self):\n        if self._polygons is None:\n            self._polygons, self._has_holes = self.mask_to_polygons(self._mask)\n        return self._polygons\n\n    @property\n    def has_holes(self):\n        if self._has_holes is None:\n            if self._mask is not 
None:\n                self._polygons, self._has_holes = self.mask_to_polygons(self._mask)\n            else:\n                self._has_holes = False  # if original format is polygon, does not have holes\n        return self._has_holes\n\n    def mask_to_polygons(self, mask):\n        # cv2.RETR_CCOMP flag retrieves all the contours and arranges them to a 2-level\n        # hierarchy. External contours (boundary) of the object are placed in hierarchy-1.\n        # Internal contours (holes) are placed in hierarchy-2.\n        # cv2.CHAIN_APPROX_NONE flag gets vertices of polygons from contours.\n        mask = mask.copy()\n        res = cv2.findContours(mask.astype(\"uint8\"), cv2.RETR_CCOMP, cv2.CHAIN_APPROX_NONE)\n        hierarchy = res[-1]\n        # try:\n        has_holes = (hierarchy.reshape(-1, 4)[:, 3] >= 0).sum() > 0\n        # except:\n            # print(hierarchy)\n            # assert 1!= 1\n        res = res[-2]\n        res = [x.flatten() for x in res]\n        res = [x for x in res if len(x) >= 6]\n        return res, has_holes\n\n    def polygons_to_mask(self, polygons):\n        rle = mask_util.frPyObjects(polygons, self.height, self.width)\n        rle = mask_util.merge(rle)\n        return mask_util.decode(rle)[:, :]\n\n    def area(self):\n        return self.mask.sum()\n\n    def bbox(self):\n        p = mask_util.frPyObjects(self.polygons, self.height, self.width)\n        p = mask_util.merge(p)\n        bbox = mask_util.toBbox(p)\n        bbox[2] += bbox[0]\n        bbox[3] += bbox[1]\n        return bbox\n\n\nclass _PanopticPrediction:\n    def __init__(self, panoptic_seg, segments_info):\n        self._seg = panoptic_seg\n\n        self._sinfo = {s[\"id\"]: s for s in segments_info}  # seg id -> seg info\n        segment_ids, areas = torch.unique(panoptic_seg, sorted=True, return_counts=True)\n        areas = areas.numpy()\n        sorted_idxs = np.argsort(-areas)\n        self._seg_ids, self._seg_areas = segment_ids[sorted_idxs], 
areas[sorted_idxs]\n        self._seg_ids = self._seg_ids.tolist()\n        for sid, area in zip(self._seg_ids, self._seg_areas):\n            if sid in self._sinfo:\n                self._sinfo[sid][\"area\"] = float(area)\n\n    def non_empty_mask(self):\n        \"\"\"\n        Returns:\n            (H, W) array, a mask for all pixels that have a prediction\n        \"\"\"\n        empty_ids = []\n        for id in self._seg_ids:\n            if id not in self._sinfo:\n                empty_ids.append(id)\n        if len(empty_ids) == 0:\n            return np.zeros(self._seg.shape, dtype=np.uint8)\n        assert (\n            len(empty_ids) == 1\n        ), \">1 ids corresponds to no labels. This is currently not supported\"\n        return (self._seg != empty_ids[0]).numpy().astype(np.bool)\n\n    def semantic_masks(self):\n        for sid in self._seg_ids:\n            sinfo = self._sinfo.get(sid)\n            if sinfo is None or sinfo[\"isthing\"]:\n                # Some pixels (e.g. 
id 0 in PanopticFPN) have no instance or semantic predictions.\n                continue\n            yield (self._seg == sid).numpy().astype(np.bool), sinfo\n\n    def instance_masks(self):\n        for sid in self._seg_ids:\n            sinfo = self._sinfo.get(sid)\n            if sinfo is None or not sinfo[\"isthing\"]:\n                continue\n            mask = (self._seg == sid).numpy().astype(np.bool)\n            if mask.sum() > 0:\n                yield mask, sinfo\n\n\ndef _create_text_labels(classes, scores, class_names):\n    \"\"\"\n    Args:\n        classes (list[int] or None):\n        scores (list[float] or None):\n        class_names (list[str] or None):\n\n    Returns:\n        list[str] or None\n    \"\"\"\n    labels = None\n    if class_names is not None and len(class_names) >= 1:\n        labels = [class_names[i] for i in classes]\n    if scores is not None:\n        if labels is None:\n            labels = [\"{:.0f}%\".format(s * 100) for s in scores]\n        else:\n            labels = [\"{}\".format(l) for l in labels]\n            # print(labels)\n    return labels\n\n\nclass VisImage:\n    def __init__(self, img, scale=1.0):\n        \"\"\"\n        Args:\n            img (ndarray): an RGB image of shape (H, W, 3).\n            scale (float): scale the input image\n        \"\"\"\n        self.img = img\n        self.scale = scale\n        self.width, self.height = img.shape[1], img.shape[0]\n        self._setup_figure(img)\n\n    def _setup_figure(self, img):\n        \"\"\"\n        Args:\n            Same as in :meth:`__init__()`.\n\n        Returns:\n            fig (matplotlib.pyplot.figure): top level container for all the image plot elements.\n            ax (matplotlib.pyplot.Axes): contains figure elements and sets the coordinate system.\n        \"\"\"\n        fig = mplfigure.Figure(frameon=False)\n        self.dpi = fig.get_dpi()\n        # add a small 1e-2 to avoid precision lost due to matplotlib's truncation\n        # 
(https://github.com/matplotlib/matplotlib/issues/15363)\n        fig.set_size_inches(\n            (self.width * self.scale + 1e-2) / self.dpi,\n            (self.height * self.scale + 1e-2) / self.dpi,\n        )\n        self.canvas = FigureCanvasAgg(fig)\n        # self.canvas = mpl.backends.backend_cairo.FigureCanvasCairo(fig)\n        ax = fig.add_axes([0.0, 0.0, 1.0, 1.0])\n        ax.axis(\"off\")\n        ax.set_xlim(0.0, self.width)\n        ax.set_ylim(self.height)\n\n        self.fig = fig\n        self.ax = ax\n\n    def save(self, filepath):\n        \"\"\"\n        Args:\n            filepath (str): a string that contains the absolute path, including the file name, where\n                the visualized image will be saved.\n        \"\"\"\n        cv2.imwrite(filepath, self.get_image()[:, :, ::-1])\n\n    def get_image(self):\n        \"\"\"\n        Returns:\n            ndarray: the visualized image of shape (H, W, 3) (RGB) in uint8 type.\n              The shape is scaled w.r.t the input image using the given `scale` argument.\n        \"\"\"\n        canvas = self.canvas\n        s, (width, height) = canvas.print_to_buffer()\n        if (self.width, self.height) != (width, height):\n            img = cv2.resize(self.img, (width, height))\n        else:\n            img = self.img\n\n        # buf = io.BytesIO()  # works for cairo backend\n        # canvas.print_rgba(buf)\n        # width, height = self.width, self.height\n        # s = buf.getvalue()\n\n        buffer = np.frombuffer(s, dtype=\"uint8\")\n\n        # imshow is slow. 
blend manually (still quite slow)\n        img_rgba = buffer.reshape(height, width, 4)\n        rgb, alpha = np.split(img_rgba, [3], axis=2)\n\n        try:\n            import numexpr as ne  # fuse them with numexpr\n\n            visualized_image = ne.evaluate(\"img * (1 - alpha / 255.0) + rgb * (alpha / 255.0)\")\n        except ImportError:\n            alpha = alpha.astype(\"float32\") / 255.0\n            visualized_image = img * (1 - alpha) + rgb * alpha\n\n        visualized_image = visualized_image.astype(\"uint8\")\n\n        return visualized_image\n\n\nclass Visualizer:\n    def __init__(self, img_rgb, metadata, scale=1.0, instance_mode=ColorMode.IMAGE):\n        \"\"\"\n        Args:\n            img_rgb: a numpy array of shape (H, W, C), where H and W correspond to\n                the height and width of the image respectively. C is the number of\n                color channels. The image is required to be in RGB format since that\n                is a requirement of the Matplotlib library. The image is also expected\n                to be in the range [0, 255].\n            metadata (MetadataCatalog): image metadata.\n        \"\"\"\n        self.img = np.asarray(img_rgb).clip(0, 255).astype(np.uint8)\n        self.metadata = metadata\n        self.output = VisImage(self.img, scale=scale)\n        self.cpu_device = torch.device(\"cpu\")\n\n        # too small texts are useless, therefore clamp to 9\n        self._default_font_size = max(\n            np.sqrt(self.output.height * self.output.width) // 90, 10 // scale\n        )\n        self._instance_mode = instance_mode\n        self._colors = colormap(True,1)\n\n    def draw_instance_predictions(self, predictions,is_ass= False, labels_align = 'left'):\n        \"\"\"\n        Draw instance-level prediction results on an image.\n\n        Args:\n            predictions (Instances): the output of an instance detection/segmentation\n                model. 
Following fields will be used to draw:\n                \"pred_boxes\", \"pred_classes\", \"scores\", \"pred_masks\" (or \"pred_masks_rle\").\n\n        Returns:\n            output (VisImage): image object with visualizations.\n        \"\"\"\n        boxes = predictions.pred_boxes if predictions.has(\"pred_boxes\") else None\n        scores = predictions.scores if predictions.has(\"scores\") else None\n        classes = predictions.pred_classes if predictions.has(\"pred_classes\") else None\n        associations = predictions.pred_associations if predictions.has(\"pred_associations\") else None\n        if is_ass:\n            labels = _create_text_labels(classes, scores, ['obj+shad'])\n        else:\n            labels = _create_text_labels(classes, scores, self.metadata.get(\"thing_classes\",None))\n        keypoints = predictions.pred_keypoints if predictions.has(\"pred_keypoints\") else None\n\n        if predictions.has(\"pred_masks\"):\n            masks = predictions.pred_masks\n            masks = [GenericMask(x, self.output.height, self.output.width) for x in masks]\n        else:\n            masks = None\n\n        if self._instance_mode == ColorMode.SEGMENTATION and self.metadata.get(\"thing_colors\"):\n            colors = [\n                self._jitter([x / 255 for x in self.metadata.thing_colors[c]]) for c in classes\n            ]\n            alpha = 0.8\n        else:\n            colors = [self._colors[k] for k in associations]\n            alpha = 0.5\n\n        if self._instance_mode == ColorMode.IMAGE_BW:\n            self.output.img = self._create_grayscale_image(\n                (predictions.pred_masks.any(dim=0) > 0).numpy()\n            )\n            alpha = 0.3\n\n        self.overlay_instances(\n            masks=masks,\n            boxes=boxes,\n            labels=labels,\n            keypoints=keypoints,\n            assigned_colors=colors,\n            alpha=alpha,\n            labels_align=labels_align,\n            
associations= associations\n        )\n        return self.output\n\n    def draw_sem_seg(self, sem_seg, area_threshold=None, alpha=0.8):\n        \"\"\"\n        Draw semantic segmentation predictions/labels.\n\n        Args:\n            sem_seg (Tensor or ndarray): the segmentation of shape (H, W).\n            area_threshold (int): segments with less than `area_threshold` are not drawn.\n            alpha (float): the larger it is, the more opaque the segmentations are.\n\n        Returns:\n            output (VisImage): image object with visualizations.\n        \"\"\"\n        if isinstance(sem_seg, torch.Tensor):\n            sem_seg = sem_seg.numpy()\n        labels, areas = np.unique(sem_seg, return_counts=True)\n        sorted_idxs = np.argsort(-areas).tolist()\n        labels = labels[sorted_idxs]\n        for label in labels:\n            try:\n                mask_color = [x / 255 for x in self.metadata.stuff_colors[label]]\n            except (AttributeError, IndexError):\n                mask_color = None\n\n            binary_mask = (sem_seg == label).astype(np.uint8)\n            text = self.metadata.stuff_classes[label]\n            self.draw_binary_mask(\n                binary_mask,\n                color=mask_color,\n                edge_color=_OFF_WHITE,\n                text=text,\n                alpha=alpha,\n                area_threshold=area_threshold,\n            )\n        return self.output\n\n    def draw_panoptic_seg_predictions(\n        self, panoptic_seg, segments_info, area_threshold=None, alpha=0.7\n    ):\n        \"\"\"\n        Draw panoptic prediction results on an image.\n\n        Args:\n            panoptic_seg (Tensor): of shape (height, width) where the values are ids for each\n                segment.\n            segments_info (list[dict]): Describe each segment in `panoptic_seg`.\n                Each dict contains keys \"id\", \"category_id\", \"isthing\".\n            area_threshold (int): stuff segments with 
less than `area_threshold` are not drawn.\n\n        Returns:\n            output (VisImage): image object with visualizations.\n        \"\"\"\n        pred = _PanopticPrediction(panoptic_seg, segments_info)\n\n        if self._instance_mode == ColorMode.IMAGE_BW:\n            self.output.img = self._create_grayscale_image(pred.non_empty_mask())\n\n        # draw mask for all semantic segments first i.e. \"stuff\"\n        for mask, sinfo in pred.semantic_masks():\n            category_idx = sinfo[\"category_id\"]\n            try:\n                mask_color = [x / 255 for x in self.metadata.stuff_colors[category_idx]]\n            except AttributeError:\n                mask_color = None\n\n            text = self.metadata.stuff_classes[category_idx]\n            self.draw_binary_mask(\n                mask,\n                color=mask_color,\n                edge_color=_OFF_WHITE,\n                text=text,\n                alpha=alpha,\n                area_threshold=area_threshold,\n            )\n\n        # draw mask for all instances second\n        all_instances = list(pred.instance_masks())\n        if len(all_instances) == 0:\n            return self.output\n        masks, sinfo = list(zip(*all_instances))\n        category_ids = [x[\"category_id\"] for x in sinfo]\n\n        try:\n            scores = [x[\"score\"] for x in sinfo]\n        except KeyError:\n            scores = None\n        labels = _create_text_labels(category_ids, scores, self.metadata.thing_classes)\n\n        try:\n            colors = [random_color(rgb=True, maximum=1) for k in category_ids]\n        except AttributeError:\n            colors = None\n        self.overlay_instances(masks=masks, labels=labels, assigned_colors=colors)\n\n        return self.output\n\n    def draw_dataset_dict(self, dic):\n        annos = dic.get(\"annotations\", None)\n        asso_annos = dic.get(\"association_anno\",None)\n        if annos:\n            if \"segmentation\" in annos[0]:\n          
      masks = [x[\"segmentation\"] for x in annos]\n            else:\n                masks = None\n            if \"keypoints\" in annos[0]:\n                keypts = [x[\"keypoints\"] for x in annos]\n                keypts = np.array(keypts).reshape(len(annos), -1, 3)\n            else:\n                keypts = None\n\n            boxes = [BoxMode.convert(x[\"bbox\"], x[\"bbox_mode\"], BoxMode.XYXY_ABS) for x in annos]\n            colos = [self._colors[i] for i in range(1,int(len(boxes)/2)+1)]*2\n            labels = [x[\"category_id\"] for x in annos]\n            names = self.metadata.get(\"thing_classes\", None)\n            if names:\n                labels = [names[i] for i in labels]\n            labels = [i + (\"|crowd\" if a.get(\"iscrowd\", 0) else \"\") for i, a in zip(labels, annos)]\n            self.overlay_instances(labels=labels, boxes=boxes, masks=masks, keypoints=keypts,assigned_colors=colos)\n        if asso_annos:\n            asso_boxes = [BoxMode.convert(x[\"bbox\"],x[\"bbox_mode\"], BoxMode.XYXY_ABS) for x in asso_annos]\n            colos = colos[:int(len(colos)/2)]\n            asso_labels = [x[\"category_id\"] for x in asso_annos]\n            names = ['obj+shad']\n            if names:\n                asso_labels = [names[i] for i in asso_labels]\n            self.overlay_instances(labels=asso_labels,boxes=asso_boxes,labels_align='right',assigned_colors=colos)\n        sem_seg = dic.get(\"sem_seg\", None)\n        if sem_seg is None and \"sem_seg_file_name\" in dic:\n            sem_seg = cv2.imread(dic[\"sem_seg_file_name\"], cv2.IMREAD_GRAYSCALE)\n        if sem_seg is not None:\n            self.draw_sem_seg(sem_seg, area_threshold=0, alpha=0.5)\n        return self.output\n\n    def overlay_instances(\n        self,\n        *,\n        boxes=None,\n        labels=None,\n        masks=None,\n        keypoints=None,\n        assigned_colors=None,\n        alpha=0.5,\n        labels_align = \"left\",\n        associations = None\n 
       \n    ):\n        \"\"\"\n        Args:\n            boxes (Boxes or ndarray): either a :class:`Boxes` or a Nx4 numpy array\n                of XYXY_ABS format for the N objects in a single image.\n            labels (list[str]): the text to be displayed for each instance.\n            masks (masks-like object): Supported types are:\n\n                * `structures.masks.PolygonMasks`, `structures.masks.BitMasks`.\n                * list[list[ndarray]]: contains the segmentation masks for all objects in one image.\n                    The first level of the list corresponds to individual instances. The second\n                    level to all the polygon that compose the instance, and the third level\n                    to the polygon coordinates. The third level should have the format of\n                    [x0, y0, x1, y1, ..., xn, yn] (n >= 3).\n                * list[ndarray]: each ndarray is a binary mask of shape (H, W).\n                * list[dict]: each dict is a COCO-style RLE.\n            keypoints (Keypoint or array like): an array-like object of shape (N, K, 3),\n                where the N is the number of instances and K is the number of keypoints.\n                The last dimension corresponds to (x, y, visibility or score).\n            assigned_colors (list[matplotlib.colors]): a list of colors, where each color\n                corresponds to each mask or box in the image. 
Refer to 'matplotlib.colors'\n                for full list of formats that the colors are accepted in.\n\n        Returns:\n            output (VisImage): image object with visualizations.\n        \"\"\"\n        num_instances = None\n        if boxes is not None:\n            boxes = self._convert_boxes(boxes)\n            num_instances = len(boxes)\n        if masks is not None:\n            masks = self._convert_masks(masks)\n            if num_instances:\n                assert len(masks) == num_instances\n            else:\n                num_instances = len(masks)\n        if keypoints is not None:\n            if num_instances:\n                assert len(keypoints) == num_instances\n            else:\n                num_instances = len(keypoints)\n            keypoints = self._convert_keypoints(keypoints)\n        if labels is not None:\n            assert len(labels) == num_instances\n        if assigned_colors is None:\n            assigned_colors = [random_color(rgb=True, maximum=1) for _ in range(num_instances)]\n        if num_instances == 0:\n            return self.output\n\n        # Display in largest to smallest order to reduce occlusion.\n        areas = None\n        if boxes is not None:\n            areas = np.prod(boxes[:, 2:] - boxes[:, :2], axis=1)\n        elif masks is not None:\n            areas = np.asarray([x.area() for x in masks])\n\n        for i in range(num_instances):\n            if associations is not None:\n                if associations[i] == 0:\n                    continue\n            color = assigned_colors[i]\n            if boxes is not None:\n                self.draw_box(boxes[i], edge_color=color)\n                pass\n\n            if masks is not None:\n                try :\n                    for segment in masks[i].polygons:\n                        if labels[i] == 'Shadow':\n                            alpha = 0.8\n                        else:\n                            alpha = 0.5\n                  
      self.draw_polygon(segment.reshape(-1, 2), color, alpha=alpha)\n                except:\n                    continue\n\n            if labels is not None:\n                # first get a box\n                if boxes is not None:\n                    x0, y0, x1, y1 = boxes[i]\n                    text_pos = (x0, y0)  # if drawing boxes, put text on the box corner.\n                    horiz_align = labels_align\n                    if horiz_align == 'right':\n                        text_pos = (x1,y1-20)\n                elif masks is not None:\n                    x0, y0, x1, y1 = masks[i].bbox()\n\n                    # draw text in the center (defined by median) when box is not drawn\n                    # median is less sensitive to outliers.\n                    text_pos = np.median(masks[i].mask.nonzero(), axis=1)[::-1]\n                    horiz_align = \"center\"\n                else:\n                    continue  # drawing the box confidence for keypoints isn't very useful.\n                # for small objects, draw text at the side to avoid occlusion\n                instance_area = (y1 - y0) * (x1 - x0)\n                if (\n                    instance_area < _SMALL_OBJECT_AREA_THRESH * self.output.scale\n                    or y1 - y0 < 40 * self.output.scale\n                ):\n                    if y1 >= self.output.height - 5:\n                        text_pos = (x1, y0)\n                    else:\n                        text_pos = (x0, y1)\n                height_ratio = (y1 - y0) / np.sqrt(self.output.height * self.output.width)\n                lighter_color = self._change_color_brightness(color, brightness_factor=0.7)\n                font_size = (\n                    np.clip((height_ratio - 0.02) / 0.08 + 1, 1.2, 2)\n                    * 0.5\n                    * self._default_font_size\n                )\n                self.draw_text(\n                    labels[i],\n                    text_pos,\n                    
color=lighter_color,\n                    horizontal_alignment=horiz_align,\n                    font_size=font_size,\n                )\n\n        # draw keypoints\n        if keypoints is not None:\n            for keypoints_per_instance in keypoints:\n                self.draw_and_connect_keypoints(keypoints_per_instance)\n\n        return self.output\n\n    def draw_and_connect_keypoints(self, keypoints):\n        \"\"\"\n        Draws keypoints of an instance and follows the rules for keypoint connections\n        to draw lines between appropriate keypoints. This follows color heuristics for\n        line color.\n\n        Args:\n            keypoints (Tensor): a tensor of shape (K, 3), where K is the number of keypoints\n                and the last dimension corresponds to (x, y, probability).\n\n        Returns:\n            output (VisImage): image object with visualizations.\n        \"\"\"\n        visible = {}\n        for idx, keypoint in enumerate(keypoints):\n            # draw keypoint\n            x, y, prob = keypoint\n            if prob > _KEYPOINT_THRESHOLD:\n                self.draw_circle((x, y), color=_RED)\n                keypoint_name = self.metadata.keypoint_names[idx]\n                visible[keypoint_name] = (x, y)\n\n        for kp0, kp1, color in self.metadata.keypoint_connection_rules:\n            if kp0 in visible and kp1 in visible:\n                x0, y0 = visible[kp0]\n                x1, y1 = visible[kp1]\n                color = tuple(x / 255.0 for x in color)\n                self.draw_line([x0, x1], [y0, y1], color=color)\n\n        # draw lines from nose to mid-shoulder and mid-shoulder to mid-hip\n        # Note that this strategy is specific to person keypoints.\n        # For other keypoints, it should just do nothing\n        try:\n            ls_x, ls_y = visible[\"left_shoulder\"]\n            rs_x, rs_y = visible[\"right_shoulder\"]\n            mid_shoulder_x, mid_shoulder_y = (ls_x + rs_x) / 2, (ls_y + rs_y) / 
2\n        except KeyError:\n            pass\n        else:\n            # draw line from nose to mid-shoulder\n            nose_x, nose_y = visible.get(\"nose\", (None, None))\n            if nose_x is not None:\n                self.draw_line([nose_x, mid_shoulder_x], [nose_y, mid_shoulder_y], color=_RED)\n\n            try:\n                # draw line from mid-shoulder to mid-hip\n                lh_x, lh_y = visible[\"left_hip\"]\n                rh_x, rh_y = visible[\"right_hip\"]\n            except KeyError:\n                pass\n            else:\n                mid_hip_x, mid_hip_y = (lh_x + rh_x) / 2, (lh_y + rh_y) / 2\n                self.draw_line([mid_hip_x, mid_shoulder_x], [mid_hip_y, mid_shoulder_y], color=_RED)\n        return self.output\n\n    \"\"\"\n    Primitive drawing functions:\n    \"\"\"\n\n    def draw_text(\n        self, text, position, *, font_size=None, color=\"g\", horizontal_alignment=\"center\"\n    ):\n        \"\"\"\n        Args:\n            text (str): class label\n            position (tuple): a tuple of the x and y coordinates to place text on image.\n            font_size (int, optional): font of the text. If not provided, a font size\n                proportional to the image width is calculated and used.\n            color: color of the text. 
Refer to `matplotlib.colors` for full list\n                of formats that are accepted.\n            horizontal_alignment (str): see `matplotlib.text.Text`\n\n        Returns:\n            output (VisImage): image object with text drawn.\n        \"\"\"\n        if not font_size:\n            font_size = self._default_font_size\n\n        # since the text background is dark, we don't want the text to be dark\n        color = np.maximum(list(mplc.to_rgb(color)), 0.2)\n        color[np.argmax(color)] = max(0.8, np.max(color))\n\n        x, y = position\n        self.output.ax.text(\n            x,\n            y,\n            text,\n            size=font_size * self.output.scale,\n            family=\"sans-serif\",\n            bbox={\"facecolor\": \"black\", \"alpha\": 0.8, \"pad\": 0.7, \"edgecolor\": \"none\"},\n            verticalalignment=\"top\",\n            horizontalalignment=horizontal_alignment,\n            color=color,\n            zorder=10,\n        )\n        return self.output\n\n    def draw_box(self, box_coord, alpha=0.5, edge_color=\"g\", line_style=\"-\"):\n        \"\"\"\n        Args:\n            box_coord (tuple): a tuple containing x0, y0, x1, y1 coordinates, where x0 and y0\n                are the coordinates of the image's top left corner. x1 and y1 are the\n                coordinates of the image's bottom right corner.\n            alpha (float): blending efficient. Smaller values lead to more transparent masks.\n            edge_color: color of the outline of the box. 
Refer to `matplotlib.colors`\n                for full list of formats that are accepted.\n            line_style (string): the string to use to create the outline of the boxes.\n\n        Returns:\n            output (VisImage): image object with box drawn.\n        \"\"\"\n        x0, y0, x1, y1 = box_coord\n        width = x1 - x0\n        height = y1 - y0\n\n        linewidth = max(5,1)\n\n        self.output.ax.add_patch(\n            mpl.patches.Rectangle(\n                (x0, y0),\n                width,\n                height,\n                fill=False,\n                edgecolor=edge_color,\n                linewidth=linewidth * self.output.scale,\n                alpha=alpha,\n                linestyle=line_style,\n            )\n        )\n        return self.output\n\n    def draw_circle(self, circle_coord, color, radius=3):\n        \"\"\"\n        Args:\n            circle_coord (list(int) or tuple(int)): contains the x and y coordinates\n                of the center of the circle.\n            color: color of the polygon. Refer to `matplotlib.colors` for a full list of\n                formats that are accepted.\n            radius (int): radius of the circle.\n\n        Returns:\n            output (VisImage): image object with box drawn.\n        \"\"\"\n        x, y = circle_coord\n        self.output.ax.add_patch(\n            mpl.patches.Circle(circle_coord, radius=radius, fill=True, color=color)\n        )\n        return self.output\n\n    def draw_line(self, x_data, y_data, color):\n        \"\"\"\n        Args:\n            x_data (list[int]): a list containing x values of all the points being drawn.\n                Length of list should match the length of y_data.\n            y_data (list[int]): a list containing y values of all the points being drawn.\n                Length of list should match the length of x_data.\n            color: color of the line. 
Refer to `matplotlib.colors` for a full list of\n                formats that are accepted.\n\n        Returns:\n            output (VisImage): image object with line drawn.\n        \"\"\"\n        linewidth = max(self._default_font_size / 3, 1)\n        self.output.ax.add_line(\n            mpl.lines.Line2D(x_data, y_data, linewidth=linewidth * self.output.scale, color=color)\n        )\n        return self.output\n\n    def draw_binary_mask(\n        self, binary_mask, color=None, *, edge_color=None, text=None, alpha=0.5, area_threshold=4096\n    ):\n        \"\"\"\n        Args:\n            binary_mask (ndarray): numpy array of shape (H, W), where H is the image height and\n                W is the image width. Each value in the array is either a 0 or 1 value of uint8\n                type.\n            color: color of the mask. Refer to `matplotlib.colors` for a full list of\n                formats that are accepted. If None, will pick a random color.\n            edge_color: color of the polygon edges. Refer to `matplotlib.colors` for a\n                full list of formats that are accepted.\n            text (str): if None, will be drawn in the object's center of mass.\n            alpha (float): blending efficient. 
Smaller values lead to more transparent masks.\n            area_threshold (float): a connected component small than this will not be shown.\n\n        Returns:\n            output (VisImage): image object with mask drawn.\n        \"\"\"\n        if color is None:\n            color = random_color(rgb=True, maximum=1)\n        if area_threshold is None:\n            area_threshold = 4096\n\n        has_valid_segment = False\n        binary_mask = binary_mask.astype(\"uint8\")  # opencv needs uint8\n        mask = GenericMask(binary_mask, self.output.height, self.output.width)\n        shape2d = (binary_mask.shape[0], binary_mask.shape[1])\n\n        if not mask.has_holes:\n            # draw polygons for regular masks\n            for segment in mask.polygons:\n                area = mask_util.area(mask_util.frPyObjects([segment], shape2d[0], shape2d[1]))\n                if area < area_threshold:\n                    continue\n                has_valid_segment = True\n                segment = segment.reshape(-1, 2)\n                self.draw_polygon(segment, color=color, edge_color=edge_color, alpha=alpha)\n        else:\n            rgba = np.zeros(shape2d + (4,), dtype=\"float32\")\n            rgba[:, :, :3] = color\n            rgba[:, :, 3] = (mask.mask == 1).astype(\"float32\") * alpha\n            has_valid_segment = True\n            self.output.ax.imshow(rgba)\n\n        if text is not None and has_valid_segment:\n            # TODO sometimes drawn on wrong objects. 
the heuristics here can improve.\n            lighter_color = self._change_color_brightness(color, brightness_factor=0.7)\n            _num_cc, cc_labels, stats, centroids = cv2.connectedComponentsWithStats(binary_mask, 8)\n            largest_component_id = np.argmax(stats[1:, -1]) + 1\n\n            # draw text on the largest component, as well as other very large components.\n            for cid in range(1, _num_cc):\n                if cid == largest_component_id or stats[cid, -1] > _LARGE_MASK_AREA_THRESH:\n                    # median is more stable than centroid\n                    # center = centroids[largest_component_id]\n                    center = np.median((cc_labels == cid).nonzero(), axis=1)[::-1]\n                    self.draw_text(text, center, color=lighter_color)\n        return self.output\n\n    def draw_polygon(self, segment, color, edge_color=None, alpha=0.5):\n        \"\"\"\n        Args:\n            segment: numpy array of shape Nx2, containing all the points in the polygon.\n            color: color of the polygon. Refer to `matplotlib.colors` for a full list of\n                formats that are accepted.\n            edge_color: color of the polygon edges. Refer to `matplotlib.colors` for a\n                full list of formats that are accepted. If not provided, a darker shade\n                of the polygon color will be used instead.\n            alpha (float): blending efficient. 
Smaller values lead to more transparent masks.\n\n        Returns:\n            output (VisImage): image object with polygon drawn.\n        \"\"\"\n        if edge_color is None:\n            # make edge color darker than the polygon color\n            if alpha > 0.8:\n                edge_color = self._change_color_brightness(color, brightness_factor=-0.7)\n            else:\n                edge_color = color\n        edge_color = mplc.to_rgb(edge_color) + (1,)\n\n        polygon = mpl.patches.Polygon(\n            segment,\n            fill=True,\n            facecolor=mplc.to_rgb(color) + (alpha,),\n            edgecolor=edge_color,\n            linewidth=max(self._default_font_size // 15 * self.output.scale, 1),\n        )\n        self.output.ax.add_patch(polygon)\n        return self.output\n\n    \"\"\"\n    Internal methods:\n    \"\"\"\n\n    def _jitter(self, color):\n        \"\"\"\n        Randomly modifies given color to produce a slightly different color than the color given.\n\n        Args:\n            color (tuple[double]): a tuple of 3 elements, containing the RGB values of the color\n                picked. The values in the list are in the [0.0, 1.0] range.\n\n        Returns:\n            jittered_color (tuple[double]): a tuple of 3 elements, containing the RGB values of the\n                color after being jittered. 
The values in the list are in the [0.0, 1.0] range.\n        \"\"\"\n        color = mplc.to_rgb(color)\n        vec = np.random.rand(3)\n        # better to do it in another color space\n        vec = vec / np.linalg.norm(vec) * 0.5\n        res = np.clip(vec + color, 0, 1)\n        return tuple(res)\n\n    def _create_grayscale_image(self, mask=None):\n        \"\"\"\n        Create a grayscale version of the original image.\n        The colors in masked area, if given, will be kept.\n        \"\"\"\n        img_bw = self.img.astype(\"f4\").mean(axis=2)\n        img_bw = np.stack([img_bw] * 3, axis=2)\n        if mask is not None:\n            img_bw[mask] = self.img[mask]\n        return img_bw\n\n    def _change_color_brightness(self, color, brightness_factor):\n        \"\"\"\n        Depending on the brightness_factor, gives a lighter or darker color i.e. a color with\n        less or more saturation than the original color.\n\n        Args:\n            color: color of the polygon. Refer to `matplotlib.colors` for a full list of\n                formats that are accepted.\n            brightness_factor (float): a value in [-1.0, 1.0] range. A lightness factor of\n                0 will correspond to no change, a factor in [-1.0, 0) range will result in\n                a darker color and a factor in (0, 1.0] range will result in a lighter color.\n\n        Returns:\n            modified_color (tuple[double]): a tuple containing the RGB values of the\n                modified color. 
Each value in the tuple is in the [0.0, 1.0] range.\n        \"\"\"\n        assert brightness_factor >= -1.0 and brightness_factor <= 1.0\n        color = mplc.to_rgb(color)\n        polygon_color = colorsys.rgb_to_hls(*mplc.to_rgb(color))\n        modified_lightness = polygon_color[1] + (brightness_factor * polygon_color[1])\n        modified_lightness = 0.0 if modified_lightness < 0.0 else modified_lightness\n        modified_lightness = 1.0 if modified_lightness > 1.0 else modified_lightness\n        modified_color = colorsys.hls_to_rgb(polygon_color[0], modified_lightness, polygon_color[2])\n        return modified_color\n\n    def _convert_boxes(self, boxes):\n        \"\"\"\n        Convert different format of boxes to a Nx4 array.\n        \"\"\"\n        if isinstance(boxes, Boxes):\n            return boxes.tensor.numpy()\n        else:\n            return np.asarray(boxes)\n\n    def _convert_masks(self, masks_or_polygons):\n        \"\"\"\n        Convert different format of masks or polygons to a tuple of masks and polygons.\n\n        Returns:\n            list[GenericMask]:\n        \"\"\"\n\n        m = masks_or_polygons\n        if isinstance(m, PolygonMasks):\n            m = m.polygons\n        if isinstance(m, BitMasks):\n            m = m.tensor.numpy()\n        if isinstance(m, torch.Tensor):\n            m = m.numpy()\n        ret = []\n        for x in m:\n            if isinstance(x, GenericMask):\n                ret.append(x)\n            else:\n                ret.append(GenericMask(x, self.output.height, self.output.width))\n        return ret\n\n    def _convert_keypoints(self, keypoints):\n        if isinstance(keypoints, Keypoints):\n            keypoints = keypoints.tensor\n        keypoints = np.asarray(keypoints)\n        return keypoints\n\n    def get_output(self):\n        \"\"\"\n        Returns:\n            output (VisImage): the image output containing the visualizations added\n                to the image.\n        
\"\"\"\n        return self.output\n"
  },
  {
    "path": "detectron2/utils/visualizer.py.back",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport colorsys\nimport numpy as np\nfrom enum import Enum, unique\nimport cv2\nimport matplotlib as mpl\nimport matplotlib.colors as mplc\nimport matplotlib.figure as mplfigure\nimport pysobatools.mask as mask_util\nimport torch\nfrom matplotlib.backends.backend_agg import FigureCanvasAgg\n\nfrom detectron2.structures import BitMasks, Boxes, BoxMode, Keypoints, PolygonMasks\n\nfrom .colormap import random_color,colormap\n\n__all__ = [\"ColorMode\", \"VisImage\", \"Visualizer\"]\n\n\n_SMALL_OBJECT_AREA_THRESH = 1000\n_LARGE_MASK_AREA_THRESH = 120000\n_OFF_WHITE = (1.0, 1.0, 240.0 / 255)\n_BLACK = (0, 0, 0)\n_RED = (1.0, 0, 0)\n\n_KEYPOINT_THRESHOLD = 0.05\n\n\n@unique\nclass ColorMode(Enum):\n    \"\"\"\n    Enum of different color modes to use for instance visualizations.\n\n    Attributes:\n        IMAGE: Picks a random color for every instance and overlay segmentations with low opacity.\n        SEGMENTATION: Let instances of the same category have similar colors, and overlay them with\n            high opacity. 
This provides more attention on the quality of segmentation.\n        IMAGE_BW: same as IMAGE, but convert all areas without masks to gray-scale.\n    \"\"\"\n\n    IMAGE = 0\n    SEGMENTATION = 1\n    IMAGE_BW = 2\n\n\nclass GenericMask:\n    \"\"\"\n    Attribute:\n        polygons (list[ndarray]): list[ndarray]: polygons for this mask.\n            Each ndarray has format [x, y, x, y, ...]\n        mask (ndarray): a binary mask\n    \"\"\"\n\n    def __init__(self, mask_or_polygons, height, width):\n        self._mask = self._polygons = self._has_holes = None\n        self.height = height\n        self.width = width\n        m = mask_or_polygons\n        if isinstance(m, dict):\n            # RLEs\n            assert \"counts\" in m and \"size\" in m\n            if isinstance(m[\"counts\"], list):  # uncompressed RLEs\n                h, w = m[\"size\"]\n                assert h == height and w == width\n                m = mask_util.frPyObjects(m, h, w)\n            self._mask = mask_util.decode(m)[:, :]\n            return\n\n        if isinstance(m, list):  # list[ndarray]\n            self._polygons = [np.asarray(x).reshape(-1) for x in m]\n            return\n\n        if isinstance(m, np.ndarray):  # assumed to be a binary mask\n            assert m.shape[1] != 2, m.shape\n            assert m.shape == (height, width), m.shape\n            self._mask = m.astype(\"uint8\")\n            return\n\n        raise ValueError(\"GenericMask cannot handle object {} of type '{}'\".format(m, type(m)))\n\n    @property\n    def mask(self):\n        if self._mask is None:\n            self._mask = self.polygons_to_mask(self._polygons)\n        return self._mask\n\n    @property\n    def polygons(self):\n        if self._polygons is None:\n            self._polygons, self._has_holes = self.mask_to_polygons(self._mask)\n        return self._polygons\n\n    @property\n    def has_holes(self):\n        if self._has_holes is None:\n            if self._mask is not None:\n  
              self._polygons, self._has_holes = self.mask_to_polygons(self._mask)\n            else:\n                self._has_holes = False  # if original format is polygon, does not have holes\n        return self._has_holes\n\n    def mask_to_polygons(self, mask):\n        # cv2.RETR_CCOMP flag retrieves all the contours and arranges them to a 2-level\n        # hierarchy. External contours (boundary) of the object are placed in hierarchy-1.\n        # Internal contours (holes) are placed in hierarchy-2.\n        # cv2.CHAIN_APPROX_NONE flag gets vertices of polygons from contours.\n        mask = mask.copy()\n        res = cv2.findContours(mask.astype(\"uint8\"), cv2.RETR_CCOMP, cv2.CHAIN_APPROX_NONE)\n        hierarchy = res[-1]\n        # try:\n        has_holes = (hierarchy.reshape(-1, 4)[:, 3] >= 0).sum() > 0\n        # except:\n            # assert 1!= 1\n        res = res[-2]\n        res = [x.flatten() for x in res]\n        res = [x for x in res if len(x) >= 6]\n        return res, has_holes\n\n    def polygons_to_mask(self, polygons):\n        rle = mask_util.frPyObjects(polygons, self.height, self.width)\n        rle = mask_util.merge(rle)\n        return mask_util.decode(rle)[:, :]\n\n    def area(self):\n        return self.mask.sum()\n\n    def bbox(self):\n        p = mask_util.frPyObjects(self.polygons, self.height, self.width)\n        p = mask_util.merge(p)\n        bbox = mask_util.toBbox(p)\n        bbox[2] += bbox[0]\n        bbox[3] += bbox[1]\n        return bbox\n\n\nclass _PanopticPrediction:\n    def __init__(self, panoptic_seg, segments_info):\n        self._seg = panoptic_seg\n\n        self._sinfo = {s[\"id\"]: s for s in segments_info}  # seg id -> seg info\n        segment_ids, areas = torch.unique(panoptic_seg, sorted=True, return_counts=True)\n        areas = areas.numpy()\n        sorted_idxs = np.argsort(-areas)\n        self._seg_ids, self._seg_areas = segment_ids[sorted_idxs], areas[sorted_idxs]\n        self._seg_ids = 
self._seg_ids.tolist()\n        for sid, area in zip(self._seg_ids, self._seg_areas):\n            if sid in self._sinfo:\n                self._sinfo[sid][\"area\"] = float(area)\n\n    def non_empty_mask(self):\n        \"\"\"\n        Returns:\n            (H, W) array, a mask for all pixels that have a prediction\n        \"\"\"\n        empty_ids = []\n        for id in self._seg_ids:\n            if id not in self._sinfo:\n                empty_ids.append(id)\n        if len(empty_ids) == 0:\n            return np.zeros(self._seg.shape, dtype=np.uint8)\n        assert (\n            len(empty_ids) == 1\n        ), \">1 ids corresponds to no labels. This is currently not supported\"\n        return (self._seg != empty_ids[0]).numpy().astype(np.bool)\n\n    def semantic_masks(self):\n        for sid in self._seg_ids:\n            sinfo = self._sinfo.get(sid)\n            if sinfo is None or sinfo[\"isthing\"]:\n                # Some pixels (e.g. id 0 in PanopticFPN) have no instance or semantic predictions.\n                continue\n            yield (self._seg == sid).numpy().astype(np.bool), sinfo\n\n    def instance_masks(self):\n        for sid in self._seg_ids:\n            sinfo = self._sinfo.get(sid)\n            if sinfo is None or not sinfo[\"isthing\"]:\n                continue\n            mask = (self._seg == sid).numpy().astype(np.bool)\n            if mask.sum() > 0:\n                yield mask, sinfo\n\n\ndef _create_text_labels(classes, scores, class_names):\n    \"\"\"\n    Args:\n        classes (list[int] or None):\n        scores (list[float] or None):\n        class_names (list[str] or None):\n\n    Returns:\n        list[str] or None\n    \"\"\"\n    labels = None\n    if class_names is not None and len(class_names) >= 1:\n        labels = [class_names[i] for i in classes]\n    if scores is not None:\n        if labels is None:\n            labels = [\"{:.0f}%\".format(s * 100) for s in scores]\n        else:\n            labels = 
[\"{}\".format(l) for l in labels]\n    return labels\n\n\nclass VisImage:\n    def __init__(self, img, scale=1.0):\n        \"\"\"\n        Args:\n            img (ndarray): an RGB image of shape (H, W, 3).\n            scale (float): scale the input image\n        \"\"\"\n        self.img = img\n        self.scale = scale\n        self.width, self.height = img.shape[1], img.shape[0]\n        self._setup_figure(img)\n\n    def _setup_figure(self, img):\n        \"\"\"\n        Args:\n            Same as in :meth:`__init__()`.\n\n        Returns:\n            fig (matplotlib.pyplot.figure): top level container for all the image plot elements.\n            ax (matplotlib.pyplot.Axes): contains figure elements and sets the coordinate system.\n        \"\"\"\n        fig = mplfigure.Figure(frameon=False)\n        self.dpi = fig.get_dpi()\n        # add a small 1e-2 to avoid precision lost due to matplotlib's truncation\n        # (https://github.com/matplotlib/matplotlib/issues/15363)\n        fig.set_size_inches(\n            (self.width * self.scale + 1e-2) / self.dpi,\n            (self.height * self.scale + 1e-2) / self.dpi,\n        )\n        self.canvas = FigureCanvasAgg(fig)\n        # self.canvas = mpl.backends.backend_cairo.FigureCanvasCairo(fig)\n        ax = fig.add_axes([0.0, 0.0, 1.0, 1.0])\n        ax.axis(\"off\")\n        ax.set_xlim(0.0, self.width)\n        ax.set_ylim(self.height)\n\n        self.fig = fig\n        self.ax = ax\n\n    def save(self, filepath):\n        \"\"\"\n        Args:\n            filepath (str): a string that contains the absolute path, including the file name, where\n                the visualized image will be saved.\n        \"\"\"\n        cv2.imwrite(filepath, self.get_image()[:, :, ::-1])\n\n    def get_image(self):\n        \"\"\"\n        Returns:\n            ndarray: the visualized image of shape (H, W, 3) (RGB) in uint8 type.\n              The shape is scaled w.r.t the input image using the given `scale` 
argument.\n        \"\"\"\n        canvas = self.canvas\n        s, (width, height) = canvas.print_to_buffer()\n        if (self.width, self.height) != (width, height):\n            img = cv2.resize(self.img, (width, height))\n        else:\n            img = self.img\n\n        # buf = io.BytesIO()  # works for cairo backend\n        # width, height = self.width, self.height\n        # s = buf.getvalue()\n\n        buffer = np.frombuffer(s, dtype=\"uint8\")\n\n        # imshow is slow. blend manually (still quite slow)\n        img_rgba = buffer.reshape(height, width, 4)\n        rgb, alpha = np.split(img_rgba, [3], axis=2)\n\n        try:\n            import numexpr as ne  # fuse them with numexpr\n\n            visualized_image = ne.evaluate(\"img * (1 - alpha / 255.0) + rgb * (alpha / 255.0)\")\n        except ImportError:\n            alpha = alpha.astype(\"float32\") / 255.0\n            visualized_image = img * (1 - alpha) + rgb * alpha\n\n        visualized_image = visualized_image.astype(\"uint8\")\n\n        return visualized_image\n\n\nclass Visualizer:\n    def __init__(self, img_rgb, metadata, scale=1.0, instance_mode=ColorMode.IMAGE):\n        \"\"\"\n        Args:\n            img_rgb: a numpy array of shape (H, W, C), where H and W correspond to\n                the height and width of the image respectively. C is the number of\n                color channels. The image is required to be in RGB format since that\n                is a requirement of the Matplotlib library. 
The image is also expected\n                to be in the range [0, 255].\n            metadata (MetadataCatalog): image metadata.\n        \"\"\"\n        self.img = np.asarray(img_rgb).clip(0, 255).astype(np.uint8)\n        self.metadata = metadata\n        self.output = VisImage(self.img, scale=scale)\n        self.cpu_device = torch.device(\"cpu\")\n\n        # too small texts are useless, therefore clamp to 9\n        self._default_font_size = max(\n            np.sqrt(self.output.height * self.output.width) // 90, 10 // scale\n        )\n        self._instance_mode = instance_mode\n        self._colors = colormap(True,1)\n\n    def draw_instance_predictions(self, predictions,is_ass= False, labels_align = 'left'):\n        \"\"\"\n        Draw instance-level prediction results on an image.\n\n        Args:\n            predictions (Instances): the output of an instance detection/segmentation\n                model. Following fields will be used to draw:\n                \"pred_boxes\", \"pred_classes\", \"scores\", \"pred_masks\" (or \"pred_masks_rle\").\n\n        Returns:\n            output (VisImage): image object with visualizations.\n        \"\"\"\n        boxes = predictions.pred_boxes if predictions.has(\"pred_boxes\") else None\n        scores = predictions.scores if predictions.has(\"scores\") else None\n        classes = predictions.pred_classes if predictions.has(\"pred_classes\") else None\n        associations = predictions.pred_associations if predictions.has(\"pred_associations\") else None\n        if is_ass:\n            labels = _create_text_labels(classes, scores, ['obj+shad'])\n        else:\n            labels = _create_text_labels(classes, scores, self.metadata.get(\"thing_classes\",None))\n        keypoints = predictions.pred_keypoints if predictions.has(\"pred_keypoints\") else None\n\n        if predictions.has(\"pred_masks\"):\n            masks = predictions.pred_masks\n            masks = [GenericMask(x, self.output.height, 
self.output.width) for x in masks]\n        else:\n            masks = None\n\n        if self._instance_mode == ColorMode.SEGMENTATION and self.metadata.get(\"thing_colors\"):\n            colors = [\n                self._jitter([x / 255 for x in self.metadata.thing_colors[c]]) for c in classes\n            ]\n            alpha = 0.8\n        else:\n            colors = [self._colors[k] for k in associations]\n            alpha = 0.5\n\n        if self._instance_mode == ColorMode.IMAGE_BW:\n            self.output.img = self._create_grayscale_image(\n                (predictions.pred_masks.any(dim=0) > 0).numpy()\n            )\n            alpha = 0.3\n\n        self.overlay_instances(\n            masks=masks,\n            boxes=boxes,\n            labels=labels,\n            keypoints=keypoints,\n            assigned_colors=colors,\n            alpha=alpha,\n            labels_align=labels_align,\n            associations= associations\n        )\n        return self.output\n\n    def draw_sem_seg(self, sem_seg, area_threshold=None, alpha=0.8):\n        \"\"\"\n        Draw semantic segmentation predictions/labels.\n\n        Args:\n            sem_seg (Tensor or ndarray): the segmentation of shape (H, W).\n            area_threshold (int): segments with less than `area_threshold` are not drawn.\n            alpha (float): the larger it is, the more opaque the segmentations are.\n\n        Returns:\n            output (VisImage): image object with visualizations.\n        \"\"\"\n        if isinstance(sem_seg, torch.Tensor):\n            sem_seg = sem_seg.numpy()\n        labels, areas = np.unique(sem_seg, return_counts=True)\n        sorted_idxs = np.argsort(-areas).tolist()\n        labels = labels[sorted_idxs]\n        for label in labels:\n            try:\n                mask_color = [x / 255 for x in self.metadata.stuff_colors[label]]\n            except (AttributeError, IndexError):\n                mask_color = None\n\n            binary_mask = 
(sem_seg == label).astype(np.uint8)\n            text = self.metadata.stuff_classes[label]\n            self.draw_binary_mask(\n                binary_mask,\n                color=mask_color,\n                edge_color=_OFF_WHITE,\n                text=text,\n                alpha=alpha,\n                area_threshold=area_threshold,\n            )\n        return self.output\n\n    def draw_panoptic_seg_predictions(\n        self, panoptic_seg, segments_info, area_threshold=None, alpha=0.7\n    ):\n        \"\"\"\n        Draw panoptic prediction results on an image.\n\n        Args:\n            panoptic_seg (Tensor): of shape (height, width) where the values are ids for each\n                segment.\n            segments_info (list[dict]): Describe each segment in `panoptic_seg`.\n                Each dict contains keys \"id\", \"category_id\", \"isthing\".\n            area_threshold (int): stuff segments with less than `area_threshold` are not drawn.\n\n        Returns:\n            output (VisImage): image object with visualizations.\n        \"\"\"\n        pred = _PanopticPrediction(panoptic_seg, segments_info)\n\n        if self._instance_mode == ColorMode.IMAGE_BW:\n            self.output.img = self._create_grayscale_image(pred.non_empty_mask())\n\n        # draw mask for all semantic segments first i.e. 
\"stuff\"\n        for mask, sinfo in pred.semantic_masks():\n            category_idx = sinfo[\"category_id\"]\n            try:\n                mask_color = [x / 255 for x in self.metadata.stuff_colors[category_idx]]\n            except AttributeError:\n                mask_color = None\n\n            text = self.metadata.stuff_classes[category_idx]\n            self.draw_binary_mask(\n                mask,\n                color=mask_color,\n                edge_color=_OFF_WHITE,\n                text=text,\n                alpha=alpha,\n                area_threshold=area_threshold,\n            )\n\n        # draw mask for all instances second\n        all_instances = list(pred.instance_masks())\n        if len(all_instances) == 0:\n            return self.output\n        masks, sinfo = list(zip(*all_instances))\n        category_ids = [x[\"category_id\"] for x in sinfo]\n\n        try:\n            scores = [x[\"score\"] for x in sinfo]\n        except KeyError:\n            scores = None\n        labels = _create_text_labels(category_ids, scores, self.metadata.thing_classes)\n\n        try:\n            colors = [random_color(rgb=True, maximum=1) for k in category_ids]\n        except AttributeError:\n            colors = None\n        self.overlay_instances(masks=masks, labels=labels, assigned_colors=colors)\n\n        return self.output\n\n    def draw_dataset_dict(self, dic):\n        annos = dic.get(\"annotations\", None)\n        soba_annos = dic.get(\"association_anno\",None)\n        if annos:\n            if \"segmentation\" in annos[0]:\n                masks = [x[\"segmentation\"] for x in annos]\n            else:\n                masks = None\n            if \"keypoints\" in annos[0]:\n                keypts = [x[\"keypoints\"] for x in annos]\n                keypts = np.array(keypts).reshape(len(annos), -1, 3)\n            else:\n                keypts = None\n\n            boxes = [BoxMode.convert(x[\"bbox\"], x[\"bbox_mode\"], 
BoxMode.XYXY_ABS) for x in annos]\n            colos = [self._colors[i] for i in range(1,int(len(boxes)/2)+1)]*2\n            labels = [x[\"category_id\"] for x in annos]\n            names = self.metadata.get(\"thing_classes\", None)\n            if names:\n                labels = [names[i] for i in labels]\n            labels = [i + (\"|crowd\" if a.get(\"iscrowd\", 0) else \"\") for i, a in zip(labels, annos)]\n            self.overlay_instances(labels=labels, boxes=boxes, masks=masks, keypoints=keypts,assigned_colors=colos)\n        if soba_annos:\n            soba_boxes = [BoxMode.convert(x[\"bbox\"],x[\"bbox_mode\"], BoxMode.XYXY_ABS) for x in soba_annos]\n            colos = colos[:int(len(colos)/2)]\n            soba_labels = [x[\"category_id\"] for x in soba_annos]\n            names = ['obj+shad']\n            if names:\n                soba_labels = [names[i] for i in soba_labels]\n            self.overlay_instances(labels=soba_labels,boxes=soba_boxes,labels_align='right',assigned_colors=colos)\n        sem_seg = dic.get(\"sem_seg\", None)\n        if sem_seg is None and \"sem_seg_file_name\" in dic:\n            sem_seg = cv2.imread(dic[\"sem_seg_file_name\"], cv2.IMREAD_GRAYSCALE)\n        if sem_seg is not None:\n            self.draw_sem_seg(sem_seg, area_threshold=0, alpha=0.5)\n        return self.output\n\n    def overlay_instances(\n        self,\n        *,\n        boxes=None,\n        labels=None,\n        masks=None,\n        keypoints=None,\n        assigned_colors=None,\n        alpha=0.5,\n        labels_align = \"left\",\n        associations = None\n        \n    ):\n        \"\"\"\n        Args:\n            boxes (Boxes or ndarray): either a :class:`Boxes` or a Nx4 numpy array\n                of XYXY_ABS format for the N objects in a single image.\n            labels (list[str]): the text to be displayed for each instance.\n            masks (masks-like object): Supported types are:\n\n                * 
`structures.masks.PolygonMasks`, `structures.masks.BitMasks`.\n                * list[list[ndarray]]: contains the segmentation masks for all objects in one image.\n                    The first level of the list corresponds to individual instances. The second\n                    level to all the polygon that compose the instance, and the third level\n                    to the polygon coordinates. The third level should have the format of\n                    [x0, y0, x1, y1, ..., xn, yn] (n >= 3).\n                * list[ndarray]: each ndarray is a binary mask of shape (H, W).\n                * list[dict]: each dict is a COCO-style RLE.\n            keypoints (Keypoint or array like): an array-like object of shape (N, K, 3),\n                where the N is the number of instances and K is the number of keypoints.\n                The last dimension corresponds to (x, y, visibility or score).\n            assigned_colors (list[matplotlib.colors]): a list of colors, where each color\n                corresponds to each mask or box in the image. 
Refer to 'matplotlib.colors'\n                for full list of formats that the colors are accepted in.\n\n        Returns:\n            output (VisImage): image object with visualizations.\n        \"\"\"\n        num_instances = None\n        if boxes is not None:\n            boxes = self._convert_boxes(boxes)\n            num_instances = len(boxes)\n        if masks is not None:\n            masks = self._convert_masks(masks)\n            if num_instances:\n                assert len(masks) == num_instances\n            else:\n                num_instances = len(masks)\n        if keypoints is not None:\n            if num_instances:\n                assert len(keypoints) == num_instances\n            else:\n                num_instances = len(keypoints)\n            keypoints = self._convert_keypoints(keypoints)\n        if labels is not None:\n            assert len(labels) == num_instances\n        if assigned_colors is None:\n            assigned_colors = [random_color(rgb=True, maximum=1) for _ in range(num_instances)]\n        if num_instances == 0:\n            return self.output\n\n        # Display in largest to smallest order to reduce occlusion.\n        areas = None\n        if boxes is not None:\n            areas = np.prod(boxes[:, 2:] - boxes[:, :2], axis=1)\n        elif masks is not None:\n            areas = np.asarray([x.area() for x in masks])\n\n        for i in range(num_instances):\n            if associations is not None:\n                if associations[i] == 0:\n                    continue\n            color = assigned_colors[i]\n            if boxes is not None:\n                self.draw_box(boxes[i], edge_color=color)\n                pass\n\n            if masks is not None:\n                for segment in masks[i].polygons:\n                    if labels[i] == 'Shadow':\n                        alpha = 0.8\n                    else:\n                        alpha = 0.5\n                    self.draw_polygon(segment.reshape(-1, 
2), color, alpha=alpha)\n\n            if labels is not None:\n                # first get a box\n                if boxes is not None:\n                    x0, y0, x1, y1 = boxes[i]\n                    text_pos = (x0, y0)  # if drawing boxes, put text on the box corner.\n                    horiz_align = labels_align\n                    if horiz_align == 'right':\n                        text_pos = (x1,y1-20)\n                elif masks is not None:\n                    x0, y0, x1, y1 = masks[i].bbox()\n\n                    # draw text in the center (defined by median) when box is not drawn\n                    # median is less sensitive to outliers.\n                    text_pos = np.median(masks[i].mask.nonzero(), axis=1)[::-1]\n                    horiz_align = \"center\"\n                else:\n                    continue  # drawing the box confidence for keypoints isn't very useful.\n                # for small objects, draw text at the side to avoid occlusion\n                instance_area = (y1 - y0) * (x1 - x0)\n                if (\n                    instance_area < _SMALL_OBJECT_AREA_THRESH * self.output.scale\n                    or y1 - y0 < 40 * self.output.scale\n                ):\n                    if y1 >= self.output.height - 5:\n                        text_pos = (x1, y0)\n                    else:\n                        text_pos = (x0, y1)\n                height_ratio = (y1 - y0) / np.sqrt(self.output.height * self.output.width)\n                lighter_color = self._change_color_brightness(color, brightness_factor=0.7)\n                font_size = (\n                    np.clip((height_ratio - 0.02) / 0.08 + 1, 1.2, 2)\n                    * 0.5\n                    * self._default_font_size\n                )\n                self.draw_text(\n                    labels[i],\n                    text_pos,\n                    color=lighter_color,\n                    horizontal_alignment=horiz_align,\n                    
font_size=font_size,\n                )\n\n        # draw keypoints\n        if keypoints is not None:\n            for keypoints_per_instance in keypoints:\n                self.draw_and_connect_keypoints(keypoints_per_instance)\n\n        return self.output\n\n    def draw_and_connect_keypoints(self, keypoints):\n        \"\"\"\n        Draws keypoints of an instance and follows the rules for keypoint connections\n        to draw lines between appropriate keypoints. This follows color heuristics for\n        line color.\n\n        Args:\n            keypoints (Tensor): a tensor of shape (K, 3), where K is the number of keypoints\n                and the last dimension corresponds to (x, y, probability).\n\n        Returns:\n            output (VisImage): image object with visualizations.\n        \"\"\"\n        visible = {}\n        for idx, keypoint in enumerate(keypoints):\n            # draw keypoint\n            x, y, prob = keypoint\n            if prob > _KEYPOINT_THRESHOLD:\n                self.draw_circle((x, y), color=_RED)\n                keypoint_name = self.metadata.keypoint_names[idx]\n                visible[keypoint_name] = (x, y)\n\n        for kp0, kp1, color in self.metadata.keypoint_connection_rules:\n            if kp0 in visible and kp1 in visible:\n                x0, y0 = visible[kp0]\n                x1, y1 = visible[kp1]\n                color = tuple(x / 255.0 for x in color)\n                self.draw_line([x0, x1], [y0, y1], color=color)\n\n        # draw lines from nose to mid-shoulder and mid-shoulder to mid-hip\n        # Note that this strategy is specific to person keypoints.\n        # For other keypoints, it should just do nothing\n        try:\n            ls_x, ls_y = visible[\"left_shoulder\"]\n            rs_x, rs_y = visible[\"right_shoulder\"]\n            mid_shoulder_x, mid_shoulder_y = (ls_x + rs_x) / 2, (ls_y + rs_y) / 2\n        except KeyError:\n            pass\n        else:\n            # draw line from nose to 
mid-shoulder\n            nose_x, nose_y = visible.get(\"nose\", (None, None))\n            if nose_x is not None:\n                self.draw_line([nose_x, mid_shoulder_x], [nose_y, mid_shoulder_y], color=_RED)\n\n            try:\n                # draw line from mid-shoulder to mid-hip\n                lh_x, lh_y = visible[\"left_hip\"]\n                rh_x, rh_y = visible[\"right_hip\"]\n            except KeyError:\n                pass\n            else:\n                mid_hip_x, mid_hip_y = (lh_x + rh_x) / 2, (lh_y + rh_y) / 2\n                self.draw_line([mid_hip_x, mid_shoulder_x], [mid_hip_y, mid_shoulder_y], color=_RED)\n        return self.output\n\n    \"\"\"\n    Primitive drawing functions:\n    \"\"\"\n\n    def draw_text(\n        self, text, position, *, font_size=None, color=\"g\", horizontal_alignment=\"center\"\n    ):\n        \"\"\"\n        Args:\n            text (str): class label\n            position (tuple): a tuple of the x and y coordinates to place text on image.\n            font_size (int, optional): font of the text. If not provided, a font size\n                proportional to the image width is calculated and used.\n            color: color of the text. 
Refer to `matplotlib.colors` for full list\n                of formats that are accepted.\n            horizontal_alignment (str): see `matplotlib.text.Text`\n\n        Returns:\n            output (VisImage): image object with text drawn.\n        \"\"\"\n        if not font_size:\n            font_size = self._default_font_size\n\n        # since the text background is dark, we don't want the text to be dark\n        color = np.maximum(list(mplc.to_rgb(color)), 0.2)\n        color[np.argmax(color)] = max(0.8, np.max(color))\n\n        x, y = position\n        self.output.ax.text(\n            x,\n            y,\n            text,\n            size=font_size * self.output.scale,\n            family=\"sans-serif\",\n            bbox={\"facecolor\": \"black\", \"alpha\": 0.8, \"pad\": 0.7, \"edgecolor\": \"none\"},\n            verticalalignment=\"top\",\n            horizontalalignment=horizontal_alignment,\n            color=color,\n            zorder=10,\n        )\n        return self.output\n\n    def draw_box(self, box_coord, alpha=0.5, edge_color=\"g\", line_style=\"-\"):\n        \"\"\"\n        Args:\n            box_coord (tuple): a tuple containing x0, y0, x1, y1 coordinates, where x0 and y0\n                are the coordinates of the image's top left corner. x1 and y1 are the\n                coordinates of the image's bottom right corner.\n            alpha (float): blending efficient. Smaller values lead to more transparent masks.\n            edge_color: color of the outline of the box. 
Refer to `matplotlib.colors`\n                for full list of formats that are accepted.\n            line_style (string): the string to use to create the outline of the boxes.\n\n        Returns:\n            output (VisImage): image object with box drawn.\n        \"\"\"\n        x0, y0, x1, y1 = box_coord\n        width = x1 - x0\n        height = y1 - y0\n\n        linewidth = max(5,1)\n\n        self.output.ax.add_patch(\n            mpl.patches.Rectangle(\n                (x0, y0),\n                width,\n                height,\n                fill=False,\n                edgecolor=edge_color,\n                linewidth=linewidth * self.output.scale,\n                alpha=alpha,\n                linestyle=line_style,\n            )\n        )\n        return self.output\n\n    def draw_circle(self, circle_coord, color, radius=3):\n        \"\"\"\n        Args:\n            circle_coord (list(int) or tuple(int)): contains the x and y coordinates\n                of the center of the circle.\n            color: color of the polygon. Refer to `matplotlib.colors` for a full list of\n                formats that are accepted.\n            radius (int): radius of the circle.\n\n        Returns:\n            output (VisImage): image object with box drawn.\n        \"\"\"\n        x, y = circle_coord\n        self.output.ax.add_patch(\n            mpl.patches.Circle(circle_coord, radius=radius, fill=True, color=color)\n        )\n        return self.output\n\n    def draw_line(self, x_data, y_data, color):\n        \"\"\"\n        Args:\n            x_data (list[int]): a list containing x values of all the points being drawn.\n                Length of list should match the length of y_data.\n            y_data (list[int]): a list containing y values of all the points being drawn.\n                Length of list should match the length of x_data.\n            color: color of the line. 
Refer to `matplotlib.colors` for a full list of\n                formats that are accepted.\n\n        Returns:\n            output (VisImage): image object with line drawn.\n        \"\"\"\n        linewidth = max(self._default_font_size / 3, 1)\n        self.output.ax.add_line(\n            mpl.lines.Line2D(x_data, y_data, linewidth=linewidth * self.output.scale, color=color)\n        )\n        return self.output\n\n    def draw_binary_mask(\n        self, binary_mask, color=None, *, edge_color=None, text=None, alpha=0.5, area_threshold=4096\n    ):\n        \"\"\"\n        Args:\n            binary_mask (ndarray): numpy array of shape (H, W), where H is the image height and\n                W is the image width. Each value in the array is either a 0 or 1 value of uint8\n                type.\n            color: color of the mask. Refer to `matplotlib.colors` for a full list of\n                formats that are accepted. If None, will pick a random color.\n            edge_color: color of the polygon edges. Refer to `matplotlib.colors` for a\n                full list of formats that are accepted.\n            text (str): if None, will be drawn in the object's center of mass.\n            alpha (float): blending efficient. 
Smaller values lead to more transparent masks.\n            area_threshold (float): a connected component small than this will not be shown.\n\n        Returns:\n            output (VisImage): image object with mask drawn.\n        \"\"\"\n        if color is None:\n            color = random_color(rgb=True, maximum=1)\n        if area_threshold is None:\n            area_threshold = 4096\n\n        has_valid_segment = False\n        binary_mask = binary_mask.astype(\"uint8\")  # opencv needs uint8\n        mask = GenericMask(binary_mask, self.output.height, self.output.width)\n        shape2d = (binary_mask.shape[0], binary_mask.shape[1])\n\n        if not mask.has_holes:\n            # draw polygons for regular masks\n            for segment in mask.polygons:\n                area = mask_util.area(mask_util.frPyObjects([segment], shape2d[0], shape2d[1]))\n                if area < area_threshold:\n                    continue\n                has_valid_segment = True\n                segment = segment.reshape(-1, 2)\n                self.draw_polygon(segment, color=color, edge_color=edge_color, alpha=alpha)\n        else:\n            rgba = np.zeros(shape2d + (4,), dtype=\"float32\")\n            rgba[:, :, :3] = color\n            rgba[:, :, 3] = (mask.mask == 1).astype(\"float32\") * alpha\n            has_valid_segment = True\n            self.output.ax.imshow(rgba)\n\n        if text is not None and has_valid_segment:\n            # TODO sometimes drawn on wrong objects. 
the heuristics here can improve.\n            lighter_color = self._change_color_brightness(color, brightness_factor=0.7)\n            _num_cc, cc_labels, stats, centroids = cv2.connectedComponentsWithStats(binary_mask, 8)\n            largest_component_id = np.argmax(stats[1:, -1]) + 1\n\n            # draw text on the largest component, as well as other very large components.\n            for cid in range(1, _num_cc):\n                if cid == largest_component_id or stats[cid, -1] > _LARGE_MASK_AREA_THRESH:\n                    # median is more stable than centroid\n                    # center = centroids[largest_component_id]\n                    center = np.median((cc_labels == cid).nonzero(), axis=1)[::-1]\n                    self.draw_text(text, center, color=lighter_color)\n        return self.output\n\n    def draw_polygon(self, segment, color, edge_color=None, alpha=0.5):\n        \"\"\"\n        Args:\n            segment: numpy array of shape Nx2, containing all the points in the polygon.\n            color: color of the polygon. Refer to `matplotlib.colors` for a full list of\n                formats that are accepted.\n            edge_color: color of the polygon edges. Refer to `matplotlib.colors` for a\n                full list of formats that are accepted. If not provided, a darker shade\n                of the polygon color will be used instead.\n            alpha (float): blending efficient. 
Smaller values lead to more transparent masks.\n\n        Returns:\n            output (VisImage): image object with polygon drawn.\n        \"\"\"\n        if edge_color is None:\n            # make edge color darker than the polygon color\n            if alpha > 0.8:\n                edge_color = self._change_color_brightness(color, brightness_factor=-0.7)\n            else:\n                edge_color = color\n        edge_color = mplc.to_rgb(edge_color) + (1,)\n\n        polygon = mpl.patches.Polygon(\n            segment,\n            fill=True,\n            facecolor=mplc.to_rgb(color) + (alpha,),\n            edgecolor=edge_color,\n            linewidth=max(self._default_font_size // 15 * self.output.scale, 1),\n        )\n        self.output.ax.add_patch(polygon)\n        return self.output\n\n    \"\"\"\n    Internal methods:\n    \"\"\"\n\n    def _jitter(self, color):\n        \"\"\"\n        Randomly modifies given color to produce a slightly different color than the color given.\n\n        Args:\n            color (tuple[double]): a tuple of 3 elements, containing the RGB values of the color\n                picked. The values in the list are in the [0.0, 1.0] range.\n\n        Returns:\n            jittered_color (tuple[double]): a tuple of 3 elements, containing the RGB values of the\n                color after being jittered. 
The values in the list are in the [0.0, 1.0] range.\n        \"\"\"\n        color = mplc.to_rgb(color)\n        vec = np.random.rand(3)\n        # better to do it in another color space\n        vec = vec / np.linalg.norm(vec) * 0.5\n        res = np.clip(vec + color, 0, 1)\n        return tuple(res)\n\n    def _create_grayscale_image(self, mask=None):\n        \"\"\"\n        Create a grayscale version of the original image.\n        The colors in masked area, if given, will be kept.\n        \"\"\"\n        img_bw = self.img.astype(\"f4\").mean(axis=2)\n        img_bw = np.stack([img_bw] * 3, axis=2)\n        if mask is not None:\n            img_bw[mask] = self.img[mask]\n        return img_bw\n\n    def _change_color_brightness(self, color, brightness_factor):\n        \"\"\"\n        Depending on the brightness_factor, gives a lighter or darker color i.e. a color with\n        less or more saturation than the original color.\n\n        Args:\n            color: color of the polygon. Refer to `matplotlib.colors` for a full list of\n                formats that are accepted.\n            brightness_factor (float): a value in [-1.0, 1.0] range. A lightness factor of\n                0 will correspond to no change, a factor in [-1.0, 0) range will result in\n                a darker color and a factor in (0, 1.0] range will result in a lighter color.\n\n        Returns:\n            modified_color (tuple[double]): a tuple containing the RGB values of the\n                modified color. 
Each value in the tuple is in the [0.0, 1.0] range.\n        \"\"\"\n        assert brightness_factor >= -1.0 and brightness_factor <= 1.0\n        color = mplc.to_rgb(color)\n        polygon_color = colorsys.rgb_to_hls(*mplc.to_rgb(color))\n        modified_lightness = polygon_color[1] + (brightness_factor * polygon_color[1])\n        modified_lightness = 0.0 if modified_lightness < 0.0 else modified_lightness\n        modified_lightness = 1.0 if modified_lightness > 1.0 else modified_lightness\n        modified_color = colorsys.hls_to_rgb(polygon_color[0], modified_lightness, polygon_color[2])\n        return modified_color\n\n    def _convert_boxes(self, boxes):\n        \"\"\"\n        Convert different format of boxes to a Nx4 array.\n        \"\"\"\n        if isinstance(boxes, Boxes):\n            return boxes.tensor.numpy()\n        else:\n            return np.asarray(boxes)\n\n    def _convert_masks(self, masks_or_polygons):\n        \"\"\"\n        Convert different format of masks or polygons to a tuple of masks and polygons.\n\n        Returns:\n            list[GenericMask]:\n        \"\"\"\n\n        m = masks_or_polygons\n        if isinstance(m, PolygonMasks):\n            m = m.polygons\n        if isinstance(m, BitMasks):\n            m = m.tensor.numpy()\n        if isinstance(m, torch.Tensor):\n            m = m.numpy()\n        ret = []\n        for x in m:\n            if isinstance(x, GenericMask):\n                ret.append(x)\n            else:\n                ret.append(GenericMask(x, self.output.height, self.output.width))\n        return ret\n\n    def _convert_keypoints(self, keypoints):\n        if isinstance(keypoints, Keypoints):\n            keypoints = keypoints.tensor\n        keypoints = np.asarray(keypoints)\n        return keypoints\n\n    def get_output(self):\n        \"\"\"\n        Returns:\n            output (VisImage): the image output containing the visualizations added\n                to the image.\n        
\"\"\"\n        return self.output\n"
  },
  {
    "path": "dev/README.md",
    "content": "\n## Some scripts for developers to use, include:\n\n- `linter.sh`: lint the codebase before commit\n- `run_{inference,instant}_tests.sh`: run inference/training for a few iterations.\n   Note that these tests require 2 GPUs.\n- `parse_results.sh`: parse results from a log file.\n"
  },
  {
    "path": "dev/linter.sh",
    "content": "#!/bin/bash -e\n# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\n\n# Run this script at project root by \"./dev/linter.sh\" before you commit\n\nvergte() {\n  [ \"$2\" = \"$(echo -e \"$1\\n$2\" | sort -V | head -n1)\" ]\n}\n\n{\n\tblack --version | grep \"19.3b0\" > /dev/null\n} || {\n\techo \"Linter requires black==19.3b0 !\"\n\texit 1\n}\n\nISORT_TARGET_VERSION=\"4.3.21\"\nISORT_VERSION=$(isort -v | grep VERSION | awk '{print $2}')\nvergte \"$ISORT_VERSION\" \"$ISORT_TARGET_VERSION\" || {\n  echo \"Linter requires isort>=${ISORT_TARGET_VERSION} !\"\n  exit 1\n}\n\nset -v\n\necho \"Running isort ...\"\nisort -y -sp . --atomic\n\necho \"Running black ...\"\nblack -l 100 .\n\necho \"Running flake8 ...\"\nif [ -x \"$(command -v flake8-3)\" ]; then\n  flake8-3 .\nelse\n  python3 -m flake8 .\nfi\n\n# echo \"Running mypy ...\"\n# Pytorch does not have enough type annotations\n# mypy detectron2/solver detectron2/structures detectron2/config\n\necho \"Running clang-format ...\"\nfind . -regex \".*\\.\\(cpp\\|c\\|cc\\|cu\\|cxx\\|h\\|hh\\|hpp\\|hxx\\|tcc\\|mm\\|m\\)\" -print0 | xargs -0 clang-format -i\n\ncommand -v arc > /dev/null && arc lint\n"
  },
  {
    "path": "dev/packaging/README.md",
    "content": "\n## To build a cu101 wheel for release:\n\n```\n$ nvidia-docker run -it --storage-opt \"size=20GB\" --name pt  pytorch/manylinux-cuda101\n# inside the container:\n# git clone https://github.com/facebookresearch/detectron2/\n# cd detectron2\n# export CU_VERSION=cu101 D2_VERSION_SUFFIX= PYTHON_VERSION=3.7 PYTORCH_VERSION=1.4\n# ./dev/packaging/build_wheel.sh\n```\n\n## To build all wheels for `CUDA {9.2,10.0,10.1}` x `Python {3.6,3.7,3.8}`:\n```\n./dev/packaging/build_all_wheels.sh\n./dev/packaging/gen_wheel_index.sh /path/to/wheels\n```\n"
  },
  {
    "path": "dev/packaging/build_all_wheels.sh",
    "content": "#!/bin/bash -e\n\nPYTORCH_VERSION=1.4\n\nbuild_for_one_cuda() {\n  cu=$1\n\n  case \"$cu\" in\n    cu*)\n      container_name=manylinux-cuda${cu/cu/}\n      ;;\n    cpu)\n      container_name=manylinux-cuda101\n      ;;\n    *)\n      echo \"Unrecognized cu=$cu\"\n      exit 1\n      ;;\n  esac\n\n  echo \"Launching container $container_name ...\"\n\n  for py in 3.6 3.7 3.8; do\n    docker run -itd \\\n      --name $container_name \\\n      --mount type=bind,source=\"$(pwd)\",target=/detectron2 \\\n      pytorch/$container_name\n\n    cat <<EOF | docker exec -i $container_name sh\n      export CU_VERSION=$cu D2_VERSION_SUFFIX=+$cu PYTHON_VERSION=$py\n      export PYTORCH_VERSION=$PYTORCH_VERSION\n      cd /detectron2 && ./dev/packaging/build_wheel.sh\nEOF\n\n    if [[ \"$cu\" == \"cu101\" ]]; then\n      # build wheel without local version\n      cat <<EOF | docker exec -i $container_name sh\n        export CU_VERSION=$cu D2_VERSION_SUFFIX= PYTHON_VERSION=$py\n        export PYTORCH_VERSION=$PYTORCH_VERSION\n        cd /detectron2 && ./dev/packaging/build_wheel.sh\nEOF\n    fi\n\n    docker exec -i $container_name rm -rf /detectron2/build/$cu\n    docker container stop $container_name\n    docker container rm $container_name\n  done\n}\n\nif [[ -n \"$1\" ]]; then\n  build_for_one_cuda \"$1\"\nelse\n  for cu in cu101 cu100 cu92 cpu; do\n    build_for_one_cuda \"$cu\"\n  done\nfi\n"
  },
  {
    "path": "dev/packaging/build_wheel.sh",
    "content": "#!/bin/bash\nset -ex\n\nldconfig  # https://github.com/NVIDIA/nvidia-docker/issues/854\n\nscript_dir=\"$( cd \"$( dirname \"${BASH_SOURCE[0]}\" )\" >/dev/null 2>&1 && pwd )\"\n. \"$script_dir/pkg_helpers.bash\"\n\necho \"Build Settings:\"\necho \"CU_VERSION: $CU_VERSION\"                 # e.g. cu100\necho \"D2_VERSION_SUFFIX: $D2_VERSION_SUFFIX\"   # e.g. +cu100 or \"\"\necho \"PYTHON_VERSION: $PYTHON_VERSION\"         # e.g. 3.6\necho \"PYTORCH_VERSION: $PYTORCH_VERSION\"       # e.g. 1.4\n\nsetup_cuda\nsetup_wheel_python\n\nexport TORCH_VERSION_SUFFIX=\"+$CU_VERSION\"\nif [[ \"$CU_VERSION\" == \"cu101\" ]]; then\n\texport TORCH_VERSION_SUFFIX=\"\"\nfi\npip_install pip numpy -U\npip_install \"torch==$PYTORCH_VERSION$TORCH_VERSION_SUFFIX\" \\\n\t-f https://download.pytorch.org/whl/$CU_VERSION/torch_stable.html\n\n# use separate directories to allow parallel build\nBASE_BUILD_DIR=build/$CU_VERSION/$PYTHON_VERSION\npython setup.py \\\n  build -b $BASE_BUILD_DIR \\\n  bdist_wheel -b $BASE_BUILD_DIR/build_dist -d wheels/$CU_VERSION\n"
  },
  {
    "path": "dev/packaging/gen_wheel_index.sh",
    "content": "#!/bin/bash -e\n\n\nroot=$1\nif [[ -z \"$root\" ]]; then\n  echo \"Usage: ./gen_wheel_index.sh /path/to/wheels\"\n  exit\nfi\n\nindex=$root/index.html\n\ncd \"$root\"\nfor cu in cpu cu92 cu100 cu101; do\n  cd $cu\n  for whl in *.whl; do\n    echo \"<a href=\\\"$whl\\\">$whl</a><br>\"\n  done > index.html\n  cd \"$root\"\ndone\n\nfor whl in $(find . -type f -name '*.whl' -printf '%P\\n' | sort); do\n  echo \"<a href=\\\"$whl\\\">$whl</a><br>\"\ndone > \"$index\"\n\n"
  },
  {
    "path": "dev/packaging/pkg_helpers.bash",
    "content": "#!/bin/bash -e\n\n# Function to retry functions that sometimes timeout or have flaky failures\nretry () {\n    $*  || (sleep 1 && $*) || (sleep 2 && $*) || (sleep 4 && $*) || (sleep 8 && $*)\n}\n# Install with pip a bit more robustly than the default\npip_install() {\n  retry pip install --progress-bar off \"$@\"\n}\n\n\nsetup_cuda() {\n  # Now work out the CUDA settings\n  # Like other torch domain libraries, we choose common GPU architectures only.\n  export FORCE_CUDA=1\n  case \"$CU_VERSION\" in\n    cu101)\n      export CUDA_HOME=/usr/local/cuda-10.1/\n      export TORCH_CUDA_ARCH_LIST=\"3.5;3.7;5.0;5.2;6.0+PTX;6.1+PTX;7.0+PTX;7.5+PTX\"\n      ;;\n    cu100)\n      export CUDA_HOME=/usr/local/cuda-10.0/\n      export TORCH_CUDA_ARCH_LIST=\"3.5;3.7;5.0;5.2;6.0+PTX;6.1+PTX;7.0+PTX;7.5+PTX\"\n      ;;\n    cu92)\n      export CUDA_HOME=/usr/local/cuda-9.2/\n      export TORCH_CUDA_ARCH_LIST=\"3.5;3.7;5.0;5.2;6.0+PTX;6.1+PTX;7.0+PTX\"\n      ;;\n    cpu)\n      unset FORCE_CUDA\n      export CUDA_VISIBLE_DEVICES=\n      ;;\n    *)\n      echo \"Unrecognized CU_VERSION=$CU_VERSION\"\n      exit 1\n      ;;\n  esac\n}\n\nsetup_wheel_python() {\n  case \"$PYTHON_VERSION\" in\n    3.6) python_abi=cp36-cp36m ;;\n    3.7) python_abi=cp37-cp37m ;;\n    3.8) python_abi=cp38-cp38 ;;\n    *)\n      echo \"Unrecognized PYTHON_VERSION=$PYTHON_VERSION\"\n      exit 1\n      ;;\n  esac\n  export PATH=\"/opt/python/$python_abi/bin:$PATH\"\n}\n"
  },
  {
    "path": "dev/parse_results.sh",
    "content": "#!/bin/bash\n# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\n\n# A shell script that parses metrics from the log file.\n# Make it easier for developers to track performance of models.\n\nLOG=\"$1\"\n\nif [[ -z \"$LOG\" ]]; then\n\techo \"Usage: $0 /path/to/log/file\"\n\texit 1\nfi\n\n# [12/15 11:47:32] trainer INFO: Total training time: 12:15:04.446477 (0.4900 s / it)\n# [12/15 11:49:03] inference INFO: Total inference time: 0:01:25.326167 (0.13652186737060548 s / img per device, on 8 devices)\n# [12/15 11:49:03] inference INFO: Total inference pure compute time: .....\n\n# training time\ntrainspeed=$(grep -o 'Overall training.*' \"$LOG\" | grep -Eo '\\(.*\\)' | grep -o '[0-9\\.]*')\necho \"Training speed: $trainspeed s/it\"\n\n# inference time: there could be multiple inference during training\ninferencespeed=$(grep -o 'Total inference pure.*' \"$LOG\" | tail -n1 | grep -Eo '\\(.*\\)' | grep -o '[0-9\\.]*' | head -n1)\necho \"Inference speed: $inferencespeed s/it\"\n\n# [12/15 11:47:18] trainer INFO: eta: 0:00:00  iter: 90000  loss: 0.5407 (0.7256)  loss_classifier: 0.1744 (0.2446)  loss_box_reg: 0.0838 (0.1160)  loss_mask: 0.2159 (0.2722)  loss_objectness: 0.0244 (0.0429)  loss_rpn_box_reg: 0.0279 (0.0500)  time: 0.4487 (0.4899)  data: 0.0076 (0.0975) lr: 0.000200  max mem: 4161\nmemory=$(grep -o 'max[_ ]mem: [0-9]*' \"$LOG\" | tail -n1 | grep -o '[0-9]*')\necho \"Training memory: $memory MB\"\n\necho \"Easy to copypaste:\"\necho \"$trainspeed\",\"$inferencespeed\",\"$memory\"\n\necho \"------------------------------\"\n\n# [12/26 17:26:32] engine.coco_evaluation: copypaste: Task: bbox\n# [12/26 17:26:32] engine.coco_evaluation: copypaste: AP,AP50,AP75,APs,APm,APl\n# [12/26 17:26:32] engine.coco_evaluation: copypaste: 0.0017,0.0024,0.0017,0.0005,0.0019,0.0011\n# [12/26 17:26:32] engine.coco_evaluation: copypaste: Task: segm\n# [12/26 17:26:32] engine.coco_evaluation: copypaste: AP,AP50,AP75,APs,APm,APl\n# [12/26 17:26:32] 
engine.coco_evaluation: copypaste: 0.0014,0.0021,0.0016,0.0005,0.0016,0.0011\n\necho \"COCO Results:\"\nnum_tasks=$(grep -o 'copypaste:.*Task.*' \"$LOG\" | sort -u | wc -l)\n# each task has 3 lines\ngrep -o 'copypaste:.*' \"$LOG\" | cut -d ' ' -f 2- | tail -n $((num_tasks * 3))\n"
  },
  {
    "path": "dev/run_inference_tests.sh",
    "content": "#!/bin/bash -e\n# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\n\nBIN=\"python tools/train_net.py\"\nOUTPUT=\"inference_test_output\"\nNUM_GPUS=2\n\nCFG_LIST=( \"${@:1}\" )\n\nif [ ${#CFG_LIST[@]} -eq 0 ]; then\n  CFG_LIST=( ./configs/quick_schedules/*inference_acc_test.yaml )\nfi\n\necho \"========================================================================\"\necho \"Configs to run:\"\necho \"${CFG_LIST[@]}\"\necho \"========================================================================\"\n\n\nfor cfg in \"${CFG_LIST[@]}\"; do\n    echo \"========================================================================\"\n    echo \"Running $cfg ...\"\n    echo \"========================================================================\"\n    $BIN \\\n      --eval-only \\\n      --num-gpus $NUM_GPUS \\\n      --config-file \"$cfg\" \\\n      OUTPUT_DIR $OUTPUT\n      rm -rf $OUTPUT\ndone\n\n\necho \"========================================================================\"\necho \"Running demo.py ...\"\necho \"========================================================================\"\nDEMO_BIN=\"python demo/demo.py\"\nCOCO_DIR=datasets/coco/val2014\nmkdir -pv $OUTPUT\n\nset -v\n\n$DEMO_BIN --config-file ./configs/quick_schedules/panoptic_fpn_R_50_inference_acc_test.yaml \\\n  --input $COCO_DIR/COCO_val2014_0000001933* --output $OUTPUT\nrm -rf $OUTPUT\n"
  },
  {
    "path": "dev/run_instant_tests.sh",
    "content": "#!/bin/bash -e\n# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\n\nBIN=\"python tools/train_net.py\"\nOUTPUT=\"instant_test_output\"\nNUM_GPUS=2\n\nCFG_LIST=( \"${@:1}\" )\nif [ ${#CFG_LIST[@]} -eq 0 ]; then\n  CFG_LIST=( ./configs/quick_schedules/*instant_test.yaml )\nfi\n\necho \"========================================================================\"\necho \"Configs to run:\"\necho \"${CFG_LIST[@]}\"\necho \"========================================================================\"\n\nfor cfg in \"${CFG_LIST[@]}\"; do\n    echo \"========================================================================\"\n    echo \"Running $cfg ...\"\n    echo \"========================================================================\"\n    $BIN --num-gpus $NUM_GPUS --config-file \"$cfg\" \\\n      SOLVER.IMS_PER_BATCH $(($NUM_GPUS * 2)) \\\n      OUTPUT_DIR \"$OUTPUT\"\n    rm -rf \"$OUTPUT\"\ndone\n\n"
  },
  {
    "path": "docker/Dockerfile",
    "content": "FROM nvidia/cuda:10.0-cudnn7-devel\n\nENV DEBIAN_FRONTEND noninteractive\nRUN apt-get update && apt-get install -y \\\n\tpython3-opencv ca-certificates python3-dev git wget sudo  \\\n\tcmake ninja-build protobuf-compiler libprotobuf-dev && \\\n  rm -rf /var/lib/apt/lists/*\nRUN ln -sv /usr/bin/python3 /usr/bin/python\n\nRUN wget https://bootstrap.pypa.io/get-pip.py && \\\n\tpython3 get-pip.py && \\\n\trm get-pip.py\n\n# install dependencies\n# See https://pytorch.org/ for other options if you use a different version of CUDA\nRUN pip install  tensorboard cython jupyter scikit-image numpy opencv-python\nRUN pip install torch==1.3.0+cu100 torchvision==0.4.1+cu100 -f https://download.pytorch.org/whl/torch_stable.html\nRUN pip install --user 'git+https://github.com/cocodataset/cocoapi.git#subdirectory=PythonAPI'\n\nRUN pip install --user 'git+https://github.com/facebookresearch/fvcore'\n\n# set FORCE_CUDA because during `docker build` cuda is not accessible\nENV FORCE_CUDA=\"1\"\n# This will by default build detectron2 for all common cuda architectures and take a lot more time,\n# because inside `docker build`, there is no way to tell which architecture will be used.\nARG TORCH_CUDA_ARCH_LIST=\"Kepler;Kepler+Tesla;Maxwell;Maxwell+Tegra;Pascal;Volta;Turing\"\nENV TORCH_CUDA_ARCH_LIST=\"${TORCH_CUDA_ARCH_LIST}\"\n\n#RUN pip install --user -e detectron2_repo\n\n# Set a fixed model cache directory.\n#ENV FVCORE_CACHE=\"/tmp\"\nWORKDIR /home/ISD/\n\n# Install Instance Shadow Detection\nRUN git clone https://github.com/stevewongv/InstanceShadowDetection.git\n\nRUN cd InstanceShadowDetection && python setup.py install\n\nRUN cd InstanceShadowDetection/PythonAPI && python setup.py install\n\n\n"
  },
  {
    "path": "docker/Dockerfile-circleci",
    "content": "FROM nvidia/cuda:10.1-cudnn7-devel\n# This dockerfile only aims to provide an environment for unittest on CircleCI\n\nENV DEBIAN_FRONTEND noninteractive\nRUN apt-get update && apt-get install -y \\\n\tpython3-opencv ca-certificates python3-dev git wget sudo && \\\n  rm -rf /var/lib/apt/lists/*\n\nRUN wget -q https://bootstrap.pypa.io/get-pip.py && \\\n\tpython3 get-pip.py && \\\n\trm get-pip.py\n\n# install dependencies\n# See https://pytorch.org/ for other options if you use a different version of CUDA\nRUN pip install torch torchvision tensorboard cython\nRUN pip install 'git+https://github.com/cocodataset/cocoapi.git#subdirectory=PythonAPI'\n"
  },
  {
    "path": "docker/docker-compose.yml",
    "content": "version: \"2.3\"\nservices:\n  detectron2:\n    build:\n      context: .\n      dockerfile: Dockerfile\n      args:\n        USER_ID: ${USER_ID:-1000}\n    runtime: nvidia  # TODO: Exchange with \"gpu: all\" in the future (see https://github.com/facebookresearch/detectron2/pull/197/commits/00545e1f376918db4a8ce264d427a07c1e896c5a).\n    shm_size: \"8gb\"\n    ulimits:\n      memlock: -1\n      stack: 67108864\n    volumes:\n      - /tmp/.X11-unix:/tmp/.X11-unix:ro\n    environment:\n      - DISPLAY=$DISPLAY\n      - NVIDIA_VISIBLE_DEVICES=all\n"
  },
  {
    "path": "docs/.gitignore",
    "content": "_build\n"
  },
  {
    "path": "docs/Makefile",
    "content": "# Minimal makefile for Sphinx documentation\n# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\n\n# You can set these variables from the command line.\nSPHINXOPTS    =\nSPHINXBUILD   = sphinx-build\nSOURCEDIR     = .\nBUILDDIR      = _build\n\n# Put it first so that \"make\" without argument is like \"make help\".\nhelp:\n\t@$(SPHINXBUILD) -M help \"$(SOURCEDIR)\" \"$(BUILDDIR)\" $(SPHINXOPTS) $(O)\n\n.PHONY: help Makefile\n\n# Catch-all target: route all unknown targets to Sphinx using the new\n# \"make mode\" option.  $(O) is meant as a shortcut for $(SPHINXOPTS).\n%: Makefile\n\t@$(SPHINXBUILD) -M $@ \"$(SOURCEDIR)\" \"$(BUILDDIR)\" $(SPHINXOPTS) $(O)\n"
  },
  {
    "path": "docs/README.md",
    "content": "# Read the docs:\n\nThe latest documentation built from this directory is available at [detectron2.readthedocs.io](https://detectron2.readthedocs.io/).\nDocuments in this directory are not meant to be read on github.\n\n# Build the docs:\n\n1. Install detectron2 according to [INSTALL.md](INSTALL.md).\n2. Install additional libraries required to build docs:\n  - docutils>=0.14\n  - Sphinx>=1.7\n  - recommonmark==0.4.0\n  - sphinx_rtd_theme\n  - mock\n\n3. Run `make html` from this directory.\n"
  },
  {
    "path": "docs/conf.py",
    "content": "# -*- coding: utf-8 -*-\n# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\n\n# flake8: noqa\n\n# Configuration file for the Sphinx documentation builder.\n#\n# This file does only contain a selection of the most common options. For a\n# full list see the documentation:\n# http://www.sphinx-doc.org/en/master/config\n\n# -- Path setup --------------------------------------------------------------\n\n# If extensions (or modules to document with autodoc) are in another directory,\n# add these directories to sys.path here. If the directory is relative to the\n# documentation root, use os.path.abspath to make it absolute, like shown here.\n#\nimport os\nimport sys\nimport mock\n\n# The theme to use for HTML and HTML Help pages.  See the documentation for\n# a list of builtin themes.\n#\nimport sphinx_rtd_theme\n\n# The suffix(es) of source filenames.\n# You can specify multiple suffix as a list of string:\n#\n# to support markdown\nfrom recommonmark.parser import CommonMarkParser\n\nsys.path.insert(0, os.path.abspath(\"../\"))\n\nDEPLOY = os.environ.get(\"READTHEDOCS\") == \"True\"\n\n\n# -- Project information -----------------------------------------------------\n\ntry:\n    import torch  # noqa\nexcept ImportError:\n    for m in [\n        \"torch\",\n        \"torchvision\",\n        \"torch.nn\",\n        \"torch.nn.parallel\",\n        \"torch.distributed\",\n        \"torch.multiprocessing\",\n        \"torch.autograd\",\n        \"torch.autograd.function\",\n        \"torch.nn.modules\",\n        \"torch.nn.modules.utils\",\n        \"torch.utils\",\n        \"torch.utils.data\",\n        \"torch.onnx\",\n        \"torchvision\",\n        \"torchvision.ops\",\n    ]:\n        sys.modules[m] = mock.Mock(name=m)\n\nfor m in [\n    \"cv2\",\n    \"scipy\",\n    \"portalocker\",\n    \"detectron2._C\",\n    \"pycocotools\",\n    \"pycocotools.mask\",\n    \"pycocotools.coco\",\n    \"pycocotools.cocoeval\",\n    \"google\",\n    
\"google.protobuf\",\n    \"google.protobuf.internal\",\n    \"onnx\",\n    \"caffe2\",\n    \"caffe2.proto\",\n    \"caffe2.python\",\n    \"caffe2.python.utils\",\n    \"caffe2.python.onnx\",\n    \"caffe2.python.onnx.backend\",\n]:\n    sys.modules[m] = mock.Mock(name=m)\nsys.modules[\"cv2\"].__version__ = \"3.4\"\n\nimport detectron2  # isort: skip\n\n\nproject = \"detectron2\"\ncopyright = \"2019, detectron2 contributors\"\nauthor = \"detectron2 contributors\"\n\n# The short X.Y version\nversion = detectron2.__version__\n# The full version, including alpha/beta/rc tags\nrelease = version\n\n\n# -- General configuration ---------------------------------------------------\n\n# If your documentation needs a minimal Sphinx version, state it here.\n#\nneeds_sphinx = \"1.7\"\n\n# Add any Sphinx extension module names here, as strings. They can be\n# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom\n# ones.\nextensions = [\n    \"sphinx.ext.autodoc\",\n    \"sphinx.ext.napoleon\",\n    \"sphinx.ext.intersphinx\",\n    \"sphinx.ext.todo\",\n    \"sphinx.ext.coverage\",\n    \"sphinx.ext.mathjax\",\n    \"sphinx.ext.viewcode\",\n    \"sphinx.ext.githubpages\",\n]\n\n# -- Configurations for plugins ------------\nnapoleon_google_docstring = True\nnapoleon_include_init_with_doc = True\nnapoleon_include_special_with_doc = True\nnapoleon_numpy_docstring = False\nnapoleon_use_rtype = False\nautodoc_inherit_docstrings = False\nautodoc_member_order = \"bysource\"\n\nif DEPLOY:\n    intersphinx_timeout = 10\nelse:\n    # skip this when building locally\n    intersphinx_timeout = 0.1\nintersphinx_mapping = {\n    \"python\": (\"https://docs.python.org/3.6\", None),\n    \"numpy\": (\"https://docs.scipy.org/doc/numpy/\", None),\n    \"torch\": (\"https://pytorch.org/docs/master/\", None),\n}\n# -------------------------\n\n\n# Add any paths that contain templates here, relative to this directory.\ntemplates_path = [\"_templates\"]\n\nsource_parsers = {\".md\": 
CommonMarkParser}\n\nsource_suffix = [\".rst\", \".md\"]\n\n# The master toctree document.\nmaster_doc = \"index\"\n\n# The language for content autogenerated by Sphinx. Refer to documentation\n# for a list of supported languages.\n#\n# This is also used if you do content translation via gettext catalogs.\n# Usually you set \"language\" from the command line for these cases.\nlanguage = None\n\n# List of patterns, relative to source directory, that match files and\n# directories to ignore when looking for source files.\n# This pattern also affects html_static_path and html_extra_path.\nexclude_patterns = [\"_build\", \"Thumbs.db\", \".DS_Store\", \"build\", \"README.md\"]\n\n# The name of the Pygments (syntax highlighting) style to use.\npygments_style = \"sphinx\"\n\n\n# -- Options for HTML output -------------------------------------------------\n\nhtml_theme = \"sphinx_rtd_theme\"\nhtml_theme_path = [sphinx_rtd_theme.get_html_theme_path()]\n\n# Theme options are theme-specific and customize the look and feel of a theme\n# further.  For a list of options available for each theme, see the\n# documentation.\n#\n# html_theme_options = {}\n\n# Add any paths that contain custom static files (such as style sheets) here,\n# relative to this directory. They are copied after the builtin static files,\n# so a file named \"default.css\" will overwrite the builtin \"default.css\".\nhtml_static_path = [\"_static\"]\n\n# Custom sidebar templates, must be a dictionary that maps document names\n# to template names.\n#\n# The default sidebars (for documents that don't match any pattern) are\n# defined by theme itself.  
Builtin themes are using these templates by\n# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',\n# 'searchbox.html']``.\n#\n# html_sidebars = {}\n\n\n# -- Options for HTMLHelp output ---------------------------------------------\n\n# Output file base name for HTML help builder.\nhtmlhelp_basename = \"detectron2doc\"\n\n\n# -- Options for LaTeX output ------------------------------------------------\n\nlatex_elements = {\n    # The paper size ('letterpaper' or 'a4paper').\n    #\n    # 'papersize': 'letterpaper',\n    # The font size ('10pt', '11pt' or '12pt').\n    #\n    # 'pointsize': '10pt',\n    # Additional stuff for the LaTeX preamble.\n    #\n    # 'preamble': '',\n    # Latex figure (float) alignment\n    #\n    # 'figure_align': 'htbp',\n}\n\n# Grouping the document tree into LaTeX files. List of tuples\n# (source start file, target name, title,\n#  author, documentclass [howto, manual, or own class]).\nlatex_documents = [\n    (master_doc, \"detectron2.tex\", \"detectron2 Documentation\", \"detectron2 contributors\", \"manual\")\n]\n\n\n# -- Options for manual page output ------------------------------------------\n\n# One entry per manual page. List of tuples\n# (source start file, name, description, authors, manual section).\nman_pages = [(master_doc, \"detectron2\", \"detectron2 Documentation\", [author], 1)]\n\n\n# -- Options for Texinfo output ----------------------------------------------\n\n# Grouping the document tree into Texinfo files. 
List of tuples\n# (source start file, target name, title, author,\n#  dir menu entry, description, category)\ntexinfo_documents = [\n    (\n        master_doc,\n        \"detectron2\",\n        \"detectron2 Documentation\",\n        author,\n        \"detectron2\",\n        \"One line description of project.\",\n        \"Miscellaneous\",\n    )\n]\n\n\n# -- Options for todo extension ----------------------------------------------\n\n# If true, `todo` and `todoList` produce output, else they produce nothing.\ntodo_include_todos = True\n\n\n_DEPRECATED_NAMES = set()\n\n\ndef autodoc_skip_member(app, what, name, obj, skip, options):\n    # we hide something deliberately\n    if getattr(obj, \"__HIDE_SPHINX_DOC__\", False):\n        return True\n    # Hide some names that are deprecated or not intended to be used\n    if name in _DEPRECATED_NAMES:\n        return True\n    return None\n\n\ndef url_resolver(url):\n    if \".html\" not in url:\n        url = url.replace(\"../\", \"\")\n        return \"https://github.com/facebookresearch/detectron2/blob/master/\" + url\n    else:\n        if DEPLOY:\n            return \"http://detectron2.readthedocs.io/\" + url\n        else:\n            return \"/\" + url\n\n\ndef setup(app):\n    from recommonmark.transform import AutoStructify\n\n    app.connect(\"autodoc-skip-member\", autodoc_skip_member)\n    # app.connect('autodoc-skip-member', autodoc_skip_member)\n    app.add_config_value(\n        \"recommonmark_config\",\n        {\n            \"url_resolver\": url_resolver,\n            \"auto_toc_tree_section\": \"Contents\",\n            \"enable_math\": True,\n            \"enable_inline_math\": True,\n            \"enable_eval_rst\": True,\n        },\n        True,\n    )\n    app.add_transform(AutoStructify)\n"
  },
  {
    "path": "docs/index.rst",
    "content": ".. detectron2 documentation master file, created by\n   sphinx-quickstart on Sat Sep 21 13:46:45 2019.\n   You can adapt this file completely to your liking, but it should at least\n   contain the root `toctree` directive.\n\nWelcome to detectron2's documentation!\n======================================\n\n.. toctree::\n   :maxdepth: 2\n\n   tutorials/index\n   notes/index\n   modules/index\n"
  },
  {
    "path": "docs/modules/checkpoint.rst",
    "content": "detectron2.checkpoint package\n=============================\n\n.. automodule:: detectron2.checkpoint\n    :members:\n    :undoc-members:\n    :show-inheritance:\n"
  },
  {
    "path": "docs/modules/config.rst",
    "content": "detectron2.config package\n=========================\n\n.. automodule:: detectron2.config\n    :members:\n    :undoc-members:\n    :show-inheritance:\n    :inherited-members:\n\n\nConfig References\n-----------------\n\n.. literalinclude:: ../../detectron2/config/defaults.py\n  :language: python\n  :linenos:\n  :lines: 4-\n"
  },
  {
    "path": "docs/modules/data.rst",
    "content": "detectron2.data package\n=======================\n\n.. automodule:: detectron2.data\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\ndetectron2.data.detection\\_utils module\n---------------------------------------\n\n.. automodule:: detectron2.data.detection_utils\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\ndetectron2.data.datasets module\n---------------------------------------\n\n.. automodule:: detectron2.data.datasets\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\ndetectron2.data.samplers module\n---------------------------------------\n\n.. automodule:: detectron2.data.samplers\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\n\ndetectron2.data.transforms module\n---------------------------------------\n\n.. automodule:: detectron2.data.transforms\n    :members:\n    :undoc-members:\n    :show-inheritance:\n"
  },
  {
    "path": "docs/modules/engine.rst",
    "content": "detectron2.engine package\n=========================\n\n\n.. automodule:: detectron2.engine\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\n\ndetectron2.engine.defaults module\n---------------------------------\n\n.. automodule:: detectron2.engine.defaults\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\ndetectron2.engine.hooks module\n---------------------------------\n\n.. automodule:: detectron2.engine.hooks\n    :members:\n    :undoc-members:\n    :show-inheritance:\n"
  },
  {
    "path": "docs/modules/evaluation.rst",
    "content": "detectron2.evaluation package\n=============================\n\n.. automodule:: detectron2.evaluation\n    :members:\n    :undoc-members:\n    :show-inheritance:\n"
  },
  {
    "path": "docs/modules/export.rst",
    "content": "detectron2.export package\n=========================\n\n.. automodule:: detectron2.export\n    :members:\n    :undoc-members:\n    :show-inheritance:\n"
  },
  {
    "path": "docs/modules/index.rst",
    "content": "API Documentation\n==================\n\n.. toctree::\n\n    checkpoint\n    config\n    data\n    engine\n    evaluation\n    layers\n    model_zoo\n    modeling\n    solver\n    structures\n    utils\n    export\n"
  },
  {
    "path": "docs/modules/layers.rst",
    "content": "detectron2.layers package\n=========================\n\n.. automodule:: detectron2.layers\n    :members:\n    :undoc-members:\n    :show-inheritance:\n"
  },
  {
    "path": "docs/modules/model_zoo.rst",
    "content": "detectron2.model_zoo package\n============================\n\n.. automodule:: detectron2.model_zoo\n    :members:\n    :undoc-members:\n    :show-inheritance:\n"
  },
  {
    "path": "docs/modules/modeling.rst",
    "content": "detectron2.modeling package\n===========================\n\n.. automodule:: detectron2.modeling\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\n\ndetectron2.modeling.poolers module\n---------------------------------------\n\n.. automodule:: detectron2.modeling.poolers\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\n\ndetectron2.modeling.sampling module\n------------------------------------\n\n.. automodule:: detectron2.modeling.sampling\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\n\ndetectron2.modeling.box_regression module\n------------------------------------------\n\n.. automodule:: detectron2.modeling.box_regression\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\n\nModel Registries\n-----------------\n\nThese are different registries provided in modeling.\nEach registry provides you the ability to replace it with your customized component,\nwithout having to modify detectron2's code.\n\nNote that it is impossible to allow users to customize any line of code directly.\nEven just to add one line at some place,\nyou'll likely need to find out the smallest registry which contains that line,\nand register your component to that registry.\n\n\n.. autodata:: detectron2.modeling.META_ARCH_REGISTRY\n.. autodata:: detectron2.modeling.BACKBONE_REGISTRY\n.. autodata:: detectron2.modeling.PROPOSAL_GENERATOR_REGISTRY\n.. autodata:: detectron2.modeling.RPN_HEAD_REGISTRY\n.. autodata:: detectron2.modeling.ANCHOR_GENERATOR_REGISTRY\n.. autodata:: detectron2.modeling.ROI_HEADS_REGISTRY\n.. autodata:: detectron2.modeling.ROI_BOX_HEAD_REGISTRY\n.. autodata:: detectron2.modeling.ROI_MASK_HEAD_REGISTRY\n.. autodata:: detectron2.modeling.ROI_KEYPOINT_HEAD_REGISTRY\n"
  },
  {
    "path": "docs/modules/solver.rst",
    "content": "detectron2.solver package\n=========================\n\n.. automodule:: detectron2.solver\n    :members:\n    :undoc-members:\n    :show-inheritance:\n"
  },
  {
    "path": "docs/modules/structures.rst",
    "content": "detectron2.structures package\n=============================\n\n.. automodule:: detectron2.structures\n    :members:\n    :undoc-members:\n    :show-inheritance:\n"
  },
  {
    "path": "docs/modules/utils.rst",
    "content": "detectron2.utils package\n========================\n\ndetectron2.utils.colormap module\n--------------------------------\n\n.. automodule:: detectron2.utils.colormap\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\ndetectron2.utils.comm module\n----------------------------\n\n.. automodule:: detectron2.utils.comm\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\n\ndetectron2.utils.events module\n------------------------------\n\n.. automodule:: detectron2.utils.events\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\n\ndetectron2.utils.logger module\n------------------------------\n\n.. automodule:: detectron2.utils.logger\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\n\ndetectron2.utils.registry module\n--------------------------------\n\n.. automodule:: detectron2.utils.registry\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\ndetectron2.utils.memory module\n----------------------------------\n\n.. automodule:: detectron2.utils.memory\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\n\ndetectron2.utils.video\\_visualizer module\n-----------------------------------------\n\n.. automodule:: detectron2.utils.video_visualizer\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\ndetectron2.utils.visualizer module\n----------------------------------\n\n.. automodule:: detectron2.utils.visualizer\n    :members:\n    :undoc-members:\n    :show-inheritance:\n\n"
  },
  {
    "path": "docs/notes/benchmarks.md",
    "content": "\n# Benchmarks\n\nHere we benchmark the training speed of a Mask R-CNN in detectron2,\nwith some other popular open source Mask R-CNN implementations.\n\n\n### Settings\n\n* Hardware: 8 NVIDIA V100s with NVLink.\n* Software: Python 3.7, CUDA 10.0, cuDNN 7.6.4, PyTorch 1.3.0 (at\n  [this link](https://download.pytorch.org/whl/nightly/cu100/torch-1.3.0%2Bcu100-cp37-cp37m-linux_x86_64.whl)),\n  TensorFlow 1.15.0rc2, Keras 2.2.5, MxNet 1.6.0b20190820.\n* Model: an end-to-end R-50-FPN Mask-RCNN model, using the same hyperparameter as the\n\t[Detectron baseline config](https://github.com/facebookresearch/Detectron/blob/master/configs/12_2017_baselines/e2e_mask_rcnn_R-50-FPN_1x.yaml).\n* Metrics: We use the average throughput in iterations 100-500 to skip GPU warmup time.\n\tNote that for R-CNN-style models, the throughput of a model typically changes during training, because\n\tit depends on the predictions of the model. Therefore this metric is not directly comparable with\n\t\"train speed\" in model zoo, which is the average speed of the entire training run.\n\n\n### Main Results\n\n```eval_rst\n+-------------------------------+--------------------+\n| Implementation                | Throughput (img/s) |\n+===============================+====================+\n| |D2| |PT|                     | 59                 |\n+-------------------------------+--------------------+\n| maskrcnn-benchmark_  |PT|     | 51                 |\n+-------------------------------+--------------------+\n| tensorpack_ |TF|              | 50                 |\n+-------------------------------+--------------------+\n| mmdetection_  |PT|            | 41                 |\n+-------------------------------+--------------------+\n| simpledet_ |mxnet|            | 39                 |\n+-------------------------------+--------------------+\n| Detectron_  |C2|              | 19                 |\n+-------------------------------+--------------------+\n| `matterport/Mask_RCNN`__ |TF| | 
14                 |\n+-------------------------------+--------------------+\n\n.. _maskrcnn-benchmark: https://github.com/facebookresearch/maskrcnn-benchmark/\n.. _tensorpack: https://github.com/tensorpack/tensorpack/tree/master/examples/FasterRCNN\n.. _mmdetection: https://github.com/open-mmlab/mmdetection/\n.. _simpledet: https://github.com/TuSimple/simpledet/\n.. _Detectron: https://github.com/facebookresearch/Detectron\n__ https://github.com/matterport/Mask_RCNN/\n\n.. |D2| image:: https://github.com/facebookresearch/detectron2/raw/master/.github/Detectron2-Logo-Horz.svg?sanitize=true\n   :height: 15pt\n   :target: https://github.com/facebookresearch/detectron2/\n.. |PT| image:: https://pytorch.org/assets/images/logo-icon.svg\n   :width: 15pt\n   :height: 15pt\n   :target: https://pytorch.org\n.. |TF| image:: https://static.nvidiagrid.net/ngc/containers/tensorflow.png\n   :width: 15pt\n   :height: 15pt\n   :target: https://tensorflow.org\n.. |mxnet| image:: https://github.com/dmlc/web-data/raw/master/mxnet/image/mxnet_favicon.png\n   :width: 15pt\n   :height: 15pt\n   :target: https://mxnet.apache.org/\n.. 
|C2| image:: https://caffe2.ai/static/logo.svg\n   :width: 15pt\n   :height: 15pt\n   :target: https://caffe2.ai\n```\n\n\nDetails for each implementation:\n\n* __Detectron2__:\n  ```\n  python tools/train_net.py  --config-file configs/Detectron1-Comparisons/mask_rcnn_R_50_FPN_noaug_1x.yaml --num-gpus 8\n  ```\n\n* __maskrcnn-benchmark__: use commit `0ce8f6f` with `sed -i 's/torch.uint8/torch.bool/g' **/*.py` to make it compatible with latest PyTorch.\n  Then, run training with\n  ```\n  python -m torch.distributed.launch --nproc_per_node=8 tools/train_net.py --config-file configs/e2e_mask_rcnn_R_50_FPN_1x.yaml\n  ```\n  The speed we observed is faster than its model zoo, likely due to different software versions.\n\n* __tensorpack__: at commit `caafda`, `export TF_CUDNN_USE_AUTOTUNE=0`, then run\n  ```\n  mpirun -np 8 ./train.py --config DATA.BASEDIR=/data/coco TRAINER=horovod BACKBONE.STRIDE_1X1=True TRAIN.STEPS_PER_EPOCH=50 --load ImageNet-R50-AlignPadding.npz\n  ```\n\n* __mmdetection__: at commit `4d9a5f`, apply the following diff, then run\n\t```\n\t./tools/dist_train.sh configs/mask_rcnn_r50_fpn_1x.py 8\n\t```\n\n    The speed we observed is faster than its model zoo, likely due to different software versions.\n\n\t<details>\n\t<summary>\n\t(diff to make it use the same architecture - click to expand)\n\t</summary>\n\t```diff\n\tdiff --git i/configs/mask_rcnn_r50_fpn_1x.py w/configs/mask_rcnn_r50_fpn_1x.py\n\tindex 04f6d22..ed721f2 100644\n\t--- i/configs/mask_rcnn_r50_fpn_1x.py\n\t+++ w/configs/mask_rcnn_r50_fpn_1x.py\n\t@@ -1,14 +1,15 @@\n\t# model settings\n\tmodel = dict(\n\t\ttype='MaskRCNN',\n\t-    pretrained='torchvision://resnet50',\n\t+    pretrained='open-mmlab://resnet50_caffe',\n\t\tbackbone=dict(\n\t\t\ttype='ResNet',\n\t\t\tdepth=50,\n\t\t\tnum_stages=4,\n\t\t\tout_indices=(0, 1, 2, 3),\n\t\t\tfrozen_stages=1,\n\t-        style='pytorch'),\n\t+        norm_cfg=dict(type=\"BN\", requires_grad=False),\n\t+        
style='caffe'),\n\t\tneck=dict(\n\t\t\ttype='FPN',\n\t\t\tin_channels=[256, 512, 1024, 2048],\n\t@@ -115,7 +116,7 @@ test_cfg = dict(\n\tdataset_type = 'CocoDataset'\n\tdata_root = 'data/coco/'\n\timg_norm_cfg = dict(\n\t-    mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True)\n\t+    mean=[123.675, 116.28, 103.53], std=[1.0, 1.0, 1.0], to_rgb=False)\n\ttrain_pipeline = [\n\t\tdict(type='LoadImageFromFile'),\n\t\tdict(type='LoadAnnotations', with_bbox=True, with_mask=True),\n\t```\n\t</details>\n\n* __SimpleDet__: at commit `9187a1`, run\n\t```\n\tpython detection_train.py --config config/mask_r50v1_fpn_1x.py\n\t```\n\n* __Detectron__: run\n  ```\n  python tools/train_net.py --cfg configs/12_2017_baselines/e2e_mask_rcnn_R-50-FPN_1x.yaml\n  ```\n  Note that many of its ops run on CPUs, therefore the performance is limited.\n\n* __matterport/Mask_RCNN__: at commit `3deaec`, apply the following diff, `export TF_CUDNN_USE_AUTOTUNE=0`, then run\n\t```\n\tpython coco.py train --dataset=/data/coco/ --model=imagenet\n\t```\n  Note that many small details in this implementation might be different\n  from Detectron's standards.\n\n\t<details>\n\t<summary>\n\t(diff to make it use the same hyperparameters - click to expand)\n\t</summary>\n\t```diff\n\tdiff --git i/mrcnn/model.py w/mrcnn/model.py\n\tindex 62cb2b0..61d7779 100644\n\t--- i/mrcnn/model.py\n\t+++ w/mrcnn/model.py\n\t@@ -2367,8 +2367,8 @@ class MaskRCNN():\n\t\t\t\tepochs=epochs,\n\t\t\t\tsteps_per_epoch=self.config.STEPS_PER_EPOCH,\n\t\t\t\tcallbacks=callbacks,\n\t-            validation_data=val_generator,\n\t-            validation_steps=self.config.VALIDATION_STEPS,\n\t+            #validation_data=val_generator,\n\t+            #validation_steps=self.config.VALIDATION_STEPS,\n\t\t\t\tmax_queue_size=100,\n\t\t\t\tworkers=workers,\n\t\t\t\tuse_multiprocessing=True,\n\tdiff --git i/mrcnn/parallel_model.py w/mrcnn/parallel_model.py\n\tindex d2bf53b..060172a 100644\n\t--- 
i/mrcnn/parallel_model.py\n\t+++ w/mrcnn/parallel_model.py\n\t@@ -32,6 +32,7 @@ class ParallelModel(KM.Model):\n\t\t\tkeras_model: The Keras model to parallelize\n\t\t\tgpu_count: Number of GPUs. Must be > 1\n\t\t\t\"\"\"\n\t+        super().__init__()\n\t\t\tself.inner_model = keras_model\n\t\t\tself.gpu_count = gpu_count\n\t\t\tmerged_outputs = self.make_parallel()\n\tdiff --git i/samples/coco/coco.py w/samples/coco/coco.py\n\tindex 5d172b5..239ed75 100644\n\t--- i/samples/coco/coco.py\n\t+++ w/samples/coco/coco.py\n\t@@ -81,7 +81,10 @@ class CocoConfig(Config):\n\t\tIMAGES_PER_GPU = 2\n\n\t\t# Uncomment to train on 8 GPUs (default is 1)\n\t-    # GPU_COUNT = 8\n\t+    GPU_COUNT = 8\n\t+    BACKBONE = \"resnet50\"\n\t+    STEPS_PER_EPOCH = 50\n\t+    TRAIN_ROIS_PER_IMAGE = 512\n\n\t\t# Number of classes (including background)\n\t\tNUM_CLASSES = 1 + 80  # COCO has 80 classes\n\t@@ -496,29 +499,10 @@ if __name__ == '__main__':\n\t\t\t# *** This training schedule is an example. Update to your needs ***\n\n\t\t\t# Training - Stage 1\n\t-        print(\"Training network heads\")\n\t\t\tmodel.train(dataset_train, dataset_val,\n\t\t\t\t\t\tlearning_rate=config.LEARNING_RATE,\n\t\t\t\t\t\tepochs=40,\n\t-                    layers='heads',\n\t-                    augmentation=augmentation)\n\t-\n\t-        # Training - Stage 2\n\t-        # Finetune layers from ResNet stage 4 and up\n\t-        print(\"Fine tune Resnet stage 4 and up\")\n\t-        model.train(dataset_train, dataset_val,\n\t-                    learning_rate=config.LEARNING_RATE,\n\t-                    epochs=120,\n\t-                    layers='4+',\n\t-                    augmentation=augmentation)\n\t-\n\t-        # Training - Stage 3\n\t-        # Fine tune all layers\n\t-        print(\"Fine tune all layers\")\n\t-        model.train(dataset_train, dataset_val,\n\t-                    learning_rate=config.LEARNING_RATE / 10,\n\t-                    epochs=160,\n\t-                    
layers='all',\n\t+                    layers='3+',\n\t\t\t\t\t\taugmentation=augmentation)\n\n\t\telif args.command == \"evaluate\":\n\t```\n\t</details>\n"
  },
  {
    "path": "docs/notes/changelog.md",
    "content": "# Change Log\n\n### Releases\nSee release log at\n[https://github.com/facebookresearch/detectron2/releases](https://github.com/facebookresearch/detectron2/releases)\n\n### Notable Backward Incompatible Changes:\n\n* 2019-11-11: `detectron2.data.detection_utils.read_image` transposes images with exif information.\n* 2019-10-10: initial release.\n\n### Config Version Change Log\n\n* v1: Rename `RPN_HEAD.NAME` to `RPN.HEAD_NAME`.\n* v2: A batch of rename of many configurations before release.\n\n### Known Bugs in Historical Versions:\n* Dec 19 - Dec 26: Using aspect ratio grouping causes a drop in accuracy.\n* Oct 10 - Nov 9: Test time augmentation does not predict the last category.\n"
  },
  {
    "path": "docs/notes/compatibility.md",
    "content": "# Compatibility with Other Libraries\n\n## Compatibility with Detectron (and maskrcnn-benchmark)\n\nDetectron2 addresses some legacy issues left in Detectron. As a result, their models\nare not compatible:\nrunning inference with the same model weights will produce different results in the two code bases.\n\nThe major differences regarding inference are:\n\n- The height and width of a box with corners (x1, y1) and (x2, y2) is now computed more naturally as\n  width = x2 - x1 and height = y2 - y1;\n  In Detectron, a \"+ 1\" was added to both height and width.\n\n  Note that the relevant ops in Caffe2 have [adopted this change of convention](https://github.com/pytorch/pytorch/pull/20550)\n  with an extra option.\n  So it is still possible to run inference with a Detectron2-trained model in Caffe2.\n\n  The change in height/width calculations most notably changes:\n  - encoding/decoding in bounding box regression.\n  - non-maximum suppression. The effect here is very negligible, though.\n\n- RPN now uses simpler anchors with fewer quantization artifacts.\n\n  In Detectron, the anchors were quantized and\n  [do not have accurate areas](https://github.com/facebookresearch/Detectron/issues/227).\n  In Detectron2, the anchors are center-aligned to feature grid points and not quantized.\n\n- Classification layers have a different ordering of class labels.\n\n  This involves any trainable parameter with shape (..., num_categories + 1, ...).\n  In Detectron2, integer labels [0, K-1] correspond to the K = num_categories object categories\n  and the label \"K\" corresponds to the special \"background\" category.\n  In Detectron, label \"0\" means background, and labels [1, K] correspond to the K categories.\n\n- ROIAlign is implemented differently. The new implementation is [available in Caffe2](https://github.com/pytorch/pytorch/pull/23706).\n\n  1. 
All the ROIs are shifted by half a pixel compared to Detectron in order to create better image-feature-map alignment.\n     See `layers/roi_align.py` for details.\n     To enable the old behavior, use `ROIAlign(aligned=False)`, or `POOLER_TYPE=ROIAlign` instead of\n     `ROIAlignV2` (the default).\n\n  1. The ROIs are not required to have a minimum size of 1.\n     This will lead to tiny differences in the output, but should be negligible.\n\n- Mask inference function is different.\n\n  In Detectron2, the \"paste_mask\" function is different and should be more accurate than in Detectron. This change\n  can improve mask AP on COCO by ~0.5% absolute.\n\nThere are some other differences in training as well, but they won't affect\nmodel-level compatibility. The major ones are:\n\n- We fixed a [bug](https://github.com/facebookresearch/Detectron/issues/459) in\n  Detectron, by making `RPN.POST_NMS_TOPK_TRAIN` per-image, rather than per-batch.\n  The fix may lead to a small accuracy drop for a few models (e.g. keypoint\n  detection) and will require some parameter tuning to match the Detectron results.\n- For simplicity, we change the default loss in bounding box regression to L1 loss, instead of smooth L1 loss.\n  We have observed that this tends to slightly decrease box AP50 while improving box AP for higher\n  overlap thresholds (and leading to a slight overall improvement in box AP).\n- We interpret the coordinates in COCO bounding box and segmentation annotations\n  as coordinates in range `[0, width]` or `[0, height]`. 
The coordinates in\n  COCO keypoint annotations are interpreted as pixel indices in range `[0, width - 1]` or `[0, height - 1]`.\n  Note that this affects how flip augmentation is implemented.\n\n\nWe will later share more details and rationale behind the above mentioned issues\nabout pixels, coordinates, and \"+1\"s.\n\n\n## Compatibility with Caffe2\n\nAs mentioned above, despite the incompatibilities with Detectron, the relevant\nops have been implemented in Caffe2, in [PR1](https://github.com/pytorch/pytorch/pull/20550)\nand [PR2](https://github.com/pytorch/pytorch/pull/23706).\nTherefore, models trained with detectron2 can be converted in Caffe2.\nSee [Deployment](../tutorials/deployment.html) for the tutorial.\n\n## Compatibility with TensorFlow\n\nMost ops are available in TensorFlow, although some tiny differences in\nthe implementation of resize / ROIAlign / padding need to be addressed.\nA working conversion script is provided by [tensorpack FasterRCNN](https://github.com/tensorpack/tensorpack/tree/master/examples/FasterRCNN/convert_d2)\nto run a standard Detectron2 model in TensorFlow.\n"
  },
  {
    "path": "docs/notes/index.rst",
    "content": "Notes\n======================================\n\n.. toctree::\n   :maxdepth: 2\n\n   benchmarks\n   compatibility\n   contributing\n   changelog\n"
  },
  {
    "path": "docs/tutorials/README.md",
    "content": "# Read the docs:\n\nThe latest documentation built from this directory is available at [detectron2.readthedocs.io](https://detectron2.readthedocs.io/).\nDocuments in this directory are not meant to be read on github.\n"
  },
  {
    "path": "docs/tutorials/configs.md",
    "content": "# Use Configs\n\nDetectron2's config system uses yaml and [yacs](https://github.com/rbgirshick/yacs).\nIn addition to the basic operations that access and update a config, we provide\nthe following extra functionalities:\n\n1. The config can have `_BASE_: base.yaml` field, which will load a base config first.\n   Values in the base config will be overwritten in sub-configs, if there are any conflicts.\n   We provided several base configs for standard model architectures.\n2. We provide config versioning, for backward compatibility.\n   If your config file is versioned with a config line like `VERSION: 2`,\n   detectron2 will still recognize it even if we rename some keys in the future.\n\n### Use Configs\n\nSome basic usage of the `CfgNode` object is shown below:\n```python\nfrom detectron2.config import get_cfg\ncfg = get_cfg()    # obtain detectron2's default config\ncfg.xxx = yyy      # add new configs for your own custom components\ncfg.merge_from_file(\"my_cfg.yaml\")   # load values from a file\n\ncfg.merge_from_list([\"MODEL.WEIGHTS\", \"weights.pth\"])   # can also load values from a list of str\nprint(cfg.dump())  # print formatted configs\n```\n\nTo see a list of available configs in detectron2, see [Config References](../modules/config.html#config-references)\n\n\n### Best Practice with Configs\n\n1. Treat the configs you write as \"code\": avoid copying them or duplicating them; use `_BASE_`\n   to share common parts between configs.\n\n2. Keep the configs you write simple: don't include keys that do not affect the experimental setting.\n\n3. Keep a version number in your configs (or the base config), e.g., `VERSION: 2`,\n   for backward compatibility.\n\t We print a warning when reading a config without version number.\n   The official configs do not include version number because they are meant to\n   be always up-to-date.\n\n4. 
Save a full config together with a trained model, and use it to run inference.\n   This is more robust to changes that may happen to the config definition\n   (e.g., if a default value changed), although we will try to avoid such changes.\n"
  },
  {
    "path": "docs/tutorials/data_loading.md",
    "content": "\n# Use Custom Dataloaders\n\n## How the Existing Dataloader Works\n\nDetectron2 contains a builtin data loading pipeline.\nIt's good to understand how it works, in case you need to write a custom one.\n\nDetectron2 provides two functions\n[build_detection_{train,test}_loader](../modules/data.html#detectron2.data.build_detection_train_loader)\nthat create a default data loader from a given config.\nHere is how `build_detection_{train,test}_loader` work:\n\n1. It takes the name of a registered dataset (e.g., \"coco_2017_train\") and loads a `list[dict]` representing the dataset items\n   in a lightweight, canonical format. These dataset items are not yet ready to be used by the model (e.g., images are\n   not loaded into memory, random augmentations have not been applied, etc.).\n   Details about the dataset format and dataset registration can be found in\n   [datasets](datasets.html).\n2. Each dict in this list is mapped by a function (\"mapper\"):\n   * Users can customize this mapping function by specifying the \"mapper\" argument in\n        `build_detection_{train,test}_loader`. The default mapper is [DatasetMapper]( ../modules/data.html#detectron2.data.DatasetMapper).\n   * The output format of such function can be arbitrary, as long as it is accepted by the consumer of this data loader (usually the model).\n     The outputs of the default mapper, after batching, follow the default model input format documented in\n     [Use Models](https://detectron2.readthedocs.io/tutorials/models.html#model-input-format).\n   * The role of the mapper is to transform the lightweight, canonical representation of a dataset item into a format\n     that is ready for the model to consume (including, e.g., read images, perform random data augmentation and convert to torch Tensors).\n     If you would like to perform custom transformations to data, you often want a custom mapper.\n3. The outputs of the mapper are batched (simply into a list).\n4. 
This batched data is the output of the data loader. Typically, it's also the input of\n   `model.forward()`.\n\n\n## Write a Custom Dataloader\n\nUsing a different \"mapper\" with `build_detection_{train,test}_loader(mapper=)` works for most use cases\nof custom data loading.\nFor example, if you want to resize all images to a fixed size for Mask R-CNN training, write this:\n\n```python\nfrom detectron2.data import build_detection_train_loader\nfrom detectron2.data import transforms as T\nfrom detectron2.data import detection_utils as utils\n\ndef mapper(dataset_dict):\n\t# Implement a mapper, similar to the default DatasetMapper, but with your own customizations\n\tdataset_dict = copy.deepcopy(dataset_dict)  # it will be modified by code below\n\timage = utils.read_image(dataset_dict[\"file_name\"], format=\"BGR\")\n\timage, transforms = T.apply_transform_gens([T.Resize((800, 800))], image)\n\tdataset_dict[\"image\"] = torch.as_tensor(image.transpose(2, 0, 1).astype(\"float32\"))\n\n\tannos = [\n\t\tutils.transform_instance_annotations(obj, transforms, image.shape[:2])\n\t\tfor obj in dataset_dict.pop(\"annotations\")\n\t\tif obj.get(\"iscrowd\", 0) == 0\n\t]\n\tinstances = utils.annotations_to_instances(annos, image.shape[:2])\n\tdataset_dict[\"instances\"] = utils.filter_empty_instances(instances)\n\treturn dataset_dict\n\ndata_loader = build_detection_train_loader(cfg, mapper=mapper)\n# use this dataloader instead of the default\n```\nRefer to [API documentation of detectron2.data](../modules/data.html) for details.\n\nIf you want to change not only the mapper (e.g., to write different sampling or batching logic),\nyou can write your own data loader. 
The data loader is simply a\npython iterator that produces [the format](models.html) your model accepts.\nYou can implement it using any tools you like.\n\n## Use a Custom Dataloader\n\nIf you use [DefaultTrainer](../modules/engine.html#detectron2.engine.defaults.DefaultTrainer),\nyou can overwrite its `build_{train,test}_loader` method to use your own dataloader.\nSee the [densepose dataloader](../../projects/DensePose/train_net.py)\nfor an example.\n\nIf you write your own training loop, you can plug in your data loader easily.\n"
  },
  {
    "path": "docs/tutorials/datasets.md",
    "content": "# Use Custom Datasets\n\nIf you want to use a custom dataset while also reusing detectron2's data loaders,\nyou will need to\n\n1. Register your dataset (i.e., tell detectron2 how to obtain your dataset).\n2. Optionally, register metadata for your dataset.\n\nNext, we explain the above two concepts in details.\n\nThe [Colab Notebook](https://colab.research.google.com/drive/16jcaJoc6bCFAQ96jDe2HwtXj7BMD_-m5)\nhas a working example of how to register and train on a dataset of custom formats.\n\n\n### Register a Dataset\n\nTo let detectron2 know how to obtain a dataset named \"my_dataset\", you will implement\na function that returns the items in your dataset and then tell detectron2 about this\nfunction:\n```python\ndef get_dicts():\n  ...\n  return list[dict] in the following format\n\nfrom detectron2.data import DatasetCatalog\nDatasetCatalog.register(\"my_dataset\", get_dicts)\n```\n\nHere, the snippet associates a dataset \"my_dataset\" with a function that returns the data.\nIf you do not modify downstream code (i.e., you use the standard data loader and data mapper),\nthen the function has to return a list of dicts in detectron2's standard dataset format, described\nnext. You can also use arbitrary custom data format, as long as the\ndownstream code (mainly the [custom data loader](data_loading.html)) supports it.\n\nFor standard tasks\n(instance detection, instance/semantic/panoptic segmentation, keypoint detection),\nwe use a format similar to COCO's json annotations\nas the basic dataset representation.\n\nThe format uses one dict to represent the annotations of\none image. The dict may have the following fields.\nThe fields are often optional, and some functions may be able to\ninfer certain fields from others if needed, e.g., the data loader\nwill load the image from \"file_name\" and load \"sem_seg\" from \"sem_seg_file_name\".\n\n+ `file_name`: the full path to the image file. 
Will apply rotation and flipping if the image has such exif information.\n+ `sem_seg_file_name`: the full path to the ground truth semantic segmentation file.\n+ `sem_seg`: semantic segmentation ground truth in a 2D `torch.Tensor`. Values in the array represent\n   category labels starting from 0.\n+ `height`, `width`: integer. The shape of image.\n+ `image_id` (str or int): a unique id that identifies this image. Used\n\tduring evaluation to identify the images, but a dataset may use it for different purposes.\n+ `annotations` (list[dict]): the per-instance annotations of every\n  instance in this image. Each annotation dict may contain:\n  + `bbox` (list[float]): list of 4 numbers representing the bounding box of the instance.\n  + `bbox_mode` (int): the format of bbox.\n    It must be a member of\n    [structures.BoxMode](../modules/structures.html#detectron2.structures.BoxMode).\n    Currently supports: `BoxMode.XYXY_ABS`, `BoxMode.XYWH_ABS`.\n  + `category_id` (int): an integer in the range [0, num_categories) representing the category label.\n    The value num_categories is reserved to represent the \"background\" category, if applicable.\n  + `segmentation` (list[list[float]] or dict):\n    + If `list[list[float]]`, it represents a list of polygons, one for each connected component\n      of the object. Each `list[float]` is one simple polygon in the format of `[x1, y1, ..., xn, yn]`.\n      The Xs and Ys are either relative coordinates in [0, 1], or absolute coordinates,\n      depend on whether \"bbox_mode\" is relative.\n    + If `dict`, it represents the per-pixel segmentation mask in COCO's RLE format. The dict should have\n\t\t\tkeys \"size\" and \"counts\". 
You can convert a uint8 segmentation mask of 0s and 1s into\n\t\t\tRLE format by `pycocotools.mask.encode(np.asarray(mask, order=\"F\"))`.\n  + `keypoints` (list[float]): in the format of [x1, y1, v1,..., xn, yn, vn].\n    v[i] means the [visibility](http://cocodataset.org/#format-data) of this keypoint.\n    `n` must be equal to the number of keypoint categories.\n    The Xs and Ys are either relative coordinates in [0, 1], or absolute coordinates,\n    depend on whether \"bbox_mode\" is relative.\n\n    Note that the coordinate annotations in COCO format are integers in range [0, H-1 or W-1].\n    By default, detectron2 adds 0.5 to absolute keypoint coordinates to convert them from discrete\n    pixel indices to floating point coordinates.\n  + `iscrowd`: 0 or 1. Whether this instance is labeled as COCO's \"crowd\n    region\". Don't include this field if you don't know what it means.\n+ `proposal_boxes` (array): 2D numpy array with shape (K, 4) representing K precomputed proposal boxes for this image.\n+ `proposal_objectness_logits` (array): numpy array with shape (K, ), which corresponds to the objectness\n  logits of proposals in 'proposal_boxes'.\n+ `proposal_bbox_mode` (int): the format of the precomputed proposal bbox.\n  It must be a member of\n  [structures.BoxMode](../modules/structures.html#detectron2.structures.BoxMode).\n  Default format is `BoxMode.XYXY_ABS`.\n\n\nIf your dataset is already in the COCO format, you can simply register it by\n```python\nfrom detectron2.data.datasets import register_coco_instances\nregister_coco_instances(\"my_dataset\", {}, \"json_annotation.json\", \"path/to/image/dir\")\n```\nwhich will take care of everything (including metadata) for you.\n\nIf your dataset is in COCO format with custom per-instance annotations,\nthe [load_coco_json](../modules/data.html#detectron2.data.datasets.load_coco_json) function can be used.\n\n\n### \"Metadata\" for Datasets\n\nEach dataset is associated with some metadata, accessible 
through\n`MetadataCatalog.get(dataset_name).some_metadata`.\nMetadata is a key-value mapping that contains primitive information that helps interpret what's in the dataset, e.g.,\nnames of classes, colors of classes, root of files, etc.\nThis information will be useful for augmentation, evaluation, visualization, logging, etc.\nThe structure of metadata depends on the what is needed from the corresponding downstream code.\n\n\nIf you register a new dataset through `DatasetCatalog.register`,\nyou may also want to add its corresponding metadata through\n`MetadataCatalog.get(dataset_name).set(name, value)`, to enable any features that need metadata.\nYou can do it like this (using the metadata field \"thing_classes\" as an example):\n\n```python\nfrom detectron2.data import MetadataCatalog\nMetadataCatalog.get(\"my_dataset\").thing_classes = [\"person\", \"dog\"]\n```\n\nHere is a list of metadata keys that are used by builtin features in detectron2.\nIf you add your own dataset without these metadata, some features may be\nunavailable to you:\n\n* `thing_classes` (list[str]): Used by all instance detection/segmentation tasks.\n  A list of names for each instance/thing category.\n  If you load a COCO format dataset, it will be automatically set by the function `load_coco_json`.\n\n* `thing_colors` (list[tuple(r, g, b)]): Pre-defined color (in [0, 255]) for each thing category.\n  Used for visualization. If not given, random colors are used.\n\n* `stuff_classes` (list[str]): Used by semantic and panoptic segmentation tasks.\n  A list of names for each stuff category.\n\n* `stuff_colors` (list[tuple(r, g, b)]): Pre-defined color (in [0, 255]) for each stuff category.\n  Used for visualization. If not given, random colors are used.\n\n* `keypoint_names` (list[str]): Used by keypoint localization. A list of names for each keypoint.\n\n* `keypoint_flip_map` (list[tuple[str]]): Used by the keypoint localization task. 
A list of pairs of names,\n  where each pair are the two keypoints that should be flipped if the image is\n  flipped during augmentation.\n* `keypoint_connection_rules`: list[tuple(str, str, (r, g, b))]. Each tuple specifies a pair of keypoints\n  that are connected and the color to use for the line between them when visualized.\n\nSome additional metadata that are specific to the evaluation of certain datasets (e.g. COCO):\n\n* `thing_dataset_id_to_contiguous_id` (dict[int->int]): Used by all instance detection/segmentation tasks in the COCO format.\n  A mapping from instance class ids in the dataset to contiguous ids in range [0, #class).\n  Will be automatically set by the function `load_coco_json`.\n\n* `stuff_dataset_id_to_contiguous_id` (dict[int->int]): Used when generating prediction json files for\n  semantic/panoptic segmentation.\n  A mapping from semantic segmentation class ids in the dataset\n  to contiguous ids in [0, num_categories). It is useful for evaluation only.\n\n* `json_file`: The COCO annotation json file. Used by COCO evaluation for COCO-format datasets.\n* `panoptic_root`, `panoptic_json`: Used by panoptic evaluation.\n* `evaluator_type`: Used by the builtin main training script to select\n   evaluator. 
No need to use it if you write your own main script.\n   You can just provide the [DatasetEvaluator](../modules/evaluation.html#detectron2.evaluation.DatasetEvaluator)\n   for your dataset directly in your main script.\n\nNOTE: For background on the concept of \"thing\" and \"stuff\", see\n[On Seeing Stuff: The Perception of Materials by Humans and Machines](http://persci.mit.edu/pub_pdfs/adelson_spie_01.pdf).\nIn detectron2, the term \"thing\" is used for instance-level tasks,\nand \"stuff\" is used for semantic segmentation tasks.\nBoth are used in panoptic segmentation.\n\n\n### Update the Config for New Datasets\n\nOnce you've registered the dataset, you can use the name of the dataset (e.g., \"my_dataset\" in\nexample above) in `DATASETS.{TRAIN,TEST}`.\nThere are other configs you might want to change to train or evaluate on new datasets:\n\n* `MODEL.ROI_HEADS.NUM_CLASSES` and `MODEL.RETINANET.NUM_CLASSES` are the number of thing classes\n\tfor R-CNN and RetinaNet models.\n* `MODEL.ROI_KEYPOINT_HEAD.NUM_KEYPOINTS` sets the number of keypoints for Keypoint R-CNN.\n  You'll also need to set [Keypoint OKS](http://cocodataset.org/#keypoints-eval)\n\twith `TEST.KEYPOINT_OKS_SIGMAS` for evaluation.\n* `MODEL.SEM_SEG_HEAD.NUM_CLASSES` sets the number of stuff classes for Semantic FPN & Panoptic FPN.\n* If you're training Fast R-CNN (with precomputed proposals), `DATASETS.PROPOSAL_FILES_{TRAIN,TEST}`\n\tneed to match the datasets. The format of proposal files is documented\n\t[here](../modules/data.html#detectron2.data.load_proposals_into_dataset).\n"
  },
  {
    "path": "docs/tutorials/deployment.md",
    "content": "# Deployment\n\n## Caffe2 Deployment\nWe currently support converting a detectron2 model to Caffe2 format through ONNX.\nThe converted Caffe2 model is able to run without detectron2 dependency in either Python or C++.\nIt has a runtime optimized for CPU & mobile inference, but not for GPU inference.\n\nCaffe2 conversion requires PyTorch ≥ 1.4 and ONNX ≥ 1.6.\n\n### Coverage\n\nIt supports 3 most common meta architectures: `GeneralizedRCNN`, `RetinaNet`, `PanopticFPN`,\nand almost all official models under these 3 meta architectures.\n\nUsers' custom extensions under these architectures (added through registration) are supported\nas long as they do not contain control flow or operators not available in Caffe2 (e.g. deformable convolution).\nFor example, custom backbones and heads are often supported out of the box.\n\n### Usage\n\nThe conversion APIs are documented at [the API documentation](../modules/export.html).\nWe provide a tool, `tools/caffe2_converter.py` as an example that uses\nthese APIs to convert a standard model.\n\nTo convert an official Mask R-CNN trained on COCO, first\n[prepare the COCO dataset](../../datasets/), then pick the model from [Model Zoo](../../MODEL_ZOO.md), and run:\n```\npython tools/caffe2_converter.py --config-file configs/COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x.yaml \\\n\t--output ./caffe2_model --run-eval \\\n\tMODEL.WEIGHTS detectron2://COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x/137849600/model_final_f10217.pkl \\\n\tMODEL.DEVICE cpu\n```\n\nNote that:\n1. The conversion needs valid sample inputs & weights to trace the model. That's why the script requires the dataset.\n\t You can modify the script to obtain sample inputs in other ways.\n2. GPU conversion is supported only with Pytorch's master. So we use `MODEL.DEVICE cpu`.\n3. 
With the `--run-eval` flag, it will evaluate the converted models to verify its accuracy.\n   The accuracy is typically slightly different (within 0.1 AP) from PyTorch due to\n\t numerical precisions between different implementations.\n\t It's recommended to always verify the accuracy in case your custom model is not supported by the\n\t conversion.\n\nThe converted model is available at the specified `caffe2_model/` directory. Two files `model.pb`\nand `model_init.pb` that contain network structure and network parameters are necessary for deployment.\nThese files can then be loaded in C++ or Python using Caffe2's APIs.\n\nThe script generates `model.svg` file which contains a visualization of the network.\nYou can also load `model.pb` to tools such as [netron](https://github.com/lutzroeder/netron) to visualize it.\n\n### Inputs & Outputs\n\nAll converted models take two input tensors:\n\"data\" which is an NCHW image, and \"im_info\" which is a Nx3 tensor of (height, width, unused legacy parameter) for\neach image (the shape of \"data\" might be larger than that in \"im_info\" due to padding).\n\nThe converted models do not contain post-processing operations that\ntransform raw layer outputs into formatted predictions.\nThe models only produce raw outputs from the final\nlayers that are not post-processed, because in actual deployment, an application often needs\nits custom lightweight post-processing (e.g. full-image masks for every detected object is often not necessary).\n\nDue to different inputs & outputs formats, the `Caffe2Model.__call__` method includes\npre/post-processing code in order to match the formats of original detectron2 models.\nThey can serve as a reference for pre/post-processing in actual deployment.\n"
  },
  {
    "path": "docs/tutorials/evaluation.md",
    "content": "\n# Evaluation\n\nEvaluation is a process that takes a number of inputs/outputs pairs and aggregate them.\nYou can always [use the model](models.html) directly and just parse its inputs/outputs manually to perform\nevaluation.\nAlternatively, evaluation is implemented in detectron2 using the [DatasetEvaluator](../modules/evaluation.html#detectron2.evaluation.DatasetEvaluator)\ninterface.\n\nDetectron2 includes a few `DatasetEvaluator` that computes metrics using standard dataset-specific\nAPIs (e.g., COCO, LVIS).\nYou can also implement your own `DatasetEvaluator` that performs some other jobs\nusing the inputs/outputs pairs.\nFor example, to count how many instances are detected on the validation set:\n\n```\nclass Counter(DatasetEvaluator):\n  def reset(self):\n    self.count = 0\n  def process(self, inputs, outputs):\n    for output in outputs:\n      self.count += len(output[\"instances\"])\n  def evaluate(self):\n    # save self.count somewhere, or print it, or return it.\n    return {\"count\": self.count}\n```\n\nOnce you have some `DatasetEvaluator`, you can run it with\n[inference_on_dataset](../modules/evaluation.html#detectron2.evaluation.inference_on_dataset).\nFor example,\n\n```python\nval_results = inference_on_dataset(\n    model,\n    val_data_loader,\n    DatasetEvaluators([COCOEvaluator(...), Counter()]))\n```\nCompared to running the evaluation manually using the model, the benefit of this function is that\nyou can merge evaluators together using [DatasetEvaluators](../modules/evaluation.html#detectron2.evaluation.DatasetEvaluators).\nIn this way you can run all evaluations without having to go through the dataset multiple times.\n\nThe `inference_on_dataset` function also provides accurate speed benchmarks for the\ngiven model and dataset.\n"
  },
  {
    "path": "docs/tutorials/extend.md",
    "content": "# Extend Detectron2's Defaults\n\n__Research is about doing things in new ways__.\nThis brings a tension in how to create abstractions in code,\nwhich is a challenge for any research engineering project of a significant size:\n\n1. On one hand, it needs to have very thin abstractions to allow for the possibility of doing\n   everything in new ways. It should be reasonably easy to break existing\n   abstractions and replace them with new ones.\n\n2. On the other hand, such a project also needs reasonably high-level\n   abstractions, so that users can easily do things in standard ways,\n   without worrying too much about the details that only certain researchers care about.\n\nIn detectron2, there are two types of interfaces that address this tension together:\n\n1. Functions and classes that take only a \"config\" argument (optionally with a minimal\n   set of extra arguments in cases of mature interfaces).\n\n   Such functions and classes implement\n   the \"standard default\" behavior: it will read what it needs from the\n   config and do the \"standard\" thing.\n   Users only need to load a standard config and pass it around, without having to worry about\n   which arguments are used and what they all mean.\n\n2. Functions and classes that have well-defined explicit arguments.\n\n   Each of these is a small building block of the entire system.\n   They require users' effort to stitch together, but can be stitched together in more flexible ways.\n   When you need to implement something different from the \"standard defaults\"\n   included in detectron2, these well-defined components can be reused.\n\n\nIf you only need the standard behavior, the [Beginner's Tutorial](getting_started.html)\nshould suffice. If you need to extend detectron2 to your own needs,\nsee the following tutorials for more details:\n\n* Detectron2 includes a few standard datasets, but you can use custom ones. 
See\n  [Use Custom Datasets](datasets.html).\n* Detectron2 contains the standard logic that creates a data loader from a\n  dataset, but you can write your own as well. See [Use Custom Data Loaders](data_loading.html).\n* Detectron2 implements many standard detection models, and provides ways for you\n  to overwrite its behaviors. See [Use Models](models.html) and [Write Models](write-models.html).\n* Detectron2 provides a default training loop that is good for common training tasks.\n  You can customize it with hooks, or write your own loop instead. See [training](training.html).\n"
  },
  {
    "path": "docs/tutorials/index.rst",
    "content": "Tutorials\n======================================\n\n.. toctree::\n   :maxdepth: 2\n\n   install\n   getting_started\n   extend\n   datasets\n   data_loading\n   models\n   write-models\n   training\n   evaluation\n   configs\n   deployment\n"
  },
  {
    "path": "docs/tutorials/models.md",
    "content": "# Use Models\n\nModels (and their sub-models) in detectron2 are built by\nfunctions such as `build_model`, `build_backbone`, `build_roi_heads`:\n```python\nfrom detectron2.modeling import build_model\nmodel = build_model(cfg)  # returns a torch.nn.Module\n```\n\nNote that `build_model` only builds the model structure, and fill it with random parameters.\nTo load an existing checkpoint to the model, use\n`DetectionCheckpointer(model).load(file_path)`.\nDetectron2 recognizes models in pytorch's `.pth` format, as well as the `.pkl` files\nin our model zoo.\n\nYou can use a model by just `outputs = model(inputs)`.\nNext, we explain the inputs/outputs format used by the builtin models in detectron2.\n\n[DefaultPredictor](../modules/engine.html#detectron2.engine.defaults.DefaultPredictor)\nis a wrapper around model that provides the default behavior for regular inference. It includes model loading as\nwell as preprocessing, and operates on single image rather than batches.\n\n\n### Model Input Format\n\nAll builtin models take a `list[dict]` as the inputs. Each dict\ncorresponds to information about one image.\n\nThe dict may contain the following keys:\n\n* \"image\": `Tensor` in (C, H, W) format. 
The meaning of channels are defined by `cfg.INPUT.FORMAT`.\n* \"instances\": an [Instances](../modules/structures.html#detectron2.structures.Instances)\n  object, with the following fields:\n  + \"gt_boxes\": a [Boxes](../modules/structures.html#detectron2.structures.Boxes) object storing N boxes, one for each instance.\n  + \"gt_classes\": `Tensor` of long type, a vector of N labels, in range [0, num_categories).\n  + \"gt_masks\": a [PolygonMasks](../modules/structures.html#detectron2.structures.PolygonMasks)\n    or [BitMasks](../modules/structures.html#detectron2.structures.BitMasks) object storing N masks, one for each instance.\n  + \"gt_keypoints\": a [Keypoints](../modules/structures.html#detectron2.structures.Keypoints)\n    object storing N keypoint sets, one for each instance.\n* \"proposals\": an [Instances](../modules/structures.html#detectron2.structures.Instances)\n  object used only in Fast R-CNN style models, with the following fields:\n  + \"proposal_boxes\": a [Boxes](../modules/structures.html#detectron2.structures.Boxes) object storing P proposal boxes.\n  + \"objectness_logits\": `Tensor`, a vector of P scores, one for each proposal.\n* \"height\", \"width\": the **desired** output height and width, which is not necessarily the same\n  as the height or width of the `image` input field.\n  For example, the `image` input field might be a resized image,\n  but you may want the outputs to be in **original** resolution.\n\n  If provided, the model will produce output in this resolution,\n  rather than in the resolution of the `image` as input into the model. This is more efficient and accurate.\n* \"sem_seg\": `Tensor[int]` in (H, W) format. 
The semantic segmentation ground truth.\n  Values represent category labels starting from 0.\n\n\n#### How it connects to data loader:\n\nThe output of the default [DatasetMapper]( ../modules/data.html#detectron2.data.DatasetMapper) is a dict\nthat follows the above format.\nAfter the data loader performs batching, it becomes `list[dict]` which the builtin models support.\n\n\n### Model Output Format\n\nWhen in training mode, the builtin models output a `dict[str->ScalarTensor]` with all the losses.\n\nWhen in inference mode, the builtin models output a `list[dict]`, one dict for each image.\nBased on the tasks the model is doing, each dict may contain the following fields:\n\n* \"instances\": [Instances](../modules/structures.html#detectron2.structures.Instances)\n  object with the following fields:\n  * \"pred_boxes\": [Boxes](../modules/structures.html#detectron2.structures.Boxes) object storing N boxes, one for each detected instance.\n  * \"scores\": `Tensor`, a vector of N scores.\n  * \"pred_classes\": `Tensor`, a vector of N labels in range [0, num_categories).\n  + \"pred_masks\": a `Tensor` of shape (N, H, W), masks for each detected instance.\n  + \"pred_keypoints\": a `Tensor` of shape (N, num_keypoint, 3).\n    Each row in the last dimension is (x, y, score). Scores are larger than 0.\n* \"sem_seg\": `Tensor` of (num_categories, H, W), the semantic segmentation prediction.\n* \"proposals\": [Instances](../modules/structures.html#detectron2.structures.Instances)\n  object with the following fields:\n  * \"proposal_boxes\": [Boxes](../modules/structures.html#detectron2.structures.Boxes)\n    object storing N boxes.\n  * \"objectness_logits\": a torch vector of N scores.\n* \"panoptic_seg\": A tuple of `(Tensor, list[dict])`. The tensor has shape (H, W), where each element\n  represent the segment id of the pixel. 
Each dict describes one segment id and has the following fields:\n  * \"id\": the segment id\n  * \"isthing\": whether the segment is a thing or stuff\n  * \"category_id\": the category id of this segment. It represents the thing\n       class id when `isthing==True`, and the stuff class id otherwise.\n\n\n### How to use a model in your code:\n\nConstruct your own `list[dict]` as inputs, with the necessary keys. Then call `outputs = model(inputs)`.\nFor example, in order to do inference, provide dicts with \"image\", and optionally \"height\" and \"width\".\n\nNote that when in training mode, all models are required to be used under an `EventStorage`.\nThe training statistics will be put into the storage:\n```python\nfrom detectron2.utils.events import EventStorage\nwith EventStorage() as storage:\n  losses = model(inputs)\n```\n\nAnother small thing to remember: detectron2 models do not support `model.to(device)` or `model.cpu()`.\nThe device is defined in `cfg.MODEL.DEVICE` and cannot be changed afterwards.\n\n\n### Partially execute a model:\n\nSometimes you may want to obtain an intermediate tensor inside a model.\nSince there are typically hundreds of intermediate tensors, there isn't an API that provides you\nthe intermediate result you need.\nYou have the following options:\n\n1. Write a (sub)model. Following the [tutorial](write-models.html), you can\n   rewrite a model component (e.g. a head of a model), such that it\n   does the same thing as the existing component, but returns the output\n   you need.\n2. Partially execute a model. You can create the model as usual,\n   but use custom code to execute it instead of its `forward()`. For example,\n   the following code obtains mask features before mask head.\n\n```python\nimages = ImageList(...)  
# preprocessed input tensor\nmodel = build_model(cfg)\nfeatures = model.backbone(images.tensor)\nproposals, _ = model.proposal_generator(images, features)\ninstances = model.roi_heads._forward_box(\n  [features[k] for k in model.roi_heads.in_features],\n  proposals\n)\nmask_features = model.roi_heads.mask_pooler(features, [x.pred_boxes for x in instances])\n```\n\nNote that both options require you to read the existing forward code to understand\nhow to write code to obtain the outputs you need.\n"
  },
  {
    "path": "docs/tutorials/training.md",
    "content": "# Training\n\nFrom the previous tutorials, you may now have a custom model and data loader.\n\nYou are free to create your own optimizer, and write the training logic: it's\nusually easy with PyTorch, and allows researchers to see the entire training\nlogic more clearly.\nOne such example is provided in [tools/plain_train_net.py](../../tools/plain_train_net.py).\n\nWe also provide a standardized \"trainer\" abstraction with a\n[minimal hook system](../modules/engine.html#detectron2.engine.HookBase)\nthat helps simplify the standard types of training.\n\nYou can use\n[SimpleTrainer().train()](../modules/engine.html#detectron2.engine.SimpleTrainer)\nwhich does single-cost single-optimizer single-data-source training.\nOr use [DefaultTrainer().train()](../modules/engine.html#detectron2.engine.defaults.DefaultTrainer)\nwhich includes more standard behavior that one might want to opt in.\nThis also means that it's less likely to support some non-standard behavior\nyou might want during research.\n\n\n### Logging of Metrics\n\nDuring training, metrics are logged with a centralized [EventStorage](../modules/utils.html#detectron2.utils.events.EventStorage).\nYou can use the following code to access it and log metrics to it:\n```\nfrom detectron2.utils.events import get_event_storage\n\n# inside the model:\nif self.training:\n  value = # compute the value from inputs\n  storage = get_event_storage()\n  storage.put_scalar(\"some_accuracy\", value)\n```\n\nRefer to its documentation for more details.\n"
  },
  {
    "path": "docs/tutorials/write-models.md",
    "content": "# Write Models\n\nIf you are trying to do something completely new, you may wish to implement\na model entirely from scratch within detectron2. However, in many situations you may\nbe interested in modifying or extending some components of an existing model.\nTherefore, we also provide a registration mechanism that lets you override the\nbehavior of certain internal components of standard models.\n\nFor example, to add a new backbone, import this code in your code:\n```python\nfrom detectron2.modeling import BACKBONE_REGISTRY, Backbone, ShapeSpec\n\n@BACKBONE_REGISTRY.register()\nclass ToyBackBone(Backbone):\n  def __init__(self, cfg, input_shape):\n    # create your own backbone\n    self.conv1 = nn.Conv2d(3, 64, kernel_size=7, stride=16, padding=3)\n\n  def forward(self, image):\n    return {\"conv1\": self.conv1(image)}\n\n  def output_shape(self):\n    return {\"conv1\": ShapeSpec(channels=64, stride=16)}\n```\nThen, you can use `cfg.MODEL.BACKBONE.NAME = 'ToyBackBone'` in your config object.\n`build_model(cfg)` will then call your `ToyBackBone` instead.\n\nAs another example, to add new abilities to the ROI heads in the Generalized R-CNN meta-architecture,\nyou can implement a new\n[ROIHeads](../modules/modeling.html#detectron2.modeling.ROIHeads) subclass and put it in the `ROI_HEADS_REGISTRY`.\nSee [densepose in detectron2](../../projects/DensePose)\nfor an example that implements new ROIHeads.\nAnd [projects/](../../projects/)\ncontains examples that implement a few different architectures.\n\nA complete list of registries can be found in [API documentation](../modules/modeling.html#model-registries).\nYou can register components in these registries to customize different parts of a model, or the\nentire model.\n"
  },
  {
    "path": "projects/LISA/LISA/LISA_meta_arch.py",
    "content": "#  Copyright (c) Tianyu Wang. All Rights Reserved.\nimport logging\nimport torch\nfrom torch import nn\n\nfrom detectron2.structures import ImageList\nfrom detectron2.utils.logger import log_first_n\n\nfrom detectron2.modeling.backbone import build_backbone\nfrom detectron2.modeling.postprocessing import detector_postprocess, matchor, combine_association\nfrom .LISA_rpn  import build_proposal_generator\nfrom detectron2.modeling.roi_heads import build_roi_heads\nfrom detectron2.modeling.meta_arch.build import META_ARCH_REGISTRY\nfrom detectron2.modeling.meta_arch.rcnn import __all__, GeneralizedRCNN\nfrom detectron2.utils.registry import Registry\n__all__.append(\"LISARCNN\")\n\n@META_ARCH_REGISTRY.register()\nclass LISARCNN(GeneralizedRCNN):\n\n    def __init__(self,cfg):\n        super(LISARCNN, self).__init__(cfg)\n        self.association_proposal_generator = build_proposal_generator(cfg, self.backbone.output_shape(), shadow_object_part= False)\n        self.proposal_generator = build_proposal_generator(cfg, self.backbone.output_shape(), shadow_object_part= True)\n        self.to(self.device)\n    def forward(self, batched_inputs):\n\n        if not self.training:\n            return self.inference(batched_inputs)\n\n        images = self.preprocess_image(batched_inputs)\n\n        if \"instances\" in batched_inputs[0]:\n            gt_instances = [x[\"instances\"].to(self.device) for x in batched_inputs]\n        if \"associations\" in batched_inputs[0]:\n            gt_associations = [x[\"associations\"].to(self.device) for x in batched_inputs]\n        elif \"targets\" in batched_inputs[0]:\n            log_first_n(\n                logging.WARN, \"'targets' in the model inputs is now renamed to 'instances'!\", n=10\n            )\n            gt_instances = [x[\"targets\"].to(self.device) for x in batched_inputs]\n        else:\n            gt_instances = None\n\n        features = self.backbone(images.tensor)\n\n        if 
self.association_proposal_generator:\n            association_proposals, association_losses, pre_features, pre_proposals = self.association_proposal_generator(images, features, gt_associations)\n        \n        if self.proposal_generator:\n            proposals, proposal_losses = self.proposal_generator(images,features,gt_instances,pre_proposals)\n\n        _, detector_losses = self.roi_heads(images, features, association_proposals, proposals, gt_associations, gt_instances)\n\n        losses = {}\n        losses.update(detector_losses)\n        losses.update(proposal_losses)\n        losses.update(association_losses)\n        return losses\n\n    def inference(self, batched_inputs, detected_instances=None, do_postprocess=True):\n        assert not self.training\n        images = self.preprocess_image(batched_inputs)\n        features = self.backbone(images.tensor)\n\n        if detected_instances is None:\n            if self.association_proposal_generator:\n                association_proposals, _, pre_features, pre_proposals = self.association_proposal_generator(images, features)\n            else:\n                assert \"associations\" in batched_inputs[0]\n                proposals = [x[\"associations\"].to(self.device) for x in batched_inputs]\n            if self.proposal_generator:\n                # concat_features = {}\n                # for pre_features,(k,v) in zip(pre_features,features.items()):\n                #     concat_features[k] = torch.cat([v,pre_features],1)\n                proposals, _ = self.proposal_generator(images,features,pre_proposals = pre_proposals)\n            else:\n                assert \"proposals\" in batched_inputs[0]\n                proposals = [x[\"proposals\"].to(self.device) for x in batched_inputs]\n\n            results,associations, _ = self.roi_heads(images, features, association_proposals, proposals, None, None)\n        \n        if do_postprocess:\n            processed_results = []\n            for 
results_per_image, input_per_image, image_size in zip(\n                results, batched_inputs, images.image_sizes\n            ):\n                height = input_per_image.get(\"height\", image_size[0])\n                width = input_per_image.get(\"width\", image_size[1])\n                r = detector_postprocess(results_per_image, height, width)\n                processed_results.append({\"instances\": r.to(torch.device('cpu'))})\n\n            processed_associations = []\n            for results_per_image, input_per_image, image_size in zip(\n                associations, batched_inputs, images.image_sizes\n            ):\n                height = input_per_image.get(\"height\", image_size[0])\n                width = input_per_image.get(\"width\", image_size[1])\n                r = detector_postprocess(results_per_image, height, width)\n                processed_associations.append({\"instances\": r.to(torch.device('cpu'))})\n            \n            for instances, associations in zip(processed_results, processed_associations):\n                _instances, _associations = matchor(instances[\"instances\"],associations[\"instances\"])\n                _associations,_instances = combine_association(_instances,_associations)\n                associations[\"instances\"] = _associations\n                instances[\"instances\"] = _instances\n                    \n\n            return processed_results,processed_associations\n        else:\n            return results,associations\n\n\n\n\n"
  },
  {
    "path": "projects/LISA/LISA/LISA_rcnn.py",
    "content": "#  Copyright (c) Tianyu Wang. All Rights Reserved.\nimport torch\nfrom torch import nn\nfrom torch.autograd.function import Function\n\nfrom detectron2.layers import ShapeSpec\nfrom detectron2.structures import Boxes, Instances, RotatedBoxes, pairwise_iou, pairwise_iou_rotated\nfrom detectron2.utils.events import get_event_storage\nfrom detectron2.utils.registry import Registry\n\nfrom detectron2.modeling.matcher import Matcher\nfrom detectron2.modeling.poolers import ROIPooler\nfrom detectron2.modeling import ROI_HEADS_REGISTRY, StandardROIHeads\nfrom detectron2.modeling.roi_heads.roi_heads import Res5ROIHeads\nfrom detectron2.modeling.roi_heads.box_head import build_box_head\nfrom detectron2.modeling.box_regression import Box2BoxTransform\nfrom detectron2.modeling.roi_heads.fast_rcnn import FastRCNNOutputLayers, FastRCNNOutputs, fast_rcnn_inference,LightdirectionOutputLayer\nfrom detectron2.modeling.proposal_generator.proposal_utils import add_ground_truth_to_proposals\nfrom detectron2.modeling.sampling import subsample_labels\nimport numpy as np\n\n# class\n@ROI_HEADS_REGISTRY.register()\nclass LISAROIHeads(StandardROIHeads):\n    pass\n    \"\"\"\n    This class is used by association RPN.\n    \"\"\"\n\n    # def __init__(self, cfg, input_shape: Dict[str, ShapeSpec]):\n    #     super(associationROIHeads, self).__init__(cfg,input_shape)\n    #     pass\n        \n\n    def _init_box_head(self,cfg):\n        \n        pooler_resolution        = cfg.MODEL.ROI_BOX_HEAD.POOLER_RESOLUTION\n        pooler_scales            = tuple(1.0 / self.feature_strides[k] for k in self.in_features)\n        sampling_ratio           = cfg.MODEL.ROI_BOX_HEAD.POOLER_SAMPLING_RATIO\n        pooler_type              = cfg.MODEL.ROI_BOX_HEAD.POOLER_TYPE\n\n        in_channels = [self.feature_channels[f] for f in self.in_features]\n        # Check all channel counts are equal\n        assert len(set(in_channels)) == 1, in_channels\n        in_channels = 
in_channels[0]\n\n        self.box_pooler = ROIPooler(\n            output_size=pooler_resolution,\n            scales=pooler_scales,\n            sampling_ratio=sampling_ratio,\n            pooler_type=pooler_type,\n        )\n\n        pooled_shape = ShapeSpec(\n            channels=in_channels, width=pooler_resolution, height=pooler_resolution\n        )\n        self.association_box_head = build_box_head(cfg,pooled_shape)\n        self.association_box_predictor = FastRCNNOutputLayers(\n                    self.association_box_head.output_size, self.num_classes -1 , cls_agnostic_bbox_reg = False\n                )\n        self.box_head = build_box_head(cfg,pooled_shape)\n\n        self.box_predictor = FastRCNNOutputLayers(\n                    self.box_head.output_size, self.num_classes, cls_agnostic_bbox_reg = False\n                )\n        self.light_direction_head = build_box_head(cfg,pooled_shape)\n\n        self.light_direction_predictor = LightdirectionOutputLayer(\n            self.light_direction_head.output_size)\n\n    def forward(self,images, features, association_proposals, proposals, association_targets=None, targets=None):\n        del images\n        if self.training:\n            association_proposals = self.label_and_sample_proposals(association_proposals, association_targets, True)\n            proposals = self.label_and_sample_proposals(proposals,targets)\n        del targets\n        del association_targets\n\n        features_list = [features[f] for f in self.in_features]\n        if self.training:\n            losses = self._forward_association_box(features_list,association_proposals)\n            losses.update(self._forward_box(features_list, proposals))\n            # During training the proposals used by the box head are\n            # used by the mask, keypoint (and densepose) heads.\n            losses.update(self._forward_mask(features_list, proposals))\n            losses.update(self._forward_keypoint(features_list, 
association_proposals))\n            return proposals, losses\n        else:\n            \n            pred_instances = self._forward_box(features_list, proposals)\n            pred_instances = self.forward_with_given_boxes(features, pred_instances)\n            pred_associations = self._forward_association_box(features_list,association_proposals)\n            # During inference cascaded prediction is used: the mask and keypoints heads are only\n            # applied to the top scoring box detections.\n            \n            # pred_associations = self.forward_with_given_association_boxes(features,pred_associations)\n            return pred_instances, pred_associations ,{}\n    \n    \n\n\n    def _forward_association_box(self, features, association_proposals):\n        box_features = self.box_pooler(features, [x.proposal_boxes for x in association_proposals])\n        light_features = self.light_direction_head(box_features)\n        box_features = self.association_box_head(box_features)\n        pred_light_direction = self.light_direction_predictor(light_features)\n        pred_class_logits, pred_proposal_deltas = self.association_box_predictor(box_features)\n        del box_features, light_features\n\n        outputs = FastRCNNOutputs(\n            self.box2box_transform,\n            pred_class_logits,\n            pred_proposal_deltas,\n            association_proposals,\n            self.smooth_l1_beta,\n            pred_light_direction\n        )\n        if self.training:\n            return {k+'_asso': v for k, v in outputs.losses().items()}\n        else:\n            pred_instances, _ = outputs.inference(\n                self.test_score_thresh, self.test_nms_thresh, self.test_detections_per_img\n            )\n            # print(pred_instances)\n            return pred_instances\n        \n    @torch.no_grad()\n    def label_and_sample_proposals(self, proposals, targets, isassociation = False):\n        \"\"\"\n        Prepare some proposals to be 
used to train the ROI heads.\n        It performs box matching between `proposals` and `targets`, and assigns\n        training labels to the proposals.\n        It returns `self.batch_size_per_image` random samples from proposals and groundtruth boxes,\n        with a fraction of positives that is no larger than `self.positive_sample_fraction.\n\n        Args:\n            See :meth:`ROIHeads.forward`\n\n        Returns:\n            list[Instances]:\n                length `N` list of `Instances`s containing the proposals\n                sampled for training. Each `Instances` has the following fields:\n                - proposal_boxes: the proposal boxes\n                - gt_boxes: the ground-truth box that the proposal is assigned to\n                  (this is only meaningful if the proposal has a label > 0; if label = 0\n                   then the ground-truth box is random)\n                Other fields such as \"gt_classes\", \"gt_masks\", that's included in `targets`.\n        \"\"\"\n        gt_boxes = [x.gt_boxes for x in targets]\n        if targets[0].has('gt_light'):\n            gt_light = [x.gt_light for x in targets]\n        # Augment proposals with ground-truth boxes.\n        # In the case of learned proposals (e.g., RPN), when training starts\n        # the proposals will be low quality due to random initialization.\n        # It's possible that none of these initial\n        # proposals have high enough overlap with the gt objects to be used\n        # as positive examples for the second stage components (box head,\n        # cls head, mask head). Adding the gt boxes to the set of proposals\n        # ensures that the second stage components will have some positive\n        # examples from the start of training. 
For RPN, this augmentation improves\n        # convergence and empirically improves box AP on COCO by about 0.5\n        # points (under one tested configuration).\n            if self.proposal_append_gt:\n                proposals = add_ground_truth_to_proposals(gt_boxes, proposals,gt_light)\n        else:\n            gt_light = None\n            if self.proposal_append_gt:\n                proposals = add_ground_truth_to_proposals(gt_boxes,proposals)\n\n\n        proposals_with_gt = []\n\n        num_fg_samples = []\n        num_bg_samples = []\n        for proposals_per_image, targets_per_image in zip(proposals, targets):\n            has_gt = len(targets_per_image) > 0\n            match_quality_matrix = pairwise_iou(\n                targets_per_image.gt_boxes, proposals_per_image.proposal_boxes\n            )\n            matched_idxs, proposals_labels = self.proposal_matcher(match_quality_matrix)\n            if isassociation:\n                num_classes = self.num_classes - 1\n            else:\n                num_classes = self.num_classes\n            # Get the corresponding GT for each proposal\n            if has_gt:\n                gt_classes = targets_per_image.gt_classes[matched_idxs]\n                # Label unmatched proposals (0 label from matcher) as background (label=num_classes)\n                \n                gt_classes[proposals_labels == 0] = num_classes\n                # Label ignore proposals (-1 label)\n                gt_classes[proposals_labels == -1] = -1\n            else:\n                gt_classes = torch.zeros_like(matched_idxs) + num_classes\n\n            sampled_fg_inds, sampled_bg_inds = subsample_labels(\n                gt_classes,\n                self.batch_size_per_image,\n                self.positive_sample_fraction,\n                num_classes,\n            )\n\n            sampled_inds = torch.cat([sampled_fg_inds, sampled_bg_inds], dim=0)\n\n            proposals_per_image = 
proposals_per_image[sampled_inds]\n            proposals_per_image.gt_classes = gt_classes[sampled_inds]\n\n            # We index all the attributes of targets that start with \"gt_\"\n            # and have not been added to proposals yet (=\"gt_classes\").\n            if has_gt:\n                sampled_targets = matched_idxs[sampled_inds]\n                # NOTE: here the indexing waste some compute, because heads\n                # like masks, keypoints, etc, will filter the proposals again,\n                # (by foreground/background, or number of keypoints in the image, etc)\n                # so we essentially index the data twice.\n                for (trg_name, trg_value) in targets_per_image.get_fields().items():\n                    if trg_name.startswith(\"gt_\") and not proposals_per_image.has(trg_name):\n                        proposals_per_image.set(trg_name, trg_value[sampled_targets])\n            else:\n                gt_boxes = Boxes(\n                    targets_per_image.gt_boxes.tensor.new_zeros((len(sampled_inds), 4))\n                )\n                proposals_per_image.gt_boxes = gt_boxes\n                if gt_light != None:\n                    gt_light = Boxes(\n                        targets_per_image.gt_light.tensor.new_zeros((len(sampled_inds), 4))\n                    )\n                    proposals_per_image.gt_light = gt_light\n\n            num_fg_samples.append(sampled_fg_inds.numel())\n            num_bg_samples.append(sampled_bg_inds.numel())\n            proposals_with_gt.append(proposals_per_image)\n\n        # Log the number of fg/bg samples that are selected for training ROI heads\n        storage = get_event_storage()\n        storage.put_scalar(\"roi_head/num_fg_samples\", np.mean(num_fg_samples))\n        storage.put_scalar(\"roi_head/num_bg_samples\", np.mean(num_bg_samples))\n\n        return proposals_with_gt\n"
  },
  {
    "path": "projects/LISA/LISA/LISA_rpn.py",
    "content": "#  Copyright (c) Tianyu Wang. All Rights Reserved.\nfrom typing import Dict, List\nimport torch\nimport torch.nn.functional as F\nfrom torch import nn\n\nfrom detectron2.layers import ShapeSpec\nfrom detectron2.utils.registry import Registry\n\nfrom detectron2.modeling.anchor_generator import build_anchor_generator\nfrom detectron2.modeling.box_regression import Box2BoxTransform\nfrom detectron2.modeling.matcher import Matcher\nfrom detectron2.modeling import PROPOSAL_GENERATOR_REGISTRY, RPN_HEAD_REGISTRY\nfrom detectron2.modeling.proposal_generator.rpn_outputs import RPNOutputs, find_top_rpn_proposals\nfrom detectron2.modeling.proposal_generator.rpn import StandardRPNHead, RPN\nfrom detectron2.structures import BoxMode,Boxes\n\n\"\"\"\nRegistry for LISA RPN heads, which take CNN feature maps and perform\nobjectness classification and bounding box regression for anchors.\n\"\"\"\n\n# NOTE: `cfg.MODEL.RPN.HEAD_NAME` should be \"LISARPNHead\".\n\ndef build_rpn_head(cfg, input_shape,shadow_object_part=False):\n    \"\"\"\n    Build an RPN head defined by `cfg.MODEL.RPN.HEAD_NAME`.\n    \"\"\"\n    name = cfg.MODEL.RPN.HEAD_NAME\n    return RPN_HEAD_REGISTRY.get(name)(cfg, input_shape,shadow_object_part)\n\n\n@RPN_HEAD_REGISTRY.register()\nclass LISARPNHead(StandardRPNHead):\n    def __init__(self, cfg, input_shape: List[ShapeSpec], shadow_object_part= False):\n        super(LISARPNHead, self).__init__(cfg,input_shape)\n        self.shadow_object_part = shadow_object_part\n        if self.shadow_object_part:\n            in_channels = [s.channels for s in input_shape]\n            assert len(set(in_channels)) == 1, \"Each level must have the same channel!\"\n            in_channels = in_channels[0]\n            self.conv = nn.Conv2d(in_channels , in_channels, kernel_size=3, stride=1, padding=1)\n            for l in [self.conv]:\n                nn.init.normal_(l.weight, std=0.01)\n                nn.init.constant_(l.bias, 0)\n\n    def forward(self, 
features):\n        \"\"\"\n        Args:\n            features (list[Tensor]): list of feature maps\n        \"\"\"\n        \n        pred_objectness_logits = []\n        pred_anchor_deltas = []\n        if self.shadow_object_part == False:\n            pre_features = []\n        for i,x in enumerate(features):\n\n            t = F.relu(self.conv(x))\n            \n            pred_objectness_logits.append(self.objectness_logits(t))\n            pred_anchor_deltas.append(self.anchor_deltas(t))\n        \n        if self.shadow_object_part == False:\n            return pred_objectness_logits, pred_anchor_deltas, None\n        else:\n            return pred_objectness_logits, pred_anchor_deltas\n\n\ndef build_proposal_generator(cfg, input_shape, **args):\n    \"\"\"\n    Build a proposal generator from `cfg.MODEL.PROPOSAL_GENERATOR.NAME`.\n    The name can be \"PrecomputedProposals\" to use no proposal generator.\n    \"\"\"\n    name = cfg.MODEL.PROPOSAL_GENERATOR.NAME\n    if name == \"PrecomputedProposals\":\n        return None\n\n    return PROPOSAL_GENERATOR_REGISTRY.get(name)(cfg, input_shape,**args)\n    \n@PROPOSAL_GENERATOR_REGISTRY.register()\nclass LISARPN(RPN):\n\n    def __init__(self, cfg, input_shape: Dict[str, ShapeSpec], shadow_object_part= False):\n        super(LISARPN, self).__init__(cfg, input_shape)\n        self.shadow_object_part = shadow_object_part\n        if self.shadow_object_part:\n            self.rpn_head = build_rpn_head(cfg, [input_shape[f] for f in self.in_features], self.shadow_object_part)\n    \n    def forward(self, images, features, gt_instances=None, pre_proposals=None):\n        gt_boxes = [x.gt_boxes for x in gt_instances] if gt_instances is not None else None\n        del gt_instances\n\n        if self.shadow_object_part == False:\n            features = [features[f] for f in self.in_features]\n            pred_objectness_logits, pred_anchor_deltas, pre_features = self.rpn_head(features)\n            anchors = 
self.anchor_generator(features)\n        else:\n            features = [features[f] for f in self.in_features]\n            pred_objectness_logits, pred_anchor_deltas = self.rpn_head(features)\n            anchors = self.anchor_generator(features)\n            assert len(anchors[0]) == len(pre_proposals), \"number of pre_proposals {} and pre_anchors {} should be same.\".format(len(anchors[0]),len(pre_proposals))\n\n        outputs = RPNOutputs(\n            self.box2box_transform,\n            self.anchor_matcher,\n            self.batch_size_per_image,\n            self.positive_fraction,\n            images,\n            pred_objectness_logits,\n            pred_anchor_deltas,\n            anchors,\n            self.boundary_threshold,\n            gt_boxes,\n            self.smooth_l1_beta,\n        )\n        \n        if self.training:\n            if self.shadow_object_part == False:\n                losses = {k+'_asso': v * self.loss_weight for k, v in outputs.losses().items()}\n            else:\n                losses = {k: v * self.loss_weight for k, v in outputs.losses().items()}\n        else:\n            losses = {}\n\n        with torch.no_grad():\n\n            \n            pre_proposals = outputs.predict_proposals()\n            # Find the top proposals by applying NMS and removing boxes that\n            # are too small. The proposals are treated as fixed for approximate\n            # joint training with roi heads. This approach ignores the derivative\n            # w.r.t. 
the proposal boxes’ coordinates that are also network\n            # responses, so is approximate.\n            proposals = find_top_rpn_proposals(\n                pre_proposals,\n                outputs.predict_objectness_logits(),\n                images,\n                self.nms_thresh,\n                self.pre_nms_topk[self.training],\n                self.post_nms_topk[self.training],\n                self.min_box_side_len,\n                self.training,\n            )\n            # For RPN-only models, the proposals are the final output and we return them in\n            # high-to-low confidence order.\n            # For end-to-end models, the RPN proposals are an intermediate state\n            # and this sorting is actually not needed. But the cost is negligible.\n            inds = [p.objectness_logits.sort(descending=True)[1] for p in proposals]\n            proposals = [p[ind] for p, ind in zip(proposals, inds)]\n        if self.shadow_object_part == False:\n            return proposals, losses, pre_features, pre_proposals\n        else:\n            return proposals, losses\n    \n\n\n"
  },
  {
    "path": "projects/LISA/LISA/__init__.py",
    "content": "#  Copyright (c) Tianyu Wang. All Rights Reserved.\nfrom .config import add_lisa_config\nfrom .LISA_meta_arch import LISARCNN\nfrom .LISA_rcnn import LISAROIHeads\nfrom .LISA_rpn import LISARPN\n"
  },
  {
    "path": "projects/LISA/LISA/config.py",
    "content": "# -*- coding: utf-8 -*-\n\n\nfrom detectron2.config import CfgNode as CN\n\n\ndef add_lisa_config(cfg):\n    \"\"\"\n    Add config for RelationNet.\n    \"\"\"\n    _C = cfg\n\n    _C.MODEL.LISA = CN()\n\n"
  },
  {
    "path": "projects/LISA/SOAP.py",
    "content": "import numpy as np\nimport pysobatools.sobaeval as SOAPEval\nimport pysobatools.cocoeval as Eval\nfrom pysobatools.soba import SOBA\nimport json\n\n\nsoba = SOBA('../../../SOBA/annotations/SOBA_val.json')\nresults =  json.load(open('./output_light/inference/soba_instances_results.json'))\nassociation = json.load(open('./output_light/inference/soba_association_results.json'))\n\ninstance_soba = soba.loadRes(results)\nassociation_soba = soba.loadRes_asso(association)\n\nsobaeval= SOAPEval.SOAPeval(soba,instance_soba,association_soba)\nprint('segmentation:')\n\nsobaeval.evaluate_asso()\n\nsobaeval.accumulate()\nsobaeval.summarize()\nprint('bbox:')\nsobaeval= SOAPEval.SOAPeval(soba,instance_soba,association_soba)\nsobaeval.params.iouType = 'bbox'\nsobaeval.evaluate_asso()\n\nsobaeval.accumulate()\nsobaeval.summarize()\n\nprint(\"--------------\")\nsobaeval= Eval.COCOeval(soba,association_soba)\nsobaeval.evaluate_asso()\nsobaeval.accumulate()\nsobaeval.summarize()\n\nsobaeval= Eval.COCOeval(soba,association_soba)\nsobaeval.params.iouType=\"bbox\"\nsobaeval.evaluate_asso()\nsobaeval.accumulate()\nsobaeval.summarize()"
  },
  {
    "path": "projects/LISA/__init__.py",
    "content": "from .utils import *"
  },
  {
    "path": "projects/LISA/config/Base-RCNN-FPN.yaml",
    "content": "MODEL:\n  META_ARCHITECTURE: \"GeneralizedRCNN\"\n  BACKBONE:\n    NAME: \"build_resnet_fpn_backbone\"\n  RESNETS:\n    OUT_FEATURES: [\"res2\", \"res3\", \"res4\", \"res5\"]\n  FPN:\n    IN_FEATURES: [\"res2\", \"res3\", \"res4\", \"res5\"]\n  ANCHOR_GENERATOR:\n    SIZES: [[32], [64], [128], [256], [512]]  # One size for each in feature map\n    ASPECT_RATIOS: [[0.5, 1.0, 2.0]]  # Three aspect ratios (same for all in feature maps)\n  RPN:\n    IN_FEATURES: [\"p2\", \"p3\", \"p4\", \"p5\", \"p6\"]\n    PRE_NMS_TOPK_TRAIN: 2000  # Per FPN level\n    PRE_NMS_TOPK_TEST: 1000  # Per FPN level\n    # Detectron1 uses 2000 proposals per-batch,\n    # (See \"modeling/rpn/rpn_outputs.py\" for details of this legacy issue)\n    # which is approximately 1000 proposals per-image since the default batch size for FPN is 2.\n    POST_NMS_TOPK_TRAIN: 1000\n    POST_NMS_TOPK_TEST: 1000\n  ROI_HEADS:\n    NAME: \"StandardROIHeads\"\n    IN_FEATURES: [\"p2\", \"p3\", \"p4\", \"p5\"]\n  ROI_BOX_HEAD:\n    NAME: \"FastRCNNConvFCHead\"\n    NUM_FC: 2\n    POOLER_RESOLUTION: 7\n  ROI_MASK_HEAD:\n    NAME: \"MaskRCNNConvUpsampleHead\"\n    NUM_CONV: 4\n    POOLER_RESOLUTION: 14\nDATASETS:\n  TRAIN: (\"coco_2017_train\",)\n  TEST: (\"coco_2017_val\",)\nSOLVER:\n  IMS_PER_BATCH: 16\n  BASE_LR: 0.02\n  STEPS: (60000, 80000)\n  MAX_ITER: 90000\nINPUT:\n  MIN_SIZE_TRAIN: (640, 672, 704, 736, 768, 800)\n"
  },
  {
    "path": "projects/LISA/config/LISA_101_FPN_3x.yaml",
    "content": "_BASE_: \"Base-RCNN-FPN.yaml\"\nMODEL:\n  WEIGHTS: \"./R-101.pkl\"\n  MASK_ON: True\n  RESNETS:\n    DEPTH: 101\n  META_ARCHITECTURE: \"LISARCNN\"\n  PROPOSAL_GENERATOR:\n    NAME: \"LISARPN\"\n  RPN:\n    HEAD_NAME: \"LISARPNHead\"\n    PRE_NMS_TOPK_TRAIN: 2000  # Per FPN level\n    PRE_NMS_TOPK_TEST: 1000  # Per FPN level\n    # Detectron1 uses 2000 proposals per-batch,\n    # (See \"modeling/rpn/rpn_outputs.py\" for details of this legacy issue)\n    # which is approximately 1000 proposals per-image since the default batch size for FPN is 2.\n    POST_NMS_TOPK_TRAIN: 2000\n    POST_NMS_TOPK_TEST: 1000\n  ROI_HEADS:\n    NAME: \"LISAROIHeads\"\n    NUM_CLASSES: 2\n  ROI_BOX_HEAD:\n    SMOOTH_L1_BETA: 0.5\nSOLVER:\n  STEPS: (40000, 100000)\n  MAX_ITER: 40000\n  BASE_LR: 0.001\n  IMS_PER_BATCH: 8\nDATASETS:\n  TRAIN: (\"soba_cast_shadow_train_full\",)\n  TEST: (\"soba_cast_shadow_val_full\",)\nOUTPUT_DIR: \"./output_light\"\n"
  },
  {
    "path": "projects/LISA/config/LISA_101_FPN_3x_demo.yaml",
    "content": "_BASE_: \"Base-RCNN-FPN.yaml\"\nMODEL:\n  WEIGHTS: \"output_light/last_checkpoint.pth\"\n  MASK_ON: True\n  RESNETS:\n    DEPTH: 101\n  META_ARCHITECTURE: \"LISARCNN\"\n  PROPOSAL_GENERATOR:\n    NAME: \"LISARPN\"\n  RPN:\n    HEAD_NAME: \"LISARPNHead\"\n    PRE_NMS_TOPK_TRAIN: 2000  # Per FPN level\n    PRE_NMS_TOPK_TEST: 1000  # Per FPN level\n    # Detectron1 uses 2000 proposals per-batch,\n    # (See \"modeling/rpn/rpn_outputs.py\" for details of this legacy issue)\n    # which is approximately 1000 proposals per-image since the default batch size for FPN is 2.\n    POST_NMS_TOPK_TRAIN: 2000\n    POST_NMS_TOPK_TEST: 1000\n  ROI_HEADS:\n    NAME: \"LISAROIHeads\"\n    NUM_CLASSES: 2\n  ROI_BOX_HEAD:\n    SMOOTH_L1_BETA: 0.5\nSOLVER:\n  STEPS: (80000, 100000)\n  MAX_ITER: 150000\n  BASE_LR: 0.001\n  IMS_PER_BATCH: 8\nDATASETS:\n  TRAIN: (\"coco_cast_shadow_train_beta_full\",)\n  TEST: (\"coco_cast_shadow_val_beta_full\",)\nOUTPUT_DIR: \"./output_light\"\n"
  },
  {
    "path": "projects/LISA/defaults.py",
    "content": "# -*- coding: utf-8 -*-\n# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\n\n\"\"\"\nThis file contains components with some default boilerplate logic user may need\nin training / testing. They will not work for everyone, but many users may find them useful.\n\nThe behavior of functions/classes in this file is subject to change,\nsince they are meant to represent the \"common default behavior\" people need in their projects.\n\"\"\"\n\nimport argparse\nimport logging\nimport os\nfrom collections import OrderedDict\nimport torch\nfrom fvcore.common.file_io import PathManager\nfrom fvcore.nn.precise_bn import get_bn_modules\nfrom torch.nn.parallel import DistributedDataParallel\n\nimport detectron2.data.transforms as T\nfrom detectron2.checkpoint import DetectionCheckpointer\nfrom detectron2.data import (\n    MetadataCatalog,\n    build_detection_test_loader,\n    build_detection_train_loader,\n)\nfrom detectron2.evaluation import (\n    DatasetEvaluator,\n    inference_on_dataset,\n    print_csv_format,\n    verify_results,\n)\nfrom detectron2.modeling import build_model\nfrom detectron2.solver import build_lr_scheduler, build_optimizer\nfrom detectron2.utils import comm\nfrom detectron2.utils.collect_env import collect_env_info\nfrom detectron2.utils.env import seed_all_rng\nfrom detectron2.utils.events import CommonMetricPrinter, JSONWriter, TensorboardXWriter\nfrom detectron2.utils.logger import setup_logger\n\nfrom . 
import hooks\nfrom .train_loop import SimpleTrainer\n\n__all__ = [\"default_argument_parser\", \"default_setup\", \"DefaultPredictor\", \"DefaultTrainer\"]\n\n\ndef default_argument_parser():\n    \"\"\"\n    Create a parser with some common arguments used by detectron2 users.\n\n    Returns:\n        argparse.ArgumentParser:\n    \"\"\"\n    parser = argparse.ArgumentParser(description=\"Detectron2 Training\")\n    parser.add_argument(\"--config-file\", default=\"\", metavar=\"FILE\", help=\"path to config file\")\n    parser.add_argument(\n        \"--resume\",\n        action=\"store_true\",\n        help=\"whether to attempt to resume from the checkpoint directory\",\n    )\n    parser.add_argument(\"--eval-only\", action=\"store_true\", help=\"perform evaluation only\")\n    parser.add_argument(\"--num-gpus\", type=int, default=1, help=\"number of gpus *per machine*\")\n    parser.add_argument(\"--num-machines\", type=int, default=1)\n    parser.add_argument(\n        \"--machine-rank\", type=int, default=0, help=\"the rank of this machine (unique per machine)\"\n    )\n\n    # PyTorch still may leave orphan processes in multi-gpu training.\n    # Therefore we use a deterministic way to obtain port,\n    # so that users are aware of orphan processes by seeing the port occupied.\n    port = 2 ** 15 + 2 ** 14 + hash(os.getuid()) % 2 ** 14\n    parser.add_argument(\"--dist-url\", default=\"tcp://127.0.0.1:{}\".format(port))\n    parser.add_argument(\n        \"opts\",\n        help=\"Modify config options using the command-line\",\n        default=None,\n        nargs=argparse.REMAINDER,\n    )\n    return parser\n\n\ndef default_setup(cfg, args):\n    \"\"\"\n    Perform some basic common setups at the beginning of a job, including:\n\n    1. Set up the detectron2 logger\n    2. Log basic information about environment, cmdline arguments, and config\n    3. 
Backup the config to the output directory\n\n    Args:\n        cfg (CfgNode): the full config to be used\n        args (argparse.NameSpace): the command line arguments to be logged\n    \"\"\"\n    output_dir = cfg.OUTPUT_DIR\n    if comm.is_main_process() and output_dir:\n        PathManager.mkdirs(output_dir)\n\n    rank = comm.get_rank()\n    setup_logger(output_dir, distributed_rank=rank, name=\"fvcore\")\n    logger = setup_logger(output_dir, distributed_rank=rank)\n\n    logger.info(\"Rank of current process: {}. World size: {}\".format(rank, comm.get_world_size()))\n    logger.info(\"Environment info:\\n\" + collect_env_info())\n\n    logger.info(\"Command line arguments: \" + str(args))\n    if hasattr(args, \"config_file\"):\n        logger.info(\n            \"Contents of args.config_file={}:\\n{}\".format(\n                args.config_file, PathManager.open(args.config_file, \"r\").read()\n            )\n        )\n\n    logger.info(\"Running with full config:\\n{}\".format(cfg))\n    if comm.is_main_process() and output_dir:\n        # Note: some of our scripts may expect the existence of\n        # config.yaml in output directory\n        path = os.path.join(output_dir, \"config.yaml\")\n        with PathManager.open(path, \"w\") as f:\n            f.write(cfg.dump())\n        logger.info(\"Full config saved to {}\".format(os.path.abspath(path)))\n\n    # make sure each worker has a different, yet deterministic seed if specified\n    seed_all_rng(None if cfg.SEED < 0 else cfg.SEED + rank)\n\n    # cudnn benchmark has large overhead. 
It shouldn't be used considering the small size of\n    # typical validation set.\n    if not (hasattr(args, \"eval_only\") and args.eval_only):\n        torch.backends.cudnn.benchmark = cfg.CUDNN_BENCHMARK\n\n\nclass DefaultPredictor:\n    \"\"\"\n    Create a simple end-to-end predictor with the given config.\n    The predictor takes an BGR image and produce a dict of predictions.\n\n    Attributes:\n        metadata (Metadata): the metadata of the underlying dataset, obtained from\n            cfg.DATASETS.TEST.\n    \"\"\"\n\n    def __init__(self, cfg):\n        self.cfg = cfg.clone()  # cfg can be modified by model\n        self.model = build_model(self.cfg)\n        self.model.eval()\n        self.metadata = MetadataCatalog.get(cfg.DATASETS.TEST[0])\n\n        checkpointer = DetectionCheckpointer(self.model)\n        checkpointer.load(cfg.MODEL.WEIGHTS)\n\n        self.transform_gen = T.ResizeShortestEdge(\n            [cfg.INPUT.MIN_SIZE_TEST, cfg.INPUT.MIN_SIZE_TEST], cfg.INPUT.MAX_SIZE_TEST\n        )\n\n        self.input_format = cfg.INPUT.FORMAT\n        assert self.input_format in [\"RGB\", \"BGR\"], self.input_format\n\n    @torch.no_grad()\n    def __call__(self, original_image):\n        \"\"\"\n        Args:\n            original_image (np.ndarray): an image of shape (H, W, C) (in BGR order).\n\n        Returns:\n            predictions (dict): the output of the model\n        \"\"\"\n        # Apply pre-processing to image.\n        if self.input_format == \"RGB\":\n            # whether the model expects BGR inputs or RGB\n            original_image = original_image[:, :, ::-1]\n        height, width = original_image.shape[:2]\n        image = self.transform_gen.get_transform(original_image).apply_image(original_image)\n        image = torch.as_tensor(image.astype(\"float32\").transpose(2, 0, 1))\n\n        inputs = {\"image\": image, \"height\": height, \"width\": width}\n        predictions = self.model([inputs])[0]\n        return 
predictions\n\n\nclass DefaultTrainer(SimpleTrainer):\n    \"\"\"\n    A trainer with default training logic. Compared to `SimpleTrainer`, it\n    contains the following logic in addition:\n\n    1. Create model, optimizer, scheduler, dataloader from the given config.\n    2. Load a checkpoint or `cfg.MODEL.WEIGHTS`, if exists.\n    3. Register a few common hooks.\n\n    It is created to simplify the **standard model training workflow** and reduce code boilerplate\n    for users who only need the standard training workflow, with standard features.\n    It means this class makes *many assumptions* about your training logic that\n    may easily become invalid in a new research. In fact, any assumptions beyond those made in the\n    :class:`SimpleTrainer` are too much for research.\n\n    The code of this class has been annotated about restrictive assumptions it mades.\n    When they do not work for you, you're encouraged to write your own training logic.\n\n    Also note that the behavior of this class, like other functions/classes in\n    this file, is not stable, since it is meant to represent the \"common default behavior\".\n    It is only guaranteed to work well with the standard models and training workflow in detectron2.\n    To obtain more stable behavior, write your own training logic with other public APIs.\n\n    Attributes:\n        scheduler:\n        checkpointer (DetectionCheckpointer):\n        cfg (CfgNode):\n    \"\"\"\n\n    def __init__(self, cfg):\n        \"\"\"\n        Args:\n            cfg (CfgNode):\n        \"\"\"\n        # Assume these objects must be constructed in this order.\n        model = self.build_model(cfg)\n        optimizer = self.build_optimizer(cfg, model)\n        data_loader = self.build_train_loader(cfg)\n\n        # For training, wrap with DDP. 
But don't need this for inference.\n        if comm.get_world_size() > 1:\n            # print(comm.get_local_rank())\n            model = DistributedDataParallel(\n                model, device_ids=[comm.get_local_rank()], broadcast_buffers=False\n            )\n        super().__init__(model, data_loader, optimizer)\n\n        self.scheduler = self.build_lr_scheduler(cfg, optimizer)\n        # Assume no other objects need to be checkpointed.\n        # We can later make it checkpoint the stateful hooks\n        self.checkpointer = DetectionCheckpointer(\n            # Assume you want to save checkpoints together with logs/statistics\n            model,\n            cfg.OUTPUT_DIR,\n            optimizer=optimizer,\n            scheduler=self.scheduler,\n        )\n        self.start_iter = 0\n        self.max_iter = cfg.SOLVER.MAX_ITER\n        self.cfg = cfg\n\n        self.register_hooks(self.build_hooks())\n\n    def resume_or_load(self, resume=True):\n        \"\"\"\n        If `resume==True`, and last checkpoint exists, resume from it.\n\n        Otherwise, load a model specified by the config.\n\n        Args:\n            resume (bool): whether to do resume or not\n        \"\"\"\n        # The checkpoint stores the training iteration that just finished, thus we start\n        # at the next iteration (or iter zero if there's no checkpoint).\n        self.start_iter = (\n            self.checkpointer.resume_or_load(self.cfg.MODEL.WEIGHTS, resume=resume).get(\n                \"iteration\", -1\n            )\n            + 1\n        )\n\n    def build_hooks(self):\n        \"\"\"\n        Build a list of default hooks.\n\n        Returns:\n            list[HookBase]:\n        \"\"\"\n        cfg = self.cfg.clone()\n        cfg.defrost()\n        cfg.DATALOADER.NUM_WORKERS = 0  # save some memory and time for PreciseBN\n\n        ret = [\n            hooks.IterationTimer(),\n            hooks.LRScheduler(self.optimizer, self.scheduler),\n            
hooks.PreciseBN(\n                # Run at the same freq as (but before) evaluation.\n                cfg.TEST.EVAL_PERIOD,\n                self.model,\n                # Build a new data loader to not affect training\n                self.build_train_loader(cfg),\n                cfg.TEST.PRECISE_BN.NUM_ITER,\n            )\n            if cfg.TEST.PRECISE_BN.ENABLED and get_bn_modules(self.model)\n            else None,\n        ]\n\n        # Do PreciseBN before checkpointer, because it updates the model and need to\n        # be saved by checkpointer.\n        # This is not always the best: if checkpointing has a different frequency,\n        # some checkpoints may have more precise statistics than others.\n        if comm.is_main_process():\n            ret.append(hooks.PeriodicCheckpointer(self.checkpointer, cfg.SOLVER.CHECKPOINT_PERIOD))\n\n        def test_and_save_results():\n            self._last_eval_results = self.test(self.cfg, self.model)\n            return self._last_eval_results\n\n        # Do evaluation after checkpointer, because then if it fails,\n        # we can use the saved checkpoint to debug.\n        ret.append(hooks.EvalHook(cfg.TEST.EVAL_PERIOD, test_and_save_results))\n\n        if comm.is_main_process():\n            # run writers in the end, so that evaluation metrics are written\n            ret.append(hooks.PeriodicWriter(self.build_writers()))\n        return ret\n\n    def build_writers(self):\n        \"\"\"\n        Build a list of default writers, that write metrics to the screen,\n        a json file, and a tensorboard event file respectively.\n\n        Returns:\n            list[Writer]: a list of objects that have a ``.write`` method.\n        \"\"\"\n        # Assume the default print/log frequency.\n        return [\n            # It may not always print what you want to see, since it prints \"common\" metrics only.\n            CommonMetricPrinter(self.max_iter),\n            
JSONWriter(os.path.join(self.cfg.OUTPUT_DIR, \"metrics.json\")),\n            TensorboardXWriter(self.cfg.OUTPUT_DIR),\n        ]\n\n    def train(self):\n        \"\"\"\n        Run training.\n\n        Returns:\n            OrderedDict of results, if evaluation is enabled. Otherwise None.\n        \"\"\"\n        super().train(self.start_iter, self.max_iter)\n        if hasattr(self, \"_last_eval_results\") and comm.is_main_process():\n            verify_results(self.cfg, self._last_eval_results)\n            return self._last_eval_results\n\n    @classmethod\n    def build_model(cls, cfg):\n        \"\"\"\n        Returns:\n            torch.nn.Module:\n        \"\"\"\n        model = build_model(cfg)\n        logger = logging.getLogger(__name__)\n        logger.info(\"Model:\\n{}\".format(model))\n        return model\n\n    @classmethod\n    def build_optimizer(cls, cfg, model):\n        \"\"\"\n        Returns:\n            torch.optim.Optimizer:\n        \"\"\"\n        return build_optimizer(cfg, model)\n\n    @classmethod\n    def build_lr_scheduler(cls, cfg, optimizer):\n        return build_lr_scheduler(cfg, optimizer)\n\n    @classmethod\n    def build_train_loader(cls, cfg):\n        \"\"\"\n        Returns:\n            iterable\n        \"\"\"\n        return build_detection_train_loader(cfg)\n\n    @classmethod\n    def build_test_loader(cls, cfg, dataset_name):\n        \"\"\"\n        Returns:\n            iterable\n        \"\"\"\n        return build_detection_test_loader(cfg, dataset_name)\n\n    @classmethod\n    def build_evaluator(cls, cfg, dataset_name):\n        \"\"\"\n        Returns:\n            DatasetEvaluator\n        \"\"\"\n        raise NotImplementedError\n\n    @classmethod\n    def test(cls, cfg, model, evaluators=None):\n        \"\"\"\n        Args:\n            cfg (CfgNode):\n            model (nn.Module):\n            evaluators (list[DatasetEvaluator] or None): if None, will call\n                :meth:`build_evaluator`. 
Otherwise, must have the same length as\n                `cfg.DATASETS.TEST`.\n\n        Returns:\n            dict: a dict of result metrics\n        \"\"\"\n        logger = logging.getLogger(__name__)\n        if isinstance(evaluators, DatasetEvaluator):\n            evaluators = [evaluators]\n        if evaluators is not None:\n            assert len(cfg.DATASETS.TEST) == len(evaluators), \"{} != {}\".format(\n                len(cfg.DATASETS.TEST), len(evaluators)\n            )\n\n        results = OrderedDict()\n        for idx, dataset_name in enumerate(cfg.DATASETS.TEST):\n            data_loader = cls.build_test_loader(cfg, dataset_name)\n            # When evaluators are passed in as arguments,\n            # implicitly assume that evaluators can be created before data_loader.\n            evaluator = (\n                evaluators[idx]\n                if evaluators is not None\n                else cls.build_evaluator(cfg, dataset_name)\n            )\n            results_i = inference_on_dataset(model, data_loader, evaluator)\n            results[dataset_name] = results_i\n            if comm.is_main_process():\n                assert isinstance(\n                    results_i, dict\n                ), \"Evaluator must return a dict on the main process. Got {} instead.\".format(\n                    results_i\n                )\n                logger.info(\"Evaluation results for {} in csv format:\".format(dataset_name))\n                print_csv_format(results_i)\n\n        if len(results) == 1:\n            results = list(results.values())[0]\n        return results\n"
  },
  {
    "path": "projects/LISA/demo.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport argparse\nimport glob\nimport multiprocessing as mp\nimport os\nimport time\nimport cv2\nimport tqdm\nimport skimage.io as io\nimport numpy as np\nimport skimage.transform as tf\nfrom detectron2.config import get_cfg\nfrom detectron2.data.detection_utils import read_image\nfrom detectron2.utils.logger import setup_logger\n\nfrom predictor import VisualizationDemo\n\n\nfrom LISA import add_lisa_config \n\n\ndef setup_cfg(args):\n    # load config from file and command-line arguments\n    cfg = get_cfg()\n    cfg.merge_from_file(args.config_file)\n    cfg.merge_from_list(args.opts)\n    # Set score_threshold for builtin models\n    cfg.MODEL.RETINANET.SCORE_THRESH_TEST = args.confidence_threshold\n    cfg.MODEL.ROI_HEADS.SCORE_THRESH_TEST = args.confidence_threshold\n    cfg.MODEL.PANOPTIC_FPN.COMBINE.INSTANCES_CONFIDENCE_THRESH = args.confidence_threshold\n    cfg.freeze()\n    return cfg\n\n\ndef get_parser():\n    parser = argparse.ArgumentParser(description=\"Detectron2 Demo\")\n    parser.add_argument(\n        \"--config-file\",\n        default=\"configs/quick_schedules/e2e_mask_rcnn_R_50_FPN_inference_acc_test.yaml\",\n        metavar=\"FILE\",\n        help=\"path to config file\",\n    )\n    parser.add_argument(\"--webcam\", action=\"store_true\", help=\"Take inputs from webcam.\")\n    parser.add_argument(\"--video-input\", help=\"Path to video file.\")\n    parser.add_argument(\"--input\", nargs=\"+\", help=\"A list of space separated input images\")\n    parser.add_argument(\n        \"--output\",\n        help=\"A file or directory to save output visualizations. 
\"\n        \"If not given, will show output in an OpenCV window.\",\n    )\n\n    parser.add_argument(\n        \"--confidence-threshold\",\n        type=float,\n        default=0.5,\n        help=\"Minimum score for instance predictions to be shown\",\n    )\n    parser.add_argument(\n        \"--opts\",\n        help=\"Modify model config options using the command-line\",\n        default=[],\n        nargs=argparse.REMAINDER,\n    )\n    return parser\n\n\nif __name__ == \"__main__\":\n    mp.set_start_method(\"spawn\", force=True)\n    args = get_parser().parse_args()\n    logger = setup_logger()\n    logger.info(\"Arguments: \" + str(args))\n\n    cfg = setup_cfg(args)\n\n    demo = VisualizationDemo(cfg)\n\n    if args.input:\n        if len(args.input) == 1:\n            args.input = glob.glob(os.path.expanduser(args.input[0]))\n        for path in tqdm.tqdm(args.input, disable=not args.output):\n            # use PIL, to be consistent with evaluation\n            img = read_image(path, format=\"BGR\")\n            start_time = time.time()\n            predictions, visualized_output = demo.run_on_image(img)\n            \n            logger.info(\n                \"{}: detected {} instances in {:.2f}s\".format(\n                    path, len(predictions[0][0][\"instances\"]), time.time() - start_time\n                )\n            )\n            ins,rel = predictions\n            #for i,mask in enumerate(rel[0]['instances'].pred_masks):\n             #   mask = mask[:,:,0]\n              #  mask = np.array([mask,mask,mask,mask]).astype('uint8').transpose(1,2,0) * 255\n               # io.imsave('./demo/mask{}.png'.format(i),mask)\n            # print(predictions)\n            # print(visualized_output)\n            if args.output:\n                if os.path.isdir(args.output):\n                    assert os.path.isdir(args.output), args.output\n                    out_filename = os.path.join(args.output, os.path.basename(path))\n                else:\n    
                assert len(args.input) == 1, \"Please specify a directory with args.output\"\n                    out_filename = args.output\n                visualized_output.save(out_filename)\n            else:\n                cv2.imshow(\"COCO detections\", visualized_output.get_image()[:, :, ::-1])\n                if cv2.waitKey(0) == 27:\n                    break  # esc to quit\n    elif args.webcam:\n        assert args.input is None, \"Cannot have both --input and --webcam!\"\n        cam = cv2.VideoCapture(0)\n        for vis in tqdm.tqdm(demo.run_on_video(cam)):\n            cv2.imshow(\"COCO detections\", vis)\n            if cv2.waitKey(1) == 27:\n                break  # esc to quit\n        cv2.destroyAllWindows()\n    elif args.video_input:\n        video = cv2.VideoCapture(args.video_input)\n        width = int(video.get(cv2.CAP_PROP_FRAME_WIDTH))\n        height = int(video.get(cv2.CAP_PROP_FRAME_HEIGHT))\n        frames_per_second = video.get(cv2.CAP_PROP_FPS)\n        num_frames = int(video.get(cv2.CAP_PROP_FRAME_COUNT))\n        basename = os.path.basename(args.video_input)\n\n        if args.output:\n            if os.path.isdir(args.output):\n                output_fname = os.path.join(args.output, basename)\n                output_fname = os.path.splitext(output_fname)[0] + \".mkv\"\n            else:\n                output_fname = args.output\n            assert not os.path.isfile(output_fname), output_fname\n            output_file = cv2.VideoWriter(\n                filename=output_fname,\n                # some installation of opencv may not support x264 (due to its license),\n                # you can try other format (e.g. 
MPEG)\n                fourcc=cv2.VideoWriter_fourcc(*\"x264\"),\n                fps=float(frames_per_second),\n                frameSize=(width, height),\n                isColor=True,\n            )\n        assert os.path.isfile(args.video_input)\n        for vis_frame in tqdm.tqdm(demo.run_on_video(video), total=num_frames):\n            if args.output:\n                output_file.write(vis_frame)\n            else:\n                cv2.imshow(basename, vis_frame)\n                if cv2.waitKey(1) == 27:\n                    break  # esc to quit\n        video.release()\n        if args.output:\n            output_file.release()\n        else:\n            cv2.destroyAllWindows()\n"
  },
  {
    "path": "projects/LISA/output_light/inference/soba_association_results.json",
    "content": "[{\"image_id\": 1, \"category_id\": 1, \"bbox\": [208.0, 135.0, 177.0, 293.0], \"score\": 0.9999988675117493, \"association_id\": 1, \"light\": [-1.8854426145553589, -1.7631640434265137, 1.7723000049591064, 1.5570213794708252], \"segmentation\": {\"size\": [768, 1024], \"counts\": \"jZl4=ag04M2N101N2N2O1O1N2O0O101N2O0O4M2M2O1O001O1N3N2N2N1O1O1O1O001O001O3M2N1O0O2O2N8G4I6N0O2O0O2F]MU[Oc2gd0>K5M3N2OkNc[OhN[d0Y1e[OgN[d0Y1f[OgNYd0X1h[OiNWd0\\\\Of[OY13\\\\OVd0ZOh[OY14\\\\O[?VOmD3fK[12]OU?2ZDVO`LZ12_Oo>b0RDcNoL\\\\11^Om>g0PD^NSM]1O@l>g0QD[NUM^1N@k>k0nCXNYM]1MAk>R1gCPNbM]1LBj>c1TC_MWN]1JAj>m1kBUMbN]1HBj>R2fBPMhN]1GAk>V2aBiLQOn2^>>WBfL^Ol2[>n1fARNZ>m1gASNY>l1hAUNW>k1iAaM\\\\OmMj>c4jA_M^OmMh>d4jA_M@kMf>f4kA]M@mMd>g4lA\\\\M@mMd>g4lA\\\\M@mMe>e4lA^M_OmMf>d4mA]M\\\\OPNg>b4SBYMVOUNh>a4UBWMSOXNh>a4VBVMQOZNi>_4XBVMoN[Ni>_4YBUMnN\\\\Ni>_4YBUMnN\\\\Ni>_4ZBSMnN^Nh>_4[BRMmN_Ni>^4ZBRMnN`Nh>_4ZBoLoNaNh>a4YBmLoNbNh>b4YBjLPOdNh>b4WBZLjNRO72h>c4WBWLkNTO62h>c4YBTLkNUO54g>d4ZBQLkNUO55g>f4\\\\BkKjNWO58f>e4]BjK[OBX>d4^BiKZOBY>f4]BgKZOBZ>g4\\\\BgKZOBZ>h4\\\\BeKZOCZ>i4[BcK[OEZ>h4\\\\BbKZOE[>j4ZBaK[OE[>k4ZB_KZOG\\\\>k4ZB]KYOH]>m4YB[KYOI^>m4YBYKYOJ^>m4YBYKXOJ`>n4XBWKVOMb>l4XBWKROOh>j4WBVKnN2m>h4VBSKmN5n>h4VBRKkN6P?h4WBPKiN8o>i4XBoJhN8Q?i4XBnJgN9Q?i4YBnJdN:S?h4ZBmJbN;U?i4YBlJ`N<W?h4ZBmJ\\\\N<Z?h4ZBmJYN<]?h4YBfKh=[4XBdKh=^4VBbKj=^4VBaKk=`4TB`Kl=a4TB]Km=c4SB]Km=d4RB[Ko=e4QB[Ko=e4QB[Ko=e4QB[KP>e4oAZKR>f4nAZKS>e4mA[KT>d4kA\\\\KX>c4gA]KZ>b4fA^K[>a4eA^K]>a4cA_K^>`4bAiJeN8k?n4_AhJiN:i?l4^AiJkN:j?k4ZAkJmN9j?k4YAkJnN:k?j4WAkJnN;l?k4UAiJoN<R`0f4_AYKh>`4XA`Kk>^4TAaKo>^4PAbKR?]4m@cKZ?X4d@hKS`0j3d^OoK:7^a0W4`^OgKga0T4W^OmKla0P4T^OPLQb0l3m]OULPc0n2o\\\\ORM\\\\c0d2d\\\\O\\\\M_c0b2`\\\\O^Mbc0`2]\\\\O`Mgc0\\\\2Z\\\\OdMlc0V2T\\\\OiMTd0P2l[OPNZd0i1g[OVNbd0b1^[O^Nee0>\\\\ZOAne04UZOJSf0NnYO1Wf0IkYO7Pg0N101N3Mfkn>\"}}, {\"image_id\": 1, \"category_id\": 1, \"bbox\": [360.0, 138.0, 406.0, 614.0], \"score\": 0.999998152256012, \"association_id\": 2, \"light\": [-2.4960312843322754, -1.2195820808410645, 
2.3225607872009277, 1.0873587131500244], \"segmentation\": {\"size\": [768, 1024], \"counts\": \"Q`^85hg06K4M4K4M2O11N2O1N3M3N2M100O10O0010O01O1O10O0010O10003L7MM10O000O1O1O10O01O001O001O1O1O3dHYOXGk0a8_OYGf0_8C[G`0`8E]G=^8H`G:[8JdG7Y8MeG5R83nGMi7=VHCb7f0]HZO\\\\7n0cHROT7X1kHiNi6c1VI]N`6n1_ISNV6X2kIhMl5a2UJ_Ma5j2aJUM`4i3_KXLR4V4nKkKi3]4WLbKd3d4[L[Kb3j4\\\\LWKa3l4_LTK]3R5aLmJ\\\\3X5cLhJY3]5fLcJX3`5gL_JX3c5hL]JV3f5iL[JT3g5lLYJS3h5mLYJP3h5QMZJk2h5TM[Ji2f5WM[Jf2f5[M[Jc2f5]M[Ja2e5`M[J_2e5bM[J]2f5cMYJ]2g5dMYJ[2g5fMXJZ2h5fMYJY2g5hMYJW2h5iMWJW2i5iMXJV2h5kMXJT2h5lMYJS2g5mMYJS2g5\\\\K`HiNi1j5h5YKcHlNf1j5g5UKiHQO`1i5g5SKmHSO]1i5f5QKQIUOZ1i5f5oJSIWOX1h5f5RKQIVOY1h5g5RKPITO[1i5e5TKoHRO^1i5c5UKoHQO`1i5b5VKmHPOc1i5`5WKmHoNe1i5_5WKlHnNi1i5[5ZKkHkNn1h5X5]KkHhN5_Ne0Z7\\\\6^KjHhN4eNd0T7^6_KjHgN3jNd0o6`6_KiHgN4lNc0m6a6_KhHgN4POa0j6d6^KgHgN4SOa0g6d6`KfHfN5UO`0d6f6`KeHfN6WO>c6h6`KdHeN5[O>_6i6bKcHcN7^O;]6l6bKaHcN8@:Z6m6dKaH`N9D8W6o6eK_H`N9H7R6Q7fK`H^N9K6P6R7gK^H]N;M7l5P7kK^HZN<07i5o6mK_HXN;39f5n6oK]HWN=59c5n6PL]HVN=79b5m6QL^HTN<;:^5m6SL\\\\HSN><:]5m6SL\\\\HRN>>:\\\\5m6TLZHRN??9[5o6SLYHSN>`0;Y5o6TLWHSN>a0=W5o6ULUHRN?d0<U5P7VLUHPN>f0>S5P7ZLPHmM`0h0a0P5P7\\\\LmGkMb0k0a0m4Q7^LjGjMc0l0d0j4o6aLjGfMd0Q1c0g4P7aLiGfMc0T1f0c4n6dLiGcMc0X1f0`4o6dLiGbMb0[1g0^4n6fLlG[M2ALQ2X1\\\\4o6fLmGYMOMEj1`1Y4o6gLPHTMM^2T1f3P7gLSHoLKd2S1e3o6jLiH@7f3Q7lLfH^O9e3Q7nLhHZO7h3R7mLhH[O6g3S7nLgH[O6f3S7oLhHZO6f3S7oLhH[O5e3T7oLgH\\\\O6d3S7QMfH\\\\O7b3T7QMeH^O7`3U7QMeH^O7`3T7RMeH_O6_3V7QMdHA6]3V7RMdHA6]3W7QMcHC6[3X7QMbHD6[3X7RMaHC8Z3X7SM_HD8Y3Z7RM^HE9X3Y7TM\\\\HE;X3Y7TMZHF<U3Z7VMYHJ9o2_7XMVH;K[2o7[MUH=NS2n7`MSHhN_OROf0_4i7gMQHfNDTOb0]4j7iMoGdNHVO?\\\\4j7jMoGcNIXO>Z4k7kMlGcNLYO=X4k7lMlGaNO[O9X4m7lMjG`N0_O8T4o7lMjG_N0B6S4P8mMiG]N1F4P4R8nMhG[N3I2m3T8oMfG[N3K2k3U8PNeGYN4O1g3W8QNdGWN53Ne3Y8RNcGVN64Nc3Y8SNcGUN67N`3Z8TNaGUN78M`3Z8SNcGSN6;N^3Y8UNbGRN7<N\\\\3Z8UNaGRN8=N[3Y8VNbGQN6`0OY3Y8VNaGQN7`00X3X8WNaGPN8b0OV3Y8XN_GPN8c00V3X8WN`GPN8c00V3Y8WN^GPN8e01S3Y8YN]GoM8f02S3Y8WN]GPN8g01R3[8XNZGoM8j03n2[8ZNYGnM8k04m2\\\\8]NSGlM<l04j2^8aNoFhM?
o03h2_8cNlFgM`0Q15d2`8eNeFjMd0P16a2a89XGXM6^2d88UG]M7Z2d8ROQGdL2S28W2f8POUGaLMY28U2g8QO[GYLFc28R2h8ROTHmNTOQ2h8ROTHnNSOP2j8QORHPOTOn1k8ROQHQOSOm1l8ROQHQOSOm1m8QOoGSOTOm1l8POPHTOTOk1m8POnGVOUOj1n8oNmGXOTOj1n8nNnGXOTOj1o8mNmGZOSOi1P9mNlG[OTOi1P9kNlG]OTOg1P9mNkG\\\\OUOh1P9kNkG^OTOg1Q9lNjG]OUOg1Q9mNhG]OWOg1Q9kNhG^OVOi1Q9jNhG]OWOi1R9jNfG]OXOi1R9kNeG[OZOj1R9jNcG]O[Oi1R9kNbG\\\\O\\\\Oh1T9lN_G\\\\O]Oh1U9kN^G]O]Oh1V9kN[G^O^Oh1X9iNZG_O^Oh1Y9iNXG_O_Og1[9iNUGA@f1\\\\9iNSGAAf1^9hNPGBBf1_9gNPGB@h1a9fNnFABi1a9fNlFACi1a9gNkF@Cj1b9fNlF^OCm1a9eNkF^ODn1`9eNkF]ODP2`9cNlF\\\\OER2^9cNmFZOET2]9bNnFZODV2^9`NmFZOEW2]9_NnFYOFX2\\\\9`NnFWOE[2\\\\9^NoFWOE\\\\2\\\\9\\\\NoFWOE^2\\\\9\\\\NnFVOE`2\\\\9[NnFTOFb2\\\\9ZNmFTOFe2]9WNlFROHg2]9YNiFnNJk2]9\\\\NdFhN0l2]9`N]FdN6m2]9jNRFXNa0o2^9d1dF\\\\N\\\\9c1dF^N\\\\9a1eF_N\\\\9_1eFaN[9^1eFcN\\\\9\\\\1dFdN\\\\9[1eFeN\\\\9Y1dFhN\\\\9X1cFiN^9i0oFWOQ9h0nF[OQ9c0oF_OQ9?PGBP9<PGGP9FQF]Jo0o5P9AbG`0^8]OcGf0\\\\8XOdGj0\\\\8TOdGn0]8oNdGS1[8lNfGT1Z8VNnEjKh1Q6Z8iNfGY1Z8fNeG[1[8dNeG]1[8cNdG^1\\\\8aNcGa1]8_NbGc1]8]NaGe1_8ZNaGg1_8YNaGg1_8YN`Gh1`8XN_Gj1`8UNaGk1_8UN`Gl1`8TN`Gl1`8TN_Gn1`8QNaGo1_8QN`GQ2^8oMbGS2[8VMWFVL^1e6X8WM]FPL[1k6V8UMcFjKY1S7Q8UMhFdKW1Z7o7RMmF`KU1a7j7QMSG[KT1e7g7PMkHR3R7oLnHR3Q7oLoHR3n6oLRIS3j6PMVIP3g6RMYIP3b6SM_Im2c5SHPIR5\\\\1m2b5WHkHn4c1k2b5]HcHi4k1k2a5bH]He4?jJm0o7V6PNkHSJn0n7V6oMjHVJo0m7V6mMjHXJn0l7W6kMkH[Jm0k7W6cMRIdJf0j7W6aMTIfJd0k7V6_MTIjJd0h7W6^MTIlJd0g7W6]MUImJc0g7V6]MXIlJa0h7V6\\\\MZIlJ`0i7U6UKgHbNc0bN?i7W6WM[IQK=i7X6UM[IRK<k7Z6PMZIWK:k7\\\\6mLZIYK8l7_6hLZI]K4n7c6cLYIaKJV8m6_GkHl3?U6n7[IRHg6n7XIRHi6m7VITHk6l7SIUHn6j7QIWHP7i7nHXHS7g7lHYHV7h7gHYHZ7g7eHXH]7h7bHXH_7h7_HYHb7g7]HXHf7g7YHYHh7g7VHZHl7e7SHZHP8f7mG[HU8d7jG\\\\HW8d7gG]H[8b7dG^H]8b7aG^Ha8b7^G^Hc8b7\\\\G^He8`7^G^Hc8a7^G]Hd8a7^G^Hc8a7^G^Hd8_7^G`Hd8^7^G_He8^7]GaHe8]7\\\\GbHf8\\\\7[GbHh8[7ZGdHg8[7ZGdHh8Z7ZGdHg8Z7[GdHg8[7ZGdHg8Z7]GbHe8\\\\7Q4N3N2M3M5K8H6K6I5K4L4M2M3M4L3M5J6K5K6Ij0WO;E:F<D:F5J5L3M4L4K5H>ZOUTQ6\"}}, {\"image_id\": 2, \"category_id\": 1, \"bbox\": [183.0, 375.0, 488.0, 731.0], 
\"score\": 0.9999985098838806, \"association_id\": 1, \"light\": [-1.6857788562774658, -2.483166456222534, 1.5531294345855713, 2.3420069217681885], \"segmentation\": {\"size\": [1280, 1024], \"counts\": \"laU7X1_V1a0G4L2N3N1N2O1O1O1O3M4L4L5K5K5K4L3bkNfMoR1k3K2N1O2N1O2N1O1O2N001O001O1O001O1O1O2N2N1O1O1O1O001O000010O0001O001O001O001O1O1O1O1O1O001O001O001O00001O001O1O001O1O2N001O1O001O001O1O001O100O1O1O1O1O001O00001O00001O0000001O001O001O010O1O1O1O1O1O10O0001O01O00010fIQoNa5PQ1[JVoNb5mP1YJXoNe5jP1UJ[oNi5`Q1N1O1000001O1O1O1O1O1aKhmNS3YR1iLQnNPOO_3RR1]MdnNa2\\\\Q1\\\\MjmNTOe0^3cQ1ZMemNM<f2iR1Z1M4L4M3LfMjmNRORR1S1PnNiNlQ1]1UnNaNcQ1i1\\\\nNUNZQ1]2_nNaMUQ1X3`nNgLcP1j5J5L4M3N3L3NROmoNTJQP1n5RPOnIlo0V6UPOhIho0[6YPOdIdo0_6]PO`I^o0f6bPOYIXo0n6gPOQISo0W7lPOiHmn0_7QQOaHin0f7VQO[Hfn0a8M4N1N2O2N1N2O1O2M2N2M3M3L4M3M2O2N2O1N2O1O1N2N2M3L4K5L4M3L4M3M3N2N2O2N1N2O1O1O1O1O1O1O1N2O1O100N2O1N2M4L3M3M3N2N3L3N2N3M2O2M3M3L4L4L3M4K4K6J5N2NVOfVOQCYi0R=jVOiBUi0Z=oVO`BQi0a=f03L3N3L4L5J6J6I7J5L4L4L4M3M2N2N2M3N2M3L4L4I7J6K5K5L3M4M3L3M3L5gM`^Ok]Oea0oa0j^Oc]O[a0Vb0W_Of\\\\Oba0Qc0l1K4M3N2N3M2O1N2O1O1O0O2N1N3M2O2N1O101O00000001O1N2N2O0O2O1O1N2O001O1O1O010O01O1O001O0O2O1N1O2O1N101O100O100O2O0O101N10000O101O0O1000000000000000000000000000000000O100000000O1000000O10000O100O1O100^Mj]OY_OWb0U`0a^Od_O`a0S`0m^Oi_OSa0T`0T_Og_Om`0W`0X_Oe_Oi`0Z`0[_Ob_Of`0\\\\`0__O`_Ob`0_`0a_O^_O``0a`0c_O]_O]`0b`0d_O]_O^`0a`0c_O^_O^`0a`0c_O^_O_`0``0b_O__O_`0_`0d_O`_O]`0^`0d_Oa_O]`0]`0e_Ob_O]`0[`0e_Oe_O\\\\`0X`0f_Og_O[`0W`0h_Oh_OY`0U`0i_Oj_OY`0S`0i_Om_OX`0P`0j_Oo_OX`0o?j_OP@W`0o?j_OP@W`0n?k_OP@X`0n?i_OQ@X`0m?i_OS@Y`0k?h_OT@Y`0j?i_OU@X`0j?i_OT@Y`0k?g_OU@Z`0i?h_OV@Y`0i?h_OV@Y`0i?g_OW@Z`0g?h_OW@Y`0i?g_OW@Z`0h?g_OW@Y`0i?g_OV@Z`0j?g_OU@Z`0j?g_OT@Z`0l?g_OR@Z`0n?g_OP@Z`0P`0g_On_O[`0P`0h_Om_OY`0S`0h_Ok_OY`0U`0i_Oh_OX`0X`0j_Oe_OW`0[`0h3O0000001_BVYOj9jf0TFZYOi9gf0UF^YOh9cf0UFbYOh9^f0WFfYOe9[f0ZFjYOb9Wf0\\\\FnYO_9Sf0`FRZO\\\\9oe0bFUZO[9ke0dFXZOY9je0fFXZOX9he0gF[ZOV9ge0hF[ZOW9ee0iF\\\\ZOT9fe0lF\\\\ZOQ9ee0oF]ZOn8ee0QG]ZOk8ee0UG]ZOf8fe0ZG]ZO^8he
0bG]ZOS8je0lG\\\\ZOh7je0XH[ZO[7me0eHWZOP7Pf0PITZOf6Sf0YIPZO^6Vf0bIlYOX6Xf0hIkYOo5\\\\f0PJfYOV5Sg0hJPYO`3aLZKnj0U1dXO[3hh0dL\\\\WOS3jh0lLZWOk2lh0TMZWO_2nh0`M_WOk1jh0TNkWOP1\\\\h0POiWOe0^h0ZOfWO<ah0BdWO6`h0JbWO1bh0N`WOMdh02^WOJdh06^WOFeh08_WOCdh0;cWO\\\\Oah0c0fWORO_h0l0jWOhNZh0W1Y7N2N2N2N2M3L4K5L4K5L4L4L\\\\jh=\"}}, {\"image_id\": 3, \"category_id\": 1, \"bbox\": [19.0, 186.0, 111.0, 115.0], \"score\": 0.9940583109855652, \"association_id\": 2, \"light\": [-2.3670382499694824, -1.874373197555542, 2.282944679260254, 1.7394850254058838], \"segmentation\": {\"size\": [682, 1024], \"counts\": \"Un<2Xe000000O101O0000000O101O00000O10000O1000001XM1S@Ok?5S@Kk?9S@Gl?:T@Fj?=U@Ci?`0V@@h?b0X@^Og?e0W@ZOi?h0U@YOk?h0T@XOk?j0T@VOk?l0T@TOl?P1P@POo?R1P@nNP`0T1n_OmNQ`0U1m_OkNS`0Z1h_OfNW`0^1f_ObNZ`0_1e_OaN[`0a1c_O_N]`0d1`_O]N_`0d1`_O\\\\N``0e1__O[Na`0g1]_OYNc`0i1[_OWNe`0j1Y_OWNg`0j1X_OVNh`0k1W_OUNi`0m1U_OSNk`0n1T_ORNl`0o1S_OQNm`0P2R_OPNn`0R2P_OnMPa0S2o^OmMQa0T2n^OlMQa0W2n^OhMRa0[2k^OeMUa0]2i^OcMWa0a2e^O_M[a0X3000O1K51O1O000000000000001O00000000000000O101N1O1DU^OXMla0f2[^OTMfa0f2b0L4N2N3K4L5M2M3Dh\\\\O_N_c0X1>N2N2O1O1O1O2M2N3N2N2Dg[OD[d08>J^Wcb0\"}}, {\"image_id\": 3, \"category_id\": 1, \"bbox\": [77.0, 219.0, 946.0, 389.0], \"score\": 0.9996058344841003, \"association_id\": 1, \"light\": [-1.3938037157058716, -2.5917248725891113, 1.3562194108963013, 2.4783990383148193], \"segmentation\": {\"size\": [682, 1024], \"counts\": 
\"Qkc11Ye01N2O1N2O2M2N4M4K2N2O1N2N2SJZOSGg0j8^ORGe0i8ATGa0i8DSG>i8FUG<g85kFKS98kFJR99mFHP9:PGFo8<PGEo8<oFEP9=oFDP9<PGDo8>PGCo8>PGBo8?PGBP9?oFBo8`0PG@P9a0oF_OP9b0PG^Oo8d0PG\\\\OP9e0nF\\\\OQ9g0mFYOR9j0lFWOS9j0lFVOS9m0jFTOU9o0iFQOV9S1gFmNX9W1eFiNZ9[1bFfN]9^1`FbN_9c1\\\\F^Nc9f1ZFZNf9i1VFXNi9k1UFVNi9n1SFSNm9n1QFSNn9o1QFQNo9P2oEQNP:P2PFPNo9R2oEoMQ:R2mEoMR:R2nEnMR:S2lEnMT:R2lEnMS:T2kEmMU:S2kEmMT:U2kEkMU:U2jElMV:U2iEkMW:U2iElMU:V2jEjMV:V2iEkMW:V2hEjMX:V2hEjMW:X2hEhMX:Y2gEgMY:Y2fEhMZ:Y2eEgM[:Z2dEfM\\\\:[2cEeM\\\\:]2bEdM^:^2`EbM`:_2_EaMa:`2^E`Mb:b2[E_Me:b2ZE^Me:d2ZE\\\\Mf:e2XE\\\\Mh:e2WE[Mi:f2UE[Mk:f2TEZMl:f2SE[Mm:f2REZMn:g2PEZMo:g2QEYMo:h2oDYMQ;g2oDYMQ;h2mDYMS;g2mDYMS;g2lDZMT;g2kDYMU;g2kDYMU;g2jDZMU;h2jDXMV;h2jDXMV;h2iDYMW;h2hDXMX;h2hDXMX;i2fDXMZ;h2fDXMZ;i2eDWM[;j2dDVM\\\\;k2bDVM^;j2bDVM^;k2aDUM_;l2`DTM`;m2^DTMb;n2\\\\DRMd;o2[DQMe;P3YDQMg;Q3WDoLi;Q3WDoLi;R3UDoLk;R3TDnLl;R3SDoLm;R3RDnLn;R3QDoLo;Q3PDPMP<Q3nCPMR<P3mCQMS<o2lCRMS<P3lCPMT<P3kCQMU<o2jCRMV<o2hCRMX<n2hCRMX<n2gCSMY<m2gCSMY<m2gCSMY<n2fCRMZ<n2eCSM[<m2eCSMZ<n2fCRMZ<n2fCRMZ<o2eCQM[<o2dCRM\\\\<n2dCRM\\\\<n2dCRM[<P3dCPM\\\\<P3dCPM\\\\<P3dCPM\\\\<Q3cCoL\\\\<R3dCnL\\\\<R3dCnL\\\\<P600O10000O1000000O10000O1000O1000O100000000000000O1000000000000000000O10O10000000000000000O1000O100000O1000000O100000000O1000000000O01000000O1000000O10000O10000dMcCjK^<V4bCiK^<X4bCgK_<Y4aCfK`<Z4`CeKa<[4_CdKb<\\\\4^CcKc<^4]C`Kd<`4\\\\C`Kd<`4\\\\C_Ke<a4[C_Ke<a4[C_Ke<b4[C]Ke<c4[C]Ke<c4[C\\\\Kf<d4ZC\\\\Kf<d4ZC\\\\Kf<e4ZCZKf<f4ZCZKf<f4ZCZKe<f6000O10000000000000000000000000000O10O100000000000000000000O100000O1000000000O100000000000000O10000000000000000O1000000O100000000O1000000O1000000O10000O1000000O1000000O100000_NdCTJ[<m5eCSJ[<m5eCSJ[<m5fCRJY<o5gCQJY<o5hCPJX<Q6gCoIX<R6hCnIX<R6iCnIU<S6kCmIU<T6kCkIT<V6lCjIS<W6mCiIS<X6lChIS<Y6nCfIQ<[6oCeIP<]6oCcIP<^6PDbIo;`6QD_In;b6RD^Im;d6RD\\\\Im;e6SD[Ik;g6VDXIg;l6XDTIf;n6ZDRId;P7\\\\DPIb;S7^DlH`;V7`DjH_;W7aDjH];X7cDgH\\\\;Z7dDfH[;[7eDeH[;[7fDdHY;^7gDaHX;`7iD_HW;a7jD^HU;c7lD\\\\HT;d7mD[HR;g7nDYHQ;g7QEWHn:j7SEUHm:k7TETHk:
n7UEQHj:P8WEoGi:Q8XEnGg:T8YEkGf:V8[EjGc:X8\\\\EhGc:Y8]EgGc:Z8]EeGa:]8_EcG`:_8`E`G_:a8aE_G^:c8aE^G\\\\:d8eE[GZ:g8eEYGY:i8gEWGX:k8gEUGW:m8jERGU:o8kEQGT:Q9kEnFU:S9lElFS:U9mEkFR:V9nEjFQ:X9oEgFP:Z9PFfFP:Z9PFfFo9[9RFcFn9^9SFaFm9`9SF_Fm9a9SF_Fl9b9UF]Fk9c9VF\\\\Fi9e9XFYFi9g9XFXFh9h9XFXFh9h9YFWFf9j9[FUFe9k9[FUFe9k9\\\\FSFe9m9[FSFd9n9]FQFc9o9]FQFc9o9]FQFc9o9^FPFb9P:^FoEb9R:^FnEb9R:_FmEa9S:_FmEa9S:_FmE`9T:`FlE`9T:`FkEa9U:`FjE`9V:`FjE_9W:aFiE_9W:aFiE_9W:aFiE_9W:aFiE^9X:bFhE^9X:bFhE^9X:bFhE^9X:bFhE^9X:bFhE]9Y:cFgE]9Y:cFgE]9Y:cFgE]9Y:cFgE\\\\9Z:dFfE\\\\9Z:dFfE\\\\9Z:dFfE\\\\9Z:dFfE\\\\9Z:dFfE\\\\9Z:dFfE[9[:eFeE[9[:eFeE[9[:eFeE[9[:eFeE[9[:eFeEZ9\\\\:fFdEZ9\\\\:fFdEZ9\\\\:fFdEZ9\\\\:fFdEZ9\\\\:fFdEY9]:gFcEY9]:gFcEY9]:gFcEY9]:gFcEY9]:gFcEY9]:gFcEX9^:gFdEX9\\\\:gFeEY9[:fFgEY9Y:fFhEZ9X:eFjEZ9V:eFlEZ9T:eFmEZ9S:gFnEX9R:gFoEY9Q:fFQFY9o9gFRFX9n9gFUFW9k9hFZFT9f9lF[FS9e9lF\\\\FT9d9kF]FU9c9jF_FU9`9jFbFV9^9gFeFX9\\\\9eFhFZ9X9eFiF[9V9dFlF\\\\9T9cFmF]9a:O00001O000000[NgFYGY9d8jF]GU9a8lF`GT9]8oFcGQ9\\\\8PGdGP9[8QGeGo8[8PGgGo8X8RGhGn8X8RGhGm8X8TGhGl8X8SGiGm8W8SGiGm8V8TGjGl8V8SGkGm8U8SGkGm8T8TGlGl8T8SGmGm8S8SGmGm8S8QGoGo8P8QGRHn8n7PGTHP9l7nFVHR9j7iF[HW9l900000000000000000000000000000000000UMiFdIV9X90SMjFfIV9Z6jFfIV9W90RMjFhIV9X6jFhIV9V90QMjFjIV9V6jFjIV9U900000001O00000000000000000000000000000000000000000000001O0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000[LlFTKT9l4lFTKU9k4kFUKU9j4lFVKT9j4kFWKU9i4kFWKU9h4lFXKT9h4lFXKT9h4lFXKT9h4lFXKT9h4lFXKT9g4lFZKT9f4lFZKT9f4lFZKT9f4lFZKT9f4lFZKT9f4kF[KU9e4kF[KU9d4lF\\\\KT9d4kF]KU9c4kF]KU9c4kF]KU9c4kF]KU9\\\\800001ULjF`KV9`4jF`KV9Z80000000000000ULjFbKV9Y800000000001O00000000000000001O0000000O1000001O000000001O00000000001O00000000000O2O000000000O2O00000O101O0O10000O2O00000O101O000O10001O000O10001O000O10001O0O101O0O2O000O2O1N101N101N101N2N101O0O2O001O1N101O001O001N2O001O001O0O2O1O001N2O001O1N2O1N2O2M3N2M4M3L:G5K2M4M2N2M2O1O0O2O001O000O2O00001N1000001N10001O0O101O001N101O001N10
1O1N101O001N1SMhBhMY=W2gBiMY=W2gBhMZ=X2fBhM[=V2fBjMZ=V2fBjMZ=V2fBjM[=T2fBkM[=U2dBlM\\\\=T2dBlM\\\\=T2dBlM]=S2cBmM]=R2dBnM\\\\=R2dBmM]=S2bBnM^=R2bBnM^=Q2cBoM]=Q2cBoM^=o1cBPN^=P2aBQN_=o1aBQN_=n1bBRN^=n1aBRNa=l1`BTN`=l1_BUNa=j1`BUNa=j1`BVN`=g1bBZN_=b1dB]N]=`1eBaN[=\\\\1hBdNY=Y1hBgNY=X1hBhNX=V1iBjNY=T1hBlNX=S1hBnNX=Q1iBoNX=o0hBQOY=n0hBROX=m0iBSOX=k0hBVOX=i0iBVOY=i0gBWOY=h0hBXOX=h0gBYOY=f0iBYOX=f0hBYOY=f0hBZOX=f0hBZOX=e0iB[OX=d0iB[OW=d0jB\\\\OV=c0kB\\\\OV=d0jB\\\\OW=b0kB]OU=b0lB^OT=a0mB_OT=`0mB^OT=a0mB_OS=`0nB@R=?PC_OR=?oBAQ=>PCAQ==RCBo<<RCCo<<RCCo<<SCBo<<RCCo<<RCCo<<SCCn<;SCDn<<RCCo<<RCDo<:SCEm<;SCDo<:RCFn<:RCEo<:SCEn<:RCFn<:RCFo<8RCGo<9RCFn<9TCFm<9TCEm<;TCDm<:UCEk<;VCCl<;VCDk<:WCDj<<WCCj<;XCCj<;XCBk<<WCBk<=VC@m<;XCCl<6_C_Ol<6_C_Olf0\"}}, {\"image_id\": 4, \"category_id\": 1, \"bbox\": [71.0, 264.0, 170.0, 87.0], \"score\": 0.9999998807907104, \"association_id\": 1, \"light\": [-2.44331693649292, -1.908146619796753, 2.330742835998535, 1.7796013355255127], \"segmentation\": {\"size\": [480, 640], \"counts\": \"bcQ18V>e0L3M1O1M3N2O100O1O0N3O10O010ON10011O0000ONO4N2N3O000M300010O001N1O20O02N1O3L4M2O1N2N3M3N1ZCTNU<n1hCUNW<k1hCWNV<k1gCXNX<i1eCYNZ<X2O0O1000^OjCZNV<e1mCXNT<f1oCXNR<h1oCVNR<i1oCVNR<j1QDSNo;m1b00000O11O1O2YCTNV<o1dCUN[<X201O0000XOeCkN[<S1hClNX<S1kCkNU<U1PDeNQ<Y1SDfNl;Z1UDbN^OL\\\\<a1]DaNc;_1\\\\DbNd;]1YDgNg;Y1XDhNh;X1XDhNh;W1XDiNi;W1WDiNi;W1VDjNj;V1VDjNj;U1WDkNi;U1WDkNi;U1VDlNj;T1VDlNj;T1VDlNj;T1VDlNj;T1VDkNk;U1UDkNk;U1TDlNl;T1TDlNl;S1UDmNj;T1VDlNj;T1VDlNj;T1UDmNk;S1UDmNk;S1UDmNk;S1TDmNm;S1SDmNm;S1SDnNl;R1SDoNm;R1RDnNm;S1SDlNn;U1PDgNU<Z1jCeNW<^1fCaN[<`1eC\\\\N^<d1cCXN`<i1^CZNINc<[2O2M1O1OO1O2N13M0001N1O2O1O00010O1O1O1O1N2N3L6K2bNoBi0NkNR=:RCo0n<nNUCi0GVOh=g0XBZOQ>:PBHS>2nAOU>NlA2V>KjA7W>EiA=\\\\>1O1000000O101O0000000000O100000000000000O1BbA:YUk5\"}}, {\"image_id\": 4, \"category_id\": 1, \"bbox\": [238.0, 269.0, 276.0, 185.0], \"score\": 0.9999955296516418, \"association_id\": 2, \"light\": [-2.7548646926879883, -1.9430928230285645, 2.6450157165527344, 
1.7103960514068604], \"segmentation\": {\"size\": [480, 640], \"counts\": \"Vl_37f>8I5J4J5O2M4lDPOR8S1kGTOi7W1PHQOc7g1UFhM03o0b0i8d2lF_MR9f2hF\\\\MW9g2fF[MY9g2cF[M\\\\9f2eFYM[9h2eFWMZ9j2hFTMW9m2kFPMU9R3lFlLS9U3PGgLP9Z3oFUL[O3P:j3cFVL[OOR:l3bF[L]9e3^F`La9a3_F`L`9`3_FaL`9`3`F`L_9`3bFaL\\\\9`3cFaL]9_3cF`L]9a3cF^L]9c3bF]L_9c3aF\\\\L`9d3`F[L`9e3`F[La9f3^FYLc9g3]FYLc9h3[FXLe9i3[FWLe9j3ZFVLe9k3ZFVLf9k3XFULh9l3XFTLh9m3VFTLi9m3WFSLi9n3VFRLi9o3XFoKi9R4WFmKi9T4WFjKj9W4VFbKL0n9h4TFVKl9l431O2RFTKd9U5O1O1O1O1O5K3M1O1O001O000000000000QNmFQNS9m1nFTNR9k1nFVNR9i1nFXNR9g1mF[NS9e1lF[NU9e1kF[NU9d1kF]NU9c1kF\\\\NV9c1jFlMOQOW9R3kFmM5iNQ9Z3hFoMi9P2WFQNi9n1WFSNi9l1XFTNh9l1WFVNh9j1XFUNi9k1WFTNj9l1VFSNk9l1UFSNm9m1SFQNo9o1QFPNP:P2PFoMQ:Q2nEoMS:Q2mEoMS:Q2mEoMS:Q2mEnMT:R2lEmMV:R2kElMV:T2jEkMW:V2hEjMX:V2hEiMZ:V2fEjMZ:V2fEiM[:X2eEfM]:Y2cEfM^:[2bEcM`:\\\\2`EcMa:^2_EaMa:_2_E`Mb:_31O0O100O1O2N100O1000000O1000000000000000000000000000QFQLm8o3RGTLl8l3SGVLl8j3RGYLm8g3QG[Lo8e3nF_LQ9a3kFcLU9]3iFfLV9Z3hFhLX9X3fFkLY9T3gFmLY9R3hFmLY9Q3iFnLX9P3jFoLV9Q3kFmLW9g2[FgL?=[9m2VFeL>:a9R3QFaL`09c9W3cFdLa9^3^F^Lf9c3YF\\\\Lh9d3XF[L_O4n9b3bFYL_O9l9a3cFUL@<m9c3cF]L]9h3]FYLb9m3YFSLg9o3VFRLj9n3UFSLk9m3TFTLk9m3TFTLl9`40O1000000O01000O1001O00000000O10000O1O2A>N21O0000001O01O0O10001O1O00001O0QOaE\\\\Ma:`2aE`M`:^2\\\\EfMf:X32O1O2O0jNREYNo:d1TE[Nn:`1UE`Nl:Z1ZEbNj:]1VEbNm:]1SEbNn:]1REcNP;[1QEeNP;[1oDeNR;Z1nDfNS;Y1mDfNU;Z1jDfNW;Z1hDfN[;X1dDgN_;X1`DhNb;V1^DjNc;V1]DhNe;W1[DiNf;W1ZDhNf;X1[DfNg;Z1YDcNk;\\\\1VDaNl;_1TD[NS<k1fCmMO1^<V2`CSNZ<b2H0000O10000019I14K0NO0O0100O001O2N4L5K3M2N1O010O0O2O1O1O2N2N2N2N1O1N2O1N3M8E^fj1\"}}, {\"image_id\": 5, \"category_id\": 1, \"bbox\": [640.0, 476.0, 50.0, 26.0], \"score\": 0.973321259021759, \"association_id\": 2, \"light\": [-1.558318018913269, -2.1439743041992188, 1.453538417816162, 1.9778165817260742], \"segmentation\": {\"size\": [683, 1024], \"counts\": \"][[=4Ue03M2H8O1O2N100O1001OO100000001N101O1O0000000000O10000O1O110O001O0J\\\\[OEed09[[OHfd0>2M4DU[O2Re0000000O10000O10000O1O1O_Yn6\"}}, 
{\"image_id\": 5, \"category_id\": 1, \"bbox\": [0.0, 153.0, 322.0, 187.0], \"score\": 0.9981808662414551, \"association_id\": 1, \"light\": [-1.7202225923538208, -1.363750696182251, 1.724881649017334, 1.2142659425735474], \"segmentation\": {\"size\": [683, 1024], \"counts\": \"_6X2mb0<G4O2M2j]OUMja0l2T^OXM4OUa0i2f^OZM3OVa0g2b^OUM077MVa0i2b^O_M7HWa0i2b^O`M6FYa0Z3g^OfLYa0[3f^OeLZa0[3f^OeLZa0[3f^OeLZa0\\\\3e^OcL\\\\a0]3d^OcL]a0]3b^OcL^a0]3b^OcL^a0]3b^OcL^a0]3b^OcL^a0]3b^ObL`a0]3a^ObL_a0^3701O001O001N10000^OR^OgMoa0X2Q^OhMoa0X2Q^OhMoa0X2Q^OhMoa0X2Q^OhMoa0X2Q^OgMPb0Y2P^OgMPb0Y2P^OgMQb0X2o]OhMQb0X2P^OgMPb0Y2P^OgMPb0Y2P^OgMPb0Y2P^OgMPb0Y2P^OfMQb0Z2o]OfMQb0Z2o]OfMQb0Z2o]OfMQb0Z2o]OfMQb0[2n]OeMRb0[2n]OeMSb0Z2m]OfMSb0Z2m]OfMSb0Z2m]OfMSb0Z2m]OfMSb0Z2m]OfMSb0Z2m]OfMSb0Z2m]OfMSb0Z2m]OfMSb0Z2m]OfMSb0Z2m]OfMSb0Z2m]OfMSb0Z2m]OfMSb0Z2m]OfMSb0Z2m]OfMSb0Z2m]OfMSb0Z2m]OfMRb0[2n]OeMRb0[2n]OeMRb0[2n]OeMRb0[2n]OeMRb0[2n]OeMRb0[2n]OeMRb0[2n]OeMRb0[2n]OeMQb0\\\\2o]OdMQb0\\\\2o]OdMQb0\\\\2o]OdMQb0\\\\2o]OdMQb0\\\\2o]OdMPb0]2P^OcMPb0]2P^OcMPb0]2P^OcMoa0^2Q^ObMna0_2R^OaMna0_2R^OaMma0`2T^O_Mka0b2U^O^Mka0b2U^O^Mja0c2V^O]Mja0c2V^O]Mia0d2W^O\\\\Mha0e2X^O[Mfa0g2Z^OYMda0i2\\\\^OXMba0j2]^OVMba0k2^^OUMaa0l2_^OTM`a0m2`^OSM`a0m2a^ORM^a0o2b^ORM\\\\a0o2d^OQM[a0Q3d^OoL\\\\a0Q3d^OoL[a0R3e^OnLZa0S3g^OlLYa0T3g^OmLWa0U3h^OkLWa0V3i^OjLWa0W3i^OhLVa0Y3j^OgLUa0h3O1O1O2N1O1O100O2O0O1O2N1N2N2N3N1O100O101N100O2N1O101N100000001O000000XLl_OZ2S`0dMR@Y2n?fMV@W2j?gMY@X2g?gM[@Y2e?eM]@Z2d?cM_@\\\\2a?bMb@]2_?]Mh@a2Y?UMQAj2P?QMUAo2k>mLXAS3a`0O001O00100O001O001O01O01O0000000010O000001O0000001O00000Di]OdMXb0[2h]OeMXb0[2i]OdMWb0]2i]ObMXb0]2h]OcMXb0]2h]OcMYb0\\\\2h]OcMYb0\\\\2g]OdMZb0[2f]OdM\\\\b0[2e]OdM\\\\b0[2d]OeM\\\\b0\\\\2c]OcM_b0\\\\2a]OdM_b0\\\\2b]OcM^b0]2b]ObM`b0]2`]OcM`b0^2`]O`Mab0`25010O0010O01O0X]O_Mgb0`2Y]OaM3O^b0`2_]ObM00bb0f20O0F^]OlMbb0S2^]OmMbb0S2^]OnM1G\\\\b0[2c]ORN^b0m1b]OTN]b0k1c]OVN]b0j1c]OWN]b0h1c]OXN]b0\\\\20O1O1O2N1O1O1O100O001O1O1O2N1O2UOS]OTOmb0e11UOR]OWOob0h0Q]OXOob0h0Q]OXOPc0f0P]O[OPc0e0P]O[ORc0b0o\\\\
O^ORc0`0n\\\\OARc0?n\\\\OASc0=m\\\\OESc0:m\\\\OFTc08l\\\\OITc07l\\\\OIUc06j\\\\OKVc04k\\\\OLVc03i\\\\ONXc00i\\\\O0Wc00i\\\\O0Wc0Oi\\\\O2Xc0Li\\\\O4Wc0Kj\\\\O5Wc0Ji\\\\O6Wc0Ij\\\\O7Vc0Ii\\\\O8Xc0Fi\\\\O:Wc0Fi\\\\O;Vc0Dk\\\\O<Vc0Cj\\\\O=Vc0Bk\\\\O>Vc0@k\\\\O`0Vc0^Oj\\\\Oc0Wc0\\\\Oi\\\\Od0Xc0ZOi\\\\Of0Xc0XOi\\\\Oh0Wc0XOi\\\\Oh0Xc0WOi\\\\Oh0Wc0XOi\\\\Oh0Xc0VOi\\\\Oj0Xc0UOh\\\\Ok0Yc0TOg\\\\Ol0Zc0ROg\\\\On0Zc0QOf\\\\Oo0Zc0POg\\\\OP1Zc0oNf\\\\OQ1jc001O01O0001O001O00001O00001O00000O2O001O001O1N101O0000001N10000O2O2M2N5ES[O0Ue0N2Onlc>\"}}, {\"image_id\": 5, \"category_id\": 1, \"bbox\": [322.0, 66.0, 687.0, 351.0], \"score\": 0.9981313943862915, \"association_id\": 3, \"light\": [-2.0658626556396484, -1.5898399353027344, 1.9670677185058594, 1.5236592292785645], \"segmentation\": {\"size\": [683, 1024], \"counts\": \"hTg62Xe0:bIFXG>Y80dG3X81RD^OX1d0c:0RD_OY1d0c:NQDA[1c0b:MQDA]1c0a:M[C]OTO6n2a0b:NYC@RO4R3?b:NXCDnN2W3=b:NVC;W2Hb:NTC=Y2Ec:5kB9`2Ce:Y2ZEgMf:Z2YEgMf:Z2YEfMg:\\\\2WEdMh:a2SE`Mm:d2oD\\\\MQ;h2kDXMU;i2jDWMV;k2hDVMW;k2gDVMY;k2fDUMZ;l2eDTM[;m2dDSM\\\\;m2dDSM\\\\;n2bDSM^;n2aDRM_;o2`DQM`;o2`DRM_;o2`DPMb;P3^DoLb;R3]DnLd;R3\\\\DmLd;U3ZDkLg;V3XDhLi;Z3VDeLk;\\\\3SDdLn;VOlBV3U1cMo;ROXCU3i0gM\\\\=W2bBiMa=U2_BiMe=U2[BiMi=U2WBiMk=V2VBhMl=X2UBeMl=\\\\2UBaMm=^2TB`Mn=`2SB]Mn=d2RBZMP>e2QBYMQ>g2oAWMR>i2PBTMQ>m2W2O0010O0100O10O0101O2M3N2M3N1N100O100O100O10O01O10O01O010O10O010000O0101O1O0O2O001N10001N1000O0100O100O00100O1O1O10O02N2N101N2N1O2dIRMXJo2c5XMYJi2d5]MnEOU2d2l7aMjENY2b2j7dMiEM]2_2h7fMiEL_2^2f7jMgEKb2\\\\2e7kMgEKc2[2d7lMgEKd2Y2c7oMgEIg2X2`7QNVEWOEb0d3V2`7SNSE;]3b1_7UNPE=`3_1_7UNmD?d3\\\\1_73`HN_72bHN^72aHN_73`HN_73`HN_73`HM`73`HN_73`HN_73`HO^72aHO^72aHO^71cHO\\\\72cHN]73bHN]72cHO\\\\72cHN]72cHO\\\\72cHN]72cHO\\\\72dHM\\\\73dHM]73bHN]72cHN]73bHM^73bHM^73bHN^72aHN_72aHN`72_HOa71^HOb71^HOc71]HOd70[H0f70YH0i7NWH3j7LUH4m7KRH5P8JoG6S8IlG7U8HkG9V8FiG:X8FgG:Z8FeG:\\\\8FcG:^8FaG:_8FaG;_8E`G;a8E^G;b8E^G;c8E\\\\G;d8E\\\\G;e8EYG<h8CXG=h8DWG<j8DTG=m8BSG>o8AoF`0R9@lFa0U9_OjFa0X9^OfFc0\\\\9\\\\O
bFe0`9[O^Fe0d9[OYFf0i9YOUFh0m9WOQFj0Q:VOlEk0U:UOiEl0Y:SOeEn0\\\\:QOdEo0]:QOaEP1`:PO_EP1b:oN]ER1d:nN[ER1e:nN[ER1f:nNXES1h:mNXES1i:mNVES1j:nNVEQ1k:oNTEQ1m:nNSER1n:nNQER1o:oNQEP1P;oNPEQ1Q;oNnDQ1T;nNlDQ1U;nNkDR1V;nNiDR1Z;lNgDR1_;hNbDV1e;eN\\\\DY1e;fN]DX1d;hN]DV1c;jN_DT1b;lN_DR1b;mN_DR1a;oN_DP1b;PO^Do0b;QO^Do0c;RO\\\\Dm0d;UO[Dj0f;WOYDh0g;YOYDf0g;\\\\OXDc0i;]OWDb0i;@WD>j;BVD=j;DVD;k;DVD;k;EVD9k;FVD9j;GWD8k;GUD8l;GUD8l;GUD8k;HVD7k;IUD6l;ITD7m;HTD7l;IUD6l;JTD5l;KUD4l;KTD5l;KUD4l;KTD5l;KUD4l;LSD4m;LTD3l;MTD3m;LTD3l;MUD2l;MTD3l;MUD2k;OTD1m;NTD1l;OUD0l;OTD1l;OUD0l;OUD0k;0UD0k;0VDOk;1UDNk;2UDNk;2VDMk;2VDMj;3VDMj;3WDLi;4WDLi;4XDKi;5VDKj;5WDJi;6WDJi;6WDJi;6WDJi;6XDIh;8WDHi;8WDHj;7VDIj;8UDIj;7VDIj;7VDIj;8UDHk;8UDHk;8UDHk;9TDHk;8UDHk;9TDGl;:SDFm;:SDFm;:SDFm;;RDEn;;RDEn;<QDEo;:QDFo;:QDFo;;PDEP<;PDEP<;PDEP<;PDEP<<oCDQ<<oCDQ<<oCDQ<<oCDR<<mCDS<<lCET<;lCET<<kCDU<<kCDU<<kCDV<<iCDW<<iCDW<<iCDW<=hCCY<<fCEZ<<eCD[<<eCD[<<eCD\\\\<<cCD]<<cCD]<=aCD`<;`CDa<=^CCb<=^CCc<=[CDe<<[CDf<;ZCEf<;ZCEf<<YCDh<;WCFi<:WCFi<:WCFj<9VCGj<9UCHk<9TCGl<9TCGm<8SCHm<8SCHm<8SCHm<8RCIo<6QCJo<7PCIP=7PCIP=7PCIP=7PCIQ=6nBKR=5nBKR=5nBKR=6mBJS=6mBJS=6mBJT=5lBKT=5lBKT=5lBKT=5mBJS=6mBJT=5lBKT=6kBJU=6kBJU=6kBJV=5jBKV=5kBJU=6kBJV=5jBKV=5jBKV=5jBKW=5iBJW=6iBJX=5hBKX=5iBJX=5hBKX=5iBJX=5hBKX=5hBKX=5iBJX=5hBKX=5jBIV=7kBHV=7kBHU=8lBGT=9nBER=;oBEP=;QCDP=;QCDo<<RCCn<=RCCn<=SCBm<>SCBm<>TCAm<>SCBm<>SCBm<>TCAl<?TCAl<?TCAl<?UC@k<`0UC@l<?TCAl<?UC@k<`0UC@k<`0UC@k<`0UC@k<`0VC_Oj<a0VC_Oj<a0VC_Oj<a0WC^Oi<b0WC^Oi<b0XC]Oh<c0XC]Oh<c0iAaMj0l1]=d0gAaMm0j1\\\\=e0gAbMl0i1^=d0eAdMn0g1]=e0eAeMn0e1]=f0dAgMn0c1^=f0dAgMo0b1]=g0cAiMP1_1]=h0cAjMP1]1]=i0cAjMQ1\\\\1\\\\=j0cAjMR1[1[=k0cAkMR1Y1[=l0cAkMS1X1Z=m0bAlMU1V1Y=n0bAlMU1V1Y=n0bAmMU1T1Y=o0bAmMV1S1X=P1bAmMV1S1X=P1bAmMW1R1W=Q1bAmMW1Q1X=S1`AlMY1P1W=T1`AlMZ1o0V=U1`AmMZ1m0V=W1`AkM[1l0V=Y1_AkM\\\\1k0U=[1^AjM^1j0T=\\\\1^AjM`1g0S=`1\\\\AjMa1e0S=a1\\\\AjMc1b0R=e1ZAiMe1a0Q=f1ZAiMg1>P=j1XAhMj1;o<m1WAhMo11o<X2QAhMU2Eo<c2l@hMda0Z2[^OfMea0Z2[^OfMea0Z2[^OfMea0[2Z^OeMfa0[2Z^OeMfa0[2Z^OfMea0[2Z^OeMfa0[2[^OdMea0\\\\2[^OdMe
a0\\\\2[^OdMea0\\\\2[^OdMea0\\\\2[^OeMda0[2\\\\^OeMca0\\\\2^^OcMba0^2]^ObMca0^2]^ObMca0^2]^OcMba0]2^^OcMba0]2^^OcMaa0^2_^ObMaa0^2_^ObMaa0^2_^ObMaa0^2_^OcM_a0^2a^ObM_a0^2a^ObM_a0^2b^OaM^a0_2b^OaM]a0`2c^O`M]a0`2c^O`M]a0a2b^O_M]a0b2c^O^M]a0b2d^O]M\\\\a0c2d^O]M[a0d2e^O\\\\M[a0d2e^O\\\\MZa0e2g^O[MXa0e2h^O[MXa0e2h^O[MXa0f2h^OYMXa0g2h^OYMXa0g2h^OYMXa0g2i^OXMWa0h2i^OXMWa0i2h^OWMXa0i2h^OWMWa0j2i^OVMWa0j2i^OVMWa0k2i^OTMWa0l2i^OTMWa0l2i^OUMVa0k2j^OUMVa0l2i^OTMWa0l2j^OSMVa0m2j^OSMVa0n2i^ORMWa0n2j^ORMTa0o2l^OQMTa0P3k^OPMUa0P3l^OoLTa0R3k^OoLTa0Q3l^OoLUa0P3k^OPMUa0P3k^OPMVa0P3i^OPMXa0o2i^OQMWa0n2i^ORMXa0m2h^OSMYa0m2f^OSM[a0l2e^OTM[a0l2e^OTM\\\\a0k2d^OUM]a0j2d^OUM]a0k2b^OUM_a0j2a^OVM`a0i2`^OXM`a0g2a^OXM_a0h2a^OXM`a0h2`^OWMaa0h2_^OXMba0g2^^OYMba0g2_^OXMba0g2^^OYMba0g2^^OYMba0g2^^OYMca0g2]^OXMca0h2]^OXMda0g2\\\\^OYMda0g2\\\\^OYMda0g2\\\\^OYMea0g2Z^OYMfa0g2[^OXMea0h2[^OXMfa0h2Y^OXMga0h2Y^OXMga0h2Z^OWMga0i2X^OWMha0i2Y^OVMha0j2W^OVMia0j2W^OUMja0l2V^OSMka0l2U^OTMka0m2T^OSMla0n2T^OPMna0o2R^OQMna0P36010O010O00010O01O001O001O010O1O00001O0010O01O00001O01O01O00001O00001O001O001N2O1O001O1O0O2O1O001O0O2O00c]OfMja0Y2U^OhMka0X2n]OoMSb0o1n]OQNRb0o1n]OQNSb0n1l]OSNTb0m1l]OSNTb0m1l]OSNUb0l1k]OTNUb0l1j]OUNWb0j1i]OVNXb0i1h]OXNXb0f1h]O[NYb0d1g]O\\\\N[b0b1e]O^N\\\\b0a1d]O_N^b0_1a]ObN`b0]1[]OVN0=gb0[1Y]OjNib0T1W]OlNjb0S1V]OmNkb0R1T]OoNmb0P1S]OPOob0n0Q]OROPc0m0P]OSOPc0l0Q]OTOPc0k0o\\\\OVORc0i0n\\\\OWOSc0h0m\\\\OXOSc0h0m\\\\OXOTc0g0k\\\\O[OUc05j\\\\OB19Uc04o\\\\O_OL=Vc02T]O[OGc0Vc00Z]O1hb0LY]O4hb0IY]O8jb0DW]O<kb0@W]O`0lb0[OV]Oe0gc0100000000000000O1000O10000000000000O1000000000O1000001N100O2N101N1M;Doe9\"}}, {\"image_id\": 5, \"category_id\": 1, \"bbox\": [117.0, 378.0, 733.0, 238.0], \"score\": 0.8118217587471008, \"association_id\": 5, \"light\": [-2.445855140686035, -1.231278896331787, 2.3154830932617188, 1.0974282026290894], \"segmentation\": {\"size\": [683, 1024], \"counts\": 
\"\\\\]^21Te0;I;F5L2N1W\\\\OTOQc0m0m\\\\OUOQc0m0n\\\\OUOPc0l0n\\\\OVOQc0k0m\\\\OXOQc0k0j\\\\OYOTc0i0i\\\\OZOUc0i0g\\\\OZOWc0^1O2N1O1O001O0O2O1O001O1O001O1O001O00001O00001O001O001O001O001O1O001O001O001O00010O00001O000010O0001O0010O0100000O10O1000O01000O010O101N2O1N2O0O2O001N3N4L2N0000000000O01000O010O10O00010O010O00010O01O010O001O01O01O0010O01O001O10O0001O0100O010O1O10O010O10O01O100T^ObMi`0_2U_OdMi`0]2U_OfMj`0Z2U_OgMj`0Z2T_OhMk`0Y2S_OiMl`0X2R_OkMl`0U2S_OSNg`0m1W_OTNi`0m1V_OSNj`0o1S_ORNm`0o1P_OSNPa0n1m^OTNSa0m1j^OTNXa0m1d^OTN]a0m20O101O0O1000001O00000000001O000O10000010O0001O000010O01O001O10O01O001O010O000010O00000001O0001O000000000001O00000001O00000001O0000000001O000[N_L`Ab3^>_LbAa3^>_LbAb3\\\\>_LeA`3[>`LeAa3Y>`LgA`3X>aLhA`3W>`LiAa3U>`LkA`3T>aLlA`3S>`LmAa3Q>`LoAa3P>_LPBb3n=`LQBa3n=_LRBa3n=_LSBa3l=_LTBb3k=^LUBb3k=^LUBc3j=]LVBd3i=\\\\LWBd3i=\\\\LWBe3h=[LXBe3i=YLXBg3m=TLSBl3g?O0001O0001O0000000010O00000001O0000010O00001O00001N10001O0000001O0000001OQO\\\\LW@d3i?^LU@b3k?_LT@a3k?aLU@]3l?cLT@]3l?cLT@]3l?dLS@\\\\3m?cLT@]3l?cLT@]3l?cLU@\\\\3k?dLU@\\\\3k?dLU@\\\\3l?bLU@^3k?bLV@]3j?aLX@^3j?_LX@a3i`001O00001N10001O000O10001O0000000O10000000000000000O1000001O00000000000000000O10000000000000000000000001O00000000000^MjLnBW3Q=kLnBU3R=mLlBS3S=oLlBQ3S=RMkBn2U=SMjBm2V=SMjBm2U=UMjBk2V=UMjBk2V=UMjBk2W=TMiBl2W=TMiBl2W=TMiBl2W=TMiBl2W=TMiBm2W=QMjBo2V=QMjBo2V=QMjBo2V=QMjBo2W=PMiBP3W=oLjBQ3V=oLjBQ3V=nLkBR3V=lLkBU3T=jLmBV3S=jLmBV3S=hLoBX3R=fLoBZ3a?001O00000001O0001O00000001O000001O000[NgLXAY3e>kLZAU3f>lLYAT3g>mLYAR3g>nLYAR3g>oLXAQ3h>oLXAQ3h>oLXAR3g>nLYAR3g>nLYAR3g>oLXAQ3h>oLYAP3g>PMYAP3g>PMZAo2f>QMZAo2f>QMZAo2f>QM[An2e>RM[An2e>RM\\\\Am2d>TM[Al2e>TM\\\\Ak2d>UM\\\\Ak2d>UM\\\\Ak2d>UM]Aj2d>UM\\\\Ak2d>UM\\\\Ak2d>UM]Aj2c>WM\\\\Ai2d>WM\\\\Ai2d>WM\\\\Ai2e>VM\\\\Ai2d>WM\\\\Ai2d>WM\\\\Ai2e>VM[Ak2d>UM\\\\Ak2d>UM]Aj2c>VM]Aj2d>UM\\\\Ak2d>UM\\\\Ak2d>UM\\\\Ak2e>TM\\\\Ak2d>UM\\\\Ak2d>UM\\\\Ak2e>TM[Am2d>SM]Al2c>TM]Al2d>TM[Al2e>TM\\\\Ak2e>TM[Al2e>TM[Al2e>TM\\\\Al2c>TM]Al2c>TM]Al2c>TM]Al2d>SM]Al2c>TM]Am2b>SM^Am
2b>SM^Am2b>SM^Am2b>SM^Am2b>TM]Am2b>SM^Am2c>RM]An2c>RM]Ao2b>RM]An2c>RM]An2c>RM]Ao2b>QM^Ao2b>RM^Am2c>RM]Ao2b>QM^Ao2b>RM]An2c>RM]An2c>RM]Ao2c>PM]AP3c>QM\\\\Ao2l=lLVA4n0P3k=nLVA3n0o2j=PMXA2m0o2i=PMZA1m0o2i=PMZA2l0n2i=RMZA0m0n2i=RMZA0m0n2f=UM]ANl0n2b=XMbAJl0n2`=ZMeAGk0o2_=\\\\MeAEl0o2^=]MfAEk0n2^=^MgADk0o2]=]MhADl0n2[=`MhABm0n2[=`MhACl0n2Z=`MjABl0n2Z=`MjABl0n2Z=aMiAAm0o2X=aMkA@m0o2X=aMkAAm0n2W=bMkA@n0n2V=cMlA_On0o2U=bMmA_On0P3T=bMmA^Oo0P3T=bMmA^Oo0P3T=cMlA^Oo0P3S=cMoA\\\\Oo0P3R=dMoA\\\\Oo0Q3Q=dMoA[OP1Q3h<cLQBR16ZOQ1R3g<eLoAo09ZOQ1R3g<dLPBP18ZOR1Q3f<dLQBQ18ZOP1Q3P=eMPBZOP1R3n<eMRBYOQ1Q3l<gMSBXOQ1R3j<gMUBWOR1Q3i<hMUBWOR1Q3i<iMTBVOS1R3g<gMiAQO86X1Q3g<hMiARO75Y1Q3g<hMiARO75Z1Q3e<gMkASO65Z1Q3e<gMkATO54[1Q3e<gMkATO54\\\\1P3d<hMkATO63[1Q3d<hMkATO64Z1Q3d<gMlAUO53\\\\1P3c<hMlAUO53\\\\1P3c<hMlAUO63Z1P3d<hMlAUO63[1o2c<iMlAVO52\\\\1o2c<hMmAWO43[1n2d<hMmAWO43[1o2c<gMnAWO53Z1n2c<hMnAWO53Z1n2c<iMmAVO64Y1m2d<iMlAXO62[1l2b<kMmAWO63Z1k2c<YNTBkNY1l2c<YNTBlNY1j2c<ZNTBlNZ1i2b<\\\\NSBkN[1i2a<]NTBkN[1g2a<^NUBjN[1g2`<_NUBjN[1g2`<`NTBiN]1f2^<bNUBiN]1d2^<cNUBiN]1d2]<dNVBhN^1c2\\\\<fNVBfN^1d2[<gNWBeN_1c2Z<iNWBcN`1c2Y<jNWBbNa1d2X<jNXBaNa1d2W<lNWB`Nb1d2W<lNXB_Nb1c2V<oNYB]Na1d2V<POXB[Nd1d2T<QOYBZNc1e2T<QOZBYNb1f2T<ROZBWNc1f2S<SO[BUNd1g2Q<UOZBTNe1g2Q<UO[BRNf1h2o;WOZBQNg1h2o;WO[BoMh1i2m;YO_Df0a;[O_Dd0a;]O_Db0a;_O_D?b;A_D>a;C_D<a;E^D;b;E_D:`;H`D7`;J`D5`;KaD4_;LbD2_;O`D1`;OaD0_;1aDN_;2aDN_;2bDM^;4aDK`;5`DK`;5aDI`;7aDG`;:_DEb;;_DDa;<_DCb;>^D@c;`0^D]Od;c0]D[Od;e0]DYOd;h0\\\\DUOf;k0[DSOf;m0\\\\DoNf;Q1[DlNg;T1ZDjNg;W1YDfNi;Z1YDcNh;]1YDaNh;_1ZD]Nh;c1YD[Nh;f1YDWNh;i1YDUNg;l1ZDPNi;P2XDmMk;R2VDkMl;U2UDgMn;Y2TDcMn;]2SD_MP<a2]31O0000000000001O00000000001O0000001O0000000001O0000000000b]O\\\\MUb0d2k]O\\\\MVb0d2i]O]MVb0c2i]O^MWb0b2i]O_MWb0`2h]OaMXb0_2h]ObMXb0]2g]OdMZb0[2f]OfMYb0Z2f]OgM[b0X2d]OiM]b0c20O1O1O1O2O2M2N2N2N1ZOU]OjNmb0R1U]OoNmb0l0U]OTORc0b0Q]O^OXc07i\\\\OJ\\\\d000000000000O101O00000O10000O100O10000O100O1000000000000000000000000000000001O000O100000000000001N101N2O0O2N2M3N2Fecc3\"}}, {\"image_id\": 5, 
\"category_id\": 1, \"bbox\": [138.0, 173.0, 771.0, 449.0], \"score\": 0.9071166515350342, \"association_id\": 4, \"light\": [-2.458665132522583, -1.4000746011734009, 2.4414639472961426, 1.244713544845581], \"segmentation\": {\"size\": [683, 1024], \"counts\": \"h^l23We02N2N2N2ROGg\\\\O;Xc0Eh\\\\O<Wc0Eg\\\\O=Xc0Ch\\\\O=Wc0Eg\\\\O=Xc0Ch\\\\O>Wc0Cg\\\\O?Xc0Ah\\\\O`0Wc0Ag\\\\Oa0Wc0Ah\\\\O`0Wc0@h\\\\Ob0Wc0_Oh\\\\Ob0Wc0_Og\\\\Oc0Xc0^Og\\\\Ob0Xc0@f\\\\Ob0Yc0_Of\\\\Oa0Zc0@d\\\\Ob0[c0_Oc\\\\Ob0]c0_Ob\\\\Ob0]c0@`\\\\Oa0_c0g0O00001O000O2O00001O000100O010O100O2O8H4L1O00O1O2O1N2N2O1N2N101N2N00100O001O0010O01O01O010O0010O010O010O0100O010O10O0100000O01000O10O10O1O010O010O0010O010O01O10O01O100O100O100O101N1O2O0O100O100O100O10O01O1O1O1O2N2N2N2N1O2N2N1O2N001O1O1O001O1O001O001O001O00001O001O0O101O0000001O00001O0000001O00000O2O0000001O0000001O000000001O01O00010O00ZN]LdAc3\\\\>]LeAc3Z>^LeAb3Z>_LfAa3Z>`LfA_3Y>bLgA^3Y>bLgA_3W>cLiA\\\\3a:bL[G2TN\\\\3V:oLcGGVNZ3Q:WMgG_OYNY3j9`MjGYO[NX3e9fMmGSO^NW3b9kMnGoN`NU3`9PNmGlNcNT3^9SNnGjNcNS3]9WNmGgNgNQ3Z9[NmGeNiNQ3W9]NnGdNjNo2W9_NnGbNlNn2T9cNnGaNmNl2T9dNnGaNnNk2S9fNnG_NPOj2T9eNmG_NPOl2T9eNlG]NROn2R9eNmGZNSOP3Q9eNmGYNTOQ3P9fNmGVNTOT3P9fNlGTNVOU3n8hNmGoMXOX3l8iNmGlMYO[3j8iNPJW1Q6iNoIV1R6jNmIV1S6kNmIU1S6lNkIT1U6mNjIS1W6nNhIQ1Y6oNfIQ1Z6POeIQ1[6oNeIP1[6QOdIo0]6QObIP1]6QOcIn0^6ROaIn0_6SO`Im0a6SO^In0a6SO_Il0a6UO^Ik0c6TO]Im0b6TO]Il0c6UO]Ik0c6TO]Il0c6UO\\\\Ik0e6UOZIk0g6TOYIm0g6SOXIm0i6ROWIn0i6SOWIm0i6ROWIn0j6ROUIn0l6QOTIo0n6POQIQ1o6nNQIR1P7nNoHR1S7lNmHU1S7kNmHT1ULdL`:W2[IU1ULdLa:V2ZIV1ULeLa:U2YIV1VLeLb:T2XIX1ULeLc:S2WIX1VLfLc:Q2WIY1VLgLc:o1XIY1ULiLc:n1WIY1VLjLb:m1XIZ1ULjLc:k1XI[1ULlLa:j1YIZ1VLnL`:g1ZI[1VLoL_:f1[I[1VLoL`:e1ZI]1ULnLa:f1ZI[1TLPMc:d1YI\\\\1TLQMb:c1ZI\\\\1TLQMb:d1YI[1ULQMb:d1YI[1ULQMb:e1XIZ1VLRMb:d1WIZ1WLRMb:d1WIZ1WLRMb:e1WIX1WLSMb:f1VIW1XLSMc:f1TIW1YLSMc:h1RIU1[LTMb:i1QIS1]LTMb:k1oHQ1_LTMc:l1lHP1`LUMd:m1kHm0aLVMd:o1iHk0cLVMe:P2fHj0eLVMe:R2dHh0gLWMd:S2cHf0iLWMd:T2bHe0jLWMe:T2aHd0jLXMe:V2_Hb0lLXMf:V2]Hb0mLXMf:W2\\\\Ha0nLXMf:X2[H`0oLXMg:X2ZH?oLZMf
:X2ZH>PMZMf:Y2YH=QMZMf:Y2YH=QMZMg:Y2XH<QM[Mg:Z2WH;RM[Mg:[2VH:SM[Mh:[2TH:TM[Mh:\\\\2SH9UM[Mh:]2RH8WMYMi:_2oG8XMYMi:`2nG7YMYMi:a2mG6ZMYMi:b2lG5[MYMj:b2jG5]MXMi:e2hG3_MXMi:f2gG2`MXMi:g2fG0bMYMi:h2cGOdMYMi:j2aGMgMXMh:l2`GLhMXMi:m2]GKjMXMi:n2\\\\GJlMWMh:P3[GInMVMg:R3ZGHoMVMg:R3ZGHPNUMg:S3XGHQNUMg:T3WGGRNUMg:U3VGFTNTMf:V3VGFTNTMg:V3TGFUNTMg:W3SGEVNTMg:W3SGEUNUMh:W3RGDUNVMi:V3RGDUNVMj:V3PGDUNWMk:U3PGDTNXMk:V3PGBUNYMj:U3QGBTNZMk:U3PGBTNYMl:U3PGBTNYMl:V3PG@SN[Ml:V3QG_OSN[Ml:W3PG^OTN[Ml:W3PG^OTN[Ml:X3oF]OTN]Ml:V3PG]OTN]Mk:X3PG[OUN]Mk:Y3oFZOVN]Mk:Y3oFZOVN]Mk:Z3nFYOVN^Ml:Y3nFYOVN^Mk:[3nFWOWN^Mk:[3oFVOVN_Mk:\\\\3nFUOWN`Mj:[3oFUOWN`Mj:\\\\3nFTOXN`Mi:]3oFSOWNaMj:]3nFROXNaMj:]3nFROXNaMj:]3nFROXNaMj:^3mFQOYNaMi:_3nFQOXN`M`7XOdKW4dNQOXN`M`7XOdKX4cNPOZN_M^7[OdKV4dNPOZN^M_7\\\\OcKV4eNoNYN_M_7\\\\OcKV4eNoNYN_M_7]ObKU4fNoNYN_M_7]ObKU4fNoNYN_M_7]ObKU4fNoNYN_M_7]ObKU4gNnNXN`M_7^ObKS4gNoNXN`M^7_OcKR4gNoNXN`M^7@bKQ4hNoNXN`M^7@bKQ4iNnNWNaM^7@bKQ4iNnNWNaM^7AaKP4jNoNVN`M_7AbKo3iNPOVN`M_7BaKn3jNPOVN`M_7BaKn3jNPOVN`M_7C`Km3lNoNUNaM_7D_Kl3mNoNUNaM^7F_Kj3nNoNUNaM^7F`Ki3mNPOUNaM^7G_Ki3mNoNVNaM^7H^Kh3oNnNUNbM^7J\\\\Kf3QOnNUNbM^7K\\\\Kd3QOoNUNbM^7L[Kc3ROoNUNbM^7NYKa3TOoNUNbM^70WK_3VOoNUNbM^72VK\\\\3XOoNTNcM^74TKZ3ZOPOSNbM_76RKX3\\\\OPOSNbM_78PKV3^OPOSNbM_7:kHVO_1m34QOSNbM_7=`H\\\\Oj1c35QORNcM_7Q1YJ[26QORNcM`7Q1XJY27SOQNcM`7Q1YJX26TOQNcMa7Q1WJX28SOPNdMa7R1WJV28TOPNdMb7R1VJU28UOPNdMb7S1WJR28UOPNfMi7l0PJW27WOPNfMo7g0lIY25ZOPNgMQ8d0lIX24\\\\OoMhMR8d0mIU22_OoMhMR8e0PJP2OCoMhMS8d0PJP2OCnMiMS8e0PJn1ODnMiMS8f0PJl1OEnMiMS8g0PJj10EmMjMT8g0oIi10FmMjMT8h0nIh11FnMiMS8j0nIf12FmMjMS8k0nId12GmMjMS8l0mIc13GnMhMS8n0mIa14HlMiMS8o0mI_14IlMiMT8n0lI`14ImMhMS8o0mI_14IlMiMS8P1lI^15IlMiMS8P1mI]14JmMhMR8Q1mI]15ImMhMQ8S1mI[15JmMhMQ8S1mI[16ImMhMP8U1mIY16JmMhMP8U1mIY16KmMfMP8V1nIX16KlMgMP8W1mIW17KlMgMP8W1nIV16LmMfMP8X1lIV18LkMfMQ8X1mIU17MlMeMP8Z1mIS17NlMeMP8Z1mIS18MkMfMP8[1mIQ18NkMeMQ8\\\\1lIQ18OkMcMQ8^1lIo09OjMdMQ8_1kIn0:OjMdMQ8_1lIm090jMdMQ8`1kIl0:1iMcMR8`1lIk0:1hMdMR8a1kIj0;1iMcMQ8c1jIi0<2hMbMR8c1kIh0;3hMbMS8c1iIh0=2gMcMS8c1iI
h0=2gMcMS8d1iIf0=4gMaMS8e1iIf0=4gMaMS8f1hIe0>4gMaMS8f1hIf0>3fMaMT8g1hId0>4fM`MU8h1gId0>4gM_MT8i1gId0>5fM^MU8i1gId0>5fM^MU8j1fIc0?5fM^MU8j1fIc0?5gM]MU8j1eId0?6fM\\\\MV8j1eIe0>5gM[MW8k1dIe0>5gM[MW8k1dIe0>6fMZMX8l1cId0?6gMYMW8m1cIe0>5hMXMX8n1bIe0>6gMWMZ8m1aIf0=7iMTMZ8o1`If0=7iMTMZ8o1`Ig0<7iMRM\\\\8P2_Ig0<7_6SOTIf0=7_6SOTIg0<7_6ROUIg0<7`6QOTIh0<7`6QOTIi0;7`6POUIi0;7`6POUIi0;7`6QOTIh0<8_6POUIi0;7`6POUIi0;7`6POUIi0;8`6nNUIk0:7a6nNUIk0:7a6nNUIl097a6mNUIm097b6mNTIl0:7b6mNTIm097b6lNUIm0:6a6mNUIn096a6lNVIn096b6lNTIn0:6b6lNTIo096b6kNUIo096b6kNUIo097a6jNVIP195a6lNUIo0:6`6kNVIo0:6`6kNVIP196`6jNWIP196`6kNVIo0:7_6jNWIP1:6^6jNXIP1:6^6jNXIP1:7]6jNXIo0;8\\\\6iNYIP1:7^6hNXIQ1:8]6hNXIP1<7\\\\6iNXIP1<7\\\\6iNXIQ1;7\\\\6iNXIP1<7\\\\6iNXIP1<7\\\\6iNXIP1<8[6hNYIP1=7Z6jNXIP1=6[6jNXIP1=7Z6jNXIo0>7Z6jNXIo0>7[6iNWIP1?6Z6kNVIP1?5\\\\6jNUIQ1`05Z6kNUIP1a05[6jNTIQ1b04Z6lNSIQ1b03\\\\6kNRIR1c02[6lNRIR1d01[6mNPIR1e01\\\\6lNoHT1e0O\\\\6mNoHT1f0N\\\\6nNmHT1h0NZ6nNnHU1h0L[6nNmHV1i0KZ6POlHV1i0J\\\\6oNjHX1k0H[6POjHX1l0G[6POiHZ1k0F\\\\6QOhHY1m0E[6ROhHZ1m0C\\\\6ROgH\\\\1l0C\\\\6QOhH\\\\1m0B\\\\6ROfH]1m0A]6ROfH]1n0@\\\\6SOfH^1m0_O]6SOfH^1n0^O]6SOeH`1m0]O^6TOdH_1n0]O^6TOdH_1o0\\\\O]6UOdH`1n0[O^6UOdH`1o0ZO^6UOcHa1o0ZO^6UOcHa1P1YO]6VOcHb1o0WO_6WObHb1P1VO^6YObH`1Q1VO]6ZObH`1R1UO\\\\6[ObHa1S1QO\\\\6^OaHa1U1oNZ6@aHa1W1mNX6BaHb1X1jNX6C`Hc1Y1hNX6E`Hb1Z1gNV6G`Hb1\\\\1eNT6I`Hc1]1aNT6M^Hb1`1_NR6O^Hb1a1^NQ60^Hb1b1\\\\NQ62^Hb1a1[NR62]Hc1b1YNR64\\\\Hc1b1YNR64\\\\Hc1c1WNR66[Hd1c1UNR67\\\\Hc1c1TNR6:ZHb1d1SNS6;YHb1e1PNU6=VHd1e1mMV6?VHc1d1mMW6`0UHc1e1kMXMaN`8Q2cHc1e1jMXMdN_8o1eHc1d1gMZMhN]8m1eHd1e1eMYMkN]8l1fHd1c1cM\\\\MmN\\\\8k1fHd1c1bM\\\\MoN[8k1fHe1c1^M^MROY8k1gHd1c1\\\\M_MUOX8j1gHe1b1YMaMWOW8k1gHd1c1WMaMYOV8k1gHe1c1SMbM]OT8k1gHe1e1oLbMAS8j1gHf1f1iLcMGQ8i1gHg1g1cLdMLo7j1fHg1R3_LY4i1fHh1P3_LZ4i1fHi1o2^L\\\\4h1fHi1n2_L\\\\4h1fHj1m2^L^4g1fHj1l2_L^4g1fHk1l2\\\\L`4h1eHk1k2]L`4h1eHk1k2]La4g1dHm1j2\\\\Lb4g1eHl1S<TNmCm1S<RNmCn1S<RNnCm1R<SNnCn1R<QNnCo1R<QNnCo1R<QNoCn1Q<SNnCm1R<SNnCm1R<SNoCl1Q<TNoCl1Q<TNPDk1P<UNPDk1P<UNQDk1n;UNSDj1m
;VNSDj1m;VNTDi1l;WNUDh1k;XNVDg1j;YNWDf1i;ZNYDd1g;\\\\N[Db1e;^N\\\\Da1d;_N^D_1b;aN_D^1a;bNaD\\\\1_;dNbD[1^;eNcDZ1];fNdDY1\\\\;gNfDW1n1SMg4f1\\\\IV1m1TMg4g1\\\\IT1m1UMg4g1]IS1l1VMg4g1^IR1W;nNiDR1W;nNjDQ1V;oNkDP1U;POlDo0T;QOlDn0U;ROlDm0T;SOmDl0S;TOnDj0S;WOmDh0S;XOnDf0S;ZOoDd0Q;\\\\OPEb0Q;^OPEa0P;_ORE>o:BUE:k:GYE3h:M]EMd:3bEF_::gE_OZ:a0kEWOX:j0oEhNW:X1SFWNT:i1g4000mAVNe8k1XGXNg8h1XGZNg8f1YGZNg8f1YGZNg8f1YGZNg8f1YGZNg8g1WGZNi8f1WGZNi8f1WG[Nh8e1XG[Nh8e1XG[Nh8e1XG[Nh8f1WGZNi8f1WGZNi8f1WGZNi8f1WGZNi8f1WG[Nh8e1XG[Nh8e1XG[Nh8f1VG[Nj8e1VG[Nj8e1VG[Nj8e1VG\\\\Ni8d1WG\\\\Ni8d1WG\\\\Ni8d1WG\\\\Ni8d1WG\\\\Ni8e1VG\\\\Ni8d1WG\\\\Ni8d1WG\\\\Ni8d1XGZNi8f1XGXNj8g1XGUNj8k1WGSNj8m1Z500001O000000000000010O000000000010O00000001O0001O01O0000010O00001O01O01O00001O010O001O001O01O01O00010O01O010O0010O01O0010O01O2N1N3M3N2M3YO_\\\\O_Occ0?c\\\\O[O`c0a0h\\\\OXO[c0e0h0L4K5L5JQ^\\\\2\"}}, {\"image_id\": 6, \"category_id\": 1, \"bbox\": [193.0, 20.0, 454.0, 881.0], \"score\": 0.9999930262565613, \"association_id\": 1, \"light\": [-1.322417974472046, -2.231844902038574, 1.157697319984436, 2.100700855255127], \"segmentation\": {\"size\": [1024, 768], \"counts\": 
\"lQQ64jo06J5K7I3M4M2M3N1O100O2N100O2O0O101N2N3M3M2M4M2M3N101WYOaNo>_1n@hNn>X1n@POm>Q1n@YOm>g0o@Am>?PAFn>;o@In>8PALn>4QAOm>2PA1o>OPA3o>No@4P?Mn@5Q?Lm@6R?Lk@6T?Kk@6S?Lk@5U?Lj@5U?Li@5W?Kh@7W?Jf@8Z?He@:Z?Fc@=]?Da@>^?Ba@?_?A`@a0_?@_@b0`?^O_@d0`?]O^@e0a?\\\\O\\\\@g0c?ZOZ@h0f?ZOT@k0k?WOn_Oo0Q`0SOi_OQ1W`0POe_OS1[`0nNb_OU1]`0lNa_OU1_`0lN^_OV1b`0lNY_OW1g`0kNS_OY1m`0iNZ\\\\On3fc0V400O10000001O01O0000000000000O1000000000000000000O1000000O100000000O10000000000O10000O10000O1000000000000O1000000000000000000S\\\\OREhb0n:R]OmEMUO`a0n:^^ORG^a0n8_^OXG^a0h8`^O\\\\G^a0d8b^O^G]a0a8c^OaG[a0_8e^OcGYa0]8f^OgGWa0Y8i^OiGVa0Df_Oh4TOgKSa0^OR@e4kNoKQa0[OY@b4fNULo`0XO_@_4bN]Lm`0ROc@`4`N`Lk`0oNh@_4]NdLj`0lNk@^4[NhLi`0hNQAY8m>fGeAl7Y>RHPCi6m<VI`Ca6^<]IfCd6V<[InCg6m;WIWDj6f;VI\\\\Dj6c;TI`DPNgKZ8g?eIXEY6g:gI]EV6b:iIcET6\\\\:lIgER6X:mIkES6S:mIoER6P:mIRFS1TJk1g?RMVFV6h9jIYFX6e9gI\\\\F[6a9eIaF[6]9eIdF\\\\6Z9dIhF\\\\6V9cIkF^6T9bImF^6R9bIoF_6o8aIQGb6m8\\\\IUGf6h8YIYGj6d8UI]Go6_8PIbGn0dIZ3h>eKgGP1aI^3e>aKkGP1aIa3b>^KnGQ1_Ie3a>XKQHR1_Ih3_>TKTHS1]Ij3_>RKTHU1]Ij3^>PKVHV1[Ik3_>mJWHX1ZIm3_>iJXHP8eMXGP5f0]MR8YMnGm40jM[1WIR4h3SKP5_ORNV8fLgHS5ROXNX8^LmHX5jN]N[1RI[4R3fK^5bN`N[1QI_4m2gKb5]NbN\\\\1oHa4k2hKc5YNgN[1mHd4f2kKf5RNmN[1jHg4a2mKi5mMQO]1fHi4]2oKm5gMWO\\\\1fHi4S2VLQ6bM\\\\OY1hHi4i1_LT6\\\\M_O1oGd0l0[5_1fL\\\\6TM^OMWHh0e0^5W1kLf6iL5f0SHb5o0RMS`0WMQ_Of5h0WMY`0oLS_Og5c0[M]`0iLT_Ok5<^Mc`0aLV_Oo56aMbb0^2Z]OeMhb0Y2V]OiMkb0V2R]OlMPc0T2k\\\\OPNVc0Q2d\\\\OSN]c0Q2X\\\\OUNhc0P2j[OZNVd0j1^[O]Nbd0b6O1O0001O1O1O1N10001N10O010000000000100010O000O10O000001O1O101N2O1O0O10O01O00001N2O2N2N2O1N2N1O100O1O001O1O2N1N3N2N2M2O1N1O2O0O2N2N2N3M3M2N1O1O2O0O001O1O2N1O2N1O3M2N1O2N1O010O001O1O010O001O0010O100O100O2O1O1O1O00000000O00100O0010O01O010O001O1O2N1O2N2N2N2N2M3N1O1O1O100XKiXOe0Xg0XOkXOg0Vg0VOnXOh0Tg0TOoXOk0Sg0ROoXOn0Rg0nNQYOQ1Qg0lNQYOS1Qg0iNRYOW1nf0gNTYOX1mf0fNUYOY1lf0fNUYOY1lf0fNUYOX1lf0iNSYOW1nf0hNSYOW1nf0hNSYOW1mf0iNTYOU1nf0jNTYOT1lf0lNUYOS1kf0mNVYOR1kf0mNVYOQ1kf0POVYOn0kf0POWYOo0if0QOXYOn0if0QOYYOm0gf0SO[YOk0ff0TO\\\\YOj0ef0UO]
YOi0df0VO^YOh0cf0WO`YOf0af0XObYOf0`f0XObYOf0_f0YObYOf0`f0XOaYOf0af0YO`YOf0`f0ZOaYOe0`f0ZObYOc0_f0]ObYOb0_f0\\\\OcYOb0^f0^OdYO`0]f0_OeYO>\\\\f0BeYO<]f0CeYO:\\\\f0FhYO5Yf0KkYO0Wf0OlYOMUf03nYOITf07oYODRf0<RZO\\\\ORf0d0ZZOiNne0W1]4010O01O010O011N10001O1O1O0001O1O001N1O1O1O2O0O1O1O2N1O101N1O2O8H9Fi0XO4L7If0[O0O0100O0O1O001O001N101O001O001O1N2M4L4@a0VOoYh3\"}}, {\"image_id\": 7, \"category_id\": 1, \"bbox\": [353.0, 222.0, 191.0, 267.0], \"score\": 0.9999999403953552, \"association_id\": 2, \"light\": [-2.857271194458008, -1.4808465242385864, 2.7410426139831543, 1.3242011070251465], \"segmentation\": {\"size\": [576, 1024], \"counts\": \"_lV6l0d?g1E6J6H:I6K4L4I>ZBnLh;k4dD\\\\JP:j5jE[Jm9P6kEUJn9S7K4N2M3M3N200O1000000000O0nNdFbIE\\\\Oc9l6TG`I[ODa9b6nGaIQ8R6[HoIe7l5`HTJ`7n4cF_KP2D]7i4aIWK_6f4dIZK\\\\6d4fI\\\\KZ6c4fI^KZ6b4hHXK`N6h8b4cHhKZNFS9b4`HlK\\\\NBT9c4XHSLcNZOU9e4XGPMB[NV9g4QGTMHTNX9j4kFUMLRNY9m4]F\\\\M9gMZ9`7fF`HZ9a7eF_H[9a7eF^H]9`7dF`H\\\\9_7\\\\F]H33b9`7ZF^H33d9]7ZFaH12f9^6]FRJL^O12h9<XFR5a0jJ_OF01m94_FR5;nJZOKO1i:R5TFlJVO1L2m:j4`FTKcN2P;d4`FZK`N2R;_4aF_K]N1T;]4aFbK[N1T;Z4dFeKXN1U;U4gFjKTN1V;Q4iFnKQN1W;m3kFRLnM1[;d3lF[LiM1`;j2\\\\GUMTM1c;e2\\\\GYMQM1g;`2[G_MnL1i;_1XH`NoK1l;Q1`HnNdK1o;l0_HROcK2o;k0^HSOcK3P<g0^HVObK3R<e0[HYOcK2T<b0ZH\\\\ObK2V<`0XH^ObK2Y<=UHAbK2[<;SHCbK2]<9QHEbK2`<7mGGcK3a<4lGIcK3b<3kGJcK3c<3iGJdK3d<2hGKdK3f<2dGKeK5j<0\\\\GLjK4k<0ZGLkK4k<0ZGLjK5l<OZGMhK5n<OYG7g8IYG7h8IWG7i8JUG8j8JTG6m8JRG6n8JRG6n8KQG5P9JPG6P9IQG7P9FRG:o8BTG>W=000000000000O10001O0000000000000000000000000000000000000000000000O2O000001Ac^O9]a0Gc^O9]a0Gd^O8\\\\a0Ge^O9[a0Ge^O9[a0Ge^O9aa001O0000000001O0000000000000000000000O1000000000000000000O10000000000O1000000O100000000O2O0O10001NRa]8\"}}, {\"image_id\": 7, \"category_id\": 1, \"bbox\": [630.0, 299.0, 103.0, 117.0], \"score\": 0.9999975562095642, \"association_id\": 1, \"light\": [-3.052081346511841, -1.6455615758895874, 2.9090237617492676, 1.404773473739624], \"segmentation\": {\"size\": [576, 1024], \"counts\": 
\"PgR;1ma06K2O0O10000O01O1O1O10O010N2N100QOIU@7i?1P@1o?4g_O1Y`08^_OJa`0k0O01000Q@kNS?V1g@VOR?j0l@]Oo>o1K4L3M4L3M5K2N2N10jLhAc2X>ZMlAe2i>UOTA]Nn>^1\\\\AZNf>f1cASN[>m1eASN\\\\>l1cAUN]>k1cAUN^>j1bAVN_>j1`AVNa>k1]AUNd>o1WAQNi>T1g@D?XOl>l0UADM@P?f0YAIFAS??^AN^ODW?9^A3ZOD]?OaA8TOIk`01W_O0\\\\a000000000000000000000000000000000000000000000000000000000000000O100000000000001O0000000000001O0OTYS5\"}}, {\"image_id\": 8, \"category_id\": 1, \"bbox\": [193.0, 86.0, 296.0, 190.0], \"score\": 0.9999995827674866, \"association_id\": 3, \"light\": [-3.0191335678100586, -1.600159764289856, 2.7622909545898438, 1.3627523183822632], \"segmentation\": {\"size\": [768, 813], \"counts\": \"kk`4i1me0<K5L3N2N2N1O2N2M3N2N1O2O1N2O0O2O000O101O0O10001N1000001N101O1O001O001O00001O0O2O001O1O1O1V\\\\O]LUc0d3b\\\\O\\\\LL2ac0c3b\\\\OeL]c0[3b\\\\OgL]c0Y3c\\\\OhL\\\\c0X3c\\\\OnLWc0S3i\\\\OoLUc0Q3j\\\\OSMSc0m2m\\\\OTMRc0l2m\\\\OVMRc0j2n\\\\OVMRc0j2m\\\\OXMRc0h2n\\\\OXMQc0i2o\\\\OXMPc0g2Q]O[Mmb0e2R]O]Mmb0c2S]O_Mjb0b2V]O_Mib0a2V]O`Mjb0`2V]OaMib0_2W]ObMgb0_2X]ObMhb0^2X]OcMgb0l3O1O1O^N\\\\]OiMcb0W2]]OjMbb0j30001OZN_]OmMab0S2_]OmM`b0T2`]OlM`b0T2`]OlM`b0T2`]OlM`b0T2`]OmM_b0S2a]OmM_b0S2a]OmM_b0S2a]OmM_b0k3O001O00001O0000000000000000000000000001O0VNc]OQN]b0i300000001O000000001O0000WNe]OnM[b0Q2e]OoM[b0k30001OVNf]OnMZb0R2f]OnMZb0R2f]OnM[b0Q2e]OoM[b0Q2e]OoM[b0Q2e]OoM[b0Q2f]OoMYb0R2f]OnM[b0Q2e]OoM[b0Q2f]OnMZb0R2f]OnMZb0R2g]OmMYb0S2g]OmMYb0S2h]OmMWb0S2i]OmMWb0S2i]OmMWb0T2h]OlMXb0T2h]OmMWb0S2i]OmMWb0T2h]OlMXb0T2i]OlMVb0T2j]OlMVb0U2i]OkMWb0U2i]OkMWb0U2i]OkMWb0V2h]OjMXb0V2i]OiMWb0W2i]OjMVb0V2k]OiMUb0X2j]OhMVb0X2k]OgMUb0Y2k]OgMUb0Z2k]OeMUb0[2k]OeMVb0Z2j]OfMUb0\\\\2j]OdMVb0\\\\2k]OcMUb0]2k]OcMUb0^2k]OaMUb0_2k]OaMTb0`2m]O_MSb0b2l]O^MTb0b2l]O^MTb0b2l]O^MSb0c2m]O]MSb0c2m]O]MSb0c2m]O]MSb0c2m]O]MSb0c2m]O]MSb0c2m]O]MSb0c2m]O]MSb0c2m]O]MSb0c2m]O]MSb0c2m]O]MRb0d2n]O\\\\MRb0d2n]O\\\\MRb0d2n]O\\\\MRb0d2n]O\\\\MRb0d2n]O\\\\MRb0c2o]O]MQb0c2o]O]MQb0b2o]O_MQb0`2P^O`MPb0`2P^O`MPb0_2P^ObMPb0^2P^ObMPb0^2P^ObMPb0]2P^OdMPb0\\\\2P^OdMPb0[2Q^
OeMoa0[2P^OfMPb0Y2P^OhMPb0W2P^OjMPb0V2o]OkMQb0T2o]OmMQb0S2o]OmMRb0R2m]OoMSb0P2n]OPNRb0P2m]OQNSb0o1l]ORNTb0n1l]ORNTb0m1l]OTNTb0l1k]OUNUb0j1k]OWNUb0i1k]OWNUb0h1k]OYNUb0g1k]OYNUb0f1k]O[NUb0e1k]O[NUb0e1k]O[NUb0e1k]O[NUb0h31kMj]O^NVb0b1j]O^NVb0f3000001O000000001O00000000O10O10O101N100O1O2nMf]O[N[b0e1e]OXN^b0h1b]OSNcb0m1]]ORNdb0n1\\\\]OQNfb0n1Z]ORNfb0n1Z]OQNgb0o1Y]OPNhb0d30O101O0O1000001^NT]OnMlb0R2T]OnMmb0Q2S]OnMnb0R2R]OnMob0Q2Q]OoMPc0P2P]OPNRc0n1n\\\\ORNSc0m1m\\\\OSNTc0l1l\\\\OTNUc0k1k\\\\OTNVc0[31N101O001N3N2N2N1O1N2O1O1N101N3N2N7H7J2M2O1N2O0O3M3hNX[OTOjd0h0Y[OVOld0b0^[OWOif0KXib7\"}}, {\"image_id\": 8, \"category_id\": 1, \"bbox\": [252.0, 453.0, 276.0, 178.0], \"score\": 0.9999998211860657, \"association_id\": 1, \"light\": [-2.5078461170196533, -1.4699252843856812, 2.3921008110046387, 1.2966899871826172], \"segmentation\": {\"size\": [768, 813], \"counts\": \"P_m5X2`e0>G4O1N2N101O001N101O0O2O00001N10001N2O001O0O2O000000001O0000001O001O00001N1e[O`LVd0`3i[OiLoc0W3P\\\\OjLPd0V3o[OlLPd0c3O002M5L3M1O001O001O1N103M1O1O1O00001OjNg\\\\OeMYc0[2g\\\\OfMXc0Z2h\\\\OgMWc0Y2i\\\\OhMVc0X2j\\\\OhMVc0X2j\\\\OhMWc0W2i\\\\OjMVc0U2k\\\\OkMUc0U2k\\\\OkMUc0U2k\\\\OlMTc0T2l\\\\OlMTc0T2l\\\\OmMSc0S2m\\\\OmMSc0S2m\\\\OnMQc0S2o\\\\OmMQc0S2o\\\\OmMQc0S2o\\\\OmMQc0S2o\\\\OmMQc0S2o\\\\OmMQc0S2o\\\\OnMPc0R2P]OnMPc0R2P]OnMPc0R2P]OnMPc0R2P]OnMPc0R2P]OnMPc0R2P]OnMPc0R2P]OnMPc0R2P]OnMPc0R2P]OnMPc0R2P]OoMob0Q2Q]OoMob0Q2Q]OoMob0Q2Q]OoMob0R2P]OoMob0Q2Q]OoMob0Q2Q]OPNnb0P2R]OPNnb0P2R]OPNnb0P2R]OPNnb0P2R]OQNmb0o1S]OQNmb0o1S]OQNmb0a300001O0_NR]OPNnb0a3000001O00001O001O00000000001O0000^NV]OnMjb0R2V]OnMjb0R2V]OnMjb0R2V]OnMjb0R2V]OnMjb0R2V]OoMib0Q2W]OoMib0Q2W]OoMib0Q2W]OoMhb0R2X]OoMgb0Q2Y]OoMgb0Q2Y]OPNfb0P2Z]OPNfb0P2Z]OPNfb0P2Z]OPNfb0P2Z]OPNfb0P2[]OPNcb0Q2]]OoMcb0R2\\\\]OnMdb0R2\\\\]OnMdb0R2\\\\]OnMdb0R2]]OmMcb0T2\\\\]OlMdb0T2\\\\]OlMdb0U2\\\\]OjMdb0V2\\\\]OkMcb0V2]]OiMcb0W2]]OiMcb0W2]]OiMcb0W2^]OhMbb0Y2]]OgMbb0Z2_]OeMab0[2`]OdM`b0\\\\2`]OeM_b0\\\\2a]OcM_b0]2a]OcM_b0]2a]OcM^b0^2b]ObM^b0^2c]OaM]b0`2b]O`M^b0`2b
]O`M^b0`2b]O`M^b0`2c]O_M]b0a2c]O_M\\\\b0c2c]O]M]b0c2c]O]M]b0c2c]O]M]b0b2d]O^M\\\\b0b2d]O^M[b0c2e]O]M[b0c2e]O]M[b0b2f]O^MZb0b2f]O^MZb0b2f]O^MZb0b2e]O_M[b0a2e]O_M[b0a2e]O_M[b0a2e]O_M[b0`2e]OaM[b0_2e]OaM[b0_2e]OaM[b0_2d]ObM\\\\b0^2d]ObM\\\\b0^2d]ObM\\\\b0^2c]ObM^b0^2b]ObM^b0^2a]OcM_b0]2a]OcM_b0o30]Na]OeM_b0[2a]OeM_b0n3O10000000000000000001O0O100ZN`]OlM_b0U2a]OkM_b0U2a]OkM_b0U2a]OkM_b0j310000O1O100O100O1O1N2O1O2N1O100O1O1O2O0O101N101O000O1O2hNi\\\\OeMWc0[2i\\\\OdMXc0\\\\2h\\\\OdMXc0\\\\2h\\\\OdMXc0[2i\\\\OeMWc0[2i\\\\OdMXc0\\\\2h\\\\OdMYc0[2g\\\\OeMYc0[2f\\\\OeM[c0[2e\\\\OdM]c0_30kNc\\\\OfM^c0Z2b\\\\OfM_c0]31O1N2O1kN\\\\\\\\OlMec0V32kNY\\\\OoMhc0o1Y\\\\OQNjc0k1V\\\\OUNmc0g1U\\\\OYNmc0c1U\\\\O\\\\Nmc0b1S\\\\O^Nnc0a1P\\\\ObNQd0\\\\1n[OeNTd0Y1l[OhNUd0V1k[OjNWd0V1g[OjN^d0i0l[OVOaf0M9Fa0ZOoUe6\"}}, {\"image_id\": 8, \"category_id\": 1, \"bbox\": [7.0, 289.0, 281.0, 177.0], \"score\": 0.9999998807907104, \"association_id\": 4, \"light\": [-3.1650242805480957, -1.439558506011963, 2.9918465614318848, 1.1672853231430054], \"segmentation\": {\"size\": [768, 813], \"counts\": 
\"ca5d2[e04K3N1O2O0O101N1O101N101O000O101O0000000O1000001O00000000000O2O0000000000000O100_[OjLXd0V3h[OkLWd0U3i[OmLUd0S3j[ORMQd0o2o[OWMkc0h2V\\\\O\\\\Mfc0d2Z\\\\O]Mec0c2Z\\\\O_Mec0a2[\\\\O`Mdc0`2\\\\\\\\OaMcc0_2]\\\\OaMcc0_2]\\\\ObMbc0^2^\\\\OcM`c0^2`\\\\OdM^c0\\\\2b\\\\OfM\\\\c0Z2d\\\\OhMZc0X2f\\\\OiMYc0W2f\\\\OkMYc0U2g\\\\OkMYc0U2g\\\\OlMYc0S2g\\\\OmMYc0S2g\\\\OnMXc0R2h\\\\OoMWc0Q2i\\\\OPNVc0P2j\\\\OQNUc0o1k\\\\OQNUc0o1k\\\\ORNTc0n1l\\\\ORNTc0n1l\\\\ORNTc0n1l\\\\OSNSc0m1m\\\\OSNSc0m1n\\\\OSNQc0m1o\\\\OTNPc0l1P]OTNPc0l1P]OUNob0k1Q]OUNob0k1Q]OUNob0l1P]OTNQc0k1o\\\\OUNQc0k1o\\\\OVNPc0j1P]OVNPc0j1Q]OUNob0l1P]OTNQc0k1o\\\\OUNQc0k1o\\\\OUNPc0_3O001O00]NR]OTNnb0l1R]OUNmb0_3O1000001O\\\\NU]OSNkb0m1U]OSNkb0m1U]OSNkb0m1U]OSNkb0m1U]OSNjb0n1V]ORNjb0n1W]OQNib0o1W]OQNib0o1W]OQNib0o1W]OQNib0o1W]OQNib0o1V]OSNib0m1W]OSNjb0l1V]OTNjb0l1V]OUNib0k1W]OUNib0k1W]OVNhb0j1X]OVNhb0j1X]OVNhb0j1X]OVNhb0j1X]OWNgb0i1Y]OWNgb0i1Y]OWNgb0i1Y]OWNgb0i1Y]OWNgb0i1Y]OWNgb0i1Y]OWNgb0i1Y]OWNgb0i1Y]OWNgb0i1Y]OWNgb0i1Y]OWNgb0i1Y]OWNfb0j1Z]OVNfb0j1Z]OWNeb0i1[]OWNeb0b3000XN[]OUNeb0k1[]OUNeb0k1[]OUNeb0k1[]OUNeb0k1[]OUNeb0k1[]OVNdb0j1]]OUNcb0l1\\\\]OTNcb0m1]]OSNcb0m1]]OSNcb0m1]]OSNcb0n1]]ORNbb0n1^]ORNbb0n1^]ORNbb0n1_]OQNab0o1_]OQNab0P2_]OoMab0Q2_]OoMab0Q2_]OoMab0Q2_]OPN`b0P2a]OoM^b0R2b]OnM^b0R2b]OnM^b0R2b]OnM^b0R2b]OnM^b0R2c]OmM]b0S2c]OmM]b0S2b]OnM^b0R2b]OnM^b0R2b]OnM^b0R2b]OnM^b0R2b]OnM^b0R2b]OnM^b0R2b]OnM_b0Q2a]OoM_b0Q2`]OPN`b0P2`]OPN`b0P2`]OPN`b0P2_]OQNab0o1_]OQNab0g30000000000001O0VN^]OVNbb0j1^]OVNbb0d30UN^]OXNbb0h1^]OXNbb0h1^]OXNbb0h1^]OXNbb0h1^]OXNbb0h1^]OXNbb0h1^]OXNbb0g1^]OZNbb0f1^]OZNbb0f1^]OZNbb0f1^]OZNbb0f1^]OZNbb0f1^]OZNbb0b30000O101O0000000000000000000000000000000O100000000O11O00000oMa]OaN_b0_1a]O`Nab0_1_]OaNab0_1_]OaNab0_1_]OaNab0_1_]O`Nbb0`1^]O`Nbb0`1^]O_Ncb0^31O1O1N3O0O1O100O1O2N1O2O1N2N101O0O2O0O2O1O2N2M4M1O1O0O2O0aNY\\\\OeNhc0Y1Y\\\\OfNhc0Z1X\\\\OfNjc0W1W\\\\OiNkc0U1U\\\\OjNnc0T1R\\\\OlNoc0R1R\\\\OnNnc0R1Q\\\\OnNQd0Q1o[OoNQd0P1P\\\\OoNRd0P1n[OPOSd0n0n[OQOTd0h0R\\\\OXORd0>T\\\\OAWf0kNZYOm0of0M9F
R[Y<\"}}, {\"image_id\": 8, \"category_id\": 1, \"bbox\": [408.0, 304.0, 281.0, 174.0], \"score\": 0.9996356964111328, \"association_id\": 2, \"light\": [-2.08481764793396, -2.0195953845977783, 1.8986135721206665, 1.8003787994384766], \"segmentation\": {\"size\": [768, 813], \"counts\": \"bZb9`1Yf0<H4M3N2M3M2N3N3L3N3L3O1N2O0O101O1N1O3N2M3M2O1N2O001O1O1O2N3M5J3N1O1O1O1O2N2N4L3M100O001N10000000000000000O100000000000000000000000000000000000000000000001O00000000000000000000000000000000000000000000000000000000000000000000000000000000W]ObKPb0^4n]OdKRb0\\\\4m]OeKSb0[4l]OfKTb0Z4l]OgKSb0Y4l]OhKTb0X4l]OhKTb0X4l]OhKTb0X4l]OhKTb0X4l]OhKTb0X4k]OjKTb0V4l]OjKTb0V4l]OjKTb0V4k]OkKUb0T4l]OlKTb0T4k]OmKUb0S4k]OmKUb0R4k]OoKUb0Q4j]OQLUb0m3m]OSLSb0i3P^OXLPb0e3S^O[Lma0c3T^O^Lla0a3Q^OdLna0[3_]OYMab0g2^]OZMbb0f2]]O\\\\Mbb0c2_]O]Mab0c2^]O^Mbb0a2^]O`Mbb0o30`N^]ObMbb0n30000000000000000000000000000000O100000O1000O100000000O10000000000000000^N\\\\]OhMdb0X2\\\\]OhMdb0W2]]OiMcb0W2\\\\]OjMdb0U2]]OkMcb0U2]]OkMcb0T2]]OmMcb0S2]]OmMcb0S2\\\\]OnMdb0Q2]]OoMdb0P2[]OQNeb0o1[]OQNeb0n1[]OSNeb0m1[]OSNeb0l1\\\\]OTNdb0l1[]OUNeb0j1[]OWNeb0i1[]OWNeb0h1[]OYNeb0g1[]OYNeb0f1\\\\]OZNdb0f1[]OZNfb0f1Z]OZNfb0`30UNZ]O\\\\Nfb0_300000000000000000000000O1O1O1O100O100O1N2N2O1O100O100O100O1O10000O100000001O0O1000001O0O2O1N2O0O2O00001O0O2O1mN[\\\\OiMfc0V2Z\\\\OjMhc0T2Z\\\\OjMgc0U2Z\\\\OjMgc0U2Z\\\\OiMhc0W2W\\\\OiMjc0V2W\\\\OhMjc0W2W\\\\OiMjc0T2Y\\\\OjMjc0S2W\\\\OlMlc0P2W\\\\OnMnf001N2M7Iabl2\"}}, {\"image_id\": 9, \"category_id\": 1, \"bbox\": [677.0, 402.0, 92.0, 153.0], \"score\": 0.9999999403953552, \"association_id\": 2, \"light\": [-2.172018051147461, -1.6836564540863037, 2.125114917755127, 1.509023666381836], \"segmentation\": {\"size\": [683, 1024], \"counts\": 
\"YPT>3Se09J3M2ZNKR]OIg0>Wb09b]OI]b0<Z]OIfb0^1000N2N2O100O1Nn]OWNf`0h1Q_OhNi`0X1U_OoNf`0Q1W_OVOe`0k0X_OXOg`0j0S_O\\\\Ok`0g0o^O_Oo`0e0h^OBWa0Y200o^OmKj`0Q4S_OSLm`0l3R_OVLo`0h3Q_OYLn`0g3Q_OZLPa0e3o^O\\\\LRa0c3n^O]LTa0m30O001O0_Ok^OlLUa0P3n^OQMRa0k2R_OUMo`0h2S_OXMn`0e2T_O[Mm`0c2T_O]Ml`0b2T_O_Mm`0`2S_O`Mm`0`2S_O`Mm`0`2S_O`Mm`0a2Q_O`Mo`0b2o^O^MQa0d2l^O]MUa0c2i^O^MWa0c2f^O_M[a0X32O0O2UMj^O]1Xa0_Nl^O_1Xa0VNS_Og1Zb0N2O3nNi\\\\OIYc03h\\\\OMac0De\\\\O<bc0UOf\\\\Ok0mc0001O0000000001O001O1VOd[Oc0dd0N0O2O00000O1O2O0O10000O10001N10001N1N3NjaY5\"}}, {\"image_id\": 9, \"category_id\": 1, \"bbox\": [590.0, 413.0, 113.0, 203.0], \"score\": 0.9999984502792358, \"association_id\": 4, \"light\": [-2.6213512420654297, -2.065340280532837, 2.504042625427246, 1.8923578262329102], \"segmentation\": {\"size\": [683, 1024], \"counts\": \"PPZ<2We04K4M3M1N3M3M3WOi0f]OkNY`0Z1e_OPOa?_O\\\\_Oc1Q1RO\\\\?b1b@cN[?^1`@iN]?X1a@kN^?U1`@nN_?R1_@RO_?n0`@UO^?j0b@YO\\\\?h0a@[O^?e0a@^O]?b0a@AJSNP?]2SACIUNR?Z2RACJTNT?[2m@GIRNY?X2k@JHQN\\\\?W2g@<Y?k2000001O0001O001O1O1N2O1O000UO]@nKd?P4\\\\@QLd?m3^@SLb?k3`@ULa?h3a@XL`?f3a@YLa?d3a@\\\\L_?b3c@_L]?^3e@bL[?\\\\3g@dLY?[3g@fLY?X3h@jLW?V3i@jLX?T3h@mLX?S3h@nLX?Q3g@PMY?o2g@SMY?m2f@SM\\\\?k2c@WMEPO`?h3k@aMT?`2j@aMW?^2i@cMV?^2h@cMY?\\\\2f@fMZ?[2d@eM^?[2^@gMc?Y2[@gMg?X2Y@iMg?m32N2lKT@e2n?VMZ@e2k?QM]@l2j?hL]@V3n`0M4M4L1O2SNS^O8_c01000O100O101N1O1O100O1jN[\\\\Oe0fc0[O[\\\\Od0ec0ZO^\\\\Of0bc0VOa\\\\Oj0bc0SO_\\\\Ol0Pd0000001O001O000O100O1O2L3M4N1O2L4N2NRae6\"}}, {\"image_id\": 9, \"category_id\": 1, \"bbox\": [471.0, 419.0, 131.0, 263.0], \"score\": 1.0, \"association_id\": 1, \"light\": [-2.835449695587158, -1.6882619857788086, 2.829672336578369, 1.4939136505126953], \"segmentation\": {\"size\": [683, 1024], \"counts\": 
\"Wej94Te06J6T]O5P`0Ni_O:T`0Gh_O>V`0Cf_Oc0V`0_Oc_On0T`0UOg_OT1S`0nNj_OV1KRNV?i0n@W1HWNU?b0RAY1GXNS?b0SAY1IWNl>f0ZAU1IWNX=D\\\\CR1BV1HUNV=H^Cm0EV1FWNT=K\\\\Ci0JV1EWNT=0PCk06o0EXNR=W2ZCAA\\\\NS=S2[CCA[NR=T2\\\\CBA[NR=R2^CD\\\\O^NU=m1`CFYO`NU=j1cCFXO`NU=i1cCHXO_NT=i1eCHWO_NT=h1eCJWO]NT=j1dCJXO\\\\NS=j1eCKXO[NR=j1fCLXOZNQ=k1fCLYOYNQ=k1dCN[OWNP=m1cCM\\\\OWNQ=n1_CM@UNQ=Z1lBTN?_2DRNQ=Z1oBXN5_2JPNR=X1PC]NM^21lMT=W1nBl1MnLU=U1oBm1LnLU=T1PCn1KnLU=V1mBm1MnLW=a5iB`JY=^5gBbJZ=]5eBdJ\\\\=[5cBgJ]=X5cBhJ]=V5dBlJ\\\\=Q5fBoJ[=k4jBUKW=g4lBYKW=c4jB]KW=a4jB_KW=^4kBcKU=[4dBYJN\\\\1^=Z4cB]JNY1`=X4bB`JMY1a=V4cBbJKX1b=U4dBcJJX1b=T4eBeJGX1d=S4dBWL]=g3dBYL\\\\=f3dB[L\\\\=d3eB\\\\L\\\\=a3fB_LZ=_3hBaLX=^3iBbLX=\\\\3iBdLX=Z3iBfLW=X3jBiLV=U3lBkLU=S3lBmLT=S3lBmLT=R3mBnLT=Q3lBoLT=P3mBPMS=P3mBPMT=o2lBPMV=n2kBRMV=m2jBSMW=l2iBTMX=l2gBTMZ=l2eBTM[=n2bBSM_=n2^BSMc=n2[BRMe=o2ZBQMg=o2WBRMj=o2SBRMo=o2mASMS>m2jAUMW>k2hAUMZ>]OhAm3Z>nKjAQ4Y>iKkAV4W>gKkAX4V>fKlAY4U>eKmAZ4W>aKkA^4[>[KgAd4]>WKdAi4^>QKgAn4Z?100O1O1001O2O0fMj_O]OV`0a0l_O^OV`0`0k_O@V`0>k_OBV`0:m_OFT`00U@0m?KW@3k?JW@eNZOl0a`0;W@gN\\\\Ok0``04\\\\@POVOj0b`0N]@WOSOi0d`0G_@_OPOf0e`0D_@FnNd0f`0_Oa@MjNb0mb0]OU]Ob0kb0]OV]Ob0kb0^OW]O>kb0AV]O>kb0BV]O=jb0BW]O=jb0BY]O:ib0EY]O9ib0EY]O9nc0Njjh8\"}}, {\"image_id\": 9, \"category_id\": 1, \"bbox\": [761.0, 392.0, 74.0, 117.0], \"score\": 0.9077272415161133, \"association_id\": 3, \"light\": [-1.8120059967041016, -2.3405635356903076, 1.7455443143844604, 2.187856435775757], \"segmentation\": {\"size\": [683, 1024], \"counts\": \"YPl?3kd0f0F2L3L3O2M2N4M4L4M5L1N5a\\\\OgNdb0S2L3M4L8I2N4K3N0O100N20O0N3M2M3101N3N2N1O10O2M1N3_Oc0L2Na]O`MVb0n1X^OUNia0h1X^OYNia0e1W^O\\\\Nka0`1W^O`Nka0]1V^OcNka0M]]OW1h0mNka0Kh]Oo0=UOla0Hl]O>DC04e03^b04o\\\\ODO2e07Wd0O001O000010O0000010O00001O01O000000001O0O10001N2Ncbm3\"}}, {\"image_id\": 9, \"category_id\": 1, \"bbox\": [205.0, 461.0, 206.0, 222.0], \"score\": 0.9328086376190186, \"association_id\": 5, \"light\": [-2.8298752307891846, -1.1163127422332764, 2.7068471908569336, 
0.9031901955604553], \"segmentation\": {\"size\": [683, 1024], \"counts\": \"WYY41n20X?7c@OW?5f@NX?5d@N[?3b@O]?4`@M_?5]@Oa?3[@0e?1X@1g?0W@2h?0V@1j?0T@1k?0T@0l?2R@Nn?4P@Mo?5o_OKR`06m_OJR`07m_OIT`08k_OGV`09i_OHW`08[^OlNZ1k0\\\\`09Y^OnNY1j0^`08Y^OoNW1i0a`08X^OoNV1i0c`08W^OoNT1k0e`0<[_OCf`0=Y_ODg`0<Y_OEf`04W^OQOS1l0d`04Y^OPOR1n0d`01[^OQOQ1Q1a`0M_^OROP1T1^`0Ib^OTOP1W1Z`0Ef^OUOn0[1X`0_Ok^OVOm0]1U`0^On^OUOl0^1V`0]On^OTOm0`1S`0\\\\OQ_OTOk0a1R`0]OS_OROj0c1Q`0\\\\OU_OQOi0d1Q`0[OV_OROh0d1R`0ZOV_OROg0f1Q`0YOX_OQOg0f1Q`0YOY_OoNg0h1P`0YOY_OnNh0i1[>[NRBn0kNnNg0j1Y>_NRBi0POlNf0l1U>dNSBd03h0i=fNSBb04h0h=hNRBa06g0f=lNRB=7h0f=nNPB;:g0e=ROkA:?e0d=XObA9j0?c=Y1[BhNc=Z1\\\\BgNd=Y1[BhNd=Y1[BhNe=Y1YBhNf=Z1XBgNh=Z1VBgNj=\\\\1RBeNn=_1mAbNR>b1jA_NV>c1fA^N[>c1bA_N^>c1^A_Nb>b1\\\\A_Nd>T40000000O010000O1000O01O101N100O101O0O1000000000000000000000000000000000000O1000000000000000001O000000000000000000000000000000000000000000000000000001O0000000000000000000000000000001O00001O001O1O1O001O001O00001O00001O0TL[AR1f>kN^AS1c>jN`AT1b>hNaAX1`>dNdA[1^>^NhAa1Z>YNjAg1Y>SNkAl1V>PNnAo1S>oMnAQ2S>mMoAQ2T>kMnAU2S>fMRBY2W>UMRBk2X`0O0000001N101O001O1O1O1N2K6@a0FgYh<\"}}, {\"image_id\": 9, \"category_id\": 1, \"bbox\": [808.0, 399.0, 38.0, 85.0], \"score\": 0.9756857752799988, \"association_id\": 6, \"light\": [-2.7204675674438477, -1.9434350728988647, 2.664654493331909, 1.7765934467315674], \"segmentation\": {\"size\": [683, 1024], \"counts\": \"\\\\[k`0221Re00mZO12Mkd0?_[OA12cb04c]O:BBK1<0ab07c]Oe0KUObb09`]Oc0MTObb0j1\\\\]OWNdb0i1\\\\]OWNdb0i1\\\\]OWNcb0c1W]OXN86ab0_1f]OaNYb0_1h]OaNWb0_1j]O`NVb0a1j]O_NUb0`1m]O`NSb0`1n]O_NRb0a1n]O_NQb0a1P^O_NQb0`1o]O`NQb0`1P^O_NPb0a1P^O_NRb0_1n]OaNRb0_1n]OaNSb0^1n]OaNSb0^1m]OcNRb0]1n]OcNSb0[1n]OeNRb0Z1o]OfNRb0X1o]OiNQb0V1o]OjNQb0e0S]OEm0Foa0d0W]OCl0Ina0b0X]OAn0Lla0<g^OD[a09f^OG_c001N2OTXf3\"}}, {\"image_id\": 9, \"category_id\": 1, \"bbox\": [821.0, 404.0, 38.0, 66.0], \"score\": 0.6143817901611328, \"association_id\": 7, \"light\": [-2.855509042739868, -2.3642170429229736, 
2.8193283081054688, 2.2448296546936035], \"segmentation\": {\"size\": [683, 1024], \"counts\": \"\\\\PTa0o0[d03N0S\\\\OQO^c0o0a\\\\ORO_c0n0`\\\\OSOTc00e\\\\OQ17oNRc0\\\\1m\\\\OdNRc0\\\\1n\\\\OeNRc0\\\\1m\\\\OdNRc0]1n\\\\OcNRc0\\\\1o\\\\OdNRc0\\\\1m\\\\OdNSc0[1n\\\\OeNRc0[1n\\\\OeNRc0[1n\\\\OeNRc0[1n\\\\OdNSc0[1P]OcNPc0]1Q]ObNob0]1R]OcNob0[1S]OdNmb0[1T]OfNjb0Y1X]OgNhb0X1Y]OhNgb0W1[]OhNeb0W1\\\\]OiNdb0W1\\\\]OjNcb0V1]]OjNcb0U1^]OkNbb0U1^]OkNbb0T1`]OkNbb0R1T1FIYOj[O00e0Wd0Ah[O<[d0]Ol[Oa0dd0J4L3Lcd]3\"}}, {\"image_id\": 10, \"category_id\": 1, \"bbox\": [826.0, 209.0, 476.0, 582.0], \"score\": 0.9999997019767761, \"association_id\": 2, \"light\": [-1.730029582977295, -1.583714246749878, 1.6694145202636719, 1.5477819442749023], \"segmentation\": {\"size\": [882, 1323], \"counts\": \"hfWf0U1Vj0`0C<D7J5L4L3M4L2O2O0O1O2N1O101N1O1O2O0O2N100O2N1O2O0O2M3O0O2N2O1N2O0O2O1O0O2O001O1N2O1O1O1O1O2N1O2N1N3N2N1O1O1O1O1O1O1N2O2N1O2l[OSK_a0n4Y^O[Kea0g4P^OcKoa0`4d]OlKo1oNk<Y5h@SLT2oNQ=Q5d@ULR2TOX=i4`@XLT2SOZ=h4]@XLW2TOZ=f4Z@ZLZ2RO[=f4W@ZL]2SOZ=d4V@\\\\L^2RO[=i6dBZIZ=f6eB\\\\IZ=d6eB`IX=`6gBcIW=]6hBeIW=\\\\6fBhIX=X6gBjIX=V6fBmIY=S6fBoIY=Q6eBQJ[=P6bBSJ]=m5bBUJ]=k5bBVJ^=j5aBXJ^=h5aBYJ_=g5aBZJ^=f5bB[J]=e5bB]J]=c5cB^J\\\\=b5dB_J[=a5dBaJ[=^5fBcJY=]5fBfJX=Z5hBhJV=X5jBiJU=W5jBlJT=T5lBmJS=R5nBoJQ=Q5nBQKQ=o4oBRKP=m4PCTKP=k4QCVKn<j4QCWKo<h4RCYKm<f4SC[Km<d4SC^Kl<a4TC`Kl<^4UCdKj<[4VCgKi<X4WCiKi<T4YCnKf<P4[CRLd<l3[CXLd<f3\\\\C]Lc<a3]CbLb<\\\\3^CgLa<[3[ChLd<Z3XCiLg<Y3TCkLk<W3QClLn<U3nBoLQ=S3iBRMV=o2fBTMZ=n2`BWM_=o2WBUMi=[8O001O010O000010O010O010000O1000cMlA[GT>_8SB`Gn=[8WBeG`2iMZ8[:ZElGj1[No8c9ZERHe1]NS9]9ZEVHa1_NW9V9[E[H\\\\1aN[9P9\\\\E^HW1cN`9k8[EaHU1eN`9h8]EcHQ1fNc9e8_EdHl0hNg9a8`EfHh0jNh9^8dEfHb0mNk9\\\\8fEeH>oNm9Z8hEfH9QOP:X8jEfH3TOR:U8nEgHMUOV:S8nEiHJTOY:Q8PFlHCTO]:P8RFmH]OTOb:m7SFPIXOUOd:k7UFRITOSOh:j7UFTIPOSOk:i7WFSIlNUOn:f7XFWIfNTOS;d7YFXI`NVOX;a7ZF\\\\Kg9c4[F[Kf9c4]F[Kd9d4_FYKb9f4`FXKa9g4bFVK\\\\NRLP:g8gGSKXNYLQ:c8iGRKVN[LR:a8kGRKRN^LT:_8kGRKPN`LU:^8mGPKnMcLU:\\\\8nGPKlMeLW:Z8mGQKlMeLW:Z8nGoJl
MgLW:X8oGoJkMiLW:W8PHmJiMmLX:U8PHlJiMoLW:U8RHiJhMRMW:T8SHfJhMVMV:S8THbJhM\\\\MV:o7UH_JiMbMS:n7VH[JjMgMQ:m7VHXJlMkMP:k7UHVJmMPNP:h7THUJnMSNo9f7UHTJnMVNn9e7UHRJnMZNn9c7UHQJnM\\\\Nn9b7UHoIoM_Nm9a7THoIPN`Nm9`7THnIPNaNn9`7RHnIQNbNo9^7QHnIQNdNo9]7QHmIQNfNP:[7PHmIQNhNP:Z7PHlIPNjNT:W7mGlIQNmNT:U7lGlIQNoNZ:n6gGPJPNRO`:g6bGSJPNUOe:b6]GVJoMXOi:]6ZGWJnM]Ol:W6YGWJnMBl:T6WGWJoMDn:S6TGUJoMIP;o5RGUJdMDTN9\\\\=h5nFWJdMKnM7g=`5iFYJdMa0l;n4bF\\\\JeMf0l;l4iHUKZ7h4fHXK\\\\7f4dHYK`7d4_H]Kd7a4[H^Kh7`4WHaKl7\\\\4SHeKo7Y4PHhKQ8W4oGhKS8W4lGjKU8V4jGjKW8U4iGkKX8T4hGlKX8T4hGlKY8T4fGlK[8S4eGmK[8S4dGnK]8R4bGnK^8R4bGnK_8R4`GnKa8Q4_GoKa8R4^GoKb8P4^GPLc8o3]GQLd8o3[GQLe8P4YGQLh8n3XGRLi8n3VGQLl8o3SGQLn8o3PGRLR9m3RFWKYKl0g>l3nEZKZKj0j>k3jE\\\\K\\\\Ki0m>i3fE_K\\\\Kh0Q?g3bEaK]Kh0T?e3]EdK^Kh0X?b3YEgK^Kg0\\\\?_3UEkK_Kf0^?^3RElK`Ke0`?_3nDnKaKc0c?]3lDPLaKc0d?]3iDQLcKb0e?\\\\3hDRLcKb0f?[3gDTLaKb0i?Z3eDTLbKa0l?Y3aDWLcK`0m?Y3_DWLdK`0o?W3]DYLdK`0R`0U3YD[LdKa0V`0Q3VD^LdKa0X`0P3SD^LfKb0Z`0n2oC`LgKa0]`0n2kCaLgKb0_`0l2iCcLhKa0a`0k2fCdLiKa0b`0k2dCdLjKa0d`0j2aCeLkKa0e`0k2^CdLmK`0g`0l2[CdLmKa0i`0k2XCeLoK`0j`0k2VCdLQL`0k`0k2TCeLPLa0n`0i2QCfLQLa0Pa0h2nBgLRL`0Ra0h2lBhLRL`0Ta0g2hBiLTLa0Ua0e2gBjLTLa0Wa0d2dBkLUL`0Ya0d2bBlLUL`0Za0d2`BlLVL?\\\\a0d2^BmLUL?_a0d2[BlLWL?`a0d2YBmLWL>ba0d2WBnLVL>ea0d2TBnLWL=ha0c2QBPMWL=ia0c2oAoLXL>la0b2kAPMYL=oa0b2gAQMZL<Sb0a2bASM[L;Wb0`2\\\\AVM\\\\L;\\\\b0]2VAWM_L;`b0[2PAZM_L<eb0Y2i@[MaL<ib0X2e@\\\\MaL=lb0W2a@\\\\McL<ob0W2]@]McL=Qc0V2[@\\\\MeL>Rc0U2X@]MfL>Sc0U2U@^MgL=Vc0U2R@^MhL=Wc0T2Q@_MhL=Xc0T2o_O_MhL=[c0T2l_O_MiL=\\\\c0T2j_O_MjL=^c0S2g_O`MjL=cc0R2a_OaMlL=fc0P2]_OcMmL=jc0o1V_OdMoL>nc0m1Q_OeMQM=Sd0k1k^OhMQM>Wd0i1f^OiMRM?Zd0g1c^OjMSM?[d0g1a^OjMSM`0]d0f1_^OkMRM`0`d0d1^^OlMRM`0ad0d1\\\\^OlMSM`0bd0d1Y^OmMTM?ed0c1W^OnMTM?fd0c1U^OnMUM?gd0c1S^OoMUM>id0c1Q^OmNPb0R1P^OnNRb0R1l]OnNVb0Q1i]OoNYb0P1f]OPO\\\\b0P1b]OoNab0Q1]]OoNdb0Q1[]OoNfb0R1X]OnNib0R1U]OnNmb0R1R]OnNob0R1P]OnNQc0Q1o\\\\OoNRc0P1n\\\\OPOSc0P1k\\\\OQOVc0n0j\\\\OROWc0n0h\\\\OROZc0l0f\\\\OTO\\\\c0k0c\\\\OTO`c0j0`\\\\OVOcc0h0[\\\\OYOgc0e0Y\\\\O[Ohc0e
0W\\\\O[Ojc0e0U\\\\O[Olc0d0T\\\\O\\\\Omc0d0R\\\\O\\\\Ooc0d0P\\\\O\\\\OQd0c0o[O]ORd0c0m[O]OSd0e0j[O\\\\OWd0d0h[O\\\\OYd0e0e[O[O\\\\d0e0c[O[O_d0d0`[O\\\\Oad0d0^[O\\\\Odd0c0[[O]Ogd0b0W[O_Okd0`0T[O@md0`0R[O@Pe0?oZOARe0?mZOASe0`0lZO@Ue0?kZOAVe0?iZOAXe0?gZOBXe0?gZOAZe0?eZOA\\\\e0?cZOA^e0`0`ZO@be0`0\\\\ZO@ee0`0ZZO@he0`0VZO@le0?SZOAoe0>PZOBQf0>nYOBTf0=kYODUf0<jYODWf0;iYOFWf0:hYOFXf0;hYODYf0;gYOEZf0;eYOF[f0:dYOF]f0:bYOF_f0:`YOFaf0;^YODdf0;[YOEgf0<WYOCkf0>SYOAof0`0PYO^ORg0c0lXO\\\\OUg0e0jXOZOWg0g0hXOXOYg0i0fXOVO[g0j0eXOUO[g0n0bXORO_g0P1_XOnNcg0U1ZXOiNhg0Z1UXOdNmg0_1TXOZNPh0h1l02O200O1O1NO1O1O001O001N2N1O2N2N2N2N1O2O1N1O2N1O2N1O1O2M3M4L3M5J;Edn6WOkQI<VNHSXO`0kg0DQXO=ng0GjWO>Uh0CkWO>Sh0DlWO<Th0DkWO=Uh0DjWO<Vh0DjWO<Vh0GfWO:Zh0GaWO=_h0D`WO<`h0E^WO<bh0GZWO:fh0IRWO<mh0U1001N2^NUWO;lh0_O[WO?gh0ZO`WOc0oi0nN\\\\UOf0kUb0\"}}, {\"image_id\": 10, \"category_id\": 1, \"bbox\": [565.0, 255.0, 424.0, 474.0], \"score\": 0.9999983906745911, \"association_id\": 1, \"light\": [-1.2623932361602783, -1.9831109046936035, 1.0696947574615479, 1.8624380826950073], \"segmentation\": {\"size\": [882, 1323], \"counts\": 
\"YQW?1^k0c0^O4L4L3N2N2N2N2O2M2O3L4L3N2M2N3N1N200000001N2O1N101O0O0010O01O00001O00010O00001O0000000O100000000O100O0O2N2N1O2N2O0O2N2O001N1O2N1O1O1O1N10001N200O1iMWMf[Oi2Rd0eMg[O]2Wd0lM`[OU2`d0RNX[Oo1hd0XNnZOl1Qe0\\\\NVZOU2je0j10O010O010O10000O100O10000O100O1O1O100O1O1O100O100O100000000000V_OjJX;V5SDjKb;V4jCdLP<\\\\3hCnLV<R3hCQMW<o2gCTMX<l2fCWMY<i2fCYMY<h2eCZMZ<g2cC\\\\M\\\\<e2aC]M_<e2]C^Mb<d2WCcMg<_2QChMn<Y2lBnMR=T2hBRNV=o1gBTNX=m1fBVNX=k1fBWNY=k1dBWN[=j1bBXN^=j1_BXNFlKS<o5RDXNIlKS<o5nCYNNiKS<R6gCZN4eKU<T6`C\\\\N:aKU<V6[C]N>^KW<X6VC]Nb0[KX<\\\\6PC\\\\Ng0XKY<a6jBYNl0VKZ<e6dBYNP1RK\\\\<j6^BVNU1PK]<V<bCjC^<W<aCjC^<W<aCiC_<X<`ChCa<X<^ChCb<X<^ChCb<Y<]CgCc<Y<]CgCc<Z<\\\\CfCd<Z<\\\\CfCe<Z<ZCfCg<Z<XCfCh<Z<XCfCi<Z<VCfCl<Y<SChCm<X<RChCP=X<oBgCS=X<lBhCV=Y<gBgCZ=Z<dBfC^=Y<aBgC`=Z<^BgCc=Z<ZBfCg=\\\\<VBdCm=\\\\<QBbCR>n<3M23M2N1N2O0O0001O0O2O1N100O100O010O001O001O0nLYBYHh=]7dB`H\\\\=\\\\7jBbHV=\\\\7mBcHT=[7nBdHR=Z7QCfHo<Y7RCfHn<Y7TCfHm<Y7UCeHk<Z7WCeHj<Z7YCcHg<\\\\7\\\\CbHe<]7]CaHd<^7^C`Hb<_7`C`Ha<_7`C`H`<`7aC_H`<_7bC`H_<`7aC_H_<a7bC^H_<a7bC]H_<d7bCZH_<e7cCYH]<g7fCVH[<i7gCUHY<l7hCRHX<n7iCQHX<n7iCQHW<o7jCPHV<Q8jCmGX<S8hClGX<U8hCiGY<Y8gCdGZ<^8fC_G\\\\<h2`AFEkNc2`NY<f2mADIlNU2cNV<g2UBBFUOR2\\\\NU<i2YB_OE\\\\O\\\\1^NT=e2PB\\\\OCAU1gNR=[2YBYOCEP1iNT=W2[BYOCGl0mNT=Q2aBVOALj0nNjM_OV?`2gBSOA1g0ROQ=h1lBoN_O6d0TOQ=e1QCjN]O>`0TOS=b1`CoMZO\\\\11TOV=_1oE\\\\OkLUOX=\\\\1nE@iLUOY=Z1nEBhLTO[=X1oEDeLTO]=V1oEGbLTO`=T1nEJ`LSOb=Q1PFL]LSOe=n0PFOZLSOh=k0PF3ULSOm=g0PF6RLSOP>d0PF8PLTOR>a0PF;mKTOU>>QF=iKUOX>;RF?dKWO[>8SF`0bKXO\\\\>6TFb0_KXO^>4UFc0]KYO_>2VFd0[KZOa>OVFf0XK\\\\Oc>KXFh0UK\\\\Of>HYFi0RK_Oj>@XFP1nJ@Pf0>PZOBRf0<nYODTf0:kYOGXf05iYOKYf03gYOMZf02fYON\\\\f0OeYO1^f0KcYO5_f0GcYO9af0A`YO`0ih00000O100000O1000000000000O010000000000O10O1000000000O10O100000O1000000000O01000000O100000O010000000000O10O1000O10000O10000000O0100000000000000O1000000000O10000000000000000000000000000000000000O10O10000000000000000000000O1000000000000000000000000000000000O1000000000000000000000000000000000000O2O0O101N102K6GbPo8\"}}, 
{\"image_id\": 11, \"category_id\": 1, \"bbox\": [1.0, 171.0, 556.0, 933.0], \"score\": 0.9999939799308777, \"association_id\": 1, \"light\": [-1.7281177043914795, -2.4276373386383057, 1.5327701568603516, 2.306938886642456], \"segmentation\": {\"size\": [1240, 836], \"counts\": \"XV2a0RV16M3N2N101O001O00001N10001O00001O001O00001O00001O0000001O000000001O00000000001O00000000001O00000000001O00000000001O00000000001O0000000000001O0000000000001O0000001O000000001O00000000001O0000000000001O0000001O00000000001O0001O000000000000000001O000001O000000001O00000000001O0001O0001O000000010O1O1O2N1O00001O010O0000001O00002N1O1O10O01O001O0010_B0XD0j:Y1kDgNR;b1iD^NS;j1hDVNV;P2fDPNX;V2dDjMZ;[2dDdM[;`2bDaM[;d2bD\\\\M\\\\;j2`DVM];R3^DnL_;Z3\\\\DfLa;d3XD\\\\Ld;o3UDRLa;^4YDaK_;m4[DSK^;Y5]DgJ\\\\;c5aD]JZ;l5bDUJZ;Q6cDoI[;U6cDkI[;Z6bDfI\\\\;^6bDcI[;b6bD^I[;h6bDXI[;m6dDSIX;S7eDmHV;[7gDeHT;c7iD]HR;j7lDVHP;P8nDPHn:W8oDjGn:Z8PEfGn:^8PEbGn:b8PE^Gn:f8QEYGn:j8PEWGm:m8QESGn:Q9oDoFo:U9oDkFo:Y9oDhFn:\\\\9PEdFm:`9SE_Fi:g9UEZFf:k9YEUFd:o9[EQFc:R:\\\\EnEb:U:]ElE`:W:_EiE`:Y:_EgE_:[:aEeE^:]:aEcE^:_:aEaE^:`:bE`E\\\\:c:cE]E[:e:eE[EY:h:fEXEX:k:gEUEV:n:jERES:R;lEnDP:W;PFhDm9\\\\;RFdDl9_;SFaDk9b;TF^Dj9e;UF[Di9h;VFXDj9i;UFWDj9l;TFTDl9m;SFSDm9o;QFQDn9R<PFnCP:S<oEmCQ:U<mEkCS:V<lEjCS:X<lEgCU:Z<jEfCV:[<iEeCV:]<iEcCW:^<hEbCX:_<fEbCZ:_<eEaCZ:`<fE`CY:a<gE^CX:d<hE\\\\CV:f<jEZCU:h<jEXCS:k<mETCR:n<nERCn9R=RFnBk9U=UFkBh9X=XFhBf9Z=ZFfBd9\\\\=\\\\FcBc9`=\\\\F`Bb9b=^F^Ba9c=_F]Ba9c=_F]B`9d=`F\\\\B`9d=`F\\\\B`9d=`F\\\\B`9d=`F\\\\B`9d=`F\\\\B_9e=aF[B_9e=aF[B_9e=aF[B_9e=aF[B^9f=bFZB^9f=bFZB^9f=aF[B_9e=aF[B^9f=bFZB]9g=cFYB\\\\9h=dFXB[9i=eFWBY9k=gFTBX9n=hFRBV9P>jFPBT9R>lFnAQ9U>oFkAm8Y>SGhAj8[>UGeAh8^>YGaAe8a>[G_Ad8b>\\\\G^Ab8d>^G\\\\Aa8e>_G[Aa8e>_G[Aa8e>_G[A`8f>`GZA`8f>`GZA`8f>`GZA`8f>`GZA_8g>aGYA_8g>aGZA^8f>bGZA^8f>bGZA]8g>cGYA]8g>cGYA]8g>cGYA]8g>cGYA\\\\8h>dGXA[8i>eGXAY8j>fGVAY8k>gGUAX8l>hGTAW8m>iGTAU8m>kGSAS8o>mGQAR8P?nGQAo7Q?QHo@m7S?SHm@k7U?UHl@g7W?YHi@e7Y?[Hg@c7[?]Hf@`7]?_Hc@`7^?`Hb@_7`?`Ha@^7`?bH`@]7a?cH_@\\\\7c?cH]@\\\\7d?dH\\\
\@[7f?dH[@[7Z5dCV2Q5`HZ7W5cD`1R4YIZ7T5mD^1j3]IX7S5UE]1b3aIX7o4\\\\E]1\\\\3dIW7n4bE\\\\1V3gIV7l4hE\\\\1Q3iIU7i4PF[1k2lIT7h4VFZ1e2oIR7g4^FX1_2RJQ7d4hFU1W2XJn6a4TGS1m1\\\\Jl6b4^Gn0e1aJj6`4iGk0\\\\1eJh6a4RHg0U1iJe6a4\\\\Hc0n0lJc6c4dH>h0oJb6c4kH<b0QKa6d4RI8<UK_6c4ZI67VK^6d4^I53WK^6d4bI4OXK_6d4dI4KXK`6e4fI4HXKa6c4jI4DYKa6d4lI4AXKc6d4mI4_OXKc6d4PJ4]OWKc6e4QJ5ZOWKd6c4TJ6WOWKd6d4UJ6VOVKe6c4WJ8ROUKf6d4YJ7POVKf6c4ZJ8oNUKf6c4]J8lNUKg6c4^J8kNTKf6d4`J:hNRKg6e4bJ9fNRKg6e4eJ:bNRKg6e4hJ;^NPKh6f4lJ<YNnJj6g4oJ<TNmJk6h4SK<PNlJk6i4XK<jMkJl6k4\\\\K:fMkJk6m4bK8aMkJi6P5jK3\\\\MmJf6T5QLNXMmJc6X5XLJTMnJ`6[5`LEoLQK\\\\6]5gLBlLQKZ6`5kL_OjLQKX6c5oL]OgLPKV6g5TMZOdLoJV6i5WMYOaLnJV6k5ZMXO^LmJW6l5\\\\MYOZLkJY6m5^M[OULhJ[6o5`M]OQLdJ^6P6bM^OnKaJ_6Q6eM@iK`J`6Q6hMAeK^Jc6Q6hMCcK\\\\Jd6R6jMC`K[Je6S6lMC]KZJf6T6nMB[KZJg6S6PNDXKXJh6S6SNDTKYJj6Q6UNFoJZJk6o5YNGjJZJm6n5\\\\NHfJYJn6o5_NGaJZJP7n5dNGYJ\\\\JR7l5jNGQJ]JU7k5oNGiI^JX7k5ROFeI_JX7j5WOF_I`JZ7i5ZOGZI`J\\\\7i5\\\\OFWIbJ\\\\7g5@GRIbJ^7e0WFZ2\\\\9^2nHcJ_7c0gFm1P9j2kHeJ]7b0YG`1b8X3gHfJ^7`0^G]1b8[3aHiJ^7=eGZ1b8\\\\3ZHmJ_7<kGT1c8_3RHQK`7:QHP1d8a3jGUK`79ZHi0c8e3cGXK`79`Hc0d8h3[G]K`77iH;c8m3SGaKa75oH7d8o3kFeKb74UI0d8U4eFgKa73ZILe8X4_FiKa73_IDh8`4VFjKb71dI^Oj8f4oEkKc7OhIZOl8k4hEmKd7KmIUOn8S5`ElKf7IQJPOP9Z5XEmKg7GWJhNR9d5nDnKi7Bj7`4[@nKl7]OP8c4S@PLn7YOU8f4l_OQLo7TO\\\\8i4d_OSLQ8POb8k4[_OULT8lNg8m4T_OWLV8gNl8Q5m^OYLW8bNQ9T5g^OZLY8^NU9X5a^OYL\\\\8ZNX9\\\\5[^OZLok0f3QTOYLPl0h3oSOXLQl0i3nSOVLRl0l3nSORLSl0P4kSOkJ_N`0fm0h4kSOdJaNd0dm0k4ZTOTKfk0P5YTOmJhk0V5UTOiJkk0Z5STOfJlk0]5QTOcJPl0^5nSObJRl0a5kSO`JUl0a5iSO_JWl0c5gSO]JYl0d5fSO\\\\J[l0e5dSOZJ]l0f5bSOZJ_l0g5_SOYJbl0h5\\\\SOXJfl0h5XSOXJil0i5VSOVJll0k5QSOUJQm0l5lROTJVm0m5gROSJ\\\\m0m5bRORJam0m5]ROSJem0m5YROSJim0m5UROSJmm0m5RRORJom0o5oQOQJRn0P6lQOPJUn0Q6jQOnIXn0R6fQOnI[n0S6dQOlI]n0U6aQOkI`n0W6]QOiIen0W6ZQOhIgn0Y6WQOgIjn0Y6UQOgImn0Y6QQOgIPo0Y6oPOgISo0X6mPOgITo0n62N2N2N3M3M2N3M2N3M2N2N3M2N2N2N2N2N2N4L4L5K5K5L5J6J4L3M2N2N2N1O2N1O1O2N2N2N2N2N3M3M3]O\\\\mNlLgR1Q3YmNoLiR1o2XmNPMjR1n2VmNRMkR1m2UmNSMlR1l2UmNSMkR1m2UmNSMlR1l2UmNSMlR
1l2TmNTMlR1l2UmNSMlR1l2TmNTMlR1l2TmNTMmR1l2SmNSMnR1l2RmNTMnR1l2RmNTMoR1k2RmNTMnR1l2RmNTMoR1k2QmNUMoR1k2RmNTMoR1k2QmNUMQS1f2RmNZMPS1b2RmN^MQS1\\\\2SmNbMQS1X2RmNhMQS1Q2SmNoMhS1=RmNBRU1Ooh`:\"}}, {\"image_id\": 12, \"category_id\": 1, \"bbox\": [258.0, 260.0, 489.0, 370.0], \"score\": 0.9998130202293396, \"association_id\": 1, \"light\": [-2.260455369949341, -1.914825439453125, 2.1228504180908203, 1.7513835430145264], \"segmentation\": {\"size\": [683, 1024], \"counts\": \"[e\\\\54Qe0<Z[OBmc0g0m[O]OQd0P1N101N1O2M3N2M2N3L3O2M2O1O1O2N100O10000O1O100O2N1O1J6N2O1O1O100O100O10000O100O1O1O1O1N2O1O1O1O100O100O10000O100O100O1O100O1O100O1O1O100O10000O1000O010000O100O1O100O1O100O10000O01000O10O10O10O01N2M2N3O100O10O010000O100O100O100M3N1N3O1O100O1O100O10000O100O010O10O0100O0eMiKcCX4Y<PLcCo3[<XL`Ci3]<`L]C_3_<jL\\\\CW3Z<YM`Cg2o;QNiCo1R<ZNkCf1R<_NlCa1Q<dNlC]1R<fNmCZ1Q<jNmCV1Q<mNnCS1P<POnCP1Q<TOmCl0Q<XOmCh0d;cKUDj35c0a;2ZDO`;;\\\\DE_;b0_D^O_;f0_DZO`;Y5O00100O1O1O1O1N1O2O1N2O001O10O010000O010O100OeKoD^NQ;a1PE_No:b1QE^No:b1RE\\\\Nn:d1SE\\\\Nl:e1UEZNk:g1TEYNl:g1UEXNj:i1XEUNh:l1YERNg:n1ZEQNe:Q2[EnMe:S2[ElMe:U2[EjMe:W2ZEiMe:Y2[EfMe:Z2[EeMf:\\\\2YEdMg:\\\\2ZEcMf:^2YEbMg:_2YE`Mg:a2XE_Mh:b2XE]Mh:i2SEVMm:d60[LWEhLi:X3^EaLb:`3_E^La:c3`E[L`:e3aEZL^:g3cEXL]:h3cEXL]:h3dEWL\\\\:i3dEWL\\\\:i3eEVL[:j3eEVL[:j3eEUL[:l3fESLZ:m3fESLZ:m3gEQLZ:o3fEQLY:P4hEoKX:Q4hEnKY:R4hEmKW:T4jEjKW:V4iEjKV:W4kEgKV:Y4jEgKV:Y4jEgKU:Z4lEeKT:[4lEdKU:\\\\4kEdKU:\\\\4kEdKT:]4lEcKT:]4kEdKU:\\\\4kEdKU:\\\\4kEdKU:\\\\4jEeKV:[4jEeKU:\\\\4iEfKW:Z4fEiKZ:W4dEkK\\\\:U4cElK]:T4bEmK^:S4bEmK^:S4aEnK_:R4aEnK_:R4`EoK`:Q4`EoK_:R4aEnK_:R4aEmK`:S4_EnKa:R4_EnKa:R4_EnKa:R4^EoKb:Q4^EoKb:Q4]EPLc:P4\\\\EQLd:o3YETLg:l3WEVLi:j3UEXLk:h3UEXLk:S7100000000000O0100O10000O0100O010O100O010O1000000000O1000O10000000O10000000000O10000O10000O10000O1000000O1bLoDbLQ;l600000aLoDdLQ;\\\\3oDcLR;]3nDcLR;]3nDcLR;]3nDcLR;]3nDbLS;^3mDbLS;^3mDbLT;]3lDbLU;^3kDbLU;i61O1eLiDbLW;^3iDbLX;]3hDbLY;^3gDbLY;^3gDbLZ;]3fDbL[;^3eDbL\\\\;]3dDcL\\\\;]3dDcL];\\\\3cDdL];\\\\3cDdL];e61
N10001O00000O101O000O101O0O1fL[DnLf;Q3ZDoLf;Q3ZDnLg;Q3YDPMh;o2XDQMh;o2XDQMi;n2WDQMj;n2WDRMj;m2VDSMk;l2UDTMl;k2TDTMm;l2SDTMn;k2RDUMo;i2RDWMo;h2QDXMP<g2PDYMP<g2PDYMQ<f2oCYMS<f2mCZMT<e2lC[MT<e2lC[MU<d2kC[MW<d2iC\\\\MW<d2iC[MY<d2hC[MY<d2gC[MZ<e2fCZM\\\\<e2dC[M\\\\<e2dC[M]<d2cC[M^<e2bC[M_<d2aC\\\\M`<c2`C\\\\Ma<d2_C\\\\Mb<c2_C\\\\Ma<d2_C\\\\Mb<c2^C]Mb<c2_C\\\\Mb<c2^C\\\\Mc<d2^C[Mb<e2^C[Mc<d2]C[Md<e2]CZMc<f2]CZMc<f2^CXMc<h2]CWMe<h2[CXMe<h2[CWMf<j2ZCUMg<j2YCVMg<j2YCUMh<k2XCUMi<j2WCVMi<j2XCUMh<k2XCUMi<k2VCTMk<l2VCSMk<m2TCSMl<m2TCSMm<m2SCRMn<m2RCRMo<o2QCPMP=P3oBPMQ=P3PCoLQ=Q3nBnLS=R3mBnLS=S3mBlLS=T3mBkLT=V3kBjLU=V3lBhLU=Y3jBgLV=SNdBk46RMV=RNeBm44PMX=SNeBm42PMY=RNfBo40nL\\\\=RNeBP5NnL]=QNfBQ5MmL^=QNgBR5JmL`=nMjB8F_40ZMQ>QNTBd4KZM^>f2aAYMb>e2^A[Md>c2\\\\A\\\\Mg>b2YA^Mj>_2VA`Mm>^43L2O1O001N101N2O1N3M3L4L3M3N2N2O1N2O1O2M2O3M4K6VN__O`Nc`0]1^_ObNd`0[1^_OeNd`0W1^_OhNd`0U1^_OjNc`0U1__OhNd`0V1]_OhNf`0V1[_OhNi`0T1Y_OkNj`0Q1X_OnNk`0b0d_O]Oh`0Kd_O4jb00000O2O1N2O0O2N4M3GoXh5\"}}, {\"image_id\": 13, \"category_id\": 1, \"bbox\": [200.0, 595.0, 66.0, 74.0], \"score\": 0.9997410178184509, \"association_id\": 1, \"light\": [-2.143784523010254, -1.3253211975097656, 2.0579376220703125, 1.150414228439331], \"segmentation\": {\"size\": [683, 1024], \"counts\": \"VQV41Ze000001O00001O0001O0001O01O01O0000100O000010O010O00001O^e20aZM3M2N2N1N2M3K5K5A?@`0J6H8N2O1000000000000002N:F2N4L4L6J4L3lNR\\\\Oe0ad0I5K2N3KQnh?\"}}, {\"image_id\": 13, \"category_id\": 1, \"bbox\": [326.0, 432.0, 698.0, 232.0], \"score\": 0.9999042749404907, \"association_id\": 2, \"light\": [-2.6782541275024414, -1.6578285694122314, 2.670149803161621, 1.543189287185669], \"segmentation\": {\"size\": [683, 1024], \"counts\": 
\"QRj62Ye00O2O0O101O0O10001N100O10001N10000O2O000O10000000000O10001O00000000000O1000000000000O10000000000000000O10000000000TN@m^O`0Qa0Hi^O8Va0Ne^O2Ya03d^OM[a05c^OL\\\\a06c^OJ\\\\a08c^OH[a0;d^OE[a0<e^ODYa0?f^OAYa0`0f^OAXa0b0g^O^OXa0c0h^O]OXa0d0g^O\\\\OXa0e0g^O\\\\OYa0d0g^O\\\\OYa0d0g^O\\\\OXa0f0g^OZOYa0f0f^O[OYa0f0g^OZOXa0g0h^OYOXa0g0h^OYOWa0i0h^OWOWa0j0i^OVOWa0j0i^OVOVa0k0j^OUOUa0l0k^OTOX?YO^Am1XOkNV?^O_Ah1[OjNS?B^@Hc0m1LiNn>I^@Ef0j1NhNi>0_@Ah0h10gNi>>UAm01eNi>`0g@jN1Q2?eNi>a0d@mN1n1b0dNh>c0b@nN2l1d0cNh>d0`@oN3j1e0cNg>U1a@9h0bNg>V1`@8h0cNh>^3WAbLi>^3WAbLi>^3VAcLi>^3WAbLi>^3WAbLi>^3VAcLj>]3VAcLj>]3UAdLk>\\\\3UAdLj>]3VAcLj>]3VAcLj>]3UAdLk>\\\\3UAdLk>\\\\3UAdLj>]3VAcLj>]3VAcLj>]3VAcLj>]3UAdLk>]3TAcLk>^3UAbLk>^3UAbLk>^3UAbLk>^3UAbLk>^3UAbLa>ROUA]4:aLa>ROVA\\\\49bLa>ROVA\\\\49bLk>^3UAbLk>_3TAbLk>^3TAcLl>]3TAcLl>^3SAbLm>^3SAbLn>]3RAcLn>]3RAcLn>^3QAbLo>^3QAbLP?^3o@cLP?]3PAcLP?^3o@bLR?]3n@cLS?]3l@cLU?XOa@i3:oLV?QOh@o31PMd?o2\\\\@QMf?n2Y@QMj?m2W@RMj?m2V@SMS`0e2m_OYMU`0f2k_OZMU`0f2l_OXMV`0h2i_OWMX`0i2j_OTMW`0l2k_OPMW`0Q3o_O\\\\L@2g`0a3l0001O0000101N2N3M2N2N2O0O2N2N1O1O2N100O1N01O1O1O0O2O1N2O1O001O1O10O01O1O100O10O10O10000O10O10O10O1O010O010i^OXLj`0h3S_OiL_`0X3a_OjL]`0U3c_OmL[`0T3d_OmL\\\\`0R3e_OoLY`0R3g_OnLY`0R3f_OoLY`0Q3h_OPMW`0P3i_OPMV`0Q3j_OoLV`0P3k_OPMU`0P3j_OQMV`0o2j_OQMV`0o2j_ORMT`0o2l_OQMT`0n2m_ORMS`0n2m_OQMT`0o2l_OQMT`0o2l_OQMS`0P3l_OQMT`0n2m_ORMS`0n2m_ORMS`0n2m_ORMR`0o2m_ORMS`0m2n_OSMR`0m2n_OSMR`0m2m_OSMT`0m2l_OSMS`0m2n_OSMR`0m2n_OSMR`0m2m_OTMS`0l2m_OTMS`0k2n_OUMQ`0l2o_OTMQ`0l2o_OTMQ`0l2o_OTMQ`0k2P@UMP`0k2P@UMo?l2Q@TMo?k2Q@VMo?j2Q@UMP`0k2P@UMP`0k2P@UMP`0k2P@UMo?l2Q@TMo?k2R@UMn?k2R@UMn?k2R@UMn?k2R@UMn?k2R@UMn?k2R@UMn?k2R@UMn?k2R@TMn?m2R@SMn?m2Q@TMo?l2Q@TMo?l2Q@TMo?l2Q@TMo?l2Q@TMo?l2Q@SMo?m2R@SMn?m2R@SMn?m2Q@TMo?l2Q@TMo?l2Q@TMo?l2Q@TMo?l2Q@TMn?m2R@SMn?m2R@SMn?m2R@SMn?m2R@SMn?m2R@SMn?m2R@RMn?o2R@QMn?o2R@QMn?o2R@QMn?o2R@QMn?n2S@RMl?o2T@QMl?o2T@QMl?o2T@QMl?o2S@RMm?n2S@RMl?o2T@QMl?o2T@PMm?P3S@PMm?P3S@PMl?Q3T@oLl?Q3T@oLl?Q3T@oLl?Q3T@oLk?R3U@nLk?R3U@nLk?R3U@nLk?R3U@nLj?R3W@n
Li?R3W@nLi?R3W@nLi?R3W@nLh?S3X@lLi?T3W@lLi?T3W@lLh?U3W@lLi?T3W@lLi?T3W@lLi?T3W@lLi?T3W@lLi?T3W@lLh?U3X@kLh?U3X@kLh?U3X@kLh?U3X@kLh?U3X@kLh?U3X@kLg?V3Y@jLg?V3Y@jLg?V3Y@jLg?V3Y@jLg?V3Y@jLg?V3X@kLh?U3X@kLg?V3Y@jLg?V3Y@jLg?V3Y@jLg?U3Z@kLf?U3Z@kLf?U3Z@kLf?U3Z@kLf?U3Z@kLf?U3Z@kLe?V3[@iLf?W3Z@iLf?V3Z@kLf?U3Z@kLf?U3Z@kLf?U3Z@kLf?U3Z@kLe?V3[@jLe?V3[@jLe?U3[@lLe?T3[@kLf?U3Z@kLf?U3Z@kLf?T3[@lLe?T3[@lLd?U3\\\\@kLd?U3\\\\@kLd?U3\\\\@kLd?U3\\\\@kLd?T3\\\\@mLd?S3\\\\@mLd?S3\\\\@mLd?S3\\\\@mLd?S3\\\\@mLd?S3\\\\@mLd?S3\\\\@mLd?R3]@nLc?R3]@nLc?R3]@nLc?R3]@nLc?R3]@nLc?R3]@nLc?R3]@nLc?R3\\\\@oLd?Q3\\\\@oLd?P3]@PMc?P3]@PMc?P3]@PMc?P3]@oLd?Q3\\\\@oLd?Q3[@PMe?P3[@PMe?P3[@PMe?P3[@PMe?P3[@PMe?P3[@PMe?P3[@PMe?P3[@PMe?P3[@PMe?P3[@PMe?P3Z@QMf?o2Z@QMf?o2Z@QMf?o2Z@QMf?o2Z@QMg?n2Y@RMg?n2Y@RMg?n2Y@RMg?n2Y@RMg?n2Y@RMg?m2Z@SMf?m2Z@SMf?m2Z@SMf?m2Z@SMf?m2Z@SMf?m2Z@SMf?m2Z@SMf?m2Z@SMf?m2Z@SMf?m2Z@SMf?m2Z@SMf?m2Z@SMf?m2Z@SMf?m2Z@SMf?n2X@SMh?m2X@SMi?l2W@TMi?l2X@SMh?m2X@SMh?m2X@SMh?m2X@SMh?m2X@SMh?m2X@SMh?m2X@SMh?m2X@SMh?m2X@SMh?m2X@SMh?m2X@SMh?m2X@SMi?l2W@TMi?l2W@TMi?l2W@TMi?l2W@TMi?l2W@TMi?l2W@TMi?l2W@TMi?m2W@RMi?n2W@RMi?n2W@RMi?n2W@RMi?n2W@RMh?P3W@PMi?P3W@PMi?P3W@PMi?P3W@PMi?Q3V@oLj?Q3V@PMi?P3W@PMi?Q3V@oLj?Q3V@oLj?R3U@nLk?R3U@nLk?R3V@mLj?S3V@mLj?T3U@lLk?T3U@lLk?T3U@lLk?T3U@lLk?T3U@lLk?Z40000000kNU@jLk?V3U@jLk?V3U@jLk?V3U@jLk?V3U@jLk?V3U@kLj?U3V@kLj?V3V@iLj?W3V@iLj?W3V@iLj?W3V@iLj?W3V@iLj?W3W@hLi?X3W@hLi?X3W@hLi?X3W@iLh?W3X@iLh?X3X@gLh?Y3X@gLh?Y3X@gLh?Y3X@gLh?Y3Y@fLg?Z3Y@gLf?Y3Z@gLf?Y3[@fLe?Z3[@fLe?Z3\\\\@eLd?[3\\\\@eLd?[3\\\\@fLc?[3]@dLc?\\\\3]@dLc?\\\\3]@dLc?\\\\3]@dLc?\\\\3]@dLc?\\\\3^@cLb?]3]@dLc?\\\\3]@dLc?\\\\3]@dLc?\\\\3]@eLb?[3^@eLb?\\\\3]@dLc?\\\\3]@dLc?\\\\3]@dLc?\\\\3]@dLc?\\\\3]@dLc?\\\\3]@eLb?[3^@eLb?[3^@eLb?[3^@eLb?[3^@fL`?[3`@eL`?Z3`@gL`?Y3`@gL`?Y3`@hL_?X3a@hL_?X3a@hL_?X3a@hL_?W3b@iL]?X3c@iL\\\\?W3d@iL\\\\?W3d@iL\\\\?W3d@iL\\\\?W3d@iL[?W3f@iLZ?W3f@iLZ?W3f@iLZ?W3f@jLX?W3h@iLX?W3h@iLX?V3i@jLW?V3i@jLW?V3i@jLV?W3j@iLV?W3j@iLV?W3j@jLU?V3k@jLU?V3k@jLU?V3k@jLU?V3k@jLU?V3k@jLT?W3k@kLT?U3l@kLT?U3l
@kLT?U3l@kLT?U3l@kLT?U3l@kLT?U3l@kLT?U3l@kLT?U3l@kLS?V3n@jLQ?V3o@jLQ?V3o@jLQ?V3o@jLQ?V3o@jLQ?V3o@jLQ?V3o@jLQ?V3o@jLQ?V3PAiLP?W3PAiLP?X3o@iLo>X3QAhLo>X3RAgLn>Y3RAgLn>Y3SAfLm>Z3SAfLm>[3SAdLl>]3TAdLk>\\\\3UAdLk>\\\\3VAcLj>]3VAcLj>^3UAbLk>^3UAbLj>_3VAaLj>_3VAaLj>_3WA`Li>a3VA_Lj>a3VA_Li>b3WA^Li>b3WA^Li>c3VA^Li>b3XA]Lg>d3YA\\\\Lg>e3YAZLg>f3ZAYLe>h3[AXLe>i3[AVLe>j3[AVLe>k3[ATLd>m3\\\\ASLd>n3]APLc>P4aAlK_>U4cAiK[>Y4eAfK[>Z4fAeKZ>\\\\4fAcKZ>^4eAbKZ>`4fA_KZ>b4eA^K[>c4eA\\\\K[>e4dA[K\\\\>e4eA[KY>g4fAYKZ>h4eAXK[>i4dAWK\\\\>i4dAWK\\\\>j4cAVK\\\\>k4dAUK\\\\>l4dASK\\\\>n4cARK]>n4cARK]>n4cARK]>o4bARK]>n4dAQK\\\\>P5cAPK\\\\>Q5eAnJ[>R5eAnJ[>S5eAlJ[>T5fAkJZ>V5eAkJZ>U5gAjJY>W5fAiJY>X5gAhJY>Y5fAhJY>X5hAgJX>Z5gAfJY>[5fAeJZ>[5fAeJY>]5gAcJX>]5hAcJX>]5hAcJX>^5gAbJY>^5gAbJX>`5gA`JY>`5gA`JY>`5gAaJX>_5hAaJX>_5hAaJX>_5hAaJW>`5iA`JW>`5jA_JV>a5jA`JU>a5jA_JV>a5jA_JU>b5kA^JU>b5kA_JT>a5lA_JT>a5lA_JT>a5lA`JR>a5nA_JR>a5nA`JQ>`5oA`JQ>`5oAaJP>_5PBaJo=`5QB`Jo=`5QBaJn=_5RBaJn=_5SB`Jm=`5SB`Jl=a5TB_Jl=a5TB_Jl=a5TB_Jl=a5TB_Jl=a5TB_Jl=a5TB_Jl=`5UB`Jk=`5UB`Jk=`5UB`Jk=`5UB`Jl=_5TBaJl=_5TBaJl=_5TBaJl=_5TBaJl=_5TBaJl=_5TBbJk=^5UBbJk=]5VBcJj=]5VBcJj=]5VBcJj=]5VBcJj=]5VBcJj=]5VBcJk=\\\\5UBdJk=\\\\5UBdJk=[5VBeJj=[5VBeJj=[5VBeJj=[5WBdJi=[5XBdJj=[5VBeJj=Z5WBfJi=X5ZBfJh=W5ZBiJf=U5]BiJe=T5]BlJd=Q5_BmJc=o4aBoJb=c4dASKo06b=`4n1IUB\"}}, {\"image_id\": 13, \"category_id\": 1, \"bbox\": [9.0, 214.0, 1004.0, 308.0], \"score\": 0.9995362758636475, \"association_id\": 3, \"light\": [-2.4286303520202637, -1.8617019653320312, 2.3380329608917236, 1.7432308197021484], \"segmentation\": {\"size\": [683, 1024], \"counts\": 
\"W_61Ye02O1O1N101O00000O1000000O10000000000O1000000000000O100000000000000O1000000000000O1oL`0W@@c?h0[@XOb?l0]@TOb?n0]@ROb?P1]@POb?R1]@nNc?S1[@nNd?T1[@lNe?U1Z@kNe?W1Z@iNf?W1Z@iNe?Y1Z@gNf?Z1Y@fNf?[1Z@dNg?]1X@cNh?]1X@cNh?]1W@dNi?]1V@cNj?]1V@cNj?^1U@bNk?^1U@bNk?^1U@bNk?^1U@bNk?_1T@aNl?_1T@aNl?_1T@aNl?`1S@`Nm?`1S@`Nm?`1S@`Nm?`1R@aNn?`1Q@_NP`0a1P@_NP`0a1P@_NP`0a1P@_NP`0b1o_O^NQ`0b1o_O^NQ`0b1o_O^NQ`0b1o_O^NQ`0c1n_O]NR`0c1m_O^NS`0b1m_O^NS`0b1m_O^NS`0b1m_O^NS`0c1l_O]NT`0c1l_O]NT`0c1l_O]NT`0c1l_O]NT`0c1k_O^NU`0c1j_O]NV`0c1j_O]NV`0c1j_O\\\\NW`0d1i_O\\\\NW`0d1i_O\\\\NW`0d1i_O\\\\NV`0f1i_OZNW`0f1i_OZNW`0f1i_OZNW`0f1i_OZNW`0f1i_OZNW`0g1h_OYNX`0g1h_OYNX`0g1h_OYNW`0h1i_OXNW`0i1g_OXNY`0h1g_OXNY`0h1g_OXNY`0h1g_OXNX`0j1g_OVNY`0j1g_OVNY`0j1g_OVNY`0j1g_OVNY`0k1e_OVNZ`0k1f_OUNZ`0k1f_OUNZ`0k1f_OUNZ`0k1e_OVN[`0k1d_OTN\\\\`0m1d_OSN\\\\`0m1d_OSN\\\\`0m1d_OSN[`0n1e_ORN[`0n1e_ORN[`0o1d_OQN\\\\`0o1d_OQN[`0P2e_OPN[`0P2e_OPN[`0P2e_OPN[`0P2e_OPNZ`0Q2f_OoMZ`0R2e_OnM[`0R2d_OoM\\\\`0Q2d_OoM\\\\`0Q2d_OoM\\\\`0Q2d_OoM[`0R2e_OnM[`0R2e_OnM[`0R2e_OnM[`0S2d_OmM\\\\`0S2d_OmM\\\\`0S2d_OmM\\\\`0S2d_OmM[`0T2e_OlM[`0T2e_OlM[`0T2d_OmM\\\\`0S2d_OmM\\\\`0S2d_OmM\\\\`0T2c_OkM^`0U2b_OkM^`0U2b_OkM^`0U2b_OkM]`0V2c_OiM^`0W2b_OiM^`0W2c_OhM]`0X2c_OgM^`0Z2a_OfM_`0Z2a_OfM_`0Z2a_OeM``0[2`_OeM``0[2`_OeM``0[2`_OdMa`0\\\\2__OcMa`0^2__ObMa`0^2`_O`Ma`0`2__O_Mb`0b2]_O^Mc`0b2]_O]Mc`0d2]_O\\\\Mc`0d2^_OZMc`0f2]_OZMc`0f2]_OZMb`0g2__OWMb`0i2^_OWMb`0i2^_OWMb`0i2^_OWMa`0j2`_OTMa`0m2^_OSMa`0n2^_OTMa`0l2__OTMa`0l2__OTM``0m2__OTMa`0l2__OUM_`0l2`_OUM``0k2`_OUM_`0l2`_OVM^`0k2`_OWM``0j2__OVMa`0j2^_OXM``0i2`_OWM``0i2`_OXM^`0i2b_OWM^`0i2b_OXM]`0h2c_OXM]`0h2b_OZM\\\\`0h2c_OXM]`0h2c_OYM\\\\`0g2d_OYM\\\\`0g2d_OYM[`0h2e_OYMZ`0g2f_OYMZ`0f2g_OZMX`0g2h_OZMW`0f2h_O[MX`0e2h_O[MW`0f2i_OZMW`0f2i_O[MU`0f2k_OZMU`0f2k_OZMT`0g2l_OZMS`0f2m_OZMR`0g2m_OZMS`0f2m_O[MQ`0f2o_OZMP`0g2P@YMo?h2P@ZMn?g2P@[Mo?e2Q@\\\\Mn?e2Q@]Mm?d2S@\\\\Ml?e2S@\\\\Mm?d2S@\\\\Ml?e2S@\\\\Ml?e2T@\\\\Mk?d2T@]Mk?d2Q@`Mo?`2Q@`Mn?a2Q@`Mo?`2Q@`Mn?a2R@_Mn?a2Q@`Mo?`2Q@`Mn?a2R@`Mm?`2S@`Mm?_2T@aMk?`
2T@aMl?_2T@aMk?`2U@`Mk?`2U@`Mj?a2V@_Mj?a2V@_Mi?b2V@_Mj?`2W@`Mh?a2X@_Mh?a2X@_Mg?b2Y@^Mg?b2Y@^Mg?b2Y@^Mf?c2Y@^Mg?b2Y@^Mf?c2Z@]Mf?c2Z@]Mf?c2Z@]Me?c2\\\\@]Md?c2\\\\@]Md?c2\\\\@]Mc?d2]@\\\\Mc?d2\\\\@]Md?c2\\\\@]Mc?d2]@\\\\Mc?d2]@\\\\Mc?d2]@\\\\Mb?e2^@[Mb?e2^@[Mb?e2^@[Mb?e2^@[Ma?f2_@ZMa?f2_@ZMa?f2_@ZM`?g2`@YM`?g2`@YM`?g2`@YM`?g2`@YM_?h2a@XM_?h2a@XM_?h2a@XM_?h2a@XM^?i2b@WM^?i2b@WM^?i2b@WM^?i2b@WM]?j2c@VM]?j2c@VM]?j2c@VM]?j2c@WM\\\\?i2d@WM\\\\?i2d@WM\\\\?i2d@WM[?j2e@VM[?j2e@VM[?j2e@VM[?j2e@VM[?j2e@VM[?j2e@VM[?j2e@VMZ?k2f@UMZ?k2f@UMZ?k2f@VMY?j2g@VMY?j2g@VMY?j2g@VMY?j2g@VMY?j2g@WMX?i2h@WMX?i2h@WMX?i2h@WMX?i2h@XMW?h2i@XMW?h2i@XMW?h2i@YMV?g2j@YMV?g2j@YMV?g2j@YMV?g2j@ZMU?f2k@ZMU?f2k@ZMU?f2k@ZMU?f2l@YMT?g2l@ZMS?f2m@ZMS?f2m@ZMS?f2m@ZMS?f2m@ZMS?f2m@ZMS?f2m@ZMS?f2m@[MR?e2o@ZMQ?f2o@ZMQ?f2o@ZMQ?f2o@[MP?e2PA[MP?e2QAZMo>f2QAZMo>f2QA[Mn>e2SA\\\\Mk>d2UAbMe>^2[AdMc>]2]AdMa>\\\\2_AdMa>\\\\2_AeM`>[2aAdM_>\\\\2aAeM^>\\\\2aAdM_>\\\\2aAdM_>\\\\2aAeM^>[2cAdM]>]2bAcM^>]2bAdM]>\\\\2cAdM]>]2bAcM^>]2bAcM^>^2aAbM_>^2bAaM]>`2cA`M]>a2bA`M]>`2cA`M]>a2bA_M^>a2cA^M]>c2bA]M^>c2bA]M^>d2bA\\\\M]>d2cA\\\\M]>d2eAZM[>g2gAVMY>j2jATMU>l2lASMT>n2lAQMT>o2mAQMR>o2oAPMQ>P3oAPMQ>Q3oAnLQ>R3oAnLQ>R3oAoLP>Q3QBnLo=S3PBmLP>S3PBmLP>S3PBmLP>S3QBlLo=U3PBkLP>U3PBkLP>V3oAjLQ>V3oAjLQ>V3PBiLP>X3oAhLQ>X3oAhLQ>Y3oAfLQ>Z3oAfLQ>[3oAdLQ>]3nAcLR>^3nA`LS>b3lA]LT>d3mAZLS>g3VBoKj=R4WBlKi=V4WBhKi=Y4WBfKi=[4WBdKi=]4WBbKi=_4WB`Ki=a4VB^Kk=c4UB\\\\Kk=e4TB[Kl=f4SBZKm=f4TBYKl=h4SBXKm=i4RBWKn=i4SBVKm=k4RBTKo=l4QBTKo=l4QBTKo=m4QBRKo=n4QBRKo=o4PBQKP>o4QBoJP>Q5PBoJP>R5PBmJP>S5PBlJQ>T5PBkJP>V5PBiJP>W5RBfJo=[5SBaJn=_5VB[Jl=e5m01Oi0WO1O001O001O001O001O001O001O001O01O00000000000000000O10000oN^AYKb>g4`AWK`>h4bAWK^>h4eAVK[>i4hAUKX>j4jAUKV>j4mATKS>k4oATKQ>k4QBSKP>k4SBTKm=k4UBTKl=j4UBVKk=h4XBWKh=g4ZBYKf=d4^B[Kb=c4`B]K`=a4cB^K]=`4eB`K[=_4fBaKZ=_4gB`KY=_4hB`KY=`4gB`KY=`4gB`KY=_4iB`KW=`4iB`KW=`4jB^KW=a4jB_KV=a4jB_KV=a4kB]KV=b4kB^KU=b4lB\\\\KU=d4kB\\\\KU=c4mB[KT=e4lB[KT=d4PB\\\\Km>d4RA]Kn>c4l@\\\\K_O2e?a4l@fKS?Z4m@fKS?Y4m@iKR?W4n@hKS?W4o@hKQ?X4o@hKQ?W4QAaKZO2e?
]4QA_K\\\\O3d?]4WAbKj>]4WAbKi>]4S1O100O10000O100O10000O10O0100O100O100O1O100O1O1O1O100O1O10O0100O100O10000O10000O10000O1000000O1000000O1000000O100O100O10000O100O100O10000O100O10000O10000O10000O10000O100O100O10000O100O1000000O1000000O100000000O101O0000000O10000000000000000000000000000000000000000000000000001O0000000000000000000000000O1000000000O1000000000O10000000000000000O100000O10000000O10000000000O10000000X_ORMS?m2n@SMR?m2n@SMR?m2m@UMR?k2n@UMR?k2n@UMR?j2o@VMQ?j2o@VMQ?j2o@VMQ?j2o@VMQ?i2PAWMP?i2PAWMP?i2PAWMP?i2PAXMo>g2QAZMo>f2QAZMo>f2QAZMo>f2QAZMo>f2QAZMo>f2QAZMo>e2RA[Mn>e2RA[Mn>e2RA[Mn>e2RA[Mn>e2RA[Mn>d2SA]Ml>c2TA]M\\\\NJ\\\\`0i2XA]M[NK]`0h2WA^M[NK^`0g2WAbMi>^2WAbMi>^2WAbMi>]2XAcMh>]2XAcMh>]2XAcMg>^2ZA]MYNL]`0g2ZA]Mj>c2VA]Mj>c2VA\\\\Mk>c2VA]Mj>c2WA\\\\Mi>d2WA\\\\Mi>d2WA\\\\Mi>d2WA\\\\Mi>d2WA[Mj>e2WAZMi>e2XA[Mh>e2XA[Mh>e2XAZMi>f2XAYMh>g2n1000000O1000000000O10O10000000000000O100000000O100000O010000O10000O100O10000O10000O1000O10O10000000000O2O000000000000001O0O100000000000001O000O10000000000000001N10000000000O1000000O10000O100O100O1O1O1O100O1O1O1O1O100O1O1O100O1O100O100O1000O0100O100O010O100\\\\OWLe_Oi3Y`0\\\\Ld_Oe3Z`0_Lc_Ob3Z`0cLc_O]3[`0hLc_OX3\\\\`0kL`_OW3_`0j001O000001O0000000000001O001O00100O010O100O100O10O0100O1O1O1O100O1O1K5I7L4K4N3M3N2O1N2O1N2O1O1O1O1O1O100O1O100O1O1O1O100O1O2M2N2M3L4K5K6\\\\OR]OcNTc0\\\\1>N2O1O1O2O0O1O1O2N1O101N1O100O2O0O`BPOoI3l;m0UJQOmI5KL^;l0jJUOkI6IKb;j0jJUOkI6IKb;i0kJVOjI6JJa;j0kJVOkI5LH_;l0jJWOlI3m;e0YJVOlI2m;h0WJVOe6i0\\\\IWOVJ0Z;h0`JXOUJ2Z;e0bJXOUJ3Y;e0bJXOTJ5Z;a0dJYORJ6Z;a0dJYORJ6[;?fKAZ4?fKAZ4>gKBZ4<gKDY4<gKDZ4:gKFY49hKHX45jKKV43lKMU40mK0T4MnK3R4KPL5c<0000000000000000000000O2O0O2O0O2Omj6\"}}, {\"image_id\": 14, \"category_id\": 1, \"bbox\": [602.0, 381.0, 86.0, 177.0], \"score\": 0.9999995827674866, \"association_id\": 2, \"light\": [-2.3798763751983643, -2.3095428943634033, 2.3244123458862305, 2.216465473175049], \"segmentation\": {\"size\": [864, 1152], \"counts\": 
\"a^l?7ij01O1N2O001O00001O000000001O000000001O0000000010O0001O001jLO[[O1bd0<U[OChd0d0T[O]Oid0h0U[OWOid0o0T[ObNnM<mf0W1U[OgNhd0_1V[OaNfd0e1Y[OYNed0l1P[OlMcN8Zf0R2gZORNlNK[f0]2ZZOQN[O@Zf0c2RZOTNCZOYf0R4gYOmKWf0V4hYOkKTf0b4M4I7K4N3M201O001O00CbZOYK]e0f4eZOYK[e0g4eZOZKZe0g4gZOWKYe0j4hZOTKXe0n4iZOoJWe0U5gZOiJYe0Y5fZOfJZe0[5eZOeJ\\\\e0Z56O2N2N4L3M3M3M2N2gMZZOTOge0i0aZOQO`e0l0jZOkNYe0Q1a[OUNcd0g1]2N2M3K7I5L5L4L7I5L8H]RW<\"}}, {\"image_id\": 14, \"category_id\": 1, \"bbox\": [191.0, 389.0, 111.0, 199.0], \"score\": 0.9484664797782898, \"association_id\": 3, \"light\": [-2.337596893310547, -1.987549066543579, 2.241708755493164, 1.9060823917388916], \"segmentation\": {\"size\": [864, 1152], \"counts\": \"XgQ52mj02N2O0000000000000ENhUO2Vj00jUO0Uj02jUONVj03hUONWj03jUOLUj05kUOKTj06lUOJSj07mUOISj07lUOJSj06oUOIPj08PVOHoi09QVOGni0:RVOFmi0;SVOEmi0;SVOEli0=SVOCmi0=SVOCli0>SVOB^g00gZO?iMB]g04iZO:iMA]g09hZO=We0EhZO<Ve0GiZO9Ue0IiZO8Ve0IjZO7Se0LnZO4md00S[O1kd00U[O1gd03Y[OLed06[[OKad08`[OH]d0:c[OF\\\\d0;e[OEXd0>g[OBWd0a0h[O_OVd0e0i[OZOWd0h0h[OWOXd0k0f[OUOZd0m0e[ORO\\\\d0P1b[OoN_d0S1^[OmNcd0V1Z[OhNgd0[1W[OcNjd0`1T[O_Njd0g1S[OWNnd0l1P[OSNQe0o1mZOQNSe0R2jZOmMVe0V2hZOjMWe0Z2fZOfMXe0T3PZOkLoe0\\\\3lYOdLQf0a3mYO]Lke0P4RZOmKoe0Z46<E9H3N1N101O1O001O001O10O1O2O1N1O2M5L6K1N2N2M2O1POPZOnLPf0T2U[OcMmd0T2`[OfMbd0X2d[ObM_d0[2T2N3M3N3N1N2O1N2O2M4L2M4M5L4K5K;C<Beg\\\\f0\"}}, {\"image_id\": 14, \"category_id\": 1, \"bbox\": [420.0, 384.0, 97.0, 216.0], \"score\": 0.9999990463256836, \"association_id\": 1, \"light\": [-2.2688205242156982, -1.6267638206481934, 2.2489495277404785, 1.4403047561645508], \"segmentation\": {\"size\": [864, 1152], \"counts\": 
\"lmR;8fj03N1O2O0O1000O10O10O1000000000\\\\M1VZO1ce09YZOGce0>\\\\ZOCXe0i0fZOXOmd0V1R[OjNbd0b1^[O_NVd0m1h[OTNRd0T2l[OmMnc0Y2Q\\\\OgMjc0_2U\\\\OaMhc0c2W\\\\O^Mgc0d2X\\\\O]Mgc0e2W\\\\O\\\\Mgc0i2U\\\\OXMhc0m2U\\\\OTMjc0R3P\\\\OoLnc0Y3k[OgLUd0]3g[OcLXd0e3b[OYL^d0R53N12O2M4M1N3N2M1O1O1O1O3M2O1N1O100O2M2O100000O11O0002M2N2L5M2N3M2N2M3J6QOo0I6L5M3L4M3N3N1O1N101O1O001O1O001N1bMRYOj0nf0QO^YOM^ODVg0;cYOJ^ODSg0>fYOH]OCQg0b0fYOGA^Omf0g0iYOAWg0;l1In[g`0\"}}, {\"image_id\": 15, \"category_id\": 1, \"bbox\": [76.0, 175.0, 434.0, 251.0], \"score\": 1.0, \"association_id\": 2, \"light\": [-1.4171404838562012, -3.102489709854126, 1.2076191902160645, 3.004786491394043], \"segmentation\": {\"size\": [426, 640], \"counts\": \"fjo0g0^<<G4K5H:J5L4K5Ca0A>H4L3M6K4L3N2N3M3M2N2N3M8H2N2N3M<aGdKX7Y5N1O1O2N1O000000O100N2eNTIWLm6e3ZIWLg6g3\\\\IWLe6g3_IVLb6h3fIQL[6m3b1O1O1N2O1O10000001O1O3M3WGfK\\\\8g4M3M9G8H2N1O3_HkJg6m5O1O000000000001O00001O1QN]JULe5f3aJVLa5h3cJUL_5i3R2N3N2M200OaIYL]3f3WKYLC2W5d3UK[LA5`5Y3nJdL_O5j5P3gJkL^O6P6j2bJPM]O7S6g2`JRM]O8S6e2`JSM]O8T6d2_JTM\\\\O9U6c2_JUM[O8W6b2lHRM^15_O7W6b2kHTM^13@8W6d2YJTM@8W6d2YJTM@8W6;oH[1Z1RN@8W69ZIU1m0\\\\N@7Y67dIm0c0eN@6Z66hIm0=gNA6Z65nIi07lNA6Z65PJh04nNA5[65UJc0NTOB4[66WJ`0KVOC4[66ZJ=HYOC4[66]J:E]OB3\\\\66aJFZN4W1MB3\\\\66hJ]OTN>V1LA4]66kJKVOKB4]66kJKVOKB4]65lJLUOKB4]65lJLUOKB4^64kJMUOLA3_63lJNTOLA3a6MnJ5oNKB3U92YGKB3U92YGKB3U92XGLC2U92XGLC2U92XGLC2U93WGKD2U93WGKC3V92WGKC3V93VGKC2W93VGKC2W93VGKC2W93VGKC2W94UGJD2W94VGIC3W94VGIC3W95UGIC2X95UGIC2X96TGHC3Y95TGHC3Z95RGIC2[95SGHB3[96RGGC3[96RGHB2\\\\97QGGC2\\\\97RGFB3\\\\98QGEC3\\\\98QGFB2]98QGFB2]99PGEC2]99oFFD1]99oFFC2^99nFFC1_9:mFED1_9:lFFE0_9;kFFENa9=iFEFNa9?gFDGMb9a0TFPO;b0OMb9i0TFiN1b08Mc9i0QFkN3`08Ld9P1TFTO8Ld9Q1SFSO9Ld9Q1SFSO9Ld9R1QFTO9Kf9Q1QFTO9Jg9k1XFVNh9j1WFWNh9k1VFUNk9k1TFVNl9d20O1000000O10000000000O1000000O1000O010000000000O101O0000000000000000000000001O000001O01O000010O00010O0000010O000001O00000010O000001O0000000010O000000001O00001O000000010O00000000000010O0001O001O00001O0001O01O000000001O0
01O00001O0000001O00000001O01O000010O0010O000001O01O000001O0000001O001O00001O0000001O0000001O00001O001O00001O000000001O0000001O001O1O1O1O001O001O0000001O001O001O2N2N1O1O1O1O001O00001O0000001N10001O00001O00001O00001O0000100O007I3M4L00001O0000001O00001O00002N2N2N001O0000001O000O101O00001O1O2N1O1O1O001N2O001O001N2O0ZOdC=^<@hC<e<CWC4R=Mcee1\"}}, {\"image_id\": 15, \"category_id\": 1, \"bbox\": [466.0, 129.0, 172.0, 288.0], \"score\": 0.9999971985816956, \"association_id\": 1, \"light\": [-1.8402858972549438, -1.6236293315887451, 1.671170711517334, 1.4840645790100098], \"segmentation\": {\"size\": [426, 640], \"counts\": \"iVR67R=7H3M4L3M2O2kKZOdKg0X4CaK>\\\\4FaK<]4FbK<Z4GeK:X4IgK9U4KjK5T4OiK2U40jK0V41iK0V41hK1V42hKNX43fKOY43eKNZ44cKM[48bKIZ4>bKC[4c0aK]O]4h0_KZO_4h0`KWO`4l0]KSOd4Q1XKlNj4Z1PKcNR5b1jJ]NX5e1eJ[N\\\\5i1aJVN`5m1\\\\JRNe5T2UJkMm5Y2nIfMS62lHl0n0ROV6`2gI`MX6f2_I_M^6h4N2O012N2M=D=C7H4L5K4L3L3N2O0O100O10000000000O1O1N2M3M3N2M2L4J50M3M5M300\\\\OZKdHf4U7aKkH_4n6hKRIX4g6oKZIP4Z6\\\\LfId3k5kLUJU3g5oLYJQ3e5QM[Jo2d5RM\\\\Jn2b5TM_Jk2^5XMbJh2Z5\\\\MfJd2X5^MiJa2U5aMkJ_2S5cMmJ]2l4jMTKV2c4SN]Km1]4YNcKg1[4\\\\NdKd1Y4_NhK`1Q4gNoKY1P3HPM8i2OWM1f22ZMNc25]MK]2;cMEU2d0jM\\\\OT2f0mMYOR2h0nMXOR2h0nMXOR2h0oMWOP2k0oMVO7^Kl0\\\\5mNVO4fKj0T5SOVO1iKj0Q5UOWOMmKk0n4WO:h0FXO<f0D[O<c0F\\\\O<a0E_O>;EF?2EMc0H@8i61O01O01O00001O0010O01O1O0010O0000000000001O01O0001N10001O001O0000001O0001O000000001O0001O01O0001O001O001N2NQ>\"}}, {\"image_id\": 16, \"category_id\": 1, \"bbox\": [247.0, 600.0, 193.0, 76.0], \"score\": 0.9999996423721313, \"association_id\": 1, \"light\": [-2.1943283081054688, -1.6876800060272217, 2.136971950531006, 1.605538010597229], \"segmentation\": {\"size\": [683, 1024], \"counts\": 
\"l\\\\U53We02O00001N10000000000000000O1000000000000000000000000O100000000O1000000000000O1000000000000000000000000000000000000000000000000000000000000O1000000000O1000XNF`^O9_a0M\\\\^O3ba04Y^OLga06W^OJha08W^OGja0<S^ODla0h0R]OYO8Nfb0\\\\1W]OdNhb0_1W]O`Nib0Q2000O100000O1000000000ZOP]OQOPc0n0R]OmNLERc0\\\\1W]OPOib0n0]]OgNC0ob0Z1d]OgN\\\\b0X1d]OiN\\\\b0W1d]OhN]b0X1b]OiN^b0W1b]OiN^b0W1b]OiN^b0W1a]OjN_b0V1a]OjN_b0V1a]OjN_b0W1`]OiN`b0W1`]OiN`b0W1`]OhNab0X1_]OhNab0X1_]OhNab0X1_]OhNab0X1_]OhNab0X1_]OhNab0X1_]OhNab0X1_]OhNab0X1_]OhNab0X1_]OhNab0X1_]OhNab0X1_]OhNab0X1_]OhNab0X1_]OhNab0X1^]OhNcb0W1^]OiNbb0W1^]OiNbb0V1_]OjNab0U1`]OkN`b0U1`]OkN`b0U1`]OkN`b0T1a]OlN_b0T1a]OlN_b0T1a]OlN_b0T1a]OlN_b0T1b]OkN^b0V1a]OjN_b0V1Z]OkNBOTc0W1V]OSOjb0n0R]OTOob0b121O1O000001O000WOS]OSOnb0i0Z]OQOhb0k0^]OROdb0m0m000Od\\\\OPO_b0P1`]ORO`b0m0`]OTO`b0l0_]OTOab0l0^]OUOcb0j0]]OWObb0j0]]OVOcb0k0\\\\]OUOeb0k0Z]OUOgb0j0Y]OUOhb0l0X]OQOkb0n0j0O2N100O4L1O011N2O0000O101N7J1N2N100O01O00001O1O1NRmT<\"}}, {\"image_id\": 16, \"category_id\": 1, \"bbox\": [873.0, 417.0, 110.0, 58.0], \"score\": 0.7714323997497559, \"association_id\": 2, \"light\": [-1.3079837560653687, -2.710653066635132, 1.1851615905761719, 2.4606826305389404], \"segmentation\": {\"size\": [683, 1024], \"counts\": \"kgVb02Xe02O0O1000000O100000000000000O0100000000000000O1000000O100000000000000O100000000000hN4Q]OMkb0;P]OEnb0`0o\\\\O@Qc0a0n\\\\O_OQc0e0l\\\\O[OTc0h0c\\\\OYOEOgc0_100O1000O10O100O10L\\\\\\\\OaNdc0_1\\\\\\\\OaNdc0U1>N2O100O010000V\\\\OmN\\\\c0T1b\\\\OnN]c0R1b\\\\OoN^c0Q1a\\\\OQO^c0^11O1N2O0O2L4L3N2O100O2O0O101O0010O00010O100O1O1000O2O4K102M2N101N101N1O1N3N2M3Mllj0\"}}, {\"image_id\": 16, \"category_id\": 1, \"bbox\": [530.0, 514.0, 148.0, 80.0], \"score\": 0.9999995827674866, \"association_id\": 3, \"light\": [-2.29561185836792, -1.5280168056488037, 2.287526845932007, 1.413456916809082], \"segmentation\": {\"size\": [683, 1024], \"counts\": 
\"bRR;2Xe02O001O0O101O000000000000001O0000000O100000000000000000000O100000O10000000O1000000000000000O10O1000000000000O100000000000O10000000O100000000000O2O000000cNHj\\\\O1C5`01mb0o0n\\\\OQOQc0V1i\\\\OjNVc0]1d\\\\OcNZc0h1N20O0100000Ih\\\\O[NXc0e1h\\\\O[NXc0d1h\\\\O]NXc0c1h\\\\O]NXc0o0h\\\\O]O0CYc0m0X]OQOA1Wc0l0a]OTO`b0j0a]OVO_b0j0a]OVO_b0i0b]OWO^b0h0c]OXO]b0h0c]OXO\\\\b0i0d]OWO\\\\b0i0d]OWOUb0NP]Om0j0UOVb0R1i]OnNVb0S1j]OmNVb0T1i]OlNWb0T1i]OlNXb0S1h]OmNXb0T1g]OlNYb0U1f]OjN[b0V1f]OiN[b0X1]]OnNcb0U1W]OnNjb0Y1l\\\\OgNZc0c11O0O101O0O1O2N1N2N2L4J6O2O01O0010O10O111O2M1006I1O5J4L1O2O0O1N3N1N2M]WV7\"}}, {\"image_id\": 16, \"category_id\": 1, \"bbox\": [532.0, 491.0, 115.0, 56.0], \"score\": 0.9999253749847412, \"association_id\": 4, \"light\": [-2.133171319961548, -1.9655787944793701, 2.022139072418213, 1.7871482372283936], \"segmentation\": {\"size\": [683, 1024], \"counts\": \"l[S;1Ze01O000000000O100000001O000000000O10O10000000000000O10000000000000000000000000000O100000O1000eNM^]O3`b02]]ONcb03\\\\]OMcb05\\\\]OKdb06[]OIeb0;X]OEhb0<W]ODib0g0l\\\\OYOTc0n0\\\\\\\\OSO0Odc0X1[\\\\OhNdc0Z1[\\\\OeNfc0[1Z\\\\OeNfc0l0[\\\\O]O0Fec0k0^\\\\O]ONGec0k0b\\\\OZOcc0e0_\\\\OWOdc0i0?O01000001O000000001O000000000000010f[OVOQd0m0k[OVOSd0h0n[OYORd0g0n[OZORd0d0o[O\\\\OQd0b0Q\\\\O]OPd0b0R\\\\OZOQd0e0Q\\\\OZOoc0f0<N10001O001O1N2O1O1O101N1N2M7K1N10iZO100Pe02oZO0Pe0OP[O2od0NP[O5Pe0IQ[O6od0JQ[O7nd0IR[O7Ue0MOLlZO1[ck7\"}}, {\"image_id\": 17, \"category_id\": 1, \"bbox\": [80.0, 522.0, 185.0, 135.0], \"score\": 0.9999993443489075, \"association_id\": 1, \"light\": [-1.5077661275863647, -1.9665403366088867, 1.3526777029037476, 1.8648324012756348], \"segmentation\": {\"size\": [1228, 819], \"counts\": 
\"oaP31VV17K5jM;]mNJ]R1=^mNG^R1=\\\\mNH`R1>XmNIfR1:VmNHhR1?olNDPS1l100O1N2O1OO1N2O0101O1O10FmlN[MUS1P3N5K1O00000000000000O1M31O1O0000001O1O001O001O001O2N1O2N3M3M3M5K2N001O0000000001O1O2N1O2M6lNhmNjMZR1m0`mNcNO?;@G;`R1n0cnNROjNC19lR1e0XnNQOkNe0^S13imN7SR1EnmN>oS12M10000000O1000000O10OXOjjN;WU1CmjN:TU1CPkN;PU1ERkN:nT1FRkN9nT1HQkN8oT1IQkN6oT1KPkN5QU1KojN5PU1KQkN5nT1LRkN3nT1NQkN3oT1MQkN3nT1NRkN2mT1OSkN1kT11UkNOjT13TkNNkT13UkNMjT14UkNMjT14VkNLiT15WkNKhT16WkNJiT17WkNIhT18XkNHfT1:YkNFgT1<XkNDgT1P1100O1O2M201N2N2O1O1N10001O001O0O2O000O10O100000O1O0010O10O001O10O01N2O001O01ON2O110O00G9O101O01O0010O01O100O1O1O1O1O1O1O1O1N2O1O1O1O1O1O1O1O1O1O1O2Nligd0\"}}, {\"image_id\": 17, \"category_id\": 1, \"bbox\": [304.0, 532.0, 244.0, 618.0], \"score\": 0.9750718474388123, \"association_id\": 2, \"light\": [-2.4673871994018555, -2.093292713165283, 2.330357074737549, 1.8845605850219727], \"segmentation\": {\"size\": [1228, 819], \"counts\": \"kZ];=]U1T1ROh0XNh1_O:H6K5K4M3L4L5K6Hj0VO`0B:_VOaIcb0e6U]OiI_b0\\\\6Z]OkIab0Y6Z]OkIdb0Y6U]OlIhb0Y6P]OlImb0[6i\\\\OlISc0l6n[O_Imc0b<M4L3O1N3N1N2M3M3J7I6L4oLY@Z_Oj?c`0h@k^O[?Ta0j@e^OY?Za0j@a^OY?^a0k@[^OY?ea0k@R^OZ?ma0j@k]O[?Tb0k@Y]Oc?gb0Q2O10000O1000O1000000O100O100O1O1O1O2M2N2M3M3L4K5gMi\\\\OVA^c0e>Z]OV@Xc0f?i1L4L4M2N3N3N1O1O1N2O1O1O1O1O1O1NVZOTAXd0i>f[O_AWd0\\\\>_[ObAnN7ae0P>g[O_BWd0S<U[O^Ck0?oc0j;a]OWD_b0e;e]O\\\\DZb0a;f]ObDlNnNZa0]<f_OlDmNlN[a0U<i_OREgNnN`a0k;l_OXE`NSOba0a;R@YEYN]Oda0V;W@[EQNFga0l:[@[EmMNga0e:^@[EjM3ha0a:_@[EhM7ha0\\\\:b@\\\\EeM;ha0Y:c@ZEfM>ga0W:f@WEcMd0ga0U:mBmES=S:jBPFV=P:d_OaEnNi0^a0e9Y_OcIg`0]6U_OgIk`0Y6S_OiIm`0V6R_OlIn`0T6P_OoIo`0S6m^OoISa0T6h^OnIXa0U6b^OnI^a0W6P^OVJPb0S=00000000000000000000O10O100000000O101O0O100O100SMk\\\\OaBXc0X=n\\\\OfBUc0S=P]OdBGeM]c0]?U]OPCQc0d<Y]O_Cfb0X<b]OiC_b0n;g]OTDZb0g;i]OYDYb0c;i]O[D[b0a;h]OYD`b0a;c]OYDgb0_;^]O^Dib0Y;[]OeDmb0l:\\\\]OREkb0^:a]O`Edb0U:b]OkEbb0o9`]OPFeb0k9\\\\]OSFib0j9W]OUFmb0g9T]OWFQc0e9P]OZFUc0l9b\\\\ORFbc0n9[\\\\ORFhc0[9cYO^Ee2W1jc0W9cYObEd2U1lc0T9cYOgEa2U1nc0Q9bYOjE`2T1Qd0o8_YOmE
a2S1Rd0o8\\\\YOPFa2P1Vd0m8XYOUFb2n0Wd0l8VYOXFb2k0Zd0k8TYO[Fb2j0[d0j8RYO^Fa2h0_d0h8oXOcFa2b0dd0i8lXOeF`2`0gd0i8jXOhF^2>jd0h8iXOkF\\\\2<md0f8iXOoFY2;od0c8jXOSGU2:Se0_8lXOVGQ2:Ue0[8nXO\\\\Gk18Ze0V8QYObGd16^e0Q8TYOjG]13be0m7WYOPHV13de0h7ZYOUHR12ge0d7\\\\YOXHm04he0a7^YOZHj05ie0^7aYOZHf08ke0\\\\7bYOXHd0<le0Z7bYOXHb0>ne0W7dYOYH=`0Qf0U7dYOZH:b0Sf0R7fYOZH7d0Tf0P7gYOZH5g0Uf0m6hYO[H3g0Wf0l6hYO[H2h0Xf0k6hYO\\\\HOi0[f0i6hYO]HMj0\\\\f0g6jYO^HHl0_f0e6jYO^HGm0`f0c6lYO]HEP1`f0b6T\\\\O_Imc0`6S\\\\O_IPd0_6P\\\\O`IRd0`6m[O_IUd0`6k[O_IXd0`6g[O`I\\\\d0^6c[ObI_d0]6a[ObIbd0\\\\6][OcIgd0Z6Z[OdIkd0Y6T[OgIod0W6P[OiISe0U6lZOkIVe0S6jZOmIXe0R6gZOnI[e0Q6eZOnI\\\\e0R6bZOoI`e0o5`ZOPJce0o5[ZOPJhe0P6VZOQJle0m5TZOSJme0m5RZOSJPf0k5PZOVJQf0h5oYOXJTf0d5mYO[JWf0`5kYO`JZf0X5hYOhJ^f0Q5dYOoJ`f0k4aYOVKcf0d4_YO\\\\Kdf0_4^YO`Kef0]4[YOcKif0X4XYOhKlf0S4TYOnKQg0k3RYOULTg0b3oXO^Lmg0b2XXO]M\\\\h0k1kWORN_h0]1kWO_N^h0T1jWOfN`h0o0gWOlNah0j0eWOSOdh0:iWODao0N]QT:\"}}, {\"image_id\": 18, \"category_id\": 1, \"bbox\": [84.0, 81.0, 215.0, 369.0], \"score\": 1.0, \"association_id\": 3, \"light\": [-2.53208589553833, -1.9450876712799072, 2.4903793334960938, 1.7533659934997559], \"segmentation\": {\"size\": [572, 1024], \"counts\": 
\"_S_18aa0:F6G8J5L5K4M2M3M4M2N2M3K4M4N2N1O2O1O1O001000000O100nAhM`<Y2XCUNb<k1nB_OW<c0]CM]<l2M3M7I9F5L2N3L5K=C7H4K7nNT1Aa0E7K3M3N1N3N3M2N2N1O1O1ZIZFT5g9hJ^FV5d9eJ_F[5d9ZJeFe5i:01O000000000000000000000000lMSJYHm5a7_JYHa5e7eJWH[5i7hJTHX54fIY6W1_IS5KVJc6m0[In4N[Je6k0YIk4N^Jh6j0VIj4N_Jm6h0RIm4DeJ\\\\7>lH^6U7nI]HT6d7hI^HX6d7fI\\\\HZ6h7aIYH_6m7ZIUHe6f5`HQL=ZNS7b5gHVL0XNY7`5kHYLHWN_7]5nH]L_OVNc7Y5UIcLSOTNi7S5ZIkLiNSNn7n4^IoLbNSNP8l4aIPM_NTNP8i4fIQMYNWNQ8d4nIQMPN[NR8c4SJoLkM^NR8`4XJPMfM`NS8]4ZJRMbMbNV8U4^JWM\\\\MeNX8a3nJiMjLfNZ8^2nKjNhKiN\\\\8X2PLnNdKkN[8U2VLmN_KnN\\\\8S2XLlN\\\\KRO\\\\8Q2[LkNYKTO\\\\8P2^LjNUKWO^8m1_LjNTKYO]8l1aLiNSK[O]8h1fLgNoJB[8e1oLZNnJ1T8b1PN^NP2a1PN`NR2^1nMbNU2[1jMfNY2W1gMiN[2T1eMmN\\\\2R1dMnN^2o0cMQO_2l0bMTOb2g0^MZOe2b0\\\\M^Of2?[M@i28\\\\MHR3AVM`0S:0000O1000O10O100000O1000O1000O1000O10000000O010000O10000O1000000O100O1000O10O100000000O10000000O100000O10000000O10O1000000000000O100000000O10000000000O100000000O1000000O101O001N[bd<\"}}, {\"image_id\": 18, \"category_id\": 1, \"bbox\": [453.0, 30.0, 150.0, 427.0], \"score\": 0.9999991059303284, \"association_id\": 2, \"light\": [-2.4488799571990967, -2.0365848541259766, 2.2244129180908203, 1.8098467588424683], \"segmentation\": {\"size\": [572, 1024], \"counts\": \"SWm75ca08H7F;H8I5L2L5K4M3N3K7[Of0_Od0F9H?@;F6J5L3N3M3M1O2M1O2N3N1O2N5L2M2N3L6K`0gD\\\\JZ9\\\\7F6VHaGl5f8eIjGS6U:WI]En5g:nIZER6g:mIZEP6i:nIXEf43QLg:XOVE]4k0kKQ:HTEZ4X1aKh93QE\\\\4b<cK_C\\\\4b<dK^C\\\\4c<bK^C^4b<bK^C^4b<aK_C_4a<aK_C^4a<bK`C^4`<bK`C^4`<`KbC`4^<`KbC`4m<0O100O10000N2O100O1O1O1O101]OWKPDj4n;WKRDj4k;YKUDh4c;^K]Dd4T;eKPE^4^:nKeEU4R:QLoE^4ZOoJ\\\\9i0[GX5R8oJnGT5j7oJYHS5b7nJ`HR5^7PKaHQ5\\\\7RKdHo4V7UKkHk4Q7YKnHh4n6\\\\KRIe4S6TLnIl3e5aLZJ`3b5dL^J\\\\3_5gLaJZ3Z5jLeJW3V5mLkJS3R5PMmJQ3Q5QMoJP3m4SMRKP3h4TMVKQ3h1VIJn3\\\\No2d1]IId3cNQ3c1^IEc3hNP3b1bI]Ob3POn2b1gIQOa3\\\\Oj2b1lIhN]3Fh2`1PJcN[3Mf2^1]JTNP3>g2Z1DfN?U1CjNb0Q1_OoNf0j0\\\\OVOl0`0VO@T14nNL\\\\1GgN8_900000000000000000000O10000000000O1000000000000O10000O1000001N10001O000O101O0O2O_hZ7\"}}, 
{\"image_id\": 18, \"category_id\": 1, \"bbox\": [668.0, 15.0, 124.0, 446.0], \"score\": 0.9999998807907104, \"association_id\": 4, \"light\": [-2.1856534481048584, -2.3892314434051514, 2.007209062576294, 2.1974289417266846], \"segmentation\": {\"size\": [572, 1024], \"counts\": \"kZe;3Ua0e0F:K5M3M0OOO3N201O2N1L4N3N0O1O0N2N3M2J6H62O2N4L3A`0K4O1ObHaNbN^1^1eN_NZ1lJWOk3AU1X1nJDe3TO[1W1PKL`3nN_1W1mJ2`3hNb1V1kJ9`3aNe1V1iJ=`3]Nf1V1iJa0^3YNj1V1fJe0^3UNl1X1bJh0_3PNn1\\\\1ZJn0d3fMR2`1PJR1l3]MU2e1gIT1R4VMW2k1_IU1X4PMX2g5eMXJ\\\\2o5]MnIgKOf6^6bMcI\\\\2b6_M`I]2g6^M\\\\IY2Q7aMPIR2n7[MSHU2c8bL[GiN3a4i8^L_GjNIe4o8VLaGROAf4Z9[KiGLoNf4^;XKcDf4_;XKcDd4`;[KbDb4`;]KbD`4`;`KdDZ4^;eKcDZ4^;fKcDY4^;fKcDY4];fKeDY4\\\\;fKdDZ4];eKdDZ4\\\\;fKeDY4];eKcD[4`<O0O2O001O001O100O2N1O1OAnKUCQ4k<PLUCo3n9aLnGASNn3T9lMYHWNbNm3o8XNYHlMhNl3j8_N[HfMjNm3e8dN^H`MmNo3]8iNbHYMPOT4S8lNiHQMSOX4k7oNmHjLXO[4HQLT7o2jIfLYOZ5b6VNQJaL]O[5Z6\\\\NWJYL_O]5S6bNZJSLB\\\\5Q6_NkIjK><F[5o5`NQJPLN81Y5l5bNVJdLNj4g5fN^J^LKl4c5jNeJXLGn4b5lNhJVLEn4`5oNkJSLDo4]5ROoJQLAn4[5WOTKh1i4[NWKe1Q2ZIOS5PNd1m1cIIm4YN`1k1TJYOa4kN\\\\1j1^JmN[4YOW1h1dJiNX4^OU1h1gJcNW4DS1j1Z1VNgNi1Y1WNhNi1V1WNmNg1S1YNQOc1o0]NSOa1n0]NUOa1k0_NWO_1i0aNXO^1h0bN[OZ1f0fNAS1`0kNHm09SOKh07WOO>6AT9OoUQ4\"}}, {\"image_id\": 18, \"category_id\": 1, \"bbox\": [320.0, 87.0, 114.0, 357.0], \"score\": 0.9259598851203918, \"association_id\": 1, \"light\": [-1.7284009456634521, -2.432504415512085, 1.5578043460845947, 2.270106077194214], \"segmentation\": {\"size\": [572, 1024], \"counts\": 
\"kjb5?\\\\a04QALo;7hC7P<KjC=S<DfCd0W<_OcCg0[<ZObCi0]<XO_Cm0_<UOZCR1d<oNYCU1e<kNZCY1c<gN\\\\C\\\\1j0\\\\NW97PF_19RO]9^OZFd11YO_9TO_Fj1C]Ok9iNaFg4a1^JW5k0XIk4Y1aJ\\\\5d0ZIo4T1aJa5`0YIU5n0`Ji5;VI\\\\5j0\\\\Jo58UIb5f0YJT66TIe5d0VJX66RIj5?RJ_65PIo59oIg62PIR63oIm60nHU6LQJW7KiHi7W7YHeHi7[7ZHaHf7_7\\\\10O001O1N2M3M3L4M3L4N2N2O1N20000O1001O1O2_HfF\\\\6[9]ITGZ6n8\\\\I\\\\Gb6f8VIbGh6j9O001O000001O002N3M:YE`I^9a7N1N3`NjGVIW8d6XHRIi7j6]HRId7h6dHUI^7d6lHVIX7g5P3F;B=K5L3N2J6M4M2M3L5L4M3L5TNfBmNQ4jNQ5T2WGfNl3YOm4n1_G^Ng3El4i1dLYN\\\\3e1dL\\\\N]3c1bL_N_3_1`LbNc3[1\\\\LfNg3W1XLjNk3R1TLPOQ4i0PLXOU4a0nK^OU4<oKCV41RLNn:O2O00000O2O00001N1000001N2O1NRUY:\"}}, {\"image_id\": 19, \"category_id\": 1, \"bbox\": [45.0, 121.0, 1467.0, 542.0], \"score\": 0.9999999403953552, \"association_id\": 1, \"light\": [-2.2797462940216064, -1.798678994178772, 2.0804576873779297, 1.6143113374710083], \"segmentation\": {\"size\": [999, 1599], \"counts\": \"jW\\\\1`1]m0>G8I6J7J5M3M3M2N3M2N2N2O1N2N2N101N2O1N2N1O2O1N3M2N2N2N1O2M3N1O2N1O2N1O2N1O2O0O2N1O2N1O2N100O2O0O101N100O2O0O101O0O2O0O2O0O2O0O2N2O1N2N101N2N2N2N2N1O2O1N2N2N101N2O0O2O1N2O0O2O001N101N101O000O2O0O10001N100O101N1O100O1O2O0O1O1O100O2O0O100O100O2O000O10000O10001O000O101O0000001O0O2O00001O0O2O001O1O001N2O1O001O1N101O001N101O000O2O00000O2O000O100O101N100O100O100O2O0O10000O10001N10000O101O000O10001O0O1000001O0O10001O0O2O00001N101O0O2O0O2O001N1O101N100O1O2O0O101N100O10001N100O10001O0O100000000O100000000000000000000O10000000001O000000000O1000000000000000000O10000000000O10000O100O10000O100O1O100O1O100O100O100O100O10000O10000O10000O1000000O10000000000000000000000000000000000000000000000000000O1000000000000000000000000000000O100000000000000O10000000000O10000000000O100000000O1000000O100000000O1000000O1000001O0O1000000O101O00000O10001O000O1000001O000O10001O0000001O0O101O001O001O1N101O1O1O2N1O1O1N3N1O2N2N2N2N1O1N3N1O1O1O1O1O001O1N2O001O001O001O001O001O1O1O1O1O1O1O1O2N1O2N1O2N2N2N1O1N2O2N1O001O1O1O1O001O001O1O001O00001O001O001N101O1O001O001N101O001O001N10001O0
01N10001O000O2O00001N10001O0O101O00001O001O001O001O001O001O1O001O00001O001O00001O00001O0000001O0O10001O000000001N10000O101O000O101O00000O2O00001O0O101O001O001N101O001O001O0O101O001O0000001O000000001O00000000001O0000000000001O0000000000001O000000000000001O00000000001O000O10001O00001O0000001O00001O001O00001O001O001O001O00001O001O000000001O00000000000000000000000000000000000000000000001O00000000000000000000000000000000000000001O000000000000000000000000000000001O00000000000000000000000000000000000000000000000001O0000000000000000000000000000000000001O00000000000000001O00000000001O0000001O0000001O0000001O00001O001O00001O001O001O00001O001O00001O001O001O001O001O001O001O001O1O0O2O1O001Oc0]O4L4L3M3M3M2N1fCk^O`:V`0nCd@_1[O_:o?ZDa@T1E_:h?gD\\\\@h01]:a?UEW@;=]:Z?cEQ@Nh0]:T?aG`@]8]?h3M3N2N2O1O1O1O1O1O100O100O10000O10VFe@h3Z?ZLQAZ3o>hLPAW3o>lLPAS3P?PMn@o2Q?TMm@l2S?WMk@h2T?[Mk@d2T?_Mk@`2U?bMi@^2V?eMi@Z2V?l600O1O1O100O100O100O100O01000O100O10000O10000O1000000O10O10O100O100O100O1O100O1O1O00100O1O100O100O1000000O100000000O100000000O1000O100RJeAPK[>Q5oAdJP>_5WBXJi=i5_BnIa=S6fBeIZ=]6eBbI[=`6dB_I\\\\=d6bB[I^=g6aBXI_=k6_BTIa=P7\\\\BoHd=X7VBgHj=j7fAUHZ>\\\\<0000000000000O10000000000O100000000O100000000O10000O1000000O10000O1000000O100000000O1O100O1O1O1O1O1O1O1O1O1O1K5E;E;O11O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O10O01O10O01O10O0100O010O10O10O10O10000O10O010O1O1O10O01O1O010O001O0010O0001O00010O000000000000010O0000000000000001O00000000000000000000001O0000000000000000001O0000000000001O00001O00001O00001O001O001O001O1O001O001O001O001O001O00001O00001O0000001O0000001O0000000000000000000000000000000000000000000000001O00001O00001O001O001O001O1O001O1O1O1O1O001O001O001O00001O00001O00001O0000001O00000O101O00001O00001O00001O001O001O1O001O1O1N2O001O1O0O2O0O2O000O2O0O2O0O100O2N1O100O2N100O101N101O0O101N101O0O2O1O001N2O1O1N2O1O1O1N100O2O001N1O101N1O101N1O1O2N1O1O101N1O100O2O0O2O1O1N101N3N1O1N3N1N2O1O0O2O1N101N101N101N1O2N1O2N2N2M3N2N2N3M2M3N3M2N101N2N1O2N2N1O2N1O2N1O2N2N2N2N2N2N3N1N2N3M1O2N2N2N1O2N1O2O1N1O2M3N2N
1O3M2N2N3L4M2N2M3N2M3N2N2N1O2N2O0O2N2N2M3N2N3L5K6J5K6J6J4L4M3L3N2N3M2N2N1O2N3M2M3M4L3L8H9G9DSad2\"}}, {\"image_id\": 20, \"category_id\": 1, \"bbox\": [227.0, 242.0, 615.0, 441.0], \"score\": 0.9999990463256836, \"association_id\": 1, \"light\": [-1.5223686695098877, -1.6767048835754395, 1.3363885879516602, 1.5367496013641357], \"segmentation\": {\"size\": [844, 1500], \"counts\": \"Vgk58Tj01O0O2O00001O0000000O101O0000000000001O00000O1000000000000000000O2O00000000000000000000000O10001O000000001O0000000O2O00000000000O10001N1000000O2O00001N10000000001N1000000O101O0000001O0O10001O000000000O2O000O10000O2O00000O2O0000000O10001N10000O100O100O100O2O00000O1000000O10000O1N2N2L4O1O100O10000O1000000O10000O100N1M4H8N2O100O100O100000000O100000000000O1000000000O10O1000000000000000000O1000000000000000001O000000000000001O0O101O0000001O000000001O000O101O00001N101O0000001N10000000001O000000000000001O000000000000000000000000000000XXOmMWg0S2hXOnMXg0R2hXOnMXg0R2hXOnMXg0R2gXOoMYg0Q2gXOoMYg0Q2fXOQNYg0P2fXOPNZg0P2fXOPNZg0P2eXOQN[g0o1eXOQN[g0o1dXORN\\\\g0o1bXORN^g0n1bXORN^g0Q2^XOPNbg0]200000000000000000001O00000000000000000000000000000000000000000000O1000000000000O1000000000000O1000000000000000000000O10000000O10000000O1000000000000O10000000000000000000000000000O1000001O000000000oJjMlAV2n=XNjAh1o=lNdAT1S>7k@Io>b0l@^OS?g0i@YOU?k0j@UOT?o0i@QOW<VM_Dk3YOoNU<[M_Dj3YOkNS<cM_Df3\\\\OgNo;nM^Da3_OaNQ<WNWD_3DZNR<bNnC]3JQNW<gNiC_3KjM[<jNgC`3KfM]<mNeC`3MbM]<POdCa3M_M_<QObCc3N[M_<UO`Cc3OXMa<VO]Cg30RMb<[OYCh32dJQOh1d=2mBb7Q=gH_Ba7a=e2OdNaBmE^=R:fBlEY=R:kBmET=U:mBiER=Y:nBfEQ=[:PCdEo<^:PCbEo<_:RC`Em<b:RC^En<b:SC]El<e:SC[Em<e:SC[Em<e:SC[Em<e:TC[Ek<e:UC[Ej<f:VCZEj<f:VCZEj<e:WCZEj<f:VCZEj<f:VCZEj<f:VCZEj<f:VCZEi<f:XCZEh<e:YC[Eg<d:ZC\\\\Ef<c:[C]Ed<c:]C]Ec<b:_C]Ea<b:`C^E`<b:`C^E_<b:bC^E]<c:cC]E\\\\<c:eC]EZ<d:fC\\\\EY<d:hC\\\\EV<e:kC[ET<f:lCZES<f:nCZEQ<g:PDXEP<g:QDYEn;g:SDYEm;f:TDZEl;e:UD[Ek;c:WD]Ei;b:XD^Eg;b:ZD^Ef;a:\\\\D^Ed;a:]D_Ec;`:^D`Eb;`:_D_Ea;`:`D`E`;`:`D`E`;`:aD_E^;b:bD^E^;a:cD^E^;b:cD]E];c:cD]E];c:cD]E];
b:eD]E[;c:eD]E[;c:eD]E[;c:fD\\\\EZ;d:fD\\\\EZ;d:gD[EY;e:gD[EY;e:hDZEX;f:iDYEW;f:lDWEU;i:lDVES;l:mDSES;m:nDRER;n:oDPER;Q;nDkDU;U;lDiDU;W;lDgDU;Y;kDfDV;[;m1001N2O1O1O1O2N1O2M2O001O1O001O1O1O001O1O00000O2O0000000O1000O10000O10000O10O11O1N101N10001N101O0O1O1O1N2N2N3M2O1O1O1O1O1O1N2N2N2N2O1O1O2N1O1O1N2N2N2O1N2O1O1O1N2M3M4M2M3O1N2O1O2M3M2aI]_OS3f`0_LP@U3T`0hLT@Q3n?lLV@P3m?mLV@P3m?nLU@P3m?oLT@n2o?QMS@k2o?UMR@g2R`0XMQ@b2R`0^MQ@[2S`0eMP@S2V`0lMm_Om1W`0SNj_Oi1Z`0VNh_Oe1[`0[Nf_Ob1\\\\`0^Nf_O\\\\1_`0dNb_OT1d`0lNg_O?c`0AU5O000010O01O1N3N2N2N2N1N2O1O1N2O1N3M4K7H6J6J7I^Qn`0\"}}, {\"image_id\": 21, \"category_id\": 1, \"bbox\": [566.0, 288.0, 130.0, 173.0], \"score\": 0.9999999403953552, \"association_id\": 1, \"light\": [-2.3341355323791504, -1.9910742044448853, 2.2550621032714844, 1.8743019104003906], \"segmentation\": {\"size\": [576, 1024], \"counts\": \"oen9a0Za06L4K4K6N1N3L5X@QOZ>Q1cASOY>P1cAVOX>n0cAXOY>k0dAWOZ>[2M2M2O3M4L5Kf0ZOa0_O6J2N1O1O0000O1O1PO\\\\CcLe<i2[DhLi;S3fDbLZ;\\\\3hDdLY;]3eDcL];_3_DaLc;h3QDXLR<l3hCTL`<j3ZCVLk<h3RCXLQ=i3kBXLU=i3iBWLX=j3fBVLZ=l3dBTL\\\\=m3cBSL]=m3bBTLa=n0bBBj03UO]O`=k0hDXOkMM^=i0[EWOg:g0YEYOj:d0UE\\\\On:b0RE^OQ;?oDAS;=mDCU;;kDEX;9gDGZ;:dDF^;9aDGa;8^DHd;7[DIg;6XDJl;6PDJR<7kCIW<8fCH[<;`CFa<<\\\\CDf<:ZCFg<:XCFi<:VCFl<8TCHm<8RCIm<8RCHP=6PCJS=3mBMU=0lB0V=JnB6^?000001O000001O00000001O0000000000000000000001O0000000000000000000000000000001O0000000000000001O00001O00001O0O2OkQh5\"}}, {\"image_id\": 21, \"category_id\": 1, \"bbox\": [388.0, 164.0, 288.0, 378.0], \"score\": 0.9999996423721313, \"association_id\": 2, \"light\": [-2.6362032890319824, -1.2278330326080322, 2.5303807258605957, 1.0695565938949585], \"segmentation\": {\"size\": [576, 1024], \"counts\": 
\"Saj6X1]`0a0E7K4K5J6H8K6L3M3L4M2N2O1O1N2O1M3N^NUBSOi=l0aBlN_=S1gBhNY=X1lBdNS=[1PCcNo<^1RCbNl<^1WCaNe<a1^C^N`<b1bC^N\\\\<c1fC[NZ<f1dC`NV<d1eCaNS<g1fC`Nn;m1kCXNQ<n1hCWNU<m1fCXNW<k1bC[N[<h3L5I6L4M2N3M2O3L4L3M3M3L4M5K:SE_Jh8f7F4M2M3N5KS1mN3M2M2O000001O00000000000`NbI^G^6Z8mIcGS6P6bImJ`0QOn5P6gIjJ?SOl5Q6jIeJ?WOi5R6PJ]J=]Of5T6gLdI\\\\3Y6X4M3N2O2]J`Da4h<G3N1O01N2N3UGRK_4Q5\\\\KQKc4_5^GUKc2]On5c6iI\\\\IX6h6dIXI]6i6aIWIa6i6\\\\IXIf6k6UITIn6V7fHjH\\\\7Z7^HfHc7]7YHcHh7^7WH`Hl7`7SH^HP8b7Z1N1N1O1N2M5J`0A6bNXESKR;U3]DlMj<k1_CQNZ1kN_9l2`ETNS1QO^9b2jEVNj0ZO]9Z2fHeM]7V2fHjM[7T2fHlM\\\\7P2fHPN[7n1fHRN\\\\7j1fHVN\\\\7f1fHZN[7c1gH]N[7_1gHaNZ7f0\\\\DG[4CoKLj:e0SEHV4GmKNh:`0[EGR4JjK0j:<`EDn3OiK2h::\\\\JDlJ2h:9]JEjJ4h:6_JGhJ3j:4_JIgJ3k:1`JMdJ3l:MbJ0bJ3m:IdJ4_J4j`0MU_O3k`0MU_O4k`0KU_O5k`0LT_O4l`0LT_O5k`0KU_O5k`0LT_O4l`0LT_O5k`0LT_O4l`0LT_O4l`0LT_O5k`0KU_O5l`0JT_O6l`0KR_O6n`0JR_O6n`0KQ_O6n`0JR_O6Z;BTJ8bJ6n:O_JLaJ6P;N_JM_J6R;N^J>b5B_J>`5C_J=a5C_J=a5D^J<b5D^J<c5D\\\\J<f5BZJ>h5_OYJa0j5[OWJe0l5VOWJi0`;00000000000001O00000000001O0000001O0001O0000000000000000001O000000000010O001O1O00001O01O00000001O00000010O000001O0000001O01O01O000010O01O1O0010O00010O010O0001O0000001O0001O0001O0000001O00000O100000000000000000000000O0100000000O10000O10000O100O1O2L3M4NcWS6\"}}, {\"image_id\": 22, \"category_id\": 1, \"bbox\": [169.0, 31.0, 288.0, 727.0], \"score\": 0.9999977946281433, \"association_id\": 1, \"light\": [-1.6605138778686523, -2.099789619445801, 1.4732459783554077, 1.9300251007080078], \"segmentation\": {\"size\": [768, 621], \"counts\": 
\"m^o32ng00O101N10000O10001O000O1000000O1000000O10000000000O100000000O10000000000O10000O10000000000O100000O010000000000O10000O1000000QBKR46iK:l3FPLf0h3ZOULo0e3QOXLW1c3iNXLa1c3`NWLi1d3XNXLP2d3PNXLW2e3iMWL]2g3cMULb2j3^MRLh2l3XMQLl2n3TMPLo2dLbLd2`0e0R3bLcLh2;e0T3_LfLj26e0W3\\\\LhLn21d0Z3ZLiLQ3Md0\\\\3VLlLU3Hc0_3TLmLX3Dc0`3SLoLY3Ac0b3RLPMY3^Od0d3QLPMZ3\\\\Oc0h3PLnL\\\\3ZOc0k3nKmL^3XOb0P4kKlLa3TOb0U4iKjLc3RO`0^4eKcLi3oN?g4aK^Lm3kN`0o4[K[LR4fNa0W5VKWLV4bNb0^5RKSLZ4_Nc0b5nJSL]4[Nd0f5jJSL`4WNe0k5dJSLe4RNg0o5]JTLj4mMi0T6kI_LY5]Mk0T:ROlEn0W:oNiEQ1[:kNeET1_:iNaEW1a:gN_EY1c:eN]EZ1f:dNZE[1i:cNWE]1j:bNVE]1m:aNSE^1Q;_NnDa1U;]NkDb1Y;\\\\NfDc1^;ZNbDe1b;XN^Df1f;XNZDf1i;YNWDe1m;YNSDe1P<ZNPDd1S<[NmCc1U<]NkCb1W<]NhCc1Z<\\\\NfCc1\\\\<\\\\NdCc1]<]NcCb1_<]NaCc1_<]NaCc1`<\\\\N`Cd1`<\\\\N_Ce1b<ZN^Ce1c<[N]Ce1c<[N]Ce1d<ZN\\\\Cf1d<ZN\\\\Cf1d<ZN\\\\Cf1e<YN[Cf1f<ZNYCg1g<YNYCf1h<ZNXCe1i<[NWCd1j<\\\\NVCc1k<]NUCb1l<^NTCa1m<_NSC_1o<aNQC^1P=bNPC[1S=eNmBW1W=iNiBT1Y=mNgBo0]=QOcBj0a=WO_Be0e=[O[B`0j=@VB9P>HPB2V>OiAL\\\\>4dAG`>:`A^Of>d0ZASOn>n0RAhNW?Y1i@aN[?a1e@\\\\N\\\\?f1d@WN]?k1d@RNd=QNiBP4BmMd=n3\\\\BPLc=S4]BmK_=W4aBhK\\\\=\\\\4dBcKY=b4fB]KV=h4jBXKR=l4nBSKP=P5PCoJo<S5RCkJm<W5SChJl<Z5TCeJl<\\\\5TCcJm<]5SCaJn<`5RC^JP=c5oB[JR=f5oBXJQ=i5oBUJS=k5mBTJS=m5mBRJT=n5lBQJT=P6lBPJT=P6lBPJT=P6lBPJT=P6lBPJT=Q6lBnIT=R6lBnIT=R6lBnIT=R6lBnIT=R6lBoIS=Q6mBoIS=Q6mBoIS=R6lBnIU=Q6lBmIV=R6jBnIW=Q6iBoIW=Q6iBoIX=Q6hBnIY=Q6gBoIY=Q6gBoIZ=P6fBPJZ=P6fBPJ[=P6dBQJ]=m5dBSJ]=k5cBWJ]=h5bBYJ`=d5`B^Ja=`5^BbJc=[5]BgJd=V5\\\\BkJe=S5\\\\BnJc=R5\\\\BoJe=o4[BRKe=m4[BTKe=l4ZBUKg=i4ZBWKg=g4YBZKh=d4YB\\\\Kh=c4WB^Kk=_4UBaKn=\\\\4RBeKR>V4oAkKS>Q4mAPLX>k3gAXL\\\\>b3dA`L[?_2f@dMa`0R1^_OSOZa00g^O4`b0aN`]Ob1mb0mMS]OW2Pc0bMQ]O`2Rc0ZMn\\\\Oi2Tc0SMk\\\\OS3Tc0hLl\\\\Ob3ob0YLQ]OS4gb0jKY]OR5\\\\b06J5K3_C[Ka5h4XJ_Kf5c4TJbKl5_4oIdKP6`4jIdKU6_4iFRKEc0`9`4^FXKM<d9_4RFaK54g9`4`EnKd0Fk9`4kD]LV1WOm9]7oEgHP:Y7kEmHS:U7gEQIW:P7bEYI[:i6]E`I`:R:N3M2N2N3M2N2N2O1N4L3PMTBfKo=[3UC]Lm<V3eCcL^<Q3RDhLP<R3YDiLi;R3]DkLe;P3aDmLa;o2cDoL`;l2dDRM`;g2fDUMa;b2dD\\\\Ma;\\\\2eD`Mb;U2fD
hMb;l1eDPNY<P1oCjN_<f0iCUOa<?eC\\\\O`<>eC^Oa<:eCBb<5dCFg<MUdj3\"}}, {\"image_id\": 23, \"category_id\": 1, \"bbox\": [198.0, 229.0, 751.0, 309.0], \"score\": 0.9999895095825195, \"association_id\": 1, \"light\": [-2.565096855163574, -1.5090670585632324, 2.4631590843200684, 1.396848440170288], \"segmentation\": {\"size\": [683, 1024], \"counts\": \"d_T43Ue06J6J6I6I8K4M3M3L3N2M3M3K6J5J6H8E;K5L3N3M3M2O2N1N3N100O2N1O101N1O2N100O2N1N3N1O2M2O1O2N1O1O1O2O0O100O101N100O100O2O0O1O101N100O2N1O101N1O1O2O001N101N101O000O101O00000O101O0O10001O0O100O10001N100O10001N1000001N10001O0O101O00001O0O10001O000000001N100000000000000000000000001O0000000n@QJh>o5XARJg>n5XAUJf>k5ZAVJe>j5[AWJc>j5]AWJb>i5]AXJc>h5]AXJc>h5]AXJc>h5]AYJb>g5^AYJb>g5^AYJb>g5^AYJb>g5^AZJa>f5_AZJa>f5_AZJa>f5_A[J`>e5`A[J`>e5`A\\\\J_>d5aA\\\\J_>d5aA]J]>d5cA]J\\\\>c5dA]J\\\\>c5cA_J\\\\>a5dA_J\\\\>a5dA`J[>`5eA`J[>`5eA`J[>`5eAaJZ>_5fAaJZ>_5fAaJZ>_5fAbJX>_5hAaJX>_5hAaJX>_5hAaJX>_5hAaJX>_5hAbJW>^5iAbJW>^5iAbJW>^5iAbJV>_5jAbJU>^5kAbJU>^5kAbJU>^5kAcJT>]5lAcJT>]5lAcJS>^5mAcJR>]5nAcJR>]5nAcJR>]5nAdJQ>\\\\5oAdJQ>\\\\5oAdJP>]5PBcJP>]5PBdJo=\\\\5QBdJo=\\\\5QBdJo=\\\\5QBdJo=\\\\5QBdJo=\\\\5QBdJn=]5RBdJm=\\\\5SBdJm=\\\\5SBdJm=\\\\5SBdJm=\\\\5SBdJm=\\\\5SBeJl=[5TBeJl=[5TBeJl=[5TBeJk=\\\\5UBdJk=\\\\5TBfJk=Z5UBfJk=Z5UBfJk=Z5UBfJk=Z5UBfJk=Z5UBgJj=Y5VBgJj=Y5VBgJj=Y5VBgJj=Y5VBgJj=Y5VBgJi=Z5WBfJi=Z5VBgJj=Y5VBgJj=Y5VBhJi=X5WBhJi=X5WBhJi=X5WBhJi=X5WBhJh=Y5XBgJh=Y5XBgJh=Y5WBhJi=X5WBhJi=X5WBhJi=X5WBhJi=X5WBhJi=X5WBhJi=X5VBiJi=X5WBhJi=X5WBhJi=X5WBhJi=X5WBhJi=X5WBhJi=X5VBiJj=W5VBiJj=W5VBiJi=X5WBhJi=X5WBhJi=X5WBhJi=X5WBhJi=X5WBhJi=X5WBhJh=Y5XBgJh=Y5XBgJh=Y5XBgJh=Y5XBgJg=Z5YBfJg=Z5YBfJg=Z5YBfJf=[5ZBeJf=[5ZBeJf=[5ZBeJe=\\\\5[BdJe=\\\\5[BdJe=\\\\5[BdJe=\\\\5[BdJd=]5\\\\BcJd=]5]BbJc=^5]BbJc=^5]BbJb=_5^BaJb=_5^BaJb=_5^BaJb=_5^BaJa=`5_B`Ja=`5_B`Ja=`5_B`J`=a5aB^J_=b5aB^J_=b5aB^J^=c5bB]J^=c5bB]J^=c5bB]J]=d5dB[J\\\\=e5dB[J\\\\=e5dB[J[=f5eBZJ[=f5eBZJ[=f5eBZJZ=g5fBYJZ=g5fBYJY=h5hBWJX=i5hBWJW=j5iBVJW=j5iBVJV=k5jBUJV=k5jBUJU=l5kBTJU=l5kBTJT=m5lBSJS=n5mBRJR=o5nB
QJQ=P6PCoIo<R6QCnIn<S6RCmIm<T6SClIl<U6TCkIk<V6UCjIk<V6UCjIj<W6VCiIj<W6VCiIi<X6XCgIh<Y6XCgIg<Z6YCfIg<Z6YCfIg<Z6YCfIf<[6[CdIe<\\\\6[CdIe<\\\\6[CdId<]6\\\\CcId<]6]CbIb<_6^CaIb<_6^CaIa<`6`C_I`<a6`C_I_<b6aC^I^<c6bC]I\\\\<e6eCZIZ<g6fCYIX<i6hCWIW<j6iCUIW<l6iCTIU<n6kCRIT<o6lCQIT<o6lCQIS<P7mCPIS<P7mCoHS<R7mCnHS<R7nCmHR<S7nCmHQ<T7oCkHR<U7nCkHR<U7nCkHR<U7nCjHR<W7nCiHR<W7nCiHR<W7nCiHR<W7nChHR<Y7nCgHR<Y7nCgHR<Y7nCgHR<Y7nCgHQ<Z7oCfHQ<Z7oCeHR<[7nCeHQ<\\\\7oCdHQ<\\\\7oCdHQ<\\\\7oCdHP<]7PDcHP<]7PDcHP<]7PDcHo;^7PDcHP<]7PDcHP<]7PDcHo;^7PDcHP<]7PDcHP<]7PDbHP<_7oCbHQ<^7oCbHQ<^7nCcHR<]7mCdHR<]7nCcHR<\\\\7nCeHR<[7nCeHR<[7mCfHR<[7nCeHR<[7mCeHT<[7lCeHT<[7lCeHT<[7kCfHU<Z7kCfHT<[7lCeHT<Z7mCfHS<Z7lCfHU<Z7kCfHU<Z7jCgHV<Y7jCgHV<Y7iCgHW<Z7hCgHX<X7hCiHX<T8100000000O10O1000000000O100000O10000000O100000O1000O1000000O100000O010000000000PNeCPK[<o4eCQK[<o4fCQKZ<o4fCQKZ<n4gCRKY<n4gCRKY<n4gCRKY<m4gCTKY<l4gCTKX<m4hCSKX<l4iCTKW<l4hCUKX<j4iCVKW<j4hCWKX<h4iCXKW<h4hCYKW<g4jCYKV<f4jC[KV<e4jC[KV<d4jC]KV<b4kC^KU<a4lC_KT<`4lCaKT<_4lCaKS<_4nCaKR<_4nCaKR<_4nC`KS<_4mCbKS<^4mCbKS<^4mCbKS<]4nCcKR<]4nCcKR<]4mCdKS<\\\\4mCdKS<[4nCeKR<[4mCfKS<Z4lCgKT<X4mChKS<X4lCiKS<X4lCiKT<V4lCkKT<U4kClKU<S4kCnKU<R4kCnKU<Q4kCPLU<o3lCQLT<o3lCQLT<n3lCSLT<m3lCSLT<l3mCTLS<l3mCTLS<l3lCULT<j3mCVLS<j3mCVLS<j3mCVLS<j3mCVLS<i3nCWLR<i3mCXLS<h3mCWLT<i3lCWLT<i3lCWLT<i3kCXLU<h3kCXLU<g3lCYLT<g3kCZLU<f3iC[LX<\\\\600000O1000000O10000000000O10000000000000001TMbCiL^<R60TMbCkL^<U3bCkL^<T3bCmL^<S3bCmL^<P60RMbCoL^<Q3bCnL_<Q3aCPM`<o2`CQM`<o2`CQM`<o2`CQM`<n2aCRM_<n2`CSM`<m2`CSMa<l2_CTMa<k2`CUM`<k2`CUMa<j2_CVMa<i2_CXMb<g2^CXMc<h2]CXMd<g2\\\\CYMd<f2]CZMc<f2\\\\C[Me<c2\\\\C\\\\Me<d2ZC]Mf<c2ZC]Mf<c2ZC\\\\Mh<b2XC_Mh<a2XC_Mh<a2XC^Mi<b2VC_Mk<_2VCaMj<_2VCaMj<_2VC`Mk<`2UC`Mk<`2TCaMm<^2SCbMm<^2SCbMm<^2SCaMo<^2QCbMo<^2QCbMo<^2QCbMP=]2PCcMP=]2PCcMQ=[2PCeMQ=Z2oBfMR=Y2nBgMS=X2mBhMS=X2mBhMT=W2lBiMU=V2kBjMU=V2jBkMW=T2iBlMW=T2iBlMX=S2hBmMX=S2hBmMY=R2gBnMY=R2gBnMY=R2gBnMZ=Q2fBoMZ=Q2fBoMZ=Q2fBoM[=P2eBPN[=P2eBPN\\\\=o1dBQN\\\\=o1dBQN]=n1cBRN^=l1cBTN]=l1cBSN_=l1aBTN_=l1aBTN`=k1`BU
Na=j1_BVNa=j1_BVNb=i1^BWNb=i1_BULNT1c=g2_BTL0T1b=g2^BSL2V1`=g2^BRL4V1_=g2]BRL6V1]=g2^BRL6W1\\\\=g2^BQL8W1[=g2]BRL8W1[=g2]BQL:W1Y=h2]BQL:V1[=h2[BRL;U1Z=h2\\\\BRL;V1Z=g2[BSL;V1Z=g2[BSL;V1[=e2[BUL:V1[=e2[BTL<V1Z=d2[BVL:W1[=c2[BVL:V1\\\\=c2[BWL9V1]=a2[BYL8V1]=`2[BZL9V1]=_2ZB[L9U1^=_2ZB\\\\L7V1_=]2[B]L6V1_=]2ZB]L8V1_=[2ZB_L7V1_=[2ZB_L7U1`=[2YB`L7V1a=Y2WBbL8U1a=Y2UBdL:S1a=X2TBfL<R1`=X2RBhL>P1a=V2RBjL=P1a=V2QBkL=o0c=V2PBjL>P1c=T2PBlL=P1c=T2oAmL>o0c=S2PBnL=o0d=R2oAoL=o0d=Q2oAPM>o0d=o1oARM=o0e=m1oATM;P1g=i1oAXM:o0h=g1oAZM9n0j=f1nA\\\\M8n0k=d1nA^M7n0l=a1nAbM5n0n=]1oAfM2m0P>X1QBlMNm0R>S1RBQNLl0S>P1RBUNKk0T>m0RBYNIj0W>k0PB\\\\NHj0Y>g0PB`NEk0[>d0nAeNEh0^>a0jAkNGe0`>>iAnNFd0b>=hAPODe0e>9hAROBf0f>6iAVO_Od0j>4hAXO]Oe0l>0hA\\\\O[Oe0n>LiAd1Y>WNjAh1Z>jMPBW2S>]MVBc2Y`00000O100000000O1000000O10000O10O010000O10000O100000000O1000O1000O10000O100O0O2L4I7O1O1O100O100O10O010000O2O000O10000O100O2O0O1O1O1O2N1O1O2N2N101O1O0O2O001nMm\\\\Oj1[c0O0O2O1O1O1[Na\\\\O[1hc002N1N2mNo[Oj0[d0M3YOc[O42IoWb1\"}}, {\"image_id\": 24, \"category_id\": 1, \"bbox\": [50.0, 555.0, 104.0, 118.0], \"score\": 0.9999998807907104, \"association_id\": 3, \"light\": [-1.319387674331665, -2.491478204727173, 1.1577484607696533, 2.3572685718536377], \"segmentation\": {\"size\": [768, 513], \"counts\": \"SSV1U1hf06E?H3J6L4K4L4M3L4M2J6M4I6N2N4M2N1O2N1O1O100000000000000000000000000000000000iLi[Oe2Wd0YMn[Od2Rd0ZMT\\\\Ob2lc0]MV\\\\Ob2jc0]MZ\\\\O_2gc0`M[\\\\O_2fc0_M]\\\\O_2dc0\\\\Mb\\\\Ob2_d0N102N5K2N5K3M3M4^OkYOQOaf0MTZO4lf00000O1000000000000000O100000000000O100000000O2O00000000000O10O11O0O10O100000O1001OO101O000O2N^c\\\\8\"}}, {\"image_id\": 24, \"category_id\": 1, \"bbox\": [318.0, 505.0, 102.0, 96.0], \"score\": 0.9999983906745911, \"association_id\": 7, \"light\": [-1.20527184009552, -2.8900034427642822, 1.1181498765945435, 2.645603895187378], \"segmentation\": {\"size\": [768, 513], \"counts\": 
\"\\\\Q_74gg0>Bo0TO4K3M4M3M2N2O1O2N1O1O1M3N2O1K5N2N2O100O1O100000000O10000O1O1000000O10000000000001O00000cMZ[ODGM0b1od0hNR\\\\OS1mc0jN\\\\\\\\OP1dc0oNb\\\\Ol0^c0ROf\\\\Ol0Zc0QOU]O4PN2ag000000O1001O00000000000000000000001O0000000O10000000000000000000001O000000000O101O000O2O000000000000\\\\UU2\"}}, {\"image_id\": 24, \"category_id\": 1, \"bbox\": [2.0, 333.0, 70.0, 85.0], \"score\": 0.9931771755218506, \"association_id\": 5, \"light\": [-2.553144693374634, -1.4721717834472656, 2.452892303466797, 1.310878038406372], \"segmentation\": {\"size\": [768, 513], \"counts\": \"^j1i0Vg05L1O1O2bYOVObe0o0PZO\\\\One0`1K6TZOPN\\\\e0Z2001O0O3N2M4M3M3N0N111N1O1O2N1O2M2O2N2M3N2N4M3M5J:F>B6J01O000000000001O00O2O0000000001O0000001O00001O01O000001N10000000000OS[Z:\"}}, {\"image_id\": 24, \"category_id\": 1, \"bbox\": [196.0, 308.0, 63.0, 70.0], \"score\": 0.9999804496765137, \"association_id\": 4, \"light\": [-0.9560285210609436, -2.744659423828125, 0.8865196108818054, 2.463252305984497], \"segmentation\": {\"size\": [768, 513], \"counts\": \"QZc4R1if0:J4K5L4L4L4K5M00000000000000000002N2N2N2N1O2N003M1O2N2N1O2N2M3VOeYOJaf0MfYOJ`W20j_N2O00000000000000001O000001N1000001O000000000000000O]Tn5\"}}, {\"image_id\": 24, \"category_id\": 1, \"bbox\": [0.0, 424.0, 123.0, 112.0], \"score\": 0.9999997615814209, \"association_id\": 1, \"light\": [-0.55742347240448, -3.1450366973876953, 0.3948069214820862, 2.9922142028808594], \"segmentation\": {\"size\": [768, 513], \"counts\": \"k>:`g0<D:]YOZOae0P1PZOZOke0`1M2O1N3N2M2N3L3M3N3M4M1N2O1O1O1N2O1O1O1O100O100000001O000000001O1O0000Bf[OVMZd0g2h[OZMXd06d[Oj17PNUd05h[Og14TNTd03n[Oc1O[NSd01W\\\\OZ1HeNQd00c\\\\Om0_OSOoc0Nj\\\\Of0YO\\\\Omc0LR]O?TOEof000000000000000000000001O000000000000000000010O00000000000000000000O2O00000000000000000010O00000000000001O00000O101O0000000000001O00000000001O0000001OZoS9\"}}, {\"image_id\": 24, \"category_id\": 1, \"bbox\": [407.0, 416.0, 94.0, 69.0], \"score\": 0.9993202686309814, \"association_id\": 6, \"light\": 
[-2.671511173248291, -1.0258779525756836, 2.636662483215332, 0.8720052242279053], \"segmentation\": {\"size\": [768, 513], \"counts\": \"Rga92ng00O1O1N2O2O0O10000000000000000000000000000001O000000000000bN0lZO0Qe04nZOLjd0?S[OAkd0c0S[O]Old0g0Q[OYOnd0i0Q[OWOod0j0Q[OUOod0l0P[OTOPe0l0P[OTOPe0l0P[OTOPe0l0P[OTOPe0k0Q[OUOod0k0Q[OTOQe0e0U[O[Old0d0T[O\\\\Old0d0T[O\\\\Omd0c0S[O]Ond0c0Q[O]OPe0e0mZOZOUe0f0oYOWOg03Ze0m0dZORO\\\\e0P1bZOoN`e0P1`ZOoNbe0R1e01N2O1O00O101OOO2L4O10OIRYO\\\\Oof0e0QYOYOPg0e0TYOYOmf0e08O100000001O2N2M2N2L6J]XONhg0000ZX22bgM2O3N3O2L5LN2N3IPb8\"}}, {\"image_id\": 24, \"category_id\": 1, \"bbox\": [149.0, 531.0, 135.0, 147.0], \"score\": 0.9999992847442627, \"association_id\": 9, \"light\": [-2.464493751525879, -1.3930425643920898, 2.3496108055114746, 1.1919004917144775], \"segmentation\": {\"size\": [768, 513], \"counts\": \"_[`3d0Sg0?B?G5J7I8H8J4K3N1O1O1O100O1O1N2N2N2N2O1L4I7M3N2M3\\\\O[Li\\\\Oi3Rc0e0O1O100001nKc\\\\O_3]c0_Lf\\\\O`3Zc0_Lg\\\\Oa3Yc0^Lh\\\\Ob3Xc0[Ll\\\\Od3Uc0ULR]Oj3bc00O10000000000O11O1O001O3N4K2N1O5K1O2\\\\Mb[Ob1_d0ZNk[O_1Ud0`Nn[Oo0[OmNfd05T\\\\Og0XOSOed06V\\\\Oc0VOWOdd05\\\\\\\\O<SO_O`d05S]O_OeN<Xd04d]OL[b05e]OKZb06f]OJXe0O100000000000000000000000000000000000000O100000001O000000000000000000000000O1000O1000000000000O10000O100O100O101O0O100000000000000O10001OVS[5\"}}, {\"image_id\": 24, \"category_id\": 1, \"bbox\": [236.0, 418.0, 75.0, 74.0], \"score\": 0.9999989867210388, \"association_id\": 11, \"light\": [-2.8723702430725098, -1.0638659000396729, 2.8301827907562256, 0.9860450029373169], \"segmentation\": {\"size\": [768, 513], \"counts\": \"X]a59eg03ZYONUOJne09PZOEg0c0Ve0?TZOGje0Y1O0000000000001O1O000000001O000000010O001O000003M1O002N2N1O1O1O00001O0000001O1O2M2N03OHhYOfN_f0<VZOGje03[ZOMkf010000000000000000000O1000000000010O0000001O001O1OjPg4\"}}, {\"image_id\": 24, \"category_id\": 1, \"bbox\": [295.0, 439.0, 63.0, 70.0], \"score\": 0.9988435506820679, \"association_id\": 13, \"light\": [-2.8278517723083496, -1.0479695796966553, 
2.6945137977600098, 0.8948013782501221], \"segmentation\": {\"size\": [768, 513], \"counts\": \"Tfm63kg03O000000000000000000000000000QYOKSf05kYO3oe0MnYO8Pf0HkYOa0Rf0^OkYOj0me0WOSZOl0de0_OTZOh0je0g0O101O00001O00000000O10000000000000000000002O2M3N1N1O1O10O02N001O1O100N6J3N3L5Jf0ZOfhc3\"}}, {\"image_id\": 24, \"category_id\": 1, \"bbox\": [173.0, 478.0, 142.0, 121.0], \"score\": 0.9999769926071167, \"association_id\": 10, \"light\": [-2.230135917663574, -1.4760985374450684, 2.209226369857788, 1.3785860538482666], \"segmentation\": {\"size\": [768, 513], \"counts\": \"eYR41kf0T1N2N3M3N00O02N2O1M3M2O3K5N1K5M3N2K6M3M3N1O2O1N4L6J4L2O3M2N1O1O1O00001c[OaLWd0d30Kh[OcLWd0c300000001O000000Ej[OlLVd0S3k[OmLUd0R3l[OnLTd07P\\\\OS2LfMUd04W\\\\Oo1EmMTd02[\\\\Oo1AoMUd00b\\\\Oi1ZOWNVd0Mk\\\\Oa1PObNVd0HU]O^1fNjNWf0f0jYOYO\\\\f0a0eYO_Ocf09]YOGff04\\\\YOL\\\\g00000000000001O000000000000000000000000000000000001O0000000000001O00000000001O0000001O001O0000000000001O000000000000000000000000000000001O000000000000000000001O00001OZmc4\"}}, {\"image_id\": 24, \"category_id\": 1, \"bbox\": [253.0, 251.0, 60.0, 61.0], \"score\": 0.9990883469581604, \"association_id\": 2, \"light\": [-1.7563247680664062, -2.2954981327056885, 1.7023082971572876, 2.04750657081604], \"segmentation\": {\"size\": [768, 513], \"counts\": \"nPn56bg0:I5I8I7M4L3M4L4gYO^Noe0l1N0000000001O1O1O1N2O1O002N1O2N102M2N4K7IQa1QOo_N1O000O1000000001O000000000000000000000000000000001O00Yfe4\"}}, {\"image_id\": 24, \"category_id\": 1, \"bbox\": [146.0, 390.0, 86.0, 82.0], \"score\": 0.9999715089797974, \"association_id\": 8, \"light\": [-1.9561213254928589, -1.343571424484253, 1.9205719232559204, 1.1684504747390747], \"segmentation\": {\"size\": [768, 513], \"counts\": \"Xl]36hg0:H01O00O2O0TYOB_O0bf0>oYO0he01RZO_OFm0le0IWZO`0fe0o0N1O1O1O1O1O001O001O1O0002OO01O1O010O100O1O1O1O1O00001O2N001O1O001O1N2N5O1L3EeYOkN^f0j0jYOVOXf0?PZOBWg0000000000000000001O0000000000001O0000000000001N100000000000000O2O0000\\\\Yb6\"}}, {\"image_id\": 24, 
\"category_id\": 1, \"bbox\": [303.0, 350.0, 50.0, 70.0], \"score\": 0.9961066246032715, \"association_id\": 15, \"light\": [-1.6078553199768066, -2.5851762294769287, 1.5609209537506104, 2.4509689807891846], \"segmentation\": {\"size\": [768, 513], \"counts\": \"^cS73ig0b0A9F6J9YYOeNO3Pf0k1L4N001O000000000001O1O3M4L3M3M3GeYOhN\\\\f0Q1iYOoNhf0:]YOG^g000000001O00000000000000001O000000000000000000000000Pcg3\"}}, {\"image_id\": 24, \"category_id\": 1, \"bbox\": [349.0, 288.0, 53.0, 55.0], \"score\": 0.9995363354682922, \"association_id\": 12, \"light\": [-1.7765514850616455, -2.0906429290771484, 1.7170817852020264, 2.0046186447143555], \"segmentation\": {\"size\": [768, 513], \"counts\": \"aQV88fg01O5I4O100O002M3N3L2O2O1O0010O00]YOTOSf0l0aYORO28]f0k0aYOUO_f0V11O1O2N1O2N1O2NWYOlNhf0Q1YYOoNif0o0WYOQOkf0l0UYOUOPg0f0PYOZOUg0OjXO162MNig01O000000000000001O00000001O0O1000001O0000\\\\mb2\"}}, {\"image_id\": 24, \"category_id\": 1, \"bbox\": [436.0, 324.0, 75.0, 97.0], \"score\": 0.9976018667221069, \"association_id\": 14, \"light\": [-0.48288920521736145, -2.9619197845458984, 0.4435770809650421, 2.786979913711548], \"segmentation\": {\"size\": [768, 513], \"counts\": \"h[W:2c02[f01TYO10O71HMif00YYO3N251gf0HXYO?MKjf0HWYOj0jf0WOTYOi0mf05O1O0L6O0POhNU[O\\\\1hd0hN_ZOM>_1Se0iNmZOX1Qe0iNoZOW1Qe0jNmZOW1Se0kNjZOW1Te0iNlZOX1Te0hNiZO[1We0fNfZO\\\\1Ye0g0OOiZO`MPe0_2Q[ObMnd0^2R[ObMnd0Z2V[OgMjd0T2Z[OlMfd0R2\\\\[OnMfd0m1\\\\[OUNfd0d1^[O\\\\Ned0^1^[ObNbd0\\\\1`[OdN`d0Z1b[OfNfd0l0`[OUObd04R\\\\OLmc04T\\\\OLlc03T\\\\ONmc01S\\\\OOnc0OS\\\\O1mc0OS\\\\O1nc0LS\\\\O4me0000000002O1N1O0000000001O001N10001O0000000000000O101O0000000000001O1OPS1\"}}, {\"image_id\": 25, \"category_id\": 1, \"bbox\": [63.0, 50.0, 556.0, 574.0], \"score\": 0.9984437227249146, \"association_id\": 1, \"light\": [-1.443637728691101, -1.5681229829788208, 1.2444041967391968, 1.454077124595642], \"segmentation\": {\"size\": [683, 1024], \"counts\": 
\"]`Z1`0id07J4L3M2O2M2N2OO1O1O1O1N2O1O1O1O1O00100O100O10000O2N2O1N2N3M2JPh:JZXE3M4L5K4L4f[OG[c0<`\\\\OG`c0;Z\\\\OJdc0o001N100O1O100O01000O01000O01O1O100O01000O100O010O1O001O001O001O010O0001O10O010O010O0001O00O1N2N2O1O100010O010O01000O010O010O0000000O1000010O0010O0100O010O001O000000000000001O0010O01O10O01O1O00001O0O1O01000010O010O010O010O10O0010O00O00001O1O10O100010O010O01O010O001O0O100O1001O01O01O01000O010O1O01O01O000O100001O01O01O10O10O010O10O0001O0000O101O0O110O001O10O010O01O001O00O1M2N3O10O11O00010O0100O010O0010O0000OO2N2O1O2O001O010O10O010O010O01O0000N2O1O1O1000001O010O010O010O01O000000000001O010O0010O010O010O0010OO1O001O1O101O001O010O010O10O0100O001O000000001O000010O010O010O010O01O000O1O1O1N11001O00010O010O010O010O0000000O2O000000000010O001O0100O0O2O1O00100O1O010O10000O10O11N100N2N3M2N2N3L3L5MjmU15QRjN4M2N2N1N3WJ_OcFe0[9^O`Fe0_9]O^Fg0`9[O[Ff0g9\\\\ORFh0n9ZOnEh0S:YOkEg0V:YOhEg0Z:ZObEb0e:^OYE`0k:@SE`0o:AoD>S;BiD?Z;AdD?^;B_D?b;B[D?f;BVD?l;BRD=P<DmC=T<CjC>W<CfC?Z<AdC`0]<@bC`0_<@aC?`<A_C>c<B\\\\C=f<BYC`0h<]OYCc0h<ZOZCg0f<VO\\\\Cj0f<RO\\\\Co0^`02XMPOSAR1l>POQAR1P?nNo@R1R?oNk@R1V?oNh@Q1X?QOe@Q1[?POb@Q1^?SO]@n0c?UOX@m0i?SOU@o0k?SOQ@n0P`0WOi_Oj0X`0YOc_Oi0_`0VO]_Ol0e`0TOV_Oo0n`0YOc^Oj0_a0`12N3M4L3N1M7J4L2N2N3M2O0O01000O10N10001N100O10O10O01N2N2O100O1O1dNY]O3hb0KQ^OYOTb0d0S^OTOQb0j0Y1O1O1O2N1O3J6Klo]8\"}}, {\"image_id\": 26, \"category_id\": 1, \"bbox\": [678.0, 168.0, 325.0, 654.0], \"score\": 0.9999982714653015, \"association_id\": 2, \"light\": [-1.2828845977783203, -2.9883363246917725, 1.0991482734680176, 2.74814772605896], \"segmentation\": {\"size\": [941, 1024], \"counts\": 
\"ki_c02Ym03O00001O000O2O00000O101O00000O1000000O100N2O100O1O1O10O100O10000O0100000O10000000000O1001O0000000000000000010O000000000000001O000001O0000eG=lBCo<f0mBZOP=k0oBTOo<P1oBPOn<V1oBjNn<[1PCeNm<a1PC_Ni<k1TCUNe<U2XCkM`<`2^C_M\\\\<i2bCWMZ<n2eCRMY<Q3fCoLX<U3fCkLV<Z3iCfLP<c3nC]Lj;l3UDTLd;U4ZDkK`;]4^DcKY;i4dDWKQ;V5mDjJm:_5PEaJn:d5oD\\\\JP;i5lDWJR;o5jDQJT;W6fDiIY;_6aD`I];h6]DXIb;n6YDRIe;T7WDlHh;Y7TDgHi;`7SD`Hi;f7UDZHg;l7WDTHf;P8YDQHc;T8[DkGd;X8[DhGc;[8\\\\DeGb;^8]DbGa;a8^D_G`;c8`D]G^;f8aDZG];h8cDXGZ;k8fDUGW;n8iDRGT;Q9lDoFQ;T9oDlFo:V9QEjFl:Y9TEgFj:[9VEeFh:]9XEcFf:_9ZEaFd:a9\\\\E_Fb:c9^E]Fa:d9_E\\\\F`:e9`E[F^:g9cEXF\\\\:i9dEWF[:j9eEVF[:j9eEVFZ:k9fEUFY:k9hEUFX:k9hEUFX:k9hEUFW:l9iETFW:l9iETFV:m9jESFV:m9jESFU:n9kERFU:m9lETFR:m9nESFQ:m9PFSFn9m9TFSFj9m9XFSFe9m9^FSF^9m9fFSFX9l9kFTFS9k9PGUFn8k9TGUFk8k9VGVFh8j9YGVFg8i9ZGWFe8i9\\\\GWFd8g9^GYFb8f9_G[F_8d9cG\\\\F]8a9fG_FY8_9jGaFV8]9lGcFS8\\\\9oGdFP8[9RHeFm7[9THfFi7Z9YHfFd7[9^HeF_7]9bHcF[7^9gHbFW7^9kHbFT7\\\\9oHeFn6[9TIeFk6Z9WIfFi6W9ZIiFf6U9\\\\IlFc6R9_InF`6Q9bIoF^6o8dIRG[6m8fISGZ6k8iITGW6j8kIVGU6h8mIXGS6f8oIZGQ6d8QJ]Gn5`8UJ`Gk5^8WJbGh5]8ZJdGe5Z8]JfGc5X8_JhGa5W8`JjG_5U8bJkG^5T8dJkG\\\\5S8fJmGZ5R8gJoGX5P8iJRHT5m7nJTHQ5j7QKVHo4g7TKZHj4e7XK[Hh4c7ZK]He4b7`KgGTH`0\\\\<h7fKWHY4i7jKUHV4j7PLRHn3o7VLmGi3S8YLlGf3U8[LjGd3V8nKQHmGIT<W8fKaHY4_7`KkH\\\\4W7aKoH[4R7cKSIY4n6eKVIY4k6cKZIZ4g6cK^I[4c6`KbI^4`6[KgId4Z6VKlIh4V6TKnIk4S6RKPJm4Q6PKRJn4Q6nJQJR5Q6kJPJU5T6eJnI[5V6^JnI`5Y6VJkIj5[6\\\\IYJd6n5eHbJ[7c5UHhJj7\\\\5oGhJQ8[5iGhJW8[5dGgJ\\\\8[5`GgJ`8]5XGgJg8_5PGeJP9c5aFcJ`9g5YEVKg:h;O10O10O1O000O2N1O1N2K5M3N1O1OO00LkNSFT_Ok9d`0_13102O101O1O2M3L4L3N3L4M3M3M3M2O2N2M3M3L4L4L4M3N2PHaB0a=NjBGX=8QC^OQ=`0UCYOn<g0WCQOl<n0[CgNj<X1]CZNk<f1eCbMc<]2hCTM]<l2fClL_<S3eCeL`<[3cC]Lb<c3bCRLe<m3bCbKi<^4\\\\DgIZ<Y6]5O100O100001O00001O00001O001O0000010OmKc[OVO]d0f0n[OSORd0j0V\\\\OQOjc0l0^\\\\OPOac0m0k\\\\OjNUc0T1T]OeNmb0X1]]O_Neb0]1c]O\\\\Nab0]1j]O[NYb0\\\\1U^O[NPb0Z1X5Cb0Ai0XO^Sc0\"}}, {\"image_id\": 26, \"category_id\": 1, \"bbox\": [2.0, 94.0, 304.0, 740.0], \"score\": 
0.9999999403953552, \"association_id\": 1, \"light\": [-1.3189964294433594, -3.0842628479003906, 1.1528624296188354, 2.945192337036133], \"segmentation\": {\"size\": [941, 1024], \"counts\": \"lc2<Pm02N1O101O0000001N1000001O0000000O2O0000000000001O0000001O0000001O0000000000001O0000001O00000000001O0000000001O01O00000000010O01]O_SOI0<bl0GkSO2Vl0JnSO6el0O00m_OMm22cLa0[3]OaKl1f?=_Ob0F;[@hMc3a2cGUM^M`3Z8AjIhMXMP3i8]OfIPNVMi2P9[OeITNRMg2V9WOdIYNoLd2Z9WOcI\\\\NkLa2`9VO`IaNgL]2g9VO]IgN_LY2Q:VOXIoNVLT2^:TOQIR5m6UKeHT5Y7RK\\\\HU5b7PKVHU5h7PKPHV5n7mJmGV5S8mJfGW5Z8lJ_GX5a8[KgFj4X9VLbEQ4^:_700000O010000000O1O100O1O100O1O1N2N2ZO[FW^OR9Ua0`GS_OZ8V`0\\\\Hl_Oc7l>eIVAY6_>RJaAn5V>[JkAd5P>`JQB`5l=cJVB[5h=gJYBX5d=kJ\\\\BU5a=nJ`BP5\\\\=UKdBk4W=YKjBg4P=`KoB`4l<eKTC[4i<hKWCX4f<kK[CT4b<oK^CQ4`<QL`Co3^<SLbCm3\\\\<ULdCj3Z<YLfCg3W<\\\\LiCk0_OeKg<b3jCg0J^KZ<m3lCc05VKo;X4lC`0<SKf;_4nC=?QKc;c4nC;b0PK_;f4oC9e0mJ\\\\;l4PD6g0iJY;S5PD4j0dJV;Z5PD2m0]JT;d5oCNS1jIV;_6gCGS>f0mAZOi=P1WBPOf=S1[BlNc=V1]BjNa=X1_BhN`=Y1`BgN_=Z1aBfN^=[1bBdN^=]1bBcN]=kLUBm0?W2[=`LhBT1M\\\\2Z=ZLSCW1C_2Y=WL[CW1\\\\Ob2X=TLbCX1VOd2W=RLhCX1QOf2V=QLlCY1nNe2U=PLPD[1kNe2T=nKUD\\\\1gNf2R=mK[D\\\\1cNg2Q=kK_D^1`Ng2P=iKdD_1\\\\Nh2n<iKiD^1ZNh2l<iKmD^1WNi2k<iKPE]1UNj2j<iKSE\\\\1SNk2j<hKSE^1SNj2k<fKQEb1UNg2l<dKnDg1VNe2m<bKlDk1WNc2o<_KjDP2VNa2R=\\\\KhDT2VN`2T=YKfDX2VN_2U=WKeD[2VN^2W=TKcD_2VN]2X=QKcDc2UN\\\\2Y=oJbDf2UN[2[=jJbDl2SNZ2^=bJbDV3QNW2`=[JbD`3nMU2d=PJbDn3jMR2m`0SNR_Om1j`0WNV_Oi1f`0[NZ_Oe1d`0]N\\\\_Oc1b`0_No^OZKOW6Qa0`NP_OZKNV6Pa0bNQ_OYKOV6m`0cNT_OXKNU6l`0eNV_OVKNU6i`0hNX_OTKOT6e`0lN\\\\_OPK0S6_`0ROa_OlJOR6]`0VOc_OhJ0R6Z`0YOe_OfJ1Q6X`0\\\\Of_OcJ2Q6V`0^Og_OcJ2P6V`0^Og_ObJ3Q6U`0]Og_OcJ4Q6T`0\\\\Oh_OcJ4R6S`0\\\\Oh_OcJ4R6S`0[Oi_OcJ4T6Q`0Jo_O7Q`0Ho_O;n?FQ@?j?AV@f0c?ZO]@l0c0SIf=Q6gAT1JaIZ>\\\\5kAX2Q>hMnA^2m=bMSBb2j=^MUBg2f=YMZBi2d=XMZBk2d=VM[Bl2c=`600O001O001O001O001O0001O0001O01O00001O0010O0000000O1000000000000O10000000000000000000000O100000000`KZ@XKg?d4b@WK^?f4j@UKV?h4RARKo>j4[APKf>k4bAPK_>k4kAoJW>m4RBlJo=Q5ZBhJh=T5gB^J\\\\=_5VCPJk<n5_ChId<
U6dCcI_<Z6iCYI_<d6hCfHj<W7h4L5J6K6J5L5K4L5K4L4K6I7Gb0aKSXOa2hi0Ba0@=gNW1^Oa0Ab_cd0\"}}, {\"image_id\": 26, \"category_id\": 1, \"bbox\": [401.0, 111.0, 355.0, 730.0], \"score\": 0.9999995827674866, \"association_id\": 3, \"light\": [-1.8256772756576538, -2.131059169769287, 1.737744927406311, 1.9372963905334473], \"segmentation\": {\"size\": [941, 1024], \"counts\": \"UYa;2Zm0101N2O00001N100000001O00000O10001O0000001O00000000001O0000001O00000000000010OO1000O100000000000000001O1O0000001O0000000O2O000000001O000000001O0000000000001O0000001O00001O0000000000001O000000001O000lACXO=;8]OH;i0\\\\OWOa0Q1ZOPOb0V1[OjNc0Z1[OfNc0^1[ObNd0`1[O`Nc0c1\\\\O]Nb0e1^O[N?i1@XN<k1DUN7Q2InM2W2NiMN\\\\21eMJ_26aMGb29^MEe2:[MDg2<YM@l2?UM]Oo2b0QM\\\\OQ3e0nLYOU3f0kLXOX3g0iLVOZ3i0fLUO]3j0cLUO_3j0aLUOa3j0_LUOb3k0_LROd3n0[LQOf3o0ZLoNi3P1XLlNl3S1TLkNn3U1RLhNR4W1nKgNU4X1kKfNX4Y1hKfNY4Z1gKeN\\\\4Z1cKfN^4Y1bKfN`4Y1aKfN`4Y1`KgNa4X1_KgNc4X1]KhNd4PHeJU6g0k1e4jGoJV6;o1g4gGXKT61U2h4bG_KU6IY2j4]GeKU6A]2m4WGkKX6YO`2o4nFSL^6nNd2R5aF_Lf6_Ni2n8SMRGl2T9oLlFQ3Y9jLgFV3\\\\9gLdFY3^9eLbFZ3a9dL_F\\\\3d9aL\\\\F^3g9`LYF_3k9^LUFb3o9ZLRFd3S:jGPFP1MU7\\\\:SG\\\\Fc1XOY7f:_FbFT2hN\\\\7m:PFhFa2[N^7S;eEkFk2RN^7W>`HiA]7]>`HcA^7b>_HSA^MCQ:a?\\\\Hk@eMDm9g?YHd@kMEj9m?WH]@QNEg9S`0THW@WNEd9Y`0RHo_O]NFa9``0nGh_OdNF^9f`0lGa_Oh8b`0UG]_Ol8f`0QGY_OP9l`0lFS_OT9Ra0\\\\Ff_OW9b`0kEW@R:da0N1N3N1O1O0000001O00000000001O0O10O0100O1O100O1O1N2N1N3M3M2OVLUHVBi7\\\\<XJRCg5i<dJSCZ5h<PKTCP5i<WKTCf4l<`KPC_4P=fKlBW4V=oKdBb3k=eLoAn2^>fMm@Y2U?m52N200O2N3M3M3L4L4L4M4L4M3M3N2M3N1O2N1N2O2N1O002N2M3N3M4L3M2N2N2N1O0000000000000000000000000000000000000000O1O1O1O1O2N1N2N2N2N3L5L4K5L5J8I6I7I6J5L5J7IR1[I`A\\\\LP?m2jA`Le>i2eBULa=b3PCoKW=i3WCjKo<o3]CeKi<W4`C_Ke<\\\\4dC[Ka<_4jCVK\\\\<c4[DfJk;S5\\\\6E:^Oc0E;K4M4L4N200O1O1O1O2O1N1O2N2O0010O00100O2N1O1O10O2N2M5L4L4L3M3M2N3M3K5K6J6I8I7I6J5K5K5Jdne7\"}}, {\"image_id\": 27, \"category_id\": 1, \"bbox\": [244.0, 331.0, 179.0, 206.0], \"score\": 0.7639310359954834, \"association_id\": 6, \"light\": [-2.403940200805664, 
-1.357372522354126, 2.3027749061584473, 1.2648500204086304], \"segmentation\": {\"size\": [683, 1024], \"counts\": \"lVS54Ve0101O0O100000000000001O0000001O00000O1000000000001O000O101O000000001OO100000001O00000O100O1O100O100O10000000O2O000000000000000000O10a[O1bc0O[\\\\O6Vb01W^OHC9Sb05W^OAE<Qb0:V^OXOHa0oa0g0l]OfN4f0na0m0S^OVOla0o0m]OUOPb0R1i]OPOTb0U2K5K6L3M3L4L4M3N3F9O3M2N2N2N1O1G9N3N1N2N2K5N29G6J2N2N01N1O100O2N1O2L4M3M3L3K5M4M2O1N2N2O1N3N1O1N200O10000O1000001O00000000000000000000000000000000O1000000000000000000000000]J_A\\\\4a>bKeAZ4[>eKhAY4X>fKjAX4W>gKmANOj2T>XMbCa2_<]MiC[2X<dMmCX2S<gMTDS2m;jM`Dj1a;UNeDl0[L^OQ?DfDg0aLBk>EfDg0aLBP?@bDj0bLCRc07S]ODSc05T]OEQc06V1Ka\\\\`<\"}}, {\"image_id\": 27, \"category_id\": 1, \"bbox\": [646.0, 301.0, 220.0, 212.0], \"score\": 0.7627537250518799, \"association_id\": 4, \"light\": [-2.8714189529418945, -1.601693868637085, 2.78662109375, 1.4349607229232788], \"segmentation\": {\"size\": [683, 1024], \"counts\": \"d[_=6eb0Ki]O2Y1?a`0g0k^O@Qa0f0i^O\\\\OSa0n0b^OXOTa0d2N2N2N3L3L4M3N2N2K5K5M3M3L4L4M3N2M3N2N2N2O1N2O1O1O1M3K5L4N2O1M3N2N2O1N2O1N2O10000O1000000000000000000000000000000000000000000000000000O100O100O1N2N2N2O1N2N2O1N2N2N2hJi@[4X?bKm@Z4U?dKn@Z4S?eKPAW4R?hKQAT4Q?kKRAR4o>nKRAP4o>oKSAo3o>oKUAm3l>QLYAj3i>TL[Ah3h>UL\\\\Ah3f>TL`Ag2aNXNda0c1b^OcM08ba0Q2n^OkMWa0P2m^OiMZa0S2V1E;F7J<E9F:E]Sn0FPmQO2O0K501O000000000000000000000000000001O0001O01O000000001O0000000001O00001O01O000001N100000000000000O2O0000000001O00001O1O1O1O3LklX3\"}}, {\"image_id\": 27, \"category_id\": 1, \"bbox\": [71.0, 242.0, 375.0, 336.0], \"score\": 0.9999991059303284, \"association_id\": 1, \"light\": [-1.956742286682129, -1.7300217151641846, 1.8424785137176514, 1.6145565509796143], \"segmentation\": {\"size\": [683, 1024], \"counts\": 
\"`d_1:ld0f0WO:H8jAbNT8f1hG_No7j1mGZNP8f1PHZNP8e1RHZNm7f1TH[Nk7c1VH^Ni7c1VH_Nh7b1`GXN`K=n<\\\\1]G_NeK1o<`1XGcNd9\\\\1ZFfNf9[1XFgNg9Y1YFhNg9W1YFjNf9V1ZFkNf9e0TBiNV4c0e9b0[BfNQ4i0c9`0_BdNo3l0b9?oFCP9<QGEn8;RGFm89RGIn86QGMn83PGOP90nF3R9KmF8S9\\\\OVGg0i8AmFe0P9AgFf0U9\\\\OgFk0T9XOnBlNQ3S2m9VOgB@h2`1\\\\:8_EO\\\\:2bE3Z:NdE8W:IfE>U:BiEf0Q:[OlEk0P:UOoEo0n9QOPFS1o9lNoEX1o9hNoE]1n9cNoEb1P:]NfEn1Y:RNaEV2]:iM`E]2_:bM^Eb2b:mJ^Dm1i0_3h:]JkDn17l3g;QLYDQ4g;nKWDU4i;jKVDX4j;gKTD[4n;dKlCa4V<_K`Cj4`<P21O2N1O1O1O1O1O0O2O00000O1000O0100000RCgGj<X8TCnGi<R8WCoGh<[8O000OD[CTHd<l7]CTHc<k7]CVHb<j7_CVH`<j7aCWH]<j7cCVH\\\\<k7cCVH[<l7eCTHZ<m7fCSHX<o7hCQHW<P8iCPHV<Q8jCoGU<R8kCnGS<T8mClGQ<U8PDkGn;W8RDiGm;X8SDiGk;X8UDhGj;Y8VDgGj;Y8VDgGi;Z8WDfGi;Z8WDfGi;Z8WDfGi;Z8WDfGh;[8XDeGh;[8XDeGh;[8XDeGh;[8XDeGh;[8YDdGg;\\\\8YDeGf;[8ZDeGf;\\\\8ZDcGf;^8YDbGf;`8YD`Gg;a8XD_Gh;b8XD]Gh;d8XD[Gh;g8VDYGj;S9100N3N12N1O001O0000jGVDl6i;SI\\\\Di6d;UIdDe6];YIiDa6X;]IoD^6Q;`IUE\\\\6k:cIXE[6h:cI\\\\EZ6e:fI]EW6e:gI^EV6c:jIcEk5b:UJfEm4P;SKXE_4o:_KVEY4kNoJn:h0ZFT4gNWKo:e0]FP4aNbKo:?dFg3_NmKk:<PG[3VN[Lh:;WGR3TNdLd::ZGn2TNhLb::[Gl2TNkLa:8]Gj2SNnL`:9_Ge2QNSM`:8bGa2oMWM`:8cG]2nM\\\\M^:7fGZ2mM_M^:6fGY2mMaM]:6hGV2lMdM]:3mGR2iMlM[:ORHm1fMTNZ:KXHh1aM]NY:H\\\\Hd1]MdNY:D`Ha1ZMkNW:BdH[1YMSOU:^OjH6WK=P2O_b0000001O00000001O00000000000000000000O101O00000001O0000000000000000000001O0000000000000000001O00000000000010O0000000000000001O00001O000000001O000000001O0000001O0000000001O01O000000000000001O0001O00000000000000000000000000000000000000000000000001O0000000000000000000000001O0000000000000000000000000O100000000000000000000000000000000000000000000000000O10000000000000001O000O10000000000000000O1000001N10000000000O1000000OnnP<\"}}, {\"image_id\": 27, \"category_id\": 1, \"bbox\": [453.0, 310.0, 192.0, 260.0], \"score\": 0.7615970373153687, \"association_id\": 2, \"light\": [-2.372279644012451, -1.885880947113037, 2.269011974334717, 1.7034844160079956], \"segmentation\": {\"size\": [683, 1024], \"counts\": 
\"kd^9=ld04K4L4YNXOd^On0Ua0_OP^OV1ka0X1M3M4K3M4L4L3N3M2M4J5E<K4M4N1N3M2M4M3M2O2N2PNoJcCR5X<UKdCn4Y<WK[CR5e<SK^Bf5`=S101N2O1O2N1N2O1O001O1O001O00001O001O1O00000O10001O000000000000001O00001O00000001O00000000001O001O001O2N3M3M2O2M2N3M3M2N1O2N1O1O1O1O2N1O1O000000000000000O100000000O3N3L5fIaAI9o4h?oJo_O_4b`0jKU_Oe3o`0QLZ_Ol3Ta0M3N3M3M4M3L6J4K4M1N3N1N2_N_]O6db0Ga]O4bb0Jb]O0db0La]ONcb0Ob]OJbb04b]OGbb07b]OBdb0:Y1N1M3N1OY[OJ[d06f[OIZd07=00O10O1000O10TKJcD6\\\\;KVDMRL8g?MUDNRL5;Hg>5kD0RL2<Kf>2nDORL2:Nh>OlD0TL191ab0MX]O00074oa0Kj]O30Li07\\\\a0Li]O3Y13m`0Kh]O3[12Zc01O1O1000000000000000000000000000O100000000O2O000O101N2O1NbYl7\"}}, {\"image_id\": 27, \"category_id\": 1, \"bbox\": [601.0, 307.0, 348.0, 235.0], \"score\": 0.9999934434890747, \"association_id\": 3, \"light\": [-2.799510955810547, -1.7192256450653076, 2.673008441925049, 1.5054742097854614], \"segmentation\": {\"size\": [683, 1024], \"counts\": \"n[a<110Ve04O001O0O101O00000O2O000000001O000CGh[O9Wd0Hi[O8Wd0Ih[O7Xd0Ih[O7Xd0Ih[O7Xd0Ih[O7Wd0Ji[O6Wd0Ji[O6Wd0Ji[O6Wd0Ji[O6Wd0Jj[O5Vd0Kj[O5Vd0Li[O4Wd0Kj[O5Vd0Kj[O5Wd0Ji[O6Wd0Ji[O6Yd0Gh[O9ed001O000000000000000001O0001O00000000000001O0000000000000000000O2O000000000000000000000000000000000000000000001O0000001O00001O00000000001O0000001O01O000001O000000001O01O000000000001@DP\\\\O;oc0Ho[O8Qd0HP\\\\O7Pd0Jo[O6Qd0Jo[O6Pd0LP\\\\O3Pd0No[O2Qd0No[O2Qd0On[O1Rd0Oo[O0Qd00o[O0Qd00o[O0Qd01o[ONQd02o[ONQd02o[ONQd02o[ONQd02o[ONQd02o[ONQd02o[ONQd03n[OMRd03o[OLQd04o[OLQd04o[OLQd04o[OLQd04o[OLQd04o[OLQd04o[OLQd04o[OLQd03P\\\\OMPd03P\\\\OMPd03P\\\\OMPd03P\\\\OLQd04o[OLQd04o[OLQd04n[OMRd03n[OMRd03n[OMRd03n[OMRd03n[OMRd02o[ONQd02o[ONQd02o[OMRd03n[OMRd03n[OMRd03n[OMSd02m[ONSd02m[ONSd01n[ONSd02m[ONSd02m[OMTd03l[OMTd03l[OMTd03l[OLUd04k[OLUd04k[OLUd04l[OKTd05l[OJVd04l[OKTd05l[OIVd07?0000000000000000000001O000000000001O00000001O0000000000001O01O000000000000010O000000000000001O000001O0001O000000000000000000000001O000O2O001N102Mab2\\\\1i[M]1kN6K4K4K5J5J7L3N2M3N2N2L4L4M3M3N2O1M3K500O1O100O1O1O1N2H8L4N2M3L4N2N2O
1O1O1O1N2N2O100O1O1O1O1N200O1O10000000O10000000000`NfAnK[>k3oARLQ>l3WBnKi=Q4[BmKd=f3ZAlKX1:_=f3QCWLP=a3a2L4O1N2Nk@eL^<Y3k2WMe^O]1`a0aNg^OX1[a0gNi^OU1Xa0kNn^On0Sa0QOY_Ob0i`0^O[_O<g`0Ba_OOi`0N[2O1O2N1OUga1\"}}, {\"image_id\": 27, \"category_id\": 1, \"bbox\": [551.0, 299.0, 265.0, 243.0], \"score\": 0.7559328079223633, \"association_id\": 7, \"light\": [-2.8852694034576416, -1.113580584526062, 2.7581090927124023, 0.9109281301498413], \"segmentation\": {\"size\": [683, 1024], \"counts\": \"QQ`;3Ue03N3M2O1^JIPF8n9NkE4S:0gE4Y:OkDk0S;YO^B5Bl0o=SOZB6Aj0U>SO\\\\BIGQ1l=YOaB[OJZ1e=]OcBQOMa1_=@\\\\D?b;F]D7b;N\\\\D1c;6XDaNlLn0k>i0fDlN[;Y1eDbNZ;`1iD\\\\NV;f1QBkMj1<U<k1oAnM\\\\ONP28d<n1mAaNYO_Oc12U=V4gBjKW=\\\\4eBdKY=`4eB`KY=d4dB]K[=g4_B\\\\K_=h4\\\\B[Kc=h4ZBYKd=k4XBWKf=V5lAmJR>W6N2O1O1O1N10M3L4O2O001NcAXI[>g6eAZI[>d6gA]IX>b6iA^IW>`6jAcIT>^6kAdIS>\\\\6mAeIR>[6nAeIR>[6nAeIR>Z6oAfIQ>Y6PBgIP>X6QBiIn=V6SBjIl=W6TBiIl=W6TBiIl=W6TBiIl=W6TBiIl=W6TBiIl=V6UBjIl=S6VBnIi=Q6XBoIh=Q6XBoIh=P6YBPJg=P6YBPJg=P6ZBoIf=Q6ZBoIf=P6[BPJe=n5]BRJc=l5_BTJb=i5`BWJ`=g5bBYJ_=e5bB[J_=b5cB^J^=Y5jBgJY=l2XBSNk0QOP=NcAY2W1hMh01n<m1jDSN[;Y1UEgNl:P1[EPOh:f0_EZOe:<aEDS`00000000000001O000000000001O00000000000000000010O00000000000000001O0000000000000000000000000000000000000000000000001O00000000001O00001O0000010O000000001O0000000001O0001O00000001O000000010O0000HAd[O?\\\\d0Ad[O?\\\\d0Bc[O>\\\\d0Dd[O;\\\\d0Ed[O;\\\\d0Ed[O;\\\\d0Ed[O;[d0Fe[O:[d0Gd[O9\\\\d0Gd[O9\\\\d0Ge[O8[d0He[O8[d0He[O8[d0Id[O7\\\\d0Ie[O6[d0Je[O6[d0Je[O6[d0Je[O6[d0Je[O6[d0Je[O6[d0Je[O6[d0Jf[O5Zd0Kf[O5Zd0Le[O4[d0Le[O4[d0Le[O4[d0Le[O4[d0Kf[O5Zd0Kf[O5Zd0Kf[O5Zd0Kf[O5Zd0Kf[O4[d0Le[O4[d0Ld[O5\\\\d0Kd[O5\\\\d0Kd[O5\\\\d0Je[O6[d0Jd[O7\\\\d0Id[O7\\\\d0E_[O159]d0F^[O159]d0F_[OO5;\\\\d0F_[OO5;\\\\d0Fg[O:Yd0Fg[O9Zd0Gf[O9Zd0Gf[O9Zd0Gf[O8[d0He[O8[d0He[O8[d0He[O8[d0He[O7\\\\d0Id[O7\\\\d0Id[O6]d0Jc[O5^d0Kb[O4_d0La[O3`d0M`[O1bd0O^[O0cd00<0000001O000000000000000000000000000000000000000\\\\WZ4\"}}, {\"image_id\": 27, \"category_id\": 1, \"bbox\": [390.0, 215.0, 93.0, 
119.0], \"score\": 0.9072425365447998, \"association_id\": 5, \"light\": [-2.2073888778686523, -1.4736545085906982, 2.1571242809295654, 1.3633825778961182], \"segmentation\": {\"size\": [683, 1024], \"counts\": \"[]T81Ye03M3kZOMhd0`0L3M3YN[Od^Oi0Xa0ZOd^Oj0[a0WO`^On0_a0XOY^Ok0ea0WOW^Om0fa0WOT^On0ja0UOP^OP1na0SOe]O[O2d1Yb0V1N101M2O2O001O1N10001N1000001O0O10000O10001O000O101O02N1O00O1O21N2N3M00O11O4L2O6I1O1O3M3M3M3M2N1O1O2N5K2O0O2N2N3M4L4_Oa\\\\OoNmc0?V\\\\OAlc0:W\\\\OFPd0OT\\\\O1cd010O000001O000001O000O20OO10001O1Od`X;\"}}, {\"image_id\": 28, \"category_id\": 1, \"bbox\": [342.0, 300.0, 57.0, 168.0], \"score\": 0.9570960998535156, \"association_id\": 1, \"light\": [-1.621281385421753, -2.5060319900512695, 1.5868490934371948, 2.3784844875335693], \"segmentation\": {\"size\": [768, 1024], \"counts\": \"hjP8c0[g04K5M2N3L4L4M6K3M4K8H7G6L5U\\\\OaMla0e2m]O^Mna0m2k]OTMQb0T3h]OnLVb0W3b]OnL[b0X3_]OkL`b0[3X]OhLfb0a3o\\\\OcLPc0W4O1O100O2O0N2O1O101O0O10000O10TO^]OZLcb0g3Y]O[Lhb0^42lJZ]Oj4Qc0M3M1lMm\\\\OYOTc0>W]O_Ojb0=\\\\]O\\\\OXObN^c0o1]]O]OUOcNac0m1^]O]OROeNac0h1g]O^OhNjNbc0e1W^OQOXNZObc0c1`^OjNnMCbc0c1e^OcNjMKWd0b0c^O_Ond010O2O001O_Yd>\"}}, {\"image_id\": 28, \"category_id\": 1, \"bbox\": [259.0, 302.0, 393.0, 287.0], \"score\": 0.9980900287628174, \"association_id\": 2, \"light\": [-1.6788790225982666, -1.896336317062378, 1.5737335681915283, 1.7369645833969116], \"segmentation\": {\"size\": [768, 1024], \"counts\": 
\"\\\\iR61og0001O0000000000001O000000000000001O0000000000001O00001O0000000000001O00000000000001O01O01O000000010O000000000001O000001O000000000000000001O0000010O00000000001O00001O00001O0000001O0001O0001O000000001O0001O00000000000001O00000001O00000000000000000000000000000000000000000000000000000001O00000000000001O000000000001O0000000000000000000000001O0000000001O000001O0001O0001O000001O00000001O0001O00000001O00000000001O01O0000000001O00000000001O0000000000000000001O000000000000000]N2T[ONid09S[OGld0<R[ODmd0>R[OBnd0>R[OBnd0>R[OBmd0?S[OAmd0?S[OAmd0?S[OAld0?U[OAkd0?U[OAjd0?X[O@gd0`0Z[O@fd0`0Z[O@ed0`0\\\\[O@cd0`0_[O_Oad0a0_[O_Oad0`0`[O@_d0a0a[O_O_d0`0b[O@^d0`0b[O@^d0?d[O@\\\\d0`0d[O@[d0`0f[O@Zd0`0f[O@Zd0?h[O@Xd0`0h[OAWd0>j[OBVd0>k[OAUd0?k[OATd0?m[OASd0?m[OARd0`0n[O@Rd0`0o[O_OPd0a0Q\\\\O_Onc0b0R\\\\O^Olc0d0T\\\\O\\\\Ojc0f0W\\\\OYOfc0i0[\\\\OXObc0j0^\\\\OVO`c0k0a\\\\OUO^c0k0c\\\\OUO]c0j0d\\\\OVO[c0j0g\\\\OUOXc0l0h\\\\OTOWc0l0j\\\\OTOUc0l0l\\\\OTOSc0m0m\\\\OSOQc0n0P]OROmb0Q1S]OoNlb0Q1V]OnNib0S1W]OmNhb0S1Y]OmNgb0R1Z]OoNeb0P1\\\\]OPOcb0P1^]OPObb0o0_]OQO`b0o0a]OQO^b0o0c]OQO\\\\b0n0f]OROYb0n0i]OQO[>JXCU1_NQOW>MXCQ1cNROT>OVCo0hNROn=6WCg0lNSOl=>QC>TOTOh<IlAm0Q25\\\\OUOg<KiAn0T1ROjNn0d1VOd<NgAQ1n0VOQO:P2AZ<OeA_1D^N8`0G@39`2KU<0dAc1ZOcN8?M_O8Ic23R<1bAY3\\\\OUM7]Oi24Q<3aAR4CgKk24Q<7\\\\Ao3n2iKf;;TAU4R3`Kj;Y5UDgJk;Y5UDgJk;V5YDhJh;V5ZDjJf;U5[DkJe;U5\\\\DjJd;U5]DkJc;T5^DkJc;T5e31N101O00001O00O1N2O2M2N2O1000000kKS]OV3ob0eLU]O[3kb0aLZ]O^3fb0aL[]O_3eb0aL\\\\]O]3fb0bL[]O\\\\3fb0dL[]OY3gb0gLZ]OU3ib0kLX]Oo2nb0PMT]OCHi2^c0ZMc]O^2Td0L2O0:F3L3N1N1000UOW[O_Ngd0k0Q\\\\OTOoc0f0W\\\\OZOic0`0]\\\\O@bc0<c\\\\OD\\\\c0<c\\\\OF\\\\c09d\\\\OI\\\\c06c\\\\OK]c04c\\\\ON\\\\c01d\\\\O1[c0Od\\\\O3[c0Le\\\\O6[c0Hf\\\\O8[c0Ge\\\\O9\\\\c0Ee\\\\O;[c0Df\\\\O<[c0Ce\\\\O=\\\\c0Ae\\\\O??POl`0?f^Oa0>POn`0<e^Od0JUORb03U^Oh0E[OZb0GQ^On0A@^b0@R^OQ1^O@Wd0?g[OCYd0>e[OCZd0>f[OAZd0`0g[O]OZd0d0g[OXO[d0h0g[OPO^d0R1V12_YOmNSf0U1lYOmNSf0S1oYOjNRf0U1RZOgNoe06iYO63O`f0HfYO013[f0LYZONje02\\\\cf8\"}}, {\"image_id\": 28, 
\"category_id\": 1, \"bbox\": [672.0, 412.0, 322.0, 180.0], \"score\": 0.8670398592948914, \"association_id\": 3, \"light\": [-0.7980889678001404, -2.5591981410980225, 0.6734708547592163, 2.5472850799560547], \"segmentation\": {\"size\": [768, 1024], \"counts\": \"eah?2mg02O000000001O000000001O0000000000001O000000000000001O0000010O0000000000001O00000001O0001O0000000000000001O000001O00000000000000000000000000000000001O0000000000000001O000001O000000000000000001O000000000001O0000000000001O000001O00^KI[A7d>LZA4f>MYA3g>NXA2h>MYA3g>MYA3g>LZA4f>K[A5e>I]A7d>G]A9Uc001O000000000000010O0000000000000000000001O0000000001O00000001O0cMHS]O7mb0IS]O7mb0JR]O6mb0KS]O5mb0KS]O5mb0KS]O5nb0JR]O7mb0HT]O8lb0HT]O8lb0HT]O8Ze000001O000000000SKIRB6l=NRB2m=0RB1m=0RB0m=1SBOm=2RBNn=2RBNn=2RBNn=3QBMo=3QBMo=3QBMo=3QBNn=2RBNm=3SBMm=3SBMm=3SBMm=3SBMm=3SBMm=3SBMn=2RBNn=1SBOn=0RB1n=MSB3n=LRB4o=KQB5P>IQB7P>ESB;ib00001O000000000001O01O00000000000000000000001O0000000000000000000000001O00000000000000000000001O0000001O00001O0000VKJjA7U>KiA5V>MiA3W>NhA2W>OjA0V>1iAOW>1iAOV>3iAMW>3iAMW>3iAMX>2iAMW>2n_OIQO5Ra01l_OKRO5Ra0Od_OLcM2g13Ra0Oc_O5[OLVa0J__O<[OIgc07Y\\\\OIfc08Z\\\\OHfc08Z\\\\OHfc08Z\\\\OHec09\\\\\\\\OFdc0:\\\\\\\\OFcc0;]\\\\OEbc0<^\\\\ODac0=_\\\\OC`c0>`\\\\OC_c0<b\\\\OD]c0=c\\\\OC\\\\c0>d\\\\OB\\\\c0>e\\\\OA[c0?e\\\\OA[c0>f\\\\OBZc0=g\\\\OCYc0=g\\\\OCYc0<h\\\\ODXc0;i\\\\OEWc0;i\\\\OEWc0:j\\\\OFWc08k\\\\OGUc09k\\\\OGUc08l\\\\OHTc08l\\\\OHUc06l\\\\OJUc05k\\\\OKVc03k\\\\OMde0O2OX[7OidH1N100010N\\\\Rf0\"}}, {\"image_id\": 29, \"category_id\": 1, \"bbox\": [601.0, 140.0, 224.0, 158.0], \"score\": 0.9999990463256836, \"association_id\": 1, \"light\": [-2.2594807147979736, -2.0078976154327393, 2.157949209213257, 1.8405799865722656], \"segmentation\": {\"size\": [682, 1024], \"counts\": 
\"h``<5Pe0?T[OBPd0U1YOe0J5M2N2N101N2L4[OgMS^O]2la0b0O0O100000000O1000001O1N100O1OKU^OmLja0S3V^OnLia0R3X^OnLha0Q3X^OPMha0o2W^OSMha0m2X^OUMga0W3O1OD[^OUMaa0\\\\3NDa^OQM^a0P3b^OPM]a0P3c^OQM\\\\a0P3d^OPM\\\\a0P3d^OPMZa0Q3g^OoLVa0T3i^OmLSa0W3m^OiLRa0X3n^OhLQa0X3P_OhLPa0X3P_OhLo`0Y3Q_OgLo`0Z3P_OfLo`0[3Q_OeLm`0]3S_OcLk`0o3N2O100O100O100O001O100O10000O101O00O100000000000000000O10000000000000000000000O1000000O10000O10000O10O1000000000O1000000000000000000000000000000000000000000000000000O10000000000000000Kb_O^K^`0b4c_O\\\\K^`0d4b_O\\\\K^`0d4b_O\\\\K^`0e43000000001O0000001O001O000001O000O100O10001N101N1O2N1M3M4N1O10o^OQLh`0m3W_OULi`0V4001AW_O[Lj`0d3V_O\\\\Lj`0c3W_O]Lj`0b3V_O]Ll`0b3T_O^Ll`0b3T_O^Lm`0a3S_O_Ln`0a3Q_O_LQa0_3o^O`LSa0_3m^OaLTa0_3l^O_LVa0`3j^O`LWa0`3i^O_LWa0a3i^O_LXa0`3h^O_L\\\\a0_3d^O`L`a0\\\\3`^OdLba0Z3_^OdLca0\\\\36O1O100O2O1N2O1N100O1O1O2Nb0^O8H3M3M3M3M6I6Ii0eNgYT4\"}}, {\"image_id\": 29, \"category_id\": 1, \"bbox\": [112.0, 414.0, 297.0, 226.0], \"score\": 0.999342679977417, \"association_id\": 2, \"light\": [-2.522980213165283, -1.5565142631530762, 2.434293031692505, 1.2979187965393066], \"segmentation\": {\"size\": [682, 1024], \"counts\": \"UR[2X3na07L2N3M2O1N2N101N1O2M2O2N1O101N100O1O1O1O1N2O1O1O1O100O1O2N1N2L5K4M4M2O2M3N2N2N2N2M3M4L<E4L3M3N1N2O1O001O1O0O101O00001N10000000000000000001O000000000000000000001O000O100O100O1O1O1O1O1O10WAcIe>\\\\6ZAgIe>_60001O000000000000000O100000001O0O1000000O100O1O100O2O0O10000O10000000000O1000000000000000010O000001O00001O001O001O1O001O001O00001O00000001O0000000000000O100000\\\\OaA]J^>d5bA\\\\J^>d5bA\\\\J]>e5cA[J]>e5cA[J\\\\>f5dAZJ\\\\>f5dAZJ[>g5eAYJ[>g5eAYJ[>g5eAYJ[>[61O0000000000001O001O00001O0000001O000000001O0000001O001O00001O1O1O1O001O1O001O001O001O1O1O4L5K2N1O1O1O1O001O1O2N4A[@iJ05j?m4`0L2M100O10O010O10O010O100O010O1O000001O000000000000000001O1N101O1O1VL]_Ok2d`0jLi_OS3X`0cLT@Z3Pa0N1O1O1N2O1O2M6K=B6K3J5L?@7J5J6SOS\\\\O2Vd0EdQi<\"}}, {\"image_id\": 30, \"category_id\": 1, \"bbox\": [266.0, 240.0, 250.0, 98.0], \"score\": 
0.9999999403953552, \"association_id\": 1, \"light\": [-2.8398337364196777, -1.5959200859069824, 2.7442312240600586, 1.4027007818222046], \"segmentation\": {\"size\": [450, 736], \"counts\": \"kVe31Q>1O0O2O000O2O00000000000O2O0000000000000O10001O000000000O10001O0O100000000O2O000PODZD<`;L^D4_;3]DM];=_DC_;`0_DA`;a0_D@_;b0`D^O`;c0_D]O`;d0`D\\\\O^;g0aDYO];k0aDUO\\\\;R1`DnN^;W1_DiN`;Z1_DeN`;]1_DcN`;_1_DaN`;`1aD_N];T2N2N2O100O10DhDnMW;S2jDlMV;U2iDkMV;V2jDjMV;V2jDjMU;b201O0000000O100000000O10O10O100000_OnDRNR;_2000000000000O100000000000000000_OoDQNQ;o1oDQNQ;o1oDQNQ;o1oDQNQ;o1oDRNP;n1PERNP;n1PERNP;n1PERNP;n1PERNP;`20_OPEPNP;P2PEPNP;P2PEPNP;P2PEPNP;P2PEPNP;P2PEPNP;P2PEPNP;P2PEPNP;a20@PEoMo:Q2QEoMo:Q2QEoMo:b20000000000000000000000000EQEcMo:]2REbMn:_2QEaMo:i200000000000LQEUMP;j2PEVMP;k2PETMP;l230000001OO100O10000O2OkDVMR;m2001O0000001IkD_MU;g200001O001O1O000O101O1O1O001O001O0FbDPN_;Y22N2M2O0F[DWNf;h1ZDXNf;h1ZDXNg;g1YDYNi;e1WD[Nj;n11DUD^Nm;a1SD_Nn;`1RD`Nn;`1RD`No;_1QDaNo;_1QDaNP<^1PDbNP<^1PDbNQ<]1oCcNR<e110001O00000000001O0O10001O001O0O10001O0IgCeNY<[1hCcNZ<\\\\1600O2O000O2M2N2O2O000O2N2N3M2N1O2O0O2O1N6Fi\\\\P3\"}}, {\"image_id\": 31, \"category_id\": 1, \"bbox\": [46.0, 108.0, 961.0, 326.0], \"score\": 0.9999998211860657, \"association_id\": 1, \"light\": [-2.7589266300201416, -1.5660593509674072, 2.5745768547058105, 1.3667958974838257], \"segmentation\": {\"size\": [502, 1024], \"counts\": 
\"Rkf07\\\\?6K4K6J5dA_OZ=b1K5L4N101N2N2O1N2N2O1N3M2N4L5L2M4L3M1O2N2N101N1O2O0O101N100O2O0O101N100O2O0O2N101N1O2N2N1O2O1N1O2O0O2N101N100O2O000O2O0O100O2N100O2N1O2N101N1O2N1O2N2N2N2N1O2N2N101N1O101N10001N101N100O2O0O2N2N101N2M3N2O1N1O2N2O0O2O0O101N101O0O101O0O10001N100O2N101N1O2N1O2N2N1O2N1O2N1O2O0O1O2N100O101N10000O10000O1000000O1000000O2O0000000O10000000000O100000000O1000000O100O10000O100O1O100O1O100O10000O10000O10000O1000000000000000000O10000000000000000O10000000000000000O10000O10000O10000O100O1O100O100O10000O100O1000000O100000000O10000000000O1000000O100000000O10000O1000000O100O10000O100O10000O10000O1000000O1000000O1000001O0O1000000000000O1000000000000000000O100000000000000O1000000000000O1000000000000000000000000000000000O100000000000000000000000001O00000000000000000O101O0000000000000000000000001O0000000O10000000000O1000000O1000000O10000O10000O100O100O1000000O1000000O100000000O1000000000000O100000000000000O1000O100000000O10O10O10000O10O10O100O100O010O1000O010000O01000000O10O1000O1000000O1000O1000O10000000000O10O1000000000O1000000O100000000O101O000O1000000O100000000O1000000O10000O100O101N100O1O10000O101N10000O1000001N100000000O2O00001N101O0O2O001N101N100O2N1O1O2O0O1O101N10000O2O0O1000001N10000O2O000O2O0O2O0O2N101N2N1cJ\\\\Gh3e8VL_Gf3c8XLaGe3_8ZLeGb3]8\\\\LfGb3Z8]LSHV3o7hLWHS3i7lLYHR3h7nLYHQ3h7mLZHR3f7mL\\\\HR3d7nL]HP3e7nL]HQ3c7nL_HQ3a7oL_HQ3b7mL`HQ3a7nL`HR3a7lLaHS3_7lLbHS3`7kLbHT3^7kLcHT3_7kLbHT3^7kLdHS3^7kLcHU3^7iLdHU3]7jLdHV3_7fLcHX3`7fL`HY3d7cL^H[3e7bL\\\\H^3e7`L]H^3f7_L[H`3o9O0O2O0O2O000O2O1O001N2O001O1O1N101O001N10001O0O2O000O2O000O2O000O2O0O101N100O2O1N101N2O0O3N2M2O2M3M3N2M3N1N3M2OlMCPE;i:6oDHl:c0PE[On:j0QEUOm:n0SEPOl:T1SEkNk:Y1TEeNk:`1SE_Nj:l1oDRNo:\\\\3N101O0010O01O0O1O_OPE_LP;c3REYLm:j3WEPLi:R4ZEiKf:X4;2N1O2M4M2M4M2M3N2N1O2N2N2N3M3L3N3M2N2N2O1N2N101N1O2N1O2N101N101N2O1N2O1O001N100O1O2N1O1N200O1O101N2O0O2O2M2O1N2N2N100O2N10000O2O00001O1O1N2O001O1O00001O0000000000000000000O100O100O100O1O1O1000000O10000000000O10000000000000000000000000000000000001O00000000000
000001O0000001O00001N101O0O100N3N1O1O1O1O1O2O0O1O1O1O2N1O1O100O2O0O2O0O2N1O2N2N2N101O001O000O2O000000010O0000010O00010O001N3N1O1N2O1O1O1N2O1N2O3L4L4L2M4M2N1O2O1N3L3M4J5K5M3N1O2N2N2N2N2M3M2N3N2N1O2N1O2M3L5J6K5M2M3N3M3[N\\\\C3k<]OoCG\\\\>ElR8\"}}, {\"image_id\": 32, \"category_id\": 1, \"bbox\": [742.0, 67.0, 273.0, 741.0], \"score\": 0.9999998807907104, \"association_id\": 2, \"light\": [-1.978847622871399, -2.1311981678009033, 1.8867312669754028, 2.0157694816589355], \"segmentation\": {\"size\": [858, 1024], \"counts\": \"lo]c0d1Si0>C8H7I6K5K7I8G;E9G6I5K5L4L3N2N2N2N2N2N2a^OeK\\\\<]4_CPLX<Q4eCSLX<P4dCTLY<m3fCULW<n3fCULW<m3hCULT<o3iCSLS<Q4kCRLo;d1_@RO^3]OP<T1WA[Oe2EQ<h0hA_OT2LR<b0RB@h11T<=[B_O_17T<8bB]O[1<R<3kB\\\\OT1b0P<MVC[Ok0i0n;HbCWOa0R1k;FlCPO;[1h;DSDjN7c1c;D\\\\DcN3i1_;DcD]N0P2Z;EjDTN0W2T;FoDoMN\\\\2Q;GREkML`2Q;FfH<X7FeH=Z7CcHa0\\\\7@_Hf0_7[O\\\\Hj0c7WOZHl0f7TOXHn0g7ROXHP1g7ROVHQ1h7POVHS1g7oN`E[NRNl2[<kNZEQObM_2P=bNTE^4h:eKoDf4k:]KQEj4i:YKTEm4g:VKVEn4f:TKXEP5e:SKWEP5g:SKTEP5k:TKoDP5o:UKhDQ5V;Z501N1O2O0O2aHPBR2Q>gMWBV2k=cM]B[2d=\\\\MfBa2\\\\=XMkBf2W=VMmBi2U=SMnBk2U=QMnBm2V=nLmBP3V=mLkBQ3Y=kLjBR3Y=kLiBS3Z=jLhBS3\\\\=iLgBU3\\\\=gLgBW3\\\\=cLiB[3Z=aLiB^3Y=^LjBa3W=]LkBb3W=\\\\LjBc3W=[LkBe3V=XLlBh3U=ULmBk3S=QLQCo3P=lKTCS4n<gKWCY4i<dKZC\\\\4g<`K\\\\C`4e<]K]Cb4f<ZK\\\\Cf4f<VK\\\\Ci4h<RKYCn4S=cJQC\\\\5Wa0O1N2O1N1O2M2N1N2M2M1NN5VOhJ[\\\\OT5\\\\c0kJR\\\\O1b0R5Yc0QKV\\\\OLc0P5Vc0WKX\\\\OHb0o4Uc0\\\\KY\\\\OCi0i4mb0hKa]OR4]b0SLd]Oh3[b0\\\\Lg]O^3Yb0gLg]OV3Zb0lLg]OQ3Xb0RMi]Ok2Xb0VMh]Oh2Xb0YMj]Oe2Vb0\\\\Mj]Od2Vb0\\\\Mj]Od2Vb0\\\\Mj]Od2Ub0]Mk]Oc2Ub0]Mj]Od2Vb0\\\\Mj]Oc2Wb0]Mi]Oc2Vb0^Mj]Ob2Vb0]Mk]Oc2Ub0]Mk]Oc2Tb0^Ml]Ob2Rb0`Mn]O`2Pb0aMQ^O_2la0dMS^O]2ja0fMV^OZ2fa0jMZ^OV2ca0mM\\\\^OT2ba0nM^^OR2`a0PN`^OP2^a0RNb^On1]a0SNb^On1\\\\a0TNd^Ol1Za0UNg^Ok1Va0XNj^Oh1Sa0[Nl^Of1Pa0^NP_Ob1l`0bNT_O^1i`0eNV_O\\\\1h`0fNX_OZ1f`0hNZ_OX1d`0jN\\\\_OV1a`0mN^_OT1^`0POb_OP1X`0VOh_Oj0R`0[Oo_Oe0m?_OR@b0k?AU@?h?CY@=e?E[@;c?G\\\\@:b?H^@8_?Ka@5Z?0f@0W?3i@MS?7l@JR?7o@Io>9QAHl>:TAFj>;VAGg>;YAEd>>\\\\AD_>?aABW>d0jA]
Oo=i0QBYO4aLl:[4oDWOCQMV;m3WETOiNjMi;U3_Em1\\\\:VNdEk1X:XNhEj1T:WNlEk1P:XNPFj1j9ZNVFh1b9^N^Fe1Y9`NhFc1P9bNoFb1k8aNUGa1g8aNYGa1c8aN]G`1`8aNaG`1S3fGb0l6[L_1j2UHa0^6eL^1_2jH<l5TM[1W2]I2`5gMT1R2gICa5\\\\Nh0o1nIVOd5jN`0m1SJPOa5SO<k1XJlN_5YO:i1\\\\JhN\\\\5_O8j1_J`N\\\\5F5k1dJVNZ5N3n1hJiMY59OP2fKaLa4_1IR2`4nM`KU2]4jMdKY2Y4gMgK[2W4dMjK^2T4bMlK`2R4`MnKb2P4]MQLf2l3ZMULg2i3XMXLl2d3TM\\\\LP3a3oL_LU3]3jLdLZ3W3gLiL[3U3dLlL_3Q3aLPM_3o2`LTM^3l2bLUM^3k2`LXM_3g2aL[M]3e2bL_M\\\\3`2cLeMZ3Z2eLiMZ3V2eLnMX3R2hLoMX3Q2fLRNX3n1fLWNX3h1fL]NW3c1hLbNU3^1jLgNQ3Y1oLnNk2R1SMTOi2k0VMZOf2g0XM]Of2b0ZMAc2?\\\\ME`2<`MI[28^M\\\\DDf;d2OgM]DCQ<Y2DRNn0a1SO_NV1W1kNhN^1n0dNROb1g0_NYOg1`0[N@i17[NIk1LZN4V>00001O3M2N00000001O00001O0N3IXj6\"}}, {\"image_id\": 32, \"category_id\": 1, \"bbox\": [26.0, 134.0, 288.0, 601.0], \"score\": 0.9999999403953552, \"association_id\": 3, \"light\": [-1.1733837127685547, -2.9392354488372803, 0.9803947806358337, 2.795403480529785], \"segmentation\": {\"size\": [858, 1024], \"counts\": \"[Rf0=[j07I5J5K3M4L3N3M3M2N3N1N3M2O1M3N2M3N2O1N2O0010O0000O10O1OO4L3N2M4M2fZOnMka0S2P^O[Nea0h1U^O`Nga0e1Q^OcNia0e1k]OfNma0e1c]OlNQb0l5RO=C6K3M4M4L4K5L2WCcFP8`9lGdFR8^9jGfFT8\\\\9iGeFW8]9eGeF[8_9`GcF_8a9[GaFe8a9WGaFWN\\\\Of8U:nHbF[N[Of8U:jHdF^NYOg8T:hHeFaNWOf8V:eHfFdNUOg8W:aHfFhNSOg8Y:]HfFkNROh8Z:YHfFoNPOh8^:RHeFUOnNi8_:nGeFYOlNi8a:hGgF^OiNj8c:`GiFFdNi8g:VGmF1\\\\Ni8k:\\\\F_Gk0fMi8W>VGjAj8V>VGiAk8V>VGjAi8V>XGjAh8U>YGjAg8U>[GkAd8S>^GnAb8m=cGSB\\\\8k=gGTBY8k=iGUBU8j=nGVBP8h=SHXBm7e=WH[Bg7d=\\\\H[Bc7d=`H\\\\B_7c=bH^B]7b=dH]B[7c=gH]BV7d=lH\\\\Be5j0kGk<b2[Ba5n>dJRA[5l>gJTAX5l>jJTAU5k>lJUAU5j>lJVAT5g>oJYAQ5d>QK]Ao4^>VKaAk4\\\\>WKeAi4Y>YKgAg4W>ZKiAg4U>[KkAe4S>]KmAc4P>_KQBa4j=jJRB^M4h7e=nJYBYM1j7c=PK^BSM0m7`=QKaBRMOm7^=RKeBPMLo7\\\\=TKhBmLLo7V=YKPCfLKQ8i<eK\\\\CZLJR8Z<TLlCjKJR8R<[LVDbKHS8n;_LZD]KIT8k;aL\\\\D[KIT8h;cLaDXKGU8e;fLdDTKHV8^;lLjDnJGW8[;oLnDjJGW8X;QMSEfJFY8U;SMUEdJFY8T;TMVEcJFY8S;UMWEaJF[8R;UMYE^JF]8P;VM[E[JF_8n:WM]EYJE`8m:XMkEi2T:XMlEh2R:ZMnEf2o9]MQFc2k9aMUF_2g9eMYF[2b9iM^FX2]9mMcFS2Z9PNfFP2X9R
NhFn1U9TNkFm1Q9WNoFi1m8[NSGe1g8aNYG_1d8dN[G^1a8eN_G[1_8gNaGZ1\\\\8hNdGZ1W8hNiGZ1R8jNnGX1l7lNTHY1LTGa6g7bI_1]OSGl6a7gIc1oNTGV7\\\\7kIY2R6jMnIW2k5oMTJR2e5UN[Jk1^5[NcJe1j2jEYNf8mN`1g2QFTNc8TO^1c2WFSN]8ZO]1^2_FQNV8@\\\\1Z2XG^M^78\\\\1U2gGSMP7h0Z1P2^1oMcNm1a1SN`Nh1d1WN^Ne1e1[N[Nc1g1]NZN_1i1aNWN]1k1bNWN[1j1fNVNX1l1gNVNV1l1jNUNT1l1lNXNo0i1POaNe0a1[OjN8X1HVO_OU1a0b<O010O1000O10O10000000000O10000O10O010000O100O1000000O1000O010000O10O10O10000O10O10O10000O10O1000O10O0100O010O10000O01000O01000O10O010000O010O100O10O1000000000000000000000001O0000001O0000001O000000001O01O0O1000001O0O2O000O2N1O6JoWbb0\"}}, {\"image_id\": 32, \"category_id\": 1, \"bbox\": [358.0, 130.0, 288.0, 657.0], \"score\": 0.9999997615814209, \"association_id\": 1, \"light\": [-1.2136166095733643, -2.895642042160034, 1.1220451593399048, 2.7627434730529785], \"segmentation\": {\"size\": [858, 1024], \"counts\": \"[Y\\\\9R1Ui0X1QOc0B9I6L101N1O2N101N1O2ON2N101N1O2N1O1O2N1O1O1O2N1N2O1N2O02M3N3M2N3M2N3M2N5L5I9F>oZOQM^a0e3S]OhMYb0g5D:F9H6N2N2N4L7IX2gM7J4K4K4L5K6J8G:G5K4L3M3N1O3M3M4^EmAU8e>eFQBY9j?N2N1N1O1O2O0O1O1O100O1O1N2N2N2dM__O\\\\Jd`0b5f_OmIc`0P6e_ObId`0]6m1O1000000O10aJTJhB3n0i5T<gJTB3b1V5V<UKeA3P2h4Y<YMbCh2a<YM[Cg2h<[MRCf2Q=\\\\MjBd2Y=^MaBc2a=`MYBULPOm5k>RNmAQLYOm5k>VNfAlKAn5j>ZN_AfKJP6i>\\\\NWAbK5Q6e>`NPA`K<P6e>hN\\\\AX1e>gNZAZ1g>dNYA]1h>bNWA_1j>VNk@aK`0Z6f>iMhAX2Y>fMgA[2Z>cMfA^2Z>bMfA^2[>oLg@RMm0P6]>lLg@SMl0R6^>jLf@TMk0S6_>iLf@SMj0V6a>fLe@SMj0X6c>cLc@UMi0Y6b>dLe@RMj0Z6a>dLf@QMi0[6`>dLh@PMh0]6_>dLi@oLh0]6^>eLk@mLf0_6^>eLl@kLf0a6^>cLn@kLd0b6]>dLo@iLd0d6]>cLPAgLd0f6[>dLQAfLc0g6\\\\>bLSAeLb0i6Z>cLVAbL?l6[>bLQB_3n=bLQB_3o=aLPB`3o=`LRB`3n=`LQBa3m=aLSB`3j=bLUB_3h=cLYB]3b=hL^BX3]=lLcBU3Z=nLfBR3W=QMiBo2U=SMkBm2T=TMlBl2Q=WMoBj2m<YMSCg2c<bM^C^2R<RNnCo1`0lHe9Y5kEk1;UIc9S5RFh17\\\\Ib9n4WFg12aIc9i4\\\\Fg1LfId9e4`Fg1ElIf9_4eFh1VOXJo9S4kFk3h8^LXGe3Q8nLPHT3g7SMYHo2`7VM`Hl2Z7XMfHh2W7[MiHf2Q7_MoHb2j6dMVI\\\\2b6lM^IU2U6WNkIi1h5dNXJ]1`5jN_JX1\\\\5lNdJT1Y5oNgJR1W5oNhJR1W5oNiJR1U5oNkJR1S5oNlJR1S5nNnJS1o4oNQKR1m4oNSKR1k4oNTKS1k4mN
UKT1i4mNWKT1h4kNXKW1g4iNYKX1f4hNZKY1e4gN[KZ1d4eN\\\\K]1b4dN^K]1b4bN^K`1`4_NaKc1^4\\\\NbKe1^4YNbKj1]4UNcKm1\\\\4RNdKP2\\\\4nMcKT2^4iMcKY2_4cM`Ka2_2PEPN_8Ad2[2eE^Mf77j2U2nE^MP7>V3o1YFVM`6j0\\\\3k1VFZM^6k0a3e1RFaM[6j0h3`1nEgMY6i0P4W1iEQNV6g0X4P1dE[NQ6f0c4d0]EjNn5b0j4<ZEUOj5>R55VEAe5:Y5NTEM_54b5EUE<T5Ok5WOZEn0h4KT99iFGW9;gFDZ9<fFD[9;eFE\\\\9:cFF_99aFG`97aFHa97_FIb96]FKe93[FLh92XFNj9NXF2j9KWF4k9IWF7Vb0O1O100O100O10000O100O100O1O1O100O100O1O10YVO]OWi0c0gVO_OYi0a0fVOAYi0?fVOCYi0=gVODXi0<gVOEYi0<fVOD[i0;dVOF\\\\i0:eVOE\\\\i0:dVOE]i0;dVOC^i0<cVOB_i0=a0N1O0010O010000O01O01O010O00010O00001O000O2O0O2N101N2N\\\\Pl9\"}}, {\"image_id\": 32, \"category_id\": 1, \"bbox\": [557.0, 167.0, 67.0, 317.0], \"score\": 0.7588441967964172, \"association_id\": 4, \"light\": [-2.0231690406799316, -2.2570600509643555, 1.8480429649353027, 2.130810022354126], \"segmentation\": {\"size\": [858, 1024], \"counts\": \"[Sc>3dj06WVOLch0:VWOMch0d0lVODjh0]1D:K5L4M6[IcMeD]2X;hMfDY2V;jMgDZ2U;hMgD]2W;dMhD]2V;eMoDW2o:kMPET2P;mMQER2n:nMm@AT1DjMj2U`0SNh@Fn0GRN`2X`0VNa@HS1CRNa2Z`0UN^@JT1ARNb2\\\\`0SN\\\\@NP1DPNa2c`0nMZ@11mNCV4R`0nLV@20QOCQ4W`0jLU@71QO]OQ4]`0gL`_Ol0f0^NUOU4e`0`L`_On0e0Y3k?jKe_Oh0=`3n?hKi_Oe05f3R`0fKc_Og0?a3n?hK]_OEG;R1V4j?jK]_OCJ:Q1@mNU4k`0]Lc_O_OE;R1^4f?gKh_O[OB;S1d4c?fKh_O[OC5X1i4]?hKf_OZOF0]1m4W?iKf_OZOFN_1o4U?gKh_O^OALa1Q5W?bKi_O1P1_4W?`Kg_O3S1\\\\4W?aKb_O7X1V4W?bK]_O<^1P4Z?ZK\\\\_Of0Z1Q4_`0oK`_OR4^`0oKd_Oc2eNnLea0a0j_OT2lNYMUa0g0S@U1XNaMg0a0o`0h0U@m0FZNU`0i0c@9_OmNn?j0aAWOfNNh?k0cDUO];j0eDUO[;l0eDSO\\\\;m0dDQO];P1eDmN\\\\;S1dDlN\\\\;U1eDiN`;S1`DkNj;n0TDROn;m0RDROP<k0RDTOo;j0SDTOo;j0SDTOo;k0PDWOQ<g0oCYOT<a0QD[OV<<jT_:\"}}, {\"image_id\": 33, \"category_id\": 1, \"bbox\": [706.0, 141.0, 206.0, 511.0], \"score\": 0.9999929666519165, \"association_id\": 2, \"light\": [-2.463315486907959, -2.4288151264190674, 2.3168017864227295, 2.2404658794403076], \"segmentation\": {\"size\": [683, 1024], \"counts\": 
\"_Wg>9md08F:G8H9C<D=G8L3M3M4L3L4K5S@ZMo<k2cBRNj<S2QCXNe<k1WC^Nb<d1[CaNg0oN^8e2fFaNf0oN`8g2bFbNc0nNh8j2YFXO5mM]9o7nEPHn9i9L4M2O2OO1O001O00XFTES9k:fFfEQ9];L3N1N2N2N1SNdCjJ^<R5kCgJW<T5QDgJP<U5VDhJk;U5WDkJj;R5YDnJg;P5[DoJf;o4[DQKf;m4\\\\DRKe;l4]DTKc;j4^DVKc;g4aDWK`;e4hDUKZ;i4lDPKV;o4PEkJR;T5REgJo:Y5VE`Jm:_5_210O01N2N2J6IXJm@f5l>`JUA`5h>aJYA`5e>\\\\JaAd5]>WJjAi5l>000ROXJXBi5a=[JaBe5X=^JlBc5\\\\>2O1O1N2YNoIRDS6_=4M3QNgIkDZ6P;iIQEX6l:iIUEX6i:hIWEZ6g:cI]E^6g<1O1O1O1O1O1O1O100O1WMZIlFg6P9]IoFd6o8^IPGc6n8`IPGa6n8aIRG_6m8cIQG^6m8dIRG]6l8eISG\\\\6j8hITGY6h8kIXGU6e8oIYGR6e8PJ[GP6c8SJZGo5e8SJYGn5f8SJYGn5e8UJYGl5d8WJ[Gj5a8ZJ_Gf5^8]J`Ge5^8]JaGd5^8]JaGd5^8]JaGd5^8]JaGd5]8^JbGc5]8^JbGc5i0gHj4j1dIQNE`7j1oHc4c1iIY6b1[H]4_1PJW6a1^H[4]1TJV6_1_H[4\\\\1VJU6]1bHZ4\\\\1XJR6\\\\1eHX4]1[Jo5Y1gHX4^1_Jk5T1mHW4^1dJf5o0RIW4]1jJa5k0VIW2nNdM^2ZO_5f0ZIS2UOgMT2@]5c0]IP2\\\\OgMm1E[5b0]In1CdMh1LX5`0_In1g3bMj2?aIm1f3dMj2<bIn1f3gMg2:dIn1g3gMf29dIo1h3hMc28gIo1g3jMa25iIP2h3jM^29iIl1j3kM\\\\2:kIj1h3lM]2=iIf1k3mMZ2a0kI_1l3PNX2d0lIY1m3SNV2f0mIU1n3UNT2g0oIS1l3WNT2g0PJQ1m3WNT2h0PJQ1j3XNU2g0RJR1h3VNW2h0RJS1e3VN[2d0RJV1a3VN^2b0UJW1[3VNb2b0UJW1X3WNd2`0VJX1U3XNh2=TJ[1T3WNm27SJb1P3UNV3MoIn1j2UN_3@nIZ2b2UNo6j1mHVNW7i1gHWN[7h1dHXN^7g1aHYN_7h1aHWN`7h1_HXNd7g1[HXNk7e1SH[NW8^1fGaNa8Z1^GfNd8Y1[GgNg8X1XGgNi8Y1VGhNk8V1VGiNm8U1SGjNP9S1RGlNP9R1PGmNS9P1nFPOS9o0lFQOU9n0mFoNU9Q1lFlNW9R1jFlNY9T1a5O51N8HM1OO00O1000O2O1O00O001N2N2N2N2N1O2N2M3M7I8GmXZ2\"}}, {\"image_id\": 33, \"category_id\": 1, \"bbox\": [456.0, 202.0, 244.0, 475.0], \"score\": 0.8059561252593994, \"association_id\": 1, \"light\": [-2.054305076599121, -2.052875280380249, 1.885396957397461, 1.8909581899642944], \"segmentation\": {\"size\": [683, 1024], \"counts\": 
\"eb`97md0>B=C9I7XOdNY]Oa1ab0h0I6G9D=VOi0H8L5M2N2O0O1N2N2MZNP@XNn?h1]@nMd?P2c@iM^?W2g@cM[?\\\\2i@_MY?`2l@ZMV?c2QAVMR?c25fMa@S2`?_NT@]1l?lNn_OS1S`0SOf_On0[`0UOa_Ok0a`0XO[_Oi0d`0[OW_Of0j`0]OQ_Oe0o`0k1O100O1O1O101O1O1O1O1O100oAjKl:W4kD[Ll:e3REbLh:_3WEdLg:]3VEgLh:Y3UEmLh:T3SETMi:l2jDhMo:Y2dDZNU;g1dDeNV;[1fDlNY2]Lh5h4jGQOV2`Lm5`4iGUOT2aLQ6\\\\4eGYOR2dLV6U4aG^OS2eLX6c8_IiGZ6h<lN=C2N2N1O1O00000000001O0000000000000000O1000000O1O1O1N2O1K5nMhImD\\\\6P;lIgDX6U;QJaDT6[;P2J6F:oNQ1E;K5M3M3K5H8J6N2O1N2O100O11O010O1O1O2O0O2N110O11O4L4L4L3N2M1O2N3M4N21Kg0YO8G5K4L2M3M3M5K6K5J3M3M3M2O1N4L5K;E?B6ImFWGi5d8oIfGR6X8mIjGR6W8lIlGR6U8lInGQ6T8mIPHP6Q8nIRHP6o7nITHo5m7PJUHo5k7QJVHm5k7RJVHn5j7RJVHm5k7SJVHk5l7TJVHi5l7UJ^HYO`N[4U9ZLcKZ2`4dMoKl1S4RNmK]NdJk2a9gNjK^NhJh2_9iNiKaNhJe2`9hNhKdNiJa2a9jNfKeNlJ]2a9mNbKfNPKZ2a9mN_KjN[Kl1^9SOWKQOkL;W8<nJZOUMNR8d0jJ]O]MEl7l0gJ_OdM\\\\Oi7S1dJAa8>_GBc8=]GBe8<[GDg8;YGEh8:WGFl87TGIo85PGKR94mFLT94lFKU95kFKU95kFJV96jFIW97oFBR9>nFAT9>kFBV9>jFAX9=hFCZ9<fFD\\\\99dFG_97`FIa96`FIa97^FIc97\\\\FIe97[FHe99ZFHf99XFGh9;VFEk9:VFEj9<UFEk9:UFFl9;RFEo9;PFEQ::PFFT:5lEKW:3hEMZ:1fEO]:MeE3]:IdE7h?01O00001O01O0001O000000000O100000000O1O1000aag6\"}}, {\"image_id\": 34, \"category_id\": 1, \"bbox\": [4.0, 172.0, 942.0, 266.0], \"score\": 0.9986140727996826, \"association_id\": 1, \"light\": [-2.606602191925049, -1.6277655363082886, 2.5019538402557373, 1.5252846479415894], \"segmentation\": {\"size\": [683, 1024], \"counts\": 
\"fR33Ue05M1MHS[O9ld0300hMMW_O4h`00U_O0k`02S_ONl`06n^O]O`N=ab09m^OJSa06m^OKRa05n^OKQa07n^OIRa07n^OIQa08o^OHQa09n^OGQa0:n^OGRa09n^OGQa0;n^OERa0;n^OEQa0<o^ODQa0=n^OCQa0>o^OBPa0?P_OAo`0a0P_O_OPa0a0m^OmNkNb0Wb0b0n^OlNkNb0Vb0d0n^OjNlNb0Ub0e0o^OjNkNa0Ub0f0P_O]Oo`0e0P_O[Oo`0f0Q_OZOn`0h0Q_OXOn`0i0R_OWOm`0j0S_OVOl`0l0S_OTOl`0m0T_OSOl`0m0T_OSOk`0o0T_OQOk`0P1U_OPOk`0P1U_OPOj`0Q1V_OoNj`0R1U_OnNk`0R1U_OnNj`0S1V_OmNj`0S1V_OmNi`0U1V_OkNj`0U1V_OkNj`0U1V_OkNi`0V1W_OjNi`0V1W_OjNi`0W1V_OiNi`0X1W_OhNi`0X1W_OhNh`0Y1X_OgNh`0Y1X_OfNi`0[1V_OeNi`0\\\\1X_OcNh`0]1X_OcNg`0^1Y_ObNf`0_1Z_OaNf`0`1Y_O`Nf`0a1Z_O_Ne`0b1[_O^Ne`0b1[_O^Nd`0d1[_O\\\\Nd`0e1\\\\_O[Nc`0f1]_OZNb`0h1]_OXNb`0i1^_OWNa`0j1__OVNa`0k1^_OUNa`0l1__OTN``0m1`_OSN``0n1__ORN``0o1`_OQN``0o1`_OQN``0P2__OPN``0Q2`_OoM``0Q2`_OoM_`0S2`_OmM``0S2`_OmM``0S2`_OmM_`0U2`_OkM``0U2`_OkM``0U2`_OkM``0U2`_OkM_`0V2b_OiM^`0X2a_OhM_`0X2a_OhM^`0Y2b_OgM^`0Y2b_OgM^`0Y2b_OfM^`0[2b_OeM^`0[2b_OeM^`0\\\\2a_OdM^`0]2b_OcM^`0]2b_OcM]`0^2c_ObM]`0^2c_ObM\\\\`0_2d_OaM\\\\`0`2c_O`M\\\\`0a2d_O_M\\\\`0a2d_O_M[`0b2e_O^M[`0b2e_O^MZ`0d2e_O\\\\M[`0d2e_O\\\\MZ`0e2f_O[MZ`0e2g_OZMX`0g2h_OYMX`0h2g_OXMX`0i2h_OWMX`0i2h_OWMX`0i2i_OVMV`0l2i_OTMW`0l2i_OTMW`0l2i_OTMW`0l2i_OTMV`0n2j_OQMV`0o2j_OQMV`0o2j_OQMV`0o2j_OQMU`0P3k_OPMU`0Q3j_OoLV`0Q3k_OnLU`0R3k_OnLU`0R3k_OnLT`0S3l_OmLT`0S3l_OmLT`0S3l_OmLT`0S3l_OmLS`0T3m_OlLS`0T3m_OlLS`0T3m_OlLR`0U3n_OkLR`0U3n_OkLQ`0V3o_OjLQ`0V3o_OjLP`0W3P@iLP`0W3P@iLo?X3Q@hLo?X3Q@hLn?Y3R@gLn?Y3R@gLm?Z3S@fLm?[3R@eLm?\\\\3S@dLm?\\\\3S@dLl?]3T@cLl?]3T@cLl?]3T@cLk?^3U@bLk?^3U@bLk?^3U@bLk?^3U@bLj?_3V@aLj?_3V@aLj?_3V@aLj?_3V@aLi?`3W@`Li?`3W@`Li?`3W@`Li?`3W@`Li?`3V@aLj?_3V@aLi?_3X@aLh?_3X@aLh?_3W@bLi?^3W@bLi?^3V@cLi?^3W@bLi?^3V@cLj?]3V@cLj?]3V@cLi?]3W@dLi?\\\\3W@dLi?\\\\3W@dLi?\\\\3V@eLi?\\\\3V@eLj?[3U@fLk?Z3U@fLk?Z3U@fLk?Y3V@gLi?Z3W@fLi?Z3W@fLi?Z3W@fLi?Z3V@gLj?Y3V@gLj?Y3V@gLj?X3W@hLh?Y3X@gLh?Y3X@gLh?Y3X@gLh?Y3X@gLh?Y3X@gLh?Y3X@gLh?Y3X@gLh?Y3X@gLh?Y3X@gLh?Y3X@gLh?Y3X@fLi?Z3W@fLi?Z3W@fLi?Z3V@gLj?Y3V@gLj?Y3V@gLj?Y3V@gLj?Y3V@gLj?Y3V@gLj?Y3V@gLj?Y3U@hLk?X3U@hLk?X3U@hLk?X3U
@hLk?X3U@hLk?X3U@hLk?X3U@hLk?X3U@hLk?X3U@hLk?X3U@hLk?X3U@hLk?X3U@gLl?Y3T@gLo<ZOaEo3`MgLo<[O`En3aMgLo<\\\\O_Em3bMgLo<]O^El3cMgLo<^O]Ek3dMgLn<_O^Ej3dMgLn<@]Ei3eMgLn<A\\\\Eh3fMgLn<A\\\\Eh3fMgLn<A\\\\Eh3fMgLn<A\\\\Eg3gMhLm<A\\\\Eg3gMhLm<@]Eh3fMhLm<@]Eh3fMhLn<^O]Ej3eMhLn<^O^Ei3dMiLn<]O_Ej3cMiLn<\\\\O`Ek3bMiLn<\\\\O`Ek3bMiLn<[OaEl3aMiLl?W3T@iLl?W3T@iLl?W3T@iLl?W3T@iLl?W3T@iLl?W3T@iLl?W3T@iLl?W3T@iLl?W3S@jLm?V3S@jLm?V3S@jLm?V3S@jLm?V3S@jLm?V3S@jLm?V3S@jLm?V3S@jLm?V3S@jLm?V3S@jLm?V3S@jLm?V3S@jLm?V3S@jLm?V3S@jLm?V3S@jLm?V3S@iLn?W3R@iLn?W3R@iLn?W3R@iLn?W3R@iLn?W3R@iLn?W3R@iLn?W3R@iLn?W3R@iLn?W3R@iLn?W3R@iLn?W3R@iLn?W3R@iLn?W3R@iLn?W3R@iLn?W3R@iLn?W3Q@jLo?V3Q@jLo?V3Q@jLo?V3Q@jLo?V3Q@kLn?U3R@kLn?U3R@kLn?U3R@kLn?U3R@kLn?U3R@kLn?U3Q@lLo?T3Q@lLo?T3Q@lLo?T3Q@lLo?T3P@mLP`0S3P@nLo?R3Q@nLo?R3Q@nLo?R3Q@nLo?S3o_OnLQ`0R3o_OnLQ`0R3o_OnLQ`0R3o_OnLQ`0R3o_OnLQ`0R3o_OnLQ`0R3o_OnLQ`0R3o_OnLQ`0R3o_OnLR`0Q3n_OoLR`0Q3n_OoLR`0Q3m_OPMS`0P3m_OPMS`0P3m_OPMS`0P3m_OPMS`0P3m_OQMR`0o2n_OQMR`0P3m_OPMS`0P3m_OPMS`0P3m_OPMS`0P3m_OPMS`0P3m_OPMS`0P3m_OPMS`0P3m_OPMS`0P3m_OPMS`0P3m_OPMS`0P3m_OPMS`0P3m_OPMS`0P3m_OPMS`0P3m_OPMS`0Q3l_OPMT`0o2l_OQMT`0o2l_OQMT`0o2l_OQMT`0o2l_OQMT`0o2l_OQMT`0o2l_OQMT`0o2l_OQMT`0o2l_OQMT`0o2l_OQMT`0o2l_OQMT`0P3k_OPMU`0P3k_OPMV`0o2j_OQMV`0o2j_OQMV`0o2j_OQMV`0o2j_OQMV`0o2k_OPMV`0P3i_OPMW`0P3i_OPMW`0P3i_OPMW`0P3i_OPMW`0P3i_OPMW`0P3i_OPMW`0P3i_OPMX`0P3g_OPMY`0P3g_OPMY`0P3g_OPMY`0P3g_OPMY`0P3h_OoLX`0Q3h_OoLX`0R3g_OnLZ`0Q3f_OoLZ`0Q3f_OoLZ`0Q3f_OoLZ`0Q3f_OoLZ`0Q3f_OoLZ`0Q3f_OoLZ`0R3e_OnL[`0R3e_OnL[`0R3e_OnL[`0R3e_OnL\\\\`0Q3d_OoL\\\\`0Q3d_OoL\\\\`0R3d_OmL\\\\`0S3d_OmL\\\\`0S3d_OmL\\\\`0S3d_OmL\\\\`0S3d_OmL\\\\`0R4O00000000SOe_OjL[`0S40TOe_OhLZ`0Y3f_OgLZ`0U40UOf_OeLZ`0[3f_OeLZ`0V40VOf_OdLY`0\\\\3g_OdLY`0\\\\3g_OdLY`0\\\\3g_OdLY`0W4000000000000000000000000O10001O000000000000000000001O0O10000000000000000000000O100000001O0000000O100000000000000000000O10001O0000000000O1000O1000000000000000O10000000O01000000O100_On_OiKR`0W4n_OiKR`0W4n_OhKS`0W4n_OiKR`0W4n_OiKR`0W4o_OhKQ`0X4o_OhKQ`0X4o_OgKR`0Y4n_OgKQ
`0Z4o_OfKQ`0Y4P@gKP`0Y4P@fKQ`0Z4P@eKP`0[4P@dKQ`0\\\\4o_OcKR`0\\\\4o_OdKQ`0\\\\4P@bKQ`0^4o_OaKR`0_4n_O`KS`0_4=000O10O1000000OgN^KkAb4T>bKiA^4W>dKgA\\\\4Y>gKdAY4[>iKdAV4]>jKcAV4]>kKaAV4_>jKaAV4^>lKaAT4_>mK`AR4a>nK_AR4a>oK^AQ4b>PL]AP4c>QL\\\\An3e>RL[An3e>SLYAn3g>SLXAm3h>TLWAk3j>VLUAj3k>VLUAj3k>WLTAh3l>YLTAg3l>ZLRAf3JbKh>h0^Af3IdKh>g0^Ac3KgKg>f0^Aa3LkKe>e0^A]30nKb>e0^AZ33QL_>f0\\\\AX36TL]>d0]AV38VL[>e0[AS3=XLX>e0ZAS3>YLX>d0YAR3a0ZLV>e0WAQ3d0ZLU>e0UAR3f0ZLU>e0SAQ3i0ZLT>e0RAQ3k0ZLS>h4mAXKS>g4mAZKS>e4nA[KR>d4oA\\\\KQ>c4PB]KP>c4oA^KQ>a4PB_KP>a4PB_KP>`4PBaKP>_4PBaKP>_4oAbKQ>^4oAbKQ>^4nAcKR>]4mAdKS>\\\\4lAeKT>[4lAeKT>[4kAfKU>Z4kAfKU>Y4kAhKU>X4kAhKU>X4jAiKW>V4iAjKW>V4iAjKW>V4hAkKX>U4hAkKX>U4hAkKX>U4hAkKX>U4gAlKY>T4gAlKY>T4gAlKY>T4gAlKY>T4gAlKY>T4gAlKY>T4fAmKZ>S4fAmK[>Q4fAoKZ>6o@l2g0nLZ>6o@l2g0nLZ>6o@l2g0nLZ>6PAk2f0oLZ>6PAk2f0oLZ>6PAk2e0PM[>5PAk2e0PM[>5PAk2e0PM[>4QAl2d0PM[>4QAl2d0PM[>5PAk2e0PM[>5PAk2e0oL\\\\>6o@k2e0oL\\\\>6n@l2f0nL\\\\>6n@l2f0nL]>5m@m2f0nL]>P4cAPL]>P4cAPL]>P4cAPL]>P4cAPL]>P4cAPL]>P4cAPL]>P4cAPL]>P4cAPL]>P4cAPL]>P4cAPL]>P4bAQL^>o3bAQL^>o3bAQL_>n3aARL_>n3aARL_>o3`AQL`>o3`AQL`>o3`AQL`>o3`AQL`>o3`AQL`>o3`APLa>P4_APLb>o3^AQLb>o3^AQLb>o3^AQLb>o3^AQLb>o3^AQLb>o3^AQLb>o3^AQLb>o3^AQLb>o3^AQLc>n3]ARLc>n3]ARLc>n3]ARLc>n3]ARLc>n3]AQLe>n3[ARLe>n3[ARLe>o3ZAQLf>o3ZAQLf>o3ZAQLg>n3YARLg>n3YARLg>n3YARLg>n3YARLg>n3YARLh>m3XASLh>m3WATLi>m3VASLj>m3VASLj>m3VASLj>Jj@[3<jLl>Fm@_37kLl>En@`36kLl>Eo@_35lLl>DPA`34lLl>DQA_33mLl>CRAa31lLm>CSA`30lLo>BRAb3OlLo>BSAa3NmLo>ATAb3MmLo>ATAb3MmLo>AUAa3LnLo>@VAb3KnLo>@VAb3KnLP?_OVAc3InLQ?^OWAd3HnLQ?]OYAd3FnLS?]OXAd3EoLS?\\\\OYAe3DoLQ`0P3o_OPMQ`0P3o_OPMQ`0P3o_OPMQ`0P3o_OPMQ`0P3o_OPMR`0o2n_OQMR`0o2n_OQMR`0o2n_OQMR`0o2n_OQMR`0o2n_OPMS`0P3m_OPMT`0o2l_OQMT`0o2l_OQMT`0o2l_OQMT`0o2l_OQMT`0o2l_OQMT`0o2l_OQMT`0o2m_OPMT`0o2l_OQMT`0o2l_OPMU`0P3k_OPMU`0P3k_OPMU`0P3k_OPMU`0P3k_OPMU`0P3k_OPMU`0P3k_OPMV`0o2j_OQMV`0o2j_OQMV`0o2j_OQMV`0o2j_OPMW`0P3i_OPMW`0P3i_OPMW`0o2j_OQMV`0o2k_OPMU`0P3k_OPMU`0P3k_OPMU`0o2l_OQMT`0o2l_OQMT`0o2l_OQMT`0o2m_OPMS`0o2n_OQMR`0o2n_OPMS`0P3m
_OPMS`0P3m_OPMS`0o2o_OoLR`0Q3n_OoLR`0Q3o_OnLQ`0R3o_OnLQ`0Q3Q@mLP`0S3P@mLP`0S3P@lLQ`0T3P@jLQ`0V3o_OjLQ`0U3P@kLP`0U3S@gLn?Y3R@gLn?Y3S@fLm?Z3S@fLm?Z3R1O10000000000O100000000O10O1000O10000O1000O0100O10000O01000O1000O010000000O0100000O010000000O10O1000O10O10O1000O0100O010O010O010O0O2N1N3N1O2N2O0O2O001N2J5N3O10O0100O01000O10O10O10O10O10O01000O0100O010O0010O010O010O10O01000O01000O10O100000O10O1000O10O10O1000O0100O0010O010O010O0010O010O01O10O10O010O01O10O01O10O0100001N10000O2O0O10001N100O10001N100O101N100O1O2O0UNY^O1ha0OY^O0ga0O\\\\^ONfa00\\\\^OOda00^^ONca02^^OMba02`^OLba03_^OcNJm0ga0?l^O@Ua0`0l^O_OUa0?l^OATa0?m^O_OTa0`0m^O@Ta0?l^OAUa0=m^OAUa0>k^OBVa0<l^OCVa09l^OFVa04P_OKQa01S_OMo`01R_OOo`0NT_O0o`0MS_O2n`0LU_ONQa0NW_OHSa01Tlc1\"}}, {\"image_id\": 35, \"category_id\": 1, \"bbox\": [17.0, 63.0, 372.0, 166.0], \"score\": 0.9999812245368958, \"association_id\": 2, \"light\": [-2.207447052001953, -1.2164630889892578, 2.1014513969421387, 1.1231026649475098], \"segmentation\": {\"size\": [900, 928], \"counts\": 
\"^P?b03BUk0k0K5K5L4K3N110O00001O001O001O1O00010O000010O100O10O100O100O100O100O010O000100O10XWO[NPg0d1oXO^NQg0b1eXO[NkN6`h0^1dXOjN]g0V1aXOlN_g0T1_XOnN`g0T1^XOmNbg0S1\\\\XOoNcg0S1ZXOPOJXOPg0i1UYOPOJWORg0i1RYOROJWOSg0g1RYOTOJVOSg0g1QYOUOJUOUg0f1PYOVOKUOUg0e1oXOWOJUOWg0d1oXOWOJUOWg0d1oXOWOJUOXg0d1mXOXOIVOYg0b1mXOYOJUOZg0b1kXOYOKUO[g0b1iXOZOJUO]g0a1iXOZOJUO^g0a1gXOZOKUO^g0b1eXOZOLUO_g0a1eXOZOLUO`g0`1dXO\\\\OKTOag0a1cXO[OLTOag0a1cXO[OLTObg0`1bXO\\\\OLTObg0`1bXO\\\\OLTObg0`1bXO\\\\OKUOdg0_1`XO\\\\OLUOdg0_1_XO]OMTOeg0^1^XO]ONUOdg0^1^XO]ONUOeg0]1]XO^ONUOeg0^1\\\\XO]OOUOeg0^1\\\\XO]OOUOfg0_1YXO\\\\O1UOfg0a1VXOZO5UOeg0a1UXO[O5TOgg0b1SXOZO6TOhg0a1QXO\\\\O7SOhg0a1QXO\\\\O7SOhg0a1QXO\\\\O7ROig0c1PXOZO7SOig0c1RXOXO5UOjg0c1QXOWO5VOjg0c1QXOWO4VOlg0c1PXOWO4VOmg0b1PXOWO3WOmg0c1oWOVO4WOng0b1nWOWO4WOng0b1nWOWO4WOog0a1mWOXO4WOog0a1mWOXO4WOog0a1mWOXO4WOPh0`1mWOXO3XOPh0`1mWOXO3XOPh0`1mWOXO3XOPh0a1kWOXO5VOQh0c1hWOXO7UORh0c1fWOXO7VOSh0i2mWOWMSh0i2mWOWMSh0i2mWOWMSh0i2mWOWMTh0h2lWOXMTh0h2mWOWMSh0i2mWOWMSh0i2mWOWMTh0h2lWOXMTh0h2lWOXMTh0h2lWOXMUh0g2kWOYMUh0g2lWOXMTh0h2lWOXMUh0g2kWOYMUh0g2kWOYMUh0h2jWOXMVh0h2jWOXMWh0g2iWOYMWh0g2iWOYMWh0g2iWOYMWh0g2iWOYMXh0f2gWO[MYh0e2gWO[MYh0e2gWO[MYh0e2fWO\\\\MZh0d2fWO\\\\M[h0c2eWO]M[h0c2dWO^M\\\\h0b2dWO^M\\\\h0a2eWO_M[h0a2dWO`M]h0_2cWOaM]h0_2cWOaM]h0_2bWObM_h0\\\\2bWOcM_h0]2_WOeMah0[2]WOgMdh0X2ZWOjMfh0i2101N1000000O2O000O10001N10000O101N10001O00000O10001O0000000O10001O00000000000O100000000\\\\OjVOZNVi0f1lVOXNSi0i1mVOWNSi0i1nVOVNRi0j1nVOVNRi0j1nVOVNRi0j1nVOVNRi0j1nVOVNRi0j1nVOWNQi0i1oVOWNQi0i1oVOWNQi0i1oVOWNQi0i1oVOXNPi0h1PWOXNPi0h1PWOXNPi0h1oVOZNPi0g1oVOYNRi0g1mVOYNSi0g1mVOZNRi0g1mVOYNSi0\\\\200000000000000000000001O000000000O1O1N2O1O1O1O10000O10000000000000001O1O1O3M3M4L2N001O1O0010O01O001O001O1O010O001O10O01O1O010O00100O010O1O010O0010O01O0100O01O010O01O01O010O00001O0001O01O00001O00000010O01O010O01O0100O0010O00010O000001O00000BdUOTO\\\\j0l0dUOTO]j0k0dUOTO\\\\j0k0eUOUO[j0k0eUOVOZj0j0fUOVOZj0j0fUOVOZj0j0fUOVO[j0h0fUOXOZj0h0gUOWOZj0h0fUOXOZj0h0fUOXOZj0h0gUOWOZj0
g0gUOYOZj0f0fUOZOZj0f0gUOZOYj0d0hUO\\\\OYj0c0hUO\\\\OYj0c0gUO]OZj0a0gUO_OZj0`0gUO_OZj0?gUOBYj0=gUOCZj0<fUODZj0<fUODZj0<fUODZj0<gUOCZj0<fUODZj0<fUODZj0<fUOD[j0;eUOD\\\\j0<dUOD]j0;cUOE]j0;cUOE^j0:cUOE]j0;cUOE^j0:bUOF^j0;aUOE_j0;aUODaj0:aUOE_j0;bUOC`j0<aUOB`j08j0N101M]ai>\"}}, {\"image_id\": 35, \"category_id\": 1, \"bbox\": [248.0, 376.0, 245.0, 98.0], \"score\": 0.999954104423523, \"association_id\": 3, \"light\": [-1.926486849784851, -1.9281883239746094, 1.8013205528259277, 1.8143810033798218], \"segmentation\": {\"size\": [900, 928], \"counts\": \"e\\\\j63ok03L3O1O101N100K5O100oTO^O_j0c0ZUO]OJ0N1nj0b0ZUO]OI4lj0`0[UO\\\\OI4kj0a0\\\\UO[OH5lj0`0\\\\UO[OH6kj0`0\\\\UOBdj0>[UOCej0>ZUOBfj0a0WUO_Ohj0P1O00O10000FVUOZOjj0f0VUOZOij0g0WUOYOij0g0WUOYOij0g0WUOYOij0g0WUOYOhj0h0XUOXOhj0g0YUOYOgj0g0YUOYOgj0h0XUOXOgj0i0YUOWOgj0i0YUOWOgj0i0YUOWOgj0j0YUOUOgj0k0YUOUOfj0l0ZUOTOfj0U1000O1000000000000000000O2O000O11O2N1O1O1O1O1O2N1N101O00001O0O1000000O101N10000O100O010O1O100O1000000O10000000000000000000000000O101O0O1000000O100O2O0O100O2O0O1O1O0O2O1O1001O1O0kUO^Nhi0b1WVO`Ngi0a1YVO_Ngi0a1YVO`Nfi0`1ZVO_Nfi0b1[VO]Nei0c1[VO\\\\Nei0e1=0O100O1000O0100O1O100O100O1000CiUOlNWj0U1iUOkNWj0a10CiUOmNWj0S1iUOmNVj0S1kUOmNUj0S1kUOlNVj0T1jUOlNUj0U1kUOkNUj0T1lUOlNTj0T1lUOlNTj0T1lUOlNTj0T1lUOlNSj0T1nUOlNRj0T1nUOlNRj0T1nUOlNRj0S1oUOlNRj0T1nUOlNRj0T1mUOmNSj0R1nUOnNRj0R1nUOnNRj0R1nUOnNRj0Q1oUOoNQj0Q1oUOoNQj0Q1oUOoNQj0Q1oUOnNQj0S1oUOmNQj0S1oUOmNQj0R1oUOoNPj0R1PVOnNoi0S1QVOmNoi0S1QVOmNni0T1RVOlNmi0U1SVOkNmi0T1TVOlNki0U1TVOlNki0U1UVOkNki0U1UVOkNji0V1VVOjNii0W1WVOiNfi0Z1YVOgNei0[1[VOeNdi0\\\\1[VOeNei0[1ZVOfNei0Z1\\\\VOfNci0[1\\\\VOfNdi0[1[VOeNei0[1[VOfNdi0Z1\\\\VOfNdi0Z1\\\\VOfNdi0Z1\\\\VOfNdi0Z1]VOeNci0\\\\1\\\\VOdNci0g0fVOoNFN1<ci0f0hVOmNF1O=ci0c0iVOoNE1O=ci0`0mVOPOC2M>di04^VO^Oh00VO?di0KdWOFiN>di0IeWOJfN>di0FhWOLdN?di0^OnWO4]N?Yk0BgTO>Xk0BhTO>Yk0AgTO`0^k00001O0@eTO7[k0IfTO7Zk0GhTO8Xk0HiTO7Wk0HjTO9`k00001O001O010O1O00_OKSUO5mj0KTUO5kj0JVUO5]k00001O001O001N2Omkm;\"}}, {\"image_id\": 35, \"category_id\": 1, 
\"bbox\": [0.0, 667.0, 153.0, 80.0], \"score\": 0.9990361928939819, \"association_id\": 4, \"light\": [-2.275362014770508, -1.3273024559020996, 2.177535057067871, 1.2112889289855957], \"segmentation\": {\"size\": [900, 928], \"counts\": \"ce0g0Zk09H3M;D4L3L5nUOZNci0h1[VOZNdi0e1[VO]Ndi0c1\\\\VO^Ndi0b1ZVO`Nci0b1[VObNci0_1]VOaNci0_1\\\\VObNdi0]1\\\\VOdNci0]1]VOcNci0o1000]O]VOeNci0[1]VOeNdi0Z1\\\\VOfNdi0Z1\\\\VOfNei0Y1[VOgNei0Y1[VOgNfi0X1ZVOhNfi0X1ZVOhNgi0i1100000000^OXVOhNhi0X1XVOhNhi0X1XVOhNhi0X1XVOhNhi0X1XVOhNhi0X1XVOhNhi0X1XVOhNhi0X1XVOgNii0j11O001O1]OTVOlNmi0S1SVOmNni0R1RVOnNoi0P1RVOPOoi0o0QVOQOoi0o0QVOQOPj0n0PVOROPj0n0PVOQORj0n0nUORORj0n0nUORORj0n0nUOROSj0m0mUOSOSj0m0mUOSOSj0m0mUOSOSj0m0mUOSOSj0m0mUOSOSj0m0mUOSOSj0m0mUOSOSj0m0mUOSOSj0m0lUOSOUj0m0kUOSOUj0m0kUOSOUj0m0kUOSOUj0m0kUOSOUj0m0kUOSOUj0m0kUOROUj0o0kUOQOUj0o0kUOQOUj0o0kUOQOTj0P1lUOPOTj0P1lUOPOTj0P1lUOPOTj0P1lUOPOTj0P1lUOPOTj0o0mUOQOSj0o0mUOQOSj0P1lUOPOTj0P1lUOPOTj0P1lUOPOTj0P1lUOoNUj0Q1kUOoNUj0Q1kUOoNVj0P1jUOPOVj0P1kUOoNUj0Q1kUOoNUj0R1jUOnNVj0R1jUOnNVj0R1jUOnNVj0`10CjUOlNVj0T1jUOlNVj0T1jUOlNVj0T1jUOkNWj0a1001O0ChUOnNXj0Q1hUOPOXj0P1hUOPOXj0o0hUOROXj0]1000_OhUOVOYj0Z1001O000\\\\OfUO^O[j0`0fUO@Zj0?fUOB[j0=eUOC[j0=eUOB\\\\j0=eUOC\\\\j0<cUOE]j0:eUOE[j0;eUOD]j0:dUOF\\\\j08fUOH[j07eUOI[j06fUOJ[j04fUOL[j02fUON\\\\j0OfUO0[j0OeUO0]j0NdUO2\\\\j0MeUO3\\\\j0JfUO6aj0B`UO>Tk0N100000001O0O100000001N1000001N10001O0O101NV^Xe0\"}}, {\"image_id\": 35, \"category_id\": 1, \"bbox\": [424.0, 291.0, 241.0, 88.0], \"score\": 0.9996543526649475, \"association_id\": 1, \"light\": [-1.2881269454956055, -3.327949285507202, 1.1477606296539307, 3.101642370223999], \"segmentation\": {\"size\": [900, 928], \"counts\": 
\"Ynd;1Rl0`0A2M2O001O1O1N101O1O1O00001O000010O01O001O00001O00001O001O00001O0000000O101O0O10001O001O1O1_UOgNVj0Z1fUOkNWj0b10\\\\NeUO]1bj0O010N2O2N1O1O0000jUOjN_i0T1aVOmN_i0S1`VOoN_i0Q1aVOoN_i0Q1`VOPO`i0P1`VOPOai0o0]VOSOci0m0\\\\VOTOdi0l0[VOUOei0k0[VOUOfi0j0ZVOVOfi0j0YVOWOgi0i0YVOVOhi0j0WVOWOii0i0VVOWOki0i0TVOXOli0h0TVOXOki0i0TVOXOli0h0TVOWOmi0j0RVOVOni0j0QVOVOPj0j0PVOTORj0m0lUOSOUj0n0jUOROVj0n0iUOROXj0o0gUOQOYj0o0fUORO[j0Z11O1O1O1O10000O1N2O010O10000O0100O010O1000000O1000001O000O101N1O11`NbUOX1Yj0dNkUO1NZ1Uj0hNmUOL0[1Sj0iNmUOL0[1Qj0kNVVOT1ii0mN[VOo0di0RO]VOn0bi0RO^VOn0ai0SO_VOm0ai0SO`VOl0`i0SOaVOm0_i0ROcVOm0]i0ROeVOm0[i0ROgVOm0Yi0ROiVOn0Vi0ROjVOn0Vi0ROkVOm0Ti0TOmVOk0Si0TOoVOl0Qi0ROQWOm0Pi0QORWOn0Rj001O1O1O010O00001O000000000011N001O0000001N10001O0000001N10001N100O101O0OJTUOVOlj0j0TUOWOkj0i0UUOWOjj0j0VUOVOjj0j0WUOVOgj0k0YUOUOgj0k0YUOUOfj0l0ZUOTOfj0l0ZUOTOej0l0\\\\UOUOcj0k0]UOUObj0k0_UOUO`j0k0bUOTO^j0i0eUOWOZj0g0iUOZOUj0c0oUO]OPj0c0QVO]Oni0d0SVO[Omi0e0SVO\\\\Oki0d0VVO\\\\Oii0e0WVO[Ohi0e0YVO\\\\Oei0b0^VO^Obi0`0`VO@_i0a0aVO@]i0`0dVO@[i0a0fVO^OYi0b0hVO_OWi0a0iVO_OVi0a0kVO_OUi0`0lVOASi0>nVOBRi0=oVODoh0<RWODnh0;SWOEmh0:TWOFlh09UWOGkh08VWOHjh08VWOHkh05VWOLkh02VWONlh0MWWO3]j0000O10000O1000O10OO2N020O01000O100O010O100O1O2NeiV7\"}}, {\"image_id\": 35, \"category_id\": 1, \"bbox\": [806.0, 21.0, 75.0, 47.0], \"score\": 0.9986101388931274, \"association_id\": 9, \"light\": [-1.9783166646957397, -1.5213003158569336, 1.9227948188781738, 1.3912811279296875], \"segmentation\": {\"size\": [900, 928], \"counts\": \"g^Tf04ik07M3N3M2O1O1O100000000M3M3M3O1O1O010O0010O001O100O100O10000O0100001N1O10O1O01N20gTOWOWk0k000001UOkTO9M1^k0MbTO3_k0LaTO4ak0G_TO009bk0F_TO0O;ek010OO2O1J600O20ON[OgTO0Ob0\\\\k0_OgTO>[k0CbTO=]k0AdTOb0_k0O2N101N010O1O010O1O2N101N3LRYY1\"}}, {\"image_id\": 35, \"category_id\": 1, \"bbox\": [0.0, 333.0, 346.0, 138.0], \"score\": 0.9623303413391113, \"association_id\": 10, \"light\": [-2.4349613189697266, -1.449957013130188, 2.3138973712921143, 1.2868432998657227], 
\"segmentation\": {\"size\": [900, 928], \"counts\": \"U;W1ej0=I4L3N2M3M2UVOWN[i0i1cVOZN[i0h1cVOYN]i0g1bVO[N]i0e1cVO[N\\\\i0f1cVO[N]i0e1cVO[N]i0e1cVO[N]i0e1cVO[N]i0e1cVO[N]i0e1cVO[N\\\\i0f1dVOZN\\\\i0f1dVOZN\\\\i0f1dVOZN\\\\i0f1dVOZN\\\\i0f1dVOZN\\\\i0f1dVOZN]i0e1cVO[N]i0e1cVO[N]i0e1cVO[N]i0e1cVO[N]i0e1cVO[N^i0d1bVO\\\\N^i0d1bVO\\\\N^i0d1bVO\\\\N^i0d1bVO[N_i0e1aVO[N_i0d1bVO\\\\N^i0d1aVO]N`i0b1`VO^N`i0b1`VO^N`i0b1`VO^N`i0b1`VO^N`i0a1aVO_N`i0`1`VO`N`i0`1`VO`N`i0`1`VO`N`i0`1`VO`N`i0`1_VOaNbi0^1^VObNbi0]1_VObNbi0^1^VObNbi0^1^VObNbi0^1^VObNbi0^1^VObNbi0^1^VObNai0_1_VOaNai0_1_VOaNai0_1_VOaNai0_1_VOaNai0_1_VOaNai0_1_VOaNai0_1_VOaNai0_1_VOaNai0_1_VOaNai0_1_VO`Nbi0`1^VO`Nbi0`1^VO`Nbi0`1^VO`Nbi0a1]VO_Nci0a1]VO_Nci0a1]VO_Nci0a1]VO_Nci0b1]VO]Nci0c1]VO]Nci0c1]VO]Nci0c1]VO]Nci0d1]VO[N]1Fef0o1nWO[N\\\\1Gff0n1nWO[N[1Hgf0[2XYOfMhf0Z2WYOgMif0Y2WYOgMif0X2XYOhMhf0X2WYOiMif0V2XYOjMhf0S2ZYOnMff0T2XYOlMhf0U2WYOlMhf0V2UYOkMkf0Y2QYOgMof0]2mXOcMSg0`2jXO`MVg0d2eXO]M[g0d2dXO\\\\M\\\\g0e2cXOZM^g0g2aXOYM_g0h2`XOXM`g0j2QXOlL8:gg0P3WXOQMig0P3VXOPMjg0Q3UXOoLkg0R3SXOoLmg0Q3RXOPMng0Q3QXOoLog0Q3PXOoLQh0Q3oWOoLQh0Q3oWOoLQh0Q3PXOnLPh0R3QXOmLog0T3QXOkLog0U3RXOjLng0V3RXOiLog0X3QXOgLog0Y3QXOgLog0Y3QXOgLog0Y3QXOgLog0Y3RXOfLng0Z3RXOeLog0[3QXOeLog0Z3SXOdLng0\\\\3RXOdLng0[3TXOcLmg0]3=O10000O1`N[WOAeh0;_WODbh0;_WOEah0;_WOEah0:aWOD`h0;aWOE`h09aWOG_h08bWOG_h08cWOG]h09cWOE_h0:bWODah0:aWOD`h0<`WOCah0<aWOAah0>aWO@ah0>`WOZOhh0f0YWOWOjh0g0XWOWOih0i0WWOoN[OK_i0U1WWOPO[OJ^i0V1VWOQOQi0o0oVOQOPi0P1PWOPOPi0P1oVOQOQi0o0oVOQOQi0o0nVORORi0n0nVORORi0n0mVOSOSi0l0nVOTORi0l0nVOTORi0l0mVOUOSi0k0mVOUOSi0l0lVOTOTi0m0kVOSOUi0n0jVOROVi0n0jVOQOWi0P1hVOPOXi0P1hVOPOXi0Q1gVOoNYi0Q1gVOoNYi0R1fVOnNZi0R1fVOnNZi0S1eVOmNHE[i0^1mVOmNHE[i0_1lVOlNHF\\\\i0_1kVOkNIF[i0`1lVOiNKFYi0a1mVOhNJGYi0a1mVOhN\\\\i0Y1cVOgN]i0Z1cVOdN]i0_1aVOaN_i0a1`VO^N`i0c1`VO[Nai0e1?1O000O10000000O10000O10000O100000000000000000000O0100000000000000O100000000O100000000O1000O10000O00100O1O1O1O1O00100J7N100O1O1O1O2N100O2O0O7J1O001O0O2O001O0OUWOYO\\\\f0f0cYO\\\\O\\\\f0d0ZYO\\\\OQ
N1fh0b0XYO_OPN1hh0>XYODmM0kh0;WYOOjf0CVWO4P2;Zf0]ORXO0G8m1:[f0^OYXO6^1;Xf0@ZXO4_1;Xf0AZXO3^1<Xf0BZXO0`1<Wf0DZXON`1>Vf0DdZO;\\\\e0FdZO:\\\\e0FdZO8^e0HTZOMhM:Uh0JRZOLjM8Uh0LQZOLjM8Uh0LQZOLjM8Th0MRZOKjM7Uh0NQZOKjM7Uh0NQZOKjM7Uh0OPZOJlM5Uh01oYOJlM4Vh02nYOJlM4Vh03mYOImM3Vh05mYOHmM2Wh06lYOHmM2Wh08jYOFPN0Wh0=fYOCSN0Wh0d0_YO\\\\ORg0d0nXO\\\\OQg0f0nXOZOSg0e0mXO[OSg0e0mXO[OSg0f0lXOZOTg0f0U210O01O001O00010O000000000001O00000000jWOYOPe0f0eZO[O]M1mg0e0eZOZO]M2ng0d0eZO[O[M3og0b0fZO\\\\OZM2Qh0a0eZO]OYM3Rh0`0eZO]OYM3Rh0`0eZO\\\\OZM4Qh0`0fZO[OYM5Rh0?eZO[OZM6Qh0?kZOBTe0>lZOBTe0>lZOBUe0=kZOCUe0=kZOCUe0=kZOCUe0>jZOBWe0=iZOCWe0=iZOCXe0;iZODXe0<hZODYe0:iZOEXe0:hZOFYe08hZOHZe00lZO0eh0N10001O000O2O00YRo?\"}}, {\"image_id\": 35, \"category_id\": 1, \"bbox\": [404.0, 131.0, 374.0, 154.0], \"score\": 0.9984375834465027, \"association_id\": 8, \"light\": [-1.9553898572921753, -1.539046287536621, 1.7928333282470703, 1.387012004852295], \"segmentation\": {\"size\": [900, 928], \"counts\": \"cWS;3Pl03M2N2N1O101N1O1O2N1O1O2N1O1O1O2O0O101N101N100O2N1O2N1O2O0O1O2O0O1O2O000O2N100O2O0jUO_Nhi0c1VVO_Nii0a1WVO_Nii0b1VVO^Nii0d1VVO\\\\Nji0d1UVO^Nji0c1UVO]Nki0c1UVO^Nki0a1UVO_Nli0a1SVO_Nmi0a1SVO`NPj0]1oUOcNYj0U1gUOkNZj0T1fUOlN[j0T1eUOkN\\\\j0T1dUOlN]j0S1dUOlN]j0T1cUOkN^j0T1bUOlN_j0S1bUOlN_j0S1aUOmN_j0S1aUOmN`j0S1_UOmNaj0S1`UOlNaj0S1_UOmNaj0S1_UOmNaj0T18O00010O010O01O010O0010O0001O010O001O010O1O1O010O001O010O100O1O1nVOdN[g0\\\\1dXOdN\\\\g0]1bXOeN]g0\\\\1bXOdN_g0\\\\1`XOdN`g0\\\\1_XOeNag0\\\\1^XOeNag0[1^XOfNcg0Z1\\\\XOfNdg0[1ZXOfNfg0Z1ZXOfNfg0[1XXOfNig0Z1UXOgNkg0Z1TXOfNmg0Y1RXOhNng0Y1QXOgNog0Z1oWOgNQh0Z1nWOfNRh0Z1nWOfNSh0Z1lWOfNTh0Z1kWOgNUh0Y1kWOgNUh0Z1jWOfNVh0Z1jWOfNVh0Z1jWOfNWh0Y1hWOgNYh0Y1gWOgNYh0Z1eWOgN[h0Y1eWOgN[h0Y1dWOhN]h0W1cWOiN]h0X1aWOiN_h0W1aWOhNah0W1^WOjNbh0V1^WOjNch0V1\\\\WOjNdh0V1[WOkNeh0U1[WOkNfh0U1YWOkNgh0U1XWOlNih0T1VWOlNjh0T1UWOmNlh0S1SWOlNnh0U1QWOkNPi0U1nVOlNRi0U1mVOkNSi0V1lVOjNUi0V1iVOjNXi0W1gVOiNYi0X1fVOhNZi0Y1eVOgN[i0Y1eVOgN\\\\i0Y1bVOgN_i0Z1`VOfN`i0Z1`VOfN`i0[1_VOeNai0o100000000001O000000
0D^VOVNbi0j1^VOVNbi0j1_VOUNai0l1^VOTNbi0l1bVOPN_i0o1bVOPN^i0P2bVOoM_i0Q2bVOnM^i0S2bVOlM^i0T2bVOlM_i0S290100O001N10001N101O0O10000O2O000O100O10jUOWNQj0P2N1QVOPNhi0Q2VVOQNii0V2O1N10N2M3M300O012N1O1O2N1O1N2O2N1O001BcUOSO]j0m0cUOSO^j0Y10DbUORO^j0n0bUOSO]j0m0cUOSO^j0Y10DbUORO_j0Y1001O010O0001O01O0001O0`N]N`XOb1_g0`N`XOa1_g0_NaXOa1_g0_NbXOa1\\\\g0aNcXO`1\\\\g0`NeXO_1[g0aNeXO`1Yg0aNgXO_1Yg0aNhXO^1Xg0bNhXO^1Wg0cNiXO^1Vg0bNjXO^1Vg0bNkXO]1Ug0cNkXO^1Tg0bNlXO^1Ug0`NlXO`1Ug0^NlXOc1Tg0[NnXOd1ih0000010O000010O000010O01O000000001O000[OZNlVOf1Qi0^NnVOb1Qi0`NnVO`1Ri0aNmVOU1@mNai00nVOS1AmNai01mVOR1BmNai01mVOR1CmN_i01oVOQ1BnN_i01oVOQ1BnN_i01oVOQ1BnN_i01oVOQ1CnN]i01PWOQ1CnNbh0GUXO:GP1CoN_h0IVXO8HP1DnN]h0LVXO5JQ1CoN[h0LYXO3JQ1CPOXh0NZXO1KQ1CPOWh0O[XOOLR1BPOWh00ZXOK1T1^OROUh00YYOn0cNQOTh01YYOn0cNQOTh01YYOn0cNROSh00[YOm0bNSOSh00[YOm0cNRORh01\\\\YOl0bNTOQh0O^YOm0aNUOPh0N`YOl0aNUOng00aYOk0aNUOmg01cYOi0`NWOkg01fYOg0_NXOkg01fYOg0_NXOjg0InWO3j1k0^NYOjg0InWO3j1k0_NXOig0JnWO3j1k0_NXOig0JnWO3k1j0^NZOhg02jYOd0^NZOhg01kYOe0]NZOhg01lYOd0\\\\N[Ohg01lYOd0\\\\N[Ohg01mYOc0[N\\\\Ohg01nYOb0ZN]Ohg01oYOa0YN_Ogg00RZO?WNAfg01TZO=VNBfg01UZO<UNCfg01UZO<UNCfg01VZO;TNEeg00WZO;TNEdg00ZZO:RNFdg00ZZO:RNFdg00ZZO:RNFdg01ZZO8RNGdg01[ZO7QNHdg01\\\\ZO6PNIdg01^ZO4nMKdg01`ZO1mMNcg01cZONjM1dg00dZOMhM3dg00fZOKfM5dg00gZOIfM7cg00iZOGdM9cg00[[OOed01[[OOdd02\\\\[ONdd03[[OMed03[[OMed03\\\\[OLcd06\\\\[OIed07[[OIdd09[[OGed0:[[OEcd0=][OCad0`0^[O@ad0a0_[O^Oad0d0_[OZOad0g0_[OVOcd0k0[31O000010O01O0010O01O100O010O0001O2M2O2M2M7GoeS4\"}}, {\"image_id\": 35, \"category_id\": 1, \"bbox\": [272.0, 219.0, 216.0, 95.0], \"score\": 0.9419149160385132, \"association_id\": 6, \"light\": [-2.416987419128418, -1.1249096393585205, 2.293351411819458, 0.9680533409118652], \"segmentation\": {\"size\": [900, 928], \"counts\": 
\"eY_72fk0=M3O1O001N101O00001O00001O1O1O10O01O001O001O001O01O01O00001O1O1O1bUOPOfi0R1PVOVOPj0l0lUOWOSj0l0iUOVOVj0\\\\10001O001O001O001O1O002O0O001O1O00100O1O1O10O01O1O1O001O010O1O001O00O10001O0000001N1001O10O01OO100O2L30000O101N1N2O1O1O101N1M3N2O2O0O1000000O2O000O1000000010O0000000010O00001O01O010N101O000000001N11O010O000010O00001O000010O0000RO_NYWOa1fh0`NZWO`1hh0]NZWOb1ei0O102N1O0000O01O0O100O2O0O100O2O001O00001O00000O100O2O0M3N2O100O010000O100O100O01000O100@ROjUOn0Sj0VOdUONNm0^j0UOdUOOMl0^j0WOcUOP1^j0PObUONOh0`j0[OaUOL1f0`j0^O`UOK2d0_j0A_UOJ4d0]j0B`UOH5e0Zj0DmUO:Tj0FnUO8Rj0HRVO3oi0MSVO1mi0NVVOOli00WVOJli05S1O1M`_R<\"}}, {\"image_id\": 35, \"category_id\": 1, \"bbox\": [447.0, 82.0, 406.0, 114.0], \"score\": 0.9835370779037476, \"association_id\": 5, \"light\": [-2.6100540161132812, -0.8213375806808472, 2.5197300910949707, 0.7420828342437744], \"segmentation\": {\"size\": [900, 928], \"counts\": \"WQY<1Pl05XTOKVk0=gTOFVk0>fTODZk0d0N101O00001O001O010O00001O01O01O0001O1O001O001O00001O001O001O001O000001O0000001O00010O001O001O0001N101N1O2N3M2N2N2Nabl1GoaRNOak0:O10000O101N10000O2O0O101M21O001O00000000000000001O000000000001O001O002N1O1O2N1O2N1O1O0000001O0000001O001N2O001O1O001O000101N010O10O010O00010O001O01O00O2O0000001N101O001O001N2O001O0010O0001O010O0010O01O0010O01O0010O000000010O000000001O000001O01O000O2O00001O00001O0000001O00000001O00001O001N1000001O0000001O00001O010O001O001O001O0010O0001O0010O0001O010O010O00010O001O01O0010O01O001O001O10O01O00001O001O001O001O1O001O001N101O001O001O00001O00001O00001O001O00001O01N10001O0O101O0O101[NlNWXOU1ig0kNVXOV1ig0kNVXOW1ig0jNTXOX1lg0hNTXOX1lg0hNTXOX1lg0hNSXOY1mg0gNSXOY1lg0hNTXOX1lg0hNTXOX1kg0iNUXOW1kg0hNVXOX1jg0hNVXOX1ig0hNWXOY1ig0fNXXOZ1gg0gNYXOY1gg0fNZXOZ1dg0gN\\\\XOZ1cg0gN]XOY1bg0fN`XOZ1_g0eNcXO[1]g0cNeXO]1Zg0cNgXO\\\\1Zg0cNgXO]1Yg0bNhXO]1Yg0bNiXO]1Wg0bNjXO]1Xg0bNhXO]1Qi0N101fN\\\\UOQ1ej0nN_UOm0_k0[O6JThQ2\"}}, {\"image_id\": 35, \"category_id\": 1, \"bbox\": [84.0, 414.0, 338.0, 149.0], \"score\": 0.9963144063949585, 
\"association_id\": 7, \"light\": [-1.2512637376785278, -2.411135673522949, 1.0411128997802734, 2.2033398151397705], \"segmentation\": {\"size\": [900, 928], \"counts\": \"[XZ2275J4Vk0KkTOd0ej0ZO_UOP1_j0RO_UOo0Xj0nNiUO5Mo0Yj0mNiUO5Mn0Yj0oNhUO\\\\1Xj0eNgUO[1Yj08O10001N10000O1M4O03M1O001O00001O01O000001O01O001O10O01O001nUOTNki0S2O00100O1000O100O1O2N2O1N2N2N001O001O0010O01O3M3M1O1O100O1O\\\\VOaNdh0^1\\\\WObNeh0^1ZWObNfh0`1XWO`Nih0`1VWO`Njh0a1UWO_Nkh0b1TWO^Nmh0a1SWO_Nmh0b1RWO^Nnh0b1QWO_NPi0`1mVOcNSi0^1kVOcNUi0]1kVO`NYi0`1g0O2M010O010O0001O01O0000010O000000001O01O0000002O1N001O000000H`UOlN`j0\\\\1000G`UOnNaj0Q1_UOoNaj0Q1_UOoNaj0Z1O100000000000O10000000O1000000O10001OHbUOjN^j0V1bUOjN^j0V1bUOjN]j0W1cUOiN]j0W1cUOiN]j0W1cUOiN]j0_10000000IcUOgN\\\\j0Z1dUOfN\\\\j0Z1dUOfN\\\\j0a10000000000O2O000O100O1O1O2O0O1O11O00TORNbWOn1]h0TNcWOk1]h0UNcWOk1]h0VNbWOj1]h0WNcWOi1\\\\h0XNeWOg1Zh0ZNfWOf1Yh0[NSWOI=j1`h0_NQWOI>h1`h0`NQWOJ>d1_OXNQi0;RWOI?d1_h0dNSWOF?f1^h0cNUWOE>h1\\\\h0dNmWO\\\\1Qh0eNoWO[1Qh0eNoWO\\\\1og0eNQXO]1mg0cNSXO^1kg0cNUXO^1jg0bNWXO_1gg0aNYXO`1fg0`NZXOa1dg0`N\\\\XOa1cg0_N]XOb1bg0_N^XOc1_g0]NaXOf1\\\\g0ZNeXOf1Zg0[NeXOf1Zg0`NaXOa1\\\\g0aNcXO`1\\\\g0aNdXO_1[g0aNeXO`1Zg0aNfXO_1Yg0aNgXO_1Yg0bNgXO^1Xg0bNhXO^1Xg0bNhXO^1Xg0bNhXO^1Xg0bNhXO]1Yg0cNgXO]1Yg0cNfXO^1[g0aNeXO_1[g0aNeXO_1[g0aNeXO_1[g0bNdXO]1]g0cNcXO]1]g0cNcXO^1^OeMhg0m0jXO`1[OeM2Ncg0Q1lXOb1_O_Mdg0Q1kXOe1Ug0[NjXOf1Vg0[NiXOf1Vg0ZNiXOg1Wg0ZNgXOh1Xg0YNfXOh1Zg0YNcXOi1]g0^11O1O1M3N2O1O1O1O100O1O1O1N2K500O1O2O000O2N1O2N1N4I9I6L3NM2N3N10001O01000O10O10O11O1N2LlVOVMUi0i2kVOVMWi0i23O1O1O3M1O1O00001O0000000^N^VOb0bi0\\\\ObVOa0`i0]OcVOa0]i0^OfVO`0Zi0_OhVO`0Xi0_OkVO?Ui0AlVO>Ti0AmVO?Si0AmVO?Si0@nVO`0Ri0_OPWO`0Pi0_OQWOa0oh0^ORWOc0mh0\\\\OTWOd0lh0\\\\OTWOd0YOkN`i0a0XWOg0gh0YOYWOg0gh0XOZWOh0fh0XOZWOh0fh0WO[WOj0dh0VO\\\\WOj0dh0VO]WOi0ch0VO^WOj0ch0TO^WOm0ah0QObWOn0^h0QOcWOo0]h0oNfWOP1Zh0POfWOP1[h0nNfWOR1Zh0nNgWOQ1Yh0nNhWOR1Xh0nNiWOQ1Wh0nNjWOR1Vh0nNkWOQ1Uh0oNkWOQ1Uh0oNlWOP1Th0POlWOo0Uh0POmWOo0Sh0QOmWOo0ig0QOPWO0X1n0gg0UOnVON\\\\1l0cg0A^XO>ag0C_X
O<ag0DaXO;^g0FbXO:^g0EdXO9^g0EcXO7ag0HaXO7_g0GdXO6^g0IfXO0_g0NeXOM]g02hXOHZg08Z2N1O5LnYl=\"}}, {\"image_id\": 36, \"category_id\": 1, \"bbox\": [89.0, 114.0, 847.0, 575.0], \"score\": 0.8771070241928101, \"association_id\": 1, \"light\": [-1.758208155632019, -2.0296528339385986, 1.6982975006103516, 1.8702189922332764], \"segmentation\": {\"size\": [768, 1024], \"counts\": \"nSS2<kf0l0C=G9H7I7I7I7J5K6YNiMk\\\\On2ib0\\\\1I6J7I6H9F:E;H8I7J6J6J5L4M3L4M3N1N3M3M2N3N1N3M3N1N3N2N2M2O2N2N2N1O2M3N1N3M3N2M3N3M2N3M3M3M4L4L4M2M2N3N0O100O100O100O010O100O10O010O10OO2M2G:H8O001O001O1O001O1O00100O1O001O1O1O1N2N2N2M3M2N3M3L4M3N2N2N2O1N2O1O1O1O1N2O001N2N2N2N2N2N2O1N101O1O1N2O1O1O1N2O1O1N2O2M2O1O1N2O1O1N2O1O1N3N1O1O100O1O1O100O100OnHoHXMo6g2SIYMm6d2VI\\\\Mj6a2YI_Mf6^2_IaMa6Z2dIfM[6Y2gIgMY6Y2hIfMX6Y2iIhMU6Y2kIgMU6X2lIiMS6V2nIjMR6U2PJkMo5U2QJlMo5R2RJoMm5P2TJPNl5o1UJRNj5n1VJSNi5l1YJTNf5k1[JVNd5j1\\\\JXNb5g1_JZN`5e1aJ\\\\N^5c1cJ^N\\\\5b1eJ]N[5b1fJ_NY5`1hJaNW5^1jJcNU5]1kJdNT5[1mJfNR5Y1oJhNP5W1PKkNP5S1QKoNm4P1TKQOk4n0VKTOh4k0YKXOd4g0]K[Oa4e0_K]O_4b0bK_O]4`0dKA\\\\4>dKC[4<fKEY4:gKHX48hKIW46jKKV44jKLV44jKMV42iKOW40jK1V4NiK4V4LjK5V4JiK7X4GiK:V4FiK<W4ChK?X4_OhKc0X4\\\\OhKe0Y4YOfKh0[4WOdKk0\\\\4SOdKo0\\\\4POcKR1\\\\4nNdKS1\\\\4kNdKV1]4jNbKV1^4jNbKV1_4jN`KV1`4jN_KW1b4hN^KY1a4hN^KX1c4gN]KY1c4hN[KY1e4gN[KY1f4gNYKY1g4gNYKY1h4gNVKZ1j4fNVKZ1k4fNTKZ1l4gNSKY1m4gNSKZ1m4fNQK[1o4fNPKZ1Q5eNnJ\\\\1R5dNmJ]1T5bNlJ^1T5cNjJ^1W5aNhJ`1Y5_NfJb1Z5^NeJc1\\\\5\\\\NcJe1^5[N`Jf1a5YN^Jh1c5WNSJT2m5kMoIY2R6gMjI\\\\2W6cMgI_2Y6aMfI`2[6b71O001O1O001N2O1O1O001O1O1O1O2N1N2O1O1O1O1N2O0O2O1N101N101N101O0O2O0O2O000O2O1N2O1N2O1O1N3N2N2M3N2M2O2N1N2O1O1O1N2O1O1O2N2M3N2N2N2N2M3N2N1O2M3N2N1N2O2M3M3N2M3M4L6J:F;E8H7I6K4K5K5L3L4M3M9G=C4L5K5K6J3M4L3M3M2N3M2N4L4L3M3M2N2N2XGdAh7g?C1O1O001O1O0010O01O001O001O000000000000000000001O01O000000000000001O001O001O001O001O001O1O1O001O001O1O001O0O2O001O001fMUH]Dk7U7XHZGd07O5_OS1g7i6mKkGbLZ1a7d6YLiG\\\\La1\\\\7b6_LeG[Lf1W7b6dM\\\\I]2a6gM]IY2a6kM]IV2a6mM]IT2a6oM]IQ2a6SN\\\\Io1b6TN\\\\Im1b6VN[Il1b6XN\\\\Ii
1a6[N\\\\Ig1a6]N]Id1_6aN^Ia1^6dN_I`1[6fNbI\\\\1Y6iNdI[1U6VO]Im0\\\\6`8H8H8I7I8I;E;D?Ab0^O;E9H9G8I8H5L4K5J6K4L5I7I6K6J7@aaU3d1j\\\\jLl0_O6K4K;T\\\\OdLcb0\\\\3^]OeL`b0W3e]OlLXb0R3l]OlLSb0U3m]OkLSb0U3m]OlLQb0T3Q^OkLoa0U3Q^OkLoa0U3Q^OkLna0V3R^OjLna0U3U^OhLla0X3V^OfLja0Y3_100O100O100O101N100O101N100O1O010O001O1N101000O01000O1000000O100O101N100O1O10000O10001O00000O100000000000001O000000O10000O1000000O10O10O10O010O0001O1O2M2M3N2N2N2N3N2M4K6J7Cd0ROf0^OoXOKWg00g\\\\;0ecD2N3M2N2N1M4M3N1O2O1N100O1O0N3M2O2O1O1N3N1L4@?YOh0I7M3000001O0O1I6N2O2O001O2N1N2M4I601N1010O01O0N1O1010O10001O100O1O1O1N2O1O1O1O2N1O1N2M4D[[OYMgd0c2=M4N1O1N3M2O1O2N1O100010O1O1O2N100O1O1OO1000000000001N11O01O10O0100O10O01N1O2M3L3^OXZOhNme0Q1^ZOgNge0S1f0L4M4K6FX_Q2\"}}, {\"image_id\": 37, \"category_id\": 1, \"bbox\": [111.0, 84.0, 514.0, 1060.0], \"score\": 0.9999933242797852, \"association_id\": 1, \"light\": [-1.6516938209533691, -1.7865521907806396, 1.4497864246368408, 1.6103172302246094], \"segmentation\": {\"size\": [1272, 954], \"counts\": 
\"gkZ42ZW1=N1O101N1O1O101N100O1O100O1O100iNYO\\\\kNh0dT1]OVkNe0hT1^OUkNc0kT1_OPkNd0PU1^OhjNh0WU1[OejNg0[U1ZOcjNg0]U1[O_jNg0aU1h000O10000000O10000O100O100O1O1N2O1N2O100O100O1000000O1000000O10000O10000O1000000O10000001O00000000001O000000000000001O0000001O001O001O1O3M3WkNUN_S1o1QlNeNgS1`1RlNeNkS1]1QlNfNnS1\\\\1okNeNQT1b2O001O001O00001O001O001O1O2N1O2N1O1O001O00001O00001O00001O001O1O1O1O2N1O2N1O001O1O0000O100O1O1O2M2O1M3K5K5L4N2N2N2O2NSEZMj@e2U?kM]@U2a?nM^@Q2XGQMmf0R1iAm1PGgMif0?VBi1eFbNff0HeBR4R=QLmBQ4o<RLPCP4l<SLTCn3h<ULWCm3f<ULZCdNcFl04e1le0mN^C_NeF`0<X2_e0iNaC]NhF:<b2Ze0fNeCRNQG`02l2Ue0cNjCoMRG>NS3Ue0`NmClMVG;F]3Ue0\\\\NQDkMYG7^Of3Ve0XN\\\\EoMZEo3We0RNdEkMREW4We0nMSFU2j9lMTFX2j9hMUF[2h9fMXF]2e9cMZF`2d9aM[Fa2c9_M]Fc2`9]MaFe2\\\\9\\\\McFg2Z9YMfFj2U9XMiFm2R9VMiFQ3P9SMmFS3o8mLmF[3P9eLkFc3S9ZLjFn3S9QLlFT4R9kKmFX4R9hKlF\\\\4S9dKkF_4S9aKkFb4S9`KkFb4T9_KhFe4U9^KgFe4X9]K]Fn4_9VKkE_5Q:fJVER6f:SJdDb6X;cI\\\\Dh6b;[IUDk6j;YInCm6Q<VIkCk6V<XIgCh6X<\\\\IeCe6Z<_IbC`6^<dI_C\\\\6`<iI\\\\CV6c<oIZCo5g<VJUCh5k<^JPC_5Q=gJkBU5W=RKdBj4Z=`KcB\\\\4U=TLgBh3S=eLkBV3j<]MSC_2`<TN^Cj1Z<cNcC_1Q<nNlCV1d;ZOXDj0Y;DdD?n:OnD5b:;ZEIR:l0iEWOe9\\\\1WFhN^9d1\\\\F`N\\\\9j1^F[N[9m1_FWN[9Q2_FSN\\\\9T2`FoKiLQIc<X;^FhKQMPI[<a;_F_KXMPIQ<l;aFUK`MoHg;W<cFjJiMnH];d<bF_JTNmHQ;Q=cFTJ_NjHk:Y=]FPJjNfHg:b=QFoIZO_Hb:k=XEZJ8kG^:Xh0dEgWO[:Zh0fEfWOX:Zh0jEfWOT:[h0mEeWOR:Zh0QFeWOm9\\\\h0TFcWOk9]h0WFcWOg9]h0[FcWOc9\\\\h0aFcWO]9\\\\h0fFdWOW9\\\\h0lFdWOQ9[h0TGdWOh8[h0]GeWO`8[h0dGdWOZ8[h0iGeWOT8[h0oGeWOo7Zh0UHeWOj7Zh0XHfWOg7Xh0\\\\HhWOd7Vh0^HjWOa7Th0cHjWO^7Rh0fHnWOZ7mg0kHSXOU7gg0QIYXOo6ag0XI^XOh6\\\\g0^IdXOb6Vg0dIjXO[6Rg0jInXOV6mf0PJRYOP6if0UJWYOk5ff0YJYYOg5df0\\\\J\\\\YOd5bf0_J]YO`5bf0cJ]YO]5af0eJ_YO[5_f0hJ`YOX5^f0kJaYOT5^f0nJbYOR5\\\\f0QKcYOo4[f0SKeYOm4Zf0UKfYOi4Yf0YKgYOg4Wf0\\\\KhYOd4Vf0^KjYOb4Tf0aKkYO_4Tf0bKlYO^4Tf0cKkYO]4Uf0cKkYO]4Tf0eKkYO[4Uf0eKkYO[4Uf0fKjYO[4Tf0fKlYOZ4Tf0gKkYOY4Uf0hKjYOX4Vf0iKiYOW4Vf0kKjYOT4Vf0lKjYOU4Uf0lKjYOT4Uf0mKkYOS4Vf0lKjYOT4Vf0mKiYOS4Wf0mKiYOS4Wf0mKiYOR4Xf0oKhYOP4Yf0oKgYOQ4Yf0oKgYOQ4Yf0PLfYOP4Zf0PLgYO
o3Yf0QLgYOo3Zf0QLeYOo3[f0QLeYOo3[f0RLdYOn3\\\\f0SLdYOl3]f0TLbYOm3]f0SLcYOn3]f0RLbYOo3]f0QLcYOo3^f0PLbYOQ4]f0PLcYOP4]f0oKcYOR4\\\\f0nKdYOT4[f0lKdYOU4[f0kKeYOV4[f0iKfYOX4Xf0hKhYOY4Xf0gKgYOZ4Xf0fKhYO[4Wf0fKiYOZ4Vf0gKiYOZ4Vf0fKjYO[4Vf0eKjYO[4Uf0eKkYO\\\\4Tf0eKlYO[4Sf0eKmYO\\\\4Rf0eKnYO[4Qf0eKoYO\\\\4Qf0cKoYO]4Qf0dKoYO\\\\4Pf0dKPZO]4oe0cKQZO^4ne0bKSZO^4le0cKSZO]4me0cKSZO^4le0cKSZO^4le0bKUZO^4je0cKUZO]4ke0dKTZO]4ke0cKVZO]4ie0dKVZO\\\\4je0dKVZO]4ie0dKWZO[4ie0fKWZOZ4he0fKXZOZ4he0gKXZOY4ge0gKYZOY4ge0hKXZOY4ge0gKZZOX4fe0iKYZOW4ge0iKYZOX4fe0iKZZOV4fe0jKZZOW4ee0iK[ZOW4ee0jKZZOW4ee0iK[ZOW4ee0jK[ZOV4de0jK\\\\ZOV4ee0jKZZOW4ee0iK[ZOY4ce0hK]ZOX4be0hK^ZOZ4`e0fK`ZO\\\\4^e0eKbZO\\\\4\\\\e0dKdZO^4Ze0cKfZO_4We0aKiZOb4Te0^KmZOc4Qe0^KnZOd4Pe0\\\\KP[Oe4Pe0ZKP[Oh4nd0YKR[Og4md0YKS[Oh4ld0XKT[Oh4ld0XKT[Oi4kd0WKU[Oj4jd0WKV[Oh4kd0WKU[Oj4jd0VKV[Oj4jd0VKV[Ok4id0UKW[Ok4id0UKX[Ok4gd0VKX[Oj4id0UKW[Ol4hd0TKX[Ol4hd0TKX[Om4gd0SKZ[Om4fd0RKZ[Oo4ed0QK[[OP5dd0QK[[OQ5[:iDROU6cFS5W:oDROn5hFT5R:TEROi5kFT5o9[ERO`5oFW5k9`EQOY5UGY5d9fESOQ5XG[5b9kEPOj4^G^5]9PFPOb4cG`5X9RFSO^4eGb5U9SFUOZ4fGd5W9mEVO^4dGf5W9gEXOc4`Gg5[9[E^Ol4WGj5^9mDFW5lFm5a9^DOc5`FP6e9lC:Q6RFS6Yd0kIg[OW6Zd0gIe[OZ6]d0cIc[OPL]Ob8Qe0\\\\Kb[ORL]Oc8Re0YKa[OTL^Ob8Re0YK`[OTL_Od8Re0VK_[OVL_Oe8Re0UK_[OUL_Oh8Qe0RK`[OUL@j8Pe0PK`[OVL@k8od0oJa[OVL@m8nd0lJb[OWLAm8md0lJa[OVLCP9kd0iJb[OWLCR9jd0fJd[OWLBU9id0cJe[OWLCX9fd0bJf[OVLD[9dd0^Jh[OWLD^9bd0ZJj[OXLD`9ad0XJk[OVLEe9^d0TJm[OWLEh9]d0oIn[OYLEk9\\\\d0kIn[OZLFo9Zd0eIP\\\\O\\\\LFS:Xd0_IS\\\\O]LEX:Wd0XIT\\\\O`LE^:Td0oHW\\\\ObLFg:nc0eH[\\\\OdLGh;MPDU<YObHX4fKfLFd=l:aBbIR3lKiLFn=^:cBkIc2QLlLFV>R:fBSJU2ULoLF\\\\>h9jBYJj1XLPMGa>`9nB]J_1]LPMGg>Y9PCaJW1_LRMGl>Q9RChJm0`LUMGR?h8SCoJd0bLWMGX?_8TCWK:cLZMGd?_1g_Oo3Y3VN0dL\\\\MH``0Om_On4^2fNFeL_MHRc0\\\\4Q@VOZOfLbMI_c0l3m_OEmNfLgMIhc0a3k_ONcNhLjMIPd0V3i_O:ZNfLmMJWd0m2g_Oc0RNfLPNJ[d0g2h_Oi0jMfLSNJ^d0c2h_On0dMeLUNKbd0]2i_OT1^McLWNLfd0W2i_O[1WMcLYNKjd0S2i_O_1SMbLZNLod0l1h_Og1lLaL]NLSe0g1g_On1fL^L_NNZe0^1e_OW2`L\\\\LaNO`e0V1c_Oa2XLZLeNOje0j0^_Om2QLYLgN0Rf0a0Y_OW3kKXLjN0Yf08W_Oa3cKVLlN1`
f01V_Oh3\\\\KULoN1df0KW_On3SKULSO2ff0FY_OR4mJULTO3Un0h3eROTLVO4Wn0i3aROQLYO6Yn0g3_ROQLXO8\\\\n0f3[ROQLZO8^n0f3WROPL\\\\O:an0d3`SO\\\\Ldl0e3WSO[Lml0f3nROZLVm0g3eROYL_m0g3^ROXLfm0i3UROWLnm0j3nQOVLUn0l3fQOTL\\\\n0n3aQOPLbn0S4YQOmKin0T4TQOlKmn0V4Z22O1N101N2O1N2O1O1O001OM6A`0]Od0ZOg0ZNh1hNVhg<\"}}, {\"image_id\": 38, \"category_id\": 1, \"bbox\": [835.0, 209.0, 163.0, 288.0], \"score\": 0.9999998807907104, \"association_id\": 4, \"light\": [-2.0547711849212646, -2.419786214828491, 1.935377836227417, 2.255638837814331], \"segmentation\": {\"size\": [683, 1024], \"counts\": \"Y]]a01Ve06M2O0O101O0O101O00000000001O00000000000000000000000000O10O100000001O0000000O1000000O100N3O0O10000000000000000000000000001O01OkJe0ZD[O_;S1ZDmN^;`1^D_N^;l1[DTNc;]2nCcMo;`2SD^Mj;T2gDlMV;P2QEPNm:P2UEPNk:P2UEPNj:Q2VEoMj:R2UEmMk:T2WEjMi:V2XEiMg:X2\\\\E^MXL0Z>c2_EZM[L1V>h2TBgM`2@Z;S4jC[L]<U4oBRLn<U4gBRLW=R4bBQL]=e4gAcKX>h5O2M2J6ZOf0N2O1O1O1O100O1N20O01O010OO2jNRCmIP=k5fCbI_<Y6]1HoBgI^;h5SEYJm:^5[EcJe:Y5^EhJb:V5^EmJa:P5aEPK`:n4aERK`:m4aESK_:m4`ESKa:l4_ETKb:k4^EUKc:m4ZESKh:P5SEPKQ;P5lDoJW;Q5fDoJ\\\\;S5`DmJb;X5WDhJk;[5QDdJQ<]5lCcJU<`5hC^JY<j5_CVJb<o5XCQJg<`6i0=B6J5L2N2O1N101O00jIaB`4^=]KmB]4Q=^K]CZ4b<aKnDU3Q;gLaEl2^:SMnEb2Q:_MkFf1S9ZN^GW1a8iNdGS1Z8lNmGP1o7oNdHc0X7^OUI8i6G]I6c6GcI4^6KkIKW65PJ]OW6c0U72N3M22M3M3M3N_Ta0\"}}, {\"image_id\": 38, \"category_id\": 1, \"bbox\": [277.0, 349.0, 54.0, 118.0], \"score\": 0.9775959253311157, \"association_id\": 2, \"light\": [-2.0159709453582764, -2.469963550567627, 1.969744086265564, 2.35923171043396], \"segmentation\": {\"size\": [683, 1024], \"counts\": \"STi5Q1Wd07K3M3M3L2O101O1O2M5K3K6C<K5P^O]MQa0j2e^O]MYa0f2b^O]M^a0d2\\\\^OaMda0S30000000ZO^^ObMca0R2l^OjMUa0T2o^OhMSa0V2R_OeMQa0Z2Q1NT^OdMm`0Z2R_OiMn`0U2Q_OlMSa0P2m^OPNWa0l1i^OTNYa0k1f^OUN\\\\a0j1c^OVN^a0k1`^OTNca0m1Z^OSNfa0n1Y^ORNha0n1W^ORNia0Q2T^OoMma0T2o]OlMQb0U2n]OkMSb0U2l]OkMTb0W2j]OiMWb0_1f]OnN2BZb0o0T^OB^O^O`b0?b]OBe0d0Rb0A`^O?Vc0000001O2M2M3LYi]>\"}}, {\"image_id\": 38, \"category_id\": 1, \"bbox\": 
[494.0, 331.0, 54.0, 129.0], \"score\": 0.9999376535415649, \"association_id\": 3, \"light\": [-1.5568299293518066, -2.601545572280884, 1.5276362895965576, 2.4528980255126953], \"segmentation\": {\"size\": [683, 1024], \"counts\": \"PnY:2Ye01N100000000000001O0O1SM6Y@Ke?9X@Gg?:Y@Ff?`0R^O\\\\Oj14S`0m0j_OTOT`0U1d_OkN[`0Y1b_OgN]`0^1__ObN``0b1]_O^Na`0f1]_OZNa`0k1\\\\_OUNb`0o1[_ORNc`0R2[_OmMb`0Y2\\\\_OgMa`0^2]_ObMa`0c2\\\\_O]Mb`0j2o^O`MPa0^30Hf^O_LZa0i30If^O]LZa0c3f^O\\\\L[a0d3f^O[LZa0e3f^O[LZa0f3e^OYL\\\\a0k31O3K`^OYLaa0f35N2M2N002N2N1O3hNd^O[N]a0\\\\1P_O^NSa0]1R_O`NPa0[1d1L5GaTm9\"}}, {\"image_id\": 38, \"category_id\": 1, \"bbox\": [426.0, 329.0, 42.0, 133.0], \"score\": 0.9997266530990601, \"association_id\": 1, \"light\": [-1.554102897644043, -2.885150909423828, 1.4333040714263916, 2.7343027591705322], \"segmentation\": {\"size\": [683, 1024], \"counts\": \"fbl83We0101O0QM7\\\\@Ib?<[@Dc?`0[@@c?d0[@\\\\Ob?h0\\\\@YO`?k0`@UOZ?S1d@mNV?\\\\1g@dNT?e1h@[NW?i1f@WNZ?P2_@PNa?U2Z@kMf?X2W@hMi?Z2U@gMj?[2T@eMl?]2R@cMn?_2P@aMP`0a2n_O_MR`0d2k_O\\\\MW`0f2f_OYM\\\\`0g2c_OXMa`0g2__OVMd`0k2T_OXMPa0h2k^O[MVa0e2f^O]M\\\\a0Y30O1O001O1O1N1]N[^O@fa0:k^OZOVa0b0U_OVOn`0c0Z_OXOi`0c0]_OXOi`0b0]_OWO_Pc;\"}}, {\"image_id\": 38, \"category_id\": 1, \"bbox\": [644.0, 331.0, 153.0, 127.0], \"score\": 0.977187991142273, \"association_id\": 7, \"light\": [-1.7253531217575073, -2.6640708446502686, 1.617229700088501, 2.4520249366760254], \"segmentation\": {\"size\": [683, 1024], \"counts\": \"el]=9kd0>TOi0K4L`0A4L2N5K4X^O_M^`0c2`_O]M``0e2R12N1O2N3M3M2X^OTMn`0e30O5K3M3N2M2N3L4YNZ^OCja07\\\\^OEna0M\\\\^OMha0M^^ONcc0K4M4IR[3KUeL3O000000000000000000000000O101O00000000000000000000000000000001O000O100000001O000000000000000001O2N1O00^e80`ZG3N10000000000O11O0000000000000001O01O00000000000O10000000000000000000000000000O1000001O000O10000OQof4\"}}, {\"image_id\": 38, \"category_id\": 1, \"bbox\": [572.0, 336.0, 84.0, 120.0], \"score\": 0.9956828355789185, \"association_id\": 5, \"light\": [-2.1675400733947754, 
-2.3112215995788574, 2.0386886596679688, 2.1941421031951904], \"segmentation\": {\"size\": [683, 1024], \"counts\": \"jnm;2Ye00O100O1000000000000000000000000nL4e@LT?c0c@^OV?Q1b@oN[?^1[@bNe?_1Z@aNe?b1Y@^Ng?d1W@\\\\Ni?f1U@ZNk?j1Q@VNo?V2e_OjM[`0Z2a_OfM_`0[2`_OeM``0]2^_OcMc`0^2[_ObMe`0_2Z_OaMg`0`2W_O`Mk`0a2R_O_MPa02Z^OU2e0hM_a0V2_^OiMaa0\\\\2e04L`0AO00010O000fNR^OWOoa0d0X^OYOha0d0_^OWOca0f0c^OVO^a0e0i^OXOXa0d0l^O[OTa0c0P_O[OPa0`0W_O]Ok`0>]_O[Of`0`0U2N4K3N3NRh0JUXO00001N10000000000000000000000000000001O0000001N]Pe7\"}}, {\"image_id\": 38, \"category_id\": 1, \"bbox\": [144.0, 339.0, 56.0, 133.0], \"score\": 0.7722166180610657, \"association_id\": 6, \"light\": [-1.887503743171692, -2.315100908279419, 1.8166993856430054, 2.2711355686187744], \"segmentation\": {\"size\": [683, 1024], \"counts\": \"i]P39md09F7G7N101N1T\\\\OClb0<T]OJgb04]]ON_b04a]ONYb07d]OMTb0j1Q^OXMRa0o2j^OUMQa0Q3h^OTMUa0c3O1N101O00O1jNU_OlMl`0n1`_OjMa`0S2c_OkM_`0S2c_OlM_`0Q2c_OmM``0o1d_OnM_`0n1e_OnMjNLaa0S2l_OoMW`0n1i_ORNX`0m1h_OSNY`0l1f_OTN\\\\`0j1e_OVN]`0h1c_OXN^`0g1b_OYN_`0f1a_OZN_`0g1`_OYNa`0f1__OZNb`0e1^_O[Nd`0d1[_O\\\\Nf`0d1Y_O\\\\Nh`0e1V_O[Nm`0c1R_O]No`0c1P_O\\\\NUa0c1h^O]NZa0f1a^OZNda0c1Z^O]Nja0a1U^O^NSb0]1j]OcN[b0\\\\1j0001OO0=FO:D4KnUUa0\"}}, {\"image_id\": 39, \"category_id\": 1, \"bbox\": [183.0, 493.0, 610.0, 494.0], \"score\": 0.9999997615814209, \"association_id\": 1, \"light\": [-2.5135061740875244, -2.0168583393096924, 2.41232967376709, 1.7849619388580322], \"segmentation\": {\"size\": [1365, 1024], \"counts\": 
\"hld74_Z120001N100O101O000O1000000000000000000000000000000000000001O000000000000000000000000000000000000000000000000000000O100000000000000O1000000O1O1O1N2L4O1O100O1O100O10000O100O1O1O100O100O100000000O10000O10000O10000O10O100000O10000O10000O10000O100000000000000000O1000O100000000000000000000O10000000000O10000000O1000O10000000000O1000000000O1000O10000000O0100TGkNXXOU1gg0SORXOm0mg0YOnWOg0Qh0[OnWOe0Rh0\\\\OlWOd0Th0^OkWOb0Th0@kWO`0Th0AlWO?Sh0ClWO<Th0FkWO:Th0HjWO9Th0JkWO6Sh0MlWO3Rh00mWO0Qh03mWOMSh05lWOKSh08kWOHTh0:kWOEVh0<hWOEWh0=hWOCWh0?hWO@Yh0a0eWO@[h0`0eWO_O\\\\h0b0bWO_O]h0b0cWO]O^h0d0aWO\\\\O_h0e0_WO\\\\Oah0d0_WO[Obh0f0]WOZObh0g0]WOZOch0g0\\\\WOYOdh0g0\\\\WOXOeh0i0ZWOWOeh0j0ZWOWOfh0j0YWOVOgh0j0XWOWOgh0k0XWOUOhh0k0WWOVOih0k0VWOUOih0l0WWOTOhh0n0VWOSOjh0m0VWOROjh0P1UWOPOjh0R1UWOnNjh0T1UWOlNjh0U1VWOkNjh0V1aUOdMTLV1Zn0X1_UOoMlKi0dn0Y1^UORNlKd0gn0[1[UOTNkKb0in0[1[UOVNjK?kn0\\\\1YUOWNkK=ln0\\\\1XUOZNjK:mn0]1YUOZNiK9nn0]1XUO[NjK7on0^1WUO[NjK7on0^1VUO]NjK5Po0^1UUO^NkK4Po0^1TUO_NlK3Po0^1SUOaNlK1Po0^1TUObNlK0Po0^1SUOdNlKMRo0_1QUOeNnKKQo0`1PUOfNoKJQo0`1oTOhNoKHRo0`1nTOiNPLGRo0_1nTOlNoKESo0_1mTOmNPLDSo0_1mTOnNoKCSo0`1mTOnNQL@So0b1lTOmNRLARo0c1kTOkNTLBPo0d1lTOiNULCon0e1kTOgNWLDmn0g1kTOdNYLEkn0h1kTOcN\\\\LCjn0k1iTO`N_LEgn0m1iTO]NbLEdn0P2iTOZNdLFcn0P2hTOYNhLF_n0S2hTOVNjLG]n0U2gTOSNoLGYn0X2fTOQNSMEWn0\\\\2cTOnM\\\\MCPn0a2`TOlMcMCmm0b2]TOiMjMEhm0d2]TOdMoMmNWNg0\\\\o0j2dTOWMmM2^m0i2\\\\WOWMch0k2[WOUMeh0m2ZWOSMdh0P3nSOYL1g0Pl0R3lSO[L2c0Pl0U3lSOZL3`0Pl0Y3jSOYL6>jk0`3mSOUL7<ck0i3STOmK::_k0n3TTOjK=7_k0Q4RTOjK>5_k0S4PTOjKa03^k0U4oSOiKc02]k0W4nSOhKe00]k0Z4lSOgKg0O\\\\k0\\\\4kSOfKh0O\\\\k0]4jSOeKj0NZk0`4jSObKm0NXk0c4hSO`KP1MVk0g4gSO]KS1KUk0l4eSOZKV1JRk0R5dSOUKY1JQk0U5bSOSK]1Hoj0\\\\5^SOmJc1Gmj0c5YSOhJj1Elj0g5USOeJP2Dij0l5RSObJU2TOoLOjm0_6nRO_JY2oNoLI1:hm0a6lRO_J\\\\2lNUM3bm0e6jRO]J_2kNUM3am0g6iRO\\\\Ja2iNWM3^m0k6gROZJd2fNZM4Zm0n6eRO[Jf2aN^M5Wm0Q7bROZJ[3dNSj0V7]ROYJ`3`NSj0P9mUOPGRj0R9mUOmFTj0T9kUOlFUj0T9lUOkFSj0W9jTOiF_NOhl0Y9[TOVGlNAil0Z9YTOVGnN@hl0\\\\9WTOWGPO]Oil0]9UTOXGQOZOkl0_9RTOXGSOYOj
l0a9QTOXGTOVOll0c9oSOWGUOVOll0c9nSOXGml0i8QSOYGnl0h8QSOXGol0i8oROXGQm0i8mROXGRm0j8lROWGTm0j8jROXGUm0j8hROWGXm0j8fROWGZm0j8dROWG\\\\m0k8`ROWG`m0j8^ROWGbm0l8YROWGem0k8YROVGgm0l8VROUGjm0l8TROUGlm0j8VROUGim0j8ZROUGfm0i8^ROUGam0j8bROUG]m0j8gROTGXm0k8lROSGSm0m8oRORGPm0m8SSORG\\\\l0]9gSOaFUl0b9nSO]Fmk0f9UTOZFgk0h9\\\\TOWFck0i9_TOVF`k0i9bTOWF]k0h9fTOWFYk0i9hTOWFXk0h9jTOWFUk0h9nTOVFSk0h9oTOXFQk0f9RUOXFnj0g9VUOVFkj0e9[UOZFej0_9dUO^F]j0Q9[VOgFei0e8VWOTGkh0i8b4M3N2N101N2O1O1O1O1N2O1N2N2N2M3N2O1N200O1O1O10001N1kIVmN_4kR1XK`mNd4aR1UKimNh4XR1TKomNh4QR1VKVnNd4lQ1XKZnNe4gQ1WK_nNe4cQ1XKanNe4aQ1SKgnNl4cS1M2N3N1N2N2O1N2O1O0O2O1O1N2O001O1N101O1O0O2O1O1O1O1N101O1O001O001N101O000000000000O1000000O1000000O10000O100O100O100O1O100O100O10000O10O10O100000000000000000000O10000000000000000000000000000000000O1000000001O00000000000O101O000000000000001O000O100000001O0O100000001O0O10001O001O00001O001O001O00001O0000001O0O10000O2O0O10000O101N10000O100000000O2O0000000O10000000000O2O000O10000O2O000O100O2O0O100O2O0O2N2N2N2N2M4M2N2N2N2N2N2N101N2N2N1O3M2M4L6H9GkYc9\"}}, {\"image_id\": 40, \"category_id\": 1, \"bbox\": [681.0, 177.0, 316.0, 471.0], \"score\": 0.971116304397583, \"association_id\": 2, \"light\": [-2.1335456371307373, -1.6812665462493896, 2.0793142318725586, 1.5683385133743286], \"segmentation\": {\"size\": [680, 1024], \"counts\": 
\"e]T>1Re0>C;F6L2O1N2O1N2O1N2O1N2O1O10O0010O0i_O4S;KUDe1S;[NbDU2Z;kM`D_2[;aMbDe2[;[McDj2[;UMcDP3Z;QMcDS3\\\\;lLaDY3];hL`D]3^;cL]Dd3`;\\\\L]Dk3_;WL\\\\Do3b;RLZDS4h2gJ`4X1cHV4n1cK\\\\59aHX4l1fKa55_HX4k1iKc54PHb4X2^Kh5h6SJ]Ik5e6oIaIo5d6hIcIV6a6_IkI[6[6RIXJh6Z:K1N2O0O1O10O0000010O00000010O00000001O000000001O0000O10000O100O10000O10000O100O10000O10000O100000000O100000000000000O10000000000O1000000O100O100O100O1O1O1O1O1N2O1O100O1O100O100O10000000000O100000000000000000000O10000000000000O100O10000O100O1O100O1O1O1N2N2N2N2N2N2N2M3M3M3L4L4L5J5L4M3M3M3N2NnGbC^7\\\\<cHfC\\\\7X<eHiC[7U<eHmC[7R<_HUDa7i;ZH^Dg7_;ZHbDf7];YHfDf7X;[HiDe7V;YHmDg7Q;WHSEj7j:WHWEj7g:VHZEk7d:UH]Ek7a:VH`Ek7^:UHcEm7k;1O2N1O1O1O1O1VNgGcFY8[9iGeFX8X9jGhFW8U9kGkFU8S9mGmFT8Q9mGoFS8o8nGRGS8l8mGUGS8i8nGXGR8f8PHZGQ8c8oG_GQ8^8oGeGR8W8dGVH\\\\8[:1O1O001O1O101N1N2O1O1]NVGfFk8X9WGgFj8X9VGhFj8Y9VGfFk8Y9VGfFk8Y9WGeFi8[9XGdFi8\\\\9WGcFi8_9UGaFl8^9UGaFk8_9VG`Fk8`9UG`Fk8_9WG_Fi8_6oF\\\\K:SNh8c9ZGZFg8e9ZGZFf8g9[GWFe8i9\\\\GVFe8\\\\6lFSL`0`Md8Z6mFXLa0\\\\Mc8X6mF^La0YMb8V6nFcLa0VMb8R6QGhL>TMc8P6QGnL<QMd8n5QGRM=nLb8n5RGVM=jLc8l5QG[M>gLc8j5PG`M?dLc8h5PGdM?aLd8f5PGiMc0YLa8e5QGSNZ:eLZES4c0YO];0jD0[;HhD8];AeD>_;]OcDc0_;ZObDf0`;WOaDi0a;SOaDm0a;PO`DP1b;mN_DT1c;gN_DY1d;bN^D^1e;]N]DhNYNi1\\\\=[O^DjN^Nd1V=^O^DmNaNa1S=_O]DPOaN`1S=]O^DROaN`1R=\\\\O^DTO`N`1T=XO^DXO_N`1T=TO_D\\\\O]N`1V=PO_D@\\\\N_1W=lN`DEYN_1[=dN_DNWN]1_=[N`D7QN^1d=RN_D`0mM^1h=jM`Dg0iM]1j`0cNU_O]1k`0cNU_O]1k`0cNU_O]1k`0cNV_O[1k`0eNU_OZ1l`0fNT_OX1m`0iNS_OV1o`0iNR_OV1n`0jNR_OT1Qa0kNP_OS1Qa0mNP_OP1Sa0oNn^Oo0Sa0QOn^On0Ra0ROP_Ok0Qa0TOR_Oi0o`0WOS_Oe0Pa0ZOQ_Od0Pa0\\\\OQ_Ob0Qa0]Oo^Ob0Sa0]Oo^O?Ta0_Oo^O=Ua0Al^O=Ua0Cl^O;Va0Cl^O2hNB`b08n^ONaa0L[aa0\"}}, {\"image_id\": 40, \"category_id\": 1, \"bbox\": [393.0, 230.0, 313.0, 309.0], \"score\": 0.9999998807907104, \"association_id\": 1, \"light\": [-1.9895436763763428, -2.4926159381866455, 1.8701348304748535, 2.3436872959136963], \"segmentation\": {\"size\": [680, 1024], \"counts\": 
\"jVU83Re03M4M2O2O001N2O001O01N1O4L4LkU2J[jM1O2O1k\\\\ONQa03k^O9m`0Hm^O?Pa0Bl^Od0Qa0^Ok^Oh0Qa0ZOj^Ol0Sa0VOh^OQ1Ta0QOg^OU1Va0nNe^OV1l`0aN]^O=a0W1m`0@e^Oh0Ua0i1N1G9N2O1000001O0O10000UMjKbDX4Y;TL]Do3`;_LSDc3l;dLlC^3S<iLeC`3T<cLgC`3W<dLdC^3[<eLaC^3^<cL_C_3`<dL\\\\C^3d<eLWC^3g<dLVC^3j<dLRC^3m<eLoB]3Q=eLkB]3T=\\\\2OXOQCYIo<W6`CjI_<U6cCkI\\\\<U6dClI[<S6fCnIZ<Q6gCnIY<R6gCoIX<Q6gCQJX<n5iCSJV<l5lCTJR<l5oCUJP<k5PDVJn;k5SDUJk;l5UDUJi;k5YDUJf;l5YDUJf;k5[DTJe;l5[DUJd;l5\\\\DTJb;n5^DRJa;n5_DSJ^;P6bDPJ\\\\;R6cDoI[;S6eDlI[;T6fDlIX;V6gDkIX;V6hDiIX;X6hDhIW;Y6iDgIW;Z6hDfIX;Z6gDfIY;\\\\6fDdIZ;]6eDcI[;^6cDcI\\\\;^6cDcI];^6bDbI^;^6aDcI_;^6`DbI_;_6aDaI_;_6aD`I`;`6_DaIa;`6^D`Ib;`6^D`Ib;`6]DaIc;_6\\\\DbId;^6\\\\DbId;^6[DcIe;]6[DcIe;]6[DbIf;^6YDcIg;]6YDcIh;\\\\6XDcIi;\\\\6WDeIi;Z6XDfIi;Y6VDgIk;X6VDhIj;V6WDkIj;S6WDmIi;R6XDmIi;S6WDmIj;Q6VDPJj;P6VDPJi;P6XDPJh;P6XDPJh;P6XDPJg;Q6YDnIh;Q6YDoIg;Q6YDoIg;Q6YDoIf;R6YDnIh;Q6YDoIg;Q6YDnIh;R6WDoIh;R6XDnIh;Q6XDoIi;Q6WDoIi;P6XDPJg;Q6YDoIg;P6YDQJg;o5YDQJf;o5ZDQJg;n5ZDRJf;m5ZDTJe;m5ZDTJf;k5ZDUJf;l5ZDTJe;m5ZDSJg;l5ZDTJe;m5ZDTJe;m5[DSJe;m5ZDSJf;m5ZDTJf;l5ZDTJf;l5YDUJf;l5YDUJg;k5YDTJh;l5WDUJi;k5VDVJj;j5VDVJj;j5UDVJk;k5TDUJm;k5RDVJn;j5QDVJP<i5PDXJP<h5PDXJP<h5oCYJQ<g5oCXJR<i5mCWJT<h5lCXJT<h5kCXJV<h5jCXJV<h5jCWJX<h5hCWJY<i5gCWJY<i5gCWJZ<h5eCXJ\\\\<h5dCXJ]<h5bCXJ`<f5`CZJc<oNaCP6LPKi<eNeCX6ASK`=j4`BUKb=k4\\\\BVKe=i4[BVKg=U6001N101O0O101N100O2O001N10001O00000O101O000O2N100O101O0O10001O0O1O100O2O0O1000000O2O0O100O100000001N1000000[K^Ad2c>YMaAe2_>ZMdAd2\\\\>ZMhAd2X>[MkAc2U>\\\\MnAb2S>[MRBb2n=]MXB^2i=`M\\\\B\\\\2d=bMbBZ2_=dMUDi0l;UOUDk0l;ROVDn0k;POVDP1k;nNVDQ1m;kNUDU1l;iNUDW1l;gNUDX1n;eNSD[1o;aNSD^1P<^NRDb1U<UNmCk1h?O10000000000000000O1000000O10000O10000O10000O100O1O1O10000O100O100N2N2O1O1O10000O100O101N100O10001N3N7H7Gjgb6\"}}, {\"image_id\": 40, \"category_id\": 1, \"bbox\": [12.0, 268.0, 281.0, 242.0], \"score\": 0.9999997019767761, \"association_id\": 4, \"light\": [-2.4584155082702637, -1.9634439945220947, 2.3888773918151855, 1.8020002841949463], \"segmentation\": 
{\"size\": [680, 1024], \"counts\": \"_^8<lc0Im\\\\O;Pc0Gn\\\\O:Rc0Fn\\\\O;Qc0En\\\\O<Rc0Dn\\\\O<Rc0Dn\\\\O<Rc0Dm\\\\O=Rc0Dn\\\\O<Rc0Dn\\\\O<Rc0Dn\\\\O<Rc0Dn\\\\O;Sc0Em\\\\O;Sc0Fl\\\\O:Tc0El\\\\O<Tc0Dl\\\\O<Sc0Em\\\\O;Sc0Em\\\\O;Sc0Em\\\\O;Sc0Em\\\\O;Sc0Em\\\\O;Sc0Em\\\\O;Sc0Em\\\\O;Sc0Em\\\\O;Rc0Fn\\\\O9Sc0Gl\\\\O:Tc0Fl\\\\O:Tc0Fl\\\\O9Uc0Gk\\\\O9Uc0Fk\\\\O;Uc0Ek\\\\O:Vc0Fj\\\\O:Vc0Fj\\\\O:Vc0Fj\\\\O:Vc0Fj\\\\O:Vc0Fi\\\\O:Xc0Ei\\\\O;Vc0Fj\\\\O:Vc0Fj\\\\O9Wc0Gh\\\\O:Xc0Fh\\\\O:Xc0Fh\\\\O:Wc0Gi\\\\O8Xc0Hh\\\\O8Xc0Gi\\\\O9Wc0Gh\\\\O9Yc0Gg\\\\O8Zc0Hf\\\\O7[c0Hf\\\\O8Zc0Hf\\\\O7[c0Ie\\\\O7[c0Ie\\\\O7[c0Ie\\\\O7[c0Ie\\\\O7[c0Id\\\\O7]c0Hd\\\\O8\\\\c0Hd\\\\O7]c0Ib\\\\O8^c0Hb\\\\O8^c0Hb\\\\O8^c0Hb\\\\O8^c0Hb\\\\O7_c0Ia\\\\O7_c0Ia\\\\O7_c0Ha\\\\O9_c0Ga\\\\O8`c0H`\\\\O8`c0H`\\\\O8`c0H`\\\\O7ac0H`\\\\O8`c0H_\\\\O9ac0G_\\\\O9ac0G_\\\\O9ac0G_\\\\O9ac0G_\\\\O9ac0G_\\\\O9ac0G_\\\\O9ac0F_\\\\O;ac0E_\\\\O;ac0E_\\\\O;ac0E_\\\\O;ac0E_\\\\O:bc0F^\\\\O:ac0F`\\\\O:`c0F`\\\\O:`c0F`\\\\O9ac0G_\\\\O9ac0G_\\\\O9ac0G_\\\\O9ac0F_\\\\O;bc0D^\\\\O<bc0D^\\\\O;cc0E]\\\\O;cc0E\\\\\\\\O<dc0D\\\\\\\\O<dc0D\\\\\\\\O<Xd000O100000000O01000000000000O10O10000O0100000000000OQNMk^O3Sa00l^O0T?Eh@i03BP?L`@OmNe0c1@g>6n_O_O24HU1`1ROe>;m_O]O4`1Z1hNb>?l_O^O4^1]1dNa>P1n_O?_1bNV>MZ@Q3`1RMR>]3mAcLP>a3oA_Lo=d3PB[LQ>e3nA\\\\LR>c3oA]LQ>c3oA]LQ>c3oA]LQ>b3PB^LP>b3PB^LP>b3PB^LP>c3oA]LQ>d3nA[LS>f3lAZLT>f3lAZLT>g3jAZLW>e3iAZLW>h3hAXLX>i3gAWLY>j3eAWLZ>k3eAUL[>l3dATL[>o3cAQL]>P4bAPL]>R4bAmK_>T4`AlK`>U4^AlKb>U4]AkKc>V4\\\\AiKd>X4\\\\AhKd>X4\\\\AhKd>Y4[AgKe>Y4[AfKe>[4[AeKe>[4[AeKd>\\\\4\\\\AdKd>\\\\4[AeKd>\\\\4\\\\AdKd>\\\\4\\\\AdKc>]4\\\\AdKd>\\\\4\\\\AcKb>`4^A`K_>c4aA]K[>g4eAYKU>m4kASKU>m4jASKZ>j4fAVK^>f4bAZK_>e4`A\\\\Ka>c4_A]Ka>c4_A]Kb>b4^A^Ke>_4[A`Kh>^4XAbKi>\\\\4WAeKi>[4WAeKj>Z4VAeKk>[4UAeKk>Z4VAfKj>Z4UAgKk>X4VAhKj>V4XAiKi>V4XAjKh>V4XAjKh>U4XAlKh>S4YAmKf>S4[AmKe>R4\\\\AnKc>R4^AnKa>Q4aAoK^>Q4cAnK^>Q4cAoK\\\\>Q4dAPL[>P4fAPLY>P4hAPLW>P4iAQLV>n3lARLS>n3nAQLR>o3oAQLQ>o3oAQLQ>o3oAQLP>P4oAQLQ>o3oAQLP>P4PBPLo=Q4QBoKo=R4PBnKo=S4QB
lKo=U4QBkKm=W4SBiKl=X4TBhKk=Z4TBfKj=\\\\4VBdKi=]4VBcKk=]4UBcKj=^4VBbKj=^4VBbKj=_4TBbKl=^4TBbKl=^4TBbKl=^4TBbKl=^4TBbKm=]4SBcKn=\\\\4RBdKo=[4QBdKR>Z4nAfKS>Z4lAfKV>X4jAhKW>W4iAiKX>V4hAjKX>V4hAjKY>U4gAkKY>U4gAjK[>U4eAkK[>U4fAjK[>U4eAjK]>U4cAkK^>T4bAlK_>S4bAlK_>S4aAlKa>S4_AmKa>S4`AkKb>S4_AkKd>S4Z1M3M3M4L3F;N1N3N1N2Ok^OlLW`0c0n^Oh1j0hMX`0=R_Oi1f0jMY`0N__OW2;gMo`0W2Q1I8N002UNk\\\\Oa1_c0M4M3M4L4M3L4M3H:H7FPSU?\"}}, {\"image_id\": 40, \"category_id\": 1, \"bbox\": [1.0, 296.0, 85.0, 114.0], \"score\": 0.9826042056083679, \"association_id\": 5, \"light\": [-1.6882739067077637, -2.4285359382629395, 1.5311024188995361, 2.2635107040405273], \"segmentation\": {\"size\": [680, 1024], \"counts\": \"`P1a0ed05ZN]O^^Oh0]a0]O_^Og0_a0YO`^Oj0]a0XO_^Om0^a0UOi]O6Fm0]b0nNh]Ol13nMca06Y^Oo12kMea07Y^Oo1OlMfa06[^OP2MjMha06[^OR2LgMia08X^OS2OeMia08W^O]2ia09W^OQMia0o2W^OQMia0o2W^OQMia0o2W^OQMja0n2V^ORMja0n2V^ORMja0n2V^ORMka0l2U^OUMka0j2V^OVMka0i2U^OWMka0h2U^OXMla0h2S^OYMma0f2T^OZMla0e2T^O\\\\Mla0c2U^O]Mka0b2V^O^Mja0b2V^O^Mia0b2X^O^Mha0a2Y^O_Mfa0a2[^O_Mda0a2]^O_Mca0`2^^O`Maa0`2`^O`M_a0a2a^O^M`a0a2a^O_M^a0b2b^O^M^a0b2b^O^M]a0b2d^O^M\\\\a0b2d^O^M\\\\a0a2e^O_M[a0a2e^O_M[a0a2e^O_M[a0a2e^O_M[a0`2f^O`MZa0`2f^O`MZa0`2f^O`MZa0`2f^O_M[a0a2e^O_M[a0a2e^O_M[a0`2f^O`MZa0`2f^O`MZa0`2f^O`MZa0_2h^O`MXa0`2i^O_MWa0`2k^O_MUa0`2m^O_MSa0a2n^O^MRa0b2Z^O^M?OWa0c2Y^OiMQb0j2MYOS^OTNla0j1V^OfML;ma0o1W^OWNia0j1V^OVNja0k1U^OUNka0k1V^OTNka0k0]^OPO10ba0l0f^OmNL5_a0i0d_OVO^`0FQ^Oh0f1A_`0<f_OB[`03Q@Ko?5Q@JQ`04Q@KP`03R@LQ`01d2N2O00001Oeb^c0\"}}, {\"image_id\": 40, \"category_id\": 1, \"bbox\": [9.0, 288.0, 146.0, 140.0], \"score\": 0.9999776482582092, \"association_id\": 6, \"light\": [-0.5107294917106628, -3.7933154106140137, 0.45811617374420166, 3.7230846881866455], \"segmentation\": {\"size\": [680, 1024], \"counts\": 
\"a\\\\63od0OU[O1jd0700000000000000O1000000000000000000O1000000000O10000000000000000O100000000000O010000000000000O1000000000000jM:j^OFTa0e0Q^OQOK:Sb0g0n]OTOM4Pb0P1m]ORO1NQb0T1g]OSO6JRb0j1n]OVNPb0m1o]OSNea0Z2Z^OfMca0]2]^OcM`a0a2_^O_M_a0e1`^OgN1D]a0e1e^OeNOF[a0e1h^OdNMGZa0d1m^OcNIIZa0d1m^OcNHI[a0f1k^ObNJH[a0f1j^OcNKGZa0h1h^OcNNEYa0h1i^OeNMCZa0g1j^OfNLCZa0e1l^OiNHC[a0d1m^OkNGA[a0f1k^OkNJ_OZa0o2e^OQM[a0o2e^OQMZa0Q3e^OoLZa0R3f^OmLYa0V3g^OiLWa0e1j^ONUa0OP_O0o`00R_OGHeNUa0]1o^O\\\\N5l1k`0G\\\\_O2@aNSa0\\\\1^_O4@_NRa0\\\\1__O6_O\\\\NRa0_1__O5_O\\\\NRa0_1`_O4_O\\\\NPa0a1a_O3_O\\\\NPa0`1a_O5_O[NPa0`1a_O5@YNPa0a1`_O7e`0IZ_O9e`0G[_O8f`0HX_O:h`0FU_O=k`0CR_O`0o`0@n^Oa0Sa0@g^Oe0Za0d11O1O0O2O1O001O001O0001O1O1O100OXOf^OTMN6\\\\a0\\\\2\\\\_OaMc`0l1e^OSNR11X`0k1i^OoMR16T`0j1V@WNi?e1[@\\\\Nc?a1b@^N]?`1g@^NZ?a1i@[NX?e1U20100O1O10O01O00002N2O2L3QOW\\\\O;Rd0\\\\OP\\\\Oa0_d0I7GdhPb0\"}}, {\"image_id\": 40, \"category_id\": 1, \"bbox\": [266.0, 228.0, 172.0, 261.0], \"score\": 0.9999842047691345, \"association_id\": 3, \"light\": [-1.076859951019287, -2.7859010696411133, 0.9480077624320984, 2.6639180183410645], \"segmentation\": {\"size\": [680, 1024], \"counts\": 
\"lQa5;:Lnc0m0I8H:A;H8J5K5L2N3M2M4J5J6E<]OcLT_Ob3f`0c0M3O1N2M3L4J6L4K5N2M3M4L3O1N2O1O1O1O1O001O1O1i@SJR?n5n@RJR?n5n@RJQ?o5n@RJR?n5n@RJQ?n5PARJo>o5PARJP?S6OKQASJn>m5RATJm>m5SASJl>m5TATJl>k5UAUJj>k5VAVJj>j5UAWJj>i5VAXJj>h5VAXJj>g5VAZJi>g5WAYJi>f5WAZJi>g5WAYJi>f5XAZJg>f5YA[Jf>d5\\\\A\\\\Jc>d5^A\\\\Ja>c5aA]J^>d5bA\\\\J]>d5dA[J]>d5dA\\\\J[>d5eA]JZ>c5gA]JX>b5jA^JU>b5lA^JS>a5oA_JQ>`5oA`JQ>`5PB`Jn=b5RB^Jn=a5SB_Jl=a5UB_Jj=`5WBaJh=_5YBaJg=^5ZBbJe=^5\\\\BaJe=^5[BcJe=\\\\5\\\\BdJd=\\\\5\\\\BdJd=[5]BeJc=[5]BeJb=[5^BeJc=Z5^BfJb=Z5^BfJb=Y5_BgJa=Y5_BgJa=Y5^BgJc=Y5]BgJb=Z5^BfJb=Z5^BfJb=Z5^BfJb=Z5^BfJb=Z5]BfJd=Z5\\\\BfJd=Z5\\\\BfJd=Z5\\\\BfJe=Y5[BgJe=Z5ZBfJf=Z5YBgJg=Y5YBfJh=Z5XBfJh=Z5XBfJh=Z5XBfJg=[5YBeJg=[5YBdJh=\\\\5WBeJh=]5WBcJi=]5WBcJh=^5XBaJg=a5YB_Jf=b5ZB^Jc=e5]B[Ja=g5^BZJn<Z6RCfIm<[6SCeIl<\\\\6TCcIl<^6TCbIl<^6TCbIl<^6TCbIl<^6SCcIm<]6SCbIm<_6SCaIm<_6SCaIm<_6SCaIm<_6RCaIo<^6RCbIn<^6RCbIn<^6RCbIn<^6RCbIn<^6QCcIo<]6QCbIP=]6QCcIo<]6PCdIP=[6QCeIo<[6PCfIP=Y6QCgIP=X6oBhIR=W6oBiIQ=V6oBkIQ=X3cBPO<hMR=R3kBRO3kMS=R3kBRO3lMS=P3lBSO0nMU=l2nBUOMoMW=h2oBXOJPNY=d2PC[OGQNZ=^2UC_OBSN[=X2XCD]OTN\\\\=Q2]CKWOSN_=m1_CMSOVNb=g1_C0POXNe=d1^COPO\\\\Ni=]1[COSOaNh=X1\\\\CHXOnNh=m0WCI\\\\OTOd=m0S5]Od[O2`d0Dn[O3ni21ejM4L3M20L5JmeT<\"}}, {\"image_id\": 41, \"category_id\": 1, \"bbox\": [656.0, 153.0, 357.0, 470.0], \"score\": 0.9999627470970154, \"association_id\": 1, \"light\": [-1.6189939975738525, -1.946796178817749, 1.4708223342895508, 1.7114393711090088], \"segmentation\": {\"size\": [774, 1024], \"counts\": 
\"R^`?2o`0h0[E^O[:Q1YEVOa6C\\\\G`1m1oN]6N^G[1o1iN[68^GW1Q2cN_6<XG[1Q2[Nf6>SGb1j1UNQ7>iEhNBP3Y3nM[7>eEgNBV3U3iMc7=cEfN@[3T3dMk7:^EnN[OZ3W3aMP87[EVOUOY3Y3^MX81XEf3d1QMT9YOUEm3Z1SMa9POREY4j0QMU:eNPEd49oLi:[NmDn4NmLU;WNhDT5IjL^;WNaDk8_;m110000O10000O10001N10000O10O101O0O1RNZD\\\\Gf;`8bD\\\\G^;c8eDZG\\\\;e8hDVGY;j8lDQGT;o8REjFo:W9VEcFi:^9\\\\E\\\\Fe:d9_EXF`:j9m11O1O100O1O100O1O1O1N2N2N2M3M3M3M3K5H8E;I7K5M3L4L4K5I7E;I7L4N2L4L4J6J6K5L4N2O1N2O1O1O1O1O101O00100O1OeEiFh5V9WJlFi5T9UJnFm5P9QJQGR6n8lIRGW6m8gISG]6l8eF^Go0F_8Q9QFoGS1POo8W9_ETH]1eNW9U;eFkD]9V;`FjDb9Y;YFgDj9];oEcDS:b;gE]D]:e;]E[Df:f;VEZDl:h;QEWDQ;i;mDWDS;l;kDRDW;P<gDoCZ;S<dDlC\\\\;V<cDhC_;_<00000002O1N1O1O1O1O001O00001O000001O000000001O001O2N2N3L3N2N1O1N1mJXDhMh;n1WE\\\\Mj:[2kEYMV:b2WFTMj9i2_FPMb9n2eFlL\\\\9R3hFkL[9Q3jFjLY9S3mFeLZ9U3nFaLZ9Z3VHRKYKA\\\\<Y5nHTKR7h4oHYKS7d4lH_KT7_4mHaKU7\\\\4lHdKV7Y4kHfKX7V4iHkK[7o3gHPL^7h3fHXL]7b3fH]L]7^3fHbL\\\\7Y3gHgL\\\\7Q3iHoL^7Z1RJfNX6DSK<\\\\>00000000O1000O1000O100000O10O1000O1000O100000O1000O100000O1000000O01000000000O100000O10000000O100000000O1000000000000000000000000O100000000000000000O10000000000000000000000000000O10000000000001O00000000000000000000000000000000001O00000000000000000000000000000000O1000000000O100000O1000000O10000000000O1000000O10000000000000000O1000000000000O1000001N10001N2NUh7\"}}, {\"image_id\": 41, \"category_id\": 1, \"bbox\": [127.0, 189.0, 373.0, 395.0], \"score\": 0.9998842477798462, \"association_id\": 2, \"light\": [-1.6761510372161865, -2.3338568210601807, 1.533082127571106, 2.1675877571105957], \"segmentation\": {\"size\": [774, 1024], \"counts\": 
\"iXP3=e26da08k]O=ga0Mk]Oc0ka0Bj]Ok0Pb0YOf]OT1Sb0TO]]Ob1Tb0k2\\\\O:EY1hN7J7I5K3M4K7J7G6H9Fb0\\\\Oa0_O=E:E9J5L4N2N1N3N1N2N2N2N2O1O100O100000000000000001O00000O101O2N3M3M2N001O1N2O2N7Ij0WMSCeHP=Q7ZCmHg<l6aCQIa<g6gCWI]<`6jC^IY<Y6oCeIS<V6RDhIP<R6VDlIk;m5]DQJc;j5dDTJ];h5gDWJZ;e5iDZJX;\\\\5SEcJn:R5\\\\EnJe:l4aESKa:f4eEYK]:[4nEcKV:R4UFkKl9n3]FnKf9m3_FQLb9k3cFSL^9n0oAR1g4oM\\\\9g0YBS1]4UN\\\\9c0`BQ1V4ZN^9?bBT1R4ZNf96]B^1Pe0N3M5K`^OiNT<S1lCoNV<n0jCROX<k0hCVO^<b0dC^Ob<9aCG_b00O100000000O10000000000O10O1000000000O10O100000000000000O10O100000000000000000000000000000000O100000000000000000000000O100000000000000000O1000000000000O01000000000000000O01000000O1000000O10O10000000000000O10000000O10O10000000000000O10000000O100000000000O010000000000O1000000O10YXOL\\\\g04cXOO[g00fXO1Yg0OgXO1Yg0OgXO1Xg00hXO0Xg00hXO0Xg00hXO0Xg01gXOOXg02hXONXg02hXOOWg01iXOOWg01iXOOWg01iXOOWg01iXOOVg02jXOMWg03iXOMWg04jXOJVg06<101O00000O101O000000000000001O0O10000000000000000000O1000000000000000000O10000000000O1O1000000O100000000O1000000000000O100000000000000000000O1000000000000000000000000000000000000000000000000000000O10000000000000000000000O1000000O101O00001N10o`[<\"}}, {\"image_id\": 41, \"category_id\": 1, \"bbox\": [500.0, 242.0, 176.0, 297.0], \"score\": 0.9999781847000122, \"association_id\": 3, \"light\": [-2.430553436279297, -1.8302949666976929, 2.3610780239105225, 1.6761736869812012], \"segmentation\": {\"size\": [774, 1024], \"counts\": 
\"SXj;?cg07K4K4M3oYOZO^d0i0][OBZd0a0b[OISd0=f[O6ic01o[O>2fN[b0Q1]]Oe1^b0W2]^OcJQ`0l6L4N100O1O2N1O1N2N3N010O10O01000OO2M2M3O1N0J3013I7A>G:L4O2N2N3O02O1O1O3M6S@XIP>T7YA_I_>W8M1O1O1O1O000001O0O2O003L3M3M2N1L5K4K6B`0Bf@eHb>n6hASIY>h6iAZIW>b6kA^IW>\\\\5nBdJT=W5oBiJR=S5QClJS=n4PCRKS=g4QCYKP=c4SC]Kn<`4TC`Kn<[4UCdKn<W4UCiKm<S4UCmKm<P4TCPLm<n3TCRLn<k3SCULn<i3RCXLo<g3QCYLQ=d3PC[LT=`3nB`LS=_3mBaLT=^3lBbLU=\\\\3lBdLU=[3kBeLV=[1c_OOX3fNX=S1k_O1m2mN[=k0o_O3g2RO\\\\=g0R@2d2WO_=8\\\\@7Y2BQd00000000O2O000O10000000000O101O00000O10000000000000000O10000000001O00000O100000000O100000000O100000000O1000000O10000000O10000000000O10000O10000O1N2N2L4O3MSaV8\"}}, {\"image_id\": 42, \"category_id\": 1, \"bbox\": [291.0, 135.0, 91.0, 54.0], \"score\": 1.0, \"association_id\": 1, \"light\": [-2.182246208190918, -2.1106317043304443, 2.0846316814422607, 1.8721916675567627], \"segmentation\": {\"size\": [246, 478], \"counts\": \"SRV2c0R72N1O100O001O1TIXOi6h0WIYOf6i0[IWOc6j0^IVO`6l0`IUO[6P1dIPO[6Q1eIoNZ6R1fInNX6T1hImNV6T1jIlNV6U1iIkNV6V1jIjNU6]1O000O1000MmIdNR6]1nIbNR6a10N2001O000000OnI`NP6`1oIaNQ6c1O00000000000000001O000000000000000000000000000000GQJkNo5U1QJkNo5U1QJkNo5U1QJkNo5U1QJkNo5U1RJjNn5W1QJiNo5W1QJiNo5W1QJiNP6W1PJhNP6X1PJhNP6Y1PJfNQ6Z1oIeNQ6`11O2N1O1O3M3M1O1KbIPOc6o01J\\\\IXOe6f0\\\\IZO`7000001O000OWlf0\"}}, {\"image_id\": 42, \"category_id\": 1, \"bbox\": [172.0, 133.0, 71.0, 48.0], \"score\": 0.9999998807907104, \"association_id\": 2, \"light\": [-2.000549793243408, -2.218897581100464, 1.9622422456741333, 2.0665013790130615], \"segmentation\": {\"size\": [246, 478], \"counts\": 
\"e_Y13c70000001O0D0RI0m63RILm67RIHl6<JC\\\\If0[6ZO^I11l0_6WOaIi0_67O1O1L4O101N2O00KkIiNU6V1jIlNV6S1jInNV6P1kIQOU6o0jIROV6n0jISOU6l0kIUOU6k0kIUOU6k0kIUOU6V1000000000EkIUOU6k0kIUOU6k0kIUOU6k0kIUOU6l0kISOU6m0kISOU6W10GkIQOU6o0kIQOU6o0kIQOU6o0kIQOU6o0kIQOU6o0lIPOT6P1lIPOT6Q1kIoNU6Q1kIoNU6Q1kIoNU6R1kImNV6R1kImNU6U1jIjNV6V1jIjNW6V1iIiNX6Z11LgIiN]6S1cImN^6R1bInNa6o0_IQOb6n0^IROd6l0\\\\ITOg6l04HUI]OR7:PIFQ77QII^70000OQYh1\"}}, {\"image_id\": 43, \"category_id\": 1, \"bbox\": [757.0, 226.0, 508.0, 433.0], \"score\": 0.9998824596405029, \"association_id\": 1, \"light\": [-1.9497182369232178, -1.9512653350830078, 1.7668551206588745, 1.777619481086731], \"segmentation\": {\"size\": [853, 1280], \"counts\": \"^Pgc01dj01N100O2N1L5G9N2M5L<D<D3N2M3N3L4dYOoMnc0T2i[OTNTd0P2`[OZN^d0i1^[OZN`d0h1^[OZNad0h1\\\\[OZNbd0h1[[O[Ncd0g1\\\\[OYNcd0j1Z[OWNed0k1Y[OVNfd0P2S[ORNld0U2mZOlMRe0l3O1N2O1O1O1N2N1O2N1O1N2O2M2O100000000010O01O0g]O^JS<`0QDS5I]J\\\\M1h>c0PDo4MaJP<d0PDl4OaJP<e0PDi41bJn;g0oCh43cJk;g0PDf44eJk;g0nCf47cJi;k0lCc4;cJh;m0iCa4`0bJd;R1hC^4d0`Jb;V1gCZ4h0`J`;Y1dCX4l0`J^;[1cCV4P1_J\\\\;^1aCS4T1_JZ;`1_CR4X1^JY;`1^CQ4Z1`JW;a1\\\\CP4^1_JV;c1YCm3c1`JS;e1XCj3f1bJQ;g1UCh3k1aJP;j1PCe3R2aJm:n1lBa3Y2aJj:Q2iB]3`2bJf:T2eBZ3h2bJb:W2]B[3T3^J^:Z2WB[3]3[J[:]8hEcGW:[8lEdGT:[8nEeGR:Z8oEfGQ:X8QFhGn9X8SFhGm9W8TFiGk9V8XFhGi9W8XFiGh9V8YFjGf9V8[FjGe9U8\\\\FkGd9T8]FlGc9R8_FnGa9Q8`FoG`9Q8`FoG`9P8aFPH_9o7bFQH^9o7bFQH^9n7bFSH^9m7bFSH^9l7cFTH]9l7cFTH]9l7cFTH]9l7cFTH]9l7cFTH]9l7cFTH]9l7cFTH]9l7cFTH]9l7cFTH]9l7cFTH]9l7cFTH]9l7bFUH^9k7bFUH^9k7bFUH_9j7aFUH`9k7`FUHa9j7_FVHb9i7^FWHb9i7^FWHc9h7]FXHd9g7\\\\FYHe9f7[FZHf9e7ZF[Hg9d7YF\\\\Hh9d7WF\\\\Hj9d7UF\\\\Hl9c7TF]Hm9c7RF]Ho9c7PF]HQ:c7mE^HT:c7jE]HW:c7hE]HY:c7fE]H[:c7dE]H]:c7bE]H_:b7aE^Ha:a7^E_Hd:_7\\\\EaHg:]7XEcHk:Z7UEfHn:W7REhHR;V7mDjHU;T7kDlHV;T7iDlHX;S7hDmHY;R7gDnHZ;R7eDnH\\\\;R7cDnH^;R7aDnH`;R7_DnHb;S7\\\\DmHf;S7WDnHk;Q7TDoHm;Q7RDoHP<P7oCoHS<Q7lCoHT<R7kCnHV<R7iCnHW<S7hCmHX<T7gClHZ<U7dCkH\\\\<V7cCjH]<Y7`CgH`<Z7^CgHc<Z7[CfHe<[7ZCeHf<\\\\7YCdHg<]7XCcHh<^7WCbHi<_7VCaHj<`7
UC`Hk<b7SC^Hn<d7oB\\\\HQ=g7kBZHU=h7iBXHW=j7gBVHY=k7fBUHZ=l7eBTH[=n7cBRH]=o7bBQH_=o7_BRHa=Q8\\\\BoGd=V8WBjGi=\\\\8QBdGo=_8nAaGS>`8kA`GU>b8iA^GW>c8hA]GX>d8gA\\\\GZ>c8gA\\\\GZ>d8eA\\\\G[>e8dA[G]>f8aAZG`>g8^AYGc>m8VASGk>V9k@jFV?X9g@hFZ?Y9d@gF]?Z9a@fF`?Z9_@fFa?[9_@dFa?^9]@bFd?_9Z@`Fg?j9o_OVFQ`0V:101N10000RLS@UMm?f2\\\\@UMd?h2d@SM]?h2i@VMX?f2n@WMS?d2TAYMm>h0Y@ZMS1j1e>4oCHS<MYD1h;I]D6d;G`D7a;GaD8`;EcD:];FdD9];FdD9\\\\;GfD7[;HgD6Y;JlD1U;MPEOP;1UEJl:5VEIj:7WEGk:8WEFj:9WEFj:9WEFj::VEEk:;UEDm:;TEBn:?SE^On:b0`EoNa:R1aEiNa:W1aEfN`:[1`EcNa:]1_EaNb:`1_E^Nb:b1^E\\\\Nc:e1^EWNe:i1T71^^OjMb:V2YEhMZI6]a0R2VESNi:n1UERNm:m1RESNo:m1PETNP;l1nDVNR;j1mDVNT;i1kDXNV;g1jDYNW;f1hD[NY;d1gD\\\\N[;b1eD^N\\\\;b1cD^N_;`1aD`Nb;]1^DcNc;\\\\1]DdNe;Z1[DfNf;Y1ZDgNg;W1YDjNi;S1XDmNj;P1WDPOk;m0VDSOX<8oCH_c000000000000000O100000fVO@_h0`0_WOBah0>^WOCbh0=^WOCbh0=^WOCbh0=^WOCbh0<^WOEbh0;^WOEbh0;\\\\WOA\\\\O4Xi0;\\\\WOGdh09[WOHdh09\\\\WOGdh09\\\\WOGdh09[WOHeh08[WOHeh08[WOHeh08ZWOIeh08[WOHeh07\\\\WOIdh07[WOJeh06[WOJeh06[WOJeh06[WOJeh06[WOJeh06[WOJeh06[WOJeh06[WOJeh06[WOJeh06ZWOKfh05ZWOKfh05ZWOKfh04[WOLeh04[WOLeh04ZWOMfh03ZWOMfh03ZWOMfh03ZWOMfh03ZWOMfh03ZWOMfh03YWONgh02YWONgh02YWONgh02YWONgh02YWONgh02YWONgh02YWONgh02YWONgh02YWONgh02YWONgh02YWONgh02YWONgh02YWONgh02YWONgh02YWONgh02YWONgh02YWONgh02YWONgh01ZWOOfh01ZWOOfh01ZWOOfh01ZWOOfh01ZWOOfh01ZWOOfh01ZWOOfh01ZWOOfh01ZWONgh02YWONgh02YWONgh02YWONgh02YWONgh02YWONgh02YWONgh02YWONgh02YWONgh02YWONgh02YWONgh02YWONgh02YWONgh02YWONgh02YWONgh01ZWOOfh01ZWOOfh01ZWOOfh01ZWOOfh01ZWOOfh01ZWOOfh01ZWONgh02YWONgh02YWONgh02YWONgh02YWONgh02YWONgh02YWONgh02YWONgh02YWONgh02YWONgh02YWONgh02YWONgh02YWONgh02YWOMhh02YWONgh02YWONgh02YWONgh02YWONgh01ZWOOfh01YWO0gh00YWO0gh00YWO0gh00YWO0gh0OZWO1fh0OZWO0gh00YWO0gh00YWO0gh00YWO0gh00YWO0gh00YWO0gh00YWO0gh00YWO0gh0OZWO1fh0OYWO1hh0OXWO1hh0OXWO1hh0NYWO2gh0NYWO2hh0MXWO3hh0MXWO3hh0MWWO4ih0KXWO5hh0KXWO5hh0KXWO4ih0LWWO4jh0KVWO5jh0KVWO5jh0KVWO5jh0KUWO6kh0JUWO6kh0JUWO6kh0IVWO7jh0IVWO7jh0IVWO7kh0HUWO8kh0HUWO8kh0HUWO7lh0ITWO7lh0HUWO8kh0HUWO8kh0HUWO8lh0GSWO:mh
0FSWO:nh0ERWO;di0O10000000000000000O100000O10000000O100000000000O10000000O1000000000000000O1000000000O10000000O10000000000000000000000000O1000000000000000000000000O100O2O0O3Nlk;\"}}, {\"image_id\": 43, \"category_id\": 1, \"bbox\": [1014.0, 115.0, 258.0, 548.0], \"score\": 0.9956636428833008, \"association_id\": 2, \"light\": [-0.7568425536155701, -3.3325130939483643, 0.513113260269165, 3.141535758972168], \"segmentation\": {\"size\": [853, 1280], \"counts\": \"TY]j01dj00000001N10000O101O000]VOLeh04YWOLVO0ai04XWOOhh01XWOOhh01XWOOhh01XWOOhh01XWOOhh01WWO0ih00WWO0ih00WWO0ih00mVOLJ4Yi00mVOLJ4Yi00mVOLJ4Yi00mVOLJ4Yi00mVOLJ4Yi00mVOMI3Zi00mVOMI3Zi00mVONH2[i00lVOOI1[i00lVOOI1[i00lVOOI1Zi0OoVO0G1Zi0OoVO0G1Zi0OoVO0G1Zi0OoVO0G1Zi0OoVO1F0\\\\:7Q4HmA1F0S:g0S4XOTB1F0P:P1P4oNYB2GOn9W1m3hN^B3FNm9[1n3cN_B4FNl9^1m3_NbB6DMl9`1m3]NcB6DMj9c1n3ZNdB6DMi9f1m3WNfB7BMi9i1m3SNhB7BMg9l1n3PNiB?X9d1m3mMjB`0W9h1l3hMmB`0V9k1k3eMoB`0V9l1j3dMPC`0U9o1i3aMRC?V9Q2g3`MSC?V9R2f3_MTC?U9T2f3]MUC?U9U2e3\\\\MVC?U9V2d3[MWC?T9X2c3ZMYC>T9Y2b3YMZC>T9Z2a3XM[C>T9[2`3WM\\\\C>T9\\\\2_3VM]C>T9^2]3TM_C>T9`2[3RMaC>T9b2Y3PMcC>T9c2X3oLdC>T9e2QJmL^50]G>T9g2lIoLb5L^G>T9i2hIPMe5I_G>T9l2bIRMi5CbG?S9P3[IRMo5_OcG?S9S3TITMU6ZOdG?S9V3nHUMZ6VOeG?S9X3jHUM_6SOdG`0S9Y3gHWMa6POeG`0S9\\\\3bHVMf6nNeG?T9`3\\\\HUMk6lNeG?T9f3THQMS7jNeG?S9W4bGcLf7gNeG?S9g4aFdLg8VNeG?S9f6XOkHeG?S9f6XOkHeG?S9f6XOkHeG?S9f6XOkHeG?S9f6XOkHeG?S9f6XOkHeG?S9e6YOlHdG?S9e6YOlHdG?S9e6YOlHeG>R9f6YOlHeG>R9f6YOlHeG>R9f6YOlHeG>S9f6WOlHfG>S9g6VOkHgG=T9h6XIQJi1jNkK=T9i6jHhJl1RNVL=T9j6dHRKl1gM\\\\L=T9k6aHUKm1cM^L=T9m6]HWKo1_M`L=U9n6XHYKR2\\\\MaL=W9o6QH[KV2YMaL>Y9R7gG]K]2SMcL>[9V7RGgKo2eLdL>^9^;lITDfL>`9^;hITDhL>c9^;bITDkL>e9^;^ITDmL>g9^;ZITDoL>i9^;VITDQM>m9\\\\;PIVDSM>X:S;cH_DUM>g:f:RHlDWM>o:a:gGQEZM>S;_:aGSE\\\\M>V;_:[GSE_M>W;_:YGSE`M>X;`:fGaE[8_:dGaE^8^:aGbE`8^:^GcEc8^:[GbEf8^:YGbEg8_:WGbEk8]:TGcEm8^:QGbEQ9`:kF`EW9e:bF[E_9k:ZFUEh9o;QERDP;n;oDRDR;m;nDSDR;n;mDRDS;o;lDQDU;o;jDQDV;P<iDPDW;P<hDQDX;P<gDPDZ;o;fDQDZ;P<eDPD[;P<eDPD[;P<eDPD[;P<eDoC\\\\;Q<dDoC\\\\;Q<dDoC];i7UDXK>oL];
e7^DVK6UM\\\\;c7bDVK2VM];b7dDWKNXM_;`7dDWKMYM`;_7dDXKKYMc;]7dDXKI[Me;Z7dDZKG\\\\Mg;X7cD[KF]Mj;U7aD]KE]Mm;T7`D]KC_Mn;S7`D]KB`Mn;S7cDZK_OcMo;S7dDWK]OeMP<T7fDTKZOhMQ<T7hDPKWOlMQ<T7lDlJSOPNR<T7nDhJPOTNR<T7QEeJmNWNS<S7REcJlNZNS<R7SEbJjN\\\\NT<R7TE_JhN_NV<P7UE]JeNdNY<l6ZEXJ]NlN\\\\<i6_ERJVNUO]<g6cEnIPNZO`<f6dEkImM_O`<f6dEiIlMAa<e6eEgIkMDb<d6dEfIjMFc<d6eEbIiMJd<d6fE]IfMOg<c6mEQI]M<i<b6SG^Io8b6oF^IS9b6kF]IW9c6hF]IX9d6gF\\\\IZ9d6dF]I\\\\9c6dF]I]9c6bF]I^9c6bF]I_9c6`F]I`9d6_F\\\\Ib9c6^F]Id9c6ZF]Ih9c6VF]Im9b6QF^IR:b6kE^IX:b6eE^I]:a6bE^I`:c6^E]Id:b6[E^Ig:`6YE`Ii:^6WEbIn:Y6REgI_;h5`DYJi;^5WDbJo;X5QDgJS<V5mCjJU<U5jCkJY<R5gCnJ\\\\<P5cCPKb<l4]CTKW=Y4hBgK^=T4aBlKc=d1h^OMd3_Ng=X1P_O8X3`Nj=S1S_O=Q3`Nn=m0W_Oc0i2`NU>b0Z_Om0`2aN^>O__O_1R2bNi`0[1V_OeNn`0X1Q_OgNRa0W1m^OjNUa0U1j^OkNWa0U1h^OkNXa0U1h^OkNYa0T1g^OlNYa0T1g^OlNYa0T1g^OlNZa0S1f^OlN[a0T1e^OlN\\\\a0S1d^OmN\\\\a0S1d^OmN]a0R1c^OnN^a0Q1a^OPOaa0o0^^OQOda0m0\\\\^OROha0k0X^OUOja0i0V^OWOka0h0U^OXOma0f0S^OYOna0h0Q^OXOPb0g0P^OYOPb0g0P^OYOQb0g0n]OYORb0g0n]OXOSb0h0m]OXOTb0h0k]OXOUb0i0j]OWOWb0h0i]OXOXb0g0h]OXOYb0h0g]OXOZb0g0f]OYO\\\\b0e0d]O[O]b0d0c]O\\\\O_b0c0`]O\\\\Ocb0b0]]O^Oeb0?\\\\]OAgb09]]OEhb03\\\\]OLhf0N]Q6\"}}, {\"image_id\": 43, \"category_id\": 1, \"bbox\": [286.0, 178.0, 470.0, 489.0], \"score\": 0.9476282000541687, \"association_id\": 3, \"light\": [-1.5145964622497559, -1.4551153182983398, 1.4413031339645386, 1.3213303089141846], \"segmentation\": {\"size\": [853, 1280], \"counts\": 
\"_i^71cj03N1O000O2O000O10000O1000001O0000000O100000000000000000O1000000000000000000000000000000000000000000000O100000000000000000O100000000000000000000000000O100000000000000000000O1000001O0000O1000000000O10000000000000000000000O10000000000000000000000000000000000000O1000000000000000000000000000000O1000000000000O10000000O100000O1000000000000000000000000O100000000000000000000O10001O00000000000O1kK=X]OCbb0g0Z]OYO`b0P1]]OPO^b0X1_]OhNZb0b1c]O^NTb0o1h]OQNPb0Y2n]OgMja0c2T^O]Mha0i2V^OWMfa0n2Y^ORMca0T3[^OlL`a0Z3_^OfL\\\\a0b3a^O_L[a0f3c^OZLZa0l3c^OTL[a0P4c^OPL<JT>[4^AkK63S>]4dA`K1<P>a4lASKOa0P>d4mAlJMg0R>d4mAeJMl0R>e4oA^JMo0R>g4PBXJKU1R>j4oAQJH]1R>n4QBdI_Ol1\\\\>k4nAXISOa2l>`4cAXLZ>o3WA\\\\Lf>k7M3M3O2M2O1O1O1N2N2M3M3N2M3^NTDWDP<d;dDiC`;R<d1M3N2O2M2O1O1O1O1000000O10000O1O1O1O1N2O10000000iL]E\\\\Fc:V9QFdFo9Z9XFaFg9]9aF^F^9`9UEgDQ1d1j9a9YEkDR1_1d9b9\\\\EnDZO1l1Y1^9e9_EUEV1S1[9f9aEWEV1Q1Z9d9cE\\\\ES1o0\\\\9`9dEaER1l0]9X9jElEj0k0^9S8lFRGGj0^9f7WGaG]Og0]9d7YGeG[Of0\\\\9b7\\\\GhGZOc0[9b7^GlGXO`0Z9`7bGPHWO=X9]7fGVHUO9V9\\\\7iG\\\\HSO6T9[7lG_HRO3S9[7nGbHPO2R9Y7QHeHnN0S9h2XDP1T4XLcNMR9b2QE8j3YMTNKR9]2[E2h3fMlMIR9\\\\2`EMg3nMh9S2fBIe3TNf9Q2jBDc3[Nc9P2oB_O`3aNa9o1TCZO]3gN_9n1XCWOY3lN_9l1[CTOX3PO^9i1^CUOT3RO^9g1cCTOo2UO_9c1lCROd2[Oa9_1TDPO[2Aa9\\\\1ZDPOU2Db9Y1^DPOP2Gd9V1cDnNh1Lh9P1gDoNb11n9f0eDWO]13V:6fDFT14T?Kl@5U?Jk@6U?Jj@7U?Kj@5V?Li@4W?Mh@3X?Ng@2Y?Ng@2Y?Of@1[?Mf@3\\\\?Jd@7md00oJGf_O9U`0Ni_O2W`0Oh_O1X`00g_O0Y`02e_ON\\\\`02c_ON]`03b_OM^`05`_OK``07^_OIb`09\\\\_OFe`0>W_OBi`0`0U_O@l`0`0S_O@m`0a0R_O_On`0c0P_O]OQa0b0o^O^ORa0a0m^O@Ua0?j^OAXa0=h^OCZa0<e^OD]a0:c^OF]a0;b^OE^a0;b^OD_a0=`^OC`a0>_^OBaa0>_^OBaa0?^^OAca0?\\\\^OAea0>[^OBfa0=Z^OCga0<Y^ODha0<W^OCja0=V^OCka0;V^OEla06W^OJja01Y^O0ja0IZ^O7Rf000000000O101O00000000000000000O1000001O000000O10O10000000000000000000000000000O10000000000O101O0000000000000O10O1O1000000O10000000000000000000000000000000000000000000000000000000000000000000000000000000000000O1000001O000000001O00enc=\"}}, {\"image_id\": 43, \"category_id\": 1, \"bbox\": [729.0, 598.0, 
153.0, 69.0], \"score\": 0.9980952143669128, \"association_id\": 4, \"light\": [-3.1853725910186768, -0.8079556226730347, 3.088606595993042, 0.5822269320487976], \"segmentation\": {\"size\": [853, 1280], \"counts\": \"mkob0m0fi06I4J6N2M4L3O0O2N1O2O0O2O0O100O100O100O2O000O1000000O1000O10O100000000000000000000000000000000001O001O001O001O1O1O1O001O002N1O1N200MRWO]Noh0b1QWO^NQi0_1oVObNRi0]1nVOcNSi0[1nVOeNSi0Y1nVOgNSi0W1mVOjNUi0R1mVOoNTi0m0mVOTOZj000000O1000000000000O100000000000O100000000000O1000000000000000000000000000000000000000000000000000O1000000000000000000000000000000000000000000O100000000O2NclZ:\"}}, {\"image_id\": 44, \"category_id\": 1, \"bbox\": [99.0, 193.0, 282.0, 167.0], \"score\": 0.9999997615814209, \"association_id\": 1, \"light\": [-1.6002850532531738, -2.169642686843872, 1.4627466201782227, 2.0142343044281006], \"segmentation\": {\"size\": [480, 640], \"counts\": \"Sh^12n>0O100O2O0O100O1O101N100O100O2O0O100O1O001O1I7I7K5N2O100O100O1N101O1O1O1O1O100O01000000000O010000O1000O10O100000O01000O10O10O1000000O0100000000O01000O1000O01000000O0100000O010000O1000O10O100000O100000O0100000000000O01000000O01000O10O100000000O010000O01000000O0100000000000O01000000O10O1000O10000000O1000O10O10000000OiMkNfFT1V9ROhFn0V9UOjFj0S9ZOmFe0Q9_OmFa0R9AQFVOLX1R:EnEVOOU1R:L[ElN27`0Q1S:g0mEYOQ:i0oEWOQ:i0PFVOo9k0QFUOn9k0SFUOl9l0TFTOl9l0TFTOl9>`E`Nd0R1k9a0^E_Nf0o0m9o0SFQOl9Q1SFZNXO@1<d:j1RFZN_OL_:j1RFYNAL^:j1QFZNAL^:j1QFYNBM]:j1QFYNCL\\\\:k1QFXNDM[:j1RFYNCM\\\\:h1RFZNCN[:g1SF[NBM\\\\:e1UF]N@N[:a1YF`N]OOZ:j0_EmNV18SO0Z:g0QGXOfN1Z:e0QGZOeN1\\\\:a0PG^OeN1a:8mFGbN0V=0jB0V=OkB1U=OkB1U=OkB1U=NlB1U=OkB1U=NlB2T=NlB1U=OkB1U=OkB1U=NlB2T=NlB1U=OkB1U=OkB1T=OmB1T=NlB2T=NlB2T=NlB1U=NkB3U=MkB2V=NjB1Y=LhB3S>O1000O100O10O10000000O1000O1000O0100000O10O10000O1O1000000OWO0dB0\\\\=1cBO]=1cBO]=1cBO]=1cBO]=1cBO]=1cBO]=1cBO]=1cBO]=1cBO]=1k0OO000eY31ZfL000000000000000000O10000000[Si3\"}}, {\"image_id\": 45, \"category_id\": 1, \"bbox\": [441.0, 282.0, 327.0, 285.0], \"score\": 0.9999969005584717, 
\"association_id\": 2, \"light\": [-2.0102410316467285, -2.2382113933563232, 1.9162917137145996, 2.0559146404266357], \"segmentation\": {\"size\": [683, 1024], \"counts\": \"gcV9:Vd0n0A`0SO^N\\\\]OR2Tb0k0F9M4K4L5G9H8L3M3M3L5K6K5L2N2L5K7I5M2M3L5J8I6K4L4M4L3M3N2M2M2N3M2N2O1O2O0N2O2M2N2OO01O1O1O2M2N4K5M2O1OLTClGi<T8XCmGg<R8[CnGb<T8_ClG`<T8aCmG]<T8cClG]<S8dCmG[<T8eClG[<T8eClGZ<U8fCkGZ<U8fCkGZ<U8fCnGW<Q8jCoGV<Q8jCoGV<Q8jCoGV<Q8jCPHU<P8kCPHU<P8kCPHU<P8kCPHU<P8kCPHU<P8kCPHU<Q8jCoGV<Q8kCnGU<S8jClGW<T8iClGW<U8hCkGX<V8hCiGX<X8gChGY<Z8eCfG[<[8dCeG\\\\<\\\\8dCcG\\\\<^8dC`G]<`871O1O001O1O001O1O001O001O00000000O101N1N2\\\\HiCQ6Z<mIkC`5CfIf<h0lCY5LjI_O0b<k0XDT5KUJm;f0[DQ5JZJk;c0]DP5J^Jh;a0bDk4IdJf;>fDg4HlJb;:jDe4FRKa;7lDd4DUKb;3mDe4CXKc;NnDf4A\\\\Kf;GnDb4DgKa=n3eBRL^=h3eBXL^=d3cB\\\\La=_3`BaLf=X3[BhLj=Q3XBoLk=l2WBTMl=i2SBXMP>g2nAYMX>b2gA^Ma>[2^AeMf>W2ZAiMh>V2WAjMk>S2VAmMn>n1SARNV?d1k@\\\\N[?]1f@cN]?Y1d@gNa?Q1b@oNg?d0]@\\\\Oib000000000000000000O10000000000000000000O10000000000000000000000000000000O10000000000000000000000000000000000000000000000000000000O100000O100000000000000000000000O10000000000000000000000O100000001O0000000000000O1000000000000000000000000O10000000000000000000000O1000000000000000000O100000000000000000000000000000000000000000000000000000000000001O00000000000000001O00000O101OeVZ5\"}}, {\"image_id\": 45, \"category_id\": 1, \"bbox\": [781.0, 241.0, 242.0, 243.0], \"score\": 0.9999998807907104, \"association_id\": 1, \"light\": [-2.714564800262451, -1.7135803699493408, 2.581700325012207, 1.5106042623519897], \"segmentation\": {\"size\": [683, 1024], \"counts\": 
\"cZY`02Ue07K4K5L3K6I6L4M3M2M4J6]Od0J6K5L4L4Lg0XO9@<K4N2N2M3L4H8J5M4M3L3M2]Od0E;M3M201O1N2O1N2N2O1O1O001N2O1N2O00100O10O10O100O1O001N2O1O10002M2O0000001O000000FWBRIi=n6WBRIi=m6WBTIi=l6WBTIi=l6XBSIh=m6XBSIh=l6YBTIg=l6YBTIg=l6YBTIg=l6YBTIg=l6YBTIg=l6YBTIg=l6YBSIh=m6XBSIh=m6WBTIi=l6WBTIi=l6WBTIi=l6WBTIi=l6WBTIi=l6WBSIj=n6UBQIl=V71N2000000000000001O000000001O01O1O101N100O0010O01O0^OUBdIl=Y6VBhIj=U6XBkIi=Q6ZBoIh=k5\\\\BUJf=d5_B\\\\Jb=`5aB`J`=^5aBbJ`=\\\\5aBdJ`=Z5aBfJa=V5aBjJa=o4dBQK]=h4iBXKX=e4jB[KW=b4kB^KU=a4lB_KU=^4mBcKT=V4QCjKQ=a3bC_L_<m2TDSMm;h2WDXMj;d2YD\\\\Mh;a2ZD_Mg;\\\\2]DdMe;U2`DkMb;Q2`DoMa;n1aDRN`;l1aDTN`;k1`DUNa;j1`DTNc;k1\\\\DUNg;i1XDWNl;f1RD[NQ<d1mC\\\\NT<d1kC\\\\NW<c1hC]NZ<c1dC]N_<b1_C^Nl<Z1QCfNQ=Z1mBfNT=[1jBeNX=Z1gBfNZ=Z1eBfN]=Z1aBfNa=Z1]BfNe=Z1YBfNh=[1VBeNk=\\\\1SBdNn=\\\\1QBdNQ>\\\\1mAdNT>]1jAcNW>^1gAbNZ>`1cA`N^>`1aA`N_>a1`A_Na>_1`AaNb>[1`AeNf>k0dAUOYb000000000000000000000000000O10O1000000000000000000000000000000O1000000000000000000000000000O100000000000000000000000000O2O0000001Nd6\"}}, {\"image_id\": 46, \"category_id\": 1, \"bbox\": [2.0, 172.0, 530.0, 391.0], \"score\": 0.9999979138374329, \"association_id\": 2, \"light\": [-1.732306718826294, -1.964768409729004, 1.620670199394226, 1.8084466457366943], \"segmentation\": {\"size\": [683, 1024], \"counts\": 
\"Vl12Xe02O00000O1000000O1000000000000000000O10000000000000000000000000000O10000000000000000000000000000000000000000O10000000000000000000000000O1000000000000000O1000000000O1000000000O100000000000000000O10000000O10000000O10000000000000000000O100000O10000000000000O100000000000000O10000000000O10000000O100000000000000000O0100000000000000000000O0100000000000O1000O1000000000000000O1000O10000000O10000000000000O1000O10000000O100000O1000O1000000000000000O1000O1000000000O10000000O1000000000000O0100000000000O1000000000000000O10O1000000000000000000O100000000000000000000O100000000000000000000O1000000000000000O01000000000000O10000O1000000O10000000000000000O100000000000000000000O10000000000000EY[O1hd0OY[O0gd0OZ[O1fd0OZ[O1ed00\\\\[OOdd01\\\\[OOdd01\\\\[OOdd00][O0cd00][O0cd00][O0cd00][O0cd00][O0cd00][O0cd0O^[O1bd0O^[O1bd0O^[O1bd0O^[O1bd0O^[O1bd0O^[O1bd0O^[O1bd0O_[O0ad00_[O0ad00_[O0ad0O`[O1`d0O`[O1\\\\?9QEFcK1Z?>PEAfK1Y?a0oD^OhK1Z?`0nD_OhK1[??mD@hK1\\\\?>lDAhK1\\\\??kD@iK1]?>jDAiK1^?>hDAjK1^?>hDAjK1^?>hD@kK2]?>hD@kK2\\\\?>jD@jK2\\\\?>jD@jK2\\\\?>jD@jK2\\\\?>jD@jK2]?;kDChK2^?8lDEgK3b?1iDLeK3^d0Mb[O3^d0Mb[O3^d0Mb[O3^d0Mb[O3^d0Lc[O4Z?K]E1YK4Q?5eEGZK4l>;iEA[K4j>>jE^O\\\\K4h>a0kE[O]K4g>c0kEXO_K5d>f0lEUO`K5a>k0ZFUOd9P1YFPOe9S1ZFmNd9W1ZFiNe9X1[FhNd9Z1[FfNd9[1\\\\FeNb9]1^FcN`9`1_F`N_9b1aF^N]9d1cF\\\\N\\\\9e1dFZN\\\\9h1cFXNm6LcFn1_2VNj65`Fh1e2SNh6=\\\\Fc1k2PNe6g0VF_1S3jMd6Q1mE[1^3dMd6V1dE^1f3\\\\Mf6[1PEi1]3hLRM4a:Q5WHQKWMNa:S5PFeKHCEEc:T5cEWLHWO1^Oc:V5`EbL]OoN`0YOc:X5[EgM1QMc:[5XEfM5oLb:S9^EmF`:U9`EkF^:W9bEiF\\\\:Y9dEgFY:\\\\9gEdFV:_9jEaFS:b9mE^FW9^:iFbEU9`:kF`ET9a:lF_ES9b:mF^ER9c:nF]ER9c:nF]ER9b:oF^EQ9a:PG_EP9`:QG`Eo8]:TGcEl8[:VGeEj8Y:XGgEh8X:YGhEg8V:[GjEe8T:]GlEc8i8]FTHT1SO_8^8RGaGS11k7\\\\8UIdGk6[8VIeGj6Z8WIfGj6W8XIiGj6S8XImGi6P8YIPHk6i7XIWHm6b7UI^Ho6]7RIcHR7X7oHhHV7R7kHnHZ7l6gHTI\\\\7g6fHYI\\\\7b6gH^IZ7_6hHaIY7]6hHcIY7\\\\6gHdIY7[6hHeIY7Z6hHeIY7Y6hHgIY7X6gHhIY7W6hHiIZ7U6fHkI[7S6fHmI[7Q6fHoI[7X1jDZ3l3^K[7V1kDY3l3aKY7V1kDW3n3cKX7T1lDW3m3eKX7R1nDW3j3gKX7P1TEV3c3iK[7o0dEg2R3YLZ7P1VFU2`2kL[7n0
[FS2i=kM[BS2e=kM]BT2e=hM^BY2d=_MaBb2b=SMfBl2m?1O101N110O108H6J0N1000102N3M2M1N1O105L7I7IN010O0O2N5K4L3M4L3lLh^OP2[a0eMQ_OU2Yb0J6J5K3M2N2N3L9H>B7H5L2M4KeeW:\"}}, {\"image_id\": 46, \"category_id\": 1, \"bbox\": [532.0, 529.0, 20.0, 17.0], \"score\": 0.8657422661781311, \"association_id\": 3, \"light\": [-1.9185662269592285, -1.1482255458831787, 1.7994728088378906, 1.103022575378418], \"segmentation\": {\"size\": [683, 1024], \"counts\": \"m[S;1Ze0000O100C8W[OHhd0;V[OEid0<W[ODid0<W[ODid0=W[OBid0>21O00000000O2N2O1MfYj9\"}}, {\"image_id\": 46, \"category_id\": 1, \"bbox\": [144.0, 190.0, 275.0, 223.0], \"score\": 0.8357560038566589, \"association_id\": 1, \"light\": [-1.8217592239379883, -0.9243718385696411, 1.7434087991714478, 0.9186803102493286], \"segmentation\": {\"size\": [683, 1024], \"counts\": \"[^P31Ze01O000000000000000O1000000000000000000000000000000000O10O10000000000000000000000000000000000000000000000O01000000000000000000000000000000O100000000000000O0100000000000000000000000000000000000000000000000000O1000000000000000000000O1000000000O100000000000000000000000000000000000000O0100000000000000000000000000000000000VJ>kEBP:j0hEWOV:n0^BQOa11P<Q1PBROkN4R3IR<T1mAKc1QO_<V1lANa1lNb<X1lAMa1kNc<Y1mAK_1lNb<\\\\1PBF^1mN[<g1\\\\BVOX1SOZ<k1^BPOX1UOY<k1dBlNS1YOX<k1lBeNm0@V<\\\\1_AlNb15i0CU<[1dAkNb12f0HS<\\\\1dAlNj1G`01R<\\\\1cAPO[4DR:^1`AQO\\\\4AT:m0fANU4UOU:j0UBEf3AV:g0XBFb3CW:e0ZBE`3FV:d0]BC^3IV:c0bB]OY30W:b0SG^On84[A2Z1Il22k:5WBNj2MQ;3VB0h2MRa00lKMPC3];MRB0a23];NUF2k9OTF1l90SF0l92SFNn90SF0d?00000nL3f@MZ?8a@H_?<]@Cd?a0X@_Oh?c0V@]Ok?d0S@\\\\OP`0c0n_O]OS`0d0k_O\\\\OV`0d0i_O\\\\OY`0d0e_O\\\\O^`0e0^_O[Oe`0h0U_OXOm`0h0Q_OXOQa0h0m^OXOUa0h0i^OXOYa0n0_^OROca0U1T^OkNna0U1P^OkNSb0U1j]OkNZb0V1a]OjNab0X1[]OhNfb0X1Y]OhNib0W1V]OiNmb0U1R]OkNPc0T1o\\\\OlNRc0S1n\\\\OmNSc0T1k\\\\OkNWc0U1h\\\\OkNXc0V1g\\\\OjNZc0U1f\\\\OkN[c0T1e\\\\OlN[c0S1f\\\\OlN\\\\c0S1d\\\\OmN\\\\c0R1?1N1O1O1O10N2N2Ck[OBWd0<o[O^OTd0`0<N2Lmo1I^PN00N3NoP21onM7iNGQ]O<oc0O001OeNJa]O6^b0Kb]O8Zb0Hf]O:Wb0Hi]O9Sb0Jm]O6Rb0Jo]O5Rb0
JP^O4Qb0KQ^O2Qb0JW^O0ja01f13L4G81M^Wc<\"}}, {\"image_id\": 47, \"category_id\": 1, \"bbox\": [356.0, 179.0, 148.0, 90.0], \"score\": 1.0, \"association_id\": 1, \"light\": [-1.8152356147766113, -2.233940362930298, 1.773349404335022, 2.06204891204834], \"segmentation\": {\"size\": [394, 583], \"counts\": \"nUY4m0[;3N2N2N1N2N200O1O1O0O2TEeNi:Z1XEgNf:Z1ZEfNf:Z1ZEfNe:Z1[EhNc:Y1]EgNc:X1^EhNa:Y1_EgNa:Y1_EgN`:Z1`EfN_:[1aEeN_:[1aEeN_:[1aEfN]:[1cEeN]:[1cEeN\\\\:]1cEcN\\\\:^1dEcN[:]1eEcNZ:_1eEaN[:_1eEbNZ:^1gEaNX:`1hE`NW:a1iE_NV:c1iE^NV:b1jE_NT:b1lE^NS:c1mE]NR:d1nE\\\\NQ:P2O0O101N100O2O0O1000JVFmMi9T2WFkMi9U2WFkMi9V2WFiMi9X2XFfMh9\\\\221O1O00000000001OOXFbMf9b2OK[FeMe9Z2\\\\FfMd9Z2[FgMe9_20001O0000000000001O00000000001O0000000000000000001O0000000000000000001O00000000000000A`FTN`9l1`FTNa9Z2001O0000010O0000001O001O1O0O2FYFoMg9Q2YFoMh9Y21O0O2O002N2N1O1O1O2M4M4DdEbN^:[1cEeN^:Y1cEfN_:X1dEdN`:Y1d0@mUn0\"}}, {\"image_id\": 47, \"category_id\": 1, \"bbox\": [49.0, 143.0, 76.0, 44.0], \"score\": 0.9999995231628418, \"association_id\": 2, \"light\": [-1.8048715591430664, -2.2066657543182373, 1.664489507675171, 2.0207598209381104], \"segmentation\": {\"size\": [394, 583], \"counts\": \"ePc05Q<5L6L1O2N1N3L3N2O1O011N1O2NNiDSOV;n0jDROU;o0kDQOU;R1O100O2N1000NoDkNP;X10001O0000000000001O000000000000000000001O00000000000000000001O000000001O1O001O001O1O1KlDROV;l0jDTOX;j0hDVOY;i0gDWO_;c0aD]O`;b0`D^OV<01O0O\\\\Q`5\"}}, {\"image_id\": 47, \"category_id\": 1, \"bbox\": [162.0, 166.0, 137.0, 110.0], \"score\": 0.9999997019767761, \"association_id\": 3, \"light\": [-2.4066805839538574, -1.4394497871398926, 2.3136775493621826, 1.314323902130127], \"segmentation\": {\"size\": [394, 583], \"counts\": 
\"o`n14?=d:JTEc0b:d0N2M3N2N101N1N2N2N2L5M3N3N2M2O1N2N2N1O1N2O2N1O1gFSMT9n2jFTMU9R3N10000O101O0000001O000000001O000000000000001O0000000000001O000000000000000000000000000000000000000000000001O000000010^OSGeMm8[2VGbMj8_2VG`Mk8`2UG_Mk8a2UG_Mk8b2UG]Mk8d2UG[Ml8d2TG\\\\Ml8e2SG[Mm8e2SG[Mn8d2SG[Mn8e2QG\\\\MP9b2PG^MR9`2nF`MS9_2nF`MR9`2nF`MT9^2lFbMZ9X2fFhM[9W2eFiM\\\\9V2dFjM]9U2bFlMb9P2^FPNd9n1\\\\FRNe9m1[FSNf9l1YFUNP:b1PF^NZ:d13M2N;El0TOO100001O001O002M2O1O001O001O1O0O2O1O0O4MXP]3\"}}, {\"image_id\": 48, \"category_id\": 1, \"bbox\": [39.0, 274.0, 828.0, 334.0], \"score\": 0.9999997615814209, \"association_id\": 1, \"light\": [-3.072047472000122, -2.3430330753326416, 2.8636155128479004, 2.164107322692871], \"segmentation\": {\"size\": [685, 1024], \"counts\": \"obj07Pe07O1N2N2O1N101N10000O2O000O101O0O100O1O2O0O1O101N1O1O2N1O1O2O0O100O101O000O100000001O0O10O100000O10000O10000O10000O10O10O100000000O100aL]NTCc1i<bNUC]1i<hNUCX1_<VO_Cj0_<ZOZ@[Ol2[1h<FVC;h<HWC8g<JYC5g<MXC3g<OXC1g<1XCOg<3XCLh<6WCJh<8WCHg<;WCEa<f0]CZOb<i0\\\\CWOd<j0[CUOe<n0YCROf<P1YCoNg<S1WCnNi<R1WCnNh<T1WCkNi<W1VCiN\\\\;aNkCf2i0iN\\\\;bNiCg2j0fN^;cNhCg2j0fN^;dNfCh2k0dN_;eN[C4TOc2R2dN_;fNZCQ3U1YNb;hNVCP3X1XNb;iNaBM9T3c1VNb;UOjBe2d1VNb;VOhBe2f1UNb;VOhBf2e1TNc;WOfBf2g1RNd;YOdBe2i1QNc;[ObBf2j1oMd;\\\\O`Bf2l1nMd;]O^Bg2m1kMf;Q3ZDoLf;Q3ZDoLf;R3YDmLh;S3XDmLh;T3WDkLi;V3WDiLj;W3WDeLl;\\\\3SDbLo;^3QDbLo;^3QDbLo;_3PD`LQ<`3oC`LQ<`3oC`LQ<a3oC]LR<c3nC]LR<c3nC]LR<c3nC]LR<d3mC\\\\LS<d3mC\\\\LS<d3mC[LT<f3lCYLT<g3lCYLS<h3mCXLT<g3lCYLT<g3lCYLT<g3mCXLS<h3mCXLS<i3lCWLT<i3mCULT<k3lCULT<k3mCTLS<l3mCTLS<l3nCSLR<m3oCRLQ<n3oCQLR<o3oCPLQ<P4PDoKP<R4oCnKQ<R4PDlKQ<T4oClKQ<T4PDkKP<U4PDkKP<U4QDiKP<W4PDiKP<W4QDhKo;X4QDhKo;X4QDhKo;X4QDgKP<Y4QDfKo;Z4QDfKP<Y4PDgKP<Y4PDgKP<Y4PDgKP<Y4QDfKP<Y4PDgKP<Y4PDgKP<Y4PDgKQ<X4oChKS<V4mCjKV<S4kClKW<R4iCnKc<1[AT3R2kLW=R3iBnLZ=o2fBPM\\\\=o2eBPM\\\\=o2dBQM]=n2cBRM]=n2cBRM^=m2bBSM_=l2aBTM_=l2bBSM_=l2aBTM_=l2aBTM`=k2aBTM_=l2aBTM_=l2aBTM`=k2aBTM_=l2aBTM_=l2`BUMa=j2_BVMa=j2_BVMb=i2^BWMc=h2\\\\BYMd=h2[BXMf=g2ZBYMg=f2YB[Mg=d
2XB]Mi=b2WB^Mj=a2VB_Mk=`2UB`Ml=_2TBaMm=_2RBaMo=^2QBcMn=]2RBcMo=\\\\2PBeMP>[2PBeMQ>Z2oAfMQ>Z2oAfMR>Y2nAgMR>Y2nAgMS>X2mAiMR>W2nAiMR>X2mAhMT>W2lAiMT>W2lAiMT>W2lAiMU>V2kAjMU>V2kAkMT>U2kAlMV>S2jAmMV>T2iAlMX>S2hAmMX>S2hAmMY>R2gAoMX>Q2hAoMX>R2gAnMZ>Q2fAoMZ>Q2fAoM[>Q2dAoM\\\\>Q2dAoM]>P2cAPN]>Q2bAoM_>P2aAPN_>P2aAQN_>o1`AQN`>o1`AQNa>n1_ARNa>n1_ARNb>n1]ARNc>n1]ARNc>n1]ARNd>m1\\\\ASNd>n1[ARNe>n1[ARNe>n1[ARNe>n1[ARNe>n1[ARNe>n1[ARNe>n1[ARNe>o1ZAQNf>o1ZAQNf>o1ZAQNf>o1ZAQNf>o1ZAQNf>o1ZAQNf>o1ZAQNf>o1ZAQNf>P2YAPNg>P2YAPNg>P2YAPNg>P2YAPNg>P2YAPNg>P2YAPNg>Q2XAoMh>Q2XAoMg>R2YAnMg>R2YAnMg>R2YAnMg>R2YAnMg>R2YAnMg>S2XAmMh>S2XAmMh>S2XAmMh>S2XAmMh>S2XAmMh>S2XAmMg>T2YAlMg>T2YAlMg>T2YAlMg>T2YAlMg>T2YAlMf>U2ZAkMf>U2ZAkMf>U2ZAkMf>U2ZAkMf>U2ZAkMf>U2ZAkMe>V2[AjMe>V2[AjMe>V2[AjMe>V2[AjMe>V2[AjMe>V2[AjMe>V2[AjMe>V2[AjMd>W2\\\\AiMd>W2\\\\AiMd>W2\\\\AiMd>W2\\\\AiMd>W2\\\\AiMd>W2\\\\AiMd>X2[AhMe>X2[AhMe>X2[AhMe>X2[AhMd>Y2\\\\AgMd>Y2\\\\AgMd>Y2\\\\AgMd>Y2\\\\AgMd>Y2\\\\AgMd>Y2\\\\AgMd>Y2\\\\AgMd>Y2\\\\AgMd>Y2\\\\AgMd>Z2[AfMd>[2\\\\AeMd>[2\\\\AeMd>[2\\\\AeMd>[2\\\\AeMd>[2\\\\AeMd>[2\\\\AeMd>[2\\\\AeMd>[2\\\\AfMc>Z2]AfMb>[2^AeMb>[2^AeMb>\\\\2]AdMc>\\\\2]AdMc>\\\\2]AdMc>\\\\2]AdMb>]2^AcMb>]2^AcMb>]2^AcMa>^2_AbMa>^2_AbMa>^2_AbM`>_2`AaM`>_2`AaM_>`2aA`M_>`2aA`M^>a2bA_M^>a2bA_M^>a2bA_M]>b2cA^M]>b2cA^M]>b2cA^M]>b2cA^M\\\\>c2dA]M\\\\>c2dA]M\\\\>c2dA]M[>d2eA\\\\M[>d2eA\\\\M[>d2eA\\\\M[>d2eA\\\\M[>d2eA\\\\MZ>e2fA[MZ>e2fA[MZ>e2fA\\\\MY>d2gA\\\\MX>e2hA[MX>e2hA[MX>e2hA[MX>e2hA[MW>f2iAZMW>f2iAZMV>g2jAYMV>g2jAYMV>g2jAYMU>h2kAXMU>h2kAYMT>g2lAYMT>g2lAYMS>h2mAXMS>h2mAXMS>h2mAXMS>h2mAXMS>h2mAXMS>h2mAYMQ>h2oAXMQ>h2oAXMQ>h2oAXMQ>h2oAXMQ>h2oAXMQ>h2oAXMQ>h2oAXMP>i2PBWMP>i2PBXMo=h2QBXMo=h2QBXMn=h2SBXMm=h2SBXMm=h2SBXMm=h2SBXMl=i2TBWMl=i2TBWMk=j2UBVMk=j2UBVMj=k2VBUMj=k2VBUMi=l2VBUMj=k2VBVMh=k2XA_L9f0_>j2XAcL7c0a>j2WAfL6`0b>k2XAfL5?c>k2WAhL5=d>k2WAiL4<e>k2VAkL4:e>l2WAjL4:e>l2WAkL39f>l2VAmL28h>k2VAmL28h>k2VAmL28g>l2WAmL17h>m2UAmL36h>m2UAmL36h>m2UAmL37g>l2VAmL28g>l2WAlL28g>m2VAkL38g>m2VAkL38g>m2VAkL29g>m2WAjL29g>n2VAiL3:f>m2VAjL49
e>n2WAhL4;e>m2WAhL4;e>n2VAgL5<c>n2XAfL5<c>n2XAfL4=c>n2YAeL4>b>n2XAeL6=b>n2XAeL6=b>n2XAeL5?a>m2ZAcL6`0`>n2YAbL7`0`>n2YAbLN^O2R1g>n2YAbLM@1R1g>m2ZAbLMA1P1h>n2YAaLNA1P1h>n2YAaLNB0o0i>n2YAaLMD0m0j>o2XA`LNDOo0i>n2ZA_LNDOo0i>n2[A]LOEMP1i>o2[A[LOFMP1h>P3]AYLOGKP1i>Q3]AVL0Q1c>j2]ATL0S1a>k2_AQL1S1`>l2`AoK1U1^>n2aAkK0Z1^>l2eAhKJ]1`>l2eAaMZ>`2cAcM[>_2cAdMd0mMj<`4aBfM7^NP=m3gBmNT=i4L2O1N2O1O1O7H5L2N001N101O001O1N101O0O101O0O101N10001O0O101O000O2O0000001O000O101O0000000000000000001O0000000O10000000000000hLZDiLf;W3ZDiLf;W3ZDiLf;W3[DhLe;X3[DhLe;X3[DiLd;X3[DhLd;Y3]DfLc;Z3]DfLc;Z3]DfLc;Z3]DfLc;Z3^DeLb;[3^DeLb;\\\\3]DdLc;\\\\3^DcLb;]3^DbLc;^3]DbLc;^3^DaLa;`3_D`La;`3`D_L`;a3aD^L_;c3aD\\\\L_;d3bD[L^;e3cDZL];f3dDYL\\\\;g3eDXLZ;i3fDWLZ;i3fDWLZ;i3fDWLZ;j3fDULZ;k3fDULZ;k3fDULZ;k3fDULZ;k3fDUL[;j3fDULZ;k3fDULZ;k3fDULZ;k3fDULZ;k3fDULZ;l3eDTL\\\\;k3dDUL\\\\;k3dDUL\\\\;k3dDUL\\\\;k3dDUL\\\\;k3eDTL[;l3eDTL[;l3eDTL[;l3eDTL\\\\;k3dDTL];m3bDSL^;m3bDSL^;m3bDSL^;m3cDRL];n3cDRL];n3cDRL];n3cDRL];n3cDRL^;m3cDRL];o3bDQL^;n3cDRL];n3cDRL];n3cDRL^;l3dDSL\\\\;m3dDSL\\\\;l3eDSL];l3cDTL];l3cDTL];k3dDUL];j3cDVL];i3dDWL];h3dDWL\\\\;h3eDXL\\\\;g3dDYL\\\\;f3eDZL\\\\;e3dD[L\\\\;d3eD\\\\L\\\\;c3dD]L\\\\;b3eD^L[;b3eD^L\\\\;`3eD`L[;`3eD`L[;_3gD`LZ;_3fDaLZ;^3gDbLY;^3gDbLY;^3gDaLZ;^3gDbLZ;]3fDcLZ;\\\\3gDdLY;[3hDeLX;[3hDeLY;Y3hDgLX;W3kDhLU;V3mDjLT;R3oDnLQ;n2SERMn:g2XEYMi:e2XE[Mj:b2XE]Mj:`2WE`Mk:]2VEcMl:[2TEdMm:[2UEdMl:Z2UEfMk:Z2VEeMk:Y2VEgMj:Y2WEfMi:Y2YEfMh:Y2YEfMg:Y2[EfMe:Z2\\\\EeMd:Z2^EeMc:Z2]EfMc:Z2]EfMc:Z2^EeMb:Z2_EfMa:Z2_EfMb:Y2_EfMa:Z2_EeMb:[2^EeMb:Z2_EfMa:Z2`EeMa:Z2_EfMa:Z2_EfMa:Z2_EfMa:Z2`EeMa:Y2`EgM`:Y2`EgMa:X2_EhMa:X2`EgM`:Y2`EgMa:X2`EgM`:Y2_DmKCi1o;X2\\\\DSLEd1o;Y2YDVLI`1n;Z2WDYLK\\\\1o;Z2TD\\\\LNY1n;[2TD\\\\LOX1m;\\\\2TD]LOV1m;]2SD^L0U1m;]2SD^L1T1m;\\\\2SD`L0T1m;\\\\2SD`L1S1l;]2RDaL2R1l;]2RDbL2P1l;^2RDbL2P1l;^2RDbL2P1l;^2RDbL3o0k;_2RDbL3o0k;_2RDbL3o0k;_2RDbL3n0l;`2QDbL4m0k;a2QDbL4m0k;a2QDbL4m0k;a2QDbL5l0j;b2QDaL6m0j;a2PDbL6m0j;a2PDbL7k0j;c2oCbL7k0j;c2oCbL8j0i;d2oCbL8j0i;d2oCbL9i0h;e2oCbL:g0h;g2nCbL=d0e;j2nCbL
`0`0c;n2mCbLc0=`;Q3mCaLf0<];S3mCaLg0:];T3mCbLg09\\\\;U3mCbLh08[;V3mCbLh07\\\\;X3kCaLj06[;Y3kCaLj06[;Y3kCaLk05Z;[3jC`Ll05Z;[3jC`Lm03Z;]3iC`Lm03Z;]3iC`Lm03Z;^3hC^LP13X;_3hC^LP13X;_3hC^LQ11X;b3fC]LR11X;b3fC]LS10W;d3fC[LS11W;d3fC[LT1OW;g3dCZLU1OW;g3dCYLW1NV;j3bCXLY1LV;m3aCVLZ1KV;P4_CUL]1HU;T4]CTL`1ET;X4[CSLR?n3n@PLS?Q4m@mKT?T4m@hKU?Y4k@eKV?\\\\4m0001O001O001O1O001O1O1O1O1O2N1O2N1O2N0O2O00001O00001O00001OO100000000O1000000O10000O100O1N2N2N2L4UN`_OeNd`0Y1c_O_N``0_1f_O[N\\\\`0d1g_OXN[`0f1i_OWNX`0h1j_OUNX`0j1j_OTNX`0j1j_OlM_`0S2c_OiM``0V2b_OfMa`0Z2Y1O1O1O1O1O1O2N1O100O1O1O1O2N1O100O1O2N100O100O2O0O101N100O2O0O2O1N1O2O1N2N1O3M2O1N2N2N2N2N2N2N2N2O2M2N2O1M3KbeX3\"}}, {\"image_id\": 49, \"category_id\": 1, \"bbox\": [695.0, 51.0, 195.0, 139.0], \"score\": 0.9999999403953552, \"association_id\": 1, \"light\": [-2.705138683319092, -1.6369837522506714, 2.6341233253479004, 1.490216612815857], \"segmentation\": {\"size\": [768, 1024], \"counts\": \"o[Y`0h0Sg09I4M2O2M2J7M3L3M4N101O0O2O1O0O2O001O000O101O0O1O2M3M2O2O0O2bZO`MYe0a2gZO`MWe0a2iZO`MUe0f2O3M5K2N1O2M2O1N3M2O1O2N100O1O100O1N2O1N3NAn[ORMQd0o2o[OQMPd0P3P\\\\OPMPd0P3P\\\\OPMPd0P3P\\\\OPMoc0Q3Q\\\\OPMnc0P3R\\\\OPMmc0Q3S\\\\OoLmc0Q3S\\\\OoLmc0Q3S\\\\OoLlc0R3T\\\\OnLlc0R3T\\\\OnLlc0R3U\\\\OmLkc0T3T\\\\OlLkc0U3U\\\\OkLkc0U3U\\\\OlLjc0T3V\\\\OlLjc0T3V\\\\OlLjc0T3V\\\\OlLjc0T3V\\\\OlLjc0T3V\\\\OlLjc0T3V\\\\OlLjc0T3V\\\\OlLjc0T3V\\\\OlLjc0T3V\\\\OlLjc0T3V\\\\OlLjc0T3V\\\\OlLjc0S3W\\\\OmLhc0T3W\\\\OnLhc0R3X\\\\OnLhc0R3X\\\\OnLhc0R3X\\\\OnLhc0R3X\\\\OnLhc0R3X\\\\OnLhc0R3X\\\\OnLhc0R3X\\\\OnLhc0R3X\\\\OnLhc0e300000^OX\\\\OlLhc0T3X\\\\OmLgc0S3Y\\\\OmLgc0S3Y\\\\OmLgc0f300000^OY\\\\OkLgc0g3000000O1000000000000OF[\\\\O[Lec0P400000000O10I\\\\\\\\ORLdc0o3\\\\\\\\OPLdc0P4\\\\\\\\OPLdc0P4\\\\\\\\OPLdc0P4\\\\\\\\OPLdc0P4\\\\\\\\OPLdc0P4\\\\\\\\OoKec0Q4[\\\\OoKec0Q4[\\\\OoKec0V400000000000F[\\\\OYLec0f3\\\\\\\\OZLdc0c3^\\\\O^Lbc0P40O101UOZ\\\\OZMfc0e2Z\\\\O\\\\Mfc0c2Z\\\\O^Mfc0^3000ROY\\\\OcMgc0\\\\2Y\\\\OeMgc0Y31QOX\\\\OfMhc0Z2X\\\\OfMhc0Y30O2O001O
0O10000O101O0O100O2O001O1ROm[OlMTd0S2m[OmMTd0R2l[OnMTd0Q2m[OoMTd0o1m[OQNSd0m1o[OSNQd0e1W\\\\OZNkc0c1W\\\\O]Nic0a1Y\\\\O]Njc0a1W\\\\O\\\\Nmc0b1T\\\\O^Nmc0_1U\\\\O`Nmc0]1U\\\\OcNlc0[1U\\\\OdNmc0Z1T\\\\OfNmc0X1T\\\\OgNoc0V1R\\\\OiNQd0T1P\\\\OlNWd0i0m[OWO^f0O100O1O10001N10U[T3\"}}, {\"image_id\": 49, \"category_id\": 1, \"bbox\": [301.0, 62.0, 130.0, 94.0], \"score\": 0.9999746680259705, \"association_id\": 3, \"light\": [-0.053224604576826096, -4.45531702041626, -0.04128401353955269, 4.309800624847412], \"segmentation\": {\"size\": [768, 1024], \"counts\": \"fkQ71hg0?F7J2N3L4K4O2M3N1N3N0O2M2O1fYO`NSf0a1lYO`NTf0a1kYO`NSf0a1lYO`NTf0`1lYO`NSf0a1mYO_NRf0b1nYO^NPf0d1PZO\\\\Noe0d1RZO\\\\Nne0d1RZO]Nle0d1SZO]Nme0b1TZO^Nle0b1TZO^Nke0b1VZO^Nie0b1XZO^Nee0e1ZZO\\\\Nee0e1[ZO\\\\Nde0d1\\\\ZO\\\\Nde0c1]ZO]Nce0c1]ZO]Nce0c1]ZO]Nce0c1\\\\ZO^Nce0c1]ZO]Nce0c1]ZO]Nbe0d1^ZO\\\\Nbe0d1^ZO]Nae0c1_ZO]Nae0c1_ZO]Nae0d1^ZO\\\\Nbe0d1^ZO\\\\Nae0e1_ZO[N_e0g1aZOYN]e0j1bZOVN]e0k1cZOUN\\\\e0l1dZOTN[e0Y20000O100O10000O10000O10IiZOeMWe0b20JiZOcMVe0^2jZObMVe0^2jZObMUe0_2kZOaMUe0_2lZO`MTe0`2lZO_MTe0b2mZO\\\\MTe0d240O100O10O10000000O1000000000000000O10000001O0000000000000001O000000000O10000000000O2O00001N1jMcZOi1]e0VNeZOi1\\\\e0UNfZOi1[e0WNfZOh1Ze0WNgZOg1[e0XNgZOf1Ze0WNiZOh1je0K5M4K7F6K<@Qel=\"}}, {\"image_id\": 49, \"category_id\": 1, \"bbox\": [178.0, 46.0, 158.0, 101.0], \"score\": 0.9999990463256836, \"association_id\": 5, \"light\": [-2.390683889389038, -1.9020367860794067, 2.382289409637451, 1.8253531455993652], \"segmentation\": {\"size\": [768, 1024], \"counts\": 
\"ibU4;bg0e0\\\\O3N2L4L4I7N2N1O2N1O1M3M4O0O100O2O0O1O2N101N100O2N2O1hZOdMjd0\\\\2U[OfMjd0[2T[OfMkd0[2U[OeMkd0\\\\2S[OeMld0]2S[OcMld0^2T[ObMld0_2S[OaMmd0_2S[OaMmd0_2S[OaMmd0_2S[OaMmd0_2S[OaMld0`2T[O`Mld0`2T[O`Mld0`2T[O`Mld0`2T[O`Mld0_2U[ObMjd0^2V[ObMid0_2V[ObMjd0]2W[OcMid0]2W[OcMid0\\\\2X[OdMgd0\\\\2Z[OdMfd0[2[[OeMed0Z2\\\\[OfMdd0Z2\\\\[OfMdd0Z2\\\\[OfMdd0Y2\\\\[OhMdd0X2\\\\[OhMdd0X2\\\\[OhMdd0X2\\\\[OhMdd0X2\\\\[OhMdd0X2\\\\[OhMdd0X2\\\\[OhMdd0X2\\\\[OhMdd0X2[[OiMed0W2[[OiMed0W2[[OiMed0W2[[OiMed0W2[[OiMed0W2[[OiMed0W2[[OiMed0W2[[OiMed0W2[[OiMed0X2Z[OhMfd0X2Z[OhMfd0X2Z[OhMfd0X2Z[OhMfd0X2Z[OhMfd0X2Z[OhMfd0X2Y[OiMgd0W2Y[OiMgd0X2X[OhMhd0X2X[OhMhd0X2X[OhMhd0Y2W[OgMid0Y2W[OgMid0Y2W[OgMid0Z2V[OfMjd0Z2V[OfMjd0Z2V[OfMjd0[2V[OdMjd0\\\\2V[OdMjd0i21O0000000GU[O]Mkd0d2U[OZMld0f2T[OZMld0g2U[OWMkd0i2V[OVMkd0i2V[OVMjd0j2V[OUMkd0k25000P[OUMkd0k2T[OVMld0j2S[OXMld0h2T[OXMld0h2S[OYMmd0f2S[O[Mmd0l2000000000O100DR[OfMnd0Y2S[OgMmd0Y2S[OgMmd0Y2S[OgMmd0Y2S[OgMmd0X2T[OgMmd0Y2S[OgMnd0X2R[OhMnd0W2T[OhMld0W2Z[OaMid0^2<O2L4H8O2N100O100O10aZOTNjd0i1i0L3N3N1O2O0O2O001N1O2N2N2N3M3M3GoXO]OUg0?;K3InmS`0\"}}, {\"image_id\": 49, \"category_id\": 1, \"bbox\": [951.0, 66.0, 71.0, 142.0], \"score\": 0.9999994039535522, \"association_id\": 2, \"light\": [-2.1080574989318848, -1.9358515739440918, 2.047731637954712, 1.7956511974334717], \"segmentation\": {\"size\": [768, 1024], \"counts\": \"_]Yf09^g0=I5E;G8L3M3K5E;K5N2O1M4M3M3N3N1N2N2L4MEU[OeMid0]2W[OdMgd0]2Y[OcMed0_2[[OaMad0c2_[O^M^d0c2c[O]M\\\\d0d2c[O]M[d0e2e[O[MYd0f2h[OZMVd0g2j[OZMTd0h2l[OXMSd0i2m[OWMRd0j2n[OWMQd0i2o[OWMnc0l2Q\\\\OUMlc0n2T\\\\ORMhc0R3X\\\\OnLfc0T3Z\\\\OlLfc0T3Z\\\\OlLgc0S3Y\\\\OmLgc0R3Z\\\\OoLec0Q3[\\\\OoLec0R3Z\\\\OnLfc0R3Z\\\\OnLfc0Q3Z\\\\OPMfc0P3Z\\\\OPMfc0P3Z\\\\OPMfc0P3Z\\\\OPMec0Q3[\\\\OoLec0Q3[\\\\OoLec0Q3[\\\\OoLdc0R3\\\\\\\\OoLcc0Q3]\\\\OoLcc0Q3]\\\\OoLcc0g300O11O001O01O01O1O1O1O1O3M5YLU\\\\Oo2id0Dl0UM^ZOj0MB\\\\Q2\"}}, {\"image_id\": 49, \"category_id\": 1, \"bbox\": [877.0, 45.0, 106.0, 65.0], \"score\": 0.9997084736824036, 
\"association_id\": 6, \"light\": [-2.7772531509399414, -1.8635517358779907, 2.733935832977295, 1.7001458406448364], \"segmentation\": {\"size\": [768, 1024], \"counts\": \"miad04jg09H4L3L4N1N1O2O1N4L3M2O001O1O00000cYOnNke0T1TZOlNle0T1TZOlNle0T1SZOnNle0R1TZOnNle0R1TZOnNke0S1VZOlNje0T1VZOlNje0U1UZOkNje0V1VZOjNje0W1UZOiNje0X1VZOhNje0X1VZOhNje0Y1UZOgNke0Y1UZOgNke0Y1UZOgNke0Y1UZOgNke0Y1UZOgNke0Y1UZOgNke0Y1UZOgNke0Y1UZOgNje0Z1VZOfNje0Z1VZOfNje0Z1VZOfNje0Z1VZOfNje0[1UZOeNke0[1UZOeNke0\\\\1TZOdNme0[1SZOeNme0\\\\1SZOcNme0^1RZObNne0_1QZOaNoe0`1PZO`NPf0`1PZO`NPf0a1PZO^NPf0c1oYO]NQf0c1oYO]NRf0b1nYO^NRf0c1nYO\\\\NRf0d1nYO]NQf0i11JnYO\\\\NRf0d1nYO\\\\NRf0d1nYO\\\\NRf0i100000000001O00010O001O001HkYOcNUf0\\\\1kYOeNVf0Z1jYOfNVf0Y1kYOgNVf0S1oYOmNQf0R1PZOnNPf0n0TZOROle0i0YZOWOge0g0[ZOYOee0e0]ZO[Ode0c0]ZO\\\\Oee0c0[ZO]Ofe0a0\\\\ZO^Ode0a0]ZO^Oee0>_ZO@ce0>P1N2O2O0O100O1O2O0O1000001N10001NRfn0\"}}, {\"image_id\": 49, \"category_id\": 1, \"bbox\": [102.0, 146.0, 848.0, 350.0], \"score\": 0.9999916553497314, \"association_id\": 4, \"light\": [-1.2836220264434814, -2.0319995880126953, 1.1884939670562744, 1.7559003829956055], \"segmentation\": {\"size\": [768, 1024], \"counts\": 
\"bi\\\\2;`0JTf0[1nYOcN1=mc0[1k[OaNOe0dc0R1V\\\\ObNMb0gc0R3M4M2N2O1N2O1N2O1N2O1O001O1O001O001O00001O001O00001O00001O00001O0000001O0000001O002O1N0010O010O00100O6J7I2N2N2N3M3M1O1O1O100Ob]OgKVa0X4h^OkKXa0T4f^OnKZa0R4d^OPL]a0o3b^ORL_a0m3`^OTL`a0l3_^OULba0j3]^OXLba0h3]^OYLca0g3]^OYLca0g3^^OXLca0g3^^OXLba0h3^^OXLba0h3_^OWLaa0i3`^OVL`a0j3`^OVL`a0j3`^OVL`a0j3a^OUL_a0k3a^OUL_a0k3b^OTL_a0k3a^OUL_a0k3a^OUL_a0k3a^OUL_a0k3b^OTL^a0l3b^OTL^a0l3b^OTL^a0l3b^OTL^a0l3b^OTL_a0k3b^OTL^a0l3b^OTL^a0l3b^OTL^a0l3b^OTL^a0k3c^OUL]a0k3c^OUL]a0k3c^OUL]a0k3c^OUL]a0k3d^OTL\\\\a0l3d^OTL]a0k3c^OUL]a0k3c^OUL\\\\a0l3d^OTL\\\\a0l3c^OUL\\\\a0m3c^OSL\\\\a0n3d^ORL\\\\a0n3d^ORL[a0o3d^ORL[a0o3e^OQLZa0P4e^OQL[a0o3e^OQLZa0P4e^OQLZa0P4e^OQLZa0P4e^OQLZa0P4d^ORL[a0o3d^ORL[a0o3c^OSL\\\\a0n3c^OSL\\\\a0n3c^OSL\\\\a0n3c^OSL\\\\a0n3c^OSL]a0m3b^OTL]a0m3b^OTL]a0m3b^OTL^a0l3b^OTL]a0m3b^OTL^a0l3b^OTL]a0m3b^OTL^a0l3b^OTL]a0m3c^OSL]a0n3a^OSL_a0m3a^OSL^a0n3b^ORL^a0n3a^OSL_a0m3a^OSL^a0n3a^OSL_a0m3a^OSL^a0o3`^ORL`a0n3_^OSL`a0n3`^ORL`a0n3_^ORLaa0P4]^OQLca0o3\\\\^ORLca0o3\\\\^ORLba0Q4[^OQLba0R4]^OoKaa0Y5N2M3O1O1O1O1O001O100O1O100O010O100@ZIe_Og6W`0c0J7I6M3M3O100O100O101N1O1O1O1N2N2N2O1O100O1O100O100O101O0O010O100O010O10O01O0010O01O010O010O010O010O01O010O10O01O10O010O010O7J2M1O2N2O0O2N1O2N1O1O2M2OWOeAoGZ>Q8fAQHX>o7iAQHV>n7kASHU>l7lATHS>l7mAUHS>j7mAWHR>i7oAXHP>h7oAYHP>g7PBZHP>e7PB\\\\Ho=e7PB\\\\HP>c7PB^HP>b7oA`Ho=`7QBaHo=_7PBbHP>^7PBbHP>^7oAcHP>]7QBcHo=]7PBdHP>\\\\7PBdHP>[7QBeHn=\\\\7QBfHn=Z7RBfHn=Y7SBgHl=Z7TBfHl=Z7SBgHm=Y7SBgHl=Y7UBgHk=Y7UBgHj=Z7VBfHj=Y7VBhHi=Y7WBgHi=Y7WBgHh=Y7YBgHg=Y7YBgHg=Y7XBhHg=Y7YBgHg=X7ZBhHf=X7ZBhHe=Y7[BhHd=W7\\\\BjHd=V7\\\\BjHd=V7\\\\BjHc=V7^BjHb=V7]BkHc=U7]BkHc=U7]BkHb=U7_BkHa=U7^BlHb=S7_BmHa=S7_BmH`=T7`BlH`=S7aBmH_=S7`BnH`=R7`BnH_=R7bBoH]=Q7cBoH]=Q7cBoH]=Q7cBoH\\\\=Q7dBPI\\\\=P7dBPI[=Q7eBoH[=Q7eBoH[=P7fBPIY=Q7gBoHY=Q7gBoHY=Q7gBPIW=Q7hBPIX=o6iBQIV=P7jBPIV=P7jBPIV=P7jBPIU=Q7kBoHU=Q7kBoHU=P7lBPIS=Q7mBoHS=Q7mBPIR=P7mBQIS=o6mBQIS=o6mBQIS=o6mBQIS=o6mBQIS=o6mBQIS=o6mBQIS=o6mBQIS=o6lBSIS=m6mBSIS=m6mBSIR=m6oBSI
Q=m6oBSIQ=m6oBSIQ=m6nBTIR=l6nBTIR=l6nBTIR=l6nBTIR=l6nBTIR=l6mBUIS=k6mBVIR=j6nBVIR=j6nBVIR=i6oBWIQ=i6nBXIR=h6nBXIQ=i6oBWIQ=i6oBWIQ=i6oBWIQ=i6oBWIQ=h6PCXIP=h6oBYIQ=g6oBYIQ=g6oBYIQ=f6PC[Io<e6QC[Io<d6RC\\\\In<d6RC\\\\In<d6RC\\\\In<c6RC^In<b6RC^In<a6SC_Im<a6SC_Im<`6TC`Il<`6TC`Il<`6TCaIk<^6VCbIj<^6VCbIj<^6UCcIk<]6UCcIk<\\\\6VCdIi<]6WCcIi<]6WCcIi<]6VCdIj<\\\\6VCdIj<[6WCeIi<[6WCeIi<[6VCfIj<Z6VCeIk<[6UCeIk<[6TCfIl<Z6TCfIl<Z6TCfIl<Z6TCfIl<Z6TCfIl<Z6SCgIm<Y6SCgIm<Y6SCgIm<Y6SCgIm<X6TChIl<X6TChIl<X6TChIl<X6SCiIm<W6SCiIn<V6RCjIn<V6RCjIn<V6RCjIn<V6RCjIn<V6RCjIn<V6QCkIo<U6QCkIo<U6QCkIo<U6QCjIP=V6PCjIP=V6oBkIQ=U6oBkIQ=U6oBkIQ=U6nBlIR=T6nBlIR=T6mBmIS=S6mBmIS=S6mBmIS=S6lBnIT=R6lBnIT=R6lBnIT=R6lBnIT=_8000bMlBPJT=P6lBPJT=P6lBPJT=P6lBPJT=P6lBoIU=Q6kBoIU=^80000001N10000O100O10000O1000000O10000000000O2O00000000000O100000000O101O0O10000O1000000O10000000000O100000000000O100000O10000000000O100000O1000O1000000O1000000O1000000O101O000O10000O1000000O1000000O10001O0O100000000O100000000O1000000O10000000000O100000000O10000O10001O0O10000000000O2O00001O00001N10001O0000001N1000001N10000O2O000O101O000O10001O00000O10001O0000001N10001O000O2O001N101N101O001N101O001N2O1O1O1O1O1O2M2O2N1O2N2M3N2N2N2M3N2M3N1O1N2O001O1N101O001O00001N101O001O001O0O2O1O1O001N2O1O001N101O1N101O001O0O101O0[Ng^OcLZa0[3f^OfLZa0Y3g^OfLZa0Y3f^OhL[a0V3f^OjLZa0U3f^OlL[a0R3e^OoL[a0P3c^ORM^a0h41O000O101O0O100]M`^OfNaa0V1a^OjN`a0T1b^OlN^a0R1d^OnN\\\\a0P1e^OQO[a0n0f^OQO\\\\a0l0f^OTOZa0k0g^OUOYa0j0h^OVOXa0j0h^OVOXa0i0i^OWOWa0i0i^OWOXa0g0i^OXOXa0h0h^OXOXa0h0h^OXOXa0g0i^OYOWa0g0i^OYOWa0g0i^OYOWa0g0i^OYOWa0f0j^OZOVa0f0j^OYOWa0g0i^OYOWa0g0i^OYOXa0f0h^OZOXa0f0h^OZOXa0e0i^O[OWa0e0i^O[OWa0e0h^O\\\\OXa0d0h^O[OYa0e0g^O[OYa0d0h^O\\\\OXa0d0h^O\\\\OXa0d0i^O[OWa0e0i^O[OWa0e0i^OZOXa0f0h^OZOXa0e0j^OZOVa0f0j^OZOWa0e0i^OZOXa0f0h^OZOXa0f0i^OXOXa0h0h^OXOXa0g0j^OWOWa0i0i^OWOWa0i0i^OVOXa0j0i^OUOWa0k0i^OTOXa0k0j^OTOVa0l0j^OSOWa0m0i^OSOXa0l0i^OSOWa0l0j^OSOWa0m0i^OSOWa0l0j^OSOWa0m0j^OROVa0n0j^OROVa0m0k^OSOUa0m0k^OROVa0m0k^OSOVa0l0j^OTOVa0k0l^OTOTa0l0l^OTOTa0k0m^OUO
Sa0k0m^OTOTa0l0l^OTOTa0k0n^OTORa0l0n^OTOSa0k0m^OUOSa0j0n^OUOSa0k0n^OTORa0l0n^OTORa0l0o^OSOQa0l0P_OTOQa0k0o^OTORa0l0o^OSOQa0m0o^OSOQa0l0P_OSOQa0m0P_OROQa0m0o^OSOQa0m0o^OSORa0k0o^OTORa0l0o^OSOQa0m0o^OSORa0k0o^OUOQa0k0o^OTOSa0j0n^OVORa0j0n^OVORa0i0P_OVOQa0i0o^OVORa0i0o^OWORa0g0o^OYOQa0f0Q_OYOPa0e0Q_O[Oo`0d0R_O[Oo`0e0R_OZOo`0d0R_O\\\\On`0c0T_O\\\\Om`0b0T_O]Om`0b0U_O]Ol`0b0T_O^Ol`0a0U_O_Ok`0`0W_O^Ok`0a0U_O_Ok`0`0V_O@k`0>V_OBj`0>W_O@k`0>V_OBj`0=W_OCj`0;W_OEi`0:X_OEj`09W_OGi`08Y_OGh`07Y_OIh`05Y_OJh`05Y_OKh`03Z_OLg`01[_OOf`0N\\\\_O2e`0I__O6c`0Eb_O:_`0Af_O>[`0[Ol_Od0U`0VOQ@i0cc0O2O1O1O0O2O00001N10001O0O10001N100O2O0O101N103LkSg1\"}}, {\"image_id\": 50, \"category_id\": 1, \"bbox\": [390.0, 4.0, 203.0, 231.0], \"score\": 1.0, \"association_id\": 1, \"light\": [-2.282613754272461, -1.9257934093475342, 2.1637234687805176, 1.8335566520690918], \"segmentation\": {\"size\": [683, 1024], \"counts\": \"Q[T85Te05L2N2O1N1O100O10001N10000O1000000O10000000000O100000000000000O1000O1000O1000O0100O010N2N101O1O0O2J5M4O001O10O0100000O1000101N_Oa0M3N2N2N1N3M3N2N2N2M3M3M201N1O2N10ZO`]OcN_b0^1a]OaN`b0_1`]OaN_b0`1a]O_N`b0a1`]O_N_b0a1b]O^N_b0b1`]O_N_b0b1a]O]N^b0d1c]O\\\\N[b0f1d]O[N[b0f1e]OZN[a0]Ob_OY2SOZN]`0Dh_O;f0h1UOYNX`0d0^@V1ZOVNR`0m0^@o0@TNk?Z1\\\\@[1b?X2N1FFc@\\\\KQ?e5L4N1O2O100O`MZA\\\\Ng>c1\\\\A[Nc>g1[AZNe>V4000O1000000O100000000O100000000O100000000O100000000O1000001O00001O001O1N2O1O001O0dJm@j3MRLX?3n@g3JWLX?1PAf3IYLW?0RAe3H[LW?NSAf3F[LZ?LQAh3F\\\\L_?_OSAT4^O]LY`0b3f_O^L[`0b3e_O^L\\\\`0a3d_O_L^`0_3b_OaL_`0]3a_OdL``0[3`_OeLa`0Z3__OfLb`0X3__OhLa`0X3__OgLb`0X3__OhLa`0W3`_OiL``0V3a_OjL_`0U3b_OjL``0U3a_OjL_`0T3c_OlL_`0X2b_OcM05_`0T2^@kMc?S2^@lMd?R2]@nMc?Q2_@mMb?R2l101N11O001O001001N101N010O0O2O001O002M2O1O1N2N2M8Ge\\\\o8\"}}, {\"image_id\": 50, \"category_id\": 1, \"bbox\": [16.0, 83.0, 295.0, 599.0], \"score\": 1.0, \"association_id\": 2, \"light\": [-1.2128093242645264, -2.8320963382720947, 0.9905497431755066, 2.675016164779663], \"segmentation\": {\"size\": [683, 1024], \"counts\": 
\"dZ;1Xe03L4L3L4O2O0O101N1O1O1O1O100O2O0O100O1O1O1O1O1000O01jJGYE:e:MUE3l:NRE3n:MQE4o:MPE3P;MoD3R;MnD3R;NlD3S;NlD3T;MkD4U;LjD5V;KjD6T;KkD6U;IlD7S;JlD7T;HmD8R;InD7R;ImD8R;HoD8Q;HnD9R;GnD9mNfNY9Q1iG:nNfNX9o0kG;mNhNV9m0lG<mNiNV9k0mG<mNkNT9i0nG=nNlNR9f0QH>lNoNQ9c0RH?mNQOn8`0UH?mNTOk8=WH`0mNVOj89ZHa0mNVOh89[H`0nNVOg8;[H?nNUOh8;ZHa0oNROh8=YHa0POnNj8a0VHa0POhNP9f0QHb0S;^OmDb0R;^OoDb0Q;^OnDc0R;\\\\OnDe0R;ZOoDf0Q;YOoDh0]ObNT8d0aHj0POmN_88aHl0lNQOb82cHm0kNROa81cHn0kNROb8OdHo0jNSOa8MeHQ1jNROa8MeHQ1iNTOa8JfHS1iNSO`8KgHR1iNROa8LeHS1kNPO`8LfHT1jNPO`8LeHU1kNoN_8MfHT1kNoN_8MeHU1lNmN_8NeHV1mNkN^8OdHW1nNjN^8OcHX1oNiN]80cHX1POhN\\\\81dHW1oNjN[8OfHX1oNjNX80iHV1nNkNX80iHV1oNkNU81lHT1oNkNS83mHS1oNlNQ83oHR1POlNn74RIP1POlNm75RIP1POmNk74VIo0POkNi78VIn0ROiNe7<YIk0ROhNb7a0[Ih0TOfNV7m0fI=UOeNZ5i2`KcNXOaNn4W3iKYN\\\\O]Nh4]3kKWN_OYNe4b3lKUNAWNa4f3mKTNCUN^4i3oKRNBVN^4i3oKRNBWN\\\\4i3QLPNDWNX4m3RLmMEXNT4o3VLjMEXNR4Q4XLhMEYNo3R4\\\\LeMEZNk3U4^LbMFZNk3U4_L`MG\\\\Ng3V4aL^MI\\\\Ne3W4bL\\\\ML[N`3\\\\4bLXM1\\\\NZ3^4dLVM4[NU3c4fLQM7[Nm2k4jLjL;ZNe2R5PMcL=ZN]2Z5UM[L?\\\\NW2^5XMULd0\\\\NR2b5YMoKi0^Nm1e5YMjKm0cNf1g5[MfKn0gNc1f5\\\\McKR1kN]1f5^M_KT1ROV1c5dMZKW1YOm0b5hMVKZ1@d0_5PNPK]1C>b5RNkJa1E8e5TNfJd1G5h5SNbJh1H1j5RN`Jn1GNl5QN]JR2IIo5PNZJW2IES6PNSJ\\\\2L_OY6PNkIb2MYOb6nM`Ij22POm6kMPIV36iNV7hMeF_O2<S1d3S1dN\\\\7fM[FH1<S1c3W1_Nb7cMTF0N=S1b3Z1\\\\Nf7aMnEi0R1Z3\\\\1YNh7_NjFY3`1TNk7`NeF\\\\3b1QNl7cN_F]3h1jMn7hNYF^3m1cMP8mNQFa3S2VMV8XOeEc3W2lL]8_O\\\\Ee3Z2cLc8FREg3]2\\\\Lh8KkDi3_2VLl8OdDl3b2oKP93^Dn3d2hKT99XDo3W?QLh@o3Y?PLg@P4Z?oKf@Q4Z?oKf@Q4[?nKe@R4[?nKd@S4]?mKb@S4^?mKb@S4_?lKa@T4_?lKa@T4`?lK_@T4a?lK_@T4b?kK^@U4b?lK]@T4d?kK\\\\@U4e?jKZ@W4g?hKY@X4g?hKY@Y4g?fKY@Z4g?fKX@[4h?eKX@[4i?dKW@\\\\4i?dKW@\\\\4\\\\`0000O101O00000000000O2O00000O2YI_KbLa4T3oKeLS4o2`LiL`3P3PMfLR3R3]MfLc2U3hMdLZ2X3mMdLT2Y3PNeLQ2Y3RNdLP2Z3RNdLo1[3SNcLo1[3SNbLP2\\\\3SNaLo1]3SNbLm1]3VNaLk1]3XNaLh1]3\\\\NaLe1]3^NaLb1]3bNaL_1\\\\3dNcL\\\\1[3gNeLX1X3lNgLT1W3nNiLR1T3ROkLn0Q3WOnLi0n2[ORMe0k2_OTMb0i2@WM`0g2CXM=f2EZM;c2H]M8`2K`M5\\\\20cM1S28mMHh1c0XN]Od1
g0\\\\NYOa1j0_NVO_1l0aNTO]1n0cNRO[1Q1dNoNZ1S1eNnNY1U1fNkNW1X1hNiNU1[1jNeNT1]1kNdNS1_1lNaNR1a1mN_NS1c1lN]NR1f1lN[NS1f1mNZNQ1i1nNWNP1l1oNUNn0n1QORNl0R2SOnMk0U2TOkMj0X2UOhMj0Z2UOfMj0\\\\2UOdMj0^2UObMj0`2TOaMk0a2SO`Ml0b2QO_Mo0c2oN^MP1d2nN]MP1e2oN\\\\MP1f2nN[MP1h2nNYMP1j2nNWMP1l2mNVMR1l2lNUMR1P3iNRMU1S3fNoLX1V3cNlL[1Z3^NiL`1\\\\3WNjLf1[3SNjLk1[3mMjLQ2Z3hMkLU2^3^MiL`2g3hLcLU3l:L3L4H9E:H8K6I7J5L5M3L4M2N3M3N2M3NW^k>\"}}, {\"image_id\": 50, \"category_id\": 1, \"bbox\": [690.0, 209.0, 320.0, 284.0], \"score\": 0.999282956123352, \"association_id\": 3, \"light\": [-1.3068222999572754, -2.389974355697632, 1.1689540147781372, 2.20499587059021], \"segmentation\": {\"size\": [683, 1024], \"counts\": \"Zc\\\\>3Se07K5M3L5L2O1N2O1O101N101mM]O[_Oe0^`0E\\\\_O?^`0Ea_O=[`0Fd_O:Z`0Id_O8Z`0Jf_O6X`0Od_O2Z`0Lj_O3U`0Nl_O2Q`00P@0m?2U@Mh?5Y@Kb?9_@G]?;f@DV??k@BQ?`0PA@m>b0TA]Ok>d0VA\\\\Oh>e0YA[Oe>f0[AZOc>g0^AZO_>h0bAXO[>i0fAXOS>n0mASOa=^1`BcNX=b1iB_NQ=f1PCZNk<j1UCWNf<m1[CSNc<m1^CSN`<o1aCPN^<Q2cCoM[<R2fCmMX<U2iCkMT<W2nCgMQ<Z2QDeMl;]2WDaMg;`2[D_Mc;b2^D^M`;c2aD]M];d2dD\\\\MZ;e2gD[MX;f2gDZMY;f2hDYMX;g2iDYMV;h2jDWMW;h2jDXMU;i2kDVMU;k2kDUMT;l2lDTMS;m2mDRMS;P3kDQMU;P3jDPMU;Q3kDoLT;S3jDmLW;U3fDlLZ;V3iBlK1Kf0R1b<X3`B`Ln08b<5ZBk2T1PMj<^OWCm2OdMi>T2XAkMk>R2VAnMl>P2TAoMn>o1SAQNm>n1SARNn>m1SASNl>m1TASNm>l1SATNm>l1SAUNl>k1UATNk>l1UAVNi>j1WAVNj>i1VAXNi>h1XAXNg>h1YAXNg>h1YAXNg>h1YAYNf>g1[AYNd>g1\\\\AZNb>h1]AXNc>h1^AXN`>i1`AWN_>k1`AVN^>k1cATN]>l1cAUN[>m1eARN[>n1fARNX>o1hAQNX>P2hAPNV>Q2jAoMV>R2jAmMU>T2kAlMT>V2lAjMR>X2mAhMR>Z2mAfMQ>_2mA`MQ>d2mA\\\\MR>h2kAXMT>j2lAVMR>l2mATMR>n2nAQMQ>Q3nAoLQ>R3PBnLm=T3SBlLk=W3UBhLg=\\\\3ZBdL`=b3aB\\\\LV=m3kBRLQ=R4PCmKn<V4RCjKk<X4VCgKh<T2]BkNk0POg<R2eBkNe0ROf<Q2hBlNc0ROd<S2kBhNc0TO`<V2nBeNc0TO^<W2RCbNc0UOY<[2UC_Nc0VOT<^2ZCZNd0WOg;i2hCmMb0ZOd;k2kCiMc0[O`;n2nCfMb0]O];o2SDVMlN@f1:Z;P3VDQMo00i:P3YDiLT17b:Q3[DcLW1<]:R3^D[L[1c0U:S3XGmLg8T3YGlLf8U3[GjLd8V3]GjLb8W3_GXLjK=f<\\\\3`GVLlK=c<^3eGbLZ8_3fGbLY8^3hGaLW8`3iG`LX8^3iGbLW8^3iGbLX8]3iGbLX8\\\\3iGdLW8\\\\3iGdLX8k1iChNo3]OX8f1QDiNh3BW8b1UDj
Ne3CW8`1XDjNb3FV8o0YCROR18X3\\\\OjK;d<l0\\\\CROQ18V3@iK:c<j0aCQOo0:T3BhK9e<h0dCoNl0>S3@jK;e<c0SEAg2KU;4kDMU;2kDNV;1kDNV;1jDOW;0iD0W;0iD0X;OhD1];JcD7^;HbD7^;IbD7];JcD6[;LeD4X;OhD1W;0iD0W;0iD0W;0jDOU;2kDNU;2kDNU;2kDNU;2lDMS;4mDLS;7jDIV;9hDFX;>bDZOfK7h?`0^D\\\\OjK4h?a0[D]OmK2g?c0YD^OoKOh?d0WD^OQLNh?e0UD^OSLLh?h0TD\\\\OTLLh?i0RD\\\\OVLKg?k0RD[OWLHh?n0PDZOXLGh?Q1oCXOYLFh?T1nCVO[LDg?X1mCTOT<o0lCQOR<R1mCnNP<U1PDjNm;[1QDfNm;\\\\1SDdNk;_1TDaNj;a1VD_Ni;b1WD^Ng;d1YD\\\\Nf;f1YD[Ne;f1ZD[Ne;f1[DZNc;h1]DXNa;k1^DVN^;m1aDTN];n1cDRN[;P2eDPNZ;R2eDnMZ;S2eDoMY;R2gDnMX;S2hDmMW;U2hDkMW;V2iDjMU;Y2jDgMS;\\\\2mDdMP;`2oD`Me:l2[ETMa:Q3^EoL^:U3bEkL\\\\:X3cEhL\\\\:Y3dEgL[:Z3eEfLZ:\\\\3eEdLZ:]3fEcLY:_3fEaLY:a3fE_LX:c3hE]LV:f3iEZLV:h3iEXLV:j3iEVLV:l3iETLV:n3iERLV:P4iEPLW:Q4hEoKX:Q4iEnKW:S4hEmKX:S4hEmKX:T4gElKY:T4gElKY:^1RD^Of1SOX:m0hDMo0VOY:i0nDOi0XOZ:f0QE1d0YO[:d0TE3a0XO[:c0WE4>YO[:b0YE5;YO]:?[E78ZO]:=^E85[O_:9`E;O]Ob:3eE>I_Oi:EgEj0@A[>=dAC^>;bAE_>:aAF`>:_AFb>9]AHd>7\\\\AIe>7ZAIg>6YAJg>6YAJh>5XAKj>4UALl>3TAMm>2SANn>1RAOo>1o@0Q?0o@0R?On@1R?On@1R?0m@0S?0l@1T?0j@1V?Oe_O^OId0b`0Og@2X?0f@1Z?Od@3\\\\?Na@4_?M^@5a?M\\\\@5d?LY@6g?Kl_Oa0S`0@l_Oa0S`0Ak_O`0T`0Bi_O`0U`0Bj_O?T`0Dh_O?U`0Eh_O=U`0Gh_O;V`0Hg_O:W`0Hg_O:X`0Hf_O9Y`0Kb_O7]`0M[_O8e`0IY_O8f`0JX_O7g`0KW_O6i`0V2O001O1O1O100O10O2O1[Mj_O2X`0Jn_O1W`0HQ@O\\\\`0@Q@0W`9\"}}, {\"image_id\": 51, \"category_id\": 1, \"bbox\": [376.0, 385.0, 206.0, 45.0], \"score\": 0.8635349869728088, \"association_id\": 1, \"light\": [-1.244920015335083, -2.621506929397583, 1.1544196605682373, 2.5408709049224854], \"segmentation\": {\"size\": [666, 1002], \"counts\": 
\"Ynd74ed02O1O0000001O00000000000000000000001O000000001O0000000000001OO101O000000000000000000010O00000000000000000000000001O000001O000000000010O0000000001O0001O00000000001O0001O00000001O00000001O0001O00000000001O01O0000000000000001O01O00000000000000010O00000000000010O00000001O0000001O00001O001O000000010O00000O10001O1OcW36VhL7H5F7M4M10001N10M3O10001O001000O2O1O3L3N2N1O1OO1N2N2N200O100O1001O1O000000O1O2M3L4H8M3LUi`8\"}}, {\"image_id\": 51, \"category_id\": 1, \"bbox\": [2.0, 208.0, 145.0, 81.0], \"score\": 0.9999278783798218, \"association_id\": 2, \"light\": [-0.9684669375419617, -1.5292199850082397, 0.8735955357551575, 1.3061792850494385], \"segmentation\": {\"size\": [666, 1002], \"counts\": \"oa11fd05N1O001O001O1O001O001O0000001O0000000000001O0000010O0001O001O010O00001O001O01O0000000000001N100000000O10001O0O1001O000000O11O01O0000000001O000O2O0001N3M2O0O2M3N1O1O1O1O1M3O1iNQOX^OZ1ea0o0O1O1O100O100O100O10000000000001O1O001O002N4L2N6J7I1O2N3M1O2N2N2N6J2N1O1O1O1O001O00001O1O1O0000001O1N2O1O1N3M5G[j[a0\"}}, {\"image_id\": 52, \"category_id\": 1, \"bbox\": [148.0, 77.0, 337.0, 474.0], \"score\": 0.9999952912330627, \"association_id\": 1, \"light\": [-1.5056328773498535, -1.8947323560714722, 1.364293098449707, 1.7225998640060425], \"segmentation\": {\"size\": [683, 1024], \"counts\": 
\"fWS31We06M0O101N1000001N1000001N10000O100000001O0O1000001N1]O_O\\\\\\\\Oa0dc0AZ\\\\O?ec0B[\\\\O>ec0B[\\\\O>dc0C[\\\\O>ec0CZ\\\\O>Vc0^Oi\\\\O41>Vc0^Oh\\\\O60=Wc0^Oi\\\\O50=Wc0_Oh\\\\O5O=Yc0^Og\\\\O8N:[c0_Of\\\\O7N;\\\\c0^Of\\\\O8M:]c0_Oe\\\\O7M;^c0^Od\\\\O8N:U>Q4VB]KG9M9P>[4XBSKK9L:o=`4VBmJOU1i=R4VBiJ1V1g=T4VBfJ3V1f=W4UBdJ3V1g=W4VBcJ3V1f=a4ZB`Kd=b4[B^Kc=V6O1N2N2O1O1O100O1OVNeBfKZ=Z4gBfKW=[4kBdK^7bNiMj5jJdKY7kNdKL`Nf5dNcKT7RO_K0aN^5lNaKQ7VO[K2cNX5RO`Ko6YOWK4dNT5WO_Km6[OUK6dNQ5ZO^Kl6^ORK7dNo4^O\\\\Kk6@PK9dNl4A[Kj6DlJ<bNf4IZKh6GhJc0]N_43WKg6KbJ\\\\7gNiHf6O]J[7mNfHf61XJ\\\\7ROcHe64UJ[7VOaHd66SJZ7ZO`Hc67QJZ7\\\\O_Hc68oIZ7_O]Hb6:lIZ7C\\\\Hb6:hI\\\\7FZHb6;dI]7KXHa6<]Ib73QH`6?VId7;mG_6]:bIcE_6[:bIeE^6j6RInJ`0XN^6h6UInJ?YN\\\\6j6SIlJc0ZNZ6W:hIiEX6U:jIkEV6R:mInES6P:oIPFQ6m9SJRFm5l9UJTFk5h9YJXFg5e9\\\\J[Fd5b9`J]F`5a9bJ_F]5_9fJaFZ5\\\\9iJdFW5Y9lJgFS5X9oJhFQ5V9QKjFn4V9SKjFl4U9VKkFj4T9WKlFh4T9ZKkFe4U9\\\\KkFc4T9_KlFa4Q9bKoF]4o8fKQGY4l8kKTGU4h8oKXGP4e8TL[Gl3b8WL^Gi3_8[L`Gd3_8^L`Gc3^8_LbGa3]8`LcG_3\\\\8cLdG]3X8hLgGX3n5]IgJ_3[OS3l5cIbJ_3Bn2h5iI`J]3Hj2c5oIaJZ3Lg2Q3WJPM0MV32c2P3`JdLM7R35`2P3eJ^LM:P38^2o2gJ]LM9P3;\\\\2P3eJ^LN6R3;\\\\2Q3dJ_LM1W3?X2R3bJaLLL]3a0U2S3aJaLLHa3d0R2T3_JcLLDd3e0Q2U3]JdLMAf3e0Q2X3ZJcLN_Oh3f0P2Y3XJeLN[Ol3f0n1\\\\3VJeLNXOn3h0n1\\\\3TJfLNWOP4g0n1[3TJjLLSOS4h0m1Z3UJlLISOU4g0m1Y3VJnLGROV4g0m1X3WJPMDQOY4g0l1X3WJQMBQO[4f0l1W3XJSM@PO\\\\4f0l1W3XJTM^OPO^4e0l1V3YJVM\\\\OnN`4f0k1V3YJWMZOnNb4e0k1V3YJWMZOnNb4e0k1U3ZJYMWOnNd4d0k1S3\\\\J]MROnNf4b0l1P3_J_Ne3aNl1m2bJbNa3bNm1i2eJfN]3aNn1E_Ih1[12h0lM^Od0Q5EcIb1Y1c04QNG5Y5DgI^1W1Q1BTN2I_5BkIY1U1^1SOUN<Ca5@oIU1S1[4[OPKc5^ORJT1R1_4XOoJe5[OSJV1Q1`4WOoJf5WOVJX1m0d4UOnJi5ROXJZ1l0g4ROmJk5iN_Jb1d0j4QOkJ`79`Im4oNjJa79_IP5nNgJc78`IR5kNgJf75_IW5iNcJi76^IX5hNbJj75_I[5dNaJn73]I^5dN_JP83YIa5fN\\\\JQ84WIc5fNYJT84TIe5gNWJV83QIj5gNSJY83mHm5iNoI\\\\87eHn5nNjI^8;UHX6[O]I`8?lG[6BVIc8b0XGh65eHd8_9ZGaFf8`9ZG_Ff8b9YG]Fi8d9UG\\\\Fk8R;1O001O0O2O001O1O1N2O1O1O001O001O1N101^GoFh4R9RK[Gf4f8SKcGi4_8RKgGl4Z8QKiGn4X8nJlGQ5V8lJmGR5T8kJoGT5Q8kJPHU5Q8hJRHW5o7eJUH
Z5k7cJYH\\\\5h7`J\\\\H_5d7^J`Ha5`7\\\\JdHc5]7ZJfHe5Z7ZJhHe5Y7XJjHg5V7XJkHg5V7YJkHf5U7YJmHf5S7ZJmHf5S7YJnHg5R7YJoHe5R7ZJoHf5Q7ZJPId5Q7\\\\JoHd5Q7[JPIe5P7[JPIe5P7[JQIc5P7]JPIc5P7]JPIc5P7]JPIc5P7]JPIb5Q7^JoHa5R7_JnH`5S7`JmH`5S7`JmH_5T7aJlH^5U7bJkH^5V7aJjH^5W7bJiH^5W7aJjH_5W7`JiH`5W7`JiH`5X7_JhH`5Z7_JfHa5Z7_JfHa5[7^JeHb5\\\\7]JdHc5]7[JdHe5]7ZJcHf5^7XJcHh5^7WJbHi5_7UJbHk5_7SJbHm5_7QJbHo5_7oIbHQ6^7nIcHQ6_7mIbHS6_7jIcHV6^7gIdHY6]7cIfH]6\\\\7^IgHb6[7XIiHh6m:000000@_AVJa>j5_AVJa>j5_AVJb>h5_AXJa>g5`AYJa>e5aAZJ`>c5bA]J^>a5dA_J]>_5dAaJ]>\\\\5eAdJ\\\\>Z5eAgJ\\\\>V5eAjJ\\\\>T5fAkJ[>S5fAmJ[>Q5fAoJ[>o4fAQKZ>n4gARKZ>k4iATKX>h4kAXKU>f4mAZKT>c4nA]KR>a4PB`Ko=_4RBaKo=]4RBcKn=\\\\4TBcKm=[4TBeKm=X4UBhKl=U4VBkKl=P4WBPLk=k3YBTLi=h3YBXLi=d3YB\\\\Lh=a3ZB_Lg=^3\\\\BaLe=]3\\\\BcLd=[3^BeLc=Y3^BgLc=V3_BjLb=S3`BnLa=o2`BQMa=l2aBTMa=i2aBVMa=f2aBZM`=d2aB\\\\Ma=a2`B_Ma=_2`BaMb=Z2aBfMa=U2bBkMa=n1cBRN`=g1eBXNj=d0kB\\\\Ob=MhB3Xa0000001O000000000O2O0O2OmoV;\"}}, {\"image_id\": 53, \"category_id\": 1, \"bbox\": [218.0, 279.0, 156.0, 88.0], \"score\": 0.9999995231628418, \"association_id\": 2, \"light\": [-1.7320530414581299, -2.4777469635009766, 1.647101879119873, 2.231595754623413], \"segmentation\": {\"size\": [480, 640], \"counts\": \"PaV31n>101O0000000000001O0000000001O00000001O0000000001O0001O00000001O0001O0000001O0000000O2O00001O00000RNMoD3o:0PE1o:0PE2m:0RE1l:1SE0k:2UEMj:4VEMi:4UEMj:5TELk:6RELm:5RELn:5PELP;5nDKS;=aDG^;<_DEa;=]DBc;b0YD_Og;i0mC[OR<h0kCYOU<i0hCWOY<h10O1O100O1N21O1O01GbCnM_<o1bCRN_<l1bCTN^<l1bCTN_<j1bCVN`<h1`CWNb<g1_CYNa<g1_CYNc<d1^C\\\\Nd<a1]C_Nd<`1\\\\C`Nd<_1]CaNd<^1\\\\CbNe<\\\\1\\\\CdNe<Z1\\\\CfNd<Y1]ChNb<X1^ChNc<V1^CjNb<T1`ClN`<S1bClN^<S1cCmN^<Q1cCoN]<o0eCRO[<l0fCTOZ<l0fCTOZ<k0gCUOZ<i0gCWOY<h0hCXOX<h0hCXOX<h0hCXOX<h0hCXOX<h0hCXOX<h0hCYOW<g0iCYOW<g0iCYOW<h0hCXOX<h0iCWOV<k0iCUOW<l0hCTOX<n0fCROZ<o0eCQO[<o0eCQO[<P1eCoN[<S1h01N2O1O2N1O1O1QC^N^<f1VCcNi<j1010O0000001O0O100O100L4BPCfNV=U1>N1O2L4M200O2B=L5M3N2Ma\\\\l3\"}}, {\"image_id\": 53, \"category_id\": 1, \"bbox\": [15.0, 182.0, 
214.0, 177.0], \"score\": 0.9999998211860657, \"association_id\": 1, \"light\": [-2.2207934856414795, -2.0773699283599854, 2.1353251934051514, 1.988672137260437], \"segmentation\": {\"size\": [480, 640], \"counts\": \"\\\\[71o>1N2O2N1O1O001O1O1O001O0000001O0000001O00000000001O0000000000001O000001O000001O000000010O0000000001O0000010O00001O0000000000001O00000001O00000000001O0001O000001O00001O000\\\\LBgH=l6g0]HYO`7S1WHmNh7Y1SHgNk7^1SHaNl7R2bGnM\\\\8^2[GbMb8b2\\\\G^Mb8f2^GXMb8i2_GUM`8o2^GPMa8W3fFfLI2`9Z4[FeKe9]4ZFbKe9a4YF`Ke9c4YF]Kf9f4XFZKg9h4YFWKg9j4XFVKh9j4XFVKg9l4YFSKg9m4YFSKg9n4YFQKg9o4YFQKg9T500O10_NdF_M[9^2kF_MU9_2oF_MQ9_2RGZMZOYOc9Z3XG\\\\MUOZOc9U3^G`MnN]Ob9S3bG_MlN^Ob9R3cG`MkN^Ob9R3cG`MkN^Ob9Q3dGaMjN^Ob9Q3dG`MkN_Oa9Q3cGaMlN^Oa9Q3cGaMlN^Oa9R3aGaMnN]Oa9S3]GbMSO[O`9T3ZGcMVOYO`9V3UGcM\\\\OWO_9W3SGbM_OWO]9Z3oFaMEUO\\\\9j4fFVKZ9h4hFXKX9g4jFXKU9h4lFXKT9f4nFZKR9b4RG^Kn8_4UGaKk8]4WGcKh8]4YGcKg8\\\\4ZGdKf8[4[GeKe8Z4\\\\GfKd8Y4]GgKc8X4^GiKa8Y4]GgKc8[4[GeKe8]4mFdKIOZ9S5cFmJ]9[500000000000000000M301N1SOgFRLZ9]3aGSLf8g3\\\\1C>L4K5L3M4N101N101O0O201O01N2O00O110O01O0000011O0N1001102NOO0010O2N1O00O02N2N1O100N2O4L3L3kNYCMh<N`CMb<NfCM_<Je^P6\"}}, {\"image_id\": 54, \"category_id\": 1, \"bbox\": [769.0, 95.0, 165.0, 425.0], \"score\": 0.9999999403953552, \"association_id\": 3, \"light\": [-2.342529296875, -2.225149393081665, 2.219557285308838, 2.100689172744751], \"segmentation\": {\"size\": [659, 1024], \"counts\": 
\"\\\\\\\\_?3^d04N001N1000001QHS1VJmNc5a1VJ_Nf5g1XJYNc5m1[JTN]5U2bJlMY5Y2fJgMW5]2hJcMV5`2iJ`MU5b2jJ`MS5c2lJ]MP5g2]GRMk0;e7f2TG_MS1Kg7i2RGbMS1Gi7h2RGeMS1Ci7k2QGfMT1_Oi7n2nFkMU1WOj7U1XF[O:h0b1^N\\\\MN]:`1iF\\\\OROZ1Z2VNi7T1RGQ2S1kLj7R1YGQ2m0lLi7R1\\\\GR2k0lLi7n0aGU2f0mLi7i0gGY2`0nLh7f0mG[2;oLh7c0PH^28oLg7b0SH_26nLg7c0RHa27lLf7b0SHd27jLd7c0SHf29gLa7e0UHf2:eL_7f0VHg2;cL]7h0VHf2>bL[7k0PHg2f0^LX7P1hGg2R1XLV7V1SGQ3i1iKS7]1fFo2Z2dKn6c1_Fk2f2bKj6g1ZFi2m2`Kh6j6YIVIe6l6[ISIe6m6\\\\IRIc6P7]IlHf6T7[IjHTMI[9]7aIhHSM0Z9W7dIgHnL9]9o6eIYIZ6f6bI`I]6_6_IgI`6W6^ImIa6S6]IQJb6n5\\\\IUJd6j5XI[Jh6d5PIeJo6[5jHmJV7R5gHRKY7o4dHSK\\\\7n4^HWKa7l4VH[Kj7g4iGdKW8_4_GhKa8\\\\4VGiKi8]4nFfKQ9o7M2dN]1K4O10O20OXOcH_DZ7W;PIiDo6U;TIkDl6S;VImDj6Q;XIoDh6n:[IREe6j:_IVEa6g:bIYE^6e:dI[E\\\\6d:eI[E\\\\6c:fI]EZ6`:iI`EW6[:nIdES6Y:PJgEP6W:SJgEn5W:TJiEl5U:VJjEk5R:YJnEg5d9hJ[FX5`9mJ`FS5^9PK`FQ5^9QKaFP5\\\\9SKdFm4X9XKfFi4m8dKSG\\\\4h8iKXGW4f8kKZGU4d8mK[GT4d8mK\\\\GS4b8oK^GQ4_8RLaGn3\\\\8ULdGk3Z8WLeGj3Y8XLgGi3W8XLiGi3U8XLkGi3S8XLmGi3Q1]H\\\\5l3bIj3j0cH`5d3fIm3?iHf5\\\\3kIa5o5bJQJb5e5dJ[J_5\\\\5gJdJZ5n4SKQKP5f4WKZKi4b4[K]Kf4^4`KaKa4X4eKhK[4T4iKlKW4R4kKnKV4o3lKPLU4o3lKQLT4n3mKRLS4l3oKTLQ4k3PLULP4i3RLWLo3f3SLZLm3e3TL[Lm3c3TL]Ln3a3RL_Lo3_3RL`LQ4]3PLcLR4Z3oKfLS4X3mKhLT4U3nKkLS4S3nKmLS4P3oKPMQ4n2RLQMo3l2SLSMo3j2SLVMn3h2SLWMo3h2RLWMo3g2P7O3L4M3M2N3L4M2M4L4K5J6J6K5K7GYfi1\"}}, {\"image_id\": 54, \"category_id\": 1, \"bbox\": [127.0, 106.0, 130.0, 393.0], \"score\": 0.9999997615814209, \"association_id\": 4, \"light\": [-2.8293466567993164, -2.193561553955078, 2.657528877258301, 1.941909909248352], \"segmentation\": {\"size\": [659, 1024], \"counts\": 
\"PUb27Xd09I4J6XKWOSBS1Q1KR<3UBi0`1XOU<_2gCbMU<e2hC[MS<P3fCQMU<Z3dCgLY<^3dCcL4kNj:f4nD`L8kNg:k4kD]L>hN_:_5]DRLT1aNS:b7lE_Hl9h7UFXH`9Q8bFoGU9X8lFiGP9Y8QGgGl8\\\\8UGdGi8]8XGcGf8]8[GdGd8Y8_GhG_8V8bGmG^8Q8cGPH\\\\8o7fGQHZ8l7iGTHW8Y5mFiKP1oNS8R5bG\\\\K?AP8P5QHnJ32l7m4fJSKZ5h4kJXKU5f4lJZKU5d4mJ\\\\KS5b4oJ^KQ5a4PK_KP5_4RKaKm4^4UKaKl4]4UKdKk4[4VKeKj4Z4WKeKi4\\\\4WKdKi4[4XKeKg4\\\\4YKcKf4_4YKbKU4P5kKoJi3^5WLbJf3a5ZL_Jd3c5\\\\L\\\\Jb3i5\\\\LWJ`3P6]LPJ`3W6[LjIc3Z6[LeIe3^6YLbIg3a6VL_Ij3f6QLYIP4k6lKUIT4m6jKSIV4o6gKRIY4R7cKnH\\\\4W7`KiH`4Z7]KfHc4[7\\\\KdHe4_7XKaHh4d7SK\\\\Hm4g7PKYHP5h7oJXHQ5j7mJVHS5l7kJTHU5o7gJRHY5R8dJmG\\\\5U8cJjG]5Y8aJfG_5a8[J^Ge5l8RJSGn5R9nIlFT6V9c25YFhFi7S;N2N10O1O1O1O2N2N2N1O1O1mKdFmL\\\\9k2ZGfLEfL_8V4QHhL`0Q2QOQM_8o3_HYLd0`2^NYM`8i3UKkMiKVO=\\\\Og8]3\\\\K`MbLoN[O6j8T3eKnLYMNV7k2YNVMh1e2\\\\NZMf1b2\\\\N_Me1\\\\2_NcMc1W2aNjM`1Q2dNoM\\\\1m1hNSNZ1g1jNYNY1`1kN_NY1Y1lNgNV1T1mNlNU1P1mNPOT1n0mNQOT1n0mNROT1l0mNTOT1k0lNUOV1i0jNVOY1h0gNXO[1f0fNYO[1f0eNZO\\\\1f0cNZO^1e0bN[O_1d0aN[Oa1e0^N[Oc1d0]N\\\\Od1c0\\\\N]Of1?\\\\N@j19XNGn11TNOX;O10000OaT]?\"}}, {\"image_id\": 54, \"category_id\": 1, \"bbox\": [267.0, 107.0, 142.0, 411.0], \"score\": 0.9999860525131226, \"association_id\": 2, \"light\": [-1.614616870880127, -2.432457447052002, 1.4507945775985718, 2.2614662647247314], \"segmentation\": {\"size\": [659, 1024], \"counts\": 
\"ZS\\\\54]d04L3M4L2M3M3N3M2O1000O010000N2N2M2lBQOd6Q1YITOc6n0[IUOd6j0ZIZOe6e0ZI]Of6b0XIAh6=XIFg68ZIIe68QEBn17P98lDLD]Oj0?e:9aDT2=dMQ;9WDc2c0TMU;:SDi2f0mLT;a0oCh2i0hLS;[5hDgJ]OSOb:U7gEkIFQOT:]9kEdFR:_9iE_FH5]:^9hEgFV:[9jEeFR:_9mEbFn9c9QF^Fm9d9RF]Fl9e9TF[Fj9g9UFZFi9h9VFXFY8IlHR:kNVFn7d0[HR:c7`1O1OoMkHPF[ONk7b9PJ_Fo5Y9YJhFg5n8bJRG^5d8mJ[GT5Y8XKeGj4Q8`KmGb4m7dKRH]4k7fKTH[4i7gKXHY4e7jKZHW4c7lK]HT4a7nK_HR4_7PLaHP4^7QLaHP4_7PLaHP4^7QLbHo3^7QLbHo3]7RLbHo3_7PLaHP4_7PLaHP4`7oK`HQ4`7nK`HS4a7lK_HT4a7lK_HT4c7jK]HV4f7gKZHY4k7bKUH^4P8\\\\KPHe4S8XKmGh4W8TKiGl4]8nJcGR5d8gJ\\\\GY5h8cJXG]5j8aJWG_5h8aJXG_5i8`JWG`5j8_JVGb5i8^JWGb5j8]JVGe5i8ZJWGg5i8XJWGo6b7QI^HV7\\\\7iHdHY7[7fHeH[7Y7fHgH[7W7fHiH[7U7fHkHZ7T7gHlHZ7R7gHnHZ7Q7fHoH[7o6fHQI[7n6eHRI]7k6dHUI_7h6aHXI`7f6aHZIa7b6aH^I`7^6dHaI]7[6fHdI\\\\7R2RFY1d2eL[7n1WFY1_2iL[7l1YFW1_2lL[7i1\\\\FS1\\\\2TM[7f1^Fn0Z2\\\\M[7b1`Fj0Z2cMX7`1bFg0Y2hMW7`1cFc0X2mMW7^1eF>W2TNV7[1gF:W2[NT7Y1hF7V2_NV7W1fF6V2cNV7T1hF5S2gNV7S1jF2Q2jNX7R1jF0o1nNY7P1jFOo1POX7P1kFMn1ROZ7o0jFMl1TO]7k0jFNj1WO^7i0kFMg1ZO_7h0lFKf1]O`7e0mFLc1_Oa7c0PGEe1H]7?nKAT46SLJ[<000000O100000001O000OgY[<\"}}, {\"image_id\": 54, \"category_id\": 1, \"bbox\": [415.0, 84.0, 142.0, 445.0], \"score\": 0.9999995827674866, \"association_id\": 1, \"light\": [-1.9088292121887207, -2.4794390201568604, 1.7584627866744995, 2.2993385791778564], \"segmentation\": {\"size\": [659, 1024], \"counts\": 
\"dY[8l0bc0i0XO;F4L4M3M3M3M3L5H8kDUMW4T3gKmLS4[3jKeLS4`3kK`LS4b3lK`LR4b3mK^LQ4d3oK\\\\Lo3g3PLYLo3h3QLXLl3l3SLTLi3S4SLoKi3W4TLiKi3f4kKZKR4\\\\5[KdJc4V6eJjIX5^6dJaIY5k6^JnHYL1X9a7QJ\\\\HV6j7U35M2nFTHX5P8\\\\JZHa5l7UJ[Hi5i7PJ\\\\Hm5h7mI]HP6g7kI\\\\HS6h7gI\\\\HX6g7bI^H\\\\6g7\\\\I]Hc6g7TI_Hk6e7oH^HQ7d7jH_HV7d7cH`H]7[:O100000O1O1O0O2N2M1O2M22M:F3M[ORHlDi7P;_HoD\\\\7P;kHPER7l:UITEh6m:ZISEe6l:]ITEa6m:_ITE^6m:dISEX6P;iIPET6Q;nIoDQ6P;QJPEn5n:TJREl5l:WJTEi5j:YJVEf5i:\\\\JWEd5h:]JWEd5g:]JZEc5c:`J]E`5^:eJbE[5Y:jJfEW5V:mJjES5S:PKmEP5o9TKQFl4g9\\\\KXFe4^9eKbF[4Z9iKfFW4V9mKiFT4T9oKlFQ4P9SLPGm3j8YLVGg3e8^L[Gb3a8bL^G_3`8cL`G^3^8cLbG^3\\\\8cLdG^3Z8cLfG^3X8cLgG_3W8bLiG`3S8bLmGc3k7`LTHh3`7]L`Hn3R7ULnHT4CPHb6n3kIg4P6[KoIh4l5[KTJh4e5\\\\K[Jf4^5_KaJe4Y5^KgJe4S5^KmJd4n4_KQKc4f4eKZK\\\\4[3nLeLS3R3UMmLl2n2YMRMh2j2[MPMlHMj9Q3\\\\MRMkHKj9Q3^MSMf2l2[MTMe2k2\\\\MUMe2i2\\\\MWMe2i2ZMWMg2h2YMXMh2h2WMXMl2e2TM[Mo2b2RM]MP3b2oL^MR3a2nL`MR3`2mL`MT3`2kL`MV3`2iLaMV3`2iL`MX3_2hLaMY3_2fLaM\\\\3]2dLcM^3\\\\2aLdMb3Y2^LfMf3V2\\\\LiMg3S2ZLmMi3n1YLRNk3i1WLUNP4d1d7K6H`0^OYZ\\\\9\"}}, {\"image_id\": 55, \"category_id\": 1, \"bbox\": [300.0, 230.0, 124.0, 318.0], \"score\": 0.9999999403953552, \"association_id\": 1, \"light\": [-2.273569107055664, -1.8418679237365723, 2.2173025608062744, 1.7739224433898926], \"segmentation\": {\"size\": [683, 1024], \"counts\": 
\"n\\\\X64md0m0[O`0@>B:G4K5L4K6K3M2N2N3N2N4K6K3M6J9G4L2YNlKRBY4h=QLPBR4l=RLPBQ4m=RLoAR4o=RLkAT4Q>RLdAY4V>kKbA]4Z>_1K=C9G3ZDWHd9n7UFVHi9P8oESHP:P8lERHS:Q8gERHY:Q8`ETH_:n7\\\\EUHd:l7ZEUHf:m7VEUHj:S8kDPHU;S90000000000000000000fMkE^IU:_6SF[In9e6UFXIk9h6WFUIj95PE\\\\5Y1[Jh96TE]5W1XJi96TEa5W=TJQCm5V>0oBlIl:T6aDoIn;R6PD]Jb;d5\\\\D]Jd;d5[D[Jf;f5YDZJg;f5YDYJh;h5WDXJi;i5VDWJj;i5VDWJj;i5UDXJk;i5TDWJl;i5TDWJm;h5SDXJn;h5QDXJP<g5PDYJR<[4jBZLS1[OT<U4UCZLe0BW<m3_C_L7D[<i3cCcL0D^<f3eCfLLD_<c3iCiLGCb<_3lCnL@Dd<[3PDPM\\\\OEd<X3UDQMWOFf<S3\\\\DSMmNKg<n2bDUMgNMg<k2fDVMdNOg<e2lDYM]N2h<Z2cFfM^9U2fFkM[9P2iFoM[9h1kFXNX9`1lF`NW9Y1nFgNV9n0QGQOX99VGGW?O100O1O100O100O1000000O100O10000O100O100O100O100O10001NUf_<\"}}, {\"image_id\": 55, \"category_id\": 1, \"bbox\": [475.0, 284.0, 72.0, 199.0], \"score\": 0.9999997019767761, \"association_id\": 2, \"light\": [-2.4200611114501953, -2.1639678478240967, 2.3037362098693848, 2.098292350769043], \"segmentation\": {\"size\": [683, 1024], \"counts\": \"mTm9<ld05K4M3M3L4N100OO01O101O10001N2O1N3L6[\\\\OhNSb06P^Ok1ha0[NQ^Oj1la0o0L2N2Z@cLZ=_3`BkLZ=X3`BkL`=X3[BkLd=X3XBiLh=Y3VBgLj=\\\\3RBeLn=`3mA`LkN\\\\OU?Z4jAZLQO]OT?]4gAVLUO]OT?b4aAVL_>m3^ASLc>n3^AoKc>Q4bA_KXO2U?`4iAaKY>_4eAcK\\\\>]4bAcKT?h3k@WLZ?f3e@ZL]?f3a@ZL`?\\\\2W@UN7_Ob?Y2\\\\@WN1@d?V2^@YNNAd?T2a@[NJAf?Q2d@\\\\NFBg?P2g@[NCEf?o1j@YNAGf?m1o@WN]OLd?g1ZBYNf=a1`B^Na=_1bBaN_=W1hBiNY=R1kBnNV=n0mBROT=k0nBTOT=h0oBXOR=`0UC@l<;XCEj<7XCIn<KXC4Qa0O000O10000O101N2OVfm9\"}}, {\"image_id\": 56, \"category_id\": 1, \"bbox\": [348.0, 254.0, 330.0, 118.0], \"score\": 0.9999995827674866, \"association_id\": 1, \"light\": [-2.2720415592193604, -2.1071808338165283, 2.1315906047821045, 1.8658407926559448], \"segmentation\": {\"size\": [601, 785], \"counts\": 
\"cb\\\\62gb01O000O101O0O2O001O0000000000000O2O0000001O0000000000001O0000000000001O000000001O00000000001O000000001O0000000000001O00000000001O000000000000001O0000000PO5R_OKi`0`0Q_O@n`0b0Q_O_Om`0b0T_O]Ol`0d0S_O\\\\Ol`0e0T_O[Oj`0[1O1N3N1O100O100O101N100O101O0O100000000O101O0O10000O2O0000000O10001O000O100O100O100O1000[On_OdNQ`0\\\\1o_OdNP`0S2O10YOQ@dNm?^1S@bNm?^1S@bNl?_1T@aNl?_1T@aNk?`1U@`Nj?a1V@_Ni?b1W@_Ng?b1Y@^Ne?d1[@\\\\Nd?]200O10YO]@XNb?i1^@WNb?`2O1O[O`@SN_?n1a@SN]?e200O100O[Oe@nM[?R2e@nMZ?S2f@mMZ?i200O1000000O\\\\Oh@iMX?W2h@iMX?W2h@iMW?X2i@hMW?X2i@hMW?X2i@iMU?X2k@hMU?X2k@hMU?X2k@hMU?X2k@hMU?X2k@hMU?X2k@hMU?X2k@hMU?X2k@hMU?X2l@gMT?Y2l@gMT?Y2m@fMS?Z2m@fMR?[2n@eMR?\\\\2n@cMR?]2n@cMR?]2n@cMR?]2n@cMR?^2m@cMR?]2o@bMQ?^2o@bMQ?_2n@aMR?_2PA_MP?b2PA]MQ?b2o@^MQ?c2n@]MR?e2m@ZMS?g2l@YMT?g2l@YMT?V30000000Eo@nLQ?T3n@kLR?W3m@hLS?X3m@hLS?Y3m@fLS?[3l@eLT?[3l@eLT?[3m@dLS?]3l@cLT?]3l@cLT?]3l@cLU?\\\\3k@dLU?`30Mk@bLU?^3k@bLU?^3k@bLV?]3j@cLV?]3j@cLV?\\\\3k@eLT?[3k@fLU?Y3l@gLT?Y3k@hLU?X3k@hLU?W3k@jLV?U3j@kLV?U3j@kLV?T3k@lLU?T3k@lLU?S3k@nLU?P3m@PMS?n2o@RMQ?m2PASMP?m2n@UMR?j2n@WMR?i2n@WMR?h2n@YMR?g2n@YMR?g2n@YMR?g2n@YMR?f2o@YMQ?h2o@XMQ?g2PAYMP?g2PAYMP?g2PAYMP?g2PAYMP?g2PAYMP?g2PAYMP?g2PAYMP?g2PAYMP?g2PAYMP?g2PAYMo>h2QAXMo>h2QAXMo>i2PAWMP?i2PAVMQ?j2o@VMQ?k2n@UMR?k2n@UMR?k2n@UMR?k2n@UMR?l2m@TMS?l2m@TMS?l2m@TMS?l2n@SMS?m2l@SMT?m2l@RMU?n2k@RMU?o2j@QMV?o2j@QMV?P3i@PMX?o2i@PMW?P3i@PMW?P3i@PMW?Q3h@oLX?Q3h@nLY?R3g@nLY?R3g@nLY?R3g@nLZ?Q3f@oLZ?Q3f@oLZ?R3e@nL[?R3f@mL[?R3e@nL[?R3e@nL\\\\?Q3d@oL]?P3c@oL^?Q3b@oL_?P3a@PM_?P3b@nL_?R3a@nL`?Q3401O3M1O1O1N2O001O003M3M2N1O1O001O0O2O1O1N3N2M2O0O2O001N3N3L3N1N2O2M2O2M3M6Gl_n1\"}}, {\"image_id\": 57, \"category_id\": 1, \"bbox\": [102.0, 110.0, 341.0, 259.0], \"score\": 0.9999980330467224, \"association_id\": 1, \"light\": [-1.9967072010040283, -1.6627368927001953, 1.9514491558074951, 1.553058385848999], \"segmentation\": {\"size\": [480, 640], \"counts\": 
\"lR`17f>6L4M1N101O0O2O001O1N4M2N001N2O001O001O1O1O1O6J2M2O001O00001O0O101N5L5K2N2M101O00001O0O2O009G4L1O001O00001O00001N2O001O001O00001O000O101O0O100000001O000O1000000000000000000000000000000O10001O000O100001O00WNgMkFY2P9oMmFQ2n8WNoFj1j8]NVGb1]8mNaGS1T8YOkGg0n7BPH?h7KUH5c7>SHAk7k3N2O1O1O2MZMYHWNd7n1]HoM`7V2_HiM_7[2`HdM^7_2bH`M]7R5O100O1O1O1O1O1O1O1000000O10000000000000000O10000O10000O100O100O100O100O100000000000oNUImIk6V6TIhIl6Y6TIfIk6\\\\6UIcIk6^6TIbIl6^6UIaIk6`6TI`Il6`6TI`Il6Z700000000000000000000000000000000000000000WOTI^Il6b6TI^Il6[70000000000000000000000000000000000000000000000000000000O100000000000000O100000000000000O101O00001O001O001O1O1N2O001O001O001O1O1O3M2N2N2N1O1O1O1O2N2N2N1O1O1O001O1O1O2N1N2O1O1O001N2O1O2N1N2O1O001O1O1N2O2N1O1O001O0O2O000\\\\OnFYKT9f4lFYKU9g4kFXKW9g4iFXKY9f4hFYKY9g4gFXK[9g4eFXK]9f4eFXK]9g4cFXK_9f4bFXK`9h4=N2O001N2O1O1O2N2M2O1O1O1O1N3N3M9G:E3N2N1N2N3M4K;D_1gMQCGX_l2\"}}, {\"image_id\": 58, \"category_id\": 1, \"bbox\": [353.0, 186.0, 36.0, 84.0], \"score\": 0.9999741911888123, \"association_id\": 1, \"light\": [-2.1039488315582275, -2.023393392562866, 2.023387908935547, 1.8118160963058472], \"segmentation\": {\"size\": [431, 640], \"counts\": \"hjd41^=1O0000000_N1bE0Z:5dEK\\\\:5dEKZ:6gEJT::lEGP:=PFDm9?RFAl9c0RF]On9j0kEVOT:`1WE`Ng:d1WE]Ng:g1WEXNf:T2QElMl:Y2TEeMk:]2UEbMj:`22001O0001O2N3M3K2O3M2QOjDAZ;:P1K8I[^Y3\"}}, {\"image_id\": 59, \"category_id\": 1, \"bbox\": [60.0, 109.0, 379.0, 285.0], \"score\": 0.999998927116394, \"association_id\": 1, \"light\": [-2.3024373054504395, -1.8262712955474854, 2.186697483062744, 1.6463152170181274], \"segmentation\": {\"size\": [428, 640], \"counts\": 
\"m[i0>l<4K5UE^Of8f0TGBf8a0UGFf8=WGGe8<YGEe8>PFTOn0>P9m0nFTOo8Q1nFPOQ9R1mFoNR9S1lFnNS9U1jFlNT9X1iFjNU9Z1gFgNX9_1bFbN]9c1^F^NR9QOXGf3d8cLVG^3g8hLUGY3h8g0N2O0NoN`G\\\\M^8b2hGZMU8g2SHSMi7P3^HjL^7Z3hH`LT7e3QIULi6Q4T1500O100000010O1O1O0O2O1N2N101hM_KcKc4]4^K`Kd4`4^K]Kc4b4`K[K`4f4bKWK_4i4mKjJT4V5nKhJR4W5PLgJR4X5oKgJQ4Y5QLdJQ4[5PLcJR4\\\\5PLaJR4^5PL_JR4_5QL]JR4b5RLWJS4g5PLQJV4n5e1O1O2N1N2RKdHj3]7SLfHl3Z7SLgHm3Y7SLgHm3[7oKgHQ4_7fKdHZ4W8000O100000000aMVK\\\\Lj4c3ZKZLf4d3^K_JKY1g4W4aKZJM^1b4W4eKTJNd1]4W4WLdJPOa0i4i4[L`JSOd0b4k4_LYJWOi0Z4k4XMQKi2l4\\\\MRKd2j4cMSK]2i4iMTKX2k4jMSKW2k4mMRKT2m4nMoJU2P5T3O1O100O100OaJTK`2k4_MXK`2h4`MXK`2g4aMZK^2e4cM\\\\K\\\\2d4dM]K[2b4fMaKVMBn4l4mMeKS2[4mMeKS2[4mMeKS2Z4nMfKR2Z4nMgKQ2Y4oMgKQ2Y4oMgKQ2Y4oMgKQ2Y4oMgKQ2Y4oMgKQ2Z4nMfKR2Z4nMfKR2Z4nMfKR2[4mMdKT2\\\\4lMdKT2\\\\4lMdKT2]4kMcKU2]4kMcKU2^4jMbKV2^4jMcKU2^4jMbKV2^4jMbKV2_4iMaKW2_4iMaKW2`4hM`KX2`4hM`KX2a4hM^KX2b4hM^KX2b4hM^KY2b4fM^KZ2b4fM^KZ2b4fM^KZ2b4fM^KZ2c4eM]K[2c4eM]K\\\\2b4dM^K\\\\2b4dM]K]2d4bM\\\\K_2c4bM\\\\K_2a0ZKc2W2lL_2>^Ke2S2lLd28^Kk2m1mLk2L`KU3h1hL[MIj6]3T3O0O100O10O0fHjL_4V3]KQM^MFn6Z3_KZM^4f2dKYM[4h2dKXM\\\\4h2dKYM\\\\4g2cKYM]4g2cKYM]4h2aKYM_4g2]JcLKf0h5g2\\\\JeLKc0j5i2YJfLLa0k5i2WJhLN?k5j2UJiLO=l5j2SJlL09n5l2PJlL28n5l2nInL46n5m2lIoL45P6l2lInL56o5l2nIkL49n5l2QJgL2<n5m2TJaL0b0l5m2YKSMg4m2YKRMh4n2XKRMh4n2XKRMh4n2XKRMh4n2XKRMh4n2XKQMi4o2WKQMi4o2WKQMi4o2WKQMi4o2WKQMi4o2WKQMi4n2XKRMh4n2XKRMh4m2YKRMh4n2XKRMh4n2WKSMi4m2WKSMi4l2XKTMh4l2XKTMh4l2XKTMh4l2XKTMh4l2XKTMh4k2YKUMg4k2YKUMg4k2YKUMg4k2YKVMf4j2ZKVMf4j2ZKVMf4j2ZKVMf4k2YKUMg4k2YKVMf4j2ZKVMf4j2ZKVMf4j2ZKVMf4j2ZKVMf4j2ZKWMe4i2[KWMe4i2ZKXMf4i2YKWMg4i2YKWMg4i2YKWMf4j2ZKVMf4j2ZKVMf4k2YKVMe4k2[KUMe4l2ZKTMe4m2[KSMe4n2ZKRMe4o2[KQMd4Q3[KoLd4S3[KmLd4U3[KkLe4U3[KkLd4W3[KiLd4Y3[KgLe4Y3[KgLe4Z3ZKfLe4\\\\3ZKdLf4\\\\3YKfLf4[3YKeLg4\\\\3XKZLXNH`6o3WKYL[NF]6T4VKUL^NG\\\\6V4TKSL`NH[6X4RKPLcNH[6\\\\4nJlKgNH[6_4kJhKlNHY6d4gJdKPOHX6h4eJ`KSOIW6i4dJ^KUOJV6k4bJ[KXOKU6j4cJZKYONQ6j4eJXKZONQ6j4eJXK\\\\OMn5l4eJWK]OMm5m4fJUK_OMj5P5fJTK_OLj5R5fJSK@Jj5[3ZJgM:VOCFh5U3QKc
MAEGAf5Q3\\\\KgMTOIP6_2oJiMlNLS6[2SKoNm4o0VKQOh4o0YKQOg4n0[KROd4m0]KSOb4m0`KQOa4n0aKPO`4o0bKoN^4P1eKeMXN;T6n1gKXMjNb0_5T2RMgMo2V2WMdMk2Y2YMgMg2U2]MkMb2R2bMoM\\\\2Q2eMoMZ2Q2hMnMW2R2jMnMV2Q2kMoMT2R2lMnMT2Q2mMoMR2Q2oMPNP2o1QNQNo1o1RNPNn1n1TNRNk1n1VNRNj1l1XNTNh1k1ZNTNf1k1[NUNe1j1\\\\NVNc1j1^NVNb1i1_NWNa1h1`NXN`1h1aNWN_1h1bNXN^1h1bNXN]1h1dNXN\\\\1h1eNWNZ1j1fNVNZ1j1fNVNY1k1gNUNX1k1iNUNV1l1jNTNV1l1jNTNT1m1nNSNQ1m1oNSNP1n1PORNo0n1SOQNm0o1SOQNl0P2TOPNl0P2TOPNl0P2UOPNj0Q2UOoMk0R2TOnMm0R2ROnMn0R2SOmMm0T2ROlMn0U2QOkMo0U2ROjMn0W2QOjMn0V2ROjMo0V2POjMP1V2POjMP1V2QOiMo0X2POhMQ1W2oNiMQ1X2nNiMR1V2oNiMR1W2mNiMU1V2jNjMW1V2hNjMZ1U2eNlM\\\\1S2cNmM_1R2aNmMa1R2^NnMd1Q2[NoMf1Q2YNoMh1Q2XNnMi1Q2WNoMj1Q2UNoMl1P2TNPNm1P2SNoMn1Q2g4O101O1O2M3N1O0O10O01O10O0001O1O001O1O1O2N3M2N1O2N1O1O1O1O0O2N3N1N2N3M<[ORjc2\"}}, {\"image_id\": 60, \"category_id\": 1, \"bbox\": [15.0, 235.0, 293.0, 224.0], \"score\": 0.9999988675117493, \"association_id\": 1, \"light\": [-1.8708120584487915, -1.8456785678863525, 1.7090442180633545, 1.676763653755188], \"segmentation\": {\"size\": [471, 640], \"counts\": 
\"SW73a>4K6L3N3M2M2N3lDYOT8j0eGFP8;mGKo75lG3Q8NeG<Z8DbGb0[8_OdGd0Y8]OeGf0W8]OeGh0Y8YOdGk0Z8WOcGl0[8VOcGl0Z8WOdGk0Z8WOcGl0\\\\8UOcGl0[8VOdGk0Z8WOeGj0Y8XOgGh0W8YOiGh0W8YOgGh0X8YOfGi0Y8XOeGj0Y8bNkF]Ok0R2Y8aNmF^Oh0R2Z8^NRG@b0S2[8[NWGC;S2^8XNYGF7S2_8XNZGF4T2`8WN]GGOT2c7^McHh0LHIU2g7^M`Hf00Q3_7\\\\L]He04o2_7`LWHc08o2`7nM]HT2c7mMZHU2f7mMVHU2j7\\\\20000WHYIb7g6\\\\H[Id7d6]H]Ib7c6]H^Ic7b6\\\\H_Id7a6\\\\H_Id7_6^HaIb7]6`HbIa7Z6bHgI^7V6eHiI\\\\7U6fHkIZ7Q6jHnIW7n5mHRJS7k5PIUJP7h5RIYJn6b5WI]Jj6`5YI`Jg6^5\\\\I`Je6`5[I_Jg6_5Z1N2N4L4L6J4L3N1O2N1O00kF_KT8b4jG`KU8`4jGaKV8`4iGaKV8`4iG`KX8`4fGaKZ8_4fGbKY8_4eGbK\\\\8]4dGcK]8]4aGdKa8Z4_GfKb8Y4^GgKb8Y4nF_K99j8W4lFaK:8j8W4kFbK;7k8]4TGcKl8]4SGdKn8\\\\4PGeKQ9]4kFcKY9[4eFfK\\\\9i3cFhL_9T3bFmLa9m2bFSMV;R1mDmNZ;k0gDVO[;g0fDYO4QOi:a1UE^O1SOn:W1TEFMUOQ;o0TEMKTOS;k0TE0JUO[;;RE`0CVOd<k0[CVOe<j0[CVOe<k0ZCUOf<g0PCTO96g<f0PCTO86h<f0QCTO67i<d0RCUO57i<d0SCTO39i<c0UCUO18j<c0UCUO09k<b0UCVOO8l<b0UCFk<:UCFk<:TCGl<9TCGl<8UCHk<8UCHk<8UCHk<8UCHk<:SCFm<;RCEm<=RCCn<T1O00000000000000LSC]Nn<c1SC\\\\Nm<d1SC\\\\Nm<c1400LoBaNR=^1400O1O101L3N3N1O1O101O0O100000001N1000000001O00000000000000000O10O10000000000000000000000O10000000000000000000000E\\\\BCd=<^BCb==^BCb=<_BDa=<_BDa=;aBD_=<aBE^=;bBE^=;bBE^=:cBF]=:cBF]=:cBF]=9dBG\\\\=9dBG\\\\=9dBG\\\\=8eBH[=8eBH[=8dBI\\\\=7dBI\\\\=7dBI\\\\=7dBI\\\\=7dBI\\\\=7cBJ]=5dBK\\\\=5dBK\\\\=5dBK\\\\=5cBL]=4cBL]=4cBL]=4cBK^=4cBL]=4cBL]=4cBK^=5aBL_=4aBL_=4aBL_=4`BM`=3`BLa=4_BLa=5]BKd=5[BKf=6YBIh=8VBIi=:UBFk=b010000O100O10000O10000O1O1N2O10000O1000000O100O1000000O1000000O101O0O100000000O10000O101O00001N2O2LlYh4\"}}, {\"image_id\": 60, \"category_id\": 1, \"bbox\": [265.0, 236.0, 31.0, 39.0], \"score\": 0.9999235272407532, \"association_id\": 4, \"light\": [-1.6054775714874268, -2.2644662857055664, 1.4792580604553223, 2.1254653930664062], \"segmentation\": {\"size\": [471, 640], \"counts\": \"VTj35>LC5g=j0K4N2O10000000O1HbBVOa=`0c0I5NWBF[=1gB0P>000000000000000000000000000001O00fnm4\"}}, {\"image_id\": 60, \"category_id\": 1, \"bbox\": [309.0, 
231.0, 56.0, 61.0], \"score\": 0.9164212942123413, \"association_id\": 2, \"light\": [-1.9533379077911377, -1.9171288013458252, 1.811262607574463, 1.7792768478393555], \"segmentation\": {\"size\": [471, 640], \"counts\": \"o[^4`0Q>:I5L2N5L1N2OKgBnNW=[1N4L4M4L2N2N0000O3N3L4L3XOTCXOI1W=b0SC]OU=`0mBAT==lBCW=9jBGS>00000000001O000000000000000001O01O0001O0000000000001O00000O10lVn3\"}}, {\"image_id\": 60, \"category_id\": 1, \"bbox\": [382.0, 241.0, 27.0, 29.0], \"score\": 0.9996330738067627, \"association_id\": 3, \"light\": [-2.330597400665283, -2.2946531772613525, 2.231111526489258, 2.192498207092285], \"segmentation\": {\"size\": [471, 640], \"counts\": \"gn_55b>O10O1N1N3G9L7L2O1O0001O2N1@_BHX>1N10000001O000000000O10eoY3\"}}, {\"image_id\": 61, \"category_id\": 1, \"bbox\": [82.0, 172.0, 147.0, 301.0], \"score\": 0.9999992847442627, \"association_id\": 2, \"light\": [-1.0044996738433838, -4.334400177001953, 0.8307410478591919, 4.204932689666748], \"segmentation\": {\"size\": [473, 640], \"counts\": 
\"aZV11f>4M2O1N2O000O2XI3bNMZ1:cNGX1`0_JTOY2<V3d0VJ^O_2NZ3g0RJB`2H\\\\3h0PJFa2B^3k0jILc2ZOb3l0dI4f2POd3P1_I8j2hNe3U1[I8n2cNf3\\\\1SI6`0[Ne13g4d3_I_Lg1Mi4h4UKXKj4k4SKVKl4n4QKRKn4P5QKPKm4T5PKmJn4V5QKjJm4X5SKhJk4[5SKfJl4\\\\5RKeJm4]5RKcJm4a5oJ`Jo4e5mJ\\\\JQ5g5mJZJR5i5lJWJS5k5lJUJS5o5iJRJV5W6bJiI^5[6]JfIb5]6[JdIc5_6\\\\JaIb5d6ZJ]Id5i6XJWIg5n6TJSIk5P7SJPIl5S7RJmHl5Q8M4M2N2O1O1N2O1N2O10000OSOfJ]HZ5a7gJ`HY5^7hJcHX5[7iJfHW5W7kJjHU5o6QKRIo4h6WKXIi4f6XK[Ih4c6XK_Ih4a6XK_Ii4_6WKbIi4^6UKcIl4\\\\6TKeIl4[6SKfIn4Z6QKfIQ5Z6mJfIW5W6hJiI\\\\5U6bJkIc5Q6\\\\JoIg5o5XJQJl5m5RJSJP6l5oITJR6k5nIUJT6j5kIVJV6i5jIWJW6g5jIYJW6e5jI[JX6b5iI^JX6`5iI`JX6]5kIbJV6[5WJZJj5c5YJ\\\\Jg5`5^J_Jc5^5_JbJa5Z3_IkMS1jN_5[5cJdJ_5W5eJhJ^5R5gJlJ\\\\5o4fJQK\\\\5k4hJSKY5k4iJTKY5i4iJVKZ5e4hJ[KY5b4iJ^KY5^4iJbKX5]4iJbKW5]4jJcKW5\\\\4iJdKX5[4hJeKZ5Y4gJfKZ5Y4fJgK[5W4fJiK[5V4eJjK[5U4gJjKZ5T4gJlKZ5R4hJmKY5P4jJoKW5o3jJQLX5k3lJSLV5j3mJTLU5j3kJVLX5f3jJYLZ5a3iJ]L[5]3iJbLY5Z3iJfLZ5V3hJhL[5U3V3N2O1N101N1O2M2O1O2N1O2O2M3M2TOoCmNS<o0WDiNk;S1P1N2N2O1N3L7HSbm5\"}}, {\"image_id\": 61, \"category_id\": 1, \"bbox\": [582.0, 162.0, 57.0, 170.0], \"score\": 0.9999995231628418, \"association_id\": 1, \"light\": [-2.202094078063965, -2.163949728012085, 2.0406506061553955, 1.9979678392410278], \"segmentation\": {\"size\": [473, 640], \"counts\": \"PQ]8:]>8Hk0TO5M4K4K3M5Ii0ZO7I;E?@8H6K3N3M3N1O0O2mE[KP:e4PF]Kn9b4SF^Kl9c4TF^Kj9b4VF`Ki9_4XFaKh9^4ZFbKe9]4\\\\FcKd9\\\\4]FeKd9Y4\\\\FgKe9W4]FiKb9W4^FiKd9T4]FmKe9n3]FRLm9_3XFaLk9V3[FkLg9i2bFWM_9_2kF`MV9]2lFcMU9Z2mFfMS9\\\\2lFdMU9W2nFiMT9o1gEjMo07[9k1UGUNk8h1WGXNk8`0_E3>]Ob00b99iE24G`0Nd97QFJM1?Md96SGKYO0d9OfH1R;O01O010O0010N2Ne4\"}}, {\"image_id\": 61, \"category_id\": 1, \"bbox\": [302.0, 71.0, 280.0, 401.0], \"score\": 0.9997153282165527, \"association_id\": 3, \"light\": [-1.602038860321045, -2.1158246994018555, 1.4658737182617188, 1.9356578588485718], \"segmentation\": {\"size\": [473, 640], \"counts\": 
\"kh[49Z>8C>F:K6J5K4M3L4M2N2M3N3L4L4L4M3L4`E\\\\Mf8f2VGWNn7m1nGXNm7n1lGZNm7n1hG\\\\NP8]4I8G7I7J4L4M1N3M2N2N2M4M2O1N20O01O0001O000O10000O0O2LhIhHX5T7kJPIQ5m6RKVIl4i6UKYIh4g6ZKYId4g6_KXI_4i6cKVI[4j6hKUIQ4P7RLoHW3f7kLZHR3f7RMYHl2g7WMXHh2g7ZMYHd2h7]MXHb2g7aMXH^2i7cMVH\\\\2j7eMVHY2l7hMSHV2o7jMQHT2P8mMoGS2R8nMmGQ2S8PNmGo1T8QNlGn1U8SNiGn1W8TNgGl1Y8Z21O0O2O001O00001N101O0O2O0O2N101N101N3N1N3N2M3N1N2O2M2N3jJ_Fo4d929F9H5J4M2M4M2M4L4M4K4L2M4L3M2L4M4K4M3M3M4M2N2O1O10000O10O10O1001O1N2O2M3N2M2N3N2O1N2]OYMPEj2\\\\:eMfE^2n9jMSFX2f9`MTF_O7S3`9`M[F[O6X3Z9_M`FXO7Z3W9^MdFWO5]3S9^MYGc2d8_M\\\\Gc2`8_M`Gc2[8eLjFb0k0j2X8cLQGb0g0m2S8bLXGa0e0o2o7`L^Ga0c0R3j7_LcG?c0S3g7`LfG=c0T3d7aLhG<c0U3c7aLhG;e0U3a7bLhG:f0U3a7bM_H_2_7bM`H_2_7bMaH_2]7bMbH_2]7bMcH^2\\\\7dMcH]2\\\\7cMdH]2[7dMeH]2Z7cMeH^2[7bMeH_2Z7aMfH_2Z7aMfH`2X7aMhH_2X7`MiHa2V7_MjHb2U7^MkHd2T7ZMnHf2Q7ZMoHh2o6XMRIh2n6WMRIj2m6VMTIj2k6UMVIk2k6TMVIl2i6iL[HoNm0Y4g6gLcHiNh0`4d6fLgI[3Y6bLiI_3V6`LkIb3S6^LnIc3P6\\\\LQJf3n5YLRJi3l5WLTJj3k5ULWJk3h5ULXJl3g5SLZJn3d5RL]JP4a5oKaJS4[5nKeJ^4o4aKRKR5[4nJeKU5W4kJjKW5T4iJlKY5R4fJoK]5m3cJTL`5i3_JXLc5f3\\\\J[Lf5b3[J^Lg5`3YJ`Lh5o1mIRM<o0h5m1oIRM9Q1i5j1RJSM5S1j5h1TJRM3V1k5f1UJPM1Z1k5d1XJoLM]1m5b1eK^N]4_1dKaN]4^1cKbN_4[1bKeN_4Z1aKfN`4Y1`KgNa4X1_KhNb4W1^KiNc4W1\\\\KiNf4U1[KjNh4S1YKlNi4S1XKkNj4S1nKUNS4k1SLnMo3P2TLmMl3R2VLmMk3Q2VLoMk3o1VLQNk3m1WLRNk3j1WLVNj3h1ZLUNh3h1[LVNh3f1ZLYNh3b1[L^Nh3V1bLiNa3k0hLUO^90001O00001O000000O100O10000O1000000O1O1O1O1O10000O10O10O100O1O1001O2L5KV[j0\"}}, {\"image_id\": 62, \"category_id\": 1, \"bbox\": [137.0, 141.0, 151.0, 187.0], \"score\": 0.9992598295211792, \"association_id\": 1, \"light\": [-2.5492546558380127, -1.6976176500320435, 2.395972728729248, 1.5300581455230713], \"segmentation\": {\"size\": [375, 500], \"counts\": 
\"\\\\]b17Z;KiD7V;6O1O3M7H3N1O1O1O3M2N1N20O10O1aMTOaIHlNT1]7EQIKXOa0`7`1^H`N^7n2O01lNdHoMY7T2hHlMU7]3M3N1O00000O1000000000000000000001O000000000000001O00000XN[I^Ne6_1]IjMB3R7P2]IoMDMo6R2_IRNBLn6Q2aIUNAHo6Q2aIlN_6S1bImN_6R1aInN`6P1aIPO_6P1aIPO`6o0aIPO_6P1aIPO_6P1aIPO`6o0aIPO_6P1aIoN`6Q1aIoN^6Q1bIoN^6Q1cInN]6R1dImN\\\\6S1dImN\\\\6T1dIkN\\\\6U1dIkN\\\\6V1dIjNZ6W1gIiNX6X1gIiNX6W1iIiNU6Z1iIfNV6\\\\1iIeNT6]1mIcNQ6_1oIaNo5`1QJaNm5a1SJ`Ni5c1VJ^Nh5d1WJ\\\\Nh5f1WJ[Ng5h1XJXNg5h1YJYNe5j1[JUNd5m1[JRNe5V2TJiMk5\\\\2RJbMo5b2QJiLHIW6b3oIbLOBX6Q4eIZL>Ed5Z4gIlKa08b5Q4^JRLa5P4ZJSLe5P4XJRLg5P4RJULm5k400O100O10O00100N2N10100O100O100O1O1O100O1O1N2N2L4^OfIcK^6Y4b0J6TOl0M3N2O1N2L4M3N2O1O1nMeGe0^8YOWH2k7L[HMh72[HJg75[HHh76[HCj7<i1O1O1O2N2Mn_]2\"}}, {\"image_id\": 63, \"category_id\": 1, \"bbox\": [197.0, 155.0, 137.0, 168.0], \"score\": 0.9999469518661499, \"association_id\": 1, \"light\": [-2.7201004028320312, -1.8868587017059326, 2.583465576171875, 1.7267870903015137], \"segmentation\": {\"size\": [327, 500], \"counts\": \"dVo1110U:000j]51jaJ6XFLd9=O1N2O1ON21aF]OZ9b0gF_OX9`0iFAU9?mFBP9>QGCn8<RGFm89TGHk87VGIj87VGJh87XGIh87XGIh87XGIh88WGHS7HXJ`0eNHn6N\\\\J;eNGm61]J9fNEk64_J7hNBg6:aJ5hN_Oe6?cJ2i51VJ0c56\\\\JK]5<cJE[5=dJDY5>gJBW5a0hJ@R5i0jJWOT5n0iJSOU5T1fJkNY5X1fJhNV5]1iJcNU5_1dJgN[5[1aJSN\\\\O7Q6i1_JTN_O2P6m1^JhN_5\\\\1^JUNEGl5Z3TJeLk5m3OFWJVLh5n3WJQLh5W4N3NM\\\\JfKb5\\\\4^JcKa5^431H8L4N200O101OO1000O1000000000O1O1VORKXLQ5e3VKULl4i3j000O1O2O0O1N2O1O2[NPJPOT6i0RJTOQ6h0SJTOQ6j0QJSOW6e0PJROX6h0f1N1O1O3M100O2O2M1O2O2N2OO01N2O2M1O101N2OO2OO101O0O10O010O01N4L4KYid1\"}}, {\"image_id\": 63, \"category_id\": 1, \"bbox\": [82.0, 111.0, 146.0, 195.0], \"score\": 0.9998956918716431, \"association_id\": 2, \"light\": [-2.59403133392334, -1.6733864545822144, 2.4641075134277344, 1.4995403289794922], \"segmentation\": {\"size\": [327, 500], \"counts\": 
\"W]j0Q1Q97M1N2M3H8N2N2O1N2K4B>O2O0O10BbHVNV7k1TIVNd6g1aI[N]6d1eI]NY6c1hI]NX6b1iI^NW6_1lIbNS6Z1QJgNm5V1WJjNi5U1XJlNg5S1ZJmNe5R1]JRO^5m0dJTOZ5j0iJWOU5i0lJWOS5i0nJXOQ5g0PKYOo4g0RKZOm4f0SKZOm4e0TK[Ol4d0UK\\\\Ok4d0UK\\\\Oj4g0TKYOl4h0SKXOm4i0RKWOn4j0hJ`NQOf0W6j0fJdNROa0X6l0bJhNTO<X6P1\\\\JDd5c0iIHW6P242N2N1O0N3MnNSJoMl5o1\\\\JmMb5o1hJkMW5W2RKfMX4j2hK[MoN]Oj4_3XLWMg3j2XLXMf3h2[LZMc3f2]L[Mb3e2^L[Mb3e2^L[Ma3f2`LZM_3f2aLZM_3g2aLXM_3h2aLYM^3g2bLXM_3i2`L[M\\\\3e2eL\\\\MY3e2fL\\\\MY3e2gLZMY3i2dLWM\\\\3X3WLgLh3k3oKlKQ4[4eKiKZ4R51N101bJPLb4R4XKULe4i4N1WObJfLa5V3eJeL]5S3hJoLZ5_2UKcMk4Z2WKfMk4V2WKjMi4T2ZKkMg4S2ZKmMg4P2[KPNe4n1]KRNd4l1^KQNd4o1]KRNb4m1_KSN`4n1_KRNb4m1_KSNa4m1]KUNc4k1[KVNe4k1YKVNnN[Of5_2ZKWNoN]Oe5]2XKYNRO[Of5^2TK_Nm4b1oJaNP5a1mJ`NS5b1jJ_NV5d1cJ`N]5U301O0000@_JZLa5]3hJcLX5T3QKlLo4R3TKlLm4k1[JROk0QOk4k1bJkNg0XOg4l1mJ_Na0Cb4m1eLRN[3n1_2O1011O002N000O100O2N1O10OO2O00000001O00001N1WOPH[OQ8`0VH]Om7>XH^On7:XHBn76WHEdmf2\"}}, {\"image_id\": 64, \"category_id\": 1, \"bbox\": [3.0, 129.0, 132.0, 143.0], \"score\": 0.9999935030937195, \"association_id\": 1, \"light\": [-1.5783782005310059, -2.2537548542022705, 1.4897308349609375, 2.1186025142669678], \"segmentation\": {\"size\": [480, 640], \"counts\": \"Xe12m>2O0O100000000000000O10000000000O101O00001O0000001O00000WNHjD8S;NjD2U;1iD0U;2jDNU;5hDLW;<]DIb;=WDEh;>UDCj;`0SDAl;b0PD@o;c0nC^OP<e0mC]OQ<f0mC[OQ<i0kCYOR<l1OMkC]MT<d2lC\\\\MS<e2mC[MQ<g2PDXMP<h2PDXMo;i2QDWMn;j2SDUMl;l2TDUMi;m2XDRMg;o2YDQMg;o24100O1O1O1O1O100O1O1O1O100O100O1O100O1O1BdLXE\\\\3f:jLUEW3i:mLTET3j:a00K5M3N2O1O10100000O1000O10000000000000001N1O1O2N2N2O2M2fN]DUOd;K]DH3:h;DZDO1:o;\\\\OUD7N:Q<YOUD<K9j<EXC:i<EXC9j<FWC9j<FXC7i<JWC4k<KVC3k<MVC0m<OXCKi<5Q1000010O00001O001N2O1N2MRa\\\\7\"}}, {\"image_id\": 64, \"category_id\": 1, \"bbox\": [115.0, 71.0, 290.0, 192.0], \"score\": 0.9822423458099365, \"association_id\": 3, \"light\": [-2.618898630142212, -2.132978916168213, 2.538008213043213, 2.0021839141845703], \"segmentation\": {\"size\": [480, 640], \"counts\": 
\"jTf11n>101O000O100000000000000000000000000000000000000001O0000000000000000000001O00000000000000000000000000000000000000000000000000000000000000000001O0000000000O10000000000000000000001O0000000000000000001O0000001O000000000001O0001O0000000000000001O01O000000000000000001O01O00000000000000001O01O00001O10O000001N102N\\\\_79W`H<F5LO01O001O0000000XBTO^=l0cBVO[=j0fBVOY=i0iBWOU=h0nBWOQ=j0PCVOo<j0c00O2N10000YN^OPEb0n:NcD4];3XD1g;5RDNk;c10001O100O10N2000O10N3N1O2O0N2L4O1O100O1M4N100M2O1010O2J6^Ob0O002N1O1N3N102N1O1O1O1O1O2N2M200O2O1O1O00005J6K1O004M9kEjKl8W4kFPLU9n40O2O1cLhFn0Y9POlFl0T9TOnFk0R9TORGg0o8XOWGc0i8]OZG`0f8@[G?f8@\\\\G>d8B_G<`8DiG^NXOd0P9n0iHQOW7n0lHPOT7o0nHPOQ7P1RInNn6R1TIkNm6T1UIkNl6Q1XInNh6n0\\\\IROf6i0^IVOc66`F^OP3;`65dF]Oo2<]66WJIj54YJKg54[JKe54\\\\JLe53\\\\JLd53_JJc55`JH`57cJF^59g4N6HfQ^3\"}}, {\"image_id\": 64, \"category_id\": 1, \"bbox\": [408.0, 111.0, 153.0, 74.0], \"score\": 0.9997970461845398, \"association_id\": 2, \"light\": [-2.1923232078552246, -1.556628942489624, 1.9583044052124023, 1.4147099256515503], \"segmentation\": {\"size\": [480, 640], \"counts\": \"c]o51o>00001O0000000000001O00000000000000001O00000000000000000000000O10000000000000000000000O10000001O0O1000000000000000000000000000000000001O000000001O0OQl10PTN00001O0_h21_WM3M2O00M3O1O2O0Ocn08VQO6fAEd=a0TBAk=k0O1O001O001O01O1N2O1O0O2M2O00O2YOCjB?U=BhB`0W=f0O1N2N1O100O1O00VOnB^O2CS=o0mBPO00^=S164N3O1O0000O11O00OPChNZ<W1eCmNY<S1fCoNZ<o0fCRO[<n0cCSOg<d0TC@n<W13M4I7eN\\\\BQ1S>F5J_lT1\"}}, {\"image_id\": 65, \"category_id\": 1, \"bbox\": [206.0, 173.0, 103.0, 197.0], \"score\": 0.9999999403953552, \"association_id\": 3, \"light\": [-2.4928064346313477, -2.3020191192626953, 2.3832266330718994, 2.1644153594970703], \"segmentation\": {\"size\": [427, 640], \"counts\": 
\"oSf2Q1W<6H8J5I5N3L6K6K6K8G4K;F5K6J4M2N3M3Lb0_O2L3L3E\\\\KlGh4LYKn7e3SHcMOhNk7b3`HbMDoNh7`3WInLROCf7^3_IgLmNLc7]3PJfLm5Z3SJgLl5[3SJeLl5^3QJbLo5a3nI`LQ6g3iIXLW6T4mHnKEN]7_50000011N6YJbHU5m7N1@jGgKV8o3mGhKN:V8`3_HaL`7[3dHeL]7V3gHkLZ7k2nHUMT7\\\\2YIdMh6c0\\\\HHQ1Fc6`0_HGo0Ie6=eH^Oi06c6:iHUOk0`0_69jJGZ57dJI^57`JId57XJJj56TJIo57nIIT67jIIZ66cIJ_67^IId65]IJg63XIMk6NWI2Q:000001O000000001O00001O00010O0010O01O010O000000000000N2M30000O11O00001N3N5J4J`eY4\"}}, {\"image_id\": 65, \"category_id\": 1, \"bbox\": [466.0, 205.0, 130.0, 122.0], \"score\": 0.999999463558197, \"association_id\": 2, \"light\": [-2.037116289138794, -1.9565048217773438, 1.948776364326477, 1.8175127506256104], \"segmentation\": {\"size\": [427, 640], \"counts\": \"mbR63U=3O2N1O1O100O10000000O1O1O1O10000O10O1O000010OTDIf:7YELe:3ZE0d:0\\\\E1d:N\\\\E4b:L^E5`:M`E3`:L`E5`:K^E6S9DYG7B7S9HWG1D9S9IXGND:R9LVGME9T9MTGKZOf0EoNi9c0VGIZOg0GmNi9d0TGJYOf0JmNi9d0PGX1WOTNh9g0cF`1FiMg9\\\\3YFdLg9[3ZFeLf9j2kFVMU9h2mFXMS9h2lFYMU9d2mF\\\\MS9Z2WGfMj8V2YGiMi8U2WGlMk8Q2VGoMn8k1SGVNP9f1QGZNP9e1PG[NR9c1nF]NS9c1lF]NT9c1lF]NU9c1jF]NV9d1iF\\\\NW9e1hF[NX9f1gFZNZ9f1eFZN\\\\9Q2XFoMi9R2UFnMk9S2UFlMl9S2TFmMl9U2RFkMo9T2QFlMP:T2oElMR:S2nEmMR:S2oElMR:S2nEmMS:R2mEnMT:R2kEnMV:]1]ERO=AW:[1_ERO:CX:Y1`ESO8DZ:V1`EVO5D]:R1`E[O2C_:P1`E]O1D_:m0bE^OOEb:i0`EBNEe:d0`EGJEg:b0`EIIEi:?`EKGFl:;^EOFGm:7_E1CIo:4_E3BIR<7nCIR<7nCIS<6mCJS<6mCKR<5nCKS<4mCLS<4mCMS<2lC0S<1lC0T<0kC1T<1jC0U<1jCOV<2iCNW<4gCLZ<4eCL\\\\<0gC1i<0000000000000001O00001O0000001O0O10001O0O100O2O0O2NUQb0\"}}, {\"image_id\": 65, \"category_id\": 1, \"bbox\": [381.0, 174.0, 90.0, 132.0], \"score\": 1.0, \"association_id\": 1, \"light\": [-2.1022214889526367, -2.4524614810943604, 1.975163221359253, 2.3005526065826416], \"segmentation\": {\"size\": [427, 640], \"counts\": 
\"lRo46Q==D9D9K3L3O1O1O1O1O1O2O002N2M1N3M2C?M2M2O1O2O00010Oh0XO3N4L1O00000>B0mN[F_Ne9`1]F^Ne97VF59^Oc9;VF7l:HUE7?\\\\OU9<^F8;\\\\OY9:^F:7]O\\\\97_F=2\\\\Oa95_F`0M[Of94_Fa0H[Oj94bF`0@]Oo92bFa0]O]OR:1eF`0WO_OU:1eFa0SO^OY:0fFc0nN]O^:NhFd0gN^Ob:NgFd0fN^Od:OhF`0cNBd:OlF<_NEf:NlFe0V9ZOlFc0T9^OlFa0T9@kF`0U9AkF>V9BjF3]NNj:NjF3\\\\NOk:MkF1[N2l:JSG6n8HTG7P;01O1O0O101O00000000000000000000000001N10000000000004KdUV2\"}}, {\"image_id\": 65, \"category_id\": 1, \"bbox\": [149.0, 196.0, 45.0, 69.0], \"score\": 0.9999706745147705, \"association_id\": 4, \"light\": [-1.895050287246704, -2.1973555088043213, 1.7592391967773438, 2.0614590644836426], \"segmentation\": {\"size\": [427, 640], \"counts\": \"S\\\\n14U=4M2M2N2O1N01L4O100O02M2L4M3L5M2N2N1\\\\DSOj:o0XEQO`:2jDn0g0PO]:W1dEiN\\\\:V1eEjN[:V1eEjN[:V1eEjN[:V1eEjN[:W1dEiN\\\\:X1XElNGLQ;m1nDTNQ;m1nDSNS;l1lDUNV;2hD2>i0DSOU<j0kCVOW<=kCFNM[<5TDKf<000000000000000001NTgi5\"}}, {\"image_id\": 65, \"category_id\": 1, \"bbox\": [48.0, 158.0, 97.0, 243.0], \"score\": 0.9999997019767761, \"association_id\": 5, \"light\": [-1.4438670873641968, -2.41955828666687, 1.3491846323013306, 2.321937084197998], \"segmentation\": {\"size\": [427, 640], \"counts\": \"oVd04P=l0XO6L3K6K5L3N2N3L3N2L5K5K3M6YOk0F7K7H?B7cH`K\\\\O2Q5d4\\\\K^K@0S5l4PKWKLOS5Q5gJTK3MV5Q5cJUK5LW5Q5aJTKD@a0;Z5Y5WJRK<G]5U6_JlIa5V6]JkIb5o600001O00001lNTJgJn5P3jJZMZOFQ6WOnIU3P1kL]O4Gd0\\\\6X2fJhLNP1^5T2nKkMT4R2mKoMT4o1nKoMS4P2nKoMR4Q2PLmMQ4S2PLlMo3U2PLkMP4X2nKgMS4Z2kKfMU4\\\\2jKdMV4^2gKbMY4`2fK_M[4b2cK^M^4b2bK]M`4b2_K^Mb4c2\\\\K]Md4e2[KZMf4f2YKZMh4f2WKZMi4g2VKYMk4BXIc2l1kMm4]ObIc2_1oMn5m1oITNU6h1kIXN[6c1dI]Nc6\\\\1\\\\IdNS7o0lHQOd7?[HBR82mGMW81gG0\\\\8ObG0a80]G0d80ZG1h8OVG0n8OPG1R9OlF0V91hFOZ90eF0]90aFOb92ZFOi94QFLS:5hEKZ:6cEJ_:7^EHd:9YEHi:8UEHm:8QEHQ;8mDGV;8hDIZ;5fDK\\\\;2eDN];NeDCH4f;2gDIE4a<L_C4a<L_C3b<M_C2b<L_C3c<K_C4l<OWQ^6\"}}, {\"image_id\": 65, \"category_id\": 1, \"bbox\": [591.0, 192.0, 47.0, 57.0], \"score\": 0.9996769428253174, \"association_id\": 7, \"light\": 
[-1.8675199747085571, -2.665215253829956, 1.6992682218551636, 2.518364667892456], \"segmentation\": {\"size\": [427, 640], \"counts\": \"ldf72W=5M0O1\\\\OMPD3P<NnC2S<0iC2W<a001fCZOm;S1O001O14L2N1O1O1AkCEY<3kCMV<LoC6b<2N1O1001O:E00001O000000000000000000000000000000001O00nb0\"}}, {\"image_id\": 65, \"category_id\": 1, \"bbox\": [108.0, 199.0, 46.0, 107.0], \"score\": 0.999681293964386, \"association_id\": 6, \"light\": [-0.5446591377258301, -3.374915838241577, 0.3732808530330658, 3.1849234104156494], \"segmentation\": {\"size\": [427, 640], \"counts\": \"_W]11<5Z<NdC<GGR<OVDi0d;XO[Di0d;WO\\\\Dm0`;QObDQ1`;kN`DW1^;mN]DU1m;0N2M1O201^EbNU9^1kFbNU9^1kFcNT9]1lFdNS9]1lFdNS9\\\\1mFeNS9[1lFeNT9\\\\1kFeNV9Z1iFfNX9Y1hFgNY9Y1fFgN[9Y1dFfN^9[1`FeNa9\\\\1\\\\FeNe9\\\\1YFcNi9\\\\1WFdNl9N[ET1h0nNP;7PEIT;4kDKW;6fDK[;5dDK^;3bDM_;2aDN`;1`DOa;2]DMd;4[DLf;4YDLh;4WDLi;5VDJl;2WDNc<M300000i[Z6\"}}, {\"image_id\": 65, \"category_id\": 1, \"bbox\": [555.0, 194.0, 37.0, 53.0], \"score\": 0.9902762770652771, \"association_id\": 8, \"light\": [-0.900679349899292, -2.8377864360809326, 0.7197003960609436, 2.699003219604492], \"segmentation\": {\"size\": [427, 640], \"counts\": \"kcW76U=1`C0c;n0N2N2N3N22L2N1N1EnC[OU<a0oC[OU<a0k0_OR`22SSMM[<4eCLm<000]e10cZN00000O2O000001Okhc0\"}}, {\"image_id\": 66, \"category_id\": 1, \"bbox\": [24.0, 108.0, 146.0, 128.0], \"score\": 0.9999956488609314, \"association_id\": 1, \"light\": [-1.7365856170654297, -2.3635032176971436, 1.555342197418213, 2.2333059310913086], \"segmentation\": {\"size\": [559, 559], \"counts\": 
\"TX=;l`0b0g_OYOT?n0e@XOV?b1M4QAoMZ>\\\\2ZAiMa>[2]AgM`>[2`AeM_>]2`AdM^>]2bAdM\\\\>]2dAcMY>`2hA`MU>b2kA^MT>c2lA]MS>d2mA]MP>e2PB[Mn=g2RBYMk=j2TBXMj=i2VBWMi=j2WBVMi=j2WBVMh=Z3N100O1O100O100O100O100O100000^O`BUM`=k2`BUM_=k2bBUM^=k2bBUM^=k2bBUM^=k2bBVM]=j2bBWM^=i2bBWM^=i2bBWM^=i2bBWM^=i2bBWM^=i2bBWM^=i2bBWM^=i2bBWM^=i2bBWM^=i2bBWM^=i2bBWM^=i2bBWM^=]30[ObBYM^=g2bBYM^=g2bBYM^=g2bBYM^=g2bBYM^=\\\\30000000ZObB[M^=e2bB[M^=d2cB\\\\M]=d2cB\\\\M]=d2bB]M^=c2bB]M^=b2cB^M]=b2cB^M]=b2cB^M]=b2cB^M]=b2cB^M]=b2cB^M]=b2bB_M^=a2bB_M^=a2bB_M^=a2bB_M^=a2bB_M^=a2bB_M^=Y30000000WObBaM^=_2bBaM_=^2aBbM_=^2aBbM_=^2aBbM_=^2aBbM`=]2`BcMa=\\\\2_BdMa=\\\\2_BdMb=[2^BeMb=[2^BeMb=[2^BeMc=Z2]BfMc=Z2]BfMd=Z2\\\\BeMd=[2\\\\BdMf=[2ZBeMg=Z2YBfMh=P30ZOXBeMi=Z2WBfMk=Y2UBfMl=Y2TBgMm=m21O1O1O1O2M3N1O1]OhAmMZ>c22N1N2O1O1O3M3M2N1O2M3N6J1O1O1N3N8H3M1O002M4AS@VOQ`0d0Q@[OR`0a0P@_OP`0?S@_Oo?=T@Cn?8W@EWed6\"}}, {\"image_id\": 66, \"category_id\": 1, \"bbox\": [253.0, 198.0, 232.0, 345.0], \"score\": 0.999989926815033, \"association_id\": 2, \"light\": [-1.720597505569458, -1.7084234952926636, 1.5545456409454346, 1.588791847229004], \"segmentation\": {\"size\": [559, 559], \"counts\": 
\"Y^Z4l0h?\\\\1\\\\O:G5K3M4M2K5I8A`0\\\\Of0C<H6K5K4M3L4L4aD_Jj:e5UE^Jd:h5ZEZJb:j5]EVJ`:m5`ETJ^:n5aESJ]:o5cEQJ[:Q6dEPJZ:R6eEoIY:S6gEmIX:S6hEnIV:T6jEkIV:V6jEiIW:W6iEgIX:[6hEaI[:`693M2O0O1O1O2N1O4Le0[O6J3M3M100O101O1N2O1O1O0O010O10O01O10O01O1O2O0O1O1O010O00000001O00001O00000000O10000000000O1000000O10O10N101M1G:M2O2O00100000O20O3N1N2O00001O00001O0000001O001O001O001O00[JXHIOPNi7V2^HCJXNh7S2fH\\\\OEaNe7R2kHUODiNa7P2QIlNDVOZ7m1VIfND]OV7l1YIcNCAT7k1[IbNBCS7j1]I`NBGP7h1aI^NAIn6h1dIZNAOj6g1hIRND7c6f1nIfMHd0Z6f1PJ`MKj0T6e1TJ\\\\MKo0Q6e1VJXMKT1n5d1ZJSMKX1l5c1bJcLLk1a5b1iL_NV3a1jL`NV3_1kLaNT3_1lLbNS3]1nLcNSLRN[6Q2[ImNX4P1oKUN^6l1aIiNS4V1mKVNa6i1aIhNR4Z1jKWNd6d1eIfNo3^1gKYNg6a1hIaNl3f1cKZNk6[1jNo0X1oNhNR1[1jNeNV1_1eNbN[1c1`N^N_1d1`N[N`1h1]NXNc1j1[NWNd1j1[NVNe1l1YNUNf1l1ZNUNd1l1[NUNc1m1\\\\NTNc1m1[NTNe1m1ZNTNe1m1ZNSNf1m1ZNTNd1n1[NSNd1n1ZNTNe1l1[NUNd1l1ZNVNe1l1XNVNg1l1VNUNj1S2lMPNS2g8O0O2O1O1N2O001O0O10000O1000000O10000O010O10O0100O010000O010O010OkNa@;`?Dc@:\\\\?Ge@7\\\\?Hf@7Z?If@7Z?Hh@6X?Kh@5X?Ki@3X?Li@4W?Lj@2V?Nk@1W?Nj@1V?Nk@1W?Mk@2U?Mm@1U?Mo@MU?1V1N101N2N1MnmW1\"}}, {\"image_id\": 67, \"category_id\": 1, \"bbox\": [312.0, 17.0, 185.0, 91.0], \"score\": 0.9999997615814209, \"association_id\": 1, \"light\": [-2.874713897705078, -0.7887160778045654, 2.68137526512146, 0.6083005666732788], \"segmentation\": {\"size\": [250, 1000], \"counts\": \"WW\\\\2?V77M2N3L3N2M2M4CnNoIT1P6nNkIU1U69O0O2O1O1N2O2N1N3N=B3N1N2O0O1O1O2N1O1O1000O100000000O100000000001O00001O0010O6J2N1O2N1O1O1O1YOgJkNY5S1jJkNW5T1lJiNU5V1bJaNN423_5V1bJkN0L_5X1aJnNi5P1UJUOj5h0UJ[Ok5Z10ZOUJ]Ol5a0TJ@m5>SJBo5S1101VOoIIQ66oIJS6n02ROkI3U6LkI5V6IkI7W6EjI<X6AgIa0Z6\\\\OfIf0f61O1O1O1N2O100O10000O100000000O100000000O01000000O10O10000000O10000O10000O100O10000O100O10000O10000O1000000O1000000O101OO10O1000000O10000O1000O01000O010000O10O0100O1O001O100O1L4N2O100N2N200O100O1O1O100O100O1O101NXhj3\"}}, {\"image_id\": 67, \"category_id\": 1, \"bbox\": [664.0, 173.0, 162.0, 72.0], \"score\": 0.9999460577964783, \"association_id\": 2, 
\"light\": [-3.066480875015259, -0.8151680827140808, 2.906999111175537, 0.6130837798118591], \"segmentation\": {\"size\": [250, 1000], \"counts\": \"nYR55c73N3M3M3L5L2O1N4M5J3M2N2O1L5M3M2N1O2J6L3O2N1O1N20001O0O100000000000O1000000000000000000000001O000000001O001O00001O00001O0000001O000O100000000000000O100000000O100000000O100000000O100O100O2O000O2N1O100O1O2M2N2O1O10010HcIPO^6V11O000FaIXO`6f0aI[O_6c0bI^O^6b0aI_O_6`0`IAa6l0O100001N100O10001N2O1O0O2O001N101N10000O1O2N1O1O1O2N1O100O1O100N2O1O1O1O1O100O100O2N101M\\\\YZ1\"}}, {\"image_id\": 68, \"category_id\": 1, \"bbox\": [282.0, 205.0, 182.0, 151.0], \"score\": 0.999998927116394, \"association_id\": 1, \"light\": [-1.8568733930587769, -2.0399229526519775, 1.637036681175232, 1.808189868927002], \"segmentation\": {\"size\": [427, 640], \"counts\": \"eje38R=2O1N2O1M2gCE`;>YDMa;5\\\\DO_;S1K5iDZNb:i1YE\\\\Nc:X2M3N1N3M2O2N2M3K6J5M4K3N3N10000000O100O10000001O1O3M6K011O20M4L1O1N1O1O00001O1O2N001O0]ObFYM_9e2eFWMEMc9j2PGWMQ9g2oFZMS9c2nF^MV9\\\\2kFdMX9Y2gFhM]9S2dFmM_9P2`FQNa9n1^FSNd9k1\\\\FUNf9h1[FXNf98iEh0b0QOg91WF<6De9L\\\\F82LR<00FQC8o<HQC8Q=00000O10000000000O101O0000000000000000000000000000000000000010O0000000000000010O000000000000000001O0001O000000000000000001O000000000000000O2O0000000000000001O00000000000001O000000000000001N100000001O001O000O10001O000O10aQY2\"}}, {\"image_id\": 69, \"category_id\": 1, \"bbox\": [159.0, 184.0, 106.0, 100.0], \"score\": 0.9999998807907104, \"association_id\": 2, \"light\": [-2.347399950027466, -2.1087279319763184, 2.2986884117126465, 1.9571105241775513], \"segmentation\": {\"size\": [480, 640], \"counts\": \"ciZ27d<5kD5S;6YD6d;d1O1O100O1N10@QDQNm;n1=51O1100mCkMZ;W2cDkM];V2`DlMa;T2]DmMc;i2O01O1O2N2N2N2N1N102N1O1O2N1O2N5Ke0[O3M9G5L5J4M8[OgA1c>100O1000O100000O10O10000O1000O0100O10000O010000000000O10000O01000000O10000O1000O1000O100000000000000O100000000000000O100000000Oc`_5\"}}, {\"image_id\": 69, \"category_id\": 1, \"bbox\": [319.0, 169.0, 209.0, 220.0], \"score\": 0.9999995827674866, 
\"association_id\": 1, \"light\": [-1.8900774717330933, -1.8224979639053345, 1.7497179508209229, 1.692368745803833], \"segmentation\": {\"size\": [480, 640], \"counts\": \"bge4V1_=?J5M1O2M3N2N2N2N1003N:E10WOWNXDi1f;XN[Dg1e;ZN[Df1c;[N]D9\\\\Ok0V<nN^D1CP1n;oN`DKIT18mNb:5RFm0YOSOc:NTFn0]OPOb:1PFo0Q;nNoDR1n;TOoBm0P=UOoBj0R=VOmBk0R=WOmBi0R=XOnBh0Q=ZOnBf0R=ZOnBf0R=[OmBe0R=\\\\OnBd0R=^OjBd0U=e0O001N1010O10O10O010O010O100O1000N2O2M3N2M3N0O2M2N3mNoMRET2k:UNlDn1S;Q1N2N2M3N2N2N2O1O1O1O2N1TFYL]8h3^G^L`8b3^GbLC]O\\\\8S4lGdLGZO\\\\8T4hGgLJUO^8X4`GgL2QO^8e5aG[J_8e5aG\\\\J^8e5aG[J_8e5bGZJ^8f5bGZJ^8f5cGYJ]8g5dGXJ\\\\8h5dGXJ]8g5cGYJ]8g5cGYJ_8e5aGZJd83]Gn4OoJV9P5jFPKW9o4hFRKY9m4gFSKZ9k4fFVK[9i4eFWK]9g4bFZK_9d4aF]K`9a4`F`Ka9]4`FdKa9m41O2N1O1O1O1N2O5fKUFU3o9eLUFY3l9eLVFZ3k9dLWF[3k9bLXF[3k9^L[Fa3c:N101N101N2O2N1N2O1O0O101nNVDlNj;U20POVDjNk;U1UDkNl;T22oNRDmNn;S1RDlNn;S2100O002POoCnNQ<Q20O1POnCPOS<o0mCROR<n0nCSOS<k0nCTOT<j0lCVOU<i0kCXOU<h0kCWOV<h0jCYOU<g0kCZOU<e0kC[OW<c0iC]OX<b0iC^OW<a0iC_OX<`0hCAW<`0iC_OX<`0hC@X<`0hC@Y<?gCBX<?gCAZ<>fCB[<=eCD\\\\<:dCF]<9dCG\\\\<7eCI\\\\<6dCK\\\\<4eCK\\\\<3eCM^<NdC3e=O0010O010O01O1O00001O10O00010O0001O1O00010O1O1O001O1O1O1O001O100O1O001O1O1O001N3M[Td1\"}}, {\"image_id\": 70, \"category_id\": 1, \"bbox\": [131.0, 103.0, 299.0, 331.0], \"score\": 0.9999984502792358, \"association_id\": 1, \"light\": [-1.6214666366577148, -1.5529286861419678, 1.499680757522583, 1.462753176689148], \"segmentation\": {\"size\": [457, 685], \"counts\": 
\"Sij11U>4N2O2N1N101O001O1O001O1O001O1O1O001O001O1O1O1O1O1O1O1O1O1O4L100O101O1N2O0O001O100O0010O1O01O01O00001000O0100O01O01O1O010O10O1O10000O100O010O2O0O2O0O01000O2N010O010O10000O001O00100O1O001O010O100O010O001O1O1O0010O0YOaCE^<:eCD\\\\<;fCEX<:iCGW<8jCIOZOm;m0UDJK[OP<j0UDKJ\\\\OQ<i0WDJG^OQ<h0YDIF@P<g0ZDIEBP<e0\\\\D2c;O]DFEDo;e0\\\\DFHBm;h0\\\\DDo;<RD]ODH[<l0PDZOW<f0jCXOW<i0iCTOZ<k0fCSO\\\\<n0`0O1O0000010O01O1O1eCQOZ;P1`DWO_;h0^D\\\\Oa;d0\\\\D@c;a0[D@e;`0ZDBf;>WDDi;<UDFk;9TDIm;4SDNm;OSD5m;IRD9o;DQD>j:ROnE=VOe0i:MWEj0d6[NoLR3k2VMQMl2n2VMPMk2o2VMoLl2P3VMmLl2S3TMkLn2T3TMiLn2W3SMgLn2Y3RMfLo2Z3SMcLn2]3SM`Lo2`3SM[LP3e3QMkJOmNX3X6jLgJ`4Y5aKeJ`4[5aKbJ`4f3bJkLP1]O^4f3gJkLl0[O^4j3kJhLh0YOe3[OdKj46_Lb0XOl2d5gLQK`0UOX2O_LP6l0jJd0nNQ2g6]MYJY4g5jKVJU4l5nKoIQ4S6ULeIk3]6YL\\\\Ii3f6ZLTIg3n6[LnHg3Q7h1100O010O1O001N1O10001O2O001O1N101OO1O1O2N1O1O1O2O1O2N2N2M2O1O1O2N3MmI`I_4^6[KjIe4T6YKoIh4o5XKRJg4o5WKUJg4j5YKYJf4e5ZK^Jd4a5[KaJf4]5ZKdJf4\\\\5XKfJi4Z5UKgJk4Y5SKjJl4W5RKkJl4V5RKnJl4R5SKoJl4R5SKoJm4Q5QKQKn4P5QKPKo4Q5PKPKo4Q5PKoJP5R5nJPKQ5Q5nJPKQ5W5fJlJY5T7N3M2PL^GU2c8iMaGT2`8jMcGT2^8jMeGT2]8iMgGT2[8iMiGT2Z8hMjGU2X8hMkGQ1QOTOU9IlGn0UOWOQ9HmGm0TO\\\\Oo8CSHm0PO@Q;;PEFT;4nDLT;1lD0U;MlD5R;KoD4R;KnD6Q;IQE6P;IQE6o:JSE5l:KVE3k:KWE4j:JXE6i:GYE8S;ZOoDf0[<0O01O0001O0000000100000101OO1O0O0001O0000010O000010O000001O001O001O1N6IT^a3\"}}, {\"image_id\": 70, \"category_id\": 1, \"bbox\": [604.0, 160.0, 51.0, 59.0], \"score\": 0.9957584142684937, \"association_id\": 2, \"light\": [-1.6982966661453247, -1.7963228225708008, 1.6216434240341187, 1.6197988986968994], \"segmentation\": {\"size\": [457, 685], \"counts\": \"`h]82W>0000001O00000000001O0000000000000001O001O001OV>1hA2N2M2M3M4K4K5H8M4G8I7K5N201O0001O=C3M4L?A5K4K4M2NlU=\"}}, {\"image_id\": 71, \"category_id\": 1, \"bbox\": [187.0, 42.0, 308.0, 485.0], \"score\": 0.9999991655349731, \"association_id\": 1, \"light\": [-2.0386576652526855, -2.512890577316284, 1.82522714138031, 2.2690789699554443], \"segmentation\": {\"size\": [559, 559], \"counts\": 
\"PbV3e0e`06J6I7L3N3L4M2O1N3M2J6I7A`0bN]1I7J6N2O1N3N1nKYK[Kh4a4fKSK[4h4TLmJn3l4`LjJc3d4WMQKl2V1UK^O\\\\2VOb2U1bKmN]2JS2T1lK_N^2:h1S1TLWN\\\\2c0a1R1[LSNY2i0]1R1aLjMY2Q1X1R1nLVMV2f1l0S1j1iNWNU1n1gNSNX1S2_NQNa1Z2nMlMWOXJh2_>XMcAg2U=`MnC_2h;oMTDP2h;YNcCTO0b2Z<`NbCoN5a2V<fN_ClN:_2T<ROgCS1V<oNkCR1Q<nNRDS1i;QOXDo0a;WO_Di0Y;_OhDa0P;FQE:l:HUE8i:JVE7i:IWE8g:JYE6f:KZE4f:LZE5d:M\\\\E3b:O]E2`:1`EO]:3dEMZ:5fEKX:7gEJX:6iEJV:7iEJV:6kEJT:7kEJS:7nEIQ:8nEIP:8QFHm9:RFGk9<UFDi9>WFBg9?ZFAe9`0[F@d9`0\\\\FAb9`0_F@`9a0`F_O_9a0aF@^9a0bF_O]9a0cF@[9b0eF^OZ9c0fF]OX9d0iF]OU9d0jF]OU9c0lF]OS9c0nF]OR9c0nF]OQ9c0oF^OQ9b0oF^OP9b0QG^Oo8b0QG^Oo8b0PG@n8`0SG@m8`0RGAn8>SGBm8=SGDl8=TGCl8<UGEj8;UGFj8;VGFj88WGIh87XGKf85ZGMd82\\\\G1b8N_G4_8JcG7\\\\8GfG;X8DiG=V8CiG?V8@kGa0U8^OjGd0U8[OlGf0S8ZOmGg0R8XOnGk0P8TOQHo0l7POUHR1i7mNXHU1g7iNYHY1f7fN[H\\\\1c7dN]H]1b7bN_H_1`7aN`H`1_7_NaHc1_7ZNcHh1\\\\7VNdHm1Z7QNhHR2V7lMjHY2S7fMlH_2Q7_MPId2n6[MRIh2l6VMUIl2k6RMTIQ3k6mLVIT3j6jLWIW3i6gLXIZ3h6cLZI^3f6`LZIc3d6\\\\L]Ie3c6YL^Ii3`6WL`Ij3`6TL`IP4^6oKbIS4\\\\6lKeIU4[6hKfI[4X6cKjI^4V6_KlIb4T6\\\\KmIe4S6YKmIj4R6UKnIm4Q6RKoIP5Q6mJPJU5P6iJQJY5m5eJTJ]5l5`JUJb5j5\\\\JWJe5i5ZJXJf5g5YJZJi5e5VJ[Jk5e5TJ\\\\Jm5c5QJ^JR6_5nIaJY6Y5eIiJj6h4UIXKU7_4iHcK[7Y4cHhK_7W4_HkKa7V4\\\\HkKe7U4YHmKh7S4VHnKj7R4UHnKm7R4QHnKR8Q4lGoKW8P4gGPL\\\\8P4aGPLb8o3\\\\GRLe8o3XGQLj8o3TGQLn8o3PGQLQ9o3nFQLT9T61O2N3N2M2N3M3M3M2N100O2N1O2O1N3M3M3N2M2N2N1O2N1O2N5K6Jc0lIjEi3R<L4M1N3N001O1O0CXBRMh=l2YBTMi=i2YBWMg=h2YBYMg=f2YB[Mg=d2YB\\\\Mg=d2ZB\\\\Mf=c2ZB^Mf=a2ZB_Mg=`2ZB`Me=`2[BaMe=_2ZBbMe=^2\\\\BaMe=]2\\\\BcMe=\\\\2\\\\BdMd=[2\\\\BeMe=Y2]BfMd=Y2\\\\BhMd=W2\\\\BiMe=U2]BkMc=T2]BlMc=S2^BnMa=R2_BnM`=S2aBmM^=S2bBmM^=R2dBnMZ=S2fBmMZ=S2fBnMX=S2iBlMW=S2jBnMU=R2kBoMS=R2nBmMR=S2nBnMP=R2QCnMo<R2QCnMn<R2SCnMm<R2TCnMj<R2WCnMi<Q2XCoMh<Q2XCoMg<R2ZCnMe<Q2\\\\CoMd<Q2\\\\CoMc<R2^CnMa<Q2`CoMa<P2_CQN`<n1bCQN^<n1cCSN\\\\<m1dCSN\\\\<l1eCUNZ<k1gCTNZ<j1gCVNY<j1gCWNX<i1hCWNX<i1hCWNY<g1iCYNV<g1jCYNW<e1jC\\\\NV<b1lC^NT<a1lC_NU<_1mC`NT<_1lCbNT<]1lCcNU<\\\\1lCcNT<\\\\1mCdNT<S1aB`N[1>T<Q1cB^NZ1a0S<Q1fB[NX1c0S
<Q1TDoNl;Q1TDPOl;o0TDQOl;o0TDROk;n0VDQOk;n0UDROk;n0UDQOm;n0TDQOl;o0UDPOl;o0UDPOl;o0UDPOk;P1VDoNk;P1VDoNk;P1UDPOl;n0VDPOm;n0TDPOo;n0RDPOQ<n0PDlNV<R1V2N1O1N3M4K4Li`R1\"}}, {\"image_id\": 72, \"category_id\": 1, \"bbox\": [31.0, 12.0, 452.0, 192.0], \"score\": 0.9846331477165222, \"association_id\": 2, \"light\": [-1.610504150390625, -1.5373189449310303, 1.5129187107086182, 1.38938307762146], \"segmentation\": {\"size\": [236, 592], \"counts\": \"RZ71[70O3[IOj54TJMk53TJOj53TJNl52RJ0m52PJ1o50PJ0P61nI0R61lI0S61jI2V6NjI2V6OiI1W6OhI2X6NhI2X6NhI2X6a00000O1000O1000000O100000000000O01000000000000O1000000O1000000O10000000000000000O1000000000000O1000O10O100000000000000O1000XOZOjJf0U5\\\\OjJd0V5\\\\OjJd0V5]OiJc0V5^OjJb0V5_OiJa0W5_OiJa0X5^OgJb0Z5^OfJb0Z5^OfJb0[5]OeJc0[5]OeJc0\\\\5\\\\OdJd0[5]OeJc0[5]OeJc0[5]OeJc0Z5^OfJb0Z5^OfJb0Z5^OfJb0Z5^OfJb0Z5^OfJb0Z5^OfJb0Z5^OeJc0[5]OeJc0[5]OeJc0[5\\\\OfJd0Z5\\\\OfJd0Z5\\\\OfJd0Y5]OgJc0Y5^OfJb0Z5^OfJb0Z5^OfJb0Z5^OfJb0Y5_OgJa0Y5_OgJa0Y5_OgJ`0Z5AeJ?l2YORO8RN?i2]OTO5RN>i2_OTO3SN>h2BSO1TN=i2BRO3TN;j2CPO3VN:i2EoN3WN8j2FkN6ZN4k2HdN<^NKo2K_N=aNHP3L]N?aNER3MZN`0dNCQ30VN\\\\1i1fNSNU1mNTNo2k0nMT1UOPNl2n0lMT1XOjM00l2T1iMS1^2PO_MQ1^OhM0010e2X1jMQ1@lMOLg2Y1hM2BiNN>0C1Le2[1gMOLXOGGf2T1dMNOGY2>dML4GW2>bML8FU2`0`ML:FU2Q2kMPNS2Q2mMoMZ1lNoNU3FQNX1oNnNR3IoMW1SOnNn2KoMW1TOlNn2MnMV1VOlNl2NnMV1WOjNm2NnMW1VOjNl2OnMV1ZOgNi23mMV1n2jNRMV1n2jNRMV1n2jNRMV1n2jNQMW1o2iNQMW1o2hNRMX1o2gNQMY1P3fNoL[1R3dNnL\\\\1R3dNnL[1T3dNlL\\\\1T3dNkL]1U3cNkL]1U3cNkL]1V3bNjL^1V3bNjL^1a410000000000000000O1000000O1O100O1000000000000O100000000000000O1O1K5N2O100000000000000O1SOQN]Lo1c3QN]Lo1b3RN^Ln1b3RN^Ln1a3SN_Lm1a3SN_Lm1`3TN`Ll1`3TN_Lm1a3SN_Lm1a3SN^Ln1a3SN_Lm1a3SN_Lm1a3SN^Ln1b3RN^Ln1b3RN^Ln1b3RN^Ln1b3RN^Ln1b3RN^Ln1b3RN^Ln1b3RN]Lo1c3QN]Lo1c3QN]Lo1c3QN]Lo1c3QN]Lo1c3RN\\\\Ln1e3QNZLP2f3PNZLP2f3PNZLP2g3oMTLV2l3kMQLW2P4hMmK[2U4cMjK^2]454L3M1O1O1O2N1O2N1O001O0000000000hNoMVMP2g2UNWMl1f2WNYMi1b2^N\\\\Mb1a2dN\\\\M\\\\1]2nN`MR1\\\\2TObMl03TNU1S1fNk02^Nm0i0POi02aNk0f0TOj0NdNj0d0WOn0HiNe0;XOaNLf649001O3M
2Mb0WInNV6U1O1O002N1O0O10001OO100000000O1O1O1000000000000O1000000000000O10000O100O1O1N2N2O1O10000O10000000000O10000O1O100O100O1000000O100000000000000O1000000000000000000O100000000000000000000000000000000O100000000000000000000000000000000000000000000000000000000O10000000000O1000000O2O0000000O1000000000001N1000001N100O1O10000O10001O00000O100000000O10001N101Olnh0\"}}, {\"image_id\": 72, \"category_id\": 1, \"bbox\": [4.0, 6.0, 283.0, 203.0], \"score\": 0.9997137784957886, \"association_id\": 1, \"light\": [-1.9300109148025513, -1.296594500541687, 1.7985711097717285, 1.1540155410766602], \"segmentation\": {\"size\": [236, 592], \"counts\": \"no08R74N0O2O0O2O1N3N2M4L3YNZOcLh0X3D^Lb0[3<hK4^1TOMi2I_M4e2F^M:m2TOZMk0d401N10000lNPJl0P6TOPJl0P6SORJl0m5ROVJn0j5QOWJo0i5POXJP1g5oN[JQ1Q6OdNXOTLh0g3]O]K07c0Z4^O`KO6c0X4@bKM6c0X4@bKM6c0X4_OdKL4f0X4^OdKL4f0X4]OeKM3f0X4]OeKL3h0X4[OgKL1i0Y4ZOfKM1i0Y4ZOfKL1k0Y4YOgKK0l0Y4XOhKLOl0Y4XOhKLOl0Y4XOiKJOn0X4XOiKJNo0Y4XOhKIOo0Y4XOiKF0R1W4XOiKD1U1V4WO[Li0e3WO[Li0e3WO[Li0e3WO[Li0e3VO[Lk0e3UO[Lk0e3UO[Lk0e3UO[Lk0e3UOZLl0f3TOZLl0f3TOZLl0e3UO[Lk0e3UO[Lk0e3UO[Lk0e3UOZLl0f3TOZLl0f3TOZLl0e3UO[Lk0e3UO[Lk0d3VO\\\\Lj0c3WO\\\\Lj0d3VO\\\\Lj0d3VO\\\\Lj0d3VO\\\\Lj0d3VO\\\\Lj0d3UO]Lk0c3UO\\\\Ll0d3TO\\\\Ll0c3UO]Lk0f1gNVO>TOk0c1lNWO9UOl0`1ZOPOJ@l0\\\\1@QOECk0Z1DQOAEk0Y1GPO^OGk0X1JoN[OHl0X1LnNXOJl0W1OmNUOLl0W10lNTOMl0V13kNQOOl0U15kNoNOm0T17lNlN0m0S19lNjN1m0S1:kNiN2m0R1=jNeN5n0Q1?hNcN6o0R1?gNbN7o0R1`0fNaN8o0Q1b0eN`N:n0Q1c0dN^N;P1Q1c0cN]N<P1P1e0cN[N=P1P1e0cN[N=P1P1f0bNZN=Q12bNOS2BZN=Q1OfN1o1CZN=Q1LjN3l1DWN>S1KkN1l1FVN>S1KlN0k1GVN>S1JmN0l1GTN?S1JnNNl1ISN?S1IoN@C6Y22RN?S1IYODc13QN`0S1IZOBd14oMa0S1H\\\\OAd15mMb0S1H]O_Oe16kMc0S1HT2DiLd0T1FT2EiLe0S1FU2DhLf0S1FU2DgLg0U1CV2DfLi0T1Ci2=WMBj2>VMAk2?UM_Om2a0TMZOP3f0j1100000000000000000O1000000000000000001N10001M3[OdINJ1]`2OkfM0O2hHNT75O101O1O4L7SI1o5h0N0O2O00000000O10000O10000000000O10000O1O100O1000000O10000O10000O100000000O100000000O100000000O1O1O1000000O1000000O1O1N2O1O10000O100O1O100O100O10000000000000000O10000000000000000000
00000O1000001O000000000O100000000O101N1IUILl60RI03Nl62QIOV7O40XTV2\"}}, {\"image_id\": 73, \"category_id\": 1, \"bbox\": [470.0, 77.0, 130.0, 73.0], \"score\": 0.9974878430366516, \"association_id\": 1, \"light\": [-2.962721347808838, -1.3337149620056152, 2.9230706691741943, 1.1066453456878662], \"segmentation\": {\"size\": [457, 685], \"counts\": \"oka6;g=a0E1O2O001N101O00000000000000000O100N2O1O1O2K4F:O1O2N1N2O2O0O100O1O2O000O101N10000O1000000O10000O100000000000000000000000O1000000O100000000001O000000000000001O0000O2O001O001Od0\\\\O001O001O1O000O101O0000000O1001O1O000O100000O1000000EUCWOm<g0SCZOm<e0TCZOn<d0RC\\\\Oo<d0PC\\\\OQ=c0PC\\\\OR=a0oB@Q==RCBP=;QCEQ=8QCGi=O100O1O1O10hiU1\"}}, {\"image_id\": 73, \"category_id\": 1, \"bbox\": [284.0, 162.0, 232.0, 198.0], \"score\": 0.952420711517334, \"association_id\": 4, \"light\": [-1.8667590618133545, -2.1961753368377686, 1.6721913814544678, 2.074253559112549], \"segmentation\": {\"size\": [457, 685], \"counts\": \"omn3;k=5M2N2M2O2N2N1O100O1O100O2O0O100O2O0O101N101O1N2O1N4M3L2O1gF_Ng5c1lIlNQ6U1iISOR6o0mISOn5R1PJPOm5S1QJnNo5S1oInNQ6S1nImNR6S1mInNS6S1kInNU6R1jIoNV6R1iInNV6S1iInNW6R1iInNW6S1hImNX6S1gInNX6T1gIlNY6U1fIkNZ6V1eIiN\\\\6W1dIiN\\\\6X1bIiN^6W1bIhN_6Y1`IgN`6Y1_IhNb6X1]IgNd6Y1\\\\IgNc6[1[IeNf6\\\\1WIfNi6[1SIhNm6\\\\1mGFS8=`GLL\\\\Mc8b5]G^Jc8b5]G_Jb8e5O000000O1000000O11N101O00010O0000001O00O10000000000VL`Gj1a8TNbGj1_8UNdGf1_8XNeG[1f8cN]GZ1JRMa8b1gGY1H[M]8[1mGX1F_M[8Z1oGV1FbMZ8W1QHU1GdMX8U1THU1EfMW8T1UHU1DhMW8Q1XHV1AiMW8P1YHV1AjMV8o0ZHV1AkMU8m0\\\\HW1@lMT8g0bH\\\\1ZOnMMBk7P1RI_1WOoMKCl7l0VIa1SOPNJEl7i0XIa1SOQNIEl7h0YIb1ROQNHGl7d0]Ic1nNSNIFl7b0aIc1jNUNIFl7?gIb1eNYNGGm7;mIb1_N\\\\NGGn78oId1[N^NHFn75SJf1WN`NFFP81VJi1TN`NFFP8OYJi1RNbNEFP8M[Jj1QNdNBFR8K]Jh1QNSOb7UO^Jg1PNSOd7TO^Jg1oMUOc7TO_Je1nMXO\\\\:g0dEYO]:e0dE[O^:c0bE]O`:`0`EAb:<^EoNB4Q;k0[ESOE2P;j0\\\\ETOE1P;j0ZEVOF0Q;h0YEXOHOR;f0UE\\\\OJLW;c0nDALLW;b0mDBMKW;a0mDDLKX;`0lDELKX;?mDFKKY;>kDHLJZ;<kDKJI\\\\;9lDNHI];7lD0GI^;6kD2FH`;4kD4EHa;2jD8CGd;NkD=^OFi<<QC[O09o<m0000O10000
0000000000CQC@o<?RCAn<>TCAl<>UCBk<>UCBk<>VCAj<?VCAj<?VCAj<?WC@i<`0WC@i<`0WC@i<`0WC@i<`0XC_Og<b0YC^Og<c0XC]Oh<c0XC]Oh<c0XC]Oh<c0YC\\\\Of<e0ZC[Of<e0ZCZOg<f0ZCYOe<h0\\\\CVOe<j0=1N100000000O10000000O10O10000000000O100000O10000O010000O010O10ZOjB8W=HiB8V=IkB6U=ImB6S=JnB3S=MoB1R=OnB1R=OnB0S=0nBOR=0oB0Q=0QCKR=5a0O10000000000O1000001MQT[2\"}}, {\"image_id\": 73, \"category_id\": 1, \"bbox\": [0.0, 243.0, 273.0, 187.0], \"score\": 0.9999945163726807, \"association_id\": 2, \"light\": [-2.0140891075134277, -2.2225916385650635, 1.8552098274230957, 2.106954336166382], \"segmentation\": {\"size\": [457, 685], \"counts\": \"[8l1d0a0d8g2K3N2O0O10000O2O000O10000O100O1O100O100O1000000O100000000000000O100O100O1O1O1O1000000O10000000000000000O10000000000000000000000000000000000001O000000000O1000000O100O1O1O100O1O1O100O101N1N2O100O2O0O1000000O10000O100O1O2N101N1000001O0O10000000000O2O0O1O1O100O101N100O10000O1O1O1O1M3O1N2M3M3L4M3N2O100O1001O0gE^Lk9c3RFaLl9o3O1O1O1O1O1O001OdN_FPNb9n1_FSN`9l1aFTN_9l1aFTN_9k1bFYMK<d9Z2bFZML:b9[2cF[ML9a9[2dF]MJ8b9Z2eF^MJ6b9[2eF_MI6b9[2eF_MI5d9[2cF`MI4e9[2cFbMG2g9\\\\2bFbMG2h9[2aFcMG1i9\\\\2`FdMFOl9[2_FfMEOl9[2_FgMELn9\\\\2]FhMGI3J^9e2hFhMIA64X9c2iFiM6ER9a2iFjM0JV9\\\\2jFkMLLZ9Y2jFkMKM[9W2kFnMGM]9U2lFoMCOb9P2lFiNT9W1lFiNT9V1mFjNS9U1nFkNR9T1oFlNQ9S1PGnNo8R1RGmNn8S1RGmNn8R1SGnNm8R1SGoNl8Q1TGoNl8Q1TGoNl8P1UGPOl8o0UGPOk8o0VGQOl8k0VGUOo;000000000001O00001O0O10000000000O10001O0000001O000000000O100O1O100O101N1O100O2N1O100O100N2O101O0O1O1O1O2O0O1O1O1O1O2N100O100O1O2O000O3LX`g5\"}}, {\"image_id\": 73, \"category_id\": 1, \"bbox\": [201.0, 0.0, 286.0, 240.0], \"score\": 0.9907969236373901, \"association_id\": 3, \"light\": [-2.4716954231262207, -1.5999400615692139, 2.389530897140503, 1.4282585382461548], \"segmentation\": {\"size\": [457, 685], \"counts\": 
\"ohi27g=a0nEAl5e0oI^Ol5i0oIXOl5n0SJROe5X1XJiNe5[1ZJeNd5^1[JbN`5d1_J\\\\N\\\\5k1aJVN]5n1aJSN]5o1bJQN]5Q2aJPN]5U2_JlM_5Y2]JhMb5[2[JfMd5\\\\2ZJeMf5`2SJbMm5\\\\3SIfLl6`3hHgLX7`3oGRMP8d400O1N2O1N2O1N2O1N2N2O1OPOcHVK[7k4hHSKW7n4kHPKS7R5oHlJP7l3dHgL=\\\\Oo6k3hHdL=@k6i3aJVL_5e3fJ[LZ5a3kJ^LU5^3oJbLQ5Y3TKgLl4R3[KnLe4o2_KPMa4n2aKRM_4n2aKRM_4m2bKSM^4m2bKSM^4m2aKTM_4l2aKTM_4k2bKUM^4a2eHUMn2:]4`2jHRMi2>]4_2nKaMR4^2oKbMQ4^2oKbMR4\\\\2oKdMQ4P2cHiM;Ge2`0ZMBe6^2TIeM9Lc2?ZMDe6[2kLQN`LDe6Z2lLRN_LEd6Y2nLQN^LFd6X2oLQN]LHd6W2oLQN]LHd6V2PMRN\\\\LHd6V2PMRN\\\\LIc6U2QMRN[LJd6T2QMRN[LJd6S2SMRNYLKe6R2RMSNXLLf6Q2RMSNXLLf6P2SMTNWLLf6o1TMUNULMh6l1TMWNTLMi6j1TMXNSLOj6f1UM[NQLOk6d1UM]NoK1m6MnHV1W4kNoK2n6HRIV1T4oNkK4]7i0kLPOkK7[7f0lLQOkK9W7f0PMQOhK:V7e0TMQOfK:U7d0WMROcK;R7f0\\\\MnNbK=T7b0[MPOaK?X7=XMRO`Kb0Z79bMGa2JbHEm4c0b2DkM<T2ElM;S2FnM9R2GnM9Q2HoM8R2@bM1VK?b<C_C<_<FaC:^<GbC9[<KdC5\\\\<NbC1^<ObC1^<EYC199^<EZC2:7h<GXC9j<DXC;j<CVC=[=0kIB`N=f701O1O]OEYC:]=N1O1O001O001O001OhYf0MZfYO2K8K2N2O0M4O0O2O0O2O1L4K4N3N1O1O2N1O1N2M3L301000000000000000O10000000000O1000000000000000000O100000001O0000001O00000000001O0O100O2O0O100O2O000O10000O1O2O0O1O1000001N100O10000O101O000O101O001O000O101O001O1O1N2O1O1O001N101N3LVUh2\"}}, {\"image_id\": 74, \"category_id\": 1, \"bbox\": [207.0, 87.0, 267.0, 178.0], \"score\": 0.999997079372406, \"association_id\": 1, \"light\": [-2.0531067848205566, -1.5799458026885986, 1.8422915935516357, 1.4408549070358276], \"segmentation\": {\"size\": [314, 500], \"counts\": 
\"kco1f0Q95M2M=C8I2N6Ig0ZO3N1O>B;E1O1O2N2N002N1O1O001O001O1O1O001O000000001O001O1O1O00000000000000000000O10000O100O100O100000000O1O1N200O10000O1O1N200O1O1O1O1N2M3O1N2N2M3O1N2J6L5a0_O00oNaIfN_6Z20O2O2M2O1N1O2YNSI>P7T13VNmHh0V7l03M3M;FN1O2N2N101000O1O1O001O1000O0010O00100O0010N101O1O10O010O0O1000010O010N2O001O10O1O1O001O100O10O0O0100010O10O0000O10010O10O10O1O0010O010N101O10O01O000000000100O000000000010O01O00000000010O01000O0010O0O1O2N101O0010O1O1O001O100O1O1O1O1O1O100000000O10000O100N2O100O100O1O1O1O1O1O100O1O100N2O100O100O1O1O1O1O100O2O000O100O100N3N1O100O3M5JWl7\"}}, {\"image_id\": 75, \"category_id\": 1, \"bbox\": [64.0, 68.0, 447.0, 420.0], \"score\": 0.9999988079071045, \"association_id\": 1, \"light\": [-1.8610122203826904, -2.016125440597534, 1.6720860004425049, 1.8263859748840332], \"segmentation\": {\"size\": [566, 643], \"counts\": \"fdS11ca06K5K2N2N1O1N2O1N2O2N1O1O1O1O1O1O01O01OO1O1000O10000000001N101M2_OUOS@Q1l?<0100O10O10000000000O100000000O0100000001O000000000000010O00001O001O1O1O1O1O1O1O0010O01O001O101N1O1O10O01O0010O00010O01O10O01O1O100O001O1O100O2N1O1O1O1O0010O01O1O1O1O3M2O0O2N1O1O001O101N1O1O2N2N1O1O0010O01O100O100O2O0O10O01O010O010O100O2O1O1O1O000O1001N10100O01O1N100TN_LbEa3\\\\:kLYEU3f:mLYET3e:nL[EQ3d:QM[EP3c:QM]Eo2b:SM]En2a:TM^Em2a:SM`En2]:SMcEn2Z:TMfEm2X:TMhEm2U:TMmEk2Q:WMoEj2n9XMRFh2m9YMTFg2i9ZMYFe2f9\\\\M[Fd2c9]M^Fc2`9]MbFc2\\\\9^MeFb2Y9^MiFc2T9]MnFc2P9\\\\MSGc2l8]MVGc2g8^MZGc2d8]M^Gb2a8^MaGb2]8^MdGb2[8_MfGa2Y8^MhGc2V8]MlGc2S8\\\\MnGf2o7[MRHe2l7[MVHe2i7ZMYHe2f7[M\\\\He2c7ZM_He2`7ZMcHf2[7ZMfHf2Z7YMhHg2V7YMkHh2S7XMoHh2o6XMSIk2h6VMYIn2a6RMbIn2[6SMhIk2V6UMlIj2S6WMmIj2Q6VMQJi2o5WMQJj2m5WMSJi2m5VMTJk2j5VMWJi2i5WMWJj2h5UMYJl2f5TMZJm2d5TM\\\\Jm2c5RM^Jo2a5QM`Jo2_5QMaJU1gL[Of8@dJT1hL[Od8AeJS1hL\\\\Ob8@iJR1dL_Oc8_OlJP1`LBb8^OQKn0]LEa8^OTKj0ZLJ`8]OZKe0VLO^8]O]Kd2a4^M_Kc2]4_MdKk0XLUOS81fKe0\\\\LZOl73hK`0_L]Of75kK<bL^Ob77mK;aL]Ob78mK<`L]Oa79oK:`L\\\\Oa7:PL:_L\\\\O_7;SL8_L]O]7<UL5_L_O[7<WL4_L@Y7=XL2aL@V7>ZL2`L@U7?[L0bL@Q7a0^LNbL@P7b0YJeNCY1UO@n6c0WJjNCR1ZO_Ol
6e0UJoNCm0\\\\O_Ok6f0TJROCh0_O_Oi6i0RJUOEb0@@h6j0oIZOG;CAf6l0kI]OL5CBd6R1aIA7IFCa6[1oHFi0[OHD_6T4hIWLJD\\\\6X4iISLLEZ6[4hIoKOEX6_4hIlK0EW6`4hIkK2DU6c4iIhK3ER6f4jIeK4DQ6i4jIcK6Co5m4jI`K8Am5S5iI\\\\K:Ak5X5hIWK>_Oi5_5fIQKb0@f5d5eIlJe0_Oe5i5cIiJh0^Oc5m5cIdJj0@a5P6bIaJm0_O`5R6bI^Jo0_O]5W6cIYJQ1_O[5[6bIVJT1_OX5^6cISJU1^OV5f6bIjIY1@g4a8ZK_GX4n8hKTGo3T9QLoFj3R9WLnFg3S9ZLmFd3T9\\\\LlFc3U9^LkF`3U9aLjF`3U9bLjF^3W9aLiF_3Y9`LfF`3\\\\9_LcFa3_9]LaFc3a9\\\\L^Fc3e9\\\\LYFe3j9XLVFh3m9VLRFj3P:TLPFl3R:RLnEn3U:PLiEQ4Z:mKeES4^:kK_EW4e:fKYE[4l:dKnD^4h;nJUDS5[<L6J5K1O1O1O1O001O001O00001O0000000000000000000000000001O001O1XMXL`Gi3[8cL]G^3^8jL^GW3^8PM^GS3^8QM_GR3]8QMaGR3[8QMcGQ3Z8QMeGQ3Y8PMfGS3W8oLfGW3U8kLiG[3P8gLnGa3k7bLRHc3i7_LTHf3h7_LRHf3j7^LQHd3n7`LmGc3Q8R3O1O010O10O0100O10O0100O100XK[F\\\\1g9`N^F]1e9^NbF]1`9`NfF\\\\1[9aN_Gf0c8WOZHMh71`HHa75fHE[7:iHCX7<jHBW7<kHBW7>jH_OX7`0iH_OX7a0iH\\\\OZ7c0hHXO]7e0iHTO\\\\7i0UIcNQ7Z1h4M3M2O0O101N2O1N2O2M2O1O0O100O10001N101N2O001N100O100O01O10O01O010O00010O010O0000O2O0000001O001O001N101N2N2O1N2N3L3I8IngX2\"}}, {\"image_id\": 76, \"category_id\": 1, \"bbox\": [414.0, 199.0, 220.0, 280.0], \"score\": 0.9752633571624756, \"association_id\": 2, \"light\": [-1.7313331365585327, -2.0458791255950928, 1.6067445278167725, 1.9275455474853516], \"segmentation\": {\"size\": [479, 640], \"counts\": 
\"goQ67g>6I4M2O1O001N2O1O00001O0000000000000O1001O00000000000000000000000000000001O00001O001O1O00010O00001O010O00102M<D3M2bBcNR<=`DV1V;WOfDj0X;YOfDh0Y;ZOeDg0Z;\\\\OdDl0S;WOjDb1]:aNaEa1]:`NaEa1^:aNaE_1^:aNbE_1_:aNaE_1_:aNaE_1_:`NcEb1Z:^NfEd1W:]NiEd1V:\\\\NjEd1U:_NiE`1W:aNiE_1V:f1O2NkLmEb1o9^NRFh1h9XNWFl1e9TN\\\\Fn1a9RN_Fo1`9RN_Fo1`9QN`FP2_9i1OXLbFP2]9i101O01OTNcJVJ]5g5kJUJT5h5PKWJQ5g5RKXJm4h5TKXJk4g5YKTJi4l5\\\\KmIf4R6]KjIe4V6]KfIe4Y6]KdIe4\\\\6]K`Ie4_6]K_Id4a6^KZIX1Aj0T7SNUIb4j6aKRIa4m6cKoHS1Oa0R7b40fLoHfNP7[1QIdNo6\\\\1RIcNn6]1RIcNn6^1RIaNm6`1SI`Nl6a1TI_Nk6b1UI^Nj6d1UI\\\\Nj6e1VI[Ni6f1WIZNi6g1VIYNi6h1WIXNi6h1WIXNh6j1XIUNh6cMYI^3OoNg6cM]I\\\\3LQOg6cM]I]3KPOg6dM^I\\\\3KPOf6dM`I\\\\3KoNd6fMaI[3KoNd6fMaI\\\\3JnNd6gMbI[3JnNc6hMbI[3LlNa6jMcIZ3LlN`6kMcIZ3MkN^6nMcIX3OjN]6oMdIW3OjN]6Z2dIeM[6]2dIcM[6^2eIbM[6^2eIbMZ6_2gI`MX6a2hI_MW6b2jI]MU6d2kI\\\\MU6d2kI\\\\MT6e2mIZMS6f2mIZMR6g2nIYMS6f2mIZMS6g2lIYMT6g2lIYMT6g2mIXMS6h2mIXMS6h2mIXMS6h2nIWMQ6j2oIVMQ6i2QJVMo5j2QJVMo5j2RJUMn5k2RJUMn5k2SJTMm5l2TJSMl5m2TJSMl5m2UJRMk5n2WJPMi5P3XJoLh5W2lIkK=n1g5Q2TJoK6o1f5P2WJPL3P2f5hNjIj1a0\\\\M0Q2e5iNWJ\\\\15jM1o1c5Q2[JPL3n1b5R2\\\\JoK4m1`5U2[JnK6l1_5V2[JmK7m1^5W2ZJlK9l1]5X2]JhK8o1[5Y2eJ`K1V2Z5Z2hKcMX4\\\\2PL]MP4c2SLZMm3f2ULXMk3h2WLVMi3j2YLTMg3m2ZLQMf3o2\\\\LoLd3Q3^LmLb3T3_LjLa3V3aLhL_3X3cLfL]3Z3dLeL\\\\3[3eLdL[3\\\\3fLbL[3[OoI[3h2XMY3AkIW3o2UMV3d3kLZLU3g3lLWLT3i3mLVLS3k3mLTLS3m3mLQLT3P4lLoKT3R4lLmKT3T4lLkKT3W4lLgKT3Z4mLbKU3_4kL\\\\KZ3c4hLYKZ3g4`3O1O2O0O1O2[KlE[4^:M4M3aMXEa0k:XO\\\\Ed0o:lN[Em0h<VOl`2\"}}, {\"image_id\": 76, \"category_id\": 1, \"bbox\": [87.0, 284.0, 24.0, 17.0], \"score\": 0.9955835342407227, \"association_id\": 1, \"light\": [-1.9751381874084473, -2.3066630363464355, 1.8584407567977905, 2.1195807456970215], \"segmentation\": {\"size\": [479, 640], \"counts\": \"coX11f>5[AMc>4]ALb>901N1000000O010O10MaAF_>9aAH_>8aAH^>9bAG^>9bAG^>9bAG^>9bAF`>9`AGa>;1O1K]AKd>8_Ug7\"}}, {\"image_id\": 77, \"category_id\": 1, \"bbox\": [58.0, 72.0, 295.0, 200.0], \"score\": 0.9999631643295288, 
\"association_id\": 1, \"light\": [-2.224545955657959, -2.035783052444458, 2.074793815612793, 1.819098711013794], \"segmentation\": {\"size\": [343, 500], \"counts\": \"\\\\cc01f:2N2M101O00000O10000O10000O1O100O1O1O1O1N2N1O2N2N2O02O001O0010O01O0000001O00101N2N2OO10O0100O001O001O1O1O1O1O1O001O00001O1O001O001O001O00001N101O001O00001N1000001O00010O001O00001O000mMnNaJR1P5^OoJb0l4DSK=f4IZK7a4N_K2_40aK0_40aK0^41bKO^41bK0\\\\41dKO\\\\41eKN[42eKOZ41fK0Y41fKOZ41fK0Y41fK0Y40gK1W40iK0W41hK0W40jKOV42iKOW41hK0W41iKNW43hKNW44hKLW45hKNU44jKNS43lKNS43lKNS43lKMU42lKNT42kKOT42kK1S40kK3S4MlK5S4KlK8R4HnK9Q4GnK:Q4FoK;Q4EnK<R4DmK<T4DkK<W4DgK<Z4DeK<\\\\4EbK<^4DaK<`4D_K<a4E^K;c4E\\\\K;e4EYK=g4DXK;i4FUK9m41iJNW58eJF\\\\5<cJB^5?WJgM>[1\\\\5X1nJ]NS5d1VKPNl4Q2WKiMk4`2nJ\\\\MS5U3UJ]L18m5j3lIUL64\\\\5j4L1O10O00100O1O1O1O1O001O001O1O1O2N1O0SNYJYM9l0_5g1RKWNn4f1VKYNk4c1ZK[Ng4a1^K]Nc4^1[JgMl0h0k4m1YKmMi4Y1RJiN^1_Od4g1nIkNk7S1UHoNj7P1WHQOi7m0WHTOi7l0WHSOk7l0UHTOk7l0THTOn7k0QHVOP8i0PHVOR8h0nGYOS8f0mGZOT8e0lG[OT8e0lGZOU8f0jG[OV8e0jG[OW8d0iG\\\\OW8d0iG[OX8e0hG[OX8e0hGZOY8f0gGZOY8g0fGXO[8h0eGXO\\\\8g0dGYO]8f0cGYO_8f0aGZO`8e0`G[Oa8]11N101O000O2O1O001N2N2O1N1O101N1O1_OnF\\\\OS9d0mF\\\\OT9c0lF\\\\OU9S10O2N1BhF\\\\OY9o02O1DdF\\\\O^9a0dF^O]9`0dF@]9?dF@]9?dF@^9>dF@\\\\9`0eF]O^9b0;10O010O10O010O1O001N1M4M2O101O1O001O100O010O10O1000000000000000000000001O001O001O001O1N101N1O2N2O1N2N2N3M4L[Sa1\"}}, {\"image_id\": 78, \"category_id\": 1, \"bbox\": [0.0, 89.0, 240.0, 265.0], \"score\": 0.9999961256980896, \"association_id\": 2, \"light\": [-1.3952348232269287, -2.249032735824585, 1.367882490158081, 2.1623899936676025], \"segmentation\": {\"size\": [453, 604], \"counts\": 
\"Y:e0_=2O1O000O2O00000000000O10000000000000O100000000000000000dIXO[Oh0b0LmN4Q13jNMU16iNJV18iNHV19jNGV1:iNFV1;jNFU1;jNEU1<kNDU1<kNDU1=jNCV1=jNCV1>iNCV1>iNBV1?jNAV1`0iN@W1`0iN@W1a0gN@Y1`0gN@Y1a0fN_OZ1a0fN_OZ1b0eN]O\\\\1c0dN]O\\\\1d0cN\\\\O]1d0cN\\\\O]1e0bN[O^1e0bN[O^1f0aNZO_1f0aNZO_1g0`NXOa1h0^NYOa1h0_NXOa1h0_NXOa1i0^NWOb1i0^NWOb1j0]NVOc1j0]NVOb1l0]NTOc1l0]NTOc1m0\\\\NSOc1o0\\\\NQOd1P1[NPOd1R1[NnNd1T1[NmNc1U1\\\\NkNc1X1[NhNe1Y1ZNgNf1_1TNaNk1c1QN^No1c1PN]NP2c1PN]NP2c1PN]NP2d1oM\\\\NQ2d1oM[NR2f1mMZNS2f1mMZNS2f1mMZNS2f1mMZNS2f1mMYNT2g1kMZNU2f1kMZNU2f1kMYNV2g1jMYNV2g1jMYNV2f1kMZNU2f1kMYNV2g1jMYNV2f1kMZNU2f1kMZNU2e1lM[NT2e1lM[NT2e1kM\\\\NT2e1lMZNU2f1kMZNU2f1kMZNU2f1kMZNU2g1jMYNV2g1jMYNV2g1jMYNV2f1kMZNT2P1cNoN^1o0dNQO\\\\1n0eNRO[1m0fNSOY1m0hNSOX1l0iNTOV1l0kNTOU1k0mNTOR1m0nNSOQ1m0POSOP1m0POSOo0m0ROSOn0m0ROSOn0l0SOTOm0m0SOROl0P1SOPOm0P1SOPOm0Q1ROoNn0R1QOnNo0R1ROmNn0T1QOlNn0U1SOjNm0W1ROiNm0X1TOgNl0Z1SOfNl0\\\\1SOeNk0^1TJUN_4<\\\\1S2bNmM^1T2hNeMX1\\\\2hNcMW1_2iN`MW1a2iN^MW1k2_NUMb1l2]NTMc1m2ZNUMf1m2XNSMh1h1oInNX4[Oi1^1[JROm3Ai1W1bJiNP41^1T1HlN9S1GnN:P1GoN:P1FQO;m0FSO:m0FRO<m0DSO<m0EQO<o0DQO=n0CQO>o0BQO>o0BQO>P1AoNa0P1@oN`0R1_OmNb0S1^OmNb0T1]OlNc0U1\\\\OjNf0V1YOjNg0V1YOjNg0X1WOhNi0Y1WOfNj0Z1UOfNk0[1TOdNm0]1ROcNn0^1RO`NP1a1nN^NS1c1h51O0O2O1N100O1O000001O000O100001O1O1O2N1OiNmC=R<BQD<o;DQD=o;BPD?P<BnC?R<k0O0000010O000100O0100O10O00001O000000001O001^OlCQOT<m0PDQOQ<m0RDQOo;m0SDROn;m0RDSOo;k0SDSOn;l0g0M2O1O2N100O2N2M4JVmP5\"}}, {\"image_id\": 78, \"category_id\": 1, \"bbox\": [171.0, 149.0, 232.0, 164.0], \"score\": 0.9999993443489075, \"association_id\": 1, \"light\": [-1.1382577419281006, -2.6391255855560303, 0.9977095127105713, 2.5149576663970947], \"segmentation\": {\"size\": [453, 604], \"counts\": 
\"Sn[22S>001N101O006J0O10001O000O1000000O100000000O1000O100000PLCXJ=g5FWJ:h5IVJ7j5JUJ6j5LUJ4k5NSJ2m50QJ0n52QJNo52QJNo53oIOP62oINP63PJMo55PJKP66oIJP68oIHQ69nIHQ69nIGQ6;nIER6<mIDS6<lIET6;lIES6;nIER6;nIER6;nIEQ6<oIDQ6<oIDP6=PJCo5>QJBo5>QJBn5?RJAm5`0SJ@m5`0SJ@l5a0TJ_Ol5a0TJ_Ok5b0UJ^Ok5b0UJ^Ok5b0UJ^Ok5b0UJ^Ok5b0UJ^Oj5c0VJ]Oj5c0VJ]Oj5c0VJ]Oj5c0VJ]Oj5c0VJ]Oj5c0VJ]Oj5c0VJ]Oi5d0WJ\\\\Oi5d0WJ\\\\Oi5d0WJ\\\\Oi5d0WJ\\\\Oi5d0WJ\\\\Oi5d0WJ\\\\Oi5d0WJ\\\\Oi5d0WJ\\\\Oi5d0WJ\\\\Oi5d0WJ\\\\Oi5e0WJZOi5f0WJ[Oh5f0WJZOi5f0WJZOi5g0VJYOj5g0VJYOj5g0VJYOj5h0UJXOk5h0UJXOk5i0UJVOk5j0UJVOk5k0TJUOl5k0TJUOl5l0SJTOm5l0SJTOm5m0RJSOn5n0QJROo5n0QJQOP6P1oIPOQ6P1oIPOQ6Q1nIoNR6Q1nIoNR6R1nImNR6T1mIlNS6T1mIlNS6U1lIkNT6U1mIjNS6W1lIhNU6X1kIhNU6Y1jIgNV6Y1jIgNV6Y1jIgNV6Y1jIgNV6Y1jIfNW6[1hIeNX6[1hIeNX6[1hIeNX6[1hIeNX6\\\\1gIdNY6\\\\1gIdNY6\\\\1gIdNY6\\\\1gIdNY6\\\\1gIdNY6\\\\1hIcNX6^1gIbNY6^1gIbNY6^1gIbNY6^1hI`NY6`1gI`NY6`1gI`NY6`1gI`NY6`1gI`NY6`1hI_NX6a1hI_NX6a1iI^NW6b1iI^NX6a1hI_NX6a1hI_NX6`1oGZNd05]7a1oG[NSOOV14i7a1oG\\\\NQO2U11k7a1nG^NPO3U1Nm7`1oG`NmN4W1Km7a1oGnN4_On7c1nGoNQO\\\\OU:d1kFPOoN^OU:b1lFQOmN^OW:`1mFROmN]OV:a1nFQOmN\\\\OV:b1UGjNT9c0cEXO\\\\14Q9b0gEVO[17n8b0dG]O\\\\8b0fG]O[8a0gG^OY8a0lGYOV8g0e2N1O100O2O0O1O100000000O2O000000000000010O000001O0000010O00001O010O002N1O6J1O2N1O1O1O00001O00001O0001O00001O00001N101O001O1O1O1O1O1O1O1N2N2N3JShh2\"}}, {\"image_id\": 78, \"category_id\": 1, \"bbox\": [1.0, 245.0, 71.0, 65.0], \"score\": 0.9779088497161865, \"association_id\": 4, \"light\": [-1.9518145322799683, -1.7912046909332275, 1.8689275979995728, 1.517995834350586], \"segmentation\": {\"size\": [453, 604], \"counts\": \"]g0325e=601O0000000000000O10000000000000000000000000000000000000000001O00\\\\Od0B2`CRO^<Q1bCoN^<Q1bCoN^<Q1bCoNg<h0YCWOl<6SC00Ko<O[C0E1Q=NbC3^<M_C5h;I^D0J8f;LSE3o:KRE5o:IRE7^<O1000000O2O000O10000000000O100000001O0000001NkQ[7\"}}, {\"image_id\": 78, \"category_id\": 1, \"bbox\": [389.0, 96.0, 194.0, 218.0], \"score\": 0.9860426783561707, \"association_id\": 5, \"light\": 
[-0.8777138590812683, -3.120169162750244, 0.7785965204238892, 3.0139546394348145], \"segmentation\": {\"size\": [453, 604], \"counts\": \"S\\\\\\\\54P>2O001N10000O10001O00000O10000000000000000000000000001O00000kJG_L7b30WL0i30XLNi33T51O1O2OO3L4Lb>4ZAc0^O01M4M5]OdB4k=Kkj01SUO6J5L2M4M2N2O2M5L4K;F7H6K4K2O1O1O1N2O2N2N1O0O2O000O1000001N100000000O101O00000O2O001O1O1N101O1O2M2O3jG`M`4b2ZKeMd4]2XKfMg4\\\\2\\\\JkLiNo0k6h2_IXNa6o1hHaNX7U4010N2L4M2I8C=N2BkGjJX8S5>O2M2O1N3N1O1O2N1O1O1N2N2N2M4M2O1M3L4J6N2N2O1O1N2O1O1eMcEn0]:oNgEo0Z:nNiER1W:nNjEP1W:oNjEQ1V:oNjEQ1W:nNjEQ1V:oNjEP1V:POkEP1U:POlEo0T:QOlEo0T:QOlEo0T:QOmEn0T:QOmEn0S:ROnEm0R:SOoEk0R:UOoEj0R:UOoEj0Q:VOPFi0P:XOoEh0R:WOnEi0R:WOnEi0R:WOnEi0R:WOnEh0S:XOmEh0T:WOlEi0T:WOlEi0U:VOkEj0U:VOkEj0U:VOkEk0U:TOkEl0U:TOkEl0U:TOkEm0T:TOkEn0S:ROmEo0R:ROmEP1R:oNnES1P:nNoER1Q:nNoENF0Z:3PFJLOT:7PFI1Nm9:QFG7Ne9;TFF:Na9<VFD=^Ol9m0hEC\\\\;=eD@];`0dD^O];b0eD[O];c0fDZO\\\\;c0iDYOZ;`0QW9\"}}, {\"image_id\": 78, \"category_id\": 1, \"bbox\": [15.0, 148.0, 177.0, 153.0], \"score\": 0.9357948303222656, \"association_id\": 6, \"light\": [-1.2964812517166138, -2.810828685760498, 1.1588151454925537, 2.6896133422851562], \"segmentation\": {\"size\": [453, 604], \"counts\": \"Qi6b0b=0100O10O010000000000N2O100O10001O02O0nBEY<<WC7e<g0QFlNX7b0eHC\\\\79eHI\\\\70jH0h:L5L4IYc=3h\\\\BO32G6J68CS3MSM1N1000001N2O0000001O00000O2O0000000000000000000O10001O000000000000000000000000000000001O0000000001O00000000001O1N6K1NYl00eSO1PB0n=5M3O0O10001O00000000000000O10000000000O10001O000O101O001O000000000000O10O10000O10001O0O10000O2Ocoe5\"}}, {\"image_id\": 78, \"category_id\": 1, \"bbox\": [372.0, 235.0, 40.0, 51.0], \"score\": 0.9858524203300476, \"association_id\": 3, \"light\": [-2.602219581604004, -1.8443431854248047, 2.4810314178466797, 1.7289029359817505], \"segmentation\": {\"size\": [453, 604], \"counts\": 
\"djT59d=I_B:]=9K5J6M3M7IMYCnNf<S1ZCmNe<[1O000000000000H\\\\CoNd<Q1\\\\CoNd<Q1\\\\CoNd<Q1\\\\CoNd<Q1\\\\CoNd<R1\\\\CmNe<R1[CnNe<S1[ClNd<U1]CjNc<W1\\\\ChNe<X1\\\\CgNc<Z140O100O1000N2O1O2Oi0UO5K6Iged2\"}}, {\"image_id\": 79, \"category_id\": 1, \"bbox\": [143.0, 100.0, 143.0, 238.0], \"score\": 0.9999964237213135, \"association_id\": 1, \"light\": [-1.6336169242858887, -2.24882435798645, 1.4796475172042847, 2.1123785972595215], \"segmentation\": {\"size\": [356, 297], \"counts\": \"\\\\ja1h0Z:5Kb0\\\\O=F7Jj1UN7Je0[O?A7I[1eN2N4K4M1O00001O2N2N1O1O001N1000000001O0000000000000000000O1000000O101N100O1N2O1O1O1O1O102N1TNhLnJUOh0V4P2[LfN;A]355MOB=?ISO?m0l4000000O010O1000O010O10O100O1000O1O1000O100O01000O10O01O0100O10O01000O01000O1000O010O010000O100000O100O10001N100000O10O1O01O10O01O010O1O1O100O1O1O1J6O1N2O2N1O1O2N1Oka3\"}}, {\"image_id\": 80, \"category_id\": 1, \"bbox\": [1.0, 123.0, 227.0, 175.0], \"score\": 0.99998939037323, \"association_id\": 1, \"light\": [-1.385632038116455, -2.131655693054199, 1.3555667400360107, 2.0025970935821533], \"segmentation\": {\"size\": [391, 640], \"counts\": 
\"kd0d0b;2WM]O\\\\G6Q2?`61ZIOe65XIKg67XIIf6:YIFd6>[IBb6a0^I_O`6d0_I\\\\O`6f0_IZO`6h0_IXO`6j0_IVO`6l0_ITO`6m0`ISO`6n0_IRO`6P1_IPOa6P1_IPO`6R1_InNa6S1^ImNa6U1^IkNb6U1^IkNb6V1]IjNb6X1]IhNc6Y1\\\\IgNc6[1\\\\IeNd6\\\\1[IdNd6]1\\\\IcNd6^1[IbNd6`1[I`Ne6a1ZI_Ne6b1ZI_Nf6b1YI^Ng6c1XI]Nh6d1WI\\\\Nh6g1VIYNj6h1UIXNj6k1TIUNl6m1QITNn6o1PIQNo6Q2PIoMo6T2oHlMQ7V2mHjMR7Y2lHgMT7[2iHfMV7\\\\2iHdMW7\\\\2iHdMV7^2iHbMW7^2iHbMV7`2iH`MU7b2kH]MU7e2iH\\\\MW7d2iH\\\\MV7f2iHZMW7g2hHYMW7h2iHXMW7i2gHXMX7i2hHWMW7k2hHTMV7o2jHQMS7S3kHnLT7T3kHlLT7U3mHjLR7X3mHhLS7X3nHgLQ7[3nHeLR7[3nHeLQ7]3oHbLP7`3oH`LP7b3PI]LP7d3RIYLm6i3UITLj6o3TIQLl6P4SIPLm6Q4RIoKm6S4SIkKn6V4SIhKm6Y4TIeKk6]4UIbKk6_4TIaKl6a4RI_Km6c4SI\\\\Kl6P50000O10O10000000O1000000000000000000000000000000000000000O10000KTIoJl6Q5TIoJl6Q5UInJl6P5VInJk6R5UInJk6R5UInJk6Q56O100O1OnHRKm6n46OlHTKo6k4PIWKP7P51O000APIgKP7W4QIjKo6T4QInKo6Q4oHRLR7k3oHVLQ7i3PIWLP7h3QIXLP7e3RIZLo6e3QI\\\\Lo6a3TI_Lm6^3UIbLk6\\\\3WIdLi6[3XIeLh6Z3YIfLh6Y3XIgLh6X3XIiLh6V3YIjLg6V3YIiLi6U3XIkLi6S3XImLh6Q3ZIoLi6m2XISMm6G_Hd2e0eMZ7X2fHiM[7U2fHkM\\\\7R2eHnM[7R2eHnM]7P2cHPN^7n1cHRN_7l1aHTN`7j1aHVNa7h1_HXNa7g1_HZNb7d1_H\\\\Nb7b1_H^Na7`1aH`N`7]1bHcN^7[1dHeN\\\\7[1dHeN\\\\7[1dHeN]7Y1dHgN\\\\7Y1dHgN]7W1dHiN\\\\7W1dHiN]7U1bHmN^7R1`HPOa7o0ZHWOg7h0WHXOk7g0THSOT8g0oG\\\\OQ8d0lG@T8>jGFU8:iGIV86iGMV82kGOU80jG2U8MkG4U8KlG6S8ImG8S8GmG:S8DnG=R8@PHb0o7^OQHb0o7]OQHd0o7\\\\OQHd0o7\\\\OoGg0P8YOoGh0Q8WOPHi0P8WOPHi0P8WOPHh0R8WOnGh0S8XOmGg0T8YOlG?YOiNm8g0jG2k8NUGLT91lFMY90gFEe9:[FEf9;ZFDh9;XFEh9;XFEh9;XFDi9=VFCk9<VFCj9=WFBi9>\\\\F]Od9c0Q1OPE^O[:b0f0O0D]D4d;GaD7i;O1N2N2NnSm4\"}}, {\"image_id\": 80, \"category_id\": 1, \"bbox\": [117.0, 141.0, 471.0, 249.0], \"score\": 0.9999897480010986, \"association_id\": 2, \"light\": [-2.55672025680542, -2.01513671875, 2.5335772037506104, 1.9119880199432373], \"segmentation\": {\"size\": [391, 640], \"counts\": 
\"_P]1l0Y;5L3M2N2O1O1N10000O2O0O1O1O1O2O0O1O1O10001N1000000gLZNPLh1j3^NULb1h3bNWL^1g3eNWL\\\\1g3fNYLZ1f3hNXLY1g3hNYLY1e3iNYLX1g3hNYLX1f3iNYLX1g3hNYLX1f3jNXLW1h3iNXLW1h3iNaI\\\\O^2k1P4kN^I]Ob2h1P4TOnKm0R4TOmKl0R4UOnKk0R4VOmKj0S4VOlKk0T4VOkKj0U4WOjKi0U4XOkKh0U4YOiKh0W4^OcKb0]4A`K?_4C_K>a4C^K=b4D]K<c4F[K:e4JVK7i4ORK1n41PKOP54mJLS56kJJT58kJHU59jJGV5:iJFW5;hJEW5<iJDW5=hJCX5>fJCY5>gJBY5?fJAY5a0fJ_OZ5b0eJ^O[5c0dJ]O[5e0dJ[O\\\\5g0bJYO]5i0bJWO^5j0aJVO^5l0aJTO^5n0aJRO_5o0_JRO`5P1_JPO`5Q1`JoN_5S1`JmN`5S1`JmN`5T1_JlNa5U1^JkNb5U1^JkNb5V1\\\\JkNd5V1[JjNe5W1ZJiNf5X1YJhNg5Y1XJgNh5Z1XJeNh5]1VJcNj5^1UJbNk5_1UJ`Nk5b1SJ^Nm5c1RJ]Nn5d1RJ[Nn5g1PJYNP6h1PJWNP6j1oIVNQ6l1nISNR6Q4000000O10000000000000VNZJSMf5n2\\\\JoLd5Q3]JnLc5R3^JmLb5S3^JmLb5S3_JkLb5U3^JkLb5U3_JjLa5V3_JjLb5U3^JkLb5U3_JjLa5V3_JjLa5V3`JiL`5W3`JhLa5X3`JgL`5Y3`JgL`5Y3eJbL[5^3jJ]LV5c3kJ[LW5d3jJ[LV5e3jJ[LV5e3jJ[LU5f3lJYLT5g3lJYLT5g3lJYLT5g3lJYLT5g3lJXLU5h3lJWLT5i3lJWLT5i3lJWLT5i3lJVLU5j3kJVLU5j3lJULT5k3lJTLU5l3kJTLU5l3lJRLU5n3kJRLU5n3kJRLU5n3kJQLU5P4lJoKT5Q4lJoKT5Q4lJoKT5Q4lJnKT5S4lJmKT5S4lJmKT5S4lJlKU5T4jJmKU5T4kJkKV5U4jJkKV5U4jJjKW5V4iJjKV5W4jJhKW5X4iJhKW5X4hJiKX5W4hJiKX5W4hJhKX5Y4hJgKX5Y4hJgKX5Y4hJgKX5Y4hJgKW5Z4iJfKW5Z4hJgKX5Y4hJgKX5Y4hJfKX5[4hJeKX5[4hJeKX5[4hJeKW5\\\\4iJdKW5\\\\4hJeKX5[4hJeKW5\\\\4iJcKX5]4hJcKW5^4iJbKW5^4iJbKV5_4iJaKW5`4iJ`KV5a4jJ_KU5b4jJ_KU5b4kJ^KS5d4lJ]KR5e4nJZKQ5h4nJYKP5i4oJXKn4k4QKVKl4m4SKTKk4n4TKSKk4n4TKRKm4n4SKRKl4o4TKQKl4o4TKQKk4P5TKQKl4o4TKQKk4P5UKPKj4Q5VKnJj4S5UKnJj4S5VKmJi4T5WKlJi4T5VKlJj4U5SKnJl4S5oJRKQ5n4mJTKR5m4mJTKS5l4lJUKT5k4lJUKS5l4lJTKU5l4kJTKU5l4jJUKU5k4lJUKT5k4kJVKT5k4lJUKT5j4lJVKT5k4lJUKT5j4lJVKT5c4SK\\\\Ko4_4QKdKQ5g51_N`J\\\\La5b3`J_L`5`3`J`L`5`3aJ`L_5`3aJ`L_5_3aJbL_5^3aJbL_5^3aJaL`5^3aJbL^5_3aJbL_5]3bJcL^5]3bJbL_5]3aJdL_5[3bJeL^5Z3cJeL^5[3aJfL_5Y3bJgL^5Y3bJgL^5X3bJiL]5X3cJgL^5X3cJhL]5X3bJiL^5W3bJiL^5W3bJiL^5V3cJjL]5V3cJjL]5V3cJiL^5W3bJiL^5W3bJiL^5W3bJiL]5X3bJhL_5X3aJhL_5X3aJhL_5X3aJhL_5X3aJgL`5Y3`JgL`5Y3`JgL`5Y3`JgL`5Y3`JgL`5X3aJhL_5X3aJhL_5X3`JiL`5W3`JhLa5X3_JhLa5X3_JhLa5X3^JiLb5W3^JiLb5W
3^JiLb5V3^JkLb5U3^JkLb5U3^JkLb5U3]JlLc5T3]JlLc5T3]JlLc5T3]JlLc5S3]JnLc5R3]JmLd5S3\\\\JmLd5S3\\\\JmLd5S3[JnLe5Q3\\\\JoLd5Q3[JoLf5Q3ZJoLf5Q3ZJoLf5Q3YJPMg5P3YJPMg5o2YJQMi5n2WJRMi5n2WJRMi5n2WJRMi5n2VJSMj5m2VJSMj5m2VJSMj5l2WJTMi5l2WJTMi5l2VJUMj5k2VJTMk5l2UJTMk5l2UJTMk5l2UJTMk5l2TJUMl5k2TJTMm5l2SJTMm5l2SJTMm5b4100000O100000O100000001O0000000O10000000000O10000000000O100000000000001[NmITMS6l2mITMS6l2mISMT6m2lISMT6m2lISMT6m2lISMT6m2lISMT6m2lISMT6l2mISMT6m2lISMT6m2lISMT6m2lIRMU6n2kIRMU6n2kIRMU6n2kIRMU6n2kIRMU6n2kIQMV6o2jIQMV6o2jIQMV6o2jIQMV6o2kIPMU6P3kIPMU6P3kIPMU6P3kIPMU6P3kIPMU6P3kIoLV6R3iInLW6R3jImLV6S3jImLV6S3jImLV6T3iIlLW6T3jIjLW6V3iIjLW6V3iIjLW6V3iIjLW6V3jIiLV6X3iIgLX6Y3hIgLX6Y3iIfLW6Z3iIfLW6Z3jIeLV6[3lIcLT6^3oI^LP6c3SJZLm5g3SJXLm5h3VJULj5l3VJRLk5o3UJPLk5T4QJlKn5Z4mIfKS6]4jIcKV6_4iI`KW6a4hI_KW6c4hI]KX6c4hI]KX6d4hIZKY6f4gIZKX6h4hIWKY6h4hIVKY6j4iITKW6m4hISKX6m4iIRKX6n4gIQKZ6P5eIPK[6P5eIPK[6Q5eInJ[6R5eInJ\\\\6R5cInJ]6R5cImJ^6^50000001O0O10000O1000000O10000000000O101J\\\\IgJd6Y5\\\\IgJd6Y550001N100O2O0O2N2N1O2O1N1O1O2O0O100O2O0O1O2N1N3N3J9dMTIhNZ7k0X2eNaE1J9m:AlE1^Pd0\"}}, {\"image_id\": 81, \"category_id\": 1, \"bbox\": [1.0, 113.0, 365.0, 331.0], \"score\": 0.9999992251396179, \"association_id\": 1, \"light\": [-1.9978625774383545, -2.21738862991333, 1.8542757034301758, 2.1013591289520264], \"segmentation\": {\"size\": [533, 400], \"counts\": 
\"gl0:Z`02O1N2N2O1N101O1N2O001O0O2O001O001N101O1O1N2O1O00001N10001O1O001O0O101O00000O100O2O000O2O00000O0100O0100O010O1000O100O1^Oo@^OS?`0b0O1000O01000000000000000`KFeH:W74_HL]7:aHF\\\\7?bHA]7b0aH^O^7d0aH\\\\O^7g0`HYO_7j0_HVO`7Q1ZHoNe7[1RHeNm7e1jG[NU8k1fGUNX8Q2dGoM[8U2bGkM]8Z2_GfM`8^2]GbMc8a2YG`Mg8c2VG^Mh8e2VG[Mj8h2RGYMn8j2oFWMP9m2kFTMT9P3iFQMV9R3gFnLY9T3eFlL[9V3bFkL^9V3aFkL]9V3bFkL^9V3aFjL_9V3QFPLIl0U:U3oESLHj0Y:S3oE]MP:c2oE^MP:d2nE^MQ:b2oE^MQ:b2oE^MQ:b2oE^MQ:b2oE^MQ:a2PF`Mo9`2PFaMP:^2QFbMo9]2RFcMn9[2TFeMl9Z2UFfMk9X2WFhMi9W2YFiMf9V2[FjMe9U2]FjMc9U2_FkM`9S2cFlM]9R2fFnMY9o1jFQNV9l1mFTNS9i1QGWNn8d1WG\\\\Ni8`1[G`Ne8]1^GcNb8[1aGdN_8[1bGeN^8Y1eGfN[8Y1fGgNZ8X1hGgNX8W1jGiNV8U1mGjNS8T1oGlNQ8R1QHoNn7o0UHPOk7n0WHROi7m0XHSOh7l0YHUOf7j0\\\\HVOc7j0]HVOc7j0]HVOc7i0_HWO`7i0`HWO`7h0aHXO_7h0aHYO^7g0bHYO^7f0cHZO]7f0cH[O\\\\7e0dH[O\\\\7f0dHZO[7g0dHYO\\\\7h0cHYO\\\\7h0cHXO]7i0bHXO]7i0bHWO^7j0aHVO_7k0_HWO`7i0`HWO`7j0_HWO`7j0_HXO_7j0_HWO`7j0_HWO`7l0]HTOc7n0ZHTOe7o0XHQOh7R1UHnNk7T1SHmNl7U1RHkNn7V1QHjNo7X1nGjNQ8W1nGjNQ8W1nGiNR8Y1lGhNS8Z1kGfNU8\\\\1iGdNW8^1gGbNY8`1eG`N[8b1cG^N]8d1aG\\\\N_8e1`G[N`8f1_GZNa8g1^GYNb8h1]GcMZOkNY9c3\\\\GaM@hNT9h3[G_MHbNn8Q4XG]M^9d2aF\\\\M_9f2_FYMb9i2\\\\FWMd9k2ZFUMf9l2YFSM9cNc8[4TGQM8iN`8W4WGoL9mN^8U4XGnL:oN\\\\8T4YGmL;PO[8U4YGiL=ROZ8X4VGfLb0oNX8a4RG^Lc0UO[8a4oFYLb0\\\\O]8e5`GaJ^8]4RGgK`0O]8]4^HeKb7]4[HeKc7_4YHbKf7b4UH`Kj7e4PH^Kn7e6N2O1N2O2O0O100O1O010O001N[NWHaJh7]5cH[J]7b5jHZJV7e5oHVJR7j5QIQJQ7S6nHiIS7Y6nHbIT7_6_12NlNZFfJI?g9i4nFYKk8f4\\\\G\\\\K^8c4gG^KV8c4kG^KQ8e4oG\\\\Kn7f4SHZKk7g4VHYKg7j4YHVKd7l4]HTK^7Q5bHoJZ7U5fHkJW7W5kHhJT7Y5lHgJS7Y5nHgJQ7Z5PIeJo6[5RIeJn6[5RIeJn6[5RIeJn6[5RIeJn6[5RIeJn6[5SIdJl6]5TIcJl6]5TIcJl6^5SIcJl6]5TIcJl6]5TIcJl6]5TIcJl6]5TIdJk6\\\\5UIdJk6\\\\5UIdJk6\\\\5VIdJi6\\\\5WIdJi6\\\\5WIeJh6[5XIfJg6[5YIeJf6[5ZIfJe6Z5\\\\IeJd6[5\\\\IfJc6Z5]IfJc6Z5]IfJd6Y5]IfJc6Z5]IgJb6W1WHe1W1TMc6e0kHS2b0XMc6`0SIU2;ZMc6;[IW22_Mb67`IY2N`Mc64bI[2KaMc63dI[2JbMb61fI\\\\2HcMd6MfI`2GaMg6KdIc2F`Mj6HbIg2G]Mm6CaIP3FVMY9j2Y201N10000O1001O1oDlL_8U3^GnLa8S3[GQMd8P3YGSMf8o2UG
UMj8l2TGVMk8l2RGVMm8l2PGVMo8l2nFVMQ9j2oFVMQ9i2PGWMP9i2PGVMQ9i2QGVMo8h2SGWMn8h2SGXMm8f2VGXMk8g2WGXMi8g2YGWMh8g2\\\\GWMd8h2^GVMc8i2`GTMa8l2bGoL`8P3iElLS<R3oCnLQ<Q3PDoLP<Q3PDPMo;o2RDRMm;m2SDUMl;j2UDZMg;e2YD`Mc;_2^DbMa;^2^DcMb;]2\\\\DeMd;[2[DfMe;[2WDhMi;X2UDkMj;U2UDlMk;T2TDmMl;S2TDmMl;T2RDnMm;R2SDnMm;R2RDoMn;Q2RDoMn;Q2RDoMo;Q2PDoMP<Q2PDnMQ<R2nCoMR<R2mCnMT<Q2lC`MIM[<c2kC_MNLW<f2iC^M2KV<f2gC_M5IU<i2eC]M9HS<j2cC^M=FQ<k2bC_M?CQ<m2_C`MV=_2jBaMW=^2hBbMZ=l21N101N101A?K5I7I8Dc_a0\"}}, {\"image_id\": 82, \"category_id\": 1, \"bbox\": [6.0, 215.0, 151.0, 115.0], \"score\": 0.9999999403953552, \"association_id\": 2, \"light\": [-2.628650426864624, -2.285537004470825, 2.4813942909240723, 2.105029344558716], \"segmentation\": {\"size\": [333, 500], \"counts\": \"mW2g0c96L100O100O10000O10000O101TN]OlIc0R6DhI=W6EhI;S6JmI6Q6MmI4R6NmI2R60mI0S60lI1S60mI0R62mINS62lIOS62lIOT61lIOT62kINU63jIMV64hIMX63hIMX64gILY64gILY6T2O1000oNSIbNl6`1TI_Nl6a1TI_Nk6b1TI_Nl6a200000mNTIeNl6^20000000UN_IJa64aIL_64aIL_64[I_NM\\\\1h65[I_NM\\\\1h65[I_NL]1i64[I`NK\\\\1j64[I`NK\\\\1j64[I`NJ]1k64ZI1f60YI0g61XIOh62WINi63VIMj6k10000O10000000bNUIZOk6>VIWN4Y1g6=XIYN2Z1f6<aID^6;eID[6;fIEZ6;fIEZ6:gIFY69hIGX68iIHW68iIHW68iIHX67hIIX67hIIX67hIIY66fIKZ65fIKZ66eIJ[66eIJ[66eIJ[67dII\\\\67dII\\\\67dII\\\\67dII\\\\67dII\\\\67dII\\\\67dII\\\\67dII\\\\67dIH]68cIH]68bII^67bII^67bIH_67bII^67bII^66dIG^69bIF_6:aIF_6:aIF^6;bID_6<aIC`6=`IC`6=`IBa6>`IA`6?`I@a6`0`I]Ob6d0^I[Ob6f0YI]Oh6c0SIBm6?QIAP7`0nH_OT7b0iHmNLN\\\\7W1dHnN0J\\\\7_21N1N300O1O1N1O2O100O1O1N2BVHXNk7f1YHXNg7g1\\\\HUNf7j1?00A?L4O1O100O2N2N101N2MbZ_3\"}}, {\"image_id\": 82, \"category_id\": 1, \"bbox\": [235.0, 223.0, 265.0, 107.0], \"score\": 0.9999951124191284, \"association_id\": 1, \"light\": [-2.148973226547241, -2.544511079788208, 2.0201010704040527, 2.34623122215271], \"segmentation\": {\"size\": [333, 500], \"counts\": 
\"`f\\\\2?8Jm8Q1L3O1N2O0O101N2O0O2O000aN]NTJd1i5`NUJ`1j5cNTJ]1k5fNRJ[1n5fNQJZ1o5gNPJY1P6hNoIX1P6iNPJW1P6iNPJW1P6jNoIV1Q6jNoIV1Q6kNnIU1R6lNmIT1S6nNkIR1U6POiIP1W6PO[IZOOf1f6ROWI]OJJ2g1m6SOSI`1m6aNQI`1o6VOoH6Q7JoH6Q7JoH6Q7JoH6Q7JoH6Q7JoH6Q7JoH6Q7JoH6Q7JoH6Q7JoH6Q7JoH6Q7JoH6Q7JoH6Q7JoH6Q7c10XNoH4Q7LoH4Q7LoH4Q7LoH4Q7LoH4Q7LoH4Q7LoH4Q7MoH2Q7NoH2Q7NoH2Q7NoH2Q7NoH2Q7NoH2Q7NoH2Q7NoH2Q7NoH2Q7e10ZNoH0Q70oH0Q70oH0Q70oH0Q7f10[NoHNQ71PIOP71PIOP7mNQI`0Oc0P7lNSI`0Md0P7lNTI?Md0o6lNVI?Ke0o6lNWI>Jf0o6kNYI>Hg0o6kNYI>If0n6lNYI>If0n6lNYI>Je0m6nNXI?Ic0o6nNYI?Hb0o6oNYI`0Ga0P7oNYIa0F`0Q7oNYIa0G?P7POYIb0F>Q7POYIb0F>Q7POYIc0F<Q7QOZIb0E=Q7QOZIc0E;Q7RO[Ic0D:Q7SO[IP2e6PN[IP2e6PN[IP2e6PN[IP2e6PN[IP2e6oM\\\\IQ2d6oM\\\\Ih0C2Q7VO]Ig0D1o6XO]Ig0D1o6XO]Ih0DOo6YO]Ii0CNP7XO^Ik0AMQ7WO_Il0ALP7XO_Im0@KQ7WO`In0@JP7YO_Im0AJP7YO_In0AHP7ZO`Im0@IP7[O_Il0AIP7[O_Im0AGP7\\\\O`Il0@HP7\\\\O`Il0@HP7\\\\O`Im0_OGQ7\\\\O`Im0_OGQ7\\\\O`Im0_OGQ7\\\\O`Im0_OGQ7\\\\O`Im0_OGQ7\\\\O`Im0_OGQ7\\\\O`Im0@FP7]O_In0AEP7^O^Im0BEP7^O^Im0BEP7^O]In0CDP7^O]In0CDP7^O]In0BEQ7^O[In0DDQ7^OZIo0ECQ7_OXIo0GBQ7@SIR1L^OQ7BoHR10\\\\OQ7X1oHhNQ7X1oHhNQ7X1oHhNQ7X1oHhNQ7X1PIgNP7Y1PIgNP7Y1PIgNP7Y1PIgNP7Y1PIgNP7Y1QIfNo6Z1QIfNo6Z1QIfNo6Z1QIfNo6Z1QIfNo6Z1QIfNo6Z1QIfNo6Z1QIfNo6Z1RIeNn6[1RIeNn6a0RIWO08n6=XIYOJ:n6<ZIYOH;n6<[IXOG<n6<\\\\IWOF=n6;]IXOE=n6;^IWOD>n6;^IWOD>n6:`IWOB?n69bIWO@`0n69dIUO^Ob0n68_IlND9Oc0n67_IoNC7Od0o66^IPOE5Ne0o66]IROE3Nf0P76\\\\ITODN0i0P75\\\\Ia0d6@[I`0e6BXI?h6BWI>i6CoHgN4f1m6GRI9n6GRI9n6GRI9n6FSI:m6]O\\\\Ic0d6[O^Ie0b6VOcIj0]6UOdIk0\\\\6TOeIl0[6SOeIn0[6ROfIm0Z6SOfIm0Z6ROgIn0Y6ROgIn0Y6ROgIn0Y6ROgIn0Y6QOhIo0X6QOiIn0W6ROiIn0W6ROiIn0W6QOjIo0V6QOkIn0U6ROkIn0V6QOjIo0V6QOkIn0U6ROkIn0U6ROlIm0T6SOlIm0T6SOlIm0U6ROlIm0T6SOlIm0T6SOlIm0T6SOlIm0U6SOkIl0U6TOlIk0T6UOlIk0U6TOlIk0T6UOmIj0T6VOkIj0U6VOkIj0U6VOkIj0U6VOlIi0U6UOlIk0T6UOnIi0S6VOPJg0P6YORJe0n5ZOTJe0l5[OTJe0m5ZOSJf0m5ZOTJe0l5[OTJe0l5ZOUJf0k5ZOVJe0j5[OWJd0i5[OYJd0h5[OYJd0g5[OZJe0f5[OZJe0f5[O[Jd0e5\\\\O[Jd0e5\\\\O[Jd0e5\\\\O\\\\Jc0e5\\\\O\\\\Jc0d5\\\\O^Jc0b5]O_Jb0a5^O`Ja0`5_O`Ja0`5_O`J`0a5@_J`0a5_
OaJ`0`5_O`Ja0`5_O`Jb0_5^OaJb0_5^ObJa0^5^OdJa0\\\\5_OeJ`0\\\\5_OdJa0\\\\5_OdJa0\\\\5_OdJa0\\\\5_OeJ`0\\\\5^OeJb0[5^OeJb0[5^OeJb0\\\\5\\\\OfJc0Z5\\\\OhJb0b701O00001O0O2N2MXF\"}}, {\"image_id\": 82, \"category_id\": 1, \"bbox\": [392.0, 168.0, 50.0, 47.0], \"score\": 0.9801158905029297, \"association_id\": 3, \"light\": [-2.095547914505005, -1.9473230838775635, 2.0290589332580566, 1.8543992042541504], \"segmentation\": {\"size\": [333, 500], \"counts\": \"keo31\\\\:000000001eN0WH0i71VHOj71VH0i71VHOj71VHOj71VHOj71VHOj71WHNi72WHNi73VHMi74WHLj73VHMj73VHNi72WHNi72WHNi72WHNi73VHMj73VHMj73VHMi74WHLi74WHLi74WHLi74WHLi75VHKj75VHKj75VHKj76THKl75THKl76SHJm76SHJn75RHKn75RHKn75PHHSO2m85THKm74SHLn72THMm72SHNV90000000000000mdb0\"}}, {\"image_id\": 83, \"category_id\": 1, \"bbox\": [85.0, 181.0, 143.0, 150.0], \"score\": 1.0, \"association_id\": 3, \"light\": [-1.9551489353179932, -2.146815776824951, 1.8244822025299072, 1.8338992595672607], \"segmentation\": {\"size\": [434, 650], \"counts\": \"XWT1j0d<=F3L5I7J4N3N2M4L4L4L3N6TEgMR:Q3L3M1\\\\FgLm8^3jFgLU9k301N1000000O100O10000O10O10O01O1MoFjKl8k3\\\\GZLb8c3aG]L_8b3aG_L_8`3bG`L^8`3bG`L^8`3bG`L^8`3bG`L^8`3bG`L^8c3_G]La8g3[GYLe8l3VGTLj8o3SGQLm8P4SGoKm8Q4SGnKn8S4PGnKP9R4PGnKP9R4PGnKP9R4PGnKP9Q4PGPLP9o3PGRLQ9W40EnFULS9i3nFXLS9S4001O1N2gNVGhMk8R2ZGnMf8P2\\\\GPNf8n1YGSNi8k1WGTNl8j1TGVNn8h1RGXNn8h1RGXNo8g1QGXNQ9h1nFXNS9g1mFYNT9f1kFZNV9f1jFZNV9f1jFZNV9g1iFYNX9f1hFZNW9g1iFXNX9g1iFYNV9h1jFXNV9g1jFZNU9g1kFZNT9?_FG?JR9<VHDi7;YHEg7:ZHFf7:ZHFf7:ZHGe78\\\\HHd78\\\\HIc77]HIb78^HHb78^HIa78^HIa77_HJ`75aHK_75aHL_73aHO]71cHO]71cH0[71eHO[71eH0Z70gHOY71gHOY71gHOY70hH1W7OiH1X7NhH2X7LjH4V7KkH5U7ImH7S7HnH8R7GPI8P7HPI8P7GQI8Q7GPI8P7GRI8P7_OXI`0n9N103M2N001N3N1O0O2O1O0O2O00001N10001O000O101O1N2N_ab5\"}}, {\"image_id\": 83, \"category_id\": 1, \"bbox\": [564.0, 200.0, 72.0, 53.0], \"score\": 0.9999727010726929, \"association_id\": 2, \"light\": [-1.1943455934524536, -3.1854255199432373, 1.0203909873962402, 3.007648229598999], \"segmentation\": {\"size\": [434, 
650], \"counts\": \"lW_73]=4M1O2O1O2M1O3N1VCNQ<4mCOP<j001N1O100000000O10001O00O100O1_OWD[Og;e0\\\\DUOENo;m0`DTO`;l0`DTO`;l0`DTOa;k0_DUO`;m0_DSOa;m0_DSOa;m0_DSOa;n0^DROb;n0^DROa;o0_DQOa;o0_DRO`;n0`DRO`;n0`DRO`;o0_DQOa;o0[DPOI0l;Q1XDROKMm;S1TDROOKm;^1SDaNn;^1RDbNn;^1RDbNn;^1SDaNm;`1RD`Nn;`1RDaNm;_1SDaNm;_1SDaNn;a10000001O2KoCeNY<:[DG\\\\O0\\\\<2_DNc<000001O000000000000000OSf5\"}}, {\"image_id\": 83, \"category_id\": 1, \"bbox\": [313.0, 194.0, 66.0, 50.0], \"score\": 0.9964579939842224, \"association_id\": 1, \"light\": [-2.2086687088012695, -1.866742491722107, 2.1498403549194336, 1.732201337814331], \"segmentation\": {\"size\": [434, 650], \"counts\": \"^kT4;T=<E4M<D2N2O0O1000000O010000O1F:O1O1O101O0001N110O:F2M2O000000001O010O1O0000O101DoCVOP<e0YDWOh;g0YDYOg;e0[D[Of;c0[D]Of;b0[D]Of;a0[D_Of;`0ZD@h;=YDCi;:XDGk;5VDJk;3WDMg<0000000001O000001O000001O0Omcb3\"}}, {\"image_id\": 83, \"category_id\": 1, \"bbox\": [375.0, 192.0, 59.0, 73.0], \"score\": 0.9446463584899902, \"association_id\": 5, \"light\": [-2.3433125019073486, -1.932665467262268, 2.2269644737243652, 1.8305939435958862], \"segmentation\": {\"size\": [434, 650], \"counts\": \"XTo46Y=:H9F7J7I7I4M2M2O2N3M6J3M001O000000001O001O10O1O1O2O0O3L3N2N1O1O1O1O1O1O2N101N1O1GnCPOW<0UDKE`0n<00000000000001O001N2O3M1N101N1NcYk2\"}}, {\"image_id\": 83, \"category_id\": 1, \"bbox\": [114.0, 188.0, 108.0, 78.0], \"score\": 0.9994637966156006, \"association_id\": 4, \"light\": [-2.566096782684326, -1.8358180522918701, 2.4890050888061523, 1.7027133703231812], \"segmentation\": {\"size\": [434, 650], \"counts\": 
\"R``13^10_:6[EOXOIg:9QFNXOIg:9QFO_:0bE0^:0bE4Z:KgE5Y:KgE6X:IhE;lN@Q;5RF>jN_OS;2RFb0jN\\\\OT;3QFa0kN\\\\OT;3PFb0lN[OT;3PFb0lN[OT;3oEc0mNZOT;3oE>lNFT;LPF=nNFR;MPF<oNGQ;NoE:QOIo:MPF:QOIo:NnE;SO_OL3S;3mE<TO]OO2P;6lE<TO\\\\O10P;8jE=UOZO21o:9iE<]O\\\\Oi:9hE<^O\\\\Oj::fE:@\\\\Oj::eE;A[Oj::eE;BYOj:<dE;CXOi:<eE<BXOj::dE?BWOj:=`E>m:AoDf0n:[OkDl0U;SOiDP1V;POkDP1V;nNjDV1T;hNmDY1S;eNmD\\\\1U;aNlD^1T;cNlD\\\\1T;dNmD[1T;fNlDX1S;kNkDU1U;lNgDW1Y;iNgDW1Y;jNfDV1Y;kNfDV1Z;mNbDT1^;`00O0MdDRN\\\\;m14A`0H\\\\DlNZ;S1fDPOW;P1jDPOV;o0kDROT;m0mDSOS;m0mDSOS;m0nDROR;n0nDROR;n0nDROQ;o0oDQOQ;o0oDQOQ;o0oDQOQ;o0PEoNQ;R1mDoNS;Q1mDoNS;Q1mDoNS;Q1mDoNS;R1cDjN04\\\\;T1bDiN23\\\\;T1cDhN14\\\\;U1kDkNU;U1_DgN84Y;U1_DgN84Y;U1kDkNU;U1kDkNU;T1lDlNU;S1kDmNU;T1kDkNU;U1kDjNW;V1hDjNX;W1gDiNY;W1gDiNY;W1gDiNY;X1fDhNZ;Y1eDgN[;Y1eDgN[;Z1dDeN^;[1`DfN`;[1_DeNa;\\\\1^DdNb;l0YD@4Dd;k0YDC1Af;k0\\\\DCMAi;k0[DDKAj;k0[DDKAk;k0ZDCKBl;l0WDBLCn;l0UD@MDn;k0\\\\DZOGKo;g0^D\\\\OCLU<b0ZDAAMX<=[DCk;:iVe5\"}}, {\"image_id\": 84, \"category_id\": 1, \"bbox\": [109.0, 59.0, 196.0, 142.0], \"score\": 0.999998152256012, \"association_id\": 1, \"light\": [-1.6446223258972168, -2.3219258785247803, 1.550551176071167, 2.1846954822540283], \"segmentation\": {\"size\": [600, 521], \"counts\": 
\"hPP24db01N101O1O1O2N1O001O001O3M;k]OUOga0P1N1O001O00000000001O000000001O00000O2O0000010O00001O00010O0lMkNeBU1X=WO`Bi0X=AVBEjNj0o>FgAU1W>oNdAT1\\\\>mNbAT1\\\\>PO_AS1`>POWA<2_N5NH2i>R3`AQMHKg>g3YAYLf>h3[AWLd>k3[ATLe>m330O100O1O1O1O1O100O1O100O1O10lNcAmM\\\\>R2dAYMO9]>\\\\2fA]MN4\\\\>\\\\2hAaM1MX>^2kAfMOIV>_2mAjMMEV>_2oAnMM^OU>c2oAPN^>o1cAQN]>l1fATNZ>i1iAWNV>i1kAWNU>h1lAXNT>g1nAXNR>g1oAYNQ>e1QB[No=d1RB\\\\Nn=c1SB^Nl=a1UB_Nk=a1UB_Nk=`1VBaNi=_1WBaNi=^1XBbNh=^1XBcNg=]1YBcNg=]1ZBbNg=]1YBdNf=\\\\1[BcNe=]1[BdNd=\\\\1\\\\BcNe=^1[BaNe=_1[BXNnNJg>P2YBYNQOBg>V2XBYNk=h1PB^Nn=c1iAfNV>\\\\1eAgN[>Z1aAiN_>h20000000O1NVAZLf>S3]ARM1Lb>Q3_AmL33^>o2]ASM20`>l2^AVM1Na>l2^AWMONc>l2]AXMNMd>k2]AZMNKe>l2\\\\AdMd>\\\\2\\\\AdMd>Y3O00000000001O0001O00001O1O0M[ASLf>l33O2O0O1O1H8@m@UMY?U2TAhMX?R2]AUNn=i1SBXNm=f1SB\\\\Nm=c1RB^Nn=a1SB_Nn=`1RB`Nn=`1RB`No=`1PB`NP>b1]ARNL;h>]2SAcMm>`2QA_Mo>b2PA]MR?d2n@YMS?i2>1O2N2M2O1O001O1O00O010O2N1O100O1O1O4L2N1N2O1O1N6J3N1N2N3L5K5VO\\\\_OWOi`0`0l0M4L4KY^n3\"}}, {\"image_id\": 84, \"category_id\": 1, \"bbox\": [131.0, 267.0, 331.0, 279.0], \"score\": 0.9999998211860657, \"association_id\": 2, \"light\": [-1.7467753887176514, -2.2876245975494385, 1.6389873027801514, 2.1860272884368896], \"segmentation\": {\"size\": [600, 521], \"counts\": 
\"mW]23bb05N1O1O001O0O2O001O001O001O10O01O001N101O1O1O2M101O001N101N1O3N1N2O1N2O001N2N2N2O2M101N101N2N101O0O2O001O0O2O001O000O2O0O100O10000O101N100O100O1O100O010O1O100O0O2O0O_Oo_O]NR`0b1T@XNm?h1a0001N101N1O2N1010O01O1O1O1O1O001O100O101N1O101O0O10001N1000001O0O10001O001N10001O00001N1000000000000O10000000000000000000000000O01000000000000000000O1000000000O1000000000000O1O2N1N2LZMWNeDg1l:b0\\\\C\\\\Ob<j0\\\\CUOb<o0]CPOb<S1]CmNa<U1`CiN_<Y1aCgN\\\\<]1cCbN]<_1cC`N]<b1cC]N[<f1dCYN\\\\<i1cCWN[<l1eCSNY<P2gCnMX<U2hCjMW<X2iCgMU<\\\\2kCcMS<_2oC^MQ<d2QDYMm;j2SDUMk;n2UDQMj;R3TDmLk;Y3RDfLm;^3QDaLT1_Nb9S5]EXLm0lNb9P5dEnKh0VOb9P5gEfKd0]Oc9T5fE\\\\Ke0Dc9W6ZFlIe9T6XFPJh9o5WFSJi9k5VFXJj9l5kE[JU:T7O1000000001O00000O2N2N2mNfEbI_:[6Q1L4M3J6K5L3N30001N101O02^DVJP:n5gEXJX:S6SEYJk:j601O1M2N2O1O1O1O10O0100O100N01oN_EkIb:X5m1H8M3N3L4L4J5N3N2N1O2M3N1N30O3M3ZCWKX;l4bD_KV;d4eDbKW;a4cDeK[;m5N10001N1000O100O1O1O1O1M3M3H8L4O1N2N2M3L4M300O1O1O1O11O6J8Hi0WO3N0O0100O100O1O1N1^N^DnKc;f3UElKm:o3\\\\EkKe:S4_EiKd:S4bEfKc:V4R2A?I8K4L4L3L5L3N3N2L6H[YR1\"}}, {\"image_id\": 84, \"category_id\": 1, \"bbox\": [3.0, 302.0, 279.0, 137.0], \"score\": 0.9999703168869019, \"association_id\": 3, \"light\": [-1.4805381298065186, -2.387301206588745, 1.3920605182647705, 2.1963675022125244], \"segmentation\": {\"size\": [600, 521], \"counts\": \"dT22cb06L2O1O1O1O100O1O1O2N1O1O1O1N2N1K6M2O2M2O2N1O1O100O2O1O0O101N101O001O00001O1O1O1O1O001O2N1O001O001O2N1O00000000001O01NF]Nc_Oa1P`0j0kNaMiAg2T>`MeAb2Z>n010O0010mNYATNg>n21J5O11O010000O1LQA_Ln>c33001O001O0001O001O001O001O00001O00001O001O001O0010O00100O001O0010O01O1O1O1O001O00000000001O000000000000O10001O0000O010000O100O10000O100000000O100000000O10O1000O10000O10000O100O1000O0100O1O00100O010O10O10O10000O010O1O10000O10000O100000000O1000000O10000000000000000O1001O00010O001O0010O0100O2O0O10O01O1O100O01O010O1O1O010O0O101O001N2O1O001O1N101O1O1O1O1N2O0O2O2M3N4K2O1N2O1N103L6J:F9G5jNa^Oi0ia0N2N2N2_Oo]O8Zb0N2N2O1MPe[4\"}}, {\"image_id\": 85, \"category_id\": 1, \"bbox\": [22.0, 89.0, 
144.0, 126.0], \"score\": 0.9999999403953552, \"association_id\": 1, \"light\": [-1.7662980556488037, -2.126965045928955, 1.5834202766418457, 1.9358689785003662], \"segmentation\": {\"size\": [358, 359], \"counts\": \"Vj75o:4L4L4M2M5PF_OB7P9?XG0c8]1K5[Od0L4N2O1N101O002N001O0000000000000000000000000ZHdLb7`310O01O1O3M9H1N3M1O2N1O00000kNjGZOV87fHiNUO=U8i0mHWOS7h0nHXOR7h0nHYOQ7g0oHYOQ7g0oHYOQ7f0PIZOP7f0PI[OP7c0QI]OQ7?QIAQ7JbI6b6AcI?c8O000000000000001O0001O00000000000000000001O000001O000001O00000000000000000000O100000001O00000001O01O0000001O0000001O00000010O00001O0000001O000O2O1O0O101O0O101O0O2O0O2O1MkXS2\"}}, {\"image_id\": 86, \"category_id\": 1, \"bbox\": [10.0, 0.0, 198.0, 338.0], \"score\": 0.9999693632125854, \"association_id\": 1, \"light\": [-1.4932551383972168, -1.7461302280426025, 1.3741525411605835, 1.7306509017944336], \"segmentation\": {\"size\": [371, 483], \"counts\": \"bk33o92UF2`0>]OBo8m0VGROMk0k86SG7l8X100000001WK[Mb0e2aKZM_3d0c0R2mK\\\\M\\\\3j0b0k1oK`MZ3h0f0U2WOWN>k1_O\\\\N;f1C_N8b1GeN2\\\\1MfN1[1NhN0X1OkNNU12mNLT13POJP14UOIj07YOGg06I]O6b0O\\\\O0b05[OKd0>oM^L9T3f1b0kM^L?o2f1i0bM[Lh0m2d1]1]NbNa1`1_NaN_1`1bN_N^1a1bN`N\\\\1a1eN_NZ1a1fN`NY1`1hN`NW1`1iNaNV1_1kNaNT1_1mN`NS1E^N`MAMP1n2P1B_OZMAU3o0@P1`0oN_OS1a0lN^OU1c0jN]OW1c0hN]OY1b0gN^O[1`0eNA[1?dNB]1=bND^1<aND`1<_NE`1<_ND\\\\OTMj0X3JEYOZMh0R3NFVO[Mk0o2OHRO\\\\Mn0m20KkNZMU1k20NfNZMY1i21O`N[M_1g22o0OQO1n00QO0o01QONo03PONo04POKP16oNKo06QOJo07QOIn07ROIm08TOGk0WOlKNZ3j0h0ZOoKLY3i0g0\\\\OQLJY3h0f0_OQLIZ3f0e0BRLHX3e0e0DTLGW3e0b0GXLFS3c0a0K\\\\LEQ3>?2`LBo2;ZN[2gN\\\\Mm28ZN_2jNYMk28XMmN7e3FWMj29UMb32ULh2e5YM\\\\Je2d5[M]Jd2c5]M]Jc2b5]M_Jb2a5_M_J`2a5`M`J_2a5`M`J`2_5aMaJ_2^5aMcJ^2]5cMdJ\\\\2[5kM`JS2`5mMaJS2^5nMbJQ2`5mMaJR2a5cMSIO^1\\\\2a5`MUI3\\\\1[2i5dMZJY2g5gMZJX2e5hM]JV2d5jM\\\\JU2c5nM\\\\JQ2d5QN\\\\Jm1d5XN[Jd1e5]N\\\\Ja1d5`N\\\\J_1d5bN\\\\J]1d5cN]J\\\\1c5eN\\\\J\\\\1c5eN\\\\J[1d5gNZJZ1e5lNVJT1i5nNUJS1j5oNTJQ1l5QORJP1l5ROTJm0l5TOTJk0l5VOTJi0l5YORJh0m5YOSJf0l5\\\\OSJd0m5]ORJc0n5]OSJb0d5TN]I[1n0a0c5XN\\\\IY1Q1>a5]N\\\
\IV1S1<_54`JK`56`JI_59aJG^59bJG^5:bJE^5;bJE^5<bJC^5?aJ@^5b0aJ_O^5a0cJ^O]5c0cJ\\\\O]5d0dJ[O\\\\5e0dJ[O\\\\5e0eJZO[5f0eJZO[5g0eJXO[5h0eJXO[5h0fJWOZ5j0eJVO[5j0gJTOY5m0gJROY5o0fJQOZ5P1fJoNZ5Q1gJnNY5S1fJmNZ5S1gJlNY5T1hJjNY5V1hJiNX5W1jJgNV5Y1kJfNU5Z1lJeNT5[1mJdNS5\\\\1nJcNR5]1oJaNR5_1oJ`NQ5a1oJ^NQ5b1oJ^NQ5b1PK]NP5c1QK[NP5f1oJZNQ5f1QKWNP5i1UKQNl4P2ZKhMg4Y2ZKcMh4]2ZK_Mh4b2T2001O1O1O1O2N001O00001O1O002N000000O1000000N3M3[O\\\\_S3\"}}, {\"image_id\": 86, \"category_id\": 1, \"bbox\": [187.0, 8.0, 265.0, 328.0], \"score\": 0.9999929070472717, \"association_id\": 2, \"light\": [-1.655167579650879, -2.4752323627471924, 1.477607011795044, 2.3404879570007324], \"segmentation\": {\"size\": [371, 483], \"counts\": \"\\\\hS23_;6K1N2O1O1N2O001O1O1O2N1O1O1QF@i8b0mFc0V8^OfGm0R8UOiGQ1T8d1C6J4L2N2N1O3M3M2N2N1O1O101ZKeKP2]4`MbKZNa0h3n3mM_K\\\\Nc0h3n3kM_K]Nc0j3m3UNRLm1n3QNRLP2n3oMeKkM5X4U4lMeKmM65Fm3\\\\4QNeKPN81Io3Y4WNnKiMJR4V4UNPLiMJT4T4RNRLY2o3dMSL]2l3bMTL_2n3_MQLc2P4VLhK38i0Km0V4nMPL;Nj03g0Q4jMWLd0Dl06d0P5_NjJm08d0m4_NkJm09d0l4]NlJo08f0j4[NnJn0:g0g4ZNPKo0:f0f4ZNQKP1:e0e4WNUKT17e0c4TNZKV13f0c4SN[KW13e0b4SN\\\\KX13d0a4SN\\\\KY14d0`4RN\\\\K[15b0_4PN^K_13b0^4iMdKf1O`0]4fMhKi1Lb0^5_OcJb0Z5_OfJc0b3cMQMj1^Od0U5\\\\OlJd0R5^OmJc0Q3`MmMn1SOc0k2^MTNQ2QOc0g2[MZNT2oNc0\\\\2^MfNQ2nNc0W2]MkNR2nNc0S2\\\\MoNR2oNc0h1bMYOl1oNc0`1hM_Og1QOc0[1hMDf1QOe0T1hMKd1QOf0j0mM4_1ROg0b0oM;[1SOh0<PNa0Y1SOk0KYNP1n0TO^1QOjMj1j0UOZ3j0gLVOY3k0fLUOZ3l0eLTO[3m0eLRO[3o0dLQO\\\\3P1cLoN^3Q1cLnN]3S1bLmN^3S1cLlN]3T1cLkN^3V1bLhN_3Y1aLfN_3Z1bLdN_3]1aLaN`3`1`L^Na3c1_L[Nb3f1^LXNc3i1\\\\LUNf3k1\\\\LRNe3n1]LoMe3Q2]LjMe3V2l21O01O01O01000O10000O010O010O010O100O10O01O0100O0100O10000O01O01O010O00100O1O1O010O0100000000O0100O0100O100O100O01000000010O01O000O10O1001O9G4L3M1O00O0100O1O2O0O1O0010O01O010O01000O010O010O00010O00001O01O000010O01O1O010O010O01O1O010O001O010O10O100O10O01O1O010bNiG^OY8=nG_OS8>RH_Oo7?SH@m7>VHBj7<XHCh7<YHDh79[HFf79[HFf78[HIf75[HJf74\\\\HKe74\\\\HKe73\\\\HNc72^HMc72^HMc72^HMd72\\\\HMg70[HNh7OYH0j7NVH1l7OSHOR8MnG3U8JlG5j9O0
01N3M2O2LYm:\"}}, {\"image_id\": 87, \"category_id\": 1, \"bbox\": [18.0, 5.0, 294.0, 184.0], \"score\": 0.9999967217445374, \"association_id\": 2, \"light\": [-2.3636131286621094, -1.2050409317016602, 2.144115924835205, 1.0063164234161377], \"segmentation\": {\"size\": [305, 405], \"counts\": \"b]5a0m85L3M5K4L3N3K5UJhNX3[1cLiN[3Y1bLjN]3V1aLlN^3W1]LlNa3W1\\\\LkNc3W1ZLkNe3W1ULnNj3T1RLoNl3T1QLnNm3W1mKlNR4V1jKmNU4U1iKkNW4X1eKiNY4]1aKcNa4_1\\\\K^Ni4c1SK\\\\NP5o24N2O0O1000O1_KUL^3i3bL[L[3d3eL^L[3b3eL^L[3a3eLaLZ3_3fLaLZ3_3fL`L[3a3dL^L]3n3WLRLi3P4ULoKk3T4RLlKo3V4oKjKQ4W4nKhKS4X4mKhKS4X4mKhKS4X4mKgKT4Y4kKhKU4W4lKiKT4W4lKhKT4Y4lKgKT4a3dKmL8BU4[3mKmLOHT4Y3RLkLKLS4W3ULlLHLS4W3XLlLDNR4W3[LkLCNQ4W3]LkLBNQ4V3^LlLANQ4P1PLa0?aN@NQ4n0SLa0=cN_ONP4n0WL=<fN]O0P4k0[L9<lNYO0P4h0`LFk0BeN0o3h0cLCk0EcN0n3g0gLCh0FcNOn3f0kLDd0GcNOl3f0PMCa0HcNOk3e0TMC>HcN1j3c0XMC;IcN1i3c0[MB9JcN1h3d0\\\\MA9IdN2f3d0^MA8IdN2e3e0`M_O7JdN2d3f0aM^O7IeN3c3f0bM]O6JdN4d3e0cM\\\\O5KdN4d3e0eMZO3MdN4d3e0fMYO2MeN5c3e0gMWO3NcN5d3g0ESOhL6c3g0ESOhL6c3f0FTOgL6c3f07ZOIf07YOJg06YOJf07ZOIe08[OHd09\\\\OGc09^OH?:AG=:CI7HC`L6V8LhG5X8KhG4Y8LgG4Y8LgG4X8MhG3X8MhG3X8MgG4Y8LgG3Y8NgG2Y8OeG2[8NdG3\\\\8MdG2]8NcG2]8NbG3^8MbG3]8NcG1^8ObG1^80`G1`8O`G1`80^G0b82YG2g8<0O1000000000O100000000O1000000000000O2O001O0O101O00000O10000O2O00000000O001O1O10O0100000000O1O1000000O1000O10000000O1000000O10000000000O1000000O10000000O10O100O1O10000O10000000001O0000000000001O0O101MdW9EihF0O101O0O100O100000000O10000001N1000_GJh75WHLi73XHNg72XHOh71XHOh70YH0h7NXH3c80000000000000O1000000O100O1000000000000000O3L3NTak0\"}}, {\"image_id\": 87, \"category_id\": 1, \"bbox\": [295.0, 0.0, 44.0, 30.0], \"score\": 0.9999549984931946, \"association_id\": 1, \"light\": [-1.8264102935791016, -1.6992520093917847, 1.6943771839141846, 1.5954134464263916], \"segmentation\": {\"size\": [305, 405], \"counts\": \"alg21\\\\96]O1^G1^8c0O10000000H_G@a8?aG@_8=fGAZ8?gG@Y8?=00O]GBU8;nGER84UHLk72WHNf80000000000000000000O100000000000000000O100000000000Vdc0\"}}, {\"image_id\": 87, 
\"category_id\": 1, \"bbox\": [210.0, 26.0, 179.0, 172.0], \"score\": 0.9999992847442627, \"association_id\": 3, \"light\": [-1.7391562461853027, -2.089385509490967, 1.540544033050537, 1.9263391494750977], \"segmentation\": {\"size\": [305, 405], \"counts\": \"ocn14\\\\95J4oNHcH;Y7JdH8Y7McH5[7NcH3Z7OeH3Y7OfH3W7NiH2V70hH2W7OgH2Y73aHO;]OY6g1dI\\\\N[6h1_I\\\\N^6]2N2O1N1O2O2M5L3M4M7QKbL[3a3`LdL^3]3_LeLa3`3XLaLi3a3RLbLo3`3iKdLl4f32N001O001O2N2N1O001O1O2N2N1cNjJ]NX5^1QK\\\\NR5^1VK]Nk4a1XK]Ni4b1XK]Ni4a1YK^Ni4`1VKaNl4\\\\1UKdNl4[1TKWNPO7m5_1TK[NPO4P6]1QK_NQO1Q6\\\\1PKcNTOMl5^1QKeNSOMm5\\\\1PKhNROMQ6S1RKPOmNMS6m0TKUOjNNT6i0SKZOhNNX6a0TK@eNOX6>UKCcNOZ6:TKHbNN_61SK0_NO`6LTK5[NOa82_GNa82^GOb81^GOb80_GOa82^GOb81]GOd81[G0e8O\\\\G1d8O[G1f8OZG1e80ZG1f8OYG2g8NYG2g8NYG1g8OYG2g8NYG2g8NYG1h8NXG3g8NYG1h8OXG1h8OXG1h8OWG1j8OVG0k80UG0k80UGOl81SG0m80SG0m80SGOn81RGOm82RGOn891000O1000000O100000000000000000000O10000O100O10O10O100000000O100000000O1000O10O10000O0100000O1000000000O10O10000000000000O1000O10000001AXG2i8M\\\\GOd80^GOb81^GOb80_G0a80`GNa82bGK^84`00000O101O0000cc4\"}}, {\"image_id\": 88, \"category_id\": 1, \"bbox\": [62.0, 194.0, 253.0, 419.0], \"score\": 0.8827324509620667, \"association_id\": 1, \"light\": [-1.6005719900131226, -2.6096689701080322, 1.461965799331665, 2.4065053462982178], \"segmentation\": {\"size\": [924, 520], \"counts\": 
\"`[h1e0Wl01N2O001O1O2N1O1O1O1O2N1O1O1O2N2N1O001O1O2N2N00001O001O2N2]UOgN_i0Z1^VOkN_i0V1^VOPO^i0c2D5K`0@5K3M1O1O2N1O2N2N3M1O00O1?A4L2N1O2N2N2N1O1nIbJdD^5X;kJcDV5Y;TK`Dl4];\\\\K_Dd4\\\\;cKaD]4];hKaDe0[KP1Q`0_NbD1jKc1c?_NcDHlKl1_?_NeDYOQL^2X?]NfDmNQLn2X?ZNfD_NnK`3[?WNhES2W:SNTD]3j;mLcC]3]<kLYCX3e<lLUCX3j<jLRCX3n<lLlBV3T=nLeBU3[=QM\\\\BR3d=VMPBn2o=ZMgAi2Y>ZMaAi2_>YM]Ai2c>YMXAj2h>YMQAk2o>YMf@n2Y?\\\\5O0000001N10000O100O100O100O010O010O010O01O1O001O1N2O001O1O1O1O1O1O1O1N2O1N2N2M4L3N2M3M4L3O1O2O1O1N2O001N101O001O00001O0O10001O00001O00001N10001O1O2N2M5L4L3M2N3M3L4M4L5K4K5L3M2N2M3N3M9Gg1XN9H2N2M3N2N2O3L3M2N2N100O1O101O0O2O2N1N1001O0O2O1O1O1O0O20O\\\\LjIa@V6\\\\?oIc@P6[?TJe@j5X?ZJi@d5U?`Jj@_5T?dJl@[5R?hJn@X5P?iJPAW5o>iJSAU5n>jJTAU5k>kJ[AP5d>PKcAj4]>UKiAf4V>ZKmAc4T>\\\\KoAb4P>^KRBa4n=ZKWBd4l=UK\\\\Bh4f=QKbBk4mb0J6K5J5K3N2M3M4M2M5K4M3L3N2M3N2M4L5L3L4M2M3N2L7J<C:F8H:YOP_h5\"}}, {\"image_id\": 88, \"category_id\": 1, \"bbox\": [312.0, 430.0, 151.0, 292.0], \"score\": 0.8741651177406311, \"association_id\": 2, \"light\": [-2.1785502433776855, -2.27419376373291, 2.024045944213867, 2.0673534870147705], \"segmentation\": {\"size\": [924, 520], \"counts\": \"fUj8Q1ek0;G6J6K6J6J4L4K4fJgMm_O^2P`0jMd^O7iLS2cd0fMa^O?cLo1ld0bM`^O`3aa0_L^^Ob3ba0^L]^Od3ba0]LZ^Of3fa0[LT^Oj3la0XLn]Ol3Rb0VLj]Ol3Vb0VLf]Ol3[b0SL\\\\\\\\OJ\\\\OU4\\\\d0nKi[Ob0Bb3jd0hKZ[OT6id0jIS[OY6Qe0jIdZOZ6]d0lIi[Of6Ud0_Ii[O`6Vd0dIi[OZ6Vd0jIh[OT6Xd0PJf[On5Yd0_1L3M2N1O2OcKT\\\\OJkc05U\\\\ONjc01V\\\\O0jc0OT\\\\O4lc0^4000O10000000000000000000000001O001O00000000001O1O1O1O001O0000001O0000000000000000000000000000000000000000000000000000000000000000O1O1O1O1N2M3L4@`0H8M3L4SOm0^Ob0K5M3L4F:^Ob0F:L4L4kNU1B>J6M3L4D<L4M3M3N2O1O1O1O10000000001O000O101O2L=ZOZPc1\"}}, {\"image_id\": 89, \"category_id\": 1, \"bbox\": [32.0, 17.0, 403.0, 284.0], \"score\": 0.9635078310966492, \"association_id\": 2, \"light\": [-2.565894603729248, -1.6161234378814697, 2.4324986934661865, 1.4201130867004395], \"segmentation\": {\"size\": [725, 525], \"counts\": 
\"Phf024l0Se0k0C3OO0O20OO0N1K6N20J700001000O1001O01O001O1O1N101O1O1O1O001N101O1O1O00001O1O1O1O0000001O100O010O0000001O1O1O010O000000001O1O1O0000001O00010O00100O01O1O1RNn[OBO^1Yd0bNi[OM:_1ld0J3M8H2O0O3M4M4L0O2N4M0O2M2O2N1O_Lk2c]ORM[b0X3<4L4L3N2O1N2O0O2O1O0O2O001O0O2O00001O001O001O001O1N101O010O00001O00001O0000001O00010O00000010O0001O001O100O4L3M6\\\\^O`Kg`0\\\\5L0O2N1O101N2N101N2N1O2O1N1O1O0000001O0001O00000001O00001O001O0010O01O001O1O001O2N1O1O2N1O1O2N1O1O2N1O3M3M7I6J5K2O0O1O1O001O001O1O1O001O1O2N1O1O2N1O2N2N10O01O001O000000001O000001O00000000001O0000001O00001O0000001O000000001O000000000000001O000000000000000000000000001O0000000000000000000000000000000000000000000000000000001O000000000000000000001O00001O00001O001O1O00001O0000001O0000001O00000000001O000000000O10001O0000000O100000000O2O0O1N3fMiAPL[>g3TBoKP>\\\\3ZCgKo<P4o2N3N1N3N2N1O2L3M4K4L4N3M2N2N3N1N3M3M2N3I7F;C=H7C>J5L4BXeo1\"}}, {\"image_id\": 89, \"category_id\": 1, \"bbox\": [118.0, 339.0, 332.0, 286.0], \"score\": 0.9999485611915588, \"association_id\": 1, \"light\": [-2.214358329772949, -1.9751724004745483, 2.0635926723480225, 1.8014012575149536], \"segmentation\": {\"size\": [725, 525], \"counts\": 
\"Umc27Zf06M4L4L3M2N2O1N1O2O1N3M3M3M2N101N2O2M3N2M2O1N2N2N2N3M3M3M2M2O2N2N3L4M3N2M2O1N2O3M4L9G9G5K1O1O2N2N3M2N2N1O100O1O2N4M2M3M2O^L]^OQ2ca0eMh^OZ2Za0`Mj^O`2Wa0\\\\Mm^Oe2Sa0VMR_OP3i`0kLZ_OW3e`0dL__O[3b`0bLb_O\\\\3^`0aLe_O_3ja0O9G10OXMc\\\\OX2]c0dMi\\\\OZ2Vc0bMP]O\\\\2Qc0`MX]OY2lc0G4L3M3N2NRM[Nj@d1P?bNQA]1k>gNVAX1i>iN[AS1c>oNaAm0^>UObAk0\\\\>WOcAj0Z>ZOeAe0Z>]OeAc0Z>_OeAa0Z>_OgAa0Y>^OgAc0X>^OgAc0Y>\\\\OgAf0X>YOhAi0W>VOiAk0V>VOiAk0W>TOiAm0W>SOhAm0Y>ROgAo0Y>QOfAP1Z>oNfAR1Z>nNeAS1Z>mNgAT1X>kNhAV1Y>iNeAY1[>fNeAZ1\\\\>fNbA[1_>dN`A^1`>aN_A`1a>aN^A_1c>`N\\\\Aa1d>_N\\\\Ab1c>^N\\\\Ac1d>]N[Ad1f>[NYAf1g>ZNXAg1h>ZNUAh1k>XNSAj1m>VNQAl1P?SNo@n1Q?SNm@o1S?QNl@o1T?RNj@o1W?RNf@P2Z?UN`@l1_?XN\\\\@j1d?WNY@k1f?VNY@k1g?VNV@k1j?WNS@k1l?WNP@k1Q`0WNk_Ok1T`0WNh_Ok1X`0XN__On1b`0VNU_Oo1j`0n100000000001O00001O001O001O001O0O2M2VM`^O9fa0E[^O>ca0A]^Of0^a0XOb^Om0Za0SOf^On0Ya0QOg^OQ1Ya0nNf^OU1Xa0kNh^OW1Wa0gNi^O`1\\\\`0bLU@n1_Of1U`0`LY@j1Ai1S`0`L[@e1Cl1P`0bL[@b1Dn1P`0aL\\\\@_5b?bJ]@_5b?bJ]@_5b?bJ^@]5b?dJ]@]5b?dJ]@\\\\5c?eJ]@[5b?fJ]@Z5c?fJ^@Y5b?hJ^@W5b?iJ`@U5`?lJ`@m0Ga2i?bLa@j0Hd2g?cLa@g0If2f?dLa@d0Ii2f?cLc@`0Im2e?cLc@=HQ3e?cLe@7GW3d?cLf@1I\\\\3a?eLh@IHc3`?eLn@]OEo3]?eLdA]3\\\\>eL`A]3`>fLYA^3h>W20001O000001O0^IV@Y6T`0O1O1O0001O001O100O1BdI`@]6_?gI\\\\@[6d?<O0O101O000O1000000000001O0000000O10001O1O001O1O1O1O0000001O0000000000000000001O0000000000000000aIj@_5V?`Jm@^5S?aJPA]5P?bJQA^5o>aJSA^5m>aJTA_5l>`JVA_5k>^JXAa5h>\\\\J[Ad5e>ZJ^Ae5b>XJaAh5_>VJdAi5]>TJeAl5\\\\>QJfAo5[>nIgAR6^?01O0000000O1000000000001O1N2N3M3M2N1O2N2O0O2O2N1N2O1N2M3N2M3N3M2N2N2N3L6XLi^Oi1]a0mMk^Oo1[a0iMj^OT2nb0N1N3M2M4L4L3N3M2M4L4L6I6J5L3N3M4Kkad1\"}}, {\"image_id\": 90, \"category_id\": 1, \"bbox\": [130.0, 232.0, 377.0, 221.0], \"score\": 1.0, \"association_id\": 2, \"light\": [-1.7752056121826172, -2.4285738468170166, 1.603422999382019, 2.20890736579895], \"segmentation\": {\"size\": [613, 820], \"counts\": 
\"US^2k0Wb09H4M3M3L=P_OeN^?l2D6K2N2N1O3L3M7J4L2N2N2M3M4K5L3N2N2O1N2N3L3N2N2N101O2N2M2O1O000O2O0O1O2N1O2O1N2O1O0O2O000000000000001O000000001O00001O0000001O001O1O001O00001O00000000000ZChI_<X6`CiI`<W6`CjI_<V6aCjI_<^601O000000001O000000001O00001O00001O0001O0000001O1O1O1O1O010O00000O101O00001N2]O[CdJg<Z5YCfJh<X5XCiJi<V5WCiJj<W5VCiJk<U5VCkJk<T5UCkJm<T5RCmJP=Q5PCPKP=n4QCRKP=m4PCSKQ=l4nBUKS=i4nBWKS=h4mBXKT=f4lB[KV=b4kB^KV=a4iB`KX=^4iBbKX=]4gBdKZ=Z4gBfKZ=Y4fBgK]=U4cBmK]=R4cBnK^=Q4aBPL`=n3aBRL`=m3_BTLc=i3^BXLd=e3ZB]Lj=]41O1O1O010O2hNQBVMP>h2PBYMR>c2PB]MP>b2PB_MQ>_2PBaMQ>]2nAeMS>Z2lAgMV>V2iAlMY>_31O1O1[NeA^N\\\\>^1fAcN\\\\>Z1eAgN]>T1dAmN^>P1cAPO^>n0bASO`>j0aAVOa>d0bA]Oa>:dAGl`01L4B>K5L4BdMo_Oa2i?a0O1O1N2O1N2M3N1O2I7O100O100O10000O100000O010000O10000O1000000000O1000O0100O100O1000000O100000000O1000000O100O10000O10000O100000000O1000000O10000O10000O100O1O100O1O100O2O0O1000O010000O100O100O100O100O100000000O100000000O10000O100O1000000O1000000O10000000000O1000000O100O100O10000O100000000O10000O100O1O10000O10000O2O00000O2O0O2O2M2O1N2O001N10001O0O2O1O0O5K`0@4M2M3N1N2O1N101O1N2N3M6J<DRPk5\"}}, {\"image_id\": 90, \"category_id\": 1, \"bbox\": [483.0, 192.0, 331.0, 224.0], \"score\": 0.9999997615814209, \"association_id\": 1, \"light\": [-1.6415183544158936, -1.7139639854431152, 1.5554664134979248, 1.559539794921875], \"segmentation\": {\"size\": [613, 820], \"counts\": 
\"Y]Q9m0Tb08J4M3M2M4L5Db0E5L2N2O1N2M4L7I<E4L3M3M3M4M4K6J2O2N0O2N2N2M4M3M2N2O0O2N2K5K5M2O2O0O2N1O1K5L4N200O1O1O1O1N2O1O10000O10000000000O1O1O1O1O1O100O100000000aChIQ<X6nCiIR<W6mCjIS<V6mCkIR<U6nCkIR<U6nCkIR<U6mCmIR<S6nCmIR<S6nCmIR<S6nCmIR<S6nCnIQ<R6nCoIR<Q6nCoIR<Q6nCoIR<Q6nCPJQ<P6oCPJQ<P6oCPJQ<P6oCPJQ<P6oCQJP<o5PDQJP<o5oCRJQ<n5oCRJQ<n5oCRJQ<n5oCRJQ<n5oCRJQ<n5nCSJR<m5nCSJR<m5mCTJS<l5mCTJS<l5lCUJT<k5lCUJT<k5lCUJT<k5lCUJT<k5lCUJT<^6000O10000O10000O1000001ZOhC]JX<b5hC_JX<`5iC`JW<`5hCaJY<]5gCdJZ<Y5gChJZ<V5fCkJ[<R5fCoJ[<P5eCoJ\\\\<P5dCQK\\\\<n4eCRK\\\\<l4dCUK\\\\<j4eCVK[<h4fCYKZ<f4dC\\\\K]<c4aC`K`<a5101N2O1iNZCmKg<Q4ZCoKg<o3YCQLi<m3XCSLi<l3VCULj<j3VCWLj<R5001O00001O002N1^NPCmLQ=c4001N2O001O1O2N2TNgBhM[=U2eBlM]=Q2dBoM]=o1cBRN^=j31O3M8_MVBWOS><PBET>5nAKU>1kA0W>MjA3Y>HhA9\\\\>BeA>`>YOaAj0i>bN[Ad1g>VNn@Y2T?bMh@e2Z?VMg@l2_?jLd@W3[?kLc@V3]?kLa@V3^?900O1O100O100O10000000000000000000000000000000000000000000O1000000O100O1000O0100000000O10000000000000000000000O10000O1O010O1000000O100000O0100O100O10000O1000O010000O100O10000O2N100O2N1O2N1O10001N101N101N2J6M200O2O0O101N101N1N3N2M200O2O0O10001N100O2O0O2N2O0O101O000O2O001N101N2O0O2O0O2O1N2N7EYY3\"}}, {\"image_id\": 91, \"category_id\": 1, \"bbox\": [512.0, 251.0, 60.0, 52.0], \"score\": 0.9976826310157776, \"association_id\": 2, \"light\": [-2.3705224990844727, -1.9791377782821655, 2.272822856903076, 1.8562800884246826], \"segmentation\": {\"size\": [453, 640], \"counts\": \"`hR72o=8H:H6L1kBZOh<h0VCZOh<g0XCYOh<h0WCXOi<h0WCYOh<g0XCYOh<g0XCYOh<h0WCXOh<i0XCWOh<i0XCWOh<i0XCWO`<LbCm0NWOi<h0WCXOj<g0VCYOk<f0UCZOl<d0UC\\\\Ok<d0UC\\\\Ok<c0VC]Oj<b0WC^Oj<a0VC_Oj<`0WC@i<`0WC@i<a0VC@i<`0WC@i<a0VC_Oj<c0TC]Ol<d0TC[Ol<o00HSCXOm<P11O0000000000001HQCXOo<h0QCXOo<g0RCYOn<d0VC[Oj<c0XC]Oh<`0[C@e<?\\\\CAd<>^CAb<?^CAb<?^C@c<`0]C@d<?]C@c<`0a0O1M3O1N2O2NYjm0\"}}, {\"image_id\": 91, \"category_id\": 1, \"bbox\": [76.0, 106.0, 449.0, 331.0], \"score\": 0.999988853931427, \"association_id\": 1, \"light\": [-1.514719843864441, -1.8166227340698242, 1.3914930820465088, 
1.6492068767547607], \"segmentation\": {\"size\": [453, 640], \"counts\": \"[jQ1[2d;?E4M2N2N3N1N1O2O1O1O0O2O1O1O001N2O001O1N101O0O2O1gEaLj9_3VFaLj9`3TFbLk9^3UFbLk9_3TFaLl9m3O000000001O01O01O1O3M2N1O1O1O1O1O001O001O001O001O001O1O1O1O00001O000O101N1O10001N101N101O0O10001O0O1000001N100O101N100O5K3L2O1O2N1O1O1O2N1O100O1O1N3M2N2MB[HcJl6h6O2M2N2O000O2O0O01000000N12O1O1O1O1O1O1O1O1O1O1O1O001N101O00000SJSHb5m7\\\\JWHb5i7]JZHa5f7^J\\\\Ha5d7_J\\\\Ha5d7_J]H`5c7_J^Ha5b7_J^Ha5b7_J_H`5a7`J_H`5a7_J`Ha5`7_JaH`5_7`JaH`5_7_JbH`5_7`JbH_5^7`JeH^5R8O2N2N1N2O001N100O101N1O10001N101N2O001O0O2O001O000O101O001O001N2O1OcN`KXI]4h6cKZI[4f6eK[IY4f6hKYIX4g6hKZIW4g6hKYIX4g6iKYIU4h6kKXIU4h6kKXIT4i6lKWIT4i6lKXIS4h6nKWIQ4j6oKVIP4k6PLVIo3j6RLUIm3l6SLTIm3l6SLUIk3m6TLSIl3m6ULRIk3n6ULRIj3o6VLQIj3o6VLQIj3o6VLQIj3o6VLRIh3o6XLQIh3o6XLQIh3o6XLQIh3P7WLPIi3P7WLPIi3P7WLPIi3P7WLPIi3P7WLPIi3P7WLPIi3P7WLPIh3Q7XLoHh3Q7XLoHh3Q7XLoHh3R7WLnHi3R7WLnHi3R7WLnHi3R7WLnHi3R7WLnHi3R7WLnHi3R7WLnHi3R7WLnHi3R7WLnHi3R7WLnHi3R7WLnHi3R7WLnHi3R7WLnHi3R7WLnHi3R7WLnHi3R7WLnHi3R7WLnHi3R7WLnHi3R7WLnHi3R7WLnHi3R7WLnHi3R7WLnHi3R7WLnHi3R7WLnHi3R7WLnHi3R7WLnHi3R7WLnHi3R7WLmHj3S7VLmHj3S7VLmHj3S7VLmHj3S7VLmHj3S7VLmHj3S7VLmHj3S7VLmHj3S7VLmHj3S7VLmHj3S7VLmHj3S7VLmHj3S7VLmHj3S7VLlHk3S7VLmHi3T7WLlHi3T7WLkHj3U7ULlHk3T7ULkHl3U7TLkHl3T7ULkHl3U7TLkHl3U7TLkHl3U7TLjHm3U7TLkHl3U7TLkHl3U7SLkHn3U7RLkHn3T7SLlHm3T7RLlHo3T7QLlHo3S7QLmHP4S7PLmHP4R7PLnHQ4R7oKmHR4R7nKoHR4Q7mKoHT4Q7lKoHT4P7mKPIS4P7lKPIU4P7kKPIU4o6lKQIT4o6lKQIT4o6kKQIV4o6jKQIV4n6kKRIU4n6kKRIU4n6jKSIV4m6jKSIV4m6jKSIV4m6iKSIX4l6iKTIX4k6gKVIY4j6gKVIY4[80^NgKWIZ4i6fKWIZ4Z80^NfKYIZ4g6fKXI[4h6eKXI[4g6fKYIZ4g6eKZI[4f6eKZI[4e6fK[IZ4e6eK[I\\\\4d6eK\\\\I[4c6eK^I[4b6eK^I[4a6fK_IZ4a6eK`I[4`6eK`I[4_6fKaIZ4_6fKaIZ4^6gKbIY4^6gKaIZ4_6eKbI[4^6eKbI\\\\4\\\\6eKdI[4\\\\6eKdI[4\\\\6dKeI\\\\4[6dKeI\\\\4Z6eKfI[4Z6dKgI\\\\4Y6dKgI\\\\4Y6dKgI\\\\4Y6cKgI^4Y6bKgI^4Y6bKgI_4X6aKhI_4X6aKhI_4X6`KiIa4U6`KjIa4V6_KjIa4V6_KjIb4U6^KkIb4U6^KjId4U6\\\\KkIe4T6[KlIe4T6[KlIf4S6ZKmIf4S6ZKlIh4S6XKmIh4S6XKmIh4S6XKmIi4R6WKnIi4R6WKnIi4R6WKn
Ii4R6WKmIj4S6VKmIj4R6WKnIi4R6WKnIj4Q6VKoIj4Q6VKoIj4Q6VKoIj4Q6VKoIj4Q6VKoIj4P6WKPJi4P6WKPJi4P6WKPJi4P6WKPJj4o5VKQJj4o5VKQJj4n5WKRJi4o5VKQJj4o5WKPJi4P6WKPJi4P6WKPJi4P6WKPJi4P6WKPJi4Q6VKoIj4Q6WKnIj4Q6VKoIj4Q6VKoIj4Q6VKoIj4Q6WKnIi4R6WKnIi4S6VKmIj4S6WKmIh4S6XKmIh4S6XKmIh4S6XKmIi4R6XKlIi4T6WKlIj4S6VKmIj4S6WKlIj4S6VKlIk4T6VKkIk4T6VKjIl4U6XKgIh4Y6YKfIh4Y6YKeIi4[6WKdIj4[6WKcIj4]6WKbIj4]6VKcIj4]6WKaIk4^6UKbIk4^6VKaIj4_6VK`Il4_6UK`Ik4`6UK_Im4`6TK^Im4b6TK]Im4b6UKZIn4e6TKWIn4h6Y1O3M4gIVIo4n6]JgI^5e7K3M2N2VKWGT4k8gK\\\\G1Jc3l8WLPHg3Y9N2M4M8H4K2O2M2N3N1M4L3M4dNaDKd;IREIQ;3YE\\\\OR;`0\\\\1M4K4J^cb1\"}}, {\"image_id\": 92, \"category_id\": 1, \"bbox\": [4.0, 399.0, 311.0, 256.0], \"score\": 0.9999988079071045, \"association_id\": 5, \"light\": [-1.3576252460479736, -2.5921144485473633, 1.288487434387207, 2.4517982006073], \"segmentation\": {\"size\": [768, 1024], \"counts\": \"Z_3^3_d08J3L4M2N3M2M4L4L4L3N2O1N2N2O001N1O2N2N1O2M3N2N1O1O2O0O1O1O1M3M2N3N2N2O1O10O01O1N1O2M3M201O1O010O10O0100O001N1O1010O00001O0O]^OhIaa0c6He0[O2N2O0O2O0O10000O101N1O1O1O1O100O1000000O100000000000001O0000000000001O000O10001O00^No_OUKQ`0k4o_OUKQ`0k4o_OUKQ`0k4o_OUKQ`0k4o_OUKR`0j4n_OVKR`0\\\\600000001O\\\\No_OYKQ`0g4o_OYKQ`0g4o_OYKQ`0f4P@ZKP`0f4P@ZKP`0f4o_O[KQ`0e4o_O[KQ`0e4o_O[KQ`0d4P@\\\\KP`0d4P@\\\\KP`0d4P@\\\\KP`0d4P@]Ko?c4P@^KP`0b4P@^KP`0b4P@^KP`0b4P@^KP`0a4Q@_Ko?a4Q@_Ko?a4Q@_Ko?a4Q@_Ko?a4Q@_Ko?a4P@`KP`0`4P@`KP`0`4P@`KP`0`4P@`KP`0X6000000000000000000000000000000000000000000000000000000000XNP@`KP`0X600000YNP@^KP`0b4P@^KQ`0X600000ZNo_O]KQ`0Y60000000000000000000000000000000000000000000000000000000000001O000000000O101O0000001N10001lNi_O]JW`0c5j_O\\\\JV`0e5j_OZJW`0f5i_OYJW`0h5i_OWJW`0j5h_OUJZ`0j5g_OUJY`0l5g_OSJY`0n5j_OmIW`0S6l_OjIU`0V6k_OiIU`0W6k_OiIU`0X6k_OfIV`0Z6j_OfIV`0Z6k_OeIU`0\\\\6k_OcIU`0]6l_OaIU`0`6k_O^IW`0a6j_O]IW`0d6i_O[IW`0f6h_OYIY`0h6f_OXIZ`0h6g_OVIZ`0k6e_OUI[`0j6f_OUI\\\\`0j6=01O1N2O1N2O1N101N101N1O2M3N2L5L3M3N2O12N1ON2O2N3M4L4K4M2N1O1O1ZOW]ORLkb0m3X]OmKlb0Q4c0N3N5J5L2M2O1N2O1O0O2O2M4M3L4M2M3M3M2N2N
2N3M3M3LY1YNgWc`0\"}}, {\"image_id\": 92, \"category_id\": 1, \"bbox\": [526.0, 389.0, 56.0, 58.0], \"score\": 0.9999697804450989, \"association_id\": 3, \"light\": [-1.766079306602478, -2.7871062755584717, 1.6535357236862183, 2.6414835453033447], \"segmentation\": {\"size\": [768, 1024], \"counts\": \"imZ<2Tg0=VYODgf0e0TYOZOkf0j0RYOVOlf0m0SYOTOkf0n0UYOQOgf0W1I6M3ONdYObN[f0_1eYOaN[f0_1eYOaNZf0`1fYO`NZf0`1fYO`NYf0a1gYO_NYf0e1OKhYObNWf0^1jYObNVf0^1jYObNVf0^1jYObNUf0_1kYOaNUf0_1kYOaNUf0_1kYOaNUf0_1kYOaNUf0_1kYObNTf0^1lYObNTf0^1lYObNTf0]1mYOcNSf0]1mYOcNSf0]1mYOcNSf0]1mYOcNSf0]1mYOcNSf0]1mYOcNSf0\\\\1nYOdNRf0\\\\1nYOdNRf0\\\\1nYOcNSf0]1mYOcNSf0]1mYOcNSf0]1mYOcNSf0\\\\1nYOdNRf0\\\\1nYOdNRf0\\\\1nYOdNRf0[1oYOeNQf0[1oYOeNQf0Z1PZOfNPf0X1RZOhNne0X1RZOhNne0W1TZOgNme0X1?N1N3L4E;L5J6FkS[:\"}}, {\"image_id\": 92, \"category_id\": 1, \"bbox\": [587.0, 375.0, 155.0, 223.0], \"score\": 0.9999993443489075, \"association_id\": 2, \"light\": [-1.6612759828567505, -2.9664218425750732, 1.587932825088501, 2.776059150695801], \"segmentation\": {\"size\": [768, 1024], \"counts\": 
\"Ydh=V2fe08D:L201O00001O0O0S]OeMW`0[2__OQN_`0o1[_OYNc`0g1Z_O^Nd`0b1[_O`Nd`0`1Z_OcNd`0^1[_OdNd`0\\\\1Z_OgNe`0Y1X_OjNh`0V1W_OlNh`0T1V_OnNj`0S1S_OoNm`0S1n^OPORa0R1j^OPOVa0V1b^OlN^a0k3000000000O1010O000001O00O010b^OVJf`0j5Z_OWJe`0h5[_OYJe`0g5R_OUJL5Qa0e5T_OVJK5Qa0e5T_OWJJ4Sa0c5T_OYJI5Ra0b5U_OYJH6\\\\a0W5m^OdJF5]a0W5m^OeJE4Ya0\\\\5Q_ObJD3Wa0`5T_OhJl`0X5T_OiJk`0W5U_OiJk`0W5U_OiJk`0W5V_OhJj`0X5V_OhJi`0Z5V_OfJj`0Z5V_OfJj`0Z5V_OfJj`0[5U_OeJk`0\\\\5U_OdJi`0_5V_O`Jj`0a5U_O_Jk`0c5T_O\\\\Jl`0f5R_OZJn`0g5R_OXJm`0j5R_OVJn`0l5Q_OSJo`0o5o^OQJQa0R6m^OmISa0`601O000000000000O100O1001O001O001O0000000000000O10000000O1O1O1O1O1N2O1M3Mn^OaIi`0]6Y_OcIg`0Z6[_OgIe`0W6]_OiIc`0U6R_OjI61h`0S6S_OnIZa0P6f^ORJZa0j5j^OVJVa0f5n^OZJSa0f5l^OZJTa0f5l^OZJTa0g5k^OYJUa0g5k^OYJUa0g5k^OYJUa0h5j^OXJVa0j5i^OUJWa0n5g^OPJZa0Q6f^OnIZa0S691O001O00001O00O2N100O2N1O2M3eNR^O`LQb0W3^^O`Lea0\\\\3b^O^Laa0_3d^OXLba0e3_1L4L5oNi[OSNZd0h1n[ORNUd0j1V\\\\OlMmc0o1T1M4M3N2N3L7EfSc6\"}}, {\"image_id\": 92, \"category_id\": 1, \"bbox\": [735.0, 390.0, 289.0, 198.0], \"score\": 0.9999980330467224, \"association_id\": 1, \"light\": [-2.2113921642303467, -2.4269659519195557, 2.1111209392547607, 2.3462908267974854], \"segmentation\": {\"size\": [768, 1024], \"counts\": 
\"ZiWa01ng01O2O1N1000001O00001O00000kM2X\\\\OOcc0<V\\\\ODgc0a0W\\\\O_Ofc0g0W\\\\OYOec0o0W\\\\OROcc0X1X\\\\OhNbc0`1\\\\\\\\O`Nac0e1]\\\\O[Nac0h1^\\\\OXN`c0l1^\\\\OTNac0n1^\\\\ORNac0P2^\\\\OPNac0R2^\\\\OnMac0T2^\\\\OmM`c0U2_\\\\OkM`c0X2^\\\\OhMac0Z2^\\\\OfMac0\\\\2^\\\\OdMac0^2^\\\\ObMbc0_2]\\\\OaMbc0`2^\\\\OaMac0`2^\\\\O`Mac0a2_\\\\O_Mac0a2_\\\\O_Mac0b2_\\\\O]M`c0d2`\\\\O\\\\M`c0e2_\\\\O\\\\M_c0e2a\\\\O[M_c0f2`\\\\OZM_c0g2a\\\\OYM_c0h2`\\\\OXM_c0j2`\\\\OWM_c0i2a\\\\OWM^c0k2a\\\\OUM_c0k2a\\\\OUM^c0m2b\\\\ORM^c0o2a\\\\OQM_c0P3`\\\\OQM^c0P3b\\\\OPM^c0Q3a\\\\OoL_c0R3`\\\\OnL`c0R3`\\\\OnL`c0S3_\\\\OmLac0S3_\\\\OmLac0T3^\\\\OlLbc0U3]\\\\OkLbc0V3^\\\\OjLbc0W3]\\\\OjLbc0W3]\\\\OiLcc0W3\\\\\\\\OjLdc0W3[\\\\OiLec0X3Z\\\\OhLfc0X3[\\\\OgLec0Y3[\\\\OgLec0Z3Z\\\\OfLfc0Z3[\\\\OeLec0\\\\3Z\\\\OdLfc0\\\\3Z\\\\OdLfc0\\\\3Z\\\\OdLfc0]3Z\\\\ObLfc0^3Z\\\\OcLec0]3\\\\\\\\ObLdc0_3[\\\\OaLec0_3[\\\\OaLec0`3Z\\\\O`Lfc0m30000000001O000000000000000001O000000O10001O0000000001O000000000000000000000000000L[\\\\OmKec0S4\\\\\\\\OkKec0U4[\\\\OkKec0U431O00000010O0001O00001O00001O0000001OQO\\\\\\\\ObMdc0]2_\\\\OaMac0]2c\\\\OaM]c0_2c\\\\ObM[c0^2f\\\\ObMZc0]2g\\\\OcMYc0[2i\\\\OfMVc0Y2k\\\\OgMUc0W2m\\\\OjMRc0T2P]OlMPc0R2R]OoMmb0P2T]OPNlb0o1U]ORNjb0n1V]ORNib0n1W]OTNhb0l1X]OTNgb0m1Y]OTNeb0l1[]OUNdb0l1\\\\]OUNbb0k1_]OUN`b0l1_]OVN`b0i1a]OWN^b0i1c]OXN\\\\b0h1b]OZN]b0h1`]O[N_b0e1^]O_N`b0c1\\\\]OaNbb0a1Z]OcNdb0b1V]O`Nib0\\\\3O2N100O100O1O1O1O1O101N100O10000000000O1000000O100O1O1O1O100O100O100O100O100O100O1000000O1000000O1000000O10000O100O1O100O10000O1000000000000000000000000000000000000000000000000000000000000000000001O00000O1000001O00001O1O001O0O2O2TOR^OaKQb0\\\\2o]OcNc0dNga0S2i^OUNQ6\"}}, {\"image_id\": 92, \"category_id\": 1, \"bbox\": [378.0, 385.0, 137.0, 218.0], \"score\": 0.9999852776527405, \"association_id\": 4, \"light\": [-1.9051074981689453, -2.623290538787842, 1.770370364189148, 2.415045976638794], \"segmentation\": {\"size\": [768, 1024], \"counts\": 
\"YPl8>\\\\g0;H5Ji0WO5M4M2N0O1iLXNU@k1f?_NT@a1l?eNn_O\\\\1kMjMla0R1Q@T1SNlMka0W1g_OP1_NjMha0d1a^O\\\\1HPMga0Y5[^OgJea0X5\\\\^OhJda0W5]^OiJba0W5_^OjJ`a0U5a^OkJ_a0U5a^OkJ_a0T5b^OlJ^a0U5a^OlJ^a0U5a^OkJ_a0V5`^OjJMKVa0\\\\5l^OiJK0Wa0X5m^OhJL1Va0X5m^OgJL2Wa0X5l^OgJL1Xa0Y5k^OfJM1Xa0Y5k^OfJM1Xa0Z5j^OeJM2Ya0Y5j^OeJM1Za0Z5i^OfJK1\\\\a0Y5i^OfJK1\\\\a0Y5i^OfJKO^a0[5g^OfJKN_a0\\\\5f^OfJKN_a0\\\\5f^OfJKN_a0\\\\5f^OgJJM`a0\\\\5f^OgJIMba0\\\\5e^OgJIMba0\\\\5e^OgJHNca0Z5f^OhJGNca0Z5f^OPKZa0P5f^OPKZa0P5f^OPKZa0P5f^OQKYa0o4g^OQKYa0o4g^ORKXa0n4h^ORKXa0n4h^ORKXa0n4h^OSKWa0n4h^ORKXa0o4g^OQKYa0P5f^OPKZa0Q5e^OoJ[a0R5d^OnJ\\\\a0S5c^OnJ\\\\a0T5b^OlJ^a0U5a^OkJ_a0l5O00001O0000001O0000001O0000O1Jh^OdIXa0^642N2N3M2N1O00001O0000000000000000O1O1O1L5L3L4H8F:L4N2O1O10000O11O001O1O001O001O010O1O001O10O001O1O2VKS^O\\\\3Pb0`LU^O]3ma0]LY^Oa3la0SL^^Oi3Vc0F4K5K5J7I:gM[[Oe0Qnm;\"}}, {\"image_id\": 92, \"category_id\": 1, \"bbox\": [566.0, 375.0, 100.0, 119.0], \"score\": 0.8362147808074951, \"association_id\": 6, \"light\": [-1.597334623336792, -1.8804066181182861, 1.477692723274231, 1.7211627960205078], \"segmentation\": {\"size\": [768, 1024], \"counts\": \"inX=2mg02O2N1N2O100O1O1N101O001N1dNATZO0j0`0Pd0j1Q[OUNld0P2S[OoMkd0e2M2O1O2OAZ[OdMed0]2\\\\[ObMcd0`2][O`M`d0c2`[O\\\\M_d0e2b[O[M\\\\d0f2d[O[MZd0`1j[OQOM_OXd0`1m[OPOJAYd0W1g[O_N7i0IBXd0n0i[OeN213k0IBYd0<g[OZOO1>JJl0IDYd09T\\\\OWO<k0WOFYd06R]O4eNGXd03o\\\\O\\\\OkNi0NIXd02U]O5cNIXd02V]O3cNKWd02W]O1cNMVd01Y]O0bNOUd01_]OH_N6Rd01a]ODaN:oc02a]OAcN<lc03b]O^OfN:_OEYd0c0]^O@ia0>m200O10001O0000000000001O000001O00O10000O10001O000001N101O000000O1O11N2O1OO1N2N2001O1O0O3M2O1`ZO\\\\Obc0e0[\\\\OC_c0>\\\\\\\\OHbc09[\\\\OIec0:X\\\\OFhc0<V\\\\OCjc0?T\\\\OBlc0?nZOSOn0?Sd0?lZOVOn0<Wd0;oZOVOl0;`h\\\\8\"}}, {\"image_id\": 93, \"category_id\": 1, \"bbox\": [1263.0, 917.0, 159.0, 132.0], \"score\": 0.9999813437461853, \"association_id\": 1, \"light\": [-2.006920099258423, -2.2403976917266846, 1.9138602018356323, 2.0598037242889404], \"segmentation\": {\"size\": [1536, 2048], 
\"counts\": \"e_Wk11f_10_`N5]_1:``NDP_1l0C=C<K5L4K4N3K5M3N1N2O2N1N2O1lbNXMn\\\\1j2QcNWMn\\\\1j2RcNWMl\\\\1i2TcNXMk\\\\1i2UcNXMi\\\\1P3N2OIZcNWMd\\\\1j2\\\\cNVMb\\\\1k2^cNVM`\\\\1l2`cNTM^\\\\1m2bcNTM]\\\\1m2ccNSM\\\\\\\\1n2dcNRM\\\\\\\\1Y3OEecNTMZ\\\\1l2fcNTMY\\\\1m2gcNSMY\\\\1l2hcNTMW\\\\1m2icNSMV\\\\1n2jcNRMV\\\\1n2icNSMW\\\\1m2icNTMU\\\\1m2kcNSMU\\\\1m2kcNSMU\\\\1m2kcNSMU\\\\1m2kcNSMT\\\\1n2lcNRMT\\\\1m2ncNRMR\\\\1n2mcNSMR\\\\1n2ncNRMR\\\\1n2ncNRMR\\\\1n2ncNRMQ\\\\1o2ocNQMQ\\\\1n2PdNSMn[1n2RdNRMn[1n2RdNRMn[1n2RdNRMn[1n2RdNRMn[1m2TdNRMk[1o2TdNRMl[1n2TdNRMl[1n2TdNRMl[1n2SdNSMm[1m2SdNSMm[1m2SdNSMm[1m2RdNTMn[1l2RdNTMn[1l2RdNTMn[1l2QdNUMo[1k2QdNVMn[1j2RdNVMn[1^3000000000001O0000000000000000000000[OSdNWMm[1i2SdNWMm[1i2SdNWMm[1^301O00000000000000\\\\OTdNTMl[1`300000]OTdNRMl[1n2TdNRMl[1n2TdNRMl[1n2UdNQMk[1P3TdNPMl[1P3TdNQMk[1o2VdNPMj[1P3VdNPMj[1P3VdNPMj[1P3VdNPMj[1P3VdNPMj[1P3WdNoLi[1Q3WdNoLi[1Q3WdNoLj[1P3VdNPMj[1P3VdNPMj[1Q3UdNoLk[1Q3UdNoLl[1P3TdNPMl[1P3TdNPMm[1o2SdNQMm[1P3RdNPMn[1P3RdNPMo[1P3QdNoLo[1Q3QdNoLo[1a31O001O1O1O1O1N101O2N1O1N2N9cL]cNl2n\\\\1N6@ibNkMX]1T2hbNkMZ]1S2gbNmM[]1Q2ebNnM_]1n1abNSNc]1h1_bNVNe]1f1]bNXNf]1d1=N5K7H;F6K3F:Ac`N3S`[m0\"}}, {\"image_id\": 93, \"category_id\": 1, \"bbox\": [785.0, 925.0, 151.0, 91.0], \"score\": 0.9974883794784546, \"association_id\": 3, \"light\": [-1.971238374710083, -1.9754910469055176, 1.846227765083313, 1.889282464981079], \"segmentation\": {\"size\": [1536, 2048], \"counts\": 
\"Y^jT15i_15L4Kb0^O8I3N2M2O3L3N0O101N1O1M3O100O1O001O1QbNRNk]1S2N10KWbNSNh]1S200OKYbNSNf]1n1ZbNRNe]1o1[bNQNe]1n1[bNSNd]1n1\\\\bNRNd]1m1\\\\bNTNd]1j1^bNVNa]1j1`bNVN`]1j1_bNWN`]1i1abNWN_]1i1abNWN^]1i1cbNWN]]1i1bbNXN^]1g1cbNYN\\\\]1h1dbNXNZ]1j1fbNVNY]1k1fbNVNY]1k1gbNUNY]1j1hbNVNX]1j1hbNVNW]1k1ibNUNW]1k1ibNUNW]1k1ibNUNW]1k1ibNUNV]1l1jbNTNV]1l1jbNTNU]1n1jbNRNV]1n1jbNQNV]1P2jbNPNU]1R2jbNnMU]1S2kbNmMU]1S2kbNmMU]1S2kbNmMU]1S2kbNmMU]1S2kbNmMU]1S2kbNmMU]1S2kbNmMT]1T2lbNlMT]1T2lbNlMT]1T2lbNlMT]1T2lbNlMT]1T2lbNlMT]1T2lbNlMT]1T2lbNlMT]1T2lbNlMT]1T2lbNlMT]1T2lbNlMT]1T2lbNlMT]1T2lbNlMT]1U2jbNlMV]1T2jbNlMV]1T2jbNlMV]1T2jbNlMV]1T2jbNlMV]1T2jbNlMV]1T2jbNlMV]1T2jbNlMV]1T2jbNlMV]1T2jbNlMV]1T2jbNlMV]1T2jbNlMV]1T2jbNlMU]1U2kbNkMU]1T2lbNlMT]1T2lbNlMU]1S2kbNmMU]1S2kbNmMU]1S2kbNmMU]1R2lbNnMT]1R2kbNoMU]1Q2kbNoMU]1Q2kbNoMU]1P2lbNPNT]1P2lbNPNT]1P2lbNPNT]1P2lbNPNT]1o1mbNQNS]1o1mbNPNT]1P2lbNPNT]1o1mbNQNS]1o1mbNQNS]1o1mbNQNS]1o1mbNQNS]1o1mbNQNS]1n1nbNRNR]1n1nbNRNR]1n1nbNRNR]1m1obNRNR]1n1nbNRNR]1n1nbNRNR]1m1obNSNP]1n1PcNQNQ]1n1QcNPNP]1o1QcNQNo\\\\1m1VcNnMl\\\\1P2c0O1O1O1O1L4O1N2O1M4K4K5M3O1N2L5N1N2O1O1O2O1N2N2O1N2M3M4LjRSd1\"}}, {\"image_id\": 93, \"category_id\": 1, \"bbox\": [879.0, 935.0, 173.0, 333.0], \"score\": 0.9999999403953552, \"association_id\": 2, \"light\": [-1.5380094051361084, -2.8044872283935547, 1.4407109022140503, 2.61962628364563], \"segmentation\": {\"size\": [1536, 2048], \"counts\": 
\"ldWY1X1\\\\^1`0B>I7G8G8I8I6L3M3N1N3N1N2UdN^LV[1e3hdN]LU[1e3jdN\\\\LR[1h3mdNZLUY17ggNe3SOULnX1V1lfNP35kKkX1n5SgNTJjX1n5UgNSJhX1P6XgNPJfX1R6YgNoIeX1S6[gNmIcX1U6\\\\gNlIcX1U6]gNkIaX1W6^gNjI_X1X6bgNiIWX1]6igNbIRX1d6mgN]IPX1f6PhNZImW1^7N2M]OXhNSIaW1j7L4N2N[OghNgHWW1Z7ihNgHVW1X7lhNhHRW1X7ohNiHoV1X7RiNhHjV1[7UiNgHhV1[7XiNfHeV1[7\\\\iNfH^V1_7`iNdHPV1j7RjNVHiU1m7XjNTHgU1k7ZjNVHeU1j7ZjNXHeU1i7YjNYHfU1g7ZjNZHeU1e7\\\\jN\\\\HdU1c7]jN]HbU1c7^jN^HbU1`7`jN`H_U1_7bjNbH^U1]7cjNcH\\\\U1]7ejNcHZU1]7gjNcHYU1\\\\7hjNdHXU1\\\\7hjNdHYU1Z7gjNgHYU1Y7gjNgHYU1X7hjNhHXU1X7hjNhHXU1X7hjNhHXU1W7ijNiHXU1V7hjNjHXU1V7gjNkHYU1T7hjNlHXU1T7hjNlHYU1T7fjNkH[U1V7cjNkH^U1T7bjNlH_U1T7`jNlHaU1S7^jNnHcU1Q7]jNnHfU1P7YjNQIiU1m6WjNSIjU1m6TjNTImU1k6SjNUInU1j6QjNVIQV1i6oiNWIRV1h6miNYITV1g6jiNYIXV1f6fiN[I\\\\V1d6diN[I^V1d6biN[IaV1Q82N2_N[iNbIgV1]6YiNaIjV1^6ViN^IoV1a6QiN]IRW1g72iNlhN_IWW1_6ihNaIYW1R2lhNo0KnL[W1m1RiNU2PW1cM[iNU1XOUM^W1a1diNT1oN[M^W1_1kiNo0fNcM`W1\\\\1PjNk0aNhM`W1]1RjNf0_NnM`W1Z1TjNe0]NQN`W1Y1UjNb0]NUN_W1X1WjN=\\\\N[N_W1V1XjN;[N_N^W1U1YjN7\\\\NdN\\\\W1T1YjN4]NhN]W1T1UjN0aNlN\\\\W1S1TjNJeNROZW1S1SjN@kN]OUW1Q1VjNTOlNKPW1o0ZjNmNiN4QW1l0YjNiNiN;RW1h0[jNdNfNc0VW1d0PlN\\\\OXT1<hkND\\\\T18dkNG_T18`kNHaT17_kNIcT15]kNJeT16ZkNIhT16XkNJjT15UkNJmT15SkNKoT13QkNMQU11ojNNTU11kjNNXU10hjNOZU10fjNO]U1OcjN1_U1LbjN3bU1I_jN7cU1E_jN:dU1C]jN=eU1@\\\\jN?fU1^O\\\\jNb0aZ100O100O10000O100O2O0O1O2O00001O001N1000001N100000001N10001O00000LX`NOkYf^1\"}}, {\"image_id\": 93, \"category_id\": 1, \"bbox\": [542.0, 913.0, 112.0, 133.0], \"score\": 0.7587939500808716, \"association_id\": 4, \"light\": [-1.4438802003860474, -2.4256060123443604, 1.4199044704437256, 2.3360772132873535], \"segmentation\": {\"size\": [1536, 2048], \"counts\": 
\"kn]i07e_1:YNJ\\\\bNH8i0V]1FYbNM5c0^]1[1N3J6K6I6L3N4L7I3O2M7I1O1O010O0001O0001OYOQdN\\\\MQ\\\\1a2RdN^Mo[1`2TdN^Mm[1P2kcNmM<2k[1o1ddNoM^[1P2bdNPN`[1n1`dNRNa[1n1^dNRNc[1m1]dNRNe[1m1[dNSNf[1l1ZdNTNg[1j1ZdNUNh[1j1XdNVNk[1i1SdNWNP\\\\1g1ocNXNW\\\\1e1fcN[N^\\\\1c1acN]N_\\\\1c1acN\\\\N_\\\\1e1bcNYN^\\\\1i1bcNUNBLc\\\\1P2ncNPN_O1`\\\\1S2YdNnMc[1V2[dNkM^[1^2`dNbM^[1e2]dN\\\\Mb[1e2]dN[Mc[1e2]dN\\\\Mb[1c2_dN]M`[1d2`dN\\\\M`[1e2`dNZM_[1i2_dNWM`[1k2_dNUM`[1n2^dNRMb[1P3\\\\dNPMd[1R3ZdNnLf[1T3XdNlLg[1W3XdNhLh[1Y3XdNfLh[1[3WdNeLi[1]3UdNcLk[1^3TdNcLk[1]3UdNcLl[1]3TdNbLl[1^3TdNbLm[1^3RdNbLn[1^3RdNbLo[1^3PdNbLQ\\\\1^3ocNaLR\\\\1^3ncNbLS\\\\1^3lcNbLU\\\\1]3lcNbLV\\\\1\\\\3jcNdLW\\\\1[3icNeLX\\\\1Z37010001N2N2N1O2N101N101N1O9XMhbNV2g]1J4L2N101N5K2N1O1O2N1O2N2N1N2N3M3M3M3L<BYPZQ2\"}}, {\"image_id\": 93, \"category_id\": 1, \"bbox\": [1105.0, 905.0, 64.0, 138.0], \"score\": 0.9912539720535278, \"association_id\": 5, \"light\": [-2.3504486083984375, -1.432337999343872, 2.284728527069092, 1.3832335472106934], \"segmentation\": {\"size\": [1536, 2048], \"counts\": \"W`jc12n_11O00001K4O100Z`NHc_1723N2N1O0O101N1N2_NOScN2h\\\\1e1OK21O3UOj0OO4O100O1N2O2O01TdNVMbZ1i2^eNXMbZ1g2_eNYMaZ1f2`eN[M_Z1e2`eN\\\\M`Z1d2`eN\\\\M`Z1e2_eN[MaZ1d2_eN]MaZ1c2_eN]MaZ1c2_eN^M`Z1b2`eN^M`Z1b2_eN_MaZ1a2_eN_MbZ1`2^eN`MbZ1_2_eNbMaZ1[2aeNeM`Z1W2ceNiM_Z1T2aeNmMbZ1o1_eNQNcZ1n0adNROP1ObZ1g0fdNSOl06`Z1`0bfN@`Y1=afND`Y17cfNI_Y13cfNM`Y1McfN3f\\\\1O000O101O001N2Oc_UY1\"}}, {\"image_id\": 94, \"category_id\": 1, \"bbox\": [75.0, 52.0, 989.0, 775.0], \"score\": 0.9999950528144836, \"association_id\": 1, \"light\": [-1.555666446685791, -1.5569946765899658, 1.4576420783996582, 1.5483322143554688], \"segmentation\": {\"size\": [855, 1280], \"counts\": 
\"jZo12dj02M2O2N2O0O2O0000001O0000001O000O101O0000001O0000000000001O000000000000001O000000001O0000001O0000001O00000000001O000000000000001O000000000000001O0000000000000000001O00000O10000000000000001O00000000000000001O000000000000001O0000000000000000001N10000000000000001O0O100000001O0000001O00001O0O10001O000000001O000000001O0000001O0000001O000000001O0O10000000001O0000000000001O000000001O00000000001O00000000000000001O000000000000001O000000001O000000001O000000001O000000000000001N100000001O000000001O00001O00001O000000001O0O10000000000000001O0000000000001O000000001O000000001O000000000000001O0O100000000000001O000O1000001O0000000O1000001O000O10000000000000000O2O0000000000001N100000001O000O100000000O1000001O0000000000000O100000001O00000000001O00000O100000001O000000000000000000000000000O2O0000000000000000000000001O00000000000000001O000000000000000000001O000000000000000000000000000000001O00000O100000000000000000001O00000000000000001O00000000000000000000000000000000001O000000000000000WFYMbKg2V4bMiK^2n3lMQLT2k3QNTLo1k3SNTLm1k3UNTLk1k3WNTLi1k3YNTLg1l3YNTLg1l3YNTLg1l3ZNSLf1m3ZNSLf1m3ZNSLf1m3ZNSLf1n3ZNQLg1n3YNRLg1n3YNRLg1n3ZNQLf1o3ZNRLe1n3[NRLe1o3ZNQLf1o3[NPLe1P4[NPLe1P4[NPLe1P4\\\\NoKd1R4[NnKe1R4[NnKe1R4[NnKe1R4\\\\NmKd1T4[NlKf1T4ZNkKf1V4YNjKg1X4XNgKh1Z4XNeKh1\\\\4WNdKi1]4WNbKi1_4WNaKh1`4XN_Kh1b4XN]Kh1c4YN\\\\Kg1e4ZNYKf1h4ZNXKe1h4]NVKc1k4]NTKc1l4^NSKb1n4^NRKb1m4^NSKb1n4^NRKa1n4`NQK`1o4`NSK^1n4bNRK]1n4dNVKW1j4iNXKU1h4lN]Kn0d4RO\\\\Km0d4TO\\\\Kk0d4UO]Kj0c4WO^Kg0c4YO]Kf0c4[O]Kd0c4]OiIV2W6lMhHS3Y7mLcHV3]7kL^HY3b7iLZHY3f7iLUHZ3k7gLRH[3o7fLmG\\\\3S8gLhG[3X8gLeGZ3Z8jLaGX3_8kL\\\\GW3d8lLWGV3i8nLPGU3o8PMjFS3V9RMbFQ3^9VMWFn2h9]MkEf2U:fMZE_2f:lMkDX2T;SNYDV2g;TN]B^3c=k4O10000O10001N2O0O2N1O2N1O2M2O2M3N1N3M3N1O2N2N2N2N2N2N2N1O2N1O1O1N2N3M2N2M3L4L4L5K4N2M3M3N2N3N1N2O1N2O0O2N2O1N2M3N1O2M3M3M3N1O2N2O1N2O1O001O00001N100O2O0O1O2M2N3M2O2N1O2N2N101N101O0O101N1N2O1M3N2M2N3L4L300000010O10O1000O101O0O10O01N2O1M2O2N1O1O100O01000001O01O1O001O000O1O1N1O1UOl0L4N2N2N3N1O2O1N200O1001O00001O001N101N1O2N101N101
O1O001O10O01O10000000000O100000O1000000000O1000000000000O1O100O1O1O1O1O1N2O1N2N2O1N2O1O1O1O1O1O100O10000O10000O101O0O10000O10000O10000O101N1O2N2N2N2M3M3N2M3M4L4M3M3M2N3M2N2N2O1N2O2M2O1O2M2O2M3N3L3N3L3N2M3N2M3M2O1N2N2N2N3M2N2M4M3M3M5K6J5K6J6K5J3M3N1N3N1N2O1N2O1O1O001O1O2N1O1O2N1N3N2N2N2N2N1O2N1O1O1O1O001O001O001O1O001O1O001O1N2O001O1O1O1O1O1O1O2N1O1N3N1O1O1O001N2O1O001O001N101O001N1O1O1O1O1O1O1N2O1N2N2O1O1O1O1O1O1O1O100O2N1O2N1O2N2N2M2N3M3M3K5J7J5ZIR@^2R`0_MZ@S2l?iMW@T2l?iMW@S2m?kMU@R2o?jMT@S2o?kMS@R2P`0lMR@Q2Q`0lMR@Q2Q`0lMS@P2Q`0mMQ@P2R`0mMQ@o1T`0mMP@o1T`0lMP@P2U`0lMo_Oo1V`0lMo_Oo1V`0kMP@o1W`0jMo_Ol1^`0kMh_Ol1g`0iM__Ol1Pa0iMY_Ok1Ta0jMW_Ol1Qe0F;G9F=D;E<DSmc5\"}}, {\"image_id\": 95, \"category_id\": 1, \"bbox\": [1654.0, 833.0, 168.0, 397.0], \"score\": 0.9999997019767761, \"association_id\": 5, \"light\": [-2.3788399696350098, -2.1802754402160645, 2.2940850257873535, 2.0555713176727295], \"segmentation\": {\"size\": [1536, 2048], \"counts\": \"Poa]2>]_1:B;E;E:ROZNXcNo1Z\\\\1R1J7L3M4L5M3L5L4L3L6jeNYLiW1o3hgN^LRX1i3agNbLZX1f5L4L2N3M1000000O1O2N1N2N2N2N2O2M2O2L4J6K6K5J6J6K5K5K6K5K5L3N2N:dkNjEZR1Z;blNgD]S1[;alNeD_S1\\\\;`lNeD_S1[;alNfD^S1Z;blNfD]S1[;clNeD]S1[;blNgD]S1Y;clNgD]S1Y;blNiD]S1W;clNiD]S1V;dlNkD[S1T;elNmD[S1Q;flNPEZS1l:ilNVEVS1d:olN]EQS1[:WmNeEiR1T:^mNlEbR1n9dmNSF[R1j9hmNVFXR1i9imNWFXR1g9hmN[FWR1e9imN[FWR1d9jmN\\\\FVR1d9jmN\\\\FVR1e9imN\\\\FVR1e9imN[FWR1f9hmNZFXR1f9hmNZFYR1f9fmNZF[R1g9cmNYF^R1g9amNZF`R1e9_mN[FdR1d9ZmN\\\\FiR1b9VmN^FnR1_9QmNaFSS1[9nlNdFUS1Z9jlNfFYS1W9glNiF]S1S9clNmFaS1o8_lNQGdS1l8\\\\lNTGfS1j8ZlNVGgS1i8ZlNVGhS1h8XlNXGjS1e8WlN[GlS1Q7ZkN_Il0@mS1d4SkN^Lj0PO;NjS1^4PlN[K?OK8gS1Z4WnNWKVN?dS1V4RPOjKPP1m3XPORLjo0g3[POYLho0`3\\\\PO`Lfo0Z3^POfLco0U3aPOkL`o0l2hPOTMYo0b2QQO]MQo0\\\\2TQOdMmn0V2XQOjMkn0g1aQOZNan0[1gQOeN[n0T1jQOkNZn0i0PROVOSn0>VROBRn0MWRO3kV10001O01O000001O0000001O000000000000000000001O0000000000001O0000001O000000000000001O000O2O0001O01O00001M3Ogia:\"}}, {\"image_id\": 95, \"category_id\": 1, \"bbox\": [711.0, 769.0, 177.0, 539.0], \"score\": 
0.9999883770942688, \"association_id\": 4, \"light\": [-1.2882165908813477, -3.7673351764678955, 1.2220697402954102, 3.730821371078491], \"segmentation\": {\"size\": [1536, 2048], \"counts\": \"]^[Q14h_1:G:E8H6K5K5K4K6H8H9I7K5J5H8E<D=F;F9H8I7H;Dc0\\\\iNUJQR1b6UmNkIcR1^6QmNlIiR1Z6QmNlIjR1X6RmNlIkR1W6QmNlIlR1Y6PmNjIkR1^6nlNgIlR1d6ilN`ISS1i6elN[IX1[N[o0`8VoNYI\\\\1ZN[o0b8SoNWIV1dNeo0Y8onNWI[1`Neo0_8enNWIe1\\\\Ndo0R;\\\\POnDco0S;\\\\POnDco0S;]POnDao0S;_POmD`o0S;aPOmD_o0R;aPOoD^o0Q;cPOPEZo0P;hPOPEWo0o:kPOQETo0n:nPOSEPo0l:RQOTEln0n:TQOREkn0n:VQOREhn0P;XQOQEen0Q;[QOoDdn0Q;]QOoDjm0WO`POl;f1mDfm0CZPOb;P2kDdm0P<\\\\ROPDam0S<_ROmC_m0V<`ROjC]m0Y<cROgCZm0]<eROcCYm0_<gRObCVm0a<iRO_CVm0e<gRO[CXm0l<bROTC]m0U=[ROkBdm0`>SQO_Aln0e>QQO[Aon0f>QQOYAnn0i>QQOXAnn0h>RQOXAnn0i>QQOWAon0i>RQOVAnn0j>RQOVAnn0k>QQOUAon0k>QQOVAnn0j>RQOVAnn0j>RQOVAnn0j>RQOVAnn0j>RQOWAmn0i>SQOWAmn0i>SQOXAln0h>TQOXAln0h>SQOYAmn0g>SQOYAmn0g>RQOZAnn0f>RQOZAnn0f>RQO[Amn0e>RQO\\\\Ann0d>RQO\\\\Ann0d>QQO]Aon0c>QQO]Aon0b>QQO_Aon0a>QQO_Aon0a>QQO_Aon0a>QQO_Aon0a>QQO_Aon0`>QQOaAon0^>RQObAnn0]>SQOcAmn0\\\\>TQOdAln0[>UQOeAkn0Z>VQOfAjn0W>YQOiAgn0S>]QOmAcn0o=aQOQB`n0i=eQOWB[n0e=iQO[BWn0b=lQO^BTn0`=nQO`BRn0_=oQOaBQn0]=RRObBnm0\\\\=TROdBlm0Z=VROfBkm0W=WROiBim0U=YROkBgm0S=[ROmBfm0P=]ROoBem0m<]ROSCdm0k<]ROUCem0h<\\\\ROXCfm0e<\\\\ROZCgm0b<ZRO^Cgm0a<YRO_Cim0_<WROaClm0\\\\<TROdCnm0Z<RROfCRn0U<oQOkCVn0P<kQOnCZn0n;fQORD^n0j;bQOVDbn0f;_QOYDen0c;[QO]Dhn0`;XQO`Djn0];WQObDln0\\\\;TQOdDnn0j8foNVH]1oNnn0g8koNWHW1QOPo0d8QPOUHP1WOPo0_8[POSHe0^ORo0X8fPOQH:FQo0S8oPORH0KSo0m7UQOSHJORo0j7YQOTHF1To0f7dQOlG[O=So0a7YTO_Hhk0T7dTOlH^k0j5kUOUJVj0Y5\\\\VOfJei0R5cVOmJ^i0o4fVOPK\\\\i0j4hVOUK[i0c4lVO\\\\KWi0]4mVOcKVi0V4oVOiKSi0S4oVOmKRi0P4QWOoKQi0m3QWOSLQi0i3QWOWLQi0e3RWOZLQi0a3QWO_LSi0Z3PWOfLTi0T3nVOlLUi0P3mVOnLVi0o2kVOQMYi0i2iVOWM^i0`2dVO`MVj0d1lUO\\\\Nlj0j0WUOUOZk0IWUO7YU1000001N10000O2O1NbgZf1\"}}, {\"image_id\": 95, \"category_id\": 1, \"bbox\": [899.0, 808.0, 178.0, 456.0], \"score\": 0.9999900460243225, \"association_id\": 3, \"light\": 
[-2.6913609504699707, -2.234588861465454, 2.5306384563446045, 2.0599782466888428], \"segmentation\": {\"size\": [1536, 2048], \"counts\": \"Z^UZ13g_1d0\\\\O8I7L4K4M2M4K5M3M2O2N1N2N1M4M3N2N2L4K6K4M4M2O1N2L3N3N2N102akN`L[l0c3bSO^L]l0f3`SO[L]l0h3aSOYL\\\\l0l3bSOTL\\\\l0o3cSOQL[l0R4dSOnK[l0T4dSOlK[l0W4cSOjK\\\\l0X4aSOiK^l0[4_SOeK`l0_4]SOaKcl0b4ZSO^Kfl0d4XSO\\\\Kgl0i4USOWKkl0o4QPO[KbNF]Q1i5SoNgJ^O@^Q1T6hnN`JH\\\\O`Q1Z6`nN]JNZOaQ1_6[nNYJ3XO`Q1f6UnNVJ:TO_Q1o6mmNQJc0PO]Q1Z7bmNnIn0iN^Q1a7ZmNkIV1eN^Q1h7RmNiI]1`NaQ1S8dlNaIh1]NcQ1U:[nNkEdQ1X:YnNiEgQ1X:WnNiEhQ1Y:VnNhEjQ1Y:TnNhEkQ1[:RnNfEkQ1^:TnNbEhQ1e:UnN\\\\E]Q1U;_nNkD[Q1c;]nN]D`Q1n;VnNTDgQ1U<SnNkCjQ1]<QnNdCkQ1b<RnN^ClQ1f<RnNZClQ1i<SnNdB0a0lQ1m<UnNSCjQ1o<UnNQCkQ1o<UnNQCkQ1o<UnNQCkQ1o<UnNQCkQ1o<UnNRCjQ1b=0000000001O000000XOWnNYCiQ1g<WnNYCiQ1f<XnNZChQ1e<YnN[CgQ1d<YnN]CgQ1b<ZnN^CfQ1c<YnN^CfQ1b<ZnN^CfQ1b<ZnN^CfQ1b<ZnN^CfQ1c<YnN]CgQ1b<ZnN^CfQ1a<[nN_CeQ1^<^nNbCbQ1\\\\<`nNdC`Q1Z<bnNfC^Q1W<enNiC[Q1U<gnNkCYQ1Q<knNPDTQ1g;UoNYDkP1];_oNcDaP1V;foNjDZP1Q;koNoDUP1n:noNRESP1k:ooNUERP1h:PPOXEQP1c:SPO]Eoo0^:TPObEno0Y:UPOgEoo0S:SPOmEoo0P:RPOPFQP1l9QPOSFTP1h9loNXF]P1^9doNbFdP1V9]oNiFfP1S9[oNmFgP1P9ZoNPGhP1m8YoNSGiP1i8ZoNVGiP1d8ZoN\\\\GhP1_8[oNaGhP1Z8[oNeGgP1X8ZoNhGgP1^6alNaJi2QOgP1V6QmN]J[2\\\\OeP1m4enNXKi0KbP1W4loNZKE?`P1n3^SORLcl0j3aSOUL`l0i3aSOVLbl0f3aSOYLal0d3`SO\\\\Ldl0^3^SObLdl0[3^SOdLdl0X3^SOhLdl0V3\\\\SOjLel0U3\\\\SOjLgl0S3YSOmLjl0P3VSOPMnl0l2SSOSMol0j2RSOUMQm0i2oROWMTm0d2oRO[MTm0a2mRO_MXm0[2iROeM_m0R2bROnMcm0l1_ROSNgm0f1ZROZNmm0^1TRObNRn0W1PROgNWn0Q1kQOoNZn0j0hQOVO^n0b0dQO^Obn05fQOJXW100O10001O0000000O10001O000O10001MkX_]1\"}}, {\"image_id\": 95, \"category_id\": 1, \"bbox\": [140.0, 850.0, 170.0, 471.0], \"score\": 0.999999463558197, \"association_id\": 2, \"light\": [-2.857588768005371, -1.8944783210754395, 2.765239715576172, 1.7801560163497925], \"segmentation\": {\"size\": [1536, 2048], \"counts\": 
\"lXc62n_100001O00000000000000000000001N100hE6ZTOJ`k0e0VTO]Obk0m0[TOSObk0R1\\\\TOnNak0W1\\\\TOjNbk0Y1\\\\TOiNak0Z1]TOgN`k0]1^TOeN^k0_1aTOaN]k0b1aTO`N\\\\k0c1cTO]N]k0e1`TO]N^k0e1aTO[N_k0g1^TO[N`k0g1_TOYNak0h1^TOYN`k0i1^TOXNak0j1^TOVN`k0l1`TOTN^k0o1`TOSN]k0P2bTOPN[k0S2eTOmMZk0U2eTOkMZk0X2cTOiM]k0Y2SnNSM\\\\4d0`m0]2VmNoMT5Fem0c2elNTNc5YOgm0e4WRO[Kim0g4TROZKlm0i4QROWKom0m4lQOTKTn0o4iQOQKVn0S5fQOnJZn0T5dQOlJ\\\\n0W5aQOiJ^n0\\\\5]QOeJcn0`5WQOaJhn0e5SQO[Jmn0j5mPOWJRo0m5jPOTJTo0R6hPOnIVo0Z6dPOfIZo0^7doNbHYP1Q8VoNPHhP1Z8PoNfGnP1a8mnN_GQQ1h8jnNXGSQ1R9fnNnFUP1c;_nN]D]Q1P<[nNoCbQ1V<\\\\nNjCbQ1Y<]nNfCaQ1^=N2O1N2O100O10O10O10000000O10000000000O100000000O10000000001O00000O2O1O1O1O1UO\\\\nNXCfQ1f<ZnNZChQ1d<XnN\\\\CjQ1b<WnN]ClQ1`<TnN`CnQ1]<SnNcCoQ1[<RnNdCPR1Y<QnNgC[R1l;fmNTDPS1U;RmNjDPS1R;RmNnDoR1o:TmNoDnR1n:VmNPEkR1m:WmNSEjR1j:YmNUEhR1g:[mNYEeR1d:_mN[EbR1a:bmN^E^R1_:fmN`E[R1]:gmNcEYR1Z:kmNdEVR1_7gmNfI7kNSR1Q7jnNPIANeQ1m6YQOSIgn0j6]QOUIdn0f6`QOZI`n0b6dQO^I]n0\\\\6iQOcIWn0X6nQOhISn0S6QROmIPn0P6RROPJnm0m5VRORJkm0Q3jlNmN`5RNgm0h2bmN]NP5iN`m0e2mmNSNi4XO[m0a2UnNlMe4BWm0b2]UO]Mdj0a2^UO]Mdj0b2]UO]Mcj0b2_9O001O001O001O1O1O1O2N3M2M3N2N3L5L5J4L4K6Ia0VOiQ^a2\"}}, {\"image_id\": 95, \"category_id\": 1, \"bbox\": [411.0, 836.0, 158.0, 409.0], \"score\": 0.9999992847442627, \"association_id\": 1, \"light\": [-1.891375184059143, -2.516927719116211, 1.7533477544784546, 2.436117172241211], \"segmentation\": {\"size\": [1536, 2048], \"counts\": 
\"Y]Yc0c0R_1=K5L4M2N2O1M4M2N3L3M4M2N3RjNoMhm0S2WROnMhm0T2UROnMim0U2UROkMkm0X2RROiMlm0]2nQOdMRn0g2cQOYM]n0k3alNbLU1DYR1P4YlNaL\\\\1_O[R1S4SlNcL_1[O]R1V4nkNbLe1XO]R1Z4fkNcLl1TO]R1_4]kNaLU2QO^R1d4QkNaLa2kN^R1g6amNYI^R1h6bmNXI]R1i6cmNWI\\\\R1k6bmNVI\\\\R1l6dmNUIYR1m6gmNSIWR1P7hmNPIWR1Q7imNoHVR1S7imNmHWR1T7hmNlHWR1V7hmNjHXR1W7fmNjHYR1X7fmNhHZR1Y7emNgH[R1[7cmNeH]R1^7`mNcH^R1a7_mN_HaR1l7ZkNmH7WO_T1i9WkNWFhT1m9UkNSFkT1n9TkNRFjT1Q:UkNoEjT1S:UkNmEhT1V:XkNjEfT1Y:YkNgEfT1Z:ZkNfEeT1k:00OB\\\\kNbEdT1^:\\\\kNbEdT1^:\\\\kNbEcT1_:]kNaEcT1_:]kNaEbT1`:^kN`EbT1`:^kN`EaT1a:_kN_EaT1a:_kN_E`T1b:`kN^E_T1c:akN]E^T1d:bkN\\\\E[T1g:ekNYEYT1i:gkNWEXT1Y;N3O0N2O1O1N2M3O1N2O1O2OYO[lNSEdS1l:^lNTEaS1k:alNUE]S1l:dlNTE[S1l:elNUEYS1l:hlNTEUS1n:llNRESS1n:nlNREQS1n:QmNQEnR1P;RmNPEnR1o:SmNQElR1o:UmNQEkR1n:VmNREjR1m:WmNSEiR1m:WmNSEhR1m:YmNSEgR1m:YmNSEgR1m:YmNSEgR1l:ZmNTEfR1l:ZmNTEfR1l:ZmNTEfR1l:ZmNTEfR1l:ZmNTEfR1l:ZmNTEfR1l:ZmNUEeR1l:ZmNTEfR1k:[mNUEeR1j:]mNUEcR1j:^mNVEbR1i:_mNWEaR1g:amNYE_R1f:bmNZE^R1e:cmN[E]R1c:emN]E[R1a:gmN_EYR1\\\\:mmNcESR1U:UnNkEkQ1d9gnN[FYQ1`9lnN`FTQ1Z9RoNeFoP1U9XoNjFhP1P9_oNoFaP1i8goNWGYP1b8ooN]GQP1^8TPObGlo0U8]POkGco0c7oPO]HQo0R7`QOnH`n0h6kQOWIUn0f6nQOYISn0e6oQO[IQn0c6QRO]Iom0a6SRO_Imm0`6TRO`Ilm0]6WROcIim0[6ZROcIgm0[6[ROeIfm0Y6[ROgIem0X6\\\\ROhIem0V6\\\\ROjIdm0U6]ROkIdm0n0klNe3a5\\\\Kfm0e0XmNh3T5bKem0b0`mNe3l4hKhm0=cmNe3g4nKmm02dmNj3a4SLio0f3ZPOZLio0`3\\\\PO]Lgo0_3`6K5K6E<D=_O?D=Dd0ZObbYU2\"}}, {\"image_id\": 95, \"category_id\": 1, \"bbox\": [1078.0, 901.0, 153.0, 200.0], \"score\": 0.7727476954460144, \"association_id\": 6, \"light\": [-1.8842180967330933, -1.8429027795791626, 1.9086542129516602, 1.715842843055725], \"segmentation\": {\"size\": [1536, 2048], \"counts\": 
\"mQbb1:e_12O1O0000000O101O000000000000000000000O101O000000001O000O100000000000000O100000000000000O1000000000000000000000000000000000001O000000000000000000000000O1000000000aL^O`gNb0YX1JbfNIaN=jZ1N`fN0_N2nZ13WfN:fNCQ[16ieNh0TOROP[1:eeNj0[OlNmZ1?`eNk0CfNkZ1d0]eNh0HdNiZ1i0YeNg0M`NgZ1]3YeNcLeZ1_3[eNaLbZ1b3^eN^L`Z1d3`eN\\\\L^Z1g3aeNYL]Z1k3aeNUL]Z1n3beNRL\\\\Z1Q4ceNoK[Z1S4eeNmKYZ1U4geNkKVZ1Y4ieNgKTZ1]4keNcKSZ1_4meNaKPZ1b4PfN^KoY1c4QfN\\\\KoY1e4QfN[KmY1g4SfNYKlY1h4TfNXKkY1i4UfNWKjY1j4VfNVKjY1j4VfNVKiY1k4WfNUKiY1k4WfNUKhY1k4YfNUKgY1k4YfNUKgY1j4ZfNVKfY1i4[fNVKfY1i4[fNWKeY1g4]fNYKcY1d4`fN\\\\K`Y1S4[fN_K;>ZY1h3\\\\gNXLdX1f3^gNZLbX1e3_gN[LaX1e3_gN[LbX1c3_gN]LaX1b3`gN^L`X1a3agN^LaX1_3agNaL`X1\\\\3bgNdL_X1Y3cgNgL^X1U3egNkL]X1Q3egNoL\\\\X1m2ggNSM\\\\X1f2hgNZM[X1`2hgN`MaX1S2dgNlMdX1h1`gNXNgX1\\\\1^gNdNlX1k0[gNUOnX1?UgNATY1NVgNO^\\\\10001O001O000000002E[`N5imYV1\"}}, {\"image_id\": 96, \"category_id\": 1, \"bbox\": [79.0, 286.0, 108.0, 72.0], \"score\": 1.0, \"association_id\": 1, \"light\": [-2.6792237758636475, -0.958175778388977, 2.5270631313323975, 0.7743752002716064], \"segmentation\": {\"size\": [400, 431], \"counts\": \"PUo0o0`;3M3L6K3L3N3M2N2N3N0O2N1O2O0O2N101O00001O0O2O000O1000000000000000O100000001N100001OO2O00000000000000O10OfEnMX:T21MgEoMY:P2gEQNZ:n1fERNZ:Q21O10001O001N101O1N2O1O1N3N1EWEmNk:n0UETOm:W11O2\\\\OPECS;9mDIT;4lDN[;?4MO00000101N1O1O1O100O1O1O100O1O1O2N1O1O1O1O100O1O100O1O2OSPo2\"}}, {\"image_id\": 96, \"category_id\": 1, \"bbox\": [303.0, 87.0, 46.0, 41.0], \"score\": 0.9999138116836548, \"association_id\": 8, \"light\": [-2.440868377685547, -1.4156724214553833, 2.3889338970184326, 1.300392508506775], \"segmentation\": {\"size\": [400, 431], \"counts\": \"e^f35X<:H4L5K3N1N2O1N2O001N1O1O1N2O20O001O001O0O2O1O0O2O1O001N2O2N102M3M3BUD1U<00000000O10001O0O1O2NYmo0\"}}, {\"image_id\": 96, \"category_id\": 1, \"bbox\": [267.0, 137.0, 59.0, 38.0], \"score\": 0.9972447156906128, \"association_id\": 6, \"light\": [-2.672443389892578, -1.0642905235290527, 
2.5745749473571777, 0.7776073217391968], \"segmentation\": {\"size\": [400, 431], \"counts\": \"P^X37U<9J3M4M3L3N3M1O2N001O1O001O0000000000000000000000000000000000000001O001O001O001O1JbDWO`;l01O1O3M>B00000O100O10000O1O101N2N]kX1\"}}, {\"image_id\": 96, \"category_id\": 1, \"bbox\": [129.0, 161.0, 88.0, 54.0], \"score\": 0.9999997019767761, \"association_id\": 4, \"light\": [-2.830810070037842, -0.7733525633811951, 2.7781567573547363, 0.6347252726554871], \"segmentation\": {\"size\": [400, 431], \"counts\": \"cbb11m;1TDe0`;:K5M2N2N2N2N1O2N2O00001O0O10000000001OO1001O0000000000000001O00000000000000000O1000000O10000O101N1OQEcNn:_1001JQEiNP;Z12O1GmDSOT;k0lDUOV;h0kDYOW;b0kD_OV;>jDDX;7eDNm;200000O100O1O10000O10001N1O100O1O100O10000O1O2O0Oo\\\\c2\"}}, {\"image_id\": 96, \"category_id\": 1, \"bbox\": [8.0, 199.0, 107.0, 63.0], \"score\": 0.9999991655349731, \"association_id\": 2, \"light\": [-2.5032382011413574, -0.6484165787696838, 2.3141820430755615, 0.3579447865486145], \"segmentation\": {\"size\": [400, 431], \"counts\": \"fZ3S1\\\\;2N3L3O1N3M2O0O2O0O2O0O101O00000O101O0O2O0000000000000O100000000M3003M1OO100000000O10000O10000000O10000O2O0001O10O0O2O0L[E]Ne:f12KYE_Nh:_1XEbNh:c10O2O1O2M3N1O2M2GkDUOV;g0lDYOW;`0lDAV;:mDGc;:N3O0100O10000O100O100O1O100O10000O100O100O100O100N200O1O1O3MhVk3\"}}, {\"image_id\": 96, \"category_id\": 1, \"bbox\": [158.0, 113.0, 58.0, 37.0], \"score\": 0.9999814629554749, \"association_id\": 5, \"light\": [-2.5542402267456055, -1.4405854940414429, 2.382253646850586, 1.132584571838379], \"segmentation\": {\"size\": [400, 431], \"counts\": \"hjm1164n;>H3M7J1O1O0O101O00000O1O1000000O1000000O100000000000000001O00001O002N2N1N2O1N3M4LUD_Ok;b09G000O1000000O100O100O101N1O3MTkc2\"}}, {\"image_id\": 96, \"category_id\": 1, \"bbox\": [185.0, 55.0, 40.0, 27.0], \"score\": 0.9981728792190552, \"association_id\": 3, \"light\": [-2.1285219192504883, -1.3471519947052002, 2.0332045555114746, 1.1181325912475586], \"segmentation\": {\"size\": [400, 431], \"counts\": 
\"cZX25U<:K3M2N2N1O10000O100000001O00000001O0O1010O10O1O001O1O1O1GUDKX<00O100000001N100O10c\\\\`2\"}}, {\"image_id\": 96, \"category_id\": 1, \"bbox\": [352.0, 60.0, 35.0, 35.0], \"score\": 0.8931755423545837, \"association_id\": 7, \"light\": [-2.60404634475708, -1.2028782367706299, 2.4799039363861084, 0.9292681813240051], \"segmentation\": {\"size\": [400, 431], \"counts\": \"`bY44Y<7J3N2M3M4K4N2L3N2O100000000000000000000000O1001O0000XOiD7W;FnD8R;E[E1f:L\\\\E4`;01O0O2OUSa0\"}}, {\"image_id\": 97, \"category_id\": 1, \"bbox\": [0.0, 388.0, 688.0, 550.0], \"score\": 0.9999999403953552, \"association_id\": 2, \"light\": [-2.4804975986480713, -2.4740512371063232, 2.316201686859131, 2.265965223312378], \"segmentation\": {\"size\": [1617, 2048], \"counts\": \"ba0i5X\\\\1Z1PO>C6L4L4M2N3M2O2XhNlGaMX1QV1l6fkNaJkS1`5QlNfJlS1\\\\5RlNfJmS1Z5RlNhJlS1Y5TlNgJkS1Z5UlNfJjS1[5VlNeJjS1[5VlNeJiS1\\\\5WlNdJhS1]5XlNdJfS1]5ZlNcJdS1_5\\\\lNaJcS1`5]lN`JbS1a5^lN^J_S1f5alNZJ\\\\S1i5dlNWJYS1m5flNSJXS1o5hlNPJVS1S6jlNmIUS1T6llNkISS1V6mlNjISS1V6mlNiISS1X6mlNhISS1X6mlNhIRS1Y6nlNgIRS1Y6nlNgIQS1Z6olNeIRS1[6nlNeIQS1\\\\6olNdIQS1\\\\6olNdIQS1\\\\6olNdIPS1]6QmNbIoR1^6QmNbIoR1^6QmNbInR1_6RmNaInR1_6RmNaInR1_6RmN`InR1a6RmN_InR1a6RmN_ImR1b6SmN^ImR1b6TmN]IkR1d6UmN\\\\IkR1d6UmN\\\\IkR1d6UmN\\\\IjR1e6VmN[IiR1f6WmNZIiR1f6WmNZIhR1g6XmNYIhR1g6XmNYIgR1h6YmNXIfR1i6ZmNWIfR1i6ZmNWIeR1j6[mNVIeR1j6[mNVIdR1k6\\\\mNUIdR1k6\\\\mNUIcR1l6]mNTIcR1l6]mNTIbR1m6^mNSIbR1m6^mNSIaR1n6_mNRIaR1n6_mNRI`R1o6`mNQI_R1P7amNoH`R1Q7`mNoH_R1R7amNnH_R1R7amNmH_R1T7amNlH_R1T7amNlH^R1U7bmNjH_R1V7amNjH^R1W7bmNiH]R1X7bmNiH^R1W7bmNiH]R1X7cmNhH]R1X7cmNgH]R1Z7cmNfH\\\\R1[7cmNfH]R1Z7cmNfH\\\\R1[7dmNeH[R1\\\\7emNdH[R1\\\\7emNdHZR1]7fmNcHZR1]7fmNcHYR1^7fmNcHYR1^7gmNbHXR1_7hmNaHWR1`7imN`HWR1`7imN`HVR1a7jmN_HUR1b7kmN^HTR1c7kmN]HUR1d7kmN\\\\HSR1f7mmNZHQR1h7omNXHoQ1j7PnNWHnQ1k7RnNUHmQ1l7SnNTHkQ1n7TnNRHlQ1o7TnNQHkQ1P8UnNPHjQ1Q8UnNPHjQ1Q8VnNoGiQ1R8WnNnGhQ1S8XnNlGhQ1U8WnNlGhQ1U8XnNkGgQ1V8YnNjGfQ1W8YnNiGhQ1W8XnNiGgQ1X8YnNhGfQ1Y8ZnNgGeQ1Z8ZnNgGeQ1Z8[nNfGdQ1[
8[nNeGeQ1\\\\8ZnNeGeQ1\\\\8[nNdGdQ1]8[nNdGdQ1]8\\\\nNcGdQ1]8\\\\nNcGcQ1^8\\\\nNcGdQ1]8\\\\nNcGcQ1^8\\\\nNcGdQ1]8\\\\nNcGcQ1^8\\\\nNcGdQ1]8\\\\nNcGdQ1]8[nNcGeQ1^8[nNbGeQ1^8[nNbGeQ1^8ZnNcGeQ1^8[nNbGeQ1^8[nNbGeQ1^8[nNbGeQ1^8[nNbGdQ1_8[nNbGeQ1^8[nNaGfQ1_8ZnNaGfQ1_8YnNbGgQ1^8YnNbGfQ1_8YnNaGhQ1_8XnNaGhQ1_8XnN`GiQ1`8VnNaGjQ1_8VnNaGiQ1`8WnN`GiQ1`8WnN_GjQ1a8VnN_GjQ1a8UnN`GkQ1`8UnN`GjQ1a8VnN_GjQ1a8VnN_GjQ1a8VnN_GjQ1a8UnN`GkQ1`8UnN_GkQ1b8TnN_GlQ1a8TnN_GlQ1a8TnN^GmQ1b8RnN_GmQ1b8SnN]GnQ1b8SnN^GlQ1c8TnN]GlQ1c8SnN]GmQ1d8SnN\\\\GmQ1d8SnN\\\\GlQ1e8TnN[GlQ1e8TnN[GkQ1f8TnN[GlQ1e8TnN[GkQ1e8VnN[GjQ1e8VnNZGkQ1f8UnNZGjQ1g8UnNZGkQ1f8UnNZGkQ1f8UnNZGjQ1f8VnN[GjQ1e8VnNZGkQ1f8UnNZGkQ1f8TnN[GkQ1f8UnNYGlQ1g8TnNYGlQ1f8UnNZGkQ1f8UnNZGkQ1f8UnNYGlQ1g8TnNYGlQ1f8TnN[GlQ1e8TnN[GlQ1e8TnN[GkQ1e8VnNZGkQ1f8UnNZGkQ1e8VnN[GjQ1e8VnN[GjQ1d8WnN\\\\GiQ1c8XnN]GhQ1c8XnN\\\\GiQ1c8XnN]GhQ1b8YnN^GgQ1b8YnN^GgQ1a8ZnN_GfQ1`8[nN`GeQ1`8\\\\nN^GeQ1a8\\\\nN_GdQ1`8]nN`GcQ1`8]nN_GdQ1`8]nN`GcQ1`8]nN`GcQ1_8^nN`GcQ1`8^nN_GbQ1`8_nN`GaQ1_8`nN`GaQ1`8anN^G_Q1a8cnN^G]Q1a8enN^GZQ1a8inN]GXQ1`8lnN_GTQ1_8PoN_GPQ1^8VoN^GkP1_8\\\\oN]GdP1a8QPOkFPP1R9\\\\POdFeo0Z9`PObFao0[9ePObF[o0[9kPOaFVo0\\\\9oPObFQo0]9RQO`Fon0`9SQO]Fnn0b9ZQOUFhn0j9[QOdEgK6PS1W:bQOdEan0[:k400O10000O100O10O10O1O100O1O001O1O1O1N2O001N2O1O001O100O0010O01O010O1O010O00100O10O0100O00100O100O010O100O100O100O1O100O100O100O010O100O100O100O100O100O100O100O100O100O100O1O100O100O10000O100O10000O10000O10000O10000O10000O100O100O1O100O1N2O1O1O1O1O1O1O1O1O100O1O1O100O100O100O10000O100O100O10000O10000O10000O10000O1000000O10000O1000000O10000O100O10000O100O100O10000O100O1000000O10000O10000O1000000O1000000O100000000O10000000000O10000000000O10000000000O100000000000000O10000000000000000000000000000000000000000000000000000O10000000000000000000000O10000000000000000000000O100000000000000O1000000000000O100000000000000O1000000O10000O10000O2O0O100O100O1O1O1O1O1N2O1O1O1O1O1O1O1O1O2O0O100O100O1O100O2O0O1O100O100O2N100O1O100O2N100O1O1O101N1O1O1O1O2N1O1N2O2N1O1O1O1O2N1O100O101N100O1O2O0O1O1O2N1O1N2O1N3N
1N2O1O1N3N1O1N2O1O2N1N2O1O1O2N1O1O2N1N2O2M2N3M2N3M3M2M4M3L4M3M2N3L4L3M4M3M2N3N1O2M3N2M3M3M3M5C=E;I7HQUSS2\"}}, {\"image_id\": 97, \"category_id\": 1, \"bbox\": [160.0, 305.0, 1885.0, 1254.0], \"score\": 0.9999997019767761, \"association_id\": 1, \"light\": [-1.0363788604736328, -2.746638536453247, 0.9240784645080566, 2.611626386642456], \"segmentation\": {\"size\": [1617, 2048], \"counts\": \"P]m71]b15K5K4L5K4L5K5L3L5L4K5L3M4L4N2M3M3N2M3N2M3M4M2M4M2M4M2M3N2N2M3N2M3N2M3N2N2M3N1N2O1N2O2N1N2O2M2O2M2O2M2O2N2M3N2M3N2M3N2M4M3L6K5J6K5J6K6I6K4K4L4M3L4M3L4M3L3N3L3M4M2M6K5J6K5J5K6K4K5K3M4M2M3M3M3N2M3N2M3N2M3N2N2M3N2N3L3N1N2O2N1N3N1N3N1O2M3N2M3N2N2M3N2M4M2M4M3M2M3N2M4M2M3N1N3N2M3N1O2M3N1N3N1N2O2M2O1N2O2M101N2O0O2O1N1O2O0O2O0O2O001N10001N10001O001N10001O0O2O00001O0O2O001O0O2O00001N101O001N101O001N101O001N101O0O2O1O001N101O001N2O001N101O1O0O2O1O0O2O1O0O2O1O00001O0O2O00001O001O0O101O00001O0O2O00001O001O0O101O001O00001N101O00001O001N10001O001O001O0O101O001O00001N101O00001O001O0O101O0000000000000000001N10000000000000000000000O10001O000000000000000O100000000000001O000O1000000000000000000O100000001O0000000O10000O2O0O101N10001QQOg^Ocl0Za0[SOi^Odl0Wa0ZSOl^Odl0Va0XSOn^Ofl0Sa0XSOP_Ogl0Pa0WSOS_Ogl0o`0USOU_Ojl0k`0TSOX_Ojl0j`0RSOZ_Oll0g`0QSO]_Onl0c`0PSO`_Onl0b`0PSO`_Ool0``0QSOb_Oll0_`0SSOc_Okl0_`0SSOc_Oll0]`0SSOe_Okl0]`0SSOe_Okl0\\\\`0USOe_Ojl0[`0USOg_Oil0[`0USOg_Oil0Z`0WSOg_Ohl0Y`0WSOi_Ogl0X`0YSOi_Ofl0X`0XSOi_Ogl0X`0YSOi_Oel0X`0ZSOi_Ofl0X`0YSOi_Oel0X`0ZSOi_Oel0X`0[SOh_Oel0Y`0YSOi_Oel0X`0[SOh_Odl0Y`0[SOh_Oel0X`0[SOh_Odl0Z`0ZSOh_Odl0Y`0\\\\SOg_Odl0Y`0\\\\SOg_Ocl0Z`0\\\\SOh_Obl0Z`0]SOf_Ocl0Z`0\\\\SOg_Ocl0Z`0]SOf_Obl0[`0^SOf_O`l0\\\\`0^SOe_Obl0[`0^SOe_Oal0\\\\`0_SOd_O`l0]`0aSOc_O^l0^`0aSOb_O^l0_`0bSOa_O]l0``0cSO`_O]l0``0cSOa_O[l0a`0dSO__O[l0b`0eSO^_OZl0c`0gSO\\\\_OYl0d`0gSO]_OWl0d`0iSO\\\\_OVl0f`0iSOZ_OWl0f`0iSOZ_OVl0g`0jSOZ_OTl0g`0mSOX_OSl0h`0mSOX_ORl0i`0nSOW_OQl0k`0nSOV_OPl0k`0PTOU_Onk0m`0RTOS_Olk0o`0TTOR_Ohk0Qa0XTOo^Oek0Ta0\\\\TOk^Oak0Xa0_TOi^O^k0Za0aTOf^O\\\\k0]a0dTOc^Oci0jM]WOfc0POa^Oai0o
MWWOcc0XO^^O`i0VNoVO_c0A[^O_i0\\\\NiVO[c0HZ^O]i0bNbVOWc02V^O[i0kNYVOSc0;R^O[i0\\\\c0eVOe\\\\OYi0\\\\c0gVOd\\\\OXi0]c0hVOc\\\\OWi0^c0iVOc\\\\OUi0^c0kVOb\\\\OTi0_c0lVOb\\\\ORi0`c0mVO`\\\\ORi0ac0oVO^\\\\OPi0cc0PWO^\\\\Onh0cc0RWO]\\\\Olh0ec0TWO\\\\\\\\Ojh0ec0VWO[\\\\Oih0fc0WWO[\\\\OPg0dMPZOTf0oNX\\\\OPg0iMlYOPf0TOX\\\\Oof0lMhYOme0YOW\\\\Onf0QNcYOje0_OV\\\\Olf0TNaYOge0DT\\\\Okf0YN[YOee0JS\\\\Oif0\\\\NXYOde0NP\\\\Ojf0_NTYObe02P\\\\Ohf0bNQYO`e07n[Ogf0eNnXO_e0;m[Off0ae0ZYO`ZOdf0ae0\\\\YO_ZOdf0ae0\\\\YO`ZObf0be0]YO^ZOcf0be0]YO_ZOaf0be0_YO^ZOaf0be0`YO^ZO^f0ce0bYO]ZO^f0ce0bYO^ZO\\\\f0de0cYO]ZO\\\\f0ce0dYO]ZO\\\\f0ce0dYO^ZOZf0ce0fYO^ZOYf0be0gYO^ZOXf0ce0hYO^ZOWf0be0iYO_ZOVf0be0iYO_ZOUf0be0kYO^ZOUf0be0kYO_ZOTf0ae0lYO`ZORf0ae0oYO_ZOPf0ae0PZO`ZOne0be0QZO_ZOne0ae0RZO`ZOme0`e0SZOaZOke0ae0TZO`ZOke0`e0UZOaZOie0ae0VZO`ZOie0`e0WZOaZOge0`e0YZObZOee0_e0ZZObZOde0_e0\\\\ZObZOce0_e0\\\\ZObZObe0_e0^ZObZOae0^e0`ZObZO^e0`e0aZOaZO^e0_e0bZObZO\\\\e0`e0cZOaZO[e0`e0eZO`ZO[e0ae0dZO`ZOZe0ae0fZO`ZOXe0be0gZO_ZOXe0ae0hZO`ZOVe0be0iZO_ZOUe0be0kZO_ZOSe0ce0lZO]ZOTe0ce0mZO]ZOQe0ee0nZO\\\\ZOPe0fe0oZO[ZOod0fe0Q[O[ZOmd0ge0R[OYZOmd0he0S[OYZOkd0ie0T[OXZOjd0je0U[OVZOjd0ke0V[OVZOid0ji0N2N1O2O1N1O2O0O2N2O0O2O1N101N2O1N2O001N2O1N2O1O1N2O1O1N3N1O1N2O1O0O2O1N2O1O1N101O1N2O0O2O1N2O0O2O1N101N101N2O0O2N101N1O2O1N1O2N101N1O2N1O2N101N1O2N101N1O2O0O2O0O2O0O2O0O2O0O2O0O2O001N101N101O001O001N101O001O001N101O001O000O2O001O00001N10001O00001N1000001O0O101O0000001N1000001O000O101O00001O000O101O0000001N1000001O0O10001O000O101O000O10001O0O10001O0O10001N1000001N10001N10001N101N101O0O101N101O0O101N101N100O2O000O101N10001N10000O2O000O10001O000O10001O00000O101O00000000000O2O000000001O000O10001O0000001O0O10001O00001O00001N10001O001O00001O001N101O001O001O1O001O000O2O001O000010O0001O00001O00001O0000001O00001O00001O00001O00001O0010O0001O001O001O1O001O001O1O001O1O001O1O001O001O010O001O00001O00001O00001O00001O0O10000000001O000000001O00000000001O0000000O2O000000001O000000001O0000001O0000001O0000001O0O10001O0000001O00001O00001O00001O00001O001O00
1O00001O0010O0001O00001O0000001O00001O0000001O0000001O000000001O000000001O01O00000001O00000000001O000000001O0000001O0000001O00001O0000001O00001O001O00001O001O001O00lGPDhYOo;Xf0RDgYOn;Yf0SDgYOl;Yf0UDfYOk;Zf0WDdYOi;]f0WDcYOh;]f0YDbYOg;^f0ZDbYOe;^f0\\\\DaYOd;_f0]D`YOc;`f0_D_YO`;af0aD^YO_;bf0bD]YO^;df0bD\\\\YO];df0dD[YO\\\\;ef0eDZYO[;ff0fDZYOY;ff0gDZYOY;ff0hDYYOX;gf0hDYYOX;gf0iDYYOV;hf0iDXYOW;hf0jDWYOV;if0jDWYOV;if0jDWYOV;if0kDWYOT;if0lDWYOT;if0lDWYOT;if0mDVYOS;kf0lDUYOT;kf0mDTYOS;lf0mDTYOS;lf0mDTYOS;lf0nDSYOR;mf0nDSYOR;nf0nDRYOQ;nf0oDRYOQ;nf0oDRYOQ;of0oDPYOQ;Pg0oDPYOR;Pg0nDoXOR;Qg0nDoXOR;Rg0mDnXOS;Sg0mDlXOS;Tg0mDlXOT;Tg0lDkXOT;Ug0lDlXOS;Ug0mDjXOS;Wg0lDiXOU;Vg0lDiXOT;Xg0kDhXOU;Xg0lDgXOT;Zg0kDfXOU;[g0kDdXOV;[g0jDeXOV;\\\\g0jDcXOV;]g0jDcXOW;]g0iDcXOV;]g0jDcXOV;^g0jDaXOV;_g0jDaXOW;_g0iD`XOW;`g0iD`XOW;`g0jD_XOW;ag0iD^XOW;bg0iD^XOW;cg0iD\\\\XOW;dg0iD\\\\XOX;cg0iD]XOV;dg0jD[XOV;eg0jD[XOW;dg0jD[XOV;fg0iDZXOX;eg0iDZXOW;fg0iDZXOW;gg0iDXXOX;gg0hDYXOX;gg0hDYXOY;gg0gDXXOY;hg0gDYXOX;gg0hDYXOY;fg0hDYXOX;hg0gDXXOZ;gg0fDYXOZ;gg0fDYXO[;fg0eDZXO[;gg0eDXXO\\\\;gg0dDYXO\\\\;gg0dDYXO\\\\;gg0dDYXO];gg0bDZXO];fg0cDZXO^;eg0bD[XO^;eg0cDZXO];gg0bDYXO_;fg0aDZXO_;fg0aDZXO_;fg0aDZXO`;eg0`D[XO`;eg0`D[XO`;eg0aD[XO^;eg0bD[XO^;eg0bD[XO_;dg0aD\\\\XO_;dg0aD\\\\XO_;dg0aD\\\\XO_;dg0aD\\\\XO_;dg0bD[XO^;eg0bD[XO^;eg0bD[XO_;dg0aD]XO^;cg0bD]XO^;cg0bD]XO^;cg0bD]XO^;cg0bD]XO^;cg0cD\\\\XO^;cg0bD]XO^;cg0bD]XO^;cg0bD]XO^;cg0bD^XO];bg0cD^XO];bg0cD^XO];bg0dD]XO];bg0cD^XO];bg0cD^XO];bg0cD^XO];bg0cD^XO];bg0cD^XO];bg0cD_XO\\\\;ag0eD^XO[;bg0eD^XO\\\\;ag0dD_XO\\\\;ag0dD_XO\\\\;ag0dD_XO\\\\;ag0dD_XO\\\\;ag0dD_XO\\\\;ag0dD`XO[;`g0fD_XOZ;ag0fD_XO[;`g0eD`XO[;`g0eD`XO[;`g0eD`XO[;`g0dDaXO\\\\;_g0dDaXO\\\\;_g0dDaXO\\\\;_g0cDbXO];^g0cDbXO];^g0bDcXO_;\\\\g0aDdXO_;\\\\g0aDdXO_;\\\\g0`DeXO`;[g0`DeXO`;[g0_DfXOa;Zg0_DfXOa;Zg0^DgXOb;Yg0^DgXOb;Yg0]DhXOd;Wg0\\\\DiXOd;Wg0[DjXOe;Vg0[DjXOe;Vg0ZDkXOf;Ug0ZDjXOg;Vg0XDkXOh;Ug0WDlXOi;Tg0WDlXOi;Sg0WDnXOi;Rg0VDoXOj;Qg0UDPYOk;of0UDRYOk;nf0UDRYOl;lf0TDUYOl;kf0SDVYOm;jf0RDWYOn;hf0SDXYOm;hf0RDYYOn;ff0
RD[YOn;ef0RD[YOn;df0RD]YOn;cf0RD]YOn;bf0RD_YOn;af0RD_YOn;`f0RDaYOn;_f0RDaYOn;^f0RDcYOn;\\\\f0SDdYOm;\\\\f0RDeYOn;Zf0SDfYOm;Yf0SDhYOn;Vf0SDjYOm;Vf0SDjYOm;Uf0SDlYOm;Sf0TDmYOl;Qf0UDPZOk;je0[DVZOe;ee0`D[ZO`;_e0eDbZO[;Xe0kDhZOU;Re0QEnZOo:ld0VEU[Oj:id0XEW[Oh:gd0ZEX[Og:gd0YEZ[Og:ed0ZE[[Of:cd0[E^[Oe:ad0\\\\E_[Od:`d0\\\\Ea[Oe:]d0\\\\Ec[Od:\\\\d0\\\\Ee[Od:Zd0]Ef[Oc:Yd0]Eh[Oc:Xd0]Eh[Oc:Wd0]Ej[Oc:Ud0^Ek[Ob:Td0^El[Oc:Sd0]En[Oc:Rd0[EP\\\\Oe:oc0ZES\\\\Of:lc0ZEU\\\\Of:kc0XEW\\\\Oh:hc0WEZ\\\\Oi:ec0VE]\\\\Oj:cc0SE`\\\\Om:_c0REc\\\\On:]c0oDf\\\\OQ;Yc0nDi\\\\OR;Wc0iDn\\\\OW;Qc0aDX]O_;hb0XD`]Oj;cl0000000000000000000000000000O10000000000000000000000000000QD_CnAa<k=fCTB[<d=lC]BT<\\\\=SDdBm;U=YDlBh;R=XDoBh;P=YDPCg;n<ZDSCf;l<[DTCe;k<[DVCe;i<[DWCf;h<[DXCe;g<[DZCe;e<[D\\\\Ce;c<[D^Ce;a<\\\\D_Cd;`<\\\\DaCd;^<\\\\DcCd;]<[DcCf;\\\\<ZDeCf;Z<ZDgCf;X<ZDiCf;W<XDkCh;T<WDnCi;Q<TDSDl;m;PDWDP<h;mC\\\\DS<d;hC`DY<_;cCfD]<Z;^CkDb<T;ZCQEf<]h0000000001N10000000000000000O100000001O0000000O10000000000kAUCdFk<Z9WCfFj<X9VCiFj<U9XCjFi<U9XCkFh<S9YCnFg<P9[CPGe<o8\\\\CQGd<m8]CTGc<k8^CUGb<i8_CXGa<g8`CYG`<e8aC[G`<c8bC]G_<a8aC`G_<^8cCbG]<]8cCdG]<Z8eCfG[<Z8dCgG\\\\<X8dCiG\\\\<V8eCiG\\\\<V8dCkG\\\\<U8cClG]<S8cCnG^<P8aCRH_<n7]CVHc<i7\\\\CYHd<g7YC\\\\Hg<c7WC_Hj<`7TCcHl<]7QCfHo<Y7oBjHQ=V7nBkHR=Zf01N100000001O0O10001O00001N10001O001N10001O001N10001O001O001O001O001O1a@[BPJe=^e01`@ZBRJg=\\\\e01`@XBUJh=Ze01_@WBXJj=We00_@VB[Jj=Ve01]@UB^Jk=Te01O001O001O00001N10001O00001O00001O00001O00001O00001O0O2O00001O001O001O001O001N2O001O001N2O001O1O1O0O2O1O1O1O1O1N2O1O1O2N1O1N2O1O2N1O1O1N101O1O1O1O1O0O2O1O1O001O001N2O001O001O1O0O2O1O001O1O1O0O2O1O1O1N2O1O2N2N3L3N3M3L3N3M3M3L5L3M4K6K8a^On]Oo1Yb0iMg]OW2ab0`Ma]O`2fb0WM[]Oj2jb0oLX]OP3jb0nLV]OS3kb0jLW]OU3kb0hLV]OY3lb0cLV]O\\\\3lb0bLT]O_3mb0`LS]O`3nb0^LR]Ob3ob0]LQ]Od3Pc0ZLQ]Oe3Qc0YLo\\\\Oh3Rc0WLm\\\\Oi3Uc0ULk\\\\Ol3Uc0SLl\\\\Ol3Vc0SLi\\\\On3Xc0PLh\\\\OQ4Yc0nKg\\\\OQ4Zc0nKf\\\\OS4[c0lKd\\\\OU4\\\\c0jKe\\\\OU4]c0jKb\\\\OW4_c0gKb\\\\OY4^c0gKa\\\\OY4ac0eK`\\\\O[4`c0eK_\\\\O\\\\4bc0bK_\\\\O]4bc0cK]\\\\O
^4dc0aK\\\\\\\\O_4dc0`K\\\\\\\\Oa4ec0^K[\\\\Oa4fc0_KY\\\\Ob4hc0\\\\KY\\\\Od4gc0\\\\KX\\\\Od4jc0ZKW\\\\Of4ic0ZKV\\\\Og4kc0XKU\\\\Og4lc0XKT\\\\Oi4mc0VKS\\\\Oj4mc0UKS\\\\Ok4oc0TKQ\\\\Ol4Pd0SKo[Om4Sd0QKn[Oo4Rd0QKm[OP5Td0nJm[OQ5Ud0nJj[OS5Wd0kJi[OU5Xd0kJh[OT5Zd0jJf[OW5[d0gJf[OX5\\\\d0gJc[OZ5^d0dJb[O\\\\5ad0bJ^[O_5cd0_J^[O`5ed0]J[[Oc5hd0[JX[Od5kd0YJU[Oh5ld0VJT[Oj5od0SJR[Ol5Pe0RJP[On5Se0PJmZOo5Ue0oIkZOQ6Xe0lIiZOS6Ye0kIgZOU6\\\\e0hIeZOW6]e0gIcZOY6_e0eIbZOZ6ae0bI`ZO_6ae0_I`ZO`6be0^I^ZOb6ee0ZI]ZOe6ee0YI[ZOh6fe0VI[ZOi6ge0UIZZOj6ie0SIWZOn6je0PIWZOo6ke0oHUZOR7le0lHUZOS7me0kHTZOU7me0iHSZOW7oe0gHRZOY7ne0fHRZOZ7Pf0dHQZO\\\\7Pf0bHQZO]7Pf0cHoYO^7Rf0`HoYO`7Rf0^HoYOa7Rf0_HmYOb7Tf0\\\\HmYOc7Tf0\\\\HmYOd7Tf0[HlYOe7Tf0ZHlYOf7Vf0YHjYOg7Vf0XHkYOh7Vf0WHjYOh7Wf0WHjYOi7Wf0UHjYOj7Wf0VHiYOj7Xf0THhYOl7Yf0SHhYOm7Yf0RHgYOm7Zf0RHgYOn7Zf0PHgYOo7[f0PHeYOP8[f0oGfYOP8\\\\f0nGdYOQ8^f0nGcYOQ8_f0mGbYOR8`f0lGaYOS8`f0lGaYOR8bf0lG_YOS8cf0kG]YOT8ef0jG]YOU8ef0hG]YOV8ff0hG[YOV8hf0gGZYOW8if0fGYYOX8if0fGXYOY8kf0dGWYOZ8lf0cGVYOZ8nf0cGTYOY8Qg0dGQYOY8Sg0dGnXOZ8Vg0cGlXOZ8Xg0bGkXO\\\\8Xg0aGjXO\\\\8Zg0aGgXO^8\\\\g0^GgXO`8\\\\g0]GfXOb8]g0YGfXOf8\\\\g0WGfXOi8\\\\g0SGfXOl8\\\\g0QGeXOo8^g0mFdXOS9^g0iFdXOV9^g0gFdXOX9^g0eFdXO[9^g0bFcXO]9_g0`FcXO`9_g0\\\\FbXOd9`g0ZFaXOf9`g0WFbXOh9`g0VFaXOj9`g0TFaXOk9bg0QF`XOo9ag0oE`XOP:bg0nE^XOS:cg0kE^XOU:cg0hE_XOW:cg0gE^XOY:cg0eE_XOY:cg0eE^XO[:cg0cE^XO]:bg0bE_XO]:cg0`E_XO`:bg0^E`XO`:bg0^E_XOb:bg0[E`XOd:bg0ZE_XOf:bg0WE`XOh:ag0WE`XOi:ag0TEbXOj:`g0SEbXOm:_g0QEbXOn:ag0nDaXOQ;ag0lDbXOR;ag0jDaXOV;`g0gDbXOX;ag0dDbXOZ;ag0bDaXO];ag0`DaXO^;cg0]DaXOa;bg0[D`XOc;dg0XD_XOg;dg0TD`XOi;dg0QD`XOm;dg0mC`XOR<cg0hCbXOU<cg0dCaXO[<bg0^CcXO`<bg0XCdXOe<ag0SCdXOl<an0N3M2O1O2O0O1O2O0O101N100O101N100O2O0O101O0O10001N10000O10000O100O2O000O100O100O100O1O2O0O1O100O1O1O1N3N1N2O1N2N2O1N3N1N2O1O1N2O2M2O1O100O2N100O1O101N100O101N100O101N101O0O2O000O101N100O2O0O100O2O0O1O2O0O1O2O0O1O2N1O2N1N2N3N1N2O2N1N3N1N3N1N2O2N1N3N1O2M2O2N1O2N1O2N1O2N1O2N2N1O2N1O2N2O1N3M4L3M3M4L4K4L5K5J7J6I7J7H7Ic0]O`0_O8G9G9G:F9Dc0^Oe0UOc1iMce3\"}}, 
{\"image_id\": 98, \"category_id\": 1, \"bbox\": [3.0, 322.0, 1335.0, 878.0], \"score\": 0.9999992251396179, \"association_id\": 1, \"light\": [-1.9661667346954346, -1.962018609046936, 1.8838880062103271, 1.8500280380249023], \"segmentation\": {\"size\": [1355, 1943], \"counts\": \"ji4j3TU1h1SOh0C:G8K5K5L4K4M4L3O1N101N101N101O0O101O00001O00001O001N101O001O001O1O001O1O1O1O1O1O1O2N1O1O1O1O1O1O1O1O1N101O1O001O1O001O1O001O001O0QKPGdXOP9Yg0VGcXOk8[g0YGUUOFQ2Q9hh0^GoTOHW2k8gh0bGkTOI\\\\2f8gh0eGfTOKa2a8gh0hGaTONe2Z8hh0mG]TOMk2V8fh0PHYTO0o2Q8fh0YIXWOh6fh0[IXWOf6eh0]IZWOd6dh0_IZWOc6ch0`I\\\\WOa6ah0aI^WOa6_h0bI_WO`6^h0eI^WO^6]h0jI]WOX6`h0oI[WOS6^h0WJ\\\\WOk5Xh0dJcWO]5Sh0nJkWOR5Ph0TKPXOl4ng0WKPXOj4mg0ZKQXOf4mg0]KRXOd4kg0`KTXO_4jg0dKUXO]4ig0fKUXOZ4ig0jKVXOV4fg0nKYXOR4eg0RLYXOn3eg0VLZXOj3cg0YL\\\\XOg3bg0]L\\\\XOc3ag0bL]XO_3^g0fLbXOY3[g0lLcXOT3Yg0QMfXOo2Wg0VMgXOk2Ug0YMjXOg2Sg0]MlXOc2Qg0bMnXO]2Qg0eMnXO\\\\2Pg0fMoXOZ2Pg0hMoXOX2Pg0kMnXOV2Pg0lMoXOT2Pg0nMoXOR2Pg0PNoXOQ2of0QNPYOo1Pg0RNoXOo1of0SNPYOm1of0UNPYOl1of0UNPYOk1of0WNPYOj1nf0XNQYOh1of0XNQYOk1kf0WNTYOl1hf0VNWYOm1ff0SNZYOQ2af0QN_YOR2\\\\f0oMdYOR2Zf0PNeYOP2Zf0RNeYOo1Yf0RNgYOo1Xf0RNgYOn1Xf0SNhYOn1Uf0UNjYOk1Uf0VNkYOk1Sf0WNlYOi1Sf0XNmYOh1Qf0[NnYOf1oe0\\\\NQZOd1me0^NSZOb1je0bNUZO_1he0cNXZO]1ee0gNZZOY1de0iN\\\\ZOW1ae0mN^ZOT1_e0nNbZOQ1[e0ROeZOn0Ye0UOfZOk0Xe0WOhZOj0Ve0XOiZOh0Ve0YOjZOg0Te0[OlZOe0Se0]OlZOd0Re0]OnZOc0Qe0^OoZOb0Pe0@oZO`0Pe0AP[O`0nd0AR[O?md0CS[O<ld0ET[O<jd0EV[O;id0GV[O:hd0GX[O9hd0GX[O:fd0f:O2N2O0O=C6K1N2N2O0O2N2N101N1O2O0O2N100O2N100O2N100O1O2O0O100O2N100O2O0O2O0O2N101N102M2O1N2O2M2O1N2O2M2O1N2O1N2N2N100O2N101N1O1O2O0O1O2N1O1O100O1O2N1O1O1O1O1O1O1O2N1O1O1O1O100O1O2O0O100O10dDW_OVMh`0k2Y_OTMg`0m2Y_ORMf`0o2[_OQMd`0o2]_OPMc`0Q3\\\\_OoLd`0Q3]_OnLb`0S3__OlLa`0U3^_OkLb`0U3__OjLa`0V3`_OjL_`0W3`_OiL_`0X3b_OgL^`0Z3a_OfL_`0Z3b_OeL^`0\\\\3a_OdL_`0]3`_OcL_`0^3b_OaL^`0`3a_OaL^`0_3c_O`L]`0a3b_O_L^`0b3b_O]L^`0d3a_O]L]`0e3c_OZL]`0i3a_OVL_`0l>O000O2O001O001O0O2O00001O0O2O00001O000O101O00`Gn_OgFR`0[9o_ObFP`0a9Q@]Fn?d9R@[Fn?g9R@WFn?k9Q@TFo?m9Q@R
Fo?P:Q@oEn?S:Q@lEo?Xb000000001O000000000000000000000000000000000000000000UJV@UAi?m>Z@o@f?Q?^@k@b?V?`@g@`?Y?`@g@`?Z?`@e@`?[?a@d@_?]?`@c@`?]?a@b@_?_?`@a@`?_?a@`@_?`?a@`@_?a?a@^@_?b?a@^@_?b?a@^@_?b?b@^@]?b?c@^@]?c?b@]@^?c?c@\\\\@]?d?c@\\\\@]?d?c@\\\\@]?d?c@\\\\@]?d?d@[@\\\\?e?d@[@\\\\?e?d@[@\\\\?e?d@[@\\\\?e?e@Z@[?f?e@Z@[?f?e@Z@[?f?e@Z@[?f?e@Z@[?f?f@Y@Z?h?e@X@[?h?e@X@[?h?f@W@Z?i?f@W@Z?i?f@W@Z?i?g@V@Y?j?g@V@Y?j?g@V@X?k?i@T@W?l?i@T@W?l?i@T@W?l?j@S@V?m?j@S@V?m?k@R@U?n?k@R@U?n?k@R@U?n?l@Q@T?P`0k@P@U?P`0l@o_OT?Q`0l@o_OT?Q`0l@o_OT?Q`0m@n_OS?R`0m@n_OS?R`0m@n_OS?R`0n@m_OR?S`0n@m_OR?S`0n@m_OR?S`0n@m_OR?S`0o@l_OQ?T`0o@l_OQ?T`0o@l_OQ?T`0o@l_OQ?T`0o@l_OQ?T`0o@l_OR?S`0n@m_OR?S`0m@n_OS?R`0m@n_OS?R`0m@n_OS?R`0l@o_OT?Q`0l@o_OT?Q`0l@o_OT?Q`0k@P@U?P`0k@P@U?P`0k@P@V?o?i@R@W?n?i@R@W?n?h@S@X?m?h@S@X?m?g@T@Y?l?g@T@Y?l?f@U@Z?k?f@U@Z?k?f@U@Z?k?e@V@[?j?e@V@\\\\?i?d@W@\\\\?i?c@X@]?h?c@X@]?h?c@X@]?h?c@X@]?h?b@Y@^?g?b@Y@^?g?b@Y@^?g?b@Y@^?g?b@Y@^?g?a@Z@_?f?a@Z@`?e?`@[@`?e?`@[@`?e?`@[@`?e?_@\\\\@a?d?_@\\\\@a?d?_@\\\\@a?d?_@\\\\@a?d?_@\\\\@a?d?_@\\\\@a?d?^@]@b?c?^@]@b?c?^@]@b?c?^@]@b?c?]@^@c?b?]@^@c?b?]@^@c?b?]@^@c?b?\\\\@_@d?a?\\\\@_@d?a?\\\\@_@d?a?[@`@e?`?[@`@e?`?Z@a@f?_?Z@a@f?_?Z@a@f?_?Y@b@g?^?Y@b@g?^?X@c@h?]?W@d@i?[?X@e@h?[?W@f@i?Z?V@g@j?Y?V@g@j?Y?U@h@k?X?T@i@l?W?T@h@m?X?R@i@n?W?R@i@n?W?Q@j@o?V?Q@j@o?V?P@k@P`0U?P@k@P`0U?P@k@P`0hd01O00000000000000000000000000000000000000000O100000000000000000000000000000000O10000000000000000000000000000O1000000000000000000000000O100000000000000000000O10000000000000000O100000000000000O1000000WJi_O^AW`0b>i_O^AW`0b>i_O^AW`0b>j_O]AV`0c>j_O]AV`0c>k_O\\\\AU`0d>k_O[AV`0e>k_OZAU`0f>k_OZAU`0g>k_OXAU`0h>k_OXAU`0h>k_OXAU`0i>k_OVAU`0j>k_OVAU`0j>l_OUAT`0k>l_OTAU`0m>j_OSAV`0m>j_OSAV`0m>k_ORAU`0o>j_OQAV`0o>j_OQAV`0o>j_OQAV`0o>k_OPAU`0Q?j_Oo@V`0Q?j_On@W`0R?i_On@W`0R?j_Om@V`0T?i_Ol@W`0T?i_Ol@W`0T?i_Ol@W`0T?i_Ol@W`0T?i_Ol@W`0U?i_Oj@W`0V?i_Oj@W`0V?i_Oi@X`0W?h_Oi@X`0W?h_Oi@X`0X?g_Oh@Y`0X?g_Oh@Y`0X?g_Oh@Y`0X?h_Of@Y`0Z?g_Of@Y`0Z?g_Of@Y`0[?f_Od@[`0\\\\?e_Od@[`0\\\\?e_Od@[`0\\\\?e_Oc@\\
\\`0]?d_Oc@\\\\`0]?d_Oc@\\\\`0^?c_Oa@^`0_?b_Oa@^`0_?b_Oa@^`0_?b_O`@_`0`?b_O_@^`0a?b_O^@_`0b?a_O^@_`0b?a_O^@_`0b?a_O]@``0c?`_O]@``0c?`_O\\\\@a`0d?__O\\\\@a`0d?__O[@b`0e?^_O[@b`0e?^_OZ@c`0f?^_OY@b`0g?^_OY@b`0g?^_OX@c`0h?]_OX@c`0h?]_OW@e`0h?[_OW@f`0i?Z_OW@f`0i?Z_OV@g`0j?Y_OV@g`0j?Y_OU@h`0k?Y_OT@g`0l?Y_OT@g`0m?X_OR@i`0n?W_OR@i`0n?W_OR@i`0n?W_OQ@j`0o?V_OQ@j`0o?V_OQ@j`0o?V_OP@k`0P`0U_OP@k`0P`0U_Oo_Ol`0Q`0T_Oo_Ol`0Q`0T_Oo_Ol`0Q`0T_On_Om`0R`0S_On_Om`0R`0S_Om_On`0S`0R_Om_On`0S`0R_Ol_Oo`0T`0Q_Ok_OPa0U`0P_Ok_OPa0U`0P_Oj_OQa0V`0o^Oi_ORa0W`0n^Oi_ORa0W`0m^Oi_OUa0V`0k^Oi_OVa0W`0j^Oh_OWa0X`0i^Og_OXa0Y`0h^Of_OYa0Z`0g^Oe_O[a0Z`0e^Oe_O\\\\a0[`0d^Oe_O\\\\a0[`0d^Od_O]a0\\\\`0c^Oc_O_a0\\\\`0a^Od_O_a0\\\\`0a^Oc_O`a0]`0`^Oc_Oaa0\\\\`0_^Od_Oaa0\\\\`0_^Oc_Oca0\\\\`0]^Od_Oca0\\\\`0]^Oc_Oda0]`0\\\\^Oc_Oea0\\\\`0[^Od_Oea0\\\\`0[^Oc_Oga0\\\\`0Y^Od_Oga0\\\\`0Y^Od_Oha0[`0X^Oe_Oia0Z`0W^Oe_Oja0[`0V^Oe_Oka0Z`0U^Of_Oka0Z`0U^Of_Ola0Y`0T^Og_Oma0X`0S^Oh_Ona0W`0R^Oh_OPb0W`0P^Oi_OPb0W`0P^Oi_OQb0V`0o]Oj_ORb0U`0n]Ok_OSb0T`0m]Ok_OTb0U`0l]Ok_OUb0T`0k]Ol_OVb0S`0j]Om_OVb0S`0j]Om_OWb0R`0i]Om_OXb0S`0h]Om_OYb0R`0g]On_OYb0R`0g]On_OZb0Q`0f]On_O[b0R`0e]On_O[b0R`0e]On_O\\\\b0Q`0d]On_O]b0R`0c]On_O^b0Q`0b]Oo_O^b0Q`0b]On_O_b0R`0a]On_O_b0R`0a]On_O`b0Q`0`]On_Oab0R`0_]On_Oab0R`0_]Om_Obb0S`0^]Om_Obb0S`0^]Ol_Odb0S`0\\\\]Om_Odb0S`0\\\\]Om_Odb0S`0\\\\]Ol_Ofb0S`0Z]Om_Ofb0S`0Z]Om_Ofb0S`0Z]Ol_Ogb0T`0Y]Ol_Ohb0S`0X]Om_Ohb0R`0Y]On_Ogb0R`0Y]Om_Oib0R`0X]Om_Ohb0S`0X]Om_Ohb0S`0X]Om_Ohb0S`0X]Om_Oib0R`0W]On_Oib0R`0W]Om_Ojb0S`0V]Om_Okb0R`0U]On_Okb0R`0U]On_Olb0Q`0T]Oo_Olb0Q`0T]Oo_Olb0Q`0T]Oo_Omb0o?T]OQ@lb0o?T]OQ@mb0n?S]OR@mb0n?S]OR@mb0n?S]OR@nb0m?R]OR@ob0n?Q]OR@Pc0m?P]OS@Pc0m?P]OS@Qc0l?o\\\\OT@Qc0l?o\\\\OT@Rc0k?n\\\\OU@Rc0j?o\\\\OV@Rc0i?n\\\\OW@Rc0i?n\\\\OW@Sc0h?m\\\\OX@Sc0h?m\\\\OX@Tc0g?l\\\\OY@Tc0g?l\\\\OY@Tc0g?l\\\\OY@Uc0f?k\\\\OZ@Uc0e?l\\\\OZ@Uc0f?k\\\\OZ@Vc0e?j\\\\O[@Vc0e?j\\\\O[@Vc0e?j\\\\O[@Wc0d?i\\\\O\\\\@Wc0d?i\\\\O\\\\@Wc0c?j\\\\O]@Wc0b?i\\\\O^@Wc0b?i\\\\O^@Wc0b?i\\\\O^@Xc0a?h\\\\O_@Xc0a?g\\\\O`@Yc0`?g\\\\O`@Zc0^?g\\\\Ob@Yc0^?g\\\\O
b@Yc0^?g\\\\Ob@Yc0^?g\\\\Ob@Zc0]?f\\\\Oc@Zc0\\\\?g\\\\Od@Yc0\\\\?f\\\\Oe@[c0Z?e\\\\Of@[c0Z?e\\\\Of@[c0Z?e\\\\Of@\\\\c0Y?d\\\\Og@\\\\c0X?e\\\\Oh@[c0X?e\\\\Oh@[c0X?d\\\\Oi@]c0V?c\\\\Oj@]c0U?d\\\\Ok@\\\\c0U?d\\\\Oj@^c0U?b\\\\Ok@^c0U?b\\\\Ok@^c0U?b\\\\Ok@^c0T?b\\\\Om@_c0R?a\\\\On@_c0R?a\\\\On@_c0R?a\\\\On@_c0R?a\\\\On@`c0P?a\\\\OPA_c0P?a\\\\OPA_c0P?a\\\\OPA`c0o>_\\\\ORAac0n>_\\\\ORA`c0n>a\\\\ORA_c0n>a\\\\ORA_c0n>a\\\\ORA_c0m>b\\\\OSA^c0m>b\\\\OSA^c0m>a\\\\OTA_c0l>a\\\\OTA_c0k>b\\\\OUA^c0k>b\\\\OUA^c0k>b\\\\OUA^c0k>b\\\\OUA^c0k>a\\\\OVA_c0i>b\\\\OWA^c0i>b\\\\OVA_c0j>`\\\\OWA`c0i>`\\\\OWA`c0h>a\\\\OXA_c0h>`\\\\OXA`c0i>`\\\\OWA`c0i>_\\\\OXAac0h>_\\\\OWAbc0h>^\\\\OYAbc0g>^\\\\OYAbc0g>]\\\\OYAdc0g>\\\\\\\\OYAdc0g>[\\\\OYAfc0g>Y\\\\OZAgc0e>Z\\\\OZAgc0f>X\\\\O[Ahc0e>W\\\\O[Aic0f>W\\\\OZAic0f>V\\\\OZAkc0f>U\\\\OYAlc0f>T\\\\O[Alc0e>T\\\\OZAmc0f>R\\\\O[Anc0e>R\\\\OZAoc0f>P\\\\O[Aoc0f>Q\\\\OYAPd0f>P\\\\O[APd0e>P\\\\OZAQd0f>n[O[ARd0e>n[O[ARd0e>n[OZASd0f>l[O[ASd0f>m[OZASd0f>l[O[ATd0d>m[O[ATd0e>l[O[ATd0e>k[O\\\\AUd0d>k[O\\\\AUd0d>k[O\\\\AUd0\\\\b010O100000O1000O10O1000O010000O01000O010O0010O0010O0001O001O0O2O1M2O2M\\\\\\\\OV\\\\Ol`0gc0W_OZ\\\\Oi`0ac0\\\\_O`\\\\Oc`0[c0a_Og\\\\O^`0Xc0b_Oi\\\\O]`0Xc0c_Oi\\\\O\\\\`0Wc0d_Oi\\\\O\\\\`0Wc0d_Oi\\\\O\\\\`0Wc0d_Oi\\\\O[`0Xc0e_Oh\\\\O[`0Xc0e_Oh\\\\O[`0Xc0e_Oh\\\\O[`0Xc0e_Oh\\\\O[`0Yc0d_Og\\\\O\\\\`0Yc0d_Og\\\\O\\\\`0Yc0d_Og\\\\O\\\\`0Zc0b_Og\\\\O^`0\\\\c0__Od\\\\Oa`0_c0\\\\_Ob\\\\Oc`0ac0Z_O_\\\\Of`0ec0V_O[\\\\Oj`0jc0Q_OV\\\\On`0_b0n\\\\O]^Oo1TOSa0^b0S]O_^Oe1SOXa0]b0V]Oa^O_1RO[a0\\\\b0Y]Oc^OY1QO^a0\\\\b0Y]Og^OU1mNba0[b0[]Ok^Oo0jNfa0Zb0\\\\]Oo^Ok0hNha0Xb0^]OT_Of0dNla0Wb0`]OV_Ob0cNna0Vb0a]OX_O`0bNoa0Ub0b]OY_O?bNoa0Ub0b]OZ_O>aNPb0Tb0c]O[_O<bNPb0Sb0e]O[_O;cNoa0Qb0g]O]_O9bNPb0Pb0h]O^_O8bNPb0oa0i]O`_O6aNQb0oa0j]O__O5bNQb0na0k]Oa_O3bNQb0la0m]Ob_O2bNQb0ka0n]Oc_O1bNQb0ja0o]Oe_OOaNRb0ia0P^Of_ONbNQb0ga0R^Oh_OLaNRb0ga0R^Oh_OLaNQb0ga0T^Oh_OJcNQb0da0V^Oj_OHbNRb0ca0W^Ok_OGbNRb0ba0X^Om_OEbNRb0aa0Y^Om_OEbNRb0ba0W^Oo_ODaNTb0aa0V^OP@DaNUb0`a0U^OR@BaNWb0_a0U^OS@A_N
Zb0_a0S^OT@@`N\\\\b0]a0R^OV@^O`N^b0\\\\a0R^OW@\\\\O`Nab0Za0Q^OY@YO`Nfb0Xa0o]OZ@WObNhb0Va0o]OfAQb0\\\\>l]OfASb0[>j]OhATb0[>i]OfAWb0[>g]OgAXb0[>e]OfAZb0\\\\>d]OfA[b0\\\\>a]OfA^b0]>_]OeA`b0\\\\>^]OeAab0Tc0O0O101O0O2O1O0O2O1O1O2M2O2N3M3L5L6J1O2N1O1N3N1O2N1O1O1OO100O10000O10000O100O10000O101N100O100O100O100O100O100O1O1O1O1N2O1O1O1O100O1O100O100O0010O10001N100O2O0O100O2N1O100eLZWOPEgh0P;YWOoDhh0Q;XWOnDjh0Q;VWOnDkh0R;UWOmDlh0R;UWOmDlh0S;TWOlDmh0T;SWOkDnh0U;RWOjDoh0U;RWOjDoh0V;QWOjDoh0V;QWOiDQi0V;oVOiDRi0W;nVOiDRi0V;oVOiDRi0W;nVOiDRi0W;nVOhDSi0X;mVOgDTi0X;mVOhDSi0X;mVOhDSi0X;mVOgDTi0X;mVOhDSi0X;mVOgDTi0Y;lVOgDTi0Y;lVOfDUi0Y;lVOgDTi0Y;lVOfDUi0Z;lVOeDTi0Z;mVOfDTi0Y;lVOfDUi0Z;lVOdDUi0[;lVOeDTi0[;mVOcDTi0];lVOcDTi0\\\\;nVObDSi0^;mVOaDTi0^;nVO`DTi0_;lVOaDTi0^;nVO`DTi0_;mVO_DTi0`;nVO^DSi0b;oVO[DSi0c;oVOZDSi0f;nVOXDTi0f;nVOXDSi0h;oVOTDSi0k;oVORDTi0m;mVOoCVi0P<mVOlCVi0S<kVOjCWi0V<kVOfCWi0Y<lVOcCWi0\\\\<oVO]CRi0b<TWOVCPi0i<Y201N2O1O1N2O1O1N2O001O1N2O001O1N101O001N2O001O001N101O1O1O1N2O1O2N001N101O001O0O2O001O0O2O001O0O2O001O0O101O0O2O001N10001N10001N101N10001N100O2O0O101N100O100O2O000O101N10000O2O0O10001N100O2O000O2O000O2O001N2O2M2O1O0O2O1O1N2O001N2O001O0O2O002M2O2M3N1O2M3N2M2O2M3M2O2M3N1M4M3M3M2M4M3M3M4L3M4L3M3M4L3M4M2M4L3M4L3N2M4L3N2M3M2O2M3K5J5K5J6K5K5L4M3M3M4K4M3M3M3L4M3M3M3N2N2N2N2N2N2N2N3M6K5`NRiNXOTW1b0nhN\\\\OXW1=lhN@YW1;ihNCZW1:hhND[W18hhNGYW17jhNFYW17ihNGZW15ihNIYW15ihNJYW13jhNJXW13khNKXW12jhNLXW12jhNMXW10jhNNXW1OkhNOXW1NjhN1WW1MkhN1XW1LjhN2XW1LjhN3hX1Nlhoh0\"}}, {\"image_id\": 98, \"category_id\": 1, \"bbox\": [851.0, 279.0, 1059.0, 652.0], \"score\": 0.999951183795929, \"association_id\": 2, \"light\": [-1.9086461067199707, -2.295729398727417, 1.8502525091171265, 2.102015972137451], \"segmentation\": {\"size\": [1355, 1943], \"counts\": 
\"X_VS17gY1>L4M3M3M3N2M3N2M3N1O1N2O2M2N2O1N3M2O1N2N2N3M2N2M3O1N3N1O1O1O1O1O2N1O1O1O1O1O1O1O2O0O1O1O1O1O1O101N1O100O100O101O0O1000001N10000O101O000O101O0O102N1O3L3N2N3M2M4M3M1N101O000O2O00001N100XoNcKbk0]4\\\\TOfKck0[4ZTOhKdk0Y4YTOjKgk0V4WTOmKhk0T4UTOoKjk0Q4TTORLkk0n3TTOSLkk0n3TTOSLlk0m3STOULlk0k3STOVLmk0j3RTOWLmk0j3RTOXLmk0h3QTOZLok0g3nSO[LRl0e3lSO^LRl0c3mSO^LSl0b3kSOaLTl0_3kSObLUl0^3jSOcLUl0^3jSOdLUl0\\\\3iSOfLWl0Z3hSOhLWl0X3iSOhLVl0Y3iSOiLVl0W3iSOjLWl0V3hSOlLVl0V3hSOlLWl0T3iSOlLVl0U3iSOmLVl0S3jSOnLUl0R3jSOoLUl0R3kSOoLTl0Q3lSOPMSl0P3lSORMRl0o2nSORMQl0n2oSOSMPl0m2oSOWMmk0k2RTOWMlk0i2TTOYMjk0g2UTO\\\\Mhk0e2XTO]Mfk0c2YTO`Mdk0a2\\\\TO`Mck0`2]TObMak0^2^TOeM_k0\\\\2aTOfM]k0Z2bTOgM^k0Y2bTOhM\\\\k0Z2cTOfM]k0Z2bTOhM]k0X2cTOhM]k0X2bTOjM\\\\k0X2cTOhM]k0X2bTOjM]k0W2aTOjM_k0V2aTOkM^k0V2`TOlM_k0T2`TOnM_k0S2`TOmM`k0S2_TOoM`k0R2^TOPNak0P2^TORNak0o1^TORNak0n1^TOTNak0m1]TOUNbk0l1\\\\TOWNbk0i1\\\\RObJX1g3[l0h1XROfJ[1d3[l0h1UROhJ_1c3Zl0e1RROnJb1_3[l0d1nQORKe1^3Zl0Z2eSOjMWl0W2gSOnMUl0S2iSORNSl0n1mSOUNPl0l1nSOYNnk0h1PTO\\\\Nmk0e1RTO_Njk0a1UTOcNhk0^1VTOeNhk0\\\\1WTOgNfk0Y1YTOkNck0W1[TOlNck0U1\\\\TOmNQ2]JXf0g6eWOnNQ2\\\\JZf0f6dWOQOo1ZJ]f0f6cWOQOn1ZJ_f0e6bWOSOm1YJaf0e6`WOTOm1YJbf0d6`WOTOl1YJdf0c6_WOVOl1WJef0d6^WOVOk1WJgf0d6\\\\WOWOk1WJhf0b6]WOWOj1XJhf0c6\\\\WOWOj1WJjf0b6[WOYOj1UJkf0d6YWOWOl1UJkf0j6RWOROS2TJkf0o6mVOmNX2TJjf0U7hVOhN^2SJjf0c=VYO]Bjf0c=WYO\\\\Bif0d=WYO\\\\Bhf0e=XYO[Bhf0e=XYO[Bhf0e=XYO[Bhf0e=XYO[Bgf0f=YYOZBgf0f=ZYOYBff0g=ZYOYBef0h=[YOXBef0h=[YOXBef0h=[YOXBdf0i=\\\\YOWBdf0j=[YOVBdf0k=\\\\YOUBdf0k=\\\\YOUBdf0k=]YOTBbf0m=^YOSBbf0m=^YOSBaf0n=_YORBaf0n=_YORBaf0n=_YORBaf0n=_YORB`f0o=`YOQB`f0o=`YOQB`f0o=`YOQB`f0o=aYOPB_f0P>aYOPB_f0P>aYOPB^f0R>aYOnA_f0R>aYOnA_f0R>aYOnA_f0R>aYOnA_f0R>aYOnA_f0R>aYOnA_f0R>aYOnA^f0S>bYOmA^f0S>bYOmA^f0S>bYOmA^f0S>cYOkA^f0U>cYOjA]f0V>cYOjA]f0U>eYOjA[f0V>eYOiA\\\\f0W>eYOhA[f0X>fYOgAZf0X>hYOgAWf0Z>m20000O1000000O1000000O0100000O1000000O100000O010000O1000000O10000O0100000000000001O00000000001O000000001O0000000O101O000000001O000000001O000000001O000000001O0000001
O000000001O000000001O000000001O000000001O0000001O00000nTO`AYj0`>dUOdA[j0]>aUOhA]j0X>aUOkA^j0U>`UOnA_j0R>`UOPB_j0P>`UOSB^j0n=`UOTB_j0l=`UOVB`j0i=_UOYB`j0\\\\<_UOnC0H`j0X<cUOoCMJ_j0V<eUOQDJK`j0R<iUORDGM_j0P<kUOYETj0d9dUOnD:^1Qj0b9jUOkD7d1ni0a9lUOjD6e1ni0a9lUOjD6e1ni0a9lUOjD6f1mi0`9mUOjD6f1mi0`9mUOkD5e1ni0`9mUOkD5e1ni0`9mUOkD5f1mi0_9nUOkD5f16aMRi0n;cVOkD4\\\\2Yi0i8cVOkD4\\\\2Yi0h8dVOlD3]2Xi0g8fVOkD2^2Xi0g8fVOkD2^2Xi0g8fVOlD1]2Yi0g8fVOlD1]2Yi0g8fVOlD1]2Yi0g8fVOlD0^2Zi0f8fVOmDO]2[i0f8fVOmDO]2\\\\i0e8eVOnDO]2\\\\i0e8fVOnDM^2\\\\i0d8gVOnDM^2\\\\i0d8gVOoDK^2^i0c8gVOoDK^2^i0c8gVOPEJ]2_i0c8gVOQEI\\\\2`i0c8hVOQEF^2ai0a8iVOREE]2bi0a8iVO`HWi0`7iVO`HWi0`7jVO`HUi0`7kVO`HUi0_7lVOaHTi0_7mVOaHRi0_7nVOaHRi0_7oVOaHPi0_7PWObHoh0^7RWOaHnh0_7RWObHmh0^7TWObHkh0^7VWObHih0^7XWOcHfh0\\\\7\\\\WOeHch0Z7^WOgH`h0Y7aWOiH\\\\h0Z7WWOnHkh0Y<0O10000O100000000000aYOQ_Obc0o`0Y\\\\OV_Ogc0j`0V\\\\OY_Ojc0g`0T\\\\O\\\\_Okc0d`0o[Ob_OQd0^`0k[Og_OTd0Y`0k[Oh_OVd0W`0h[Ok_OXd0U`0h[Ok_OXd0U`0g[Ol_OYd0T`0f[On_OYd0R`0f[Oo_OZd0Q`0e[OP@[d0P`0e[OP@[d0P`0d[OQ@\\\\d0o?d[OQ@\\\\d0o?c[OR@]d0n?b[OS@^d0m?a[OT@_d0l?a[OT@_d0l?`[OU@`d0k?^[OX@ad0h?][OZ@dd0d?[[O^@ed0b?X[Oa@hd0_?U[Od@kd0\\\\?Q[Oh@od0X?P[Oi@Pe0W?oZOj@Qe0V?oZOj@Qe0V?nZOk@Re0U?mZOl@Se0T?mZOl@Se0U?kZOl@Ue0T?kZOl@Ve0S?iZOn@We0R?iZOn@We0S?hZOm@Xe0S?gZOo@Xe0Q?hZOo@Xe0R?gZOn@Ye0R?fZOo@[e0P?eZOPA[e0Q?dZOo@\\\\e0Q?dZOPA[e0P?eZOPA[e0Q?cZOPA]e0P?cZOPA^e0P?aZOPA_e0P?aZOQA^e0P?aZOPA_e0P?`ZOQA`e0o>`ZOQA`e0P?_ZOPAbe0o>^ZOQAbe0o>^ZOQAbe0o>^ZORAae0n>^ZOSAbe0m>^ZOSAbe0m>^ZOSAce0m>\\\\ZOSAde0m>\\\\ZOSAde0m>\\\\ZOSAde0m>\\\\ZOSAde0m>\\\\ZOSAde0m>\\\\ZOSAee0l>[ZOTAee0m>ZZOSAfe0m>ZZOSAfe0m>ZZOTAee0l>[ZOTAee0l>[ZOTAfe0k>ZZOUAfe0k>ZZOUAfe0k>ZZOUAfe0l>YZOTAge0l>YZOTAge0l>YZOTAhe0k>XZOUAhe0k>XZOUAhe0k>XZOUAhe0k>XZOUAie0j>WZOVAie0j>WZOVAie0k>VZOUAke0j>UZOVAke0j>UZOVAke0j>UZOVAle0i>SZOXAme0h>SZOXAme0h>SZOXAne0g>RZOYAne0g>RZOYAne0g>RZOYAne0g>RZOYAoe0g>PZOYAPf0g>PZOYAPf0g>PZOYAQf0f>oYOZAQf0f>oYOZAQf0f>oYOZARf0e>nYO[ARf0e>nYO[ARf0e>mYO\\\\ATf0c>lYO]ATf0c>lYO]ATf0c>lYO]ATf0d>kYO\\\\AVf0c>jYO]AVf0
c>jYO]AVf0c>jYO]AWf0b>iYO^AWf0b>iYO^AWf0b>iYO^AWf0b>hYO_AYf0`>gYO`AYf0`>gYO`AYf0`>gYO`AZf0_>fYObAYf0^>gYObAYf0^>gYObAZf0^>dYOcA\\\\f0]>dYOcA\\\\f0]>dYOcA]f0\\\\>cYOdA]f0\\\\>cYOdA^f0[>bYOeA^f0[>bYOeA_f0Z>aYOfA_f0Z>`YOgAaf0X>_YOhAaf0X>_YOhAbf0W>^YOiAbf0W>^YOiAcf0V>]YOjAcf0V>]YOjAdf0U>\\\\YOkAef0T>[YOlAef0T>[YOlAff0S>ZYOmAff0S>YYOnAgf0R>YYOnAhf0Q>XYOoAhf0Q>XYOoAif0P>WYOPBif0P>WYOPBjf0o=VYOQBjf0o=VYOQBkf0n=UYORBkf0n=UYORBkf0n=UYORBlf0m=TYOSBlf0m=TYOSBmf0l=SYOTBmf0l=SYOTBmf0l=RYOUBof0k=PYOUBPg0k=PYOUBPg0k=PYOUBQg0j=oXOVBQg0j=oXOVBRg0i=nXOWBRg0i=nXOWBRg0i=nXOWBSg0h=mXOXBSg0h=mXOXBSg0h=nXOWBSg0h=mXOXBSg0h=mXOXBSg0h=mXOXBTg0g=lXOYBTg0g=lXOYBUg0f=kXOZBVg0e=jXO[BVg0e=jXO[BWg0d=iXO\\\\BWg0d=iXO\\\\BXg0c=hXO]BYg0b=gXO^BZg0a=fXO_B[g0`=eXO`B[g0`=eXO`B\\\\g0_=dXOaB]g0^=cXObB^g0]=bXOcB^g0]=cXObB^g0]=bXObB`g0]=`XOcBag0\\\\=_XOdBag0\\\\=_XOdBbg0[=^XOeBcg0Z=]XOfBcg0Z=]XOfBdg0Y=\\\\XOgBdg0Y=\\\\XOgBeg0X=[XOhBfg0W=ZXOiBfg0W=ZXOiBgg0V=YXOjBgg0V=ZXOiBgg0V=YXOjBhg0U=XXOkBhg0U=XXOkBig0T=WXOlBig0T=WXOlBjg0S=VXOmBjg0S=VXOmBkg0R=UXOnBkg0R=UXOnBlg0Q=TXOoBmg0P=TXOoBlg0Q=TXOoBmg0P=SXOPCmg0P=SXOPCng0o<SXOPCmg0P=SXOPCng0o<RXOQCog0n<QXOQCPh0o<PXOQCQh0n<oWORCQh0n<PXOQCQh0n<oWORCQh0n<oWORCRh0m<nWOSCRh0m<nWOSCSh0l<mWOTCSh0l<mWOTCTh0k<lWOUCUh0j<lWOUCTh0l<kWOTCVh0k<jWOTCWh0l<iWOTCXh0k<hWOUCXh0k<hWOUCYh0j<gWOVCYh0j<gWOVCZh0i<fWOWCZh0i<fWOVC\\\\h0i<dWOWC\\\\h0i<dWOWC]h0h<cWOXC^h0g<bWOYC^h0g<bWOXC`h0g<`WOYC`h0g<`WOYCah0f<_WOZCah0f<_WOZCbh0e<^WO[Cbh0e<^WOZCdh0e<\\\\WO[Ceh0d<[WO\\\\Ceh0d<[WO\\\\Cfh0c<ZWO]Cfh0c<ZWO]Cgh0b<YWO^Cgh0b<YWO^Chh0a<XWO_Cih0`<WWO_Cjh0a<VWO_Ckh0`<UWO`Ckh0`<UWO`Clh0_<TWOaCmh0]<TWOcClh0]<SWOdCnh0[<RWOeCoh0Z<QWOfCoh0Z<QWOfCPi0Y<PWOgCQi0X<oVOhCQi0X<oVOhCRi0W<nVOiCSi0V<mVOjCTi0U<lVOkCUi0T<jVOmCVi0S<jVOmCWi0R<iVOnCXi0Q<hVOoCYi0P<gVOPDZi0o;fVOQD[i0n;eVORD[i0n;dVOSD]i0l;cVOTD^i0k;bVOUD^i0k;bVOTD`i0k;`VOUDai0j;_VOVDai0j;^VOWDci0h;]VOXDci0h;]VOXDdi0g;\\\\VOYDdi0g;\\\\VOXDfi0g;ZVOYDfi0g;ZVOYDgi0f;XVO[Dhi0d;YVO\\\\Dhi0c;XVO\\\\Dii0d;WVO\\\\Dii0d;WVO\\\\Dji0c;VVO]Dji0c;VVO\\\\Dli0c;TVO]Dli0c;TVO]Dl
i0c;SVO]Dni0c;RVO]Doi0b;QVO^Doi0b;QVO^Doi0b;QVO]DPj0c;PVO]DPj0c;PVO]DQj0b;oUO^DQj0b;oUO^DQj0b;oUO^DQj0b;oUO^DRj0a;nUO_DRj0a;nUO_DRj0a;nUO^DSj0b;mUO^DTj0a;lUO_DTj0a;lUO_DTj0a;kUO`DVj0_;jUOaDVj0_;jUOaDVj0_;jUOaDVj0_;jUOaDWj0^;iUObDWj0^;iUObDWj0^;iUObDXj0];hUOcDXj0];hUOcDXj0];hUOcDYj0\\\\;gUOcDZj0];fUOcDZj0];fUOcDZj0];fUOcD[j0\\\\;eUOdD[j0\\\\;eUOdD[j0\\\\;eUOdD\\\\j0[;dUOeD\\\\j0[;dUOdD]j0\\\\;cUOdD]j0\\\\;cUOdD^j0[;bUOeD^j0[;bUOeD^j0[;bUOdD_j0\\\\;aUOdD`j0[;`UOeD`j0[;`UOeD`j0[;`UOdDaj0\\\\;_UOdDaj0\\\\;_UOdDaj0\\\\;_UOcDbj0];^UOcDcj0[;^UOeDbj0[;^UOdDcj0\\\\;]UOdDcj0\\\\;\\\\UOeDdj0[;\\\\UOeDdj0[;\\\\UOeDdj0[;\\\\UOeDej0Z;[UOfDej0Z;[UOeDfj0[;ZUOeDfj0[;ZUOeDfj0Z;[UOfDej0Z;[UOfDej0Z;ZUOgDfj0Y;ZUOgDgj0X;YUOhDgj0X;YUOhDgj0X;YUOhDgj0X;YUOgDhj0Y;WUOhDij0X;WUOhDij0W;XUOiDhj0W;XUOiDhj0W;WUOjDjj0U;VUOkDjj0U;VUOkDjj0U;VUOkDjj0U;VUOkDjj0U;UUOlDkj0S;VUOlDkj0T;UUOlDkj0T;UUOlDkj0T;UUOlDlj0S;TUOmDlj0S;TUOlDmj0T;SUOlDmj0T;SUOlDmj0S;TUOmDlj0S;TUOlDmj0T;SUOlDmj0T;SUOkDnj0U;QUOlDoj0T;QUOlDoj0T;QUOkDPk0U;PUOkDQk0T;oTOlDQk0T;oTOlDQk0S;PUOlDQk0T;oTOlDQk0T;oTOlDQk0T;oTOlDQk0T;oTOlDQk0T;oTOlDQk0T;oTOlDQk0T;oTOkDRk0U;nTOkDRk0U;mTOlDSk0T;mTOlDSk0T;mTOlDSk0S;nTOmDSk0R;mTOnDSk0R;mTOmDTk0S;lTOmDTk0S;lTOmDTk0S;lTOmDTk0S;lTOlDUk0T;kTOlDUk0T;kTOlDUk0T;kTOlDUk0S;lTOlDUk0T;kTOlDUk0T;kTOkDWk0T;iTOlDWk0T;iTOlDWk0T;iTOkDXk0U;hTOkDXk0U;hTOkDXk0T;iTOlDWk0T;hTOmDXk0S;hTOmDXk0S;hTOlDYk0T;gTOlDZk0S;fTOmDZk0S;fTOmDYk0T;gTOlDYk0S;hTOmDXk0S;hTOmDXk0S;hTOmDXk0S;gTOnDXk0S;hTOmDXk0S;hTOmDXk0R;iTOnDWk0R;iTOmDXk0S;hTOmDWk0T;iTOlDWk0T;hTOmDXk0S;hTOmDXk0S;hTOmDXk0R;iTOnDVk0S;jTOmDVk0S;jTOmDVk0S;iTOnDWk0R;iTOnDVk0S;jTOmDVk0S;jTOlDWk0S;jTOmDVk0S;jTOmDUk0T;kTOlDUk0T;kTOlDUk0T;kTOkDVk0U;iTOlDVk0U;jTOkDVk0T;kTOkDVk0U;jTOkDUk0V;kTOiDVk0W;jTOiDVk0W;jTOiDVk0W;jTOhDVk0Y;jTOgDVk0Y;jTOgDVk0X;kTOgDUk0Z;jTOgDVk0Y;jTOgDVk0Y;jTOgDUk0Z;kTOeDVk0[;jTOeDUk0\\\\;kTOdDUk0\\\\;kTOdDUk0[;lTOeDTk0[;lTOeDTk0[;lTOeDTk0[;lTOeDTk0[;lTOeDTk0Z;mTOfDSk0Z;nTOeDRk0[;nTOdDSk0\\\\;mTOdDSk0\\\\;mTOdDSk0[;nTOeDSk0Z;mTOfDSk0Z;mTOfDSk0Z;mTOfDSk0Z;mTOfDSk0Y;
nTOgDRk0Y;nTOgDRk0Y;nTOgDRk0Y;nTOfDSk0Z;nTOeDRk0[;nTOeDRk0Z;oTOfDQk0Z;oTOfDQk0Z;oTOfDQk0Z;oTOeDRk0[;oTOdDQk0[;PUOeDPk0[;PUOeDPk0[;QUOcDPk0];PUOcDQk0\\\\;PUObDQk0];PUOcDPk0];QUObDoj0^;QUOaDPk0_;PUOaDPk0_;QUO`Doj0_;RUO`Doj0`;RUO_Dnj0a;RUO_Dnj0a;SUO]Dnj0c;RUO]Dnj0c;SUO\\\\Dmj0c;TUO\\\\Dmj0d;TUO[Dlj0e;TUO[Dmj0d;SUO\\\\Dmj0c;TUO\\\\Dmj0d;TUO[Dmj0d;SUO\\\\Dmj0d;SUO\\\\Dmj0c;UUO[Dlj0e;TUO[Dmj0d;SUO[Dnj0d;TUO[Dlj0e;TUOZDnj0d;TUO[Dlj0e;TUOZDmj0f;TUOXDmj0g;UUOWDmj0h;UUOUDlj0j;YUOmClj0S<g1O0O101N10000O2O000O2O00001N1000001N10001O0O101O000O101O001O1O0O2O001O1O1O0O2O1O1O1O1O1N2O1O1O1O1N2O1O1O2N1O1N3N1O2N1O1N3N1O1O2M2O1O1O1O1N2O1O1O1N3N1N3N2M3M2N3M4L3L4M4K7I7J7H7I7I7H8G:Di0VOd1cM_S[1\"}}, {\"image_id\": 99, \"category_id\": 1, \"bbox\": [1061.0, 398.0, 489.0, 543.0], \"score\": 0.9999983906745911, \"association_id\": 4, \"light\": [-2.299370288848877, -1.6282846927642822, 2.284679412841797, 1.5237293243408203], \"segmentation\": {\"size\": [1536, 1555], \"counts\": \"^[ha16i_12O0O10001O0O101O0000001O0O100000001O0000000000001O000000000O10001O00000000000000000000001O00000000000000000000001O00001O001O001O001O00000010O0001O001O000010O000001O01O0001O0001O01O0000001O01O0000001O01O000001O00000000001O00001O01O0001O000000000001O000001O000001O000001O0000000000001O000000000000001O000000001O000000000000001O0000000000001O000001O01O0000000001O0001O0000000001O01O0000001O01O0000000010O00000010O000001O01O0000001O0001O01O00001O01O000001O01O0000010O00001O0000010O0001O01O010O00010O0000010O00001O2O0O001O01O01O01O0001O1O001O1O1O1O10O01O001O0O2O1N2MiWi01RhVOd0]O;G5K4L4K5K5J6I6M4M3iiNhMbn0Z2YQOlMen0V2RQOSNkn0o1oPOXNnn0k1nPOXNPo0j1`nNnMbM<jS1j1XnN_NbMIST1k1UnNhN_MAXT1k1SnNnN]MZO]T1n1mmNS1oQ1UOemNS1VR1XOZmNR1_R1]OalN[1XS1UOVlNT1eS1YOmkNm0nS1e5M3M3L5LgF]lN[6^S1hIilNS6QS1QJUmNk5fR1XJ`mNd5\\\\R1_JfmN`5TR1gJgmN]5TR1iJgmN[5TR1kJfmNZ5VR1mJcmNW5XR1[4L4N2N2N2O1N3N1N2N2N20001N100000000O1N2O1N101N2O1O1O01O1N2O0N3N2M3NO0O1^N^BfPOb=\\\\o0lBSPOb:dR1S1:M3O1O2N2M3K4N3N2N2O10O10000000000000000O2O001O1O002N2N3M4L3M3M2N2N2
N3M4L4M2M2N3M2N2N2N2N101N1O1O010O00100O010000O101O5K3M2O1Nf0ZO00O100O10O01000O100O1000O10000O2N5K9Hk0TO9G5L2L3N1N2O1N2O1N101OgJ^oNSKbP1b4RPOVKno0c4cPOSK]o0f4SQOPKnn0i4aQOoJ_n0m4iQOnJXn0n4nQOPKRn0m4SROPKom0l4VROQKkm0k4\\\\ROPKgm0k4`ROPKcm0k4fROmJ^m0n4kROkJXm0P5PSOjJSm0R5SSOhJSm0n4XSOjJUm0a4\\\\SOTKZm0j1nnNiLd4h0`m7\"}}, {\"image_id\": 99, \"category_id\": 1, \"bbox\": [544.0, 411.0, 248.0, 345.0], \"score\": 0.9999997615814209, \"association_id\": 1, \"light\": [-2.2042348384857178, -2.8448379039764404, 2.0733160972595215, 2.7537271976470947], \"segmentation\": {\"size\": [1536, 1555], \"counts\": \"\\\\f`i03m_11O2N1O1O001O1O001O1O1O1O001O1O001O1O001O001O001O00001O001O0000001O001O00001O00001O00000010O0001O01O0000001O01O0001O0000010O0000001O000000000010O00000001O00001O001O1O01O0001O00001O00001O00001O000001O00000001O0000001O0000000001O0001O00001O00000001O00000001O000000000000000001D;L4L4H8B>[Oe0K5mLQNRhNR2fW1YNRhNl1gW1^NRhNf1fW1fNQhN_1iW1jNQhNY1TV1e0fiN^OlU1S1niNPOkU1_1fiNhNPV1i5I8G8J6M3M3N3N1N2N2N200O2O00000O100O0100dNnjNjGRU1T8VkNfGjT1W8]kNeGcT1X8ckNeG]T1X8hkNeGXT1Z8lkNdGTT1[8PlNaGQT1^8YlNWGiS1i8g1O1O1O100O1O1O1O1O1N2O100O1O100O100O10000O1O1O2M2N2oH\\\\hNe5fW1WJ`hNe5aW1YJihN^5XW1_JohN]5RW1aJSiNZ5nV1dJViNZ5kV1aJ_iNX5nX1G3L3dLjeNj0WZ1SOoeNh0SZ1VORfNe0PZ1YOYfN<kY1B]fN5fY1\\\\NUeNn0Z1a0QZ1_ORfN=oY1CUfN8mY1FZfN3gY1M\\\\fN0dY10^fNMeY11^fNJeY15`fNDdY1;cfNSOhY1l0h2N100O2O1N2O1N2N2N2OO01N101O1O3J7IlngS1\"}}, {\"image_id\": 99, \"category_id\": 1, \"bbox\": [102.0, 373.0, 308.0, 478.0], \"score\": 0.9999998211860657, \"association_id\": 2, \"light\": [-2.1599862575531006, -1.9142029285430908, 2.1336424350738525, 1.8152872323989868], \"segmentation\": {\"size\": [1536, 1555], \"counts\": 
\"Tii42n_11N2N3M2O001N10001N10000O1000000O10000000000000000000O10000000000O10001O01O00000000001O00001O0O101O001O0000001O001O001O00000O2O00001O0000001O0000001O00001O0000001O001O001O00001O001O001O001O001O00001O1O1O1O1O000010O01O1O001O001O01O01O0010O01O0001O01O01O010O01O01O000001O01O000000001O000010O01O0000001O000000000000001O0000000000000000001O0000000010O00000001O001O001O0000001O1O1O1O1O1O001O0000PL^OchNa0UW12`hNN[W1;bhND\\\\W1a0ahN_O]W1e0ahN[O]W1i0bhNVO\\\\W1o0ahNQO^W1S1_hNmN^W1X1`hNhNYW1`1ghN_Non0ZOVVOa2iJUNfn0MUVOS2TKPNcn09oUOk1]KkMcn0d0fUOd1eKhMdn0h0cUOd1fKdMfn0j0bUOg1eK^Mhn0m0bUOg1eK[Mhn0T1\\\\UOd1nKUMfn0_1WoNVN]10i1Y3TOjL_n0`2coN^Na2\\\\2BaLYn0j2YoNeNb0bNj0d3S1YLXn0Q3nnN4[1f0`1SLXn0e6ooN\\\\MnR1g2llN^MRS1e2glNcMUS1_2elNhMXS1[2`lNlM^S1\\\\7O1O2N1O3M5K5K3M2N4K7J6J4M1N1O1O1O001O00001O0000000000000001O001O001O1O1O1O3M4SCdmNn;bR1eChmNX<RS1N1O2N1O001O1O1O3M2N1O2N001O1O002N1O1O2fKikNgLYT1d2flNPM]S1e2ZmNdJoN>jS1a4doNVK^P1a4ooNYKSP1`4UPO\\\\Koo0^4WPO_KPP1W4VPOeKWP1k3RPOoKTP1i3TPORLoo0h3WPOSLlo0k3XPOPLlo0l3V6N3L;G<D7J7H>A7H8I6I7H>]O[nde1\"}}, {\"image_id\": 99, \"category_id\": 1, \"bbox\": [1058.0, 419.0, 191.0, 255.0], \"score\": 0.9994995594024658, \"association_id\": 3, \"light\": [-1.0563969612121582, -2.1087543964385986, 0.9683046340942383, 2.053906202316284], \"segmentation\": {\"size\": [1536, 1555], \"counts\": 
\"Sdca12m_12O1O1O001O0000001O000O11O01O000000001O000000000000001O0000001O0001O0000000001O00000000000000001O0001O00001O0000001O1O1O001O001O1O1O1O1N10R_1d0X`N6K2O0O100O1TaNROf^1m0[aNSOc^1o0\\\\aNROb^1o0_aNQO_^1Q1aaNoN]^1S1caNmN\\\\^1S1daNoNZ^1Q1gaNoNX^1P1jaNPOT^1Q1laNPOR^1P1PbNPOo]1o0SbNQOl]1n0VbNROi]1l0[bNSOd]1k0_bNVO_]1j0bbNVO\\\\]1j0gbNUOW]1k0kbNUOT]1j0nbNVOQ]1i0QcNWOn\\\\1i0ScNWOk\\\\1j0VcNVOh\\\\1k0YcNVOd\\\\1k0]cNUOa\\\\1l0`cNTO_\\\\1k0ccNUO\\\\\\\\1k0ecNUOY\\\\1k0icNVOT\\\\1k0mcNUOQ\\\\1l0PdNTOo[1l0SdNSOk[1n0VdNROh[1o0YdNROd[1n0^dNRO`[1n0bdNRO\\\\[1o0edNQOY[1Q1hdNnNU[1T1ldNlNQ[1W1odNjNnZ1W1SeNiNkZ1X1VeNhNgZ1Y1[eNgNcZ1X1`eNhN^Z1Y1ceNgN\\\\Z1Y1eeNgNYZ1Z1ieNeNVZ1[1keNeNRZ1\\\\1PfNdN`X12bfN[1o0dNWX1<dfNP1W1dNmW1h0ffNe0^1cNhW1Q1]fNc0l1\\\\NcW1[3_hNeL_W1\\\\3bhNeL\\\\W1Z3fhNfLXW1Z3khNeLTW1Z3nhNfLPW1[3QiNeLmV1\\\\3TiNdLjV1\\\\3XiNdLeV1]3]iNcL`V1_3aiNaL]V1`3diNaLYV1`3hiN`LWV1`3jiN`LTV1b3liN^LSV1c3niN]LoU1e3QjN[LmU1f3TjNZLkU1g3UjNYLjU1h3VjNXLhU1j3YjNULfU1m3YjNSLfU1n3ZjNRLdU1P4\\\\jNPLbU1S4^jNlK`U1W4_jNiK_U1Z4ajNeK^U1]4ajNcK^U1a4_jN_KaU1e4\\\\jNZKdU1d5mhNcJnNIUX1o5VhNhJBXOXX1W6ggNkJOmNZX1c700O3N1N2O00100O1O0O101O0000XLahNoN`W1j0ihNSOWW1i0RiNSOnV1g0ZiNVOgV1e0`iNXO`V1e0fiNXO\\\\V1d0jiNWOYV1c0QjNdMVN<jW1i1ZjNcMVN>bW1g1hkNXNZT1\\\\OdgNj1[4gNcT1l0ekNPO_T1h0jkNTOWT1c0SlN[OoS1=XlNAmS13]lNIZe[>\"}}, {\"image_id\": 99, \"category_id\": 1, \"bbox\": [882.0, 393.0, 152.0, 216.0], \"score\": 0.9998453855514526, \"association_id\": 5, \"light\": [-1.7061784267425537, -2.242403745651245, 1.6508846282958984, 2.1406469345092773], \"segmentation\": {\"size\": [1536, 1555], \"counts\": 
\"\\\\b[Y15j_12O1O000O1000000O1000001N10000000000N2O2O00000000001O1O001O001O00001O001O0000001O00000000000000000000001O00000000@i`N7W_1Gm`N7T_1Dh`N06<R_1DQaN;[_1OQOJTbN7]]1KkaN>=Gi]1c0TbN_OfZ1EThNh0VMCbZ1ORhN>\\\\MC_Z1=kgNOfMD`Y1DPgNm0i1IgMG_Y1CofNQ1k1CgMI_Y1ClfNV1m1]OhMJ_Y1DifNY1P2WOhML^Y1GhfNX1P2ROmMOZY1IgfN1Ao0_2UOQN2WY1K`fN1NP1X2nNWN5RY1M]fN60l0Y2fN]N;lX1Y2_iNgM_V1^2^iNbMaV1`2^iN`MaV1b2^iN_M`V1c2`iN\\\\M`V1e2_iN[M`V1g2_iNYM`V1i2_iNWM`V1l2^iNTMaV1Q3[iNoLdV1V3XiNjLhV1[3SiNeLlV1a3PiN^LPW1g3kgNTLC5bX1i3hgN]L]OIjX1l3fgNWM[X1j2bgNXM^X1i2_gNYMaX1h2[gN[MeX1S3ifNPMXY1i430O101O001O0000O1000000O100O2N1M3M3I7G9M4M2M4M3N2M3M3J5M6K5I6J5L5K>@9I7J6K4M2M3N3M4K3M7K5K9RNnaN[1^^1O0Mb0_O4L2N2M4L6K5Jdo\\\\h0\"}}, {\"image_id\": 100, \"category_id\": 1, \"bbox\": [177.0, 161.0, 84.0, 40.0], \"score\": 0.9999570846557617, \"association_id\": 3, \"light\": [-1.2137212753295898, -3.244729518890381, 1.0767147541046143, 3.098618507385254], \"segmentation\": {\"size\": [404, 639], \"counts\": \"kPV21c<000000001O00000000000O1000000000000000000000000QO3WEMg:8VEHh:f0lDZOT;S10O11OO1000000HhDTOX;h0kDYOU;g0kDYOU;f0lDZOT;f0lDZOU;e0kD[OU;e0kD[OU;f0jDZOV;f0kDYOU;g0kDYOU;g0kDYOU;g0kDYOU;g0kDYOU;g0kDYOU;g0kDYOU;f0=00O10O1000001O2N1]DXOX;R101O00000000O1DkDXOU;h0<N101O1O1M300N2O1O1O2O1O001O1NTod4\"}}, {\"image_id\": 100, \"category_id\": 1, \"bbox\": [323.0, 172.0, 164.0, 67.0], \"score\": 0.9990130066871643, \"association_id\": 2, \"light\": [-2.9685521125793457, -1.4602298736572266, 2.8674001693725586, 1.338233470916748], \"segmentation\": {\"size\": [404, 639], \"counts\": 
\"Seo33`<10000O1000000000001O000000000000000001O1Odf6MmlH1^<3O1000000O100000000000000aN0ZF0e93YFMg93YFNf96VFJj98TFHk9;SFEn9;QFEo9;QFEo9<PFDP:<PFDP:<PFDP:<PFDP:<PFDP:;QFEo9<PFDP:<PFEo9;QFEn9=QFCo9>PFBP:?PF@P:a0oE_OQ:b0nE^OR:c0mE]OS:g0iEYOW:i0gEWOY:j0fEVOZ:l0dETO\\\\:m0cESO]:m0cESO\\\\:o0cEQO]:o0cEQO]:P1bEPO]:R1bEnN^:R1cEmN]:T1a03nDhNb:f1O1O0000001OO010O10O10L3@a0O010O100000O100000001O0000001O001O0001O0O100000000O101N1O100O10001O00000001O1TEnNT:T1dETOY:o0cESO]:b11O0000000O100H`E[Na:e1_E[Nb:d1^E\\\\Nb:d1]E]Nd:b1\\\\E^Nf:a1YE`Nf:g12N1_N`El0a:nNkEk0V;J4K2N2LQhk1\"}}, {\"image_id\": 100, \"category_id\": 1, \"bbox\": [2.0, 167.0, 82.0, 55.0], \"score\": 0.9999991059303284, \"association_id\": 1, \"light\": [-1.7245115041732788, -1.7713325023651123, 1.661803126335144, 1.6027673482894897], \"segmentation\": {\"size\": [404, 639], \"counts\": \"SP11b<2O00000O10000000O11O000000000O1000000dN9kEGT:=iECU:h0cEWO]:o0]EQOb:W1SEjN0Lm:c111O001OO100O1CVEQOi:m0\\\\EoNe:o0]EQOc:n0^EROb:m0_ESOa:m0_ESOb:l0]EUOc:k0]EUOc:k0]EUOc:l0]ESOc:m0]ESOc:m0]ESOc:n0\\\\EROd:n0\\\\EROd:n0\\\\EROd:n0\\\\EROd:n0\\\\EROe:m0[ESOe:m0[ESOe:m0[ESOe:l0\\\\EUOc:k0]EUOc:k0]EUOc:j0^EVOb:j0^EVOb:j0^EVOb:k0]EUOc:l0\\\\ETOd:l0]ESOc:P1[EnNf:W1SEhNo:b1M1O1O0O100O10000000O1O1A?N2O1L4M3K6Mfij6\"}}, {\"image_id\": 100, \"category_id\": 1, \"bbox\": [236.0, 161.0, 103.0, 76.0], \"score\": 0.9918889999389648, \"association_id\": 5, \"light\": [-1.9616875648498535, -1.9224305152893066, 1.9325679540634155, 1.7314825057983398], \"segmentation\": {\"size\": [404, 639], \"counts\": \"eZm22b<1N2O00001N1000000000001O00000000000O1000000000000000001O0000000000001O0000000000000000000000000000000000000000000YNGSG9l8ISG7m8JRG6l8OQG0o84oFKm8?iEAg00_9o0_FQO`9R1^FnNb9U1[FkNe9X1XFhNh9Z1VFeNk9^1SFaNm9b1PF]NQ:h1kEVNW:=TFg0i9VO[Fj0f9RO]Fm0e9PO]Fo0e9kN`FT1a:O1O2XO_DN2Ma;1mDOV;O]D=e;_O^Da0a;_O_Da0l;NEBZD002N7]<J1O02O00000000O100000000000000O2OZTf3\"}}, {\"image_id\": 100, \"category_id\": 1, \"bbox\": [122.0, 164.0, 40.0, 43.0], \"score\": 
0.909229576587677, \"association_id\": 4, \"light\": [-1.98431396484375, -1.8888356685638428, 1.899726390838623, 1.823244571685791], \"segmentation\": {\"size\": [404, 639], \"counts\": \"dZ`11b<2O000000000VOOQE1j:=mDCQ;c0eD[O02Z;R1O1O100OMfDnNZ;n0jDROV;l0lDTOT;l0lDTOT;o0iDQOW;T1eDkN[;W10O010FlDSOU;m091O3\\\\DTOZ;S1O1000000000010O00O2CnDUO];>b0GePl5\"}}, {\"image_id\": 101, \"category_id\": 1, \"bbox\": [694.0, 393.0, 82.0, 130.0], \"score\": 0.9994164705276489, \"association_id\": 1, \"light\": [-1.4566268920898438, -2.506281852722168, 1.4473121166229248, 2.3383705615997314], \"segmentation\": {\"size\": [768, 1024], \"counts\": \"XPY`01og000001O000O10000000000000000000000O10000000000000001O0000000000000000000SO3gYOMSf0:lYOFne0`0RZO@he0h0VZOXOhe0k0WZOUOfe0o0YZOQOee0R1ZZOnNce0V1\\\\ZOiNed08_[OR1KfNbd0c0W[Oo04^N^d0j2^[OVM`d0o2^[OPM_d0U3`[OiL`d0Y321O2M3N2O000O1O1N2O1M4L3N200O2OO1000000O100O1M3N2J7G9C>Eg0]O9G9FSci5\"}}, {\"image_id\": 101, \"category_id\": 1, \"bbox\": [811.0, 620.0, 79.0, 95.0], \"score\": 0.999973714351654, \"association_id\": 6, \"light\": [-2.4981460571289062, -1.3832125663757324, 2.419283151626587, 1.2242450714111328], \"segmentation\": {\"size\": [768, 1024], \"counts\": \"WnPc02mg02O00000000000000001N100000000000000000000000000000000000000000000lMd0e[O[OVd0S2aZOmM\\\\e0]2N2O1O002OO00010000O10000O10001N1000001O000000000O10000O10O10O0100O101N1O3M3M2K6J6_Oa0iNfYOD49NFhbT3\"}}, {\"image_id\": 101, \"category_id\": 1, \"bbox\": [370.0, 426.0, 169.0, 259.0], \"score\": 0.9999969601631165, \"association_id\": 4, \"light\": [-1.6728720664978027, -2.047834634780884, 1.6101090908050537, 1.9275100231170654], \"segmentation\": {\"size\": [768, 1024], \"counts\": 
\"WUf82ng00O10001O00000000000000000000000O10000000000000000000001O000000000000000000000O100000000000001O00000000000000000000000000O10000000000000000000000000000000000000000000hK;U@Ec?f0Z@ZOT?Y1k@gNd>k1[AUNa>P2^APN`>T2^AlM]>Z2bAfMZ>_2eAaMY>b2gA]MV>g2iAYMU>i2SAXMnMOn`0k2PA`MkMESa0n2m@cMoM^ORa0S3]@SN]NjNTa0V3k_OeNoNUNUa0]3[_OjN_OiMUa0^6j^ObIUa0_6l^O`ITa0a6k^O_ITa0b6l^O^ITa0b6l^O^ITa0b6l^O^ISa0c6n^O\\\\IRa0d6n^O\\\\IQa0d6P_O[IPa0f6P_OZIn`0h6S_OWIl`0j64100O2N100O2M2J7G8L5K5M3O0O100O101O1OO100O100000000O10000O100000000O101N100O1N2N3K5H9C=K4H8G:J6G>mNQ1H5K6J8I9F8H7F>CPY[;\"}}, {\"image_id\": 101, \"category_id\": 1, \"bbox\": [263.0, 625.0, 80.0, 94.0], \"score\": 0.9999443888664246, \"association_id\": 2, \"light\": [-2.019892930984497, -1.8540269136428833, 1.934251308441162, 1.6932622194290161], \"segmentation\": {\"size\": [768, 1024], \"counts\": \"[nU62ng01N1000000000000000000000001O0O10000000000000000000000000000001O00lM=h[OFoc0Z2N2O0000100O1O1O1O10000O100O10000O10000000000000000000000001O001O1O0000001O0O2O1N2M4K6F>mNnYOSOejn?\"}}, {\"image_id\": 101, \"category_id\": 1, \"bbox\": [747.0, 432.0, 139.0, 211.0], \"score\": 0.9999979138374329, \"association_id\": 3, \"light\": [-0.9671781659126282, -3.2174465656280518, 0.9112351536750793, 3.1319632530212402], \"segmentation\": {\"size\": [768, 1024], \"counts\": \"mk`a02mg010001O00O1001O000000000O101O0000000000000000001O00000O10000000000000001O000000000000000000000000000000000000000000000001O00000O2OCMmXO3Qg04jXOLSg08mXOGQg0;oXOEof0>QYO@nf0b0RYO^Olf0d0TYO\\\\Ojf0f0VYOZOhf0h0XYOXOff0j0ZYOUOdf0m0]YOSO`f0o0aYOQO]f0P1dYOPOVf0T1lYOlNne0Y1SZOgNhe0\\\\1ZZOdN[e0d1hZO\\\\NRe0g1Q[OYNld0g1d002O1O0N3OPNgNT\\\\O:7m0cc05V\\\\OKlb0oNS]O_1IBTc0R3011N2N3M3N2N2O1O1O1N2001O1O1O1O1O103M1O2O1O0O1100O1N4N1N1N30N0O10g0ZOhMS]OXOob0=a]OZOcb0=k]O[OWb0`0o]O\\\\OTb0VOU]O5m0c0oa0QO_]O1j0l0Rc0iNW]OU1od0K7H5L6I8DflV3\"}}, {\"image_id\": 101, \"category_id\": 1, \"bbox\": [896.0, 464.0, 119.0, 178.0], \"score\": 0.999992847442627, \"association_id\": 
5, \"light\": [-2.3406729698181152, -1.076428771018982, 2.2003321647644043, 0.8449060916900635], \"segmentation\": {\"size\": [768, 1024], \"counts\": \"mcPe03mg0000O101O000000000000000000001N1000000000000000000O100000000000000000000000000000000O10000001O000000E;N2O1N2N2N2O1O1O1O1O1LNVYOVOgf0m0ZYOQOYc06Y]OOa1n0mMmNWc0;j\\\\O=h1`0XNgNTc0d1Y^OIbNcNTc0j1m]OJPO\\\\NRc0P2e]OHYb0R3M20O1N3O0N3L4O1N3M3M2M4N2K5N2M3N2N2N2N2O1O100O1000000001O00100O010O12O1O0N2N100010N1O10XOa[OmMad0h2k\\\\OfLea0d2U]ObMMK<0C`0Pc0g1`]OiMEN3P1nb0Q1j]OnMWOR1dc0NoZ6\"}}, {\"image_id\": 101, \"category_id\": 1, \"bbox\": [482.0, 623.0, 143.0, 100.0], \"score\": 0.9404515624046326, \"association_id\": 7, \"light\": [-2.134822368621826, -1.3726140260696411, 2.080300807952881, 1.239915132522583], \"segmentation\": {\"size\": [768, 1024], \"counts\": \"_VZ;2mg010000O100000000000000000000O10000000000000000000000O10000000000000000O100000000000000000000O10000000001O0000000000000000001O000000000000000000000000000000000000000000000001O00eN[1N2O1N2O1NOmYOYNQf0i1PZOVNne0l1O4_Oa0M3O100N2O1O1N2O1O10000O1000000O1001OO100000000O11O000000001O1O001O003M1O4_MV[Od1ge0L2N2N2N1O2N2MnbZ9\"}}, {\"image_id\": 102, \"category_id\": 1, \"bbox\": [161.0, 755.0, 93.0, 158.0], \"score\": 0.9999867081642151, \"association_id\": 4, \"light\": [-0.8379345536231995, -3.114539384841919, 0.74271559715271, 2.989703893661499], \"segmentation\": {\"size\": [1371, 2048], \"counts\": \"X^h63hZ10O101O0O10000000000000O1000O100000000000000O1000O100000000000O10O1000000000O1000O10000000_K2TnNOkQ13TjN0b0Ok1N^S16QjNV1f0[OOXO[U1Q2]jNmN7RO[U1T2RjNUOb0gN\\\\U1i3bjNWL_U1h3ajNXL_U1i3`jNWL`U1i3ajNULbU1i3^jNWLdU1g3[jNZLeU1f3[jNZLeU1e3\\\\jN[LeU1c3\\\\jN]LdU1b3]jN^LdU1a3\\\\jN_LdU1a3\\\\jN_LcU1b3^jN\\\\LcU1e3UjNbLkU1b3kiNbLWV1Q4O2OO0100O0100O2O0O1O101N2N2M3N2M3N3M2WN`iNTObV1g0iiNPOZV1l0QjNjNSV1Q1RjNkNRV1k0ZjNoNjU1g0`jNSOfU1d0_2@fUQ[2\"}}, {\"image_id\": 102, \"category_id\": 1, \"bbox\": [995.0, 682.0, 224.0, 455.0], \"score\": 1.0, \"association_id\": 
3, \"light\": [-2.319035053253174, -2.2324140071868896, 2.289306879043579, 2.1254732608795166], \"segmentation\": {\"size\": [1371, 2048], \"counts\": \"bXeY16dZ11O2N101N1O1O2M2O1K5O1M3O1O100O10000O10000O1000000O10000O10000O1000000O10000O10001N100O100O2O00001N100O101O0O100O1O1O1O2O00O10O1gIgNPSOY1jl0WOlROi0nl0@oRO`0ol0DoRO;]j0lNXROm0Y37Sj0]O]RO?_34li0J_RO5c31ki01]RO2e3Mli07ZROOh3Jki0=YROLj3Gki0a0XROKj3Dmi0f0UROGo3Bii0n0QRODU4^Ohi0T1iQOD_4XOfi0Z1bQOCh4SOei0S4ZVOlKfi0U4ZVOkKei0W4ZVOiKfi0X4YVOhKgi0Z4WVOfKii0]4TVOcKki0a4\\\\SOVK_O9Um0d4RSO^KENYm0e4mROdKGG[m0i4gROgKK@^m0l4]ROoK0VOcm0Z8YROfGfm0]8XROdGgm0_8VROaGjm0a8TRO_Glm0c8QRO^Gom0e8nQO\\\\GQn0h8kQOXGUn0l8hQOSGXn0Q9dQOoF\\\\n0T9cQOjF]n0k:O00001N1000001N10000O2OO1000O1000000O1O1O10QOlQOWESn0j:mQOVESn0j:mQOVERn0k:nQOUERn0k:nQOUERn0k:nQOTERn0m:nQOSERn0k;0OOTOoQOREPn0o:PROQEom0m;O2N2N1O2N1OROZROkDem0S;]ROnDam0Q;aROoD_m0o:cRORE[m0n:dROTE[m0V<O_NeROeEYm0o9TSORFll0j9YSOVFel0i9]SOWFcl0g9`SOXFal0e9aSOZF`l0e9bSOXF`l0g9bSOXF^l0g9cSOYF]l0g9dSOYF\\\\l0e9fSOZFZl0g9eSOhEoNG]m0a:dSOgEROEZm0d:eSOfETOCXm0f:eSOfEll0[:TSOeEll0Z:TSOgEkl0\\\\:SSOcEml0`:PSOaEol0a:PSO^EPm0d:oRO\\\\EQm0e:mRO[ESm0g:lROYEUm0g:jROXEXm0g:gROZEZm0f:eROYE]m0g:bROYE`m0f:_ROYEcm0g:\\\\ROXEgm0g:WROZEjm0g:TROXEmm0j:QROVEPn0j:oQOUESn0k:lQOUEUn0k:jQOTEXn0l:gQOTE[n0j:eQOVE]n0h:bQOXEbn0e:^QO[Edn0d:\\\\QOZEgn0d:YQO\\\\Ein0b:WQO]Ekn0b:UQO]Emn0b:TQO\\\\Eon0b:QQO^EQo0`:PQO^ESo0`:oPO]ETo0a:f0L3N3M3mIcoNa1aP1XNhoNc1ZP1YNloNc1VP1[NnoN_1UP1`NooN[1TP1dNooNV1UP1hNQPOo0SP1POQPOk0RP1TOPPOg0TP1WOooNe0UP1XOooNb0UP1\\\\ORPO:TP1CUPO0PP1OUPOHQP16QPODUP1:moNBXP1;koN^OaP1:doNnN[Q1f0`oNeMdQ1Q2j3I5J7I7H;D:C:EaSeR1\"}}, {\"image_id\": 102, \"category_id\": 1, \"bbox\": [507.0, 741.0, 85.0, 108.0], \"score\": 0.9979438781738281, \"association_id\": 5, \"light\": [-1.9424463510513306, -1.7909250259399414, 1.9008660316467285, 1.738189458847046], \"segmentation\": {\"size\": [1371, 2048], \"counts\": 
\"UdWe03gZ11000001O000O100000000000000000000O100O10000000000000001O00OiM6XiNEaN5TX1;UiNEfN1QX1b0RiN_OmNNoW1i0mhN\\\\OUOKlW1m0khNZOYOIkW1S1chNWOCFhW1Y1[hNWOM@fW1\\\\2YhNdMeW1^2[hNaMdW1a2\\\\hN_MbW1d2]hN\\\\MaW1f2_hNZM`W1h2_hNXM`W1j2_hNVM`W1k2`hNUM`W1k2ahNSM`W1m2`hNSM`W1m2`hNSM`W1m2bhNPM_W1P360000O100O100000000O1000000000000O100O1O1N2L4L4L5J6L4J7J5K6J6I7D<I9EPell1\"}}, {\"image_id\": 102, \"category_id\": 1, \"bbox\": [739.0, 703.0, 311.0, 228.0], \"score\": 0.9999998211860657, \"association_id\": 2, \"light\": [-2.363316774368286, -1.7990927696228027, 2.3440380096435547, 1.6445926427841187], \"segmentation\": {\"size\": [1371, 2048], \"counts\": \"[Znn01iZ12N100O2O0O100O100000000O1000000O10000000000cLNPlN3kS17PlNIlS1`0okN@nS1g0nkNZOoS1m0kkNTOST1S1hkNmNVT1Z1ekNfNUT1d1gkN\\\\NTT1n1gkNRNVT1W2ckNkMZT1Y2dkNgMZT1]2dkNcM[T1`2ckN`M\\\\T1b2bkN_M]T1c2bkN]M]T1e2bkN\\\\M\\\\T1f2ckNZM\\\\T1h2bkNYM\\\\T1k2bkNUM]T1o2`kNQM^T1R3akNnL^T1T3`kNmL_T1U3`kNkL_T1V3akNjL^T1X3`kNjL^T1W3bkNiL]T1Y3bkNgL]T1[3bkNeL\\\\T1^3ckNbL\\\\T1`3ckN`LZT1g3bkNYL[T1n3`kNSL]T1R4akNnK]T1U4bkNkK]T1W4bkNiK]T1Y4bkNgK^T1Y4bkNgK]T1[4ckNdK\\\\T1^4ckNbK\\\\T1b4bkN]K\\\\T1h4akNXK^T1l4`kNTK]T1o4bkNQK]T1Q5ckNnJ]T1S5bkNmJ]T1U5ckNjJ\\\\T1W5dkNiJ\\\\T1W5dkNiJ[T1X5ekNhJZT1Y5fkNgJYT1Z5gkNfJXT1[5hkNeJWT1\\\\5ikNdJVT1]5jkNcJUT1^5kkNbJTT1_5lkNaJTT1_5lkNaJST1`5mkN`JST1`5lkNaJST1`5mkN`JRT1`5okN`JQT1`5nkNbJPT1U60000O1000000O100000000O100000000O10000000000O1000000O10000O100000000O1000000000000O1000000000000000O1000000000000000000000000000000000O10000000000000000000000000O1000000000000000000000000O10000000000O1000000O1000000O1O100O1O1O1000000O100_OPlNVJQT1j5PlNUJPT1l5PlNRJQT1n5PlNPJQT1P6PlNoIPT1R6okNnIQT1R6okNmIRT1T6nkNkIRT1U6nkNkIRT1V6nkNhIST1X6okNfIQT1[67001O00001O00000000O1000000000000O100O1O1O2M2O1O1O100O1O1O1O1001O1O1O0000O1000000O1000000AWkNlJiT1T5WkNlJiT1T5WkNlJiT1S5XkNlJiT1T5WkNlJiT1S5XkNmJhT1S5XkNmJhT1S5XkNmJhT1S5XkNlJiT1S5XkNmJhT1S5XkNmJhT1S5XkNmJhT1S5YkNkJiT1T5WkNlJiT1T5XkNjJiT1V5>0000O2O0000001O00001O1O0O2O001O1N101O001N2
N2O1K6J5H9XOi0G9I6E;K5L4I9E;Im^gY1\"}}, {\"image_id\": 102, \"category_id\": 1, \"bbox\": [1855.0, 673.0, 105.0, 236.0], \"score\": 0.999872088432312, \"association_id\": 1, \"light\": [-2.519199848175049, -1.7358407974243164, 2.421501398086548, 1.5984529256820679], \"segmentation\": {\"size\": [1371, 2048], \"counts\": \"i[d]23^Z1c0@:J4YiNQO\\\\S1S1dlNnNQS1\\\\1olNdNiR1d1VmN^NdR1g1\\\\mNYNaR1h1amNXN]R1h1emNXNYR1h1imNXNVR1f1mmN[NPR1b1UnN^NiQ1`1[nN`NcQ1`1_nN`N`Q1_1bnNaN]Q1_1cnNbN]Q1\\\\1enNdNZQ1\\\\1gnNdNYQ1[1hnNeNVQ1\\\\1jnNeNUQ1[1lnNeNRQ1\\\\1onNdNoP1^1PoNcNoP1_1PoNaNnP1e1lmN]NPNNRT1j1imN\\\\NRNKQT1o1jmNYNSNHoS1W2gmNVNXNCfS1g2kmNmMZN\\\\OdS1Q3nmNfM\\\\NYObS1Z3lmN`M`NVOaS1g3cmNVMkNSO`S1Q4ZmNPMVOnN^S1W4WmNnLZOkN_S1[4PmNmLAhN^S1a4clNQMN^N^S1c6blN]I^S1c6blN\\\\I_S1e6alNZI_S1f6alNZI_S1f6alNZI_S1f6alNZI_S1f6alNYI`S1g6alNXI_S1h6alNXI_S1h6blNWI^S1i6blNWI^S1i6blNWI^S1i6blNWI_S1i6`lNWIaS1h6`lNVIdS1h6[lNXIgS1f6YlNZIiS1d6WlN\\\\IkS1b6UlN^IQT1[6okNfITT1W6lkNiIZT1Q6fkNoI`T1j5akNUJbT1i5^kNWJdT1f5\\\\kN[JfT1a5\\\\kN_JfT1e3[lNdLTOGcT1e1gkNVOe1TOeR1`1VlNjN\\\\1F^R1]1YoNaNiP1[1ZoNeNhP1W1a4M2O1O100O2N2M3N2O1O1O2O0N2N3L4M3N5LO3M10O20N3M1N1O100O2O10O1O0O1N2O1O8FhQe3\"}}, {\"image_id\": 102, \"category_id\": 1, \"bbox\": [1389.0, 719.0, 108.0, 179.0], \"score\": 0.9846436381340027, \"association_id\": 6, \"light\": [-1.3230714797973633, -1.9888036251068115, 1.2607073783874512, 1.831061840057373], \"segmentation\": {\"size\": [1371, 2048], \"counts\": \"\\\\]Tj1d0QZ1=VOg0I;H4K8I>B001O000000001N1000000000001O01O0002N3M1O3M=C5K:F8iiNQLRU1n4N2N100O0010O1O011M2O2N3L6J3M3N2N2M2M3M3M4I6A?\\\\Oc0F;G:F9J7I6M3N3MUgNcN]X1Y1agNjNbX1Q1_gNPOcX1l0^gNUOdX1e0_gN\\\\ObX1a0`gN_OdX1<\\\\gNEjX15UgNLmX11SgN0QY1JPgN7jY10O010NDeeN<[Z1200000O10000000000000O10001O001O3M1O010O1O001O01O00000O101N2N`mPg0\"}}, {\"image_id\": 103, \"category_id\": 1, \"bbox\": [269.0, 232.0, 83.0, 36.0], \"score\": 0.9999258518218994, \"association_id\": 1, \"light\": [-2.4573473930358887, -1.4590890407562256, 2.3914718627929688, 
1.24228835105896], \"segmentation\": {\"size\": [480, 640], \"counts\": \"W[n31o>000000000O2O000000000000000000001O000000000000001O0000000IO_A1V>LkA;NHS>b0lA^OR>i0N2O10NQBSOn=o00100O2O0000000000000000000000001O000000000000000000000000000000000000000001O001O1N2O1N2N3KchV4\"}}, {\"image_id\": 103, \"category_id\": 1, \"bbox\": [412.0, 258.0, 155.0, 45.0], \"score\": 0.9999946355819702, \"association_id\": 2, \"light\": [-2.611506462097168, -1.389829158782959, 2.5437936782836914, 1.3070571422576904], \"segmentation\": {\"size\": [480, 640], \"counts\": \"U]Q62n>001O000000001O00000000000000000000000001O0000000000000001O000001O0000000000000000000001O00000000000000000000000000001O00000000000000001O00000000001O0001O00000000YOKnA=7Gg=c0VB[Oh=m0TBROi=R1WBmNh=U102O0O1O101N10000000O1000000000000000000001O0000000000000000000000000000001O0000000000000000000000000000000000O100000000O10000O100O1O1O3MmnQ1\"}}, {\"image_id\": 103, \"category_id\": 1, \"bbox\": [30.0, 266.0, 241.0, 98.0], \"score\": 0.9492465257644653, \"association_id\": 3, \"light\": [-2.7907330989837646, -1.3343381881713867, 2.689455509185791, 1.1997389793395996], \"segmentation\": {\"size\": [480, 640], \"counts\": \"j\\\\>2m>2N2O00001O00000000000000001O000000000000000000001O0000000000001O000000000000000000000000000000000000001O000000000000000000000000000000000000000001O0000000000000001O000000000000000000000000000000000001O00000O1L^Oi0K3N1K6[Od0L5O0O1O1L4L4O101N1N2O00100O1N2O1O100O1000000O10000000000000000000001O0O2O00001O00000000000000001O00000000001O000000001O0000001O000000000O100000000000000000000O0100000000O1O100O10000O100O1O1FPDcMQ<]2PDbMP<]2RDaMo;^2:0000O100001O001O001O001O1O1O01N2N2O1N1O2O2L7I4L2O2N3M5H6L3N2M5Ine\\\\5\"}}, {\"image_id\": 104, \"category_id\": 1, \"bbox\": [529.0, 508.0, 87.0, 193.0], \"score\": 0.999996542930603, \"association_id\": 2, \"light\": [-2.5989582538604736, -1.9863722324371338, 2.5323104858398438, 1.8452094793319702], \"segmentation\": {\"size\": [768, 892], \"counts\": 
\"i]]<2mg02N100O2O000000000O100000000O1000000O10000O100000000jLHd^O7Ya02a^OO]a04b^OL]a05c^OK\\\\a07b^OJ\\\\a09b^OH\\\\a0;S]O_O26jb0<o\\\\OHIDYO7Pd0>e\\\\O=[OYONLQd0`0_\\\\Oc1ac0_NZ\\\\Od1ec0`NR\\\\Of1nc0V1O10CP\\\\OjLoc0V3R\\\\OiL\\\\b0Jo^O^3eNhLYb0On^OZ3kNfLTb04m^OY3oNcLSb07h^OZ3UO_LQb0;a^O\\\\3_OYLna0`0Z^O\\\\3ITLla0f0R^OY34PLja0S5W^OlJja0S5W^OmJia0R5Y^OmJga0R5Z^OnJfa0Q5[^OoJea0]4T^OgK9Lca0Y4[^OdK62`a0Y4S_OgKm`0Y4S_OgKm`0Y4S_OgKm`0Z4R_OfKn`0\\\\4P_OdKPa0`4k^OaKUa0d4f^O[K[a0g4b^OZK^a0k4]^OUKca0P5W^OQKia0Q5U^OoJka0S5S^OmJma0V5P^OjJQb0W5l]OjJTb0`51Hj]OfJWb0a0l]Ol3McKXb0<Q^OQ4GaKZb0;S^OR4Tb0jKP^OU4Tb0dKQ^OY4nb0M3M5K=CZ2fM:F8H;D5KjZ^6\"}}, {\"image_id\": 104, \"category_id\": 1, \"bbox\": [256.0, 378.0, 168.0, 325.0], \"score\": 0.9997072219848633, \"association_id\": 5, \"light\": [-2.0313827991485596, -2.1138219833374023, 1.9271438121795654, 2.05053973197937], \"segmentation\": {\"size\": [768, 892], \"counts\": \"deP64kg0101O00001O001O002N001O000000001O000000000000000000O1000000000O1000O1000000O10O10O1O100O1000O10000000000O10000000000000000O10001O000O1000000mHMYF3^9a0WF_Of9i0UFWOh9n0VFROh9Q1TFeNnI9j?W1TFPOj9S1SFoNl9S1PFPOo9R1nEPOP:R1nEPOP:R1oEoNo9T1oEmNo9V1oEkNP:W1mEkNS:X1`BjMV1Q1Y<X3cCbLQMZO\\\\?U4`CcLTMXO\\\\?V4^CcLVMXO[?W4ZCdL\\\\MTOY?Z4WCeL`MQOY?\\\\4QCgLfMmNY?^4mBgLjMkNY?`4hBhLoMhNX?d4WBSMaNYNX?g4eA_MSOjMW?k4[AcM^OcMU?T5n@^MM_MS?U8m@kGQ?V8PAjGn>V8TAjGj>V8XAjGe>U8]AmGR>a8oAaGl=`8XBUGC7R>e8aB[G]=c8gB\\\\GY=_8nB`GQ=X8QCaGUO7j=U8TCdGRO7j=R8WCgGoN7i=n7]CkGjN7i=j7aCoGfN7i=g7kCXHV<g7kCYHU<f7lCZHT<e7mC[HS<c7oC\\\\HR<c7oC]HQ<d7nC\\\\HR<e7mC[HS<f7lCYHU<h7jCXHV<j7hCVHY<k7eCUH[<n7bCQH_<R8^CnGb<T8\\\\ClGf<U8WCjGk<Z8PCfGS=e8_B[Gc=j8VBVGl=o5gAcL<\\\\MR>i5oAcLNdM[>]5mAmLk>i2\\\\AUMe>g2_AWMb>g2dATM]>k2jAoLV>P3mAmLT>R3nAlLS>S3PBjLP>V3TBfLm=]3TB^Lm=S3[@oKg1j0P>k2f@[LZ1e0S>e2PAiLj0=Z>e2SAlLd0:[>h2TAoLb04]>k2RARMc0N^>P3n@SMg0G^>U3j@WM]a0i2b^OZM]a0e2b^O]M`a0_2c^O^M`a0_2c^O_M_a0_2b^O_Maa0^2a^O`Maa0^2b^O\\\\Mda0b2R2J6K3K6I7G;G6J7G^an:\"}}, {\"image_id\": 104, \"category_id\": 1, 
\"bbox\": [794.0, 527.0, 85.0, 164.0], \"score\": 0.9982554912567139, \"association_id\": 4, \"light\": [-2.77700138092041, -1.7688889503479004, 2.7262258529663086, 1.590911865234375], \"segmentation\": {\"size\": [768, 892], \"counts\": \"^Udb03mg00O10000O101O00000O100000000000000O101O0000000000000J6H8UN_O_[OY1hc0l1TOjL^\\\\O0M]3[c0o0J6K5K5N11N1100000000000000000000000000000000000000000000000000000000000000000000000O100O10001O1O2M4SMW^O[OVb0lNd_Og0Pd0I^S9\"}}, {\"image_id\": 104, \"category_id\": 1, \"bbox\": [394.0, 482.0, 108.0, 228.0], \"score\": 0.9999985098838806, \"association_id\": 1, \"light\": [-1.7242259979248047, -2.4542157649993896, 1.6070733070373535, 2.3581559658050537], \"segmentation\": {\"size\": [768, 892], \"counts\": \"SVX92mg0101N1O1O100O100O1000000O10000000000O1000000O1000000001O00B>M3N2N2L4J6L4YLjN^@\\\\1^?mNT@\\\\1k?gNm_O_1S`0dNP^O@e0S2\\\\a0Dk]Ok0Tb0a2000000O100O01F\\\\]OXKcb0h4^]OXKbb0g4_]OYK_b0g4c]OYK[b0g4g]OXKWb0h4l]OXKRb0f4R^OZKka0c4[^O]Kca0`4b^O`K\\\\a0_4g^OaKTa0]4S_ObKf`0_4a_OaKU`0e4o_O[Kd?n4`@RK_?k4e@UKZ?j4h@UKY?k4f@VKZ?i4g@WKY?i4g@VKZ?i4g@WKY?i4g@VKZ?j4f@VKZ?k4e@UK[?m4b@SK_?n4`@RK`?Q5]@oJc?S5[@mJe?U5Y@jJh?X5V@hJj?\\\\5R@dJn?_5o_O`JR`0b5l_O^JT`0e5j_OZJV`0i5o03M5K=C4L2N2N000001O1O2jIV_OR5n`0eJZ_OY5ja0K7eMV]OTOmb0KY^OMka0J_^O1na0YOf^O<na0fN`^OV1Yd0K5L3M3K5J8HkjS9\"}}, {\"image_id\": 104, \"category_id\": 1, \"bbox\": [664.0, 517.0, 84.0, 155.0], \"score\": 0.9997069835662842, \"association_id\": 3, \"light\": [-2.0725674629211426, -2.289454936981201, 1.9636352062225342, 2.0591750144958496], \"segmentation\": {\"size\": [768, 892], \"counts\": \"kdb?1ng02O1N100000000O1000000000000O1000001O0000000O100aKNPA2P?Oo@1Q?Oo@1Q?0n@0Q?2n@OQ?<b_Of0^`0D_^OU1aa0Ko\\\\Oe0Qc0U20000O10O1000000Kk\\\\O_KUc0a4k\\\\O^KVc0b4j\\\\O^KVc0b4k\\\\O]KUc0c4k\\\\O\\\\KVc0d4300000000000000000000000000000000000000000000000000000000000001N10000N3^L_]O8RO0cc0@o]O\\\\O`N8:f0RS\\\\3\"}}, {\"image_id\": 105, \"category_id\": 1, \"bbox\": [229.0, 807.0, 177.0, 117.0], 
\"score\": 0.9999984502792358, \"association_id\": 2, \"light\": [-1.9081461429595947, -2.0081026554107666, 1.8461370468139648, 1.8772175312042236], \"segmentation\": {\"size\": [1145, 1565], \"counts\": \"akP81dS14O2jlNKeR17RmN2lR10RmN1nR1>N2O000O101O00000O2O2N1N2O0O2O1O1]oNmN^n0T1]QOROan0o0]QOROcn0n0]QOSObn0n0]QOROcn0o0\\\\QOQOdn0o0\\\\QOROcn0o0\\\\QOQOcn0P1]QOPOcn0Q1\\\\QOoNdn0S1ZQOnNen0T1ZQOkNfn0]1RQOcNnn0d1kPO\\\\NUo0f1jPOZNUo0h1iPOXNWo0k1fPOUNZo0n1dPOQN\\\\o0R2bPOmM^o0V2`POiM`o0`2WPO`Mio0b2VPO]Mjo0e2UPOZMko0h2TPOWMlo0n2QPOQMno0P3QPOPMoo0Q3PPOoLPP1R3PPOmLPP1S3PPOmLPP1T3PPOkLPP1U3PPOkLPP1U3PPOkLPP1U3PPOkLPP1V3ooNjLQP1V3ooNjLQP1V3ooNjLQP1V3ooNjLQP1V3noNkLRP1U3noNkLRP1U3moNlLSP1]3000000000000000000000000000BmoNVMSP1i2noNWMRP1i2noNWMRP1h2ooNXMQP1h2ooNXMQP1h2ooNXMQP1h2ooNXMQP1h2ooNXMQP1h2ooNXMQP1h2ooNXMQP1h2ooNXMQP1h2ooNXMQP1h2ooNXMQP1h2ooNXMQP1h2ooNXMQP1g2PPOYMPP1g2ooNZMQP1f2ooNZMRP1e2noN[MRP1e2noN[MRP1e2noN[MRP1e2noN[MSP1d2moN[MTP1e2loN[MTP1e2loN[MTP1e2loN[MUP1d2koN\\\\MUP1c2loN]MTP1c2loN]MTP1c2loN]MTP1c2koN^MUP1b2koN^MUP1a2loN_MTP1a2loN_MTP1a2loN_MTP1b2koN^MVP1a2joN_MVP1a2joN_MWP1`2ioN`MWP1`2ioN_MYP1`2foNaMZP1_2foNaM[P1_2doNaM]P1^2coNbM^P1]2boNcM_P1\\\\2aoNdM`P1[2`oNeM`P1[2`oNeMaP1Z2`oNeMaP1Z2_oNeMcP1Z2]oNfMcP1Z2]oNfMdP1Y2\\\\oNgMdP1Y2\\\\oNgMeP1X2[oNhMeP1Y2ZoNgMgP1X2YoNhMgP1X2YoNhMhP1W2XoNiMiP1V2WoNjMiP1V2WoNiMkP1V2UoNjMkP1V2VoNiMkP1W2ToNiMlP1W2ToNiMlP1W2ToNiMlP1W2ToNhMnP1a2001O00001N101O1O001O001O1N101O1O0O2O1KdnNkM]Q1T26N1O001O1N101O1O001N2O1N2N4JkS_X1\"}}, {\"image_id\": 105, \"category_id\": 1, \"bbox\": [754.0, 766.0, 345.0, 277.0], \"score\": 0.9999999403953552, \"association_id\": 1, \"light\": [-1.294809341430664, -2.349046468734741, 1.3053364753723145, 2.199120044708252], \"segmentation\": {\"size\": [1145, 1565], \"counts\": 
\"VS\\\\j06bS14L1O2N101N100O100O2N1O1O1O100O1O100O2O0O1N2L4N2O1O100O10000O100O100O1O1O1O100O10000O100O1O1N2O1N2O100O1000000gLeNTTO[1ik0nNQTOR1lk0WOoSOh0ok0_OlSOa0Sl0EhSO;Vl0MdSO3Zl06_SOJ^l0c0USOgMeNf1Sn0T2mQOlMQn0o3O1N2OdNRROoLmm0P3[ROjLdm0X3]ROfLbm0\\\\3^ROcLam0`3^RO_Lam0e3\\\\RO[Lcm0i3[ROVLdm0n3YRORLem0R5O1O1O1O1O1O1O1N2I7J6M3N2N2N2N2N2N2L4L4M3O1O1O100O1O2O0O1O100O1O1O1O100O100O100O10000O100O100O1O1OTO_TObI`k0_6`TOaI`k0^6`TOcI_k0^6aTObI_k0^6aTObI^k0^6cTObI]k0^6cTObI]k0^6bTOcI]k0^6cTObI]k0]6dTOcI\\\\k0]6dTOcI\\\\k0]6dTOcI\\\\k0]6cTOdI\\\\k0]6dTOdI[k0[6fTOeIZk0[6fTOeIZk0[6fTOeIZk0[6fTOeIYk0\\\\6gTOdIYk0\\\\6gTOdIXk0]6gTOdIYk0\\\\6gTOdIXk0]6hTOcIWk0^6iTObIWk0^6iTObIVk0^6kTObIUk0^6kTObITk0_6lTOaITk0_6lTOaITk0_6lTOaITk0_6kTObIUk0^6kTObIUk0^6kTObITk0_6lTOaITk0_6lTOaITk0_6lTOaITk0_6lTOaITk0_6lTOaITk0_6lTOaITk0_6lTOaITk0_6lTOaITk0b7000lNlTOcITk0]6lTOcITk0]6lTOcITk0]6lTOcISk0^6mTObISk0^6mTObISk0^6mTObISk0^6mTObISk0]6nTOcIRk0]6nTOcIRk0]6nTOcISk0\\\\6mTOdISk0\\\\6lTOeITk0[6lTOeITk0[6lTOeITk0[6lTOeITk0[6lTOeITk0[6lTOeITk0Z6mTOfISk0Z6mTOfISk0Z6mTOfISk0Y6nTOgIRk0Y6mTOhISk0X6mTOhISk0W6nTOiIRk0W6nTOiIRk0W6nTOiIRk0W6nTOiIRk0V6oTOjIQk0V6oTOjIQk0V6oTOjIQk0V6oTOjIRk0U6mTOlISk0T6mTOlISk0T6mTOlISk0T6mTOlITk0R6mTOnISk0R6mTOnISk0R6mTOnISk0R6mTOnISk0R6mTOnISk0R6mTOnISk0R6lTOoITk0Q6lTOnIUk0Q6lTOoITk0Q6lTOoITk0Q6lTOoITk0Q6lTOoIUk0P6kTOPJUk0P6kTOPJUk0P6kTOPJVk0o5jTOQJVk0o5iTORJXk0m5hTOSJXk0m5hTOSJXk0m5hTOSJYk0l5gTOTJYk0l5gTOTJYk0l5gTOTJYk0l5gTOTJYk0l5gTOSJ[k0l5eTOTJ[k0l5eTOTJ[k0l5eTOTJ[k0l5eTOTJ[k0l5eTOTJ[k0l5eTOTJ[k0l5eTOTJ[k0l5dTOUJ\\\\k0k5dTOUJ\\\\k0k5dTOUJ\\\\k0k5dTOUJ]k0j5cTOVJ]k0j5cTOVJ]k0j5cTOVJ]k0j5cTOVJ]k0j5cTOUJ^k0k5bTOUJ^k0k5bTOUJ^k0k5bTOUJ^k0k5bTOUJ^k0k5bTOUJ^k0k5bTOUJ^k0k5bTOUJ^k0k5bTOUJ^k0k5bTOTJ_k0l5aTOTJ_k0l5aTOTJ_k0l5aTOTJ_k0l5bTOSJ_k0l5aTOTJ_k0l5aTOTJ_k0l5aTOTJ_k0l5aTOTJ_k0l5aTOSJ`k0n5_TORJak0n5_TORJbk0m5^TOSJbk0m5_TORJak0n5_TORJak0n5_TORJbk0m5^TOSJbk0m5^TOSJck0l5]TOSJdk0m5]TORJdk0m5\\\\TOSJdk0m5\\\\TOSJek0l5[TOTJfk0k5ZTOUJfk0k5[TOSJfk0m5ZTOSJgk0l5YTOTJgk0m5YTORJgk0n5YTORJh
k0m5XTOSJhk0n5XTOQJhk0o5XTOQJik0o5VTOPJkk0P6UTOPJkk0Q6UTOnIlk0R6STOnImk0R6STOnInk0R6QTOnIPl0Q6PTOnIRl0Q6oSOnISl0Q6lSOoIVl0o5jSOPJXl0P6hSOoIYl0P6gSOPJZl0o5fSOQJ[l0n5fSOPJ\\\\l0P6cSOPJ^l0o5bSOQJ`l0n5_SORJcl0m5]SORJel0l5[SOSJhl0]63M2O2M101N2O1N3N2M5K5E]ROWJfm0g5[ROWJgm0h57N1O1O1O2N4K6K9G6I5L1N2O1N2N3N2M8H:Fb0^O7I4L4L3M3M3L6I:lNonNoNoUY`0\"}}, {\"image_id\": 105, \"category_id\": 1, \"bbox\": [127.0, 818.0, 142.0, 176.0], \"score\": 0.9999558925628662, \"association_id\": 3, \"light\": [-1.9012290239334106, -1.6384541988372803, 1.942243218421936, 1.5120997428894043], \"segmentation\": {\"size\": [1145, 1565], \"counts\": \"Vo^41gS1101O0O10000000000000001O0000000O1000000000000000000000000000000000000O1001O000000GKnlN5QS10klN0TS13jlNMTS15mlNJQS18olNHoR1;PmNEoR1<QmNDoR1<RmNCmR1?RmNAmR1`0SmN@mR1`0SmN@ko03lRO?YM^Oio06kRO>\\\\M\\\\Ogo09kRO<^M[Oeo0<kRO9aM[Oao0`0lRO6bM[O_o0b0mRO4eMZO[o0f0nROOiM[OWo0i0nROMkMYOTo0n0oROInMZOon0R1oROFSNXOln0T1oRODVNYOjn0V1mRO@[NZOhn0Z1eRO\\\\OgNZOcn0^1_ROYOQOYO_n0a1\\\\ROWOUOYO]n0d1[ROROZOZOXn0i1ZROmN@YOUn0m1WROkNFXOPn0Q2XROgNIXOlm0U2YROcNLXOim0X2XROaN0WOfm0\\\\2VRO]N6WOcm0^2TRO\\\\N:VO`m0b2SROYN=UO`m0d2oQOYNa0RO`m0j2fQOXNk0nN_m0T4aROlK^m0S4dROmK\\\\m0Q4fROnK[m0Q4fROoKZm0P4gROPLYm0n3iRORLWm0l3kROSLVm0j3mROVLSm0h3oROXLQm0e3RSOZLol0c3TSO]Lll0a3WSO^Lil0a3h1O1O2N1O1001O00011N1O1O2N2N2N2N2N2N2O1NgN[PO_Ndo0]1fPO_NXo0^1oPO_NPo0]O]POd1g0oNjn0[OgPO^1c0VOjn0TOjPO`1?\\\\O\\\\o0`0gPO^O\\\\o0=gPOCao02bPONao0GhPO:Zo0^OlPOb0UQ1:F1O10O1O0O2N2N2N2N3N3L2N1O1N2O1O2M3M4LUVX]1\"}}, {\"image_id\": 105, \"category_id\": 1, \"bbox\": [60.0, 802.0, 94.0, 44.0], \"score\": 0.9992496371269226, \"association_id\": 4, \"light\": [-1.1708110570907593, -2.7677438259124756, 1.1092907190322876, 2.71724009513855], \"segmentation\": {\"size\": [1145, 1565], \"counts\": 
\"dlS2>XS14N1N2O2O0O1M3O1O1O1O100O100YmNQOcR1S100OO]mNnNcR1R1]mNnNbR1S12000O100O10000O1000000_mNkN^R1U1bmNkN^R1T1cmNlN]R1T1cmNlN]R1T1bmNmN^R1R1cmNnN]R1Q1dmNoN\\\\R1Q1cmNQO\\\\R1o0dmNQO\\\\R1n0emNRO[R1n0emNRO[R1m0emNTO[R1l0emNTO[R1l0emNTO[R1l0emNTO[R1l0emNTO[R1l0emNTO[R1l0emNTO[R1l0emNTO[R1k0fmNUOZR1k0fmNUOZR1k0fmNUOZR1l0emNTO[R1l0emNTO\\\\R1k0dmNUO\\\\R1k0dmNUO\\\\R1l0dmNSO]R1m0bmNTO]R1m0bmNSO^R1m0cmNRO]R1o0bmNQO_R1o0amNPO_R1P1amNPO_R1U11K`mNoN`R1Q1`mNoN`R1Q1`mNoN`R1Q1amNnN`R1Q1`mNoN`R1Q1_mNPOaR1P1_mNPOaR1o0`mNQOaR1n0^mNSObR1e0`mN\\\\ONObR1c0dmNZOK4aR1`0imN@XR1>imNBWR1=imNDWR1;jmNEVR1:kmNFVR19jmNGVR18kmNHUR17lmNIUR15lmNKTR15lmNKUR13lmNMVR10kmN0oR100O2O00001O1OPmXa1\"}}, {\"image_id\": 106, \"category_id\": 1, \"bbox\": [89.0, 122.0, 121.0, 390.0], \"score\": 0.9999978542327881, \"association_id\": 4, \"light\": [-0.7210670113563538, -4.290132522583008, 0.6200842261314392, 4.18428897857666], \"segmentation\": {\"size\": [639, 735], \"counts\": \"Yhg17_c0k0oNe0G<D5L4L6m^O`M\\\\`0W3J5l_OcL^?c3U@kLe?l3Jb0_O6lCUKh8T5nFWKg8Q5PGVKk8P5aFcK[9b4WFkKe9]4oEiKo9d4_EaK4QNi9n8WFSGg9n8YFSGd9P9ZFTG`9P9_FQG]9R9cFoFY9U9fFlFW9V9hFkFV9V9kFkFQ9U9RGkFl8U9VGlFg8T9[GlFd8U9\\\\GlFb8T9`GkF_8U9bGlF\\\\8U9dGkF[8R9iGoFS8o8RHRGj7m8ZHXG`7b8gH_GW7\\\\8PIcGo6X8WIiGh6Q8^IPH`6k7fIUHY6h7kIYHS6f7PJYHo5f7SJ[Hk5d7WJ\\\\Hg5d7[J\\\\Hd5d7]J]Ha5d7`J[H_5f7aJZH_5e7bJ\\\\H]5c7dJ]H\\\\5c7YIeGNi0h6c7gHaH6LS7c7]HSI8ZO[7e7WHZI9RO^7g9bHYF^7h9aHXF_7h9aHXF_7i9_HXFa7h9_HXFa7i9]HYFc7f9\\\\H[Fd7e9YH^Fh7b9WH^Fi7b9VH_Fj7a9VH_Fk7`9UHaFj7j:2cNSHdFo7Z9QHfFQ8X9oGhFS8V9mGjFT8U9lGkFV8^:2PEoGn9U8kETHo9R8gETHW:l8bN^F[Hd9b7]F^He9^7]FbHe9m5jGSJT8j5QHVJm7j5VHUJh7k5ZHUJd7k5_HTJc7h5`HWJb7d5aH\\\\J`7_5eH`J]7i0^FbN0k0i0K`1NY7g0gFYNOQ1l0GY17V7e0aGeNT4f0\\\\4a0aM^O`2>dMA^2;dMEa25aMJd20_MNi2E\\\\M;g2ZObMe0l:01O1O1O001O1N10001N100O100O1O1N2L4N3N1O2L4OYTW:\"}}, {\"image_id\": 106, \"category_id\": 1, \"bbox\": [263.0, 160.0, 159.0, 414.0], \"score\": 0.9999910593032837, \"association_id\": 3, \"light\": [-1.9736642837524414, 
-2.24381422996521, 1.8494497537612915, 2.091608762741089], \"segmentation\": {\"size\": [639, 735], \"counts\": \"b\\\\T5i0cb0n0C<D4L4L4L3_Oa0I6I8J5L4[@WLS?j3k@XLU?i3j@WLU?k3j@ULU?l3j@ULU?m3h@UL`0Kn=_4mAnKHK^=W4jBkLm<T3SC[M_<e2aC`MZ<`2fCdMV<]2hCgMU<Z2kCfMU<Z2mCaMV<_2kC]MX<d2iCTM]<l2dClLo2fNk5^4WGdLT3POb5h4lFPLm3ZOT5d5VK^Jg4c5ZK^Jc4c5fI]JbN2d7c5eIkJ]NBm7e5aIQK]N\\\\OR8d5\\\\IWK^NUOW8e5WIYK`NTOY8e5SIZKbNROZ8g5oHYKgNPO[8h5kHZKiNoNZ8l5hHVKnNPOU8R6dHSKVOmNP8]6[HlJEhNj7m6hGhJ>[Nf7c9YH_F`7g9`H[FX7j9iHWFS7l9mHUFR7k9mHVFR7j9oHWFP7i9oHXFQ7h9nHZFP7f9QIZFo6f9PI[FP7d9QI]Fm6d9RI]Fn6b9SI^Fm6a9TI_Fl6X1lH_58ZIl6W1mHkMIT7=lIm6T1nHfM0W74QJn6R1oHeM0V72UJo6o0`Im4@TJP7o0aIn4\\\\OUJR7m0bIP5XOUJV7k0cIP5UOVJX7j0eIo4ROXJX7h0kIm4kN\\\\JZ7g0iIo4mNZJZ7g0hIP5nNYJZ7f0iIQ5mNYJZ7f0iIQ5lNZJ[7e0hIS5lNXJ\\\\7f0gIR5mNXJ]7f0dIS5oNWJ]7h0aIR5ROWJ\\\\7m0[Id6e6aITIa6m6`IdHm6\\\\7k2010bFdHW6\\\\7hIeHX6[7hIeH`4NTJ`7oN`Hd11Y50SJd7j8]HVGj7b6cHXID6l7Z6kHYIZO=l7V6UISIQOg0k7R6^IlHkNS1h7m5fIdHgN_1f7f5dJUJ`5a5jJ\\\\JX5\\\\5QK`JP5\\\\5TKdJl4X5XKgJh4V5[KkJd4n4dKRK[4d4oK\\\\KQ4:hG]2]4YMj37nG[2[4^Mh33RHm1FQMa4P1g3NYHm1DoL]4V1g3POfG=h0Y2CPMX4[1g3gNPH?e0Z2T4`Nj3POYH0nNh1Q5WOj3lN\\\\HNROl1g4ZOo3eNRIP2n2[O[6Q1VIoNo6l0RISOP7l0oHUOR7j0mHVOU7i0kHVOV7j0iHVOY7h0gHXOZ7h0eHXO\\\\7h0dHWO]7h0dHWO^7g0bHZO_7d0bH[Ob7a0^H_Oe7>\\\\HAe7>[HBg7:\\\\HEe77^HId7EmH:U7^ORIa0P7YOUIf0S=O001N2O2N001N101O1O001O0O100O100O2O0O1O101N101N100O2N2N2N2N2NmhR6\"}}, {\"image_id\": 106, \"category_id\": 1, \"bbox\": [18.0, 140.0, 83.0, 290.0], \"score\": 0.9999992251396179, \"association_id\": 1, \"light\": [-2.751067638397217, -2.2769033908843994, 2.614375114440918, 2.0595803260803223], \"segmentation\": {\"size\": [639, 735], \"counts\": 
\"Z^;c0Wc0;F8H8I5K5M4h^OVNT`0P2Y_OeN_`0c2J3L4K6K6PCXL`9k3]FVLb9P4UDWMLkNm;c6fC]IY<h6cCXI[<l6bCUIj;I]Dg7Y;d0L4L5L3O1O100O2O0O1000001O00[OREYHn:d7VE[Hj:c7XE]Hh:`7[E`He:^7^EbHa:^7`EaH`:_7`EaH`:^7aEbH_:_7`EbH_:^7`EcH`:]7_EdHa:]7]EdHc:]7[EdHe:]7ZEcHf:^7XEdHg:^7WEbHj:^7UEbHk:^7UEbHk:^7UEbHl:^7REcHn:]7REcHo:V82N1O2N1O2N3M>B5oNTDeIo;U6TDkIn;P6UDPJn;j5UDVJm;g4mCPKb08d;a4VE_Kk:Z4\\\\EeKe:f3PFYLP:Z3^FeLc9n2jFQMX9b2TG\\\\Mo8CgDX1b2TO\\\\:a0kE^OX:=jECZ:7iEH\\\\:OiEOU?N1O2O1M3NUW[<\"}}, {\"image_id\": 106, \"category_id\": 1, \"bbox\": [186.0, 114.0, 116.0, 366.0], \"score\": 0.9999990463256836, \"association_id\": 5, \"light\": [-1.558030605316162, -3.112318754196167, 1.5226482152938843, 2.919893980026245], \"segmentation\": {\"size\": [639, 735], \"counts\": \"jZd3f0ob0`0C;]Oc0^Ob0_Oa0J5L5K5i_OaLe?o3L3M4L4M4J5J6K5K4K3O1O1PEdJ]7]5cHeJ[7[5eHfJX7]5gHeJU7^5kHbJS7_5nHaJR7_5mHbJR7^5oHbJQ7]5oHdJP7]5PIcJP7]5PIcJo6^5PIcJP7]5PIcJP7]5PIcJo6]5RIcJl6^5TIdJj6\\\\5WIdJh6]5WIdJi6\\\\5VIeJj6\\\\5TIeJm6[5`HXJfM>i9[5_HYJeM>k9\\\\5]HXJfM=l9^5\\\\HUJgM>l9`5\\\\HQJhM?k9b5kH^JU7c5iH^JW7d5gH]JY7e5dH[Jo5JUHP6g1VJS6LUHV6_1oI[6MUHZ6g0VImNc0V8NVHd6DUICO;;X8NUHP8BSHX8NUHo7AUHY8NUHn7BSHY80THm7CSHY80THm7CSHY81RHm7ERHY81QHo7FoGY83oGo7HnGY83oGP8GmGZ83PHY2JT2L`KZ83PHh1>b2WOdK[82PHd1d0d2QOgKZ81QHa1j0d2kNjK[81nG_1Q1c2gNmKZ83kG[1Z1b2aNPLZ85hGW1d1`2ZNTLZ85hGT1k1_2SNXL[80kGS1o1c2kMZL\\\\8LnGQ1Q2h2eM[L]8HQHc0`2S1\\\\MYOFYOd8^OnGd0f2o0fM]OROAh93bId0YN\\\\O^NMf93gI=dNWOPN9a96oID[LE_22fM?g9NoIDYOLRMb0o9CkIIYOOmLf0T:ZOkIMXO1iLh0hb000O10O10000O1O1000000O101N1O1O10001N100O100O2O000O1O1001O000N2BX]ODjb03f0N1M4H[\\\\O0]\\\\^8\"}}, {\"image_id\": 106, \"category_id\": 1, \"bbox\": [523.0, 149.0, 180.0, 366.0], \"score\": 0.7850414514541626, \"association_id\": 2, \"light\": [-2.066526412963867, -2.8742430210113525, 2.0345473289489746, 2.73620343208313], \"segmentation\": {\"size\": [639, 735], \"counts\": 
\"dfV:3jc03N2m\\\\OI[b09`]OM^b03a]O0]b00d]ON^b0Oe]OOaa0Oc^O0ONd`0n0]_OROYa0T1d05L4M3M3L4M3L5L3N[1dN=e_O`L_?V4I7J8F9F7J5K4L5_O`0M4K3M4H8F:M3M3O1O001O00001N1O1N2O1N2M3N210O10O0100O1M4K6K5J5J7SOYGXFm8f9VGTFP9h9h0L5K5K2M3N2N1O2M3N3M3M1O2N1O1O1N2O1O1O1O1O1O001O01O0000000O1000001O000O101M3J6TOl0J;F9I5K5J4L4I7]Oc0G9H7_Ob0L301ROSCgJn<U5VCkJl<o4XCQKk<f4[CZKg<]4`CcKb<X4aChKa<T4aClKa<P4aCPL`<o3`CRLc<i3^CWLd<g3\\\\CYLg<c3ZC]Lh<a3XC_Lj<_3VCnKQO:k=e3VCPLoN;n=b3SCRLQO;o=_3QCVLPO;Q>]3PCULQO>R>Y3VCgLl<V3UCjLm<T3TCkLo<Q3RCPMP=l2QCTMT=f2nBYMX=a2hB_M\\\\=\\\\2eBeM]=X2dBgM_=V2aBjMb=S2^BmMe=P2\\\\BoMh=m1XBTNi=i1XBWNj=g1WBXNj=g1VBYNk=e1VB[Nk=c1WB\\\\Nj=b1WB^Nj=`1WB`Nj=^1WBbNk=Z1WBfNm=S1VBmNP>h0UBXOW>0TB1m`0000O10000000000000O2O00001N101O001N100O2N2Nebc0\"}}, {\"image_id\": 106, \"category_id\": 1, \"bbox\": [377.0, 138.0, 204.0, 351.0], \"score\": 0.9411299228668213, \"association_id\": 6, \"light\": [-2.462404727935791, -2.0584676265716553, 2.392778158187866, 1.955246925354004], \"segmentation\": {\"size\": [639, 735], \"counts\": 
\"fb[71c03bb05S]O2d>HWEl0f:VOXEm0e:TOZEo0c:SO\\\\En0c:SO[EP1a:TO\\\\EP1_:WO[El0`:_OWEc0g:ASEc0l:]OVE>m:CSE8P;IRE3P;MQE0Q;1PEKQ;6PEHQ;8QEDQ;=aCkNYOi0V=<`C]OfN:k=8^CJXN3Y>5\\\\CS1d<POXCR1h<QOSCP1m<ROPCo0Q=QOmBQ1R=nNnBU1mNbMc=W1aCY1eNjMe=l0fCW2S<eMPDh2c;UM_DT3Z;YLTCMc1o3T;SL\\\\CL_1V4Q;oK^CM`1V4o:aLoDb3_9ZK]FV13b3^9[K[FU15d3\\\\9[K[FT17d3[9ZK\\\\FT16f3Z9YK]FS17h3Y9WK^FQ1F`NNY5m9WK_Fm0HcNK[5m9VK`Fi0IhNIY5m9XKaFe0JjNHZ5l9WKcFNUONf0DDZ5n9WKcFJYONc0GC[5m9VKfFGYOOb0J@[5o9VKfFC\\\\O0`0M^OZ5P:WKeFA_O0>L@]5m9WKeF_OB0n0Y5k8YKdF^ODNn0\\\\5h8ZKfFZOFOm0\\\\5g8]KdFXOHOm0\\\\5g8_KbFUOLNm0^5d8`KcFSOLNO18^5Z9bK^FTO2JN28^5Z9cKYFXO6EO27_5[9cKTF\\\\O:@025a5]9bKnEAT1\\\\O@b5^9\\\\LSGQN]Oe5`9[LRGPN\\\\Og5b9YLRGoMYOl5e9VLQGoMXOm5f9TL\\\\FnM241d5a9ZL[FoM254_5_9^LZFoM159[5\\\\9aLYFPN15>V5X9eLYFPN15d0P5R9lLWFQN14m0h4k8SMVFRN23n0g4j8UMRFVN3OQ1f4j8WMmEYN5KS1f4k8jMQF`MQ1i4n8iMnE`Mo0j4T9WNjFk1V9UNhFm1X9SNfFn1[9RNcFP2^9oM`FQ2b9PN\\\\Fk1k9TNTFd1V:eKWEm1c0T2a:iKTEP2:n1i;PNWDm1m;RNSDm1o;SNPDk1T<SNkCm1W<RNiCl1Z<SNeCj1_<WN`Cd1f<[NYCb1k<^NTC_1P=bNmB^1U=cNgB`1Y=b210O010O001N5K4aCZI_;l6UDcIa;e7F7I6K5L3N2M2_EVGk9m8SFTGk9n8TFSGi9Q9TFQGk9P9TFQGl9P9RFQGn9n8RFSGn9m8QFSGP:l8QFTGo9k8RFTGo9l8PFTGQ:k8PFVGo9j8QFWGKEP:d0PFf76VHi92`FZ7HdHj9MiFV7_OlHk9HmFW7YOjHGJb;X7iDkHHLa;V7iDPIBLf;Q7iDSI@Mh;m6kD]I4BU:Q7hE]I3BU:Q7iE]I1BV:o6kE_IOBV:n6mE`I@DNNe:m6nEaI_ODNNe:l6oEcI]OCONe:k6PFeI[OBOOf:k6oEdI\\\\OBOOf:l6nEcI]OB0Nf:m6lEcI_OAONg:o6jEcI@_OP;n6_EcIB^Oo:o6_EcIl:^6REcIo:\\\\6kDRIDc0a;Z6kDTIBc0e;V6jDXI@b0g;U6hDQJY;n5gDRJZ;m5fDTJ[;j5eDVJk;EfCQ5`0[Ko;TOQDZ50cKl<Q4VCoKX=5WBW2b0dMh>o1ZAQNW?\\\\1k@dN\\\\?S1f@mNb?f0c@[Od?7b@Ija00000000001O01O01O1O1O2N0010O01O001O1O1O000000001O01O0001O010O001O1O1O001O00g\\\\O_OUc0`0j\\\\OAVc0`0h\\\\OAYc0c01^Of\\\\OM1:ec0H\\\\\\\\ONfc00ado2\"}}, {\"image_id\": 106, \"category_id\": 1, \"bbox\": [679.0, 158.0, 48.0, 228.0], \"score\": 0.9634349346160889, \"association_id\": 7, \"light\": [-0.7862544655799866, -3.927856922149658, 0.553732693195343, 3.791858434677124], \"segmentation\": {\"size\": [639, 
735], \"counts\": \"alW=l02[Oab0X1J8H4K6L4K4L3TBQNg9Q2PFYNiL[Ol<^2oEcNSMPOm<`2gEhNYMiNP=c2]EPOc:P2SDZNm;m1jCUNV<n1fCTNY<Q2_CQNb<R2YCPNg<e4O0000000000FQCdIo<[6RCeIn<Z6SCfIm<Z6SCfIm<Z6SCfIm<Z6SCgIl<Y6UCfIk<[6TCeIl<[6TCfIk<[6TCeIl<]6SCcIm<]6RCcIo<g61N2N2O0O1O3iKZCd0^OTMZ=o1aCW1_<gN_CP1XOSMY=l1bCP1TOTM[=e1kCS1kNWM\\\\=_1SDT1cN\\\\M]=W1QEEaM^OVX5\"}}, {\"image_id\": 107, \"category_id\": 1, \"bbox\": [18.50724983215332, 141.82464599609375, 89.29244995117188, 242.17535400390625], \"score\": 0.9983856678009033, \"association_id\": 0, \"light\": [-1.291250228881836, -2.9035146236419678, 1.1808265447616577, 2.7304975986480713], \"segmentation\": {\"size\": [384, 592], \"counts\": \"PPn6\"}}, {\"image_id\": 107, \"category_id\": 1, \"bbox\": [384.0069885253906, 88.62519073486328, 145.35092163085938, 206.40432739257812], \"score\": 0.9954388737678528, \"association_id\": 0, \"light\": [-1.771710991859436, -2.679422616958618, 1.6171355247497559, 2.5110933780670166], \"segmentation\": {\"size\": [384, 592], \"counts\": \"PPn6\"}}, {\"image_id\": 107, \"category_id\": 1, \"bbox\": [130.2649688720703, 62.032169342041016, 211.3441925048828, 188.37806701660156], \"score\": 0.9924898743629456, \"association_id\": 0, \"light\": [-1.6594600677490234, -2.085425853729248, 1.523667335510254, 1.8985586166381836], \"segmentation\": {\"size\": [384, 592], \"counts\": \"PPn6\"}}, {\"image_id\": 107, \"category_id\": 1, \"bbox\": [318.9190368652344, 65.1844253540039, 66.257080078125, 154.7557373046875], \"score\": 0.9751431941986084, \"association_id\": 0, \"light\": [-1.168255090713501, -3.487178087234497, 1.0720500946044922, 3.3405590057373047], \"segmentation\": {\"size\": [384, 592], \"counts\": \"PPn6\"}}, {\"image_id\": 107, \"category_id\": 1, \"bbox\": [63.776920318603516, 76.09317016601562, 85.15103149414062, 189.34124755859375], \"score\": 0.4661687910556793, \"association_id\": 0, \"light\": [-1.6231809854507446, -2.6185638904571533, 1.634565830230713, 2.461646556854248], 
\"segmentation\": {\"size\": [384, 592], \"counts\": \"PPn6\"}}, {\"image_id\": 107, \"category_id\": 1, \"bbox\": [159.294189453125, 69.74923706054688, 97.90341186523438, 179.5300750732422], \"score\": 0.3307031989097595, \"association_id\": 0, \"light\": [-1.6343657970428467, -2.5536890029907227, 1.4323668479919434, 2.432464599609375], \"segmentation\": {\"size\": [384, 592], \"counts\": \"PPn6\"}}, {\"image_id\": 108, \"category_id\": 1, \"bbox\": [1.0, 189.0, 387.0, 211.0], \"score\": 0.9999998807907104, \"association_id\": 1, \"light\": [-1.2506386041641235, -2.439653158187866, 1.2147259712219238, 2.2922749519348145], \"segmentation\": {\"size\": [768, 1024], \"counts\": \"`n0d0Yg09I4L3XYOnN]f0]1N1d[OfNRN0`c0]1Y^OTOfa0m0W^OWOfa0n0T^OUOka0o0o]OTOoa0o0m]OTORb0n0g]OXOWb0k0d]OXO\\\\b0o0P]OeMM_1Qc0o0o\\\\OfML\\\\1Uc0o0m\\\\OhMK[1Wc0n0l\\\\O^OTc0b0k\\\\O@Sc0a0n\\\\O]OSc0c0m\\\\O\\\\OTc0d0l\\\\O[OUc0e0k\\\\OhMKW1[c0Q1j\\\\OhMKU1]c0S1i\\\\OfMLR1_c0X1e\\\\OeMNo0`c0\\\\1c\\\\ObM0n0`c0`1a\\\\O_M4k0]c0g1o\\\\OUNSc0k1o\\\\OQNSc0o1o\\\\OmMSc0S2n\\\\OkMSc0U2o\\\\OgMSc0Y2n\\\\OeMSc0[2o\\\\OaMSc0_2W10O100000O100000O10O10O1000O01000O01000000000000000001O000O100000000000000000O1000000000000001O00001O00001O00001O001O001O1O1O1O001O1O001O001O1O1O001O1O1O1O1O1O1O1O001O1O1O1O1O2N2N2N1O1O1O1O001O00001O00001O001O1O1O001O1O1O1O001O00001O0000001O1O001O1O9G1O2N1O2N1O1O1O101N3f]OTKZa0o4`^OUK_a0l4\\\\^OkJF<ma0i4\\\\^O[Kca0f4X^O^Kha0b4X^O_Kha0`4X^O`Kha0a4V^O`Kka0_4U^ObKja0^4U^OcKka0]4U^OcKla0]4R^OdKna0X5O0001O00000000010O00000000001O00001O00010O00001O000010O00000001POn]OPLRb0P4n]OPLRb0P5O00001OPOP^OPLPb0P5001O0001O000000010O0001O00001O1O01O01O00010O000000001O01O000001O00001O00100O1O1O1O1O001O1O001O1O1O1O1O1O1O1O1O001O1O1O1O10O01O00001O0000001O00001O00001O001O001O00001O00001jNn\\\\O\\\\MRc0d2n\\\\O\\\\MRc0c2o\\\\O]MQc0b2P]O]MQc0b2P]O^MQc0a2o\\\\O_MQc0`2P]O`MPc0`2P]O`MPc0_2Q]OaMPc0^2P]ObMPc0]2Q]OcMPc0[2Q]OeMob0Z2S]OeMnb0X2T]OhMmb0U2U]OkMlb0o1Y]OQNhb0i1]]OWNcb0g1_]OYNbb0e1_]O[Nab0d1a]O[N`b0[1h\\
\\OfMj0o0^b0e0g\\\\OiNU1b0Ub0c0h\\\\OgNV1f0Rb0a0m\\\\OaNV1n0na0<d^OD]a08f^OH\\\\a04f^OL[a02f^ON[a00f^O0[a0Nf^O2[a0Kh^O4Ya0Jh^O6Ya0Hh^O8Ya0Fh^O:Ya0Ci^O<Za0@i^O?\\\\d000000001N100000001O0000001O00000O10001O000O10001O000O101N1000000O2O0O2N101O0O10001N1O2M4LUdl>\"}}, {\"image_id\": 108, \"category_id\": 1, \"bbox\": [325.0, 156.0, 208.0, 282.0], \"score\": 0.9999990463256836, \"association_id\": 3, \"light\": [-1.7725043296813965, -1.6201351881027222, 1.6100480556488037, 1.452576994895935], \"segmentation\": {\"size\": [768, 1024], \"counts\": \"aQd71ng02O1O2N1N4M3M4L1O0000000001O01O000000000001O00000000000010O00O1000001O000000001O000O101O0000000O2O00000000000O2O00000O2O0O2N][2NddM6K6I3N2N2M3M2N2O2N1000001O0O1O1O1M3M3O1O000O1O1O1O01N3MPMTOP_Oi0Ra0]Oj^Oa0Xa0Be^O>[a0Cc^O>^a0B`^O>aa0C^^O;ea0D[^O;fa0FY^O9ha0HW^O9ga0IW^O9ga0IU^O:ja0GT^O<ka0ES^O<ma0EQ^O<Pb0Cn]O>Sb0Bj]O`0Wb0@f]Ob0[b0]Od]Oe0\\\\b0[Od]Oe0[b0\\\\Od]Oe0\\\\b0[Oc]Od0_b0]O]]Od0eb0\\\\OZ]Od0gb0]OX]Oc0hb0^OV]Od0hb0^OV]Od0gb0_OV]Oc0ib0@R]Od0lb0^OR]Oc0nb0_Oo\\\\Ob0Pc0Bj\\\\O`0Sc0W2iMnK_@U4_?TLY@n3e?XLV@i3i?ZLS@h3m?[Ln_Of3S`0]Lg_Oe3Z`0]Lc_Oc3^`0`L^_O`3d`0bLY_O^3h`0fLR_O\\\\3n`0hLg^O^3Za0c11N2N2N2N2O1O1O1N2N2N2O1O100O1O1O1O2O000O2O002N7iJd]O\\\\4Wc0I2M2O_\\\\O^Leb0a3Z]OdLbb0\\\\3^]OhL_b0`4M2O0O00001O2N2M2O0O2O3L2O1OTNU^ObMla0P2b^OPN^a0f1l^O[NSa0a1Q_OQO]`0k0g_OTOY`0i0k_OVOU`0g0m_OYOU`0b0o_O^OT`0;P@ER`05S@Jn?0Z@Mj?K^@0i?E`@6jc0K3M3M3M4L4L4L3M2N3L8H7I5Kej_;\"}}, {\"image_id\": 108, \"category_id\": 1, \"bbox\": [434.0, 197.0, 413.0, 331.0], \"score\": 0.9477934837341309, \"association_id\": 2, \"light\": [-2.358762502670288, -1.9423885345458984, 2.3071320056915283, 1.8939151763916016], \"segmentation\": {\"size\": [768, 1024], \"counts\": 
\"mmU:1og0001N101O1O00001O0000000000000000001O0000000000000000001O0001O0001O00001O0001O0000000001O01O000000000001O01O000001O0001O0000001O01O000001O00000000000000000000001O00000000000000000000000000nI5PDJSb0OYJMcC3U<8iCGn;Of^O8n4HVK0Ta04g^O4l4KYKLRa09d^O4P5GZKLj`0?T_OEk41b;U1f_OgNi44Y;]1WEcNg:^1ZEbNd:`1gA^N^12j<a1gA_NZNLf24X=b1hA_NVN2f2M\\\\=b1fAbNWN0f2LbMK[?g1VBPOP1^O_MK[?i1SBaNb1JoLN[?i1RB_Nd1JoLN[?R2fAYNP2GoLNZ?S2iAWNn1GoLOZ?S2jAUNn1InLOZ?U2iAQNP2LlLOZ?U2kAmMP2OkLOZ?V2lAgMR24hLOZ?W2bEiMTK1Y?W2bEhMTK2[?V2SBgM`10SM4Y?W2QBfMj1MfL7_?T2hEfMgJ7a?R2hEiMeJ6d?P2gEkMcJ7e?n1m@kMS4=P;;_@;>nNR4<S;6g@84UOT4=R;2o@<ETOi0OY2?\\\\<NTA>\\\\OTOl02W2?_<H_A6k33j<^OXCb0Sb0001O01O100O1O010O01O010O001O01O0001O01O001O001O010O00001O0001O01O00000001O01O0001O01O01O00001O01O000001O0001O0000010O00010O000001O0001O000001O01O0010O010O0001O01O01O0000001O01O01O000010O0001O01O01O0000001O00001O100O001O001O001O1O001O001N3NWP?0ho@1O2O0O2O1O0000001O000O2O00001O001O0000001O0000000000001O00000010O000001O0000000010O00001O0001O01O0001O01O0001O0001O0010O01O10O010O000010O000000010O00000100O010O1O10O0001O01O0000001O01O0001O0000010O000001O01O00001O00010O00001O00001O01O0001O0000000000001N100000001O0000001N2O0N3NhWT4\"}}, {\"image_id\": 109, \"category_id\": 1, \"bbox\": [254.0, 148.0, 215.0, 272.0], \"score\": 0.9999924302101135, \"association_id\": 1, \"light\": [-2.0563411712646484, -1.8421040773391724, 1.8699778318405151, 1.6203060150146484], \"segmentation\": {\"size\": [533, 799], \"counts\": 
\"\\\\bT41b`03M3N101O001N2O1O001O001O001O1O100O1O1N2O2N3M1O1O1O1O1O1O1O1O1N2O1N2N2N2N2O0O2O1M3N2N2N2N1O2M2M3YNoMhDT2T;PNiDR2V;QNgDQ2X;oMgDR2Y;oMdDS2[;oMYD]2f;cMoCh2Q<YMlCi2T<XMjCi2V<XMhCi2X<XMfCi2Z<i00O10O10O10000O101O000O010O100O1O1O100O100O1N2O100O1NeNhL]ER3g:RMWEj2l:WMTEh2m:YMREg2m:[MREd2o:]MPEc2P;^MoDa2R;`MmD`2S;bMkD]2V;cMjD\\\\2W;eMhD[2X;fMgDZ2Y;fMhDX2Y;iMfDW2Z;iMfDW2Z;iMfDW2Z;jMeDV2[;kMeDT2[;lMfDS2Z;oMiDl1W;mN[Dh0e;FUD1l;1TDMl;4TDKm;6RDIn;9WD@i;k0oCROQ<R1TDdNm;^1SD`Nm;a1SD]Nn;f1PDXNQ<k1mCSNT<P2jCmMW<W3SDhKa;Y3UD_M;]O];n3aDZL[;o2\\\\DVM43^;[3^DlL`;V4O1N1N2M3N2N2O1N2K5L4N2O001O0001N10O1O10O10001N2N21OROaEgK]:U4gElKY:R4gEPLX:o3hESLV:m3kEULQ:l3PFULc8_OmG[4BVL`8BkGX4GUL]8FiGV4JTL]8JdGS40SL[86WGi3>QLZ8Y5fGgJZ8Y5gGgJX8X5iGhJW8W5jGiJV8V5lGiJT8V5mGjJS8V5mGiJT8V5nGiJR8W5oGhJQ8X5QHeJP8[5SH`Jo7`5c1O2O1N3NRF`Jl8Z5QGmJP9n4oFWKV9a4iF`Kc9U4ZFnKh9P4TFSLn9l3lEYLU:i3eEZL]:o40O2O0O001O1N3L4M3L3N101O0O10001N3M3L4L4L3N2N4J<]M]2I6K5K6I`0XOVa[5\"}}, {\"image_id\": 110, \"category_id\": 1, \"bbox\": [462.0, 4.0, 167.0, 94.0], \"score\": 0.9937086701393127, \"association_id\": 2, \"light\": [-2.3981189727783203, -1.7403916120529175, 2.258676528930664, 1.5717086791992188], \"segmentation\": {\"size\": [640, 640], \"counts\": \"njP91oc00O1000000000000000000000000O100000000000000000000000000O1000001O000000000O10001O000000001O00000O10000000000000000000000000000\\\\M3UAMk>5SAKm>5SAKm>5SAKm>5SAKm>5SAKm>6RAJn>:n@FR?`0h@@W?b0h@^OX?c0S@[OXO2e`0d0m_OB[OJh`0j0e_O@AFj`0m0\\\\_ODI_Ok`0V2T_OjMl`0W2S_OiMm`0X2R_OhMn`0Y2Q_OgMo`0Y2Q_OgMo`0Z2P_OfMPa0Z2P_OfMPa0[2o^OeMQa0[2o^OeMQa0e2000000000000000000000000000000000000000000000O10000000000Gn^OdMRa0[2o^OeMQa0[2o^OeMQa0[2o^OeMRa0Z2n^OfMRa0Z2n^OfMRa0Z2n^OfMRa0[2m^OeMSa0[2m^OdMTa0d2000000001O0Lk^O]MUa0g20Ml^OZMTa0f2301O01O0001O0000001O001O1O1O2N2N10O000100O2N1O000O2O0O2O1N3L3M:DPk6\"}}, {\"image_id\": 110, \"category_id\": 1, \"bbox\": [10.0, 426.0, 458.0, 209.0], \"score\": 0.9999998211860657, \"association_id\": 1, \"light\": [-2.568974494934082, 
-1.4551295042037964, 2.5061392784118652, 1.2820651531219482], \"segmentation\": {\"size\": [640, 640], \"counts\": \"Xj6a0_c01N100O2O000000001O001N101O1O0000001O000000001O00001O001O001O00000000001O000000001O001O1O00001O0000000000001O00001O000000001O0000000000001O0000001O000000000000001O0000000000001O00000000000000000000000000001O00000000000000000000000000001O0000000001O00000000000001O00000001O000000000000001O000000001O01`KjNTFV1i9oNUFQ1j9ROTFn0j9UOUFk0k9VOTFj0k9WOVFh0i9ZOVFf0j9ZOVFf0i9\\\\OVFd0i9^OVFb0j9_OUFa0j9BnBAP2m0Q;DiBHR2d0T;FhBIS2`0T;IfBJU2=U;JdBJW2<T;KdBKW2:U;LcBKW29V;M`BLZ27V;M[B2^21V;Y1iDgNW;Z1hDfNX;]1eDcN[;c1_D]Na;f1\\\\DZNd;h1ZDXNf;j1XDVNh;j1XDVNh;k1WDUNi;l1VDTNj;m1UDSNk;n1TDRNl;o1SDQNm;P2RDPNn;S2oCmMQ<U2nCjMR<Y2kCgMU<Z2dC_LkNW1a=Z2dC`LjNV1b=[2cC_LkNV1b=[2bC`LlNU1b=\\\\2aC_LmNU1b=\\\\2aC_LmNU1b=]2aC]LmNV1b=]2aC]LmNV1b=^2`C[LPOV1a=_2^C[LQOV1a=`2eC_M[<b2dC^M]<b2cC]M]<d2bC\\\\M_<c2bC\\\\M^<e2aC\\\\M_<c2bC\\\\M_<d2`C\\\\M`<d2`C\\\\Ma<d2_C[Mb<e2iB`LGk0b=d2eBcLHi0d=f2`BbLLh0f=f2ZBdL1e0f=k2SBbL6c0h=^3XBbLi=]3WBcLi=]3WBcLj=\\\\3VBdLk=[3UBeLk=[3UBeLm=Y3TBfLm=Y3SBgLP>V3PBjLR>T3nAlLS>S3mAmLT>S3kAmLU>S3kAmLV>R3kAmLU>S3kAmLU>S3kAmLV>R3jAnLV>R3kAmLU>S3kAmLU>R3lAnLT>R3lAnLT>R3lAnLT>R3lAnLT>R3mAmLS>S3mAmLS>S3mAmLS>S3mAmLS>S3mAmLT>R3lAnLT>R3mAmLT>R3lAnLT>Q3mAoLT>P3lAPMU>o2kAQMV>n2jARMV>m2kATMU>k2kAUMU>k2kAUMV>j2jAVMV>j2kAUMU>k2kAUMU>k2kAUMU>l2jATMV>m2iASMW>m2iASMW>n2iAQMX>o2hA[LBK3:c>a3hAWLA601g>b3kAaL]OLh>d3hAlLX>U3fAkL[>V3cAkL^>U3`AlL`>U3^AlLc>U3ZAlLg>U3UAmLl>S41O001O001O001O001O001O1O1O1O001O00001O001O001O1O1_N\\\\AdMe>Y2cAaM^>\\\\2dAdM]>Z2cAgM]>X2dAhM]>V2dAjM\\\\>V2dAjM]>T2dAlM\\\\>T2dAmM\\\\>R2cAoM^>o1cAQN^>n1bARN^>n1bARN_>l1bATN^>l1bATN^>l1bATN_>k1aAUN_>j1bAWN^>h1cAWN]>i1cAWN]>i1cAWN^>h1bAYN^>f1cAYN^>g1aAYN`>f1`AZN`>f1`AZNa>e1_A\\\\N`>d1aA[N_>e1aA[N`>d1`A\\\\N`>d1`A\\\\N`>d1`A\\\\Na>c1_A^N`>b1`A^Na>a1_A_Nb>`1_A_Na>a1_A_Nb>`1^A`Nc>`1\\\\A`Ne>_1[AbNd>^1\\\\AbNd>^1\\\\AbNe>]1[AcNe>]1[AcNe>]1\\\\AbNe>]1[AcNe>]1[AcNf>\\\\1ZAdNf>\\\\1ZAdNg>[1YAeNh>Z1XAfNi>Y1WAgNi>Y1WAgNj
>X1VAhNj>X1VAhNk>W1UAiNk>W1UAiNk>W1UAiNl>V1TAjNl>V1TAjNm>U1SAkNn>T1SAkNn>T1RAlNn>T1RAlNo>S1QAmNP?R1PAnNP?R1QAmNP?R1PAnNP?R1PAmNR?R1n@nNR?R1n@nNS?Q1n@nNT?P1l@POU?o0k@QOV?n0j@ROV?n0j@ROW?m0j@ROV?n0j@QOX?m0j@ROV?o0i@QOW?o0k@mNX?R1k101O001O1O1O001O000000010O0001O001O001O1O01O0001O01O000010O01O1O00001O0000001O01O000001O0000001O010O00100O0010O010O0001O00001O00001O001O001O1O1N2O1N2O2M2N3M2O1N2N2NYnZ3\"}}, {\"image_id\": 110, \"category_id\": 1, \"bbox\": [73.0, 214.0, 282.0, 135.0], \"score\": 0.9960300922393799, \"association_id\": 3, \"light\": [-2.6936357021331787, -0.7546097040176392, 2.6039159297943115, 0.6130751371383667], \"segmentation\": {\"size\": [640, 640], \"counts\": \"`n]12mc02O1O0O100000000000000000000000001O000000000000000000000000000000001O00000000001O00000000000000001O00000000000000000O1000001O00000000000000000000000000001O000000000000000000000001O000000000000000000000000000000000000000001O000001O0000000000000000000000000000000000001O00000000000001[M3UAMm=He@a0[1Gm=Oc@>]1Co=1b@>^1Ao=3a@>_1_OP>4_@`0_1\\\\OQ>e1kA[NT>i1iAWNW>j1hAVNW>l1hATNX>l1hATNX>m1hARNX>P2fAPNZ>U2aAkM_>V2`AjM`>W2_AiMa>W2_AiMb>W2]AiMc>X2\\\\AhMe>Y2ZAfMg>[2WAeMj>[2UAeMk>\\\\2TAdMm>\\\\2RAdMo>[2QAeMQ?Z2n@fMS?Z2l@fMU?Z2j@fMW?Z2h@fMY?Z2f@fM\\\\?X2e@gM`?U2_@kMd?S2[@mMg?R2m_OjMG4]`0R2i_OmMI1_`0S2f_OmMJ0``0T2d_OmMLOa`0f2^_OZMc`0e2]_O[Md`0d2\\\\_O\\\\Me`0c2[_O]Mf`0c2Y_O]Mh`0b2X_O^Mi`0b2V_O^Mk`0a2V_O^Ml`0a2S_O_Mn`0`2R_O`Mn`0i21O001O2N1O001O001O2N001O1O001Eh^OlMYa0k1o^OUNRa0g1Q_OYNPa0d1R_O\\\\No`0c1Q_O]No`0b1R_O^No`0`1R_O`No`0^1S_OaNn`0]1S_OcNn`0[1S_OeNn`0Z1R_OfNn`0Y1S_OgNn`0X1R_OhNo`0W1Q_OiNQa0U1o^OkNRa0T1n^OlNSa0S1m^OmNTa0R1l^OnNUa0R1j^OnNVa0R1j^OnNWa0Q1i^OoNXa0P1h^OPOYa0P1f^OPOZa0P1f^OPO[a0o0f^OPO[a0o0e^OQO]a0n0b^ORO`a0m0_^OSOba0m0]^OSOda0l0\\\\^OTOda0m0[^OSOfa0l0Z^OTOga0k0Y^OUOia0i0W^OWOla0f0T^OZOna0d0R^O\\\\Ona0e0Q^O[OPb0d0P^O\\\\OQb0c0o]O]ORb0b0n]O^OTb0`0l]O@Ub0?k]OAVb0>j]OBWb0>h]OBYb0=g]ODZb0:f]OF[b09e]OG]b07d]OH]b08b]OH^b08b]OH_b08`]OH`b08`]OHab08^]OHcb08\\\\]OHeb08Z]OHgb09W]OFjb0;U]OElb0;S
]OEnb0;Q]OEQc0;m\\\\OETc0;k\\\\OEUc0<k\\\\OCVc0=i\\\\OCWc0>h\\\\OAZc0>f\\\\OBZc0>5O10O01O001N101N101N3M3M`ia5\"}}, {\"image_id\": 111, \"category_id\": 1, \"bbox\": [112.0, 284.0, 247.0, 316.0], \"score\": 0.9999499320983887, \"association_id\": 1, \"light\": [-2.1159300804138184, -1.7760145664215088, 2.085092544555664, 1.6663285493850708], \"segmentation\": {\"size\": [640, 418], \"counts\": \"X]V27ic01N101O1O1N3Z]OOYa02e^O0Za00e^O2Za0Oe^O2Za0Ne^O4Za0Md^O5[a0Kc^O<Xa0Ef^O?Wa0Ai^Oa0Ua0@i^Ob0Va0^Oj^Ob0Va0^Oi^Od0Va0]Oi^Oe0Ua0\\\\Oi^Oj0Ra0YOf^OU1Qa0o0O1N1O2O0O2O2N2N000OO2N1O2BU_OcMn`0W2Q_OdM41`0g0l>U2l@QNT?U2b@oM_?Y3?@g0YOK6eNZ1H8K6L3K4J7J5L5K4O101O0000O1O0O2O00001O2O00cMaJZF^5g9hJSFW5n9PKjEo4Y:TKcEk4^:XK^Eh4c:[KYEf4h:\\\\KUEd4l:]KREc4o:_KnDa4S;bKhD^4Z;eKaD\\\\4`;hKZDX4h;o1001O1O1O1N2O1O1O1NgCaHU<^7kCdHT<\\\\7kCfHT<Y7kCiHU<V7jClHV<`7O00001O0000^IQDT5n;jJZDP5f;nJ^DQ5a;lJcDS5];kJfDU5Y;iJiDW5W;hJkDW5U;hJmDW5T;gJmDY5S;fJnDZ5R;fJoDY5Q;fJPEZ5P;fJPE[5o:dJSE\\\\5l:dJTEZ4kNRLP<CVEZ4kNULm;AYEY4jNXLk;_O[EX4kNZLi;]O^EX4iN\\\\Lh;\\\\O_EW4kN]Le;[ObEW4iN_Ld;ZOdEU4iNcLa;WOgEU4iNfL^;UOjET4hNjL[;QOnEU4gNkLZ;POoEU4gNlLY;oNPFU4gNmLX;nNQFV4fNmLX;mNSFU4eNoLW;lNTFU4eNPMV;jNVFW4cNQMU;hNXFW4cNRMT;gNYFX4bNQMU;fNZFZ4`NQMU;dN]F[4]NQMV;dN]F[4]NPMW;dN^F\\\\4[NnLX;fN]F]4ZNmL3cNh:2mFh6n8XIRGk6l8SIUGo6i8QIWGU5WN\\\\L`:^NZGT5[N\\\\L[:_N[GR5fNWLm9fN^GQ5bN`Lo9]N`G]7]8bHdG_7\\\\8`HdGa7\\\\8]HeGd7\\\\:O1N2N3M3L4M2N2N2N2N2M4L5J5K4M3M3M2M4K501N2N1O2N0010O0001O0001O01O000001O01O00000SOdAVL\\\\>j3dAVL]>j3dASL]>m3dAPL^>Q4nA[KY>e4a00010O01O001N2O2N1O1O1O00001O01O0000000000001O001O0O101O1O1N2N1O2N2N2N1O2O1N3gL]@Q2g?cMh@U2j`0H6I3N2M2O1N2N3N3L4K5K4M2M3M4K=^Od\\\\T1\"}}, {\"image_id\": 111, \"category_id\": 1, \"bbox\": [261.0, 0.0, 143.0, 166.0], \"score\": 0.9999996423721313, \"association_id\": 2, \"light\": [-1.456679344177246, -2.1222362518310547, 1.321978211402893, 1.9714711904525757], \"segmentation\": {\"size\": [640, 418], \"counts\": 
\"TXS51nc02O1O001N10001O00001O001O00001O000000001O010O1O3M10O01O0010O0001O01O01O00001O000010O0001O001O001O010O00001O00010O1O001O0010O0000aNJT_O6_`0o0h^OnNM]OYa0c2N5K4M4L2QN_M[Bf2c=bMTB`2l=gMkA[2T>i10000000000000O1000000O1O100O100O10000O1O1O1N2O1O1N2N2N2N2I7N2N2F:M3M3J6L4N2L4J6N2G9B>L4O1O100O2O0O11000O20O1O1O10b0_O1N03K>C6I9G9G2M6Ji0TOZe8\"}}, {\"image_id\": 112, \"category_id\": 1, \"bbox\": [280.0, 833.0, 279.0, 142.0], \"score\": 0.9999976754188538, \"association_id\": 2, \"light\": [-2.5709924697875977, -2.166045904159546, 2.4933228492736816, 2.04335355758667], \"segmentation\": {\"size\": [1372, 2047], \"counts\": \"XRh;;^Z1>C6I4L5I7I6G9M2oNWNlhNm1SW1UNfhNP2YW1RNZhNH5W2aW1YN\\\\hNi1bW1YN\\\\hNh1`W1]N^hNd1`W1n0M2N2N2N3N1O100O1N2O1O1O2N1O100O100O10000O1O1O1O100O011O0O10000000000O1000000O10000O10000O1000000O1000000000000000000000000000000000000000000000000000001O000000000000000000O1000000O100000000O10000O1O100001O00L_iNmKaV1S4_iNnK`V1R4`iNnK`V1W4000001O00001O00000000000000000001O000000000O101O001O00001O00000O10001O00000000001O0O1GYiN]LhV1k3001O001O001O00000O101O00002N2_OohNSM`W1e0`hN7>TOoY10O010000000O01000000000000O100000000O10000000O10O1000000000000O1000000000000O10000000000O100000000O100000000000000O100000000O100000000000000000000000000O1000000000000000000000000O10000O100O10WhXn1\"}}, {\"image_id\": 112, \"category_id\": 1, \"bbox\": [579.0, 152.0, 1398.0, 1217.0], \"score\": 0.9999426603317261, \"association_id\": 1, \"light\": [-2.7585465908050537, -1.544111967086792, 2.666482448577881, 1.3671910762786865], \"segmentation\": {\"size\": [1372, 2047], \"counts\": 
\"Y`Xh0k0[Y1l0E<C=D=A>C>A>A?B>B?QK^JmSOP6ek0iKcROb4Qm0TLZROU4\\\\m0`L^QO\\\\4Xn0Z4G9H7I8L3L5L3M3L4M3M3M3M2N3M3M5K5K5K5K5L4K4L5K5K6K3L3M3M4L3M4K5L4L4L4K5L5QWOh_OPg0^`0`XOS@Yg0S`0WXO_@bg0da0H9F:FP1QOQ1nN?B=C<C6K4L5K4L4L4L4L4L4M3L3M4L3N3M2N2N3M2N2N2N2N2O1N2N2O1N2N2O1N1O2N2O1N2N2N2N2N2N2N2N2N2N1O2M3N2N2M3N2M3M3N2M3M3M3N1O2M3N2N2N2N2N2N2O1N2N2N2O0O2N2O1N2O1N101N2O0O2O0O100O2O0O100O100O1O101N1O100O1O1O1O2N1O1O1O1O1O1N2O2N1N2O1O1O1O1O1O1O2N100O1O100O1O100O2O0O1O10000O100O1000000O10000O1000000O10000O1000000O10000O100O10000O10000O100O1000O0100O104K6K3L2O0O101N100O2N100O2O0O1O2N100O1O2N1O100O2N100O1O2O0O100O2O0O101N100O101N101N10001N10001N101O0O2O001N101O0O2O00001N101O000O2O001O0O101O00001O0O2O00001O001N101O001O0O2O1O001N101O001N101O001N10001O0O2O000O2O00001N101O0O101O0O100O2O000O2O0O101N1O1O2O0O1O2N1O2N1O2N101N1O2N101N1O2O0O2O0O2O0O2O0O2O0O2O0O2O0O2O001N101O0O2O1O1N3N1O2M2O1O1O1N2O001N2O1O001O001O00001O00001O00001O0000001O00000O2O00001O000000YMQIbQOn6^n0RIbQOn6^n0RIbQOn6^n0SIaQOm6_n0SIbQOl6^n0TIbQOl6^n0TIbQOl6^n0UIaQOk6_n0UIbQOj6^n0VIbQOj6^n0VIbQOj6^n0WIaQOi6_n0WIbQOh6^n0XIbQOg6_n0YIaQOg6_n0YIaQOg6_n0ZI`QOf6`n0ZI`QOf6`n0ZI`QOf6`n0ZIaQOe6_n0[IaQOe6_n0[IaQOe6_n0\\\\I`QOd6`n0\\\\I`QOd6`n0\\\\I`QOd6`n0\\\\IaQOb6`n0^I`QOb6`n0^I`QOb6`n0^I`QOa6an0`I^QO`6bn0`I^QO`6bn0`I_QO^6bn0bI^QO^6bn0bI^QO^6bn0bI^QO]6cn0cI]QO]6cn0dI\\\\QO[6en0eI\\\\QOZ6dn0fI\\\\QOY6en0gI[QOY6en0gI[QOY6en0gI[QOX6fn0hIZQOX6fn0iIZQOU6gn0kIYQOT6hn0lIXQOT6hn0lIXQOS6in0mIWQOS6in0mIWQOR6jn0nIVQOQ6kn0PJTQOP6ln0PJUQOn5ln0RJTQOm5mn0SJSQOl5nn0TJRQOl5nn0TJRQOk5on0UJQQOj5Po0WJoPOh5Ro0XJnPOg5So0YJmPOf5To0ZJlPOe5Uo0[JkPOd5Vo0\\\\JjPOd5Vo0\\\\JjPOc5Wo0^JhPOa5Yo0_JgPO`5Zo0`JfPO_5[o0aJePO_5[o0aJePO^5\\\\o0bJdPO]5]o0dJbPO\\\\5^o0dJbPO[5_o0eJaPO[5_o0eJaPOZ5`o0gJ_POY5ao0gJ_POX5bo0hJ^POX5bo0hJ^POW5co0iJ]POW5co0jJ\\\\POV5do0jJ\\\\POV5do0jJ\\\\POV5do0jJ\\\\POU5eo0lJZPOT5fo0lJZPOT5fo0lJZPOT5fo0mJYPOS5go0mJYPOR5ho0nJXPOR5ho0oJWPOQ5io0oJWPOQ5io0oJWPOQ5io0PKVPOP5jo0PKVPOo4ko0QKUPOo4ko0QKUPOo4ko0RKTPOn4lo0RKTPOn4lo0RKTPOn4lo0RKTPOm4mo0SKSPOm
4mo0TKRPOl4no0TKRPOl4no0TKRPOl4no0TKRPOl4no0UKQPOj4PP1VKPPOj4PP1VKPPOj4PP1VKPPOj4PP1VKPPOj4PP1WKooNi4QP1WKooNi4QP1WKooNh4RP1XKooNg4QP1YKooNg4QP1ZKnoNf4RP1ZKnoNf4RP1ZKnoNf4RP1ZKnoNf4RP1[KmoNd4TP1\\\\KloNd4TP1\\\\KloNd4TP1\\\\KloNd4TP1]KloNb4TP1^KloNb4TP1^KloNb4TP1^KloNb4TP1_KkoNa4UP1_KkoN`4VP1`KjoN`4VP1aKioN_4WP1aKioN_4WP1aKioN_4WP1aKioN_4WP1bKhoN^4XP1bKioN]4WP1cKioN]4WP1dKhoN\\\\4XP1dKhoN\\\\4XP1eKgoN[4YP1eKgoN[4YP1eKgoN[4YP1fKfoNZ4ZP1fKfoNY4[P1gKeoNY4[P1hKdoNX4\\\\P1hKdoNX4\\\\P1hKdoNX4\\\\P1iKcoNW4]P1iKdoNV4\\\\P1jKdoNV4\\\\P1kKcoNU4]P1kKcoNU4]P1kKcoNU4]P1lKboNT4^P1lKboNT4^P1lKboNT4^P1lKboNT4^P1lKboNT4^P1lKboNS4_P1mKaoNS4_P1mKaoNS4_P1mKaoNS4_P1mKaoNS4_P1mKaoNS4_P1mKaoNS4_P1mKaoNS4_P1mK`oNT4`P1lK`oNT4`P1lK`oNT4`P1lK`oNT4`P1lK`oNT4`P1lK`oNT4`P1lK`oNT4`P1lK`oNS4aP1nK^oNR4bP1nK^oNR4bP1nK^oNR4bP1nK^oNR4bP1nK^oNR4bP1nK^oNR4bP1nK^oNR4bP1nK^oNR4bP1nK^oNR4bP1nK^oNR4bP1nK^oNR4bP1nK^oNR4bP1nK^oNR4bP1nK^oNR4bP1nK^oNR4bP1nK^oNQ4cP1oK]oNQ4cP1oK]oNQ4cP1oK\\\\oNR4dP1nK\\\\oNR4dP1nK\\\\oNR4dP1nK\\\\oNR4dP1nK\\\\oNR4dP1nK\\\\oNR4dP1nK\\\\oNR4dP1nK\\\\oNR4dP1nK\\\\oNR4dP1nK\\\\oNR4dP1nK\\\\oNR4dP1nK\\\\oNR4dP1nK\\\\oNR4dP1nK\\\\oNR4dP1nK\\\\oNR4dP1nK\\\\oNQ4eP1oK[oNQ4eP1oK[oNQ4eP1oK[oNQ4eP1oK\\\\oNP4dP1PL\\\\oNP4dP1PL\\\\oNP4dP1PL\\\\oNP4dP1PL\\\\oNP4dP1PL\\\\oNP4dP1oK]oNQ4cP1oK]oNQ4cP1oK]oNQ4cP1oK]oNQ4cP1oK]oNQ4cP1oK]oNQ4cP1oK]oNQ4cP1oK]oNP4dP1PL\\\\oNP4dP1PL\\\\oNP4dP1PL\\\\oNP4dP1PL\\\\oNP4dP1PL\\\\oNP4dP1PL\\\\oNP4dP1PL\\\\oNP4dP1oK]oNQ4cP1oK^oNP4bP1PL^oNP4bP1PL^oNP4bP1PL^oNo3cP1QL]oNo3cP1QL]oNo3cP1QL]oNo3cP1QL]oNo3cP1QL]oNo3cP1QL]oNo3cP1QL]oNo3cP1QL]oNn3dP1QL]oNo3cP1QL]oNo3cP1QL]oNo3cP1QL]oNo3cP1QL]oNo3cP1QL]oNo3cP1PL^oNo3cP1QL]oNo3cP1QL]oNo3cP1PL^oNP4bP1PL^oNP4bP1PL^oNP4bP1oK_oNQ4aP1oK_oNP4bP1PL^oNP4bP1oK_oNQ4aP1oK_oNQ4aP1oK_oNQ4aP1oK_oNQ4aP1nK`oNQ4aP1oK_oNQ4aP1oK_oNQ4aP1nK`oNR4`P1nK`oNR4`P1nK`oNR4`P1nK_oNR4bP1mK_oNS4aP1mK_oNS4aP1mK_oNS4aP1mK_oNS4aP1lK`oNT4`P1lK`oNS4aP1mK_oNS4aP1mK_oNS4aP1mK_oNS4aP1lK`oNT4`P1lK`oNS4aP1mK_oNS4aP1mK_oNS4aP1mK_oNS4aP1lK`oNT4`P1lK`oNT4`P1lK`oNT4`P
1lK`oNT4`P1lK`oNT4`P1kKaoNV4^P1jKboNV4^P1jKboNV4^P1jKboNV4^P1jKboNV4^P1iKcoNW4]P1iKboNY4]P1gKcoNY4]P1gKcoNY4]P1gKcoNY4]P1gKcoNY4]P1gKcoNY4]P1gKcoNZ4\\\\P1fKdoNZ4\\\\P1fKdoNZ4\\\\P1fKdoNZ4\\\\P1fKdoNZ4\\\\P1fKdoNZ4\\\\P1fKdoNZ4\\\\P1fKdoN[4[P1eKeoN[4[P1eKeoN[4[P1eKeoN[4[P1eKeoN[4[P1dKfoN\\\\4ZP1dKfoN\\\\4ZP1dKfoN]4YP1cKgoN]4YP1cKgoN]4YP1cKgoN]4YP1cKgoN]4YP1cKgoN]4YP1cKgoN]4YP1cKgoN]4YP1cKgoN^4XP1bKhoN^4XP1bKhoN^4XP1bKhoN^4XP1bKhoN^4XP1bKhoN^4XP1bKhoN^4XP1bKhoN^4XP1bKhoN_4WP1aKioN_4WP1`KjoN`4VP1`KjoN`4VP1`KjoN`4VP1`KjoN`4VP1`KjoN`4VP1`KjoN`4VP1`KjoN`4VP1`KjoN`4VP1`KjoNa4UP1_KkoNa4UP1_KkoNa4UP1_KkoNa4UP1_KkoNa4UP1_KkoNa4UP1_KkoNa4UP1_KkoNa4UP1_KkoNa4UP1_KkoNa4UP1_KkoNa4UP1`KjoN`4VP1`KjoN`4VP1`KioNa4WP1_KioNa4WP1_KioNb4VP1^KjoNb4VP1^KjoNb4VP1^KjoNb4VP1^KjoNb4VP1^KjoNb4VP1^KjoNb4VP1^KjoNb4VP1^KioNc4WP1^KhoNb4XP1^KhoNb4XP1^KhoNb4XP1^KhoNb4XP1^KhoNb4XP1^KhoNb4XP1^KhoNb4XP1^KgoNc4YP1]KgoNd4XP1\\\\KhoNd4XP1\\\\KhoNd4XP1\\\\KhoNd4XP1\\\\KhoNd4XP1]KfoNd4ZP1\\\\KfoNd4ZP1\\\\KfoNd4ZP1\\\\KfoNd4ZP1\\\\KfoNd4ZP1\\\\KeoNe4[P1[KeoNe4[P1[KeoNe4[P1[KeoNe4[P1[KdoNf4\\\\P1ZKdoNf4\\\\P1ZKdoNf4\\\\P1[KcoNf4\\\\P1ZKdoNf4\\\\P1YKdoNh4\\\\P1XKdoNh4\\\\P1XKdoNh4\\\\P1XKdoNh4\\\\P1XKcoNi4]P1WKcoNi4]P1WKcoNi4]P1WKboNj4^P1VKboNj4^P1UKcoNk4]P1UKboNl4^P1TKboNl4^P1TKboNl4^P1TKaoNm4_P1SKaoNm4_P1SKaoNm4_P1RKaoNo4_P1QKaoNo4_P1QK`oNP5`P1PK`oNQ5_P1oJaoNQ5_P1oJ`oNR5`P1mJaoNS5_P1mJ`oNT5`P1lJ`oNT5`P1lJ_oNU5aP1kJ_oNU5aP1kJ^oNV5bP1iJ_oNW5aP1iJ^oNX5bP1hJ^oNX5bP1hJ]oNY5cP1gJ]oNY5cP1fJ]oN[5cP1eJ]oN[5cP1eJ\\\\oN\\\\5dP1dJ\\\\oN\\\\5dP1cJ]oN]5cP1cJ\\\\oN^5dP1bJ\\\\oN^5dP1bJ\\\\oN^5dP1aJ\\\\oNa5cP1_J]oNa5cP1_J\\\\oNb5dP1^J\\\\oNb5dP1]J]oNc5cP1]J]oNc5cP1]J\\\\oNd5dP1\\\\J\\\\oNd5dP1\\\\J[oNe5eP1ZJ\\\\oNf5dP1ZJ[oNg5dP1ZJ\\\\oNf5dP1YJ\\\\oNh5dP1XJ\\\\oNh5dP1XJ[oNi5eP1VJ\\\\oNj5dP1VJ[oNk5dP1UJ\\\\oNl5dP1TJ\\\\oNl5dP1SJ\\\\oNn5dP1RJ[oNo5dP1RJ[oNo5eP1PJ[oNQ6eP1oIZoNR6fP1mIZoNT6fP1kIYoNW6fP1jIYoNW6gP1hIYoNY6gP1gIXoNZ6hP1eIXoN\\\\6gP1dIXoN^6hP1bIWoN^6jP1aIVoN`6iP1`IWoNa6iP1^IWoNc6iP1\\\\IWoNe6iP1ZIWoNg6hP1YIXoNh6hP1WIYoNi
6gP1VIYoNk6fP1UI[oNk6eP1TI\\\\oNl6dP1TI[oNm6dP1SI]oNm6cP1RI]oNo6XR10ZNQI_oNo6VR11000000O1000000O10000O10000O100O100O100O100TNiHRPOX7mo0hHTPOX7ko0hHUPOY7io0hHWPOY7ho0gHXPOZ7go0eH[PO[7co0fH]PO[7ao0fH_PO[7`Q11O2N1O1O100O1O100O1O100O100O100O10000O100O10000O100jLSH\\\\SOo7bl0RH^SOn7al0RH`SOn7_l0SH`SOn7_l0RHbSOn7]l0RHcSOo7]l0QHcSOo7\\\\l0QHdSOP8[l0PHfSOP8Yl0QHfSOP8Yl0PHhSOP8Xl0oGhSOR8Wl0nGjSOR8Ul0nGkSOT8Sl0mGmSOS8Sl0kGnSOV8Ql0iGPTOY8Yo00O2N101N1O2N1O2N101N2N1O2O1N2O0O2N2O1N2O1O1N3N1N2O2N1N2O1O1N2O1O1N2O1O1N101N2O0O2O1N101N101O0O2O0O101O0O2O001N101O0O2O001O1N101O1O1N101O1O1N2O1O1O1O1N2O2N1O1O2M2O1O1O1O1N2O1O1O1N101O1N2O001N101O0O2O1N101N101O0O2O001N2O001N2O1O1N101O1O1N3N1O1O1N3N1O2N2M3N2N2M4M2N1N3N2M2O2M2O2M2O1N2N2N2O1N2N2N2O1N2N101N2N2O0O2O1N2N101N2O1N2O1N2O2M2O1N3M2O1N3M2O1N3M2N2O2M2N2N3M2N3M2PLf_O[]O\\\\`0db0d_O[]O_`0bb0a_O]]Ob`0ab0__O^]Od`0`b0[_O`]Og`0^b0Z_O`]Oj`0^b0V_Oa]Ol`0]b0T_Ob]OPa0\\\\b0P_Oc]ORa0[b0n^Od]OVa0Zb0j^Oe]OXa0Yb0h^Of]O[a0Yb0d^Of]O_a0ce09G8O2N2N2N2N1O2N2N101N1O2O1N2O0O2N2O1N2O001N2O1N3N1M3N2N2M4M2M4M3L3M4L4UMm[Oh^OVd0Ua0k[Oj^OXd0Sa0h[Om^O[d0Pa0f[Oo^O\\\\d0o`0d[OQ_O^d0m`0c[OQ_O`d0m`0`[OS_Obd0k`0^[OU_Odd0j`0\\\\[OU_Ofd0i`0Z[OV_Oid0h`0W[OX_Okd0f`0U[OZ_Omd0e`0R[OZ_OQe0d`0oZO\\\\_OSe0b`0lZO__OUe0a`0jZO^_OYe0Qc02N1O2N2N1O2N1O2N101N1O2O0O2O0O2O0O2O001O1N101O1N2O1O001N2O1O2M2O1O1O2M2O2N2N1N3N1O2M2O1O1O1N2O1O1N2O1O001N2O001N2O001N101O1O0O2O001N101O0O2O1O0O2O1N2O1O0O2O1N2O1O2M2O2M3N2M3N3L3N2M3N2M3N1N3N2M3N1N3N1N3M2O2M2O2M2O1N3N1N2O1N2O1O1N101O1N2O1N2O1N2O1N2O1N2N3N1N2N3M2O2M3]M^TOfEek0Y:[TOeEik0X:XTOfElk0W:VTOgEmk0V:TTOhEQl0T:QTOiESl0T:nSOkEZl0m9hSOPFil0`9XSO^FYm0Q9jROlFfm0e8[ROXGVn0h6PRO]HLg0en0X6VRO_HVOW1Po0j5mROSJ\\\\m0a5iRO]J^m0X5iROeJ_m0P5fROkJdm0k4bROoJhm0f4^ROUKkm0`4]RO\\\\Kkm0X4\\\\ROeKlm0o3\\\\ROnKom0c3WRO\\\\LSn0U3TROiLUn0i2QROUMYn0o1\\\\ROPNXU1O2N101Mhjl2\"}}, {\"image_id\": 113, \"category_id\": 1, \"bbox\": [33.0, 535.0, 714.0, 320.0], \"score\": 1.0, \"association_id\": 1, \"light\": [-1.5676579475402832, -1.788027286529541, 1.3961153030395508, 
1.6365773677825928], \"segmentation\": {\"size\": [1553, 2048], \"counts\": \"Qcb1i0b_18K4M4N1N2N2N2O1N3N1N3N2M3N3L6K6I8H?B5J7J9G2M2aeNULVX1l3ggNYLVX1h3ggN^LTX1d3igN`LUX1a3igNeLRX1]3jgNiLRX1X3`gNXM]X1o2UgNZMiX1Y5N1O2N1O1O2N2VhNkHTV1V7jiNmHSV1V7jiNlHUV1U7jiNlHUV1V7hiNmHVV1T7hiNnHWV1S7hiNnHWV1S7giNoHXV1R7giNoHXV1P7jiNlHYV1S7jiNiHXV1V7kiNdHYV1[7_1O100O100O10000O10000O1000000O10000000000000001N1000000000001O000O11O0010O01O001O1O010O1O1O1O10O01O1O1O001O0010O01O1O1O2N1O5K4L4L5K3M1O1O1O1O1O1O001O1O1O1O1O1O1O1O1O1O001O1O001O1O00000000000000O010eMnhNWKRW1f4XiNSKhV1i4_iNTKaV1j4ciNTK]V1k4fiNRKmNgNVW1W6oiNPKhNPOUW1o5UjNPKdNTOVW1k5ZjN[KfU1e4^jNXKaU1g4cjNVK]U1j4ejNUKZU1k4hjNSKWU1n4jjN_JSNKSW1f5ljN\\\\JQN0RW1d5ojNYJPN3QW1d5QkNVJoM7oV1c5bkN]J^T1c5akN^J_T1b5akN_J^T1`5ckN`J]T1`5ckN`J]T1`5ckN`J\\\\T1a5dkN`J[T1`5ekN`J[T1`5ekN`J[T1`5ekN`J[T1`5ekN`J[T1`5dkNaJ\\\\T1_5dkNaJ\\\\T1_5dkNbJ[T1^5ekNbJZT1_5fkNaJZT1_5fkNaJZT1_5fkNaJZT1_5fkNaJZT1_5fkNaJYT1`5gkN`JYT1`5gkNaJXT1_5hkNaJXT1_5hkNaJXT1`5gkN`JYT1`5gkN`JXT1a5hkN_JXT1a5hkN_JXT1a5hkN_JXT1a5hkN_JXT1a5hkN_JXT1a5hkN_JXT1a5ikN^JWT1b5ikN^JWT1b5ikN^JWT1b5ikN^JWT1b5ikN_JVT1a5jkN_JVT1a5jkN_JVT1a5jkN_JVT1a5jkN_JVT1b5ikN^JWT1b5ikN^JWT1b5jkN]JVT1c5jkN]JVT1c5jkN]JVT1c5jkN]JVT1c5jkN]JVT1c5jkN]JVT1c5jkN]JVT1c5kkN\\\\JUT1d5kkN\\\\JUT1d5kkN\\\\JUT1d5kkN\\\\JTT1e5lkN[JTT1e5lkN[JTT1e5lkN[JTT1e5lkN[JTT1e5lkN[JTT1e5mkNZJST1e5nkN[JRT1e5nkN[JRT1e5nkN[JRT1e5nkN\\\\JQT1d5okN\\\\JQT1d5okN\\\\JQT1d5okN\\\\JQT1d5okN\\\\JQT1d5okN\\\\JQT1d5okN\\\\JQT1d5okN\\\\JQT1d5okN\\\\JQT1d5PlN[JPT1e5PlN[JPT1d5QlN\\\\JoS1d5QlN]JnS1c5RlN]JnS1c5RlN]JnS1c5RlN]JnS1c5RlN]JnS1c5RlN]JnS1c5RlN]JnS1c5RlN]JnS1c5RlN]JnS1c5RlN]JnS1c5RlN]JnS1c5RlN]JnS1b5SlN^JmS1b5SlN^JmS1b5SlN^JmS1b5SlN^JmS1b5SlN^JmS1a5TlN_JlS1a5UlN^JkS1b5UlN^JkS1a5VlN_JjS1a5VlN_JjS1a5VlN_JjS1`5WlN`JiS1_5XlNaJhS1_5WlNbJiS1]5XlNdJgS1\\\\5YlNdJgS1[5ZlNeJfS1[5ZlNeJfS1[5ZlNeJfS1Z5[lNfJeS1Z5[lNfJeS1Z5[lNgJdS1Y5\\\\lNgJdS1Y5\\\\lNgJdS1Y5TkNoI^Oh0^U1Y5TkNQJ[Og0aU1X5SkNTJZOd0cU1X5RkNVJZOb0dU1X5QkNWJ[Oa0dU1X5PkNYJ[O?eU1X5ojN[J[O=fU1X5mjN
]J]O;fU1X5ljN^J^O:fU1X5kjN`J^O8gU1X5jjNaJ_O7gU1Y5hjNaJA6gU1Y5gjNbJB6fU1X5hjNcJA5gU1X5gjNdJB4gU1X5gjNdJB4gU1X5fjNeJC3gU1X5gjNcJC5fU1Y5fjNaJD7fU1X5fjN_JF9dU1Y5fjN\\\\JG;cU1Z5fjNYJH=bU1Z5fjNWJJ?`U1[5jjNoIHg0]U1[5VlNeJjS1\\\\5UlNdJkS1]5TlNcJlS1^5RlNcJnS1^5QlNbJoS1_5PlNaJPT1`5okN`JQT1a5nkN_JRT1a5nkN_JRT1g5hkNYJYT1l5akNTJ_T1m5`kNSJ`T1o5^kNQJbT1P6]kNPJcT1Q6\\\\kNoIdT1R6[kNnIeT1T6YkNlIgT1U6XkNkIhT1W6VkNiIjT1X6UkNhIkT1X6UkNhIkT1Y6TkNgIlT1Z6SkNfImT1Z6SkNfImT1[6RkNeIoT1Z6PkNgIPU1Z6ojNfIQU1Z6ojNfIQU1[6njNeIRU1[6njNeIRU1[6njNeIRU1\\\\6mjNdISU1]6ljNcITU1^6kjNbIUU1_6jjNaIVU1b6gjN^IYU1e6cjN\\\\I^U1g6^jNYIbU1j6[jNVIeU1n6WjNRIiU1Q7TjNoHlU1S7RjNnHmU1S7RjNmHnU1T7QjNlHoU1U7PjNkHPV1V7oiNjHQV1W7niNhHTV1W7kiNjHUV1W7jiNiHVV1W7jiNiHVV1X7iiNhHXV1W7hiNiHXV1W7hiNiHXV1W7hiNiHYV1V7fiNkHZV1U7fiNkH[V1U7diNkH\\\\V1U7diNkH\\\\V1U7diNkH]V1T7ciNlH]V1U7biNkH^V1U7biNkH^V1U7aiNlH`V1e5[iN`J4KaV1a5`iNcJOLaV1^5diNeJKMaV1]5fiNfJHMbV1\\\\5hiNfJEOdV1Y5iiNgJC0dV1X5kiNfJB2cV1W5miNeJA4bV1V5PjNbJ@8`V1U5SjN`J]O<aV1Q5VjN_J[O`0_V1P5YjN\\\\JZOd0]V1o4\\\\jNYJYOh0[V1n4_jNPJ]OS1TV1m4\\\\kNSKeT1k4\\\\kNUKdT1k4\\\\kNUKdT1j4]kNVKcT1j4\\\\kNWKeT1g4\\\\kNYKdT1g4\\\\kNYKdT1f4]kNZKcT1f4]kNZKdT1e4\\\\kN[KdT1d4]kN\\\\KdT1c4[kN^KeT1a4\\\\kN_KeT1`4[kN`KfT1_4ZkNaKgT1]4YkNdKhT1Z4YkNfKiT1X4WkNhKjT1V4VkNkKlT1R4UkNnKlT1Q4TkNoKnT1n3RkNSLoT1l3QkNTLoT1k3RkNULoT1j3QkNVLPU1i3PkNWLPU1h3QkNXLPU1g3PkNYLPU1g3PkNYLPU1f3QkNZLPU1e3PkN[LPU1e3PkN[LQU1d3ojN\\\\LQU1d3ojN\\\\LRU1c3mjN^LTU1a3ljN_LUU1`3kjN`LVU1_3jjNaLXU1]3hjNbL[U1\\\\3ejNdL\\\\U1[3djNeL]U1Z3cjNfL_U1X3ajNhL`U1W3`jNiLaU1V3_jNjLaU1U3`jNkLaU1T3_jNlLbU1S3^jNmLbU1T3]jNlLdU1S3]jNlLdU1T3[jNlLeU1T3[jNlLfU1T3YjNlLhU1T3WjNlLjU1S3WjNkLkU1U3TjNkLnU1U3PjNkLQV1Z3iiNfLYV1]3biNcL`V1^3^iNaLdV1_3ZiNaLhV1_3egN\\\\KS1U1YW1`3\\\\gNcKY1m0\\\\W1o3bhNQL_W1P4`hNnKbW1R4agNjJ>T1RX1S4ZgNnJc0o0SX1a4lgN_KUX1a4jgN_KWX1a4hgN_KYX1b4egN^K\\\\X1b4cgN^K_X1b4`gN\\\\KbX1e4\\\\gN[KfX1h4UgNXKUY1b4gfN^KfY1W4XfNiKkY1U4TfNjKoY1V4oeNjKSZ1U4leNkKVZ1W51O001N2O001O001O001O1O1O1N3N2N3M1O2N1O2N0O2O1O001O0O2O001O1O1O1O1O1N2O1O001^MedNL\\\\
[1MmdN0S[1LTeN1mZ1LXeN1hZ1M[eN2eZ1L_eN2bZ1JbeNlNmN`0b[1b0eeNhNmNf0_[1?heNdNnNm0Z[1=leN_NnNU1W[18_fNHbY1JkfN6UY1FofN:RY1ARgN?oX1iNggNX1h[11O1O1O1O1O01000O10000000000000O1000000000000000000000000000000O1000000000000000000O1000000O100000000O100000000000000O1000000000000000000O10000000000O10000000000000000O100000001O0000000000000000000O10001O000000000O10000000001O0O100000000000001O000O100000001O00000O10001O00000000000O10000000000O1000000000000O1000000000000000000000000O10000000001O0000000O100000000000000O100000000O1000000O10000O10001N100O2N2N5IoYdm1\"}}, {\"image_id\": 113, \"category_id\": 1, \"bbox\": [1000.0, 556.0, 1039.0, 957.0], \"score\": 1.0, \"association_id\": 3, \"light\": [-1.625296950340271, -2.278841733932495, 1.5809178352355957, 2.0611679553985596], \"segmentation\": {\"size\": [1553, 2048], \"counts\": \"a]]_13]`14K5L4K6J5K6J7H8I4K4L5K4K5L4M3L4M3M3M3M3N1N3N2N1O2N1O2M2O2N1O2M2O2L3L5K4L5K5J6SMYLihNl3kV1hLehNa3XW1jLngNh3PX1^2N2N2N2N3M2N2O1N2N2O1N001O1O001O001O00001N10001O001N10001O0O2M2O2M2O2M2O2O1O0O2O1O001N2O001N2O1N101O1O1O010O1O10O01O100O010O100O1O10O0100O100O100O100O10O0100O100O100O100O1000O0100O1000000O1000000O1000000O1000000O10000O1000000O10000001O001O0010O0001O001O00001O0000001O0000001O0000001O00O101O00000000001O00000000001O000O10001O000000001O001O00100O1O1O1O2O0O2N2N3M2N3N1N3M2N2N2N2N2O1N2N1O2N2N1O1O2N2N2N2N2N3M3M3M3M5K8H=C=C6J5K5K4L4L4L3M2N1O2N100O1O1O2N1O2N1O2N2N2N2N3M2N3N4K4L5K4L3M3M3M3M3M3M3M3M3M3M3M4L3M4L;Ek0UO4M3L5K5K5K6J3M3M2N2N2N2N1oXOS\\\\O\\\\a0oc0`^OU\\\\O^a0lc0`^OV\\\\O_a0lc0]^OW\\\\Oba0jc0\\\\^OX\\\\Oca0ic0[^OY\\\\Oda0hc0Z^OZ\\\\Oea0gc0Y^O[\\\\Ofa0gc0V^O[\\\\Oja0fc0T^O[\\\\Ola0gc0Q^O[\\\\Ona0fc0o]O\\\\\\\\OQb0fc0l]O\\\\\\\\OSb0fc0i]O\\\\\\\\OWb0gc0d]O[\\\\O\\\\b0hc0_]O[\\\\O`b0gc0[]O\\\\\\\\Oeb0gc0U]O\\\\\\\\Okb0gc0o\\\\O]\\\\OPc0fc0i\\\\O^\\\\OWc0ec0b\\\\O_\\\\O^c0dc0\\\\\\\\O_\\\\Odc0dc0V\\\\O`\\\\Oic0cc0Q\\\\O`\\\\Ooc0cc0k[O`\\\\OUd0ec0d[O]\\\\O\\\\d0hc0\\\\[O\\\\\\\\Ocd0hc0W[OZ\\\\Oid0jc0Q[OX\\\\Ood0kc0lZOW\\\\OTe0lc0fZOX\\\\OZe0jc
0WZOb\\\\Oie0ac0iYOj\\\\OWf0Wc0cYOn\\\\O]f0cf000000000000000000000O10000000000000000000000000000000O10000O1000000`LdZOj[O]e0Td0jZOg[OVe0Wd0P[Oe[OPe0Yd0W[Oa[Ojd0^d0\\\\[O][Odd0ad0c[OZ[O]d0ed0h[OV[OYd0hd0m[OT[OSd0U`0dYOQC]2fLPd0V`0iYOPC[2gLlc0V`0oYOoBX2iLic0U`0UZOnBV2iLfc0V`0YZOnBT2jLcc0V`0^ZOlBR2kLac0W`0aZOkBP2mL_c0V`0eZOkBn1lL^c0X`0fZOjBn1mL\\\\c0W`0jZOiBm1mLZc0X`0mZOhBk1nLYc0X`0P[OgBj1nLWc0Y`0S[OfBh1oLVc0Y`0W[OcBg1PMTc0[`0Z[OaBdh0^=aWO]B`h0a=fWOYB\\\\h0f=jWOPB[h0n=lWOgAZh0X>mWO\\\\AYh0c>UXOi@Rh0V?cXOo_Odg0P`0_4O1O00100O1O100O1O100O100O11O0000001O000000001O000000001N10000000001O00000000001O1O0O2O1O1O1O1O1O1N3N1O2N2N2N2oXO]_Oo=f`0Q]O\\\\_Oe08Xb0^`0o\\\\Of_O84hb0W`0m\\\\OP@K2Vc0P`0k\\\\OW@B0bc0j?i\\\\O[@_OOgc0g?g\\\\O_@]OMjc0f?g\\\\O`@[OMmc0d?f\\\\Ob@XONQd0a?d\\\\Oe@XOLSd0`?c\\\\Og@VOKWd0_?a\\\\Oi@TOKZd0\\\\?a\\\\Ol@QOJ]d0\\\\?_\\\\On@POHad0[?]\\\\OPAnNGed0Z?Z\\\\OSAlNFjd0Y?W\\\\OUAiNEPe0W?T\\\\OYAfNDUe0T?R\\\\OkDnc0W;m[OlDSd0V;e[OPEZd0S;[[OVEed0l:oZO^EQe0Qc001O00000O100000001O0O1000000000000000000000000000000000000000000000000000000001O0O2O0O2O0O2O0O2N1O2O0O2dM_YOeZObf0Qc0b\\\\OT\\\\O`c0Qb0f^Ob]O\\\\a0oa0Z_Oh]Og`0ma0j_Ok]OY`0Qb0P@h]OQ`0Vb0V@c]Om?Zb0V7N3N1O2N2N1O2N1O1O2M2O1O2M2O1N3M2M3N3I6J6J7J5J6L4N3M2N2O2M2O1O2M2O3MR1nN2N3L3N2M4L3M4L3L5F9_Ob0WOh0[Of0K5J5M^ROQEhe0j:SZO_Ene0]:PZOhEPf0U:QZOlEQf0P:PZORFQf0k9oYOVFSf0f9nYO\\\\FRf0b9nYO`FSf0]9lYOeFUf0X9lYOjFTf0T9lYOnFUf0o8kYOSGUf0k8kYOWGUf0f8lYO\\\\GUf0`8mYOaGTf0[8mYOfGTf0V8nYOlGSf0o7PZORHQf0j7PZOWHRf0c7RZO^Hoe0]7TZOcHne0W7VZOiHme0P7WZOQIke0h6YZOXIje0b6YZO^Ije0]6XZOdIje0W6WZOjIle0R6UZOoIme0l5UZOTJne0h5SZOXJQf0c5PZO^JRf0^5oYObJTf0Z5mYOgJVf0T5kYOlJWf0Q5jYOPKVf0n4kYORKVf0l4kYOUKUf0j4kYOVKVf0h4kYOYKVf0d4kYO\\\\KVf0b4kYO_KVf0^4kYObKWf0Z4kYOgKVf0V4kYOjKWf0R4kYOoKVf0n3kYORLXf0i3jYOWLYf0c3jYO^LXf0Z3mYOfLWf0o2PZOQMVf0c2PZO^MfK[N^d0l3Q@iM`K]Ncd0_3S@TNYK`Nfd0S3V@^NRKaNld0g2W@hNlJcNod0]2Z@QOdJeNUe0Q2\\\\@[O]JgN[e0d1[@FXJiNae0X1[@0QJkNge0n0[@8lIkNme0e0[@a0fIlNRf07`@n0\\\\IlNXf0Gf@_1PIkN^f0WOm@n1eHlNSS1U1klNlNTS1V1jlNkNVS1U1jlNkNVS1V1hl
NlNWS1T1hlNmNXS1S1hlNmNYS1S1elNnN[S1R1dlNnN^S1R1`lNoN`S1R1_lNnNaS1R1^lNoNbS1R1]lNnNdS1R1ZlNoNfS1R1XlNoNhS1Q1XlNoNhS1R1WlNnNjS1R1TlNoNlS1Q1TlNoNlS1R1RlNoNnS1Q1RlNoNoS1P1QlNPOoS1Q1PlNoNPT1Q1okNPOQT1P1okNPORT1o0nkNPOST1Q1lkNoNUT1P1jkNQOVT1o0jkNQOVT1P1ikNPOXT1o0gkNROZT1m0fkNSOZT1n0ekNRO]T1l0bkNUO`T1i0`kNVOcT1i0[kNXOgT1f0YkNZOhT1f0VkN[OkT1d0UkN\\\\OlT1d0SkN\\\\OnT1c0RkN]OnT1d0QkN[OQU1e0mjN\\\\OSU1d0mjN\\\\OTU1d0kjN\\\\OUU1e0jjN[OVU1f0ijNZOXU1f0fjNZO[U1f0ejNZO[U1g0djNYO\\\\U1h0cjNXO^U1h0ajNXO_U1h0ajNXO_U1i0`jNWO`U1j0_jNUObU1l0]jNTOcU1m0egNoMb1T1hV1P1_gNVNe1i0lV1S1YgNZNi1c0nV1U1RgN^No1=oV1^2ohNbMQW1_2nhN`MSW1a2mhN^MSW1d2khN\\\\MUW1e2jhN[MUW1g2jhNYMVW1h2jhNWMVW1j2ihNVMWW1j2ihNVMWW1k2hhNUMWW1m2hhNRMYW1n2ghNRMXW1P3hhNoLWW1R3ihNnLVW1T3ihNlLVW1V3ihNjLVW1W3jhNiLVW1X3ihNhLVW1Z3ihNeLWW1]3hhNcLWW1_3hhNaLXW1`3ghN`LXW1a3hhN_LWW1c3hhN]LXW1c3hhN]LWW1d3ihN\\\\LVW1f3ihNZLWW1f3ihNYLWW1h3ihNXLVW1i3jhNWLVW1j3hhNWLWW1j3ihNVLWW1j3ihNVLVW1k3jhNULVW1k3ihNVLWW1k3hhNULWW1l3ihNTLWW1l3ihNSLXW1m3hhNSLWW1n3hhNSLXW1n3ghNRLXW1o3hhNQLXW1o3hhNQLXW1o3hhNQLWW1Q4hhNnKXW1S4hhNmKXW1S4hhNmKWW1U4hhNjKXW1W4hhNiKXW1X4ghNhKXW1Z4ghNeKYW1\\\\4ghNdKYW1]4fhNbK[W1_4dhNaK[W1`4ehN_K\\\\W1b4chN^K]W1b4chN]K]W1e4bhN[K^W1e4bhN[K^W1f4ahNZK_W1f4ahNZK_W1g4`hNXKaW1h4_hNXKaW1i4^hNWKbW1i4^hNWKbW1j4^hNUKbW1l4]hNTKcW1l4]hNTKcW1m4\\\\hNRKeW1o4ZhNQKfW1o4ZhNQKfW1P5YhNPKgW1Q5YhNnJgW1R5YhNnJgW1S5XhNlJiW1T5XhNkJgW1W5YhNhJgW1X5YhNgJhW1Y5YhNfJgW1[5YhNcJhW1]5XhNcJhW1]5YhNbJgW1_5YhN_JhW1a5YhN^JgW1b5YhN]JhW1d5XhN[JhW1e5YhNZJgW1f5YhNYJhW1g5YhNXJgW1i5YhNTJiW1l5^100001O00001O00001O001O001O1O002N1O1O100O2N1O1O1O1O1O001O001O00100O1O1Ob0^O5K1O100O1O00100O001O001O1O00100O1O001O1O001O001O001O001N2O001O1N2O2N1O1N2O1O001O1O1N101O1O2N1O1O1O1O2N1O1O1O1O001O1O001O0000001O0000001O00000000001O0000001O001O01O01O001O001O01O01O0001O001O1O1O001O10O01O1O001O001O001O001O001O001O1O1O1O1O00100O1O1O1O001O001O001O001O000eHShNR6nW1lI^hNi5bW1UJbhNi5_W1UJdhNi5\\\\W1UJhhNi5XW1VJkhNh5VW1VJlhNi5TW1VJohNh5QW1WJQiNh5oV1lI^iNS6cV1kI_iNT6aV1kIaiNT6_V1kIdiNS6]V1kIeiNT6[V1kIgiNT6
YV1kIjiNS6WV1kIkiNT6UV1lImiNR6SV1mIoiNR6RV1lIQjNR6oU1nISjNP6mU1oIUjNP6kU1PJVjNo5VX101O00001O001O00001O0O101O001O00001O000O2O001O0O2O001N2O2N5J6K4J8I5J8Hi0WO;D`0\\\\OnX<\"}}, {\"image_id\": 113, \"category_id\": 1, \"bbox\": [580.0, 503.0, 251.0, 175.0], \"score\": 0.9839528203010559, \"association_id\": 2, \"light\": [-1.2959157228469849, -2.1402437686920166, 1.2163035869598389, 2.0156610012054443], \"segmentation\": {\"size\": [1553, 2048], \"counts\": \"cT`k0g0X_1e0K4L3O1O0O2O1O0O2O00001O001O1O00001O001O00000000001O0000001O000000000000001O0000001O00001O00001O00001O001O0TdNUNRY1l1ffN\\\\NYY1d1dfN`N[Y1a1bfNbNSNWOU[1X2ffNbNUNWOT[1X2efNaNWNYOS[1W2]fNhN_NROT[1W2ZfNiNbNoNU[1Z2UfNiNeNnNV[1Z2RfNjNhNlNV[1[2neNlNkNjNW[1[2keNlNnNjNW[1[2ieNTOWZ1l0heNUOXZ1l0feNVOYZ1l0deNUO\\\\Z1n0_eNdN^OPOS[1c2ndN_N6oNlZ1o3ZeNQLfZ1n3[eNRLeZ1m3[eNTLeZ1k3\\\\eNULdZ1k3\\\\eNULdZ1j3^eNULbZ1i3`eNWL`Z1f3ceNZL]Z1b3heN]LXZ1a3jeN_LVZ1_3leNaLTZ1^3meNbLSZ1]3neNcLRZ1\\\\3oeNeLPZ1Z3QfNfLoY1Y3RfNgLnY1W3UfNhLkY1W3VfNiLjY1V3WfNjLiY1U3XfNkLhY1T3ZfNkLfY1U3ZfNkLfY1U3ZfNkLfY1T3[fNlLeY1T3\\\\fNkLdY1U3\\\\fNkLdY1U3\\\\fNkLdY1U3\\\\fNkLdY1U3]fNjLcY1V3]fNjLcY1V3]fNjLcY1V3]fNjLcY1V3]fNjLcY1V3]fNjLcY1V3]fNjLcY1W3\\\\fNiLdY1W3]fNhLcY1X3\\\\fNiLdY1W3\\\\fNiLdY1W3\\\\fNiLdY1X3[fNhLeY1X3[fNhLeY1Y3ZfNgLfY1Z3YfNfLgY1[3XfNeLhY1]3VfNbLkY1a3RfN_LnY1c3PfN]LPZ1e3oeNZLQZ1g3ndNULh04ZZ1h3kdNWLj01[Z1Q4deNoK\\\\Z1R4deNmK\\\\Z1U4ndNhK<3fZ1`4XeN_KhZ1d4UeN\\\\KkZ1g4TeNVKmZ1l4QeNSKP[1m4:O1PO`dNmLb[1m2ldNbL][1X3Q1L4O1O1O1O1O1O1O100O1O10000O1000000001O00000000000gdNnLSY1S3lfNnLTY1R3jfNoLVY1R3ifNnLWY1R3ifNoLVY1R3ifNnLWY1R3hfNoLXY1R3gfNoLXY1Q3hfNoLXY1Q3hfNoLYY1P3gfNPMYY1P3gfNPMYY1P3gfNQMXY1o2gfNRMZY1m2ffNSMZY1n2efNRM[Y1n2efNRM[Y1n2efNRM[Y1m2ffNSM[Y1l2efNTM[Y1l2efNUMZY1k2ffNUMZY1k2ffNUMZY1k2efNVM[Y1j2efNVM[Y1j2efNVM[Y1j2efNVM[Y1j2efNVM[Y1i2ffNWMZY1i2gfNUMZY1k2ffNUMZY1k2ffNUMZY1j2gfNVMYY1j2hfNUMYY1i2hfNVMYY1k2gfNTMYY1l2gfNSMZY1n2efNRM[Y1n2ffNQMZY1o2gfNoLZY1R3Q2100010O01O001O2N1O1O2N1PcNfL22:OZ[1o3bdNTL][1m3`dNTL`[1o3YdNULh[1Y4100O010O10O02N1N2N
2M3M3O1O1O1O1O1M3K6K4K5F:O1O1O2N101N1N3N1N3N1cN_bNIc]14bbNHf]1ZOPbN7?;f]1WOSbNNm0=[cfi1\"}}, {\"image_id\": 113, \"category_id\": 1, \"bbox\": [1496.0, 471.0, 536.0, 326.0], \"score\": 0.9998537302017212, \"association_id\": 4, \"light\": [-0.8778482675552368, -3.032834053039551, 0.7427571415901184, 2.9284486770629883], \"segmentation\": {\"size\": [1553, 2048], \"counts\": \"`ZmV2\\\\1g^1g0D6K4M3M2O1N2N101N2O0O2O001N2O1N2O001N2O1O1N101N2O1N2O1N2O1N101O2N2N1O2O0O1O1O1O1O1O001N2O001O1O001O002N1O2N1O1O1O1O001O001O001O001O001O00100O1O2N101N1O100O1O00001O0000001O00001O2N1O2N1O2N1O2N1O1O1O1O1O1O1O001O1O1O1O1O1O1O1O1O1O1O1O000010O000000000000000001O0000000000000000000001O000000000000000000000000010O01O001O001O100O0O2O00001O001O00001O00100O1O1O1O1O1O001O1O1O2N5K8G8I3M2O1N1O100O001O01O010O1O1O3M2N3M2N2N1O1O2N2N3M4L3M1O1O1O1O1O1O1O6J5K2N2N2N1O1O2N2N3MSfNlJfX1k4UgN]KkX1`4SgNcKnX1[4QgNgKoX1X4PgNiKQY1V4ofNkKQY1T4nfNmKTY1Q4lfNoK[Y1j3dfNXLbY1`3^fNaLoY1R3PfNPMRZ1m2neNSMUZ1j2jeNWMXZ1g2heNYMZZ1e2eeN]M[Z1b2eeN^M\\\\Z1a2ceN`M_Z1^2aeNbMaZ1\\\\2^eNeMdZ1Y2[eNiMfZ1U2ZeNkMhZ1R2WeNPNjZ1o1UeNRNlZ1m1ReNUNoZ1j1PeNWNR[1f1mdN\\\\NV[1`1jdNaNY[1g0[eN[OY[1ZO]eNf0W]10O1000000O2O000000000O2O0000000000001O0000000000000000001O0000000000000000010O0000000000000000000001O00000000000000001O00000000000000000000001O0000000000O1000000000000O1000000000000000000O10000000000000000O100000000000001O01O000000000000000010O000000010O000001O0001O01O000000010O00000010O0001O01O000001O01O000001O01O0001O01O0000000010O000001O0001O00000000000001O0000000000000000000O1000000000001O0O1000000000000000000O100000O1000000000000000001O0O10000000001O000O2O1O00001N10001O001O1O1O001O1O001O001O00001O1O2N1O1O001O4L1O001O1O2M2O1O1O1O1O001N3N2L4Fo_NM\\\\Pi0\"}}, {\"image_id\": 114, \"category_id\": 1, \"bbox\": [248.0, 332.0, 467.0, 353.0], \"score\": 0.9999951720237732, \"association_id\": 1, \"light\": [-2.5735113620758057, -2.1470134258270264, 2.3930411338806152, 1.8519644737243652], \"segmentation\": {\"size\": [929, 1600], 
\"counts\": \"cdQ79fl03O1N101O000O101N100O101N10000000000O1eGHmC8R<JmC6R<MlC3S<OlC2R<1kC0T<3jCMT<7jCIU<:iCFU<?hCAV<d0fC]OT<m0hCSOQ<m1VCSNi<R2SCnMl<W2PCiMo<[2nBeMQ=`2kB`MU=b2iB^MV=e2hB[MX=g2fBYMY=j2dBWM\\\\=j2cBVM\\\\=l2cBTM]=m2bBSM]=o2bBQM^=Q3`BoL_=S3`BlL`=W3^BiLa=[3\\\\BeLc=^3[BbLd=a3ZB_Le=d3YB\\\\Lf=f3XB[Lg=g3XBYLh=h3WBXLh=j3WBVLi=k3VBULj=k3VBULi=m3VBSLj=n3UBRLk=o3SBRLm=o3RBPLo=P4QBPLo=Q4PBoKP>R4oAnKQ>S4nAmKQ>T4oAkKR>V4mAjKS>W4lAiKT>W4kAjKU>W4jAiKV>W4jAiKU>Y4jAfKW>Z4iAfKW>[4hAeKX>\\\\4gAdKX>]4hAbKY>_4fAaKY>a4fA_KY>c4fA]KZ>d4eA\\\\KZ>f4eAYK[>i4dAWK[>k4dAUK\\\\>l4cATK\\\\>n4cARK]>o4bAQK^>P5aAPK^>R5aAnJ_>S5_AnJ`>T5_AlJa>T5_AlJa>U5^AkJb>V5]AjJc>W5\\\\AiJd>X5[AhJe>Y5ZAgJf>Z5YAfJg>\\\\5WAdJi>^5UAbJk>d5PA[JP?h6m_OYIR`0R7c_OnH\\\\`0X7__OhHa`0]7[_ObHe`0_7Z_OaHf`0`7Y_O`Hg`0a7Y_O^Hg`0c7X_O]Hh`0c7X_O]Hh`0d7X_O[Hh`0f7W_OZHi`0f7X_OYHh`0h7Y_OVHg`0k7V_OeG^N?\\\\b0m7Q_OiGeN7Zb0Q8k^OmGlN1Yb0T8d^OQHSOJXb0Y8]^ORH\\\\ODWb0_8Q^OUHH[OWb0l9i]OTFWb0m9i]ORFWb0n9j]OPFWb0P:j]OoEVb0R:<00000O10000000000000WN^]OSIab0X6_]OaH7T1Zb0Z6Z^OeIfa0Z6]^OdIca0[6_^OdIaa0[6a^OdI_a0[6c^OdI\\\\a0\\\\6e^OdI[a0\\\\6f^OcIZa0\\\\6i^ObIWa0]6k^ObIUa0^6l^OaITa0^6o^O`IQa0_6Q_O`Io`0_6T_O_Il`0`6W_O]Ij`0b6X_O]Ih`0c6Z_O[If`0d6\\\\_O[I^NXOda0]7o_OZI\\\\N\\\\Oca0Y7S@ZIZN^Oaa0Y7V@WIYNAaa0W7W@XIXNAaa0W7X@WIVNCba0V7X@WIVNCca0U7X@VIUNGba0T7Y@TIUNHba0T7Z@RITNKca0S7Y@QISNMda0S7Y@nHSN0da0R7\\\\@iHPN7da0Q7[@mHjM4la0o6Y@nHeM8Rb0l6V@cIj?^6U@bIk?`6S@`Im?a6R@_Io?a6P@_IP`0c6n_O]IR`0d6m_O\\\\IS`0e6l_O[IT`0f6k_OZIU`0f6k_OZIU`0g6j_OYIV`0h6i_OXIW`0i6h_OWIY`0i6f_OWIZ`0k6d_OUI\\\\`0m6c_ORI]`0P7a_OPI_`0T7]_OlHc`0[7V_OeHj`0_7R_OaHn`0`7P_OaHPa0`7o^O`HQa0a7n^O_HRa0b7m^O^HSa0c7l^O]HTa0c7l^O]HTa0d7k^O\\\\HVa0d7i^O\\\\HWa0e7h^O[HXa0f7h^OYHXa0h7g^OXHYa0b8m]O^GSb0c8l]O\\\\GUb0m5W^O[KDgNVb0k5^^OYK[OlNWb0h5f^OYLZa0e3i^OYKPOPOWb0f5l^OYLTa0e3o^OZLQa0e3Q_OZLPa0d3Q_O\\\\Lo`0c3S_O\\\\Lm`0d3T_OZLn`0d3T_O[Ll`0e3U_OZLl`0e3U_OYLm`0e3U_OZLl`0e3V_OYLk`0f3V_OYLj`0g3W_OXLj`0f3W_OYLj`0g3W_OXLj`0g3W_OXLi`0h3W_OXLj`0g3W_OXLi`0g3Y_OWLh`0i3X_OWLi`0h3X_OWLh`0i3X_OVLi`0j3
W_OVLi`0j3X_OTLi`0l3X_ORLi`0n3W_ORLi`0o3W_OPLi`0P4X_OnKi`0R4Y_OlKh`0S4Y_OkKh`0V4X_OiKh`0W4Z_OfKg`0Z4Z_OeKf`0[4[_OdKe`0]4[_OaKf`0_4[_OfJhMNmb0\\\\5\\\\_OdJgM1mb0\\\\5\\\\_OaJgM4mb0[5]_O[Kd`0f4\\\\_OYKe`0f4\\\\_OYKd`0h4\\\\_OWKd`0j4]_OTKd`0k4]_OTJcMd0Qc0X5\\\\_ORJdMf0Pc0X5^_OnIcMk0Pc0W5^_OjIdMo0ob0V5c_OjJ^`0V5`_OkJa`0T5__OlJb`0S5]_OnJd`0R5[_OmJf`0S5Y_OnJh`0Q5X_OoJh`0Q5X_OoJi`0Q5V_OoJj`0Q5V_OoJj`0Q5V_OoJj`0Q5V_OoJj`0R5V_OmJk`0R5U_OnJk`0S5T_OmJl`0S5T_OmJl`0S5U_OlJk`0U5T_OkJm`0U5R_OkJn`0U5R_OkJn`0V5T_OmI`Mf0\\\\c0^5S_OlIbMe0\\\\c0_5Q_OlIcMe0\\\\c0_5R_OkIcMe0[c0a5Q_OiIfMe0Yc0c5P_OhIhMd0Yc0c5W_O\\\\Ji`0d5X_O[Ji`0d5X_O[Ji`0e5V_O[Jk`0d5U_O\\\\Jl`0c5T_O]Jm`0b5S_O^Jm`0b5S_O^Jn`0a5R_O_Jo`0a5P_O_JPa0a5P_O^JRa0a5n^O_JRa0a5m^O`JTa0_5l^OaJTa0_5l^OaJTa0_5k^ObJVa0^5i^ObJWa0^5i^ObJWa0^5i^ObJXa0]5h^OcJYa0\\\\5f^OeJZa0[5f^OeJ[a0Y5e^OhJ\\\\a0W5d^OiJ_a0T5`^OmJca0P5]^OPKfa0m4Y^OTKia0j4W^OVKka0h4U^OWKma0h4S^OXKna0g4R^OYKoa0f4Q^OZKoa0f4P^O[KQb0d4o]O\\\\KQb0d4P^O[KQb0d4o]O\\\\KRb0c4n]O]KRb0c4n]O]KSb0b4m]O^KTb0a4l]O_KUb0`4k]O`KWb0^4i]ObKYb0\\\\4g]OdK\\\\b0Y4d]OgK_b0U4b]OkK`b0S4`]OmKbb0Q4^]OoKcb0P4]]OPLdb0o3\\\\]OQLeb0n3[]ORLfb0m3[]ORLeb0n3[]ORLfb0l3[]OSLfb0m3Z]OSLgb0k3Z]OULgb0j3Z]OULgb0i3Z]OWLgb0h3Y]OXLib0e3X]O[Ljb0c3V]O]Lkb0a3V]O_Lkb0_3W]O`Ljb0_3V]OaLkb0]3V]OcLjb0]3V]OcLkb0[3W]OdLjb0[3V]OeLjb0[3W]OdLib0[3X]OeLhb0[3X]OeLhb0Z3Z]OeLfb0[3Z]OdLhb0Z3Y]OfLgb0Y3[]OeLfb0[3Z]OdLgb0[3\\\\]ObLeb0]3`3O1O2N1O1O1O100O2O000O10000O2O0O100O101N1O101N1O1O100O1O100O1O100O1O1O100O1O1O1O1O100O2O000O100O2O00000O2O00000O10001O001N101O001O1O1N101O1O1O1O1N2O1O001O1O1O001O2M2O2N2N2N1O1O2N1O1O001O1O1N2O1O1O1O1O1N2O2N1O1N2O1N2O1O1N3L3M4KU]Ri0\"}}, {\"image_id\": 114, \"category_id\": 1, \"bbox\": [692.0, 247.0, 785.0, 435.0], \"score\": 0.9999806880950928, \"association_id\": 2, \"light\": [-2.2552762031555176, -1.5606473684310913, 2.0712199211120605, 1.376671314239502], \"segmentation\": {\"size\": [929, 1600], \"counts\": 
\"g]dc03nl00O2O00001O0000001O0O101O00001O000O2O000000000000001O0O100000000000001O00001O00001oIa0^^O_OY`0n1c_ORNi?e2T@[Mg?m2V@TMe?S3X@mLe?X3Y@hLe?\\\\3Y@dLf?_3X@aLg?b3W@^Lh?e3U@\\\\Lk?e3U@[Li?h3U@XLj?j3U@VLk?l3S@TLl?o3R@QLm?R4Q@oKn?R4Q@nKn?T4Q@lKo?V4o_OkKo?W4P@iKo?Z4o_OgKP`0Z4P@eKo?^4o_OcKo?_4P@aKo?a4P@`Kn?b4Q@_Kl?e4R@[Km?g4R@YKm?i4R@XKl?j4S@VKk?m4T@SKk?P5S@PKl?R5T@mJj?V5U@jJj?X5U@hJj?_5P@aJo?g5j_OZJT`0j5i_OVJV`0k5j_OUJV`0l5i_OTJV`0n5i_ORJW`0o5h_OQJW`0P6j_OPJU`0Q6j_OoIU`0S6j_OmIV`0S6j_OmIV`0S6j_OmIU`0T6k_OmIT`0T6k_OlIT`0U6l_OkIT`0U6m_OjIS`0V6m_OkIQ`0V6o_OjIQ`0V6o_OjIP`0W6P@iIP`0W6P@iIo?Y6P@gIo?Z6Q@fIo?Z6R@eIm?\\\\6S@eIk?\\\\6U@dIj?]6V@cIi?^6W@bIi?^6W@bIh?_6X@aIg?`6Y@`Ig?`6Y@`If?b6Z@^Ie?b6[@^Id?c6\\\\@]Id?c6\\\\@^Ib?c6^@]Ib?c6^@]Ib?c6_@]I_?d6a@\\\\I_?c6b@]I^?b6c@^I\\\\?c6d@^I[?a6f@_IY?a6h@_IX?a6h@_IW?a6k@^IU?a6l@_IS?a6n@`IQ?_6PAaIo>`6QA`In>P6cAPJ\\\\>P6eAPJ[>o5fAQJY>o5hARJV>n5kARJT>n5nAQJQ>o5PBQJo=o5RBRJm=m5TBSJk=m5VBSJj=l5WBUJg=l5YBTJg=k5ZBVJe=i5\\\\BWJc=i5^BWJb=i5^BWJb=h5_BYJ_=g5bBYJ^=f5cBZJ]=f5cBZJ\\\\=f5fBYJZ=f5gBZJX=f5iBZJW=e5jB[JV=d5kB\\\\JT=d5mB\\\\JR=e5nB[JR=d5oB\\\\JP=d5QC\\\\Jn<d5SC]Jk<c5VC]Ji<c5XC]Jg<c5ZC]Jf<b5ZC_Je<a5\\\\C_Jc<b5]C^Jb<b5_C^Ja<b5_C^J`<b5aC_J^<a5bC_J]<b5cC^J]<a5dC_J[<b5eC^J[<b5eC^J[<a5fC_JY<b5gC^JY<b5gC^JY<a5hC_JX<a5hC_JW<b5iC^JW<b5iC^JW<a5jC_JU<b5kC^JU<b5kC^JT<c5lC]JT<b5mC^JR<c5nC]JR<c5nC]JQ<d5oC\\\\JP<d5PD]Jo;d5QD\\\\Jn;e5RD[Jn;e5RD[Jm;e5TD[Jk;f5UDZJk;f5UDZJj;g5UDZJj;f5WDZJi;f5WDZJi;f5WDZJh;f5YDZJg;f5XD[Jg;f5ZDYJf;g5ZDYJf;f5[DZJe;f5[DZJd;g5\\\\DYJd;g5\\\\DYJd;g5\\\\DYJd;g5\\\\DYJc;g5^DdIZK4X`0X6^DcI\\\\K4V`0Y6_D`I^K6S`0Z6_D]IbK8n?\\\\6`D\\\\IcK7m?]6`D[IeK7k?^6`D[IeK6k?`6aDWIgK8h?`6WE`Ih:a6XE_Ih:a6XE_Ih:a6XE_Ig:b6YE^Ig:b6ZE]Ie:d6[E\\\\Id:e6\\\\E[Id:e6\\\\E[Ic:e6^E[Ib:e6^E[Ia:f6_EZIa:f6_EZI`:g6`EYI`:g6aEXI_:h6aEXI^:i6bEWI^:i6bEWI^:i6bEWI^:i6bEWI]:j6dEUI\\\\:k6dEVI[:i6hDPITL7T?i6gDRITL5U?i6gDTIRL3V?j6gDUIRL1W?j6gDVIQL0X?j6fDXIQLNY?j6fDZIoKL[?j6eD\\\\IoKJ\\\\?j6eD]IoKH[?l6eD^IoKF\\\\?l6eD_InKE]?l6dD`IoKD]?l6dDaInKC^?l6cDbIoKA_?m6bDbIoKA^?n6bDbI
PL@^?n6bDcIoK_O_?n6aDdIPL]O`?o6`DdIQL\\\\O_?P7_DeIRL[O_?P7_DeIRL[O_?P7^DgIRLYO_?Q7_DfIRLXO`?R7^DfIRLXO`?R7]DgISLWO`?R7]DgISLWO`?Q7]DiISLVO`?Q7]DiISLVO`?Q7\\\\DjITLTOa?R7[DkISLSOa?S7[DkITLROa?S7ZDlIULQOa?S7YDmIVLPOa?S7YDmIVLPOa?S7XDnIWLoNa?S7WDoIYLlNa?U7TDQJ[LjN`?V7TDQJ\\\\<o5cCSJ\\\\<m5cCTJ]<l5bCUJ^<k5`CWJ`<i5_CXJ`<i5^CYJb<g5]CZJc<e5]C\\\\Jc<d5\\\\C]Jd<c5[C^Jd<c5[C^Je<b5ZC_Jf<a5YCaJf<_5YCbJf<_5YCbJg<^5XCcJh<^5VCcJi<^5VCcJj<]5UCdJj<]5UCdJk<]5RCeJm<\\\\5QCfJo<Z5oBhJQ=Y5lBiJS=X5kBjJU=V5jBkJV=U5hBmJX=T5eBnJZ=S5dBoJ\\\\=R5aBPK_=P5_BRKa=o4[BTKd=n4YBTKg=l4VBWKj=j4SBXKm=i4QBXKo=h4oAYKQ>i4mAXKS>i4kAXKU>h4jAYKV>h4hAYKX>h4fAYKZ>g4eAZK[>g4cAZK]>g4aAZK_>f4aAZK_>g4_AZK`>g4`AYK`>h4^AYKb>h4]AXKc>h4\\\\AYKd>h4ZAYKf>h4XAYKh>g4WAZKi>g4UAZKk>g4SAZKm>f4RA[Kn>f4PAZKQ?f4n@[KR?f4l@[KT?f4j@[KV?e4i@\\\\KW?e4g@\\\\KY?d4f@]KZ?d4d@]K\\\\?c4c@^K]?b4b@_K^?b4`@_K`?a4_@`Ka?a4]@_Kd?a4[@`Ke?a4Y@`Kg?`4X@aKh?_4X@aKh?^4Z@aKf?_4Z@`Kg?`4Z@_Kf?a4[@^Ke?a4\\\\@_Kd?a4]@]Kd?c4\\\\@]Kd?c4]@\\\\Kc?c4^@\\\\Kc?d4^@[Kb?e4^@ZKc?f4^@YKb?f4_@YKb?g4_@XKa?h4_@WKb?i4_@VKa?i4`@VKa?j4_@VKa?j4`@TKa?l4_@TKa?l4_@SKb?l4`@SK`?m4`@YJWORNZ`0e7_@UJ\\\\OUNU`0f7`@PJ@XNQ`0g7`@mID[Nl?h7`@kIG[Nj?j7`@hII]Ng?j7b@fIJ^Ne?k7c@eIJ_N@Hb?S8VAbILaN\\\\OLa?P8YA`INaNVO3b?j7^A^IMcNTO5a?i7`A\\\\IOcNPO:`?e7eAYI:TOQ>b7hAVI8YOQ>^7lASI6@m=\\\\7QBnH5Fj=[7UBiH3Mh=X7ZB]H9;]=W7PEiHP;U7QEmHn:S7REmHn:R7SEnHm:R7SEnHm:Q7TEoHl:Q7TEoHl:P7UEPIk:P7UEPIk:o6VEQIj:o6VEQIj:o6VEQIj:n6WESIh:m6XESIh:m6XESIh:l6YETIh:k6XEUIh:k6XEUIh:j6YEVIg:j6YEVIg:j6YEWIf:h6[EXIe:h6[EXIe:h6[EXIe:g6\\\\EYId:g6\\\\EYId:f6]EZIc:f6^EYIb:f6_EZIa:f6_E[Ia:c6`E]I`:c6`E]I`:b6aE^I_:b6aE^I_:a6bE_I^:`6cE`I]:`6cE`I]:_6dEaI\\\\:_6dEaI\\\\:^6eEbI[:^6eEbI\\\\:]6dEcI\\\\:\\\\6eEeIZ:[6fEeIZ:Z6gEfIY:Z6gEfIY:Z6gEfIY:Z6gEfIZ:X6gEhIY:X6gEhIY:X6gEhIZ:W6fEiIZ:W6fEjIY:U6hEkIY:T6gElIY:T6gElIZ:S6fEmIZ:S6fEmIZ:R6hEmIY:R6gEnIY:R6gEnIY:R6gEnIZ:Q6fEPJY:o5hEQJX:o5hEQJX:o5hEQJY:n5gERJY:m5hESJX:m5hESJX:m5hESJY:l5gETJY:l5gETJY:k5hEVJW:j5iEVJW:j5iEVJW:j5jEUJW:i5jEWJV:i5jEWJV:i5jEWJV:h5kEXJU:h5kEXJU:g5lEYJT:g5lEYJT:f5mEZJS:f
5mEZJS:e5nE[JR:d5oE\\\\JQ:d5oE\\\\JQ:c5PF]JP:c5PF]JP:b5QF^Jo9b5QF^Jo9b5QF_Jn9`5SF`Jm9`5SF`Jm9_5TFaJl9_5TFaJl9_5TFaJl9_5TFaJl9^5UFbJl9]5TFcJl9]5TFcJl9]5TFcJl9\\\\5UFdJk9\\\\5UFdJk9\\\\5UFdJk9\\\\5UFdJk9[5VFeJj9[5VFeJj9Z5WFfJi9Z5WFfJi9Y5XFgJh9Y5XFgJh9X5YFhJg9X5YFhJg9W5ZFiJf9V5[FjJe9U5]FjJc9V5]FjJc9T5_FlJa9S5`FmJ`9S5`FlJa9S5`FmJ`9R5aFnJ_9Q5bFoJ^9Q5bFoJ_9o4bFQK^9o4bFQK^9n4cFRK]9n4cFRK]9m4dFSK\\\\9m4dFSK\\\\9m4dFSK\\\\9l4eFTK[9l4eFTK[9l4eFTK[9k4fFUKZ9k4fFUKZ9j4gFVKY9j4gFVKY9i4hFVKY9i4hFWKY9h4gFXKY9g4hFYKX9f4iFZKW9e4jF[KV9d4kF\\\\KU9c4lF]KT9c4lF]KT9b4mF]KT9b4mF^KS9b4mF^KT9`4mF`KS9_4nFaKR9_4nFaKR9^4oFbKQ9^4oFbKQ9^4oFbKQ9]4PGcKP9]4PGbKQ9]4PGcKQ9\\\\4oFdKQ9\\\\4oFdKQ9[4PGeKP9[4PGeKQ9Y4oFhKQ9X4oFgKS9W4nFiKR9W4nFiKS9U4nFkKR9T4oFlKR9S4nFmKR9R4oFnKR9P4oFPLQ9o3PGQLQ9m3PGSLP9m3PGSLQ9k3PGULP9k3PGULP9j3QGVLP9h3QGXLo8h3QGWLQ9g3PGYLP9g3PGYLP9f3QGZLo8f3QGZLP9e3PG[LP9d3QG\\\\Lo8d3PG]LP9b3QG^LP9a3PG_LP9a3PG_LP9`3QG`Lo8`3QG`Lo8_3RGaLn8_3RGaLo8]3RGcLn8\\\\3SGdLm8\\\\3SGdLm8[3TGdLm8[3TGeLm8Z3SGfLm8Y3TGgLl8X3UGhLk8X3UGhLl8V3UGjLk8U3VGkLj8U3VGkLk8S3VGmLj8S3VGmLj8R3WGnLi8R3WGnLj8P3WGPMi8P3WGPMj8n2XGQMh8o2XGQMh8n2YGRMh8m2XGSMh8l2YGTMh8k2XGUMh8k2XGUMi8i2XGWMh8i2XGWMi8g2XGYMh8f2YGZMg8f2YGZMh8d2YG\\\\Mg8d2YG\\\\Mg8c2ZG]Mg8b2YG^Mg8a2ZG_Mf8a2ZG_Mg8_2ZGaMf8_2ZGaMf8^2\\\\GaMe8^2[GbMe8]2\\\\GcMd8]2\\\\GcMd8]2\\\\GcMe8[2\\\\GeMd8[2\\\\GeMd8Z2^GeMb8[2^GeMb8[2^GeMc8Y2^GgMb8Y2^GgMb8Y2_GfMa8Z2_GfMa8Y2`GgMa8X2_GhMa8X2_GhMa8X2_GgMb8X2_GhMa8X2`GgM`8Y2`GgM`8X2aGhM_8X2aGhM_8X2aGhM_8W2bGiM^8W2bGiM^8W2cGhM]8W2dGiM\\\\8W2dGiM\\\\8V2eGjM[8V2fGiMZ8V2gGjMY8V2hGhMY8W2hGiMX8W2hGiMX8V2jGiMV8W2jGiMV8V2lGhMU8X2kGhMU8W2mGhMS8X2nGgMR8Y2[9O10000O10000O1000000O10000O10000O10000O10000O10000O10000O1000000O10001O000000000000000000000000000000000000000000000000001N10001O000O2O001O001O1N101O1O001O0O101O001O000O2O00001O00000O2O00000O2O00001N10001O0O101O0O2O001N3N2M3N1O2M3M2K5J7JfX_3\"}}, {\"image_id\": 115, \"category_id\": 1, \"bbox\": [28.0, 105.0, 399.0, 470.0], \"score\": 0.9999502897262573, \"association_id\": 1, \"light\": 
[-1.5479533672332764, -2.2627956867218018, 1.504338264465332, 2.0722930431365967], \"segmentation\": {\"size\": [799, 533], \"counts\": \"\\\\Qf06[h0h0_O>C8I7K5L4L3T[OYN\\\\b0j1^]O_N\\\\b0d1`]ObN[b0b1`]OeNZb0_1`]OjNYb0_1\\\\]OjN]b0R4J8H4L3M2N3M2O1O1N2O2N2N2N2M3N1O1O1O1N20O01O2N1O2O1N2N2N1O1O1O1O001O1O1O1O2N2N2N1O2M2O1O1O1N2O1O1N2O1N3N2M2O2M2N2M3N3M2O2M2N3N2M4M3L4M2N2N3M4L6I8I6J3M3M2N1O2M2O1O1O2M4RDXE_9k:PFjEi9Z:oEkEo9Z:iEkEV:^:]EgEb:\\\\<N2O1N1O100O1O1O100O1O1O2O0O100O0100oLSFPGm9j8lFcFS9W9WGfFi8T9_GjFa8Q9fGlFZ8Q9kGnFU8n8QHPGo7j8XHUGh7g8]HWGd7i8^HUGa7k8bHRG_7n8dHnF]7Q9hHjFY7V9kHeFV7[9oH\\\\FU7d9R4100000O1001N100000000000000O10000000001O00000000000O10000010O000001O000000001O00010O2N1O1O2N7J:Eb0^O:F8H2N2N3N2M3M;UDnCS:a=N1O1O1O100O00000010O00001O00001O00001O2N1O2N1O1O1O1O1O001O001O000000001O0000001O00001O1O2N1O2N2N2N1O1O1O1O001O1O1O1O1O2N1O2N2N2N1O1O1O1O1O2N2N2N3M2N1O1lKmCZKT<b4XDUKi;h4\\\\DUKe;g4cDSK`;g4iDTKX;f4REUKP;^4_E^Kc:V4jEgKY:m3TFnKo9k3YFRLi9i3]FTLd9i3`FULa9h3cFUL_9h3eFVL\\\\9h3fFVL\\\\9g3hFVLZ9f3lFVLW9e3oFVLU9d3UGPLS9j3ZGfKP9R4S6K5K5M2M3N2N2N3L4L7I8E;F7H8I6K5L3M4M2M4K7I:@he8TO\\\\[G3M1O101N100O10000O101O0O10001O0O100O10001O00001O001O001O0001O01O01O00001O0000001N1000001O001O00001O0000001O001O0000001O10O01O001O00001O00001O1N2O1O2M3MW[b2\"}}, {\"image_id\": 116, \"category_id\": 1, \"bbox\": [21.0, 144.0, 251.0, 397.0], \"score\": 0.9999944567680359, \"association_id\": 1, \"light\": [-1.6831748485565186, -2.458819627761841, 1.5680763721466064, 2.2817885875701904], \"segmentation\": {\"size\": [608, 738], \"counts\": 
\"kn<5jb02N10001O0000001O00000O2O01O00000001O000000001O00000000001O0O1000000O101O0O1O10000O2O0000000O1000000aI4iINS69iIGU6>hIBW6`0hI@X6`0hI@W6b0hI^OX6c0gI]OY6c0gI]OX6e0gI[OX6g0gIZOV6j0hIVOU6o0jIPOT6T1jIlNT6X1jIhNT6[1kIeNU6[1kIeNT6]1kIcNT6^1mIaNS6_1mIaNR6a1mI_NR6b1nI^No5e1QJZNl5j1TJVNf5P2ZJPNb5T2_JkM^5X2bJhM\\\\5Z2eJeMX5^2hJbMU5b2jJ]MS5g2mJYMP5j2QKUMm4n2RKRMl4Q3SKoLj4V3TKjLh4]3UKcLh4b3VK^Lg4g3WKYLh4i3WKWLh4k3WKVLf4m3YKSLd4R4ZKnKc4V4\\\\KjKa4[4]KeKb4_4[KaKe4b4XK_Kf4e4WK[Ki4j4RKVKn4n4nJRKQ5R5lJoJS5S5kJmJU5V5hJjJX5^5`JbJ`5Y6eIgI[6^6`IbI`6a6]I_Ic6e6YI[Ig6j6TIWIk6Q7mHoHS7U7iHlHU7W7iHhHX7Z7fHeH[7\\\\7dH`HjNBZ8Q8iHZHjNM[8o7eHoGnN;[8j7dHhGQOa0Z8j7bHdGROf0[8h7cH_GROj0[8k7_HYH`7n7XHTHh7b9O1aNlERJT:l5PFQJQ:n5RFPJn9o5VFmIk9R6ZFiIg9W6fF\\\\IZ9c6b1000000000001O0000000000001O000000000000O100000000O1O1iNZITFf6c9bI^F^6`9dI_F]6^9fImEC1i6n9hIRFHEa6S:mIXFd6d9aI\\\\F^6b9dI^F\\\\6a9eI_F[6_9gIbFX6Z9lIfFT6V9PJjFP6S9SJmFm5Q9UJoFl5>VIn6P1dHj5;[Im6m0hHi58]Ih6Q1oHc56`Ic6T1WI]53bIa6U1\\\\IZ50cIb6U1^IZ5DlIj6m0bIk6Y6ZIfIg6i5iIWJW6b5PJ^JQ6\\\\5TJdJn5W5UJiJm5Q5WJoJk5i4[JWKg5`4aJ_K`5\\\\4dJdK]5X4fJhK[5T4hJlKZ5n3jJRLX5e3oJ[LT5Y3VKfLl4U2mF_M_4<f4i1_LWNc3`1dL`N_3Y1eLgN_3Q1fLnNd3b0bL^Og32`LNh3E]L;T;1O0001O000000001O0001O001O001O00001O00001O1O2N1O10O000001O1O1O3M5K1O00000001O1O=C8I2M1O2N001N101O00001N10VUd8\"}}, {\"image_id\": 116, \"category_id\": 1, \"bbox\": [409.0, 80.0, 254.0, 444.0], \"score\": 0.999993085861206, \"association_id\": 2, \"light\": [-2.3119454383850098, -2.2191927433013916, 2.2144832611083984, 2.0210072994232178], \"segmentation\": {\"size\": [608, 738], \"counts\": 
\"kYc72lb04M2O00001O1O1O00001O010O0000010O01O1O0000001O00001O00000000000O100000HJe]O8Xb0Kf]O6Yb09O1000001N100000001O002N1O001O00001O1O00001O001O00000010O010O001O00100O1O001O010O0010O0000010O0010O01O001oG\\\\OdMd0V2O]M2a2JWFFm6a0j2I]FDh6d0j2G_FFd0DW5n0e3I`FG?LKO_4e0g4HaFI:m0Z4Cj4GbFK6Q1Z4^Ol4GdFL2S1]4ZOk4IcFM3R1^4YOi4g0gF1`4XOf4l0gFMc4WOd4m2[KTMb4n2^KSM^4P3bKQM\\\\4P3eKoL[4P3fKQMY4n2hKRMX4m2iKSMW4l2kKSMU4c0SG>j4POR4`0YG<f4TOn3c0[G<e4QOn3f0\\\\G<d4nNn3h0\\\\G>c4jNP4i0\\\\G?c4iNo3j0\\\\G`0c4fNP4^3nKcLP4`3oK_LP4f3lKZLS4n3fKRLX4Z4^KgK_4k4RKTKl4S5oJmJo4X5nJhJP5]5mJcJR5c5iJ^JU5m6aISI^6`7PI`Ho6k7hHUHU7T8dHlG[7Y8aHgG]7`8_H_G^7j8]HUG\\\\7[9PIPFk6Z:RIdEk6a:SI`Ek6c:TI\\\\Ek6e:VIZEj6g:VIXEk6g:WIVEj6j:XITEh6l:YISEg6m:YISEg6m:ZIREf6n:ZIQEg6o:ZIoDg6Q;d00000000O1O1O100O1O1fNkDhJV;l4XERKg:m4]EPKc:P5`EmJa:R5bElJ]:T5fEiJZ:W5iEfJg8OSG\\\\57cJg82PG\\\\5:`Jk80lF_5a0WJj84fFe5P;VJREj5Q;PJREo5P<0001O1O001O001O000000001O0001O00001O001O001O2M3N3L3N2M4QNoIhFU6j8[JoFo5]8jJlFg5b8e2lNT1H6K6J6J6I6J5K6K4L4K4M4XOUCTKQ=`4n0H8K5N3000O1O2N1O002N2N2O1O01O0O1N101N2nKcA^3]>[LkAc3m>L4L6J6J7I6J4L3M4L9F9G8H:E9G9D=BPX\\\\1\"}}, {\"image_id\": 116, \"category_id\": 1, \"bbox\": [307.0, 93.0, 88.0, 264.0], \"score\": 0.9943506717681885, \"association_id\": 3, \"light\": [-1.8827636241912842, -2.520230531692505, 1.803507685661316, 2.4170329570770264], \"segmentation\": {\"size\": [608, 738], \"counts\": 
\"U]f5;cb04K4j@Ag;b0QDGn;8RDHn;8RDHo;7QDHS<4oCKW<NQDJVMOh05h:=kD]OQ3o1f7iNSI[1i6fNYEWOX39RL\\\\1\\\\;UOYEXOg38gKi0X;GYEZOk3MkKP1Q;KVEZO\\\\4h0_60QE[O`4d0_69fDWOk4?_6l0`ISOa6n0^IROb6n0^IROb6o0]IQOc6P1^HYNoLd0d:U1UH]NYM;c:Y1SH]NZM8d:]1nG_N]M2f:a1jG_NaMLEIY:o1\\\\H_NhM\\\\OL8o9P2[H\\\\NeNDP9R2ZHYNfNEP9U2XHUNhNFP9_2nGkMQOGQ9a2lGhMROGR9b2kGgMSOGR9c2kGfMROGS9c2kGfMROGS9c2mGdMPOIS9d2mGbMPOJS9e2nG]MQONQ9g2PHRMTO7l8j2oIUMQ6m2^FkLU28^7l2lITMT6k2mIUMS6k2mIUMS6j2nIVMR6i2oIWMQ6=[E_1e4TNQ6:bE[1^4[NR63gE^1Y4_NR7\\\\1RIdNP7U1UIkNm6o0WIQOk6i0YIWOi6e0YI[Oj6`0XI@m68VIHn60VI0l6IYI7h<0000000TMIoB8o<OkB1V=0hB0[=NeB1]=NbB2_=MaB3_=MaB3_=MaB3`=L`B4_=N`B3_=NaB1_=1_BOY`0000000001O000000000001N1N3MUZ[6\"}}, {\"image_id\": 117, \"category_id\": 1, \"bbox\": [136.0, 490.0, 108.0, 81.0], \"score\": 0.9760500192642212, \"association_id\": 5, \"light\": [-2.209212303161621, -2.0941460132598877, 2.0924034118652344, 1.9137473106384277], \"segmentation\": {\"size\": [768, 1024], \"counts\": \"P`V36ig04L3M3L?A<D5K4K8J4L1O2N1O101O0O2O0O101ZZOgMce0Y2]ZOgMce0Y2]ZOgMce0Y2]ZOhMbe0X2^ZOhMbe0X2^ZOhMbe0X2^ZOiMae0W2_ZOiM`e0X2`ZOhM`e0X2`ZOhM`e0X2`ZOhM`e0X2`ZOiM_e0W2aZOiM_e0W2aZOiM_e0W2aZOiM_e0\\\\20JaZOkM_e0U2aZOkM_e0U2aZOkM_e0U2aZOkM_e0U2aZOkM_e0U2aZOkM_e0U2aZOkM_e0U2aZOkM_e0U2aZOlM^e0T2bZOlM^e0T2bZOlM^e0T2bZOlM^e0[2000000001O000000000JaZOkM_e0U2aZOkM_e0[2001O001O001O001N3N1O1O2N1O1O2M2O001N2O0O2O000bNQZOl0ne0ROWZOk0ie0TO[ZOi0ee0VO]ZOi0de0UO_ZOi0be0TObZOj0\\\\f0N1O2N2M1000O010000O10000O1O2N1O1N2N2O6Hl^Xb0\"}}, {\"image_id\": 117, \"category_id\": 1, \"bbox\": [780.0, 471.0, 137.0, 84.0], \"score\": 0.9999969601631165, \"association_id\": 4, \"light\": [-2.066602945327759, -1.530534029006958, 2.067291736602783, 1.3636939525604248], \"segmentation\": {\"size\": [768, 1024], \"counts\": 
\"^_Yb0a02@Xg0g0M2N4K3N101N1O2O0O2N100O10000O1O100O10000O1O101N101N1O2O001N2O1N101O1N2O001O001O0QZOQNme0o1SZOQNme0o1SZORNle0n1TZOSNke0m1UZOSNke0S2N4L1O001OD]ZO[Nce0e1]ZO\\\\Nbe0Q201O0000C_ZO\\\\N`e0d1`ZO\\\\N`e0d1`ZO\\\\N`e0d1`ZO]N_e0c1aZO]N_e0c1aZO]N_e0c1aZO^N^e0b1bZO^N^e0b1bZO^N^e0b1bZO^N_e0a1aZO_N_e0a1aZO`N^e0`1bZO`N^e0`1bZO`N^e0`1bZO`N^e0`1bZO`N^e0`1bZO`N^e0`1bZO`N^e0`1bZO`N^e0`1bZO`N^e0`1bZOaN]e0Q2000000000000000@cZO]N]e0c1cZO]N]e0c1cZO]N]e0c1cZO]N]e0c1cZO\\\\N^e0d1bZO]N]e0c1cZO]N]e0c1dZO\\\\N\\\\e0d1dZO\\\\N\\\\e0d1eZO[N[e0f1dZOZN\\\\e0f1eZOYN[e0h1eZOWN[e0i1fZOVNZe0k1eZOUN\\\\e0k1dZOTN\\\\e0l1eZOSN[e0n1dZORN]e0m1dZORN]e0n1bZORN_e0m1aZOSN`e0m1_ZOSNae0m1_ZOTNae0k1`ZOTN`e0k1aZOUN`e0j1`ZOVN`e0j1_ZOWNbe0g1_ZOYNbe0f1^ZOZNee0b1[ZO_Nfe0_1YZObNhe0k10jN]ZOOce0NlZOFTe09mZOGUe03nZONcf0O001O000O2O00001O00000O101O0OUg_2\"}}, {\"image_id\": 117, \"category_id\": 1, \"bbox\": [560.0, 638.0, 173.0, 129.0], \"score\": 0.8247973918914795, \"association_id\": 1, \"light\": [-1.61687433719635, -2.4304730892181396, 1.514270544052124, 2.2244136333465576], \"segmentation\": {\"size\": [768, 1024], \"counts\": \"[eT=T1if04N2N1O2L4ZOe0L5N2N100O1N2O1O100O100O1O100O2O0000001O00001O3M2N5K1O1O1O:F9G2N1O001O001O1O001O0000000000O100O100000000000000000000001O00000000000000000000000000000000000000000001O1O1O001O00001O001O1O001O001O001O002N1O001O001O1O1O1O1O001O4L2N1O1O1O1O4L2N1O001O1O1O1N10010O00001O001hMgZOg1[e0UNjZOg1je0N2N1O1O001O001O1O001O1O1O1O1O1O1O2TOdYO0Vg00O10000001N102N1O000O2O1N5FYXOOYXj6\"}}, {\"image_id\": 117, \"category_id\": 1, \"bbox\": [802.0, 379.0, 67.0, 40.0], \"score\": 0.999229371547699, \"association_id\": 6, \"light\": [-1.94554603099823, -1.8613712787628174, 1.9363211393356323, 1.659966230392456], \"segmentation\": {\"size\": [768, 1024], \"counts\": \"Qlib0295Rg0`0M100O101O0O2O2N1O2N1O001O000000000000OTYOPOjf0P1VYOPOjf0S1000001O00000000000000001O0000000000000001O001O001O1ITYOXOmf0m0001O2N010O1O3ElXODVg09kXOGWg03mXOMUg0NoXO1`g0O101O001O0O2OPkc3\"}}, {\"image_id\": 
117, \"category_id\": 1, \"bbox\": [331.0, 391.0, 73.0, 50.0], \"score\": 0.980665385723114, \"association_id\": 3, \"light\": [-1.9935696125030518, -2.0126826763153076, 1.9523273706436157, 1.7911441326141357], \"segmentation\": {\"size\": [768, 1024], \"counts\": \"fdh79bg0c0^O5M4M1O2N2O1N2O1O1O00000000000000000`YObN^f0^1bYObN^f0^1bYObN^f0`11O00000MaYOeN_f0[1aYOeN_f0[1aYOeN_f0[1aYOeN_f0[1aYOeN_f0[1aYOeN_f0[1aYOeN_f0Z1bYOfN^f0Z1bYOgN]f0Y1cYOgN]f0Y1cYOgN]f0Z1bYOfN^f0Z1bYOfN_f0Y1aYOgN_f0Y1aYOgN_f0Y1aYOgN_f0Y1aYOgN`f0X1`YOhN`f0X1`YOhN`f0X1`YOhNaf0W1_YOiNbf0V1^YOjNcf0U1]YOkNff0R1ZYOnNgf0Q1YYOoNif0S1001O001O001JTYOVOmf0i0SYOWOof0g0QYOYOXg0c03M2HcXOHag03`XONbg0O^XO2gg0100O1001N100O1O`b`>\"}}, {\"image_id\": 117, \"category_id\": 1, \"bbox\": [642.0, 388.0, 88.0, 78.0], \"score\": 0.9999831914901733, \"association_id\": 2, \"light\": [-1.9115309715270996, -1.618683934211731, 1.900923728942871, 1.4824225902557373], \"segmentation\": {\"size\": [768, 1024], \"counts\": \"ZmQ?9dg08J2N2N3M1O2N2O0N3N2N1M4G8N3M3M2O2N1N2O1O1O2O0TZORNfe0n1XZOUNge0k1YZOUNfe0l1ZZOTNfe0l1ZZOUNee0S2000IZZOTNfe0l1ZZOTNfe0l1ZZOUNee0k1[ZOUNee0S20I[ZOTNde0l1\\\\ZOTNde0l1\\\\ZOTNde0l1\\\\ZOTNde0l1\\\\ZOTNde0l1\\\\ZOTNde0l1\\\\ZOUNce0k1]ZOUNce0k1]ZOUNce0k1]ZOUNce0k1]ZOUNce0k1]ZOUNce0k1]ZOUNce0k1]ZOVNbe0j1^ZOVNbe0T20G^ZOTNbe0l1^ZOTNbe0l1^ZOTNbe0l1^ZOTNbe0l1^ZOTNbe0l1^ZOTNbe0U2000000000001O0001O00001N3N1EYZO[Nhe0i0XZOG0Aje0b0]ZOHKFie0?aZOFHKhe0=bZOEHNie08T[OHnd03U[OMmd0NV[O2]f0O000O2O001N10001NfQl6\"}}, {\"image_id\": 117, \"category_id\": 1, \"bbox\": [236.0, 382.0, 65.0, 50.0], \"score\": 0.9999468326568604, \"association_id\": 7, \"light\": [-2.15321683883667, -1.5541000366210938, 2.1321964263916016, 1.4092891216278076], \"segmentation\": {\"size\": [768, 1024], \"counts\": 
\"^\\\\a5h0Ug04N3L4M1N2WYOkNff0Y1NM\\\\YOlNbf0V1^YOjNaf0[10L_YOiN`f0X1`YOiN_f0W1aYOiN_f0W1aYOiN_f0\\\\100OLbYOhN^f0]100000KbYOhN^f0X1bYOhN^f0X1bYOhN^f0X1bYOhN^f0X1cYOgN]f0Y1cYOgN]f0Z1bYOfN^f0Z1bYOfN^f0Z1bYOfN^f0Z1bYOfN^f0Z1bYOfN_f0Y1aYOhN^f0X1bYOhN^f0X1bYOhN^f0X1bYOhN_f0W1aYOiN_f0W1aYOiN`f0V1`YOjNaf0U1_YOkNaf0U1_YOkNbf0T1^YOlNbf0S1^YOnNcf0Q1]YOoNdf0P1\\\\YOPOef0o0[YOQOff0n0ZYOROgf0m0YYOSOhf0l0XYOTOif0k0WYOUOjf0j0VYOVOkf0h0VYOWOlf0h0TYOXOPg0NSYO?MCYg0;gXOE\\\\g08dXOH]g07bXOJag00bXO0fg0100O001000O103Khjm`0\"}}, {\"image_id\": 117, \"category_id\": 1, \"bbox\": [156.0, 359.0, 66.0, 46.0], \"score\": 0.9997868537902832, \"association_id\": 8, \"light\": [-1.817606806755066, -1.6981611251831055, 1.7811594009399414, 1.5236561298370361], \"segmentation\": {\"size\": [768, 1024], \"counts\": \"P\\\\e33bg0a0H8I2L4M2O1O1O100O10O01ZYOiNcf0W1]YOjNbf0V1]YOkNcf0U1]YOkNcf0U1]YOkNcf0U1]YOkNcf0T1^YOlNaf0U1_YOkNaf0U1_YOkNaf0U1_YOkNaf0U1_YOkNaf0U1_YOkNaf0U1_YOkNaf0U1_YOkNaf0U1_YOkNaf0U1_YOkNaf0U1_YOkNaf0U1_YOlN`f0T1`YOlN`f0T1`YOlN`f0T1_YOmNaf0S1`YOlNaf0S1_YOlNbf0T1^YOlNbf0T1^YOlNbf0T1^YOlNcf0S1]YOmNdf0R1\\\\YOnNdf0R1[YOoNff0P1ZYOPOgf0n0ZYOSOff0l0YYOUOjf0h0VYOXOkf0g0UYOYOlf0f0TYOZOmf0d0SYO]Onf0a0RYO@nf0`0RYO@of0>QYOCPg0<PYOC\\\\g01dXO0`g0K`XO6eg00O1000000O11N100O1O2N2NfSib0\"}}, {\"image_id\": 117, \"category_id\": 1, \"bbox\": [874.0, 459.0, 66.0, 64.0], \"score\": 0.9973750114440918, \"association_id\": 10, \"light\": [-1.095752239227295, -3.4410297870635986, 1.027757167816162, 3.2497246265411377], \"segmentation\": {\"size\": [768, 1024], \"counts\": \"en_d03jg03O2N100O3M2O0O2O1N2O1O1O0O2O2N2N3M5K5WYOnNZf0]1O1O010O1O1O001O5K1O001O00000000JoYO]NQf0c1oYO]NQf0c1oYO]NQf0c1oYO^NPf0b1PZO^NPf0b1PZO^NPf0b1PZO^NPf0b1PZO^NPf0b1PZO^NPf0b1PZO^NPf0b1PZO^NPf0b1PZO^NQf0a1PZO^NPf0b1PZO^NPf0b1PZO^NPf0b1PZO^NPf0c1oYO]NQf0c1oYO]NQf0c1oYO]NQf0b1PZO^NQf0a1oYO_NQf0a1nYO`NRf0`1nYO`NSf0_1mYOaNSf0_1mYOaNTf0]1mYOcNTf0\\\\1lYOcNVf0\\\\1kYOcNVf0\\\\1;LYan1\"}}, {\"image_id\": 117, \"category_id\": 1, 
\"bbox\": [462.0, 329.0, 56.0, 35.0], \"score\": 0.9166027307510376, \"association_id\": 9, \"light\": [-1.1616456508636475, -2.2552239894866943, 1.078052282333374, 2.053464412689209], \"segmentation\": {\"size\": [768, 1024], \"counts\": \"^jj:5jg03M9G3N5J10000lXOXOPg0h0oXOZOPg0f0PYO[Oof0e0QYO[Oof0e0QYO\\\\Onf0d0RYO\\\\Omf0e0SYO[Omf0e0SYO[Omf0e0SYO[Omf0e0SYO[Omf0e0SYO[Omf0e0SYO[Omf0e0SYO[Omf0e0SYO\\\\Omf0c0SYO]Omf0c0SYO]Omf0c0SYO]Omf0c0SYO]Omf0c0SYO]Omf0c0SYO]Omf0c0SYO]Omf0c0SYO]Omf0c0SYO]Omf0c0SYO]Omf0c0SYO]Omf0c0SYO]Omf0c0SYO]Omf0c0SYO\\\\Oof0c0QYO]OPg0i01IoXO]ORg0a0oXO_ORg0`0mXOATg0>lXOBWg0:iXOG[g0OkXO1bg0000000000000000001N10001OjTk;\"}}, {\"image_id\": 118, \"category_id\": 1, \"bbox\": [951.0, 492.0, 70.0, 65.0], \"score\": 0.9998806715011597, \"association_id\": 1, \"light\": [-0.5989295244216919, -4.0478315353393555, 0.5641908645629883, 3.8672099113464355], \"segmentation\": {\"size\": [768, 1024], \"counts\": \"ViYf03lg01000001O0UOLRYO<9H_f0d0^YO\\\\O`f0h0^YOXO`f0l0_YOTO^f0o0aYOQO_f0R1^YOnNaf0Y1OL`YOjN_f0\\\\1OMbYOfN]f0_1O1O100O1OMgYOaNWf0`1jYOaNUf0^1kYOcNTf0]1lYOdNTf0\\\\1kYOeNTf0[1mYOeNRf0Z1PZOfNPf0Z1PZOfNPf0Z1PZOfNoe0Z1RZOfNne0Z1RZOfNme0\\\\1RZOdNne0\\\\1RZOeNme0\\\\1RZOdNne0\\\\1RZOdNne0]1RZObNne0_1QZOaNne0a1QZO_Noe0b1QZO]Noe0c1RZO\\\\Nne0e1QZO[Noe0f1QZOYNne0h1RZOXNne0m10LRZOVNne0j1RZOVNne0n11O00000LQZOWNoe0i1QZOWNoe0h1RZOXNne0h1QZOYNoe0g1QZOYNoe0f1RZOZNne0f1RZOZNoe0d1QZO]Noe0b1RZO^Nne0a1SZO_Nme0a1RZO`Nne0_1SZOaNme0_1SZOaNme0`1RZO`Nne0`1RZO`Noe0`1QZO^NPf0a1:M3M^h1\"}}, {\"image_id\": 118, \"category_id\": 1, \"bbox\": [319.0, 491.0, 160.0, 244.0], \"score\": 0.9999995827674866, \"association_id\": 2, \"light\": [-2.609635353088379, -1.5210497379302979, 2.4842567443847656, 1.4353506565093994], \"segmentation\": {\"size\": [768, 1024], \"counts\": 
\"_n_74lg0001N10000000000000000000001O00000000000O100000000000O100001O0000000000001O000000001O00000000001O000000001O0000001O00001O001O00001O0QKa0]A@]>i0_AWO_>Q1[AoNc>Y1WAgNg>^1VAbNh>d1TA\\\\Nj>m1o@TNn>Q2o@oMP?U2m@kMR?_2e@aMX?k2_@UM_?V3X@jLg?]3S@cLl?W4[_OiKd`0\\\\4X_OdKf`0_4Y_OaKe`0b4Z_O^Kd`0d4\\\\_O\\\\Kc`0e4]_OZKb`0h4^_OXK_`0k4a_OUK]`0m4c_OSK[`0o4e_ORKY`0o4g_OQKW`0P5j_OPKS`0S5m_OmJo?W5R@gJk?]5U@cJi?^5X@bJf?`5Z@`Jc?c5]@[JTOC^`0S6^@YJd?h5\\\\@VJf?k5Y@TJg?n5X@RJh?o5X@oIi?T6T@kIm?[6m_OdIT`0d6d_O\\\\I\\\\`0l685K1O1O0000000001O0O2N2N3K5I6G:K5M4M2M2O1O2O0O10000O02N10O010O1O1O2N100O001N2`Na^O\\\\Laa0`3f^OZL^a0g2o]OWMec0c2P1K3M4M4M2M5K4M4K6Ja0^Ob0VOXVh<\"}}, {\"image_id\": 118, \"category_id\": 1, \"bbox\": [458.0, 487.0, 162.0, 243.0], \"score\": 0.9999996423721313, \"association_id\": 3, \"light\": [-2.345736265182495, -1.6979455947875977, 2.2251012325286865, 1.5720188617706299], \"segmentation\": {\"size\": [768, 1024], \"counts\": \"[Vh:1ng02O001O001O0O101O00001O000000001O01O000O1000000000000000000O1001O000000000000000000001O000000000000000000001O00000000000000001O00000000001O000001O^K3QAMk><PAEl>`0RA@j>f0TAZOh>l0VATOg>Q1WAoNe>Y1WAhNb>d1XA\\\\Ne>j1XAVNf>o1WAQNf>Y2S_OUNWOJ3Gaa0V5\\\\^OjJca0X5\\\\^OiJ`a0Z5`^OfJ]a0\\\\5d^OdJZa0]5g^OcJXa0]5j^ObJTa0_5m^OaJPa0b5P_O_Jm`0b5T_O^Jh`0f5X_OZJb`0k5__OUJ]`0n5d_ORJZ`0P6f_OPJW`0S6j_OlIU`0V6j_OjIT`0X6m_OhIP`0]6m_ObIS`0d6i_OZIW`0k6f_OPI^`0T746K3M1O001O00000000000000000000O100N2J6K6J5ZOf0M4O2M3N1N2O0O2N2O2M3N1N3N1kNl]O[LVb0b3n]O\\\\LTb0`3P^O]LTb0X3X^OdLja0S3a^OhLca0P3e^OkL`a0k2P2I7K6K5K5L4M3M4L3L5L;D6J4K8H9Eo]^9\"}}, {\"image_id\": 119, \"category_id\": 1, \"bbox\": [644.0, 564.0, 124.0, 354.0], \"score\": 0.9999995827674866, \"association_id\": 2, \"light\": [-1.8138419389724731, -2.108020067214966, 1.653789758682251, 1.9222205877304077], \"segmentation\": {\"size\": [1024, 768], \"counts\": 
\"eiTd07co09L2\\\\NAeSOa0ok04fSOMSl0<jSOEQl0b0lSO^OPl0o0gSOROUl0a1\\\\SO_N_l0i1_SOVNZl0R2fSOlMTl0]2jSOcMQl0X3M3N3M3M3LI\\\\TOXLak0l3_TOTL]k0P4cTOPL\\\\k0Q4dTOoKZk0\\\\4O2M2^O`K_UOf4[j0c0I7I7iN^JiWOg5Rh0W1H7J7L4L3N3K4M3N2N3L3M4M3N1O2M2O1N2N3N2N2O1N2O1O1O001O1O1O1O00001O1O1O001O1O1O1O1O001O001O1O001O1O001O1O00001O001O1O1O1O1O1O1O1O1O1OSN]ZO^Ibe0b6^ZO_Iae0a8O2N1O1O2N2N1O1O1O1O1O001O001O1O001O001O10cMS[OeIod0m2i\\\\OVK[N>2_1`m0N2O2M>]OYUO\"}}, {\"image_id\": 119, \"category_id\": 1, \"bbox\": [351.0, 546.0, 275.0, 478.0], \"score\": 0.9892474412918091, \"association_id\": 3, \"light\": [-1.228653073310852, -3.0768086910247803, 1.1624512672424316, 2.9321439266204834], \"segmentation\": {\"size\": [1024, 768], \"counts\": \"\\\\fo:3mo01N100000000000000000000000000001N10000O10Poc08aP\\\\O:I6L2N3L4M3L3L5L4L3M4M1O2L3O2M2O2N101N1O2N2N1N3N2RWOfMTd0[2[[ODVc0>e\\\\OMSc05j\\\\O2Pc00m\\\\O6nb0KP]O:lb0HQ]O>kb0BS]Oe0gb0\\\\OV]Ol0eb0TOV]OX1bb0iN[]Oa1^b0^Nb]Oi1Xb0TMmZOoNm2R4dN]K_a0a1T]OiNk2^4]NdKYa0U1a]OhNh2b4[NbKZa0W1_]OlNg2^4\\\\NaK]a0X1[]OQOe2[4_N]K_a0`2m_OY2]NZKda0c2f_O]2^NSKja0f2Z_OQ3XN]J\\\\b0i2Q_OX4m`0lKm^OW4Ra0mKi^OU4Va0nKg^OS4Ya0oKc^OT4\\\\a0oK_^OS4`a0RLY^OR4fa0U5O001O00001N100O100O100O100O1O1O1N2N2O2M2O1O1O1O1N2N2M3M3N2O1N2O1000000000000O0100000000O100O100O100O10000001O00001O0000001O00001O000000001O0000000001O0001O000001O001O001O001O1O1O2N2N2N3M2N2N3M2N4L6J6J8H8H4L3M2N1O1O2M2O2N2N2M3N2N1N3N1M4L4I8hNZ1ZOh0D;F7J4M4L3N3M2M4M3M4K4M3ZORXOUIQh0i6oWOWISh0g6lWOZIVh0d6jWO\\\\IXh0U72N1O2N3M2N2000000101O00O0O100N2O1O2M6K8G5L2N1N2OO01N2O1O1O1O1PN[VOcMfi0Z2_VOcMoi0m1VVOoMQj0i1UVORNoi0i1XVOPNmi0j1^VOkMfi0P2jVOaM[i0e1m2PO\\\\RO[Omm0?m0K5K[W]4\"}}, {\"image_id\": 119, \"category_id\": 1, \"bbox\": [221.0, 543.0, 143.0, 436.0], \"score\": 0.999858021736145, \"association_id\": 1, \"light\": [-1.604217767715454, -1.2465581893920898, 1.5239500999450684, 1.1088776588439941], \"segmentation\": {\"size\": [1024, 768], \"counts\": 
\"lkm68go02N1M3N3N1N1L5N010VJa0SWOZOn26je0j0mVO_On2IRf0X2gYOiMUf0`2fYO`M]MVO`h0f3nYO_Mee0n2UZOXM^e0W3ZZOlL^e0]3_ZOdL[e0c3cZO^LXe0h3eZOZLWe0k3gZOVLSe0R4jZOnKRe0Z4jZOfKSe0_4jZObKUe0`4jZOaKTe0a4jZO`KUe0b4iZO_K4[N_c0W6]\\\\O_KLcNdc0P6^\\\\O^KGkNhc0h5`\\\\O_KElNic0g5a\\\\O]KBPOkc0f5a\\\\O[K_OUOmc0d5b\\\\OWKXO^OSd0_5c\\\\OTKROEYd0[5c\\\\OhL[c0[3d\\\\OdL[c0_3c\\\\OaL\\\\c0a3c\\\\O_L]c0c3a\\\\O]L^c0e3b\\\\OZL^c0g3c\\\\OWL]c0l3a\\\\OSL^c0Q4S\\\\O[Lmc0l700O1O1O1N2L4M3N1N3K5_Oa0L4N20O0000001O0O1L4L3L5M202M2M4K5M2N3N3M2M4L3M3O1XNo[O`GRd0Y8U\\\\OgGkc0Q8]\\\\OnGdc0n7a\\\\OPH`c0n7b\\\\ORH`c0j7b\\\\OUHac0h7a\\\\OUHcc0h7^\\\\OVHec0h7]\\\\OiGiN@Ue0d8T\\\\OjGiN@Ue0d8S\\\\OlGSd0P8Q\\\\OoGjN@md0[8^\\\\OTHnc0h7U\\\\OWHoc0c7T\\\\O]Hoc0]7S\\\\OcHRd0V7Q\\\\OiH\\\\d0g6h[OXIbd0X6e[OgI]d0R6i[OmIYd0m5l[ORJVd0i5n[OVJTd0d5Q\\\\O[JQd0^5T\\\\ObJoc0T5Z\\\\OiJjc0n4i3K4L4K4L5L4L3O3M3N4K4L3N2N3N2bMUTOa0Qn0E4K3N7H:F5L2M10N3N2M4M4K3O0O0100O010O1O1N3L3NcXc<\"}}, {\"image_id\": 120, \"category_id\": 1, \"bbox\": [187.0, 9.0, 88.0, 181.0], \"score\": 0.9999994039535522, \"association_id\": 2, \"light\": [-2.1347641944885254, -1.7876262664794922, 2.0574452877044678, 1.6994097232818604], \"segmentation\": {\"size\": [599, 506], \"counts\": \"Va]32db02N2N10000[^OJi`06V_OMh`03V_O0i`00V_O1j`0OV_O2i`0NV_O3k`0LU_O5j`0KU_O6k`0KS_O6m`0JR_O7o`0Io^O7Ra0Il^O9Ta0Hj^O9Va0c0100O100O10000000QMROTDo0k;SOTDn0j;TOTDm0j;UOVDk0j;VOTDk0l;TOeCMQNQ1Y>SOdCNRNP1Z>QOcC2QNn0]>nNaC6oMo0`>kN`Cd1`<\\\\N_Cf1a<XN`Ci1`<VN`Cj1a<UN_Cl1`<UN^Cl1a<VN^Cj1b<WN]Cj1b<WN[C?YNa0]>POVCb0]N>\\\\>SOSCa0bN;[>UOmBd0jN6Y>WOkBc0nN5W>ZOfBc0UO2^=nNmB>=b0YO2Z=VOkB8JT14\\\\OX=\\\\OfB7GT1=bNC7b=0cBP2:hMC6`=j2QCoLA5_=j2QCQMB3^=i2SCTM_O2^=i2UCTM^O2^=h2UCVM^O2]=g2VCWM]O1^=g2VCXM\\\\O0_=h2UCXM\\\\OOa=h2SCXM_OM_=l2QCWMb=i2]BWMe=i2ZBWMh=h2VBYMl=f2SBZMY>\\\\2eAdM]>[2aAfMa>Y2^AfMd>d1o@bN;Kh>`1PAdN7Kk>_1PAgN3Jo>\\\\1o@lNOIT?Y1m@SOLAZ?Z1k@XOc?f0^@\\\\O`?c0a@]Ob?>`@Ao?0R@OQ`0NP@1S`0Kn_O4U`0Im_O5V`0Hm_O6V`0Em_O8[a0N5J]PW4\"}}, {\"image_id\": 120, \"category_id\": 1, 
\"bbox\": [271.0, 57.0, 87.0, 215.0], \"score\": 0.9999999403953552, \"association_id\": 4, \"light\": [-2.3803296089172363, -2.4910342693328857, 2.2698817253112793, 2.3196003437042236], \"segmentation\": {\"size\": [599, 506], \"counts\": \"Shn45_b06K3M4[^OBk`0b0Q_OBb`0k0[_OWOc`0k0Z_OWOd`0`1O1N100O1O1O1000000O10O012M2NZO\\\\N]@a1a?gN\\\\@V1c?kN_@S1`?oN`@Q1`?mNb@R1_?mNb@R1^?nNc@R1\\\\?nNe@R1Y?nNi@Q1W?oNj@Q1V?nNk@Q1U?POj@P1Q=YOdCG[OP1l<BeC^O^OP1l<EdC[O@Q1h<IgCVO@R1h<JfCSOBT1h<LcCPOEU1h<LaCoNGU1g<O`ClNHV1g<1`ChNIb0@@W=Y1^CdNK`0EAS=`1WC_N1`0G@Q=d1SC\\\\N651Jg<h1lB\\\\N;13Jh<n1bBZNb0N5Ig<f2RCaM8Hg<i2nB_M<Gf<m2lBZM`0Ib<R3kBSMd0K`<W3iBmLg0M^<X3kBhLi01n;f3XCWLk03l;i3XCQLn06j;j3XCmKP19h;l3]DSLd;l3\\\\DULc;j3_DULb;j3^DVLc;j3_DRLc;^3SChLa>R3iAdL]>U3f0M3M3I8K5oMR@T1R`0jNR@R1P`0kNT@S1m?jNW@S1m?gNY@W1g`0L4L6J3N3M2N1O3M1N3M2N3L7IQ^f2\"}}, {\"image_id\": 120, \"category_id\": 1, \"bbox\": [260.0, 332.0, 100.0, 233.0], \"score\": 0.9999666213989258, \"association_id\": 6, \"light\": [-1.7193775177001953, -2.626013994216919, 1.6952707767486572, 2.5421602725982666], \"segmentation\": {\"size\": [599, 506], \"counts\": \"cch4d0Rb04L4K3N2N2_OQOS_OU1i`0>00O1O1O1N2K5^NUNUBn1j=TNPBQ2P>lMQBV2o=iMQBX2o=hMoAZ2R>cMlAa2Q?4M3N2O10000O100O1KTM^@m2m>TM`A3Bh2n>kMRAS2o>j00OO02O0oMPA>P?]OWA?j>XOo@SO:b1R>cNjAi0LQO:a1o=3hA\\\\N:]1o=MhAdN11:[1j=2ZBaNN[1b=9cBZNM[1`=:\\\\CCe;QO_C]1m0B_;_1dD_N\\\\;b1fD\\\\NZ;d1gD[NX;f1iDYNW;g1jDWNW;i1iDXNW;h1iDWNV;j1kDUNU;l1kDSNU;n1kDQNU;P2jDPNV;Q2iDnMY;S2eDnMZ;T2eDkM[;W2cD^Mg;e2WDZMk;f2SDZMm;i2QDVMo;m2oCSMQ<P3lCPMU<Q3iCPMV<S3hClLY<X3aCiL`<Z3\\\\CfLe<\\\\3XCdLi<_3SCaLn<b3nB^LS=d3jB\\\\LW=e3gB[LZ=h3bBWL`=k3^BTLc=m3[BSLf=n3YBQLh=^42N1^NZBRNg=i1fBnM[=n1mBlMU=R2oBdMTO@Q>i2mBgMX=i0YB@c0FW=EiAb0b2Ig;AlA?b2O[?01O1O1O0000001O000O2N^nd2\"}}, {\"image_id\": 120, \"category_id\": 1, \"bbox\": [333.0, 302.0, 122.0, 252.0], \"score\": 0.9999995827674866, \"association_id\": 5, \"light\": [-2.01057505607605, -2.8892548084259033, 1.8902469873428345, 2.786228895187378], 
\"segmentation\": {\"size\": [599, 506], \"counts\": \"]XS64bb02O0O100O1O10O01e^O1n?NP@9l?GS@<k?CT@f0e?ZOW@n0f?QOX@T1e?lNZ@V1e?jNY@X1g?hNU@]1j?cNQ@b1o?^NP@d1o?]No_Oc1S`0]Nd_ON0g1\\\\`0=10O100O1O10OM3O1O2L4O1O00100O1TO[MfAe2T>eMgA\\\\2X>dMiA\\\\2U>fMkAZ2T>gMlAY2o<ZM]C<FZ2h<aM`C3J\\\\2c<eMaCOM\\\\2a<hM`CK0]2]<lMbCF2^2[<nMbCD3^2Y<QNbCB5]2V<UNcC^O8]2Q<[NcCZO<[2o;_NaCXO`0Y2m;dN^CUOe0W2h;CXD<f;GZDJUOUN^<T2\\\\DEYOVNU<[2bD]O]OVNm;a2fDXO_OVNh;f2hDSOAWNb;m2kDkNDXN^;S3lDbNH[NZ;W3[DdL8i14[NW;h3aDlM9\\\\NS;m3eDdM8`NQ;n3kD\\\\M5fNo:P4mDXM4hNl:S4QETM3iNj:^3iDjL<m01jNi:_3QEgL6m00mNf:`3WEfL2n00kNf:b3ZEeL0m01kNc:e3]EdLNl01kNc:e3`EdLLk02jNb:h3aEcLKk01jNc:g3cEbLJm01hNc:i3bE]LOR1MgNb:j3cEYL1V1Q;a2mDYL4V1n:a2nDYL4V1n:b2jD[L9S1l:a2QF_Mn9]2WFbMi9\\\\2YFeMf9[2ZFeMf9Z2cDcLW1S1V:Y2cDiLT1m0Y:Z2bDjLU1l0Y:[2`DfL\\\\1n0T:^2\\\\DcLd1n0P:j2SFTMm9l2SFTMm9]OaDe2c1lMn9ZOeDh2]1nMo9VOhDk2Z1mMk:h1[C_Mj1h0o:VO\\\\CS2KnMk1i0m:POfCV2EoMh1k0l:ROlCl1CVNd1l0m:SOUD`1]O_Nc1m0j:UOWD\\\\1^ObNa1l0i:XOZDW1]OgN^1h0k:]O\\\\Do0]OVOR1=U;_O]Dl0]OXOR1;U;A\\\\Dj0^O\\\\OQ15X;EZDh0^O^OQ1O\\\\;JWDg0]OAJBf08o;LVDf0]OFDCo0M^=d0RB2ZO@P`08k@d0U?\\\\Ol@`0W?@j@;[?Df@9\\\\?Id@4]?Oa@0`?0`@Na?1a@M`?3b@Hb?6b@Db?9b@Ac?<_1L4Ig^m0\"}}, {\"image_id\": 120, \"category_id\": 1, \"bbox\": [203.0, 440.0, 78.0, 89.0], \"score\": 0.9999993443489075, \"association_id\": 1, \"light\": [-1.382045030593872, -2.5899693965911865, 1.1870228052139282, 2.5425260066986084], \"segmentation\": {\"size\": [599, 506], \"counts\": \"aWg35Yb0=K2N4L3N0O2N1O2WOXOa_Oi0]`0ZOa_Og0\\\\`0B\\\\_Oa0a`0C[_O?c`0C\\\\_O>b`0m0N101NjNb_O5]`0Mc_O3\\\\`0Nd_O1[`0V1O2OkNg_OLX`0Z101N1O2O000O1O100O10000O1O10O1mNP@_OP`0a0P@^OQ`0b0o_O^OP`0b0R@\\\\Oo?c0R@]On?c0S@\\\\Ol?d0U@[Ol?e0U@ZOk?e0V@[Oj?d0W@[Oj?d0X@[Oh?e0Y@ZOh?d0Y@[Oh?e0Y@YOi?e0Y@ZOg?e0Z@ZOg?e0[@YOf?g0[@XOe?g0]@WOd?i0\\\\@VOf?h0\\\\@WOd?i0\\\\@WOe?g0^@WOb?h0`@WOa?g0a@WO`?h0b@VO`?i0c@RO_?m0V1O2N1N2N2O2M2O2N2N2N2M4LcUS4\"}}, {\"image_id\": 120, \"category_id\": 1, \"bbox\": [312.0, 154.0, 77.0, 182.0], \"score\": 
0.9999926090240479, \"association_id\": 3, \"light\": [-2.4010109901428223, -2.041396379470825, 2.2370071411132812, 1.9283287525177002], \"segmentation\": {\"size\": [599, 506], \"counts\": \"hif5145Vb0`0F3M2O2N1N2O1N102M1O2O0000001O0000O100O01O0100NMoNd^OP1k>TOZCT1f<nNVCT1j<nNRCU1j<POSCR1i<TOTCm0h<WOUCl0g<XOVCj0h<YOUCk0i<VOUCm0i<VOQCn0n<TOmBQ1P=TOiBP1U=UOcBJ^OJm=e0VBFOeN1g0h=b2ZBcL4h0`=f2lBXMQ=i2RCUMm<j2WCTMg<m2aClL]<U3gC_L`<b3Z1O1000N2O00100010M3O1WOSAeMP?Z2g000O10000O1O1N2O1N2M3O1N3M3M4^N]_Oi0^a0J5G;[OeXT2\"}}, {\"image_id\": 120, \"category_id\": 1, \"bbox\": [450.0, 1.0, 48.0, 23.0], \"score\": 0.8962678909301758, \"association_id\": 7, \"light\": [-2.429330825805664, -1.13275146484375, 2.4054479598999023, 0.9546248912811279], \"segmentation\": {\"size\": [599, 506], \"counts\": \"gWW88^b02O002N1O1O1OLi]OIVb08j]OGUb0:k]OFTb0?OMm]ODSb0;n]OEQb0<n]OEQb0<o]ODQb0<o]ODPb0=P^OCPb0=P^OCPb0=P^OCPb0a010O0101N1O1O010N2O11OM3001O001O00000000Mj]OEO0Ub0:m]OHSb08n]OHQb08o]OHQb07P^OIPb06R^OJna05S^OJma05<Mge4\"}}, {\"image_id\": 121, \"category_id\": 1, \"bbox\": [85.0, 518.0, 60.0, 92.0], \"score\": 0.9999998211860657, \"association_id\": 2, \"light\": [-1.6010644435882568, -2.248863458633423, 1.472118616104126, 2.032942533493042], \"segmentation\": {\"size\": [768, 782], \"counts\": \"gZP2:Zg0<H9K4K5K5L4K5J6L4J6J6J6N2N2N2N200O100O10O1O1M4FgZOlMXe0g1S[O[Nmd0`1V[ObNjd0Y1Z[OgNgd0U1][OjNdd0S1_[OmNad0Q1`[OoNad0n0a[OSO_d0i0e[OVO]d0`0k[OAWd08m[OHWd00n[O0Pf001O010O000010O010O0100O001O100O1O100O1O100O2N2NjUm>\"}}, {\"image_id\": 121, \"category_id\": 1, \"bbox\": [45.0, 341.0, 86.0, 158.0], \"score\": 0.9999969601631165, \"association_id\": 1, \"light\": [-2.0344443321228027, -2.28987455368042, 1.8854783773422241, 2.036294460296631], \"segmentation\": {\"size\": [768, 782], \"counts\": 
\"cTR1m0mf0;D;I4K5L5J6M2M2N3L6J4M3L3L6I7J7K7H;G3N6J7I4L4L1O00001O0000000001O1O2M4M<D2N3L3M2L40001N1jMU\\\\O4lc0IZ\\\\OM@gNXd0W1_\\\\O4cc0Ja\\\\OLUOnN^d0Q1b\\\\OYOVOLLK^d0k0^]OROTN5ad0f0`]O\\\\Ocb0`0]]OAeb0:^]OEhb01\\\\]OO\\\\e0OO1000O100000000000O010000000O100000001O00001O0000O101N1O2N10QiW?\"}}, {\"image_id\": 121, \"category_id\": 1, \"bbox\": [260.0, 329.0, 107.0, 113.0], \"score\": 0.9784096479415894, \"association_id\": 3, \"light\": [-1.4921032190322876, -2.145676851272583, 1.451856017112732, 1.9806708097457886], \"segmentation\": {\"size\": [768, 782], \"counts\": \"Y]S62gg09M1O1UOJfYO<JAee0k0UZOG5_Ohd07S[Ok15nMed0c2[[O]Mcd0e2][O[Mbd0f2][O[Mad0g2_[OYM`d0h2`[OXM`d0h2`[OXM_d0i2`[OXM`d0h2`[OXM_d0i2`[OXM^d0j2b[OVM]d0k2b[OVM^d0j2a[OWM^d0j2a[OWM_d0U31iNe[OgN\\\\d0R1S\\\\OeNmc0[1]\\\\O\\\\Ncc0c1]\\\\O\\\\Ndc0c1]\\\\O]Ndc0a1]\\\\O_Ncc0a1]\\\\O_Nec0\\\\1^\\\\OdNcc0m0k\\\\OSOXc0e0m\\\\O[Oic0MY\\\\O3je0000000000000001O0000000000000000000000000000O101GW`73n_H5K1O001O000000000000O1000000000000000000000000O10000000000000001O0H^XO2cg0L^XO4hg0O001O1N[jf9\"}}, {\"image_id\": 121, \"category_id\": 1, \"bbox\": [396.0, 291.0, 185.0, 366.0], \"score\": 0.9999683499336243, \"association_id\": 4, \"light\": [-2.3273000717163086, -1.928513526916504, 2.2268805503845215, 1.7737674713134766], \"segmentation\": {\"size\": [768, 782], \"counts\": 
\"VcY99eg03O1O00001O0001O00101N1OF:N2O2N1N2O0O3M2N2N3N1O1N2N2M4M2N2N2N2M3K5M2O2N2M3K5F9L5M3M2oLQMQAP3o>aM_@_2c?mMn_OS2T`0oMj_OP2W`0SNe_On1[`0TNc_Ol1]`0VNa_Oj1^`0XNa_Oh1_`0ZN__Oe1b`0]NZ_Oc1h`0_NU_O`1m`0aNQ_O^1Ra0cNl^O]1Sa0fNk^OZ1Sa0iNl^O`1j`0cNT_O_1i`0cNW_On0QOdMea0a1X_Oj0TOfMba0c1X_Of0WOhM_a0d1Y_Oc0YOjMZa0g1[_Oa0YOiMS>AlDZ2fMS1]=fLgD\\\\2iMP1_=gLbD]2mMm0a=iLXDb2UNg0c=iLhCo2bN9f=kLbCR3eN4i=mL\\\\CW3dNMQ>nLUCU7k<mHoBW7P=lHkBW7U=kHfBW7[=jHbBX7^=jH^BX7b=mHWBU7i=SImAP7R>_10000O10001OZNRB[Im=b6UB`Ij=^6XBcIg=[6[BfId=X6^BhIb=W6_BjI`=U6aBkI_=S6cBoI[=o5gBRJX=l5jBUJU=j5lBXJR=g5oBYJR=f5nB[JQ=f5nBZJR=f5nB[JQ=e5PC[Jo<e5QC\\\\Jn<d5RC]Jm<d5RC\\\\Jn<f5PC[Jo<h5oBWJQ=k5nBUJQ=m5nBRJR=o5nBPIDN^=S7QCiHD3Z=V7QCeHH4W=Y7oBbHK4V=^7lB]HO5U=`7jBZH16V=a7kBUH0:U=b7PCmGKb0V=a7RChGIh0W=_7oBhGIj0JSO\\\\=Y8nBmGHi0IXOf=l7gBSHJR1e=i6kBWIW=i6gBXIZ=g6fBYI\\\\=f6bB\\\\I^=d6aB\\\\Ia=c6]B^Ie=b6XB^Ij=e6RBZIo=i6oAVIQ>l6mASIU>R7gAUH21X>Z8PBaGQ>\\\\8RBbGP>^8oAbGR>`8jA`GY>R1eAh51UI[>k0mAn5IVIZ>c0YBU6]OVI\\\\>>`BY6TOYI]>:gBY6lN[I_>8lBY6eN_I`>5PCV2^N\\\\12WLc>2SC9QN\\\\1>o1KYLe>1VDo0`Md2ab0\\\\Lo]Ob3\\\\c0K7IN2O2N100O100000000000YM_[Oo1ad0PN`[OP2ad0oM`[OP2`d0oMa[OP2ad0nMa[OQ2`d0lMb[OT2`d0hMa[OX2dd0aM_[O^2Qe00000N2O2N2M3L4ROn0M3N2N2O2N1O1O1M4M3M8F9IZVf4\"}}, {\"image_id\": 122, \"category_id\": 1, \"bbox\": [619.0, 575.0, 205.0, 440.0], \"score\": 0.9999998807907104, \"association_id\": 2, \"light\": [-2.775064468383789, -1.181300401687622, 2.7195534706115723, 1.0464476346969604], \"segmentation\": {\"size\": [1200, 1129], \"counts\": 
\"QRff02YU1;H4L4L4K5K4M4L2N3M3M2M4M3N1O2N2N1bnNCQOjNWm0e1SSO<SObNbm0T1RSOg0mNdNjm0i0PSO[2nl0iMgRO`2Um0P3L4L4M2N2M2N3L3L4M3M3O0O2O010O100O1OlN^SO\\\\Ibl0c6dSOYI\\\\l0f6iSOUIXl0m6jSOkHZl0Y7`SOgHcl0P82O101N14M4L2M3N0O2O0O2N1N2O2nUO\\\\GTg0f8jXOaGPg0a8mXOcGQg0_8kXOeGRg0]8iXOhGVg0Z8eXOkGXg0W8dXOmG[g0T8bXOoG]g0S8`XOoG^g0U8]XOnGbg0X8UXOlGjg0o:M2O1O01N2N2M4L3N2M4KbWOnCfg0l;^XOUDbg0X;RYOfDPg0g:cYOWE^f0f:dYO[E\\\\f0e:cYO\\\\E]f0d:cYO[E]f0f:dYOXE]f0h:bYOXE^f0j:`YOVEaf0k:]YOUEcf0P;m15aUOjEXi0X;0000O1N1N3N1O1O1O2O01O0ZKZXObKfg0W4mXO]KSg0^4WYO]Kif0^4_YO_Kbf0[4hYO`KXf0[4YZOYKhe0c4iZOoJWe0n4R[OkJPe0R5U[OkJld0S5W[OkJjd0S5Z[OiJhd0T5^[OhJcd0V5k[O\\\\JWd0a5S\\\\OVJoc0h5W\\\\ORJkc0h5_\\\\OQJdc0k5S6L3M4L4L4O[VOiJRc0V5m\\\\OlJSc0S5m\\\\OnJSc0R5k\\\\OPKUc0o4k\\\\OTKTc0j4l\\\\OYKmIBkh0Q5Z]O_KhIBoh0k4[]OfKbI@Vi0e4Z]ORLib0i3Y]OXLjb0c3W]O]Lmb0^3T]ObLnb0[3S]OfLnb0f2_VOiLd6a0Pc0c2eVObL\\\\6k0Rc0`2_]O_Mdb0]2]]OcMeb0Z2\\\\]OfMeb0R2b]OoM^b0j1h]OVNXb0e1m]O[NSb0b1P^O^NQb0i0UVOfNn7a0ma0h0]VO^Nh7j0la0d0k^O[OUa0a0o^O_ORa0<R_ODn`07W_OIj`01[_OOf`0M]_O3d`0K]_O5d`0I]_O7d`0G]_O9c`0F^_O:b`0E__O;b`0Aa_O?hj00001O0000000000000O101O00000000001O0O1000000000000000001O000000000O100DZkNNfT11\\\\kNNeT11\\\\kNNdT11]kNNdT11^kNNcT11^kNMcT13=Nc^T;\"}}, {\"image_id\": 122, \"category_id\": 1, \"bbox\": [341.0, 662.0, 95.0, 127.0], \"score\": 0.9999558925628662, \"association_id\": 1, \"light\": [-1.799481749534607, -2.4855844974517822, 1.705861210823059, 2.342593193054199], \"segmentation\": {\"size\": [1200, 1129], \"counts\": 
\"nY`<3YU1;F<Df0\\\\O4K5H=D7I:F6K3N3K4K5M3O11O0O2OnmNeLnQ1[3SnNeLlQ1[3TnNgLjQ1X3XnNhLgQ1W3[nNiLeQ1V3\\\\nNjLdQ1U3\\\\nNlLdQ1R3^nNnLbQ1R3]nNoLbQ1R3^nNnLbQ1R3^nNoL`Q1R3`nNnL`Q1R3`nNoL_Q1Q3`nNPM_Q1R3`nNnL`Q1R3`nNoL_Q1Q3anNoL^Q1S3anNmL^Q1T3anNmL_Q1S3anNmL_Q1S3anNmL_Q1S3anNmL^Q1T3bnNlL^Q1S3cnNnL\\\\Q1R3dnNnL\\\\Q1R3dnNnL\\\\Q1R3dnNnL]Q1R3bnNnL^Q1R3cnNmL]Q1T3bnNlL^Q1T3bnNlL_Q1T3`nNlL`Q1T3`nNlL`Q1T3`nNlL`Q1T3`nNlL`Q1T3`nNkLbQ1T3^nNlLbQ1S3^nNnLaQ1S3_nNmLaQ1R3`nNnL`Q1Q3anNoL`Q1n2bnNRM^Q1m2cnNSM^Q1j2dnNVM\\\\Q1h2fnNXMYQ1g2hnNZMXQ1e2inN[MVQ1e2knN[MUQ1e2knNZMWQ1e2inN[MWQ1e2inN[MWQ1e2inN[MWQ1f2hnNZMXQ1g2hnNXMWQ1l2fnNTM[Q1n2bnNRM^Q1Q3`nNnLbQ1S3[nNmLiQ1P3WnNoLjQ1P3VnNPMjQ1Q3UnNoLlQ1P3UnNoLmQ1P3RnNPMnQ1P3RnNoLkQ1KXnNT3MQMkQ1MVnNZ3QR1O1UORnNgML0RR1R2cnNoM^Q1NkmNg1i0\\\\N^Q1HomNg1e0aNnQ1P1XnNoNmQ1j0WnNUOmQ1e0UnNZOPR1a0RnN^OTR1;omNBYR16\\\\^[i0\"}}, {\"image_id\": 122, \"category_id\": 1, \"bbox\": [70.0, 614.0, 137.0, 338.0], \"score\": 0.9999976754188538, \"association_id\": 3, \"light\": [-1.399128794670105, -3.0252525806427, 1.3072267770767212, 2.8926117420196533], \"segmentation\": {\"size\": [1200, 1129], \"counts\": \"Vib2:lR1f0fmN7kQ1T2E9H4L4K4M4L3N2N1N2N1N3O1O2N2N2O2M1O000O1N3O1O2N3M2O2M2N3L3M4I7K5J5L4L4N2L6I6L6J7J4M4L6Jc0]O5K3M3M3M3M3M3M5^TOXGRj0m8fUOWGYj0R9ZUORGfj0l9010O1O001O2L5J7I7J6K3J6B?I8G7J]SOkGBM_l0V8nSOSHRl0j7mSOZHRl0b7QTO_HPl0[7TTOgHlk0T7WTOmHik0n6\\\\TORIdk0g6cTOYI^k0_6iTOaIWk0[6mTOeITk0W6oTOhISk0T6PUOlIQk0P6RUOPJPk0k5SUOUJoj0g5SUOXJoj0f5RUOZJoj0d5RUO[JPk0c5QUO]Joj0a5SUO^Joj0_5SUO`Joj0^5RUObJPk0]5oTOcJSk0\\\\5lTOdJWk0Z5hTOeJZk0Z5fTOfJ\\\\k0Y5cTOgJ^k0Z5`TOfJak0Z5_TOeJbk0\\\\5\\\\TOdJek0\\\\5ZTOdJhk0[5WTOeJlk0Z5RTOeJRl0Z5lSOfJXl0X5fSOhJ]l0V5bSOjJal0T5^SOlJfl0P5ZSOPKkl0k4USOUK[m0\\\\4dROdKgm0Q4YROoKkm0m3UROSLPn0h3PROXLYn0_3gQOaLbn0V3^QOjLin0o2XQOPMRo01doNR2Z1mMQP1g1ooNYNXP1`1hoN`N]P1Z1doNfN`P1V1`oNiNdP1R1^oNnNlP1c0YoN]O]S10000000001O0O10000O101O1NTbgQ1\"}}, {\"image_id\": 122, \"category_id\": 1, \"bbox\": [745.0, 623.0, 240.0, 287.0], \"score\": 0.8466956615447998, 
\"association_id\": 5, \"light\": [-1.6550297737121582, -2.2614855766296387, 1.523871660232544, 2.178253412246704], \"segmentation\": {\"size\": [1200, 1129], \"counts\": \"^mYk03ZU13O1O2O000001O001O0000000000000000000000000000001O0000000000000000O10000000000O1000000000000O10000000000000000000000000000UI`0fWO@Th0h0kWOWOSh0m0kWOSOSh0Q1kWOPORh0\\\\1dWOdNZh0a1cWO_N[h0g1aWOYN^h0k1^WOVNah0n1\\\\WORNch0R2ZWOnMeh0U2YWOkMgh0X2VWOiMhh0[2UWOeMkh0\\\\2TWOdMkh0_2SWOaMlh0a2TWO^Mlh0e2QWO[Mnh0j2nVOVMQi0m2mVOSMSi0o2kVOQMTi0n2nVORMQi0k2SWOUMlh0h2XWOXMfh0h2\\\\WOXMch0g2_WOYM_h0h2bWOXM[h0i2hWOVMVh0i2mWOWMPh0i2SXOWMkg0k2UXOUMkg0m2SXOSMlg0R3PXOmLPh0[3jWOdLVh0_3hWO`LWh0c3gWO[L[h0f3g42N3LM4^O]nNTMfQ1i2bnNoLaQ1n2b0D<L4M3N2F:000000O1000000O1M31XROUNig0k1SXOYNmg0g1RXO[Nmg0e1SXO[Nmg0e1RXO]Nmg0c1RXO^Nng0b1QXO_Nog0b1PXO^NPh0b1oWO`N\\\\K@hk0P2lXO`N[KBhk0n1lXOaN[KBjk0l1jXOcN\\\\KAjk0l1hXOeN^K_Ojk0m1eXOfN`K^Okk0l1dXOgN_K_Omk0k1aXOgN`KAok0i1_XOgN`KBQl0h1^XOfNaKBRl0e1_XOiN^KCTl0b1_XOkN]KCUl0`1_XOmN\\\\KCXl0\\\\1]XOQOZKC^l0W1YXOVOYKCbl0S1UXOmNkJM?3cl0P1UXOoNjJM>4el0n0SXOPOkJN<5gl0l0VXOgNoJ647hl0l0eXOhNdJ<hl0m0SSOgNd5;[g0o0lROmNe54`g0a1\\\\XO`Neg0a1YXO_Ngg0c1WXO]Njg0d1TXO\\\\Nlg0e1SXO[Nng0e1QXO[NPh0g1mWOYNTh0g1kWOYNVh0h1hWOXNYh0i1eWOWN]h0i1aWOWN`h0k1]WOUNdh0o1dTOhM39Zk0T2\\\\TOgM75^k0W2_RO_M72?5\\\\13`k0R2VSOfML6^12`k0n1[SOmMmN0:4n11ak0l1\\\\SO]NPOGS20bk0j1\\\\SObNkNEW2Odk0g1[SOWOQ1ROek0e1\\\\SOXOn0TOgk0b1\\\\SOZOm0TOgk0b1\\\\SOZOm0TOhk0`1\\\\SO\\\\Ol0TOik0_1[SO\\\\Om0UOik0]1ZSO_Om0TOjk0[1ZSO@l0VOlk0V1ZSOCk0WOPl0n0XSOKh0VOVl0e0WSO4d0WOWl0b0VSO7b0XOYl0?VSO8b0YOZl0<USOOYN\\\\OX2:[l09WSONj0IQl05YSOOg0Lal0BkRO`0e0Nin00XQO0in0NXQO2in0LWQO5in0JXQO6in0HXQO8in0@^QO`0iQ10000000000O100000000000000000000000000000000O1000000000000O10000O1O1O100O1O100O2O0O100O2O00000O2O1MVlW5\"}}, {\"image_id\": 122, \"category_id\": 1, \"bbox\": [916.0, 679.0, 174.0, 184.0], \"score\": 0.9996600151062012, \"association_id\": 4, \"light\": [-1.8434827327728271, -2.6574783325195312, 1.782740831375122, 2.547135591506958], 
\"segmentation\": {\"size\": [1200, 1129], \"counts\": \"TXbQ11_U11O1N1ZN>^mNB`R1e0[mN[OcR1g0\\\\mN[OaR1h0]mNYOaR1i0_mNXO_R1i0`mNXO_R1i0amNWO^R1k0`mNWO^R1j0bmNVO^R1j0amNXO^R1h0amN[O^R1d0bmN]O^R1b0amN_O`R1`0`mNAaR1=_mNCbR1<^mNEaR1:_mNH]R1;cmNFYR1=gmNCWR1>jmNCUR1=kmNCTR1=mmNDRR1<nmNDPR1>PnNCmQ1?SnNBjQ1`0VnN@dQ1f0\\\\nN[O`Q1i0_nNWO_Q1k0anNUO^Q1l0bnNUO\\\\Q1l0dnNTO[Q1m0enNSOYQ1o0gnNROVQ1o0knNQOSQ1Q1mnNoNQQ1S1onNmNPQ1T1PoNlNoP1U1QoNlN\\\\P1XOVoNn1=jNTP1EYoNc1c0hNSP1R2moNmMRP1T2noNlMRP1U2moNkMRP1V2noNjMQP1X2noNhMRP1X2noNiMQP1X2noNhMRP1Y2moNgMSP1Z2loNfMTP1[2koNeMUP1\\\\2joNdMVP1]2ioNbMWP1a2goN_MYP1b2foN^MZP1d2eoN[MZP1h2doNYM[P1i2coNWM\\\\P1k2doNTM\\\\P1n2boNRM^P1o2boNPM^P1P3boNPM^P1Q3aoNoL_P1R3aoNmL_P1T3aoNkL_P1U3boNjL^P1W3boNhL^P1Y3boNgL]P1Z3coNeL]P1\\\\3boNdL^P1]3boNbL]P1`3boN`L^P1b3aoN]L_P1d3`oN\\\\L`P1e3`oNZL_P1\\\\4O1O1FPKRPOR5lo0;N100O1O100O00100OO21O0O2O001O000010O001O10O01O001N101O0O2N1O2N2M4O1N3M1O1O1UO`oNdLaP1Y3`oNhLaP1U3aoNkL`P1P3coNQM^P1i2goNWM[P1e2foN\\\\MZP1b2hoN^MYP1`2hoN`MYP1_2goNaMYP1^2hoNaMZP1]2goNcM[P1Z2foNfM[P1Y2eoNgM\\\\P1W2eoNiM\\\\P1V2doNjM]P1U2coNkM^P1T2boNlM_P1S2aoNmM`P1R2`oNnMbP1P2^oNPNcP1n1^oNRNbP1n1_oNQNbP1n1_oNQNbP1n1^oNRNcP1m1`oNPNbP1n1^oN^MWOa0]Q1n1[oNgMXO7^Q1Q2YoNYNhP1f1VoNTN[OC`Q1X2VoNVNQQ1i1nnNXNSQ1f1nnN[NSQ1c1lnN_NTQ1_1mnNbNSQ1]1lnNfNRQ1Y1onNhNQQ1W1onNiNQQ1V1QoNjNoP1U1QoNkNPQ1S1RoNmNnP1R1RoNoNnP1P1SoNPOmP1o0SoNSOlP1k0VoNTOkP1k0UoNUOlP1i0VoNWOkP1g0VoNXOkP1f0XoNXOjP1f0WoNYOjP1e0YoNYOgP1f0\\\\oNWOfP1g0^oNQOiP1j0_2PORo\\\\1\"}}, {\"image_id\": 122, \"category_id\": 1, \"bbox\": [424.0, 645.0, 35.0, 109.0], \"score\": 0.9069292545318604, \"association_id\": 6, \"light\": [-2.5713467597961426, -1.9673274755477905, 2.533358097076416, 1.8424452543258667], \"segmentation\": {\"size\": [1200, 1129], \"counts\": 
\"daa?c0gT1f0[O=E5IXNblNk1kR1e0L4`mN\\\\MRR1g2mmNZMPR1g2PnN[MnQ1f2RnNYMoQ1f2RnNZMoQ1e2PnN\\\\MoQ1d2SnN[MmQ1c2UnN]MlQ1a1XnNhNOGhQ1X1`oNhNaP1V1`oNjN_P1V1boNjN]P1V1doNkN\\\\P1S1eoNmNZP1T1foNlNYP1V1foNjNZP1V1foNjNYP1X1foNhNZP1X1foNhN[P1a0mmNLj1CYP1a0nmNIk1FVQ1NPmNOj13]S10000O101O0O1Od]`h0\"}}, {\"image_id\": 123, \"category_id\": 1, \"bbox\": [539.0, 227.0, 207.0, 337.0], \"score\": 0.999992311000824, \"association_id\": 1, \"light\": [-2.773355007171631, -1.6094236373901367, 2.6731553077697754, 1.4593079090118408], \"segmentation\": {\"size\": [1200, 1600], \"counts\": \"^ogc02WU1:G:J5J7J4K5J8I7eQO[N`h0j1]WOYN]h0n1`WORN[h0V2bWOjMZh0^2RWO]MkK6nl0c2iVOeMWLIll0g2fVOfM]LCjl0l2aVOhMdL\\\\Ohl0P3UVORNTMnNbl0W3kUOXNbMaN`l0]3gUOVNiM]N]l0b3eUOTNnMZN[l0e3aUOVNTNUNXl0i3`UOUNXNRNUl0m3`UOSN[NPNSl0Q4_UOPN_NnMok0W4^UOmMcNlMmk0[4[UOlMhNiMhk0c4YUOhMPOdM^k0P5\\\\UO_MVOaMVk0Z5_UOXM\\\\O]Mmj0e5aUORMBYMgj0n5^UOoLLRMcj0g9]UOYFcj0g9]UOYFdj0f9]UOYFdj0f9\\\\UOZFdj0f9]UOYFdj0f9]UOZFbj0U8fUO`HH[Obj0P8nUObH@^Ocj0k7SVOeHZO@cj0i7WVOeHWOAcj0g7ZVOfHSOCcj0h7YVOeHTOCcj0h7YVOeHTOCcj0h7ZVOdHSODcj0i7XVOdHUOCcj0i7XVOdHUOCcj0i7XVOdHSOEej0i7UVOcHVODej0l7PVObHZOCfj0_3aUO5;jL^OBfj0Z3PVOjMVOe0a0gNBAfj0T3WXOZN]MSOF_Ofj0P3dXOZNhLZON\\\\Ofj0n2jXOB`L`Mfj0l2mXOC]LaMfj0j2QYOCYLcMej0h2WYOCSLfMfj0e2]YOAmKjMfj0d2`YO@jKlMfj0b2dYOAeKmMgj0b2eYO@dKnMgj0b2fYO_OcKoMfj0b2iYO^OaKPNfj0b2kYO\\\\O_KRNgj0`2nYO[O[KTNij0^2QZO[OVKWNij0]2TZOZOSKYNij0Y2ZZO[OnJ\\\\Nhj0V2_ZO\\\\OiJ^Nij0R2eZO\\\\ObJbNjj0_1UUO]Ng5^1ZJfNkj0S1h[O3]IjNlj0l0Q\\\\O6TImNnj0f0U\\\\O:mHPOUk09W\\\\Od0dHSO`S1j0`lNVOdS1e0^lNZOeS1c0[lN]OhS1?YlNAjS1:YlNDjT100000_nj0OaQUO100N22M10C482N10000O100000O10O1000000000O10000000000000000000000000000O1000O1000000000000O1000000000O10O100000000O1000000000O01000000O10O100000000O10O1000O1000O10O10000O1000O01000000O1000000O2O0O4LZXXo0\"}}, {\"image_id\": 123, \"category_id\": 1, \"bbox\": [773.0, 0.0, 453.0, 1019.0], \"score\": 0.9999985694885254, \"association_id\": 2, \"light\": [-1.0755094289779663, -2.7574503421783447, 
0.8905140161514282, 2.5602004528045654], \"segmentation\": {\"size\": [1200, 1600], \"counts\": \"ciZl02\\\\U16J6I7I6J7G8H9F9F:J5L4K5M2N101O0O2O001O001N101O001N101N2O0O2VGSMo^Om2W`0WN__Oj1W`0gN`_OZ1\\\\`0PO^_OQ1^`0XO\\\\_Oh0a`0@Z_O`0a`0I[_O7``04Z_OM_`0>\\\\_OB\\\\`0l0^_OTO]`0Y1[_OgNa`0d1X_O\\\\Ne`0l1V_OTNg`0S2U_OmMi`0X2T_OiMi`0\\\\2T_OdMj`0a2S_O_Mj`0g2S_OYMj`0o2R_OPMi`0Z3R_OfLi`0f3P_OZLf6[OgKe4_MoKe61YKW4nMhKe6b0mJl3[NbKe6j0iJh3aN^Kc6o0gJg3dNZKb6U1eJe3gNVKb6Z1bJd3kNQK_6c1`Ja3nNlJ_6k1[J_3SOfJ]6U2WJ]3XO^J]6`2oI\\\\3_OTJ^6j2gI]3FiI`6Q3bI]3KbIa6V3^I^3O[Ia6\\\\3ZI^33VIa6`3XI^35RIb6c3UI_37nHc6f3RI_3<iH`6m3nH^3b0dH^6S4jH^3h0]H\\\\6[4cH_3Q1THZ6d4UHe3a1eGW6Wb0jIf]OS6`b0mI]]OP6ib0oIU]Om5Sc0QJk\\\\Ol5\\\\c0RJb\\\\Ol5bc0RJ^\\\\Ol5fc0QJ[\\\\On5hc0oIY\\\\Oo5kc0nIV\\\\OQ6mc0lIT\\\\OS6oc0iIS\\\\OV6Pd0gIQ\\\\OW6Sd0eIo[OZ6Td0bIn[O]6Ud0_Im[O`6Vd0]Ik[Ob6Yd0YIi[Oe6\\\\d0WIe[Og6bd0RI`[Ok6ld0iHW[OT7Ue0^HP[O_7\\\\e0PHlZOk7_i0M3M3N2N2N2N2N2M3N2MVEbHhG[7Y8iHeGS7\\\\8SIaGi6a8ZI^Gc6c8aI[G]6f8fIXGY6j8eIWGZ6m8aIUG^6n8^ITGa6P9ZIRGe6R9TIRGk6jc000O1O100O1O1N2O1N2N2N2N2M3M3M3N2N2N2O1N2O1O1O1O1O1O100O1000000000000O1000000000000000000000000000001O00000000000000000000000000000000000000000000000000000000000000jB`J\\\\J`5\\\\5nJ^JR5X5^KbJb4Q5SLgJm3g4PMnJP3]4oMYKQ2_4^NZKb1`4jNZKV1b4SOYKm0c4[OYKe0d4AYK?d4HXK8d40WK1d4:VKF`4k0YKUOZ4b1\\\\K^NV4^2WF[IZOW4V:o3\\\\DTIP1m2]:f8TFZGf9d8bF\\\\GZ9f8hFZGT9i8mFWGo8l8RGTGh8R9XGnF_8Z9bGfFT8c9mG]Fe7o9]HQFU7\\\\:lHdEh6e:[I[EZ6n:hIRES6Q;oIoDl5T;VJlDf5X;ZJhDb5[;_JeD]5^;dJbDV5c;kJ]Dm4k;RKVD`4W<aKiCo3e<SL[CW3Z=jLfBc2k=_MUB[2P>fMPBU2U>jMlAQ2X>PNhAl1[>UNeAe1a>[N`A^1e>bN\\\\AV1k>kNUAo0P?bMZ[OmMf5\\\\4T?hMY[OhMd5[4W?mMY[OeM`5Z4Z?RNX[ObM^5Y4\\\\?VNX[O^M]5Y4]?ZNX[O[M[5X4`?]NV[OZM[5U4a?bNV[OUM[5U4c?fNT[ORMZ5V4c?hNV[OnLY5W4d?kNV[OiLY5Y4b?oNX[OcLX5\\\\4a?ROZ[O]LW5`4_?TOfAj0[>WOfAg0Z>ZOfAe0Z>\\\\OfAb0\\\\>^OeA`0\\\\>@eA>\\\\>BeA;\\\\>FeA7]>JcA3_>McAN`>2aAJb>6_AEd>=]A\\\\Oh>d0ZAWOi>i0YAROj>n0ZAlNg>V1]AcNe>]1_A]Nb>e1bATN_>m1fAlM[>V2iAbMY>_2mAYMU>h2UBkLl=V3`B[Lc=f3fBmK\\\\=T4P:1OO100O1O1O1
O1N2O1O1O1O010N2O1N2K4M4O1O1O00100O100O1O1O001O1O100O010000000000O10000000000O100000000000000O100000O10000000O100000O10000000O01000000O10O10O100O100O10000O10000000000O100000000000000O101N1O100N3O0O1O100O101O0O100O1O1O1O2N100O101N101O001O1N101O001O0O100O1O1O1O1O2N100O100O2O0O2N1O2N2N2N2N1O2O0O2O1N2N1O3L3N2O1N2N2O1N2N2N;@V^e=\"}}, {\"image_id\": 124, \"category_id\": 1, \"bbox\": [390.0, 73.0, 229.0, 371.0], \"score\": 0.9999998807907104, \"association_id\": 2, \"light\": [-1.8832952976226807, -2.0687482357025146, 1.7391412258148193, 1.937930703163147], \"segmentation\": {\"size\": [533, 651], \"counts\": \"d\\\\[68\\\\`02N2N2M2O2N2O0N3N1O2O0O2O1N1O2O001N100O2O001O1O1N10@g@EX?;j@CV?<m@CQ?>o@CP?=QABo>>QACn>=SADk><UAEj>;WADh>=XADf>>YACf>=ZADe><\\\\ACc>>]ABc>?\\\\ABc>>]ACb>>^ABa>>_AC_>>bAA^>`0bA@]>b0cA\\\\O]>e0cAZO]>f0dAYO\\\\>g0dAYO\\\\>h0gATOX>m0kAPOU>P1lAoNT>R1lAlNU>T1f01O00001O001O00001UJgNXLY1a3VOWLk0^3E\\\\L;[31bL0[34cLL[38cLH[3;dLFY3>fLBW3a0hL_OV3d0iL\\\\OU3g0jLZOT3h0lLXOR3j0mLVOR3l0mLTOS3m0lLTOR3o0lLQOT3P1kLPOT3T1iLlNW3W1fLjNX3b1]L^Nc3l1SLUNl3^2`KcM_4e2[KZMd4P3TKPMk4Y3lJ`KfMf0[7n3oJWKcMQ1\\\\7l3TKZLi4k3PKYLn4n3gJYLW5P4WJZLh5n3gI\\\\LX6h3aI\\\\L_6g3ZI]Le6h3jHeLV7]60001O0O100000000O10000O100O00100N2O1N2O1O1O10O01O01OO10000N1O1M4H8J6N2O2O2M3L3M4M3N2O1N3N2N2M2O001O000O1000000000000000cIQKT1o4jNXKQ1h4lN`KABhLn4c3iK]OEgLb4X3dLHPOlL\\\\4S3PMLiNmLX4U3SMJjNnLS4V3VMJkNmLo3X3XMGoNlLk3\\\\3ZMCPOmLg3^3^M@]4>jK[OX4c0mKYOT4e0PLXOQ4h0RLSOP4n0SLlNo3U1ULcNn3FfHIa39k3LkHG\\\\38k3OoHEY38i32QIEV37j33RIGU31k35UIIV3Hh3<VIMb3iNd3X1mH0g;N\\\\D4b;J`D7^;IcD8\\\\;FhD9W;FkD:T;DoD<Q;BQE?n:^OTEc0l:ZOVEf0l:UOWEj0m:lN[ES1V=M2O1N1O010O001O1O1O1O3M3M1O2N100O1O2O2M3N1N100O2O013L3L10O0001O00002MS^`0\"}}, {\"image_id\": 124, \"category_id\": 1, \"bbox\": [100.0, 105.0, 214.0, 377.0], \"score\": 0.9999992251396179, \"association_id\": 1, \"light\": [-1.8383949995040894, -2.2993838787078857, 1.7525489330291748, 2.0986063480377197], \"segmentation\": {\"size\": [533, 651], \"counts\": 
\"V_d1P1b?4L5M2M2O1n@dNj>]1UAeNi>\\\\1XAdNf>]1^A`N`>a1`A_N_>b1aA_N^>a1bA_N^>a1bA`N]>`1cA`N]>`1dA^N\\\\>c1;00000000000O10000O100O1O0O2N2_JlN_KV1[4SOaKm0^4WO_Kj0`4XO^Ki0a4YO^Kf0c4[O[Ke0e4^OXKa0j4BRK=P5GjJ8X5LcJ4_5O\\\\J1f51VJ:`5I^J=[5EcJ>Y5FeJ;X5IfJ7Y5LeJ4Y51cJ0\\\\54aJL^56aJJ_58_JHa5:\\\\JGd5;YJFg5=UJCl5P1_IRO`6_1mHdNS7c1dH^N]7d1_H^Na7d1\\\\HSMVOG]8Y3ZHPMYOH\\\\8\\\\3VHmL^OH[8g3iGbLNGX8_5hGdJU8]5lGcJR8^5nGcJP8^5PHbJQ8]5PHcJo7^5oGcJR8]5nGcJQ8^5nGcJR8]5mGcJT8\\\\5mGdJR8]5mGdJS8o6O001O1N2M2O100000001O0O1N3N1O101O00001O1N1OWNbH_J\\\\7b5dH_J[7a5fH^JZ7b5gH^JY7b5fH^JZ7c5eH^J[7b5eH]J\\\\7c5cH]J^7b5cH^J\\\\7c5dH\\\\J]7d5cH\\\\J]7d5bH\\\\J^7e5bH[J]7e5dHZJ]7f5bHZJ^7g5bHYJ]7_7O2N11O000YNeHUJ\\\\7i5fHVJZ7g5iHZJW7c5lHZJV7e5kHSJ^7k5dHRJ_7l5bHTJ_7h5eHXJ[7a5lH^JU7h4eIXKZ6b4mI]KT6`4oI`KQ6^4QJaKP6]4RJbKo5^4QJbKo5]4RJcKm5^4TJaKl5^4UJbKk5^4UJbKj5^4WJaKj5_4WJ`Ki5`4WJaKf5b4YJ^Kf5e4XJ\\\\Kc5i4\\\\JXK_5m4aJRKZ5U5dJkJY5Y5fJgJX5[5hJeJV5^5iJaJW5a5hJ^JX5e5fJYJ[5j5cJUJ\\\\5S6^JlI`5^6ZJiI\\\\5]6bJdIZ5\\\\3eIYNR1ZNV5\\\\3PJWNj0[NU5\\\\3UJYN`0aNY5T3[J[N5gN^5k2dJi0Z5SOlJl0S5QOTKm0j4QOZKn0d4QO`Kn0_4QOcKo0\\\\4POfKo0Z4oNhKQ1X4mNlKQ1T4mNoKR1Q4lNRLS1n3lNULR1k3mNXLQ1h3nNZLQ1f3nN\\\\LQ1d3oN^Ln0c3RO^Lm0b3SO`Lk0`3UObLh0_3XObLg0^3YOdLe0\\\\3[OfLb0[3@dL`0[3CdL;\\\\3HcL7\\\\3NbLO^3<ZLBe3`0\\\\L]Od3d0]LZOc3g0]LXOc3h0_LVOa3j0bLbMbM\\\\Ok5R3iLTMfMHa5T3lNkLT1U3nNiLR1W3oNhLQ1Y3oNfLQ1Z3POeLQ1Y3ROeLn0[3TOcLl0]3UObLk0j0\\\\J<l4hNh0h0eJ;d4lNh0e0kJ:^4QOg0c0PK9Z4SOg0a0SK9X4UOf0?VK8V4XOf0<YK9S4ZOf07]K;P4\\\\Oj0N]Kc0k3^On0]OeKR1_3_O\\\\3>hL^O\\\\3>lLZOW3c0`6M3M4L_V_5\"}}, {\"image_id\": 124, \"category_id\": 1, \"bbox\": [303.0, 85.0, 169.0, 278.0], \"score\": 0.9999991059303284, \"association_id\": 3, \"light\": [-2.0004258155822754, -2.245900869369507, 1.9497029781341553, 2.0678341388702393], \"segmentation\": {\"size\": [533, 651], \"counts\": 
\"mQn43a`02N101O0000001O000000000000001O0000001O01O01O0BJ\\\\@7Z?9`@G]?j0N2N2N200O1O001O1O1N1SLhN^HX1_;1bJbNdKU1gJPOb90eKQ1iJoN^95gKl0kJoN^97fKi0lJQO]97eKS1\\\\4mNcKT1]4mN^KV1c4jNZKY1e4hNYKZ1h4eNVK\\\\1k4dNSK]1o4bNnJa1S5^NkJT1cKkNd9OfJW1gKjNe9NbJX1kKjNc9NaJW1mKkNd9M^JW1oKlNe9LZJY1RLkNd9KYJZ1TLlNd9IVJ\\\\1WLjNe9IRJ^1YLjNe9GRJ^1ZLkNg9CPJ`1oKeN48Q>R1lAiN06R>Q1oA[OQ>d0PB]On=c0SB]Ol=c0UB]Oj=c0WB]Oi=c0XB]Og=b0[B]Od=d0]B\\\\Oa=c0bB]O\\\\=`0iB@V==nBCQ=<QCCo<=RCCi9VO`HV1iMDf9WO_HU1mMDd9XO[HT1TNDa9YOXHS1YND^9ZOXHR1[ND^9YOVHS1]ND]9YOTHT1`NC]9WORHV1cNC[9VOQHV1hNCj;>WDAi;?XDAg;?ZDBc;`0^D_OV9_O[GT1@^OcNZOX:6cGR1C_O`NZOZ:6`GR1GEi8YO_GQ1JFh8XO]GQ1NFe8YO[GR11De8ZOXGS15Bd8ZOUGU19_Oc8]ORGU1;^Oc8^OPGT1?]Oa8@lFT1e0\\\\O_8AhFT1l0ZO]8CbFU1S1VO[8H^FS1X1UOZ8KXFS1_1QOX85hES1P2gNY8^2hGaMW8`2iG_MW8b2jG\\\\MU8h2jGWMU8l2iGTMV8n2iGQMW8R3hGmLX8T3hGjLX8X3gGgLZ8[3eGcLg6FjHm39]Lk6;XH`3f0TLR7?VH_3f0RLS7b0TH^3i0oKS7d0QHa3j0jKV7f0lGe3m0dKW7h0gGk3o0]KZ7j0ZGV4Z1PK\\\\7T6cHlI\\\\7V6cHjI]7X6bHgI^7[6`HeI`7]6^HcIb7^6^H`Ic7a6\\\\H_Id7a6]H^Ic7c6\\\\H]Id7c6^H[Ib7f6V1002O0O3M1OQNgFdLW9Z3QGaLn8\\\\3XGcLf8Y3`GhL]8U3hGmLU8VOnFi2R1QN_OdN`7`0SHg2R1TNSOmNh75WHf2Q1\\\\NfNnNS8MZHd2Q1ZOf6fMgHj2g0_Oj7=YHBi78\\\\HFg74^HKe7nNaIo0i6aNaI[1o:L3N2N3M2O2N2M2O2M3L8H8H4K5K5Jbkl2\"}}, {\"image_id\": 125, \"category_id\": 1, \"bbox\": [259.0, 33.0, 524.0, 663.0], \"score\": 0.9728025197982788, \"association_id\": 1, \"light\": [-2.0413410663604736, -2.0374653339385986, 1.8488671779632568, 1.8117969036102295], \"segmentation\": {\"size\": [768, 1024], \"counts\": 
\"\\\\mR63jg05K5M3M2O1N100O2O000O10000O100O100O00100O100O10000O10000O0100O001O00001O00000001O0010O0100O010O10O00010O00001N1O101O00001O01O0100O010O0001O000000O01O100O101O0010O010O01O010O00001O0mDTOQOn0m0ZOlMLoFo0R;\\\\O`Mk1^2\\\\N[Mf1c2aNWMa1e2eNYMZ1d2kNZMV1d2mN[MR1d2QO[Mo0d2RO[Mn0d2UO[Mk0d2WO[Mh0f2WOZMj0e2WO[Mh0e2XO\\\\Mg0d2YO\\\\Mh0c2XO^Mg0b2YO^Mg0b2YO_Mf0`2[O`Md0`2]O`Md0^2\\\\OcMd0\\\\2]OdMc0[2^OfMb0X2_OhMa0W2@jM`0T2@mMa0Q2@PN?o1ASN?j1BWN?g1A[N>d1B^N>_1BcN?[1AgN>W1DiN=U1CmN<Q1FoN;n0GRO9m0HRO9n0GQO;m0FRO;n0EQO=n0CRO=n0CRO>m0BTO>j0BWO>h0BYO?e0B\\\\O>b0B@>=CD=<CE=9DG=8CH>6BL>3CL>3BL?5@Ka04@Jb05_OIb06_OIc06^OIc05_OJa05@L`02BoIaNd1n1[4CeIUOf1W1d4EeIZOc1Q1g4FeI\\\\Oa1n0i4HdI]Ob1k0i4HeI_Oa1j0h4HfI@a1h0h4IfIA`1g0j4GfID_1e0j4GgIF^1d0i4FiIH\\\\1c0j4FhIJ]1`0j4FiIMZ1?j4EkIOZ1<j4FiI3Z18k4FiI8Y12l4GiI;Y10l4FiI>X1Mn4EhIb0Y1In4FhIc0X1Ho4EhIf0X1Eo4FhIg0X1Dn4FhIi0X1Bo4FhIi0Y1An4HgIi0Z1_On4IgIi0[1^Om4KeIi0^1\\\\Ok4NeIh0_1ZOk4OdIi0a1XOh43eIg0b1VOg46eIe0d1VOd48fId0d1UOe4:cId0g1ROd4=cIb0i1QOc4`0aIa0k1oNc4c0_I`0m1mNc4e0^I`0n1kNc4h0[I`0Q2hNd4j0XI`0S2fNd4l0WI`0T2dNd4P1nHd0\\\\2]Ne4R1gHh0c2VNd4g6[KZIc4h6\\\\KXIb4j6^KVI`4m6^KTI`4o6_KQI_4Q7`KQI^4Q7aKoH^4R7aKoH^4S7aKmH^4T7aKmH^4T7aKnH]4T7aKmH^4T7bKlH]4U7bKmH[4U7eKkHY4W7fKkHW4X7hKhHU4[7kKeHS4]7lKeHQ4]7oKbHP4a7oK_HP4b7PL^Hn3d7RL\\\\Hm3e7SLZHm3h7RLXHm3i7RLXHm3j7RLVHm3l7RLSHm3P8RLPHm3R8RLnGl3U8RLlGm3W8QLhGn3[8QLeGn3^8PLbGo3`8oKaGP4a8oK_GP4c8nK^GQ4c8oK]GQ4d8mK\\\\GS4\\\\2ZJMb1gMT4o1mJ4n0nMT4i1[K2a0UNT4g1cKO7[NU4d1hK03\\\\NU4a1lK3M]NW4]1PL5I^NV4[1UL7D^NW4W1ZL:^O`NW4U1^L;ZO_NY4T1`L=UO`NZ4S1bL=TO`NZ4R1dL>POaN[4Q1fL?nN`N\\\\4P1hL?lNaN[4P1jL`0jN`N\\\\4o0kLb0gN`N^4l0mLe0dN_N_4k0oLf0aN_N`4i0QMi0^N]Nb4h0RMk0\\\\N]Nb4e0UMo0XN\\\\Nc4`0ZMU1RNZNe4=]MZ1mMYNf4:`M^1iMWNh47cMb1eMWNh45eMe1bMUNj44gMg1^MTNl43hMi1\\\\MTNl42iMk1ZMRNo40iMn1XMRNQ5MhMQ2VMRNT5JgMU2UMQNU5GhMX2SMQNW5BiM]2PMPNY5@iMa2mLoM\\\\5YOlMh2hLoM]5SOPNn2dLnM^5mNSNU3_LmMa5gNUN\\\\3ZLmMc5bNVNa3WLmMd5^NWNf3ULlMg5YNVNk3SLkMj5VNUNo3QLkMk5TNUNQ4QLjMl5QNUNU4oKiMP6mMSNZ4mKiMR6hMTN_4jKhMW6bMRNf4gKgM`6mLWN\\\\5Y
KgMe<X2]CfMd<Z2\\\\CfMd<Z2\\\\CeMe<[2[CeMe<[2[CeMe<[2[CdMf<\\\\2[CcMe<]2[CcMe<\\\\2]CcMc<]2]CcMc<]2]CbMd<^2]CaMc<_2]CaMc<_2]CXKZOi0Y=o3]CXK]Of0V=R4^CVK@d0S=V4]CUKCc0P=X4]CTKEc0n<Y4^CSKEc0m<Z4^CRKFc0m<Z4^CSKFb0l<[4_CQKGc0j<\\\\4_CQKHb0i<]4_CQKHa0j<_4]CoJJb0i<_4^CnJHd0j<_4]CmJIc0j<b4]CiJJe0i<c4]CdJLi0h<c4]C\\\\J2Q1a<d4]CYJ3R1a<f4\\\\CVJ4T1`<f4]CTJ3V1a<g4[CRJ5W1`<h4[CoI6Y1_<i4[ClI7[1^<i4hDWKY;j4fDVKZ;k4eDUK[;l4dDTK\\\\;n4bDRK^;Q5^DoJc;T5ZDlJf;V5WDkJi;X5TDhJl;[5PDeJQ<^5kCcJU<b5dC`J\\\\<d5_C\\\\Jb<h5XCYJi<j5RCXJn<j5_BfHJd1h=h5ZBeJg=[5XBfJh=X5XBiJi=U5XBlJh=S5XBmJi=Q5WBQKi=n4VBTKj=j4VBWKk=h4UBYKk=f4UB[Kk=d4UB]Kk=b4UB^Kl=a4TB`Kl=_4TBbKl=]4TBcKm=]4RBdKn=Z4SBfKn=Y4QBhKP>W4PBiKQ>V4oAkKQ>T4oAlKR>S4oAmKQ>R4oAnKR>R4mAoKS>P4nAoKS>P4nAPLR>P4mAQLS>n3nARLR>n3nAQLS>n3mASLS>m3mASLS>l3nASLS>l3mAULS>k3mAULS>j3mAVLT>i3mAVLT>i3lAXLT>g3lAYLU>e3mAZLT>e3mAZLT>d3mA]LS>b3nA]LT>a3mA_LS>`3mAaLS>^3nAaLS>_3mAaLS>^3nAbLR>^3nAbLR>^3nAaLS>^3nAbLR>^3mAcLS>]3mAbLT>^3lAbLT>]3mAbLT>^3lAaLU>_3kAaLU>_3kA`LV>_3kA`LV>`3jA`LV>`3jA_LW>a3iA_LW>`3jA`LV>`3jA_LX>_3iAaLW>_3iAaLW>^3jAbLV>]3kAbLV>^3jAbLV>]3kAcLU>\\\\3kAdLV>\\\\3jAdLV>[3kAdLW>Z3jAfLV>Y3kAfLV>Y3kAfLV>Y3kAgLV>W3jAiLX>U3iAkLX>T3hAkLZ>S3gAmL[>P3fAoL\\\\>P3cAQM_>l2bATM_>k2aAUM`>i2aAVMa>i2^AXMc>f2^AZMc>e2]A[Md>c2\\\\A^Md>b2\\\\A^Me>a2[A_Me>a2ZA`Mf>_2[AaMe>_2[AaMe>_2[AaMe>_2ZAbMf>]2[AcMf>\\\\2ZAdMf>\\\\2ZAdMf>\\\\2ZAdMf>[2[AeMe>[2[AeMe>[2ZAeMh>Z2XAfMh>Y2YAgMg>Y2YAgMg>X2ZAhMf>X2ZAhMg>V2ZAjMf>V2ZAjMf>U2[AkMf>T2ZAlMf>S2ZAnMf>Q2[AoMf>P2ZAPNf>o1[AQNf>m1[ASNe>l1\\\\ATNe>k1[AUNe>k1[ATNg>j1YAWNh>h1XAXNi>g1WAYNj>f1VAZNk>e1UA[Nl>d1TA[Nn>d1RA\\\\Nn>d1RA\\\\No>c1QA\\\\NQ?c1PA\\\\NQ?c1o@\\\\NR?d1n@\\\\NS?b1n@^NR?b1n@]NT?a1m@_NU?]1m@cNT?Y1o@gNS?T1PAkNS?P1PAPOS?f0TAZOY?NRA2^c000O10000000000O1000000O1000000O10001N10000O1O100O2O0O100O101N1O100O101N100O2N101N3M2M4M3L8DfXd5\"}}, {\"image_id\": 126, \"category_id\": 1, \"bbox\": [398.0, 534.0, 131.0, 284.0], \"score\": 0.9999998807907104, \"association_id\": 3, \"light\": [-2.381636142730713, -2.225397825241089, 
2.2714712619781494, 2.115941286087036], \"segmentation\": {\"size\": [960, 1280], \"counts\": \"fle;1nm02O1O1O001O001O100O1O00011N1O1O010O4L2N001O0gIIj^O7k`04T_OMe`0:Z_OFc`0>\\\\_OCa`0`0^_O@``0c0__O]O``0e0__O\\\\O^`0g0j\\\\OROXO7mc0i0`\\\\O\\\\OAKoc0j0Y\\\\OBGEnc0l0U\\\\ODL@nc0R1gZOfN@[1j1mNnc0^4R\\\\OcKmc0^4Q\\\\OcKmc0_4S\\\\OaKkc0a4U\\\\O_Khc0d4X\\\\O]Kec0e4[\\\\O[Kdc0f4\\\\\\\\OZKbc0i4\\\\\\\\OXKbc0j4^\\\\OVK_c0m4a\\\\OSK]c0P5b\\\\OPK[c0S5d\\\\OnJYc0V5f\\\\OjJYc0W5g\\\\OiJXc0Y5f\\\\OhJZc0Y5e\\\\OgJ[c0Z5d\\\\OeJ]c0[5c\\\\OeJ]c0\\\\5b\\\\OdJ^c0^5a\\\\OaJ_c0a5_\\\\O_Jac0d5\\\\\\\\O\\\\Jdc0g5Q\\\\OaIPOf0Pe0m5d[OfI[O=Qe0V6T[OeIK4Qe0U7oZOkHQe0V7nZOiHSe0W7lZOjHTe0V7lZOjHTe0V7lZOiHUe0X7iZOiHWe0V7jZOjHVe0V7iZOjHXe0V7hZOjHXe0o7000O100O100000001N101O2M2O2M2O1mNZZOjIhe0U5dZOXJHb0he0n4X[ORKjd0j4W[OWKjd0g4W[OXKkd0f4V[OZKkd0d4V[O[Kld0b4V[O^Kkd0`4W[O^Kmd0\\\\4V[OdKmd0W4U[OiKnd0T3aYOmLc1Mod0h2WZOlLl0;Qe0_2_\\\\OaMdc0X2a\\\\OfMcc0S2a\\\\OmMac0o1a\\\\OPNbc0m1_\\\\OSNdc0i1]\\\\OVNfc0h1[\\\\OWNgc0f1Z\\\\OZNhc0d1X\\\\O[Njc0d1V\\\\O\\\\Nlc0a1U\\\\O_Nmc0_1S\\\\OaNQd0[1P\\\\OcNVd0W1k[OiNYd0R1h[OnN[d0n0f[ORO]d0k0c[OUOcd0e0^[OZOgd0a0Y[O_Oid0?X[O@kd08Z[OH`i0O0O2O1O0O2O001O1N2O1N2M\\\\Yoe0\"}}, {\"image_id\": 126, \"category_id\": 1, \"bbox\": [938.0, 477.0, 129.0, 380.0], \"score\": 0.9999999403953552, \"association_id\": 1, \"light\": [-1.2445437908172607, -2.6809186935424805, 1.169887661933899, 2.594993829727173], \"segmentation\": {\"size\": [960, 1280], \"counts\": 
\"Rn_k0>\\\\m0b0@;D8G9G6M4L4M2L9H7J4M2L?A8lXOiLSc0a3\\\\\\\\OjL`c0]3U\\\\OkLic0b3eZOdMZe0b5O1O1O2QZOSHge0T8O2N2N3L3N1O002N3M1O1N3[[OWGoc0f9G4KVNQ\\\\OfIlc0[6U\\\\OfIgc0[6\\\\\\\\OeIac0Y6c\\\\OiI\\\\b0XNR^Oj7IoIRb0eNg]OS7c0XJda0lN`]Oh6S1]J[a0Z6k^OeIUa0V6P_OkIo`0j5]_OUJc`0a5g_O_JX`0\\\\5n_OeJQ`0W5T@hJl?R5\\\\@lJd?Q5_@oJa?o4b@PK^?P5c@PK\\\\?o4g@PKX?o4l@nJT?R5l@nJT?S5l@mJT?R5m@mJS?S5n@mJQ?T5n@lJS?V5k@jJU?b5^@^Jc?f5Y@YJi?i5T@VJn?_6[_OaIg`0c6T_O\\\\Im`0g6o^OYISa0i6j^OVIXa0m6e^OQI_a0P7]^ObGjN[1kb0S7\\\\^OlHea0U7Z^OjHfa0X7]^OcHda0]7`^O^H`a0d7^^O\\\\Hca0f7Z^OZHga0h7W^OWHja0k7S^OUHma0m7Q^OTHna0m7Q^OSHPb0n7o]OQHRb0o7m]OQHSb0P8m]OoGSb0Q8m]OoGSb0R8l]OnGTb0R8m]OmGSb0S8m]OmGUb0P8l]OPHVb0m7l]ORHUb0k7m]OUHUb0h7l]OXHUb0f7m]OYHTb0f7l]OZHUb0U5i\\\\OYLS1bNVb0m4R]O[Li0iNWb0a3`^OYM_OUOTb0Q3[_OTMgNKPb0h2i_OnL]N:ma0c2m_OcLbNi0da0_2UBaMm=[2VBdMl=X2WBgMk=T2YBkMk=k1[BUNm=]1ZBbNk=S1[BmNi=k0[BUOXf001O001O1O001O001O1O001O001O1O000O2O0O2O0O2NckV6\"}}, {\"image_id\": 126, \"category_id\": 1, \"bbox\": [833.0, 482.0, 123.0, 365.0], \"score\": 0.9999840259552002, \"association_id\": 2, \"light\": [-2.4673476219177246, -1.7701802253723145, 2.4308712482452393, 1.6633667945861816], \"segmentation\": {\"size\": [960, 1280], \"counts\": 
\"W`]h0k0Pm0;G7H9F:G6K5K6K6J5K3K6J8I6L3L5J7J5L3M4M5J3S[ObKS`0a4i_OdKS`0a4i_O`KT`0f4g_O]KU`0h4g_OZK`?@S]O[5\\\\3VKU?3P]OQ5f3nJW?R6d@nIZ?X6a@iI_?Z6^@YIhL1ib0l6Y@RIQM0eb0U7T@iHZM2ab0Y7Q@dH_M3`b0[7o_ObHbM2_b0`7k_O^HfM3^b0d7g_OYHkM3^b0g7d_OVHnM4]b0h7c_OTHPN4]b0l7^_OQHUN3]b0Q8Y_OlGZN3]b0T8W_OhG\\\\N4]b0W8T_OeG_N4]b0h8m]OXGGOK3`b0g9c]OVFM3`b0h9c]OTFL5ab0g9c]OUFH7eb0e9c]OeF]b0[9c]OfF\\\\b0Z9d]OfF\\\\b0Z9d]OfF\\\\b0Z9e]OeF[b0[9e]OeF\\\\b0Z9d]OfF\\\\b0Z9e]OeF\\\\b0Z9d]OfF\\\\b0Z9d]OfF]b0Y9d]OfF^b0X9b]OhFbb0T9^]OmFhb0l8Y]OSGQc0c8o\\\\O]GVc0^8k\\\\OaGXc0g6^\\\\ORJ:WOZc0Y6T]OTJBC\\\\c0T6X]OVJ]OF\\\\c0n5_]OXJUOJ^c0a5l]OaJfNN_c0X5U^OhJ]NO_c0T5Z^OkJWN1`c0l4c^OPKmM4ac0f3k_ORLdL8ac0^3U@WL[L;ac0[3X@XLXL<`c0[3Y@YLWL<`c0[3Z@WLWL>`c0Z3Y@XLWL>`c0Z3Z@VLWL`0ac0X3Y@VLWLb0cc0V3U@XLYLa0ec0V3Q@WL[Lc0ec0V3i@iLY?Y3d@fL_?Z3^@gLd?Y3Z@fLi?Y3U@gLm?Y3R@fLR`0W3m_OiLV`0U3j_OjLZ`0R3f_OnL^`0n2c_OQMa`0k2__OUMd`0h2]_OVMf`0h2Z_OXMg`0f2[_OYMg`0d2Z_O\\\\Mg`0a2[_O_Mh`0[2\\\\_OdMg`0U2]_OkMg`0i1a_OVNc`0]1g_OcN]`0S1i_OmN^`06X@IPg00O0100O01O1K5N2O10000000O10001N101O2M5KQn^9\"}}, {\"image_id\": 127, \"category_id\": 1, \"bbox\": [339.0, 83.0, 236.0, 316.0], \"score\": 1.0, \"association_id\": 2, \"light\": [-2.1420860290527344, -2.4031598567962646, 1.949711561203003, 2.110860586166382], \"segmentation\": {\"size\": [520, 644], \"counts\": 
\"oY\\\\5c0[?c0F4K4M3M3K8J5K3M3M2N2M4L8jBjMl;^3L3L3N1N4Mf0YO8I2M3M1O1O2N1O100O2O000O100O100N2O1gEYJo9X6TFhIL3S9X6kFTJS9b6O1N2N2N1M4M2N3N1O1O1N2O1O2O0O100O1O100O2N1O001O100O100O011O001N101O1O1O0O2O1O1O1N2O1O001O1N2O1O1O1N101O1O1O1N2O001O001O1O1N2O001O0000001O01O0001O1O1N101N101O00000010O0001O001O1N101O000000001O0O1000001O00YG_Ih7`6THmId7R6ZHRJe7m5YHVJf7j5YHXJf7h5YHZJg7e5XH_Je7a5ZHbJd7^5\\\\HcJc7]5\\\\HeJc7[5]HeJc7\\\\5[HeJY4mNbN^6TMgJR4POlNY6QMhJk3TOWOU6lLhJi3SO_OU6fLjJh3POFW6^LlJj3gN0]6ULnJQ3cN_M1^3^6PLPKn2kN\\\\MFj3`6jKQKk2[O\\\\1Y7aNiH`1W7^NkHa1W7\\\\NjHd1V7[NkHe1V7XNlHi1T7TNnHl1R7RNPIn1P7QNRIn1o6oMUIo1k6PNXIn1i6PNYIn1i6nM[IQ2f6dMeI[2\\\\6\\\\MlId2V6WMnIh2T6TMoIk2S6PMQJo2Q6gLWJX3^9O001O001O1O1O001O0O101O000O101N100O2O1N2O1O0O2O001O0O3M1O2N2O0O2O0O3N1N2O1N1O2O1N2O1N3N1N2O1N2N2N2N3M2O1N2N2N4J]gR1\"}}, {\"image_id\": 127, \"category_id\": 1, \"bbox\": [189.0, 4.0, 82.0, 145.0], \"score\": 0.8397127389907837, \"association_id\": 3, \"light\": [-1.645167350769043, -2.6983108520507812, 1.4507813453674316, 2.4414427280426025], \"segmentation\": {\"size\": [520, 644], \"counts\": \"UQP38h?`0F4J7H;I3kNiNPCa1j<dNnBc1n<aNmBc1Q=o0K3M2N2N2N1O1O2N1O2N1O2N3M2N001O000mCULi;l3UDXLh;h3XDYLg;g3XD\\\\Lf;d3[D\\\\Ld;d3\\\\D]Lc;c3]D]Lc;c3]D^Lb;b3]D_Lc;a3]D_Lc;a3\\\\D`Ld;`3\\\\D`Ld;_3]DaLc;_3\\\\DbLd;^3[DcLe;]3[DcLe;]3[DcLe;]3[DcLe;]3[DcLe;]3[DcLe;]3[DcLe;]3[DcLe;]3[DcLe;]3[DcLe;]3[DcLe;]3[DcLf;\\\\3ZDdLf;\\\\3ZDdLf;\\\\3ZDdLg;[3YDeLl;V3TDjLV<l2iCUM\\\\<e2eC[M\\\\<d2cC]M^<b2bC^M_<a2aC_Ma<S31O3dL^CV2MVNV=e1kB[NX=a1hB`N^=Z1aBgNb=V1^BjNe=R1[BoNj=j0VBXOl=a11N4M2M4L4L4iNaA=f>SOcAm0k>02L6TOg@>\\\\im5\"}}, {\"image_id\": 127, \"category_id\": 1, \"bbox\": [5.0, 220.0, 181.0, 150.0], \"score\": 0.9999805092811584, \"association_id\": 1, \"light\": [-2.5550384521484375, -2.165264368057251, 2.376115322113037, 1.9579235315322876], \"segmentation\": {\"size\": [520, 644], \"counts\": 
\"ni2U2S>1N2O000@iMoBW2o<lMPCS2P=oMoBQ2o<SNoBm1n<XNPCh1o<ZNPCf1o<\\\\NPCd1P=]NoBb1R=_NmBa1R=aNmB^1T=bNlB^1T=cNkB]1U=cNkB\\\\1V=dNjB\\\\1V=eNjBZ1V=fNjBZ1V=gNiBY1W=hNhBX1X=iNhBV1X=jNjBT1V=mNjBR1V=oNjBP1V=ROiBn0U=S1000001OUNkBc0U=^OkBb0T=_OlB`0S=BmB=S=DmB;S=EnB:R=GPC6P=KRC2n<0QCOo<2PCNP=2QCNn<3RCLn<4TCJl<7UCHj<:TCGj<<UCCk<>TCBl<?SCAl<a0TC^Ol<c0UC[Oj<g0VCXOj<i0VCWOi<j0VCVOi<l0WCSOi<n0VCROj<o0VCPOi<S1VClNj<Y2OmNWCiNi<[2OnNXCfNg<[1ZCdNe<^1[CaNd<`1]C_Nc<b1\\\\C^Nc<c1^C\\\\Na<f1^C[N`<g1`CXN^<j1cCVNZ<m1eCSNZ<n1gCQNX<P2iCPNV<Q2iCoMV<R2kCmMT<U2kCkMT<W2lChMS<Z2lCfMS<\\\\2mCcMS<]2mCcMR<^2nCcMP<_2PD`Mo;a2QD`Mn;a2RD^Mm;d2RD]Mm;_30UOSD[Mm;e2SD[Mm;e2SD[Mm;e2TDZMl;g2SDZMl;f2UDYMk;g2UDYMk;g2UDYMk;g2UDZMj;f2VDZMj;f2WDZMh;f2XDZMh;g2WDYMi;g2WDYMi;h2VDYMi;c30000000000010O01OTOXDXMh;h2XDYMg;d31O0TOXDXMi;g2WDYMi;g2WDYMi;g2WDYMj;f2VDZMj;f2VD[Mi;e2WD[Mi;e2WD[Mi;d2XD]Mg;c2YD]Mg;c2YD]Mh;b2XD_Mg;a2XD`Mh;`2XD`Mh;_2XDbMh;]2YDcMg;\\\\2YDfMf;Z2ZDfMg;X2ZDhMf;X2YDiMg;W2XDjMi;T2WDmMj;R2VDnMj;Q2VDPNk;o1UDQNk;n1UDSNk;l1VDTNj;k1UDXNj;g1VDZNj;e1VD\\\\Nj;c1WD]Nj;a1VD_Nk;a1TD`Nl;_1TDbNm;[1TDfNm;W1TDiNn;U1RDlNn;S1SDmNn;Q1RDoNP<o0oCROS<k0kCWOX<e0iC\\\\OY<a0iC^OY<`0gCAY<>gCB[<;fCF[<7aCLj<IVC8Q=@QC`0T=ZOnBe0U=WOlBi0\\\\=nNfBQ1f=^N`B`1U>N2O002O0O1O2N2N1O2N2N2N4KQYX7\"}}, {\"image_id\": 128, \"category_id\": 1, \"bbox\": [435.0, 274.0, 171.0, 107.0], \"score\": 0.999999463558197, \"association_id\": 1, \"light\": [-2.2595629692077637, -1.9733171463012695, 2.1358323097229004, 1.8013217449188232], \"segmentation\": {\"size\": [600, 800], \"counts\": 
\"fUo74bb04M2N2N1O1^_OHd>9SA0k>1l@9S?Hj@;U?Eh@>X?Ce@`0Z?@d@b0\\\\?^Ob@d0^?]O`@d0`?\\\\O_@d0a?^O]@b0d?_OZ@a0g?@V@4C@X`0>Q@2^`0O`_O1a`01[_O1d`0Q110O10O02O1a_OZNi?j1P@\\\\Nm?Z2M2O00000O1000nNX@TOh?j0\\\\@YNMe0g?m0l@nNT?m0TAoNm>P1VAnNj>Q1WAoNi>P1\\\\AjNf>U1^AfNd>Y1^1O1O10000000000001O001O00001O001O1O0000001O000f_ObNZ?^1e@cN[?]1d@dN\\\\?\\\\1a@gN_?X1_@kNa?U1^@lNb?T1^@kNc?U1]@jNd?V1\\\\@jNd?W1Z@iNf?X1Z@fNh?Z1[@aNg?_1i01V_OPOi?R1n_O[Ol?l1M1O1O000O010000O100mN^@`NL0g?^1^@eNKHj?c1[@eNo?Z1R@gNm?Y1S@gNm?Y1R@iNm?W1S@iNm?W1S@iNm?X1R@hNn?Y1Q@gNo?\\\\1n_OdNR`0`1k_O_NT`0d1j_O\\\\NV`0e1j_OZNV`0g1k_OUNV`0n1i_OPNX`0S24mNg_OKW`02n_ONo?OV@Nl?O\\\\@Kd?2d@IVO^OP`0i0n@FPOEo?e0SADoNGn?e0VA@lNLn?d0[AWOfN9P`0`0]AAb>`0^A@b>`0]A@d>`0\\\\A@d>a0[A_Oe>a0[A_Oe>b0ZA^Of>c0YA]Of>e0YA[Oh>d0XA\\\\Oh>e0WA[Oi>g0UAYOk>h0TAWOm>i0SAWOn>h0RAXOo>g0QAYOR?d0n@\\\\OX?>h@A\\\\?<d@D]?;b@Ea?9_@Gd?6\\\\@Hh?5X@Li?2X@Mj?1W@Oj?OW@0Ya00O1000O100O100O010O1000O10O10O10000000000000O100000001NiZa3\"}}, {\"image_id\": 128, \"category_id\": 1, \"bbox\": [0.0, 301.0, 141.0, 104.0], \"score\": 0.9990675449371338, \"association_id\": 2, \"light\": [-1.3211393356323242, -2.338029146194458, 1.211007833480835, 2.213909387588501], \"segmentation\": {\"size\": [600, 800], \"counts\": \"i9_2W`08I3M101O0OL5N1M4N1O2M3N2O1O100O1001N4M?A2N2M101O0O2N101N101O000000O10000O1O1F^@ZMf?c29L4L4N2M3C=M3O1O100O2O0010O01000O3N011N;F4K1N6K1O0O0100O001O001O00001O001O0UNk_OU1T`0iNo_OV1P`0jNR@U1n?jNT@T1m?lNU@Q1l?POSAOn>1PA0P?2n@NR?7i@IW?9f@HZ?:d@F]?:b@F^?=^@Cd?=[@Ce?>Z@Ah?>W@Ci?=W@Bk?>S@Cm?=S@Co?;Q@DR`09o_OGT`06k_OK[`0Nf_O2^`0Ge_O9]a000000000000000O10000000O10O1000000O10000O10000000O1000000000O10001N10000O1Fd]O8`b0L[hQ<\"}}, {\"image_id\": 128, \"category_id\": 1, \"bbox\": [267.0, 314.0, 214.0, 123.0], \"score\": 0.9999921321868896, \"association_id\": 3, \"light\": [-2.6084887981414795, -0.788404107093811, 2.5557103157043457, 0.6139570474624634], \"segmentation\": {\"size\": [600, 800], \"counts\": 
\"]il48^b09F3M3O1O01O1O0010O0n_O@m=a0oADl=`0jAJS>9gAMU>7jAJS>9kAIT>:hAHW>;dAG]>;]AIb>:XAHj>:RAGn>b0i@^OU?m15N3O2O10O10O1000O101N2O2M8H4M1O1OfNUAhNj>R1bAhN]>X1fAgNY>X1jAgNU>X1PBcNQ>[1VB`Nj=_1[B\\\\Nf=c1^BYNc=g1_BWNa=i1bBRN`=m1f10000000001O01O00000000001O01O000O10000001O01O01O0O2O1O0`@UNS>l1hAYNW>h1QAQN7:g>g1o@VN35m>h1k@VN72m>[2QAeMo>R30001O\\\\Oo@`MR?[2VA`Ml>Y2cA^M^>S2Z1O2O1O1O100O101O0001O0V@]N_>c1`A_N`>a1_A_Nb>a1]A_Nd>c1YA]Ng>g1UAZNk>h1RAWNP?i1PAVNQ?k1m@UNT?n1h@RNY?U2`@jM`?Z2_@cMb?^2b@\\\\M_?d2`@\\\\Ma?f2]@YMd?i251O0O11N2O1N2N1O1O2N5K3M1O1N2O6I6J2N2N2O2MT1mN0Hi]O0Vb08010000O1000O10000O010O1000O010000O1O01000O1O100O010O100000000O1000O100000O01000000O1000O10000000O010000O100O010O10O10O10O0100000000O100000001N10002LQaj5\"}}, {\"image_id\": 128, \"category_id\": 1, \"bbox\": [190.0, 296.0, 100.0, 110.0], \"score\": 0.9836001396179199, \"association_id\": 4, \"light\": [-2.3263683319091797, -1.707779884338379, 2.288431167602539, 1.5487867593765259], \"segmentation\": {\"size\": [600, 800], \"counts\": \"mf_34cb0100O2O0O10000O1fM9cAFV>`1l@`NR?h1h@XNU?R2c@nM[?X2b@hM]?[2a@eM_?[2a@dM_?]2a@bM_?_2a@aM^?`2b@_M_?a2a@_M_?`2b@`M^?^2c@cM\\\\?c1e@aN6LU?Z1hAgNV>W1mAiNS>V1nAjNR>V1mAkNS>T1nAkNS>V1lAjNT>W1jAjNV>W1iAiNW>X1hAhNW>^1dAbN\\\\>e1]A[Nc>g1[AYNe>h1ZAXNf>l1VATNj>P2RAPNn>Q2QAoMo>R2PAnMP?U2m@kMS?V2l@kMT?U2k@kMU?W2i@iMW?X2h@iMW?X2h@hMX?Y2g@hMY?X1o@lNGLZ?W1RAlNCNZ?V1UAjNA0Z?U1XAiN^O2Z?U1YAiN[O4[?S1]AbN\\\\O;X?R1hAnNX>R1hAnNX>R1gAoNY>Q1gAPOX>P1hAPOX>Q1fAPOZ>P1fAPOZ>P1fAPOZ>P1fAPOZ>P1fAPOZ>Q1eAoN[>Q1gAmNY>S1aAaNUO9Z?[1WAcN^O1[?^2e@aM[?a2c@`M\\\\?`2e@_M[?b2d@^M\\\\?b2d@_MZ?b2e@_M[?b2d@^M\\\\?b2d@_M[?n21Bd@`M\\\\?^2e@bM\\\\?\\\\2e@eM[?Z2f@fMZ?Y2f@gM[?T2i@mMW?R2j@nMW?l1n@SNS?j1PAVNP?h1RAWNP?g1QAYNo>f1SAYNn>d1TA[Nn>b1WA[Ni>P1f@POd0Mh>P1k@mN>3h>n0oAnNU>P1l1M2N2N3L4L5Kf`Z9\"}}, {\"image_id\": 129, \"category_id\": 1, \"bbox\": [106.0, 24.0, 404.0, 685.0], \"score\": 0.9999955296516418, \"association_id\": 1, \"light\": [-1.6402177810668945, -2.001624822616577, 1.4883805513381958, 
1.9000749588012695], \"segmentation\": {\"size\": [800, 616], \"counts\": \"XSc2j4Pd0V1kN;F8I7J5L3M2N3M2O1N2N101N2O001N101O1O1O2N1O2M3N2N1O2N1O1O00001O0O101O0000001O0O101O001N2O001N2O001N2O1N101N2N2N2SJ]GZKd8c4bGYK`8c4fGYK\\\\8c4jGYKW8e4mGYKT8d4PHYKQ8e4SHYKn7d4VHZKk7d4XHYKi7f4ZHXKg7e4\\\\HZKd7nKQGj7\\\\1WLd7mKSGi7[1YLc7mKUGf7Z1\\\\La7mKXGd7Y1]L`7oKXGc7Y1]L`7oKXGb7Z1^L^7oKZGb7Y1]L_7PLZGa7X1]L_7QL\\\\G`7V1^L^7RL`G\\\\7T1_L]7ULdGV7R1bL\\\\7WLcGV7T1_LZ7ZLeGU7S1]LZ7^LcGU7T1[L[7_LbGU7U1YLZ7bLbGT7U1WL[7eL`GT7V1TL\\\\7hL_GS7V1SL]7iL]GT7W1PL_7kL[GT7W1oK_7nLZGR7X1mKa7PMZGP7W1lKb7SM[Gm6U1lKd7UMYGm6V1iKd7YMWGm6Y1dKc7^MUGm6]1^Ka7eMRGl6`1YKb7jMnFm6c1iIm8YOaEm6d1`IS9CYEl6h1WIV9NQEk6c>UI^Aj6b>VI_Ai6a>XI_Ag6a>YIaAe6_>\\\\IdA`6\\\\>`IfA^6Z>cIfA\\\\6Z>eIfAZ6Z>fIgAY6Y>hIfAX6Z>hIfAW6[>iIfAV6Z>iIgAV6Z>jIgAU6Y>kIhAS6Y>mIhAR6Y>lIiAR6X>nIiAP6X>PJiAo5W>QJjAm5W>RJkAl5V>TJkAj5V>UJkAj5f`0O2N2N101N3M3N2M3N1N2O001O0O2O001O1O1N3N1O001O001O001O001O001O2N1O1N101O001O001O001O1O1O1O1O001O001O00gIiKjGV4R8mKoGS4n7PLSHo3k7SLVHl3i7ULXHj3f7WL\\\\Hh3c7YL^Hf3a7[L`Hd3_7]LaHc3^7]LdHb3[7_LeHa3Z7`LgH_3W7cLiH]3V7dLkH[3S7gLmHY3R7hLoHW3o6kLRIT3m6mLTIR3k6oLUIQ3j6PMWIo2h6RMYIm2f6TMZIl2e6UM[Ik2e6UM\\\\Ij2c6WM]Ii2b6XM^Ih2b6XMiDoN`2i3f8YMhDUOUNKo3f3T9ZMgDAT2U3U9ZMeDCW2R3T9[MdDEW2P3T9\\\\McDGY2l2T9]MbDHZ2k2T9]MaDJ[2h2T9^MaDJ\\\\2g2S9_M`DK]2f2S9_M_DM]2d2T9_M_D^OZNKT4W3S9`M^D]O^NLP4W3T9`M]D\\\\ObNLm3X3T9`M]D[OcNNk3W3U9`M\\\\D\\\\OdNMk3W3U9`MZD]OhNLh3W3V9`MXD^OkNLf3V3X9_MVD_OmNLe3V3X9_MTD@POLc3V3X9^MTD@ROLb3V3X9^MSD@TOM`3U3Y9^MRD_OWON^3U3Y9^MQD^OZOO\\\\3U3Z9]MPD]O\\\\O2Y3T3[9]MPD[O^O4W3T3[9]MoC\\\\O@2V3U3\\\\9\\\\MnC\\\\OA4T3T3]9\\\\MmC]OB3U3S3]9]MkC\\\\OD4T3S3]9]MkC\\\\OE4R3S3_9\\\\MiC]OG4Q3S3`9[MhC^OG5P3R3b9ZMgC^OH6o2R3b9ZMfC^OJ7m2Q3d9YMeC_OK7k2R3e9WMeC_OL8j2R3f9WMbC^O09h2R3f9WMbC]O1:g2R3g9VMaC\\\\O3<e2S3f9UMbC[O4=d2S3g9TMaCZO6?b2T3f9SMaCZO8`0`2S3g9TM`CWO;b0^2S3h9SMnDIZ1U3g9RMoDIZ1U3g9RMoDIZ1V3g9QMnDI[1W3f9PMoDI[1X3e9PMoDH\\\\1Y3e9nLnDJ]1Y3d9nLnDI^1Z3c9nLnDH_1[3b9nLnDG`1\\\\3a9nLnDEb1^3`9mLmDEc1_3_9nLkDDf1_3^9mLlDDf1`3]9m
LlDCg1`3]9nLkDBh1a3\\\\9mLlDBh1a3[9oLlD_Oj1b3Y9PMmD]Ok1c3W9QMnD[Ok1f3X5kKgJU1UNYOm1g3U5oKfJR1XNVOo1i3P5SLgJo0ZNTOP2k3l4ULhJn0[NPOS2m3h4XLhJl0]NnNT2n3d4\\\\LiJj0^NkNV2o3j1kKeLd0k0i0_NiNW2P4f1nKeLd0m0f0aNgNW2R4c1QLcLe0Q1c0`NdNZ2T4\\\\1VLcLe0U1?aNbN[2T4=VM]MJX1;cNaN[2U47[M]MJ\\\\17eN]N[2Y41_M^MJ^12hN\\\\N[2Z4JeM_MIc1MhNZN\\\\2]4EiM_MHe1JjNWN]2`4BjM^MJf1FmNVN]2a4^OmM_MIg1EYMRNa11b2d4\\\\OnM]MIj1CXMSNj5e4cMoM_MHj1FT3e2_MQN`MFk1FT3e2^MSN`MDk1HU3c2\\\\M^NXMYOU2FX3d2VMa1_OnKgMdM]5P5QMi1HgKhMbM]5o4eLW21dKV3X2XLd2?XKW3U2SLk2b0TKX3S2QLo2d0PKY3S2oKR3d0oJZ3R2nKT3d0lJ\\\\3R2lKX3c0kJ]3P2kK\\\\3c0nJX3`;bLdD\\\\3a;\\\\LaDd3]`01N101M2N2E;1O1O010O1O01000O2O0O2N101M3N2N1O2N1O2N1O2O0O2N2N2M3N2M2N3N1O2N100O2O1N1O2M4K8G7F8N2N2N2O0O2O1O0O3N1O2M4M2M101N2N101N101O001O1O2N1O1O001O001O001O01000O01000O010O0010O000001O001O001O010O1O1O001O0O2N2N2N3KZWb2\"}}, {\"image_id\": 130, \"category_id\": 1, \"bbox\": [58.0, 521.0, 277.0, 151.0], \"score\": 0.9999918341636658, \"association_id\": 3, \"light\": [-2.483900547027588, -1.8763532638549805, 2.4128317832946777, 1.6738481521606445], \"segmentation\": {\"size\": [853, 1280], \"counts\": 
\"Zl`1S1Zi0>G4N2N2N2N101M3N2\\\\XOPN_f0T2^YOnM_f0V2_YOkM_f0W2`YOiM^f0Z2aYOfM^f0[2bYOeM]f0]2aYOdM^f0]2bYOcM]f0_2bYOaM]f0`2cYOaM[f0a2dYO_M[f0c2dYO]MZf0g2dYOYMZf0l2cYOTM[f0o2dYOQMZf0R3eYOnL[f0S3dYOmL[f0T3eYOlLZf0V3eYOjL[f0V3eYOjLZf0X3dYOiL\\\\f0W3dYOiL[f0Y3dYOgL\\\\f0Z3cYOfL\\\\f0]3bYOcL^f0^3aYObL^f0`3aYO`L_f0a3`YO_L`f0b3_YO^L`f0c3`YO]L`f0d3`YO[L`f0e3`YO[L`f0m3000000000000000001O0000000000000000000000000000000000KaYOTL_f0l3aYOTL_f0l3aYOTL_f0l3aYOTL_f0l3aYOTL_f0l3aYOTL_f0l3aYOTL_f0k3bYOUL^f0k3bYOUL^f0k3bYOUL^f0k3bYOUL^f0k3bYOUL^f0k3aYOVL_f0i3bYOWL^f0i3bYOWL^f0i3aYOXL_f0g3bYOYL^f0g3aYOZL_f0f3aYO[L^f0d3cYO\\\\L]f0d3bYO]L^f0b3cYO^L]f0m31O000CaYOdL_f0i3000BaYOfL_f0Z3aYOfL_f0Z3aYOfL_f0Y3bYOgL^f0Y3bYOgL^f0Y3aYOhL_f0X3aYOhL_f0X3aYOhL_f0X3aYOhL_f0W3bYOiL^f0W3bYOiL^f0W3bYOiL^f0W3bYOiL^f0W3bYOiL^f0V3cYOjL]f0V3cYOjL]f0V3cYOjL]f0V3cYOjL]f0U3dYOkL[f0V3eYOjL[f0V3eYOjL[f0V3eYOjL[f0V3eYOjL[f0V3eYOjL[f0W3dYOiL\\\\f0W3dYOiL[f0X3eYOhL[f0X3dYOiL\\\\f0W3dYOiL\\\\f0W3dYOiL\\\\f0W3dYOiL[f0X3eYOhL[f0X3eYOiLZf0W3fYOiLYf0X3gYOhLYf0X3gYOhLXf0Y3hYOgLXf0Z3gYOfLXf0[3hYOeLXf0[3hYOeLXf0[3hYOeLWf0\\\\3iYOdLWf0\\\\3iYOdLWf0\\\\3iYOdLWf0\\\\3iYOdLVf0]3jYOcLVf0]3jYOcLVf0]3jYOcLUf0^3kYObLUf0^3kYObLUf0^3kYObLTf0_3lYOaLTf0_3lYOaLTf0P4O100000000O1000000000^OnYOaLRf0_3nYOaLRf0_3nYOaLRf0_3nYOaLQf0`3oYO`LQf0`3oYO`LQf0`3oYO`LQf0`3oYO`LQf0`3oYO`LQf0`3oYO`LQf0`3oYO`LQf0`3oYO`LQf0`3oYO`LPf0`3QZO`Loe0`3QZO`Loe0`3PZOaLPf0_3PZOaLoe0`3RZO_Lne0a3RZO_Lne0a3RZO_Lme0a3TZO_Lle0b3SZO^Lme0b3SZO^Lme0c3RZO]Lne0c3RZO]Lne0c3RZO]Lme0e3SZOZLme0f3SZOZLme0f3TZOYLle0g3UZOXLke0i3TZOWLle0i3UZOVLke0j3UZOVLle0j3SZOVLme0j3TZOULme0k3RZOULne0l3RZOSLoe0m3QZORLoe0n3RZOPLoe0Q4RZOmKne0S4RZOmKoe0S4PZOmKPf0S4PZOmKPf0S4QZOlKoe0U4PZOkKQf0T4oYOlKQf0T4PZOkKQf0U4nYOkKRf0U4nYOkKSf0U4lYOkKTf0U4lYOkKUf0T4lYOkKTf0U4lYOkKTf0V4kYOiKWf0V4iYOjKWf0V4iYOjKWf0V4jYOiKVf0W45O0000000000000000000000000ZNTZOcNle0R1dYOQNc0l0ie0P1kYOnM?Q1fe0o0fZOQOZe0l0jZOSOVe0j0oZOSOQe0m0R[OQOnd0n0Y[OlNgd0R1b[OfN^d0Z1g[O`N[d0_1i[O\\\\NXd0d1\\\\201N2O00100O1O1O1M3I7G9O1O1O100O1O1O1N2N2L4
L4M3N2O10000O101N10000O10febh0\"}}, {\"image_id\": 130, \"category_id\": 1, \"bbox\": [718.0, 492.0, 497.0, 268.0], \"score\": 0.9994879961013794, \"association_id\": 1, \"light\": [-3.032557964324951, -1.6773781776428223, 2.9779138565063477, 1.4535173177719116], \"segmentation\": {\"size\": [853, 1280], \"counts\": \"dffb02;8lh0f1jNg0I7J5L3M4M2M3N2N2N3M4L4K7J6kYOhKb03mc0m4i[OTKUd0Q5g[OQKVd0T5g[OlJXd0V5f[OkJYd0X5e[OhJZd0Z5d[OgJZd0\\\\5e[OeJZd0[5g[OdJXd0^5g[ObJYd0^5g[ObJXd0`5g[O`JXd0a5h[O_JXd0b5h[O]JWd0e5h[O[JWd0f5i[OZJVd0h5i[OXJUd0U6O1O1N200O1O1O2O0O100O100O100O1O1N2O1O2M2N2O1O100O100O10000O10000000Gf\\\\OWIZc0i6f\\\\OWIYc0j6g\\\\OVIYc0j6g\\\\OVIYc0j6g\\\\OVIYc0j6g\\\\OVIXc0k6h\\\\OUIXc0k6h\\\\OUIXc0k6h\\\\OUIXc0k6h\\\\OUIXc0k6h\\\\OVIVc0k6j\\\\OUIVc0j6j\\\\OWIVc0i6j\\\\OWIVc0i6j\\\\OWIUc0j6k\\\\OVIUc0j6k\\\\OVIUc0i6l\\\\OWITc0i6l\\\\OWITc0i6l\\\\OWISc0j6m\\\\OVISc0i6n\\\\OWIRc0i6n\\\\OWIRc0i6n\\\\OXIPc0i6o\\\\OXIQc0h6o\\\\OXIQc0g6P]OYIPc0g6P]OYIPc0g6P]OYIob0h6P]OYIPc0g6P]OYIPc0g6P]OYIPc0g6P]OYIPc0f6Q]OZIPc0e6P]O[IPc0e6o\\\\O]IPc0c6P]O]IPc0c6P]O]IPc0c6P]O]IPc0c6P]O]IPc0c6P]O]IPc0c6P]O]IPc0c6o\\\\O^IRc0a6n\\\\O_IRc0a6n\\\\O_IRc0a6n\\\\O_IRc0R7000001O00000000000O100000000000001O000O1000000000000O10000000001O000O1000000000000000000O1000000000000000000O1000000O101O0O10ROZ]OmIeb0R7O100O100ORO^]OkIbb0U6^]OkIbb0U6^]OkIab0V6_]OjIab0V6_]OjIab0U700000000OQO`]OkI`b0U6`]OkI`b0U700000000000000000000O100000001O0000000000000000000000000000001O000000000000000000000000001O000000000O10000000000000mNe]OmIZb0W7000000000mNf]OkIZb0U6f]OkIZb0U6f]OkIZb0U6f]OkIZb0X700000nNf]OiIZb0W6f]OiIZb0Y700000000000000000000000000000000000001O01O00000000000000000000000000000000000000000001O0001O0TOf]O]IZb0c6f]O]IZb0c6f]O]IZb0c6f]O]IZb0c6f]O]IZb0c6g]O\\\\IYb0d6g]O\\\\IYb0e6f]O[IZb0e6f]O[IZb0e6g]OZIZb0e6f]O[IZb0e6f]O[IZb0e6f]O[IZb0e6g]OZIYb0f6g]OZIYb0f6g]OZIYb0f6g]OZIYb0g6f]OYIZb0g6f]OYIZb0g6f]OYIZb0g6g]OXIYb0h6g]OXIYb0h6g]OXIYb0h6g]OXIYb0h6g]OXIYb0i6g]OVIYb0j6g]OVIYb0j6g]OVIYb0j6g]OVIYb0j6g]OVIYb0j6g]OVI
Zb0j6f]OUIZb0k6f]OUIZb0k6f]OUIZb0k6f]OUIYb0m6f]OSIZb0m6f]OSIZb0m6f]OSIZb0n6e]ORI[b0n6e]ORI[b0n6e]ORI[b0n6e]ORI[b0d7000000000000000000000000000000001[Od]OoH\\\\b0Q7d]OoH\\\\b0Q7d]OoH\\\\b0Q7d]OoH\\\\b0Q7d]OoH\\\\b0Q7d]OoH]b0P7c]OPI]b0P7c]OPI]b0d7001O0]Ob]OoH_b0P7a]OPI`b0o6`]OQI`b0o6`]OQIab0n6_]ORIbb0m6^]OSIcb0l6]]OTIeb0j6\\\\]OUIeb0j6[]OVIfb0i6Z]OWIgb0h6Y]OXIhb0g6X]OYIib0f6W]OZIib0f6W]OZIjb0e6W]OZIib0f6W]OZIjb0e6V]O[Ijb0e6W]OZIib0f6W]OZIjb0e6W]OZIib0f6W]OZIjb0e6V]O[Ijb0e6W]OZIjb0e6V]O[Ijb0e6V]O[Ikb0d6U]O\\\\Ilb0c6T]O]Ilb0c6T]O]Imb0b6T]O]Imb0b6S]O^Imb0b6T]O]Imb0b6S]O^Imb0b6T]O]Imb0b6S]O^Imb0b6S]O^Imb0b6S]O^Imb0b6T]O]Imb0b6S]O^Imb0b6S]O^Imb0b6S]O^Imb0b6T]O]Imb0b6S]O^Imb0b6S]O^Imb0b6S]O^Imb0b6S]O^Imb0b6S]O^Inb0a6R]O^Iob0b6R]O]Inb0c6R]O]Inb0c6R]O]Iob0b6Q]O^Iob0b6Q]O^Iob0b6Q]O^IPc0a6P]O_IPc0a6P]O_IQc0`6o\\\\O`IQc0`6o\\\\O`IRc0_6o\\\\O`IQc0`6o\\\\O`IRc0_6n\\\\OaIRc0_6n\\\\OaISc0^6m\\\\ObISc0^6m\\\\ObITc0]6m\\\\ObISc0^6m\\\\ObISc0^6m\\\\OaIUc0^6k\\\\ObIUc0^6k\\\\ObIUc0^6l\\\\OaITc0_6l\\\\O`IVc0_6`00001O001O001O1O1O1O001O001O00001O00001O001O001O1N3N4L4L3M2N1O2N1O5K4K3N2N1N3N3aLZZOh1ne0^MQZOG<d2ne0SMg[Oj1Sg0[O[_e1\"}}, {\"image_id\": 130, \"category_id\": 1, \"bbox\": [284.0, 505.0, 476.0, 199.0], \"score\": 0.9999870657920837, \"association_id\": 2, \"light\": [-2.1613035202026367, -2.5177161693573, 2.0324087142944336, 2.3316538333892822], \"segmentation\": {\"size\": [853, 1280], \"counts\": 
\"[V]79Yj06J8G9E7M2mMkNdZOX1Xe0oNbZOS1\\\\e0SOUZOY1ae0TOYZOo0ee0VOUZOl0je0XOQZOk0me0XOPZOi0Pf0ZOlYOh0Rf0\\\\OjYOf0Tf0]OiYOd0Wf0_OeYOc0Yf0@dYOb0[f0l1O2XZOiKod0Y4nZOjKQe0W4mZOkKQe0W4nZOiKRe0X4lZOiKSe0Y4lZOgKSe0[4lZOeKTe0[4kZOfKTe0\\\\4lZOdKRe0^4mZObKSe0^4mZObKRe0`4mZO`KSe0`4mZO`KRe0a4nZO_KRe0b4mZO^KSe0b4mZO^KRe0c4nZO]KRe0d4mZO\\\\KSe0d4mZO\\\\KSe0e4lZO[KSe0f4mZOZKSe0g4lZOYKTe0g4lZOYKTe0h4kZOXKTe0i4mZOVKSe0k4lZOUKTe0k4lZOUKTe0k4mZOTKRe0m4nZOSKRe0n4nZOQKRe0o4nZOQKRe0o4nZOQKQe0P5oZOPKQe0Q5nZOoJRe0Q5oZOnJQe0R5oZOnJQe0R5oZOnJQe0R5oZOnJPe0S5P[OmJPe0S5Q[OlJod0T5Q[OlJod0T5Q[OlJod0T5Q[OlJod0U5P[OkJod0V5Q[OjJod0V5R[OiJnd0W5R[OiJnd0W5R[OiJnd0W5S[OhJmd0X5S[OhJmd0X5S[OgJmd0[5R[OeJnd0[5S[OdJmd0\\\\5R[OeJnd0[5R[OfJmd0Z5S[OfJmd0Y5T[OgJld0Y5T[OgJld0Y5S[OhJmd0X5S[OhJld0Y5T[OgJld0Y5T[OhJkd0W5V[OiJjd0W5U[OjJkd0V5U[OjJkd0V5U[OjJkd0V5U[OjJkd0V5U[OjJkd0U5V[OkJjd0U5U[OlJkd0T5U[OlJkd0T5U[OlJjd0U5V[OlJid0T5V[OmJjd0S5V[OmJjd0S5U[OnJkd0Q5U[OPKkd0P5T[OQKld0\\\\5000BT[OSKld0\\\\5O00@U[OVKkd0Z5000_OU[OXKkd0Y50^OU[OZKkd0X5O10000000000000000000000O100000000000000O1000000000000000000000000001O000000000000000000000000000001O00000000000000000000000000000O2O00000000000000000000000O101O00000000000000000O10000000000000000000001N100000000000000000000O1000000000000000000O1000000000000000000O100000000000000000000000000000000000000O1000000000000001O000000000000000000000O100000001O0O10000000000000000000000000001O0000000000000000000000000O10001O000000000000000O10000O10000O1O100O100O10000O2OU[OYJhd0g5W[O[Jhd0d5X[O^Jgd0h5O10IZ[O_Jfd0h50O1HY[ObJgd0]5Y[OcJhd0\\\\5X[OeJhd0Z5X[OgJhd0c500000000000000O010001O000O10001N10001O00001N10001O0000001N100O101O001O000O2O1O001O1O001O1O0O101O001O001O001O1N2O1O2N1O1O1O1O001O001O00001N10001O001N101O1O1O001O1N10001N101O0O1HdYOVL]f0j3dYOTL^f0j37000001O00001O001O0000001O0O101O000O101N101N1O2O1N3M2N3M2N8mLaXOe1MPOPh0i0VXOTOmg0@nWOf0a0]OAK`h00dWO1X\\\\a=\"}}, {\"image_id\": 131, \"category_id\": 1, \"bbox\": [52.0, 264.0, 412.0, 470.0], \"score\": 0.9898380041122437, \"association_id\": 
2, \"light\": [-1.390873908996582, -1.7904305458068848, 1.2732375860214233, 1.641309142112732], \"segmentation\": {\"size\": [800, 600], \"counts\": \"oXY19_h0?F6J5M1N3N1O2O0O2N1O101N1N3O1N1O2O0O2O0O2O0O2N1O2N2N2O0O2N2O0O101N101N101N2O1O1N2O1O0O2O001N101N2O0O2N10001N10000O2O0000000O1000000O100O10000O100O1000000O1000O01000000O10000O010O100O10O0100000O10O10O1000O010O01000O010O1000O010000000O10O10O100O11O1N2O001O1N101O0O2gZO`LPe0`3nZOcLPe0g301N10001O001N101O1O001`GlMQJU2_5aN[J_1^5lN^JT1_4mKYFV3V5n0^4QLUFV3\\\\5i0_4RLRFX3]5g0a4QLPF[3^5d0b4RLlE^3`5a0e4QLhEb3a5=g4RLeEd3b5;i47UKIk49RKHn4:PKGo4;nJFR5;mJES5<lJDT5>jJBV5`0gJAY5a0eJ_OZ5d0dJ\\\\O\\\\5e0bJ\\\\O^5e0aJ[O_5f0`JZO`5g0_JYOa5nKeEY4i4Jc5kKfEZ4g4Kc5jKhE[4c4Le5gKkE\\\\4`4Me5fKlE]4_4Mf5cKnE`4Z4Nh5`KPFb4X4Nh5^KSF`0_Ob3d4a0]6kKREc3a4b0]6kKREd3_4b0l5^KQF=Bc3b4b0k5bKlE;Fa3c4a0k5gKgE]4]4Ml5c0UJ]Oj5d0VJ\\\\Oj5d0UJ]Oj5d0VJ\\\\Oj5c0WJ]Oi5ULeEa3a4;i5c0WJ\\\\Oj5e0TJ\\\\Ol5d0TJ[Om5e0RJ[Oo5f0PJZOP6f0oIZOR6g0mIYOS6g0mIXOT6i0jIXOV6h0jIWOW6j0hIVOW6k0iIUOW6l0hISOY6m0gISOY6m0fISO[6n0dIQO]6o0cIoN_6Q1aImNa6T1^IjNd6W1[IhNf6Y1YIgNg6Z1[FgJY1o3]8\\\\1RFRKZ1b3d8^1]EjKh1g2l8b3mF_LS9e3iF[LW9h3eFXL\\\\9i3cFVL^9k3`FVL`9l3^FSLc9n3\\\\FQLe9Q4XFoKi9T4TFkKm9[4lEfKT:`4fE_K[:d4bE\\\\K^:e4aE[K_:f4`EYKa:h4^EXKb:i4]EWKc:j4\\\\EUKe:m4XETKh:n4VEQKk:R5REmJo:U5oDjJR;W5mDiJS;X5lDgJU;Z5jDfJV;Z5jDfJV;[5iDdJX;^5gDaJY;c9000000000O100000000O100O100O10000O100000000O10000000000000000000000O100000001N100O1O101N2O1N2O3M4L4YCRDT<^<N2O1O001O000O2O00001N10000O1O101N10000O10000O1000000O1O010O1O1O1O1bMgBTI[=j6fBVIZ=i6gBVIZ=i6gBVIZ=j6eBWI[=h6fBWI[=h6fBXIZ=h6eBXI\\\\=g6eBXI\\\\=h6cBXI^=h6aBWIa=i6^BWIc=h6^BWIc=i6\\\\BWIe=i6[BVIf=j6YBWIg=i6YBVIh=i6YBWIg=i6YBVIh=i6YBWIg=h6YBXIh=g6YBXIh=g6YBXIh=f6ZBXIh=g6YBXIh=g6YBXIi=f6XBYIi=g6VBYIk=f6VBYIk=g6UBXIl=h6TBXIl=h6UBVIm=h6UBVIl=j6UBTIl=l6UBQIm=o6XBiHl=V7P2N2N1O2N2N1N3N1O1O2N10000O2YIn_O^4R`0^KY@Z4h?cK]@[4c?bKa@]4`?aKb@]4_?bKc@]4^?`Ke@^4]?`Ke@^4^?^Ke@_4b?YKc@c4oa0M5L4L3M2O2M3M4K5K6G9G7K5M3M2N2N3L6J6J8C=D;I4L3N2N3M3L6JViY3\"}}, 
{\"image_id\": 131, \"category_id\": 1, \"bbox\": [87.0, 60.0, 374.0, 535.0], \"score\": 0.9993371367454529, \"association_id\": 1, \"light\": [-1.703077793121338, -1.704496145248413, 1.6163731813430786, 1.6326816082000732], \"segmentation\": {\"size\": [800, 600], \"counts\": \"U]T2111ih08K2N1O2O0O2O0O10000O2O0O10001N1000000O100000000O10000000000000000O10O1000000O0100000000O0100000O10O10O1UKAUA?d>9k@Go>i0g@WOP?W1j@iNg>i1WAWNb>R2[AoM\\\\>[2cAeMU>e2iA[Mo=n2PBRMj=U3TBlLj=X3TBhLj=[3UBeLi=^3UBcLj=_3UBaLi=c3UB]Lj=e3UB[L_:H[EQ44WL`:KXER46TL`:MVER48RLa:ORET4;mKb:3mDT4`0iKb:6jDT4c0fKc:9eDT4g0cKc:=aDS4j0aKd:`0]DR4n0^Ke:b0ZDR4P1\\\\Ke:d0YDQ4R1[Ke:f0VDQ4T1YKf:g0TDQ4U1YKg:h0QDP4X1XKg:i0oCQ4Y1VKg:l0mCo3\\\\1UKg:n0jCo3^1SKh:P1dCQ4d1oJh:Q1]CW4i1iJi:T1SC[4T2aJi:h7VEXHj:i7TEXHl:h7TEXHl:i7SEWHn:h7REXHn:i7QEWHo:i7PEXHP;i7oDWHQ;j7nDWHR;h7nDXHR;i7lDXHT;h7lDXHT;i7kDWHU;j7jDVHV;k7hDVHX;k7gDUHY;m7eDSH[;n7cDSH];o7aDQH_;P8`DPH`;R8^DnGb;T8\\\\DlGd;V8YDkGf;X8XDhGh;[8UDeGk;_8QDaGo;c8lC]GU<h8fCXGZ<l8bCTG^<o8_CQGa<Q9^CnFb<T9\\\\ClFd<U9[CkFe<W9ZChFf<Z9XCfFh<[9XCdFh<]9XCbFh<`9WC_Fi<b9ZCYFg<i9XCVFh<k9XCTFh<n9VCRFi<P:VCPFj<R:UCmEk<T:VCjEj<W:VChEj<Y:VCfEj<[:UCdEl<]:SCcEm<]:TCbEl<_:SCaEm<`:RC`Eo<`:PC`EP=a:PC^EP=b:PC^EP=c:oB]EQ=Y;00001O0O10001O0000001N10000O2N2O2O10102N2kE`Bd8a=fFWCV9^>M3NM3N2M3M4L5K4K6K7I3M2N2M2O1O1N102N2N101N2O1N2O0O1O0O1O1O1O1O1O1O1O1O001O1O1O1N2N2O1N2N3M2M3N2N2N1O2M2N3N1M3M4L3[Oe0M3M4M2N2N3M2M3M3L4L5K4K5L5K4M3M3N2N2M3N2N2O1N2O1O1N2O1O1O2M2N2N2N2O2N1OSFlGP3S8gLYHW3f7gL^HY3a7fLaHY3_7gLbHX3]7hLeHV3\\\\7jLeHU3[7jLgHU3Y7kLhHT3X7kLjHT3V7lLkHR3V7nLkHQ3U7nLnHP3Q7QMPIn2P7QMRIn2n6RMSIm2m6SMTIk2m6UMSIk2m6UMTIj2l6UMUIk2l6TMUIk2k6UMUIk2k6UMVIi2k6WMUIi2k6VMVIj2j6VMWIi2i6WMWIi2i6WMWIh2j6XMUIi2k6WMUIi2k6WMUIi2k6XMTIh2l6XMSIh2n6XMRIh2o6WMQIi2o6WMQIi2o6WMPIj2P7VMPIi2Q7WMoHi2R7fIcGi0Z1b5S7bIjGi0R1e5T7^IPHk0l0f5V7[ISHm0f0i5W7^HcGXOd0_2b0k5X7XHQHQO9l2<l5Z7SH[IP2[Ol5\\\\7PH]IS2VOn5^7kG_IX2ROm5`7hGaIZ2nNn5b7YGoIi2_Nn5d:QJ[En5h:PJXEP6i:oIWEP6k:oIUEP6m:PJSEn5o:QJQEm5R;RJnDm5T;RJlDm5U;TJjDk5X;TJhDk5Y;UJgDj5[
;VJeDh5[;YJfDf5Z;[JeDe5[;[JfDc5Z;_JfD`5Z;aJeD_5Z;bJgD]5Y;dJgD[5X;gJgDX5Y;iJhDV5X;kJgDU5X;lJiDS5W;mJjDQ5W;nJkDQ5V;mJmDP5T;oJnDP5S;nJPEo4Q;PKREn4n:QKUEl4m:RKVEl4j:SK[Eh4f:XK^ETNXNW5[<cLbEnM^NW5Q<kLdEgMeNW5f;SMjE^MiNY5\\\\;[M`G_2_8bMgGW2W8mMmGm1n7ZNTHb1g7dN[HY1l6BVI;f6J[I3i5dKVF^4R4Lg5RLjET4a4He5Q1\\\\JmNf5R1[JlN\\\\6>fI_O]6>fI_O]6?eI]O_6a0dIZO`6c0dIXO`6f0cIUOb6h0aIROe6k0]IROg6SMTDR3X5G^76dHGa75aHHe71_HKj7L[HOm7IYH1Q8CYH4j8bNgGd0We\\\\3\"}}, {\"image_id\": 132, \"category_id\": 1, \"bbox\": [77.0, 136.0, 190.0, 96.0], \"score\": 0.9999998807907104, \"association_id\": 1, \"light\": [-1.73689866065979, -2.01503324508667, 1.672275424003601, 1.9128930568695068], \"segmentation\": {\"size\": [428, 535], \"counts\": \"h\\\\P13X=1O2O0O2N1O2O000O101N10000O101O00000O1000001O0000000O10001O00000000001N100000001O0000000RO4\\\\DL`;9_DG`;;`DD_;>`DB`;U1O1M3O100O1O1N2N2O1O1N2N200O10[ORElNn:i10ZOREnNn:R1REnNn:R1REnNm:i1000ZOSEmNl:T1TElNk:U1UEkNi:X1WEgNc:S20100O10_O`EVN`:k1aESN^:Q2`EnM`:_20000O10G`EfM`:Z2aEeM_:\\\\2`EdM`:\\\\2`EdM`:\\\\2`EcMa:]2_EcMa:]2_EcM`:^2`EbM`:^2aEaM_:^2bEbM^:^2aEcM_:]2aEcM_:]2aEcM^:^2bEbM^:^2aEcM_:\\\\2bEdM^:\\\\2bEdM]:]2cEcM]:\\\\2cEeM]:[2cEeM]:[2bEfM^:Y2cEgM]:X2cEiM]:W2bEjM^:U2cEkM]:U2bElM^:S2cEmM]:S2bEnM^:Q2cEnM^:R2bEnM^:Q2cEoM]:Q2bEPN]:Q2cEoM]:P2dEPN\\\\:P2dEPN\\\\:P2dEPN\\\\:P2cEQN]:n1dERN\\\\:n1dERN\\\\:n1dERN\\\\:n1dERN\\\\:m1eESN[:m1eESN[:m1eESN[:m1eESN[:m1eESN[:m1eESN[:m1eESN[:l1gESNY:m1gESNY:m1gESNY:m1gESNY:m1gESNY:m1hERNX:n1hERNX:n1hERNX:m1jERNV:n1jERNW:l1jETNV:l1jETNV:l1kESNU:m1kESNU:m1kESNU:m1kESNV:l1jETNV:l1jETNW:l1iESNW:m1iESNW:n1hERNY:m1gESNY:m1gESNY:n1fERN[:m1eESN]:n1aEQN`:R2\\\\EnMe:R2ZEnMf:S2YEmMh:Z23N2HTEkMn:T2501O2N1O100N2O1O1N2N101N2M3I6O2N101N2N1O101N1OYk_3\"}}, {\"image_id\": 132, \"category_id\": 1, \"bbox\": [39.0, 138.0, 115.0, 65.0], \"score\": 0.9999991655349731, \"association_id\": 4, \"light\": [-0.6496228575706482, -3.9315378665924072, 0.591788113117218, 3.7057511806488037], \"segmentation\": {\"size\": [428, 535], \"counts\": 
\"i_`02Z=0O2O001N10001O0O100000000O1000001O0000000000000000000000000O100000000@KiC5S<0lC0R<4lCLS<7lCHR<<lCDR<`0lC@S<b0mC]OS<d0lC\\\\OT<d0mC[OR<f0nCZOQ<h0oCWOP<j0oCWOP<j0PDVOQ<IPDm0OZO[;MXEi0]OZOZ;OgDJ?l0@[OZ;2jDJNNOU1Y;SOiDLMN0S1Z;TOgDY1Y;=00000IfD[N[;k10KeDYN[;g1eDYN[;g1eDYN[;g1eDYN[;g1eDYN[;l1000KeDYN[;g1eDYN[;g1eDYN[;Y1eDPO0G[;Y1fDTOZ;l0fDoNOG\\\\;g1eDYN[;g1fDXNZ;h1500O1000000O10000O100O1N2_O[DoNO101g;n0a0N2N2N2N2M3N2N2O10000O1M3O100N2O1O10000O10001N2N3M_So4\"}}, {\"image_id\": 132, \"category_id\": 1, \"bbox\": [14.0, 134.0, 86.0, 54.0], \"score\": 0.9999793171882629, \"association_id\": 3, \"light\": [-1.5224392414093018, -3.427332639694214, 1.4665888547897339, 3.3514864444732666], \"segmentation\": {\"size\": [428, 535], \"counts\": \"nP63Y=1N1000000O10001O000000001O0000000000000_OJ^C1:5X<1eCO[<1eCOZ<2fCNY<4fCLY<7eCIY<:fCFX<=gCCW<a0hC^OW<d0iC[OW<f0jCXOU<i0kCWOU<j0kCUOc;0hDm0DSOd;1fDn0FoNe;4cDn0HnNe;5_DP1LkNe;7\\\\Do0OjNe;a1[D_Ne;a13O10000000000O100CVDXOj;h0VDWOk;U100VDcNe;]1ZDeNe;l0ZDQO37OLd;k0aDXOJNe;Y1[DgNe;Y1[DgNe;Y1[DfNg;Y1YDgNg;Y1600001O001O1TDbNh;e0[D2d;JcD4\\\\;GlD6U;IlD6T;JmDD\\\\O?g;MoD3Q;LWEMi:2]EId:6]EGe:8X100N2O1O2O000O2O00iee5\"}}, {\"image_id\": 132, \"category_id\": 1, \"bbox\": [0.0, 132.0, 57.0, 45.0], \"score\": 0.9951136112213135, \"association_id\": 2, \"light\": [-1.7798089981079102, -2.811070203781128, 1.7044246196746826, 2.751169204711914], \"segmentation\": {\"size\": [428, 535], \"counts\": \"]52Y=2O0O2O00O10000000000000iNMUE3k:MUE3k:MUE3k:MUE3k:MeD7BLi;NbD:CHk;NbD=@En;NaD?@Co;NaD>ADn;NaD>ADn;N_D`0CBn;N_D`0DAm;0RDM9b0HAm;4YD<J@m;5WD=K^On;7UD<M\\\\On;9SD<O[On;:RD;1ZOm;;QD<2YOm;U1SDkNm;T1TDlNl;9QDF1b06_Oh;8UDGM0O=6Ei;8YD2OFi;5ZD5MEj;6XD6NDj;5YD8P<HPD8P<FRD:n;FRD:n;FRD;m;DUDNC8Y<JUDMB86Hl;0^D<DER<3RD8LER<3RD8KFS<2RD9IFU<1oC=KBV<1oC=KBU<1QD>IAV<0QDe0o;ZOSD?HBU<NWD7MHk;0UfW6\"}}, {\"image_id\": 133, \"category_id\": 1, \"bbox\": [474.0, 330.0, 203.0, 209.0], \"score\": 1.0, \"association_id\": 4, \"light\": [-1.975501298904419, 
-2.4638373851776123, 1.9319266080856323, 2.337161064147949], \"segmentation\": {\"size\": [683, 782], \"counts\": \"Q`l9f1bc0c0^O9H5K3M3M3X_ORMn>S3m@oLQ?^3b@fLZ?a3_@aL_?c3\\\\@_Lc?d3Z@^Le?e3X@[Lg?i3U@XLj?k3S@WLl?k3R@ULm?o3o_OSLP`0d401O0O10001O0000001O001OVOR@WLn?h3S@YLk?g3V@YLj?f3V@[Lj?e3V@\\\\Li?c3X@]Lg?d3Y@\\\\Lg?d3Y@]Le?d3[@]Ld?b3]@^Lb?c3^@]Lb?c3^@]La?d3_@]L`?c3`@]L`?c3a@]L^?c3b@^L\\\\?b3e@^L[?b3e@_LZ?a3f@_LZ?a3f@_LZ?a3f@`LY?`3g@`LY?`3g@aLX?`3g@`LY?`3g@aLX?_3h@aLX?_3i@`LV?a3j@`LU?a3j@`LU?a3j@`LU?a3j@_LV?c3h@^LW?d3h@[LX?l40001OnNUAdKk>^4WA_Kh>d4UA\\\\Kk>k4o@UKP?o4l@QKT?Q5k@nJU?S5k@lJU?T5m@kJR?U5o@jJQ?V5PAiJP?W5PAjJo>W5PAiJP?W5PAiJP?W5QAiJn>W5RAiJn>W5RAiJn>W5RAjJm>V5TAiJl>W5TAiJl>W5SAkJl>U5TAkJl>U5TAkJl>T5TAnJk>R5UAnJk>Q5UAPKk>o4VARKi>m4XASKh>l4XAUKh>j4XAXKg>g4TA_Kl>`50nNSAgKl>V4WAjKi>S4YAnKg>P4[AQLd>n3\\\\ASLd>l3ZAWLf>i3WAZLi>e3XA\\\\Lg>d3XA]Lh>b3YA^Lg>a3ZA_Lf>`3ZAaLf>^3[AbLe>^3[AcLd>]3[AdLe>\\\\3[AdLe>\\\\3[AdLe>[3\\\\AeLd>[3\\\\AeLd>[3\\\\AeLd>[3\\\\AeLd>[3\\\\AeLe>Z3[AfLe>Z3[AfLe>Z3[AfLe>Z3[AfLe>Z3[AfLe>Z3[AfLe>Z3\\\\AeLd>[3\\\\AeLd>[3\\\\AeLd>[3\\\\AeLd>[3\\\\AeLd>[3\\\\AeLd>[3\\\\AeLd>[3]AdLc>\\\\3]AdLc>\\\\3]AcLe>\\\\3[AdLe>\\\\3\\\\AcLd>]3\\\\AcLd>]3]AbLc>^3]AaLe>^3\\\\AaLd>_3\\\\AaLd>`3[A`Le>`3[A`Le>`3\\\\A_Ld>a3\\\\A_Ld>b3[A]Lg>b3YA^Lg>c3YA\\\\Lg>e3XA[Lh>f3WAZLj>e3VA[Lj>f3VAXLk>i3TAWLl>j3TAULl>k3TAULl>l3SATLn>l3RASLn>n3QAQLP?P4PAoKP?R4o@nKQ?S4o@lKQ?U4n@kKR?U4o@jKQ?W4n@iKR?W4n@hKT?X4k@hKU?X4k@hKU?Y4k@fKU?Z4k@eKV?\\\\4i@dKW?\\\\4i@dKW?\\\\4i@dKW?\\\\4i@dKW?W510001N1UOf@eKZ?Z4g@fKZ?W4h@iKX?U4j@jKX?S4j@mKW?n3m@RLT?j3o@ULS?h3o@XLR?e3PA[LQ?a3SA^Lo>\\\\3UAdLT?AW@o2k0_Me?V2]@jMf?Q2\\\\@oMf?l1^@SNd?h1_@WNd?d1_@\\\\Ne?]1_@bNe?Q1e@nN_??QA_OU?0WA0n>GWA8na000O4L6Ho`U2\"}}, {\"image_id\": 133, \"category_id\": 1, \"bbox\": [155.0, 348.0, 143.0, 101.0], \"score\": 1.0, \"association_id\": 1, \"light\": [-0.9243478178977966, -3.9269468784332275, 0.8137478232383728, 3.7648026943206787], \"segmentation\": {\"size\": [683, 782], \"counts\": 
\"eiW32Td00n\\\\O2ob08i\\\\OIQc0b0i\\\\O^OTc0j0g\\\\OVOXc0U1^\\\\OlN_c0X1_\\\\OiN\\\\c0\\\\1c\\\\OdN\\\\c0^1c\\\\OcN[c0g1N2N3N1O2O0O101N1O2L3O101NBX]O[Nhb0e1X]O\\\\Nfb0e1Z]O[Nfb0e1Z]O\\\\Ndb0e1\\\\]O[Ndb0e1\\\\]O\\\\Nbb0e1^]O[Nab0f1_]OZNab0f1_]O[N_b0f1a]O[N^b0e1b]O[N^b0Y201O0O2O000000^Oe]OUNYb0m1g]ORNYb0`200O10Cm]ObMSb0`2l]O_MSb0d2k]O]MTb0d2l]O[MTb0f2l]OYMTb0h2l]OWMTb0i2m]OVMSb0j2m]OVMSb0j2m]OVMSb0k2l]OUMTb0k2m]OTMSb0l2m]OTMRb0m2n]OSMRb0m2n]OSMRb0m2n]OSMRb0m2n]OSMRb0m2m]OTMSb0l2m]OTMSb0l2m]OTMTb0j2l]OWMTb0i2l]OWMTb0i2k]OXMUb0h2k]OXMTb0h2m]OXMSb0g2m]OZMSb0e2n]O[MRb0d2n]O]MRb0b2o]O^MQb0`2P^OaMPb0]2P^OeMPb0Z2l]OkMTb0U2k]OlMUb0S2l]OmMTb0R2l]OoMTb0d20\\\\Ok]ORNUb0n1k]ORNUb0m1k]OTNUb0l1k]OTNUb0l1k]OSNVb0l1j]OUNVb0k1j]OUNVb0j1k]OVNUb0j1k]OVNUb0j1k]OVNUb0j1k]OVNUb0j1l]OUNTb0j1m]OUNTb0k1l]OUNTb0k1l]OUNTb0k1m]OTNSb0l1m]OTNSb0l1m]OTNSb0l1m]OSNTb0m1m]ORNSb0n1m]ORNSb0n1m]ORNSb0n1m]ORNTb0m1l]OSNTb0m1l]OSNTb0m1l]OSNTb0m1m]ORNSb0n1m]ORNTb0m1l]ORNUb0o1j]OQNVb0o1k]OPNUb0P2k]OPNVb0o1j]OQNVb0P2i]OPNWb0P2j]OoMVb0R2i]OnMWb0R2i]OmMYb0S2g]OlMYb0T2g]OlMYb0U2f]OkMZb0U2g]OjMZb0W2d]OhM^b0Y2`]OgMab0Y2^]OgMcb0Y2]]OfMdb0Y2\\\\]OgMfb0W2Z]OiMhb0V2X]OhMlb0U2T]OkMnb0W22LP]OjMRc0U24N2N2M3N002L4L5]Ob0Ah[OB^d07`0K[^R:\"}}, {\"image_id\": 133, \"category_id\": 1, \"bbox\": [2.0, 332.0, 131.0, 142.0], \"score\": 1.0, \"association_id\": 2, \"light\": [-1.6280016899108887, -2.5779125690460205, 1.5333406925201416, 2.4936516284942627], \"segmentation\": {\"size\": [683, 782], \"counts\": 
\"lh1=nd01N3N1O1N2O0000001N10000N2000000O010jL;f@EY?=f@CY?a0d@_O[?i0V_O_Nh0h0P`0c1g_O^NX`0e1g_OZNY`0g1f_OYNY`0j1e_OVNZ`0Q2a_OnM^`0U2a_OjM^`0X2h_OaMX`0`2h_O_MW`0U3W_OjLi`0Y3T_OgLk`0[3T_OeLl`0\\\\3T_OcLl`0]3T_OcLl`0^3S_ObLm`0^3S_OaLn`0_3R_OaLn`0_3R_OaLm`0_3S_ObLm`0^3S_ObLm`0]3T_OcLl`0]3T_OcLl`0]3T_OcLl`0]3S_OdLm`0\\\\3S_OdLm`0[3S_OfLl`0[3T_OdLm`0[3S_OfLm`0Z3S_OfLm`0Y3S_OhLm`0X3R_OiLn`0V3R_OkLn`0T3R_OmLn`0S3Q_OnLo`0g300000O1000000000001O0000O010000000000O1000000YOn^OUMRa0k2n^OUMRa0k2o^OTMQa0l2o^OTMQa0l2o^OTMQa0l2o^OSMRa0m2n^OSMSa0k2n^OUMRa0k2n^OUMRa0k2n^OUMRa0k2n^OUMRa0j2o^OVMQa0j2P_OTMQa0l2o^OTMQa0k2P_OUMPa0k2P_OUMPa0k2P_OUMPa0k2P_OUMPa0k2Q_OTMPa0k2P_OTMQa0l2o^OTMQa0l2P_OSMPa0m2P_OSMPa0m2P_ORMQa0n2o^ORMQa0o2o^OPMRa0o2n^OQMRa0P3m^OoLTa0Q3l^OoLTa0R3k^OnLVa0Q3k^OmLVa0T3i^OlLWa0U3h^OkLXa0W3f^OhL\\\\a0X3d^OgL\\\\a0[3c^OdL]a0]3b^OcL_a0c3101N2L4O2O2M6J2L3M4B`0H6L6lNT]O\\\\O\\\\c0Cja`=\"}}, {\"image_id\": 133, \"category_id\": 1, \"bbox\": [673.0, 373.0, 95.0, 131.0], \"score\": 0.9999997615814209, \"association_id\": 3, \"light\": [-1.847043752670288, -2.0924441814422607, 1.7637934684753418, 1.9100244045257568], \"segmentation\": {\"size\": [683, 782], \"counts\": \"e[Q>2Xe02O0\\\\MNa_O1WN1Vb00a_O2XNNUb04__O0\\\\NLTb07[_O1_NIVb06Y_O4`NFWb06R^OHb0`0SOB\\\\b03f]O;=8@ZO_b0o0k]OIFYOja01_^OW2HhMga04a^OS2GjMga08Z^OR2OfMea0P3[^OPM`a0U3_^OlLaa0T3_^OlLaa0U3^^OkLba0U3]^OmLba0S3^^OmLaa0S3`^OmL`a0S3_^OnL`a0S3`^OmL`a0S3`^OnL_a0_30001O00001O000000Ac^OPM]a0P3c^OQM\\\\a0o2d^OQM\\\\a0o2d^OQM\\\\a0o2d^OQM\\\\a0o2d^OQM\\\\a0o2d^OQM\\\\a0o2d^OQM\\\\a0o2d^OQM\\\\a0o2d^OQM\\\\a0o2d^ORM[a0o2e^OPM[a0P3e^OPM[a0P3f^OPMYa0Q3g^OnLYa0S3g^OmLXa0U3g^OjLYa0Y3g^OeLXa0h3O00000001O1O1O001O001O2N1O1O001O1C^^OnLda0o2\\\\^OSMda0Z31O1[O]^O`Mia0W2Z^OiMha0S2Z^OmMga0V1^^OcN16ha0o0P_OQOQa0Gi]Oo0W1ZOQa0Ej]Oo0W1[Oo`0El]Oo0U1\\\\Oo`0El]Oo0V1[On`0Gj]Oo0X1YOo`0m0R_OSOo`0l0R_OSOSa08c]OI\\\\1NTa0Hd]OO39V10ja0KY^O4`c0O001N1000001O1M[k8\"}}, {\"image_id\": 133, \"category_id\": 1, \"bbox\": [309.0, 350.0, 52.0, 94.0], 
\"score\": 0.9999170899391174, \"association_id\": 5, \"light\": [-1.4819872379302979, -2.1245267391204834, 1.4484663009643555, 2.000270128250122], \"segmentation\": {\"size\": [683, 782], \"counts\": \"o^^64Re0:J2N100j\\\\O@`a0b0^^O_Oaa0d0]^O]Oaa0e0^^O[Oaa0i0\\\\^OXOba0l0[^OTOea0R1T^OPOja0R1U^OnNka0T1S^OlNla0c1f]O]NZb0P2Z]OPNdb0S2Z]OmMeb0T2[]OlMeb0l0\\\\]O3NQOeb0l0b]OOHVOeb0k0d]OOGWOcb0i0`]O8MoNbb0g0c]O;LmN`b0i0b]O<OjN[b0^2e]ObM[b0^2f]OaMYb0`2f]OaMZb0_2f]OaMZb0^2e]OdM[b0\\\\2e]OdM[b0\\\\2e]OdM[b0\\\\2f]OcMZb0^2g]O`MYb0a2h]O]MXb0d27001O1O1000O101N2N3mM\\\\]OY1fb0dN[]O[1gb0cN\\\\]O[1\\\\c0M3M3E;K5TOg[O?ld0BR]h8\"}}, {\"image_id\": 133, \"category_id\": 1, \"bbox\": [362.0, 330.0, 114.0, 81.0], \"score\": 0.987920880317688, \"association_id\": 6, \"light\": [-1.749496340751648, -2.3734490871429443, 1.6950902938842773, 2.195841073989868], \"segmentation\": {\"size\": [683, 782], \"counts\": \"ija72Xe02N101O000000001O00000SO0_\\\\O0kb0Ka\\\\O50L1==Gjb0Kj\\\\O`0M0>Eib0Nj\\\\OS1;POib0Y1V]OgNjb0:m\\\\O:8]Ojb04[]O8KDjb03_]O6GGib00n\\\\OIc0`0EGjb00e]O:@Fkb0Of]O<^OFlb0Kh]O?\\\\OFlb0Ji]Oa0ZOEob0Gh]Oe0XODoc0=P\\\\OCPd0?n[OAgc0H^\\\\Oh0K@gc0I]\\\\Oh0K_Ohc0NX\\\\Oc00@fc0OY\\\\Oa01Aac0E`\\\\OU1OVO`c0P1_\\\\OQO`c0o0`\\\\OQO_c0_1O1O010O000000VNd\\\\Of1\\\\c0XNe\\\\OY1OSO`c0n0_\\\\ORObc0n0_\\\\OPOac0Q1_\\\\OnNac0S1_\\\\OlNbc0S1^\\\\OmNbc0U1^\\\\OiNcc0Y1\\\\\\\\OeNdc0]1]\\\\O_Ndc0c121N10O10O01O001O1O1O1O1O1O001O00000000000000O100O010O10000N2N1010O01N2O1O010O10000O2L3O1O1L4O100000^Ok\\\\OnNUc0P1P]OmNQc0R1Q]OlNob0S1U]OiNmb0e0i\\\\O]OQd0a0>K5M3L3N3M6JXh[6\"}}, {\"image_id\": 133, \"category_id\": 1, \"bbox\": [121.0, 326.0, 57.0, 83.0], \"score\": 0.9475347995758057, \"association_id\": 7, \"light\": [-2.0679564476013184, -2.3939075469970703, 1.935437798500061, 2.1635429859161377], \"segmentation\": {\"size\": [683, 782], \"counts\": 
\"QQa2b0hd04L2N3M2O2M3N2h[OnNSd0V1N2O4i\\\\OiNRb0X1l]OjNSb0Z1i]OfNWb0Z1i]OfNVb0\\\\1i]OdNWb0]1h]OdNWb0]1h]OcNXb0]1h]OcNXb0]1h]OcNXb0]1h]ObNYb0_1f]OaNZb0_1f]OaNZb0_1f]OaNZb0_1f]OaNZb0_1f]OaNZb0_1f]OaNZb0^1g]ObNYb0^1g]ObNXb0_1h]OaNXb0^1i]ObNWb0^1i]ObNWb0]1j]OcNVb0]1j]OcNVb0\\\\1k]OdNVb0[1j]OeNVb0[1j]OeNVb0Z1k]OfNUb0Z1k]OfNVb0Z1j]OdNWb0]1j]O`NWb0_1k0N20010M2M4O1N10002N1N2O1L5L3K9@Z[OJiUc<\"}}, {\"image_id\": 134, \"category_id\": 1, \"bbox\": [445.0, 626.0, 266.0, 269.0], \"score\": 0.9943937063217163, \"association_id\": 2, \"light\": [-1.7264671325683594, -2.5995209217071533, 1.4819421768188477, 2.4385006427764893], \"segmentation\": {\"size\": [1033, 1198], \"counts\": \"_gQ>1TP1?SPONin06UQO3cn0M\\\\QO8`n0FbQO=Zn0DfQO?Rn0GmQO;nm0HQRO;gm0LVRO7im0V1M2O2N1O2N1N2NQNeROT1Zm0n000O1`NiMlTOZ2oj0PNcTOX2[k0\\\\1N200O1O1N2N2N2N2N2O1O1O100N2M3L4M3N2O1O1O1O1O0O2M3K5M3O1O1O1O1O1O001L3K6N1O10001O001O1M3H8L3O2M3O1O1N2N1N3M3N2N2O1N2O1O100O1O100O01000O1000000000000000000000001O00000000000000000000000000000000000000000000000000000000000000000000O1000000000000000000000000000000O10000000000O1000000O10000O10000001O0000000000001O00001O0000001O00001O1O001O1O2N001O1O0001O01O00000000000001O000O2O00001N10000O2O000O100O10000O1000O1000O1000O11O001N2O1O001O1O001O001O2N1O8Hd0UIjWO[4[j0G4L3M3M2M5L7IR1mNb0_O4K2O1N2N2N3L3M:ET_Z?\"}}, {\"image_id\": 134, \"category_id\": 1, \"bbox\": [1.0, 357.0, 273.0, 373.0], \"score\": 0.9399762153625488, \"association_id\": 4, \"light\": [-2.927389621734619, -1.8122938871383667, 2.83038067817688, 1.6247167587280273], \"segmentation\": {\"size\": [1033, 1198], \"counts\": 
\"Sc1[1gn0h0\\\\O7J5K4L2N3L3L4L4N3M2aSOjLC10Nik0X3[TOQNJgNUk0Y3mTOYNQk0h1nTOYNQk0h1nTOYNQk0f1QUOZNnj0f1RUO[Nmj0e1TUO[Nkj0e1TUO]Njj0d1VUO]Nij0c1WUO^Nij0a1WUO`Nhj0`1YUO`Nfj0`1[UO`Nej0_1[UOcNdj0\\\\1]UOdNbj0\\\\1_UOdNaj0[1_UOfNaj0Y1`UOgN`j0W1bUOiN^j0V1cUOjN]j0U1cUOlN]j0S1dUOmN[j0S1fUOmNZj0S1eUOmN\\\\j0R1eUOnN[j0R1eUOnN[j0Q1eUOPO[j0P1eUOPO[j0o0fUOQOZj0n0fUOSOZj0m0fUOSOZj0l0gUOSOZj0l0fUOUOZj0k0fUOUOZj0j0fUOVO[j0j0eUOVOZj0j0fUOWOZj0h0gUOXOYj0h0fUOXO[j0g0fUOYOZj0g0fUOYOYj0h0gUOWOZj0h0fUOYOZj0g0fUOYOZj0f0gUOYOYj0h0fUOYOYj0h0gUOXOXj0i0hUOWOWj0j0iUOUOWj0l0hUOUOVj0m0jUOSOUj0n0jUOROWj0n0iUOROVj0o0iUOQOWj0P1iUOoNXj0Q1hUOnNXj0S1gUOlNZj0U1fUOkNYj0V1gUOjNXj0V1iUOjNVj0W1jUOhNVj0Y1jUOgNTj0[1lUOeNSj0[1nUOeNQj0\\\\1oUOcNQj0^1oUOaNRj0_1nUO`NRj0`1oUO_NQj0b1nUO\\\\NTj0e1lUOYNTj0i1lUOVNTj0k1kUOVNSj0k1nUOTNQj0n1oUORNPj0o1oUOQNQj0P2oUOPNPj0P2QVOPNni0Q2RVOnMni0R2SVOnMli0R2UVOnMii0S2XVOlMhi0T2XVOmMfi0U2ZVOjMfi0V2[VOjMci0X2\\\\VOhMei0W2\\\\VOhMdi0Y2[VOhMdi0Y2\\\\VOfMei0Z2ZVOgMei0Z2[VOfMdi0[2\\\\VOdMei0\\\\2[VOdMdi0]2\\\\VOcMci0^2]VOaMci0`2]VO`Mbi0a2^VO^Mbi0c2^VO]Mai0c2_VO]Mai0d2_VO\\\\Mai0c2`VO\\\\M`i0d2`VO]M`i0b2aVO]M_i0d2`VO]M`i0b2`VO^M`i0b2`VO_M`i0`2`VOaM`i0^2aVObM^i0^2cVOaM^i0^2bVOcM^i0\\\\2cVOdM\\\\i0]2cVOdM]i0[2dVOdM\\\\i0]2cVOdM]i0[2cVOeM]i0\\\\2bVOeM^i0Z2aVOgM_i0Z2`VOgM_i0Y2bVOgM]i0Z2bVOfM^i0Z2cVOfM\\\\i0[2cVOeM]i0[2dVOeM[i0[2eVOeM\\\\i0[2dVOdM\\\\i0\\\\2eVOdMZi0\\\\2gVOcMYi0]2hVOcMXi0\\\\2hVOdMXi0\\\\2iVOdMVi0\\\\2kVOdMTi0]2lVObMTi0_2lVOaMSi0_2nVOaMQi0`2oVO`Moh0b2QWO]Mnh0d2SWO\\\\Mkh0f2UWOZMih0h2WWOXMhh0h2YWOWMfh0k2ZWOUMdh0m2\\\\WORMch0P3]WOPM`h0S3`WOmL\\\\h0V3eWOiLXh0[3hWOeLSh0`3mWO`Lng0e3RXO[Leg0n3[XORL_g0T4aXOlKce0eNT[Od5YOgKbe0jNmZOc5AbKce0oNfYOa6g0`Jbe0S6^ZOmIbe0S6^ZOmIae0U6^ZOjIce0V6^ZOiIbe0W6^ZOhIbe0Y6^ZOgIbe0Y6^ZOgIbe0Y6^ZOfIce0[6\\\\ZOeIde0[6\\\\ZOeIde0[6\\\\ZOdIde0]6]ZObIce0^6]ZObIce0^6]ZObIce0^6]ZOaIde0_6]ZO`Ice0`6^ZO_Ibe0a6^ZO^Ice0b6^ZO]Ibe0c6^ZO]Ibe0c6_ZO\\\\Iae0d6_ZO\\\\Iae0d6`ZO[I`e0e6`ZOZIae0g6_ZOXIae0h6_ZOXIae0h6`ZOWI`e0i6aZOUI`e0k6`ZOUI`e0k6aZOTI_e0m6`ZORIae0n6`ZOQIae
0o6^ZOQIbe0Q7]ZOnHce0T7\\\\ZOkHde0Y7YZOeHhe0_7gYOoGMb0\\\\f0S8aYOlG_f0W8_YOhGaf0\\\\8\\\\YOcGef0]8[YOaGgf0_8YYO`Ghf0_8ZYO_Ggf0a8ZYO]Gif0a8WYO]GQg0]8PYO`GUg0\\\\8lXOcGWg0[8jXOaGZg0^8;ZMZXOdLgg0X3_XOdLdg0V3aXOiL`g0R3eXOmL]g0n2fXOSM\\\\g0g2gXOYM\\\\g0a2fXO`M]g0[2eXOeM]g0V2fXOjM\\\\g0XOPXOi0e00^g0PORXOm0b02_g0lNRXOP1`05ag0eNSXOR1?8bg0_NTXOV1;;eg0VNXXO[15?gg0hM^XOg1La0[i0^OfVOa0[i0]OfVOc0[i0[OgVOd0Yi0\\\\OgVOd0Zi0ZOhVOe0Yi0ZOgVOf0Yi0ZOgVOg0Xi0XOjVOf0Wi0ZOjVOe0Wi0YOkVOf0Ui0YOmVOf0Si0YOoVOf0Ri0YOoVOf0Qi0YOQWOf0oh0XOSWOh0nh0VOTWOi0lh0WOTWOi0mh0UOUWOj0kh0UOWWOi0jh0WOXWOg0ih0XOXWOf0ih0YOZWOe0gh0ZO\\\\WOb0eh0]O^WO`0ch0@_WO>bh0A`WO<ah0CaWO;`h0EbWO8`h0FcWO6_h0JeWOO^h00gWOI\\\\h06l3O1N3M2O1O1O1O2O1N1O3M2NRdSm0\"}}, {\"image_id\": 134, \"category_id\": 1, \"bbox\": [837.0, 589.0, 327.0, 271.0], \"score\": 0.9999818205833435, \"association_id\": 1, \"light\": [-1.382697582244873, -2.814553737640381, 1.2644426822662354, 2.683903932571411], \"segmentation\": {\"size\": [1033, 1198], \"counts\": \"]n\\\\j09oo02N3M2O0O2O1O1N3N1N2oRO\\\\O[j0f0XUOIdj08ZUOKcj07ZUONcj03\\\\UONbj04\\\\UOObj02\\\\UO0cj00\\\\UO3bj0N[UO5cj0M[UO5dj0LZUO7dj0MWUO5hj0a0aTOA^k0b0_TO_O`k0d0\\\\TO^Ock0d0[TO\\\\Oek0e0ZTO\\\\Odk0f0[TO[Odk0e0\\\\TO\\\\Ock0d0\\\\TO^Ock0b0]TO_Obk0b0]TOA`k0?`TOE\\\\k0;dTOHYk08gTOIXk07hTOJWk06jTOJUk06kTOLSk05mTOLQk04oTO1lj0OUUO:aj0F_UO>]j0BdUO?Zj0AfUOa0Xj0_OiUOa0Vj0_OjUOb0Uj0^OlUOb0Sj0^OmUOb0Sj0^OmUOc0Rj0]OnUOe0Pj0[OPVOf0oi0ZOQVOi0li0XOSVOj0ki0VOUVOl0ii0UOVVOl0ii0UOVVOl0ii0UOVVOl0ii0UOWVOj0hi0YOVVOh0ii0YOWVOf0ii0[OWVOe0hi0\\\\OXVOd0gi0\\\\OYVOd0gi0]OYVOc0fi0^OYVOc0fi0^OZVOb0ei0@YVOa0fi0@ZVO?ei0EYVO;fi0V3N1O3M4SWO\\\\Img0b7M2O2N1O1O1N2O1O1N2O1O1O0O2O0000000O10000000001O00O10O1000O100O10000O10000[MRXOnLog0S3RXOkLng0U3TXOUKOf0mg0W4UXOkJ<e0_g0b4[XOXJd0P1Qg0k4[YOTKef0o4YYOPKgf0R5WYOnJif0T5UYOlJkf0V5SYOjJmf0X5PYOhJQg0Y5mXOhJSg0X5lXOiJTg0V5kXOlJUg0S5iXOPKWg0P5gXORKYg0m4WXOlI6X1cg0k4WXOoI4W1eg0j4VXOQJ2W1hg0g4UXOVJ0T1kg0X5PXOkJPh0g60O101O000O101O00001O001O0bMjWOiLWh0d50aMiWOlLXh0b50`MhWOoLXh0a51O000
_MgWOQMZh0o2fWOQM[h0^50O100`MdWOSM]h0l2cWOTM]h0l2cWOSM_h0l2aWOTM_h0l2aWOTM`h0Z51O1O001N2O00001O001O1O1O1O1O1N101O00001O001O1O1O1O1N2O001O00001O001N101O1O1]McVOXN^i0g1bVOXN_i0h1aVOXN`i0g1`VOYN`i0g1`VOXNbi0g1^VOYNbi0g1^VOYNbi0g1^VOXNdi0g1\\\\VOYNei0f1[VOZNei0f1[VOYNgi0f1YVOZNhi0e1XVO[Nii0R41O001N1dMTVO[Nli0e1TVO[Nmi0d1SVO\\\\Nmi0d1SVO\\\\Nni0c1RVO]Nni0c1SVO\\\\Nmi0d1SVO[Noi0d1RVO[Nni0e1RVO[Noi0d1RVO[Noi0d1QVO\\\\Noi0d1QVO\\\\NPj0c1QVO\\\\NPj0b1QVO^Noi0b1QVO^NPj0a1PVO_NQj0`1oUO`NQj0`1PVO_NQj0`1oUO`NRj0_1nUOaNRj0_1nUOaNSj0^1nUOaNRj0^1oUObNRj0]1nUOdNRj0[1nUOeNRj0[1oUOdNRj0Z1oUOfNQj0Z1PVOeNQj0Z1PVOeNPj0Z1RVOeNni0[1SVOeNmi0Z1UVOdNki0[1WVOdNii0\\\\1XVOcNii0[1ZVOdNei0\\\\1\\\\VOcNei0[1\\\\VOeNdi0[1]VOeNci0Y1`VOeNai0Z1`VOfNai0X1aVOfN`i0Y1cVO\\\\MoNi0_j0i1gVOUMVOl0Rj0o1^WOmMch0R2`WOkM`h0U2aWOjM`h0U2bWOiM_h0V2aWOjM_h0V2bWOiM`h0U2aWOjM`h0U2aWOiMah0U2aWOjMah0T2`WOjMbh0U2_WOiMch0V2^WOhMdh0W2\\\\WOhMfh0W2[WOgMfh0Y2[WOeMgh0Z2ZWOdMgh0]2c2O1O100O2N10YROPNSm0Q2hROUNWm0_2O100USOSMXl0e3KO010O100O1O1O100O001O001\\\\N[SOBfl0:^SOEcl07aSOH`l06bSOI_l05dSOI]l05eSOK[l04fSOK[l03hSOKXl05hSOKYl03iSOLXl03iSOLWl03jSOMWl02jSOMWl01kSONVl00lSOOUl0OnSOORl00PTOOQl0OQTO0Pl0OQTO0Pl0NRTO1ok0MRTO3nk0MSTO1ok0NQTO2Pl0LRTO3ok0KTTO2nk0LWTO0kk0K]TOOj^R1\"}}, {\"image_id\": 134, \"category_id\": 1, \"bbox\": [1024.0, 285.0, 166.0, 228.0], \"score\": 0.999854326248169, \"association_id\": 3, \"light\": [-1.2181744575500488, -2.228848457336426, 1.107675552368164, 2.0616276264190674], \"segmentation\": {\"size\": [1033, 1198], \"counts\": 
\"`_YP14TP1101N101O001N2O00000000001O0000000000000000000000000000001O001N100001O01cN1^ROO`m03`ROM_m04aROL^m05bROK[m09dROGZm0;fROEYm0=fROCYm0>gROBWm0`0iRO@Um0c0jRO^OSm0f0kROZORm0j0mROVOol0o0QSOPOil0Y1TSOgNgl0a1WSO^Ncl0i1\\\\SOWNZl0T2eSOlMPl0`2oSO`Mhk0h2YTOXM]k0Q3dTOoLWk0U3jTOjLQk0[3PUOeLfj0c3\\\\UO]L_j0f3cUOZLXj0j3iUOVLPj0P4PVOQLli0R4UVOnKii0S4XVOmKfi0U4ZVOjKei0W4\\\\VOiK_i0[4bVOeKYi0`4gVO`KVi0c4jVO]KRi0f4oVOZKlh0k4TWOUKih0n4WWORKgh0Q5XWOoJfh0S5ZWOmJeh0U5ZWOkJeh0W5ZWOhJfh0Y5ZWOgJeh0Z5[WOfJdh0[5\\\\WOfJch0[5\\\\WOeJdh0\\\\5[WOdJeh0\\\\5[WOdJdh0]5\\\\WOcJdh0^5[WObJeh0^5[WObJfh0^5ZWOaJfh0`5YWO`Jgh0e5UWOZJlh0a61O2M3N1O1N3N2oNYVOYKNYOji0Y5cVOZK^i0a4iVO]KVi0a4RWOYKPi0d4VWOWKlh0f4SWO]Koh0^4SWObKQi0MmUOf1k0_NG3>LYj00]WO1XN0am0O^RO1cm0M]RO4em0J[RO6gm0HYRO8ln0000000000000000000O10000000001O00001O000000001O0000000000000000000000O1000001O00000XPOEbo0;]POFco0:]POFco0:\\\\POGdo09\\\\POGeo08ZPOIfo0=00001O0IYPOLho03ZPOKgo03ZPOMgo01ZPOOgo0OZPO1PP1OYb7\"}}, {\"image_id\": 135, \"category_id\": 1, \"bbox\": [42.0, 209.0, 671.0, 296.0], \"score\": 0.9999983906745911, \"association_id\": 1, \"light\": [-2.232517957687378, -1.7441082000732422, 2.1041007041931152, 1.6961593627929688], \"segmentation\": {\"size\": [544, 800], \"counts\": 
\"\\\\gf095H?9n>2_@H`0:o>4k@2S?0d@^O3c0X?6e@LZ?n0N100O101O0O1000000O100000001O0O1000000000000O2O000000000000POPA0P?0RANn>2SAMm>2TANm>1TANl>2UAMk>3VAMi>3WAMi>2XANh>2YAMg>3YAMg>3YAMg>3YAMg>3YAMg>3YAMg>3ZALf>3[AMe>3[AMe>3[AMf>2ZANf>2ZANf>2ZANg>0ZA1e>O[A1f>NZA2f>M[A3f>L[A3f>K[A5f>JZA7f>G[A9g>DZA<g>BZA>h>[O]Af0^?00001O01O000001O0O1000001O0000001N10001O00001O001O001N1010O01O001O0010O01O011O000O010O00001O00001O01O000000001O0000001O001O1O1O1O0000001O0000000000001N100000000O10001N100O1O2N1O101O0O100O1000000O010O1O100O10OO2N200O1000O100000O010000O101N10000O10000O101N100M3O10001N1000000O1000000O2O000O100O101N100O10001O0O100000001N10000O2O0O100O2O0O2N100O1O2O0O10000O101O0O1000000O100O2O000O100O101N100O10000O100O2O00000O0100000O10000000000O10000000000000O010000000000000O10O10000000O100000000000000000000O10O1000000000000000O1000000000000O1000O10000000000O1000000O10000O1000000O100000001N0100000000000000000000000O10000000000000O10000000000000O10000000O10000000000O10000000000000000O10000000000000000000000O2O0000000000000000000O1000000000000O1TOSAEm>9WAEj>9WAGi>9WAGi>8XAHh>8XAHh>8YAGg>8ZAGg>9YAGg>9YAGg>9YAGg>8ZAHf>8[AGe>9[AGf>8ZAHf>7[AIe>7\\\\AHd>8\\\\AHm8KmK=VKHb8>PLJ_KG_8d0oKEbKF^8i0oK_OdKH\\\\8l0nK\\\\OfKHZ8o0THaNk2h0hLGX8R1RHbNm2e0iLGX8T1mGdNQ3a0jLGW8W1hGfNV3<kLGV8\\\\1`GhN]35mLGV8Y2lJPNoLFT8\\\\2mJmMoLGT8^2kJkMQMFT8a2jJhMSMGS8b2iJgMTMGR8d2iJeMUMGR8e2hJdMVMHQ8e2hJcMUMKQ8d2jJ_MTMOR8c2iJ]MTM2R8c2iJ[MSM5S8a2jJXMRM9S8b2jJSMQM>T8h2dJhLUMd0V8Q3ZJYL^Mj0V8P3\\\\JTL[Mo0X8o2^J\\\\Mb5e2[J]Md5e2ZJ\\\\Mf5e2XJ\\\\Mh5e2VJ\\\\Mj5d2UJ]Mk5d2RJ^Mn5d2nI]MS6h2cI]M]6[3gGhMZ8_50O100O1O1O1O1O1N2N2N2YOg0K5H7L5M3N3M2L5E;L4I7E;N2M3N101N1000001O01O0001O01O01O0010O1001OO5LU1_EkIl7b6\\\\GiIc8[700O010O000001O00001O00001O001O000O100O1O1TLZJYLj5`3o3O1O1O1O1O2O0O2N1O1N3N1N2O2O0O1O10001O0000000000000O1001O010O1O2N1O3M3M2N2N2N1O1O1O2N2N5K:F=ZDgKk9b4jEbKT:f4bE]K]:g4]E\\\\Kb:b5O1N2O1O001N_NTFQLk93RFE6R12UOe92[FVOd0U1@D`9NbH_O]Nc0P9M]J4a5M_J3a5L`J4_5LcJ3]5MdJ2[5OgJNZ51hJNX52jJKV56mJFT5:nJDR5;PKBQ5?QK^OP5b0QK[OQ5e0PKYO
Q5g0oJXOR5h0oJUOS5k0a50000000000O100000000000000000000000O100000O100000000000000000001N2N1O2M4Jko]1\"}}, {\"image_id\": 136, \"category_id\": 1, \"bbox\": [615.0, 558.0, 255.0, 222.0], \"score\": 0.9984943866729736, \"association_id\": 3, \"light\": [-1.9655581712722778, -2.3183467388153076, 1.8028098344802856, 2.1377885341644287], \"segmentation\": {\"size\": [1200, 1600], \"counts\": \"[[af0i0]T1?I4L3L4I8G9G8I7L3N3M3N1O1O1O1O1N2O1O2N1O100O1O1O2YoNlLon0U3oPORMkn0o2RQOWMjn0j2UQOZMgn0h2WQOZMgn0g2XQO[Mgn0f2XQOZMgn0g2XQOZMgn0h2WQOYMgn0j2WQOXMgn0i2WQOYMhn0i2UQOYMjn0h2UQOYMjn0i2SQOYMmn0g2RQO[Mln0f2SQO[Mmn0f2PQO\\\\Mon0f2oPOZMRo0g2jPO\\\\MUo0`4O1O100O100O100000000O10000000O1000000O010O1O1O100IcPOZJ^o0_5iPO`JWo0_5kPOaJUo0]5nPOaJSo0\\\\5b0O0O2O1O0OnoNlJlo0_5L_OWPO_Kgo0Y4^POkK`o0Q4cPORL\\\\o0k3`PO\\\\L`o0b3_POaL`o0_3`PObL`o0]3`POdL`o0Z3aPOgL_o0X3bPOgL_o0W3bPOjL^o0Q3gPOoLYo0l2kPOUMUo0j2lPOUMUo0j2kPOWMUo0h2kPOXMVo0g2kPOYMTo0g2mPOXMTo0g2lPOZMTo0d2nPO[MSo0c2nPO^MQo0b2PQO]MQo0b2oPO^MRo0a2oPO^MRo0a2nPO_MSo0`2nPO`MQo0`2oPO`MRo0_2oPOaMQo0^2oPObMRo0]2oPOcMQo0[2QQOdMon0\\\\2RQOdMnn0[2SQOeMmn0Z2SQOfMnn0Z2RQOfMmn0Z2TQOfMln0Y2UQOgMjn0Z2VQOeMkn0Z2UQOgMkn0Y2UQOgMjn0Z2VQOeMkn0Z2UQOgMkn0Y2UQOfMkn0Z2VQOfMjn0Z2VQOfMjn0Y2WQOgMin0X2WQOiMin0V2XQOiMin0V2XQOjMhn0U2YQOkMgn0U2YQOkMgn0U2YQOkMgn0T2ZQOlMfn0T2ZQOlMfn0T2ZQOlMfn0T2ZQOkMgn0U2XQOlMhn0T2XQOlMhn0T2XQOlMhn0T2XQOlMhn0T2XQOlMin0S2WQOmMin0S2WQOmMin0S2WQOmMin0S2WQOmMjn0R2VQOnMjn0R2UQOoMkn0Q2UQOoMln0P2TQOPNln0P2TQOPNmn0o1RQORNnn0o1QQOQNnn0P2RQOPNnn0P2RQOQNmn0P2RQOPNnn0Q2QQOoMnn0S2QQOmMon0T2PQOlMPo0U2oPOjMRo0V2mPOkMSo0V2kPOkMTo0V2lPOjMTo0W2jPOjMVo0W2iPOiMVo0Z2iPOeMWo0^2fPObMZo0n2WPOPMjo0T3SPOXL1Nlo0m3QPOSL5Oio0P4QPOQL6Oio0P4RPOPL50io0Q4QPOPL4Olo0Q4QPOaLoo0_3SPO_Llo0b3TPO^Llo0b3UPO\\\\Llo0d3TPO\\\\Llo0d3TPOZLno0f3SPOXLmo0h3TPOVLmo0i3UPOVLlo0i3VPOVLio0j3XPOULho0k3YPOTLgo0l3ZPOTLfo0k3\\\\POSLdo0n3\\\\PORLdo0m3]POSLco0m3]POSLco0m3]PORLdo0n3]POQLco0n3^PORLbo0n3^PORLbo0n3^POQLco0n3_POQL`o0o3aPOPL`o0P4aPOoK_o0T4_POkKao0Y4[PObKjo0c4?5Kd0\\\\O7I3M2N3M00000000
0000000000O1O1O1G9L4M3N2O1O2N1O1N2N2M4L3N2O1O2N1O2N2L4K5I7K5M3N2N2N2M3L4J7F9L4M3N2N3M2N3K8Hflfj0\"}}, {\"image_id\": 136, \"category_id\": 1, \"bbox\": [974.0, 464.0, 280.0, 414.0], \"score\": 0.9999993443489075, \"association_id\": 1, \"light\": [-1.8049678802490234, -2.3056108951568604, 1.6979045867919922, 2.17033314704895], \"segmentation\": {\"size\": [1200, 1600], \"counts\": \"UTfS14ZU15H7L5L2N2N101N100O2N101O000O2O0000001O00000000001O0000001O001O1O001O001O001O1O1O001O001O001O010O1O010O01O010O0010O00100O01O2O0O1O2OO010O100O5K100O100O101N2O0O1O100O1O1O1O2N2OO010O1000O2O0O1O100O1O100O010O10O0100O1O00100O100O100O010O6K2M2VmN^MbR1i2gIhMYYOW2ff0kMYYOV2ef0lMZYOU2df0mM\\\\YOR2cf0PN\\\\YOQ2bf0QN^YOo1`f0RN`YOn1`f0RNaYOm1_f0TN`YOm1^f0TNcYOk1]f0UNcYOk1]f0UNcYOk1\\\\f0VNdYOj1[f0WNfYOh1Yf0YNgYOb0]J^Ojk02iYO?bJ[Ock08lYO:dJ]O]k0<oYO6gJ\\\\OYk0?QZO3iJ\\\\OUk0b0SZONlJAnj0b0VZOJPKEgj0b0[ZOEfJ4mj08^ZOYOeJj0jj0OfZOU1Xe0lNeZOY1Xe0hNfZOZ1We0iNeZO[1Ye0gNbZO_1[e0cNYZOi1ee0YNUZOn1he0TNUZOo1je0RNSZOR2ke0oMQZOV2me0jMkYO`2Sf0aMSYOZ3kf0gLQYO^3lf0dLQYO`3mf0aLQYOb3lf0aLPYOb3of0_LlXOg3Rg0ZLjXOj3Ug0WLiXOk3Wg0ULfXOo3Xg0SLeXOo3[g0RLaXOQ4_g0PL[XOU4eg0lKVXOX4ig0jK^WOkMiN_6ii0hKYWOo4gh0TKSWOP5lh0UKkVOo4Ti0YKUVOW5ki0W3O2O0O1N2O0O2N2N2O100O010O100O00nMaVOTH^i0g7kVOVHUi0]7\\\\WO_Hch0W7kWOfHTh0Y7QXOcHog0a7oWO_Hog0j7hWOXHWh0Y8VWOjGih0a8iVOcGUi0]:O2N1O1N2N2O1N2O101O1N101O001O001O010O01O1O3M2O0O2N1O2N2N3M4L3M3L5K5K4L3M2M3M3M3L4cMjUO[IZj0V6dVOWIai0c6hVOTI^i0f6kVOnH]i0k6h2E;WOi0A?J5K6G8G9M4M2O2N101O1O1N3N2N2N2N2N1O1[MknNm0VQ1oNWoNg0jP1UO_oNe0cP1VOdoNe0`P1UOeoNi0eP1gNdoNV1]R1N2M4L4L3L5K3M5J;Cojd<\"}}, {\"image_id\": 136, \"category_id\": 1, \"bbox\": [199.0, 586.0, 379.0, 214.0], \"score\": 0.9999986290931702, \"association_id\": 2, \"light\": [-1.8930073976516724, -2.4730730056762695, 1.8287508487701416, 2.309547185897827], \"segmentation\": {\"size\": [1200, 1600], \"counts\": 
\"dlY7>iT1a0E7J3N2N2O0O100O1O1O1N2N2M2O2O1O1O1O10O01O01O0000001O00001O00100O01000O010O01O010O0TnN_Nko0`1RPOiNho0X1WPOlNfo0S1ZPOQObo0o0^PORObo0m0^POUOao0k0^POVOao0k0_POUO`o0k0`POVO`o0i0_POZO_o0f0`PO\\\\O^o0e0`PO^O\\\\o0d0UPOMfo04WPO3eo0NYPO7bo0K^PO8[o0MePO5Wo0MiPO6To0KmPO5Ro0KnPO6Qo0KnPO5Ro0KoPO5on0MPQO4on0MPQO3on0NQQO3ln00SQO0ln02SQOOkn02TQONmn03QQONon02PQONRo02mPONSo02nPOMSo03lPOD^o0<aPOBao0`0^PO\\\\Ofo0e0XPOZOjo0h0SPOWOno0k0PPOSOSP1T1doNiN`P1P34O03N0O100O100O2O00000O1000000O1000O100000OL5O11O2M2TPOQKUo0V5cPOmJ[o0[5]POfJao0_5\\\\PO]Jgo0e503M2O1O1O0O2O001O001O0O10QNPQO_Mon0U2^QOjMbn0T2`QOlM`n0S2bQOmM]n0Q2eQOoM[n0P2gQOoMYn0P2iQOnMXn0P2kQOoMUn0n1nQOQNSn0k1lQOnLhNT1]o0j1nQOVMgNk0[o0k1QRO]MgNb0Yo0n1SROaMaNb0]o0k1SROeNom0Y1SROfNnm0Z1RROfNnm0Y1SROgNmm0X1SROhNmm0Y1SROgNmm0Y1SROgNmm0Z1RROfNnm0Z1RROeNom0[1QROeNom0[1QROeNom0\\\\1PROcNQn0]1oQObNRn0^1nQOaNSn0_1mQO`NTn0a1kQO^NVn0b1iQO_NWn0a1iQO_NWn0b1hQO]NYn0c1gQO]NYn0c1gQO\\\\NZn0e1eQOZN\\\\n0f1cQO[N^n0d1bQO[N_n0e1`QO[Nan0f1^QOZNbn0f1^QOYNcn0g1]QOYNcn0g1\\\\QOYNen0h1ZQOXNfn0h1ZQOWNgn0i1YQOVNhn0j1XQOUNhn0m1VQOTNjn0l1VQOSNkn0m1UQOSNkn0n1SQOSNmn0n1RQOQNon0o1PQORNPo0o1oPOQNQo0P2mPOQNSo0o1mPOQNSo0P2kPOPNVo0P2jPOPNVo0P2jPOPNVo0Q2hPOQNWo0o1iPOQNWo0o1iPOQNWo0o1iPOQNWo0o1iPOQNWo0o1hPORNXo0n1hPOSNWo0m1iPOSNWo0m1iPOSNWo0m1iPOTNVo0l1jPOUNUo0k1jPOWNUo0i1kPOXNTo0h1lPOYNSo0g1mPOYNSo0g1mPOZNRo0f1nPOZNRo0f1nPO[NQo0e1oPO[NQo0e1oPO[NQo0e1oPO[NQo0e1oPO[NQo0e1oPO\\\\NPo0d1PQO\\\\NPo0d1PQO\\\\NPo0d1PQO\\\\NPo0d1PQO\\\\NPo0d1PQO\\\\NPo0d1PQO\\\\NPo0e1PQOZNon0g1QQOYNon0g1QQOYNon0h1PQOXNPo0h1SQOUNmn0l1SQOSNmn0n1RQOQNon0P2PQOPNon0R2QQOmMon0T2QQOkMon0U2RQOjMnn0W2RQOgMon0Y2RQOfMnn0[2RQOdMnn0\\\\2QQOdMPo0\\\\2PQOaMSo0_2lPOaMTo0`2kPO`MVo0`2hPO^M\\\\o0b2cPO_M]o0a2bPO_M_o0b2_PO_Mao0a2^PO`Mbo0`2^PO_Mco0a2\\\\PO`Mdo0`2\\\\PO`Mdo0`2[POaMfo0^2YPObMho0^2WPOcMio0]2VPOdMjo0\\\\2VPOdMjo0\\\\2UPOeMlo0Z2TPOeMmo0[2SPOeMmo0Z2TPOfMlo0Z2TPOfMlo0Z2UPOeMko0[2UPOeMko0[2UPOeMko0[2UPOeMko0[2UPOeMlo0Z2TPOfMlo0Z2TPOeMmo0[2SPOeMmo0[2TPObMno0^2RPOaMoo0`2QPO\\\\MRP1d2noNZMO^O]o0X3eP
OVMNF\\\\o0T3fPOTMNLZo0P3iPORMM0Yo0n2kPOoLM5Wo0l2mPOjLO<To0j2mPOfL2a0Po0i2jQOXMVn0i2hQOYMWn0i2eQOYM\\\\n0h2`QO[M_n0g2\\\\QO\\\\Men0R3ePOVMZo0`41O010O1O01O01N100O2O0O1O1dMYPOnNio0l0bPOnN^o0o0hPOmNYo0P1jPOoNXo0n0iPOTNVO1RP1j1iPOVNUOORP1k1iPOVNVOMSP1k1hPOXNVOLRP1l1hPOXNWOJSP1l1gPOZNWOHTP1m1ePO\\\\NXODUP1o1cPO]N]O^ORP1S2bPO_NCVOlo0[2`PO_NXP1`1hoN`NYP1_1foNbN[P1\\\\1foNcN[P1]1doNdN]P1[1coNeN^P1Y1boNgN_P1Y1aoNgN`P1X1`oNhN`P1Y1_oNfNcP1Y1]oNgNdP1X1\\\\oNgNeP1Y1\\\\oNfNeP1Y1[oNfNgP1Z1XoNeNjP1Z1VoNeNlP1Z1ToNeNnP1Z1RoNfNoP1Y1RoNeNoP1\\\\1PoNdNQQ1[1onNdNRQ1\\\\1onNbNSQ1]1mnNbNTQ1_1knNaNVQ1^1knN`NWQ1`1hnN_NZQ1a1fnN^N[Q1e1bnNYN`Q1i1^nNTNeQ1R2j02N1N100O1O102M3N2N1O202M4L0O1O0O100O100O10O0100010O000O010O10O0001O001O1O3M3M3N3L2N1O1O1O1O1N2N2O1kMnlNi1fS1Dd0WOmn\\\\U1\"}}, {\"image_id\": 137, \"category_id\": 1, \"bbox\": [282.0, 484.0, 197.0, 178.0], \"score\": 0.9999999403953552, \"association_id\": 1, \"light\": [-1.678196907043457, -2.266040086746216, 1.579106330871582, 2.241741418838501], \"segmentation\": {\"size\": [799, 547], \"counts\": 
\"^el65ih02O1M3N2O0O100O2N1O2N1O101N1O1O2N1O100O1O1O2O0O1O100O1N20O0100O1000O01000O10O010O100O010000O1O010O10O010O0001O010O10O01O10O10O010O1O010O010O1O010O1O10O100O01000`NVOjZOj0Qe0BiZO>Qe04cZOLfd0o0U[OQOjd0Z1lZOgNSe0_1hZOaNVe0b1hZO_NUe0d1kZO\\\\NSe0h210O001NPOoZOUNmd0n1T[OSNjd0d1a[O\\\\N_d0`1e[O`NZd0]1j[ObNUd0Z1Q\\\\OfNnc0U1W\\\\OlNhc0S1Z\\\\OlNfc0T1[\\\\OlNdc0R1_\\\\OnN`c0P1c\\\\OoN]c0Q1c\\\\OPO]c0P1c\\\\OPO]c0P1c\\\\OPO]c0P1m[OTNOl0Td0P1j[O[NNd0Yd0R1g[O]NNa0Zd0S1f[O`NN<]d0X1_[OaN08ad0Y2Z[OgMfd0X3010000O010000O0mN`[OiM`d0U2b[OjM_d0Q2f[OnM[d0m1j[OSNVd0k1k[OUNVd0j1k[OVNUd0i1l[OWNTd0h1l[OXNUd0f1l[O[NUd0c1l[O\\\\NUd0c1k[O]NVd0a1l[O_NTd0_1n[OaNRd0^1o[OaNRd0^1n[ObNTd0\\\\1m[OdNSd0Z1o[OeNRd0Z1o[OfNQd0Z1n[OgNRd0Y1n[OfNSd0Z1m[OfNTd0Y1l[OfNUd0[1j[OeNUd0^1i[OaNXd0g1`[OYN`d0k1\\\\[OTNed0m1Z[ORNgd0n1X[OoMld0R2S[OmMnd0T2Q[OkMPe0U2P[OkMPe0W2nZOhMTe0X2lZOgMTe0\\\\2iZOcMXe0b2cZO^M]e0c2bZO]M]e0e2cZOYM^e0g2bZOYM]e0h2cZOWM^e0i2cZOTM_e0l2cZOPM_e0o281000O1O1O1O01O0000O101O1]Oc0N2N2O1K5L4O1O1N2J6M3O1M3I7O1O1O1N2O2M3N9_Ogbd1\"}}, {\"image_id\": 137, \"category_id\": 1, \"bbox\": [1.0, 263.0, 394.0, 469.0], \"score\": 0.9999998211860657, \"association_id\": 2, \"light\": [-1.2159044742584229, -2.119736671447754, 0.9930304884910583, 2.0351266860961914], \"segmentation\": {\"size\": [799, 547], \"counts\": 
\"P^1c1Zg04M3M2N2N2O0O100O100O100O101N100O10000O01000O100000000O1000000O10O01O100O010O1000O10O01000O1O010O1O100O01000O100O100O001O100O1000O01000O01O010O1O010O100O10O10O100O100O1O10O010000O10O100O010O010O0100O0100000O0100O100O00100O10000O0100O10O01O100O0100O010O10000O1000O10O100000O100O010O0100O00100O1000O01000000O10000O01000O100O010O01O10O10O10O01000O10O0100O1O0O2N2O010O10O010000O100O10O01O1O100O10000O100O100O1N2O1O100O100O100O1O1O1N2N2O100O100O01000O100O1O1M3N101O100O10O10O01O010O01O001O1O010O0010O01O0O2O001O00100O1000O0100O1O001O10O0100O011O0O100O1O2N1O101N10001N2O^OBdXO=Vg0JjXO4Rg02nXOL``0LREb0_J_OZ`0;iD>nJTOX`0e0cD;UKnNV`0l0bD7ZKjNQ`0V1_D3dKbNh?d1]DNbM^NW<k1PFJhM\\\\NV<o1mEGlM\\\\NT<P2nEEnM[NR<T2lECQNZNP<Y2hEAWNWNo;_2`E_O`NSNn;b2]E^OdNQNn;d2ZE]OhNoMl;h2XE[OkNnMj;o2SEVOSOkMh;V3nDQOZOiMf;]3iDlN@hMf;_3fDkNDfMe;b3bDkNIcMe;e3]DjNO`Mc;j3XDiN5\\\\Mc;o3RDhN<XMa;S4nCgN\\\\>\\\\1bAeN\\\\>_1aAbN]>b1`A_N_>e1]A\\\\Nb>g1[AZNe>h1XAYNg>j1UAWNl>l1k@YNV?l4d@UH[?i7n@PHS?o7PAoGo>o7SARHl>l7WATHh>k7ZAVHc>j7_AVH`>j7aAVH^>i7eAUH\\\\>i7gAVHX>g7mAWHT>h7a1O20O0_@XHm=f7TB[Hl=d7UB]Hi=b7XB_Hh=_7[B`Hd=`7n1O0010N1O2O1O001000001O00001O0O10001O001O1O1O1e@VH_=l7[BYHd=g7ZB[Hf=f7VB]Hj=d7iAiHV>Q5_AgKODb>a4gAhKCJf>[4mAjKYOMj>U4TBmKnN0n>P4\\\\B_Md=_2eB[MZ=d2kBXMU=g2nBVMT=i2mBUMT=k2nBQMT=n2nB_Ld=a3]BTLm=^Oo_Oa3U2hLU>DP@[3k1oLY>CR@Y3f1RM\\\\>@R@\\\\3b1TMa>ZOQ@_3_1VMf?h2[@XMf?e2\\\\@XMj?b2Y@SMW`0f2l_OUM\\\\`0S1f]O^OQ2]O^`0k0n]O_Og1Db`0a0S^OAa1Mje0O101O0O3M^Sf3\"}}, {\"image_id\": 138, \"category_id\": 1, \"bbox\": [1357.0, 167.0, 239.0, 276.0], \"score\": 0.9999998211860657, \"association_id\": 3, \"light\": [-1.4332890510559082, -2.7177517414093018, 1.2918751239776611, 2.6192691326141357], \"segmentation\": {\"size\": [1067, 1600], \"counts\": 
\"n\\\\V\\\\12WQ13N2O0O101O0O10000O1O1O10000O10000O100O1O1O10000O100O1O1O100O1000O010000O10O01000000O10000O10O1000O10000O1000000O10O100000O2O000000000O100000001O0000000O100000001O000O10001O00001O00001N1000001^ISOd\\\\Om0Xc0ZObXOHi1n0ce0_OlWO_OQOa0X3c0ie0I\\\\WO<i2Khe0T2UZOmMhe0[2RZOeMle0f2kYOZMSf0k2jYOUMUf0m2jYOTMTf0n2kYORMSf0S3jYOmLUf0W3hYOiLWf0Z3gYOfLXf0]3fYOdLXf0^3gYObLYf0`3eYO`L[f0a3dYO_L\\\\f0a3dYO_L\\\\f0b3cYO^L]f0b3cYO]L^f0d3`YO]L`f0d3_YO\\\\Laf0e3^YO[Lbf0g3\\\\YOYLdf0i3ZYOWLff0j3YYOVLgf0j3YYOVLgf0j3YYOVLgf0k3XYOTLif0l3WYOTLif0l3WYOTLif0l3WYOTLif0m3VYOSLjf0n3UYORLkf0o3TYOPLmf0Q4RYOoKnf0R4QYOnKof0S4PYOmKPg0T4oXOlKQg0U4nXOkKRg0U4nXOkKRg0U4mXOlKSg0U4\\\\XObJXOY1\\\\h0U4\\\\XOcJWOX1]h0V4[XObJXOX1]h0W4ZXObJXOW1^h0W4YXOcJYOV1^h0Y4WXObJZOU1_h0Z4VXObJZOT1`h0\\\\4RXOdJ\\\\OP1bh0]4PXOfJ\\\\Om0dh0_4mWOfJ^Ok0eh0`4kWOfJ@j0eh0b4gWOgJCg0fh0c4fWOfJDg0fh0e4dWOdJFg0gh0e4bWOeJFf0hh0f4aWOdJGf0hh0f4`WOeJHe0ih0f4^WOeJId0kh0g4ZWOfJKc0lh0g4UWOiJO`0oh0b5QWO^JQi0`5nVOaJTi0]5lVOcJUi0\\\\5kVOdJYi0X5gVOhJci0n4]VORKTj0]4lUObKZj0Y4gUOfK[j0X4eUOhK\\\\j0W4dUOiK]j0U4dUOkK]j0T4cUOlK_j0Q4bUOoK_j0P4aUOPL`j0n3aUORL`j0m3`UORLaj0n3_UORLbj0n3^UOQLcj0n3]UORLcj0n3]UOQLej0o3ZUOQLfj0o3ZUOQLfj0P4ZUOoKfj0R4YUOnKgj0S4XUOmKhj0T4WUOlKij0U4VUOjKkj0[50000000O1000000O10O10O100000000000O1O0010000000O2oNTUOaKlj0]4VUOcKjj0[4XUOeKhj0X4[UOhKej0U4^UOkKaj0R4cUOnK]j0n3gUOQLYj0n3iUORLWj0m3jUOSLWj0j3kUOVLUj0h3nUOWLSj0f3oUOZLQj0d3QVO[LPj0c3RVO]Lni0c3RVO]Loi0b3QVO^Loi0c3PVO]LPj0c3PVO]LPj0c3QVO\\\\Loi0e3PVO[LPj0d3QVO[LQj0c3PVO]LQj0`3RVO_Lni0_3TVOaLmi0Z3WVOeLki0U3[VOjLfi0c2mVO[M[i0Z1kQ4\"}}, {\"image_id\": 138, \"category_id\": 1, \"bbox\": [30.0, 329.0, 301.0, 458.0], \"score\": 0.9999999403953552, \"association_id\": 1, \"light\": [-2.4041762351989746, -1.8877203464508057, 2.331458806991577, 1.7409329414367676], \"segmentation\": {\"size\": [1067, 1600], \"counts\": 
\"Wno03PQ1:M3M1O2N10000O101O0O1000000O10000O10000O100O10000O100O1000001N101O001N101O01O01O100O00100O00001O100O1O1O00100O0010O010O01O010O1O001O1O1O001O001O001O1O1O001O001O001N2O1O001O00001O001O1O1O0O2O00001N2O1O001N101O001O1N2O1O001N101O1O1N2O1O1O0O2O001O1O1O001O0lKYMdYOh2oe0nMgYOR2Tf0ZNeYOf1Vf0eNdYO[1Uf0VOaYOj0[f0HWYO9ff0NUYO2if04SYOLlf08QYOHnf0;PYOEnf0?QYO@lf0g0PYOZOkf0n0QYOROlf0T1QYOlNlf0Z1QYOgNlf0^1QYObNnf0b1PYO^Nmf0g1PYOYNmf0m1PYOSNmf0V2nXOiMPg0_2jXOaMTg0g2fXOYMYg0k2dXOVMYg0o2eXOPMZg0T3cXOlL[g0X3dXOgLYg0_3dXOaLVg0i3gXOVLTg0R4jXOlKod0nMW\\\\O]6gNeKod0YNn[OX6RO^Kmd0dNd[On8Zd0]GW[Oj8dd0P2A?K40O1gMl[O]GUd0f8k[OnF]d0V9c[OcFad0Q;i[OeCbc0[<Y\\\\OUD[c0Q=K5L3N2O000O2O000O2O00000000000000000000000000000000000000000000000001O00001O001O2N2N3M3M2N2N2M3N6J`0@:F6J3L3N3M4M2M2N2N2N001O001O1O001O1O1O001N2O2N1O1OO10000O010O1O10O000O1kMlZORHVe0i7^[OeGed0X8e[O^G]d0a8k[OWGWd0h8U20O101O01O001O3L3N2M3M3N3M2N3M4M2M7J7I7H5L3M3K5J6H8H8I7J:H:Eb0\\\\O>B?A?Cj0VO7F9G:C>[Ol0ZO`hYY1\"}}, {\"image_id\": 138, \"category_id\": 1, \"bbox\": [499.0, 434.0, 631.0, 618.0], \"score\": 0.9999998807907104, \"association_id\": 6, \"light\": [-0.7879221439361572, -3.3869388103485107, 0.6691544651985168, 3.305858850479126], \"segmentation\": {\"size\": [1067, 1600], \"counts\": 
\"UnX`07RQ14M2O1N2O1O1N101O001N10001O00001O01O01O00001O0000001O000000001O000000000000001O0000000O100000000000000O101O0000000O100000000O1000000O10000O101O000O10000O10000O10000O1000001N10000O100O101N1O1O1O1N2O1O2N100O1O100O10000O2O0O10000O2O0O1O101N1O2N1O101N1O100O2O0O1000001O0O100000001N1000001O0O10001O000O10001O0O1000000O100000001N10000000000O10000000000O10000000000O1000000000000O1000000O1000000O1000000O100000000O1000000000O10O10000000O1000O10O1000000O10O1000O1000000O10f@UM4k2KXM2h2M\\\\M1d2M`M1`2MdM1[2NiM0W2OlMOS20PNOo10UNNk11WNCoNoAi2]>\\\\N\\\\GoNl1OZHf2]>^NUGVOP2I\\\\Hc2_>`NmF]OT2C_H`2_>bNjF@U2_ObH_2_>cNgFCV2[OdH^2_>eNdFHU2VOhH]2_>fN`FMT2ROmH[2^>hN\\\\F3T2kNRIZ2^>iNVF>P2bN\\\\IV2^>kNhET1R2nMhIS2^>lNZEi1U2YMSJS2^>lNQEV2W2lLZJR2]>mNlD`2W2bL`JQ2]>nNiDe2U2]LeJo1^>POfDg2T2[LhJn1]>QOeDi2U2XLhJn1_>RObDk2T2VLkJm1_>ROaDm2T2ULkJl1`>SO_Dn2T2TLlJk1a>UO]Dn2U2RLmJk1a>VO[Do2V2PLnJk1a>WOXDR3V2mKQKi1b>YOTDU3W2iKRKj1c>ZOPDW3X2_Ni9[NkC\\\\3Y2YNl9]N\\\\Cj3d2jMP:^NQCT4l2]MT:`NmBW4l2ZMV:bNjBW4o2WMW:cNgBZ4o2SM[:dNdB[4P3QM\\\\:eNbB\\\\4P3PM^:eN`B]4Q3nL_:fN^B^4Q3lLb:hNYB_4S3jLd:hNWB`4S3iLf:iNSBb4T3fLi:kNnAb4W3dLk:l6RETIo:o6mDRIS;P7iDRIW;P7fDQIZ;P7dDQI\\\\;Q7aDoH`;R7^DoHb;R7\\\\DoHd;R7ZDoHf;R7XDXFPMT1i>e8TDoHl;R7RDoHn;R7PDoHP<S7lCnHU<T7gCnHY<X7^CkHb<\\\\7PCjHQ=d7TBiHl=[=0RMUA\\\\Ck>c<VA]Cj>b<WA^Ci>b<VA_Cj>a<VA^Ck>b<UA^Ck>a?1O001O001O0`LQAdDP?W;TAiDm>S;UAmDl>P;WAPEj>m:XASEi>j:YAVEh>h:YAXEh>e:ZA[Eh>b:YA^Eh>a:XA_Ei>_:WAbEj>]:VAcEk>[:VAeEk>Y:WAeEk>Z:UAfEl>X:UAhEk>W:VAiEk>V:UAjEl>T:VAjEl>U:TAkEm>S:TA\\\\CXOd1e?o:YAUCVOi1b?Q;ZASCYOi1^?R;ZATC[Oh1\\\\?S;YASC@f1Y?W;WAQCCf1X?W;UARCHb1U?[;SARCJb1W?X;o@VCKa1[?U;j@XCMa1`?Q;c@^CMa1h?i:[@eCOa1n?c:R@lC1`1T`0]:k_OSD1_1Y`0Z:g_OVD1_1[`0X:d_OXDE9YOV1`a0X:a_OYD[OX2Va0]9__OZDXO\\\\2[a0Y9^_OZDSOa2aa0T9[_OZDROe2da0Q9Z_ORGi`0n8Q_OVGRa0h8h^O\\\\G\\\\a0c8]^ObGia0Z8P^OjGXb0`;8I3L3M4L2O2M2O1N2O1O1N3N3M4L4K6K5K2M3N1O1N2O1O1N2O1O1N3N1O3L4M4L3L4M2jL_[O^Icd0_6^[O`Ied0\\\\6\\\\[OeIfd0X6[[OhIfd0V6Z[OkIhd0Q6Z[OnIhd0P6X[OQJjd0k5X[OUJkd0f5W[OYJmd0a5U[O`Jnd0Z5U[OfJod0T5S[OkJQ
e0P5P[OQKSe0k4nZOTKUe0h4mZOXKTe0f4mZOYKUe0e4kZO\\\\KWe0a4jZO^KXe0a4hZO_KXe0`4iZO`KXe0_4gZOaK[e0]4fZOcK\\\\e0Z4eZOfK\\\\e0Y4dZOgK^e0V4cZOiK`e0T4aZOlKae0Q4`ZOoKae0o3_ZORLce0k3^ZOULce0h3_ZOXLbe0f3_ZOZLbe0d3_ZO[Lbe0d3_ZO\\\\Lbe0b3^ZO_Lce0_3^ZOaLce0_3\\\\ZO`Lee0`3[ZO`Lfe0`3YZO`Lie0^3WZOaLke0^3UZOaLne0^3QZObLQf0\\\\3nYOeLTf0Z3kYOeLXf0Y3hYOgLYf0Y3fYOfL\\\\f0Y3dYOgL]f0Y3bYOgL_f0X3aYOhL`f0X3_YOgLcf0X3]YOhLdf0W3\\\\YOiLef0U3[YOlLff0S3ZYOlLhf0S3XYOmLif0R3WYOnLkf0o2VYOQMlf0m2SYOSMPg0k2PYOUMRg0h2oXOWMTg0g2kXOZMVg0e2jXOZMYg0c2gXO^MZg0a2fXO^M[g0b2dXO^M^g0a2aXO`M`g0^2aXOaMag0^2_XOaMcg0^2\\\\XOcMfg0Z2[XOfMfg0Y2YXOgMjg0W2VXOiMlg0U2TXOkMog0Q2QXOPNPh0o1oWOQNTh0m1lWOSNUh0l1jWOUNWh0i1jWOWNWh0h1hWOYNYh0f1gWOZNZh0e1eWO[N]h0d1cWO\\\\N^h0c1aWO^N`h0a1`WO_Nah0`1_WO_Ndh0_1[WObNgh0\\\\1YWOdNih0Z1WWOeNkh0Z1UWOfNmh0X1SWOgNoh0X1QWOhNPi0W1PWOiNQi0V1oVOiNSi0V1lVOkNTi0U1lVOkNUi0T1kVOlNVi0R1kVOmNVi0S1jVOmNWi0R1iVOnNWi0R1iVOnNXi0Q1hVOoNYi0P1gVOoN[i0P1eVOPO[i0o0fVOQO\\\\i0m0dVOSO]i0l0cVOSO_i0l0aVOTOai0j0_VOVObi0i0^VOWOci0h0]VOWOfi0f0[VOZOei0g0ZVOYOgi0f0YVOYOii0f0WVOZOii0g0UVOZOli0e0TVO[Oli0e0TVO[Omi0d0SVO[Oni0f0QVOZOPj0e0PVO[OQj0e0nUO[ORj0e0nUO[OSj0d0mUO\\\\OSj0e0lUOZOVj0e0jUO[OVj0f0iUOZOXj0e0hUO[OYj0d0gUO\\\\OZj0c0fUO]O[j0b0eUO]O^j0a0bUO_O`j0?`UOAbj0=^UOCcj0<]UODej0:[UOEgj0:YUOFhj08YUOHhj07XUOIij06WUOJij06WUOIkj06UUOJkj07TUOImj07RUOInj08QUOHPk09nTOGRk0:mTOEUk0<iTODWk0=hTOCYk0=fTOCZk0>eTOB\\\\k0=dTOC]k0=bTOC_k0=aTOB`k0>_TOBbk0=^TOBdk0>[TOBfk0=ZTOCgk0<YTODgk0<YTODhk0;XTOEik0:WTOFik0:WTOFjk09VTOFlk09TTOGlk09TTOGlk09TTOGmk08STOHmk08STOHnk07RTOInk07RTOHok08QTOHPl07PTOIPl07PTOIPl07PTOIPl07PTOIPl07PTOHRl07oSOHQl08oSOHQl08oSOHQl08oSOHQl08oSOHRl07nSOHSl08mSOHSl08mSOHSl08mSOHTl07lSOITl07lSOIUl06lSOIUl06kSOJVl05jSOJXl05hSOKYl03iSOLYl02gSON[l00eSO0\\\\l0OeSO0]l0NcSO2^l0McSO2_l0KbSO5_l0JaSO5al0J_SO6bl0I^SO7cl0G_SO8al0H_SO8bl0F_SO9dl0D]SO<dl0C\\\\SO=el0A\\\\SO>gl0_OZSOa0ml0WOUSOg0Tm0POmROP1gn0O100O10001N10000O101O001N101N2O001N2O0O101N101N2N1O2M3N3L4KjjX?\"}}, {\"image_id\": 138, \"category_id\": 1, \"bbox\": 
[953.0, 239.0, 284.0, 460.0], \"score\": 1.0, \"association_id\": 5, \"light\": [-2.1225857734680176, -2.3643245697021484, 2.0363521575927734, 2.269460916519165], \"segmentation\": {\"size\": [1067, 1600], \"counts\": \"ScQo03VQ13N2O1O001O001N2O001O1O0000001O001O001O1O01O01O00001O010O1O00010O1O100O1O10O01O1O1joN]Obo0Y1N0O1O0010O01O1O2N2N1O1O001gQOVNol0k1PSOUNPm0l1oROUNPm0l1oROTNQm0n1lROSNTm0P2iROPNWm0Q2gROPNYm0Q2dROQN\\\\m0P2cROQN\\\\m0o1cRORN]m0o1bROQN^m0Q2_ROPNam0S2[ROnMem0l200000O11O00001O0O10001O00O100O1000000O10000000O10001O000000000000O1000000000000000000000^KXNaYOi1Yf0`NcYO`1Yf0eNfYO[1Wf0jNgYOV1Wf0mNhYOR1Vf0ROoXOlMfNQ3Vh0ZOnXOnMgNh2Rh0EQYOjMjNa2lg00RYOhMoNX2mg05nXOgMTOT2lg0:jXOfMYOP2kg0>gXOeM_Ol1hg0c0dXOdMDi1eg0j0XXOhM3^1ag0l2_XOTM^g0o2bXOQM\\\\g0R3cXOnL\\\\g0S3dXOmL[g0T3eXOlLZg0U3fXOjLYg0X3hXOgLVg0[3jXOeLSg0^3mXObLPg0a3PYO_Lof0b3QYO]Lnf0e3RYO[Lmf0f3TYOYL\\\\c0aMV_OW6^MWLYc0gMW_OR6aMWLRc0oMY_Ol5eMULnb0TNZ_Oi5hMRLnb0WNV_Oi5mMPLkb0ZNV_Og5oMnKkb0\\\\NT_Og5RNmKjb0]NR_Of5UNmKib0^No^Of5YNkKib0_Nl^Og5\\\\NiKib0aNg^Oh5bNfKfb0dNd^Oh5gNbKgb0gN_^Oi5jN^Kib0jNZ^Oi5nN[Kjb0nNU^Oi5POWKmb0SOn]Oh5UOSKob0XOf]Oi5ZOnJQc0]OX]On5HcJQc0D\\\\\\\\O`6b0kISc0Y8l\\\\OfGUc0[8j\\\\ObGYc0^8h\\\\O_GZc0a8f\\\\O^G[c0b8e\\\\O^G[c0b8e\\\\O]G\\\\c0c8d\\\\O]G\\\\c0c8d\\\\O\\\\G]c0d8d\\\\O[G\\\\c0e8d\\\\OZG]c0h8a\\\\OVGac0k8^\\\\OSGdc0o8Z\\\\OQGfc0P9Y\\\\OoFhc0Q9X\\\\OoFhc0R9W\\\\OmFjc0T9U\\\\OlFkc0T9U\\\\OkFlc0U9T\\\\OkFlc0U9T\\\\OjFmc0V9S\\\\OiFnc0W9R\\\\OiFnc0W9R\\\\OhFoc0X9Q\\\\OhFoc0X9Q\\\\OhFoc0Y9P\\\\OfFQd0Z9o[OeFRd0\\\\9m[OcFTd0]9l[ObFUd0_9j[O`FWd0`9i[O`FWd0a9h[O^FYd0c9f[O]FZd0d9e[O[F\\\\d0g9c[OVF_d0l9_[OSFbd0n9^[OoEdd0R:\\\\[OkEfd0V:Y[OiEid0W:W[OhEjd0[;2lNT[OgEmd0Z;3L4L4M2N3N2M4M5J:F7H4^OcYOPFaf0n9bYOmEcf0o9`YOmEff0o9a0I6K5J3N2M3N1N3N3L3N3L3N1O1N3N2M3cNbWOiI`h0n5kWOmIYh0n5lWOnIVh0o5oWOmISh0o5YXOgIig0U6i1K4M4N1N3N2N2N3M3M4K4K4N2N1O1O100O2O0O100N2O1N2L4N2O1N3N2N3M3L5K4K5L3N1N3M3L4I7J6K6K5K7H9E`0WOjki;\"}}, {\"image_id\": 138, \"category_id\": 1, \"bbox\": [1202.0, 23.0, 187.0, 210.0], 
\"score\": 0.999999463558197, \"association_id\": 2, \"light\": [-1.778946042060852, -2.0994668006896973, 1.6644614934921265, 1.9908783435821533], \"segmentation\": {\"size\": [1067, 1600], \"counts\": \"XfTW15UQ1101N100O100O2O0O100N2J6O1O100O10O0100O10000O10O10O100000000O0100000O1000000O100000000O10000O10000O10000000000O1000000XMCbTO=Wk00cTOO[k06bTOK]k07bTOI\\\\k0;bTOE[k0a0bTO_O[k0g0bTOYO\\\\k0k0bTOUO^k0l0`TOUO^k0n0aTORO]k0Q1bTOoN\\\\k0T1cTOlN[k0X1bTOiN]k0Y1bTOgN\\\\k0\\\\1cTOdNYk0a1gTO^NTk0h1kTOXNSk0k1mTOTNQk0o1nTORNoj0R2PUOmMmj0W2UUOeMej0c2bUOUMYj0Q3iUOlLTj0X3lUOgLQj0]3_UOYLZO:Tk0d3QUOcLHHoj0S5PUOmJnj0U5SUOjJlj0X5SUOiJkj0X5VUOgJhj0[5XUOdJhj0^5WUObJhj0_5YUO`Jfj0a5ZUO_Jfj0a5ZUO_Jfj0a5ZUO_Jfj0a5ZUO_Jfj0a5ZUO_Jfj0a5ZUO_Jfj0a5[UO^Jej0b5[UO^Jej0b5[UO^Jej0b5[UO^Jej0b5[UO^Jej0b5[UO]Jfj0c5ZUO]Jfj0c5ZUO]Jfj0c5ZUO]Jfj0c5ZUO]Jfj0c5ZUO]Jfj0c5ZUO]Jfj0c5ZUO]Jfj0c5ZUO]Jfj0c5ZUO\\\\Jgj0d5YUO\\\\Jgj0d5YUO\\\\Jgj0d5YUO\\\\Jgj0d5YUO\\\\Jgj0d5YUO\\\\Jgj0d5YUO\\\\Jgj0d5YUO\\\\Jgj0d5YUO[Jhj0e5XUO[Jhj0e5WUO\\\\Jij0d5WUO\\\\Jjj0c5VUO]Jjj0c5VUO]Jkj0b5UUO^Jlj0a5TUO_Jmj0`5SUO`Joj0^5QUObJQk0\\\\5oTOcJjk0e4UTO\\\\Klk0c4TTO]Kmk0b4RTO_Kok0`4PTO`KTl0]4lSOcKUl0\\\\4jSOdKXl0Z4iSOeKYl0Z4gSOdK]l0Z4cSOdK`l0[4`SOcKdl0Z4\\\\SOgKel0W49O1N3M2N2N2N1O2O0O2N2N2O1N2N101N101N2SO[RORNgm0i1_ROSNcm0j1aROSN`m0k1dRORN^m0l1eROPN]m0n1R1N1N3N2N3N1N2O001N2O2N2N1N3M5K4L4L3L8Hggk6\"}}, {\"image_id\": 138, \"category_id\": 1, \"bbox\": [291.0, 239.0, 418.0, 408.0], \"score\": 0.9999731779098511, \"association_id\": 4, \"light\": [-2.2082462310791016, -1.8722572326660156, 2.0493288040161133, 1.6500864028930664], \"segmentation\": {\"size\": [1067, 1600], \"counts\": 
\"o^_97PQ16K5L3N3N1O2N1N2O1O1O0O0100000000O1000O10O100000000001O1O001UTOQOXg0P1eXOTOR12``0k0\\\\^OWOj0HnJ9ke0i0Z^O\\\\Ob0A[Ka0he0b0Y^OB?]O^K`0ie0c0U]OjNRNj0^3[OaK>je0c0S]OROlMf0_3SOiKk0he0;Q]OVOjMf0X3POVLo0ge06n\\\\OZOdMi0[33Rb0Jn\\\\OU1l0SOVb0Ik\\\\Oh1=_Nhb0Ii\\\\Ok1=]Nkb0Gh\\\\Ol1<_Nkb0Ei\\\\Ol1<_Nkb0Dj\\\\OAlMT1^2Hlb0Cj\\\\O@oMS1[2Jmb0Aj\\\\OAQNR1X2Lmb0Ak\\\\O^OTNS1T2Nmb0Ak\\\\O\\\\OWNT1Q2Onb0_Ok\\\\O]OXNT1n11ob0^Ol\\\\OZOZNV1k12Pc0\\\\OlZOoNX1:TOW1i14ob0\\\\OkZOQO\\\\12TOZ1h17nb0ZOkZOUOf2W1kL[Oh1o0mc0XOjZOYO>0k07SN:42h1l0oc0VOiZO^O87l0G_N=M6h1k0Pd0UOhZO@3<o0]OgN?G:f1i0Sd0SOhZOB1;h1KgM=d1h0Ud0POiZOE08k1LcMa0b1f0Wd0oNkZOD16k1NaMd0`1e0Yd0mNkZOF13n1M^Mk0]1b0\\\\d0kNlZOF13P2G_MU1V1`0^d0jNmZOF22`2P1iLlNh0b1Qe0hNnZOF22^2d1`MLSe0fNnZOF42\\\\2g1^MKTe0eNZ\\\\OGT1k1]MIVe0dNn_Of1jJGWe0bNP@h1hJFYe0`No_Ol1gJDZe0`No_On1eJB]e0^No_OQ2dJ@]e0^NP@T2aJ^O`e0]Nn_OW2aJ\\\\Okk0f0STOZOlk0i0STOWOlk0l0RTOSOmk0Q1QTOnNok0U1oSOjNPl0Z1nSOfNPl0\\\\1PTOcNPl0^1oSObNPl0`1PTO_NPl0b1oSO^NPl0d1oSO\\\\NPl0f1PTOZNYk0dNnTOS3IXNUk0lNnTOn2LVNRk0ROoTOi20TNnj0XOoTOg22PNnj0[OoTOf23nMmj0^OnTOf25lMlj0@mTOe27jMlj0W3SUOhLmj0Y3RUOgLmj0Z3TUOeLlj0\\\\3SUOeLkj0]3TUOcLlj0^3TUOaLlj0`3SUO`Llj0a3UUO^Lkj0c3TUO]Lkj0e3UUO[Ljj0e3WUOZLhj0h3XUOWLhj0i3YUOVLfj0k3ZUOULej0l3\\\\UOSLdj0n3[UORLdj0o3\\\\UOQLdj0o3\\\\UOQLcj0Q4]UOoKbj0Q4^UOoKaj0R4_UOnKaj0S4^UOmKXg0lNj[OX5nLlKXg0lN_YO0]1X5lMmKVg0mN_YO5X1S5SNjKUg0oN_YO<R1k4ZNkKSg0oN`YO=Q1k4[NiKSg0QO`YO;b0GmNT5NiKSg0QO`YO;a0\\\\5lNXKRg0RO`YO;b0\\\\5kNWKSg0RO`YO;a0]5lNWKRg0RO_YO:c0_5lNTKRg0SO_YO9d0`5kNTKRg0SO_YO9c0a5lNSKRg0SO_YO8c0d5kNQKSg0TO]YO8e0c5kNQKSg0TO]YO8d0d5lNPKSg0TO]YO8d0d5lNPKSg0TO]YO7d0g5kNoJSg0SO^YO7c0h5mNmJRg0TO]YO8d0g5mNmJQg0VO]YO6d0h5mNmJSg0SO]YO8b0j5mNkJTg0SO]YO9`0j5oNkJTg0PO^YO<=j5QOjJUg0oN]YO=;l5SOhJVg0mN]YO`04P6YOcJWg0kN]YOg0IZ7jf0mG^YO[9Yg00000001O000000001O001N10001O0000000000001O00000O1000001N10000O2\\\\OZFaYOT8_O\\\\IPg0aN`YOS8@\\\\IPg0cN]YOR8C[Iof0eN]YOP8D\\\\Inf0eN\\\\YOP8GZImf0hNZYOn7IZIlf0kNYYOk7KZIlf0lNWYOk7MYIlf0mNVYOj7OXIkf0nNVYOj7OYIjf0mNWYOj7OYIjf0mNWYOj7O
YIjf0mNWYOj70XIif0oNVYOi71YIhf0nNWYOi71YIhf0nNWYOi71YIhf0nNWYOi72XIhf0nNVYOj72YIgf0mNWYOj72YIgf0nNVYOi73YIgf0nNUYOj75WIff0oNUYOk74VIgf0oNUYOk74VIgf0Z7ZYOeHef0\\\\7[YOeHdf0[7\\\\YOeHdf0\\\\7\\\\YOcHdf0]7\\\\YOcHcf0^7]YObHcf0^7^YOaHbf0`7]YO`Hcf0`7]YO`Hbf0a7^YO_Hbf0a7_YO^Haf0c7^YO^H`f0c7`YO]H`f0c7aYO\\\\H^f0e7bYO[H^f0f7aYO[H^f0e7cYOZH^f0e7bYO[H_f0d7bYO\\\\H^f0c7bYO]H^f0d7aYO\\\\H`f0c7`YO]Haf0b7`YO^H`f0a7`YO_Haf0`7_YO`Hbf0`7^YO_Hcf0`7]YOaHdf0]7]YObHdf0^7[YOcHef0\\\\7\\\\YOcHef0\\\\7\\\\YOdHcf0]7\\\\YOcHdf0^7[YObHdf0_7]YO`Hcf0a7\\\\YO_Hdf0b7\\\\YO^Hcf0b7]YO^Hcf0c7\\\\YO]Hdf0c7]YO\\\\Hbf0f7]YOZHcf0f7]YOZHcd0POl\\\\Og8aNYHbd0WOf\\\\O`8hNYHad0[Od\\\\O]8jNXHbd0]Ob\\\\O[8mNWHad0_Oa\\\\O[8mNVHad0Aa\\\\OY8nNWH`d0^Od\\\\O[8mNUH`d0^Of\\\\O\\\\8jNVHad0ZOi\\\\O`8eNVHbd0XOk\\\\Ob8dNUHad0VOn\\\\Oe8aNUHad0TOQ]Of8^NVHad0SOR]Oh8\\\\NUHbd0QOU]Oi8ZNUHad0QOV]Oj8YNUHad0POX]Oj8WNVHad0oNY]Ol8UNUHcd0nNY]Ol8UNUHbd0nNZ]Om8TNTHcd0oNY]On8SNSHdd0nN[]On8RNRHdd0QOY]Om8SNQHdd0TOX]Ol8SNPHed0UOW]Ok8UNnGed0XOV]Oi8UNnGed0ZOV]Oh8VNlGed0]OT]Oh8VNkGed0_OT]Of8WNjGfd0@T]Of8VNhGgd0CR]Oe8WNhGgd0DQ]Od8XNhGfd0ER]Od8XNfGfd0FR]Od8XNfGfd0GR]Oc8WNfGgd0GR]Oc8XNeGed0JR]Ob8XNdGfd0JR]Ob8XNdGfd0IT]Oc8VNcGgd0IS]Oe8UNbGhd0HU]Of8SNaGhd0IV]Of8QNaGid0HW]Oh8PN_Gid0IX]Oh8nM_Gjd0HY]Oj8lM^Gld0GY]Ok8kM]Gld0GZ]Om8iM]Gld0F[]Ob9eb0]F]]Ob9cb0^F]]Ob9cb0]F_]Ob9bb0]F^]Oc9bb0]F_]Ok5dMdMmd0`L`]Oj5fMeMjd0aLa]Oh5gMfMhd0bLa]Of5jMgMed0cLb]Oc5lMiMcd0bLc]Ob5mMkM`d0cLc]O`5QNkM\\\\d0eLd]O]5SNnMXd0eLf]OZ5UNPNUd0eLh]OX5WNQNQd0gLi]OU5YNSNnc0hLk]OR5ZNTNlc0iLk]OQ5[NUNjc0jLm]On4[NWNhc0jLo]Om4[NXNfc0kLQ^Oj4ZN[Nec0kLR^Oh4[N\\\\Nff0b1\\\\YO]Ndf0b1^YO]Ncf0a1^YO_Ncf0^1`YOaNaf0]1aYOaNaf0]1aYObNaf01kWOVNf1h1af0KRXOXN`1k1af0FWXOZNZ1o1af0B]XOYNU1S2af0@]XOZNT1U2af0_O]XOYNT1V2af0_O^XOXNR1Y2af0^O_XOVNR1[2_f0^OaXOUNR1\\\\2^f0^O`XOWNQ1[2_f0_O_XOVNS1Z2^f0A]XOVNU1Y2_f0AZXOXNV1W2`f0BYXOWNX1U2`f0FUXOWNZ1S2bf0FRXOXN]1Q2af0IPXOVN_1Q2af0JnWOWNa1n1af0LmWOVNb1n1bf0MiWOWNd1k1df0OfWOWNg1i1df00dWOXNg1h1ef01bWOXNj1e1ef04`WOXNj1d1ff04`WOXNk1b1ff04cWOWNh1e1ef02gWOUNg1g1bf03kWOSNe1i1`
f02PXOPNb1n1^f01SXOnMa1P2[f01Q[OM^c0oMQZOR2d2NZc0SNoYOo1i2LWc0YNmYOk1m2KVc0\\\\NlYOh1P3JSc0bNjYOd1U3FfKXN[g0`0hYOb1X3EQc0lNeYO_1\\\\3Dnb0oNdYO]1_3Cmb0ROcYOZ1b3Cjb0TOcYOZ1c3Ajb0WObYOX1d3@jb0YOaYOY1e3\\\\Okb0\\\\O_YOX1f3[Okb0_O]YOW1h3XOlb0DYYOU1l3UOkb0IWYOR1b3jMULZ1Rg0MTYOP1e3iMVLW1Rg03oXOo0i3gMVLV1Rg07lXOm0l3fMWLT1Qg0<hXOm0o3dMWLR1Rg0?eXOm0R4bMXLo0Rg0V3f\\\\OkKXLn0Qg0Y3W]OdLjb0]3W]O_Ljb0c3X]OXLkb0g3\\\\]OPLfb0Q4Y5010O0010O010O01O010O10O011N2N101L4\\\\L[SO^2hl0^M^SO^2dl0^MaSO^2cl0]MbSO_2al0]MdSO`2em0M3N2N2N2N2O0O3M2N2N2N2N2N2M3N2N2N2N3M4M4K3M5K4K5K5K6JfQPm0\"}}, {\"image_id\": 139, \"category_id\": 1, \"bbox\": [89.0, 194.0, 250.0, 202.0], \"score\": 0.9999999403953552, \"association_id\": 3, \"light\": [-2.5928406715393066, -1.8043116331100464, 2.4431934356689453, 1.5790425539016724], \"segmentation\": {\"size\": [1066, 1600], \"counts\": \"mnl2=fP1d0A8I3M3L4L2O2N2N1O1N2N2O1O1O001L4K5O1O1O1N2I7N2O1O1O2M2M3N2N203L3M2N1N3N2N1O2N2N1O2M200O1O2N101N1O2N1O1O101O0O2O001O001N101N1O2N^O[SOcLbl0T4O[O_SOdL]l0S42O1^O`SOZL`l0f3bSOWL_l0j3cSOSL\\\\l0n3fSOoKZl0S4fSOlKYl0U4hSOjKVl0X4kSOeKVl0]4;0O1O2N1N2N3N1O1O1I7G9N2O2M200O100`TOcJRk0^5lTOeJSk0\\\\5kTOfJTk0Y5mTOhJRk0X5mTOjJRk0V5nTOjJRk0V5nTOjJRk0V5nTOjJRk0V5mTOkJSk0U5mTOlJRk0f5000001O00000000\\\\OnTOPKRk0P5nTOPKRk0o4oTOQKQk0o4nTORKRk0n4nTORKRk0d5O00000000001O000000kNPUOPLQk0l3QUOULoj0i3SUOWLmj0h3SUOYLmj0f3TUOZLmj0c3UUO]Lmj0`3TUOaLmj0X3XUOhLjj0S3YUOmLhj0P3ZUOPMhj0m2YUOSMij0j2XUOVMlj0d2VUO\\\\MPk0\\\\2RUOdMPk0Y2QUOgMPk0X2PUOhMQk0V2PUOjMRk0R2PUOnMRk0n1PUORNQk0k1QUOUNPk0h1SUOWNnj0f1TUOZNmj0e1SUO[Noj0b1RUO^Noj0`1RUO`Noj0^1RUObNoj0\\\\1RUOdNoj0Z1SUOeNnj0Y1SUOgNnj0U1UUOkNmj0Q1UUOoNlj0m0WUOSOjj0j0XUOVOij0g0YUOYOij0c0ZUO\\\\Oij0>ZUOBWn0O10000000000000000000001O0000000000000000001O00000O10000000001N10000000000O100000000O10000000000000000O1000000O10000O1000000O100O1O10001N100O1O1O101N100O101O0O101N10001O1N2Na[PY1\"}}, {\"image_id\": 139, \"category_id\": 1, \"bbox\": [291.0, 233.0, 325.0, 290.0], \"score\": 0.9999999403953552, 
\"association_id\": 2, \"light\": [-2.215163469314575, -2.0218045711517334, 2.0543665885925293, 1.8741037845611572], \"segmentation\": {\"size\": [1066, 1600], \"counts\": \"R]_9S1TP14L5L3N1O1O1O001O1O1O1O1N2O1N101N2N101O1O0O2O000O2N1O2O001O1O0O2N2N1O2O2O1N1O1O1N2N2O1N200O1O1O1N2N2N2O1O1O1O1O1N2N2N2N2O1O1O1O1N2N2O1N2O001O1O1N101O1O1O100O1O1O1O1N1O2O1O1O1O1O1O1NB]SOYL`l0i3aSOWL\\\\l0k3dSOVLYl0l3gSOTLYl0m3gSOSLWl0n3iSOSLVl0n3jSORLVl0n3jSORLUl0o3jSORLUl0o3kSOPLTl0R4lSOnKSl0R4mSOoKRl0R4nSOnKPl0g4O010O1O1O100O[OTTOmKlk0S4UTOmKkk0R4UTOoKjk0R4VTOnKik0S4VTOnKhk0T4XTOlKgk0l4NZO[TOjKfk0V4ZTOjKek0n4O100O1O1O0O2N2O1O1O001O1O1N2N2N2O1O100O1N2O1YOg0F:L4M3M3O1M3M3L4L4N2N2N2O10O0nNTWOhIlh0W6UWOiIjh0V6WWOkIih0R6ZWOnIeh0R6\\\\WOnIdh0Q6\\\\WOPJdh0n5]WOSJch0l5^WOSJch0l5]WOUJch0i5^WOXJbh0f5`WOZJ`h0d5aWO]J_h0b5aWO_J_h0`5bWO`J^h0_5cWOaJ]h0^5cWObJ^h0]5cWOcJ]h0[5eWOeJ[h0Z5eWOgJ[h0X5fWOhJZh0X5eWOhJ\\\\h0W5eWOiJ[h0V5fWOjJZh0V5fWOjJZh0U5fWOkJ[h0T5fWOlJZh0S5gWOmJYh0Q5iWOoJWh0o4kWOQKUh0n4lWORKTh0m4mWOSKSh0m4mWORKTh0m4lWOTKTh0k4mWOUKSh0j4nWOVKRh0i4oWOVKRh0i4oWOWKQh0g4QXOYKog0f4QXO[Kog0d4RXO\\\\Kng0c4SXO]Kmg0b4TXO]Kmg0b4SXO_Kmg0`4TXO`Kmg0JcWOj2b0\\\\Mkg0GhWOi2?`Mig0ElWOh2<bMig0EmWOg2;dMig0CnWOg29gMig0@QXOg27iMig0^OSXOf25lMjg0YOWXOg20PNmg0QO[XOi2JUNQj0f1PVOZNRj0b1PVO^NQj0_1QVOaNPj0]1QVOcNPj0\\\\1PVOdNQj0Z1oUOgNRj0W1oUOiNSj0S1oUOmNTj0n0nUOQOVj0j0lUOVOVj0f0lUOZOUj0d0kUO]OVj0a0kUO_OWj0>jUOAYj0<hUOD[j07gUOI]j0LjUO4ij0kNeUOU1Xm000O1000000000000O100000000O1000000000000O1000000O1000000O10000000000O1000000O1000000000000O1000000O10O1000000000O1000000000000O10000O10000O1000000O10000O10000O100O100O2O0N2N2O1M3N200O100O2O1NZmoo0\"}}, {\"image_id\": 139, \"category_id\": 1, \"bbox\": [634.0, 608.0, 434.0, 319.0], \"score\": 0.9999998807907104, \"association_id\": 1, \"light\": [-1.388062834739685, -2.9456639289855957, 1.3131481409072876, 2.7603485584259033], \"segmentation\": {\"size\": [1066, 1600], \"counts\": 
\"hjdd0S1SP18I5L3N2N10001N10000O100000000O10000O10000O1O010O1O1O1O1O100hPOQNTo0P2lPOQNSo0o1mPORNQo0T200O101N10000O1O1O1N3L3N2N2N2N2O1O2N1O1O1O1O2N1O1O1O1OAQROfMmm0l2O1O_OUROeMjm0\\\\2VROdMhm0^2XRObMfm0_2[ROaMcm0a2\\\\RO`Mbm0b2^RO_M`m0b2`RO^M_m0W3O\\\\ObRO^M]m0c2cRO^M\\\\m0X3O1O1O1O2M2N2O1O1O1O100O2N1N2O1N3M2O1O1O101N100O1O1N2O1N2O1O1O1O2O0O1O1O1O1N2O1N2O2O0O1O100O1O1O1N2N2O1O1O1O100O1O1O2N1N2O1O1O1O1O100O100O1O100O1N2O1O1O1O10000O1000000O1000000O2O0OiN`UOfK_j0[4aUOeK_j0\\\\4aUOcK]j0`4cUO_K\\\\j0b4dUO^K[j0d4eUO[KZj0l5N1O1O1000O1000000000O10O100000O1O010N2O2M3N3M2N3N1N101N1O2N1O2M2O2N1O2N1O100O1O2M\\\\OPWORIoh0o6RWOPIlh0S7SWOmHjh0V7WWOiHhh0X7YWOgHfh0Q6XWOeJ2ZOeh0Q6ZWOfJ1XOdh0R6\\\\WOgJOWOdh0S6\\\\WOhJ0TOch0T6^WOiJOQOch0V6^WOjJ0nN^h0]6bWOeJfh0a5ZWOaJXh0k5gWOXJVh0i5iWOZJUh0f5kWO[JSh0f5mWO\\\\JQh0d5oWO]JPh0c5QXO^Jng0a5SXO_Jlg0b5SXO`Jjg0a5WXO_Jhg0a5YXO`Jeg0a5[XO_Jdg0a5\\\\XO`Jcg0`5^XOaJag0_5_XOaJ`g0_5`XOcJ_g0\\\\5bXOdJ]g0]5cXOdJ\\\\g0[5eXOeJ[g0Z5eXOgJ[g0X5fXOiJYg0U5iXOkJWg0T5jXOlJVg0S5kXOnJTg0R5lXOnJTg0Q5mXOoJSg0P5nXOPKRg0P5nXOPKRg0o4oXOQKQg0o4oXOQKQg0n4PYORKPg0m4QYOSKof0l4RYOTKnf0k4SYOUKmf0i4UYOWKkf0h4VYOXKjf0g4WYOYKif0g4WYOXKjf0g4WYOYKif0f4XYOZKhf0f4XYOZKhf0e4YYO[Kgf0d4ZYO\\\\Kff0c4[YO]Kef0b4\\\\YO^Kdf0a4]YO_Kcf0_4_YOaKaf0^4`YObK`f0]4aYOcK_f0\\\\4bYOdK^f0[4cYOeK]f0[4cYOeK^f0Y4cYOgK]f0X4dYOhK]f0V4dYOjK]f0T4dYOlK]f0Q4dYOPL]f0aN[XOd4Z1kL`f0VNhXOb4i0XMef0lMhXOi4d0[Mag0b2`XO^Mag0`2`XO`Mbg0^2^XObMcg0\\\\2^XOdMdg0Y2]XOgMeg0V2\\\\XOjMhg0Q2YXOoMlg0j1VXOUNRh0b1PXO^NUh0Z1nWOfNWh0T1jWOlNZh0n0hWORO\\\\h0i0dWOXO^h0e0cWO[O_h0b0bWO^O`h0>bWOBah09aWOGhh0M[WO3Ri0\\\\ORWOd0il01000000000000000000000O10000000O10O10000000000O1000000O10000O1000000O10001O0000000O101O000O10001O0O10000000001N100O10000O101O000O1000001O0O100O1O1O100O100O2O0000000O10000O100O101N100O100000001N100000000O10001N100O10000O100000001N100O101O0O100O2O000O10001O000O2O1O1LZoXa0\"}}, {\"image_id\": 140, \"category_id\": 1, \"bbox\": [360.0, 154.0, 317.0, 542.0], \"score\": 1.0, \"association_id\": 1, \"light\": 
[-1.9259461164474487, -2.563504457473755, 1.725642204284668, 2.3708579540252686], \"segmentation\": {\"size\": [768, 1024], \"counts\": \"bY^82mg03L2O2N1O2N1N2M3M4K4M3M3O2N1O1O1O2N1O1N3M3WDkNb0X1[OlNa0X1\\\\OkNb0W1\\\\OkNa0X1]OiNb0X1]OjNa0X1]OiNb0Z1[OgNc0\\\\1[OfNa0_1\\\\ObNa0d1[O^N`0T3PNnLn1[3jMgLS2\\\\3kMeLS2^3kMcLT2_3jMbLV2_3hMbLW2`3fMbLY2`3bMdL]2]3^MhL`2\\\\3WMlLd2Y3YMiLa2_3[McLa2c3\\\\M^Lc2d3ZM^Le2d3YM]Lf2e3XM\\\\Lg2f3WM[Lh2f3VM\\\\Lj2e3oLbLo2`3RL^Mm3d2mJaNQ5b1jJbNT5b1hJ`NV5c1hJ^NW5e1fJ\\\\NY5f1eJ\\\\NY5f1eJ[NZ5g1dJZN\\\\5f1bJ]N\\\\5e1aJ^N]5d1aJ]N^5e1_J^N_5d1_J^N^5f1`J[N]5k1^JVNa5n1[JTNc5o1[JQNd5R2YJoMf5T2WJnMh5S2VJnMi5U2SJnMl5T2QJmMn5W2mIkMS6X2iIjMU6b2_I_M`6f2\\\\I[Mb6g2\\\\IZMc6i2ZIYMd6i2ZIXMe6j2YIXMe6i2ZIXMf6i2XIYMf6i2XIXMh6j2UIWMj6l2SIUMm6m2PIUMo6m2oHSMQ7n2mHSMR7o2mHQMS7P3lHPMT7Q3jHPMV7Q3iHoLW7Q3hHPMW7R3hHnLX7R3gHoLY7Q3gHoLY7R3fHnLZ7S3eHmL[7S3eHlL\\\\7U3cHkL]7U3cHkL]7U3cHkL\\\\7V3dHjL\\\\7U3fHjLY7V3hHjLX7V3hHjLW7V3jHjLV7U3kHkLT7V3lHjLS7V3nHjLQ7W3oHiLP7W3QIiLn6X3RIhLn6X3RIgLo6Y3QIgLn6Y3SIgLm6Y3SIgLl6Z3TIfLl6Z3TIeLm6[3SIeLm6[3TIdLk6P2aHUId0k4k6n1dHUIb0m4j6m1fHUI`0n4j6l1hHUI>o4j6k1iHVI=n4k6k1jHVI;o4k6j1kHXI9n4l6i1mHXI7o4l6g1oHZI5o4k6g1QIZI4n4l6g1RIZI2o4l6g1RIZI2o4l6f1SI[I1n4m6g1SIZI0o4m6g1RI[I1n4m6g1RI\\\\I0m4n6g1RI\\\\I0l4n6i1QI]I0j4o6i1QI^IOi4P7j1PI]I0i4P7j1oH_IOh4Q7j1PI_INf4S7k1oH`IMe4S7l1PI`ILd4T7m1nHaIMa4U7o1nHaIL`4U7R2lH_IO^4U7V2jH]IO^4V7W2iH\\\\I1\\\\4V7[2fH[I3Y4X7_2aHYI7X4W7Z4hHeKX7\\\\4gHeKY7\\\\4fHdKY7]4gHbKZ7_4eHaK[7_4dHbK\\\\7^4dHbK\\\\7^4cHbK^7^4bHbK^7^4bHbK^7d2_HWI3U4^7b2aHYI1U4^7a2cHYI0U4]7a2dHZIOU4]7_2gH[ILV4\\\\7_2iH[ILT4\\\\7_2kH\\\\IIU4\\\\7\\\\2nH_IFU4\\\\7Z2QI`ICV4\\\\7Y2RIaICU4[7Y2TI`IBW4Z7W2VIbI@V4[7W2WIbI_OV4Z7W2XIcI^OU4[7W2YIcI\\\\OV4[7V2ZIdI\\\\OT4[7W2ZIdI\\\\OU4Z7V2\\\\IdI[OU4Y7U2_IeIXOU4Z7U2_IfIXOT4Y7T2bIgIVOT4X7S2eIgITOV4W7Q2hIhIROV4V7o1lIjIoNV4U7o1nIjInNV4T7n1QJjImNX4Q7m1TJjIlNX4P7n1TJjIlNX4P7m1UJkIlNW4o6n1UJjInNW4m6n1VJkInNW4k6n1VJkIPOV4k6o1UJkIQOU4j6o1VJkIQOV4i6o1VJjISOV4g6P2VJiITOW4g6n1UJkIVOV4e6o1UJkIVOV4e6n1VJkIVOV4e6n1VJkIWO
V4c6n1XJkIUOW4c6m1YJkIVOW4a6m1[JkITOX4a6l1]JjITOY4_6l1^JkISOX4a6k1^JkISOY4_6k1_JlISOX4^6k1aJkIROZ4]6k1aJkISOY4\\\\6k1cJjIROZ4]6k1bJiISO[4\\\\6j1cJiIRO]4\\\\6h1dJiIQO_4]6e1cL[N^3c1cL]N^3a1cL_N^3^1dLaN^3]1cLcN]3\\\\1dLdN]3Z1eLeN[3Z1fLfN[3Y1eLgN\\\\3W1eLhN\\\\3X1dLhN]3W1dLgN^3W1cLhN`3V1aLhNb3V1_LhNc3V1_LhNc3W1^LhNc3V1^LiNd3U1^LiNc3V1^LjNc3T1^LkNd3R1_LmNb3o0aLPOa3l0bLSO`3h0dLWO_3e0cL[O^3b0eL\\\\O]3b0dL]O^3a0cL_O^3`0bL@_3>cL@_3>bLB_3<cLC^3:dLE^36gLI[3IRM6P3FRM9P3CTM;n2CSM=n2@UM>m2@TM`0m2^OTMa0n2\\\\OUMd0k2YOWMg0k2TOXMm0i2mN\\\\MS1i<O100O010O1O00010O010O010O10O1000O010O1O010O1O100O1O010O1O00100O1O2O0O100O1O010N101O1O2N1O1N3N001O1N2N3M2M5KmeS8\"}}, {\"image_id\": 141, \"category_id\": 1, \"bbox\": [717.0, 439.0, 769.0, 590.0], \"score\": 0.9999850988388062, \"association_id\": 2, \"light\": [-1.1172239780426025, -2.109194278717041, 1.0502692461013794, 1.9875766038894653], \"segmentation\": {\"size\": [1102, 1600], \"counts\": \"m^Th06SR16K4N3M2N3N1N3N1N2000001N100000000O101O0000O100O1O1O100O1O1O1O1N2O1O1O2N1O1O100O100O100000001N10001O2N2N2N1N3N1O1OSgY10iXfN5L5K4M4M2O1O1N3N1O7H8I1PPOfNjn0[1TQOhNjn0Z1RQOjNln0W1RQOmNkn0U1QQOoNmn0R1PQOSOln0P1QQOTOln0n0PQOWOmn0X2M4L5K5K3M2N1O1O1N2O001O001O1O010O1O001O2N2N2N3N1N3M1O1O1O00100O001O010O010O0100000000000000000O100000000000O01000O010O10O10O100O01000O1000O010O100O010O1O010O001O10O01O100O1O100O2N1O100O1O010O1O01O01O00010O00001O000010O01O001O10O01O1O1O0010O01O0010O01O001O010O1O001O100O1O1O1O1O010O001O0000001O0000001O000010O0001O001O00001O001O1O001O00100O1O00100O2N1O101N1O1O10O0100O00010O01O0001O01O0001O01O01O00010O00001O0010O01O001O001O1O001O001O010O00001O001O001O001O001O00001O1O001O001O001O001O00001O00001O0000001O0000001O00001O001O1O001O1O1O1O010O001O001O0000000010O0000001O000010O01O001O001O1O010O001O00100O1O00101N101N:F>jIjSOY5gl0NlNkJQUOT5hj0XKTUOf4ej0hKVUOW4aj0WLZUOh3\\\\j0gL`UOY3Wj0VMcUOi2Xj0i2L5K4M3L4N2O2N1N2O1O1O1N2NkKfVOZOYi0j0kVOPOQi0U1RWOfNlh0_1RWO`Nkh0e1TWOZNkh0i1TWOVNjh0m1VWORNih0Q2VWOnMih0T2WWOkMhh0W2WWOiMhh0Y2XWOfMgh0\\\\2
XWOdMgh0_2XWOaMfh0a2ZWO^Meh0e2YWO[Meh0X6O1N2O1N2N2O1O1O1OnLgWOQLXh0Q4iWOmKWh0T4iWOkKVh0X4iWOgKWh0X7O100O100O10000000001N100000000000001O000O100000001N1000000O2O000O2O0O101N10VN[XOlHcg0W7`XOeH^g0^7eXO^HYg0f7iXOVHUg0n7jXOQHTg0R8lXOnGQg0V8mXOjGRg0W8nXOiGPg0Z8nXOhGof0Z8QYOfGnf0[8RYOfGkf0]8SYObGnf0`8QYO^Gof0e8PYOXGRg0P:1O2M2M4J60O3N2N1O1N2O2N1O1O2M2O2N2N2N2N1N3N1N1i[OWDha0k;T^O[Dga0g;U^OaDfa0a;U^OfDea0];V^OkDda0Y;V^OPEca0T;Y^OSEaa0o:\\\\^OUE`a0o:\\\\^OVEaa0l:[^OXEba0k:[^OXEca0i:[^OZEba0i:[^OZEca0g:[^O[Eda0f:Y^O]Eea0e:Y^O]Eda0f:Z^O]Eaa0g:]^O[E^a0j:`^OXE[a0m:d^OTEWa0Q;h^OPEUa0S;j^OoDSa0T;l^OlDSa0U;l^OlDRa0V;m^OkDRa0V;n^OjDQa0W;n^OjDQa0Y;k^OiDTa0[;h^OfDWa0Y>O2M2N2N2N2N2N2O1O1O1O100O100O1O100O1O1O1O1O1O001O1N2O1N2O1O1O100O1O100O100000000000000000001O000000000000000OmL[@_Ce?X<l@`CT?X<\\\\A`Cd>]<bA`C^>^<fA`CZ>^<jA`CV>_<lA`CT>_<oA_CQ>`<QB_Co=`<SB_Cm=_<VB`Cj=_<XB`Ch=_<ZB`Cf=^<^B`Cb=]<cBaC]=]<gBaCY=]<lB`CT=_<oB_CQ=_<SC_Cm<`<UC_Ck<`<WC_Ci<a<XC^Ch<a<ZC^Cf<b<[C]Ce<c<[C]Ce<b<]C]Cc<c<^C\\\\Cb<d<_C[Ca<e<`CZC`<f<bCXC^<h<cCWC]<h<fCUC[<k<gCSCY<m<iCQCW<o<jCPCV<P=kCoBU<P=mCoBS<Q=nCmBS<S=nClBR<T=PDiBQ<W=PDgBQ<Y=PDeBQ<[=QDbBP<^=RD_Bo;`=TD]Bm;c=_400000000O10001N100O100O100O100O1O100O1O2O0O101N100O2bEmZO_7Se0^HS[O_7nd0^HW[O^7jd0aHZ[O\\\\7gd0aH][O\\\\7ed0bH^[O\\\\7cd0aHb[O[7ad0aHd[O\\\\7`d0]Hg[O^7`d0YHi[O`7_d0kGV\\\\On7Zg0J3M3M3M2N2M3N2M2N3M3L5K4L5J6J6K5L4M2M3N2N1O2N1O1N3M2M4L3N3M2N3N1N3N2N2N3M3L4L5K5H7K5J6L4K5L3N3L3M5K5J7H9G?_OXQj3\"}}, {\"image_id\": 141, \"category_id\": 1, \"bbox\": [123.0, 56.0, 698.0, 613.0], \"score\": 0.999997079372406, \"association_id\": 1, \"light\": [-1.9615156650543213, -1.9568171501159668, 1.8280205726623535, 1.8057621717453003], \"segmentation\": {\"size\": [1102, 1600], \"counts\": 
\"ShT42YR16K5WOGUoN=hP1HRoN<kP1HPoN;oP1EQoN;oP1FPoN:PQ1GnnN9SQ1GmnN9SQ1HlnN7UQ1a00O1O1O100N3N1N2N2M3O1O1O100N2N2N2K5K5N2O1O100000O10O[UOOob01ZYO8SO5_g0DZYO<SO5`g0@\\\\YO=RO7]g0^OaYO;QO:Ug0BiYO6oN<Vg0^OjYO8nN<Wg0\\\\OkYO8mN>Wg0TOQZO?hN>Zk0CdTO?Zk0BfTO>Zk0BeTO`0Zk0AeTO`0Yk0AgTO?Yk0BeTO`0Zk0@fTOa0Yk0_OfTOc0Xk0_OgTOb0Xk0^OgTOd0Xk0]OgTOc0Xk0^OhTOc0Wk0^OgTOd0Xk0]OgTOe0Wk0[OhTOg0Vk0[OiTOf0Vk0[OhTOg0Wk0YOiTOh0Vk0YOiTOg0Wk0ZOhTOf0Xk0[OhTOd0Xk0]OhTOb0Yk0^OgTOa0Yk0@fTO`0Zk0AfTO>Zk0CfTO<Zk0EeTO;[k0FeTO:Zk0GfTO8[k0IcTO7]k0JcTO5]k0MbTO2^k00`TO0`k01`TON`k04_TOKak06^TOJbk08]TOGdk09\\\\TOFdk0;[TOEek0;\\\\TOBfk0>[TO^Ohk0b0ZTO[Ogk0f0YTOXOhk0h0ZTOUOgk0k0ZTOROhk0n0ZTOoNgk0Q1[TOlNfk0U1\\\\TOfNfk0Z1^TOaNck0_1bTOZN`k0f1eTOTN\\\\k0m1b20000001O0001O00000O101O00000O101O0000001N10001O0O2O0N3N2M4M2M4L4K5I8GnXd0Bcg[O7J6J5K4L3N2N1O2N101N10001N10000O2O00000O2O00001N1000001O0O101O10O01O1O1O1O2N1O1O2fQOPORk0Q1dTOYO[k0h0^TO@`k0a0ZTOEek0<VTOIik08RTOMmk04oSO0Pl02mSO0Rl01lSO1Sl00kSO1Ul00iSO2Vl0OhSO3Wl0NgSO4Yl0MdSO5[l0MbSO5]l0L`SO8^l0NZSO6dl0l2N1O3M2N2N1O010O1O001O0010O010O10O10000001O01N1000000O10O0100O01O10O10O10001O01O100O001O001O00000000O010O010O00100O010O0010O0100O0010O01O010O10O010000O1000000O10O10O1O100O100O1O1O010O00100O010O100O010000001O000O01000O010O0000001O0000010O0000001O0000YVO`J`f0`5_YObJaf0^5]YOcJcf0]5\\\\YOdJdf0\\\\5[YOeJef0[5[YOeJff0Z5ZYOfJff0[5ZYOdJff0\\\\5ZYOdJgf0\\\\5YYOcJgf0]5ZYObJgf0^5YYOaJgf0_5YYO`Jif0`5S3000O101OO0100O1O010O0010O010O010O10O1000001OO3N1O1O1N101N100O101NYOg0ZOg0SOm0M2M4N2M3NaLVVO]Nhi0f1^VOSN`i0o1cVOnM[i0U2dVOkMZi0W2eVOjMXi0[2fVOdMXi0`2gVO`MVi0e2gVO[MVi0l2gVOTMVi0V3bVOjL\\\\i0_3^VOaL_i0X6O1O2N1O1O2O0O2N101N1O2N1N3N2M3M2N3N1O2N1O2O0O2N101N1O2O1N1O2O1N1O2N2N2M3N3J5K4L4L4L3M4N100O101O001N2O0O2M3M2M4K4M4M2O1N200O1O1O0O2N1fZOlCZd0e=]O6F:D;G9L4M3N2M2O2M2O1N2N2M3H8SOm0D<L4M3M3N2N3N1N2N2N2M3M3M3M3L4L4N2N2O1O1O1O10000O1000000O100000000O100000000000O100000000000000000000000000O100000000O1000000O100\\\\Ng@S@Y?l?j@R@V?m?n@P@R?o?QAn_OP?Q`0TAl_Ol>R`0XAl_Oh>S`0\\\\Aj_Od>U`0_Ai_Oa>V
`0cAf_O^>Y`0fAd_OZ>[`0iAc_OW>\\\\`0lAb_OT>]`0oAa_OQ>]`0RBa_Oo=_`0SB__Om=``0UB^_Ol=a`0WB]_Oi=b`0ZB[_Og=e`0ZBY_Og=f`0Q2O1O100O1O100O100O11O0001O00000000000000Y@__Ob=Y2QAU;[1eBc=S2VAW;V1gBd=o1[AX;P1kBe=j1^A[;j0mBi=d1bA];d0QCj=]1hAa;;TCn=V1mAe;2WCS>m0SBk;F[CX>a0]BQ<XO`Ch>J\\\\Bc<hNfCQb0Y<j]OkCWb0T<f]OnC[b0R<c]OoC_b0P<^]ORDcb0n;[]OTDeb0l;X]OVDib0j;U]OWDlb0j;Q]OWDPc0i;m\\\\OZDSc0g;i\\\\O[DYc0e;b\\\\O^D_c0c;\\\\\\\\O`Dgc0R=3M3N2M2N2N2N100O2N1O2N2N2N2N1O2N1O2N1O1O1O1O100N3N2N2N2N2N1O2N1O1O1O1O1O001O1O1O1O2N2N2hLP[OnHQe0g6[[OWIgd0`6b[O^I_d0^6e[OaI\\\\d0]6g[OaIZd0\\\\6i[OcIXd0[6j[OdIWd0Z6k[OeIVd0Y6l[OfITd0Y6m[OgITd0V6o[OiIRd0U6o[OkIQd0S6R\\\\OlIoc0Q6T\\\\OnImc0o5U\\\\OQJlc0k5X\\\\OTJic0h5Z\\\\OXJgc0d5]\\\\O[Jdc0b5_\\\\O]Jcc0_5_\\\\O`Jcc0^5_\\\\OaJcc0\\\\5_\\\\OcJbc0Z5`\\\\OfJbc0W5`\\\\OhJac0U5b\\\\OjJ`c0R5c\\\\OlJac0o4b\\\\OPK`c0g4i\\\\OWKZc0\\\\4Q]OcKRc0j3_]OTLeb0]3h]ObL[b0X3i]OfLZb0U3j]OjLXb0R3l]OlLVb0P3m]OnLUb0P3m]OoLTb0o2n]OoLUb0m2o]OPMSb0n2o]OPMTb0l2P^OQMSb0k2Q^OnLWb0k2Q7@f0YOd0SOooUj0\"}}, {\"image_id\": 142, \"category_id\": 1, \"bbox\": [317.0, 15.0, 50.0, 87.0], \"score\": 0.9999997615814209, \"association_id\": 1, \"light\": [-1.6196486949920654, -2.7042272090911865, 1.5582945346832275, 2.629957914352417], \"segmentation\": {\"size\": [300, 400], \"counts\": \"fnl25W91O1O001O000010O000001O0fNM[I3b61]IO`64`IL^66bIK[67eIIW6<iICU6l0^ITO`6Q1`IlN_6Y1ZIkNd6Z1RIlNm6j100O1OLQImMm6S2TInMi6o1]IQN^6R2dInM\\\\6R2dInM[6T2dIlM\\\\6T2dIlM\\\\6V2bIjM^6W2aIhM`6X2`IgMa6Y2_IfMb6Z2800O2N1O3N6UOiHTOY7g0QIPOS7m0h0N2M3O2K`d9\"}}, {\"image_id\": 142, \"category_id\": 1, \"bbox\": [93.0, 104.0, 185.0, 195.0], \"score\": 0.9999998211860657, \"association_id\": 2, \"light\": [-2.888303518295288, -1.8357001543045044, 2.703157424926758, 1.649002194404602], \"segmentation\": {\"size\": [300, 400], \"counts\": 
\"Rak04V92O101N1O1O1O1O1O100N2M3O100O100M3WLYOlNh0Q1AhN`0W1BhN>V1EhN<W1FgN;X1IdN8[1=PNDo1`0nM@R2b0kM_OT2b0lM^OS2c0lM^OR2e0lM\\\\OS2e0mM[OR2f0nMZOQ2g0nMZOQ2g0lM\\\\OS2e0lM\\\\OR2f0nMZOP2g0PNZOn1h0QNYOn1h0PNZOo1g0oM[OP2e0PN\\\\On1e0SN[Ok1f0UN[Oi1f0WN[Oh1f0WN[Oh1e0XN\\\\Og1e0YN[Of1f0ZNZOf1g0YNYOf1h0ZNXOf1h0ZNkMSNX1b3n0\\\\NhMUNX1_3Q1bNnN^1S1bNlN^1T1WNoMXNk0a3W1TNXOl1i0QNYOo1g0PNZOP2g0nMZOQ2h0mMYOR2j0jMXOV2k0eMWOZ2l0bMVO^2k0`MVO`2k0^MVOb2l0[MUOe2n0WMSOi2h3000O100O100O100M3N200O1O1O1M3O1O100N2N2O100O1O1N2N200O100N2N2O010000000O100O100O1O1O0N3M3M2O2O100\\\\OUKfLk4V3WKkLk4S3VKnLj4Q3WKnLi4R3WKoLi4P3XKoLh4o2[KPMf4n2\\\\KRMd4l2^KSMc4l2^KTMa4k2aKUM_4i2cKWM]4g2eKYM[4e2gK\\\\MX4b2jK^MV4_2mKbMR4\\\\2PLbMQODn4h2SLcMPOEm4g2TLdMoNE85T4`2gLeMnNG49T4Z2kLeMoNF3;S4X2nLdMYOOl3Z2]2N2N2N2M3M3N200O10000O100001O1O001O001O2N2N3M5Ka0_O7I1O1O1O1O0000O1YOQJoMQ6`1eJYN]5l0QJoNa7j0d0M3O1N2N2O2N1O1O2N<_OYdS1\"}}, {\"image_id\": 143, \"category_id\": 1, \"bbox\": [455.0, 588.0, 82.0, 79.0], \"score\": 0.854875922203064, \"association_id\": 2, \"light\": [-3.072934627532959, -1.317781686782837, 2.980612277984619, 1.1657960414886475], \"segmentation\": {\"size\": [800, 691], \"counts\": \"_QT;5hh05N001O001O0QXO1mf00PYO4nf0MnXO8Pg0JhXO>Vg0EfXO=Yg0f0O1O10000O02N101N6K1O1N5K1O2O1OO10O001000O001O00001O10O0100O10O0003N0O10O010O01O0000000001O1O1O00001O0AXXOFgg09ZXOHfg06[XOJfg04[XOMfg0N]XO2dg0L^XO4bg0IaXO7^g0HdXO8\\\\g0HdXO7]g0HeXO7[g0GgXO8Yg0HhXO7Zg0FiXO9og000O100O101N1Nfeg3\"}}, {\"image_id\": 143, \"category_id\": 1, \"bbox\": [97.0, 629.0, 119.0, 104.0], \"score\": 0.9992645382881165, \"association_id\": 4, \"light\": [-3.1765072345733643, -0.7809914946556091, 3.043736457824707, 0.6318193078041077], \"segmentation\": {\"size\": [800, 691], \"counts\": 
\"U^\\\\26ih05K3M2N2M3N3M4L2N2O2M5K3M2N8I:D4L4L3M2ZOf0N2N2000O1O1O100O10000O105QMPZO3NZ2gf0G3O3N02NO1N002O0O1O0O101N3M1O2N1O1O1N23M01O1O1O1O002N2N2N4[OWXOKmg0NVXO2og0FSXO;Yh000O10O10O100O100O1000000O10000O010000O010000000O10000000O10000O10000O100O10000O0100O0100O10O0100O1O1O1O1MT]b;\"}}, {\"image_id\": 143, \"category_id\": 1, \"bbox\": [204.0, 605.0, 118.0, 100.0], \"score\": 0.9999749660491943, \"association_id\": 1, \"light\": [-2.3901190757751465, -1.2968560457229614, 2.2506964206695557, 1.1526201963424683], \"segmentation\": {\"size\": [800, 691], \"counts\": \"noo4:bh0;G6K6SOPO^YOX1[f0nN_YOV1^f0mN^YOX1^f0iNZYOa1bf0`0O001O00001O1O001O0010O02N10O9H2M10010O0O100000O101O1N1000O10O01O001O1O1O1N3N2N1O1O2N2N001O2N1O001O2N001O0O2O1N101N1O1011N001O0001O000O2]OZXOJhg00[XO1fg0I^XO8ig0^OZXOb0Qh0010O1000O010000O001O0010O1O0010000O100O10001N103L2O1N2O001O00001N10001N101Ncco8\"}}, {\"image_id\": 143, \"category_id\": 1, \"bbox\": [317.0, 604.0, 100.0, 79.0], \"score\": 0.9999210834503174, \"association_id\": 3, \"light\": [-2.389671564102173, -0.9714224338531494, 2.3094735145568848, 0.8657548427581787], \"segmentation\": {\"size\": [800, 691], \"counts\": \"SXh78gh02N2N100O100O2O0XXOEkf0;SYOHkf0:SYOHlf09gXO4Wg0l0O1O2N1O1O1O1O1O00001O01O10000O3M8I1N5L000O2OO2aXOgNTg0Y1fXOlN[g0\\\\101O0001O001O0O2O000O2O0O101N101N1O1O2O101N1O2N2N2N000O10001O1[OkWO=\\\\h001O0O100000O10OO2O100O01000O10O1000000O101O0AeWO8bh0O2O0O100O1O2O1NZ]e6\"}}, {\"image_id\": 143, \"category_id\": 1, \"bbox\": [607.0, 542.0, 45.0, 48.0], \"score\": 0.9871140718460083, \"association_id\": 6, \"light\": [-2.237987995147705, -1.493157148361206, 2.2250967025756836, 1.3761271238327026], \"segmentation\": {\"size\": [800, 691], \"counts\": \"ahj>;ch03M4L3C=O2M2O2N1O000001O9G1O2N1000O10O01O100O01O1O1O1O1N201N2O0O2N1O1O1O001N101O1O6J001Nhlm0\"}}, {\"image_id\": 143, \"category_id\": 1, \"bbox\": [50.0, 617.0, 57.0, 96.0], \"score\": 0.9233735799789429, \"association_id\": 7, \"light\": [-0.8981645703315735, 
-2.736140727996826, 0.7474883198738098, 2.5643885135650635], \"segmentation\": {\"size\": [800, 691], \"counts\": \"jfW1f0Vh07K6J:F4M2M4lN^NdZOd1[e0aNRZOK0g1me0n002O1O000000000000O2N1O7I7I4L3M3SNUYO`1\\\\g0J1N101N3N001N2M[XOoN`g0n0^XOWOag0U100O1O2OYObXOJ_g04bXOZOO>`g06gXOJYg0GbXO169eg0A^XOa0Ph0000O100001O1O1O1N3N2M3M3NPbW>\"}}, {\"image_id\": 143, \"category_id\": 1, \"bbox\": [147.0, 566.0, 56.0, 81.0], \"score\": 0.9625920057296753, \"association_id\": 8, \"light\": [-1.8569309711456299, -2.4333815574645996, 1.7440204620361328, 2.257822036743164], \"segmentation\": {\"size\": [800, 691], \"counts\": \"n]c37fh09H6J4nN^O`YOf0^f0]O\\\\YOh0af0[OZYOj0df0j0O2N1O0010O011O2N;E3M1N20OO1O2N100O1O1O2N1O2M2O3L3M4M4K5K4K4LcWOKdh000000O100O10000O10000000000000O100OPdl;\"}}, {\"image_id\": 143, \"category_id\": 1, \"bbox\": [552.0, 564.0, 113.0, 108.0], \"score\": 0.8715171813964844, \"association_id\": 5, \"light\": [-2.511714458465576, -1.398634672164917, 2.4384844303131104, 1.2144262790679932], \"segmentation\": {\"size\": [800, 691], \"counts\": \"ei_=5jh02O2N2N1WYOKed08U[O2dd00PZOFh0=Ve0OoYOFg0>Ze0<cZOH\\\\e09bZOI]e07bZOJ^e06_ZON`e03]ZO0be01XZO5ge0MTZO6le0a1N1O0010O000UMXZO_2Uf0000O_MgYO[2`f0O010fMlYOg1]e0ZNeZON>Z1kd0iNgZOL`0[1hd0jNgZOJd0Z1ed0oNdZOGh0Y1dd0YOZZO_OR1X1dd0^O][Ob0bd0^O^[Ob0bd0_O^[O`0dd0^O][Ob0dd0]O[[Oc0gd0[OZ[Oe0fd0[OZ[Od0gd0[OZ[Od0hd0[OW[Oe0kd0ZOU[Oe0nd0YOR[Of0od0ZOQ[Oe0od0\\\\OQ[Od0od0[OR[Od0nd0]OQ[Oc0Pe0\\\\OQ[Oc0od0^OP[Ob0Pe0O`ZO0`e02^ZONae0:oYOgN0Q1Qf0g1001O00001O001O1YNTZO8oe0CVZO:ne0^OXZO?Rf0XORZOd0Qf0XOQZOf0Rf0XOoYOe0Xg0HXOC\\\\YO;ff0D[YO:gf0E[YO8hg0NoNJZYO5VOMVg06WYOGB510Ug06UYOGD222Ug05UYOHB143Ug04UYOLH1Sg0LgXO0n04gg0O2O0O1O1O1O1N]Re01fcN\"}}, {\"image_id\": 144, \"category_id\": 1, \"bbox\": [361.0, 230.0, 34.0, 68.0], \"score\": 0.999999463558197, \"association_id\": 1, \"light\": [-2.1378774642944336, -1.8573137521743774, 2.0801100730895996, 1.7097216844558716], \"segmentation\": {\"size\": [535, 800], \"counts\": 
\"lkl55a`02M2T@KU?6i@LS?7n@Jm>9RAIk>:TAGj>;VAEi><VADj>=UADj>=PA[OL8S??m@^ON3U??l@_OO1T?a0n@^ON1S?`0RA^OK2P?b0WAZOJ4l>d0\\\\AVOI5f>k0bAnNH8e>k0hAUOW>k0jAUOU>l0lASOT>l0mASOT>m0lASOT>m0lASOT>n0kAQOV>o0jAQOV>o0jAPOX>n0iAROW>l0kASOV>k0_ATOO1e>MSAf0:^OMNn>?WABLMQ?<UAFZ?3j@Mbjc6\"}}, {\"image_id\": 144, \"category_id\": 1, \"bbox\": [458.0, 234.0, 43.0, 63.0], \"score\": 0.9999982714653015, \"association_id\": 5, \"light\": [-1.9854568243026733, -1.8780708312988281, 1.912621021270752, 1.6597975492477417], \"segmentation\": {\"size\": [535, 800], \"counts\": \"`a_73d`00O1O100O100V@Om>2SANl>3SANl>4SALk>6TAKk>6TAKj>7VAIh>8XAHh>9WAHh>:WAFh>;PA_OK7T?<n@AL3V?>i@C0NW?V100OXOo@LP?4RAKn>4UA^OF6V?<VA[OE9S?=\\\\ACc>>]AB_>b0aA^O]>d0cA\\\\O\\\\>e0dA[O[>f0eAZOZ>g0fAXO[>h0eAXO[>h0eAXO[>f0gAZOY>e0hA[OX>d0iA\\\\OW>c0jA\\\\OX>a0jA_OV>?lA@V>=mABT>:oAEc?NPnk4\"}}, {\"image_id\": 144, \"category_id\": 1, \"bbox\": [486.0, 193.0, 65.0, 70.0], \"score\": 0.9999785423278809, \"association_id\": 6, \"light\": [-1.8655273914337158, -2.1290829181671143, 1.824589490890503, 1.9313509464263916], \"segmentation\": {\"size\": [535, 800], \"counts\": \"eTn73d`00O2O000__OM]`0700000M4N2Ncc2O^\\\\M2O1O0S@OT?2k@OS?2m@NS?2l@NT?3l@MS?4m@LS?4m@LR?5m@LR?5n@KQ?6o@JQ?7n@IQ?8o@HZ>OfA:OHT>8jA11GS>:kA0MAI5^>=iAONAJ3_>>hANOAJ3_>>hAN0_OJ5^>?fAN7CS>?fAM8DR>`0dAM:CR>a0`AO>@R>X1oAhNR>W1nAiNR>W1nAiNU>T1kAlNV>S1jAmNW>R1iAnNY>Q1fAoN[>P1eAPO]>Q1`AoNa>S1\\\\AmNd>T1[AlNf>e0XA^O0Mh>d0[A^OLNi>d0^A\\\\OH0k>a0`A_OD0l>`0cA^OA1m>?dA_O@2m>=lACT>1YBMh=2_10001N10^lQ4\"}}, {\"image_id\": 144, \"category_id\": 1, \"bbox\": [176.0, 194.0, 27.0, 48.0], \"score\": 0.9999963045120239, \"association_id\": 3, \"light\": [-1.4499049186706543, -1.889064073562622, 1.4373273849487305, 1.752039909362793], \"segmentation\": {\"size\": [535, 800], \"counts\": 
\"oUl21Z`02n_O0o?2Q@Ob?=]@D`?>a@B^?>c@B[??f@AY?`0g@@W?b0h@_OT?e0l@[OQ?h0o@XOP?j0o@VOo>l0QASOP?P1n@oNR?T1k@lNU?U1k@jNU?V1k@jNU?m0j@VO2MT?l0m@UOOOT?k0VATOk>l0VASOj>l0b0O0[OX@:i?DZ@:h?D[@9S`0Lefg9\"}}, {\"image_id\": 144, \"category_id\": 1, \"bbox\": [263.0, 228.0, 40.0, 60.0], \"score\": 0.9999992847442627, \"association_id\": 2, \"light\": [-2.1260902881622314, -1.549189805984497, 2.0489120483398438, 1.404950737953186], \"segmentation\": {\"size\": [535, 800], \"counts\": \"keY42d`02nN3WAOe>5YALd>;XAEf>?WACf>a0XA_Og>d0WA\\\\Og>h0WAYOg>j0WAVOh>k0WAVOf>m0ZASOe>o0ZAQOe>Q1ZAoNf>S1XAmNh>S1XAmNh>S1XAlNi>U1VAkNj>U1VAjNk>V1UAjNl>U1TAkNl>V1SAjNm>W1SAgNo>`12Go@gNR?Y1n@fNS?V1o@gNV?n0>N2O3M2M5L101O2N1O1O3K7Jj]S8\"}}, {\"image_id\": 144, \"category_id\": 1, \"bbox\": [560.0, 230.0, 41.0, 69.0], \"score\": 0.9999927282333374, \"association_id\": 4, \"light\": [-1.4369996786117554, -2.2259209156036377, 1.4555164575576782, 2.1638052463531494], \"segmentation\": {\"size\": [535, 800], \"counts\": \"fjT91f`01N1O100^@M`>4^AMb>3]AOa>3]ANc>2[A0c>3[ANd>3[ANd>5k@H14T?5i@J12U?c0j@]OS?h0k@WOU?j0j@WOU?h0m@XOS?;SA[OM:_>GjAc01FW>FfAf03DV>GfAg03BV>IfAe04BV>KaAg08^OW>M^Ag0:\\\\OX>M^AQ10QOc>[1\\\\AeNd>[1\\\\AeNd>\\\\1[AdNe>H[Aa10gNi>X1WAgNj>i0ZAYOLNk>g0]AXOH1k>e0aAWOE4j>d0cAUOE6j>c0hA]OY>`0iA@W>=lACU>;lADU>MYANf03b?00NUfW3\"}}, {\"image_id\": 145, \"category_id\": 1, \"bbox\": [1428.0, 121.0, 158.0, 148.0], \"score\": 0.9999960064888, \"association_id\": 2, \"light\": [-2.6212897300720215, -1.615939974784851, 2.4120004177093506, 1.4463982582092285], \"segmentation\": {\"size\": [1060, 1600], \"counts\": 
\"i[V^19iP13L5J6H;E8VOoN[QOT1cn0POXQOS1gn0oNTQOU1jn0e0N1O1N2O1O1O2N3M1O1RRO[Mh00Yk0f2cSO_MMNW1MYk0g2`SOfMW1CYk0W3gTOiLYk0V3hTOjLXk0V3hTOjLXk0V3hTOkLWk0T3jTOlLVk0S3kTOmLUk0Q3lTOPMUk0n2lTORMTk0n2lTORMTk0m2mTOSMSk0m2mTOSMTk0l2lTOTMUk0k2kTOUMVk0j2jTOVMWk0i2iTOWMWk0i2iTOWMXk0h2hTOXMXk0i2gTOWM[k0h2dTOXM]k0g2cTOYM^k0g2aTOYM`k0f2`TO[M`k0e2_TO[Mak0e2_TO[Mck0d2\\\\TO\\\\Mek0c2ZTO^Mgk0a2YTO_Mhk0a2WTO_Mik0b2VTO^Mkk0b2TTO^Mmk0b2RTO^Mok0a2QTO_MPl0a2oSO_MRl0a2mSO_MSl0a2mSO_MTl0a2kSO_MVl0a2iSO_MYl0a2eSO_M[l0b2dSO^M]l0a2cSO_M]l0b2bSO^M_l0c2_SO]Mbl0d2\\\\SO\\\\Mfl0d2XSO\\\\Mhl0g2USOYMll0g2SSOYMnl0h2PSOXMPm0j2nROVMTm0j2kROUMVm0V3^ROjLcm0V3\\\\ROiLem0[31O0LZROjLgm0U3YROkLim0S3WROmLjm0Q3WROoLim0Q3VROPMkm0o2UROQMkm0o2UROQMlm0n2TRORMlm0m2UROSMlm0k2UROUMlm0j2TROVMmm0i2SROWMmm0i2SROVMnm0j2RROVMom0i2QROWMom0i2QROWMom0i2QROWMom0h2SROWMmm0i2SROVMom0i26000001L4N2N101N1O1O2N2MgQOhMjm0W2VROjMkm0T2UROmMlm0Q2UROPNkm0n1VROQNlm0n1TRORNlm0m1TROTNmm0k1SROUNnm0i1SROWNnm0h1RROXNom0g1QROYNom0f1RROZNom0e1QRO[NPn0c1QRO]Nom0c1QRO]NPn0a1QRO_NPn0`1PRO`NQn0`1nQO`NSn0_1mQO`NUn0_1kQOaNVn0_1iQOaNWn0_1iQOaNXn0^1hQObNYn0^1fQOaN\\\\n0_1cQOaN^n0^1bQObN^n0_1aQOaN`n0^1`QOaNbn0_1]QOaNdn0_1[QOaNin0\\\\1VQOdNln0Z1TQOfNln0[1SQOeNnn0Z1RQOfNon0Z1PQOfNQo0Y1oPOgNRo0Y1>N10O0100O101N1O002N3M3L3N1N2M9G]X>\"}}, {\"image_id\": 145, \"category_id\": 1, \"bbox\": [0.0, 256.0, 104.0, 261.0], \"score\": 0.9999948143959045, \"association_id\": 6, \"light\": [-0.5188145041465759, -2.9462780952453613, 0.4293510317802429, 2.7995474338531494], \"segmentation\": {\"size\": [1060, 1600], \"counts\": \"T8;i0l3nj0cL^TOQ4>cKei0b6L2N:F1O2M2O1O001O001O001O001O00001O1OYOfVOdIZi0\\\\6fVOeIYi0\\\\6fVOeIYi0[6gVOfIXi0Z6iVOfIVi0Z6jVOgIUi0Y6lVOgISi0Z6lVOgISi0Y6mVOhIRi0X6nVOiIQi0X6nVOiIQi0X6oVOgIQi0V701O000010O1O2N3M1O2N3M3M102M3M3M1N2O1O1O2N2N1O1O1O2N3M2N001O000O11O0000O2O1N2O2M3N0O2O1N100O1YOUUOTKmj0j4TUOnJ3Dkj0]5SUOnJVk0Q5iTOmJ\\\\k0P5dTOoJ_k0o4aTOQK_k0n4cTOPK_k0n4cTOoJ_k0g3eTOTMNSO^k0d3QUOPM[k0m2jTOnLYk0n2mTOmLVk0n2e1K3M3L3N3M3N3K7G7J5L5IaX\\\\`1\"}}, 
{\"image_id\": 145, \"category_id\": 1, \"bbox\": [685.0, 257.0, 167.0, 214.0], \"score\": 0.9247750043869019, \"association_id\": 7, \"light\": [-2.2640485763549805, -1.553755283355713, 2.196465492248535, 1.3296620845794678], \"segmentation\": {\"size\": [1060, 1600], \"counts\": \"YaUf01QQ13N1O100O1SNLhRO6Pm0h0XROZOam0R1YROoNcm0W1[ROiNam0\\\\1_ROcN^m0a1aRO_N]m0c1cRO]NTm0m1kROSNSm0P2lROoMSm0S2mROmMhl0HkRO]2>iMgl0a2YSO_Mfl0c2YSO\\\\Mhl0d2WSO]Mhl0d2WSO]Mil0d2VSO\\\\Mil0e2WSO[Mil0e2WSOZMil0g2WSOYMhl0h2XSOXMhl0g2YSOYMfl0h2YSOXMgl0i2YSOWMfl0j2ZSOVMel0k2[SOUMdl0m2[SORMdl0Q3\\\\SOnLcl0U3[SOkLcl0X3\\\\SOhLcl0Y3]SOgLbl0[3]SOdLdl0^3ZSObLel0`3ZSO`Lfl0a3YSO_Lfl0c3YSO]Lgl0c3YSO]Lfl0e3YSOZLhl0f3XSOZLgl0g3YSOYLel0j3ZSOVLdl0l3\\\\SOTLal0o3_SOQL`l0P4_SOPL`l0R4`SOnK_l0S4aSOmK^l0T4bSOlK^l0T4bSOlK]l0U4cSOkK]l0U4cSOkK\\\\l0V4dSOjK[l0W4dSOiK]l0W4cSOiK\\\\l0X4dSOhKYl0[4fSOfKXl0[4iSOeKVl0\\\\4jSOdKTl0^4kSOcKTl0]4lSOdKTl0\\\\4kSOeKTl0\\\\4kSOeKTl0e410O^OmSOULSl0a3VTO`Ljk0]3YTOcLfk0\\\\3\\\\TOdLck0\\\\3]TOdLck0\\\\3^TOdLak0]3_TOcL`k0^3`TObL_k0^3bTObL^k0^3aTOcL^k0^3bTObL^k0^3bTObL^k0^3bTObL^k0^3bTObL^k0]3cTOcL]k0]3cTOcL]k0]3cTOcL]k0]3cTOcL]k0]3cTOcL]k0]3cTOcL]k0]3cTOcL\\\\k0^3dTObL\\\\k0^3dTObL\\\\k0^3dTOaL]k0_3cTOaL]k0_3cTOaL]k0_3cTOaL\\\\k0`3dTO`L\\\\k0`3dTO`L[k0a3eTO_L[k0a3eTO_LZk0b3gTO]LXk0e3gTO[LXk0f3hTOYLXk0h3hTOXLXk0i3gTOWLXk0j3hTOVLXk0k3hTOSLXk0o3gTOQLXk0U4dTOcKE0fk0i4ZTOSK61_k0P5bTOPK]k0S5bTOkJ^k0W562N2M2L5J5N3N1O1O1N2O2NVOQUOaKnj0]4VUObKjj0[4ZUOdKfj0Z4]UOeKcj0Y4aUOeK_j0X4fUOeK[j0V4lUOhKTj0U4QVOiKoi0U4TVOiKmi0V4UVOiKki0W4VVOhKji0W4XVOhKhi0V4_VOdKbi0[4d1N2O1O1O1O1O1N2O1O1O1O1O1O1N2N200O2N2N2mLoROo1Xm0cMXSOQ2Rn0K5M2N3L4L6G;@?B=F^oUh0\"}}, {\"image_id\": 145, \"category_id\": 1, \"bbox\": [472.0, 277.0, 238.0, 321.0], \"score\": 0.9591736793518066, \"association_id\": 5, \"light\": [-2.5999646186828613, -1.6164987087249756, 2.4838201999664307, 1.4737331867218018], \"segmentation\": {\"size\": [1060, 1600], \"counts\": 
\"ikX?5nP16RXO3Z>1bA2[>1bA0^>1`A1_>1]A1c>0[A1d>2XA0h>2SA1m>1n@3Q?Ok@3U?Nh@4X?Mf@4Z?Ke@7[?Ib@:^?E_@?a?@X@h0h?WOm_OU1S`0jNi_O[1W`0eNf_O_1Y`0`Nf_Ob1Z`0_Nd_Ob1\\\\`0_Nb_Ob1^`0_N__Oc1a`0_N[_Oc1e`0^NV_Of1j`0[NR_Oh1n`0YNo^Oi1Qa0XNlYOTOj3e2Zb0XNaYO^OS4[2\\\\b0WNeYOZOe1DIl2md0TNPZOROZ12Dj2Re0PNd\\\\OWOWNk2Ue0lMe\\\\OZORNn2Ye0eMh\\\\O]OjMS3^e0_Mi\\\\O^OeMW3be0ZMj\\\\O_OaMZ3ee0VMk\\\\O@_M[3fe0TMl\\\\OA\\\\M]3he0QMm\\\\OBZM^3ie0PMm\\\\OBXM`3le0mLl\\\\OCUMc3oe0iLm\\\\ODQMe3Sf0gLl\\\\ODlLj3Xf0bLl\\\\ODjLl3Zf0_Lm\\\\ODhLo3\\\\f0\\\\LT[O@\\\\O3TOR4\\\\f0[LS[OB\\\\OOVOU4[f0YLS[OE\\\\OJWOY4[f0VLS[OH6S4hd0SLR[OL3S4ld0oKR[OONU4gg0lKnWO^4Qh0dKlWO^4Th0dKiWO]4Wh0eKeWO]4Zh0eKdWO\\\\4\\\\h0eKbWO\\\\4_h0dK_WO]4ah0dK]WO]4ch0dKZWO^4fh0cKWWO_4ih0cKRWO`4nh0bKnVO_4Si0cKiVO_4Wi0bKfVO`4Zi0aKdVO_4]i0bK`VO_4ai0bK\\\\VO_4ei0aKYVO_4ii0cKSVO]4oi0eKmUO[4Uj0Y10O2O1O1O0O2N1O2N1OcUOdIUj0[6lUOfIRj0Z6oUOhIoi0Y6PVOhIQj0V6oUOkIQj0U6nUOlIRj0S6nUOnISj05PVOT5LhJTj0F_VO`5]OkJnj0T5QUOmJPk0R5oTOoJ\\\\i0LmWOT5fNQK\\\\i0NmWOo4hNSK[i00jWOn4jNSK[i0=^WO`4WOSKZi0>_WO_4VOTKZi0>`WO^4VOTKYi0?aWOo4\\\\h0UKcWOk4\\\\h0VKdWOW4WOVKTi0d0eWOV4WOVKSi0e0eWOi4Zh0XKeWOi4Zh0XKeWOi4Xh0ZKfWOh4Wh0ZKgWOi4Xh0kJ\\\\WOTO:S6Yh0hJcWOQO3X6Yh0gJfWOPO0Z6Yh0fJhWOPOOZ6Zh0fJeWOROOX6]h0fJcWOTONW6`h0fJ]WOZONQ6fh0eJWWOQ6ih0U16I8I2N2MM4G9LASWOnHlh0f7O1N2N_OYWOhHfh0Z7ZWOfHdh0[7]WOeHch0[7]WOeHbh0\\\\7^WOdHbh0\\\\7^WOcHch0\\\\7^WOdHah0]7_WOcHah0]7_WOcHah0]7_WObHah0_7_WOaHah0_7_WOaHah0_7_WO`Hah0b7^WO^Hah0c7_WO\\\\H`h0g7_WOYH_h0i7aWOVH]h0Z8NnJgWOQ1NfKXh0[3lWOY1Th0gNoWOW1Ph0iNQXOW1ng0jNSXOU1kg0lNVXOS1jg0nNWXOgLEi3Th0_OXXOfLFc3OULRh0c3ZXOcLGd3NTLRh0d3ZXObLHg3LRLQh0f3[XO`LJg3KQLQh0g3\\\\XO_LIi3Ph0HXXO\\\\LKk3mg0JiXO5Xg0JhXO7Wg0IjXO6Vg0JkXO5Ug0KlXO4Tg0MlXO2Ug0MlXO2Tg0NmXO2Rg0NnXO2Rg0MoXO3Qg0MPYO2Pg0NPYO2Pg0NPYOQNVOJig0T2SYOoM_OD^g0]2SYOmMCDZg0_2TYOkMDFWg0_2UYOjMFFUg0b2TYOfMIGUg0c2QYOcMNHRg0g2nXO_M3HPg0j2kXO\\\\M9Hnf0k2iXOZM=Jjf0m2gXOXMb0Ihf0o2eXOXMd0Hhf0o2eXOWMe0Igf0AbWOQ3Q1dMh0Iff0@bWOS3P1dMh0ISg0c2UXOcMj0Jof0c2YXOaMi0Lnf0c2YXO`Mk0Llf0d2YXO_Mm0Lif0e2\\\\XO\\\\Mn0O
ef0e2]XOZMQ10bf0f2]XOXMT11_f0g2^XOUMV13`f0e2YXOXMW13cf0d2SXOYM\\\\11df0e2oWOYM_10ef0g2jWOXMc1Off0R3\\\\WOnLP2Mhf0W3TWOkLV2Lif0Y3nVOjL\\\\2Khf0[3kVOhL`2Kgf0]3hVOgLc2Ihf0_3fVOfLc2Jif0_3eVOdLbl0Z3^SOeLel0Y3e0N2N2N2nMSROi0om0SOUROk0nm0RORROn0Qn0oNoQOQ1Tn0lNlQOT1Vn0iNkQOX1Xn0bNjQO^1Qo0MH[NVQOa1hn0`NYQO_1gn0aNZQO_1Uo0O2O1O001O001N101O0O2O0O2N2N2M4H8F;I6Kelhl0\"}}, {\"image_id\": 145, \"category_id\": 1, \"bbox\": [1027.0, 145.0, 152.0, 87.0], \"score\": 0.9997992515563965, \"association_id\": 3, \"light\": [-2.921715021133423, -1.2826802730560303, 2.8650829792022705, 1.1184723377227783], \"segmentation\": {\"size\": [1060, 1600], \"counts\": \"SXWQ18iP14M4N2M2O1O1O1O1_QOYOcl0j0YSOWOgl0k0WSOVOhl0k0WSOUOil0m0TSOTOll0n0RSOROnl0Q1oROoNQm0T1lROlNTm0U1kROkNUm0V1jROjNVm0W1iROiNWm0X1hROhNXm0[1dROfN\\\\m0]1aROcN_m0^1`RObN`m0_1_ROaNam0`1^ROaNam0`1^RO`Nbm0a1]RO_Ncm0c1[RO]Nem0d1ZRO\\\\Nfm0e1YRO[Ngm0f1XROZNim0f1VROZNkm0e1URO[Nkm0f1SRO[Nnm0e1QRO[Nom0e1QRO[Nom0f1PROZNQn0e1oQO[NQn0e1oQO[NQn0f1nQOZNSn0e1mQO[NSn0f1mQOYNSn0g1mQOYNSn0h1lQOXNTn0h1lQOXNTn0i1kQOWNVn0h1kQOVNVn0k1iQOUNWn0l1hQOTNXn0m1gQOSNYn0P2^QOnM22`n0V2_QOiMan0X2^QOgMcn0Z2\\\\QOfMcn0[231O1O00000000000000O100O100N2O1O[QOiM_n0U2cQOlM]n0Q2dQOPN\\\\n0o1eQOQN[n0n1fQORNZn0m1fQOTNZn0l1fQOTNZn0k1gQOUNYn0j1hQOVNXn0j1hQOVNYn0h1hQOXNXn0g1iQOYNWn0g1iQOYNXn0f1hQOZNXn0e1hQO\\\\NXn0c1iQO]NWn0c1jQO[NXn0c1iQO]NWn0c1iQO]NWn0c1iQO]NXn0a1iQO_NXn0_1iQOaNWn0_1iQOaNWn0^1jQOaNXn0]1iQOcNWn0]1iQOcNXn0Z1jQOfNWn0X1jQOhNVn0W1kQOiNVn0V1jQOiNWn0V1jQOiNXn0U1m0N2N2O001N2O2N001N101O1ObPOYO`n0f0`QOZOan0e0_QO[Oan0e0_QO[Oan0e0P1O1O001O0000O1000O1000001O001O1O001O1002NO010O001O00001O0O100FZoN6jP1M3O2Ofcc=\"}}, {\"image_id\": 145, \"category_id\": 1, \"bbox\": [118.0, 211.0, 235.0, 352.0], \"score\": 0.9999803304672241, \"association_id\": 4, \"light\": [-1.6735546588897705, -3.0585663318634033, 1.5067448616027832, 2.911862373352051], \"segmentation\": {\"size\": [1060, 1600], \"counts\": 
\"Ufj31SQ12M101N1O101N100000000O2O0^MGYTO9fk0JXTO6fk0LYTO5fk0MYTO3ek0O[TO1bk02^TON^k07`TOJ^k08bTOH\\\\k0:dTOF[k0<cTOE\\\\k0<cTOE\\\\k0<dTODZk0>fTOBXk0`0gTOAWk0a0iTO_OVk0b0jTO^OUk0c0jTO^OUk0c0kTO]OTk0d0lTO\\\\OSk0e0mTO[ORk0f0nTOZOQk0f0PUOZOoj0g0RUOXOkj0k0TUOVOjj0l0VUOSOjj0n0VUOROhj0o0YUOQOgj0o0YUOQOfj0P1ZUOPOfj0P1ZUOPOej0Q1ZUOPOej0P1\\\\UOPOdj0P1\\\\UOPOcj0Q1]UOoNbj0Q1_UOoN`j0R1`UOnN^j0S1cUOmN[j0U1eUOkNXj0W1iUOhNUj0[1kUOeNQj0^1PVObNni0`1RVO`Nli0a1UVO_Nji0b1VVO]Nji0d1VVO\\\\Nii0e1WVO[Nhi0f1XVOZNgi0g1YVOXNfi0j1hUOkMeN;`k0m1jUOiMeN;_k0n1kUOiMeN8`k0P2jUOiMfN7_k0Q2kUOhMfN7^k0S2kUOgMeN6`k0U2iUOfMfN6_k0W2iUOeMgN3_k0[2iUObMgN4^k0]2iUOaMgN3^k0^2jUOdMcNNbk0a2jUORNUj0P2iUOQNVj0P2iUOPNWj0Q2iUOoMUj0S2kUOmMSj0V2kUOkMRj0X2nUOgMQj0[2oUOeMPj0\\\\2PVOdMoi0^2PVOaMPj0`2oUOaMPj0`2PVO`Moi0b2PVO^Moi0c2QVO]Mmi0f2RVOZMki0i2TVOXMki0j2TVOUMli0m2SVOSMli0o2SVOQMmi0P3RVOoLni0S3QVOmLoi0T3PVOlLPj0U3nUOlLRj0T3nUOlLRj0U3mUOjLTj0W3kUOiLUj0W3kUOiLTj0X3lUOhLTj0X3lUOgLTj0[3kUOeLUj0[3jUOfLUj0[3kUOeLTj0\\\\3lUOdLSj0]3mUOcLRj0^3nUOaLQj0a3oUO_LPj0c3oUO]LPj0e3nUO[LSj0e3mUO[LRj0g3mUOYLRj0i3nUOVLRj0k3mUOTLSj0o3kUOQLUj0Q4iUOnKXj0T4fUOkKZj0X4eUOfK\\\\j0^4`UOVKkj0g5SNjIbXOX6Wg0oIfXOS6Vg0RJgXOP6Ug0TJiXOn5Tg0UJjXOl5Ug0WJgXOk5Wg0YJeXOi5ke0`IZZOk0Fi5ne0aIWZOl0Bg5Vf0bIRZOm0^Og5_f0`InYOo0WOi5jf0[IkYOY8Uf0hGhYOZ8Xf0hGdYO[8[f0gG`YO\\\\8_f0hG[YO[8ef0R1000001O0000O10000`N\\\\YO`Hdf0^7`YO_Haf0_7cYO_H]f0`7fYO^HZf0a7hYO^HXf0`7lYO]HUf0a7oYO]HQf0a7RZO]Hoe0a7UZO\\\\Hle0c7VZO[Hke0d7WZOZHje0e7XZOYHie0g7WZOWHke0h7WZOUHke0j7WZOSHke0l7WZOQHke0n7XZOmGle0Q8d1N2O1O1O100O2O0O10001N100O2M3M2N3M3N2N2N1O00000O2N1O2N102N1O2N4K5K4L3M4M2N4L4K6J=B:F6oMmSOhNXl0m0VTOlNnk0o0YTOlNhk0S1aTOcNbk0[1kTOQN^k0n1m1N2O1aNSQOd0on0[OQQOe0Po0YORQOf0Po0WORQOg0Qo0UORQOj0Po0POVQOo0`o0O1O2N1O2N2O11OCloNIRP14SPOImo05UPOLko02VPONjo01XPOOho00XPO0ho0NZPO2`P1JY`ZX1\"}}, {\"image_id\": 145, \"category_id\": 1, \"bbox\": [1017.0, 245.0, 385.0, 495.0], \"score\": 0.9999890923500061, \"association_id\": 1, \"light\": [-2.5755200386047363, -1.70855712890625, 
2.5746359825134277, 1.5678291320800781], \"segmentation\": {\"size\": [1060, 1600], \"counts\": \"h^mP19iP14M2N2O001N100O10000O2O00000O10000O1000000O2O0WMXOVUOh0ij0@PUO`0oj0GkTO9Tk0KhTO6Wk0LhTO4Wk0NgTO3Xk0OgTO1Xk00hTO0Wk02gTOOXk03gTOLYk05gTOKXk07fTOJZk06fTOJYk07gTOIXk09fTOHYk09gTOGWk0;iTOEVk0<iTOEUk0=kTOBUk0?kTOATk0`0kTOATk0a0kTO_OTk0b0kTO_OUk0a0jTO_OVk0b0jTO^OVk0b0iTO_OVk0b0jTO^OUk0c0jTO]OWk0c0iTO]OVk0d0jTO\\\\OUk0e0jTO\\\\OUk0e0jTO\\\\OTk0f0lTOZOSk0g0lTOZORk0h0nTOXOPk0j0oTOVOPk0l0PUOTOoj0m0QUOSOnj0n0RUOROmj0o0RUOROnj0n0RUOQOnj0P1QUOQOnj0P1QUOPOnj0R1QUOoNnj0R1QUOoNmj0S1SUOmNjj0V1UUOkNjj0V1VUOiNij0Y1WUOgNhj0Z1XUOfNgj0[1XUOfNgj0[1YUOeNfj0\\\\1YUOeNgj0[1YUOeNfj0\\\\1ZUOcNgj0\\\\1YUOeNfj0\\\\1ZUOdNfj0\\\\1YUOdNgj0]1YUOcNgj0]1XUOdNgj0]1XUOcNij0\\\\1XUOdNgj0]1XUOdNgj0]1YUObNgj0_1YUOaNgj0_1YUOaNej0`1[UO`Nej0`1\\\\UO`Ncj0a1\\\\UO`Ncj0`1^UO`Naj0`1_UO`Naj0`1`UO`N_j0_1cUOaN]j0^1cUObN]j0^1dUObN\\\\j0]1eUOcNZj0]1fUOcN[j0\\\\1fUOdNYj0]1fUOdNZj0[1gUOeNXj0\\\\1gUOdNYj0]1gUOcNXj0^1gUOcNXj0^1hUObNWj0^1jUObNUj0_1jUObNTj0`1lUO`NRj0b1mUO^NRj0d1nUO\\\\NQj0e1oUO[NPj0e1PVO\\\\Noi0e1QVO[Nni0f1RVOYNoi0g1PVOZNoi0g1QVOYNni0h1RVOXNli0j1SVOWNli0j1TVOVNji0l1UVOUNii0m1WVOSNgi0o1XVORNfi0P2ZVOoMfi0R2ZVOnMei0S2[VOmMdi0T2\\\\VOlMci0U2\\\\VOlMdi0T2\\\\VOlMdi0T2\\\\VOlMci0U2\\\\VOlMdi0T2\\\\VOlMdi0T2\\\\VOlMdi0T2\\\\VOlMci0U2]VOkMci0U2\\\\VOkMei0U2[VOkMdi0V2\\\\VOjMdi0V2\\\\VOiMei0W2[VOiMdi0W2]VOiMbi0X2^VOhMai0Y2_VOgM`i0Z2_VOfMai0[2_VOeM_i0]2aVOcM]i0_2cVOaM\\\\i0`2dVO`M[i0a2eVO_MZi0b2eVO_MZi0b2fVO^MYi0c2gVO]MYi0c2gVO\\\\MYi0e2gVO[MTb0_NnCW4nIZMRb0bNoCT4oIZMQb0eNmCR4RJXMPb0iNlCP4TJWMoa0kNkCn3WJWMma0mNiCn3ZJUMka0POhCm3]JSMja0ROfCm3`JPMja0UOcCm3cJnLia0WObCl3eJmLha0XOaCm3gJkLga0ZO`Cl3iJjLfa0\\\\O`Ci3kJkLea0\\\\O_Cj3lJjLda0^O^Cj3mJhLda0@]Ci3oJfLea0BYCj3RKdLda0DXCi3TKcLda0EUCj3WKaLca0GaAInK`4n0PLca0G[Ab5RMgJca0HVAe5WMcJba0JRAg5\\\\M_Jba0Jo@j5_M\\\\Jba0Km@j5aMZJca0Lj@l5cMXJba0Ni@k5eMWJba0Og@k5hMUJaa02c@l5kMRJba03a@l5mMQJba05]@l5QNoIba07X@m5VNlIba0:R@m5\\\\NiIba0<j_OR6cNbIca0=b_OX6lNZIba0`0[_O[6SOTIca0b0W_O\\\
\6VORIca0c0U_O\\\\6XOQIca0d0S_O]6YOoHda0e0Q_O]6[OnHda0f0o^O^6\\\\OlHea0h0k^O_6_OWHWOC^b0i1j^O_6@SH[OD[b0l1f^O`6CoG^ODYb0P2`^Ob6GiGBDWb0S<U^OXDEEVb0U<S^OVDHCVb0Z<o]ORDMCTb0\\\\<n]OPD0CRb0_<m]OkC4CQb0c<j]OiCnb0Z<P]OeCQc0\\\\<n\\\\OcCSc0]<n\\\\OaCSc0a<k\\\\O_CUc0b<k\\\\O\\\\CVc0e<j\\\\OYCXc0k<>5K5\\\\\\\\OoBkb0f=O1O1O1O1O1O0O0100000000O1000O1000O1000000000000000000000000O10000000000000000000000000000O100O10000O1]No\\\\ORERc0l:Y]OjDib0S;_]OfDbb0Y;b]OcD_b0\\\\;d]OaD]b0^;f]O_D[b0`;h]O]DYb0b;i]O\\\\DXb0c;j]OZDYb0d;i]OYDYb0g;h]OWDYb0h;j]OTDYb0j;a1O1O2M2O2O0O2N101O1O1O1O1O1O001O1O1O010O001O01O010O10O010O10O0100O10O01O1O010O1O10O01O1O1O100O1O000O1000000bMbZOVI^e0f6iZOWIWe0e6oZOXISe0d6R[OZInd0d6U[O[Ikd0c6X[O\\\\Ihd0c6Z[O\\\\Ifd0b6][O\\\\Ied0a6_[O\\\\Ibd0b6a[O\\\\Iad0a6c[O\\\\I^d0a6j[OYIXd0a6R\\\\OTIUd0X6c\\\\O[Iec0\\\\6g3J6M2O2M3N1O2N1N3N1N3M2N3M3M2N3M2O2N1N3N2M3M2N3M3L4M3M3M4L3L5M4K7I7I6I8H;^O_d\\\\6\"}}, {\"image_id\": 145, \"category_id\": 1, \"bbox\": [456.0, 259.0, 225.0, 182.0], \"score\": 0.7531275153160095, \"association_id\": 8, \"light\": [-2.667372226715088, -1.4650075435638428, 2.5827362537384033, 1.3094818592071533], \"segmentation\": {\"size\": [1060, 1600], \"counts\": 
\"^^h>1RQ110001O0O100YKM]XO3Ug0c0cXO]O\\\\g0f0bXOZO]g0h0bXOXO]g0j0bXOVO^g0k0aXOUO_g0l0`XOTO`g0m0`XORO`g0o0^XORObg0o0]XOQOcg0o0]XOQOcg0P1\\\\XOPOdg0P1\\\\XOQOcg0P1\\\\XOPOdg0P1\\\\XOPOdg0Q1[XOoNeg0R1ZXOnNfg0T1XXOlNhg0X1TXOiNkg0Y1SXOgNmg0[1QXOeNng0^1PXObNPh0`1^TOcNT3M^h0V2XWOkMgh0[2SWOeMmh0^2PWObMPi0`2nVOaMQi0`2nVO`MRi0a2lVO`MUi0`2jVO`MVi0b2hVO_MWi0c2gVO]MYi0f2dVOZM\\\\i0h2bVOXM^i0i2aVOXM^i0i2aVOWM_i0j2`VOVM`i0k2_VOUMai0l2^VOTMbi0n2\\\\VOSMci0o2[VOQMei0R3WVOoLii0R3VVOnLji0S3UVOmLli0S3SVOnLli0S3SVOmLmi0T3RVOlLoi0T3PVOlLPj0U3oUOkLRj0U3mUOkLTj0U3kUOkLUj0V3jUOjLWj0V3hUOjLYj0V3fUOjL[j0V3dUOjL]j0U3cUOkL^j0U3aUOkL`j0U3_UOlLaj0T3^UOkLdj0T3\\\\UOlLej0T3ZUOkLhj0T3XUOlLjj0S3VUOkLoj0Q3QUOnLSk0P3nTOkLXk0T1dSOb0T1WN[k0T1dSOSOLW1im0E\\\\ROROLZ1hm0B]ROTOK[1im0_O]ROTOK]1jm0\\\\O]ROTOL`1gm0YOiROg0Xm0XOhROi0Xm0UOkROi0Vm0VOnROg0Qm0YOPSOf0Pm0ZOPSOf0Pm0ZOQSOe0Pm0ZOPSOf0Pm0[OPSOd0Pm0\\\\OPSOe0ol0[O^RO[O4Z1]m0[O_RO[O4]1Zm0XObRO[O4]1Zm0YOaROZO5]1Zm0ZOTSOf0ll0[ORSOf0ol0ZOPSOf0Pm0\\\\OmRO5XO]Okm0a0jRO1\\\\O_Oim0a0_RORONn0K^Ogm0d0kROL@^Ofm0f0kROIC_Obm0i0lRODjm0<\\\\ROQOPn0P1R1000000001O00001O1O3MO1M3O2ORROnNjk0Q1UTOQOlk0n0STOTOlk0l0STOUOnk0j0RTOVOok0i0PTOYOPl0e0QTO[OPl0d0PTO\\\\ORl0a0nSO@Sl0?mSOAUl03mQOKn13Vl00oQOKj16Xl0MRROIf1;Yl0JPTO8Pl0FQTO;Pl0CoSO?[n0000O1000000O1000000O100O10000O10O10O10000O100O100aMXObTOh0]k0ZObTOf0^k0ZObTOf0]k0[OcTO4eM7hm0EcTO1jM7cm0IdTOMmM8^m0LTUOLnj07h28N2N2O1N2N3N1N2O1O1O2O00O1000iROROXj0n0gUOTOXj0l0^TOPO[O1^O6fl0i0aTOROYO1^O6gl0f0cTOTOiNN04K6hl0d0dTOTOiNN04K6hl0e0cTOSOjNN04K5hl0g0cTOROjNN13I7il0g0bTOQOkNN13I7il0g0bTOPOlNO04H6jl0g0bTOPOlNO13F8jl0g0cTOoNlN00b0al0`0aTOPOlNO1c0al0`0_TOPOmN1Mc0fl0>]TOQOlNQ1gl0:oSO6Pl09_SOIal0^2O1O1O1O0001gMZSO2fl0M`SOO_l01cSOM]l02eSON[l01fSONZl03dSO0Zl0_2O1O2M3O1O1O00O0O1QNoSOiNRl0R1VTOjNlk0Q1YTOmNik0o0[TOoNfk0m0`TOPOak09eSOiNP1l0_k0oN`SOf0l16Tn0I^mfm0\"}}, {\"image_id\": 146, \"category_id\": 1, \"bbox\": [406.0, 105.0, 232.0, 312.0], \"score\": 0.9999993443489075, \"association_id\": 3, \"light\": [-2.4242806434631348, 
-1.6759445667266846, 2.3244755268096924, 1.5371718406677246], \"segmentation\": {\"size\": [422, 656], \"counts\": \"XfW54R=1O0O2O001O000000001O0000001O000000001O00000O101O00000000000001O00000O10000000000O100000000000000001O0000000000001O00000000001O0000000000001O000000000000001O0000000000001O00001O00001O001O00001O001O1O00001O001O1O00010O0eI<TOEg0d0TO]Oh0i0VOVOf0R1VOnNf0[1UOeNi0`1TO`Nj0d1TO]Nh0h1WOWNf0P2VOPNh0U2UOkMj0X2mM]MiM:Y4\\\\2hMbMkM3Z4a2\\\\MlMVNB\\\\4i2QLQO_OVN]4P3kKQOFoM^4U3dKQOMjM_4[3XKQO:cM^4b6bK^I]4c6cK]I]4c6cK]I]4c6cK]I]4b6dK^I\\\\4b6dK^I\\\\4b6dK^I\\\\4a6eK_IZ4`6hK`IX4^6jKbIV4\\\\6lKdIS4\\\\6nKdIQ4]6oKbIo3b6PL^In3d6RL\\\\Il3g6TLXIk3k6SLUIk3o6TLPIi3T7WLkHf3Z7YLeHe3^7[LaHa3e7^LZH^3m7bLPH\\\\3U8bLjG]3X8dLfG[3\\\\8eLcG[3]8eLcGZ3^8gLaGX3a8hL^GW3c8jL[GU3h8lLUGS3m8b02N2O1O1O1O1OO1N2N3M2N2M3eMYLgJX4k4]KQJ`0?e4]5nJSJ>;j4a5eJVJd01n4h5YJ\\\\Ji6Q60002N\\\\OXIlJe6Q5_IoJ`6n4cIRK^6j4dIWK_6d4cI\\\\Kd6\\\\4\\\\IdKg6X4[IgKg6V4ZIkKg6S4YImKj6Q4UIoKn6n3RIRLo6m3PITLQ7j3QITLP7l3QIRLQ7l3RIPLP7P4QImKR7S4S10gGdKd7]4WHfKj7[4PHiKP8f41O010O1O2N3L4K5D<J6jMnF6U9[OZGe0b:0O00001O0001O1N3M2N1O202M2N1O1N2N3M2N1N4J]T7\"}}, {\"image_id\": 146, \"category_id\": 1, \"bbox\": [75.0, 119.0, 320.0, 251.0], \"score\": 0.9999334812164307, \"association_id\": 1, \"light\": [-2.18161678314209, -2.293539524078369, 2.0651073455810547, 2.1576650142669678], \"segmentation\": {\"size\": [422, 656], \"counts\": 
\"kVo01S=:G2N2\\\\Lg0YIZO\\\\6U1^IlN]6\\\\1`IeN\\\\6_1cIaNY6e1eI[NW6l1fIUNV6Q2gIoMW6V2fIlMV6X2hIiMU6Z2jIfMT6^2jIcMT6_2kIaMT6b2jI^MU6e2iI[MV6h2hIYMW6h2hIXMW6j2hIWMV6k2iIVMU6m2iITMU6n2jISMT6Q3iIPMU6S3iImLW6V3fIjLY6\\\\3bIeL\\\\6_3aIaL^6c3_I]La6f3]IZLa6i3]IWLc6k3]ISLb6o3`IoK_6R4fIhKY6[4fIeKX6_4fI`KY6d4dI\\\\K[6j4aIVK^6n4^IRKa6R5]ImJb6W5\\\\IhJc6Z5]IdJc6^5]IaJc6`5dIWJ\\\\6k5>2M3M3M4L4M3M3N2M2O1O1O0O101TJSI]5m6`JXI^5h6aJZI^5g6`J^I\\\\5b6dJ^I\\\\5b6cJ`I\\\\5`6cJbI[5`6cJbI\\\\5^6dJcI[5]6dJdI\\\\5\\\\6dJdI\\\\5\\\\6dJeI[5[6dJfI\\\\5Z6dJfI\\\\5Y6dJhI\\\\5Q700000001O00000000000000000000000000000000000000000000000000000000001O000000000000QOfJRJZ5n5fJRJZ5n5fJRJZ5n5fJRJZ5m60000000000001O00000QOeJSJ[5m5eJSJ[5m5eJSJ[5m5eJSJ[5m5eJSJ\\\\5k5eJUJ[5k5eJUJ[5k5eJUJ[5k5eJUJ[5j5fJVJ[5i5eJWJ[5i5eJWJ[5i5dJXJ]5g5cJYJ]5g5cJYJ]5g5cJYJ^5f5aJ[J_5d5bJ\\\\J_5c5aJ]J_5c5aJ]J`5b5`J^J`5b5`J^Ja5a5_J_Ja5a5^J`Jc5_5]JaJc5_5]JaJd5]5]JcJc5]5]JcJc5]5]JcJd5\\\\5[JeJe5[5[JeJe5[5[JeJf5Z5ZJfJf5Z5ZJfJf5Y5[JgJe5Z5ZJfJg5Y5YJgJg5Y5YJgJg5Y5YJgJg5\\\\61O0000001O00000000001O00000000001O00000nNUJiJk5W5UJiJk5Y6000000000000000000000000001O000oNTJhJl5X5TJhJm5W5SJiJm5W5SJiJm5V5TJjJl5V5TJjJl5V5TJjJl5V5TJjJl5V5TJjJm5V5RJjJn5V5RJjJn5V5RJiJo5W5QJiJo5W5QJiJo5W5QJiJo5W5QJiJo5W5QJiJo5W5QJiJP6V5PJjJP6U5QJkJP6T5PJlJP6T5PJlJQ6S5oImJQ6R5PJnJP6R5PJnJQ6Q5oIoJQ6Q5oIoJQ6Q5oIoJR6P5nIPKR6P5nIPKR6P5nIPKS6o4mIQKT6n4lIRKU6m4kISKV6l4jITKW6l4hITKY6k4gIUKZ6j4gIUKY6k4gIUKZ6j4fIVKZ6k4eIUK\\\\6j4dIVK\\\\6j4eIUK[6k4eITK]6l4bITK^6l4bITK_6k4aIUK`6k4_IUKa6k4_IUKb6j4_IUKb6k4]IUKd6j4\\\\IVKe6i4[IWKf6h4[IWKf6i4YIWKi6g4WIYKj6f4VIZKm6d4SI[KP7b4PI^KS7`4mH_KU7_4kHaKV7^4jHbKW7^4iHaKX7^4hHbKY7U5001O1O2N2N2N2N2\\\\O[HcKf7m42_OXHbKi7m40AWHaKj7^4VHbKk7l40BUHaKl7l42ARHaKP8_4PH`KR8^4nGbKS8^4mGaKU8]4kGcKV8]4jGbKW8]4iGcKX8\\\\4hGdKX8\\\\4iGbKY8]4gGcKZ8[4gGeKZ8Z4gGeKZ8Y4kGcKV8\\\\4;O1N2mKUGi3k8QL]Gl3Q9N1N2N2N3M3M5J6E9F;F9I>B8_O`0E<ZOTZ[3\"}}, {\"image_id\": 146, \"category_id\": 1, \"bbox\": [391.0, 110.0, 75.0, 169.0], \"score\": 0.9999996423721313, \"association_id\": 
2, \"light\": [-1.9612271785736084, -1.8987758159637451, 1.7612760066986084, 1.7580945491790771], \"segmentation\": {\"size\": [422, 656], \"counts\": \"b\\\\Q53R=4M00001O000mMj0VFVOi9k0WFUOi9l0VFTOj9l0VFTOi9m0WFSOh9n0XFROf9P1ZFPOh86gFl0`0nNf8=dFg0f0mN`8X2^GhM[8b2bG^MZ8g2eGYMV8o2gGQMW8S3gGmLX8V3fGjLX8]3bGeL\\\\8_3aGaL]8g3^GXLb8j3\\\\GVLc8m3]GQLb8Q4]GoKa8T4_GkK_8_4O1MKeGeKX8]4iGcKV8]4kGcKR8_4nGbKQ8^4RH`Km7`4TH`Kl7_4UHaKk7_4VH`Kj7`4VH`Kj7a4VH^Kj7d4UHZKl7i472N4L2O1N1101M2O7G3M1O2M3N3K5`M]HSOi7j0]HPOg7l0\\\\2M4N2M2O6K6J7I2N1O1N1O1O1N3L6IUT^2\"}}, {\"image_id\": 147, \"category_id\": 1, \"bbox\": [158.0, 187.0, 1376.0, 762.0], \"score\": 0.9999992251396179, \"association_id\": 1, \"light\": [-1.7527005672454834, -1.594068169593811, 1.544712781906128, 1.4129574298858643], \"segmentation\": {\"size\": [1071, 1600], \"counts\": \"[RV5<cP1g0_O`0C=D;E9I6L4L4L3N3M2N2O1N2N2O1N2N2O1N2N2O2M2N2O2M2O1N2N2N2N2O1N2M3N2N2N2M3N2N1O2N2N1O2O0O100O100O100O10000O100O10001N10001N101N100O2O0O2O0O2O1N101N2M2O1N3M2N3M2O2M2O2M2O2N1O2O0O1O2N1O2N1O2N1N2N3M2N3L3N2N3M2N2O1O2N1O1N2O1N2N3RDkH^MW7_2UIUMn6i2\\\\IlLg6Q3eIbL_6\\\\3fI^L]6_3iI[LZ6c3jIYLX6e3lIVLW6h3mITLU6k3nISHXMoNl8k8oIPH]MPOf8n8PJkGdMQO^8Q9SJfGiMSOV8U9UJ^GRNUOl7Z9WJ^FTOMj6b9WJYFC^O_6W:SJRFe0\\\\Nb5_;QJhE[;U:VEoDX;n:f5M4L3M3N2N2N2O1O1N2O1N2O1N2O1N2N2N2N2M3N2M3N2N3N0O2O1N2O1N2O1N2N2O1N2N2N2O1O1N20000000000000000000000000000000000000O1000000O1000001O0O10000O10001O1O1O1N2O1O1O001O1O0O2O001O1O000O2N1O101N1O1O2N1O1O2N1O1O101N1O10001O0O101O000O2O001O000O2O001O1O1OjDiYOU;Uf0kDlYOV;Vf01O2N2N2O0O2N1O1O1O1O1O100O001O1O001O1O1O1O00100O1O1O2N1O1O2N2N3M4L3M5K4L5K7I7I6J5K5K4L4L3M3M4L4L4L5K4L3M3M3M2N3M3M2N2N2N2N2N1O2N2N2N2N1O2N3M2N2N2N3L3N2N1O2N1O1O1O1O2N1N2O1O1O1O1O1O1N2O1O1O2N2M3N2N2M3N1N3N1O1N3N1O1N2O1O001N2O1O2N1N3N2N2N2N2M2O2N1O1O1N2O1O1O1O1N2O2N1O1N3N2N2M3N3M1N3N1N3N1N2O2N1N3N2N2M4M3M3M4L3L4M3M2N2N3L3ZJfZOSO]e07Z[OEid0Hl[O3^d0ROX\\\\Oi0Td0dNY\\\\OV1[d0nMS\\\\Ol1ge0QLiZOh3fi0I6I7aMeRO>Vo00O1000000O010O1O100O010O1O1O000N2N2M3K5XNg102O0O2O1O010O10O010
000O0100000O10O1000O100000O1000O10000000O1000O1000O1000000O10O100000O100000000O1000O100000O100000000O1000000000O010000000000O10000000000000000O10O100000000000000000O1000000000O100000000000000000O100000O100000000000000000000O100000O1000000000O1000000000O1000000000O1000O10000000000O100000000000O0100000000000000000000O100000000000000000000000000000000000000000000000000000000000O10000000000000000000000000O100000000000000000000000000000001O0000000000000000000000000000000000000000000000000000000O100000000000000000000000000O1000000000001O0O10000000000O10000000000O100000000000000O101O0000000000000000000000000000000O10000000000000000000000000000000000000000000000000000000000000001O00000000O100000000000000000000000O1000000000000000000000O100000000000000000000000000000000000000000000000000000O10000000000000000O1000000000000O100000000O100000000000000O1000000000000000000O10001O000000000000000000001O0000000000000000001O0000000000000000000000001O00000000000O100000000000000000000000001O000O1000000000000O100000000O10000O100000000O100000001O000O10000000000000000O10000000000000000000000O1000000000000O100000001O0O1000000O1000000O1000000000000O1000000000000000000O1000001O0000000000000000000000000000001O00000000000000000000001O0O10000000000000000000000000001O000000000000000O10000000000000000000000O10000000000000001N100000000000000O10000000000000000000000000000000000O100000000000000000000000000000000000001O000000000000000000000000000000000000000O1000000000000000000000000000000000O100000000000000000000000000000000000000000000000000000O101O00000000000000001O0000000000001O0000000O2O0000001O000000001O00000000001N10000000000000001O00000O100000001O000000001N1000001O00001O1O1O1O2M2O1O001O001O001O00001O000O2O0000001O0000001O00000O2O0000001O00001O0O101O001O001O0O2O001O1O1N2O1O1O1O1N2O000O2O001N101O0O101O000O101O0O10001O0O2O1O1N1O2M4M2MfWT2\"}}, {\"image_id\": 148, \"category_id\": 1, \"bbox\": [77.0, 674.0, 482.0, 165.0], \"score\": 0.999992311000824, \"association_id\": 1, \"light\": 
[-0.9172410368919373, -2.875523805618286, 0.8390883803367615, 2.791348695755005], \"segmentation\": {\"size\": [1728, 2304], \"counts\": \"QhR42le14M101O0N2000001O0000000000O1000000000O10000nLi1[^NWNca1n1Z^NRNda1R2Z^NnMea1T2Z^NlMea1V2Z^NjMfa1W2Y^NiMfa1Y2Y^NgMga1Y2Y^NgMfa1[2Y^NeMga1[2Y^NeMga1\\\\2X^NdMga1]2Y^NcMga1^2X^NbMga1`2X^N`Mga1a2Y^N_Mga1c2W^N]Mha1e2W^N[Mia1f2V^NZMia1h2V^NXMja1i2U^NWMja1j2V^NVMja1k2U^NUMka1l2U^NSMja1o2U^NQMka1P3T^NPMka1R3U^NmLka1S3U^NmLja1U3U^NkLka1V3U^NiLka1X3T^NhLka1Y3U^NgLka1Z3T^NfLka1\\\\3T^NdLla1]3T^NbLka1`3T^N`Lla1`3T^N`Lka1b3T^N^Lka1c3U^N]Lka1c3V^N\\\\Lja1d3V^N\\\\Lia1f3V^NZLja1f3W^NYLia1g3Y^NWLfa1j3[^NTLfa1l3Z^NTLea1m370O100O100O10000O1000000O10000O100O100O100O100O10000O1000000O0\\\\^NkK_a1U4`^NmK_a1Z401N10000OGd^NPL\\\\a1o3d^NRL\\\\a1n3d^NRL[a1n3f^NRLZa1n3f^NRLZa1m3f^NTLZa1l3f^NTLZa1k3g^NULXa1k3h^NVLXa1j3h^NULYa1j3g^NWLYa1i3f^NXLYa1h3h^NXLXa1g3i^NYLWa1f3j^NZLUa1f3l^NZLTa1d3n^N\\\\LRa1c3n^N^LQa1b3P_N^LPa1b3P_N^Lo`1b3R_N^Ln`1a3S_N_Lm`1a3S_N_Lm`1`3T_N`Lk`1a3U_N_Lk`1a3U_N_Lk`1b3T_N^Ll`1b3S_N_Lm`1a3S_N_Lm`1a3S_N_Ll`1b3T_N^Ll`1b3T_N^Ll`1b3T_N^Ll`1b3T_N^Ll`1b3T_N^Ll`1b3T_N^Ll`1b3T_N^Ll`1b3T_N^Lk`1d3T_N\\\\Ll`1d3T_N\\\\Ll`1d3T_N\\\\Ll`1d3T_N\\\\Ll`1d3T_N\\\\Lk`1f3T_NZLl`1f3T_NZLl`1f3T_NZLk`1h3T_NXLl`1h3T_NXLl`1h3T_NXLk`1i3U_NWLk`1j3T_NULm`1k3S_NULm`1k3S_NULl`1m3S_NSLm`1m3S_NSLm`1m3S_NSLm`1m3S_NSLm`1n3R_NRLn`1n3R_NRLn`1n3R_NRLn`1n3R_NRLn`1n3R_NRLn`1n3R_NRLm`1o3S_NQLm`1P4R_NPLn`1P4R_NPLn`1P4R_NPLn`1P4R_NPLn`1P4R_NPLn`1P4R_NPLn`1P4R_NPLn`1P4R_NPLn`1P4R_NoKo`1Q4Q_NoKo`1Q4Q_NoKo`1Q4Q_NoKn`1R4R_NnKn`1Q4S_NoKm`1Q4S_NoKm`1Q4S_NoKm`1P4T_NPLl`1P4T_NPLl`1P4T_NPLl`1P4T_NPLl`1o3U_NQLk`1o3U_NQLk`1o3U_NQLk`1o3U_NQLj`1o3W_NQLi`1o3W_NPLj`1P4V_NPLj`1P4V_NPLj`1P4V_NPLj`1P4V_NPLj`1o3W_NQLi`1o3W_NQLi`1o3W_NQLi`1o3V_NRLj`1n3V_NRLj`1n3V_NRLj`1m3W_NSLi`1m3W_NSLi`1m3W_NSLi`1m3W_NSLi`1m3W_NSLi`1m3W_NSLi`1l3X_NTLh`1l3X_NTLh`1l3X_NTLh`1l3Y_NSLh`1l3X_NTLh`1l3X_NTLh`1l3X_NTLh`1l3X_NTLh`1k3Y_NULg`1k3Y_NULg`1k3Y_NULg`1k3Y_NULg`1k3Y_NULg`1k3X_NULi`1k3W_NULi`1k3W_NULi`1k3W_NUL
j`1j3V_NVLj`1j3V_NVLj`1j3V_NVLj`1j3V_NVLj`1i3W_NWLi`1i3W_NWLi`1i3W_NWLi`1i3W_NWLi`1i3W_NWLi`1i3W_NWLi`1i3W_NWLi`1i3W_NWLi`1i3W_NWLi`1i3W_NWLi`1i3W_NWLi`1i3W_NWLi`1i3W_NWLi`1i3W_NVLj`1j3V_NVLj`1j3V_NVLj`1j3V_NVLk`1i3U_NWLk`1i3U_NWLk`1h3V_NXLj`1h3V_NXLj`1h3V_NXLj`1h3V_NXLj`1h3V_NXLj`1h3V_NXLj`1h3V_NXLj`1h3V_NXLj`1h3V_NXLj`1h3V_NXLj`1h3V_NXLk`1g3U_NYLk`1g3U_NYLk`1g3U_NYLk`1g3U_NYLk`1f3V_NZLj`1f3V_NZLk`1e3U_N[Lk`1e3U_N[Lk`1e3U_N[Lk`1e3U_N[Lk`1e3U_N[Ll`1d3T_N\\\\Ll`1d3T_N\\\\Ll`1d3T_N\\\\Ll`1d3T_N\\\\Lm`1c3S_N]Lm`1c3S_N]Lm`1c3S_N]Lm`1d3R_N\\\\Ln`1d3S_N[Lm`1e3S_N[Ln`1d3R_N\\\\Ln`1e3Q_N[Lo`1e3Q_N[Lo`1e3Q_N[Lo`1e3Q_N[Lo`1f3P_NZLQa1e3o^N[LQa1e3o^N[LQa1f3n^NZLRa1f3n^NZLRa1f3n^NZLRa1g3m^NYLTa1f3l^NZLTa1g3k^NYLUa1g3k^NYLUa1h3j^NXLWa1g3i^NYLWa1h3h^NXLXa1i3g^NWLZa1i3e^NWL[a1i3f^NVLZa1k3e^NUL\\\\a1j3d^NVL\\\\a1k3c^NUL]a1k3c^NTL^a1m3a^NSL`a1l3a^NSL_a1m3a^NSL_a1n3a^NQL_a1o3a^NQL`a1o3_^NQLaa1o3`^NPL`a1Q4`^NnK`a1R4`^NnKaa1R4^^NnKba1R4_^NmKaa1T4^^NjKea1U4[^NkKea1U4310O0000001O01O000001O000000001O0001O0001O00001O1O001O1O1O1O1O001O00001O0Ol]NWLSb1i3m]NXLRb1k300001O000000010O00000O2O00001O00001O001O00001O0Eg]NoLYb1Q3g]NoLZb1o2g]NQMYb1o2f]NRMZb1n2f]NRM[b1l2f]NTMZb1l2f]NTMZb1l2f]NTM[b1k2e]NUM[b1j2f]NVMZb1j2f]NVM[b1i2e]NWM[b1i2e]NWM[b1i2e]NWM[b1i2e]NWM\\\\b1h2d]NXM\\\\b1h2c]NYM]b1g2c]NYM]b1g2c]NYM^b1f2b]NZM^b1f2b]NZM^b1f2b]NZM^b1f2b]NZM_b1e2a]N[M_b1e2a]N[M_b1e2a]NZMab1e2_]N[Mab1e2_]N[Mbb1c2_]N]Mab1c2_]N]Mbb1b2^]N^Mbb1b2^]N^Mbb1b2^]N^Mcb1a2]]N_Mcb1a2]]N_Mcb1a2]]N_Mdb1`2\\\\]N`Mdb1`2\\\\]N`Mdb1`2\\\\]N`Meb1_2[]NaMeb1_2[]NaMeb1_2[]NaMfb1^2Z]NbMfb1^2Z]NbMfb1^2Z]NaMgb1_2Y]NaMhb1^2X]NbMhb1^2X]NbMhb1^2X]NbMhb1^2Y]NaMhb1^2X]NbMhb1^2X]NbMhb1^2X]NbMhb1^2X]NbMhb1_2W]NaMib1_2W]N`Mkb1_2U]NaMkb1^2V]NbMjb1^2V]NbMjb1^2V]NbMkb1]2U]NcMkb1]2U]NcMlb1\\\\2T]NdMlb1\\\\2U]NbMmb1\\\\2T]NdMlb1\\\\2T]NdMmb1[2S]NeMmb1[2T]NdMlb1\\\\2U]NcMlb1\\\\2T]NcMmb1]2S]NcMmb1]2;O2O00001O0O101O1O0O2O0O2O001N10001O0O101N2O1N2N3Jhnok2\"}}, {\"image_id\": 148, \"category_id\": 1, \"bbox\": [647.0, 396.0, 695.0, 851.0], \"score\": 
0.999998927116394, \"association_id\": 3, \"light\": [-2.0654690265655518, -2.033362627029419, 2.030055046081543, 1.8442049026489258], \"segmentation\": {\"size\": [1728, 2304], \"counts\": \"noTR12ie17L3N3N000O101O0000000O2O00000O2O001O0O2O00001O0O10001O000000001N1000001O0O2O00001N10001O00000O1000001O00000O101O00001O000O101O00000000001N10000000000O2O00000O100000001N100000000000001O000000000O10001O000000000O100000001O000000000000000000000O100000000000000000000000001O0000000000000000000000000000001O0000000O100000000000001O0000000000000000000000000000001O0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000O0100000000000000000000O10000000000000000000000000000O10000000000000000000000O100000000000000O100000000000000O10000000001O00000000000000001O0000000O2O0000001O1O3M2dN^[NU1id1N2N001O0000000O2O000000001O000000001O0O100000001O0000000000000000000O101O000000000000000000001O00000000001N100000001O000O2O00001O0O101O00001N101O0O2O1K9Jn[[2S1dbdMh0A6J6K4L3M3M3L4K6J5J6hkNjLae0Z3\\\\ZOjL_e0[3]ZOiL_e0Z3^ZOjLUCAoP1h3i[OiLRCDSQ1g3h[OfLPCIVQ1d3g[OdLRCIUQ1g3f[ObLSCITQ1j3e[O^LUCJSQ1l3f[O\\\\LVCHQQ1Q4f[OXLXCImP1T4h[OUL[CGjP1Y4h[ORL]CEiP1\\\\4g[OQL`CCgP1^4g[OQLbCAgP1_4c[OSLfC^OfP1`4_[OXLiCYOgP1`4\\\\[O[LmCUOfP1a4X[O_LRDPOeP1c4S[ObLXDkNeP1c4dROiK\\\\3o0ZIeNeP1d4aROPLW3l0cI`NeP1e4]ROUL\\\\1dNbLX2POZNdP1f4[ROZLW1hNaLR2YOVNdP1g4XRO]LT1lN^Lo1BQNcP1i4VRO_LP1SOZLi1MlMbP1l4QROcLm0YOVLb1:fMaP1o4lQOhLg0a2lLhK`P1S5`QOTMc0\\\\2]M]K_P1X5jPOnMc0h1TNRK^P1a5XPO[Na0\\\\1iNhJ\\\\P1k5joNbNb0Q1XObJ[P1R6`oNfNb0k0C]JYP1Y6XoNiNd0d0KZJWP1[<foN^I3WJTP1c<`oNZI<RJSP1_c0loNb\\\\ORP1`c0noN`\\\\OQP1ac0ooN_\\\\Ooo0cc0QPO]\\\\Ono0dc0RPO\\\\\\\\Omo0ec0SPO[\\\\Olo0fc0TPOZ\\\\Oko0gc0UPOY\\\\Ojo0hc0VPOX\\\\Ojo0hc0VPOX\\\\Ojo0gc0WPOY\\\\Oho0hc0XPOX\\\\Oho0hc0XPOX\\\\Ogo0hc0ZPOX\\\\Ofo0hc0ZPOX\\\\Ofo0gc0[POY\\\\Odo0hc0\\\\POX\\\\Odo0gc0]POY\\\\Obo0gc0_POY\\\\O_o0hc0bPOX\\\\O\\\\o0gc0gPOY\\\\OVo0la0ooN[_Om0iNQo0[a0_SOe^O]l0Sa0oSOm^Onk0Qa0WTOo^Ogk0o`0]TOQ_Oak0n`0bTOR_O\\\\k0m`0
gTOS_OXk0l`0jTOS_OUk0m`0mTOS_ORk0j`0RUOV_Olj0e`0[UO[_Ocj0\\\\`0hUOd_OUj0Q`0YVOo_Odi0i?gVOW@Ti0f?TWOZ@fh0e?aWO[@Zh0d?lWO\\\\@Ph0d?TXO\\\\@gg0c?_XO]@\\\\g0a?kXO_@lf0b?\\\\YO^@Xf0f?PZOZ@fe0g?cZOY@Ye0b?P[O^@ld0b?X[O^@fd0a?\\\\[O`@bd0`?`[O`@^d0`?d[O_@\\\\d0a?e[O_@Yd0c?g[O]@Xd0c?i[O]@Vd0c?k[O]@Sd0d?n[O\\\\@Pd0e?Q\\\\O[@mc0f?T\\\\OZ@jc0g?W\\\\OY@gc0h?Z\\\\OX@dc0j?\\\\\\\\OV@cc0j?_\\\\OU@`c0k?a\\\\OT@_c0l?b\\\\OS@^c0m?d\\\\OQ@\\\\c0o?f\\\\Oo_O[c0P`0j\\\\Og_OPGNVl0[`0Z901N1020O2N1O1O100O001O10O01N2O001O001O001O001O0100O100O100O2O0O2OO2O1O1N4M2M3N1N2O1N101N2O2M3N4K<E8G8I5J4M4K6K6I5L4Ke_ObnNVOCR:hQ1bFdoNYNZOk:oP1iFXPOgMTO`;aP1eFiRO\\\\9Tm0bFPSO^9ol0^FWSOb9gl0[F]SOf9al0VFeSOj9Yl0QFoSOn9ok0lE[TOQ:ek0iEdTOT:\\\\k0fElTOX:Tk0cERUO\\\\:nj0`EWUO_:lj0ZEXUOf:cKZD[l0i0VXOm:XKaDel0;XXOT;iJjDTm0HYXO`;VJQEeU1T;giNYE]V1l:fhNgE^W1\\\\<3M2N2N1O2M3M4L4L4L4bF^hNV4eW1bKmhNQ4VW1hKViNP4mV1jKYiNS4jV1hK[iNU4hV1fK\\\\iNX4gV1bK_iN[4dV1_KbiN^4aV1[KhiN`4[V1WKRjNb4QV1VKcjN[4aU1]KPkNV4SU1dKUkNW4oT1bKWkN[4PU1\\\\KUkNa4RU1UKSkNi4UU1mJPkNP5T[1L5Kd0\\\\O]1cN6J4L4L2N3M2N4L3M3M3M2N1O2N1O1O1O1N2O1O1O2M3N2N3M2M2O1O2M2O1O1N2O2M2O2M6H8H]Wfb1\"}}, {\"image_id\": 148, \"category_id\": 1, \"bbox\": [1529.0, 628.0, 324.0, 98.0], \"score\": 0.9998916387557983, \"association_id\": 2, \"light\": [-1.6423676013946533, -1.8313283920288086, 1.5553628206253052, 1.7195814847946167], \"segmentation\": {\"size\": [1728, 2304], \"counts\": 
\"bld`22ne11N100000000O10000O100000001O000fN9[\\\\NFac1m0Q\\\\NSOmc1P1R\\\\NPOlc1S1T\\\\NlNkc1V1T\\\\NjNkc1Y1S\\\\NgNlc1\\\\1R\\\\NdNnc1^1P\\\\NbNPd1_1o[NaNQd1`1o[N_NQd1b1n[N^NRd1b1n[N^NRd1b1o[N]NQd1d1o[N[NQd1f1n[NZNRd1f1o[NYNQd1h1301O000O2O00000000001O0001O000000000000000O100000000000000O1000000000000000000000O100000N2O001N2O1O01000O01000P\\\\NTNlc1k1U\\\\NUNjc1l1V\\\\NTNjc1k1V\\\\NVNjc1i1W\\\\NWNhc1j1X\\\\NVNhc1i1Y\\\\NWNfc1i1Z\\\\NXNfc1g1[\\\\NYNdc1g1]\\\\NYNcc1g1]\\\\NYNcc1f1^\\\\NZNac1g1_\\\\NYNac1f1`\\\\NZN`c1f1`\\\\NZN_c1g1a\\\\NYN_c1f1b\\\\NYN_c1g1a\\\\NYN^c1g1c\\\\NYN]c1f1e\\\\NYN[c1g1e\\\\NYNZc1g1g\\\\NYNYc1g1`000000O100X\\\\NXNXc1h1h\\\\NXNXc1h1h\\\\NXNWc1j1h\\\\NVNXc1j1h\\\\NVNWc1k1i\\\\NUNWc1k1i\\\\NUNVc1l1j\\\\NTNVc1m1i\\\\NSNVc1n1j\\\\NRNVc1o1<2O001N100O2O000O100O100O1O100O101N100O010O100O1O10000O1000000000000O10O10000000000000000000O10000000000000O10000000001O0O100000000000000000000000000000f\\\\NbMRc1^2n\\\\NcMQc1]2o\\\\NcMQc1]2o\\\\NcMQc1^2n\\\\NbMRc1^2n\\\\NbMRc1^2n\\\\NbMRc1^2n\\\\NbMRc1^2n\\\\NbMSc1^2l\\\\NbMTc1^2l\\\\NcMSc1]2m\\\\NcMSc1^2m\\\\NaMSc1_2m\\\\NaMSc1`2l\\\\N`MTc1`2l\\\\N`MTc1`2l\\\\N`MTc1`2l\\\\N`MTc1a2l\\\\N^MTc1b2l\\\\N^MTc1b2l\\\\N^MTc1c2400001O01O000001O0001O00001O00010O0000000000001O00000000001O00000O0100000000000000000O2O000O2O0O10000O2O000O100O101M2O1N200O2N100O100O001O1N2O1O100O100O100O2O000O100O1O101N100O1O2O0O2O0O2N100O2N1O2IS[NXOod1d0<Hi]hg0\"}}, {\"image_id\": 149, \"category_id\": 1, \"bbox\": [23.0, 233.0, 1117.0, 659.0], \"score\": 0.9995971918106079, \"association_id\": 1, \"light\": [-2.3590469360351562, -1.6994402408599854, 2.2765090465545654, 1.594006061553955], \"segmentation\": {\"size\": [1073, 1599], \"counts\": 
\"okh0:RQ1:J3M2N2O0O2O0O101O0000001O0000001O0000001O0O100000001O0000000000000000000001O0001O0000000000000000000000000000001O00000000001O0000001O000001O000000O2O0000000000000O100000001O00000000000O10001O000000001O0000001O000000000O101O000000000000001O000000001O00001O0000001O00000000001O01O000001O00001O1O1O1O2N001O1O001O001O01O0001O00001O01O01O1O00100O2N010O001O0`JPN_[OP2Zd0WNf[Oi1Sd0_Nm[O`1nc0fNQ\\\\O[1mc0gNR\\\\OY1mc0iNR\\\\OW1mc0kNR\\\\OU1nc0lNQ\\\\OT1nc0nNQ\\\\OS1nc0mNR\\\\OS1mc0oNR\\\\OQ1mc0QOR\\\\OP1mc0QOR\\\\Oo0nc0QOR\\\\Oo0mc0SOR\\\\On0mc0SOR\\\\On0lc0SOT\\\\On0kc0SOT\\\\Om0lc0TOT\\\\Ol0jc0VOU\\\\Oj0kc0VOU\\\\Ok0jc0VOU\\\\Oj0jc0XOU\\\\Oi0jc0XOU\\\\Oh0jc0ZOU\\\\Of0kc0[OT\\\\Of0kc0ZOU\\\\Of0jc0\\\\OU\\\\Od0kc0]OT\\\\Od0kc0]OT\\\\Oc0kc0@S\\\\O`0mc0AR\\\\O`0mc0AR\\\\O`0lc0BS\\\\O?lc0CR\\\\O?lc0BS\\\\O`0jc0CT\\\\O?jc0BU\\\\O`0ic0BV\\\\O=jc0ET\\\\O<jc0FU\\\\O:kc0HS\\\\O8mc0JQ\\\\O6nc0MP\\\\O4oc0On[O1Rd01l[OOTd03j[OMUd06i[OJWd08g[OHYd0:f[OFXd0=f[OCZd0>e[OBZd0?f[OAYd0a0f[O@Yd0a0g[O^OXd0d0g[O]OWd0e0h[O\\\\OVd0f0i[OZOWd0f0i[O[OUd0g0j[OYOVd0h0i[OYOUd0i0k[OVOUd0j0k[OWOSd0k0l[OUOTd0k0l[OUOSd0m0l[OTOSd0m0l[OSOSd0o0l[OQOTd0o0l[OQOTd0P1k[OPOTd0R1l[OmNTd0T1k[OmNTd0T1k[OlNTd0V1k[OjNUd0W1j[OiNVd0X1j[OgNUd0[1j[OfNUd0[1j[OeNVd0\\\\1j[OcNVd0^1i[ObNVd0a1h[O`NWd0a1h[O_NXd0b1g[O^NYd0d1f[O\\\\NXd0f1g[OZNYd0g1f[OYNZd0g1f[OYNZd0h1e[OYNZd0g1f[OYNYd0h1g[OXNYd0h1g[OXNYd0h1h[OWNWd0j1i[OVNWd0j1i[OVNWd0j1i[OVNVd0l1i[OTNWd0l1j[OSNUd0n1k[ORNUd0n1k[ORNUd0n1k[OSNSd0n1n[OQNRd0o1n[OQNQd0P2o[OPNQd0P2P\\\\OoMoc0R2Q\\\\OnMnc0S2R\\\\OmMnc0S2R\\\\OmMmc0T2S\\\\OlMlc0U2T\\\\OkMkc0V2V\\\\OiMic0Y2V\\\\OgMic0Z2W\\\\OfMic0Z2W\\\\OfMhc0[2X\\\\OeMgc0\\\\2Y\\\\OdMfc0]2Z\\\\OcMfc0]2Z\\\\OcMec0^2\\\\\\\\OaMcc0`2]\\\\O`Mcc0`2]\\\\O`Mbc0a2^\\\\O_Mbc0a2^\\\\O_Mbc0a2_\\\\O^M`c0c2`\\\\O]M`c0c2`\\\\O]M_c0d2a\\\\O\\\\M_c0d2b\\\\O[M^c0e2b\\\\O\\\\M]c0c2d\\\\O]M[c0d2e\\\\O\\\\M[c0d2e\\\\O\\\\M[c0c2f\\\\O]MZc0c2g\\\\O\\\\MXc0e2h\\\\O[MXc0d2i\\\\O[MXc0e2h\\\\O[MXc0d2i\\\\O\\\\MVc0e2k\\\\OZMUc0e2l\\\\O[MTc0d2m\\\\O\\\\MSc0c2n\
\\\O]MQc0d2P]O[MPc0d2Q]O\\\\Mob0d2Q]O\\\\Mnb0d2T]O[Mlb0d2U]O\\\\Mkb0d2U]O\\\\Mjb0e2W]OZMib0e2X]O[Mhb0e2X]O[Mgb0e2Z]O[Mfb0e2Z]O[Mfb0e2Z]OZMfb0f2\\\\]OYMdb0g2\\\\]OYMdb0g2\\\\]OYMcb0g2^]OYMbb0g2^]OYMab0h2_]OXMab0h2_]OXMab0g2`]OYM_b0h2a]OXM_b0h2a]OXM^b0i2b]OWM^b0i2c]OVM\\\\b0j2e]OVM[b0j2e]OVMZb0k2f]OUMYb0l2g]OSMYb0n2g]ORMXb0o2h]OQMWb0P3i]OPMVb0Q3j]OoLUb0R3k]OnLTb0S3m]OlLQb0V3o]OiLQb0W3P^OiLoa0X3Q^OhLma0Z3T^OdLka0^3U^ObLja0_3V^O`Lia0b3X^O]Lfa0e3Z^O[Lda0g3\\\\^OXLca0j3^^OTLba0m3^^OSLaa0n3_^OQLaa0o3a^OPL^a0Q4b^OoK]a0R4c^OmK^a0S4c^OlK\\\\a0U4d^OkK[a0V4e^OiK\\\\a0W4d^OiK[a0X4f^OgKZa0Y4f^OfKZa0[4f^OeKZa0[4f^OeKYa0\\\\4h^OcKXa0]4h^ObKXa0^4i^ObKWa0^4j^O`KWa0`4i^O`KVa0a4j^O^KWa0b4j^O]KUa0d4l^OZKUa0f4l^OYKSa0h4o^OUKRa0k4o^OTKPa0l4R_ORKo`0n4Q_ORKn`0o4R_OQKm`0P5S_OoJm`0R5T_OmJl`0S5T_OmJk`0T5U_OlJj`0U5V_OkJi`0U5X_OjJg`0X5Y_OhJb`0]5__ObJ\\\\`0c5d_O]JW`0h5i_OXJS`0l5m_OTJo?P6R@nIn?S6R@mIm?T6S@lIm?T6T@kIk?U6V@kIi?V6X@hIh?Y6Y@fIg?Z6Y@fIf?[6[@dIe?\\\\6[@dId?]6]@aId?_6\\\\@aIc?`6^@_Ib?a6^@_Ia?b6_@^Ia?a6`@_I`?a6a@^I^?c6b@\\\\I_?d6a@\\\\I_?d6a@\\\\I^?e6c@ZI]?f6c@ZI\\\\?g6e@XI[?h6e@XIZ?i6g@VIY?j6j@SIU?n6m@PIR?Q7PAmHP?S7QAlHn>U7SAiHm>X7TAgHk>Z7UAfHj>[7VAeHi>\\\\7WAdHh>]7YAbHf>_7ZAaHe>`7[A`Hd>a7\\\\A_Hc>b7]A^Hb>c7_A\\\\H`>e7`A[H`>e7`AZH`>g7aAXH^>i7bAWH^>i7bAWH]>j7dAUH\\\\>k7dAUH[>l7fASHZ>m7fASHY>n7hAQHX>o7hAQHX>o7hAQHW>P8jAoGV>Q8jAnGW>R8iAnGW>S8hAmGW>T8iAlGW>T8jAkGV>V8iAjGW>V8iAjGW>V8iAiGX>W8hAiGX>X8gAhGX>Y8iAfGW>Z8iAfGW>[8iAdGW>\\\\8iAdGW>\\\\8iAcGX>]8iAbGW>_8hAaGW>`8iA`GW>`8jA_GV>a8jA^GW>c8hA]GX>c8hA]GX>c8hA]GW>d8jA[GV>f8Z50000001N1000001O0O10001O0O10001N101N1][ORGc`0o8[_ORGd`0P9[_OPGe`0P9U_OmF`L4Zd0Q9Z_OoFe`0S9[_OlFe`0T9\\\\_OkFc`0W9]_OgFc`0[9i31N5L4K5L4K5L6I:G9F9H8H8G8I7I6I6K4L4L3Y\\\\OVC`b0n<W]O[Cfb0h<R]O_Clb0c<n\\\\OdCob0^<j\\\\OiCSc0]=N2N1O1O1N2O1O1O1N2O1O000O101O000O10001N10000O101O0O100O101O0O100O100O2N100O1O100O100O1O00100O1O1O001O1O1O1O1O001N2O1N101N2N2M3N1O2N2N2N100O100O10000000jNm^OoATa0o=Q_OlAPa0S>U_OhAl`0W>Y_OdAh`0[>^_O_Ad`0_>b_OZA``0e>f_OUA[`0i>W1RO]]OmBeb0Q=j]OaBWb0\\\\=P1N1N3
M2N3^Oa0K5N2O1N3M2N2N3K4M4L4L400O0100001O000000001O00001O1O001O001O1O001N2N2N1O2N2N1O2M3N2M2N3O1O1O1O001O1O1O010O1O100O10O0100O100O10001N10001N100O2O0O101N101N101N1O2O0O2O0O2N2O0O2O1O0O2O0O2O1O0O101O0O2O00001O001O001O001O001O001O1O001O1O1O1O1O1O1O1N2O1O1O1O1O001O1O001O1O001O00001O010O00010O00010O010O010O10O010O010000000O1000O1000000O01000O0100O010O0010O01O001O010O1O001O1O001O1O1O1O100O2N2N1O2N1O2N1O1O2N1O2N1O1dXOhF^f0Y9UYOXGef0o9K5Kj0VO1O2N1O1O2N1O1O1O1O1O1O1O1O1O2N1O1O1O1O1O001O001O001O1O001O001O001O1O001O001O001O1N101O001O1N101O1N2O001N2O1N2O1N2O1NYLW\\\\OiIhc0U6\\\\\\\\OjIbc0T6d\\\\OjIZc0U6j\\\\OjITc0T6Q]OjIob0R6W]OmIgb0Q6^]OnI`b0o5f]OPJXb0P6j]OPJTb0P6o]OoIoa0R6S^OnIka0Q6W^OoIga0Q6\\\\^OnIca0R6^^OnI`a0R6b^OnI\\\\a0R6f^OnIYa0R6h^OnIVa0R6l^OnISa0R6n^OnIQa0Q6Q_OoIm`0R6S_OoIl`0P6V_OPJh`0Q6Y_OnIg`0R6Z_OmIf`0R6\\\\_OmId`0S6]_OlIb`0T6`_OjIa`0V6`_OiI``0V6c_OhI\\\\`0X6f_OgIZ`0Y6g_OeIZ`0Z6i_OdIV`0\\\\6l_ObIU`0^6l_OaIT`0^6n_O`IR`0`6P@^IQ`0a6Q@^Io?a6S@]Im?c6U@\\\\Ik?c6W@[Ii?e6Y@ZIg?d6\\\\@[Ic?e6^@ZIc?e6_@ZI`?g6a@WI`?h6b@WI]?i6d@VI\\\\?j6f@UIZ?k6g@TIX?l6i@SIW?m6k@RIU?n6l@PIT?P7m@PIR?Q7o@nHP?R7RAlHn>U7RAkHn>U7SAjHl>V7UAiHk>X7VAgHj>Y7VAgHi>Y7YAeHg>\\\\7ZAcHf>]7ZAbHf>^7\\\\A`He>`7bAXH^>h7kAoGU>R8SBeGn=[8WB_Gi=a8]BYGc=h8bBRG^=n8hBlFX=U9a5000O2O000O1O2E:G9M3O1N2O100O2N101N10001N2O0010O0100O010O10O10O10O1000O0100O01000O1O10O0100O1O10O01O1O1O1O010O1O100O1O10O0100O10000O100O100O2O0O100O1O1O101N1O1O1O1O1O2M2O1N2N2O2N1O1O1O1O2N1O1O101N1O101N1O101N101N101N1O2O1N2M3N2M3M3M3M3M3L5K4L4L4K5L5K4L4L5L3L4N2M3N2M3N2N2N2N1O2N2N1O2O1N1O2N2N2N2N2M3N2K5J7I6J:D=@`0@d0hNP2aNZdP?\"}}, {\"image_id\": 150, \"category_id\": 1, \"bbox\": [14.0, 119.0, 774.0, 460.0], \"score\": 0.9999993443489075, \"association_id\": 1, \"light\": [-1.6378623247146606, -2.0875508785247803, 1.47434663772583, 1.928685188293457], \"segmentation\": {\"size\": [768, 1024], \"counts\": 
\"nn:^1`f05L2N2N1O2O0O10001N10000O100000000O1000000O10000O1000000O101O0000000O10000000000O100000000O10001N100O10000O10001N100000001O0O1000000000000O10001O0O10000O1O100O100O10000O10000O1000000O10000O10000O100O100O1O2O0O1O1000000O1000000O1000000000000O10000000000O100000000O10000O1000000O100000000000000O1000000000000000000000000O101O000000000000000000000O100000001O00000O10001O000O10001O00000O2O00000000001O0O100000001O000000001O0O101O0000001O00001O0O10O100000O10O1000O100O0100000O10O1000O10000000O1000O1000O10000O1000O01000000O010000000O010000000000O0100000001O0000000000000000000O100000000000000000001O00000000001O0000000000001O00000001O000001O0000000000000]Mf[O`1[d0^Nf[Ob1Zd0]Ng[Oc1Yd0]Nh[Ob1Xd0]Ni[Oc1Xd0[Nj[Od1Vd0[Nk[Of1Td0YNn[Of1Sd0XNo[Og1Qd0XNQ\\\\Og1Pd0VNS\\\\Oi1oc0TNU\\\\Oi1lc0SNZ\\\\Oj1gc0QN]\\\\Oo1kd0O010O00001O000001O01O0000010O0000010O001O10O01O001O010O001O01O01O0001O01O000001O01O000001O0001O0000010O0000001O00010O00000000001O01OXOQZO_Ooe0<WZOCie0<XZODhe0<YZOCge0<ZZODge0;ZZODfe0<[ZODde0;]ZOEce0;^ZODbe0;`ZOD`e0<aZOD^e0;cZOE]e0;dZOD]e0;cZOE]e0:eZOFZe0:fZOFZe0:fZOFZe09hZOFXe0:hZOFXe0:hZOFXe0:iZOFWe08jZOHVe08jZOHVe08jZOHVe08kZOGUe09kZOGUe09kZOGVe08kZOGUe09kZOGUe09lZOFUe09kZOGUe09lZOFTe0:lZOF``01jB9fLF_`07gB2kLF]`0:gB0lLF\\\\`0;hBNmLGZ`0=hBLnLGZ`0=hBLoLFX`0`0hBJPMFX`0`0iBIoLGW`0b0iBGPMGW`0b0iBGQMFU`0e0iBERMFU`0e0iBERMGT`0d0jBERMGS`0e0kBDRMGS`0f0jBCSMGS`0f0jBCTMFR`0g0kBBSMGQ`0h0lBASMGQ`0h0lBATMFP`0i0lBATMFo?j0mB@UMFm?k0mB_OVMFm?k0mB_OVMFl?l0oB]OUMHk?k0PC]OVMGj?l0PC]OVMHh?l0RC\\\\OVMIf?k0UC\\\\OUMIe?l0WC[OSMIe?m0XCZOTMIb?n0ZCYOTMIa?n0]CXORMJ`?o0^CWORMK^?o0`CVORMK]?P1aCUORMK]?o0cCUOPMM[?o0eCTOPMMZ?P1fCSOQMLY?P1gCTOPMMX?o0iCSOoLNW?P1jCROoLNW?P1jCROoLNW?P1jCROoLOU?P1lCROnLNV?o0mCSOmLNV?o0mCSOmLNU?P1nCROmLOT?o0oCROmLOT?o0oCROmLOS?P1PDQOmL0R?n0RDROlL0R?n0SDQOkL1Q?P1SDoNlL2o>P1UDnNlL2n>R1UDlNmL3k>S1XDjNmL3i>V1YDgNnL3e>Z1]DcNnL4`>^1aD^NoL4]>a1dD[NoL5X>d1iDWNoL5V>g1jDTNPM6S>h1mDRNPM6R>i1nDQNPM6Q>k1oDnMPM7P>l1PEmMPM8n=m1QEkMQM8m=o1QEiMRM8l=P2REhMRM8k=S2QEfMSM7j=V2REcMTM8g=X2TE`MU
M8e=\\\\2TE\\\\MXM7^;_O]ER3l1WMYM8];B[ER3m1TM[M8\\\\;DYES3o1QM\\\\M9Z;FXER3Q2oL]M9Y;HWER3R2mL^M9X;IXER3R2kL^M:W;JXES3R2iL`M:T;KZET3P2hLaM9S;N[ER3Q2fLaM:R;O\\\\ES3o1dLcM;o:0^ET3o1`LdM<l:3aET3l1]LgM<j:5cET3j1[LjM<f:7eEU3j1WLkM=e:8fEV3h1ULmM=d:9gEV3g1TLnM=c::hEV3g1RLnM>b:;iEV3f1QLPN>`:<hEW3g1oKQN>_:=hEX3h1mKPN>_:>iEX3g1lKQN>^:?iEZ3f1iKTN=[:b0iE\\\\3g1cKVN?Y:c0iE`3d1^KZN?W:f0hEd3c1VK_N?T:i0gEk3a1lJdN`0R:k0gEo3_1fJiN?P:m0fES4_1`JkN`0o9n0eEV4_1\\\\JnN?m9Q1dEW4`1XJoN`0l9R1cEY4a1TJRO`0i9T1cE[4a1PJSOa0h9U1cE\\\\4a1nIUO`0g9W1aE^4P3[J]7Z1^Ea4S3UJ^7\\\\1YEg4W3lI_7_1TEo4X3bIa7c1SES58]IT2L_8U:UGkEi8_:oFaEo8g:YFZERONd:m:PF]EZOFe:Q;kE]E@Ad:V;fE]EF\\\\Oc:Z;bEjE]:Y:_E]EOUOa:g<_EYC`:h<aEWC^:j<bEUC]:n<bERC\\\\:P=eEoBZ:R=fEnBX:T=hElBW:U=jEjBU:W=kEiBU:W=kEiBT:X=9100O100O001O1O001NbLSFlHl9P7_FkH`9R7hFiHX9V7nF^GPOFQ:l8SGWGROJk9o8VGPGUOOf9P9YGhFYO5_9R9]G]F_O:X9W9lH`FY7`9kHWF[7g9`3N2M3N101N2O1OcDfFV8Z9iGkFS8U9lGlFT8T9kGnFT8Q9mGoFS8Q9lGPGT8P9lGQGS8o8lGRGT8n8lGSGS8m8lGTGS8m8mGSGS8m8mGTGR8l8nGTGR8l8nGTGR8k8oGVGP8j8oGWGQ8i8oGWGQ8i8oGWGQ8i8oGWGQ8i8oGXGP8h8PHXGP8h8PHXGP8i8oGWGQ8i8oGWGQ8i8oGWGQ8i8oGXGP8h8PHXGP8h8PHXGP8h8PHXGP8h8PHXGP8h8PHXGQ8g8oGYGQ8g8oGYGQ8g8oGZGP8f8PHZGP8g8oGYGQ8g8nGZGR8f8nGZGR8f8nGZGR8f8nGZGR8f8nGZGR8g8mGYGS8h8lGYGS8g8mGYGS8i8lGVGU8j8jGVGV8l8hGTGX8R9bGnF_8X9ZGiFe8\\\\9VGdFj8^9TGbFm8^9RGbFn8`9PG`FQ9`9nFaFQ9`9nF`FS9_9nF`FS9`9lFaFU9^9jFbFW9^9hFbFY9^9fFcFZ9^9dFbF]9_9aFaF`9a9]F`Fc9b9[F]Fe9e9YF[Fh9f9^EdENf0e:g9VEPFM:l:h9QEXFM0R;g9SE[FHNU;f9TEZGm:c8VE\\\\Gj:a8YE_Gg:]8]EdGb:P8kEoGU:X7dFhH]9U7eFkH[9S7gFmHY9R7hFoHW9P7kFoHV9o6kFQIU9n6lFRIU9l6mFTIS9j6oFUIR9i6QGVIo8h6TGWIm8e6WGYIl8b6XG]Ii8_6ZGaIf8\\\\6^GcIc8Z6_GfIa8X6aGgI`8W6aGjI_8T6cGkI^8T6cGlI]8R6dGoI]8o5dGPJ]8n5eGQJ]8l5fGSJ[8j5gGUJ[8g5jGWJX8d5lG[JU8a5oG^JR8^5RH`JQ8\\\\5QHdJP8Y5RHgJn7W5UHhJk7V5WHiJj7V5VHkJj7S5XHlJj7Q5YHmJh7R5YHmJi7P5ZHnJh7o4\\\\HoJf7m4]HQKf7j4^HTKd7i4^HVKd7f4_HYKb7d4bHZK_7d4cH[K^7c4dH\\\\K]7b4dH_K\\\\7_4fH_K\\\\7_4fH`K[7^4hH`KY7]4kH`KX7\\\\4lHbKV7Z4nHcKU7X4PIfKQ7W4SIfKo6W4UIgKl6W4`6N2N1O2N2O1N2N2M4M3L4L4M2N2N2N2N1000001O00
0000001N100000001O1N2O1N3M3M4L3L4L3M4M2M3N3M2N3M4L4L5J;CZg`5\"}}, {\"image_id\": 151, \"category_id\": 1, \"bbox\": [156.0, 228.0, 393.0, 150.0], \"score\": 0.9999962449073792, \"association_id\": 1, \"light\": [-2.0702152252197266, -2.2888147830963135, 1.943985939025879, 2.0419795513153076], \"segmentation\": {\"size\": [768, 1024], \"counts\": \"hZe3f0of0>ZOe0J5J6L4N1N2XOgMh[OZ2Xd0gM\\\\[ONO]2dd0fM\\\\[O1K[2hd0fMZ[Od2fd0^MW[Oc2hd0;O001J6O1O100O000I8N1O2N1O2N2O001O1O1O001000O010O00100O010O010O\\\\\\\\OQLYc0o3f\\\\OSLXc0n3h\\\\OSLWc0m3h\\\\OTLXc0m3g\\\\OSLYc0m3g\\\\OTLXc0l3h\\\\OTLWc0m3h\\\\OTLXc0l3h\\\\OTLXc0m3g\\\\OSLYc0m3g\\\\OSLYc0m3g\\\\OSLYc0m3g\\\\OSLYc0m3g\\\\OSLYc0m3g\\\\OSLXc0o3g\\\\OQLYc0o3g\\\\OQLYc0o3g\\\\OQLYc0P4f\\\\OPLZc0P4f\\\\OPLZc0Q4e\\\\OPLZc0P4f\\\\OPLZc0Q4e\\\\OoK[c0Q4e\\\\OoK[c0Q4e\\\\OoK[c0[40000000000000000000000000000000000000000000000000He\\\\OkK[c0U4e\\\\OkK[c0U4e\\\\OkKZc0V4f\\\\OjKZc0V4f\\\\OjK[c0U4e\\\\OkK[c0]40Ge\\\\OmK[c0\\\\4000Fe\\\\OoK[c0[40000000O1Ed\\\\ORL\\\\c0n3d\\\\ORL\\\\c0n3d\\\\ORL\\\\c0m3e\\\\OSL[c0m3d\\\\OTL\\\\c0l3d\\\\OTL\\\\c0l3d\\\\OTL\\\\c0k3e\\\\OUL[c0k3e\\\\OUL[c0k3e\\\\OUL[c0k3d\\\\OVL\\\\c0j3d\\\\OVL\\\\c0j3d\\\\OVL\\\\c0i3e\\\\OWL[c0i3e\\\\OWL[c0i3e\\\\OWL[c0i3e\\\\OWL[c0i3e\\\\OWL[c0i3e\\\\OVL\\\\c0j3d\\\\OVL\\\\c0j3d\\\\OVL\\\\c0j3d\\\\OVL\\\\c0j3d\\\\OVL\\\\c0j3d\\\\OVL\\\\c0j3d\\\\OVL\\\\c0j3d\\\\OVL\\\\c0j3d\\\\OVL\\\\c0j3d\\\\OVL\\\\c0j3d\\\\OVL\\\\c0j3d\\\\OVL\\\\c0j3d\\\\OVL\\\\c0j3d\\\\OVL\\\\c0j3d\\\\OVL\\\\c0j3d\\\\OVL\\\\c0j3d\\\\OVL\\\\c0j3d\\\\OVL\\\\c0j3d\\\\OVL\\\\c0j3d\\\\OVL\\\\c0j3d\\\\OVL\\\\c0j3d\\\\OVL\\\\c0j3d\\\\OVL\\\\c0j3d\\\\OVL\\\\c0j3d\\\\OVL\\\\c0j3d\\\\OVL\\\\c0j3d\\\\OVL\\\\c0j3d\\\\OVL\\\\c0j3e\\\\OUL[c0k3e\\\\OUL[c0k3e\\\\OUL[c0k3e\\\\OUL[c0k3e\\\\OUL[c0k3e\\\\OUL[c0k3e\\\\OUL[c0k3e\\\\OUL[c0k3e\\\\OUL[c0k3e\\\\OUL[c0k3e\\\\OUL[c0k3e\\\\OUL[c0k3e\\\\OUL[c0k3e\\\\OUL[c0k3e\\\\OUL\\\\c0j3d\\\\OVL\\\\c0j3d\\\\OVL\\\\c0j3d\\\\OVL\\\\c0j3d\\\\OVL\\\\c0j3d\\\\OVL\\\\c0j3d\\\\OVL\\\\c
0j3d\\\\OVL\\\\c0j3d\\\\OVL\\\\c0j3d\\\\OVL\\\\c0j3d\\\\OVL\\\\c0j3d\\\\OVL\\\\c0j3d\\\\OVL\\\\c0j3d\\\\OVL\\\\c0j3d\\\\OVL\\\\c0j3d\\\\OWL[c0i3e\\\\OWL[c0i3e\\\\OWL[c0i3f\\\\OVLZc0j3f\\\\OVLZc0j3f\\\\OVLZc0j3f\\\\OVLZc0j3f\\\\OVLZc0j3f\\\\OVLZc0j3f\\\\OVLZc0j3f\\\\OVLZc0j3f\\\\OVLZc0j3f\\\\OVLZc0j3f\\\\OVLZc0j3f\\\\OVLZc0j3f\\\\OVLZc0j3f\\\\OUL[c0k3e\\\\OUL\\\\c0j3d\\\\OVL\\\\c0j3d\\\\OVL\\\\c0j3d\\\\OVL\\\\c0j3d\\\\OVL[c0k3e\\\\OUL[c0k3e\\\\OUL[c0k3e\\\\OUL[c0k3e\\\\OUL[c0k3e\\\\OUL[c0k3e\\\\OUL[c0k3d\\\\OWL[c0i3e\\\\OWL[c0i3e\\\\OWL[c0i3e\\\\OWL[c0i3e\\\\OWL[c0i3e\\\\OWL[c0i3e\\\\OWL[c0i3e\\\\OWL[c0i3e\\\\OWL[c0h3f\\\\OXLZc0h3f\\\\OXLZc0h3f\\\\OXLZc0h3f\\\\OXLZc0h3f\\\\OXLZc0h3f\\\\OXLZc0h3f\\\\OXLZc0h3f\\\\OXLZc0h3f\\\\OYLZc0f3e\\\\O[L[c0e3e\\\\O[L[c0e3e\\\\O[LZc0f3f\\\\OZLZc0f3f\\\\OZLZc0g3e\\\\OYL[c0g3e\\\\OYL[c0g3e\\\\OYL[c0g3e\\\\OYL[c0g3e\\\\OYL[c0g3f\\\\OXLZc0h3f\\\\OXLZc0h3f\\\\OXLZc0h3f\\\\OXLZc0i3f\\\\OWLYc0i3g\\\\OWLZc0h3g\\\\OWLYc0j3f\\\\OVLZc0j3f\\\\OVLZc0k3e\\\\OUL[c0k3f\\\\OTL[c0l3d\\\\OTL\\\\c0m3c\\\\OSL]c0m3c\\\\OSL]c0X40Fc\\\\OQL]c0o3c\\\\OQL]c0P4c\\\\OoK^c0P4c\\\\OoK]c0Q4c\\\\OoK]c0R4c\\\\OmK]c0S4c\\\\OmK]c0T4b\\\\OlK^c0T4b\\\\OlK^c0[40000001O0000000000001O000000001O00001O00001O001O0000001O0H[\\\\OULec0k3[\\\\OULfc0i3[\\\\OWLec0i3Z\\\\OXLfc0h3Z\\\\OXLfc0g3[\\\\OYLfc0f3Y\\\\O[Lgc0o31DX\\\\O^Lic0l31CV\\\\ObLjc0]3W\\\\OcLjc0\\\\3U\\\\OeLlc0Y3U\\\\OgLlc0X3T\\\\OhLlc0W3U\\\\OiLlc0V3T\\\\OjLlc0V3T\\\\OjLmc0T3T\\\\OlLlc0T3T\\\\OlLmc0S3S\\\\OmLmc0S3S\\\\OmLnc0R3R\\\\OnLoc0Q3Q\\\\OoLoc0P3R\\\\OPMoc0o2P\\\\ORMQd0m2o[OSMRd0l2n[OTMSd0j2n[OVMRd0j2n[OVMSd0i2m[OWMSd0i2m[OWMTd0h2l[OXMUd0f2l[OZMUd0e2k[O[MVd0d2j[O\\\\MWd0c2i[O]MXd0a2i[O_MZd0^2f[ObM^d0Z2b[OfM_d0Y2a[OgM`d0X2`[OhM`d0X2`[OhMad0W2_[OiMbd0U2_[OkMad0U2_[OkMbd0T2^[OlMed0Q2\\\\[OnMhd0n1X[ORNkd0j1V[OVNSe0a1mZO_N^e0V1bZOjNhe0l0XZOTOke0h0VZOXOQf0a0oYO_OSf0?mYOATf0=mYOCTf0<lYODUf0:lYOFUf08lYOHXf03iYOMZf0OgYO1[f0KgYO5Rg0000000000000000000000000000000000000000O1001O0000001O001O0O4M002N0O2O00ZlS;\"}}, 
{\"image_id\": 151, \"category_id\": 1, \"bbox\": [541.0, 255.0, 234.0, 238.0], \"score\": 0.9999982714653015, \"association_id\": 2, \"light\": [-2.2598485946655273, -2.0306994915008545, 2.14162015914917, 1.89542555809021], \"segmentation\": {\"size\": [768, 1024], \"counts\": \"`Rf<9`g0=G5L3M4M4L2d[OmNha0V1U^OmNha0V1T^OnNga0W1U^OmNha0W1T^OjNka0X1X^O^Nma0e1U24L2L2O2O1O1O1M3N3O5J9H7H5K6I7I5K5L6I9G6H8Cg0]O>E7I4K4N3M2O2N1N2O1O001O001O001O001O1O1O1O0010gMY_OWMg`0j2W_OXMi`0i2U_OWMl`0h2S_OYMn`0l42N2N1O2N4L5K3M2N2M4M2N001O1O1O2O0O1O10000Fn]OdJRb0d4R^OdKNIRb0P4U^OiK52J6Rb0T3a_OmLd`0b2h_O^MZ`0^2g_OcM[`0n0_]OK[2WOZ`0?XABl>4ZALi>OYA1\\\\c0O0000000000001O0000000000001O00000001O000000001O0O100LDdXO=Zg0DfXO<Yg0FfXO:Zg0FfXO:Zg0601O00001O01O000001O01O000000000001O00O10000000000000001O0000000000000000000000000000000000001O00000000000O10000000I_OPYO`0Pg0AoXO?Qg0AoXO?Qg0BnXO=Sg0CmXO=Sg0CmXO=Sg0CmXO=Sg0CmXO=Sg0CmXO=Sg0CmXO<Tg0DlXO<Tg0DlXO<Ug0CkXO=Ug0BlXO>Tg0BlXO>Tg0BlXO>Tg0BlXO=\\\\g0000O100O100O100O100O1000000001O00000O101O00000000000000001N10001O0O3MVYj5\"}}, {\"image_id\": 152, \"category_id\": 1, \"bbox\": [1234.0, 711.0, 366.0, 676.0], \"score\": 0.9923186302185059, \"association_id\": 1, \"light\": [-1.6317684650421143, -2.300842761993408, 1.540075421333313, 2.141437292098999], \"segmentation\": {\"size\": [1536, 2048], \"counts\": 
\"kjki1>\\\\_1=E:F8I5M1N2N2O1N3N1N2O1O1O1O1O101O000000001O0000001O000O10000O101O0O100O10000O2N1O1O2M2O1N2O2N1O1O2N100O2O000O2O001N2O0O2O001O1N2O2M3N3L4M3L4M3L4L3N3Lm0SO5L3L6J6J7I4L3M3M3M3M3N1N2WlN]ISn0e6hQOaITn0b6gQOdIVn0_6dQOhIWn0^6`QOlIZn0Y6\\\\QOSJ]n0R6ZQO[JWNdN[m0W7VTOaJ^MSOYn0`6RTOVMkk0P3nSOTMPl0P3jSOTMSl0Q3gSOSMVl0R3dSOQMZl0T3_SOPM_l0W3WSOnLfl0_3iROiLTm0f3RROgLkm0j:L3M4K4L4L4M3L4L3M4K4M3N2N3M2N2O2M2O1O1O2N1O1O1N2N2O1N3N1O1N20000O10000O10000O10O1000000O1N2O1O1N2N2M4L3N2N2N2N2N2N2M3L4E;D<B?1N4L2N101O0001O001O1O10OSN_QOZBan0c=aQO_B^n0_=bQOcB^n0Z=dQOgB\\\\n0X=cQOjB]n0U=cQOlB^n0R=bQOoB^n0P=bQORC]n0m<bQOUC_n0i<aQOYC_n0e<aQO]C_n0a<`QOaCan0]<_QOdCcn0Y<]QOhCdn0V<]QOjCdn0T<]QOmCcn0R<]QOnCdn0R<\\\\QOnCdn0R<[QOmCin0R<UQOnCmn0b>4K3N2M200O000000000O1O2N1iNoPO^ARo0e>mPOWAUo0l>iPORAXo0o>hPOn@Yo0T?ePOl@\\\\o0U?cPOi@^o0Y?`POe@bo0\\\\?`PO^@co0c?=2M3N3L4M3L4M2M2O0O2N1O101N1O2N3M3M3M3N1N2N2N1000000001O00100O1000O1O000O100O10O0100O010O0010O1O1O1O1O1O1O2N2N2N2N2N2N2N2N2N3M4L4L6J4L3N2]FmoNS3VP1hLmoNW3UP1fLnoNX3TP1eLmoNZ3XP1bLhoN^3\\\\P1]LeoNc3aP1WL`oNh3gP1PLZoNP4nP1gKSoNY4RQ1bKnnN]4XQ1]KinNc4[Q1XKgnNg4\\\\Q1UKenNk4^Q1QKcnNo4_Q1mJdnNQ5`Q1jJcnNT5`Q1iJbnNU5cQ1eJ`nNW2[OmLYR1g0_nN[2F_LQR1P1[nNa2LULoQ1S1XnNg22kKmQ1X1SnNk2`0\\\\KbQ1e1PnNn2nT1PMSkNn2QU1nLPkNR3QU1lLQkNR3RU1kLPkNT3QU1jLSkNR3PU1jLTkNT3oT1gLUkNV3nT1eLWkNY3nT1`LVkN]3SU1XLQkNg3VU1PLnjNn3ZU1gKjjNW4aX1O1N2O1N3M3M3QNfdNSO^[1e0gdNYOP\\\\1B`dN=b]100O10000O100O1O100O100O1O1O0O200O002O0O2N1N3M2O2M2O2N3L4K5I9Ghind0\"}}, {\"image_id\": 152, \"category_id\": 1, \"bbox\": [0.0, 763.0, 175.0, 79.0], \"score\": 0.9999683499336243, \"association_id\": 2, \"light\": [-0.6331258416175842, -3.422492504119873, 0.5822228193283081, 3.2642765045166016], \"segmentation\": {\"size\": [1536, 2048], \"counts\": 
\"Yh0321O0[_1f0maNYO_\\\\1l0^cNUO^\\\\1o0`cNRO_\\\\1P1`cNPO`\\\\1T1\\\\cNlNc\\\\1P2bbNPN^]1Q2abNoM_]1R2`bNnM`]1R2`bNnM`]1S2_bNmMa]1Y2O100000000000000000000000000000000000000000000000000000000000000000000000000000G`bNQNa]1o1_bNQNa]1o1_bNQNa]1o1_bNQNa]1o1_bNQNa]1n1`bNRN`]1n1`bNRN`]1n1`bNRN`]1n1`bNRN`]1n1_bNSNa]1m1_bNSNa]1V20000000O1H^bNRNb]1n1^bNRNb]1n1^bNRNb]1n1^bNRNb]1n1^bNRNb]1V20G^bNTNb]1l1^bNTNb]1l1^bNSNc]1l1^bNTNb]1l1]bNUNc]1k1]bNUNc]1j1^bNVNb]1j1^bNVNb]1j1^bNVNb]1j1]bNWNc]1i1]bNVNd]1i1]bNWNc]1i1]bNWNc]1i1]bNWNc]1i1]bNWNc]1h1^bNXNc]1g1\\\\bNZNd]1f1\\\\bNZNd]1g1[bNYNe]1g1[bNYNf]1f1ZbNZNg]1e1YbN[Ng]1e1YbN[Ng]1e1YbN[Nh]1d1XbN\\\\Nh]1c1YbN]Nh]1b1WbN_Ni]1a1WbN^Nj]1b1VbN^Nk]1a1UbN_Nk]1a1UbN_Nl]1_1UbNaNk]1_1UbNaNk]1_1UbNaNl]1^1TbNbNl]1^1TbNbNm]1]1SbNcNm]1]1SbNbNn]1^1RbNbNo]1]1QbNcNo]1^1PbNbNP^1^1QbNaNo]1`1PbN`NQ^1_1oaNaNQ^1g10IoaN_NR^1`1naN`NR^1`1naN_NS^1a1maN_NS^1b1maN]NT^1b1laN^NT^1b1laN^NU^1f1001KjaN^NV^1b1jaN^NV^1b1jaN^NW^1e11LhaN^NX^1b1haN]N[^1a1eaN_N]^1_1caNaN^^1^1201O1O001O0011N0001O001O001O1O001O1N2HVaNXOm^1d0TaN\\\\Om^1?WaNAl^17YaNIi^13YaNMj^1LZaN4Y_100000O101O0001N10000000001N2NTfhg2\"}}, {\"image_id\": 152, \"category_id\": 1, \"bbox\": [898.0, 734.0, 146.0, 325.0], \"score\": 0.9999998211860657, \"association_id\": 4, \"light\": [-1.9046989679336548, -1.7631895542144775, 1.8454663753509521, 1.6553306579589844], \"segmentation\": {\"size\": [1536, 2048], \"counts\": 
\"WiSZ1;b_17J4L5J8I<E8H4L7I:G6H6K7H9VcNSM;KP[1a4Gf0ZOj1VN<E5K6J4K4N2O0O2VOhGhiN[8jU1WHfiNT8WV1n0L2N3N3L3N1O0O2O0000001O00000000000000000001O0O1gF_jNS8bU1iGdjNT8]U1hGhjNU8[U1gGhjNW8aV1O2L4L3N32ORO`hNZI_W1_6]hNlIdW1W71O0dNZhN^JgW1]5[hNeJfW1S5`hNnJaW1i4ehNYK[W1`4khN`KWW1\\\\4lhNdKTW1Z4mhNgKTW1V4mhNjKUW1R4nhNnKSW1m3QiNRLRW1g3RiNZLPW1X3\\\\iNgLgV1P3`iNPMaV1j2diNUM]V1j0ihNnMa1W1hU1e0gkNZO[T1b0gkN_OZT1=ikNBZT19ikNF[T15gkNK]T10dkNO`T1M`kN3dT1I]kN6gT1FZkN9hT1EYkN;iT1BWkN>lT1_OUkNa0lT1^OSkNb0oT1[OSkNe0oT1XORkNg0RU1TOPkNk0TU1QOljNP1VU1nNjjNQ1XU1nNhjNR1YU1mNfjNS1SZ110O0010O01O1O001N2O1O1N1O2O1O100O100O100O100O2O001N102N1N2O000O2O1O001N2O001N100000001O0O3N2McPQ_1\"}}, {\"image_id\": 152, \"category_id\": 1, \"bbox\": [530.0, 714.0, 114.0, 81.0], \"score\": 0.9999345541000366, \"association_id\": 3, \"light\": [-2.2412805557250977, -1.8668357133865356, 2.1961958408355713, 1.8025784492492676], \"segmentation\": {\"size\": [1536, 2048], \"counts\": \"\\\\gkh0n0P_15L10000000000000000000001O0O1O1N2O100O2N1O2N1N2gaN_NR^1b1naN_No]1c1QbN]No]1c1PbN_Nn]1b1RbN^Nm]1c1SbN^Nk]1b1VbN^Ni]1c1VbN^Ni]1c1WbN]Ng]1d1ZbN]Nc]1d1]bN]Nc]1c1]bN]Nb]1d1]bN]Nb]1d1\\\\bN^Nd]1b1\\\\bN^Nd]1b1\\\\bN_Nc]1a1^bN^Na]1c1_bN]Na]1c1_bN]Na]1d1^bN\\\\Nb]1d1_bN[N`]1f1`bNZN`]1f1`bNZN`]1f1abNYN_]1g1abNYN\\\\]1k1dbNTN\\\\]1l1dbNTN\\\\]1l1dbNTN\\\\]1m1cbNSN]]1m1dbNRN[]1o1ebNQN[]1o1ebNQN[]1o1ebNQN[]1o1ebNQN[]1P2dbNPN\\\\]1P2dbNPN\\\\]1P2dbNPN\\\\]1P2dbNPN\\\\]1P2dbNPN\\\\]1P2dbNPN\\\\]1P2dbNPN\\\\]1P2dbNPN\\\\]1P2dbNPN\\\\]1Q2dbNnM\\\\]1R2dbNnM\\\\]1R2dbNnM\\\\]1R2dbNnM]]1Q2cbNoM]]1Q2cbNoM^]1P2bbNPN^]1P2bbNPN^]1Q2abNoM_]1Q2abNoM_]1Q2bbNnM^]1S2abNmM`]1R2`bNnM`]1R2`bNnMa]1Q2_bNoMa]1Q2_bNoMb]1P2^bNPNb]1P2^bNPNc]1o1]bNQNd]1o1[bNQNe]1n1[bNRNg]1m1YbNSNh]1l1XbNTNi]1j1XbNVNh]1j1XbNVNi]1i1WbNWNj]1h1VbNXNk]1g1UbNYNk]1g1TbNZNm]1e1SbN[Nn]1c1SbN\\\\NP^1b1PbN^NQ^1a1oaN_NR^1_1naNbNT^1\\\\1laNdNW^1Y1iaNfNZ^1X1eaNiN\\\\^1U1eaNkN\\\\^1S1eaNlN^^1Q1caNoN^^1n0daNQO^^1m0daNRO]^1l0daNSO`^1g0?M3M<A[XiQ2\"}}, {\"image_id\": 152, \"category_id\": 1, 
\"bbox\": [295.0, 750.0, 132.0, 59.0], \"score\": 0.9132372140884399, \"association_id\": 5, \"light\": [-1.0453975200653076, -2.252086639404297, 0.8947960734367371, 2.1682798862457275], \"segmentation\": {\"size\": [1536, 2048], \"counts\": \"nWk=c0[_16K3N2M3N2M2O1N3N2M3M2O1O0O100000001O000O10000O1000O0100O1000O010O010000000O1000faN]NW^1c1iaN]NV^1g10LjaN^NV^1b1jaN_NU^1a1kaN_NU^1a1kaN_NU^1a1kaN_NU^1a1kaN_NU^1a1kaN_NU^1a1kaN_NU^1a1kaN_NU^1a1laN^NT^1b1laN^NT^1b1laN_NS^1b1laN^NT^1b1laN^NT^1b1laN^NT^1c1laN\\\\NT^1d1laN\\\\NT^1d1laN\\\\NT^1d1laN\\\\NT^1e1laNZNT^1f1laNZNT^1f1laNZNT^1f1laNZNT^1i10NlaNXNT^1h1laNXNT^1h1laNXNT^1h1laNXNT^1h1laNXNT^1h1laNXNT^1h1laNXNT^1h1laNXNT^1h1laNXNT^1h1laNXNT^1h1laNXNT^1h1laNXNT^1j100000MlaNZNT^1f1laNZNT^1i1000000001O000O10000000001O00000O2O0000001O00001O001O001N101O001O001N101O001O0SObaNNKLd^11iaNJH4a^1NTbN2l]1MUbN2m^10001O1N2NQgn[2\"}}, {\"image_id\": 153, \"category_id\": 1, \"bbox\": [787.0, 669.0, 260.0, 98.0], \"score\": 0.9999901056289673, \"association_id\": 3, \"light\": [-2.3786721229553223, -1.5818498134613037, 2.3564586639404297, 1.4043934345245361], \"segmentation\": {\"size\": [853, 1280], \"counts\": 
\"UY`d05\\\\j06J:F8J4M2O1N101O0O2N2N2N2N1O2O1N101O0O2O0O2O0O2O0O2O00001O000O10000000000O100000000\\\\WORNah0m1`WOSN_h0n1aWOSN^h0m1bWOSN^h0l1bWOUN^h0k1bWOUN^h0k1bWOUN^h0k1bWOVN]h0i1cWOXN]h0h1cWOXN]h0g1cWOZN]h0f1cWOZN]h0e1cWO\\\\N]h0d1cWO\\\\N^h0b1bWO_N^h0a1bWO_N^h0a1bWO_N^h0a1aWOaN^h0_1bWOaN^h0_1bWOaN^h0_1bWOaN^h0_1bWOaN^h0_1bWOaN^h0_1aWObN_h0^1aWObN^h0_1bWOaN^h0_1bWOaN^h0k10000000000000DbWOaN]h0`1cWO`N]h0`1cWO`N]h0`1cWO`N]h0`1cWO`N]h0`1cWO`N]h0`1cWO`N]h0`1cWO`N]h0`1cWO`N\\\\h0a1dWO_N\\\\h0a1dWO_N\\\\h0a1dWO_N\\\\h0a1dWO_N[h0b1eWO^N[h0b1eWO^NZh0c1gWO\\\\NYh0d1gWO\\\\NYh0d1gWO\\\\NXh0e1hWO[NXh0e1hWO[NXh0e1iWOZNVh0g1jWOYNVh0g1jWOYNUh0h1lWOWNSh0j1mWOVNRh0k1mWOVNRh0k1nWOUNQh0l1oWOTNQh0l1oWOTNQh0l1oWOTNPh0m1PXOSNPh0n1oWORNPh0o1PXOQNPh0o1PXOQNog0Q2QXOnMng0S2RXOmMmg0T2SXOlMmg0T2SXOlMlg0V2TXOiMlg0W2TXOiMkg0X2UXOhMkg0X2UXOhMjg0Z2VXOeMjg0[2VXOeMig0]2VXOcMig0e200O100O100000O1000O10001N1000001O0O1000000000000000O100O100001O000O1000000000000000000000000000000000000000E]XOdMcg0\\\\2]XOdMdg0[2\\\\XOeMdg0[2\\\\XOeMdg0[2\\\\XOeMdg0[2\\\\XOeMdg0[2\\\\XOeMdg0[2\\\\XOeMdg0[2\\\\XOeMdg0[2\\\\XOeMdg0[2\\\\XOeMdg0[2\\\\XOeMdg0[2\\\\XOeMeg0Z2[XOfMeg0e2001EZXOeMfg0[2ZXOeMgg0Z2YXOfMgg0Z2YXOfMgg0Z2YXOfMgg0Z2YXOeMhg0[2XXOeMig0Z2WXOfMig0Z2WXOfMig0Z2WXOfMig0Z2WXOfMig0Z2WXOfMjg0Y2VXOgMjg0Y2WXOfMig0Z2WXOfMjg0Z2UXOfMlg0Y2TXOgMlg0Y2TXOgMng0W2RXOiMog0V2QXOjMog0V2RXOiMog0V2QXOjMPh0U2QXOjMog0V2QXOjMPh0U2PXOkMPh0U2PXOkMQh0U2oWOjMRh0U2nWOkMRh0U2nWOkMSh0U2lWOkMUh0U2jWOkMWh0T2jWOkMVh0U2jWOkMWh0U2iWOjMWh0V2iWOjMXh0U2hWOkMXh0V2hWOiMYh0V24O1000O01O01OO2O001O1O001O001O001O001N1O2M3M2N3N2N1O2N2L5VOQWOBYi0:e0Fi\\\\Q6\"}}, {\"image_id\": 153, \"category_id\": 1, \"bbox\": [359.0, 671.0, 410.0, 119.0], \"score\": 0.9999997019767761, \"association_id\": 2, \"light\": [-2.4531023502349854, -1.7265435457229614, 2.4413952827453613, 1.5748083591461182], \"segmentation\": {\"size\": [853, 1280], \"counts\": 
\"lg[9175L1gi0o0WWOlNWg0Y1hXOgNUg0]1jXOdNUg0]1iXOdNVg0^1iXObNVg0_1jXObNUg0_1jXOaNUg0`1kXO`NUg0a1iXO`NVg0a1jXO_NVg0a1jXO_NVg0b1iXO^NVg0d1iXO\\\\NWg0d1iXO\\\\NWg0e1hXO[NWg0g1hXOZNWg0f1hXO[NXg0f1gXOZNYg0f1gXOZNYg0g1fXOYNYg0h1gXOXNYg0h1gXOXNYg0i1fXOWNZg0i1fXOXNYg0h1gXOXNXg0i1hXOWNXg0j1gXOVNYg0j1gXOVNYg0k1fXOUNYg0m1fXOTNYg0l1fXOUNYg0m1fXOSNZg0n1eXORN[g0n1eXORNZg0P2eXOQNZg0P2eXOPN[g0P2eXOPN[g0P2eXOPNZg0R2dXOoM\\\\g0Q2dXOoM\\\\g0Q2dXOoM\\\\g0R2cXOnM]g0R2cXOnM\\\\g0S2dXOmM\\\\g0T2cXOlM\\\\g0U2cXOlM]g0U2bXOkM^g0V2aXOkM]g0V2cXOjM]g0V2cXOjM]g0W2bXOiM]g0X2cXOhM]g0X2cXOhM]g0Y2cXOfM]g0Z2cXOfM]g0Z2cXOfM]g0Z2dXOeM[g0\\\\2eXOdM[g0\\\\2eXOdM[g0\\\\2eXOdM[g0\\\\2eXOdM[g0\\\\2eXOdM[g0\\\\2eXOdM[g0\\\\2eXOdMZg0]2fXOcMZg0]2fXOcMZg0]2fXOcMZg0^2fXOaMYg0`2gXO`MYg0`2gXO`MYg0`2gXO`MYg0`2gXO`MYg0`2gXO`MYg0_2hXOaMXg0_2hXOaMXg0_2hXOaMXg0_2hXOaMXg0_2hXOaMXg0_2hXO`MYg0`2fXOaMZg0_2fXOaMZg0_2fXOaMZg0_2eXObM[g0^2eXObM[g0^2eXObM[g0l2O10AfXOcMZg0]2fXOcMZg0]2fXOcMZg0]2fXOcMZg0]2fXOcMYg0^2gXObMYg0^2gXObMYg0m20@gXOdMYg0\\\\2gXOdMYg0\\\\2gXOdMYg0\\\\2gXOdMXg0\\\\2iXOdMWg0\\\\2iXOdMWg0\\\\2iXOdMWg0\\\\2iXOdMWg0\\\\2iXOdMWg0\\\\2iXOdMWg0]2hXOcMXg0]2hXOcMXg0]2hXOcMXg0]2hXOcMXg0]2hXOcMXg0]2hXOcMXg0]2hXOcMXg0]2hXOcMXg0]2hXOcMXg0]2hXOcMWg0n20000000000000000O100000EjXOWMVg0T30000O1000000000HlXOoLTg0R3mXOlLSg0T3nXOkLRg0U3nXOjLSg0W3lXOiLTg0W3lXOiLTg0W3lXOiLTg0X3kXOhLUg0X3lXOgLTg0Y3lXOgLTg0Y3lXOgLTg0Y34000000000000000O100000000000000000000000000hXOfLUg0Z3kXOfLUg0Z3kXOfLUg0Z3kXOfLTg0[3lXOeLTg0[3lXOeLTg0[3lXOeLTg0[3lXOeLTg0[3kXOfLUg0]300000000O100000O01000000O1000000000000O10AnXO[MRg0e2nXO[MRg0d2oXO\\\\MQg0d2nXO\\\\MSg0c2nXO]MRg0c2nXO]MRg0b2oXO^MQg0b2oXO^MQg0a2oXO`MQg0`2oXO`MQg0`2oXO`MPg0`2QYO_MPg0a2PYO_MPg0a2PYO_MPg0a2PYO_MPg0a2PYO_MPg0a2PYO_MPg0a2PYO_MPg0`2QYO`Mof0`2QYO`MPg0_2PYOaMPg0_2PYOaMPg0_2PYOaMPg0_2PYO`MQg0`2nXOaMRg0_2nXOaMRg0^2oXObMQg0^2oXObMQg0_2nXOaMRg0_2nXOaMQg0`2oXO`MQg0`2oXO`MQg0`2oXO`MQg0`2nXOaMRg0_2nXO`MSg0`2mXO`MSg0`2mXO`MSg0`2mXO`MSg0a2lXO_MTg0a2lXO_MTg0a2lXO_MTg0a2lXO_MTg0a2lXO_MUg0`2kXO`MUg0`2kXO_
MVg0a2jXO_MVg0a2jXO_MVg0a2iXO`MWg0`2iXO`MXg0_2hXOaMXg0_2hXOaMXg0_2hXOaMXg0_2hXOaMXg0_2hXOaMYg0^2gXObMYg0^2gXObMYg0^2gXOaMZg0m21O0AeXOdM[g0\\\\2eXOdM[g0\\\\2eXOdM[g0\\\\2eXOdM[g0\\\\2eXOdM[g0\\\\2eXOdM[g0k200001_OdXOgM\\\\g0Y2dXOgM\\\\g0Y2dXOfM]g0Z2cXOfM]g0Z2cXOfM]g0Z2cXOfM]g0Z2cXOfM]g0Z2cXOfM]g0Y2dXOgM\\\\g0Y2dXOgM\\\\g0Y2dXOgM\\\\g0Y2dXOgM\\\\g0X2eXOhM[g0X2eXOhMZg0Y2fXOgMZg0Y2fXOgMZg0X2gXOhMYg0X2gXOhMYg0X2gXOgMZg0Y2fXOgMZg0Y2fXOgMZg0Y2fXOgMZg0Y2fXOgMZg0Y2fXOgMYg0Z2gXOfMYg0Z2gXOfMYg0Z2gXOfMXg0[2hXOeMXg0[2hXOeMXg0[2hXOeMXg0\\\\2gXOcMZg0]2fXOcMYg0^2gXObMYg0^2gXObMYg0^2gXObMYg0^2gXObMYg0_2fXOaMZg0_2fXOaMZg0_2fXOaMZg0_2gXO`MYg0a2fXO_MZg0a2fXO^MZg0c2fXO]MZg0c2fXO]MZg0c2gXO\\\\MYg0e2fXO[MZg0e2fXO[MZg0e2fXO[MZg0e2fXO[MZg0e2fXO[MZg0f2fXOYMZg0g2fXOYMZg0g2fXOYMZg0g2fXOYMZg0h2eXOWM\\\\g0i2dXOWM\\\\g0j2dXOUM\\\\g0k2dXOUM\\\\g0R31N100O1O11O001O00000010O0000001O00010O0000001O00000000001O0O100000001N100000000O2O000O2N1O1O1N3N100O1O1O101N1O1O1O1O2N1O1O2M2L5L3M4N1N3N1O2O0O101N101N2N2O1M3M8G7I6J8IUoX=\"}}, {\"image_id\": 153, \"category_id\": 1, \"bbox\": [101.0, 658.0, 263.0, 79.0], \"score\": 0.9999603033065796, \"association_id\": 1, \"light\": [-2.2901482582092285, -1.4825387001037598, 2.2734453678131104, 1.3769724369049072], \"segmentation\": {\"size\": [853, 1280], \"counts\": 
\"hid2j0ji04L2O001N10001O001N2O1nVOjNch0W1\\\\WOiNdh0W1\\\\WOiNch0Y1\\\\WOgNdh0Y1[WOhNeh0X1[WOhNeh0Y1ZWOgNfh0Z1YWOfNgh0[1XWOfNgh0\\\\1WWOdNih0\\\\1WWOdNih0]1VWOcNjh0]1VWOcNjh0]1VWOcNjh0^1UWObNkh0^1UWObNkh0^1UWObNjh0`1UWO`Nkh0`1VWO_Njh0b1UWO]Nlh0c1TWO]Nlh0c1TWO]Nlh0d120O101O000O101O000000000000000O100000000000001O00000000000000000000O10O1000000000O100O1O1O1O1O1000O0100000UWO\\\\Neh0c1\\\\WO]Ndh0c1\\\\WO]Ndh0c1\\\\WO^Nch0b1]WO^Nch0a1^WO_Nbh0a1^WO_Nbh0a1^WO_Nbh0a1^WO_Nah0b1_WO^Nah0b1_WO^Nah0b1_WO^Nah0b1_WO^Nah0c1^WO]Nbh0c1^WO]Nbh0c1^WO]Nah0d1_WO\\\\Nah0d1_WO\\\\Nah0d1_WO\\\\N`h0e1`WO[N`h0f1_WOZN`h0g1`WOYN`h0g1`WOYN`h0g1`WOYN_h0i1`WOWN`h0i1`WOWN`h0i1`WOWN_h0j1aWOVN_h0j1aWOVN^h0k1bWOUN]h0l1cWOTN\\\\h0m1dWOSN\\\\h0m1dWOSN[h0n1eWORN[h0n1eWORN[h0n1eWORNZh0o1eWORN[h0n1eWORNZh0o1fWOQNZh0o1fWOQNYh0P2gWOPNYh0P2gWOPNXh0Q2hWOoMXh0Q2hWOoMWh0R2iWOnMWh0R2iWOnMWh0R2iWOnMWh0R2iWOoMVh0Q2jWOoMUh0R2kWOnMUh0R2kWOnMUh0R2kWOnMUh0R2kWOnMUh0R2kWOnMUh0R2kWOnMTh0S2lWOmMTh0S2lWOmMTh0S2lWOmMTh0S2lWOmMTh0S2kWOnMTh0S2lWOmMTh0S2lWOmMTh0S2lWOmMTh0S2lWOmMTh0S2lWOmMTh0S2lWOmMTh0S2lWOmMTh0S2lWOmMTh0S2lWOmMTh0S2lWOmMTh0S2lWOmMTh0S2lWOmMTh0S2lWOmMTh0S2lWOmMTh0S2lWOmMTh0S2lWOmMTh0S2lWOmMTh0S2lWOmMTh0S2lWOmMUh0R2kWOnMUh0R2kWOnMUh0R2kWOnMUh0R2kWOnMUh0R2lWOmMTh0S2lWOmMTh0S2lWOmMTh0S2lWOmMUh0R2kWOnMUh0R2kWOnMUh0R2kWOnMUh0R2kWOnMUh0R2kWOnMUh0R2kWOnMUh0R2kWOnMUh0R2kWOnMVh0R2iWOnMWh0R2iWOnMWh0R2iWOnMXh0Q2hWOoMXh0Q2iWOnMWh0S2hWOmMYh0R2gWOnMYh0R2gWOnMYh0S2fWOmMZh0S2fWOmMZh0S2fWOmM[h0R2eWOnM[h0V20MeWOlM\\\\h0S2dWOmM\\\\h0V21O000000001O00001O001O0000001O001O1O1O001O001O00001O001O001O00001O0JVWOaNkh0]1VWOcNjh0\\\\1WWOdNjh0Z1WWOfNih0X1YWOhNgh0W1ZWOiNgh0V1YWOjNgh0U1[WOjNeh0V1[WOiNfh0W1ZWOiNgh0V1ZWOiNfh0W1ZWOiNgh0U1ZWOkNgh0T1=01O000O2M2O2O1N2O2M2N2N2O0O2N2N2N5I5KW[jg0\"}}, {\"image_id\": 153, \"category_id\": 1, \"bbox\": [1039.0, 680.0, 152.0, 83.0], \"score\": 0.9999960660934448, \"association_id\": 4, \"light\": [-1.7287871837615967, -1.9751534461975098, 1.7304813861846924, 1.8507274389266968], \"segmentation\": 
{\"size\": [853, 1280], \"counts\": \"SVRk01cj08H3TVOFD1\\\\i0=nVOIQi09mVOIQi0;kVOF4Fch0Y1[WOiNdh0W1\\\\WOiNch0X1]WOhNch0Y1\\\\WOgNch0[1[WOfNeh0[1ZWOeNfh0[1ZWOfNdh0\\\\1[WOdNeh0\\\\1[WOdNeh0]1ZWOcNfh0]1ZWOcNfh0^1YWObNgh0_1XWOaNhh0_1YWO`Nfh0a1ZWO_Nfh0b1YWO_Nfh0a1ZWO_Neh0b1[WO^Neh0b1[WO^Neh0b1[WO^Neh0b1[WO^Neh0i1O1000000O100N2O1O1OJbWOWN]h0Q2O1M3O1O1O100O100O10000O10000O100000NnWOaMRh0_2nWOaMRh0`210000000000001O0000000000001O00O101O0000000O1000001NkWOcMTh0]2lWOcMTh0\\\\2lWOfMSh0Y2mWOhMSh0]21IlWOkMTh0U2lWOkMTh0T2lWOmMTh0R2mWOnMSh0R2lWOoMTh0P2lWOQNUh0n1kWORNUh0m1lWOSNTh0m1lWOSNUh0l1kWOTNUh0l1kWOTNUh0l1kWOTNVh0k1kWOTNUh0l1lWOSNTh0m1lWOSNUh0l1kWOTNUh0m1jWOSNWh0l1iWOTNWh0l1iWOTNXh0k1hWOUNXh0k1iWOTNXh0k1hWOUNXh0k1iWOTNXh0k1hWOTNYh0l1hWOSNYh0k1hWOUNXh0k1hWOUNYh0i1hWOWNYh0h1gWOXNZh0f1gWOZNYh0f1gWOZNZh0e1fWO[NZh0e1fWO[N[h0d1eWO\\\\N\\\\h0c1;00010O1000000O10O0100O10O010O1O001O001N2O001O1N2N2N2M5EW^Y2\"}}, {\"image_id\": 154, \"category_id\": 1, \"bbox\": [108.0, 81.0, 270.0, 304.0], \"score\": 0.9433034658432007, \"association_id\": 1, \"light\": [-2.1837501525878906, -1.7664419412612915, 2.0241823196411133, 1.5981677770614624], \"segmentation\": {\"size\": [566, 849], \"counts\": 
\"Skk1a0o`07N3M2N2O001N2O2N1O2N1N101O1O00001O00001O001O001oCTOV7m0eHXOZ7h0`H_OTMUOX9\\\\1cI_OUMUOX9]1bI_OUMTOY9]1bI_OUMTOY9^1aI^OVMTOY9^1aI^OVMUOX9^1aI]OWMUOX9^1aI]OWMUOX9^1aI]OWMUOY9]1`I^OWMUOZ9\\\\1_I_OWMUO[9[1^I@WMUO]9Z1[IAXMUO_9X1YICXMUO`9W1XIDXMUOa9V1WIEXMUOb9U1VIFXMUOc9T1UIGXMTOe9T1SIHYMSOe9T1RIIZMROf9S1PIKfLROS;n0WH0dLWO[;`0QH9bL\\\\Ob;3lGa0`L@d;MlGc0^LCg;HkGe0\\\\LFi;DkGf0[LGk;BjGh0ZLFm;@jGj0YLGm;]OkGl0XLGo;YOkGP1VLGk?R10XOU@Fi?;W@Ej?:V@Fj?:V@FX<YOXGQ1`LGW<YOXGP1aLFX<ZOWGi1h8ZNVGf1j8\\\\NTGd1l8aNoF_1P9dNnF[1S9gNlFX1T9iNlFV1T9kNlFT1T9mNnFEWMd0l;GbG:_8F_G;a8E^G;c8EUGb0m8^OUF_1k9aNmEf1U:ZNhEg1Y:YNfEh1[:XNcEh1^:YN`Eg1b:XN\\\\Eh1f:YNXEf1j:ZNREf1R;[NhDe1^;ZN_Da1`NRNP=>]D\\\\1fNZNl<:]D[1gN^Nk<7]D[1gN`Nl<6[D[1iN`Nl<4ZD\\\\1jNaNl<3YD[1mNbNj<3XDZ1POcNh<3VDY1VOcNd<4SDU1@gN]<4oCU1HgNY<3mCW1LgNV<2lCW10gNT<2kCW12gNS<2jCW14gNQ<4iCU18fNn;7hCS1;gNl;7gCQ1`0gNh;;eCk0h0iN]8FYIi0_Me0P1lNR80ZI_2gN`Mm75YIZ2lNaMj78WIW2PObMg7:WIT2RObMf7<VIR2UObMe7=nGTOMo2`0aMc7?iG[ONe2h0`M`7c0bGCO[2P1_M]7h0\\\\GJIU2_1YM[7j5fHWJX7i5jHVJU7i5mHWJR7i5PIVJP7i5QIWJn6j5SIUJm6k5SIUJm6j5UIVJj6j5VIVJj6j5VIVJj6i5WIWJi6i5XIWJg6i5YIWJg6i5ZIVJf6i5\\\\IVJd6j5]IUJb6l5^ITJb6l5^ITJb6l5_ISJa6m5_ITJ`6l5`ITJ`6l5aISJ_6l5bIUJ]6k5cIUJ]6k5cIUJ]6l5cISJ^6l5bIUJ]6k5cIUJ]6k5dITJ\\\\6l5dITJ\\\\6m5cISJ]6m5dIRJ]6m5cISJ]6h0YGQ4Z2WK]6g0[GR4W2WK_6d0^GS4T2XK_6b0aGU4o1ZK`6=gGW4h1\\\\Kf62kG`4_1]Ko7b4PH_KQ8`4nG`KS8a4kG_KV8`4jG`KW8`4hG`KY8`4fG`K[8`4dG`K]8`4bG`K`8_4_GaKb8_4^GaKb8`4\\\\G`Ke8`4ZG`Kg8`4XG`Ki8`4VG`Kj8g0aEb2d1gLl8d0hE`2\\\\1kLn8`0mEb2T1nLR99TFd2i0SMX9MaFf26]Me;Q2YDoMj;P2TDQNl;o1TDPNn;o1QDQNo;P2PDPNQ<o1oCRNR<l1nCTNS<k1mCUNW<h1hCXNa<_1_CaNc<]1]CcNf<Z1ZCfNg<Z1YCeNi<Y1WCgNj<X1VCiNj<W1VChNl<W1SCiNP=U1oBkNU=R1jBnNa=i0]BXOd=g0[BYOg=f0XBZOi=e0WB\\\\Oh=e0XBZOi=e0WB[Oj=d0VB\\\\Oj=d0VB\\\\Ok=c0UB]Ol=c0SB^On=`0SB_On=`0RB@o=?QBAP>>QBBo==QBBP>>PBBQ>>nAAS>?mAAS>`0lA@U>`0jA_OW>b0hA^OX>c0hA\\\\OY>d0fA\\\\OZ>e0eA[O[>f0dAZO]>e0cA[O^>d0cA[O_>d0`A]O`>b0`A^Oa>a0_A_Ob>`0^A@c>?]ABb>?]ABc>=]ACc>=]ADc>;]AEd>:\\\\AGc>9]AGd>9[AHf>6ZAJg>5YAKi>3WANi>2VAOj>0
VA0j>0VA1i>0VA0j>2UAMk>6RAJo>;k@EU?j0\\\\@VOd?m0Z@ROf?P1X@POh?R1V@nNj?S1V@mNj?^1101ET@iNm?W1V@eNk?\\\\18O10O01O1O1O001O00001O001O000O2N3M2J\\\\PT8\"}}, {\"image_id\": 154, \"category_id\": 1, \"bbox\": [410.0, 114.0, 337.0, 424.0], \"score\": 0.9999995231628418, \"association_id\": 2, \"light\": [-2.3681139945983887, -1.9703972339630127, 2.1546292304992676, 1.7635548114776611], \"segmentation\": {\"size\": [566, 849], \"counts\": \"lRS74b?5TB0g=2XB0e=3YBNe=4YBMf=7VBJi=9UBHj=:SBGm=9RBHn=9QBGn=;QBEo=>nABR>c0iA]OW>h0cAYO]>i0aAWO_>k0_AUOa>l0^ATOb>n0[ASOe>o0YAQOg>R1UAnNl>n1000O100O100O10000O1Dl@VNT?V2000RNi@]1W?bNRAV1n>jNSAU1m>jNUAU1k>kNUAU1k>kNUAU1k>kNVAT1j>lNVAT1j>lNVAT1j>lNWAS1i>nNVAS1i>mNXAR1h>nNXAR1h>nNXAS1g>mNZAR1f>nNZAS1e>mN[AT1d>lN\\\\AT1d>lN\\\\AU1c>lN\\\\AT1d>lN\\\\AT1d>lN\\\\AT1d>mN[AS1e>mN[AT1d>lN\\\\AT1d>lN\\\\AT1d>mN[AS1e>mN[AS1e>mN[AS1e>mN[AT1d>lN\\\\AT1d>lN\\\\AT1d>lN\\\\AT1d>lN[AU1d>lN\\\\AU1c>kN]AT1d>lN\\\\AT1d>mN[AR1f>nN[AQ1e>oN[AP1f>POZAP1f>POZAP1f>POZAP1f>POZAo0g>QOYAo0g>QOYAn0h>ROYAl0h>TOXAk0i>UOWAk0i>UOWAj0k>UOUAk0k>UOVAj0j>VOVAj0j>VOVAi0k>WOUAi0k>WOVAh0j>XOVAh0j>XOVAh0j>XOWAg0i>YOWAg0i>YOXAg0g>YOYAg0g>YOZAf0f>ZOZAf0f>ZOZAg0e>YO[Ag0e>YO\\\\Ag0c>YO]Ah0b>XO^Ah0b>XO_Ah0a>WO_Ai0a>WO_Aj0`>VOaAi0_>VObAk0^>SOcAn0]>QOdAo0\\\\>nNfAU1W>jNkAV1U>iNkAX1U>fNlAZ1Y?000O002OO01O010O00100O10O010O01O010O1O1O010O0010O02O0O100O10001O2N1N100O10O010O100O010O010O1O1O100lN\\\\@4e?J]@5c?J`@4a?Ja@5`?Ge@7\\\\?E^@HKa0h?D]@NK=i?B]@4I9[`0Je_O5Z`0Lf_O4Y`0Nf_O2Y`0Oh_O0W`01i_OOU`03l_OLR`06n_OJZ;HmH`0jKGW;MkH?mKDW;OkH=nKDT;3lH:QLBo:8PI6QLBR9l0iFMR4FSLAf8b2WKmMTL@_8i2\\\\KhMVL^OZ8n2`KdMVL^OX8P3bKbMVL^OV8S3bK`MYL[OR8Y3dK]MZLZOm7_3gKXM\\\\LYOe7h3fIeLJ;lNWO\\\\7Q4cInLMLTOUOU7Y4fInLJF\\\\OROP7^4hIoLFDBnNo6c4fIPMBBIkNm6g4dIRM_O_O1dNl6R5^IRM[O]Od7j3dHhNU7P6K5L4M3O1N2O1O1O1O1O1O1O1N2M3N2N2O1O1O0010^FcIj7]6mG\\\\Jc7e5VHoJ]7P5bHUKY7l4fHXKW7h4hH[KU7e4lH]KQ7c4PI^Km6c4SI`Kh6b4WIaKb6d4]I`K`5`5_JcJ[5a5fJ`JV5b5kJ`JQ5b5nJaJo4`5QKbJk4`5TKcJg4`5WKeJY4i5eK[Jg3V6YLmIb3V6\\\\LmIa3T6_LnI^3T6bLlI]3U6cLkI[3
W6dLkIX3X6hLhIT3\\\\6lLeIP3^6oLcIn2`6RM`Im2a6RMaIk2b6SM`Ik2b6SMaIi2b6TMbIg2b6kLYJe2g:E7I4L3M3M4L6J7H7I6G:H7I9Ghc=2W\\\\B5M4K4L3N1N2O1O0O2O001O0O101O0000001N100O100O100O1O1O2N2N2N2N3M3M3N2M100000001O000002L5K8E\\\\Xh1\"}}, {\"image_id\": 154, \"category_id\": 1, \"bbox\": [125.0, 348.0, 66.0, 39.0], \"score\": 0.9973659515380859, \"association_id\": 3, \"light\": [-1.9077346324920654, -1.1637778282165527, 1.9257543087005615, 0.9777621626853943], \"segmentation\": {\"size\": [566, 849], \"counts\": \"g^U21da02O001O001O001O0O2O001O0ZO0f_O0Y`02f_OOX`03g_OMX`05g_OKY`06f_OJZ`07e_OJY`08f_OHZ`09f_OFZ`0;e_OEZ`0=e_OC[`0>e_OBZ`0?e_OA[`0`0d_O@\\\\`0a0d_O^O\\\\`0c0c_O]O]`0d0c_O[O]`0f0b_OZO^`0g0b_OXO^`0h0b_OXO^`0i0b_OVO^`0j0b_OVO^`0j0b_OVO_`0j0a_OUO_`0k0a_OUO_`0Q11J`_OTO``0Q1001K40000010O0001O010O0000002N2N001O0O101O001N1O4K:DlZ[;\"}}, {\"image_id\": 155, \"category_id\": 1, \"bbox\": [1347.0, 710.0, 217.0, 153.0], \"score\": 0.9999996423721313, \"association_id\": 2, \"light\": [-1.7202324867248535, -2.0604653358459473, 1.7093052864074707, 1.87179696559906], \"segmentation\": {\"size\": [1536, 1883], \"counts\": \"^XUo1?V_1b1gN5K3N1O1O1N2N2O2N1O1O1M3J6G9M3L4M3L4M201O1O100O1O001O1O100O10001O1O1N2O1O1O1O001O00001O0]dNiK][1X4bdNiK][1W4bdNkK][1U4cdNkK^[1T4adNmK_[1S4adNnK^[1Z4000GbdNQL^[1m3bdNTL^[1k3cdNUL][1j3cdNWL][1h3bdN[L][1d3cdN]L][1b3cdN_L][1a3cdN_L^[1Q40000000]ObdNdL_[1n301O00000001O000000000010O0000000O100O10001O0000000O10000000000O100000001O0000000000O10O10001O0O10000O100000000000000000000000000000O100000000000000AidNULW[1Z41AhdNTLX[1l3hdNTLX[1l3hdNTLX[1m3hdNQLY[1o3gdNQLY[1o3gdNQLY[1o3gdNQLY[1o3gdNPL[[1o3fdNPLZ[1Q4edNoK[[1Q4edNoK[[1[41O001O001O0O2O001O1O1O1N101O1O2N1O1O1O3M4L4L2N2N2N1O2N1O2N7H5BQcNcMQ]1Z2obNgMS]1V2nbNjMS]1n1nbNkMO8T]1a1lbNXN;7k\\\\1]1]cNcNd\\\\1Z1^cNeNd\\\\1Y1\\\\cNhNe\\\\1T1^cNlNh\\\\1l0ZcNTOi\\\\1g0YcNYOQ]19ScNGc^1000000000000000000000000O2O00000O10001N2Lcem>\"}}, {\"image_id\": 155, \"category_id\": 1, \"bbox\": [1508.0, 701.0, 255.0, 130.0], \"score\": 
0.9999968409538269, \"association_id\": 1, \"light\": [-2.030099391937256, -1.8929362297058105, 2.0543551445007324, 1.702538251876831], \"segmentation\": {\"size\": [1536, 1883], \"counts\": \"effV2;__1;H8J5L3M1O1O001O00001N1O2N1O6J2N1O100O2O001O001O001N2O1O2N2N001N2O1O2N3M3M001N101O0000001O001ZcNdMe[1^2YdNdMf[1]2YdNcMg[1^2XdNcMg[1]2YdNcMf[1_2YdNaMg[1_2YdNaMg[1`2XdN`Mh[1`2XdN`Mh[1a2WdN_Mi[1b2VdN_Mi[1a2WdN_Mi[1b2VdN^Mj[1c2UdN]Mk[1d2TdN\\\\Ml[1e2TdNZMl[1g2SdNYMm[1i2QdNXMn[1i2QdNWMo[1k2ocNUMQ\\\\1n2mcNQMS\\\\1^301O0O1001O01O00000000001O00000000001O0000000000IocN_LR\\\\1`3ncNaLQ\\\\1_3ocNaLQ\\\\1_3ocNaLQ\\\\1_3ocNaLQ\\\\1g3000GocNdLP\\\\1\\\\3PdNdLP\\\\1\\\\3PdNdLP\\\\1\\\\3PdNdLP\\\\1\\\\3PdNdLP\\\\1f30EPdNfLP\\\\1Z3PdNfLP\\\\1Z3PdNfLP\\\\1Z3PdNfLP\\\\1Z3PdNfLP\\\\1Z3PdNfLP\\\\1Z3PdNfLP\\\\1Z3PdNfLQ\\\\1Y3ocNgLQ\\\\1Y3ocNgLQ\\\\1X3PdNiLn[1X3RdNhLn[1X3RdNhLn[1X3QdNiLn[1X3RdNhLn[1X3RdNhLn[1X3RdNhLn[1X3RdNhLn[1X3RdNhLn[1X3RdNhLn[1X3RdNhLn[1W3SdNiLm[1W3SdNiLm[1W3SdNiLm[1W3RdNjLn[1V3RdNjLn[1V3RdNjLn[1U3SdNkLm[1U3SdNkLm[1U3SdNkLm[1U3RdNlLn[1S3SdNmLm[1S3RdNnLn[1R3RdNnLo[1Q3QdNoLo[1a3000_OQdNQMo[1o2QdNQMo[1o2QdNQMo[1`3001O0^OPdNTMP\\\\1l2PdNTMP\\\\1l2PdNTMQ\\\\1k2ocNUMQ\\\\1k2ocNUMR\\\\1j2ncNVMR\\\\1j2ncNVMR\\\\1j2ncNVMS\\\\1i2mcNWMS\\\\1i2mcNWMT\\\\1h2lcNXMT\\\\1h2lcNXMU\\\\1g2kcNYMV\\\\1f2jcNYMW\\\\1g2icNYMX\\\\1f2hcNZMY\\\\1e2gcN[MZ\\\\1d2fcN\\\\MZ\\\\1d2fcN\\\\M[\\\\1c2ecN]M[\\\\1c2ecN]M\\\\\\\\1b2ecN]M[\\\\1c2ecN]M[\\\\1d2dcN\\\\M]\\\\1c2ccN]M]\\\\1c2ccN]M^\\\\1b2bcN^M^\\\\1b2ccN]M]\\\\1c2ccN]M]\\\\1c2ccN]M]\\\\1c2ccN]M]\\\\1d2bcN\\\\M^\\\\1d2ccNZM_\\\\1e2acN[M_\\\\1e2acN[M_\\\\1e2acN[M_\\\\1e2bcNZM^\\\\1f2bcNZM_\\\\1f2`cNZM`\\\\1f2acNYM`\\\\1f2`cNZM`\\\\1g2_cNYMb\\\\1f2^cNZMb\\\\1g2^cNXMb\\\\1h2^cNXMb\\\\1i2]cNWMd\\\\1h2\\\\cNXMd\\\\1h2]cNWMc\\\\1j2\\\\cNVMd\\\\1j2\\\\cNVMe\\\\1i2[cNWMe\\\\1i2\\\\cNVMe\\\\1i2[cNWMe\\\\1j2ZcNVMf\\\\1j2ZcNVMg\\\\1i2ZcNVMf\\\\1j2ZcNVMg\\\\1j2XcNVMi\\\\1i2WcNWMj\\\\1h2VcNWMl\\\\1n2001O1O2N1O3GlbN`MV]1^2jbNbMW]1c21O001O002N2N3M1O1N1H]bNSNd]1l
1\\\\bNTNe]1k1[bNUNf]1i1ZbNXNh]1d1ZbN\\\\Ni]1^1ZbNbNi]1Z1XbNfNj]1V1XbNjNi]1U1WbNkNj]1S1VbNnNk]1P1VbNPOk]1l0XbNTOl]1d0XbN\\\\OV_10000000000000000000000O101O1O0N^Vc5\"}}, {\"image_id\": 155, \"category_id\": 1, \"bbox\": [907.0, 758.0, 163.0, 327.0], \"score\": 0.9999999403953552, \"association_id\": 4, \"light\": [-2.181001901626587, -1.8758783340454102, 2.00559663772583, 1.7465589046478271], \"segmentation\": {\"size\": [1536, 1883], \"counts\": \"T^aZ1W1_^1`0D:F<B<G7I6G9YMhLjgN6_N]3\\\\Y1XMVgNh3cX1eLjfNi3nX1i1L3L4L5K5L2N2N3M2M4L3M3XhNmG`W1V8_hNlG]W1W8chNjGZW1W8fhNkGWW1W8ihNjGSW1Y8mhNgGQW1f8NFPiNfGoV1Z8RiNgGlV1Y8UiNgGjV1X8XiNiGgV1X8XiNhGgV1[8WiNeGhV1]8WiNdGgV1l8N1O2N10001O1O00001O001O0010O2N1O10O01O002O0O2O3L9nNnhNdHfW1d6^hN]IcW1^6`hNbIaW1X6dhNhI]W1S6hhNmIXW1o5khNQJVW1k5mhNUJTW1e5QiN\\\\JPW1_5TiNaJmV1Y5WiNgJjV1R5\\\\iNoJdV1i4ciNWK^V1b4hiN_KXV1Y4oiNhKRV1f3^jN[LbU1`3cjN_L_U1]3cjNdL_U1T3fjNlL\\\\U1n2hjNSMXU1j2kjNVMTU1h2njNXMSU1e2ojN\\\\MQU1a2QkN_MRU1\\\\2PkNdMSU1W2ojNiMSU1R2PkNnMRU1f1XkNZNlT1e0RlN[OTT1J`lN6eY10001O00000010O0001O001O0000000O10001O000000001O000000001O00000001O01O00000001O01O000001O000001O0001O00000000001O01O00000000001O000000001N1O2N3M3@a`N010O3VnSV1\"}}, {\"image_id\": 155, \"category_id\": 1, \"bbox\": [553.0, 770.0, 307.0, 541.0], \"score\": 1.0, \"association_id\": 3, \"light\": [-1.345299482345581, -2.6051061153411865, 1.1737697124481201, 2.3678576946258545], \"segmentation\": {\"size\": [1536, 1883], \"counts\": 
\"m^ni0f0T_1:G8I6L3L6J5I8H8J4L5L3M2N3K5K5L4M3M3M3M3L5J5L4M3M3M3M1O1L5M2M3O2N1N1O2N2N2O1N2O1O2M201N2O1O1O1O1N3N1M5K4L4M4M3M3N3M2M5L3M4K4M2M4K5L6I>A;VnNdG^k0a8\\\\TOdG`k0`8]TObGak0a8\\\\TOaGbk0b8\\\\TO^Gdk0f8WTO[Ghk0k8STOVGkk0n8RTORGnk0Q9nSOPGQl0S9mSOmFRl0U9mSOkFRl0X9lSOhFRl0]9kSOdFRl0d9hSO\\\\FTl0n9eSOSFXl0V:bSOjE[l0]:aSOcE]l0a:aSO_E]l0f:`SOZE]l0m:_SOSE\\\\l0W;_SOiD^l0a;[SO_Dbl0i;YSOWDel0P<VSOPDhl0U<VSOjChl0\\\\<TSOdCjl0b<RSO^Cll0k<mROUCQm0]=_ROcB_m0g=YROZBem0k=WROUBhm0o=UROQBjm0R>UROmAkm0U>SROkAmm0X>PROhAPn0\\\\>lQOeASn0]>lQObASn0b>jQO^AVn0c>iQO]AWn0e>gQO[AYn0g>eQOYA[n0j>bQOVA^n0l>`QOTA`n0n>^QORAbn0P?\\\\QOQAcn0P?\\\\QOPAdn0Q?[QOo@en0Q?[QOo@en0Q?[QOo@en0Q?[QOo@en0R?YQOPAfn0P?ZQOPAfn0P?ZQOPAfn0T?UQOn@jn0S?UQOm@ln0R?TQOn@ln0S?RQOn@nn0R?RQOn@nn0S?PQOo@on0V?kPOk@Vo0o?001O1O2N102M3M2N2N2O2M4L6J6J4M2M2QLdoNVH]P1d7PPOTHRP1b7bPOZFiN=fP1S9UQOiE\\\\NU1`P1n8QROSGPn0j8RROVGom0g8QRO[GPn0b8QRO`Gnm0^8PROfGQn0V8iQOSHWn0e7PRO\\\\HQn0\\\\7UROeHkm0U7ZROlHgm0o6]ROQIdm0k6_ROUIcm0e6aRO[Iam0]6eROcI]m0R6lROnIUm0i5TSOVJnl0e5USO\\\\Jkl0a5WSO_Jil0_5YSOaJhl0\\\\5ZSOdJfl0Z5\\\\SOfJel0W5]SOiJcl0S5aSOlJ`l0P5dSOPK]l0l4fSOTKZl0j4iSOUKXl0i4iSOWKWl0f4lSOZKUl0b4nSO^KSl0U4YTOkKhk0a3lTO^LVk0e1eVO[N\\\\i0\\\\1mVOcNUi0V1PWOjNQi0S1QWOmNQi0o0QWOQOPi0m0QWOSOPi0k0RWOTOnh0k0SWOUOnh0j0RWOVOoh0h0RWOXOoh0f0RWOZOoh0d0RWO\\\\Ooh0b0SWO]Onh0a0SWO^Ooh0`0RWO@oh0>SWOAoh0=QWOCQi09QWOGSi03oVOMWi0JlVO6ci0kNnVOT1oS10000000001O00001O0O101O00001O0O2O1O000O101O00001O00001O000O2O000O101O0O10001O001O1O001O001O000O2O001O001O0O1000TOiNobNW1n\\\\1POnbNo0R]1VOjbNj0U]1YOjbNf0U]1]OibNc0W]1^OhbNb0W]1@hbN?Y]1AgbN?Y]1BebN?[]1BdbN>\\\\]1CbbN>^]1Q1100O10001N101O000O2O000O2O001N2O1N3M2N2O1N10001N2N4K3N2N2N1O2O1N3M3L4K7I4M2O2JiXm_1\"}}, {\"image_id\": 155, \"category_id\": 1, \"bbox\": [1075.0, 697.0, 242.0, 190.0], \"score\": 0.9998059272766113, \"association_id\": 5, \"light\": [-1.1549427509307861, -1.6003389358520508, 1.1383495330810547, 1.510650634765625], \"segmentation\": {\"size\": [1536, 1883], \"counts\": 
\"UY]b11l_1301N10001O001N101O000UbNJl[16TdNJl[16TdNJk[17UdNIk[17TdNJl[16TdNJl[16TdNKj[16VdNJj[16VdNJj[16UdNKk[15UdNKk[15UdNKk[14VdNMi[13WdNMi[13WdNMi[12XdNNh[12XdNOg[10YdN1g[1OYdN1f[10ZdN0f[10ZdN1e[1OZdN3e[1M[dN4d[1L\\\\dN4d[1L\\\\dN8_[1IadN8^[1HbdN8^[1HadN:^[1FbdN:^[1EcdN;][1EcdN;][1EcdN;][1EcdN;][1EcdN<][1@fdN`0Z[1_OgdNa0Y[1^OhdNb0X[1^OgdNd0X[1\\\\OhdNd0W[1]OidNc0W[1]OidNc0W[1]OhdNd0gX1YOjgN4_Oc0eX14SgNYO8c0cX1>mfNPO?b0cX1e0hfNiNe0b0bX1k0dfNcNi0c0cX1o0_fN^Nn0c0bX1Q1_fN\\\\No0c0bX1R1^fN[NP1c0bX1R1^fN[No0d0cX1R1]fN[No0c0cX1S1^fNZNo0c0cX1S1^fNZNn0d0cX1S1_fNYNn0d0cX1S1_fNZNl0d0dX1S1`fNYNk0e0dX1S1afNYNe0HdNl0VZ1T1`fNYNd0i0kX1o0afNXNc0j0lX1m0bfNZN`0j0mX1m0cfNYN?k0nX1k0dfN[N<j0QY1k0cfN\\\\N:j0RY1k0dfN\\\\N8j0TY1k0dfN[N7j0UY1k0dfN[N7j0UY1j0ffN\\\\N4j0UY1k0hfN[N2j0VY1k0hfN[N2j0VY1j0jfN\\\\NOj0VY1k0kfN[NNk0WY1i0lfN]NLj0WY1j0mfN\\\\NLj0WY1i0ofN]NIj0XY1i0ofN]NIj0WY1j0PgN]NHi0XY1i0RgN]NEk0YY1h0RgN^NDj0ZY1h0SgN]NCj0[Y1i0RgN^NBi0\\\\Y1j0RgN\\\\NAk0]Y1i0RgN]N@j0^Y1i0SgN\\\\N_Ok0^Y1i0SgN]N]Ok0`Y1h0SgN^N\\\\Oj0aY1h0SgN_N[Oi0bY1h0TgN^NZOj0bY1h0UgN^NXOj0cY1i0UgN\\\\NXOk0cY1i0UgN]NWOj0eY1i0TgN]NVOj0eY1i0WgN\\\\NTOk0eY1i0WgN]NSOj0fY1i0XgN\\\\NQOl0gY1h0XgN\\\\NQOl0gY1h0YgN\\\\NnNm0iY1h0XgN[NoNm0jY1g0WgN]NmNm0lY1f0XgN\\\\NlNn0lY1f0XgN]NjNn0mY1f0YgN]NhNn0oY1e0ZgN]NeNo0RZ1c0YgN3hX1LXgN4hX1LYgN3hX1LXgN3iX1NWgN1jX1NWgN1iX1OXgN0hX10XgN0iX1OXgN0hX10XgN0hX10YgNOhX10XgN0iX1OXgN0iX10WgNOjX10VgN0kX1OVgN0kX1OUgN1lX1NUgN0mX1OSgN1oX1MRgN2oX1MQgN3QY1KofN5RY1JofN5RY1JnfN5TY1JmfN5[Y1CffN<^Y1@bfN`0fY1XO[fNg0kY1ROVfNn0PZ1lNQfNS1TZ1hNleNX1VZ1eNleNZ1ZZ1^NheNa1\\\\\\\\10001O000000001O00001O00001O0000001O0O2O001O00001O000O2O001O001O00001O0O101O1O1O001O001O00001O00001O001O00001O00001O00001O00001O00001O00000O2O00001O00001O000000001O00001O0O101O000O101O0O2O3KPU`j0\"}}, {\"image_id\": 156, \"category_id\": 1, \"bbox\": [153.0, 35.0, 182.0, 484.0], \"score\": 0.9999911189079285, \"association_id\": 2, \"light\": [-2.1616272926330566, -2.1512200832366943, 2.0127739906311035, 2.028555154800415], \"segmentation\": 
{\"size\": [533, 665], \"counts\": \"gn_2:n?c0E;F8I6J5M4K5L4L5K4L3eCfMV:]2^EQN^:Q2\\\\E\\\\NZ:h1^EbN]:a1ZEeNe:f3O1iDgJQ;[5nDfJP;a5M2M3fKZJfK;TOa5d4ZKoJ2Oi4P5b4O2N10BYJPFf5Q:\\\\JlEc5U:>1N1N2O2N2O00100O1RNYEmMh:P2bEiM^:V2hEdMY:[2mE_MU:_2nE^MS:b2nE\\\\MS:c2PFZMk9PO\\\\Ef3k0VMk9TOZEe3m0UMi9WO[Ec3n0PMj9^OXEb3o0kLl9DVE`3Q1eLk9MTE^3S1]Ln96oD]3Z<dLgC\\\\3X<eLhCZ3X:`L]G7\\\\NX3S:iL\\\\G0aNW3o9QM[GIfNV3l9WMYGEkNT3i9]MXG@oNS3e9iMRGVOZOP3b9SNkFnNDo2_9^ObFa0\\\\9AdF?[9AgF>X9ChF=W9DjF;U9EmF:Q9HoF8o8ISG6l8IWGXM]O^2Z9:]GPME`2m8`0TH_Ol7?WH@i7?[H^Oe7`0^H_Ob7`0_H@a7?aH@_7?cH@^7?dH_O]7`0dH_O^7?bHAb7;_HDe78]HFg76YHJl71UHNo7NQH3Q8JPH5T8GmG8X8CiG<c8XO]Gh0R9iNnFW1^9]NcFb1c9YN]Fg1g9TNYFl1k9QNUFo1n9nMQFS2X<100O1O2O2M4L3M5K4K5L4N03NN2N1O000O2OO0100O1O1O100O1O2N1O1O1O1O1O1UJiLVNX3e1PMWNQ3d1XMWNi2d1`MWNa2c1kMVNV2e1VNSNk1i1\\\\NSNe1i1aNTN`1h1fNUN[1i1hNUNY1j1iNTNY1j1kNRNW1k1ROmMP1Q2ZOdMh0[2KRM7l20mL1Q36iLKV3:dLIY3?_LD_3c0YL@d3g0TL]Oj3g0QL\\\\Om3g0iKCS4a0cKH[4[6N2O1N2N2N2N2N3M2M2O2N1N2O2N3M7I6K6I?BY1fN4M1N3L6K3L5L2L5JY`[5\"}}, {\"image_id\": 156, \"category_id\": 1, \"bbox\": [417.0, 128.0, 214.0, 283.0], \"score\": 0.999993622303009, \"association_id\": 1, \"light\": [-2.1045074462890625, -2.3309483528137207, 1.945704460144043, 2.1648154258728027], \"segmentation\": {\"size\": [533, 665], \"counts\": 
\"T^i64]`04O1O100O2O00000000000@Ef@;Z?Ef@;Z?Fe@:[?Gd@9\\\\?Gd@9\\\\?Hc@8]?Hc@8]?Ib@7_?Ha@8_?I`@7`?I`@7a?H_@8a?H_@8a?I^@7b?I^@7c?I\\\\@7d?I[@8f?GZ@8g?IX@7i?HW@8R>E^C4^N5i<2PDNf06R;_1lDaNm:g1REYNm:i1REWNl:k1UETNk:l1UETNj:l1WETNi:l1XESNh:l1YEUNe:l1[ETNe:l1\\\\ESNd:l1]ETNb:m1^ESNb:l1`ESN`:l1aETN_:k1bEVN]:i1eEVNZ:i1hEXNW:f1lEYNT:e1nE[NR:b1QF_No9\\\\1VFcNj9W1\\\\FiNd9i0jFXOU9f0nFYOR9e0PG[OQ9c0PG^Oo8b0RG]On8b0SG_Ol8a0TG_Ol8`0VG@i8?XGAh8>YGBg8=ZGDe8<[GEd8<[GDe8<\\\\GCh4K_Kb0IDd42^K:NDb46^K61C`49^K51Ba4:]K42Ca49\\\\K43Ca4:[K35Ba4<XK27Ca4<VK19Ca4>TK0:Ad4?QK0<@c4a0PKO=^Og4b0kJ0>^Oi4a0hJ1`0\\\\Ol4b0bJ3b0XOQ5e0ZJ3f0WOR5e0WJ4h0TOT5i0RJ3a9M^F4b9K^F5d9JZF8f9HXF;>QOU6e0ZI<2E]60]Ia0B0Q7AYIc0_O1X7]OUIf0^O1\\\\7[OSIT1POCm7]OmHU1fNK]8XOZG\\\\O:i1BKj8]500000000O10O10O0100O000O0O1N3K5L5M2O2N2M3G9kNU1N2O002N1N2M3M3M3N2N3N2M4L5J:C:H7J4M4K5F:mNS1E:K5L4M2N2N2L4M3N2O1O0O2O0N200O100000000O1O2O0010O00100N101N2O010OO2N2O001O1O100O001O1N2N1O2O0000N20O1N2010O1O100O1O001N200000000O100001N4L6Ijaa0\"}}, {\"image_id\": 156, \"category_id\": 1, \"bbox\": [286.0, 150.0, 90.0, 231.0], \"score\": 0.9957440495491028, \"association_id\": 3, \"light\": [-2.545126438140869, -1.9506754875183105, 2.386688709259033, 1.7967815399169922], \"segmentation\": {\"size\": [533, 665], \"counts\": \"XWe41c`02O2M5L1O1O00001O0000000000000001OO10000000000^IIfL7W3NgL2W32gLNX38cLH\\\\3`0]L@b3d0[L\\\\Oe3g0aJSOaM6n7S1oIUOkMHV8_1`GYNk0k0YO\\\\O]8g1kFbNa0_OHS1=SO`8P3mFoN>PNf8j5Q14Lj0VO1O1O00000000000002N2N2N3Me0]MfEaM[:^2kE\\\\MV:b2RFWMn9i2VFSMj9n2YFnLh9Q3[FlLe9T3_FhLa9Z3eF]L]9d3jFSLW9m3lFnKW9Q4h1OoDnK]9U4`FlK`9T4_FmK`9U4^FkKc9T4]FlKd9T4[FlKg9T4WFlKk9U4RFkKP:U4nEkKS:n2oDQNl0QOV:g2YEVN>SO\\\\:a2^EZN3UOa:\\\\2dEZNKZOc:W2hE]ND\\\\Oe:R2lEbN^O\\\\Ok:j1lEiNXO]OS;^1mETOnN^OX;f0_D\\\\O^1a0iN]O_;`0\\\\F4TN\\\\Od;;ZF9QN\\\\Ok;2WFd0lMZOh>g0VAYOj>j0SAVOn>i0RAWOo>h0QAXOR?d0o@\\\\OU`001NSRf4\"}}, {\"image_id\": 157, \"category_id\": 1, \"bbox\": [2031.0, 756.0, 264.0, 111.0], \"score\": 0.9999402761459351, \"association_id\": 1, \"light\": 
[-0.5085515975952148, -3.3686821460723877, 0.458385169506073, 3.2801592350006104], \"segmentation\": {\"size\": [1728, 2304], \"counts\": \"gTT[35mc1Nm]N4na12\\\\\\\\NH_16Rb1=k]NDQb1e0i]N[OVb1i0f]NXOYb1l0d]NTO[b1o0c]NQO\\\\b1Q1c]NoN]b1R1a]NoNZb1W1e]NiN[b1X1d]NhN[b1[1c]NfN[b1]1c]NcN\\\\b1`1b]N`N^b1a1a]N_N^b1c1a]N]N^b1e1a]N[N_b1e1a]N[N^b1f1b]NZN^b1g1a]NYN^b1h1b]NXN^b1i1a]NWN^b1k1a]NUN_b1m1_]NSN`b1o1_]NQNab1P2^]NPNab1Q2`]NnM`b1S2_]NmMab1S2_]NmMab1T2^]NlMab1V2^]NjMbb1W2]]NjMab1Y2]]NgMcb1[2\\\\]NdMcb1^2\\\\]NbMdb1_2[]NaMeb1_2[]NaMeb1`2Z]N`Mfb1`2Z]N`Meb1a2\\\\]N^Mdb1c2[]N]Meb1c2[]N]Meb1d2Z]N\\\\Mfb1d2Z]N\\\\Mfb1e2Y]N[Mgb1e2Y]N[Mgb1e2Z]NZMeb1g2[]NYMeb1g2[]NYMeb1g2[]NYMdb1h2\\\\]NXMdb1g2]]NYMcb1g2\\\\]NZMcb1g2]]NYMcb1g2]]NYMcb1f2^]NZMbb1f2^]NZMbb1f2]]N\\\\Mab1e2_]N[Mab1e2_]N[Mab1d2`]N\\\\M`b1d2`]N\\\\M`b1d2`]N[Mab1e2^]N\\\\Mbb1d2^]N\\\\Mbb1c2_]N]Mab1c2_]N]Mab1c2_]N]Mab1b2`]N^M`b1a2a]N_M_b1a2`]NaM_b1^2b]NbM^b1^2b]NbM^b1]2c]NcM]b1]2c]NcM]b1]2c]NcM]b1]2c]NcM]b1]2c]NcM]b1\\\\2d]NdM\\\\b1\\\\2d]NdM\\\\b1\\\\2d]NdM\\\\b1\\\\2d]NdM\\\\b1\\\\2d]NdM\\\\b1[2e]NeM[b1[2e]NeM[b1[2e]NeM[b1[2d]NfM\\\\b1Z2d]NfM\\\\b1Z2d]NfM\\\\b1[2c]NeM]b1[2c]NeM]b1[2c]NeM]b1[2c]NeM]b1[2c]NeM]b1[2c]NfM\\\\b1Z2e]NeM[b1[2e]NeM[b1\\\\2d]NdM\\\\b1\\\\2d]NdM\\\\b1\\\\2d]NdM\\\\b1\\\\2d]NdM\\\\b1\\\\2d]NdM\\\\b1\\\\2d]NdM\\\\b1\\\\2d]NdM\\\\b1\\\\2d]NdM\\\\b1\\\\2c]NeM]b1[2c]NeM]b1[2d]NdM\\\\b1\\\\2d]NdM\\\\b1]2c]NcM]b1]2c]NcM]b1]2c]NcM]b1]2c]NcM]b1]2c]NcM]b1]2c]NcM]b1]2c]NcM^b1]2b]NbM^b1^2b]NbM^b1^2b]NbM^b1_2a]NaM_b1`2`]N`M`b1`2a]N_M_b1a2a]N_M`b1a2_]N_Mab1a2`]N^M`b1b2`]N_M_b1b2`]N^M`b1c2_]N]Mab1d2_]NZMbb1f2^]NZMbb1g2]]NYMcb1h2]]NWMcb1i2^]NVMbb1j2_]NUMbb1k2]]NUMcb1k2^]NTMbb1m2]]NSMcb1m2]]NSMcb1n2]]NQMcb1o2]]NQMcb1P3]]NoLcb1Q3]]NoLdb1Q3[]NoLeb1Q3\\\\]NoLcb1Q3]]NoLcb1Q3]]NoLcb1Q3]]NoLcb1R3\\\\]NnLdb1R3\\\\]NnLdb1R3\\\\]NnLdb1R3\\\\]NnLdb1R3\\\\]NnLcb1S3]]NmLcb1S3]]NmLcb1S3]]NmLcb1S3]]NmLcb1S3]]NmLcb1S3]]NmLcb1R3^]NnLbb1R3^]NnLbb1R3^]NnLbb1R3]]NoLcb1Q3]]NoLcb1P3^]NPMbb1P3^]NPMbb1P3^]NPMbb1o2_]NQMab1o2_]NQMab1o2^]NR
Mbb1m2_]NSMab1m2_]NSMab1m2^]NTMbb1l2^]NTMbb1k2_]NUMab1k2_]NUMab1k2^]NVMbb1j2^]NVMbb1i2_]NWMab1i2_]NWMab1i2_]NWMab1i2^]NXMbb1h2^]NXMcb1g2]]NYMcb1f2^]NZMbb1f2^]NZMbb1f2^]NZMbb1f2^]NZMbb1f2^]NZMbb1f2^]NZMcb1e2]]N[Mcb1d2^]N\\\\Mbb1d2^]N\\\\Mbb1d2^]N\\\\Mbb1d2^]N\\\\Mbb1e2]]N[Mdb1d2\\\\]N\\\\Mdb1d2\\\\]N\\\\Mdb1d2]]N[Mcb1e2]]N[Mdb1d2\\\\]N\\\\Mdb1d2\\\\]N\\\\Mdb1d2\\\\]N\\\\Mcb1e2]]N[Mcb1e2]]N[Mcb1e2]]N[Mcb1e2]]N[Mcb1e2]]N[Mcb1e2]]N[Mbb1f2^]NZMbb1f2^]NZMbb1g2]]NYMcb1g2]]NYMdb1f2]]NYMcb1h2\\\\]NXMdb1h2\\\\]NXMeb1g2[]NYMeb1h2Z]NXMfb1h2[]NWMeb1i2[]NWMeb1i2[]NWMfb1i2Y]NWMgb1i2Z]NVMfb1j2Z]NVMfb1k2Y]NUMgb1l2X]NTMhb1l2X]NTMhb1m2X]NRMhb1n2X]NRMhb1S30000000LX]NPMib1o2W]NQMib1o2W]NQMjb1n2V]NRMjb1n2V]NRMjb1n2V]NRMkb1m2U]NSMkb1m2U]NSMkb1m2V]NRMjb1n2V]NRMjb1n2V]NRMjb1n2V]NRMkb1m2U]NRMlb1n2T]NRMlb1n2T]NRMlb1n2T]NRMmb1m2S]NSMnb1l2R]NTMob1k24O1N4M4L1O1N2O1O2M3M3M:ES]>\"}}, {\"image_id\": 157, \"category_id\": 1, \"bbox\": [539.0, 1054.0, 524.0, 463.0], \"score\": 1.0, \"association_id\": 3, \"light\": [-1.075624704360962, -2.8214423656463623, 0.950005054473877, 2.5341622829437256], \"segmentation\": {\"size\": [1728, 2304], \"counts\": 
\"^h^l0_1_d17J4L5J5L4K5K6I6U`N[MR\\\\1i2kcNYMS\\\\1j2kcNXMR\\\\1k2lcNVMR\\\\1m2lcNTMS\\\\1n2lcNSMQ\\\\1P3mcNQMR\\\\1P3QdNlLo[1V3UdNdLk[1]3[32N2N2O1N2N2O1N2O1N2N1N3N2M2N3N1N3N1O2O0O2N2M4M2M6I:G=C?A9G6K4K5L3M4K4L4M2M4K4M3M4M2N2N101N2N1O2O2M2N2N2M3N2dcN`GU[1b8ddNeGZ[1\\\\8idN`GW[1`8QeNXGoZ1h8U1O010O0010O01O12M2O1N2O0O1O100O2N1N2OCYcNcGf\\\\1^8ZcNbGe\\\\1m80A[cNcGd\\\\1^8\\\\cNcGb\\\\1^8^cNbGb\\\\1^8^cNbGa\\\\1o800O10001N_ObcNbG^\\\\1^8bcNbG]\\\\1_8ccNaG\\\\\\\\1`8dcNaGZ\\\\1`8fcN`GX\\\\1b8hcN^GV\\\\1W9[fN[FlV1h9ohN^FmV1e9QiN^FlV1d9SiN^FjV1e9UiN\\\\FhV1f9XiNZFgV1g9XiN[FeV1g9[iNYFdV1h9\\\\iNWFdV1j9\\\\iNVFbV1X9WgNjFX2L`V1\\\\9XgNiFW2K_V1^9ZgNgFW2J^V1a9[gNeFW2I\\\\V1e9]gNbFW2I[V1f9^gNbFV2G[V1i9_gN`FV2F[V1j9`gN`Fe0]Og08TW1l9`gN_Fd0@f04VW1n9`gN^Fc0De0NYW1P:_gN_Fb0Ed0K[W1Q:`gN_F`0Gd0H]W1R:_gN_F?Id0E^W1S:`gN_F>I\\\\Y1h9VfN`F<J\\\\Y1f9YfN`F9L]Y1e9ZfN`F7L_Y1c9\\\\fN`F30j0ZO`W1W:cgN`F11k0XOaW1V:dgNaFO3k0VOaW1V:fgNdFJ3m0SObW1W:ggNhGc0QNfW1V:hgNjGa0PNfW1W:jgNjG>oMgW1X:kgNlG;lMiW1X:mgNnG8jMjW1Y:ngNoG5iMlW1Y:ogNoGKgM11UX1X:PhNQHGiM3NUX1Y:QhNYHH_MWX1X:RhNYHE`MXX1W:ThN`HlW1`7ThNaHkW1_7UhNaHkW1_7UhNbHjW1^7VhNbHkW1]7UhNdHjW1\\\\7VhNdHjW1\\\\7VhNdHjW1\\\\7VhNeHiW1[7WhNeHiW1[7WhNeHiW1[7WhNfHhW1Z7XhNfHhW1Z7XhNfHhW1Z7XhNfHhW1Z7XhNgHgW1Y7YhNgHgW1X7ZhNhHfW1X7ZhNiHeW1W7[hNiHeW1W7[hNjHdW1U7]hNkHcW1U7]hNlHbW1T7^hNlHbW1T7]hNnHcW1P7^hNPIcW1o6]hNRIcW1l6^hNTIbW1l6^hNUIcW1h6^hNXIcW1f6^hNZIcW1d6^hN\\\\IcW1b6^hN_IaW1`6`hN`IaW1^6`hNbI`W1]6ahNcI`W1\\\\6`hNeI`W1Y6ahNgI_W1Y6ahNhI_W1W6ahNiI`W1U6ahNlI_W1S6ahNmI`W1R6`hNnIaW1Q6_hNPJaW1n5`hNRJbW1l5^hNTJcW1k5]hNUJeW1i5[hNWJfW1h5ZhNYJgW1e5YhN[JiW1c5WhN]JkW1a5ThN`JPX1QMjgNV76iK]Y1n3dfNRL^Y1l3bfNTL`Y1j3`fNVLaY1i3_fNWLbY1h3^fNXLcY1g3]fNYLcY1g3]fNYLdY1f3\\\\fNZLeY1e3[fN\\\\LeY1c3[fN]LfY1a3[fN_LgY1_3YfNaLhY1^3WfNcLkY1[3VfNdLkY1[3UfNeLkY1Z3VfNfLkY1Y3UfNgLlY1X3TfNhLlY1X3TfNhLlY1X3TfNiLkY1W3UfNiLkY1V3VfNjLjY1V3VfNjLjY1V3VfNjLkY1U3UfNkLkY1U3UfNkLkY1U3UfNkLkY1U3UfNkLkY1U3UfNkLkY1U3UfNkLkY1U3UfNkLkY1T3VfNlLjY1T3VfNmLiY1S3WfNmLiY1S3WfNmLiY1R3XfNnLhY1R3XfNnLhY1R3XfNnLhY1Q3YfN
oLgY1Q3YfNoLgY1P3ZfNPMfY1Q3YfNoLgY1Q3YfNoLhY1P3XfNPMhY1Q3WfNoLiY1Q3WfNoLiY1R3VfNnLjY1R3WfNmLiY1T3VfNlLjY1T3VfNlLkY1S3VfNlLjY1U3UfNkLkY1U3VfNjLkY1V3TfNjLmY1U3TfNkLlY1V3RfNjLoY1W3PfNhLQZ1\\\\3keNcLVZ1`3feN`L\\\\Z1d3_eN[LbZ1m3UeNSLkZ1T2[eN_OfZ18geNCZZ1:jeNDWZ19meNESZ19QfNEPZ18TfNFnY14XfNXMaNb0X[1Q2]fNYM_Nd0V[1l1bfNYMbNf0nZ1j1gfN_M^Nc0mZ1i1jfNdMYNa0nZ1h1lfNhMUN?P[1g1nfNjMQN>R[1f1ofNmMnM<T[1e1PgNPNjM;W[1c1QgNoMkM=U[1b1SgNnMiM`0T[1a1WgNkMfMd0U[1^1XgNdL_Mf06X1T[1[1mgNWMQM^1S[1W1lgNkL`L9d0e1R[1S1kgNPMcL1c0l1Q[1P1jgNTMaMl1fZ1n0igNYM_Mi1iZ1l0hgN]MYLMn0k1R[1i0ggNhMUM_1U[1g0ggNlMRM]1X[1e0fgNTNmLX1\\\\[1d0ggNUNlLW1^[1a0ggNZNiLV1`[1?hgN[NhLV1a[1<igN_NdLV1d[18igNcNcLU1e[14kgNhN^LU1h[10lgNkNRLRO4T2n[1LmgNVOSLo0Q\\\\1ImgNYOPLo0T\\\\1FmgN\\\\OmKo0X\\\\1BlgNj1UX1SNmgNm1UX1oMmgNR2TX1hMogNY2TX1^MRhNb2QX1nL\\\\hNR3Y]10O1000000O2O000O100O1O10000O100000000O1000000000000000lLo]NY2Qb1gMo]NY2Qb1fMQ^NY2oa1gMQ^NY2oa1gMQ^NY2oa1gMR^NX2na1hMR^NX2na1hMR^N]OMd2Qb1oMS^N[OMf2Pb1oMS^NZOOf2na1PNS^NYO0g2la1PNV^NXONh2la1PN\\\\^NP2da1PN\\\\^NP2da1PN\\\\^NP2da1QN[^No1ea1QN\\\\^Nn1da1RN\\\\^Nn1da1RN\\\\^Nn1da1RN\\\\^Nn1da1RN\\\\^Nn1da1RN\\\\^Nn1da1RN\\\\^Nn1da1RN\\\\^Nn1da1RN]^Nm1ca1SN]^Nm1ca1SN]^Nm1ca1SN]^Nm1ca1SN]^Nm1ca1SN]^Nm1ca1SN]^Nm1ca1SN]^Nm1ca1SN]^Nm1ca1SN]^Nm1ca1SN]^Nm1ca1SN]^Nm1ca1SN]^Nl1da1TN\\\\^Nl1da1TN\\\\^Nl1da1TN\\\\^Nl1da1TN\\\\^Nl1da1TN]^Nk1ca1UN]^Nk1ca1UN]^Nk1ca1UN]^Nk1ca1UN]^Nk1ca1VN\\\\^Nj1ea1UN[^Nk1ea1UN[^Nk1ea1UN[^Nk1ea1VNZ^Nj1fa1VNZ^Nj1fa1WNZ^Nh1fa1XNZ^Nh1fa1XNZ^Nh1fa1XNZ^Nh1fa1YNY^Ng1ga1YNZ^Nf1fa1ZNZ^Ne1ga1[NY^Ne1ga1[NY^Ne1ga1\\\\NX^Nd1ha1\\\\NX^Nd1ia1[NW^Ne1ia1[NW^Ne1ia1\\\\NV^Nd1ja1]NT^Nd1la1]NS^Nc1na1]NP^Nd1Pb1]Nm]Ne1Tb1S100001O00001N1000001O0000001O00001O0000001O000000001O00001O0O2O001O001O00001O00001O000O2O001O001O001O1O001O1O001O001O0O2O001O1O1N2O1O3M2M3N1O001N2O2N2N1N3N1O1N6K2N1N3N2M3N1N2O001N10001O0O2O001O1N2O2N1N2O1O0O2O1O0O2O001O0O2O1N2N2M6Kig\\\\Q2\"}}, {\"image_id\": 157, \"category_id\": 1, \"bbox\": [1332.0, 1190.0, 487.0, 328.0], \"score\": 0.9999799728393555, 
\"association_id\": 4, \"light\": [-1.9794131517410278, -1.7078197002410889, 1.9195244312286377, 1.5760345458984375], \"segmentation\": {\"size\": [1728, 2304], \"counts\": \"YnXV2P1ld1e0]O6J5K6K5K3N2N2N1O2M4M3M3M3M2N1O1O0010O001O1O1O001O1O1O1O1O00100O2N101N5L5J5L2M3N000O100000001000O104K8H4Q^NlMi_1W2l_NSNR`1Q2e_NVNX`1T2[_NQNd`1i3L6K2N3Ma0^O5L1N100O10O01O000000000O100000O01000hM``N^L7e0X_1h2faNVMZ^1g2kaNWMT^1f2SbNVMm]1i2XbNSMi]1k2\\\\bNRMc]1m2bbNnL_]1Q3fbNcLc]1Z3b201O1O00100O01O100O1O1J6J6N2N2N101O1O1O1O2M2N2M3M3laNdKbZ1^4\\\\eNiK]Z1Y4aeNjK]Z1W4beNkK\\\\Z1V4ceNkK\\\\Z1W4beNkK\\\\Z1V4deNjK[Z1W4eeNiKZZ1X4feNhKYZ1Y4feNhKZZ1Y4eeNgKZZ1Z4feNfKYZ1[4feNfKZZ1[4eeNeKZZ1\\\\4feNdKZZ1]4deNdK[Z1]4eeNcK[Z1^4ceNcK]Z1^4\\\\dNQK[Oa0X\\\\1_4]dNQKYOa0Z\\\\1^4\\\\dNRKZO?[\\\\1`4ZdNQKZO`0[\\\\1`4[dNQKXO`0\\\\\\\\1`4\\\\dNPKXO`0\\\\\\\\1`4\\\\dNQKVO`0]\\\\1a4\\\\dNPKjNJFj0S]1]4]dNQKdNm0n\\\\1S4^dN`La[1a3_dN^Lb[1c3\\\\dN^Lc[1c3]dN]Lc[1c3]dN]Lc[1c3]dN]Lb[1e3]dN[Lc[1e3]dNZLd[1f3\\\\dNZLd[1g3[dNYLe[1g3[dNYLe[1h3ZdNXLf[1h3ZdNXLf[1i3YdNVLg[1l3XdNTLh[1m3WdNSLi[1n3VdNRLj[1n3VdNRLj[1o3UdNQLk[1o3UdNQLk[1P4TdNPLl[1P4TdNPLl[1Q4SdNoKm[1R4RdNnKn[1R4RdNmKo[1T4PdNlKP\\\\1T4PdNlKP\\\\1U4ocNkKQ\\\\1U4ocNkKQ\\\\1V4ncNjKR\\\\1V4ncNjKR\\\\1V4ncNjKR\\\\1U4ocNkKQ\\\\1U4ocNkKQ\\\\1T4PdNlKP\\\\1T4PdNlKP\\\\1S4QdNnKn[1R4RdNnKn[1R4RdNnKn[1Q4SdNoKm[1Q4SdNoKm[1P4TdNQLk[1o3UdNQLk[1n3VdNRLj[1m3WdNSLi[1l3XdNTLg[1l3ZdNTLf[1j3\\\\dNWLc[1h3^dNXLb[1g3_dNYLa[1e3adN\\\\L^[1c3cdN]L][1b3ddN^L\\\\[1a3edN`LZ[1_3gdNaLY[1^3hdNbLX[1^3hdNbLX[1]3idNdLV[1\\\\3jdNdLV[1[3kdNeLU[1[3kdNeLU[1[3kdNfLT[1Y3mdNgLS[1Y3mdNgLR[1Z3ndNfLR[1Y3odNhLP[1X3PeNhLP[1X3PeNhLP[1W3QeNjLmZ1W3SeNiLmZ1W3SeNiLlZ1W3UeNjLjZ1V3VeNjLiZ1V3XeNjLgZ1V3ZeNjLeZ1W3[eNjLdZ1U3]eNkLbZ1U3_eNkLaZ1U3_eNkL`Z1U3aeNkL_Z1U3aeNlL^Z1T3beNlL^Z1T3beNlL^Z1T3beNlL^Z1S3ceNmL]Z1S3ceNmL\\\\Z1T3deNmL[Z1S3feNlLZZ1T3feNlLZZ1T3feNlLZZ1T3feNmLYZ1S3geNmLYZ1S3geNmLYZ1S3geNmLYZ1S3geNmLYZ1T3feNmLXZ1T3heNlLXZ1T3heNlLXZ1U3geNkLYZ1U3geNkLYZ1V3feNjLZZ1V3geNjLXZ1W3geNiLYZ1W3geNiLXZ1Y3heNfLXZ1[3geNfLXZ1Z3h
eNfLXZ1[3heNdLXZ1]3geNcLYZ1^3feNcLYZ1_3feN`LZZ1a3eeN_L[Z1d3beN\\\\L^Z1h3_eNWLaZ1m3]eNQLcZ1R4[eNmKeZ1V4YeNjKfZ1X4YeNgKgZ1[4^eN^KbZ1e4heNoJWZ1Z5ieN]JWZ1e5jeNXJVZ1j5ndNWIEo0\\\\[1m5gdNbIVOI4g0o[1Q6`dNoI@0P\\\\1]3[dN]OEVMo[1Z3cdN\\\\O^OZMo[1X3hdN\\\\OYO[Mo[1W3ldN\\\\OUO]Mn[1W3PeN[ORO^Mm[1V3SeN[OQO^Mm[1U3UeN[OoN`Mk[1T3YeNZOmNbMi[1R3_eNYOjNdMf[1Q3eeNXOfNfMe[1P3jeNWObNiMc[1m2RfNVO[NmMc[1j2WfNWOWNoMa[1h2]fNVORNRNa[1f2afNVOnMTNa[1e2dfNUOkMVNb[1c2ffNUOhMXNb[1b2ifNTOeM[Na[1`2lfNTOcM\\\\Na[1_2ofNSO`M^Na[1^2SgNQO\\\\MaNa[1]2XgNnNWMfN`[1Z2_gNlNQMjN`[1Y2mgN`NdLVO_[1X2XhNYNYL_O`[1U2\\\\hNYNTLC_[1S2ahNWNPLF_[1R2dhNUNnKI^[1Q2ghNTNjKL_[1P2hhNSNiKM_[1o1khNRNfKO`[1n1khNRNdK1a[1l1mhNQNcK3a[1j1ohNQN_K6b[1h1QiNPN\\\\K:d[1d1TiNoMWK>e[1b1ZiNjMPKf0g[1\\\\1_iNgMmJn0e[1X1TjNkNkU1S1VjNnNkU1o0VjNROkU1l0UjNUOkU1h0XjNXOiU1d0YjN]OhU1>VjNHkU13UjN1mU1JRjN:PV1_OUjNa0PV1jN_jNW1U]10O2O00000O100000000000000001O0000000000000010O000001O000000001O00000000001O0000000000001O000000001O00001O0000001O0000000O101O00001O00000000000O2O000000000000001O0000001N100000001O00000000001O000O101O000000001O0000000O101O000000000000001O000O100000001O000000001O0000001O00001O00000000001O0000001O00000O2O0000001O0O10001O00001N101O001O0O2O1N2N2N6E]ZNNmdbi0\"}}, {\"image_id\": 157, \"category_id\": 1, \"bbox\": [0.0, 1230.0, 318.0, 169.0], \"score\": 1.0, \"association_id\": 2, \"light\": [-2.1947858333587646, -1.7275543212890625, 1.998911738395691, 1.5341136455535889], \"segmentation\": {\"size\": [1728, 2304], \"counts\": 
\"UY1f0Pe1?G7I5M3M3L3X\\\\NTO[b1n0_]NYO]b1j0`]NYO^b1i0a]NXO\\\\b1j0c]NWO[b1l0d]NTO[b1m0d]NUOZb1l0f]NTOZb1l0f]NTOXb1n0h]NROXb1[2O10bNi]NSOVb1n0j]NROVb1m0k]NSOUb1m0j]NTOUb1m0k]NSOUb1l0l]NTOTb1l0l]NTOTb1l0l]NTOSb1l0m]NUOSb1k0m]NUORb1l0m]NUOSb1k0m]NUOSb1k0m]NUOSb1\\\\2O_Nn]NVORb1j0n]NVORb1j0n]NVORb1j0n]NVOQb1j0o]NWOQb1i0o]NWOQb1i0o]NWOQb1h0P^NXOPb1h0P^NXOoa1i0Q^NWOoa1h0Q^NYOoa1g0Q^NYOoa1g0Q^NYOoa1h0P^NXOPb1h0P^NXOPb1\\\\2O1000000000000000000001O000ZNP^N\\\\OPb1d0P^N[OQb1e0o]N[OQb1Z20000000ZNo]N]OQb1Y20000O100O100OYNR^N^Ona1b0R^N^Oma1b0T^N^Ola1b0S^N_Oma1`0T^N@ka1`0V^N@ja1`0V^N@ia1a0V^N@ja1`0V^NAha1?X^NBga1?Y^NAga1?X^NBga1>Z^NBfa1>Y^NCga1=Y^NCfa1=[^NCea1=Z^NDea1=[^NCda1=]^NCba1>^^NBaa1>_^NC`a1>`^NB_a1>b^NB]a1?c^NA]a1?b^NB]a1>d^NA\\\\a1`0d^N@\\\\a1?e^NBYa1?f^NBZa1=g^NCXa1=i^NCVa1=j^NDVa1<j^NDUa1=k^NCUa1<k^NETa1<l^NDTa1<l^NDSa1<m^NESa1;m^NERa1;o^NEQa1:P_NEPa1<P_NDPa1;Q_NEo`1;Q_NEn`1<R_NDn`1;S_NEl`1<T_NDl`1<S_NEl`1<T_NDl`1;U_NEk`1;U_NEj`1<V_NDj`1<V_NDj`1<V_NDi`1=W_NCi`1=W_NCh`1?W_NAh`1`0X_N@g`1a0Y_N_Of`1c0Y_N]Of`1d0[_N[Oe`1f0Z_NZOe`1g0[_NYOe`1g0\\\\_NXOc`1j0]_NUOc`1l0\\\\_NTOc`1o0\\\\_NPOd`1Q1\\\\_NnNd`1T1Z_NlNe`1V1[_NiNe`1W1[_NiNe`1X1[_NgNe`1Z1Z_NfNf`1Z1Z_NfNf`1[1Z_NdNe`1^1\\\\_N`Nd`1c1]_NYNc`1j1k_NeMU`1^2j_N`MV`1h2c_NWM]`1k2b_NTM^`1m2b_NQM_`1V3[_NiLe`1Z3Y_NeLg`1Z40000000YO\\\\_NRLd`1o3^_NnKc`1Q4^_NnKb`1R4__NmKa`1S4__NlKb`1T4__NkKb`1U4]_NkKc`1U4]_NkKc`1U4^_NjKc`1U4]_NkKc`1U4^_NjKc`1U4__NhKb`1X4`_NfK``1Z4`_NfK``1Z4a_NeK``1Z4`_NfK``1Z4a_NeK_`1[4a_NeK_`1[4a_NdK``1c2P_NfM0O0j0`0nN``1a2S_NfM2I1P1:PO``1`2U_NdM;k00QOa`1^2V_NdM21Jl0`a1^1f^NcMOY1[a1S1P_NnNPa1Q1P_NSN^O;ca1a1o^NVN@6ba1b1o^NYNEN\\\\a1h1P_N\\\\NHEYa1n1o^N^NJ@Ya1P2o^NaNfa1]1[^NcN=\\\\OP`1o1e_NfN9^OP`1j1i_NiN5^OR`1g1j_NPOJ_O\\\\`1^1m_N7S`1FP`N;o_1BT`N>l_1@V`N`0j_1_OV`Nb0j_1]OW`Nc0i_1\\\\OY`Nc0g_1]OY`Nc0g_1]OY`Nc0g_1]OY`Nc0g_1]OY`Nb0g_1^OZ`Nb0f_1^OZ`Nb0f_1^OZ`Nb0f_1^OZ`Nb0f_1^OZ`Nb0f_1^O[`Na0e_1_O[`Na0e_1_O[`Na0e_1@Y`Na0g_1_OY`Na0g_1@X`N?i_1AW`N?i_1BV`N>j_1CU`N=k_1CU`N=k_1DT`N<l_1DT`N<l_1ET`N:l_1GS`N9m_1HR`N8n_1Ko_N4R`1Ok_
N1U`11i_NOW`12h_NNX`13g_NMZ`13f_NLZ`17c_NI^`19__NGb`1:\\\\_NFd`1;[_NDg`1<X_NDh`1c0Q_N]OPa1c0o^N\\\\ORa1d0n^N[OSa1f0l^NZOUa1e0k^NZOVa1g0j^NXOWa1g0i^NYOXa1fNb_N`0UOi0[a1bNj_Nb0hNl0`a1\\\\Nn_Nh0`Nk0Zc1UOd\\\\Nk0_c1i02M3M3M4M3_NR\\\\Nk0Pd1TOP\\\\Nl0Rd1ROn[Nn0Vd1mNk[NR1Wd1mNi[NS1Xd1kNh[NU1Zd1iNg[NW1Zd1gNg[NW1bd1O1O10000O01000O10000N2M3N2O1O1O1O100O100O100O1O1O1O1O10000O100O2O000O100O100O100O100O2O0O101N100O2N100O2N[RfX3\"}}, {\"image_id\": 157, \"category_id\": 1, \"bbox\": [1097.0, 771.0, 150.0, 188.0], \"score\": 0.7096657752990723, \"association_id\": 5, \"light\": [-1.576255202293396, -2.310725688934326, 1.5029566287994385, 2.1585428714752197], \"segmentation\": {\"size\": [1728, 2304], \"counts\": \"]Pli1S3bb1=N1O0O1M3N20O01E_LW^Na3ca1a0O010L5I7M2l^N^Ki`1d4V_N]Ki`1c4W_N]Kg`1e4Y_N[Kd`1i4[_NWKc`1l4\\\\_NTKc`1m4]_NSK[`1V5d_NjJ[`1W5f_NhJZ`1Y5e_NgJZ`1Z5f_NfJY`1[5g_NeJY`1\\\\5f_NdJZ`1\\\\5f_NdJZ`1\\\\5f_NdJZ`1\\\\5f_NcJZ`1^5f_NbJY`1^5g_NdJX`1\\\\5h_NdJX`1[5h_NfJW`1Z5i_NgJW`1X5i_NiJV`1a500O100000000O^Om_NZKS`1d4n_N\\\\KR`1d4n_N\\\\KR`1d4n_N[KS`1e4m_N[KS`1e4m_N[KS`1d4n_N\\\\KR`1f4l_NZKT`1Y5000000001O001O1O1O1O1O2N3M3M8H2N1O1O1O1O1En^NfKUa1W4k^NjKXa1Q4j^NnKYa1l3j^NTLWa1j3j^NVLVa1j3j^NVLWa1i2i^NfM1AZa1[2X_NdME1Xe1000001O001O001N10000000000O10M3O1N2O1O1N20O010000O1000000000000000O100000000000000000000000000000000O100N2000000001O1O0000001O00000001O00bhfg1\"}}, {\"image_id\": 158, \"category_id\": 1, \"bbox\": [458.0, 439.0, 190.0, 180.0], \"score\": 0.9999023675918579, \"association_id\": 2, \"light\": [-1.6564323902130127, -2.2807235717773438, 1.516963005065918, 2.0215904712677], \"segmentation\": {\"size\": [768, 1024], \"counts\": 
\"oog:i2Re09J4N1O1O1O2N1O0O20O010O000010O0001O001O001O00O100O100O1O100O10O01O1O1O010O01N1Oc[OeLXd0[3i[OeLWd0Y3k[OgLSd0Z3m[OgLTd0W3m[OiLTd0U3m[OkLTd0S3m[OmLTd0Q3m[OoLTd0n2n[ORMSd0m2l[OUMSd0j2n[OVMSd0i2m[OWMSd0i2m[OWMTd0h2l[OXMTd0h2l[OXMTd0h2l[OXMTd0h2l[OXMUd0g2k[OYMTd0h2k[OYMUd0h2j[OXMVd0h2j[OXMUd0j2j[OVMVd0j2j[OVMVd0k2i[OUMVd0l2j[OTMVd0m2i[OTMVd0l2j[OTMUd0m2k[OSMUd0n2j[ORMVd0n2j[ORMVd0n2j[ORMUd0P3j[OPMVd0P3j[OPMUd0Q3k[OoLUd0R3j[OnLVd0S3i[OmLWd0S3i[OmLVd0U3i[OkLWd0U3i[OkLWd0V3h[OjLXd0V3h[OkLWd0U3j[OjLVd0W3i[OiLVd0X3j[OhLVd0X3j[OhLVd0X3j[OhLUd0Y3k[OgLTd0Z3l[OfLSd0[3m[OeLRd0e300O10Fo[OeLPd0[3Q\\\\OeLnc0[3S\\\\OeLlc0[3T\\\\OfLkc0[3U\\\\OeLic0\\\\3X\\\\OdLhc0\\\\3X\\\\OdLgc0\\\\3Z\\\\OdLec0]3[\\\\OcLcc0_3\\\\\\\\ObLcc0^3^\\\\ObL`c0`3`\\\\O`L_c0`3b\\\\O`L^c0`3b\\\\O`L]c0`3d\\\\O`LZc0a3g\\\\O_LXc0a3i\\\\O_LUc0c3k\\\\O]LTc0c3m\\\\O]LRc0d3n\\\\O\\\\LRc0d3n\\\\O\\\\LQc0e3o\\\\O[Lob0g3Q]OZLmb0g3S]OYLlb0h3T]OXLkb0i3U]OWLkb0i3U]OWLjb0j3V]OVLib0k3W]OULgb0m3Y]OSLfb0n3Z]ORLeb0o3[]OQLdb0P4\\\\]OPLbb0R4^]OnK^b0W4a]OiK\\\\b0Z4d]OfKZb0]4e]OcKYb0`4f]O`KXb0c4g]O]KXb0d4i]O[KVb0g4i]OYKWb0h4h]OXKXb0i4g]OWKYb0j4f]OVKZb0l4e]OTKZb0m4f]ORKZb0o4e]OQK[b0P5e]OoJ\\\\b0Z51O2N3WM^]ONgb0L`]ONdb0L_]O3bb0L_]O3db0I^]O6eb0E^]O:gb0_O\\\\]O?jb0ZOY]Oe0jb0WOX]Od0XOPNac0Z1Y]Oe0lb0YOU]Oh0kb0VOW]Oi0kb0UOU]Ok0Qc0nNP]OR1Sc0kNm\\\\OU1Tc0iNm\\\\OW1Tc0hNl\\\\OX1Tc0gNm\\\\OY1Tc0fNl\\\\OZ1Vc0dNj\\\\O\\\\1Xc0bNh\\\\O^1Zc0_Ng\\\\Oa1Zc0^Nf\\\\Ob1[c0\\\\Nf\\\\Od1\\\\c0YNf\\\\Of1]c0SNg\\\\Om1fd001O00001O1O1O1O1O1O1O001N3N2N3M3L3N2M4Fa0@l]i8\"}}, {\"image_id\": 158, \"category_id\": 1, \"bbox\": [663.0, 421.0, 355.0, 195.0], \"score\": 0.9980732798576355, \"association_id\": 1, \"light\": [-2.3932900428771973, -1.7175476551055908, 2.3058712482452393, 1.5638432502746582], \"segmentation\": {\"size\": [768, 1024], \"counts\": 
\"Tka?1ng010000000000O10000aN1mZOOPe09kZOGSe0<lZODSe0>lZOCRe0?mZOAQe0a0oZO_OPe0d0nZO\\\\OSd08[[Oa0a0WOPd0i1o[OWNnc0l1R\\\\OVNjc0m1U\\\\OSNjc0n1V\\\\ORNic0P2V\\\\OPNjc0P2V\\\\OPNic0R2V\\\\OnMjc0S2U\\\\OmMjc0U2U\\\\OkMkc0V2T\\\\OjMlc0V2T\\\\OjMkc0X2T\\\\OhMlc0X2T\\\\OhMlc0Y2S\\\\OgMlc0[2S\\\\OeMmc0\\\\2R\\\\OdMnc0^2P\\\\ObMoc0a2o[O_MQd0b2n[O^MRd0c2m[O]MRd0e2m[O[MSd0f2l[OZMTd0f2m[OYMSd0h2l[OXMSd0i2m[OWMSd0j2l[OVMTd0j2l[OVMTd0j2l[OVMSd0k2m[OUMSd0k2m[OUMSd0k2m[OUMSd0k2m[OUMRd0l2n[OTMRd0l2n[OTMRd0l2n[OTMRd0l2n[OTMQd0m2o[OSMRd0l2n[OTMRd0l2n[OTMSd0k2m[OUMSd0k2m[OUMSd0k2n[OTMSd0k2m[OUMSd0k2b[ORM83Wd0j2a[OSM83Wd0j2a[OTM72Xd0i2b[OUM62Yd0g2a[OXM61Zd0d2b[O[M41Zd0`2f[O_M01[d0]2f[OcM0N[d0^2f[OeMNM\\\\d0]2g[OfMMM]d0\\\\2e[OhMNL]d0\\\\2d[OiMOK]d0[2e[OjMOJ\\\\d0\\\\2d[OkMgd0U2Y[OkMgd0U2X[OlMhd0S2Y[OmMgd0S2Y[OmMgd0S2Y[OnMfd0R2Z[OnMfd0S2Y[OmMgd0S2X[OnMhd0R2X[OnMhd0R2X[OnMgd0S2Y[OmMgd0S2Y[OmMgd0T2X[OlMgd0U2Y[OkMgd0U2Y[OkMfd0W2Y[OiMfd0X2Z[OhMfd0X2Z[OhMed0Z2Z[OfMfd0[2Y[OeMgd0[2Y[OeMfd0]2Y[OcMgd0]2Y[OcMgd0^2X[ObMhd0^2X[ObMgd0_2Y[OaMgd0`2X[O`Mhd0`2X[O`Mgd0a2Y[O_Mgd0a2Y[O_Mfd0b2Z[O^Mcd0e2][O[M`d0h2`[OXM^d0j2b[OVM\\\\d0l2d[OTM[d0m2e[OSMZd0m2g[OSMWd0o2i[OQMUd0Q3k[OoLRd0S3o[OmLmc0W3S\\\\OiLjc0Y3W\\\\OgL_b0J`^Oa3QOeL[b00`^O\\\\3VOdLYb02_^O[3XOcLYb03\\\\^O[3\\\\ObLWb06Y^OY3AaLUb09V^OX3E_LSb0=P^OY3NZLQb0h4P^OXKPb0f4R^OZKma0g4S^OXKna0g4S^OYKma0f4T^OZKma0e4S^O[Kma0d4T^O\\\\Kma0b4T^O^Kma0`4T^O`Kma0^4T^ObKla0]4U^OcKla0\\\\4T^OdKma0[4S^OeKna0Z4R^OfKVb0S4i]OmK_b0k3a]OUL`b0k3_]OULbb0j3^]OVLcb0i3]]OWLdb0i3[]OWLfb0h3Z]OXLhb0g3W]OYLjb0g3U]OYLnb0e3Q]O[LQc0d3n\\\\O\\\\LTc0c3l\\\\O\\\\LUc0e3i\\\\O[LYc0c3g\\\\O]LZc0c3e\\\\O]L\\\\c0c3c\\\\O]L^c0c3a\\\\O]Lac0b3^\\\\O^Ldc0b3Z\\\\O^Lhc0a3W\\\\O_Ljc0a3U\\\\O_Llc0`3U\\\\O_Llc0a3S\\\\O^Loc0i32GS\\\\O[Loc0d38N1gMe[Ol0[d0QOi[Om0Xd0QOk[Om0Vd0ROk[On0Td0QOn[On0Td0nNo[OQ1Sd0jNQ\\\\OU1Qd0fNS\\\\OY1oc0cNU\\\\O[1mc0aNV\\\\O^1lc0_NV\\\\O`1lc0\\\\NV\\\\Od1Se001O00000000001O00000000000000000000001O01O000001N2O1O001O001O001O001N2OeYObNUf0]1jYOeNWf0X1eYOmN\\\\f0Q1eYOoN\\\\f0m0fYOTO
[f0i0gYOWOZf0d0jYO\\\\OXf0<nYODTf02TZOOmf010000000000000000000000000000000000000000000001O00000000000001O000000000000000000000000000000000001O0000000000000000000000000001O000000000000000001O01O00000000001O0000000000001O000000000000001O0000000000000000000000000000001O000000000000000000000000000000001O000O2O0OTm3\"}}, {\"image_id\": 158, \"category_id\": 1, \"bbox\": [150.0, 399.0, 302.0, 214.0], \"score\": 0.9974390268325806, \"association_id\": 3, \"light\": [-2.071864128112793, -1.8611223697662354, 1.8346160650253296, 1.7124903202056885], \"segmentation\": {\"size\": [768, 1024], \"counts\": \"\\\\Qa31Xg0R1SOe0G8I6I6K6M3M3M4M2O1N2O0O2O1N2O1O2M2O0O2O0O1O1O100O1O1O1O0O2M2O1O4K4J7\\\\\\\\OTLka0d5C=N2M2M3O1O100O10000000000000000000000000O100lNW_OPKj`0j4i_OhJX`0V5Z1O100O1O1N2O1O1000001O000001O1N2O2N001O1O1O2O1N1O1O1O2M5L4L1O2N0001Oo]OUJma0o501O0000000000000001N2gJP^O`4Sb0TK]^O_4@[KVb00_^Ob4\\\\O^KWb0La^Oc4YOaKRc0]4o\\\\OcKRc0\\\\4n\\\\OeKSc0Y4m\\\\OgKTc0W4m\\\\OiKTc0V4l\\\\OjKUc0U4k\\\\OkKVc0T4j\\\\OlKXc0R4h\\\\OnK[c0P4e\\\\OoK]c0o3c\\\\OQL^c0n3c\\\\OQL_c0n3`\\\\ORLac0n3_\\\\ORLbc0m3]\\\\OSLfc0k3Y\\\\OULjc0i3W\\\\OTLlc0j36000002N2O0O001N3M3N;fLk[OQ2Se0O100O1O1O2N1O010O000001O01O001N3N2M2O1O1N3M`1aNL4O100O100000000O100000000000001O000000000000000000000000000000000000000000001O000000000001O00000000000000000000000000000001O000001O00000000000000001O00000000001O000000000000001O0000001O0000001O000000001O0000000000001O00000000000000000001O00000000000000000000000001O0O1000001O0O[]\\\\=\"}}, {\"image_id\": 159, \"category_id\": 1, \"bbox\": [955.0, 324.0, 139.0, 107.0], \"score\": 0.9999650716781616, \"association_id\": 1, \"light\": [-2.0696229934692383, -2.187757968902588, 1.9556372165679932, 1.9482436180114746], \"segmentation\": {\"size\": [1088, 1422], \"counts\": 
\"hQgo0T16mN\\\\P1`1L4K4L3K5M2O2N2M2N2O100O1M3M3O1O0O1O2O001O1O001O1O1O1O1PQOSMln0n2TQORMln0Q3O2N100O100O100O10KYQOQMfn0o2ZQORMen0n2\\\\QORMdn0n2[QOSMen0l2[QOUMen0i2\\\\QOXMdn0e2^QO\\\\Mbn0c2_QO\\\\Mbn0c2_QO]Man0b2`QO^M`n0a2aQO_M_n0a2aQO_M_n0`2bQO`M^n0`2bQO`M^n0`2bQO`M^n0_2cQOaM]n0_2cQOaM]n0_2cQOaM]n0_2cQO`M^n0`2bQO`M^n0`2bQO`M^n0`2bQO`M^n0`2bQO`M]n0b2bQO^M_n0a2aQO_M_n0a2aQO_M_n0a2aQO_M_n0a2aQO_M_n0a2aQO_M_n0a2aQO_M_n0a2aQO_M_n0a2aQO^M`n0b2`QO^M`n0b2_QO_Man0a2_QO_Man0`2`QO`M`n0`2`QO`M`n0`2`QO`M`n0`2`QO`M`n0`2`QO`M`n0`2`QO`M`n0`2`QO`M_n0a2aQO^M`n0b2`QO^M`n0b2`QO^M`n0b2_QO_Man0a2_QO_Man0b2^QO^Mbn0b2^QO^Mbn0b2^QO^Mbn0c2]QO]Mcn0d2\\\\QO\\\\Mdn0e2[QO[Men0e2[QOZMfn0g2YQOYMgn0g2YQOYMgn0h2YQOWMhn0i2WQOWMin0j2VQOVMjn0j2VQOVMjn0j2VQOVMjn0j2VQOVMjn0j2WQOUMin0l2VQOTMjn0l2VQOTMjn0Q30JVQOVMjn0j2VQOVMjn0j2VQOVMjn0j2VQOUMkn0k2UQOUMkn0k2UQOUMkn0P3000000001O00000O10001O00000O100KRQOXMnn0g2SQOYMnn0d2TQO[Mmn0a2=@?J7M2N4K;POaoNGhP12h0GVUl:\"}}, {\"image_id\": 159, \"category_id\": 1, \"bbox\": [559.0, 478.0, 626.0, 435.0], \"score\": 0.9999997019767761, \"association_id\": 2, \"light\": [-2.5104336738586426, -2.05935001373291, 2.3719472885131836, 1.8472962379455566], \"segmentation\": {\"size\": [1088, 1422], \"counts\": 
\"Ybbb0a3Yn0c0@:F=Dd0\\\\O5L3M3M3N2M3N2M3M3M3M3M3M2N2M3N2M3N2N1lUORIUi0P7fVOYITi0i6iVO\\\\ISi0e6lVO]IRi0e6lVO]ISi0c6lVO_IRi0c6lVO^ITi0b6kVO`ISi0b6lVO_ISi0b6lVO^ISi0c6lVO_IRi0c6mVO]ISi0d6lVO\\\\ISi0e6mVO\\\\IPi0g6oVOYIoh0i6QWOWImh0l6QWOVIlh0l6TWOTIjh0o6UWORIgh0Q7YWOoHeh0S7[WOnHch0T7\\\\WOlHch0U7]WOlHbh0T7^WOlHah0U7_WOlH`h0Z8O100O2O0O100O2O0O100O1O1O1N2M3M3M3N3M2O100O1O100O100O100O`N`XObH_g0]7cXOdH[g0\\\\7eXOeHZg0Y7iXOgHUg0Y7lXOhHSg0U7QYOkHhf0R7aYOoHVf0W7mYOiHRf0V7oYOlHoe0S7RZOnHme0R7TZOnHke0R7UZOoHje0Q7UZOQIje0o6VZORIje0m6UZOUIje0k6TZOXIle0h6RZOZIme0f6SZO\\\\Ile0c6TZO^Ike0c6UZO]Ije0c6VZO^Ije0b6VZO^Iie0b6XZO^Ige0c6YZO]Ide0e6\\\\ZO\\\\I`e0h6`ZOXI\\\\e0k6eZOUIXe0n6hZORIVe0P7jZOPIUe0Q7jZOPIUe0P7lZOPISe0Q7mZOoHRe0R7mZOoHRe0Q7oZOPIPe0P7P[OPIod0Q7P[OPIPe0P7oZOQIPe0P7oZOQIQe0o6nZORIQe0o6nZORIRe0n6mZOSIRe0n6nZORIQe0o6nZORIRe0n6nZORIQe0o6oZOQIPe0P7oZOQIPe0P7P[OPIod0Q7Q[OoHod0Q7Q[OoHnd0R7Q[OoHod0Q7Q[OoHnd0R7R[OnHnd0R7R[OnHnd0R7R[OnHmd0S7S[OmHmd0S7R[OnHnd0R7R[OnHmd0S7S[OmHmd0S7S[OmHmd0S7S[OmHmd0T7R[OlHnd0T7Q[OmHnd0T7R[OlHnd0T7R[OlHnd0T7R[OlHnd0T7Q[OmHod0T7P[OlHPe0T7P[OlHod0U7Q[OkHod0U7Q[OkHod0V7P[OjHPe0V7oZOkHPe0V7P[OjHPe0V7P[OjHPe0W7oZOiHQe0W7oZOjHod0W7Q[OiHod0W7Q[OiHod0X7P[OhHod0Y7Q[OgHod0Y7Q[OgHod0Z7P[OfHPe0Z7P[OfHod0[7Q[OeHod0[7Q[OeHnd0]7Q[OcHod0]7Q[OcHod0]7P[OdHod0]7Q[OcHod0]7Q[OcHod0W:O10000O100000000O101O000000000000000O10001O000000000000000000001O000000000000000000000000000000000000001O00000000000000000000000000000000001O000000000000000000000000000000000000000000000000000000000000000000WMZ[OXHfd0h7Z[OXHfd0h7Z[OXHfd0h7[[OWHed0i7[[OWHed0i7\\\\[OVHdd0j7\\\\[OVHdd0j7\\\\[OVHcd0k7^[OTHbd0l7^[OTHbd0m7][OSHcd0m7^[ORHbd0n7^[ORHbd0o7][OQHcd0o7][OQHcd0P8][OoGcd0R8\\\\[OnGdd0R8\\\\[OnGdd0S8\\\\[OlGdd0U8[[OkGed0h:00000bM\\\\[O`Gdd0a8][O^Gbd0c8^[O\\\\Gbd0e8^[OZGbd0f8^[OZGbd0g8^[OXGbd0h8_[OWGad0j8^[OVGbd0j8^[OVGbd0k8^[OTGbd0l8_[OSGad0n8^[ORGbd0n8_[OQGad0P9^[OPGbd0Q9^[OnFbd0S9][OmFcd0S9^[OlFbd0U9][OkFcd0V9][OiFcd0W9^[OhFbd0Y9^[OgFad0Y9_[OgFad0Y9`[OfF`d0[9`[OdF`d0\\\\9`[OdF`d0]9`[ObF`d0
^9a[OaF_d0_9b[OaF]d0_9c[OaF]d0_9d[O`F\\\\d0`9d[O`F\\\\d0`9e[O_F[d0a9e[O_F[d0a9e[O_F[d0a9e[O`FZd0`9f[O`FZd0`9f[O`FZd0`9f[O`FZd0`9f[O`FZd0a9e[O_F[d0a9d[O`F\\\\d0`9d[O`F\\\\d0`9d[O`F\\\\d0`9d[O`F\\\\d0`9c[OaF]d0_9c[OaF]d0_9c[OaF]d0_9c[OaF]d0_9c[OaF]d0_9b[OcF]d0\\\\9d[OdF\\\\d0\\\\9d[OdF\\\\d0\\\\9d[OdF\\\\d0\\\\9c[OeF]d0[9c[OeF]d0[9c[OeF]d0[9c[OeF^d0Z9a[OgF_d0Y9a[OgF_d0X9b[OhF^d0X9a[OiF_d0W9a[OiF_d0W9`[OjF`d0V9`[OjF`d0V9_[OkFad0Z;00000000000001O0000000000000000000000000000000000000000000010O0000000000001O0hM_[OQGad0P9_[OoFad0Q9`[OnF`d0R9`[OnF`d0S9a[OkF`d0T9a[OkF_d0U9c[OjF\\\\d0V9i[OeFWd0\\\\9k[OaFUd0_9n[O^FRd0c9o[O[FQd0e9Q\\\\OYFPd0g9P\\\\OXFPd0i9o[OWFQd0i9P\\\\OVFPd0k9o[OUFQd0l9n[OUFQd0l9o[OSFQd0n9n[ORFRd0o9m[OQFTd0o9k[OQFUd0o9l[OPFTd0Q:k[OoEUd0Q:k[OoEUd0R:k[OmEUd0S:k[OmEVd0S:i[OmEWd0S:j[OlEVd0T:j[OlEWd0S:j[OlEVd0T:k[OkEUd0U:k[OkEVd0T:k[OkEUd0U:k[OkEUd0U:k[OkEVd0T:k[OkEUd0V:j[OjEVd0V:k[OiEVd0V:j[OjEVd0V:j[OjEVd0V:k[OiEVd0V:j[OjEVd0V:j[OjEVd0V:j[OjEVd0V:j[OjEVd0V:j[OjEWd0U:i[OkEWd0U:i[OkEWd0U:i[OkEWd0U:h[OlEYd0S:g[OmEYd0S:g[OmEYd0S:g[OmEZd0R:f[OnEZd0R:f[OnEZd0Q:g[OoEZd0P:f[OPFZd0P:f[OPF[d0o9e[OQF[d0o9e[OQF\\\\d0n9c[OSF^d0l9b[OTF^d0l9b[OTF_d0k9a[OTFad0k9_[OUFbd0j9^[OVFbd0i9_[OWFbd0h9^[OXFbd0h9][OYFdd0f9\\\\[OZFdd0f9\\\\[OZFed0d9[[O]Fed0c9[[O]Ffd0a9Z[O`Ffd0_9[[O`Fgd0_9X[ObFid0\\\\9U[OgFkd0W9S[OmFnd0P9P[OTGQe0h8Q[OYGPe0c8R[O^God0^8S[OcGnd0Y8U[OgGld0V8U[OkGld0R8V[OnGkd0o7V[ORHkd0k7W[OUHjd0i7V[OXHkd0e7W[OZHjd0e7V[O\\\\Hkd0b7U[O_Hkd0_7V[ObHkd0\\\\7U[OeHkd0Y7V[OhHkd0V7U[OjHmd0S7S[OoHmd0o6T[ORImd0j6nZO^ISe0`6_ZOoIbe0n5WZO[Jje0c5TZO_Joe0^5PZOdJSf0X5mYOiJVf0S5jYOnJYf0n4gYOSK\\\\f0i4eYOVK]f0h4cYOYK^f0e4cYO[K^f0c4bYO]K`f0a4aYO_K`f0`4`YO`Kaf0_4_YOaKbf0^4]YObKef0]4[YOcKff0\\\\4ZYOdKgf0[4YYOdKif0[4WYOeKjf0Z4UYOgKlf0X4TYOgKof0W4QYOiKQg0V4nXOiKVg0T4jXOkKZg0R4eXOoK^g0n3bXOQL`g0n3`XORLag0m3_XOSLbg0l3]XOTLeg0k3[XOULfg0j3ZXOVLgg0Q70O2O001O1O1O0O2O1O2M3N2N2M3L4L4M3M5L3L4M3M2M4M2N3L4SM]VOWMei0g2[VOYMgi0e2YVO[Mii0b2XVO]Mli0`2TVO`Moi0\\\\2RVOdMQj0X2PVOgMVj0S2kUOlMZj0P2fUOoM_j0l1bUOSNaj0j1`UOTN
dj0j1\\\\UOTNgj0j1ZUOSNkj0j1VUOUNlj0j1TUOUNnj0i1SUOVNoj0h1RUOXNoj0f1RUOYNPk0f1PUOZNPk0e1QUOZNRk0c1oTO]NTk0^1nTObNUk0k0[UOTOij0d0\\\\UO\\\\Ohj0;]UOEgj02^UOMhj0I]UO7[k0[N[UOd1am0O1O1O1O2M2M3N2N2N2O100O100O10000O2O000O101O0O1O2O1N1O2N2N1N3N2N2N2N2N1O2O1N101N2O1N2K5K5M`Pk7\"}}, {\"image_id\": 159, \"category_id\": 1, \"bbox\": [1085.0, 364.0, 266.0, 204.0], \"score\": 0.9999998807907104, \"association_id\": 3, \"light\": [-1.3736239671707153, -2.4024336338043213, 1.342828392982483, 2.2709431648254395], \"segmentation\": {\"size\": [1088, 1422], \"counts\": \"UYQT1d1WP18K5K3N2N2M3J6J6I7H8M3L3H9L3M4I6UROoKem0T4ZROlKdm0Z4N1O1O2M2N3M2I7L4O1N2O2N1O1O1O100O2N100O101N1O1O1O1O1N2O1O2N1O1O1O1N2N2O1O1O1O1O1O2O0O10000O1000000000000000000000001O00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000TO\\\\TO\\\\Kdk0d4\\\\TO\\\\Kek0c4[TO]Kek0c4[TO]Kek0c4[TO]Kek0c4[TO]Kek0d4[TO[Kek0e4[TO[Kek0e4[TO[Kek0e4[TO[Kek0e4[TO[Kfk0d4ZTO\\\\Kfk0d4[TO[Kek0e4[TO[Kek0e4[TO[Kek0e4[TO[Kek0e4[TO[Kfk0d4ZTO\\\\Kfk0e4ZTOZKhk0d4XTO\\\\Kjk0c4UTO]Kok0_4RTO`Kok0`4PTO`KQl0_4oSOaKRl0_4nSO`KRl0`4nSO`KSl0V51YOmSO_KTl0a4lSO^K[l0[4gSOcKZl0]4gSOaKYl0_4hSO`KYl0`4fSO`K[l0`4dSO`K\\\\l0`4dSO`K\\\\l0a4dSO^K]l0b4bSO^K^l0c4bSO\\\\K^l0d4cSO[K]l0f4cSOYK]l0g4cSOYK]l0h4bSOXK^l0h4bSOXK_l0g4aSOYK_l0h4`SOXK`l0h4aSOWK`l0h4`SOXK`l0i4_SOWKbl0h4^SOXKdl0f4\\\\SOZKgl0d4XSO\\\\Kjl0b4VSO^Kml0^4TSObKml0]4SSOdKml0Z4SSOgKml0Y4SSOgKnl0X4RSOhKnl0W4SSOiKml0KSSOS4OSLnl0HUSOU4MSLol0FVSOU4KVLol0EVSOU4KVLol0DWSOU4KWLWm0h3iROYLWm0g3hROZLXm0e3gRO]LYm0U40000002nKlRO]2IeM]m0FTSO`2@jM[n0T2eQOmM^n0m1dQOTN^n0f1fQOZN[n0_1jQOaNXn0k0cQObN;c0Sn0?\\\\QOPOo0b0fm0;eROE[m08hROHYm04jROLVm01mROOTm0LPSO4Pm0HTSO8ml0AYSO?To000000000O10001N1O100O10000O10000O100O1O2O000O2N2NolZ2\"}}, {\"image_id\": 159, \"category_id\": 1, \"bbox\": [1255.0, 334.0, 165.0, 172.0], \"score\": 0.9999967813491821, \"association_id\": 4, \"light\": [-0.7061206698417664, 
-3.2280657291412354, 0.6906173825263977, 3.1900415420532227], \"segmentation\": {\"size\": [1088, 1422], \"counts\": \"WieY16iQ13L3K4L5N101N1O2O1N3N2M3N2M1O1O101N100O101N2O1N3N2N2N1O1N2O001O001O1O2N1O1O1O001O2M101O1O001O1O2N1O1O1O006J4L6YROSMok0P3mSOSMQl0o2lSOTMRl0o2iSOTMVl0n2`SO[M_l0g2\\\\SO\\\\Mdl0j2PSO]Mol0j300000000000OmRO\\\\Kll0d4QSOcKkl0]4USOdKjl0[4WSOfKil0Y4VSOiKil0V4XSOjKhl0V4XSOkKgl0U4YSOkKgl0T4ZSOlKfl0T4ZSOlKfl0T4ZSOlKfl0T4ZSOlKfl0T4ZSOlKfl0T4ZSOlKfl0T4ZSOmKel0S4[SOmKel0S4[SOmKel0S4[SOmKel0S4[SOmKel0S4[SOmKel0S4[SOnKdl0R4\\\\SOnKdl0R4\\\\SOnKdl0R4\\\\SOnKdl0R4\\\\SOnKdl0R4\\\\SOnKdl0R4\\\\SOnKdl0S4\\\\SOlKdl0T4\\\\SOlKdl0T4[SOnKdl0R4\\\\SOnKdl0R4\\\\SOnKdl0R4]SOmKcl0S4]SOmKbl0T4^SOlKbl0T4^SOlKbl0T4^SOlKbl0U4]SOlKbl0T4^SOlKbl0T4^SOlKbl0T4^SOlKbl0U4]SOkKcl0U4]SOkKcl0U4]SOkKcl0U4^SOjKbl0V4^SOjKbl0W4]SOiKcl0W4]SOiKcl0W4]SOiKcl0W4]SOjKbl0V4^SOjKbl0V4^SOjKbl0V4^SOjKbl0W4]SOiKcl0W4]SOiKcl0W4^SOhKcl0W4]SOiKcl0X4\\\\SOhKdl0X4\\\\SOhKdl0X4]SOgKcl0Z4\\\\SOfKdl0k40@\\\\SOdKdl0]4\\\\SObKdl0_4\\\\SO`Kdl0`4]SO_Kcl0a4^SO^Kbl0c4]SO]Kcl0c4]SO]Kcl0c4^SO\\\\Kbl0e4]SO[Kcl0e4]SOZKdl0g4\\\\SOXKdl0i4[SOWKel0i4[SOWKel0i4[SOWKel0i4[SOWKel0i4\\\\SOVKel0i4ZSOXKfl0Q50000001O002N1O1O2M3N5BkROfKYm0U4kROhK[m0R4>kLTROm1Pn0iM^ROn1Qo0Gd1gM]f1\"}}, {\"image_id\": 160, \"category_id\": 1, \"bbox\": [273.0, 504.0, 225.0, 207.0], \"score\": 0.9999999403953552, \"association_id\": 3, \"light\": [-2.6989798545837402, -1.2823944091796875, 2.642576217651367, 1.1492383480072021], \"segmentation\": {\"size\": [853, 1280], \"counts\": 
\"kRT71cj02M3O0O2O00001O00000O2O000000001O0ZLER]O;mb0FS]O:lb0GT]O9ib0JX]O6^b03g\\\\OCnM:Ze04g\\\\OFlM6\\\\e05g\\\\OIjM3]e06\\\\\\\\O^OnM;81\\\\e08^\\\\O^OkM<9O\\\\e09^\\\\O_OjM<:L\\\\e0<_\\\\O4SNA]e0<^\\\\O5TN_O]e0=^\\\\O6TN]O]e0?^\\\\O5TN\\\\O^e0`0\\\\\\\\O5VN\\\\O\\\\e0a0\\\\\\\\O5WNZO\\\\e0c0[\\\\O5XNXO\\\\e0e0S\\\\O<`NoN\\\\e0g0R\\\\O<aNmN[e0k0P\\\\O<dNiNYe0n0Q\\\\O=dNdNYe0R1Q\\\\Ol0mc0]Ok[Od0Ud0]Oi[Od0Vd0]Oh[Oe0Xd0\\\\Of[O>VOfMUe0m1d[O?_d0C_[O>ad0C^[O>`d0C_[O>`d0D][O>cd0BZ[Oa0ed0AX[Oa0gd0@nZOl0od0^2O1N2N2O1O1O100O1O1O1O1O1O100O10000O10000O10Oc[OQJUd0P6j[ORJUd0n5k[ORJTd0o5l[OQJTd0o5k[ORJTd0o5l[OQJTd0n5l[OSJSd0n5l[OSJTd0m5k[OTJUd0l5k[OTJUd0k5k[OVJUd0i5k[OXJUd0T60000O[Ol[OkJTd0S5n[OmJRd0R5n[OoJRd0P5o[OPKQd0o4o[ORKQd0m4P\\\\OSKPd0l4Q\\\\OTKoc0l4Q\\\\OTKoc0k4R\\\\OUKnc0j4S\\\\OVKmc0j4S\\\\OVKmc0i4T\\\\OWKlc0i4T\\\\OWKlc0h4U\\\\OXKkc0h4U\\\\OXKkc0h4U\\\\OWKlc0i4T\\\\OWKlc0h4U\\\\OXKkc0h4U\\\\OXKkc0h4U\\\\OXKkc0h4U\\\\OXKkc0h4U\\\\OXKkc0h4U\\\\OXKkc0h4U\\\\OXKkc0h4U\\\\OXKkc0h4U\\\\OXKkc0h4U\\\\OXKkc0h4U\\\\OXKkc0h4U\\\\OXKkc0h4U\\\\OXKkc0h4U\\\\OXKkc0h4U\\\\OXKkc0i4T\\\\OWKlc0i4T\\\\OWKlc0i4T\\\\OWKlc0j4S\\\\OVKmc0j4S\\\\OVKmc0j4S\\\\OVKmc0j4S\\\\OVKmc0j4S\\\\OVKmc0j4S\\\\OVKmc0j4S\\\\OVKmc0k4R\\\\OUKnc0k4R\\\\OUKnc0k4R\\\\OUKnc0k4R\\\\OUKnc0k4R\\\\OUKnc0k4R\\\\OUKnc0k4R\\\\OUKnc0k4R\\\\OUKnc0k4R\\\\OUKnc0k4R\\\\OUKnc0l4Q\\\\OTKoc0l4Q\\\\OTKoc0l4Q\\\\OTKoc0l4Q\\\\OTKoc0l4Q\\\\OTKoc0k4R\\\\OUKnc0k4R\\\\OUKnc0k4R\\\\OUKnc0k4S\\\\OTKmc0k4T\\\\OUKlc0k4T\\\\OUKlc0j4U\\\\OVKkc0i4V\\\\OWKjc0g4Y\\\\OXKgc0e4\\\\\\\\O[Kdc0Y4h\\\\OgKXc0l3V]ORLkb0j3[]OSLfb0e3Y2N2N2N2C=F:_Oa0K5M3N2O1O1O100O1O1O1O1O100O1O1O100O1O100O1O100O1O100O10000O100O100O1O1O1O1O100O100O1O1O1O101N100O100O100O1O1O2N2M4K;YO`mZd0\"}}, {\"image_id\": 160, \"category_id\": 1, \"bbox\": [408.0, 493.0, 821.0, 303.0], \"score\": 0.9999872446060181, \"association_id\": 1, \"light\": [-1.9895079135894775, -2.75858473777771, 1.9314422607421875, 2.6240756511688232], \"segmentation\": {\"size\": [853, 1280], \"counts\": 
\"V\\\\d:1dj00O2N2O0O2O0O2O00000\\\\YO6ZLLfe0Nh]Om0Wb0ROg\\\\OOYNU1nd0lNe\\\\O7SNV1Ue0cNd\\\\O`2Yc0`Mc\\\\Oi2Yc0VMc\\\\OS3Xc0mLe\\\\OX3Yc0hLd\\\\O\\\\3\\\\c0cLb\\\\Oa3\\\\c0^Lc\\\\Oe3]c0ZLa\\\\Oj3]c0TLd\\\\Oo3od04aNmKV\\\\OU4fc0nKX\\\\OV4ec0jK[\\\\OY4ac0hK^\\\\O[4`c0eK`\\\\O]4^c0dK`\\\\O^4_c0bKa\\\\O`4]c0`Kb\\\\Ob4]c0_Kb\\\\Oc4\\\\c0]Kd\\\\Od4[c0]Kc\\\\Oe4\\\\c0[Kd\\\\Of4[c0ZKe\\\\Of4[c0[Kc\\\\Og4\\\\c0YKd\\\\Og4\\\\c0ZKc\\\\Og4\\\\c0YKc\\\\Oh4]c0XKc\\\\Oh4^c0WKb\\\\Oj4]c0UKc\\\\Ol4^c0SKb\\\\Om4_c0RK`\\\\Oo4`c0PKa\\\\OQ5_c0nJ^\\\\OU5bc0jJ\\\\\\\\OY5dc0fJY\\\\O^5hc0`JW\\\\Ob5ic0^JU\\\\Oe5\\\\d01O100O2O0O10001O0O10001O0000001O000O101O0000O010O10O1O00100a[OoIYd0Q6e[OTJXd0m5h[OTJWd0l5h[OUJXd0j5i[OVJVd0k5i[OVJWd0i5j[OXJUd0h5k[OXJTd0i5k[OXJUd0g5l[OYJTd0g5l[OYJTd0g5l[OYJTd0f5l[O[JSd0f5m[OZJSd0f5m[OZJSd0e5m[O]JRd0c5n[O]JRd0c5m[O^JSd0b5m[O^JSd0a5m[O`JSd0`5m[O`JRd0R600000000000001N1000000000000O1000001O00000O10000000001O000O100000000000001O000O10000000000000001N100000000000000O10001O0000000O1000000O101O0O100O101N100O100N3L3N2M4M2O1O1O1O1O1O2N1O1N200O100O2O000O101O001O1N2O001O001O0O10001O00000O2O000000000O1000000000000O^Om]OdHSb0]7o]O`HQb0`7P^O_HPb0b7R^O[Hna0e7S^OZHma0f7T^OYHla0h7S^OXHma0j7Q^OVHna0l7R^OSHna0o7P^OlGN2Qb0S8T^OmGla0U8S^OjGma0X8Q^OgGPb0Z8P^OeGPb0[8P^OdGQb0]831N101O1O001O001N101O1O001O001O0O101O00010O0000001O00000010O000001O0000000010O000001O0000001O0001O01O0000O100000000O10001O000O1000000O100000000O101O00000O1000000O1000000O10Q^OeGda0g80O01O1O00001O000000WO_^O_Haa0_7_^ObHaa0]7`^OcH`a0[7b^OeH^a0Z7b^OgH^a0W7c^OjH]a0U7c^OlH]a0S7c^OnH^a0P7b^OQI^a0n6b^OSI^a0l6a^OVI_a0i6`^OYI`a0n700001O000000001O00000000001O0001O0001O00000000001O0]NZ^OWJfa0i5Z^OWJfa0i5Z^OWJfa0h5[^OYJda0g5\\\\^OYJda0g5\\\\^OYJea0e5\\\\^O[Jda0e5\\\\^O[Jda0e5\\\\^O[Jda0e5\\\\^O[Jda0e5\\\\^O[Jda0d5]^O\\\\Jca0d5]^O\\\\Jca0d5]^O\\\\Jca0d5]^O\\\\Jca0d5]^O\\\\Jca0d5]^O\\\\Jca0d5^^O[Jba0e5^^O[Jca0d5]^O\\\\Jca0d5]^O\\\\Jca0d5]^O\\\\Jca0d5]^O\\\\Jca0d5]^O\\\\Jca0d5^^O[Jba0e5^^O[Jba0e5^^O[Jba0e5^^O[Jba0e5^^O\\\\Jaa
0d5`^O[J`a0e5`^O[J`a0e5`^O[J`a0e5`^O[J`a0e5`^O[J`a0e5`^O[J`a0e5`^O[J`a0e5`^O[J`a0e5`^O[J`a0e5`^O[J`a0e5a^OZJ_a0f5a^OZJ_a0f5a^OZJ_a0f5a^OZJ_a0f5a^OZJ_a0e5b^O[J^a0e5b^O[J^a0e5b^O[J_a0d5b^O[J^a0e5b^O[J^a0e5b^O[J^a0e5b^O[J^a0e5b^O[J_a0d5a^O\\\\J_a0d5a^O\\\\J_a0d5b^O[J^a0e5b^O[J^a0e5b^O\\\\J^a0c5b^O]J^a0c5b^O]J^a0c5b^O]J^a0c5b^O]J_a0b5a^O^J_a0b5a^O^J_a0b5a^O^J`a0a5`^O_J`a0a5`^O_J`a0a5`^O_Jaa0`5_^O`Jaa0`5_^O_Jba0a5^^O_Jca0`5]^O`Jca0`5]^O`Jca0`5]^O`Jca0`5]^O`Jda0_5\\\\^OaJda0_5\\\\^OaJda0_5\\\\^OaJea0^5[^ObJea0^5[^ObJea0^5[^ObJea0^5[^ObJfa0]5Z^OcJfa0\\\\5[^OdJea0\\\\5[^OcJfa0]5Z^OcJga0\\\\5Y^OdJga0\\\\5Y^OdJga0\\\\5X^OeJia0Z5W^OfJia0Z5W^OfJia0Z5W^OfJia0Z5W^OfJja0Y5V^OgJja0Y5V^OgJja0Y5V^OgJka0X5U^OhJka0X5U^OhJka0X5U^OhJla0W5T^OiJla0W5T^OiJla0W5T^OhJma0X5S^OhJna0W5R^OiJna0W5R^OiJna0W5R^OiJoa0V5Q^OjJoa0V5Q^OjJoa0V5Q^OjJPb0T5Q^OlJoa0T5Q^OlJoa0T5Q^OkJPb0U5P^OkJPb0U5P^OkJQb0T5o]OlJQb0T5o]OlJQb0T5o]OlJQb0T5o]OlJQb0T5o]OlJQb0T5o]OlJQb0T5P^OkJPb0U5P^OkJQb0T5o]OlJQb0T5o]OkJRb0T5o]OlJQb0T5o]OlJQb0T5o]OlJQb0T5o]OlJQb0T5o]OlJQb0T5o]OlJQb0T5o]OlJRb0S5n]OmJRb0S5o]OlJQb0T5o]OkJRb0U5n]OkJRb0T5o]OlJQb0T5o]OlJQb0T5o]OlJQb0T5o]OlJQb0T5o]OlJQb0T5o]OlJRb0S5n]OmJRb0S5n]OlJRb0U5n]OkJRb0U5n]OkJRb0U5o]OjJQb0V5o]OjJQb0V5o]OjJQb0W5n]OiJRb0W5n]OiJRb0W5n]OiJRb0W5n]OhJSb0X5m]OhJSb0X5m]OhJSb0X5m]OhJRb0Y5o]OfJQb0[5n]OeJRb0[5n]OeJRb0[5n]OeJRb0[5n]OeJRb0[5n]OdJSb0\\\\5m]OdJSb0\\\\5m]OdJSb0\\\\5m]OdJSb0\\\\5m]OdJRb0]5n]OcJRb0^5m]ObJSb0^5m]ObJSb0^5m]ObJSb0^5m]OaJTb0_5l]OaJTb0_5l]OaJTb0_5l]OaJTb0_5l]OaJTb0_5l]OaJSb0`5m]O`JSb0a5l]O_JTb0a5l]O^JTb0c5l]O]JTb0c5l]O]JTb0c5l]O]JSb0e5l]O[JTb0e5l]O[JTb0e5l]O[JTb0e5l]O[JTb0f5k]OZJTb0g5l]OYJTb0g5l]OYJTb0h5k]OXJUb0h5k]OWJUb0j5k]OVJUb0k5j]OUJVb0k5j]OUJVb0l5j]OSJVb0m5j]OSJVb0n5i]ORJWb0n5i]ORJVb0o5j]OQJVb0P6i]OPJWb0P6j]OoIVb0R6j]OmIVb0S6j]OmIVb0S6k]OlIUb0U6j]OkIVb0U6k]OjITb0W6l]OiITb0X6k]OhIUb0X6l]OgITb0Y6l]OgITb0Z6k]OfIUb0Z6k]OfIUb0Z6k]OfIUb0[6k]OdIUb0\\\\6k]OdIUb0]6j]OcIVb0]6j]OcIVb0^6i]ObIWb0_6i]O`IVb0a6j]O_IVb0c6h]O]IXb0d6h]O[IXb0g6f]OXI[b0j6d]OUI\\\\b0b70O100@d]OfH]b0Z7
i]O`HWb0a7j]O[HWb0g7;001O001O001O000O2O00000000001O00000000001O000000001O0000001O000000001O000000001O00O10000O10000O100O10000O100O100O10000O100O10000O100O10000O2O000O100O100O1O100O1O2N1O1O1O1O1O1O1O100O2N100O100O100O100O10001N1000000O10000O101N100O10000O2O000O10001N10000O101O000O2O00001N1000001O000O101O00001N10001O000O2O001N101O1O0O2O001O0O2O00001N10001O000O101O000O101N10000O2O0O101N1O2O0O1O2N1O2O1N3M3M3N2M3N1N1O2O1N1N3L3]MnYO`0Vf0]ORZO:Rf0C_ZOMbe01dZOI_e03gZOH\\\\e0KX[OKkd02^[OEed07d[O^OSe0MPPZ1\"}}, {\"image_id\": 160, \"category_id\": 1, \"bbox\": [130.0, 549.0, 156.0, 135.0], \"score\": 0.9998465776443481, \"association_id\": 2, \"light\": [-2.316253423690796, -2.007338762283325, 2.2278404235839844, 1.8874077796936035], \"segmentation\": {\"size\": [853, 1280], \"counts\": \"Tn\\\\35jh00gXO1Ug05gXOMWg06fXOKYg07fXOJYg07eXOKYg06gXOJYg05hXOKWg06iXOJVg07jXOIUg07lXOIRg09oXOFQg0;nXOEQg0=oXOBPg0a0nXO^ORg0e0mXOYOSg0m0hXOPOYg0U1eXOhN\\\\g0`1\\\\XOfN]g0`1]XOgN[g0c1\\\\XO`Nbg0b1\\\\XO_Nbg0d1\\\\XO^Ncg0c1ZXO_Neg0Y2O2N1N2O1O1O1O100O1O102MNiXOhLVg0Y3jXOgLUg0]3OMlXOgLSg0Z3mXOfLRg0Z3oXOfLQg0Z3oXOfLPg0Z3PYOgLPg0X3QYOhLnf0Y3RYOgLnf0X3RYOiLnf0W3RYOiLmf0X3SYOiLkf0X3TYOiLlf0V3UYOjLjf0W3VYOiLjf0W3UYOjLjf0W3VYOhLkf0X3UYOhLjf0Y3UYOhLkf0X3UYOhLkf0X3UYOiLif0b300O100O100O100O100000000O10000000000O100O100000F^YOaLbf0_3^YOaLbf0_3^YOaLbf0^3_YObLaf0^3_YObLaf0]3_YOdLaf0\\\\3_YOdLaf0[3aYOdL_f0[3bYOeL]f0\\\\3cYOdL]f0\\\\3cYOdL]f0[3dYOeL\\\\f0[3dYOeL\\\\f0[3dYOeL\\\\f0[3dYOeL\\\\f0[3dYOeL\\\\f0Z3eYOfL[f0Z3eYOfLZf0Z3gYOfLYf0Y3hYOgLXf0X3iYOhLWf0X3iYOhLWf0X3iYOhLVf0X3lYOgLTf0X3mYOhLSf0W3oYOgLRf0[2aYOYNa0YOne0Q2hYOnMO1cg0]1Z1N2IjVOmNWi0T15O10000O1000000O10000000000000000000000O100000000000001O000000000O2O001O1O1O1O1O1K6Kn^ki0\"}}]"
  },
  {
    "path": "projects/LISA/output_light/inference/soba_instances_results.json",
    "content": "[{\"image_id\": 1, \"category_id\": 1, \"bbox\": [245.02700805664062, 129.4477081298828, 150.12026977539062, 259.74237060546875], \"score\": 0.9999997615814209, \"association_id\": 1, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"min56fg06J5M3N2ON101O1O1O1OXKKgA3V>j0QAVOl>[1g@cNZ?a1c@^N]?d1b@[N^?i1^@XNa?P2W@PNj?b2b_O_M^`0k2X_OUMh`0Q3R_OPMn`0T3m^OiLWa0\\\\3_^OfLda0j41O1O10O10000O1001N2O0O2O000O100000001O01O1O100O1O001O10O0001O001O1O001O1O1N2O1O001O00010O000O100000001O001O1O1O2N001O1O001O0000010O01O1O2O0O1O1O1O2O1N3N0O2O014K6I3N1O1O6Ke0B^\\\\OoKob0^43K2M5Ll0SO:F3N1N3L7J7I6I9HS1lN:E9H5JelQ?\"}}, {\"image_id\": 1, \"category_id\": 1, \"bbox\": [360.1565246582031, 136.72373962402344, 408.9678039550781, 556.4942016601562], \"score\": 0.9999997615814209, \"association_id\": 2, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"_Q`99cg0:G9H5J5K4M4G8G:G9H8F:D<E;E;H9G9ROn0A>H8J6K5L3L6J5K5M3M2N3M2O1N1O2O1N101O00001O000000001O00000000001O0000001N101O00001O001O0O101O001O1O1O001O1O001O001O001O00001O1O001O1O1O1O1O1O001O1O1O001O001O001O001O001O001O1O001O1O001O1O001O001O1O001O1O010O001O001O1O1O001O001O1O00001O001O000010O0001O010O0010O01O010O1O001N10001O0O2O001N101O001N100010O01O10O01O010O010O000100O01O001O0O2O1O1O1O1N2O1O2N1O1O1O000100O1O1O1O100O001O10O01O0010N10001O001O00001O001O001O001O01O000010O0001O00010O000010O00000001O000000001O0000001O001N2M^FWFR5f9PK]Fn4a9RKcFl4Z9VKhFi4V9WKmFi4o8YKSGf4k8ZKXGe4e8\\\\K]Gc4b8^K_Gb4^8_KeG`4W8cKiG]4T8eKnG[4m7hKTHX4n6hFPIR52W4l6mFkHn45Y4P7oFcHi4;[4Q7RG]He4?\\\\4S7]LkHe3T7[LjHi3U7WLjHj3U7ULkHm3T7lKRIV4m6iKTIY4j6gKTI\\\\4k6dKTI^4k6bKUI_4i6bKXI^4g6bKZI_4e6aKZIa4f6]K[Id4e6[K[If4f6WKZIk4f6TKZIm4g6PKZIQ5g6mJYIU5g6QFkHl3?U6n7[IRHg6n7XIRHi6m7VITHk6l7SIUHn6j7QIWHP7i7nHXHS7g7lHYHV7h7gHYHZ7g7eHXH]7h7bHXH_7h7_HYHb7g7]HXHf7g7YHYHh7g7VHZHl7e7SHZHP8f7mG[HU8d7jG\\\\HW8d7gG]H[8b7dG^H]8b7aG^Ha8b7^G^Hc8b7\\\\G^He8`7^G^Hc8a7^G]Hd8a7^G^Hc8a7^G^Hd8_7^G`Hd8^7^G_He8^7]GaHe8]7\\\\GbHf8\\\\7[GbHh8[7ZGdHg8[7ZGdHh8Z7ZGdHg8Z7[GdHg8[7ZGdHg8Z7]GbH
e8\\\\7Q4N3N2M3M5K8H6K6I5K4L4M2M3M4L3M5J6K5K6Ij0WO;E:F<D:F5J5L3M4L4K5H>ZOUTQ6\"}}, {\"image_id\": 1, \"category_id\": 2, \"bbox\": [206.81016540527344, 322.2153015136719, 227.25270080566406, 117.76214599609375], \"score\": 0.9999979734420776, \"association_id\": 1, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"jZl4=ag04M2N101N2N2O1O1N2O0O101N2O0O4M2M2O1O001O1N3N2N2N1O1O1O1O001O001O3M2N1O0O2O2N8G4I6N0O2O0O2O0O2O1N2O1OkNQ[OZOmd0g0S[OYOmd0g0T[OYOkd0f0V[O[Oid0e0W[O\\\\Ohd0c0Z[O\\\\Ofd0d0Z[O]Oed0b0\\\\[O_Ocd0a0^[O^Obd0b0][O@bd0`0^[O@bd0`0][OAcd0?][OBbd0?][OAcd0?\\\\[OBdd0?[[OAed0m100000001O\\\\O[[OmMed0R2][OmMcd0S2_[OkMbd0S2_[OmMad0S2_[OmMad0S2_[OmMad0S2_[OmMcd0Q2\\\\[OPNid0k1W[OUNld0h1T[OXNmd0g1R[OZNod0e1Q[O[NPe0d1P[O\\\\NPe0d1P[O\\\\NQe0b1P[O^NQe0a1oZO_NQe0`1P[O`NQe0^1P[OaNRe0^1nZObNSe0\\\\1nZOdNRe0l0gZORO72Se0j0gZOTO62Ue0g0gZOUO54Ve0e0fZOUO55Ze0a0cZOWO58Ze0?T[OBmd0=S[OBod0=Q[OBPe0>P[OBQe0=oZOCQe0<oZOERe0:nZOESe0;mZOETe0:kZOGVe08iZOHXe08gZOIZe06fZOJZe06eZOJ]e05aZOM_e03]ZOOfe00WZO2le0KTZO5ne0JQZO6Rf0HnYO8Rf0HmYO8Uf0GkYO9Vf0GhYO:Yf0EfYO;\\\\f0EbYO<_f0E^YO<cf0EZYO<ff0>2O000O101N100O1000000O1000O010000O100YOVYO8jf0FYYO:ff0E\\\\YO:df0F]YO9cf0F^YO:cf0E]YO;df0D\\\\YO<Ug0O000O100000001N0100O1O01000O0100O100O100O2N1O101N101N3Mfkn>\"}}, {\"image_id\": 1, \"category_id\": 2, \"bbox\": [312.2872009277344, 426.74517822265625, 430.4860534667969, 333.1461181640625], \"score\": 0.999996542930603, \"association_id\": 2, \"segmentation\": {\"size\": [768, 1024], \"counts\": 
\"Q`^85hg06K4M4K4M2O11N2O1N3M3N2M100O10O0010O01O1O10O0010O10003L7MM10O000O1O1O10O01O001O001O1O1O3L4M5K3M2N2N1O2O000000001O00110101ON1O1OO0O01000O10000O1001O001O2M3N1O1O1O0000O100O01000O0100O1O0_MWNl^Oi1Pa0[No^Of1k`0`NU_O`1g`0dNX_O]1d`0gN[_OZ1b`0iN]_OX1c`0gN]_OY1d`0gNZ_O[1f`0dNY_O^1f`0bNY_O`1g`0_NX_Oc1g`0]NX_Oe1g`0[NW_Oi1h`0VNV_On1h`0SNU_O5_Ne0Zb0VOV_O4eNd0Tb0XOV_O3jNd0oa0YOV_O4lNc0ma0YOV_O4POa0ja0[OU_O4SOa0ha0ZOU_O5UO`0ea0[OU_O6WO>ea0\\\\OS_O5[O>ba0\\\\OR_O7^O;aa0]OQ_O8@:_a0^Oo^O9D8]a0^Oo^O9H7Xa0An^O9K6Wa0@m^O;M7Ta0^Om^O<07Qa0^Om^O;39o`0[Om^O=59l`0[Om^O=79k`0[Ol^O<;:h`0YOl^O><:g`0YOk^O>>:g`0WOk^O??9f`0XOk^O>`0;e`0VOk^O>a0=d`0TOj^O?d0<c`0UOh^O>f0>e`0POe^O`0h0a0d`0mNc^Ob0k0a0c`0kNb^Oc0l0d0a`0iN`^Od0Q1c0^`0iN`^Oc0T1f0[`0gN_^Oc0X1f0X`0hN^^Ob0[1g0W`0jNX^O2ALQ2X1U`0lNV^OOMEj1`1R`0oNR^OM^2T1_?SOm]OKd2S1`?H_@7d?G\\\\@9d?IY@7g?JY@6g?JY@6f?KY@6f?KZ@5e?K[@6e?I\\\\@7c?I^@7a?J^@7a?I`@6`?Ja@6^?Jb@6^?Jc@6\\\\?Jd@6]?Ic@8]?Gd@8\\\\?Hd@9\\\\?Ee@;]?Ce@<[?Cj@9W?E[AKd>4_AN]>1n_O_OROf0j`0JR@DTOb0i`0IQ@HVO?h`0IQ@IXO>g`0GQ@LYO=e`0GP@O[O9f`0Gn_O0_O8b`0Im_O0B6b`0Ik_O1F4``0Ji_O3I2^`0Ji_O3K2]`0Jg_O4O1Z`0Ke_O53NY`0Ld_O64NW`0Ld_O67NU`0Kd_O78MT`0Mb_O6;NS`0Kb_O7<NQ`0Kb_O8=NP`0Kb_O6`0On?Jb_O7`00m?Ib_O8b0Ol?Hb_O8c00k?Hb_O8c00l?Ga_O8e01j?Fa_O8f02i?Fa_O8g01j?E__O8j03g?D__O8k04j?_O\\\\_O<l04k?]OX_O?o03k?[OW_O`0Q15i?UOZ_Od0P16Za0Ih^O6Wa0Il^O7l?ET^O2S28h?KP^OMY28g?2h]OFc28e?l0\\\\@TOd?l0]@SOc?l0^@TOb?l0_@SOa?m0_@SOa?l0`@TO`?l0a@TO^?k0c@UO]?k0d@TO\\\\?l0d@TO\\\\?l0e@SO[?l0f@TOZ?l0g@TOY?k0g@UOY?k0h@TOY?k0g@UOZ?i0g@WOY?i0g@VO[?i0e@WO\\\\?h0d@XO]?g0b@ZO^?e0c@[O^?d0b@\\\\O_?c0a@]O_?c0a@]O`?a0a@^O`?b0`@^Oa?a0_@_Oa?`0`@@a??_@Ab?>^@Bb??]@@e??Z@Bg?=Y@Ch?<X@Ci?>U@Cl?<T@Dm?;S@Dn?<Q@EP`0;o_OEQ`0;o_ODS`0;m_OES`0;l_OFU`0:j_OEW`0;i_OEW`0;h_OEZ`0:f_OE\\\\`0:c_OF^`09b_OFa`09]_OHf`06X_OJo`01P_O0Ua0Jk^O6aa0_O^^Oa0_d02O001O1O010O1O01OD=OO0001O0@QOQZOo0ne0b0N100000101ZOXNS[Oh1be001O001O0O2O1N10100O1O010O100O010O0010O0N2O00101O01O010O010O100O101N100O1O2N1O1O1O2N1O1O1O10`LlNU@R1^c0O0O2N2N2M4FR\\\\e7\"}}, {\"image_id\": 1, 
\"category_id\": 2, \"bbox\": [406.2237548828125, 544.087646484375, 359.68475341796875, 209.41839599609375], \"score\": 0.05124194175004959, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"Xbe92ng00X`4Oi_K1N2O1O2N1O1O2M2O3M3M4L3M3M3M1O1O1O001O10O01O2O1N3N1O1O0O01000O010001O001O2N5K2NO1000O0100O01001O0hNoNb[OP1Xd0UOj[Oi0Rd0ZOP\\\\Of0mc0\\\\OS\\\\Oc0lc0^OU\\\\Ob0hc0@X\\\\Oa0fc0AZ\\\\O>ec0C[\\\\O>dc0B]\\\\O>ac0C_\\\\O=ac0C`\\\\O>]c0Cd\\\\O=[c0Cf\\\\O?Vc0Bl\\\\O>Rc0BP]O?lb0BV]O>hb0BY]O?eb0B\\\\]O=bb0D^]O=ab0C`]O<`b0Da]O<^b0Cc]O>\\\\b0Be]O>Zb0Bg]O>Xb0Bh]O?Wb0@k]Oa0Sb0_Oo]Oa0oa0^OS^Od0ja0\\\\OW^Oe0ga0ZOZ^Of0fa0ZO[^Of0da0YO^^Og0aa0XO`^Oh0`a0WOa^Oj0^a0VOb^Oj0_a0TOb^Om0]a0QOd^OP1]a0nNd^OS1]a0eNi^O]1f`0bNi_Oa1S`0bNl_O`1Q`0bNm_O`1o?cNP@`1m?bNQ@`1n?`NR@`1n?`NR@a1l?aNS@_1m?aNS@`1l?aNS@`1l?`NS@a1m?`NR@a1m?_NS@MlMd0oa0@T@KSNa0ha0DU@JUNb0da0EW@HWNc0aa0FW@FYNd0`a0FV@G[Nc0^a0FW@G[Nc0^a0GU@F^Nd0\\\\a0FT@HaNa0[a0HP@IfN?Za0Hm_OKkN=Wa0Ii_ONPO:Va0Ig_OOTO7Ua0Jc_O3YO4Ra0Ja_O4B5f`0He_O5G2c`0Id_O7KO``0Jc_O9NM^`0Kb_O90L^`0Kb_O91K]`0La_O:2K\\\\`0La_O94J\\\\`0L`_O:5JZ`0L`_O:7JY`0M__O99JW`0M`_O9:JV`0M__O9<JT`0M__O:>IR`0N__O9`0IQ`0N^_O9b0HP`0O^_O9c0Ho?O]_O8e0In?0[_O8h0Hm?O[_O9h0Hn?OZ_O8h0Io?NY_O8j0Jm?NX_O7m0Jl?OV_O6P1Kj?OU_O4S1Nh?NT_O1X10f?NQ_O0\\\\12c?NP_ON_14b?No^OKb17_?Nn^OJd18_?Mm^OJf19]?Ml^OIh1:\\\\?Ml^OHi1;]?Li^OHl1;\\\\?Lh^OGn1=[?Lf^OFP2>[?Lc^OEU2>Z?L_^ODZ2`0X?1V^O_Oe2?V?5o]O\\\\On2>S?8k]O[OT3<R?:f]O[OY3;Q?h0QAWOo>i0QAWOP?h0QAWOo>i0QAWOP?g0QAYOo>g0QAYOP?f0PAZOQ?e0o@[OR?d0n@\\\\OS?c0m@]OT?b0l@^OU?a0k@_OV?`0j@@W??i@AW??i@AX?>h@BX?>h@BY?=g@CZ?<f@DZ?=d@D]?;c@E^?:b@F^?:b@F_?:`@F`?:_@Ga?9_@Gb?9]@Gc?9]@Gc?9\\\\@He?8Z@Gg?9X@Hi?7V@Jk?6S@Jo?5P@LQ`03n_OMU`01j_O0W`00g_O0\\\\`0Nc_O2_`0M__O4b`0L]_O5d`0KZ_O5g`0KX_O6i`0IV_O7k`0JS_O7n`0HQ_O8Qa0Gn^O9Ta0Gj^O9Xa0Fg^O9\\\\a0Gb^O9aa0F^^O:ca0E\\\\^O;fa0EX^O;ja0DU^O<ma0DQ^O=oa0Co]O>Sb0Al]O`0Ub0_Oi]Oa0Zb0]Of]Oc0\\\\b0\\\\Ob]Oe0`b0ZO_]Od0eb0ZO[]Of0gb0YOX]Of0kb0YOa\\\\O1oNf0Sf0^OiYOb0Vf0BhYO>Wf0e0010OI8O0001O0O1O2O001O001O010O01O000000N
2N2O1O2O010O1O100O1O1O2O0O1O101N10001N10000O1O010N2N2M3N2O100O1O010O010O00000O10O1001O010O0010O01O010O1O010O010O0100O0100O010O10O10O1000O0100000O0100O001N2N1O2N100001O010001N101O0O101N101NbdW6\"}}, {\"image_id\": 2, \"category_id\": 1, \"bbox\": [294.4266662597656, 374.8473205566406, 393.2955017089844, 734.00537109375], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [1280, 1024], \"counts\": \"`le<1jW1e0ohNGXU1e1J6J6K4K5L5J6G:Cb0[Oi0gNT1C8I7J5L5J4M4M2M4L3M3K6I7H8H8H7L5K4N2M3N2N1N2O1O1N2N2M3M3L4M3M3N2N2O1N2O000O2N1N2M3L4M3N1N3N1O2O00101O1N2O001O1O1O1O1O0O2O1O100N2O2M3L4L4L4L5L3M4K4M3M3M3N2M3M2M4L4L3M4K5J5K6M3M2N3M3N1O3L3N3L4L5J6J6I7J5L4L4L4M3M2N2N2M3N2M3L4L4I7J6K5K5L3M4M3L3M3L5gM`^Ok]Oea0oa0j^Oc]O[a0Vb0W_Of\\\\Oba0Qc0l1K4M3N2N3M2O1N2O1O1O0O2N1N3M2O2N1O101O00000001O1N2N2O0O2O1O1N2O001O1O1O010O01O1O001O0O2O1N1O2O1N101O100O100O2O0O101N10000O101O0O1000000000000000000000000000000000O100000000O1000000O10000O100O1O100^Mj]OY_OWb0U`0a^Od_O`a0S`0m^Oi_OSa0T`0T_Og_Om`0W`0X_Oe_Oi`0Z`0[_Ob_Of`0\\\\`0__O`_Ob`0_`0a_O^_O``0a`0c_O]_O]`0b`0d_O]_O^`0a`0c_O^_O^`0a`0c_O^_O_`0``0b_O__O_`0_`0d_O`_O]`0^`0d_Oa_O]`0]`0e_Ob_O]`0[`0e_Oe_O\\\\`0X`0f_Og_O[`0W`0h_Oh_OY`0U`0i_Oj_OY`0S`0i_Om_OX`0P`0j_Oo_OX`0o?j_OP@W`0o?j_OP@W`0n?k_OP@X`0n?i_OQ@X`0m?i_OS@Y`0k?h_OT@Y`0j?i_OU@X`0j?i_OT@Y`0k?g_OU@Z`0i?h_OV@Y`0i?h_OV@Y`0i?g_OW@Z`0g?h_OW@Y`0i?g_OW@Z`0h?g_OW@Y`0i?g_OV@Z`0j?g_OU@Z`0j?g_OT@Z`0l?g_OR@Z`0n?g_OP@Z`0P`0g_On_O[`0P`0h_Om_OY`0S`0h_Ok_OY`0U`0i_Oh_OX`0X`0j_Oe_OW`0[`0h3O0000001_BVYOj9jf0TFZYOi9gf0UF^YOh9cf0UFbYOh9^f0WFfYOe9[f0ZFjYOb9Wf0\\\\FnYO_9Sf0`FRZO\\\\9oe0bFUZO[9ke0dFXZOY9je0fFXZOX9he0gF[ZOV9ge0hF[ZOW9ee0iF\\\\ZOT9fe0lF\\\\ZOQ9ee0oF]ZOn8ee0QG]ZOk8ee0UG]ZOf8fe0ZG]ZO^8he0bG]ZOS8je0lG\\\\ZOh7je0XH[ZO[7me0eHWZOP7Pf0PITZOf6Sf0YIPZO^6Vf0bIlYOX6Xf0hIkYOo5\\\\f0PJfYOV5Sg0hJPYO`3aLZKnj0U1dXO[3hh0dL\\\\WOS3jh0lLZWOk2lh0TMZWO_2nh0`M_WOk1jh0TNkWOP1\\\\h0POiWOe0^h0ZOfWO<ah0BdWO6`h0JbWO1bh0N`WOMdh02^WOJdh06^WOFeh08_WOCdh0;cWO\\\\Oah0c0fWORO_h0l0jWOhNZh0W1Y7N2N2N2N2M3L4K5L4K5L4L4L\\\\jh=\"}
}, {\"image_id\": 2, \"category_id\": 2, \"bbox\": [182.83108520507812, 814.2427368164062, 343.9987487792969, 318.79156494140625], \"score\": 0.9999970197677612, \"association_id\": 1, \"segmentation\": {\"size\": [1280, 1024], \"counts\": \"laU7X1_V1a0G4L2N3N1N2O1O1O1O3M4L4L5K5K5K4L3bkNfMoR1k3K2N1O2N1O2N1O1O2N001O001O1O001O1O1O2N2N1O1O1O1O001O000010O0001O001O001O001O1O1O1O1O1O001O001O001O00001O001O1O001O1O2N001O1O001O001O1O001O100O1O1O1O1O001O00001O00001O0000001O001O001O010O1O1O1O1O1O10O0001O01O00010fIQoNa5PQ1[JVoNb5mP1YJXoNe5jP1UJ[oNi5`Q1N1O1000001O1O1O1O1O1aKhmNS3YR1iLQnNPOO_3RR1]MdnNa2\\\\Q1\\\\MhnNb2ZQ1ZMlnNc2dS1D3N3M7Il0UO3L3N1N1N2O1O1N2N2N3N2M3N3L3N1O1O1O0O10O001O001O00001O0000001O0000000O1000O1000000000O100O1O1O1O1O1O1O1O100O1O101N100O100O10000O10000000000001O0010O01O1O0010O01O100O10000000001N2O1N2O1N1O3L5K6JcVYg0\"}}, {\"image_id\": 3, \"category_id\": 1, \"bbox\": [25.904190063476562, 182.18118286132812, 108.77557373046875, 116.02633666992188], \"score\": 0.9999998807907104, \"association_id\": 2, \"segmentation\": {\"size\": [682, 1024], \"counts\": \"V`k0:nd04L4M1N3M3M2O3M2O1N2N2O4K2O2N2N5J4M1O2N3M1O1O2N2N1O1O1O2N1O1O1O2N1O1N3N3M2N4L?A1O001O1O1O000000000000001O00000000000000O101N1O1DU^OXMla0f2[^OTMfa0f2b0L4N2N3K4L5M2M3Dh\\\\O_N_c0X1>N2N2O1O1O1O2M2N3N2N2Dg[OD[d08>J^Wcb0\"}}, {\"image_id\": 3, \"category_id\": 1, \"bbox\": [52.96216583251953, 218.1842041015625, 971.037841796875, 340.15740966796875], \"score\": 0.9999912977218628, \"association_id\": 1, \"segmentation\": {\"size\": [682, 1024], \"counts\": 
\"Yol16Qe05J6L5J4La0@3M3M2O2O1N2O0O2O1N101N2O1N1O2O1N3M3N1N3M3M4L4L4L4L5K4M3L3M4M1N2O1N1O2O1N101O0O2O0O2O001O0O2O001O0O2O1O001O1O1N2O2N1O1O2N1N2O1O1O1O1O001O1N101O001O00001O000O2O00001O001O001O1O1O001O1O1O2N1O1O2N001O1O001O00001O000O2O00001O000000001O000O10001O00000O2O00001N10001O0O2O001N101O000O2O000O101O00000O101O00000000000O1000000000000000000O1000000000000000000000000O10000000000000000000000000001O00000O1000000000000000000000000000000O10000000001O000000001O000000001O000O2O00001O001O001O00001O00001O00001O000O101O00000000000000000O10000000000000000O1000001O0000000O10000000000000000O1000000O100000000O101O000O1000000O10000O1000000O1000000O2O00000O10000O10001N100O101N1O101N1O1O2N1O2N1O2N1N2M4M2N2N3M2O1O2N1O100O2N100O100O2O0O100O2N100O2N1O2N101M2O2N1O2M2O2M2O2M2O1O2N1O1O1O2N100O1O101O0O100O1000000O1000000O100000000O1000000O1000000O1000000O100000000O1000000O10000000000O100000000O100000000O10000000000O1000000000000O01000000000000000000O10O1000O10000O10000O100O1M3M3N2M3O1O100O10O010000O10000O1000000O1000000O10000O100O100O100O100O1L4M3M3N2O100O100O10000O10000O1000000O1000000000000000000O100000000000000O1000000000000O10000000000O100000000O10000O10000O100O10000O1000000O1000000O100000000O1000000000000O1000000000000O1000000000000O1000000O1000000O1000000O100O101O0O10000O100000000O1000000000000O100000000000001N10000000000000000O1000000000001O00000000000O10001O000000000000001O000000001O0000000O101O0000000000001O00000000001O000000001O000000001O000000001O000000001N100000001O000000001O00010O001O00001O1O001O0010O01O1O001O001O001O1O001O001O010O1O001O001O001O1O001O1O001O1O1O1O1O2N2N2N3M3M9F6K2N3M2N2N1O1O001O001O00001O00001O00000O2O00001O0O101O001N101O0O2O0O2O1N101O0O2O0O2O00001N10001N1000001O0O1000000O101N10000O101N100O1O1M4L3M3M4M2O1N3N1O1O2N1O1O2N1O2O0O100O2O0O100O2O0O1O101N1O1O2O0O1O1O2N1O1N3N1O1O2N1O1O2N100O2N101N100O2O001N100O2O001N101N2N101N2N2N2O1K7I;E[:\"}}, {\"image_id\": 3, \"category_id\": 2, \"bbox\": [21.455141067504883, 405.8929443359375, 1002.5448608398438, 
202.2763671875], \"score\": 0.9992203712463379, \"association_id\": 1, \"segmentation\": {\"size\": [682, 1024], \"counts\": \"Qkc11Ye01N2O1N2O2M2N4M4K2N2O1N2N2N1N3N2N2N2O0O2O1O001N101O001O0O101O0000000O10001O0O1000000O100O100O101N1O100O100O1O100O100O10000O10001O0O100000000O1000000O1000000O10000O100O100O100O100O100O100O10000O10000O10000O1000000O1000000O10000O10000O100O100O1O1O1O1O100O1O1O100O1000000O100000000O1000000000000000000000000000000000000000000000O100000000000000000000000000000001O000000000O10001O00000000001N100000001O0O1000000O2O0000000O1000001O000O1000000O10001O0O10000O10001N1O1O1O1O1O1O2N100O10001O000O10001O000000000O101O00000000000000001O0000000O10000000001O0000000000000000000O2O000000000000000000000000001O0000000000000000000000001O00000000000000001O00000000001O0000001O0000001O00001O00001O001O000010O01O0000001O0000001O0000001O0000001O00010O00001O1O1O1O1O1O101N1O1O1O1O1O1O10O0001O001O00010O0000001O000O2O00001O00001N2O1O001O1O1N2O001O1O001N101O00001O0O101O0000000O2O000000000000000000000000000000000000000000000000000000000000000000000000O2N1O2N1O2N2N101N1O2O1N3M5L1N1O1O2M2M3M4N1N2O1O2N1O101O0O1000001N1000000O2O00000O10000O10000O100N2O2M2N2K5I7M3N2O1O1O1O1O100O100O10000O100000001N10000O10000O10000O100O1O1O2O0O1O100O1O1O10000O10000O100000000O1000000000001N10000000000O100000000O10000O1000000O100000000O100000000000000O10000000000000000000000000000000000000000000000O100000000000000000000000000000000O1000000000000O100000000O10000O1000000O100000000O100000000000000O100000000000000000000O100000000000000O0100000000000O10000000000O1000000000000000O10000000000O1000000O10000O10000O1000001N1000000O100000000O100000000O1000000O01000O10000O100O100O100O100O1O10000O1000000O10000000000O1000O100000O1000000O1000000O100O100O100O100O10000O1000O010000O100000000O1000000O1000000O1000000O0100000O1000000O10000O1000O10O10000O100000000O01000000000O010000000O010000O010O10O10O10O0100O010O010O100O01000O10O1000O11O0O1000001O000O101O00001N10001N100O2O0O1O2N1O1O2O0O1O101O0O100O2O00000O2O1O1N2O
1O1N2O1N2O1N2M3N2M3N8E;Eh5\"}}, {\"image_id\": 3, \"category_id\": 2, \"bbox\": [11.023405075073242, 288.8938293457031, 74.15486145019531, 13.562896728515625], \"score\": 0.9881167411804199, \"association_id\": 2, \"segmentation\": {\"size\": [682, 1024], \"counts\": \"Un<2Xe000000O101O0000000O101O00000O10000O1000001O00000000000000O01000000000001O00000000001O0000000O10000000000000000001O0000001O1O2Mm]bc0\"}}, {\"image_id\": 3, \"category_id\": 2, \"bbox\": [158.42404174804688, 247.06251525878906, 142.78350830078125, 24.396041870117188], \"score\": 0.8951332569122314, \"association_id\": 0, \"segmentation\": {\"size\": [682, 1024], \"counts\": \"ii^31Xe02N2O0R[OMdd05[[OKed05[[OLdd04\\\\[OLdd04\\\\[OLdd05[[OKed05[[OKed05[[OLdd04\\\\[OLdd05Z[OLfd04Z[OLed05[[OKed05[[OKed06Y[OKgd0<01O00000O10000000001O00000000001O0000000000000000000000001O00000000000000000001O000000000000000000000000000000000000001O000000000000001O0000001O00000000001O000000O1000000O1000000000000000000000000000O10001O00000000000O10001N1IV[ONld0NW[O1id0L[[O1IO_XS?\"}}, {\"image_id\": 3, \"category_id\": 2, \"bbox\": [74.09242248535156, 453.51470947265625, 378.80059814453125, 211.44049072265625], \"score\": 0.39758387207984924, \"association_id\": 0, \"segmentation\": {\"size\": [682, 1024], \"counts\": 
\"Qka1>gd0<G3M3N2N2O0O2O001O1O1N101O001O001O0O10001O001N2O001N2O001N1O2O0O101O000O2O1O001O1O1N101O00001O00001O001O1O1O1O001N101O00001O001O0O2O1O1O001N2O000O2O000O101O000O2O001O1O1N101O000O10001N1O100O1O101N1O10000O2O001O1N2O0O2O1N101N100O101N101N1O2N1O2O1M201N101N10000O2O00000O2O00001N101O001O00001O0000001O0000000000001O0000000001O00000000000000000000000000000000000000000000000000000000000000000000000000000000000000O10000000000001O000000000000000000000O1000001O000O10000O10000O100O100O2O000O1mNn^OmMSa0P2U_OkMk`0a1Q_OoMh0NY`0Q2[@mMe?R2^@kMc?T2`@iMa?V2k1N1G9O2N101N100O101N100O100O10000O101N10000O1000000O1000000000001O0000000000001O00001O001O001O001O00010O0000000000001O00000000000000000000000O100000000000000000000000000000000000O1000000000000000000000000000001O0000000O100000000000000000000000O2O1N2N2N2Mn]l;\"}}, {\"image_id\": 3, \"category_id\": 2, \"bbox\": [59.84610366821289, 518.1353759765625, 290.1900634765625, 160.02423095703125], \"score\": 0.06861153244972229, \"association_id\": 0, \"segmentation\": {\"size\": [682, 1024], \"counts\": \"h`X18nd07L2N3M101N2N1O2O0O2O1O1N101O000O2O001O0O2O1N2O0O101O001N101O1O1O1O0O2O00001O001O001O1O1O0O2O00001N100O2N2O001N101O000O2O001O001N101O001N100O2O001N10001O0O101O001O1O001O001O0000001O1O001O1O0O2O0000001O001O001O1O0O2O001N1O1O101N1O1O2N1O101O0O2O0O2N100O1O2O0O100O2O0O2O1N1O2N101N100O100O100O2O0O1O10000O101O001O1O001O0O2O000000000000001O01O000001O001O1O001O0000001O000010O0001O001O1O00001O000000001O0O1000001O00001O001O001O0000000000000000000000000000000001O000000001O000000000000000O1000001O001O001O00001O000B`_ORL``0m3a_OSL``0j3c_OUL]`0i3f_OUL[`0j3g_OULZ`0i3g_OUL[`0U2k_OhNLQOZ`0U2X@TNTOH;NZ`0T2\\\\@lMg`0Q2Z1O1O1O001O1O1O1O1O010O2N100O1O2N1O1O2N1O2N2N3M2M5HZQQ>\"}}, {\"image_id\": 4, \"category_id\": 1, \"bbox\": [70.9486083984375, 262.59857177734375, 154.2806854248047, 91.00079345703125], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [480, 640], \"counts\": 
\"bcQ18V>e0L3M1O1M3N2O100O1O0N3O10O010ON10011O0000ONO4N2N3O000M300010O001N1O20O02N1O3L4M2O1N2N3M3N1ZCTNU<n1hCUNW<k1hCWNV<k1gCXNX<i1eCYNZ<X2O0O1000^OjCZNV<e1mCXNT<f1oCXNR<h1oCVNR<i1oCVNR<j1QDSNo;m1b00000O11O1O2YCTNV<o1dCUN[<X201O0000XOeCkN[<S1hClNX<S1kCkNU<U1PDeNQ<Y1SDdNn;\\\\1SDbNn;]1k000O10000O1000000O1000000000000000000O01000000000000000001N101O1O3M1O001RCZNINc<[2O2M1O1OO1O2N1O10001O01000O00010O1O1O1O1N2N3L6K2bNoBi0T=UOnBi0T=TOoBi0g=M:D6J5M3M3LPQU6\"}}, {\"image_id\": 4, \"category_id\": 2, \"bbox\": [0.0, 328.7700500488281, 56.73539733886719, 20.58026123046875], \"score\": 0.9999998807907104, \"association_id\": 0, \"segmentation\": {\"size\": [480, 640], \"counts\": \"e:5h>4L4N1O10000O10000000O1000001O0O101N10O1O100O10O1000O10O100O10000O2O000O2O00000O10000O1000O1000O100O010000000O2Oen`8\"}}, {\"image_id\": 4, \"category_id\": 1, \"bbox\": [226.0249481201172, 261.73388671875, 288.51849365234375, 192.52554321289062], \"score\": 0.9999998807907104, \"association_id\": 2, \"segmentation\": {\"size\": [480, 640], \"counts\": \"Vl_37f>8I5J4J5O2M4lDPOR8S1kGTOi7W1PHQOc7g1UFhM03o0b0i8d2lF_MR9f2hF\\\\MW9g2fF[MY9g2cF[M\\\\9f2eFYM[9h2eFWMZ9j2hFTMW9m2kFPMU9R3lFlLS9U3PGgLP9Z3SGXLW9j3n0100O100O1O00100O1O10000O011O001N101N101N101N101N101O1O1N:G2N1O2RFTKd9U5O1O1O1O1O5K3M1O1O001O00000000\\\\NSGUMm8d2`GVM`8f2iGUMW8i2mGUMS8j2oGUMQ8j2PHVMP8i2RHVMn7j2SHTMn7l2RHTMn7k2THTMl7l2VHQMk7n2[HmLe7R3cHfL^7Z3T2O1O1O100000000O10000000000000001O0001O01O00010O010O01O0010O01O000001O0000000001O00000000000000000000000000000QFgLW8Y3hGjLV8V3iGlLV8T3hGoLW8Q3gGQMY8o2dGUM[8k2aGYM_8g2_G\\\\M`8d2^G^Mb8b2\\\\GaMc8_2\\\\GbMd8^2\\\\GaMe8`2ZG_Mg8a2YG^Mg8d2XGZMj8g2UGTMP9m2nFoLW9R3hFjL\\\\9W3cFdLa9^3^F^Lf9c3YF\\\\Lh9d3XF[Li9f3VFYLj9j3TFULm9o3`05J6K2N000O101O001O2N6J2M2O1O2N1O1O00000000O10000O1O2A>L4O1N2O1O1N2O2N1O1O1O2N2N1N2N3K4J7L3N2O1N2N3L3N2M4K4J6O2O0O2N200O1O10002N1O1N100O10O0101O0016J4TCSNZ<b2H0000O10000019I14K0NO0O0100O001O2N4L5K3M2N1O010O0O2O1O1O2N2N2N2N1O1N2O1N3M8E^fj1\"}}, {\"image_id\": 4, 
\"category_id\": 2, \"bbox\": [145.7385711669922, 323.160400390625, 95.84805297851562, 26.732879638671875], \"score\": 0.9999997615814209, \"association_id\": 1, \"segmentation\": {\"size\": [480, 640], \"counts\": \"jWU22n>1M2O1L4O100O010O1000000O10000000O10O100000000O10000O01001N1000O0L5O1O2L4L[h35aWL2001N2NRd5Mn[J5N1O1N2N110O2M2O1O1000000O101O0000000000O100000000000000O1BbA:YUk5\"}}, {\"image_id\": 4, \"category_id\": 2, \"bbox\": [253.73519897460938, 361.9300537109375, 233.35946655273438, 91.43014526367188], \"score\": 0.9999911785125732, \"association_id\": 2, \"segmentation\": {\"size\": [480, 640], \"counts\": \"`[i31n>3SAOf>7J501N10001N10O1O001O1O001O10O01000O1O010O100001N2O1N2I[A0m>N^a62X^I>H3K401N2O1N2N3M2M5L2O1N100O2O0N2O1O100O2OO1O1O0O2N2O1O0010000O1O2N1O100O100O2N1O2N1O2O0O10000O101N1O1O100O100000O01000000000O1000000O100000000000000000000001O001N2N2M3M3N3M2O1MQ^12maN4L200O100O1O1O1O100O01000L^OjAb0V>^OjAa0W>_OiAa0W>@hA`0X>@iA>W>CiA=W>CiA=W>CjA;W>DoA4T>L>2L301N200O2N2N2O1N100O101N100O010O1O1O10000O10O00K4H8011O010O1O001L3O2O001000000O10000O10001N101O1N2M4M2J6H\\\\A1e>Nbo\\\\2\"}}, {\"image_id\": 5, \"category_id\": 2, \"bbox\": [7.284016132354736, 606.6954345703125, 85.05577850341797, 44.7432861328125], \"score\": 0.9999997615814209, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"[Y54jd0c0I2N1O1O100O100O1O101N1O1O1O100O1000000010O00001N100010O00000001O00001O0000001O00001O00001O00001O0000001O00001O00001O00001O00001O000O101O0000001O1O001N2O0O2N2N1OdP_c0\"}}, {\"image_id\": 5, \"category_id\": 1, \"bbox\": [638.736572265625, 476.1139221191406, 38.93402099609375, 26.433746337890625], \"score\": 0.9999932050704956, \"association_id\": 2, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"][[=4Ue03M2H8O1O2N100O1001OO100000001N101O1O0000000000O10000O1O1O2N1O2N1O2N2O2M4KkYV7\"}}, {\"image_id\": 5, \"category_id\": 1, \"bbox\": [0.0, 150.54296875, 289.14312744140625, 180.69741821289062], \"score\": 0.9999862909317017, 
\"association_id\": 1, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"_6X2mb0<G4O2M2O1O000O2O0000001O000001O000000001O0O101O001O001O0000001O0000000000001O00000000000000000001O01O000000000000000000000000000000O100000000000000O100000000O10000O1O100O1O100O100O1O1N2N2O2N1O1O100O1O1O2O0O1O100O2N101N1O2N1O2N1O1O2N100O101N1O2N1N3M3M2O2N101N100O100O2N1O100O101O000000000000cLa_On1_`0PNf_Om1Z`0RNj_Ok1V`0SNm_Ol1S`0SNo_Ol1R`0RNP@m1Q`0PNR@P2m?nMV@Q2k?iM\\\\@U2e?aMe@^2\\\\?]Mi@b2X?ZMk@f2n`00O01O00100O001O001O00010O000000001O0000001O00000010O000001O00010O001O1O1O1O1O010O00001O01O00010O0010O01O01O001O1O0O2O00001O0O101O001O1O1O2N1O1O1O1O1O001N2N2O2M2O2M101N2O001N102M2N101N2O1N101O0O2O1N100O2N1O2O0O101N100O2O0O2N2N2O1N2N101O001N2O1O1N2O0O2O\\\\Y]?\"}}, {\"image_id\": 5, \"category_id\": 1, \"bbox\": [263.9549255371094, 52.550254821777344, 714.568359375, 365.8962097167969], \"score\": 0.9999628067016602, \"association_id\": 3, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"hTg62Xe0:bIFXG>Y80dG3X81RD^OX1d0c:0RD_OY1d0c:NQDA[1c0b:MQDA]1c0a:M[C]OTO6n2a0b:NYC@RO4R3?b:NXCDnN2W3=b:NVC;W2Hb:NTC=Y2Ec:5kB9`2Ce:Y2ZEgMf:Z2YEgMf:Z2YEfMg:\\\\2WEdMh:a2SE`Mm:d2oD\\\\MQ;h2kDXMU;i2jDWMV;k2hDVMW;k2gDVMY;k2fDUMZ;l2eDTM[;m2dDSM\\\\;m2dDSM\\\\;n2bDSM^;n2aDRM_;o2`DQM`;o2`DRM_;o2`DPMb;P3^DoLb;R3]DnLd;R3\\\\DmLd;U3ZDkLg;V3XDhLi;Z3VDeLk;\\\\3SDdLn;VOlBV3U1cMo;ROXCU3i0gM\\\\=W2bBiMa=U2_BiMe=U2[BiMi=U2WBiMk=V2VBhMl=X2UBeMl=\\\\2UBaMm=^2TB`Mn=`2SB]Mn=d2RBZMP>e2QBYMQ>g2oAWMR>i2PBTMQ>m2W2O0010O0100O10O0101O2M3N2M3N1N100O100O100O10O01O10O01O010O10O010000O0101O1O0O2O001N10001N1000O0100O100O00100O1O1O10O02N2N101N2N1O2dIRMXJo2c5XMYJi2d5]MnEOU2d2l7aMjENY2b2j7dMiEM]2_2h7fMiEL_2^2f7jMgEKb2\\\\2e7kMgEKc2[2d7lMgEKd2Y2c7oMgEIg2X2`7QNVEWOEb0d3V2`7SNSE;]3b1_7UNPE=`3_1_7UNmD?d3\\\\1_73`HN_72bHN^72aHN_73`HN_73`HN_73`HM`73`HN_73`HN_73`HO^72aHO^72aHO^71cHO\\\\72cHN]73bHN]72cHO\\\\72cHN]72cHO\\\\72cHN]72cHO\\\\72dHM\\\\73dHM]73bHN]72cHN]73bHM^73bHM^73bHN^72aHN_72aHN`72_HOa71^HOb71^HOc71]HOd70[H0f70YH0i7NWH3j7LUH4m
7KRH5P8JoG6S8IlG7U8HkG9V8FiG:X8FgG:Z8FeG:\\\\8FcG:^8FaG:_8FaG;_8E`G;a8E^G;b8E^G;c8E\\\\G;d8E\\\\G;e8EYG<h8CXG=h8DWG<j8DTG=m8BSG>o8AoF`0R9@lFa0U9_OjFa0X9^OfFc0\\\\9\\\\ObFe0`9[O^Fe0d9[OYFf0i9YOUFh0m9WOQFj0Q:VOlEk0U:UOiEl0Y:SOeEn0\\\\:QOdEo0]:QOaEP1`:PO_EP1b:oN]ER1d:nN[ER1e:nN[ER1f:nNXES1h:mNXES1i:mNVES1j:nNVEQ1k:oNTEQ1m:nNSER1n:nNQER1o:oNQEP1P;oNPEQ1Q;oNnDQ1T;nNlDQ1U;nNkDR1V;nNiDR1Z;lNgDR1_;hNbDV1e;eN\\\\DY1e;fN]DX1d;hN]DV1c;jN_DT1b;lN_DR1b;mN_DR1a;oN_DP1b;PO^Do0b;QO^Do0c;RO\\\\Dm0d;UO[Dj0f;WOYDh0g;YOYDf0g;\\\\OXDc0i;]OWDb0i;@WD>j;BVD=j;DVD;k;DVD;k;EVD9k;FVD9j;GWD8k;GUD8l;GUD8l;GUD8k;HVD7k;IUD6l;ITD7m;HTD7l;IUD6l;JTD5l;KUD4l;KTD5l;KUD4l;KTD5l;KUD4l;LSD4m;LTD3l;MTD3m;LTD3l;MUD2l;MTD3l;MUD2k;OTD1m;NTD1l;OUD0l;OTD1l;OUD0l;OUD0k;0UD0k;0VDOk;1UDNk;2UDNk;2VDMk;2VDMj;3VDMj;3WDLi;4WDLi;4XDKi;5VDKj;5WDJi;6WDJi;6WDJi;6WDJi;6XDIh;8WDHi;8WDHj;7VDIj;8UDIj;7VDIj;7VDIj;8UDHk;8UDHk;8UDHk;9TDHk;8UDHk;9TDGl;:SDFm;:SDFm;:SDFm;;RDEn;;RDEn;<QDEo;:QDFo;:QDFo;;PDEP<;PDEP<;PDEP<;PDEP<<oCDQ<<oCDQ<<oCDQ<<oCDR<<mCDS<<lCET<;lCET<<kCDU<<kCDU<<kCDV<<iCDW<<iCDW<<iCDW<=hCCY<<fCEZ<<eCD[<<eCD[<<eCD\\\\<<cCD]<<cCD]<=aCD`<;`CDa<=^CCb<=^CCc<=[CDe<<[CDf<;ZCEf<;ZCEf<<YCDh<;WCFi<:WCFi<:WCFj<9VCGj<9UCHk<9TCGl<9TCGm<8SCHm<8SCHm<8SCHm<8RCIo<6QCJo<7PCIP=7PCIP=7PCIP=7PCIQ=6nBKR=5nBKR=5nBKR=6mBJS=6mBJS=6mBJT=5lBKT=5lBKT=5lBKT=5mBJS=6mBJT=5lBKT=6kBJU=6kBJU=6kBJV=5jBKV=5kBJU=6kBJV=5jBKV=5jBKV=5jBKW=5iBJW=6iBJX=5hBKX=5iBJX=5hBKX=5iBJX=5hBKX=5hBKX=5iBJX=5hBKX=5jBIV=7kBHV=7kBHU=8lBGT=9nBER=;oBEP=;QCDP=;QCDo<<RCCn<=RCCn<=SCBm<>SCBm<>TCAm<>SCBm<>SCBm<>TCAl<?TCAl<?TCAl<?UC@k<`0UC@l<?TCAl<?UC@k<`0UC@k<`0UC@k<`0UC@k<`0VC_Oj<a0VC_Oj<a0VC_Oj<a0WC^Oi<b0WC^Oi<b0XC]Oh<c0XC]Oh<c0XC]Oh<d0XC[Oh<e0XC[Oi<d0XC[Oh<e0YCZOg<f0YCZOg<f0ZCYOf<g0[CXOe<h0\\\\CWOd<i0]CVOc<j0^CUOb<k0_CTOa<l0`CSO`<m0aCRO_<n0aCRO_<n0bCQO^<o0cCPO]<P1cCPO]<P1dCoN\\\\<Q1dCnN]<S1cClN]<T1dCkN\\\\<U1eCjN[<W1eCgN\\\\<Y1eCfN[<[1eCdN[<\\\\1gCaNZ<`1fC_NZ<a1hC\\\\NY<e1gCZNY<f1iCWNX<j1iCSNX<m1mCiMX<X2mC]MX<c2W32O00001O00001O00000000000O101O0000000O1000000O1000000O10001N100
00O100O1000001O000000001O0O10001O0000001O00001O0O101O001O01O00100O1O1O100O001O1O100O1O1O00100O1O001O000010O01O000010O00010O00010O010O0010O01O10O01O010O010O00010O01O001O001O010O1O00001O0010O01O00001O01O01O00001O00001O001O001N2O1O001O1O0O2O1O001O0O2O001O001N101O00001O001O1O1N2O2N1O2N1O2N2N1O1O1O2N1O0O2O1O1O001O1@Q\\\\OBoc0=V\\\\O_Okc0?[\\\\O[Ogc0c0c0M2M5L3L5KTRQ1\"}}, {\"image_id\": 5, \"category_id\": 2, \"bbox\": [110.39572143554688, 576.2659301757812, 768.884765625, 63.27618408203125], \"score\": 0.9995645880699158, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"ndb31Ye0100O1O1O2O0000000O100000001O000O10000000000O1000001O0000000O100000000000000000000000000O10000000O10000000000000O100000000000000000000000O100000000000000000000000000000000O100000000000000001O000000000000000000000000001O00000000000000000O1000000000000000000000000O2O000000000000000000000000000000000O10000000000O1000000O10000O10000000000000O01000000000000000000000000000000000000000001O0000000O10000001O00000000001O000000001O0000000000000000001O000000000000000000O100000000000000O1000000O100000000O100000000000000O100000000O1000000O1000000O1000000O1000000000000O10000000000000000000O100000O100000000000O10000000000000000000000000000000000O10000000001O000000000O1000001O00000000001O000000000000001O0000000000000000001O000O1000000000001O0000000001O000001O0000000000001O000000000000000000000000001O00000001O0001O0000001O000001O01O0000000000001O00000001O000001O0000010O00000001O0001O0001O0000000001O00000000001O01O0001O001O1O1O2N001O01O01O001O001O001O00001O001O00ZfV1OfYiN1O2M2M4K5N1HA`[Od0Zd0Bd[O?[d0900O1O101N100000000O10000000000000000000000000000O1000000000000000000O100000O2O000000001O00001N101O001O003M1O000WOb[Oe0_d0ZOa[Of0bd00001O0000000O103M00001O00000O2N101N3N00001O0O106J1Oe^c4\"}}, {\"image_id\": 5, \"category_id\": 2, \"bbox\": [0.0, 267.4341735839844, 371.1473693847656, 93.2633056640625], \"score\": 0.9963753819465637, \"association_id\": 1, \"segmentation\": {\"size\": [683, 
1024], \"counts\": \"gn21Ye04S[OOYd03e[OOUd0Lc[O77MVd08i[OHWd09h[OFXd0;h[OEXd0<g[ODYd0<g[ODYd0<g[ODYd0=f[OBZd0?f[OAZd0`0e[O@[d0`0e[O@[d0`0e[O@[d0`0e[O@[d0`0e[O_O\\\\d0a0e[O^O[d0b08O000000000O10001O000000000O1000001O0000000O10000000000000000000000000000000000000000000000000000000000000000000000000000000001O0000000000000001O00000001O01O00000001O01O0001O00000000010O00001O01O01O010O000010O0001O0010O000010O0001O0001O0001O01O0001O0000001O01O00000001O00000000001O000001O0001O0001O00000001O0001O000000001O00001O001O1O00001O000O2O000O101O0O101N^e40cZK1jZOOPe02mZO0Re06O001O001Q[OGjd0=01N101O0O2O0O10000O1000001O000O10000000000O1000000O10000O100O2O0O100O100O10000O100000000O10001O0000000O1000000001O0000000000000000001O01O0001O001O00001O00001O00000O2O001O001O1N101O0000001N10000O2O2M2N5ES[O0Ue0N2Onlc>\"}}, {\"image_id\": 5, \"category_id\": 2, \"bbox\": [400.7513122558594, 232.8164825439453, 608.8956298828125, 123.11973571777344], \"score\": 0.9962999224662781, \"association_id\": 3, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"Rl_?1Ye0101N101N2O0O2O1O00001O0O10001O00000000000010O00000001O00000000001O0000000000001O01O00000001O01O000001O00000000001O0001O00000000001O00000010O0001O00001O00000000001O0001O0001O000010O0001O01O000000010O000000000001O0000010O001O00001O0000001O00000000001O0000001O001O000O2O00001N10]fR10bYmN1I70000O1000000O10001N1000000O1KC\\\\[O=dd050000O100000000O1000000O2O00000O100O1000000O100000000000000O1000O10000000000000O1000000000O1000001N100O2N101N1M;Doe9\"}}, {\"image_id\": 5, \"category_id\": 2, \"bbox\": [188.09559631347656, 338.4259338378906, 663.8469848632812, 154.83624267578125], \"score\": 0.9932865500450134, \"association_id\": 5, \"segmentation\": {\"size\": [683, 1024], \"counts\": 
\"el_83Xe02N1N2O00001OO100000001N100N3MgPV23UoiM4O1O1O001O000000001O00000000000000001O00000001O000001O0001O00001O0000001O00001O00001O000010O01O00000000001O000000000001O01O0001O00000010O0001O0001O010O01O1O001O00001O01O0001O0000001O000000001O000000000010O000000000010O0000000000010O0000000001O01O00010O00001O010O001O0010O01O000010O000001O0001O001O0O2O001O1O0O2O1O1O1N101N101NZQb11fn]N001N101N101N101N1O1O2N1O1N200O2O000O1000001O000O1000000000000O101O00000O10000O100O10000O100O1000000000000000000000000000000001O000O100000000000001N101N2O0O2N2M3N2Fecc3\"}}, {\"image_id\": 5, \"category_id\": 1, \"bbox\": [68.48746490478516, 167.83831787109375, 842.2171630859375, 468.9998779296875], \"score\": 0.9914278388023376, \"association_id\": 4, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"h^l23We02N2N2N2ROGg\\\\O;Xc0Eh\\\\O<Wc0Eg\\\\O=Xc0Ch\\\\O=Wc0Eg\\\\O=Xc0Ch\\\\O>Wc0Cg\\\\O?Xc0Ah\\\\O`0Wc0Ag\\\\Oa0Wc0Ah\\\\O`0Wc0@h\\\\Ob0Wc0_Oh\\\\Ob0Wc0_Og\\\\Oc0Xc0^Og\\\\Ob0Xc0@f\\\\Ob0Yc0_Of\\\\Oa0Zc0@d\\\\Ob0[c0_Oc\\\\Ob0]c0_Ob\\\\Ob0]c0@`\\\\Oa0_c0g0O00001O000O2O00001O000100O010O100O2O8H4L1O00O1O2O1N2N2O1N2N101N2N00100O001O0010O01O01O010O0010O010O010O0100O010O10O0100000O01000O10O10O1O010O010O0010O010O01O10O01O100O100O100O101N1O2O0O100O100O100O10O01O1O1O1O2N2N2N2N1O2N2N1O2N001O1O1O001O1O001O001O001O00001O001O0O101O0000001O00001O0000001O00000O2O0000001O0000001O000000001O01O00010O00ZN]LdAc3\\\\>]LeAc3Z>^LeAb3Z>_LfAa3Z>`LfA_3Y>bLgA^3Y>bLgA_3W>cLiA\\\\3a:bL[G2TN\\\\3V:oLcGGVNZ3Q:WMgG_OYNY3j9`MjGYO[NX3e9fMmGSO^NW3b9kMnGoN`NU3`9PNmGlNcNT3^9SNnGjNcNS3]9WNmGgNgNQ3Z9[NmGeNiNQ3W9]NnGdNjNo2W9_NnGbNlNn2T9cNnGaNmNl2T9dNnGaNnNk2S9fNnG_NPOj2T9eNmG_NPOl2T9eNlG]NROn2R9eNmGZNSOP3Q9eNmGYNTOQ3P9fNmGVNTOT3P9fNlGTNVOU3n8hNmGoMXOX3l8iNmGlMYO[3j8iNPJW1Q6iNoIV1R6jNmIV1S6kNmIU1S6lNkIT1U6mNjIS1W6nNhIQ1Y6oNfIQ1Z6POeIQ1[6oNeIP1[6QOdIo0]6QObIP1]6QOcIn0^6ROaIn0_6SO`Im0a6SO^In0a6SO_Il0a6UO^Ik0c6TO]Im0b6TO]Il0c6UO]Ik0c6TO]Il0c6UO\\\\Ik0e6UOZIk0g6TOYIm0g6SOXIm0i6ROWIn0i6SOWIm0i6ROWIn0j6ROUIn0l6QOTIo0n6POQIQ1o6nNQIR1
P7nNoHR1S7lNmHU1S7kNmHT1T7kNlHU1U7jNkHV1V7jNiHV1X7iNhHX1X7hNgHX1Z7gNfHY1[7fNfHY1[7gNdHY1\\\\7gNdHZ1\\\\7eNdH[1\\\\7fNcHZ1^7eNbH[1^7eNbH[1_7dNaH]1^7dNbH[1_7dNaH\\\\1_7dNaH\\\\1_7eN`H[1`7eN`H[1`7fN_HZ1b7fN]HZ1c7fN]HZ1c7gN]HX1c7iN\\\\HW1e7iNZHW1f7kNXHU1h7mNVHS1j7oNTHQ1m7POQHP1o7ROPHm0P8UOnGk0S8VOkGj0U8XOiGh0W8ZOgGf0Y8[OfGe0[8[OeGd0[8^OcGb0^8^OaGb0_8_O`Ga0`8@_G`0b8@^G?b8B]G>c8C\\\\G=d8C\\\\G=e8C[G<e8EZG;f8FYG:h8FWG:i8GVG9j8HUG8l8HSG8m8IRG7n8JQG6o8KPG5Q9KnF5R9MlF3T9NkF2U9OjF0X91fFOZ93dFM\\\\94cFL^95`FK`96_FJa97^FIb98]FHc98]FHd98[FHe99ZFGf9:YFFg9:YFFh9:WFFi9;VFEj9;VFEj9<UFDk9<UFDl9<SFDm9<SFDl9>SFBm9>SFBm9?RFBm9>SFBm9?SF@l9a0TF_Ol9b0SF^Om9b0SF^Om9c0RF]On9c0RF]Om9e0RF[On9f0QFZOo9f0QFZOo9g0PFYOP:g0PFYOo9i0PFWOP:i0QFVOo9k0PFUOP:k0PFUOP:l0oETOP:m0PFSOP:n0oEROQ:n0oEROQ:n0oEROQ:o0nEQOQ:P1oEQOf6hLfJW4dNQOf6hLfJX4cNPOf6jLfJV4dNPOf6jLfJV4eNoNe6kLfJV4eNoNe6lLeJU4fNoNe6lLeJU4fNoNe6lLeJU4fNoNe6lLeJU4gNnNd6nLeJS4gNoNc6oLfJR4gNoNc6PMeJQ4hNoNc6PMeJQ4iNnNb6QMeJQ4iNnNb6RMdJP4jNoNa6QMfJo3iNPOa6RMeJn3jNPOa6RMeJn3jNPOa6SMdJm3lNoN`6UMcJl3mNoN_6WMcJj3nNoN_6WMdJi3mNPO_6XMcJi3mNoN`6YMbJh3oNnN_6\\\\M`Jf3QOnN_6]M`Jd3QOoN_6^M_Jc3ROoN_6`M]Ja3TOoN_6bM[J_3VOoN_6dMZJ\\\\3XOoN^6gMXJZ3ZOPO]6hMWJX3\\\\OPO]6jMUJV3^OPO]6lMPHVO_1m34QO]6oMeG\\\\Oj1c35QO\\\\6dN^I[26QO]6dN]IY27SO\\\\6dN^IX26TO]6dN\\\\IX28SO\\\\6fN\\\\IV28TO]6fN[IU28UO]6gN\\\\IR28UOe6bNTIW27WOk6]NPIY25ZOn6[NoHX24\\\\On6\\\\NPIU22_On6]NSIP2OCo6\\\\NSIP2OCn6^NSIn1ODn6_NSIl1OEn6`NSIj10En6aNRIi10Fn6bNQIh11Fn6cNQIf12Fm6eNQId12Gm6fNPIc13Gm6fNQIa14Hk6hNQI_14Il6gNPI`14Il6gNQI_14Ik6iNPI^15Ik6iNQI]14Jk6iNQI]15Ij6kNQI[15Jj6kNQI[16Ii6mNQIY16Ji6mNQIY16Kh6lNSIX16Kg6nNRIW17Kg6nNSIV16Lh6nNQIV18Lf6nNSIU17Mf6oNSIS17Nf6oNSIS18Me6QOSIQ18Ne6QOSIQ18Od6QOTIo09Oc6SOSIn0:Oc6SOTIm090c6TOSIl0:1b6SOUIk0:1a6UOTIj0;1a6VOSIi0<2`6UOUIh0;3a6UOSIh0=2`6VOSIh0=2`6WOSIf0=4_6VOTIf0=4_6WOSIe0>4_6WOSIf0>3^6XOTId0>4^6XOTId0>4^6XOTId0>5]6WOUId0>5]6XOTIc0?5]6XOTIc0?5^6WOSId0?6]6VOTIe0>5^6VOTIe0>5^6VOTIe0>6]6VOTId0?6]6VOTIe0>5^6VOTIe0>6^6TOTIf0=7_6SOTIf0=7_6SOTIg0<7_6ROUIg0<7_6SOTIf0=7_6SO
TIg0<7_6ROUIg0<7`6QOTIh0<7`6QOTIi0;7`6POUIi0;7`6POUIi0;7`6QOTIh0<8_6POUIi0;7`6POUIi0;7`6POUIi0;8`6nNUIk0:7a6nNUIk0:7a6nNUIl097a6mNUIm097b6mNTIl0:7b6mNTIm097b6lNUIm0:6a6mNUIn096a6lNVIn096b6lNTIn0:6b6lNTIo096b6kNUIo096b6kNUIo097a6jNVIP195a6lNUIo0:6`6kNVIo0:6`6kNVIP196`6jNWIP196`6kNVIo0:7_6jNWIP1:6^6jNXIP1:6^6jNXIP1:7]6jNXIo0;8\\\\6iNYIP1:7^6hNXIQ1:8]6hNXIP1<7\\\\6iNXIP1<7\\\\6iNXIQ1;7\\\\6iNXIP1<7\\\\6iNXIP1<7\\\\6iNXIP1<8[6hNYIP1=7Z6jNXIP1=6[6jNXIP1=7Z6jNXIo0>7Z6jNXIo0>7[6iNWIP1?6Z6kNVIP1?5\\\\6jNUIQ1`05Z6kNUIP1a05[6jNTIQ1b04Z6lNSIQ1b03\\\\6kNRIR1c02[6lNRIR1d01[6mNPIR1e01\\\\6lNoHT1e0O\\\\6mNoHT1f0N\\\\6nNmHT1h0NZ6nNnHU1h0L[6nNmHV1i0KZ6POlHV1i0J\\\\6oNjHX1k0H[6POjHX1l0G[6POiHZ1k0F\\\\6QOhHY1m0E[6ROhHZ1m0C\\\\6ROgH\\\\1l0C\\\\6QOhH\\\\1m0B\\\\6ROfH]1m0A]6ROfH]1n0@\\\\6SOfH^1m0_O]6SOfH^1n0^O]6SOeH`1m0]O^6TOdH_1n0]O^6TOdH_1o0\\\\O]6UOdH`1n0[O^6UOdH`1o0ZO^6UOcHa1o0ZO^6UOcHa1P1YO]6VOcHb1o0WO_6WObHb1P1VO^6YObH`1Q1VO]6ZObH`1R1UO\\\\6[ObHa1S1QO\\\\6^OaHa1U1oNZ6@aHa1W1mNX6BaHb1X1jNX6C`Hc1Y1hNX6E`Hb1Z1gNV6G`Hb1\\\\1eNT6I`Hc1]1aNT6M^Hb1`1_NR6O^Hb1a1^NQ60^Hb1b1\\\\NQ62^Hb1a1[NR62]Hc1b1YNR64\\\\Hc1b1YNR64\\\\Hc1c1WNR66[Hd1c1UNR67\\\\Hc1c1TNR6:ZHb1d1SNS6;YHb1e1PNU6=VHd1e1mMV6?VHc1d1mMW6`0UHc1e1kMW6b0THc1e1jMX6c0THc1d1gM[6e0QHd1e1eM[6g0QHd1c1cM_6h0oGd1c1bM_6j0nGe1c1^Ma6m0mGd1c1\\\\Mc6o0kGe1b1YMe6R1jGd1c1WMe6T1iGe1c1SMf6X1gGe1e1oLg6[1eGf1f1iLi6`1bGg1g1cLj6f1_Gg1Y<XNhCh1W<XNiCi1W<VNjCi1V<WNjCj1V<UNkCj1U<VNkCk1U<TNlCk1T<UNlCk1U<TNkCm1T<SNmCl1S<TNmCm1S<RNmCn1S<RNnCm1R<SNnCn1R<QNnCo1R<QNnCo1R<QNoCn1Q<SNnCm1R<SNnCm1R<SNoCl1Q<TNoCl1Q<TNPDk1P<UNPDk1P<UNQDk1n;UNSDj1m;VNSDj1m;VNTDi1l;WNUDh1k;XNVDg1j;YNWDf1i;ZNYDd1g;\\\\N[Db1e;^N\\\\Da1d;_N^D_1b;aN_D^1a;bNaD\\\\1_;dNbD[1^;eNcDZ1];fNdDY1\\\\;gNfDW1Z;iNgDV1Y;kNgDT1Y;lNhDS1X;mNiDR1W;nNiDR1W;nNjDQ1V;oNkDP1U;POlDo0T;QOlDn0U;ROlDm0T;SOmDl0S;TOnDj0S;WOmDh0S;XOnDf0S;ZOoDd0Q;\\\\OPEb0Q;^OPEa0P;_ORE>o:BUE:k:GYE3h:M]EMd:3bEF_::gE_OZ:a0kEWOX:j0oEhNW:X1SFWNT:i1g400001O00000000001O00000000001O0000000000001O000000000000001O00000001O0000000001O00000000000001
0O000000000010O00000001O0001O01O0000010O00001O01O01O00001O010O001O001O01O01O00010O01O010O0010O01O0010O01O2N1N3M3N2M3YO_\\\\O_Occ0?c\\\\O[O`c0a0h\\\\OXO[c0e0h0L4K5L5JQ^\\\\2\"}}, {\"image_id\": 5, \"category_id\": 2, \"bbox\": [661.0241088867188, 492.22418212890625, 30.83856201171875, 8.81951904296875], \"score\": 0.9466493129730225, \"association_id\": 2, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"`fn=3Xe00O1O100O1O1O0100000000O10000O10000O1O1O_Yn6\"}}, {\"image_id\": 5, \"category_id\": 1, \"bbox\": [187.6157684326172, 258.40252685546875, 670.5450439453125, 211.673095703125], \"score\": 0.9218573570251465, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"[jo63We03M4L2N1O2O0O101O00010O00001O001O010O009G1O1O1O1m[OROgc0o0V\\\\OTOic0m0T\\\\OVOkc0j0T\\\\OWOlc0U1O010O001OO10000O1DT\\\\OYOlc0f0W\\\\OWOjc0i0W\\\\OVOic0i0Y\\\\OUOhc0k0Y\\\\OSOhc0l0<O100O100O100O10`MWOZ^O3e1f0Q`0Gn_O8R`0Jm_O6S`0Kl_O5S`0Ma^O\\\\Oh0g0g`0N^^O]Ok0d0h`0OZ^OAm0`0i`00V^ODP1<i`02R^OFU18i`0e0V_OZOk`0g0U_OXOk`0i0T_OWOl`0j0S_OUOn`0l0Q_OTOo`0n0o^OROQa0o0n^OPOSa0Q1l^OoNTa0S1j^OlNWa0U1i^OjNWa0W1h^OiNXa0W1h^OhNYa0X1g^OhNYa0X1g^OgNZa0Z1e^OeN\\\\a0[1e^OdN[a0\\\\1e^OcN\\\\a0]1d^OcN\\\\a0^1c^OaN^a0_1b^OaN^a0_1c^O_N^a0a1a^O`N_a0`1a^O`N_a0a1`^O_N`a0a1`^O_N`a0a1`^O_N`a0a1_^O`Naa0a1^^O_Nba0a1^^O_Nba0b1]^O]Nda0c1[^O^Nea0b1[^O^Nea0c1Z^O]Nfa0c1Y^O^Nga0d1W^O\\\\Nia0h1S^OXNma0k1o]OVNQb0l1m]OTNSb0m1l]OSNTb0n1j]ORNWb0o1h]OQNXb0P2f]OQNZb0o1f]OQNZb0`20001O01O01O010O00010O0010O01O010O001O01O01O0001O01O000010O0001O0010O01O1O0010O01O000010O0001O01O000001O000000001O000000001O00001O1O1O1N2O1O1O1O0001N1N2O2M2O1O2O0O101N10001N10000O2O000O2O00000O10000001N1000000O10000O10000O101N10000O100O1O1O1001O1O010O001O001O10O01O1O1O10O0001O0001O0001O01O0001O0010O01O0010O01O00001O01O000001O01O000000010O0000001O00001O00001O0001O01O0000001O00000000001O00000000001O00001O00001O00001O00001O0000001O00000000001O000000001O00001O00001O001O00001O000O110O0000001O0000001O00001O010O00001O001O001O000010O001O00001O0
010O0001O0001O01O0001O000010O01O00000010O0000001O000010O01O1O1O1O001O1O0010O01O0000001O0000000001O00000001O00000000010O00000oLS^Od2ma0[MU^Od2ka0[MW^Od2ja0ZMX^Oe2ia0YMY^Of2ha0XMZ^Og2ga0WM[^Oh2Ub0N2O1O001N2O2N1lMd]OV1^c0K6J7H2O1O1N2N2N1O2N2@][O6nd0M2N2NPod4\"}}, {\"image_id\": 5, \"category_id\": 1, \"bbox\": [38.57219696044922, 291.64556884765625, 510.69110107421875, 297.17034912109375], \"score\": 0.8228972554206848, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"\\\\iY2:od04L3M2N3L3O2M2\\\\_OVOf<l0WCWOf<k0WCYOg<i0VCYOi<h0WCYOf<i0ZCWOe<k0ZCUOd<m0\\\\CSOb<o0^CQO_<S1`CnNZ<W1fCiNg;k1XDUNd;o1\\\\DQN`;T2_DlM];Y2bDhMV;_2jDaMS;c2lD]MS;d2mD\\\\MR;f2mDZMR;g2nDYMQ;h2oDXMP;j2oDWMo:j2RETMo:l2QETMo:l2RERMo:o2QEoLP;Q3PEoLP;Q3QEmLP;S3QElLo:U3QEiLo:X3REfLo:Z3REeLn:[3a31O0000001N10010O01O101N10001N2O1N2O1N101N100O0010O01O010O1O10O0101N101O0O10001OO0100O1O10O010O010O010O100O010O1O100O010O0010O0010O01O3N1N3N1N2O1N100O100O10000O010O010O010O010O01O010O01O01O010O1g^ObMd?]2\\\\@cMd?^2Z@dMe?\\\\2[@dMf?\\\\2Y@eMf?[2Z@eMg?[2W@fMj?Z2U@fMk?Z2T@hMl?X2R@iMn?X2P@iMP`0W2n_OkMS`0U2i_OnMW`0S2g_OoMX`0Q2g_OPNZ`0P2e_OPN[`0Q2c_OPN]`0P2b_OQN_`0o1`_OQN``0P2^_ORNa`0n1__ORNa`0o1]_ORNc`0n1]_ORNd`0n1Z_OSNf`0m1Z_OSNf`0n1X_OSNh`0m1X_OSNh`0n1V_OSNk`0l1T_OUNl`0l1R_OUNn`0l1o^OWNPa0i1n^OYNSa0g1j^O[NVa0f1f^O]NZa0j2001O00001O001O001O1O010O001O1O001O00001O001O0001O000001O000000010O000000001O0001O01O0001O00001O00001O00000010O000001O00001O00001O00001VNWLPBi3g=bLWB^3h=eLVB\\\\3h=gLVBY3V;VLkEb0nNY3U;WLkEc0nNV3V;YLjEc0oNT3V;[LiEd0oNR3V;\\\\LiEe0oNo2W;^LiEe0nNm2Y;_LhEd0oNm2X;aLhEb0POm2X;bLgE`0ROo2U;cLhE>SOo2T;fLgE:VOP3S;hLeE8YOo2Q;lLdE4\\\\OP3P;mLcE3]OP3P;oLaE0@Q3n:RM`EMBR3m:RMaEJCT3l:TM_EHET3l:UM^EGFT3k:WM^EDIT3i:ZM\\\\EBKT3i:[M[EALT3i:\\\\MZE_ONU3h:]MYE^OOU3h:^MYEZO1Y3d:^M[EWO3[3b:_M[ESO6]3_:aM[EoN8`3]:aM\\\\EjN;e3Y:bMaG^2_8bMaG^2_8cMaG\\\\2_8dMaG\\\\2_8eM`G[2`8fM_GZ2a8fM_GZ2a8gM_GX2`8iM`GW2`8jM_GV2a8kM^GU2b8kM^GU2b8lM^GS2b8mM^GS2b8mM^GS2b8mM_GR2a8nM_GR2b8mM^GS2b8nM^GQ2b8oM^GQ2b8oM_Go1b8QN^
Go1b8QN_Gn1a8RN_Gn1a8RN`Gm1a8RN_Gn1a8RN_Gn1a8RN`Gl1a8UN^Gk1b8UN^Gk1b8UN_Gj1a8VN_Gj1a8WN^Gi1b8XN]Gh1c8YN]Gf1c8[N\\\\Ge1d8\\\\N[Gd1e8^NYGb1g8`NXG_1h8bNWG^1i8dNUG\\\\1k8fNTGY1l8hNSGX1m8jNQGV1o8lNPGS1P9nNoFR1Q9QOmFn0S9TOkFl0U9VOjFi0V9YOhFg0X9[OfFe0[9ZOeFf0[9[OdFe0\\\\9[OdFe0]9[OaFf0_9ZOaFf0_9ZOaFf0`9ZO_Ff0a9ZO_Ff0b9ZO]Fg0b9YO^Fg0c9YO\\\\Fg0d9YO\\\\Fg0e9XO[Fh0e9YOYFh0g9XOYFh0h9XOWFh0i9XOWFh0j9WOVFi0j9XOVFg0j9ZOUFf0k9ZOUFf0l9ZOSFf0m9[OSFd0m9]ORFc0n9^OQFb0P:]OQFb0o9@oE`0Q:AnE?R:BnE=R:DmE<T:DkE<U:FjE9V:HiE8W:IhE7Y:HgE8Y:IfE7Z:JeE6[:JeE6[:KdE5]:KaE6_:JaE6`:J_E6a:K]E6d:I\\\\E7d:JZE7f:JYE6h:JVE7j:JTE7l:ITE7m:IQE8o:IPE7P;JoD5R;KmD6S;KlD5T;LkD4V;KjD5V;LiD4W;LiD4W;MhD4W;cMWCT1b1Y1W;aMYCW1_1X1X;`M[CW1]1Y1X;_M\\\\CY1[1X1Y;_M\\\\CZ1Z1W1Z;^M]C[1Y1W1Z;^M^C[1W1W1[;]M_C\\\\1U1X1\\\\;\\\\M_C]1T1W1];\\\\M_C^1S1V1^;\\\\M_C^1S1V1^;[MaC_1P1V1_;[MaC_1P1V1_;\\\\M`C^1Q1V1^;^M_C^1R1T1^;`M_C\\\\1R1U1^;bM]CZ1U1T1];eM[CX1X1S1\\\\;7cDJ\\\\;7dDI[;8dDI[;8eDHY;:gDFX;<fDEY;<gDDX;=gDDW;>iDBV;?iDBV;?jDBU;>kDBT;?kDBU;>kDBU;>lDAT;`0kD@U;`0kD@U;`0lD^OU;b0kD^OU;b0kD^OU;c0kD\\\\OU;d0kD\\\\OU;d0lD[OT;e0lD[OT;f0kDZOU;f0kDZOU;f0lDYOT;h0kDXOU;h0kDXOU;i0jDWOV;j0iDVOW;i0kDVOU;i0mDVOT;g0nDXOT;e0oDYOS;TNdCl0]1n0S;nMgCo0Y1P1W;gMhCR1V1T1h<RMgAo0R2e1o<[MTDX2\\\\jl9\"}}, {\"image_id\": 5, \"category_id\": 2, \"bbox\": [125.18354034423828, 297.9114990234375, 770.719482421875, 352.4368896484375], \"score\": 0.8228054046630859, \"association_id\": 4, \"segmentation\": {\"size\": [683, 1024], \"counts\": 
\"[QZ82Ye0001O001O1O1O1O1O1O2N2N1O000O101O0000001O00000000001O000O100000001O000000000000001O0000000000000001N100000001O000000001O00001O1O001O00001O000000O1O100O1O101N10000O1000000O2O0000000O100000001O000O10000000000001O0O1000000000000000000000000000000000000000000000000000000000000000000000000000000001O0000000000000001O00001N10001O00001O1O001O001O00001O001O00000O2O00000000001O000000001O0000000O2O0000001O000O10001O0O101N100OkfV21TYiM2O1N1000000000O2O0000000O1000000000001N100000ZPd00fo[O00000l``00R__O3N2O0000000O10001O000000000000001O000O1000001O000000001O0001N2N3L3N^co3\"}}, {\"image_id\": 5, \"category_id\": 1, \"bbox\": [110.36141967773438, 376.61083984375, 683.689208984375, 240.19525146484375], \"score\": 0.6303569674491882, \"association_id\": 5, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"\\\\]^21Te0;I;F5L2N1W\\\\OTOQc0m0m\\\\OUOQc0m0n\\\\OUOPc0l0n\\\\OVOQc0k0m\\\\OXOQc0k0j\\\\OYOTc0i0i\\\\OZOUc0i0g\\\\OZOWc0^1O2N1O1O001O0O2O1O001O1O001O1O001O00001O00001O001O001O001O001O1O001O001O001O00010O00001O000010O0001O0010O0100000O10O1000O01000O010O101N2O1N2O0O2O001N3N4L2N0000000000O01000O010O10O00010O010O00010O01O010O001O01O01O0010O01O001O10O0001O0100O010O1O10O010O10O01O100T^ObMi`0_2U_OdMi`0]2U_OfMj`0Z2U_OgMj`0Z2T_OhMk`0Y2S_OiMl`0X2R_OkMl`0U2S_OSNg`0m1W_OTNi`0m1V_OSNj`0o1S_ORNm`0o1P_OSNPa0n1m^OTNSa0m1j^OTNXa0m1d^OTN]a0m20O101O0O1000001O00000000001O000O10000010O0001O000010O01O001O10O01O001O010O000010O00000001O0001O000000000001O00000001O00000001O0000000001O000[N_L`Ab3^>_LbAa3^>_LbAb3\\\\>_LeA`3[>`LeAa3Y>`LgA`3X>aLhA`3W>`LiAa3U>`LkA`3T>aLlA`3S>`LmAa3Q>`LoAa3P>_LPBb3n=`LQBa3n=_LRBa3n=_LSBa3l=_LTBb3k=^LUBb3k=^LUBc3j=]LVBd3i=\\\\LWBd3i=\\\\LWBe3h=[LXBe3i=YLXBg3m=TLSBl3g?O0001O0001O0000000010O00000001O0000010O00001O00001N10001O0000001O0000001O0000001N10000000001O0000001O0O10001O00001N10001O000O10001O0000000O10000000000000000O1000001O00000000000000000O10000000000000000000000001O00000000000^MjLnBW3Q=kLnBU3R=mLlBS3S=oLlBQ3S=RMkBn2U=SMjBm2V=SMjBm2U=UMjBk2V=UMjBk2V=UMjBk2
W=TMiBl2W=TMiBl2W=TMiBl2W=TMiBl2W=TMiBm2W=QMjBo2V=QMjBo2V=QMjBo2V=QMjBo2W=PMiBP3W=oLjBQ3V=oLjBQ3V=nLkBR3V=lLkBU3T=jLmBV3S=jLmBV3S=hLoBX3R=fLoBZ3a?001O00000001O0001O00000001O000001O0000000001O00000001O000001O001O00001O001O001O00001O00001O0000001O000001O01O000000001O0000010O00001O000010O000001O01O000000001O00001O00001O01O01O0000001O000\\\\MlLQCT3n<nLQCR3m<PMSCQ3k<PMUCP3k<PMUCP3j<RMUCn2k<RMUCn2h<UMXCl2c<XM]Ch2a<ZM_Cf2`<\\\\M_Cd2`<]M`Cc2_<^MaCc2^<]McCb2\\\\<`McC`2]<`McCa2[<`MeC`2[<`MeC`2[<aMdC`2Z<aMfC_2Z<aMgC_2X<bMgC^2X<cMhC^2W<bMiC_2V<bMiC^2W<bMiC^2W<cMhC^2V<cMkC\\\\2U<dMkC]2T<dMkC\\\\2l;cLQBR1R2\\\\2l;eLoAo0U2\\\\2l;dLPBP1U2[2k;dLQBQ1T2[2T<eMlC\\\\2R<eMoCZ2P<gMPDZ2n;gMSDX2m;hMSDX2m;iMRDX2l;gMiAQOV2W3Q<hMiAROU2V3R<hMiAROV2V3P<gMkASOU2V3P<gMkATOT2U3Q<gMkATOU2T3P<hMkATOU2T3P<hMkATOU2U3o;gMlAUOU2S3o;hMlAUOU2S3o;hMlAUOU2S3o;hMlAUOV2R3n;iMlAVOU2Q3o;hMmAWOT2Q3o;hMmAWOT2R3n;gMnAWOU2Q3m;hMnAWOU2Q3m;iMmAVOV2Q3m;iMlAXOW2n2l;kMmAWOW2n2l;YNTDg1l;YNUDf1k;ZNVDe1j;\\\\NUDd1j;]NWDb1i;^NXDa1h;_NXDa1h;`NXD_1g;bNZD]1f;cNZD]1e;dN\\\\D[1d;fN[DZ1d;gN]DX1c;iN]DV1c;jN]DV1c;jN^DU1b;lN]DT1c;lN^DR1b;oN^DQ1b;PO^Do0b;QO^Do0b;QO^Do0b;RO^Dm0b;SO_Dl0a;UO^Dk0b;UO_Dj0a;WO^Di0b;WO_Dh0a;YO_Df0a;[O_Dd0a;]O_Db0a;_O_D?b;A_D>a;C_D<a;E^D;b;E_D:`;H`D7`;J`D5`;KaD4_;LbD2_;O`D1`;OaD0_;1aDN_;2aDN_;2bDM^;4aDK`;5`DK`;5aDI`;7aDG`;:_DEb;;_DDa;<_DCb;>^D@c;`0^D]Od;c0]D[Od;e0]DYOd;h0\\\\DUOf;k0[DSOf;m0\\\\DoNf;Q1[DlNg;T1ZDjNg;W1YDfNi;Z1YDcNh;]1YDaNh;_1ZD]Nh;c1YD[Nh;f1YDWNh;i1YDUNg;l1ZDPNi;P2XDmMk;R2VDkMl;U2UDgMn;Y2TDcMn;]2SD_MP<a2]31O0000000000001O00000000001O0000001O0000000001O00000000000010O0001O001O1O001O1O1O1O1O1O2M4L3M3M3L3N3L5K9F<EXei4\"}}, {\"image_id\": 5, \"category_id\": 1, \"bbox\": [0.0, 171.3577423095703, 443.69775390625, 207.75233459472656], \"score\": 0.4424522817134857, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": 
\"c6c1cc0=F9H4L2N2O1N2O001O1O001O1O001O001O00001O000010O00000O10001O000000001O0000001O0000000000001O00000000001O0000000O10O10000O1000000O100000000O10000000000O10000000000O2O0000000O10000O100N2O1N2M3O1N2O1O1O100O101N1O1O2N1O2N2N1O2N101N10001N1000001O001O001O001O1O001O00001O0000001O0001O0000010O0XL\\\\_Oi2d`0SMa_Ol2``0oLe_OP3[`0mLi_OS3Xa0O001O001O1O1O001O001O0010O01O001O00001O001O00001O001O0000001O00001O01O0001O010O001O010O1O0010O01O0010O01O1O001O1O010O001O001O01O01O00001N101O1O1O1N2O2N1O1O0O2O001O001N101N101N2O2M3N1N2N2N2O1N1O2O001N101N101N2O1O1N2O1O0O2O001N101O000O101O0O2O0O2O1O0O2N1O2O0O2N1000000O100000000O10000000000O10O101O0O1O100O1O1O1O1O1N2M300O10000000O10000N2I7O1O1O101N10000O02O000O101N1bM1__O0``02__ON``05R_OH`N4]b06P_OK_N0`b08m^O6Sa0Nh]O\\\\Oc0i0ca00a]O^Oe0g0ja06Q^ONna05n]OMRb04k]ONUb02j]OOWb0Oj]O0Xb0Og]O2Zb0Mf]O3\\\\b0\\\\O^]OF6n0bb0RO^]OO0o0Wc0oNj\\\\OQ1Wc0mNj\\\\OR1Xc0mNh\\\\OS1Yc0kNh\\\\OU1Yc0jNf\\\\OV1[c0iNf\\\\OW1gc000H800O200O2N2N101N2O1N1O001O001O000001O01O0000000010O0000010O001O00010ON3M3N2M3KfnS=\"}}, {\"image_id\": 5, \"category_id\": 2, \"bbox\": [611.4265747070312, 592.3743286132812, 220.03912353515625, 54.2921142578125], \"score\": 0.33885282278060913, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"]ih<9Qe02N1000001O000000001N10000000001O000001O01O0001O01O000000010O000001O000001O00010O0000010O0000010O0001O0O2M4Jb[a0Mad^O5M2N2N3M2O10000O10001O0M300O1N2O100O2N1O1O10000O101N100O101N1000000O100O100O10000O10000O1000000O1000000O100000000001O0O10000000001O001O00000000010O00000000001O0000001O00001O00001O001O000000001O001O001O000O2O00001O0O2O0000001O1O2M2O1O001N101O2N1O1N2O1O0O2O2M_^U4\"}}, {\"image_id\": 5, \"category_id\": 1, \"bbox\": [339.7050476074219, 139.44871520996094, 684.294921875, 202.71556091308594], \"score\": 0.24377559125423431, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": 
\"b[T7>id0?C3N1N3N2N1O101N1000010O00000001O00000010O00000001O01O00002N2O0O1O1OM4L3O1O2O0O2N101N2O0O2N2O0O2I6M4N2N1O2N2O3LWma0NlR^O001O0O2O001N101O1N101N100O2N100O100O101N10000O10001N100O101N100N3M2N2O2N101N101O0O2O001O00001N10001O00001O001O1O1O1O1O1O2N1O1O1O001O001O001O00001O00001O001O1O1O1O1g^OQNT?Q2i@PNW?Q2h@oMX?R2f@PNY?P2g@PNY?Q2f@oMZ?R2e@oMZ?Q2f@oMZ?R2d@oM\\\\?Q2d@PN[?Q2d@oM\\\\?Q2d@oM\\\\?Q2d@PN[?Q2U_OgMW18d?Q2T_OhMX17d?Q2R_OkMY15d?Y2\\\\@gMe?Y2Z@gMf?Y2Z@gMf?Z2Y@gMf?Y2Z@gMg?Y2X@gMh?Z2X@fMg?Z2Y@fMh?Z2W@fMj?Z2U@fMl?Z2S@gMn?X2Q@hMP`0W2P@iMR`0V2m_OkMS`0U2l_OkMU`0T2k_OlMV`0T2i_OmMV`0T2i_OlMX`0S2h_OmMY`0S2f_OmMZ`0S2f_OnMZ`0Q2f_OoMZ`0R2e_OnM\\\\`0Q2d_OPN[`0P2f_OnM\\\\`0Q2d_OoM\\\\`0R2c_OmM_`0R2a_OnM_`0R2a_OnM``0R2__OmMb`0S2^_OmMc`0R2^_OlMd`0T2[_OlMf`0S2Z_OlMh`0T2W_OlMj`0S2W_OkMk`0T2U_OjMn`0V2Q_OhMQa0X2P_OeMSa0[2o0O010O01O010O010O00010O00100O0`]OgMoa0Z2n]OiMPb0Y2l]OjMTb0W2h]OlMWb0e20O1O1O0010O01O0000001O000001O01O0000000010O00000001O00010O001O000010O000001O01O00001O000010O0001O00001O01O01O00001O000010O000001O01O01O00010O00010O001O00010O00001O0010O0001O000000O1000001O0O100000000O101O000000000O2O000000001O0000001O0000001O000000001O000000000000001O000000000000001O000000001O00001O00001O0000001O00000000010O000001O00001O00001O00001O000000001O000000000000001O0000000000000O1000001O0000000O100000001O0O1000001N10001N10001N10000O2O000O10001N100O10000O110O0001O01O01O0001O01O01O010O001O01O01O0001O01O0001O00010O1O010O10O01O10O01O10O00010O0001O0000010O0000001O00001O00010O001O1O1O001O1O010O001O001O01O01O001O010O001O1O010O001O10O0001O01O0001O01O000001O000000001O00001O00001O001O1O001O001O001O00001O00001N101O001N2O1O1O1N3N5K3L2O1O1N2O0O2N2O0O2O000O2O0O2O0O2N2O0O2M3N5J8Do\\\\P1\"}}, {\"image_id\": 5, \"category_id\": 2, \"bbox\": [334.4683532714844, 659.0872802734375, 35.39794921875, 14.14544677734375], \"score\": 0.23537054657936096, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"[hR74Qe0600O10000000000000O100000000000000000000O1000000O2O0O4Kkld=\"}}, 
{\"image_id\": 5, \"category_id\": 2, \"bbox\": [52.922122955322266, 456.42474365234375, 816.8065795898438, 180.70361328125], \"score\": 0.12839113175868988, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"Z\\\\]81Ze000001O000O10001O000O101O000000000000000O100000000000000O100000000O1000000O1000000000O010000000000000000000000000000000000000000000000000000000000O2O0000000O100000001N10000000000O1000001O00000O1000000000001O00000001O000000000001O000001O0000000000000000000001O000001O000000000000000000000001O000001O0000000000000000000000001O000001O0001O0000001O001O001O1O001O10O01O001O00010O0000001O00000O2O00000O2O0O101N1OlfX20UYgM1O0000000000000O10000000000O101O0000000O100001O0O101O00001O0O10Ujf5\"}}, {\"image_id\": 5, \"category_id\": 2, \"bbox\": [207.57843017578125, 230.80824279785156, 811.5284423828125, 199.32884216308594], \"score\": 0.10859136283397675, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"`a`?1Ze0000O2O0O100O2O0O10000O2O0O10000O101N10000O10001N1000000000010O000001OZ[OC^d0>_[ODad0c01O0001O01O000000001O00000000001O000001O000001O0O10000000001O00000000001O00000000001O0000000000001O1O1N2O1O1O1O00ogk3\"}}, {\"image_id\": 5, \"category_id\": 2, \"bbox\": [601.9927978515625, 267.8998107910156, 422.002197265625, 94.41680908203125], \"score\": 0.09623737633228302, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"fV_?3Ue05L3O1O00001O001O00001N10000000001O000001O000000000000001O00000001O00000000001O0000000000001O0001O00000000001O0000001O00001O0001O000001O01O0000000000000001O01O0001O00010O0000010O00010O00000010O00000010O00010O000000010O0000001O00001O010O0000001O0001O00000001O0001O0001O0000001O0000001O00001O1O001O1O0O3N1O00cPj02YoUO3N2O3L2O0O100O100O100O1O1O1O1O101N100O100000O10O1000000O10000000000O1000000O10000O2N100O1O1O101O0O1000001O000000001O0000000010O0000000000000000000000001O0000001O2M2O0O2De[O_O30Yd09b[OJRe0MX;\"}}, {\"image_id\": 5, \"category_id\": 1, \"bbox\": [0.0, 
582.7400512695312, 24.798568725585938, 39.28240966796875], \"score\": 0.07599805295467377, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"[b0767Wd0e0K0O2O00000001O0O2O2N1N2O1N1O2N3N2M3M3M2N2N[Rkd0\"}}, {\"image_id\": 5, \"category_id\": 2, \"bbox\": [665.31103515625, 489.6892395019531, 24.64398193359375, 8.993255615234375], \"score\": 0.07086199522018433, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"afn=1Ye01O100O1N2O1O1O100000O1000000O10000O2Nknn6\"}}, {\"image_id\": 5, \"category_id\": 2, \"bbox\": [656.1738891601562, 482.9328308105469, 33.0264892578125, 18.4420166015625], \"score\": 0.06851599365472794, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"afn=2Xe01O1O100O1O1O0O2N200000O1000001N1O101Nlnn6\"}}, {\"image_id\": 6, \"category_id\": 1, \"bbox\": [177.52284240722656, 15.191585540771484, 254.07057189941406, 373.7381896972656], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [1024, 768], \"counts\": 
\"lQQ64jo06J5K7I3M4M2M3N1O100O2N100O2O0O101N2N3M3M2M4M2M3N101WYOaNo>_1n@hNn>X1n@POm>Q1n@YOm>g0o@Am>?PAFn>;o@In>8PALn>4QAOm>2PA1o>OPA3o>No@4P?Mn@5Q?Lm@6R?Lk@6T?Kk@6S?Lk@5U?Lj@5U?Li@5W?Kh@7W?Jf@8Z?He@:Z?Fc@=]?Da@>^?Ba@?_?A`@a0_?@_@b0`?^O_@d0`?]O^@e0a?\\\\O\\\\@g0c?ZOZ@h0f?ZOT@k0k?WOn_Oo0Q`0SOi_OQ1W`0POe_OS1[`0nNb_OU1]`0lNa_OU1_`0lN^_OV1b`0lNY_OW1g`0kNS_OY1m`0iNZ\\\\On3fc0V400O10000001O01O0000000000000O1000000000000000000O1000000O100000000O10000000000O10000O10000O1000000000000O100000000000000000000000000001O0000001]G\\\\\\\\Oh4dc0UKe\\\\Oe4[c0ZKj\\\\Ob4Vc0]Kn\\\\O_4Tc0_Kn\\\\O`4Rc0_KQ]O_4Pc0`KR]O^4ob0`KV]O\\\\4jb0cKh]Ol3Yb0RLP_Of2Pa0YM]_O[2d`0cM`_OZ2``0eMd_OX2\\\\`0fMh_OX2X`0hMj_OV2W`0hMl_OPNgKd3]d0[Nb@b1^?^Nf@^1Z?aNk@Z1V?fNm@W1S?hNPAV1P?jNRAT1n>kNTAS1m>mNTAS1k>mNVAR1k>mNVAR1j>nNXAP1h>POYAo0g>QO[Am0e>RO\\\\An0d>RO]Am0c>SO^Al0b>TO^Al0c>RO_Am0a>RO`Am0a>RO`An0`>QOaAn0a>oNbAP1^>oNcAP1^>oNcAQ1^>mNcAR1_>lNcAS1^>lNbAU1^>jNcAV1]>hNdAX1^>fNcAZ1]>dNeA[1[>eNeA[1\\\\>dNeA\\\\1[>cNfA\\\\1Z>cNiA[1X>cNjA[1X>cNjA\\\\1V>bNnA[1T>aNRB[1P>aNUB]1l=_N[B\\\\1g=aN_BY1e=dN_B1oGd0he0UO]BMWHh0de0ROSCf0Xf0K4L4K5Jcdb;\"}}, {\"image_id\": 6, \"category_id\": 2, \"bbox\": [320.2629089355469, 383.12542724609375, 327.4301452636719, 553.1818237304688], \"score\": 0.9999860525131226, \"association_id\": 1, \"segmentation\": {\"size\": [1024, 768], \"counts\": 
\"g\\\\R:c0Wo0k0RQOUOXm0P2I6K4N2N2N2M4M2N3M2N2M4M2N2N2M3N3M3M3L5L5K3M2M3N1O1O1O1O2N102M2N3M3M2N2N2N1O1O2N3L4M3M4L3M3M2M4M2N101N1O2N2Q[OoIk?R6k_OdJi?]5m_OSKk?o4m_O]Km?e4l_ObKS`0`4f_OfKX`0\\\\4d_OgK[`0Z4c_OhK]`0X4`_OkKa`0T4]_OmKd`0S4Y_OoKm`0l3P_OVLVa0d3h^O_L]a0\\\\3`^OfLda0W3Y^OkLka0Q3R^ORMPb0m2l]OWMVb0f2i]O[MYb0d2d]O^M^b0`2a]OaMbb0^2Z]OeMhb0Y2V]OiMkb0V2R]OlMPc0T2k\\\\OPNVc0Q2d\\\\OSN]c0Q2X\\\\OUNhc0P2j[OZNVd0j1^[O]Nbd0b6O1O0001O1O1O1N10001N10O010000000000100010O000O10O000001O1O101N2O1O0O10O01O00001N2O2N2N2O1N2N1O100O1O001O1O2N1N3N2N2M2O1N1O2O0O2N2N2N3M3M2N1O1O2O0O001O1O2N1O2N1O3M2N1O2N1O010O001O1O010O001O0010O100O100O2O1O1O1O00000000O00100O0010O01O010O001O1O2N1O2N2N2N2N2M3N1O1O1O100XKiXOe0Xg0XOkXOg0Vg0VOnXOh0Tg0TOoXOk0Sg0ROoXOn0Rg0nNQYOQ1Qg0lNQYOS1Qg0iNRYOW1nf0gNTYOX1mf0fNUYOY1lf0fNUYOY1lf0fNUYOX1lf0iNSYOW1nf0hNSYOW1nf0hNSYOW1mf0iNTYOU1nf0jNTYOT1lf0lNUYOS1kf0mNVYOR1kf0mNVYOQ1kf0POVYOn0kf0POWYOo0if0QOXYOn0if0QOYYOm0gf0SO[YOk0ff0TO\\\\YOj0ef0UO]YOi0df0VO^YOh0cf0WO`YOf0af0XObYOf0`f0XObYOf0_f0YObYOf0`f0XOaYOf0af0YO`YOf0`f0ZOaYOe0`f0ZObYOc0_f0]ObYOb0_f0\\\\OcYOb0^f0^OdYO`0]f0_OeYO>\\\\f0BeYO<]f0CeYO:\\\\f0FhYO5Yf0KkYO0Wf0OlYOMUf03nYOITf07oYODRf0<RZO\\\\ORf0d0ZZOiNne0W1]4010O01O010O011N10001O1O1O0001O1O001N1O1O1O2O0O1O1O2N1O101N1O2O8H9Fi0XO4L7If0[O0O0100O0O1O001O001N101O001O001O1N2M4L4@a0VOoYh3\"}}, {\"image_id\": 6, \"category_id\": 2, \"bbox\": [273.8427429199219, 342.3031921386719, 292.8612976074219, 457.7463684082031], \"score\": 0.9763782024383545, \"association_id\": 0, \"segmentation\": {\"size\": [1024, 768], \"counts\": 
\"_\\\\f85jo0>B2O1N2O1O1N2O1N2O001O1N2O2N2M3N2N1O1O1N101O1O1O2N2N3M2N2N2N1O1O1O1O2N2N4L6J5K3M2N2N1O2N1N4M3M4L2N2N1N2O1O1N2N2O2M4L4L4L3M2N2N3N1O2M3N3M3L4M2N2M3N2N2N1N4M2N3M3M2M3N2N1O1O1O1O2N2N2N3L3N2N1O1O2N1O2N3M4R[O\\\\JX?h5[@fJ_?^5Z@jJb?Y5X@mJe?U5W@PKf?R5V@QKi?P5T@SKk?n4R@UKm?l4P@VKQ`0j4k_OZKT`0g4h_O]KW`0d4e_O_K[`0b4a_OaK_`0`4^_OcKa`0]4]_OeKc`0\\\\4[_OfKe`0Z4Y_OgKh`0Y4V_OhKk`0X4S_OiKo`0W4n^OjKTa0V4h^OlKZa0aL]^OR65^MRc0a2k\\\\OaMWc0_2e\\\\OdM[c0\\\\2b\\\\OfM_c0Z2^\\\\OhMcc0X2Z\\\\OjMgc0W2V\\\\OjMkc0V2R\\\\OlMoc0U2m[OmMTd0U2f[OnM\\\\d0j62N3M1O2N1O1O1O1O1O1O1O2N2N2N1O1O2N1O2N2N3M3M3M3M2N1O2N1O2N3M3M3M3M2N3M2N1O2N1O2N3M2N3M3M2N1O1O1O1O2N2N2N4L4L3M3M2N1O2N2N2N2N2N2N1O2N2N2N1O2N2N2N2N2N3M3M1O2N1O001O001O001O001O001O001N101O1O1O1O1O2N1N2O1O0O2O1N2O0iNcUOjL_j0m2mUOnLTj0l2TVOQMni0l2VVOQMli0l2WVOQMki0n2WVOQMji0n2VVOSMii0l2YVOSMhi0l2YVOTMgi0j2ZVOVMfi0j2[VOUMfi0j2[VOVMei0h2]VOWMdi0h2\\\\VOXMei0g2`VOTMai0j2dVORM]i0m2hVOmLZi0R3k1O1O1O1O1O1O1O1N200O2N2O1N1O2O0O1O100O1N101N2O1O0O2O1N2N2O1L6K4I9Gc0POR[X6\"}}, {\"image_id\": 7, \"category_id\": 1, \"bbox\": [349.43450927734375, 222.0576629638672, 109.02398681640625, 263.19732666015625], \"score\": 1.0, \"association_id\": 2, \"segmentation\": {\"size\": [576, 1024], \"counts\": \"_lV6l0d?g1E6J6H:I6K4L4I>ZBnLh;k4dD\\\\JP:j5jE[Jm9P6kEUJn9S7K4N2M3M3N200O1000000000O0nNdFbI\\\\9X6PG`IR9V6a1C=K5ROYD_Kk;]4n0M3N2O10VEXK\\\\8h4_GhKV8X4gGlKX8U4`GSL_8o3aFPM^9R3[FTMd9n2WFUMi9o2iE\\\\MV:W501O01N1O101N2QOjERJX:LeER5a0jJo9KlER5;nJf:n4`ElJe:m4\\\\1J5K4M2M4K5L4L7G:VOh0K5J5oNR1B>K3O2M3N2M3N3M2N2N3N1N1O100O2020M0O00001O010O1O2O0O0010O0O2M3Li]Q:\"}}, {\"image_id\": 7, \"category_id\": 1, \"bbox\": [627.019287109375, 299.1822814941406, 60.8028564453125, 117.82489013671875], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [576, 1024], \"counts\": 
\"PgR;1ma06K2O0O10000O01O1O1O10O010N2N100QOIU@7i?1P@1o?4g_O1Y`08^_OJa`0k0O01000Q@kNS?V1g@VOR?j0l@]Oo>o1K4L3M4L3M5K2N2N10jLhAc2X>ZMlAe2i>UOTA]Nn>^1\\\\AZNf>f1j001O001O1001O4KUOS@Dn?4b@D^?6h@IY?0n@NU?Mn@3V?CRA8``0JkYn5\"}}, {\"image_id\": 7, \"category_id\": 2, \"bbox\": [374.8970031738281, 467.12274169921875, 170.10458374023438, 24.947906494140625], \"score\": 0.9999998807907104, \"association_id\": 2, \"segmentation\": {\"size\": [576, 1024], \"counts\": \"WSe61oa00N3N100O1000000O100000000000O010000000O10GLf^O3Za0Ne^O3[a0Nd^O2\\\\a0Nd^O2\\\\a0Nd^O1]a0Oc^O1]a0Ob^O2^a0Nb^O2^a0Nb^O2^a0Nb^O1_a0Oa^O1_a0Oa^O1_a0Oa^O1_a0Oa^O1_a0Oa^O1_a0Oa^O1_a0Oa^O1`a0N`^O1aa0O_^O1aa0O_^O1aa0O_^O1aa0N`^O2`a0N`^O3_a0Ma^O3^a0Nb^O2^a0Nb^O2^a0Nb^O2^a0Nb^O2^a0Nb^O2^a0Nb^O2^a0Nb^O3]a0Mc^O3]a0Mc^O3]a0Mc^O3]a0Mc^O3]a0Mb^O5\\\\a0Ld^O4\\\\a0Ld^O4\\\\a0Lc^O5]a0La^O5_a060000O2O0000000000000000000000000000O10001O0000000000000000000000000000000000000000000000O2O000001Ac^O9]a0Gc^O9]a0Gd^O8\\\\a0Ge^O9[a0Ge^O9[a0Ge^O9aa001O0000000001O0000000000000000000000O1000000000000000000O10000000000O1000000O100000000O2O0O10001NRa]8\"}}, {\"image_id\": 7, \"category_id\": 2, \"bbox\": [664.8726806640625, 411.0146789550781, 70.68890380859375, 6.709869384765625], \"score\": 0.9999951124191284, \"association_id\": 1, \"segmentation\": {\"size\": [576, 1024], \"counts\": \"mXi;2na00O10000000000000000O1000001O00000000000000000000000000000000000000000000000000000000000000O100000000000001O0000000000001O0OTYS5\"}}, {\"image_id\": 8, \"category_id\": 1, \"bbox\": [388.5434875488281, 637.3273315429688, 289.6369934082031, 130.67266845703125], \"score\": 1.0, \"association_id\": 0, \"segmentation\": {\"size\": [768, 813], \"counts\": 
\"l]T9173Zg0f0C8H5L3^OhNUZO\\\\1ge0a0L7I7K3L4M1O2N101N101N1O2N2N2M3N2O0O2O0O2O0O101N1O2N101O0O2O00001N1000001O0O1000001N10001O00000O2O0000000000000000000000000000000000000000000000000000000001O0O100000000001O0000000000000000000O10000000000000000000000000001O01O00000000000000000000000000001O0000001O000000001O000000000000000000000000001O0000000000000000000000001O00000000001O0000001O0000000000000000000000000000000000000000000000O10000O1000000O1000000000000000O10000000000000000O1000000O100000000O100000000000001N10001O001O001O000O101O0O2O1N2O1N101O001N102N1N3N3L3N1N101N101N101N4M5\\\\Oa0I7L2N3N2N1N5K5K5H^bT3\"}}, {\"image_id\": 8, \"category_id\": 1, \"bbox\": [182.12872314453125, 86.1440200805664, 306.0693054199219, 140.09902954101562], \"score\": 1.0, \"association_id\": 3, \"segmentation\": {\"size\": [768, 813], \"counts\": \"kk`4i1me0<K5L3N2N2N1O2N2M3N2N1O2O1N2O0O2O000O101O0O10001N1000001N101O1O001O001O00001O0O2O001O1O1O1O1O1O00000O1000000000000O10O1000O1000000O1000000000O100000000O10O100000000000000000000000000000000000000000000000000001O0001O00000000001O0000001O0001O01O0000000001O000000010O0000000000000001O00001O00001O00001O0000001O00001O01OO2O00001O0O101O000O1000000000000000000O1000000000O100O1O100O10000O100O100O1O100O101O0O1000000O100O100O100O100000001N10000000000000000000000000000000O100001O0000001O000000001O000001O0001O000000001O001O001O1O2N1O1O1O010O001O00101N2N2O0O1O1O100O001O2M3N7I6J2N1O1O1O0O3M3L4L6H`cc7\"}}, {\"image_id\": 8, \"category_id\": 1, \"bbox\": [252.35855102539062, 448.17596435546875, 273.1044006347656, 136.55615234375], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [768, 813], \"counts\": 
\"P_m5X2`e0>G4O1N2N101O001N101O0O2O00001N10001N2O001O0O2O000000001O0000001O001O00001N100000001O000O100000000000000O01000000O100O100O10000000001O0O100000000000O100000000000000000000000000000000000001O00000000000000001O0001O0001O000000001O000000000000001O00000000000000000000000O100000000000000O101O0000001O001O001O0000001N1000001O000O10001O0000000O2O0000O10O1000O1000000000000O100000000000000000000O1000O10000000O100O10000O2O00000O010000000000000000000000000O10001O0000000O101O00001O001O000O101N1000000O10001O00001O0O101O1O1O1O1N2O1N2N4L3L4L3N1O2N2N2N2M6FRag6\"}}, {\"image_id\": 8, \"category_id\": 1, \"bbox\": [6.653026580810547, 282.4931335449219, 278.5122375488281, 127.00762939453125], \"score\": 0.9999998807907104, \"association_id\": 4, \"segmentation\": {\"size\": [768, 813], \"counts\": \"ca5d2[e04K3N1O2O0O101N1O101N101O000O101O0000000O1000001O00000000000O2O0000000000000O100000000O10O10000000000000O10000000000001O00000000000000000000000000000000010O000000010O00O2O00000000001N10000000000000000O1000000000000001O000000000000000000000000000000000000O1000001O00000000000000001N1000001O000000001O0000000O10000000000000000000000001O0000000000000000000O10001O0O10000O1000000000000O1000000000000000O10000000000000O1000000000000O100000O10000000O011O00000O101O0000000000000000001O0000000O101O001O1O1O001O001O0O2O1O2N2N3M1O1O001O001N102M3N2N1N101O0O2O1N2I:FPf[<\"}}, {\"image_id\": 8, \"category_id\": 2, \"bbox\": [33.78194046020508, 349.31134033203125, 254.6105499267578, 125.345703125], \"score\": 0.9999998807907104, \"association_id\": 4, \"segmentation\": {\"size\": [768, 813], \"counts\": 
\"edS16jg01O2M5L6J4L1N2O1O1O001O1O2N2N2N1N2O001O001O1O1O1O001O00001O010O1O001O000000001O01O00000001O001O01O01O0000000010O00000000001O00000000O2O00001O001O0000001O0000000000000000000000001O000001O00000000010O0000000010O0001O001O0000010O000000001O00O100000000000000O10000O100O10000O1000000000000O100000000000000O10000000000O1000000O2O00000000000O100000000000000000000O100000000000000000O1000000O100O001O1O0O200O1O100O1O1O1O10O01O10000O100O1000000O1000000O101O0O10000O1000O0100O100O100O2kNZYOm0of0M9FR[Y<\"}}, {\"image_id\": 8, \"category_id\": 1, \"bbox\": [2.7839348316192627, 631.0110473632812, 270.2178649902344, 128.40936279296875], \"score\": 0.9999998807907104, \"association_id\": 0, \"segmentation\": {\"size\": [768, 813], \"counts\": \"Vm28d0U1Xd0K^[O=]d0e1N2N2N1O1N3N1O1O1O1O10000O2O000000000000000O10000000000000000O1001O0000000000000000000000000000000000000000000000000000001O00000000001O000000001O0000000000000000000000001O0000000000000000000000O1000000001O0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000O100000000O01000O10O10O1000O10000000000O1000000000000000000000001N10000O2O0O101N1000000O101O00000O100O2O0O2O1N2N2N2M3M2O2N1O2O1N2N3K8G7\\\\OTZOeN4Kke0]1VZObNVf0U1eYOnNef0P17N2M4J:ERkk<\"}}, {\"image_id\": 8, \"category_id\": 1, \"bbox\": [408.0229797363281, 294.3020324707031, 277.2447204589844, 153.92416381835938], \"score\": 0.9999998807907104, \"association_id\": 2, \"segmentation\": {\"size\": [768, 813], \"counts\": 
\"bZb9`1Yf0<H4M3N2M3M2N3N3L3N3L3O1N2O0O101O1N1O3N2M3M2O1N2O001O1O1O2N3M5J3N1O1O1O1O2N2N4L3M100O001N10000000000000000O100000000000000000000000000000000000000000000001O00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000O10000O100N2L4M3N2O1O10000O100O100O100O100O10000O100000000O1000000000000000O10000000000000000000000000000000O100O100O100O10000O2O000O100O100O100O100O1000000O100000000000000O10000000000O1000000000000000000000000000000O100001O0000000010O0000000010O01O100O0010O0010O001O1O1O2N1O1O100O0O2M4M3Lleo2\"}}, {\"image_id\": 8, \"category_id\": 2, \"bbox\": [288.8125305175781, 523.4044799804688, 238.98159790039062, 113.668701171875], \"score\": 0.9999996423721313, \"association_id\": 1, \"segmentation\": {\"size\": [768, 813], \"counts\": \"RRj61ng09G1O2O1O0O3N4L3M1O001N101O1O003M1N2O1O00001O001O1O1O00001O00001O001O001O00000000001O0000000000000000001O0000001O001O0000001O000000000001O00000001O00001O001O0001O000010O0001O0000000001O0000001O001O000000010O00000001O00001O010O00001O001O1O010O0000001O0000001O0000000000000000000000O1000000O10000O10000O010O100O10000O100O100O10000O1000000000O1000000000O010000O1O010O100O1O0O2O1O1O1O100O1O1O100O100O0100000O1O100O1000000O1000O01O010O10O1000O1000O01000O0100O00N3N101O0KmN^YOS1if0O2M9Fa0ZOoUe6\"}}, {\"image_id\": 8, \"category_id\": 2, \"bbox\": [235.77503967285156, 159.15628051757812, 253.5708465576172, 123.65838623046875], \"score\": 0.9999991655349731, \"association_id\": 3, \"segmentation\": {\"size\": [768, 813], \"counts\": 
\"Rgb52fg00_XO2`g08N2O1N6K2M4M1N2O0O2O001O2M2O2N1N101O1N101N2O1O1O1O1N10001O00000000001O00000O2O001O00001O000000000000000000000000000001O0000000001O000001O010O0010O00000010O000000000010O0001O001O0010O0000001O00010O0000000001O010O001O001O0000001O001O001O0000000000000000000000000000000000000000O10000O10000O10000O1O1O1O1O100O100O1O100O1O1O100O100O1000000O10000O1000001O000000001O00000000O10000O100O100O1O2N1M3K5O1O100O1O100O10000O1000001O000O1000000000000O100O1000000O100000000O10001N100O10000O100O010O100O1O101N7I4KXib7\"}}, {\"image_id\": 8, \"category_id\": 2, \"bbox\": [458.4454040527344, 359.67742919921875, 231.98892211914062, 123.75433349609375], \"score\": 0.9992714524269104, \"association_id\": 2, \"segmentation\": {\"size\": [768, 813], \"counts\": \"hnZ<1mg02O1O101N100000000O2O000O100O100O1O2O0O100O1L5[Od0O1O2O0O1O1O100O1O1O100O1000000O1000000000000000O10000000000O100000000O1000000000000000000000O10000O100O100O100O10000O1O100O10000O010O100O100O10000O100000000O1000O1O001O10O02O0O1N2N3N1O100O100O100O1O10000O101O0000000O2O00001O0O101N100O101O00001N101O002N1O1N101N101N1O2N101N2M7Iabl2\"}}, {\"image_id\": 8, \"category_id\": 2, \"bbox\": [399.8177795410156, 0.0, 281.6230163574219, 106.0346908569336], \"score\": 0.974025547504425, \"association_id\": 0, \"segmentation\": {\"size\": [768, 813], \"counts\": \"VYd93ig07K5L2O1N2O2N2M4M3L3N1O1O1O001O1O101N6K1N2N1O1O1O010O1O1O2O0O1O1O001O000000000000000000000000000010O0000000001O000000000000000000010O0000001O00001O0000010O0000001O000000010O000004MO0001O01O0001O0000001O0XNmYOc1Sf0\\\\NnYOd1Rf0[NPZOd1Vf01O000001O00000010O02N1O1O001O0000001O0001O0001O0000001O01O01O00001O001O0O2O000000000000000000001O00000000000O1000000000O1000000O10000O1000000O100O010O100O10000O10000O01000O100O1000O10O10O000O1010O010O010000O1O0N300O100O1000000O100O100O10000O2O000000000O2O000O10000O100O10001N1O10000O10000O1O1O100O100O10001N1N3AcXO3P^Z3\"}}, {\"image_id\": 8, \"category_id\": 2, \"bbox\": [10.433903694152832, 0.0, 265.003662109375, 
110.04165649414062], \"score\": 0.9656319618225098, \"association_id\": 0, \"segmentation\": {\"size\": [768, 813], \"counts\": \"UY8153`g09]XOE\\\\g0a0O0O2O001O001O4L1O1O1O1O001O100O5K2N1O1O1O00001O1N2O1O1O1O1O0O2O001O001O1O001O0O10001O000000001O001O00000010O00000001O000000010O001O00001O0000000O100000000O101O00000O101O000O2O000O2O00000O101O00000001O01O0000010O01O1O1O10O01O00001O001O002N2N1O1O001O0000001O0000001O000000000000000000000000000000000000O10000000000001O00000000000000001O0000000000000000001O00000000000O101O0O1O100O1O101N101N1O2N1O2N101N101O0O2O2L4L3N2N2O001O1N2O2N2N2M2O1O0N3M2O2M2Oem_=\"}}, {\"image_id\": 8, \"category_id\": 2, \"bbox\": [7.902163505554199, 694.0690307617188, 282.0873718261719, 72.22723388671875], \"score\": 0.2954432964324951, \"association_id\": 0, \"segmentation\": {\"size\": [768, 813], \"counts\": \"joP11og01O0O2O000O2O0000000O100000000000000O100001O0000000000000000000000001O0000000000001O001N3NnoT22oojM1O101N010000000O10000O100000000O10000000000O2O00O1000000000O100000000O101O0O1000000O100000000O10000O10000O10000O1O1O1O1O100O100O1O1O1N2L3L5AlNgYO2OX1Uf0fNlYOd1Tf0]NjYOd1Pf0\\\\NRZOh1ne0XNRZOh1me0ZNQZOg1oe05M2N3000000O10000000O2O00000N2O1O10000O2N1WNSZO\\\\1Qf0bNSZOX1_f0J4nNTYOb00@Xg09iXOE2OYg08iY[<\"}}, {\"image_id\": 8, \"category_id\": 1, \"bbox\": [407.61767578125, 1.494444489479065, 289.41217041015625, 71.2234115600586], \"score\": 0.12491021305322647, \"association_id\": 0, \"segmentation\": {\"size\": [768, 813], \"counts\": 
\"SPb9a0^g02O1O1O001O1O00001O2N1O001O0000001O001O1O00000000000000O1000000000000O1000000000000O100000000000000001O000000000000000000001O000000000000000000001O00000000000000000000001O0000000000000000001O0000000000000000000000OZYOQO]f0o0^YOQO00bf0P1]YOPO10bf0P1]YOPO10bf0P1\\\\YOQO10cf0o0\\\\YORO00cf0n0]YOROO1df0X1O2N1O00001O1O002N1O001O00001O0hYO[NUf0h1000000000001O000000001O00001O002N00001O000000000000000000000000000000000000000000O100O100O100O10000O100000000O1000000O1000000O100O100O100O10000O100O1O10000O1000000O1000000O1O100O100000000000000001O000000001O001O001O1O001O0000001O00001O00001O00000000O100O1N2O100O100O1O1O2M2M4J6YORYOIZoU3\"}}, {\"image_id\": 9, \"category_id\": 1, \"bbox\": [675.0321044921875, 401.6135559082031, 65.59124755859375, 136.21023559570312], \"score\": 1.0, \"association_id\": 2, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"YPT>3Se09J3M2ZNKR]OIg0>Wb09b]OI]b0<Z]OIfb0^1000N2N2O100O1Nn]OWNf`0h1Q_OhNi`0X1U_OoNf`0Q1W_OVOe`0k0X_OXOg`0j0S_O\\\\Ok`0g0o^O_Oo`0e0h^OBWa0Y2001N1O2N001O1O2M2O001M2N2L4L5M3M3N1O2O000001O2N2O0O1001O0O2lMS^Of0oa0VOU^Oh0oa0mN\\\\^On0Sc0N2L6J5L:A=AiXn5\"}}, {\"image_id\": 9, \"category_id\": 2, \"bbox\": [606.3899536132812, 557.9227294921875, 97.0985107421875, 58.00933837890625], \"score\": 1.0, \"association_id\": 4, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"Z^g<1gd00h[O5Vd0Lh[O5Xd0Nb[O6]d0L_[O7`d0:00000001O00000000000O010000000O10000000O1001O000O1O2O0O101N1O2O0O2l[OPOkc0Y1N101N1O2O0N2O011N10000O10000O010000000O1000O100O101N1O1O100O1jN[\\\\Oe0fc0[O[\\\\Od0ec0ZO^\\\\Of0bc0VOa\\\\Oj0bc0SO_\\\\Ol0Pd0000001O001O000O100O1O2L3M4N1O2L4N2NRae6\"}}, {\"image_id\": 9, \"category_id\": 2, \"bbox\": [478.5600891113281, 609.4746704101562, 123.68862915039062, 73.52532958984375], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [683, 1024], \"counts\": 
\"_TP:4Te07J3N2N2N1O2N1O2L4N1O1L4N3O000000O1000000000O1000O010O010000O1O100O1O2O0O2O00000000001GdNa\\\\O\\\\1]c0gNb\\\\OY1]c0hNb\\\\OY1^c0hNa\\\\OX1_c0hNa\\\\OX1_c0iN_\\\\OX1`c0:00O1000000000000O1000000000000000O1000000000O1O10000O1O1O2M200O1O1O100O100O100000000000000000001O00O100000001N1_Nk\\\\Ol0Uc0ROo\\\\Ok0Rc0TOP]Oj0Qc0UOQ]Oi0Pc0VOS]Of0nb0ZOT]Od0mb0\\\\OT]Ob0mb0]OU]Ob0kb0]OV]Ob0kb0^OW]O>kb0AV]O>kb0BV]O=jb0BW]O=jb0BY]O:ib0EY]O9ib0EY]O9nc0Njjh8\"}}, {\"image_id\": 9, \"category_id\": 1, \"bbox\": [462.40887451171875, 417.9097900390625, 140.35028076171875, 218.749267578125], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"Wej94Te06J6T]O5P`0Ni_O:T`0Gh_O>V`0Cf_Oc0V`0_Oc_On0T`0UOg_OT1S`0nNj_OV1T`0kNk_OW1S`0iNl_OY1Q`0jNl_OY1m?mNR@U1Z>kMSBR1BV1V>mMVBm0EV1S>RNSBi0JV1R>WNgAk06o0Q>?PBAo=?PBCn=?QBBn==SBDl=;UBFj=:WBFi=9WBHh=8YBHg=7YBJf=7YBJf=6ZBKe=5[BLd=5[BLe=4YBNf=4XBMh=5TBMl=_OaATN?_2o=\\\\OeAXN5_2V>XOfA]NM^2^>SOeAl1[>SNfAm1Z>RNgAn1Y>TNdAm1]>_23N1O1O1O0N3M3J6L6L2N2M3N1O2N1O1O1O101N1O1O2M2N2O2N2N1N2N3N100O2O0O101O1N2O1O1000O2O1O1N1000101N0001bLd_Oj1]`0QNg_On1\\\\`0lMh_OS2Z`0jMh_OU2Y`0iMi_OV2X`0hMj_OW2Z`0dMh_O[2^`0^Md_Oa2``0ZMa_Of2a`0TMd_Ok2]a0010O1O0101N3M2N1N3N2N2L4F;K4M4L4F;H8G8I7Ijco8\"}}, {\"image_id\": 9, \"category_id\": 2, \"bbox\": [699.8397216796875, 523.5367431640625, 71.82586669921875, 31.78216552734375], \"score\": 0.9999998807907104, \"association_id\": 2, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"e]c>1We04N2O1N1O100N3N1O100O1O100000000O1000000O100O1O1N200NZO_[Og0`d0200O101O001O1O000000001O0000000001O001O1VOd[Oc0dd0N0O2O00000O1O2O0O10000O10001N10001N1N3NjaY5\"}}, {\"image_id\": 9, \"category_id\": 1, \"bbox\": [761.1405639648438, 390.8343200683594, 53.4892578125, 118.74765014648438], \"score\": 0.9999998807907104, \"association_id\": 3, \"segmentation\": {\"size\": [683, 1024], \"counts\": 
\"YPl?3kd0f0F2L3L3O2M2N4M4L4M5L1N5a\\\\OgNdb0S2L3M4L8I2N4K3N0O100N20O0N3M2M3101N3N2N1O10O2M1N3_Oc0L2N2^Ob0M3N3L4M2`N[\\\\OW1fc0hNe\\\\Oo0[c0mNj\\\\O>DC04Vd07l[ODO2]n^4\"}}, {\"image_id\": 9, \"category_id\": 1, \"bbox\": [570.080810546875, 412.7220153808594, 99.27325439453125, 165.92630004882812], \"score\": 0.9999969005584717, \"association_id\": 4, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"PPZ<2We04K4M3M1N3M3M3WOi0f]OkNY`0Z1e_OPOa?_O\\\\_Oc1Q1RO\\\\?b1b@cN[?^1`@iN]?X1a@kN^?U1`@nN_?R1_@RO_?n0`@UO^?j0b@YO\\\\?h0a@[O^?e0a@^O]?b0a@A^?`0_@Ba?`0\\\\@Bc?`0X@Ch?>U@Ej?=Q@Fo?k200000000001O001N2O1O1O1N1O1N3N1N2N3M3N2M2N3M2N2O1N201N101O0O201N1O010O010O1100O00O101O1aM_^OQ1ba0jNf^OQ1_a0eNi^OX1^a0\\\\Ni^Ob1bb0M4L5K2N3JXY\\\\7\"}}, {\"image_id\": 9, \"category_id\": 1, \"bbox\": [196.6199188232422, 461.4387512207031, 214.3416290283203, 221.56124877929688], \"score\": 0.9999957084655762, \"association_id\": 5, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"WYY41n20X?7c@OW?5f@NX?5d@N[?3b@O]?4`@M_?5]@Oa?3[@0e?1X@1g?0W@2h?0V@1j?0T@1k?0T@0l?2R@Nn?4P@Mo?5o_OKR`06m_OJR`07m_OIT`08k_OGV`09i_OHW`08i_OGX`09g_OHY`08f_OH[`08d_OH]`08a_OJ_`06a_OI``07__OJa`06__OK``04a_OM]`04b_ON]`01d_O2Y`0Mh_O6U`0Il_O;P`0Eo_O`0m?_OT@c0i?^OV@c0j?]OV@d0h?\\\\OX@e0f?]OY@e0e?\\\\OZ@e0e?[O[@f0e?ZOZ@h0d?YO\\\\@g0d?YO\\\\@g0d?YO\\\\@g0P>[NRBn0Mh0n=_NRBi01h0j=dNSBd03h0i=fNSBb04h0h=hNRBa06g0f=lNRB=7h0f=nNPB;:g0e=ROkA:?e0d=XObA9j0?c=Y1[BhNc=Z1\\\\BgNd=Y1[BhNd=Y1[BhNe=Y1YBhNf=Z1XBgNh=Z1VBgNj=\\\\1RBeNn=_1mAbNR>b1jA_NV>c1fA^N[>c1bA_N^>c1^A_Nb>b1\\\\A_Nd>T40000000O010000O1000O01O101N100O101O0O1000000000000000000000000000000000000O1000000000000000001O000000000000000000000000000000000000000000000000000001O0000000000000000000000000000001O00001O001O1O1O001O001O00001O00001O0TL[AR1f>kN^AS1c>jN`AT1b>hNaAX1`>dNdA[1^>^NhAa1Z>YNjAg1Y>SNkAl1V>PNnAo1S>oMnAQ2S>mMoAQ2T>kMnAU2S>fMRBY2W>UMRBk2X`0O0000001N101O001O1O1O1N2K6@a0FgYh<\"}}, {\"image_id\": 9, \"category_id\": 2, \"bbox\": [780.8938598632812, 
498.64935302734375, 50.25927734375, 21.42669677734375], \"score\": 0.9999891519546509, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"kRZ`01Ye03N0O2N1N2N3O000O100000O02O0O00101O0000000001O1O010O1O1O010O001O0000001O000000000000001N2O1O4KblP4\"}}, {\"image_id\": 9, \"category_id\": 1, \"bbox\": [802.78955078125, 396.3833312988281, 40.998779296875, 77.63919067382812], \"score\": 0.990276575088501, \"association_id\": 6, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"\\\\[k`0221Re00mZO12Mkd0?_[OAUc06R]O:BBK1^c07R]Oe0nb0^Oo\\\\Oc0ob0n0O000OJh\\\\OXN[c0e1600001OO2000O02O00002N001O1O001N1O2N2O0_OQ\\\\OEoc0:T\\\\OCoc0;S\\\\OARd08e0M]Zh3\"}}, {\"image_id\": 9, \"category_id\": 2, \"bbox\": [806.4117431640625, 473.82659912109375, 41.09674072265625, 13.596343994140625], \"score\": 0.9610949754714966, \"association_id\": 6, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"]gl`03Xe01N100O10000O100000O10001O0000001O0000001O01O0000001O000001O0000001N2OTXf3\"}}, {\"image_id\": 9, \"category_id\": 1, \"bbox\": [990.2189331054688, 385.54376220703125, 27.9761962890625, 46.4759521484375], \"score\": 0.9259355068206787, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"gffd09Pe03M4M2K5N3N3M2N9G2N1O00010O0001O0O2N1CS\\\\O\\\\Onc0b0?M3N2N3Hmc3\"}}, {\"image_id\": 9, \"category_id\": 2, \"bbox\": [196.62557983398438, 532.9894409179688, 187.0716552734375, 150.01055908203125], \"score\": 0.8656215667724609, \"association_id\": 5, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"hdf42Xe02O1O000O1000000000000O101O0000O100000O100001N1O101NR[R`0\"}}, {\"image_id\": 9, \"category_id\": 1, \"bbox\": [838.8634643554688, 398.64776611328125, 21.3499755859375, 51.555267333984375], \"score\": 0.8496840000152588, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"fP`a0;1Fkd0`0V[OBad0f0G9I5O200O1000000O01O100M301N4L2N:B8H]o\\\\3\"}}, {\"image_id\": 9, \"category_id\": 2, \"bbox\": 
[796.8173217773438, 481.57330322265625, 41.827392578125, 18.3017578125], \"score\": 0.8154546618461609, \"association_id\": 3, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"[Rf`04Te03O1O10000O11O00O11O001O000010O0000010O00001O01O000000001O0O10001N2Ncbm3\"}}, {\"image_id\": 9, \"category_id\": 1, \"bbox\": [488.71417236328125, 416.6921081542969, 175.0208740234375, 178.94808959960938], \"score\": 0.661129355430603, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"cbV:`2c1_Nc>h1UA]Ni>e1UA\\\\Nk>e1TA\\\\Nk>d1TA]Nk>d1TA]Nl>c1SA^Nm>b1RA_Nn>a1QA`No>`1PAaNP?_1o@bNP?^1o@dNQ?[1o@fNQ?d0^@mMd0b1m>>eAD[>6jAKV>4iANW>1gA2Y>MQAfNQOd1m?Eo@P1Q?nNPAS1P?lNQAT1P?lNn@U1R?nNh@U1Y?oNY@Z1P`0l14K2O1O1O00001O000000O2O0O100O1000000001O0000001O0000001O00001O00001O1O1O1O1O1O001O1O2N1WMT_OQ1m`0kNX_OS1i`0jNZ_OU1g`0hN]_OV1f`0fN\\\\_OY1h`0`N\\\\_O_1h`0[N[_Od1h`0XNZ_Og1g`0WN[_Oh1f`0VN[_Oj1f`0TN[_Ol1h`0nM[_OR2oa001O00001O1N101O001O1O0O2N3M2J6L5L3L8^Ofn1XO^RN3L4K4L3M4G8B=M4c]OnNi?NP_OX1T1SOe?a1Y@gNa?Z1]@iNa?W1^@lNa?T1^@nNa?R1^@QO`?P1]@TOa?l0\\\\@YOb?g0\\\\@\\\\Oc?d0[@@c?a0[@@e?a0X@Bg?`0U@Cj?>Q@Gn?;m_OIR`0=f_OEZ`0f20000000000000O1O2N1N2N3N1O1N3B\\\\_OnKi`0P4;M2O2O10O01O001O1O10O01000O00100O1000O100O1O010O1O1O011M6bM]^Oh0`c0_OUd_7\"}}, {\"image_id\": 9, \"category_id\": 1, \"bbox\": [820.7472534179688, 399.24249267578125, 38.30859375, 59.778350830078125], \"score\": 0.6192453503608704, \"association_id\": 7, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"\\\\PTa0o0[d03N00000DSOV\\\\OQ1hc0;N011N10O20OO1000000O100O2N1O0O2O1O1O10000O100O3M9FIYOj[O00e0Wd0Ah[O<[d0]Ol[Oa0dd0J4L3Lcd]3\"}}, {\"image_id\": 9, \"category_id\": 2, \"bbox\": [814.4596557617188, 459.7062683105469, 38.32806396484375, 12.922454833984375], \"score\": 0.6095181703567505, \"association_id\": 7, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"_\\\\Ua03We01O10000O1000000000000000O3N1O001O01O0001O0001O000001Ojmb3\"}}, {\"image_id\": 9, \"category_id\": 1, \"bbox\": [858.2153930664062, 
396.01708984375, 23.61865234375, 36.591400146484375], \"score\": 0.5766560435295105, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"a[ma06Qe05L3KC[[O?]d0;IVOh[Oo0Xd02000000000O1O1O0O2N200O1N2@`[O3cd0Ka[OJK0lYP3\"}}, {\"image_id\": 9, \"category_id\": 1, \"bbox\": [845.875244140625, 398.0525207519531, 19.22747802734375, 47.33624267578125], \"score\": 0.5194374322891235, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"Pfda0=3Ecd0d0K6D:O1O001001N101O1N4M2M5J6G9I6KiYZ3\"}}, {\"image_id\": 9, \"category_id\": 1, \"bbox\": [868.013916015625, 394.116455078125, 18.33489990234375, 31.6754150390625], \"score\": 0.35950967669487, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"b[Sb02Ve04FMU[O8jd05E;O10O10O10O1O1O1O1N3L5Khol2\"}}, {\"image_id\": 9, \"category_id\": 1, \"bbox\": [791.5623779296875, 392.9258117675781, 35.7650146484375, 92.16122436523438], \"score\": 0.31352537870407104, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"[ed`01Ve04G<K5MU1i[OYN`b0f1b]OWNab0g1b0fN^\\\\Oe01POcc0:\\\\\\\\Oe0jc0ZOY\\\\Ol0`c08^\\\\OdNcc0e1M6J3N0N1000O1F9O2N2N2TO]\\\\ONfc0Oi\\\\O_O[c0`0i0_O[[O50LjdU4\"}}, {\"image_id\": 9, \"category_id\": 2, \"bbox\": [851.8470458984375, 438.5414733886719, 20.74176025390625, 8.306854248046875], \"score\": 0.27556291222572327, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"R\\\\ma01Ze0000O110N1000000001NVYV3\"}}, {\"image_id\": 9, \"category_id\": 1, \"bbox\": [854.5712280273438, 397.47296142578125, 17.75244140625, 42.63592529296875], \"score\": 0.23246710002422333, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"Rfja093HN1ed0c001N1O1K4N101O3L4O1O03N1M8G8FfdU3\"}}, {\"image_id\": 9, \"category_id\": 2, \"bbox\": [803.1465454101562, 468.8429260253906, 42.31689453125, 27.383880615234375], \"score\": 0.1861138939857483, \"association_id\": 0, \"segmentation\": {\"size\": 
[683, 1024], \"counts\": \"ZRh`03Ve04M1O2O00O02O0KHX[O7hd0IY[O6fd0LZ[O3fd0MZ[O3ed0N\\\\[O2cd0N][O2bd0O^[O1bd0N`[O1`d0O`[O1`d0O`[O1`d0O`[O1`d0Oa[O0_d00a[O0`d0O`[O1`d0O`[O1`d0Nb[O1_d0Na[O2_d0Na[O2_d0Na[O2`d0Ma[O2_d0Na[O1`d00a[ON_d02=1O000001O0000001O00kbg3\"}}, {\"image_id\": 9, \"category_id\": 2, \"bbox\": [855.8169555664062, 435.5650939941406, 22.0599365234375, 8.611724853515625], \"score\": 0.12423624843358994, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"]Qna01Ze00O10000000000000000MOkZO2Ue002NocS3\"}}, {\"image_id\": 9, \"category_id\": 2, \"bbox\": [846.4619750976562, 441.15765380859375, 24.86553955078125, 8.5460205078125], \"score\": 0.08134880661964417, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"^QPb01Ze0000anX3\"}}, {\"image_id\": 9, \"category_id\": 2, \"bbox\": [808.6304321289062, 458.51690673828125, 42.38543701171875, 23.499908447265625], \"score\": 0.07793904095888138, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"RRn`02Xe02O00001O0O100000O1000EN^[O1bd0O^[O1ad00`[ONad02;100000000O10000001O2LTnj3\"}}, {\"image_id\": 10, \"category_id\": 2, \"bbox\": [998.1082153320312, 704.1364135742188, 303.78363037109375, 89.5286865234375], \"score\": 0.9999997615814209, \"association_id\": 2, \"segmentation\": {\"size\": [882, 1323], \"counts\": 
\"Wjoj02^k03O0O1O101N1000000O1000000O1000000O10000O10000000O10000000O01000000O10O1000O01DH]UO9cj0JYUO7fj0:10O1000O010O01O1O10O01000000000000O10000000001O00000O1000O10O1UOTOPWOl0oh0VOPWOj0oh0WOQWOi0oh0XOPWOh0Pi0XOPWOh0oh0YOPWOh0Pi0YOoVOg0Pi0ZOPWOf0Pi0ZOPWOe0Pi0]OoVOc0Qi0]OoVOc0Pi0^OPWOb0Pi0^OPWOb0Pi0_OnVOb0Ri0^OnVOa0Ri0@nVO`0Ri0@nVO`0Ri0@nVO`0Ri0@mVOa0Si0_OmVOa0Si0^OnVOb0Ri0^OnVOa0Si0_OlVOb0Si0_OmVOa0Si0_OmVOa0Si0_OmVOa0Si0_OmVOa0Si0_OmVO`0Ti0@kVOa0Ti0@lVO`0Ti0_OmVO`0Ti0@kVOa0Ui0_OkVOa0Ui0_OkVO`0Vi0@jVO`0Ui0@kVOa0Ui0_OkVOa0Ui0_OkVO`0Vi0@jVO`0Vi0@jVO?Wi0AhVO?Yi0@hVO?Yi0AgVO>Zi0BeVO>\\\\i0BdVO=]i0CcVO=]i0BcVO>^i0BbVO=_i0CaVO<`i0D`VO;`i0F_VO;ai0D`VO;ai0E^VO<bi0D]VO<di0D[VO=ei0C[VO<fi0DYVO=gi0BZVO>fi0BZVO>ei0CZVO=gi0CYVO=gi0CYVO=gi0CXVO=ii0CWVO=ii0CWVO=ii0CVVO=ki0CUVO=ki0CUVO=ki0CTVO>li0BTVO=mi0CRVO>ni0BQVO?oi0AQVO?oi0APVO`0Pj0AnUO`0Rj0@nUO`0Rj0@nUO`0Qj0AnUO?Sj0AmUO?Sj0AmUO?Sj0BlUO>Tj0`0000000000O100000O010000000O10000000O10O10000000000000000O1000000000000O10000000000000001O000000000000000000001O001O01O0001O000001O001O1O2N1O1O1O1O1O001N2N2N6I\\\\WT1ZOPikN<VNHSXO`0kg0DQXO=ng0GjWO>Uh0CkWO>Sh0DlWO<Th0DkWO=Uh0DjWO<Vh0DjWO<Vh0GfWO:Zh0GaWO=_h0D`WO<`h0E^WO<bh0GZWO:fh0IRWO<mh0U1001N2^NUWO;lh0_O[WO?gh0ZO`WOc0oi0nN\\\\UOf0kUb0\"}}, {\"image_id\": 10, \"category_id\": 1, \"bbox\": [825.948486328125, 208.64373779296875, 446.3052978515625, 546.509765625], \"score\": 0.9999996423721313, \"association_id\": 2, \"segmentation\": {\"size\": [882, 1323], \"counts\": 
\"hfWf0U1Vj0`0C<D7J5L4L3M4L2O2O0O1O2N1O101N1O1O2O0O2N100O2N1O2O0O2M3O0O2N2O1N2O0O2O1O0O2O001O1N2O1O1O1O1O2N1O2N1N3N2N1O1O1O1O1O1O1N2O2N1O2l[OSK_a0n4Y^O[Kea0g4P^OcKoa0`4d]OlKo1oNk<Y5h@SLT2oNQ=Q5d@ULR2TOX=i4`@XLT2SOZ=h4]@XLW2TOZ=f4Z@ZLZ2RO[=f4W@ZL]2SOZ=d4V@\\\\L^2RO[=i6dBZIZ=f6eB\\\\IZ=d6eB`IX=`6gBcIW=]6hBeIW=\\\\6fBhIX=X6gBjIX=V6fBmIY=S6fBoIY=Q6eBQJ[=P6bBSJ]=m5bBUJ]=k5bBVJ^=j5aBXJ^=h5aBYJ_=g5aBZJ^=f5bB[J]=e5bB]J]=c5cB^J\\\\=b5dB_J[=a5dBaJ[=^5fBcJY=]5fBfJX=Z5hBhJV=X5jBiJU=W5jBlJT=T5lBmJS=R5nBoJQ=Q5nBQKQ=o4oBRKP=m4PCTKP=k4QCVKn<j4QCWKo<h4RCYKm<f4SC[Km<d4SC^Kl<a4TC`Kl<^4UCdKj<[4VCgKi<X4WCiKi<T4YCnKf<P4[CRLd<l3[CXLd<f3\\\\C]Lc<a3]CbLb<\\\\3^CgLa<[3[ChLd<Z3XCiLg<Y3TCkLk<W3QClLn<U3nBoLQ=S3iBRMV=o2fBTMZ=n2`BWM_=o2WBUMi=[8O001O010O000010O010O010000O1000cMlA[GT>_8SB`Gn=[8WBeG`2iMZ8[:ZElGj1[No8c9ZERHe1]NS9]9ZEVHa1_NW9V9[E[H\\\\1aN[9P9\\\\E^HW1cN`9k8[EaHU1eN`9h8]EcHQ1fNc9e8_EdHl0hNg9a8`EfHh0jNh9^8dEfHb0mNk9\\\\8fEeH>oNm9Z8hEfH9QOP:X8jEfH3TOR:U8nEgHMUOV:S8nEiHJTOY:Q8PFlHCTO]:P8RFmH]OTOb:m7SFPIXOUOd:k7UFRITOSOh:j7UFTIPOSOk:i7WFSIlNUOn:f7XFWIfNTOS;d7YFXI`NVOX;a7ZF\\\\Kg9c4[F[Kf9c4]F[Kd9d4_FYKb9f4`FXKa9g4bFVK_9i4dFSK^9l4dFRK]9l4fFRK[9m4fFRKZ9n4hFPKY9o4hFPKY9o4gFQKY9o4hFoJZ9o4hFoJZ9P5hFmJZ9R5gFlJZ9T5hFiJZ9V5hFfJ[9Y5gFbJ^9[5eF_J`9`5bF[Jb9d5_FXJf9f5[FVJj9h5WFUJl9i5VFTJm9k5TFRJo9m5RFQJP:n5QFoIR:P6nEoIT:P6mEnIU:Q6kEnIX:P6iEnIY:Q6hEmI[:Q6fEmI\\\\:R6eElI_:Q6bElIb:R6_ElIi:m5YEPJo:i5SESJU;g5mDVJY;e5iDWJ];d5fDWJ`;f5aDWJd;g5]DUJh;h5YDUJo;e5SDWJV<b5lCYJ_<_5cC\\\\Jc<b5i4M2N3M3N1N3M2N1O1O100O1O00100O0010O0100O010O1O10O10O1000001O1O1O2N2N2N2N2M2O001N100O1O101N101N3N2M2O2N1O0O2O001O01O000000O2O1O0O2O0O2O0O100O100O1O101N101O1O2N2N3N2M3OOO100O001O00000O1000001O2O1N3O0O2M2O0O00000O10000O1000000O200O1O1000O001O000000O1O100O101N2O1N3N1N10000O10000O10O2O001O01O001O1O1O001O00O10O1000O100000100O0100O1O001O000O10O10O10000000010O11O1O1O1N1O1O1O0O3N2N3M3M3N2M2O200O1O1NO1O1O001O001N2N1O2N2N2N2N1O2O1N1O2N1O2N1O1O2M3M4L3M5J;EoR\\\\1\"}}, {\"image_id\": 10, \"category_id\": 1, \"bbox\": [543.8258056640625, 
254.90086364746094, 314.61395263671875, 473.85968017578125], \"score\": 0.9999995231628418, \"association_id\": 1, \"segmentation\": {\"size\": [882, 1323], \"counts\": \"YQW?1^k0c0^O4L4L3N2N2N2N2O2M2O3L4L3N2M2N3N1N200000001N2O1N101O0O0010O01O00001O00010O00001O0000000O100000000O100O0O2N2N1O2N2O0O2N2O001N1O2N1O1O1O1N10001N200O1iMWMf[Oi2Rd0eMg[O]2Wd0lM`[OU2`d0RNX[Oo1hd0XNnZOl1Qe0\\\\NVZOU2je0j10O010O010O10000O100O10000O100O1O1O100O1O1O100O100O100000000000V_OjJX;V5SDjKb;V4jCdLP<\\\\3hCnLV<R3hCQMW<o2gCTMX<l2fCWMY<i2fCYMY<h2eCZMZ<g2cC\\\\M\\\\<e2aC]M_<e2]C^Mb<d2WCcMg<_2QChMn<Y2lBnMR=T2hBRNV=o1gBTNX=m1fBVNX=k1fBWNY=k1dBWN[=j1bBXN^=j1_BXN`=k1[BXNd=k1VBYNi=k1oAZNP>i1iA\\\\NV>g1dA]N[>f1`A]N_>g1[A\\\\Nd>i1VAYNi>k1QAYNm>l1m@VNR?V7O1O1O100O001O001O01000O0101O00100O11N10O0101N200O6K6I4L2N1N2O0O0001O0O2O1N100O100O010O001O001O0nLYBYHh=]7dB`H\\\\=\\\\7jBbHV=\\\\7mBcHT=[7nBdHR=Z7QCfHo<Y7RCfHn<Y7TCfHm<Y7UCeHk<Z7WCeHj<Z7YCcHg<\\\\7\\\\CbHe<]7]CaHd<^7^C`Hb<_7`C`Ha<_7`C`H`<`7aC_H`<_7bC`H_<`7aC_H_<a7bC^H_<a7bC]H_<d7bCZH_<e7cCYH]<g7fCVH[<i7gCUHY<l7hCRHX<n7iCQHX<n7iCQHW<o7jCPHV<Q8jCmGX<S8hClGX<U8hCiGY<Y8gCdGZ<^8fC_G\\\\<h2`AFEkNc2`NY<f2mADIlNU2cNV<g2UBBFUOR2\\\\NU<i2YB_OE\\\\OSa0S1]_O\\\\OCAo`0R1a_OYOCEl`0P1c_OYOCGj`0n0g_OVOALi`0m0h_OSOA1g`0j0m_OoN_O6e`0i0Q@jN]O>c`0f0`@oMZO\\\\1V`0c0PC\\\\OR=a0oB@Q=?oBBQ=<QCDo<:RCGn<8RCJm<4UCLl<1VCOk<NWC3i<JYC6h<GZC8h<EZC;g<B\\\\C=e<@^C?b<_O`C`0a<^OaCb0_<\\\\OcCc0^<[OdCd0^<YOdCf0]<WOfCh0\\\\<TOhCi0^<oNfCP1`c0N2N2N3L3N1O2M4L3L6JWgS=\"}}, {\"image_id\": 10, \"category_id\": 2, \"bbox\": [697.246826171875, 675.5496215820312, 386.9957275390625, 65.64239501953125], \"score\": 0.9999972581863403, \"association_id\": 1, \"segmentation\": {\"size\": [882, 1323], \"counts\": 
\"[lib03^k03N1N101N100000000000001O0000000000000000000000001O01O0000000001O0001N10001O0O2O3Kc`^11[_aN9I2N4M1RUO_Oij0f0O1O1N10001O000O101O000O100000000O100000000O10O10000000O1000000000000O100000O100000O1000000000000O010000000000O10O1000000000O10O100000O1000000000O01000000O100000O010000000000O10O1000O10000O10000000O0100000000000000O1000000000O10000000000000000000000000000000000000O10O10000000000000000000000O1000000000000000000000000000000000O1000000000000000000000000000000000000O2O0O101N102K6GbPo8\"}}, {\"image_id\": 10, \"category_id\": 2, \"bbox\": [1116.2861328125, 391.7537841796875, 63.64599609375, 5.96759033203125], \"score\": 0.333486944437027, \"association_id\": 0, \"segmentation\": {\"size\": [882, 1323], \"counts\": \"ZRUn01ak01O00000O1000000000000000000000000O10000000000000000000000000001O0000000000000000000001O00000000000O100000001O00j[m3\"}}, {\"image_id\": 10, \"category_id\": 2, \"bbox\": [1275.822021484375, 749.4020385742188, 40.684814453125, 44.54150390625], \"score\": 0.2264673113822937, \"association_id\": 0, \"segmentation\": {\"size\": [882, 1323], \"counts\": \"ke\\\\R16[k01O1L4M3I7L4N2M3N101M3O0O200O100O0100000000O100O100O101N100O1UO`UOc0aj0\\\\OdUO`0]j0\\\\OfUOc0ej001N1O2N2NSY5\"}}, {\"image_id\": 10, \"category_id\": 1, \"bbox\": [1072.086669921875, 267.058349609375, 48.653076171875, 130.76385498046875], \"score\": 0.1951531618833542, \"association_id\": 0, \"segmentation\": {\"size\": [882, 1323], \"counts\": \"jeQm02_k04L1L5L4I6M3J502N2ZOUO`VOV1Wi0RO_VOZ1Wi0l0E3M2WXOXMdf0i2WYO]Mgf0e2UYO_Mif0c2SYO_Mmf0e2iXObMVg0c2bXO`M^g0Z3000000001O1O5K6fN`XOUNcg0g1aXOVNbg0e1eXOVN\\\\g0i1iXOQNZg0l1W1N3G8O2N3M3L6J:E9AXf]5\"}}, {\"image_id\": 10, \"category_id\": 2, \"bbox\": [662.5211791992188, 692.4215698242188, 76.3851318359375, 11.95538330078125], \"score\": 0.14521518349647522, \"association_id\": 0, \"segmentation\": {\"size\": [882, 1323], \"counts\": 
\"j^ma03^k010000O101O00000000000000000O101O0000000000000000001O0000000000000000000000000O1000000000000000000001O00000000001O0000000000001O000000000000001N3MgZf?\"}}, {\"image_id\": 10, \"category_id\": 1, \"bbox\": [93.92952728271484, 261.44091796875, 38.287315368652344, 106.28125], \"score\": 0.08837771415710449, \"association_id\": 0, \"segmentation\": {\"size\": [882, 1323], \"counts\": \"nPc25Wk08F9B=I6L5E;TOaNcWO`1\\\\h0l0O1OO010001O2N2O1O1O000000000001N3N2L3M3A?I6J8ROTVOLSj0Nl0L6Km]RP1\"}}, {\"image_id\": 10, \"category_id\": 1, \"bbox\": [373.5018005371094, 357.3817138671875, 242.37802124023438, 91.69219970703125], \"score\": 0.07374493032693863, \"association_id\": 0, \"segmentation\": {\"size\": [882, 1323], \"counts\": \"fTa:c0mj03M3N1O2O0O2L5M6I6K2O1N10001O001O001O1N100000000000000000000001N100O10000O101O0O101N101N100O2O00001O001N1000001O01O0001O01O01O000001O0001O01O000001O010O1O1O001O001O0O101O1N2N2O1N101O1N2N3M3M2O0O2O0O2O1O1N2O2L3NTbk00m]TO0O10001O00O1000001O000000hSh00WlWO2N2K9J2N1SUO@hj0`0XUO@hj0`0XUO@hj0`0XUO@hj0`0XUO@hj0?6KQUOGnj0:5O101O001JhTOO^k002NWSfc0\"}}, {\"image_id\": 11, \"category_id\": 2, \"bbox\": [0.0, 997.766845703125, 601.7731323242188, 114.820068359375], \"score\": 0.9999977350234985, \"association_id\": 1, \"segmentation\": {\"size\": [1240, 836], \"counts\": 
\"XV2a0RV16M3N2N101O001O00001N10001O00001O001O00001O00001O0000001O000000001O00000000001O00000000001O00000000001O00000000001O00000000001O0000000000001O0000000000001O0000001O000000001O00000000001O0000000000001O0000001O00000000001O0001O000000000000000001O000001O000000001O00000000001O0001O0001O000000010O1O1O2N1O00001O010O0000001O00002N1O1O10O01O001O0010O00010O000001O01O000000000010O00000001O0000001O00010O00000000001O000001O01O0000001O01O01O0000001O000000000000000000000001O000000000000000000000000000O10000000O100000O1000000O10000000000O100000000000000000000000000000O100000000000O1000000001O01O000000000000000000000001O000000000000001O0000001O00001O00001O00001O00001O000000001O0001O01O001O1O001O1O1O001O001O00000010O00000001O000001O01O0000001O0001O000001O0000000000000001O0001O000000000000000001O01O0000000001O0001O01O0001O00010O00001O0001O0001O000001O01O00010O001O0010O0001O00010O0000001O01O001O100O2N1kNPkN`0SU1\\\\OojNd0cU1O3M1O10O00001O000001O000000001O0000001O0000001O00001O001O00001O000000001O00000000000000000000001O000000000000000000001O01O00000001O000000001O00001O00001O00001O001O00001O001O001O00001O00001O00001O0000001N10001O0Ooh`:\"}}, {\"image_id\": 11, \"category_id\": 1, \"bbox\": [143.0629425048828, 168.17227172851562, 413.41033935546875, 954.7784423828125], \"score\": 0.9999902248382568, \"association_id\": 1, \"segmentation\": {\"size\": [1240, 836], \"counts\": 
\"TRb6U1eT1Y1B9G8I6J6J5L4K5K6I8H8H:E;@`0A>C<D:H9I6K4L5K4L5J6J5K6H8H8H7J6J7K4L4L4L4M3L4M4K4L4L4K4K6J5L4M3M3M3N2M2O2N2N1N3M2N3M3L3M4K5L4M3M3M3M3O1N3N1O2M3N1O2N1N2O1O1N2O1O1O1N1O1N2N2O2L3N2L4M3M3N2N2N3M2O100O100000000O1000000O1000000O1O1O1O1N2N2N2N2M3L4N3L3N2O1N2O10000O1000000O1000000O1000000O1O1O2N1O1O1O1N2O1N2N2N2M3N2N2N3N1O2N1O1O2N1O2OeEh[OV2Wd0gMg\\\\O`1Wc0]NR]O^1mb0`NZ]O]1eb0`Na]O]1^b0bNg]O\\\\1Wb0cNm]O\\\\1Qb0bNU^O[1ja0dN[^OZ1ba0fNc^OX1[a0fNm^OU1Qa0iNX_OS1d`0nNc_On0Z`0QOn_Ok0n?VOX@g0d?ZOb@c0Z?_Ok@>R?BSA<j>E[A8b>HcA6[>JhA5V>KmA4R>LPB4m=MTB4j=KYB4e=M\\\\B4b=L_B4_=LcB4\\\\=LeB5Y=JiB6U=KkB6T=InB8o<IRC7m<ISC8k<HWC8h<HYC8e<H\\\\C:a<G`C9^<GdC:Y<GhC;S<FoC<m;EUD<f;E\\\\D<`;EcD<X;FjD:Q;HRE8h:K\\\\E3_:1dENV:5mEJn99VFEe9>]FB_9a0bF_OZ9d0gF]OS9g0nFZOn8h0SGYOi8i0XGXOe8i0\\\\GYO`8h0aG[OY8g0gG]OT8d0mG^Oo7b0SH@i7a0XHAe7?[HCb7>_HC^7>cHCZ7>gHBX7=jHDT7;oHDQ7:RIFl69WIGg68\\\\IHb68aIG]68hIGU68PJGm58XJGe59^JF`59dJFZ59iJGU59mJFR59QKGm4WKjAZ2\\\\9^2i4VK[Bm1P9j2d4WKmB`1b8X3`4VKSC]1b8[3Z4VKZCZ1b8\\\\3S4YKaCT1c8_3k3[KhCP1d8a3b3^KRDi0c8e3Z3aKYDc0d8h3R3dKbD;c8m3j2fKiD7d8o3b2iKPE0d8U4[2jKUELe8X4T2lK[EDh8`4k1kKaE^Oj8f4d1jKfEZOl8k4^1hKkEUOn8S5V1eKQFPOP9Z5n0dKXFhNR9d5e0`Kl3`4SL[KT4c4lKYKZ4f4eKUKb4i4^KSKi4k4VKQKP5m4PKnJV5Q5jJkJ[5T5eJhJ`5X5`JcJe5\\\\5l=001O1N2O2M3N3L4M3L3M3N1N3N1N2N101O001O100O101O1O1000O000000O1O1O100O1O1O2O0O1O010O01O001O1O1N3M3M2N3M2N3M2N2N3N1N2N2N2N2N2N4M3L5K5K5L5J6J4L3N1N2N2N101N1O1N3N2N2N2N2N3M3M3M3M2N2N1O1O001O1O001O00100O001O001O001O2K5L5J6J6Io0\\\\NcRb:\"}}, {\"image_id\": 12, \"category_id\": 1, \"bbox\": [388.39703369140625, 259.12322998046875, 348.2666015625, 235.25787353515625], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [683, 1024], \"counts\": 
\"[kW87Pe08I7I9E9B`0YOh0C9J5K5L3M4L3M3M4L4_O[LU_Oj3d`0e0F:G7L4M2N2N2O1N1O1O1O1N2N2O1N2O1O1O100O1000O0010O010O10O0100O00101O0O101O0O2O1O1O1N2O001O001O1O1O1O6J4L1O001O1O0O1000000000000O1000000O10000O100O10000O1000000O10000000000O1000000000000000000O100000000000000000000000000000000000000O1000O100000O010O010O010O010O0100O1000000000O10O1000000000000O1000000O1000000O100O100O100O100O1000000O100000000000001O0000000O101O00001O001O001O00001O00001N100000001N100O101N100O2O000O2O001O0O2O1O1O001O1N2O1O001O1O1O001O1O001O1O001O001O001O1O001O001O001O00001O000000001O00010O00001O000010O010O0100O0100O010O001O001O001OjJV@k4j?TKW@m4h?SKY@m4f?RK[@o4e?PK\\\\@P5c?oJ^@Q5b?nJ`@R5`?kJd@8F_4V`0[Ko_Od4]`001N2N2N3M2O2M1O1O001O001O1O1O2N2N2N1O1O1O1O1O1N2O2M2N4L5L5I4M2M4L3M2O2N2N3L4L4@j0XOZQo5\"}}, {\"image_id\": 12, \"category_id\": 2, \"bbox\": [252.2974853515625, 437.1063537597656, 495.43536376953125, 192.98794555664062], \"score\": 0.9996260404586792, \"association_id\": 1, \"segmentation\": {\"size\": [683, 1024], \"counts\": 
\"[e\\\\54Qe0<Z[OBmc0g0m[O]OQd0P1N101N1O2M3N2M2N3L3O2M2O1O1O2N100O10000O1O100O2N1O1J6N2O1O1O100O100O10000O100O1O1O1O1N2O1O1O1O100O100O10000O100O100O1O100O1O100O1O1O100O10000O1000O010000O100O1O100O1O100O10000O01000O10O10O10O01N2M2N3O100O10O010000O100O100O100M3N1N3O1O100O1O100O10000O100O010O10O0100O0010O010O0100O01000O1000000O0100000O100000000O01000000000000O1000000O101O0O1000000O1000001O00001N101O001O2N2N1O1O1O1O001O0O101O001O001O1O1O8H7I2N2N1O1O001O001O000O2O001N101O0O2O1N101N10001O0O100000O10000O100N2M3N2O1O100O100O100000O010000O100O1O1M3N2N200O010O1000000O10O100O0100O010O10O0100O10O0100000000O10O1000000000000O01000000000O01000O01000O01000O0100000O10O10000000O1000000O10000O10O01O10000O10000O1000000000O10O1000O100O10O010000O010000O10000O0100000O1000000O1000000000000O1000000O100O2O0O1O10000O10000O101O00001O0O2O000O2O001N1O100O2O000O10001O000O2O00001O0O2O001O0O101O0O101N10000O100O100O100O1O1O100O100O100O1000000O010O100O1O0O2N2N101O10O010O10O10001N100O100O1O2M2N2N2O1O2O0O10000O2O1N2O0O2N4M3GoXh5\"}}, {\"image_id\": 12, \"category_id\": 1, \"bbox\": [41.97691345214844, 87.03145599365234, 99.85310363769531, 241.123046875], \"score\": 0.9725305438041687, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"giP13Ve03N101O00010O1O1O01N100O2O02N2N1OO2L3M24L4RON[\\\\O6ac00Z\\\\O2dc0k0N3M30102N2M2M1O2O0O2O3M7I1N1O1O1000bNV]O;ib0A^]O?_b0_Og]O`0Ub0^OT^O<ka0Cb^OGhNNeb0:V_OAj`0?X_O_Oh`0`0S2O00000001O0Y^O@c>`0T3000g^ODc=;_BG^=9dBHY=8jBGT=9mBJP=5T4O1N2O2M4M2M3MTkjb0\"}}, {\"image_id\": 13, \"category_id\": 1, \"bbox\": [231.9683380126953, 594.8298950195312, 34.41523742675781, 74.31060791015625], \"score\": 0.9999991655349731, \"association_id\": 1, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"^\\\\k43We03M2N2N1N2M3K5K5A?@`0J6H8N2O1000000000000002N:F2N4L4L6J4L3lNR\\\\Oe0ad0I5K2N3KQnh?\"}}, {\"image_id\": 13, \"category_id\": 2, \"bbox\": [259.8497314453125, 592.7579345703125, 764.1502685546875, 75.8817138671875], \"score\": 0.9999985694885254, 
\"association_id\": 2, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"QRj62Ye00O2O0O101O0O10001N100O10001N10000O2O000O10000000000O10001O00000000000O1000000000000O10000000000000000O1000000000000000000O10000000000O1000000O1000000O10000000000000000O100000000000000000000O10000O10000O100O1000000O10000000000O10000000000000000000001N10000000000000001O00000000000000O2O001N101N1O3M3L9]OV[O2loS13QelNa0C2M2N101O0O101O00000O10001O0000O1000O1000000O10000O0100000O10000000000000000O10O10000000000000000000000O10O1000000000O1000O10000000000000000000O1000000000000000O100000O10000000000000000000000000000O1000O100000000000000000000000000000000O1000000000000000000000O10O1000000000000O10O1000000000000000O100000000000000000000000000O1000000000O10O1000000000000000000O10000000000000000000000000000000000000000000000O100001O0000000000000000000000000000000000001O00000000000000000001O0000000001O000000000000000000001O0000000001O01O000000001O000001O01O0000001O01O01O001O00010O000000001OO1000001O0000000000001O0000001O0O10001O000000001O000000000000001O000000000000001O000000000O2O00000000000000010O00000000000001O0001O01O001O001O01O01O00000000001O00000000010O001O1O001O001O002N4L3N0O1O1O001O001O0010O000000000001O000000010O001O001O1O010O0000010O00000010O0000000000001O00000001O01O0000001O00001O001O001O00001O01O0000000000000000000000000000000001O000000000000000000000001O0O10001N101N101N2N3Laf0\"}}, {\"image_id\": 13, \"category_id\": 2, \"bbox\": [460.0763244628906, 194.0535125732422, 218.37991333007812, 21.374954223632812], \"score\": 0.9999592304229736, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": 
\"a[R:1Ye01000001O00000O100000000000000O100000000000O100000000000O10000000000000O1000000000000000O10000000000000000000O100000000000O1000000000000000O10000000000000O1000000000000000O100000000000O100000000000000000000O010000000000000000000000000O10O1000000000000000000000000000000O1000O1000000000000000000000000O010000000000000000000000000000000000000000O10000000000000000000000000000000000O1000`mW7\"}}, {\"image_id\": 13, \"category_id\": 1, \"bbox\": [49.527992248535156, 173.98133850097656, 974.4719848632812, 353.97705078125], \"score\": 0.9998970031738281, \"association_id\": 3, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"\\\\\\\\R1m0Xd08K4N2N2N2O1N2O1N2O0O2O1N101O00001O001O0000001O00001O0000001O0000001O0000001O000000001O000000001O000000000O2O000000001O000O101O00000O2O0000001N100000001N10000O10001O0O1000000O101O0000000O1000001O00000O10000000001O00000O100000001O00000000000O1000001O0O1000000O1000000O101N10000O100O100O1O101O0O100O1000000O2O00000O1000O010000O100O100O100O100O1O1O1O1O001O1O100O1O100O100O100O10000O1000O0100O100O100O10O0100O10000O100O10000O01000O10000O10000O1000000O10000O1000000O1000000O1000000O1000000000000O1000000000000O1000000000000000000000000000000000000000000000000000000000000000000000000000000000001O000000001O0000001O00001O001O0O101O001O001O001O00001O00001O0000001O0000001O0000001O001O00001O001O001O1O1O2N1O1O1O2N1O1O1O1O1O1O1O1O001O1O001O00001O001O00001O00001O001O00001Oi0WO1O001O001O001O001O001O001O001O01O00000000000000000O10000oN^AYKb>g4`AWK`>h4bAWK^>h4eAVK[>i4hAUKX>j4jAUKV>j4mATKS>k4oATKQ>k4QBSKP>k4SBTKm=k4UBTKl=j4UBVKk=h4XBWKh=g4ZBYKf=d4^B[Kb=c4`B]K`=a4cB^K]=`4eB`K[=_4fBaKZ=_4gB`KY=_4hB`KY=`4gB`KY=`4gB`KY=_4iB`KW=`4iB`KW=`4jB^KW=a4jB_KV=a4jB_KV=a4kB]KV=b4kB^KU=b4lB\\\\KU=d4kB\\\\KU=c4mB[KT=e4lB[KT=d4j10000O100O100O100O100O2O0O1O100O10000O100O10000O10O0100O100O100O1O100O1O1O1O100O1O10O0100O100O10000O10000O10000O1000000O1000000O1000000O100O100O10000O100O100O10000O100O10000O10000O10000O10000O100O100O10000O100O1000000O1000000O100000000O101O00
00000O10000000000000000000000000000000000000000000000000001O0000000000000000000000000O1000000000O1000000000O10000000000000000O100000O10000000O10000000000O100000000O100000000O1000000O1000000O10000000000O100000000O1000000000000O1000O10000000O1000000000000O100000000000000O1000000000O10O10000000000000O100000000O100000O010000O10000O100O10000O10000O1000O10O10000000000O2O000000000000001O0O100000000000001O000O10000000000000001N10000000000O1000000O10000O100O100O1O1O1O100O1O1O1O1O100O1O1O100O1O100O100O1000O0100O100O010O100\\\\OWLe_Oi3Y`0\\\\Ld_Oe3Z`0_Lc_Ob3Z`0cLc_O]3[`0hLc_OX3\\\\`0kL`_OW3_`0j001O000001O0000000000001O001O00100O010O100O100O10O0100O1O1O1O100O1O1K5I7L4K4N3M3N2O1N2O1N2O1O1O1O1O1O100O1O100O1O1O1O100O1O2M2N2M3L4K5K6\\\\OR]OcNTc0\\\\1>N2O1O1O2O0O1O1O2N1O101N1O100O2O0O`BPOoI3l;m0UJQOmI5KL^;l0jJUOkI6IKb;j0jJUOkI6IKb;i0kJVOjI6JJa;j0kJVOkI5LH_;l0jJWOlI3m;e0YJVOlI2m;h0WJVOe6i0\\\\IWOVJ0Z;h0`JXOUJ2Z;e0bJXOUJ3Y;e0bJXOTJ5Z;a0dJYORJ6Z;a0dJYORJ6[;?fKAZ4?fKAZ4>gKBZ4<gKDY4<gKDZ4:gKFY49hKHX45jKKV43lKMU40mK0T4MnK3R4KPL5c<0000000000000000000000O2O0O2O0O2Omj6\"}}, {\"image_id\": 13, \"category_id\": 1, \"bbox\": [306.2457580566406, 408.1939697265625, 717.7542724609375, 241.1080322265625], \"score\": 0.9998100399971008, \"association_id\": 2, \"segmentation\": {\"size\": [683, 1024], \"counts\": 
\"geP84Ue08I6I5L2N2N2M3N1N3N1N3N101N10000O2O0O1O100O2N100O1O1SN]Ni_Om1R`0YNk_Oh1R`0\\\\Nj^OHc0m1^`0bNj^OEf0j1[`0hNk^OAh0h1]`0UOa_Om0]`0UOT_OjN1Q2k`0VOQ_OmN1n1m`0WOo^OnN2l1o`0WOm^OoN3j1o`0Hn^O9Ra0Hm^O8Sa0Q2O000O10000000000O100000000O100000001N100000000FdKd_O]4\\\\`0cKe_O\\\\4[`0dKe_O\\\\4e`0001O00001O01O0001O010O0010O100kKT_Oi3m`0PL[_Oo3Qa0O2O1N1O9H0O0010O000010O0001O0000101N2N3M2N2N2O0O2N2N1O1O2N100O1N01O1O1O0O2O1N2O1O001O1O10O01O1O100O10O10O10000O10O10O10O1O010O010O0010O010O0100O010O10O100000O10O100000O10O1000O10O100000O01000000O0100000O10O10O10000000O10O10000000000000O1000000000000O0100000000000O10000000000O10000000O0100000000O1000000O1000000O1000000O0100000O10000O10000000000O10000000000O1000000000000O100000O1000000000O1000O10000000O1000O1000000O10O10000000O1000000000000O10000000000000000O100000000000000000000000000000000000001O000000000O1000000000000000000000000001O01O0000000000000000000000001O00000000000001O0000000O2O0000001O00001O001O0000001O000000001O000000000000000000001O0000000000000000001O00000000000000000000001O000000000000000000001O00000000000000000O10O10000000000O0100000000O0100000O1000O1000O100000000000000O10000000000000000O100000000000000000001N10000000001N100000001O0O1000001O0O10001O0O101O0O101O001N101O001N2O001O1N2O1O1O1O0O2O1O1O001N101O1O00001O001N10001O001O001N101O001O1O0O2O00001O0O2O000000000O1000001O0O100000000O100000000O100000000O100000000000O1000001O0000000000000O10000000001O000O1000000O2O0O1N3M2N3M3M3L5BW@SKo?f4`0IUB\"}}, {\"image_id\": 13, \"category_id\": 2, \"bbox\": [194.61843872070312, 654.0232543945312, 38.877410888671875, 11.2398681640625], \"score\": 0.9994828701019287, \"association_id\": 1, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"VQV41Ze000001O00001O0001O0001O01O01O0000100O000010O010O00001OPYb`0\"}}, {\"image_id\": 13, \"category_id\": 2, \"bbox\": [0.0, 423.5630187988281, 802.417236328125, 109.41159057617188], \"score\": 0.9991756081581116, \"association_id\": 3, \"segmentation\": {\"size\": [683, 1024], \"counts\": 
\"W_61Ye02O1O1N101O00000O1000000O10000000000O1000000000000O100000000000000O1000000000000O1000000000000O10000000000000O1000O1000000000000000000000000O10O1000000000000000O100000000000000O100000O100000000000000000000000O100000000000000O1000000O10O10000000000000000000000000O1000000000000000000000000O100000O1000000O101O0O10000O10000O1O101N1O100O101N10001N1000001N10O2O000O101N100O2M200O2O001O001O0O2O001O00001O00001N1000001O00001O0O101O000O2M2O1O2O0O100O101N1L400O10000O10001O000O10000000000O1000000000000O10000000000000000O1000000000000000000000000000000000000000000000000000001O0000000000000000000000001O000000001O0000001O00001O0000001O000001O01O00000000000010O000001O01O000101N6J2O1N0010O01O000010O0001O00000001O0001O0001O000010O02N3M3N0O1O100O001O00010O0000001O000000001O00001O001O001N2O2N:F2N2N1O1O1O1O0O2O00001O00001O0O10001O00001N101O0O2O1O2M4L5J^Z[10cedN1JOQ[O2nd07O0O2OO2O001HQ[O2od0LS[O3Te0O1OPQn30PoQL000O2O0000000000000000001N10000000000000000000001O0oZOJmd06R[OKmd06R[OKnd0900000000001KQ[OLod04400O101O0000000O2O00000O2Okdg5\"}}, {\"image_id\": 13, \"category_id\": 1, \"bbox\": [0.0, 247.40896606445312, 715.534423828125, 267.1197814941406], \"score\": 0.9946529865264893, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": 
\"Z\\\\b02Ve07I6K6J5L4M1N2N2N101N2O0O2N101N2N2N2N2O1N2N3M3M2N3M2N2N2O001N2O0O2O001N10001O000000001O000000001O0000001O000000001O00000000000000000000000000000000000001O000000000000000001O0000000000000000000000000000000001O00000000000000000000O1000000000001O000O10000000001O0000000O100000001O00000O10000O2O000O100O2O0O1O101N10001N1000001N1000000O100000O1000O10O1000O10O10O100O1000O0100O100000O010000O10000O100O100O100O1O1O100O1O10000O100O1000000O1000000O100O100O100O00100O1O1O100O100O100O100O10000O10000O100O10000O1000000O100000000O100000000000000O1000000000000000O1000000000000000000000000000000000000000000001O00000000000000001O0000000000001O00000000001O0000001O000O2O00001O0000001O0000001O000000001N1000001O000O2O001O0O2O1O001N2O1O001N101O001O0O101O001O00001O1O1O1N3N1O2N2N2N2N2N3M2N1O1O2n@SJb>n5ZAUJf>l5WAWJh>j5VAWJi>j5UAYJj>T601O0001O01O000000001O0000001O01O0001O0000001O000010O000000000O1000000O1000000O1000001N1000000O1@UA`Jk>\\\\5ZAcJf>X5`AgJ`>V5dAhJ]>T5gAlJY>Q5kAnJV>n4nAPKS>m4PBSKP>k4SBSKo=l4RBSKn=l4SBTKm=l4TBRKm=m4Z1O000O10001N10000O10001O0O10000O10000O10000O10001N1O1O1O1O1O1O1O1O1O1O100O100O10000O1000000O1000000O10000O10000O10000O100O100O100O10000O10000O100O100O1O100O1O1O100O0010000O100O10000O10000O1000000O101O000O10000O100O10000O1000000O1000001N100000000O10000000000O10000000000O10000000000O10000000000000000O100000001O0000000000000000001O0000000000000000000000000000000000O100000000000000000000000000000000O10O10000000000O100000000O10000000000O1000O1001N100O2N101N1O2N1N4M3L5J?ROVT]6\"}}, {\"image_id\": 13, \"category_id\": 1, \"bbox\": [57.652156829833984, 341.3053894042969, 966.3478393554688, 250.57083129882812], \"score\": 0.08655548840761185, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": 
\"WRW1:ed0k0]O5L4M3N101N2N101N2O0O2O0O101O000O2O0000001O000O10001O00000O101O000000001N10000000001O0O100000001O000000000O2O0000000000001O0000000000001O00000000000000001N10000000000000000000001O00000000000000000O1000001O0000000000000000000000001O000O10000000000O1000000O10000O1O2O0O1O100O100O100O100O100O10000O2O0000000O100000000000000O10000000O10000000000000O10000000000000000000000000000000000000O1000000000000000O100000O1000000000000000000000000000000O10000000000000000000000001O00000000000000000000000000000000000000000000000000000000000000000000001O0000000000000000000000001O0000000000000000001O0000001O0000001O00001O0000001O000000001O000000001O00000000001O00000000001O0000001O0000001O002N1O2N1O2N1O2N1O2N1eAULT;m3cD[L\\\\;f3`D]L`;g3XBkKc1b0T<f3QBoKf1>Y<Q4dCRL[<P4aCRL_<o3_CSL`<n3]CTLc<m3[CTLe<X4nBjKQ=Y4kBhKU=Z4YBdJ4S1c=e4[B]Kd=e4YB\\\\Kg=f4VB[Kj=f4TB[Kl=g4QB[Kn=Q60001O001O1O001O1O1O001O001O00001O00001O0000001O0000000000000000001O00000000000000000000000000000000000000ZL^BCb=;aBE^=:eBD[=;gBDY=;jBCV=<lBCT=<nBCR=<PCCP=<QCDo<;SCDm<;TCEl<:UCFk<9WCFi<9XCGh<8YCHg<6[CJe<5\\\\CKd<5]CJc<5^CKb<5^CKb<5^CKb<4`CK`<5`CJa<6_CJa<5`CK`<5`CK`<4aCL_<4bCK^<5bCK^<4cCL]<4cCL]<4cCL]<3dCM\\\\<3dCM\\\\<3eCL[<3fCMZ<3fCMZ<2gCNY<2gCNY<2gCNY<1hCOX<1iCNW<1jCOV<1jCNW<1jCOV<1jCOV<1jCOV<0kC1T<NlC3T<MlC3T<LmC4S<LmC4S<KmC6S<InC7R<HoC8Q<GPD9P<GoC:Q<EPD;P<EPD<o;CRD=n;BSD>m;BRD?n;@SD`0m;_OTDa0l;_OTDa0l;^OUDb0k;^OUDb0k;]OUDd0k;[OVDe0j;[OVDe0j;ZOWDf0i;ZOWDf0i;YOXDg0h;XOYDh0g;XOXDj0g;UOZDk0f;UOZDk0f;TO[Dl0e;TO[Dl0e;TO[Dl0e;SO\\\\Dm0d;SO[Dn0e;RO[Dn0e;QO\\\\Do0d;QO\\\\Do0d;PO]DP1c;PO\\\\DQ1d;nN]DR1c;nN]DR1c;mN]DT1c;lN]DT1c;lN\\\\DU1d;kN\\\\DU1d;jN\\\\DW1d;iN\\\\DW1d;iN\\\\DW1d;hN\\\\DY1d;gN\\\\DY1d;gN[DZ1e;fN[DZ1e;fNZD[1f;dN[D\\\\1e;dN[D\\\\1e;dNZD]1f;cNZD]1f;cNZD]1f;cNYD^1g;aNZD_1f;aNYD`1g;`NYD`1g;`NYD`1h;_NXDa1h;_NWDb1i;^NWDb1i;]NXDc1h;]NXDc1h;]NXDc1h;]NXDc1h;]NXDc1h;]NWDd1i;\\\\NWDd1i;\\\\NWDd1i;[NXDe1h;[NXDe1h;[NXDe1h;[NXDe1h;[NXDe1h;[NXDe1h;[NXDe1h;ZNYDf1g;ZNXDg1h;YNXDg1h;YNXDg1h;YNXDg1h;XNYDh1g;XNYDh1h;WNXDi1h;
WNXDi1h;VNYDj1g;VNYDj1g;VNYDj1g;UNZDk1f;UNZDk1f;UNYDl1g;TNYDl1g;SNZDm1f;SNZDm1f;SNZDn1e;RN[Dn1e;QN\\\\Do1d;QN\\\\Do1d;QN\\\\Do1d;QN\\\\Do1d;QN\\\\Do1d;QN\\\\Do1d;QN\\\\Do1d;QN\\\\Do1d;QN\\\\Dn1e;RN[Dn1e;QN\\\\Do1d;QN\\\\Do1d;QN[DP2e;PN[DP2e;PN[DP2e;PN[DP2e;PN[DP2e;PN[DP2e;PN[DP2e;PN[DP2e;PN[DP2e;oM\\\\DQ2d;oM\\\\DQ2d;oM\\\\DQ2d;oM\\\\DQ2d;oM\\\\DQ2d;oM\\\\DQ2d;oM\\\\DQ2d;oM\\\\DQ2d;oM\\\\DQ2d;oM[DR2e;mM\\\\DS2d;mM\\\\DS2d;mM\\\\DS2d;mM\\\\DS2d;mM\\\\DS2d;mM\\\\DS2d;mM\\\\DS2d;lM]DT2c;lM]DT2c;lM]DT2c;lM]DT2c;lM]DT2c;kM^DU2c;jM\\\\DW2d;iM\\\\DW2d;iM\\\\DW2d;hM]DX2c;hM]DX2c;hM]DX2c;hM]DX2c;gM^DY2b;gM^DY2b;gM^DY2b;gM^DY2c;eM]D\\\\2c;dM]D\\\\2c;dM]D\\\\2c;dM]D\\\\2c;cM^D]2b;cM^D]2b;cM^D]2b;cM^D]2c;aM]D`2c;`M]D`2c;`M]D`2c;`M]D`2c;`M]D`2c;_M^Da2c;^M\\\\Dc2c;^M]Db2c;]M^Dc2b;]M^Dc2b;]M^Dc2b;\\\\M_Dd2a;\\\\M_Dd2a;\\\\M_Dd2`;\\\\MaDd2_;\\\\MaDd2_;\\\\MaDd2_;[MbDe2^;[MbDe2^;[MbDe2^;ZMcDf2];ZMcDf2];ZMbDg2^;XMcDh2];XMcDh2\\\\;YMdDg2\\\\;XMeDh2[;XMeDh2[;XMeDh2[;XMeDh2[;WMfDi2Z;WMfDi2Z;WMfDi2Z;WMfDi2Z;VMgDj2Y;VMgDj2Y;VMgDj2Y;VMgDj2Y;VMgDj2Y;UMhDk2W;VMiDj2W;VMiDj2W;VMiDj2W;VMiDj2X;UMhDk2X;TMiDl2W;TMiDl2W;TMiDl2W;TMiDl2W;TMiDl2W;TMjDk2V;UMjDk2V;UMjDk2V;TMkDl2U;TMkDl2U;TMkDl2U;TMkDl2U;TMkDl2U;TMkDl2U;TMlDk2T;TMmDl2S;TMmDl2S;TMmDm2R;SMnDm2R;SMnDm2R;SMnDm2R;SMoDl2Q;SMPEm2P;SMPEm2P;SMPEm2P;SMPEm2P;SMPEm2P;SMPEm2P;SMPEm2P;SMPEm2P;RMQEn2o:RMQEn2o:RMQEn2o:RMQEn2o:SMPEm2P;SMPEm2P;SMPEm2P;SMPEm2P;SMPEm2P;SMPEm2o:TMQEl2o:TMQEl2o:TMQEl2o:TMQEl2o:TMQEl2o:TMQEl2o:TMQEl2o:TMQEl2o:TMQEl2o:TMQEl2o:TMQEl2o:TMQEl2o:TMQEl2o:TMQEl2o:TMQEl2o:TMQEl2o:TMQEl2o:TMQEl2o:TMQEl2o:TMQEl2o:TMQEl2o:TMQEl2o:TMQEl2o:TMQEl2o:TMQEl2o:TMQEl2o:TMQEl2o:TMQEl2o:TMQEl2o:TMQEl2o:TMQEl2o:TMQEl2o:TMQEl2o:TMPEm2P;SMPEm2P;SMPEm2o:TMQEl2o:TMQEl2o:TMQEl2o:TMQEl2o:TMQEl2o:TMQEl2o:TMQEl2o:TMQEl2o:TMQEl2o:TMQEl2o:TMQEl2o:TMPEm2P;SMPEm2P;SMPEm2P;SMPEm2P;SMPEm2P;SMPEm2P;SMPEm2P;SMPEm2P;SMPEm2P;SMPEm2P;SMPEm2P;SMPEm2P;SMPEm2P;SMPEm2P;SMoDn2R;QMnDo2R;QMnDo2R;QMnDo2R;QMnDo2R;QMnDo2R;QMnDo2R;QMnDo2R;QMnDo2R;QMnDo2R;QMmDP3S;PMmDP3T;oLlDQ3T
;oLlDQ3T;nLmDR3S;nLmDR3S;nLmDR3S;nLmDR3S;nLmDR3S;nLmDR3T;mLlDS3T;mLlDS3T;mLkDT3U;kLlDU3T;kLlDU3T;kLlDU3U;iLlDW3T;iLlDW3W;eLjD[3Y;_LjDa3Y;[LhDe3Y;YLhDg3Y;WLhDi3Y;VLgDj3Y;ULhDk3Y;SLgDn3Z;PLgDP4Z;nKgDR4Z;mKfDS4[;kKfDU4X>00000O1000000000000O100000000O1000000O10000O1000000O100000000000000O1000000000000000000O10000000000000000O10000000000000000O1000000000000000000000000000000000000000000000000000O100000000000000000000000001O0000000000000000000000000000000000001O000000000000000000001N10001O000O2O001N102N2M3K5K7H:D_A\"}}, {\"image_id\": 13, \"category_id\": 2, \"bbox\": [200.16744995117188, 348.2013244628906, 823.83251953125, 309.4660339355469], \"score\": 0.05480420961976051, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"^a`56Qe04M3O2N100O1O2N1O1O1O1O2N100O100O2O000O100O2O00000O101O000O10001O0O1000001O000000001O0000000010O000001O00001O000010O0001O00001O001O01O01O001O001N101O1O001O00001O00001O00001O0000001O0000001O000000001O01O0000001O00001O00010O001O001O1O1O1O010O1O001O001O010O0O2O001N101O1N101N2O1N101N2O1N2O1N2N2O2L4L3M4M2M3Mnog10SPXN4M1N3M2O1N2O1O0O10O10000000O010000000O1000O10O10000O10000O10000O0100000O11O0O100O101N101N2O1N2O1N3M3MeUj0:QjUO1O1O1O2M2O1O100O1O100O10000000000O1000000000000000000000000O100000001O00000O2O000000001O0O10000000001O000O100000000000001O0O100000000000000000000000000000000000000O1000000000000000000000000O1000000O1000000O10000O10000O10000O100000000O10000000O011O0000000000000000000000000000000000000O101O0000000000000000000000000000010O000000000000000000000010O0000000000000001O0001O0001O0001O0000010O0000001O01O01O00001O00001O01O01O0000001O00000000001O00000000001O00000000001O000000001O000000001O000O101O0000001O000000001O0000000000001O00000O1000000000001O0001O00000000000000000001O0001O0000000000000000000000001O0000001O0000001O00001O001O001O001O001O001O001O001O00001O0000001O0000001O00001O00001O00001O001O00001O001O00001O00001O000O101O00001O000O2O00001O001O0O101O001O1N101N101N100O2O0O2O000O10001N10001N100O2O000O2O0O10Y\\\\a
0\"}}, {\"image_id\": 14, \"category_id\": 1, \"bbox\": [626.38232421875, 380.74615478515625, 65.54296875, 172.29931640625], \"score\": 1.0, \"association_id\": 2, \"segmentation\": {\"size\": [864, 1152], \"counts\": \"\\\\jd`0`0]j0=C7J5K6K5I6J6K5gXOlM^e0Z2VZORNde0X2mYOQNPf0S2gYOTNWf0\\\\3M3L6J4I6L5M2N2000O100O1N200001O1O2N4L2N10O1O2N2N4L3M3M3M2N2gMZZOTOge0i0aZOQO`e0l0jZOkNYe0Q1a[OUNcd0g1]2N2M3K7I5L5L4L7I5L8H]RW<\"}}, {\"image_id\": 14, \"category_id\": 1, \"bbox\": [216.3568572998047, 388.98138427734375, 87.37037658691406, 190.781005859375], \"score\": 0.9999998807907104, \"association_id\": 3, \"segmentation\": {\"size\": [864, 1152], \"counts\": \"mWi56fj05L4M2N3M2N1M3L4O1M4M2M3N2O1N3M3M4N2N2N2O2N2N3L3M3K6L3N2N3L3M4Kj0VO7H5D=K7H<E9H3N1N101O1O001O001O10O1O2O1N1O2M5L6K1N2N2M2O1POPZOnLPf0T2U[OcMmd0T2`[OfMbd0X2d[ObM_d0[2T2N3M3N3N1N2O1N2O2M4L2M4M5L4K5K;C<Beg\\\\f0\"}}, {\"image_id\": 14, \"category_id\": 2, \"bbox\": [417.3812561035156, 563.119873046875, 69.71612548828125, 35.09332275390625], \"score\": 0.9999997615814209, \"association_id\": 1, \"segmentation\": {\"size\": [864, 1152], \"counts\": \"lmR;8fj03N1O2O0O1000O10O10O1000000000O2O001N10001N101O00001O1O1O1O1O1O0001N5I5L4K5MSUXb0\"}}, {\"image_id\": 14, \"category_id\": 2, \"bbox\": [599.5497436523438, 528.3925170898438, 68.93878173828125, 30.6788330078125], \"score\": 0.9999991655349731, \"association_id\": 2, \"segmentation\": {\"size\": [864, 1152], \"counts\": \"a^l?7ij01O1N2O001O00001O000000001O000000001O0000000010O0001O001O01O010O1A`UO<dj0N101N100O2N010O01O010O1O0O2O001O001ME_UO;aj0EaUO:cj0O011N2N3M2N1O00cVk<\"}}, {\"image_id\": 14, \"category_id\": 1, \"bbox\": [432.887939453125, 373.47430419921875, 88.04345703125, 227.4456787109375], \"score\": 0.9999983310699463, \"association_id\": 1, \"segmentation\": {\"size\": [864, 1152], \"counts\": 
\"R[`;?[j0:H5D<B>C<E<I8I6J6L4N2O2M5J5M6I8I4K8HQ1PO5J5L2M4M1N3N2M1O1O1O1O3M2O1N1O100O2M2O100000O11O0002M2N2L5M2N3M2N2M3J6QOo0I6L5M3L4M3N3N1O1N101O1O001O1O001N1bMRYOj0nf0QO^YOM^ODVg0;cYOJ^ODSg0>fYOH]OCQg0b0fYOGA^Omf0g0iYOAWg0;l1In[g`0\"}}, {\"image_id\": 14, \"category_id\": 2, \"bbox\": [0.0, 553.4743041992188, 251.43006896972656, 153.20745849609375], \"score\": 0.9888629913330078, \"association_id\": 0, \"segmentation\": {\"size\": [864, 1152], \"counts\": \"f`1:dj03N2N100O1O1O1O100O100O100O1O100O1O101O0O10000O100O1O1O100O10000O1O1O1O100O100O100O001O1O100O10000O1O010O001O01O010O10O001O00010O0100O010O010O00010O00100O001O1N101O10O01O01O01O1O010O10O01O000O1O100010O010O1O0010O000O2O0O110O01N100O2O01O010O1O00001OO11O0010O10O01O0001O0010O0100O100O1O00100O10O10O01O01O00001O010O010OO100O2O001O010O10O00O10001O010O010O010O01O10O0100O10O10O10O00010O010O01000O010O01O010O01O1O001O1O001O1O0010OO10O00011O00100O1O10ON3N2N2O100O1O100O1O100O1O100O1O1O2N1O1N2O2M2O3LdVjg0\"}}, {\"image_id\": 14, \"category_id\": 2, \"bbox\": [699.7420043945312, 451.7844543457031, 35.23779296875, 38.684814453125], \"score\": 0.9856033325195312, \"association_id\": 0, \"segmentation\": {\"size\": [864, 1152], \"counts\": \"Wn_b01oj01O0O100UO0i04jUOJ]i08`VOK_i06_VOK`i06^VOLbi04^VOLai05_VOKai04`VOLai03`VOL`i04`VOL`i04`VOLai03^VONbi02]VOOdi0O]VO1ci00]VO0bi0N`VO2`i0N`VO2ai0M`VO2`i0N`VO2_i0NeVOO\\\\i0OeVO1[i00eVON]i02k0NO0bhR;\"}}, {\"image_id\": 14, \"category_id\": 2, \"bbox\": [187.34548950195312, 542.8983154296875, 73.44424438476562, 45.33209228515625], \"score\": 0.8969330787658691, \"association_id\": 3, \"segmentation\": {\"size\": [864, 1152], \"counts\": \"XgQ52mj02N2O0000000000000ENhUO2Vj00jUO0Uj02jUONVj03hUONWj03jUOLUj05kUOKTj06lUOJSj07mUOISj07lUOJSj06oUOIPj08PVOHoi09QVOGni0:RVOFmi0;SVOEmi0;SVOEli0=SVOCmi0=SVOCli0>SVOBmi0?QVOBPj0>oUOARj0f00100N10010O1O10O0100O010O0001O1O001O1O001N2N2O1N2O100O10000O100N2MTZdg0\"}}, {\"image_id\": 14, \"category_id\": 1, \"bbox\": [695.1948852539062, 
84.19700622558594, 48.553466796875, 456.41851806640625], \"score\": 0.4355030953884125, \"association_id\": 0, \"segmentation\": {\"size\": [864, 1152], \"counts\": \"Ukib0V1Vi0g1XJVM[@e4_>SLm_O]5;lIb<X1kAR6c0]I]=\\\\9cAXGZ>i:M3N2O0O00N3jMmA\\\\GT>Y8eBYG[=:eAY6f1QIi<4YBa5U2QJQ>[3]DYL\\\\<_1fXS;\"}}, {\"image_id\": 14, \"category_id\": 2, \"bbox\": [701.9727172851562, 470.3985595703125, 33.2802734375, 19.128875732421875], \"score\": 0.40858832001686096, \"association_id\": 0, \"segmentation\": {\"size\": [864, 1152], \"counts\": \"Udab03mj0000O1O001O1000000000000000000O20O0000001O001O1O1O002NigR;\"}}, {\"image_id\": 14, \"category_id\": 2, \"bbox\": [600.4208374023438, 454.10467529296875, 26.146240234375, 16.56671142578125], \"score\": 0.10324299335479736, \"association_id\": 0, \"segmentation\": {\"size\": [864, 1152], \"counts\": \"PP\\\\n0\"}}, {\"image_id\": 15, \"category_id\": 2, \"bbox\": [83.17974090576172, 304.0821533203125, 442.139404296875, 121.77130126953125], \"score\": 1.0, \"association_id\": 2, \"segmentation\": {\"size\": [426, 640], \"counts\": 
\"UTR22m;O\\\\E2c:OZE5d:MYE5g:KXE6h:JWE7i:IWE8h:HXE8h:HWE9i:HVE8X9DYG5_O7W9FYG3@8U9F[G2@8U9F[G2@8T9G\\\\G1@8S9I[G1@7U9H[G1@6U9KZGOA6T9L[GNA6T9MZGNA5T9NZGNB4S90ZGLC4S90ZGLC4S90ZGMB3T90ZGMB3S91[GLA4T91ZGKB4T91ZGKB4T91ZGKB4T91ZGKB4T91ZGLA3U91ZGLA3U92YGKB3U92YGKB3U92YGKB3U92XGLC2U92XGLC2U92XGLC2U93WGKD2U93WGKC3V92WGKC3V93VGKC2W93VGKC2W93VGKC2W93VGKC2W94UGJD2W94VGIC3W94VGIC3W95UGIC2X95UGIC2X96TGHC3Y95TGHC3Z95RGIC2[95SGHB3[96RGGC3[96RGHB2\\\\97QGGC2\\\\97RGFB3\\\\98QGEC3\\\\98QGFB2]98QGFB2]99PGEC2]99oFFD1]99oFFC2^99nFFC1_9:mFED1_9:lFFE0_9;kFFENa9=iFEFNa9?gFDGMb9a0TFPO;b0OMb9i0TFiN1b08Mc9i0QFkN3`08Ld9P1TFTO8Ld9Q1SFSO9Ld9Q1SFSO9Ld9R1QFTO9Kf9Q1QFTO9Jg9k1XFVNh9j1WFWNh9k1VFUNk9k1TFVNl9d20O1000000O10000000000O1000000O1000O010000000000O101O0000000000000000000000001O000001O01O000010O00010O0000010O000001O00000010O000001O0000000010O000000001O00001O000000010O00000000000010O0001O001O00001O0001O01O000000001O001O00001O0000001O00000001O01O000010O0010O000001O01O000001O0000001O001O00001O0000001O0000001O00001O001O00001O000000001O0000001O001O1O1O1O001O001O0000001O001O001O2N2N1O1O1O1O001O00001O0000001N10001O00001O00001O00001O0000100O007I3M4L00001O0000001O00001O00002N2N2N001O0000001O000O101O00001O1O2N1O1O1O001N2O001O001N2O0ZOdC=^<@hC<e<CWC4R=Mcee1\"}}, {\"image_id\": 15, \"category_id\": 1, \"bbox\": [76.1489486694336, 174.55462646484375, 115.09102630615234, 203.97781372070312], \"score\": 1.0, \"association_id\": 2, \"segmentation\": {\"size\": [426, 640], \"counts\": \"fjo0g0^<<G4K5H:J5L4K5Ca0A>H4L3M6K4L3N2N3M3M2N2N3M8H2N2N3M<aGdKX7Y5N1O1O2N1O000000O100N2eNTIWLm6e3ZIWLg6g3\\\\IWLe6g3_IVLb6h3fIQL[6m3b1O1O1N2O1O10000001O1O3M3WGfK\\\\8g4M3M9G8H2N1O3_HkJg6m5O1O000000000001O00001O1QN]JULe5f3aJVLa5h3cJUL_5i3R2N3N2M200O2O0O7I7I5K2N1O1O001O001O000kMiEW1W:gNTFP1l9oN^Fg0c9WObFf0^9YOhFb0X9^OjF?W9AnF9S9HoF4R9LQG0P90SGLn84VGFl8:[G]Og8d0o100O101O0O3Jabj5\"}}, {\"image_id\": 15, \"category_id\": 1, \"bbox\": [448.3840026855469, 128.87680053710938, 144.39193725585938, 286.4593505859375], \"score\": 
0.9999994039535522, \"association_id\": 1, \"segmentation\": {\"size\": [426, 640], \"counts\": \"iVR67R=7H3M4L3M2O2kKZOdKg0X4CaK>\\\\4FaK<]4FbK<Z4GeK:X4IgK9U4KjK5T4OiK2U40jK0V41iK0V41hK1V42hKNX43fKOY43eKNZ44cKM[48bKIZ4>bKC[4c0aK]O]4h0_KZO_4h0`KWO`4l0]KSOd4Q1XKlNj4Z1PKcNR5b1jJ]NX5e1eJ[N\\\\5i1aJVN`5m1\\\\JRNe5T2UJkMm5Y2nIfMS62lHl0n0ROV6`2gI`MX6f2_I_M^6h4N2O012N2M=D=C7H4L5K4L3L3N2O0O100O10000000000O1O1N2M3M3N2M2L4J50M3M5M3K5A?I7I7I7D<A?L4N2O1N2M3L4N2N2N2I7G9J6N2M3I7oNQ1I7M3M3J6H8N2O10000O2VN\\\\El0d:QOdEj0]:TOfEj0[:ROjEk0\\\\;O2N1N3M4J8H;EeSg0\"}}, {\"image_id\": 15, \"category_id\": 2, \"bbox\": [522.8689575195312, 384.5923156738281, 114.99481201171875, 34.51287841796875], \"score\": 0.9999949932098389, \"association_id\": 1, \"segmentation\": {\"size\": [426, 640], \"counts\": \"_VP72P=9M200000001O00000000001O00001O0000000001O01O000000000001O01O000010O0001O01O00010O00010O0001O01O01O00001O0010O01O1O0010O0000000000001O01O0001N10001O001O0000001O0001O000000001O0001O01O0001O001O001N2NQ>\"}}, {\"image_id\": 15, \"category_id\": 2, \"bbox\": [156.1410675048828, 278.6421203613281, 339.8519287109375, 19.5286865234375], \"score\": 0.27020686864852905, \"association_id\": 0, \"segmentation\": {\"size\": [426, 640], \"counts\": \"Z]\\\\21Y=1N10000000000O2O0000000000000000000O100000000000O1000000000000000000000000000O100000000000000000000000000001O0000000O100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000O10000000000000000O2O000000000000O00100000000O1000000000000000000000O10000000O10000000000000000000000000000O1000000000000000000000O10000000O100000000000O1000000O010000000O10000000000000000O100000000000]WU3\"}}, {\"image_id\": 16, \"category_id\": 1, \"bbox\": [322.69256591796875, 599.4840698242188, 117.63412475585938, 72.31842041015625], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [683, 1024], \"counts\": 
\"g[i61Ye06I7K2M2O3L<a[OYOdc0Z1L3N;E2N1O00000O10000000O1]Om\\\\OnNSc0Q1o\\\\OmNRc0Q1S]OlNmb0R1Y]OgNib0Z1d0O1000000000000001O00000000000000000000000000000000O100O1O10000O1000000001O0_\\\\OkNlb0V1P]OnNob0S1m\\\\OQORc0e1O1O1O000001O000WOS]OSOnb0i0Z]OQOhb0k0^]OROdb0m0m000O101O10O01O01O1000O010O1O2N100O4L1O011N2O0000O101N7J1N2N100O01O00001O1O1NRmT<\"}}, {\"image_id\": 16, \"category_id\": 1, \"bbox\": [911.1560668945312, 411.07623291015625, 74.3721923828125, 63.301849365234375], \"score\": 1.0, \"association_id\": 2, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"mfRc07Qe08I5M1N4M3j[OYOac0Z1O0O2O001OO0100O01O100F:N2O100O010000V\\\\OmN\\\\c0T1b\\\\OnN]c0R1b\\\\OoN^c0Q1a\\\\OQO^c0^11O1N2O0O2L4L3N2O100O2O0O101O0010O00010O100O1O1000O2O4K102M2N101N101N1O1N3N2M3Mllj0\"}}, {\"image_id\": 16, \"category_id\": 1, \"bbox\": [597.2362670898438, 511.1118469238281, 80.89385986328125, 74.66696166992188], \"score\": 0.9999998807907104, \"association_id\": 3, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"]\\\\e<1?1C5Wd0P1J7I7H3M3O0O100000O100O100\\\\O`\\\\O]O`c0`0Q]OQOPc0m0i0N100O1O10O100IUOm[Om0Rd07N101O01O01O1O011`\\\\OkNfb0X1T]OlNlb0[1j\\\\OgNZc0c11O0O101O0O1O2N1N2N2L4J6O2O01O0010O10O111O2M1006I1O5J4L1O2O0O1N3N1N2M]WV7\"}}, {\"image_id\": 16, \"category_id\": 2, \"bbox\": [246.5590362548828, 660.6370849609375, 167.3413848876953, 16.25189208984375], \"score\": 0.9999992847442627, \"association_id\": 1, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"l\\\\U53We02O00001N10000000000000000O1000000000000000000000000O100000000O1000000000000O1000000000000000000000000000000000000000000000000000000000000O1000000000O10001O0000000O101O001O1O1O1N10001OO1000000000000O100O1O10O010000O1000000000O10000000000000000000000000O0100000000000000000001O0000001N^e8NdZG2O1O0O101O000000O2MQWf<\"}}, {\"image_id\": 16, \"category_id\": 2, \"bbox\": [526.0778198242188, 580.1715087890625, 121.59466552734375, 14.4229736328125], \"score\": 0.9999992847442627, \"association_id\": 3, 
\"segmentation\": {\"size\": [683, 1024], \"counts\": \"bRR;2Xe02O001O0O101O000000000000001O0000000O100000000000000000000O100000O10000000O1000000000000000O10O1000000000000O100000000000O10000000O100000000000O2O0000000O100000001O0O1000000000O1000O2O0000000000000000000000000000000O2O001ORWn7\"}}, {\"image_id\": 16, \"category_id\": 2, \"bbox\": [526.4806518554688, 539.9332885742188, 66.90313720703125, 7.5494384765625], \"score\": 0.999931812286377, \"association_id\": 4, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"l[S;1Ze01O000000000O100000001O000000000O10O10000000000000O10000000000000000000000000000O100000O1000000000000O10000000000O101O0OYnn8\"}}, {\"image_id\": 16, \"category_id\": 1, \"bbox\": [578.82568359375, 490.4664001464844, 68.952880859375, 51.818084716796875], \"score\": 0.9999189376831055, \"association_id\": 4, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"beR<2We05M1N2O1N4M1O;E7d[OSOlc0W1N2O00AV\\\\O]Ojc0a0Y\\\\O]Ohc0b0]\\\\OZOcc0e0_\\\\OWOdc0i0?O01000001O000000001O000000000000010f[OVOQd0m0k[OVOSd0h0n[OYORd0g0n[OZORd0d0o[O\\\\OQd0b0Q\\\\O]OPd0b0R\\\\OZOQd0e0Q\\\\OZOoc0f0<N10001O001O1N2O1O1O101N1N2M7K1N10iZO100Pe02oZO0Pe0OP[O2od0NP[O5Pe0IQ[O6od0JQ[O7nd0IR[O7Ue0MOLlZO1[ck7\"}}, {\"image_id\": 16, \"category_id\": 1, \"bbox\": [582.1500854492188, 491.2483825683594, 84.5728759765625, 78.01046752929688], \"score\": 0.6611122488975525, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"ceT<7Qe06M1N4M3Z[OBXd0j0O5K5K00O1O1G9L4O100000000000000000000000_\\\\OWO]O0eb0i0^]O7`b0I^]O:ab0G[]O<eb0DZ]O=fb0CX]O?hb0AW]O`0ib0AV]O?jb0AU]O`0kb0AT]O?lb0AT]O?mb0^OU]Ob0kb0]OU]Od0kb0[OW]Od0jb0[OW]O:Sc0Gm\\\\O5Vc0Jk\\\\O4Xc0Jj\\\\O4Xc0Ji\\\\O5Xc0Kh\\\\O5Yc0Jg\\\\O6Zc0Je\\\\O6[c0Je\\\\O6]c0Ia\\\\O9ac0D_\\\\O=cc0A\\\\\\\\O?ec0AY\\\\O`0hc0@W\\\\Oa0ic0_OU\\\\Ob0lc0>10O02O1O0008KM0O1O1O000000000O2O0N200O10010O0100O01O100O1O2JVc]7\"}}, {\"image_id\": 16, \"category_id\": 2, \"bbox\": [871.6537475585938, 467.3328552246094, 
61.44122314453125, 10.3782958984375], \"score\": 0.5428647994995117, \"association_id\": 2, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"kgVb02Xe02O0O1000000O100000000000000O0100000000000000O1000000O100000000000000O10000000000001O000000000000000000O101O00O11O00oll1\"}}, {\"image_id\": 16, \"category_id\": 2, \"bbox\": [532.7743530273438, 535.6106567382812, 76.18731689453125, 11.97052001953125], \"score\": 0.05922269448637962, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"m[U;2Ye00000000O2O0000000000000000O0100000000000000O10000000000000000O10000000000000O100000O1000O100000000O10001O00000O02O000O100000O10O10000001N1O10ace8\"}}, {\"image_id\": 17, \"category_id\": 1, \"bbox\": [76.98246002197266, 521.7070922851562, 81.90959930419922, 135.1629638671875], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [1228, 819], \"counts\": \"oaP31VV17K5jM;]mNJ]R1=^mNG^R1=\\\\mNH`R1>XmNIfR1:VmNHhR1?olNDPS1l100O1N2O1OO1N2O0101O1O10FmlN[MUS1P3N5K1O00000000000000O1M31O1O0000001O1O001O001O001O2N1O2N3M3M3M5K2N001O0000000001O1O2N1O2M6lNhmNjMZR1m0`mNcNO?d0KnQ1n0YoNnNQQ1e0a2I5LbTnh0\"}}, {\"image_id\": 17, \"category_id\": 1, \"bbox\": [288.3409423828125, 531.3508911132812, 261.3314208984375, 618.2094116210938], \"score\": 1.0, \"association_id\": 2, \"segmentation\": {\"size\": [1228, 819], \"counts\": 
\"kZ];=]U1T1ROh0XNh1_O:H6K5K4M3L4L5K6Hj0VO`0B:_VOaIcb0e6U]OiI_b0\\\\6Z]OkIab0Y6Z]OkIdb0Y6U]OlIhb0Y6P]OlImb0[6i\\\\OlISc0l6n[O_Imc0b<M4L3O1N3N1N2M3M3J7I6L4oLY@Z_Oj?c`0h@k^O[?Ta0j@e^OY?Za0j@a^OY?^a0k@[^OY?ea0k@R^OZ?ma0j@k]O[?Tb0k@Y]Oc?gb0Q2O10000O1000O1000000O100O100O1O1O1O2M2N2M3M3L4K5gMi\\\\OVA^c0e>Z]OV@Xc0f?i1L4L4M2N3N3N1O1O1N2O1O1O1O1O1O1N2M3K5I7bNkXO^C[g0Y<a1L4Me\\\\O`CP=]<PChCn<U<QCPDo<k;QC[Dm<a;QCfDn<V;PCQEo<l:PCYEo<e:PC^EP=a:oBbEP=\\\\:PCgEo<Y:PChEP=W:oBkEQ=U:mBmES=S:jBPFV=P:d_OaEnNi0^a0e9Y_OcIg`0]6U_OgIk`0Y6S_OiIm`0V6R_OlIn`0T6P_OoIo`0S6m^OoISa0T6h^OnIXa0U6b^OnI^a0W6P^OVJPb0S=00000000000000000000O10O100000000O101O0O1aM[]OWAeb0c>n]Oo@Sb0m>X^Ol@ha0o>e^Og@^a0R?R_Ob@Qa0W?b_OY@b`0]?\\\\3D<G:G9K5L4L5J7H9G:A>@?E:J6K4M3L4L5K4L4L3L4K5M3M2O2M2O1N2O1N2N2N2N2M3M3L4K5J6I7J6K6K4M3M3N2N2M3N2N2N2N2N2N2N2N2O1N2O1O2N100O2O2N1O2N2M4M2N2N1N2O1O001N2O001N101N2N3L4K7H8I6J6J5K4M4K5K6I9Go0POh0XO>_O?C<E:F<]O]_U:\"}}, {\"image_id\": 17, \"category_id\": 2, \"bbox\": [138.17559814453125, 571.4923706054688, 127.46932983398438, 82.20220947265625], \"score\": 0.9999986886978149, \"association_id\": 1, \"segmentation\": {\"size\": [1228, 819], \"counts\": \"XSb54nU1;TjNCbU1O^jNe0`U1:I3N2M10000000O1000000O10OXOjjN;WU1CmjN:TU1CPkN;PU1ERkN:nT1FRkN9nT1HQkN8oT1IQkN6oT1KPkN5QU1KojN5PU1KQkN5nT1LRkN3nT1NQkN3oT1MQkN3nT1NRkN2mT1OSkN1kT11UkNOjT13TkNNkT13UkNMjT14UkNMjT14VkNLiT15WkNKhT16WkNJiT17WkNIhT18XkNHfT1:YkNFgT1<XkNDgT1P1100O1O2M201N2N2O1O1N10001O001O0O2O000O10O100000O1O0010O10O001O10O01N2O001O01ON2O110O00G9O101O01O0010O01O100O1O1O1O1O1O1O1O1N2O1O1O1O1O1O1O1O1O1O1O2Nligd0\"}}, {\"image_id\": 17, \"category_id\": 2, \"bbox\": [493.71112060546875, 662.296875, 251.58123779296875, 408.415771484375], \"score\": 0.9999984502792358, \"association_id\": 0, \"segmentation\": {\"size\": [1228, 819], \"counts\": 
\"Tjbc04WV1>A5L3M2N3M3M3L4omNjNfn0Z1VQOkNdn0Y1YQOjNdn0Y1YQOjNen0X1XQOjNfn0Z1WQOgNhn0[1WQOdNhn0_1VQObNhn0a1VQO_Nhn0f1TQO\\\\Njn0h1SQOYNln0i1QQOXNon0j1mPOYNRo0i1jPOZNUo0f1jPO[NUo0f1iPO[NXo0e1gPO[NYo0f1gPOWNZo0k1fPOUNXo0m1hPOSNVo0o1iPORNVo0o1jPORNUo0o1iPORNWo0P2fPOQNYo0Q2dPOPN]o0R2_POPN`o0S2]POlMco0X2ZPOgMho0b2noN^MRP1o2aoNRM_P1T3ZoNnLeP1V3WoNjLhP1X3XoNgLgP1Z3[oNcLdP1`3\\\\oN^LdP1d3]oNWLeP1k3[oNQLgP1P4[oNmKfP1T4[oNjKeP1W4[oNhKeP1Y4^oNbKcP1^4e0001O001N2N2O001N001O0N2O2O011N10001N2N2M2O2O1O1O1O001N1O2N1O2O001O0001N101N100001O00001O0O1000O2M2O101O1O1O001O1O1O1O1O0O2O1N101O1O001O00010O0O1O1O2N1O101O001O1O1N1O2N2N2O001O1O1O1O1O1O1N2N2N2O1O100O10000O100O101N1O2M2N3M3M2O2M3O1N2N1N2N3J5H8K6N1N2O2N1O1N3M2M3N3M2O1O2M2O2M2L5L4L3N3M4M4K7E\\\\di3\"}}, {\"image_id\": 17, \"category_id\": 1, \"bbox\": [505.54498291015625, 1095.3980712890625, 77.58868408203125, 55.045654296875], \"score\": 0.9999157190322876, \"association_id\": 0, \"segmentation\": {\"size\": [1228, 819], \"counts\": \"ilob0<oU13K6L3M2N2N1O2L300N3N1O101N1N201N100O1O1O10000O100O1000O1000O1000O100000000000000O10000O100O1O1O1O1N2O1O1O1O2N1O1O1O100N2O1O1M3N2O1N2M3O1N3O0O1N2N]am8\"}}, {\"image_id\": 17, \"category_id\": 2, \"bbox\": [379.9961853027344, 860.1488647460938, 208.02999877929688, 290.24420166015625], \"score\": 0.9501436948776245, \"association_id\": 2, \"segmentation\": {\"size\": [1228, 819], \"counts\": \"eZb>2UV18^ONajN7\\\\U1?N0O101L3I7N3N10M3N2M3N2O1O1N4Le]V1NnaiNj0D:G7I5M2M2O2M3N1N2O00N3J5J7M2N2N3N000O1N2O1N2OnLcNYQO[1fn0jNWQOV1jn0iNVQOW1jn0iNWQOU1jn0kNVQOU1jn0kNVQOT1jn0lNWQOS1in0oNVQOP1in0ROWQOn0hn0TOWQOk0in0VOWQOj0hn0XOVQOh0in0[OVQOb0mn0^OSQO`0on0APQO>Qo0CnPO<So0ElPO;To0FjPO:Xo0EhPO:Yo0GePO8^o0HaPO6ao0K^PO3eo0MZPO3fo0MZPO2io0LWPO4jo0KVPO5lo0HTPO8oo0DRPO<PP1BPPO>SP1@loN`0VP1_OioNb0YP1\\\\OgoNd0ZP1ZOfoNg0[P1XOeoNg0]P1WOdoNh0^P1WOaoNi0aP1VO_oNj0cP1UO[oNl0fP1SOZoNm0hP1POYoNP1PS110O1O1O1O10001O0O2N10000000001OO10O0O11000001O00O10O010O1000O0000O110001O3M3L4J6K4M3N2N]QT:\"}}, {\"image_id\": 17, 
\"category_id\": 1, \"bbox\": [279.42279052734375, 405.1211242675781, 46.01055908203125, 46.432952880859375], \"score\": 0.755341649055481, \"association_id\": 0, \"segmentation\": {\"size\": [1228, 819], \"counts\": \"QV`:<mU15L5L?A4L1O1O1O100O10000000O10000O100O1O1O2O001O1O1O2N2N4L2N1O2M2O1O2N1N3N1N3K5MWidb0\"}}, {\"image_id\": 17, \"category_id\": 2, \"bbox\": [460.43218994140625, 764.7637939453125, 191.9537353515625, 358.8427734375], \"score\": 0.45333877205848694, \"association_id\": 0, \"segmentation\": {\"size\": [1228, 819], \"counts\": \"\\\\lYa0=fU1a0C8J4M1000010O0001N1N2N3L3O1O2O00O010N11O001O2N1O001N0010100O1O1OO1N020011O001O01N1O2N10N3M2000001O01O00O1O10000000O1000000gK@gRO?Zm0DcRO<]m0H_RO7am0N[RO2dm03XROMgm07UROJjm0:PROIom0:lQOISn0:iQOIVn09fQOIZn08dQOJ[n08bQOI]n09bQOH[n0<bQOE\\\\n0>bQOD]n0>`QOC`n0?^QOAbn0b0ZQO^Ogn0d0VQO[Oln0g0RQOYOnn0i0PQOVOQo0l0lPOVOSo0k0lPOUOSo0n0jPOROWo0P1fPOPOYo0S1ePOmN[o0V1`POlN`o0W1[POlNdo0W1XPOkNgo0W1WPOjNjo0V1UPOjNko0W1UPOhNlo0X1TPOgNlo0Z1SPOeNoo0[1QPOdNPP1\\\\1ooNcNdo0l1\\\\PORNeo0P2YPOPNgo0Q2XPOoMho0S2WPOkMjo0V2UPOjMko0W2WoNQM>h0[P1i2coNUM^P1l2coNQM^P1P3coNlL^P1V3doNdL^P1^3doN\\\\L_P1d3T11O101N101O0O101O00O100O10O010000O10000O10000O1N2O1N2O1O100O1O100O1O1N2O1O1O1O2O0O1O100O2N1O1O1N3M2O1N101O1O1O0O2O2M2M3N3M2M4M3M1N3M3L4M3M3O1N3N2N7H7H6H9H9AllY6\"}}, {\"image_id\": 17, \"category_id\": 1, \"bbox\": [335.8236083984375, 355.60675048828125, 66.24935913085938, 48.341522216796875], \"score\": 0.06159375235438347, \"association_id\": 0, \"segmentation\": {\"size\": [1228, 819], \"counts\": \"afe<`0dU1<I6K3L3O1O2OO010000O01O1O1N2N200IjjNXOVU1f0njNVOTU1j07000ejNVOTU1Q100000000000O010O2N10000000010O10O100O10O100O1O1O1O2N003N0O2O0O2N2N2N1O1O0O2O2M5Ield?\"}}, {\"image_id\": 17, \"category_id\": 2, \"bbox\": [113.22058868408203, 586.6755981445312, 113.27246856689453, 71.0010986328125], \"score\": 0.05675715580582619, \"association_id\": 0, \"segmentation\": {\"size\": [1228, 819], \"counts\": 
\"`Yc58jU1LZjN?ZU1>M1O1O1O1O00O10000O100O10O01WOkjN;UU1DmjN:TU1EmjN:SU1FojN9PU1FRkN9nT1GSkN9lT1HTkN7mT1IRkN8mT1ISkN6lT1LSkN5lT1MSkN3mT1MRkN3nT1NRkN2mT1OSkN1mT1OSkN0mT11SkNOkT14SkNMjT16VkNJiT18UkNIjT18VkNHjT18VkNHhT1:XkNEhT1<XkNDhT1<XkNDgT1>WkNBhT1`0XkN_OiT1b0VkN]OjT1e0UkN[OkT1R11O101N102M3M101O1O1O1O00000O10000O0100000O1000O00O1000010O010O2NmaVf0\"}}, {\"image_id\": 18, \"category_id\": 2, \"bbox\": [158.9151611328125, 415.5577087402344, 144.55035400390625, 32.364349365234375], \"score\": 1.0, \"association_id\": 3, \"segmentation\": {\"size\": [572, 1024], \"counts\": \"SUm21ia0200O1O2N100O01000O10000O1O01000O1O100O101O0O100O10000O10000O100O100O1000000O1000O10O100000O1000O10O100000O1000O1000O1000O10000000O010000O10000O1000000O100O1000O10O100000000O10000000O100000O10000000O10O1000000000000O100000000O10000000000O100000000O1000000O101O001N[bd<\"}}, {\"image_id\": 18, \"category_id\": 2, \"bbox\": [490.024658203125, 409.0556335449219, 113.41961669921875, 49.493194580078125], \"score\": 1.0, \"association_id\": 2, \"segmentation\": {\"size\": [572, 1024], \"counts\": \"hfb84ga0101N10000O1O100O100O100O100N200O10O01O00100OO10010O000000010O0010O0L4M301O1O0N3N20O10O10O10O10O10000O010O100O1N3N00100O1O1000O0100O100000000O100000000000000000000O10000000000O1000000000000O10000O1000001N10001O000O101O0O2O_hZ7\"}}, {\"image_id\": 18, \"category_id\": 1, \"bbox\": [72.99341583251953, 80.85392761230469, 136.82611083984375, 369.44097900390625], \"score\": 1.0, \"association_id\": 3, \"segmentation\": {\"size\": [572, 1024], \"counts\": 
\"_S_18aa0:F6G8J5L5K4M2M3M4M2N2M3K4M4N2N1O2O1O1O001000000O100nAhM`<Y2XCUNb<k1nB_OW<c0]CM]<l2M3M7I9F5L2N3L5K=C7H4K7nNT1Aa0E7K3M3N1N3N3M2N2N1O1O1ZIZFT5g9hJ^FV5d9eJ_F[5d9ZJeFe5i:01O000000000000000000000000lMSJYHm5a7_JYHa5e7eJWH[5i7hJTHX54fIY6W1_IS5KVJc6m0[In4N[Je6k0YIk4N^Jh6j0VIj4N_Jm6h0RIm4DeJ\\\\7>lH^6U7U21N2N4L6JkMjFQLi8l3^GVL\\\\8h3hGYLV8d3oG]Lm7_3ZHcLb7W3dHkLZ7Q3kHoLS7o2PIPMP7m2UIQMk6k2]IQMb6n2cIoL]6n2hIPMY6m2jIRMX6g2nIWMU6V2]JiMe5T1]KjNf4Q1^KnNb4P1cKmN^4Q1eKlN\\\\4S1gKkNY4T1jKjNW4T1kKjNV4U1lKiNV4S1PLgNS4W1VLZNS4c1T6O3N3M3M2M2O2M3M5K4L3M4Hc0YOc`V>\"}}, {\"image_id\": 18, \"category_id\": 2, \"bbox\": [693.6517944335938, 408.8168640136719, 99.64990234375, 52.2750244140625], \"score\": 0.9999998807907104, \"association_id\": 4, \"segmentation\": {\"size\": [572, 1024], \"counts\": \"UVU<1ia03N2O0O1O10000O01000000O1O100O1M`^OOda01N2N3N1N101O1O1N2O1N2N2N2O4K101O1N2O001O1O0100O10O03N5J2O004L2O0O001O000O1O100O100O1O001O00001OhNLh@4W?0g@OY?2f@NY?5e@K[?8c@G]?:c@E]?;c@D^?<d@A^??o00001O0O101O0O1000O010001N100000001N101O0O2OoUQ4\"}}, {\"image_id\": 18, \"category_id\": 1, \"bbox\": [665.4222412109375, 15.226600646972656, 131.18084716796875, 427.2166748046875], \"score\": 0.9999998807907104, \"association_id\": 4, \"segmentation\": {\"size\": [572, 1024], \"counts\": \"kZe;3Ua0e0F:K5M3M0OOO3N201O2N1L4N3N0O1O0N2N3M2J6H62O2N4L3A`0K4O1O1O0OlAWOX;i0fDDP;;PELk:5QE2k:NRE9k:GSE=j:CUEa0i:_OUEe0i:]OSEh0i:\\\\OnDn0n:VOhDR1V;RObDT1[;QO]DU1`;g2M7H;E5I7E=^On0nNf0mH[G`4l8XK_G_4h8VKaGf4k8]JiG_5]:M2L3N2N2N2O101O001O1O002N3M3L4M3M3M3M3M2O1N1O1O1O1O00SMPLnGP4W7[MYHe2a7gMYHZ2b7mM[HU2_7RN^HR2Z7VNbHP2S7YNiHl1n6\\\\NmHi1PO]KT7o2jId2k5bMQJa2g5gMWJ[2b5mMZJU2c5PNYJQ2e5[NoIh1m5U5K5L4N2M3L4K5M3XMcCO`<MjCIY<4XDYOm;e0^DmNg;Q1_DiNd;V1`DcNc;^1^2O101N4L2N2N2N1O3L8I7H5K7EYQR4\"}}, {\"image_id\": 18, \"category_id\": 2, \"bbox\": [346.4921569824219, 410.48193359375, 128.750732421875, 29.10498046875], \"score\": 0.9999996423721313, \"association_id\": 0, \"segmentation\": {\"size\": [572, 1024], \"counts\": 
\"SXR63ha02O0000000O10000000000000O1000001O0Oj\\\\=N_QB0fa00Z^O4da04L4M2O1N10000O1000000000000O10000000000O10O10O10000O1000O10O100000O10000000000O10000000000O10000O1000000O10000O1000000O100O100000000O1000000O10000000001N10001O0O2N2O]Xb9\"}}, {\"image_id\": 18, \"category_id\": 1, \"bbox\": [316.8494873046875, 86.54134368896484, 102.2021484375, 357.4100036621094], \"score\": 0.999998927116394, \"association_id\": 1, \"segmentation\": {\"size\": [572, 1024], \"counts\": \"kjb5?\\\\a04QALo;7hC7P<KjC=S<DfCd0W<_OcCg0[<ZObCi0]<XO_Cm0_<UOZCR1d<oNYCU1e<kNZCY1c<gN\\\\C\\\\1j0\\\\NW97PF_19RO]9^OZFd11YO_9TO_Fj1C]Ok9iNaFg4a1^JW5k0XIk4Y1aJ\\\\5d0ZIo4T1aJa5`0YIU5n0`Ji5;VI\\\\5j0\\\\Jo58UIb5f0YJT66TIe5d0VJX66RIj5?RJ_65PIo59oIg62PIR63oIm60nHU6LQJW7KiHi7W7YHeHi7[7ZHaHf7_7\\\\10O001O1N2M3M3L4M3L4N2N2O1N20000O1001O1O2_HfF\\\\6[9]ITGZ6n8\\\\I\\\\Gb6f8VIbGh6j9O001O000001O002N3M:YE`I^9a7N1N3`NjGVIW8d6XHRIi7j6]HRId7h6dHUI^7d6lHVIX7g5P3F;B=K5L3N2J6M4M2M3L5L4M3L5TNfBmNb=n0gBfN`=W1hB^N^=^1b1N2O2N3M3M3L6I7I5J8Dbha:\"}}, {\"image_id\": 18, \"category_id\": 1, \"bbox\": [447.9963073730469, 30.055788040161133, 121.16543579101562, 414.0707702636719], \"score\": 0.9999982118606567, \"association_id\": 2, \"segmentation\": {\"size\": [572, 1024], \"counts\": \"SWm75ca08H7F;H8I5L2L5K4M3N3K7[Of0_Od0F9H?@;F6J5L3N3M3M1O2M1O2N3N1O2N5L2M2N3L6K`0gD\\\\JZ9\\\\7F6VHaGl5f8eIjGS6U:J4M1M3cN[E`Kg:U4QF[KQ:b4\\\\FQKh9n4c1O101O00000O0100000000000000000000O100O1O1O1M3M4M2N2M4H8A`0^Ob0Gf0YEQI\\\\9V8]O:H8K3N2M4J5L4L5UOj0C=L4M4K4K5M3N3L5J7nLTDJR<0XDIj;6YDEj;:[D]Oj;b0]DQOj;n0]DhNg;V1^DcNf;[1hDTN_;h1^2K6K6I:F<D<Ck[Q8\"}}, {\"image_id\": 18, \"category_id\": 2, \"bbox\": [328.6252746582031, 145.820556640625, 138.29510498046875, 295.2060852050781], \"score\": 0.8519208431243896, \"association_id\": 1, \"segmentation\": {\"size\": [572, 1024], \"counts\": \"edR72ia03M1O2N1O2N1O1O1N2O11O1O1O0O2O00000O2O00001N1000001N2O1NRUY:\"}}, {\"image_id\": 18, \"category_id\": 2, \"bbox\": [683.3251953125, 140.17481994628906, 
109.5169677734375, 295.12701416015625], \"score\": 0.18000586330890656, \"association_id\": 0, \"segmentation\": {\"size\": [572, 1024], \"counts\": \"WQb<6da07LO1O6J2OK4L400007IPla00iS^O8O000O100O2N101N11O0O2N10000001O01N2NfhV4\"}}, {\"image_id\": 18, \"category_id\": 2, \"bbox\": [449.7645263671875, 85.78758239746094, 146.09173583984375, 344.619140625], \"score\": 0.056854307651519775, \"association_id\": 0, \"segmentation\": {\"size\": [572, 1024], \"counts\": \"^fZ94ga02O0001N101N2O000000000000000000O1]S11`lN7I3O0001O0O2O0N3N1N3Nm]o7\"}}, {\"image_id\": 18, \"category_id\": 2, \"bbox\": [113.8584976196289, 160.5282440185547, 167.58685302734375, 288.15447998046875], \"score\": 0.05128795653581619, \"association_id\": 0, \"segmentation\": {\"size\": [572, 1024], \"counts\": \"]W\\\\31ha05N000O101O00001O0010O01O001O00001O001OO10O10000000O1O1O100O100000O10O0100001O0O2O0O2O00O1000O01O15JSX71mgH0O101O0000O1000001N[lU=\"}}, {\"image_id\": 19, \"category_id\": 1, \"bbox\": [913.3330688476562, 120.9687271118164, 598.9721069335938, 532.35205078125], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [999, 1599], \"counts\": 
\"daXl01To08H:F:F;\\\\ROUOWl0g2UOj0VO=D:F8I4L4L4L4L3M3N1N2O1N2O1O1N2O2M2O2M3N3L3N3L3M3N2M3M2O2M2N2N2O1N2O0O3N1N2O2M2O2N1N3N2M3N2N2M3N1O1N2O1O1O0O2O001N2O1N2O1N2O1N3M3N2M3M2N2N3N1N2N2O0O2O1N2O001O1N2O1O3M2N3L4M2N4L3M3L4M2N2N2N2N1N3N1O1O2N2N3M2N3M4L7Ia0_O7I8H4L4L3M3M3M3M2N1O1O1O1O1O1O1O1O2N2N2N2N2N2N2N1O2N1O1O1O1O1O1O001O1O1O1O1O2N1O2N1O1O2N1O1O2N1O1O1O1O1O1O1O1O1O1O1O001O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O10O01O10O01O10O0100O010O10O10O10O10000O10O010O1O1O10O01O1O010O001O0010O0001O00010O000000000000010O0000000000000001O00000000000000000000001O0000000000000000001O0000000000001O00001O00001O00001O001O001O001O1O001O001O001O001O001O00001O00001O0000001O0000001O0000000000000000000000000000000000000000000000001O00001O00001O001O001O001O1O001O1O1O1O1O001O001O001O00001O00001O00001O0000001O00000O101O00001O00001O00001O001O001O1O001O1O1N2O001O1O0O2O0O2O000O2O0O2O0O100O2N1O100O2N100O101N101O0O101N101O0O2O1O001N2O1O1N2O1O1O1N100O2O001N1O101N1O101N1O1O2N1O1O101N1O100O2O0O2O1O1N101N3N1O1N3N1N2O1O0O2O1N101N101N101N1O2N1O2N2N2M3N2N2N3M2M3N3M2N101N2N1O2N2N1O2N1O2N1O2N2N2N2N2N2N3N1N2N3M1O2N2N2N1O2N1O2O1N1O2M3N2N1O3M2N2N3L4M2N2M3N2M3N2N2N1O2N2O0O2N2N2M3N2N3L5K6J5K6J6J4L4M3L3N2N3M2N2N1O2N3M2M3M4L3L8H9G9DSad2\"}}, {\"image_id\": 19, \"category_id\": 2, \"bbox\": [43.12156677246094, 152.32655334472656, 1058.584716796875, 511.726318359375], \"score\": 0.9999998807907104, \"association_id\": 1, \"segmentation\": {\"size\": [999, 1599], \"counts\": 
\"jW\\\\1`1]m0>G8I6J7J5M3M3M2N3M2N2N2O1N2N2N101N2O1N2N1O2O1N3M2N2N2N1O2M3N1O2N1O2N1O2N1O2O0O2N1O2N1O2N100O2O0O101N100O2O0O101O0O2O0O2O0O2O0O2N2O1N2N101N2N2N2N2N1O2O1N2N2N101N2O0O2O1N2O0O2O001N101N101O000O2O0O10001N100O101N1O100O1O2O0O1O1O100O2O0O100O100O2O000O10000O10001O000O101O0000001O0O2O00001O0O2O001O1O001N2O1O001O1N101O001N101O000O2O00000O2O000O100O101N100O100O100O2O0O10000O10001N10000O101O000O10001O0O1000001O0O10001O0O2O00001N101O0O2O0O2O001N1O101N100O1O2O0O101N100O10001N100O10001O0O100000000O100000000000000000000O10000000001O000000000O1000000000000000000O10000000000O10000O100O10000O100O1O100O1O100O100O100O100O10000O10000O10000O1000000O10000000000000000000000000000000000000000000000000000O1000000000000000000000000000000O100000000000000O10000000000O10000000000O100000000O1000000O100000000O1000000O1000001O0O1000000O101O00000O10001O000O1000001O000O10001O0000001O0O101O001O001O1N101O1O1O2N1O1O1N3N1O2N2N2N2N1O1N3N1O1O1O1O1O001O1N2O001O001O001O001O001O1O1O1O1O1O1O1O2N1O2N1O2N2N2N1O1N2O2N1O001O1O1O1O001O001O1O001O00001O001O001N101O1O001O001N101O001O001N10001O001N10001O000O2O00001N10001O0O101O00001O001O001O001O001O001O1O001O00001O001O00001O00001O0000001O0O10001O000000001N10000O101O000O101O00000O2O00001O0O101O001O001N101O001O001O0O101O001O0000001O000000001O00000000001O0000000000001O0000000000001O000000000000001O00000000001O000O10001O00001O0000001O00001O001O00001O001O001O001O00001O001O000000001O00000000000000000000000000000000000000000000001O00000000000000000000000000000000000000001O000000000000000000000000000000001O00000000000000000000000000000000000000000000000001O0000000000000000000000000000000000001O00000000000000001O00000000001O0000001O0000001O0000001O00001O001O00001O001O001O00001O001O00001O001O001O001O001O001O001O001O1O0O2O1O001Oc0]O4L4L3M3M3M2N1fCk^O`:Wa0]En^O_:Sa0^ER_O_:o`0_EV_O]:k`0`EZ_O]:g`0aE\\\\_O]:e`0aE^_O]:d`0_E`_O_:a`0_Eb_O_:_`0^Ef_O_:[`0^Ei_O`:Y`0ZEm_Od:T`0REX@k:Tb0N2N1O2N2N2N3M2N3M7I7I;E<D1O1O1O001O1O1O001O001O001O00001O00001O00001N101O001O001O001O1O002N4
L3M4L4L3L2O1O1O1O1O1O001O1O001O001N101O001O00001O001O001O001O1O003M2N3M3M3M3M6J9G9F8I7I;E9G9G8H1O1O1O1O1O1O1O1O1O1O001O1O001O1N101O1O1O1O4K6K4L4L2N0O2O001O1O0O2O1O0O2O001O1N2O001N2O001O1N101O1O1O1N2N2O1N3M3M2N3M3M3M4L3M4L4H9A?ASWW?\"}}, {\"image_id\": 20, \"category_id\": 1, \"bbox\": [591.1741943359375, 233.94236755371094, 265.49102783203125, 406.55621337890625], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [844, 1500], \"counts\": \"ofd?e0ai0>Ad0ZO[1hN;J5J4M4mLWL\\\\_Ok3``0ZL]_Oj3[`0^L`_Of3X`0eLa_Oa3Y`0hL^_O_3[`0lLY_O]3``0hLZ_O_3``0dL]_O`3_`0cL^_O`3_`0bL__Oa3_`0`L__Oc3^`0`L__Oc3_`0^L^_Og3^`0]L]_Oh3``0^LT_Ob5Q?gJ_@f5\\\\?h2L4L3M2N2N2N1O2N1O2O0O2O0000000O1000O10000000O01O1O1O001O1O10O001O001O0O101O0010O001O1O1N2O001O1O1O100O1000O10O1000000O10000000000000000O10O2O00001O0000001O001N2O1O1O1O2N1O2M2O001O1O001O1O1O001O1O00000O2O0000000O1000O10000O10000O10O11O1N101N10001N101O0O1O1O1N2N2N3M2O1O1O1O1O1O1N2N2N2N2O1O1O2N1O1O1N2N2N2O1N2O1O1O1N2M3M4M2M3O1N2O1O2M3M2aI]_OS3f`0_LP@U3T`0hLT@Q3n?lLV@P3m?mLV@P3m?nLU@P3m?oLT@n2o?QMS@k2o?UMR@g2R`0XMQ@b2R`0^MQ@[2S`0eMP@S2V`0lMm_Om1W`0SNj_Oi1Z`0VNh_Oe1[`0[Nf_Ob1\\\\`0^Nf_O\\\\1_`0dNb_OT1d`0lNg_O?c`0AU5O000010O01O1N3N2N2N2N1N2O1O1N2O1N3M4K7H6J6J7I^Qn`0\"}}, {\"image_id\": 20, \"category_id\": 2, \"bbox\": [225.8786163330078, 581.2308349609375, 499.09149169921875, 104.07025146484375], \"score\": 0.9999980926513672, \"association_id\": 1, \"segmentation\": {\"size\": [844, 1500], \"counts\": 
\"Vgk58Tj01O0O2O00001O0000000O101O0000000000001O00000O1000000000000000000O2O00000000000000000000000O10001O000000001O0000000O2O00000000000O10001N1000000O2O00001N10000000001N1000000O101O0000001O0O10001O000000000O2O000O10000O2O00000O2O0000000O10001N10000O100O100O100O2O00000O1000000O10000O1N2N2L4O1O100O10000O1000000O10000O100N1M4H8N2O100O100O100000000O100000000000O1000000000O10O1000000000000000000O1000000000000000001O000000000000001O0O101O0000001O000000001O000O101O00001N101O0000001N10000000001O000000000000001O000000000000000000000000000000XXOmMWg0S2hXOnMXg0R2hXOnMXg0R2hXOnMXg0R2gXOoMYg0Q2gXOoMYg0Q2fXOQNYg0P2fXOPNZg0P2fXOPNZg0P2eXOQN[g0o1eXOQN[g0o1dXORN\\\\g0o1bXORN^g0n1bXORN^g0Q2^XOPNbg0]200000000000000000001O00000000000000000000000000000000000000000000O1000000000000O1000000000000O1000000000000000000000O10000000O10000000O1000000000000O10000000000000000000000000000O1000001O00000000000000000000010O00000000010O000001O001O001O0gM^XOh1cg0VN^XOk1ag0QNdXOn1]g0nMgXOQ2[g0lMkXOb0Be0Xh0XOlWOf0Uh0UOPXOj0kh0N1O1O001O001O00000010O000000O1000000000000000001O000000000000000000001O000000000000001O00001O00001O000O2O00001O00001O001O001O1O2M2O1O1O1N2L4N2N1ORT\\\\d0\"}}, {\"image_id\": 21, \"category_id\": 1, \"bbox\": [561.1118774414062, 288.1451721191406, 81.40264892578125, 173.39254760742188], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [576, 1024], \"counts\": \"oen9a0Za06L4K4K6N1N3L5X@QOZ>Q1cASOY>P1cAVOX>n0cAXOY>k0dAWOZ>[2M2M2O3M4L5Kf0ZOa0_O6J2N1O1O0000O1O1PO\\\\CcLe<i2[DhLi;S3d1N3102N9G42NON01M1O1N2N1O03RMWBBj03R=8[DXOi;f0k2N3M2N3M2N2N3N010O1O1O1O30N1O1N3M2ON10001N10O11N3M2M3Jadg6\"}}, {\"image_id\": 21, \"category_id\": 1, \"bbox\": [381.66058349609375, 163.99851989746094, 173.4837646484375, 354.21063232421875], \"score\": 1.0, \"association_id\": 2, \"segmentation\": {\"size\": [576, 1024], \"counts\": 
\"Saj6X1]`0a0E7K4K5J6H8K6L3M3L4M2N2O1O1N2O1M3N^NUBSOi=l0aBlN_=S1gBhNY=X1lBdNS=[1PCcNo<^1RCbNl<^1WCaNe<a1^C^N`<b1bC^N\\\\<c1fC[NZ<f1dC`NV<d1eCaNS<g1fC`Nn;m1kCXNQ<n1hCWNU<m1fCXNW<k1bC[N[<h3L5I6L4M2N3M2O3L4L3M3M3L4M5K:SE_Jh8f7F4M2M3N5KS1mN3M2M2O000001O00000000000`NbI^G^6Z8mIcGS6P6bImJ`0QOn5P6gIjJ?SOl5Q6jIeJ?WOi5R6PJ]J=]Of5T6gLdI\\\\3Y6X4M3N2O2]J`Da4h<G3N1O01N2N3N3L`0WCUK];P6G4M10003M:F4K3M100002N1N1O1N2M5J`0A6bNXESKR;U3]DlMj<k1_CQNe<g1dCTN`<c1kCVNZ<d1U2K4N3L3N3L4L3M4L3WOh_OGY`04n_OHT`05R@GP`06V@Dl?;m0O1O2N2M3M3L_b90X]F=O001O001O001O0101N2M4L4KRZZ8\"}}, {\"image_id\": 21, \"category_id\": 2, \"bbox\": [591.2954711914062, 446.3271179199219, 105.6553955078125, 14.033233642578125], \"score\": 0.9999998807907104, \"association_id\": 1, \"segmentation\": {\"size\": [576, 1024], \"counts\": \"Ub^:1na02N1000000O010000001O0000000O100000000O010000000000000000000000000O1000000000001O0000000000000001O000001O00000001O0000000000000000000001O0000000000000000000000000000001O0000000000000001O00001O00001O0O2OkQh5\"}}, {\"image_id\": 21, \"category_id\": 2, \"bbox\": [471.2083740234375, 493.3660888671875, 216.85943603515625, 51.79351806640625], \"score\": 0.9999992847442627, \"association_id\": 2, \"segmentation\": {\"size\": [576, 1024], \"counts\": \"[bZ85ja02O1OO1000O10O1000001N2NYc50h\\\\J1N2OO100000000000000000d^OLl`04T_ONk`01T_O0l`0OU_O2j`0NV_O2j`0NU_O4j`0MU_O3k`0MU_O3k`0NT_O3k`0MU_O3k`0MU_O4j`0MU_O3k`0MU_O4k`0KU_O5k`0LT_O4l`0LT_O5k`0KU_O5k`0LT_O4l`0LT_O5k`0LT_O4l`0LT_O4l`0LT_O5k`0KU_O5l`0JT_O6l`0KR_O6n`0JR_O6n`0KQ_O6n`0JR_O6n`0JR_O6n`0KP_O6Pa0Kn^O6Ra0<10O0000000000000001O000000000000001O00000000001O0000001O0001O0000000000000000001O000000000010O001O1O00001O01O00000001O00000010O000001O0000001O01O01O000010O01O1O0010O00010O010O0001O0000001O0001O0001O0000001O00000O100000000000000000000000O0100000000O10000O10000O100O1O2L3M4NcWS6\"}}, {\"image_id\": 22, \"category_id\": 2, \"bbox\": [163.32809448242188, 712.6904907226562, 299.5207824707031, 39.85687255859375], \"score\": 
0.9999990463256836, \"association_id\": 1, \"segmentation\": {\"size\": [768, 621], \"counts\": \"m^o32ng00O101N10000O10001O000O1000000O1000000O10000000000O100000000O10000000000O10000O10000000000O100000O010000000000O10000O1000000O10000000001N1000000000000O100000000000000000000000001O0000000000000000000000000000000000000000O101O00000000000000000O1000000000000O100000000000000O10000000000000000000000000000000001O00000000000000010O000000001O00000001O0000000001O01O00000000000000010O000000000001O01O0000010O00000001O01O01O001O0000010O00001O01O0010O000001O01O00010O0100O001O01O000010O01O1O0010O000001O0000001O00010O1O00001O00001O001O0O10001O000000001O000OfPT4\"}}, {\"image_id\": 22, \"category_id\": 1, \"bbox\": [220.37571716308594, 27.028413772583008, 277.193603515625, 733.8535766601562], \"score\": 0.999996542930603, \"association_id\": 1, \"segmentation\": {\"size\": [768, 621], \"counts\": \"`X^5<_g0`0A<E9G8F:F9H8H7I6J5K6K4M3S@bLS;b3hDcLV;_3fDfLV;]3eDhLX;[3dDiLZ;Y3aDlL\\\\;W3`DmL^;T3`DoL^;S3`DPM];R3aDPM\\\\;T3aDnL];U3`DmL];X3^DlL^;Y3^DjL];`3[DcL`;f3YD^Lb;j3VD[Le;m3TDWLg;P4SDSLj;Q4QDSLl;Q4oCSLn;R4kCSLS<Q4fCTLX<Q4VC_Lf<a7M3M4K4M2N2M3M3N1N3M4L3M4L4L4K4L3M4L3M3M2O2N2N2N1O2O001O001N10001O00001N100O1O1O1O1O1O1N2O1M3L4M2M4K4M4K5I6K6K5K4I6I8G9K4O1O10XNiHiBW7V=i1O3L3M3M3L5L3N2O1O1O1001N1O2N100O2O001O010000000000000001O000001N2O1O001O1O001O00101O100O2O1O1O1N10O01O00001O1O1O1O2M3N4M2M5M3Lo0SOW1jNh0XOW1hN=C3L3M21O5K7Hi0WO2O0OOdCVK`5i4YJ^Kf5c4TJbKl5_4oIdKP6`4jIdKU6_4iFRKEc0`9`4^FXKM<d9_4RFaK54g9`4`EnKd0Fk9`4kD]LV1WOm9]7oEgHP:Y7kEmHS:U7gEQIW:P7bEYI[:i6]E`I`:R:N3M2N2N3M2N2N2O1N4L3PMTBfKo=[3UC]Lm<V3eCcL^<Q3RDhLP<R3YDiLi;R3]DkLe;P3aDmLa;o2cDoL`;l2dDRM`;g2fDUMa;b2dD\\\\Ma;\\\\2eD`Mb;U2fDhMb;l1eDPNY<P1oCjN_<f0iCUOa<?eC\\\\O`<>eC^Oa<:eCBb<5dCFg<MUdj3\"}}, {\"image_id\": 22, \"category_id\": 2, \"bbox\": [516.0968627929688, 499.48101806640625, 72.57568359375, 9.55303955078125], \"score\": 0.9771134853363037, \"association_id\": 0, \"segmentation\": {\"size\": [768, 621], \"counts\": 
\"h__<2ng0000000000O1000000O1000000O1000000000000O10000000000000000000000000000000001O00000000000001O2N2O0O00VPl0\"}}, {\"image_id\": 22, \"category_id\": 2, \"bbox\": [542.71728515625, 421.9202575683594, 31.658935546875, 8.080902099609375], \"score\": 0.12591241300106049, \"association_id\": 0, \"segmentation\": {\"size\": [768, 621], \"counts\": \"Xml<3mg0000O1000001O0O10000000000000O100000001N100O10ijR1\"}}, {\"image_id\": 23, \"category_id\": 2, \"bbox\": [231.74569702148438, 363.130859375, 717.576416015625, 175.0946044921875], \"score\": 0.9999910593032837, \"association_id\": 1, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"TnQ71Ze01N3N1O1O1N1000001O0000001O00001O001O001O1O0O2O001O00001O00001O000000001O0000001O00001O00001O00001O0000001O00000000001O000000001O0000000O2O000000001O00000000000O10001O00000000000O10000000000O10000000000O100000000000000000000000000000000000000000000001O0000000000000000001O00000000001O000000000000001O000000000000000000001O00000000000000001O000000001O0000001O00001O0000001O00000O10000000000O101O000O10000O1000000O10000000000O10000000000000O10000O1000O0100O1O100O100O100O01000O1000000O010O100O001O1O001M3H8G9N101O1O10000O10O100000O10000000O010000O1000O0100N2M2O200O10O10O100000000O01000000000O1000000O100O100O100O100O10000O10000000O0100000000O10000O1O100O1O1O1O1O100O10000O1000000O10000000000O10O1000O10000O1N1N3N2000O0100000O01000000000O10O100000000O10000O10000O100O1000O0100000000O10000000000O10O100000O10O01000O01000O0100000O10O100000O100000000000000000000000O1000000000000000000000000000000000O1000000000001nMc]OT1]b0kNe]OT1[b0jNg]OV1Yb0iNi]OV1Wb0iNk]OV1Ub0iNl]OW1Tb0hNn]OW1Rb0iNn]OW1Rb0hNP^OW1Pb0iNP^OV1Qb0jNP^OU1Pb0jNQ^OV1oa0jNQ^OV1oa0jNQ^OV1oa0jNQ^OV1oa0iNS^OV1ma0jNR^OW1na0iNR^OV1oa0jNQ^OV1oa0jNQ^OV1na0jNS^OV1ma0jNS^OU1na0kNQ^OV1oa0jNQ^OV1na0jNS^OV1ma0jNS^OV1ma0jNS^OU1ma0kNS^OV1la0kNT^OU1ja0mNV^OS1ha0nNY^OR1ea0PO[^OP1ea0PO[^OP1da0QO[^Oo0fa0PO[^OP1ea0PO[^OP1da0QO\\\\^Oo0da0QO\\\\^Oo0da0QO\\\\^Oo0ca0QO^^Oo0ba0QO^^Oo0ba0QO]^OP1ba0QO^^Oo0
ba0QO^^On0ca0RO]^On0ca0RO]^On0ba0SO]^On0ca0SO\\\\^Om0ca0TO\\\\^Om0ca0TO]^Ol0ba0UO^^Ok0aa0VO^^Oj0ba0WO]^Oj0ba0WO\\\\^Ok0aa0YO]^Oh0_a0\\\\O`^Oe0_a0\\\\O`^Od0`a0]O^^Oe0ba0[O]^Of0ba0\\\\O\\\\^Od0ea0\\\\OZ^Oe0ea0\\\\OZ^Oe0fa0`10O010000000O100000000O1000000O10000O10O010000O10000O100000000O1000O1000O10000O100O0O2L4I7O1O1O100O100O10O010000O2O000O10000O100O2O0O1O1O1O2N1O1O2N2N101O1O0O2O001nMm\\\\Oj1[c0O0O2O1O1O1[Na\\\\O[1hc002N1N2mNo[Oj0[d0M3YOc[O42IoWb1\"}}, {\"image_id\": 23, \"category_id\": 1, \"bbox\": [190.9756317138672, 218.17025756835938, 670.1104736328125, 283.03955078125], \"score\": 0.9999879598617554, \"association_id\": 1, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"d_T43Ue06J6J6I6I8K4M3M3L3N2M3M3K6J5J6H8E;K5L3N3M3M2O2N1N3N100O2N1O101N1O2N100O2N1N3N1O2M2O1O2N1O1O1O2O0O100O101N100O100O2O0O1O101N100O2N1O101N1O1O2O001N101N101O000O101O00000O101O0O10001O0O100O10001N100O10001N1000001N10001O0O101O00001O0O10001O000000001N100000000000000000000000001O000000000000000O1000000000000000000000000000000O100000000000000000000O100000000000000O10000000000O10000000000O1000000000000O10000000000000000O10000000000000000000000O100000000000000O10000000000000000O100000000000000O10000000000O1000000O10000O10000O1000000O1000000O1000000O10000O10000O10000O10000O10000O100O100O100O100O100O1O1O1O1O1O1O1O1O100O100O100O10000O10000O100O100O100O1O1N2O1N2O1O1N2O100O100O10000O1000000O1000000O1000000O10000O10000O10000O10000O10000O1000000O10O1000O10000000000O10O1000000000O10O10000O100O1O1N101N2N2N2O1O1O1O1O010O1O100O10000O010O100O1O1O1O1O1N2O0O2N2N2O1N2O1O1O00100O10000O10O10O100O100O10O01O100O1O1O1O10O010000O10000O1000000O10000O10O10O100O100O1O100O100O10000O1000000O1000000000000O100000000000000O10000000000O1000000O100O100O100O2N100O100O10000O100O2O00000O10001O0O101O0O2O001O0O101N1000001N1000001N100000001O00001O00001O001N2O1O1O001O1O001O001O001O00001O00001O001O001O1N101O001O1O001O00001O001O0O101O00001O0O2O001N101N100O2N1O2O0O1O101N100O2O000O101N10001N100O2O0O2N2N2M3N2N2N2M3M3K5L4M3M3N2M2O2N2N
1O2N1N3N2M3M4K6B>Dch]3\"}}, {\"image_id\": 23, \"category_id\": 1, \"bbox\": [189.2112579345703, 290.8428039550781, 411.30572509765625, 213.78909301757812], \"score\": 0.15059776604175568, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"^UY42We06`NJa]O:[b0L_]O8]b05W]OMgb06o\\\\O3ob0On\\\\O4ob0On\\\\O5ob0Mn\\\\O6Pc0S1O2M3L4I7M3M3M3N2N1O2O0O100O2O0O1O1O1N2N2O1O2N1O1O100O1O100O1O1O1N2O1O1O2N1O100O100O100O2O0O2M2O2M2O2N1O2N10001N100O2O001N2O1N2N2N2N101N101O0O2O00001N2O001O1O1N101O0O101O0O101N101N101N2O001N10001N100000000O1000000O100O100O10000O1000000000001O0000000O1000000000000001O00000000000000000000001O0000001O00001O0000001O000000000000O10000000000O1000000O10000O1000000O100000000O10000000000O10000000000O1000000O1000000O10O10O100000000O100000000O10000O10000O100O10000O1O10000O10000000000O1000000000000O10000000000000000000000000000000000000000000O100000000000000000000000000000000000000000000001O00000000000000000000001O00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000O10001O000000000000000000000O100000000O01000O100O1O100O1O2N1O2N1N3L4oIPAa5o`0hMSji8\"}}, {\"image_id\": 24, \"category_id\": 1, \"bbox\": [47.72730255126953, 555.0107421875, 60.843727111816406, 122.2342529296875], \"score\": 1.0, \"association_id\": 3, \"segmentation\": {\"size\": [768, 513], \"counts\": \"SSV1U1hf06E?H3J6L4K4L4M3L4M2J6M4I6N2N4M2N1O2N1O1O100000000000000000000000000000000000iLi[Oe2Wd0YMn[Od2Rd0ZMT\\\\Ob2lc0]MV\\\\Ob2jc0]MZ\\\\O_2gc0`M[\\\\O_2fc0_M]\\\\O_2dc0\\\\Mb\\\\Ob2_d0M2O3M6I4M6I5L4K6I?nNTm_9\"}}, {\"image_id\": 24, \"category_id\": 1, \"bbox\": [315.2156677246094, 505.2799072265625, 68.43215942382812, 94.828369140625], \"score\": 1.0, \"association_id\": 7, \"segmentation\": {\"size\": [768, 513], \"counts\": \"\\\\Q_74gg0>Bo0TO4K3M4M3M2N2O1O2N1O1O1M3N2O1K5N2N2O100O1O100000000O10000O1O1000000O10000000000001O00000cMZ[ODGM0a1Pe0iNQ\\\\OQ1oc0lNZ\\\\On0fc0QO`\\\\Oh0bc0VOb\\\\Og0_c0VOP]O4`[[3\"}}, {\"image_id\": 24, 
\"category_id\": 1, \"bbox\": [1.6162891387939453, 333.18017578125, 35.097389221191406, 83.67657470703125], \"score\": 1.0, \"association_id\": 5, \"segmentation\": {\"size\": [768, 513], \"counts\": \"^j1i0Vg05L1O1O2bYOVObe0o0PZO\\\\One0`1K6TZOPN\\\\e0Z2001O0O3N2M4M3M3N0N111N1O1O2N1O2M2O2N2M3N2N4L4L6I;E?A]cT;\"}}, {\"image_id\": 24, \"category_id\": 1, \"bbox\": [194.47157287597656, 307.79730224609375, 39.43659973144531, 70.79605102539062], \"score\": 1.0, \"association_id\": 4, \"segmentation\": {\"size\": [768, 513], \"counts\": \"QZc4R1if0:J4K5L4L4L4K5M00000000000000000002N2N2N2N1O2N003M1O2N2N1O2N2M3VOeYOJaf0MfYOJmcd6\"}}, {\"image_id\": 24, \"category_id\": 1, \"bbox\": [0.02039620466530323, 424.1005554199219, 54.13655471801758, 107.88510131835938], \"score\": 0.9999998807907104, \"association_id\": 1, \"segmentation\": {\"size\": [768, 513], \"counts\": \"k>:`g0<D:]YOZOae0P1PZOZOke0`1M2O1N3N2M2N3L3M3N3M4M1N2O1O1O1N2O1O1O1O100O100000001O0000000000O1O1O1G9M3`MS[Oj1Pe0UNT[Og1md0WNY[Oc1id0\\\\N`[OZ1bd0eNj[Om0Zd0QOn[Of0Td0XOT\\\\O?fnl:\"}}, {\"image_id\": 24, \"category_id\": 2, \"bbox\": [95.8959732055664, 656.3858032226562, 61.294288635253906, 14.7266845703125], \"score\": 0.9999997615814209, \"association_id\": 3, \"segmentation\": {\"size\": [768, 513], \"counts\": \"mdX21ng02N1O1M3O1O1O1O100O100000O1000000000000000O100000000000O100000000O2O00000000000O10O11O0O10O100000O1001OO101O000O2N^c\\\\8\"}}, {\"image_id\": 24, \"category_id\": 2, \"bbox\": [21.256425857543945, 525.6160278320312, 105.7794189453125, 13.49737548828125], \"score\": 0.9999996423721313, \"association_id\": 1, \"segmentation\": {\"size\": [768, 513], \"counts\": \"dhj01ng02N1O1O1O10000O100000000000000000000000000000001O000000000000000000010O00000000000000000000O2O00000000000000000010O00000000000001O00000O101O0000000000001O00000000001O0000001OZoS9\"}}, {\"image_id\": 24, \"category_id\": 2, \"bbox\": [405.35137939453125, 474.9956970214844, 56.134124755859375, 10.183990478515625], \"score\": 
0.9999995231628418, \"association_id\": 6, \"segmentation\": {\"size\": [768, 513], \"counts\": \"Rga92ng00O1O1N2O2O0O10000000000000000000000000000001O0000000000000000000000001O00000000000O10000000000O101O0O1ORYW1\"}}, {\"image_id\": 24, \"category_id\": 2, \"bbox\": [184.2688446044922, 663.1785888671875, 103.97486877441406, 15.06304931640625], \"score\": 0.9999995231628418, \"association_id\": 9, \"segmentation\": {\"size\": [768, 513], \"counts\": \"Tme42fg01]XO1bg01\\\\XO0cg07O10000O1000O1000000000000O100000000000000000000000000000000000000O100000001O000000000000000000000000O1000O1000000000000O10000O100O100O101O0O100000000000000O10001OVS[5\"}}, {\"image_id\": 24, \"category_id\": 1, \"bbox\": [232.15980529785156, 417.4178466796875, 56.11427307128906, 73.93081665039062], \"score\": 0.9999995231628418, \"association_id\": 11, \"segmentation\": {\"size\": [768, 513], \"counts\": \"X]a59eg03ZYONUOJne09PZOEg0c0Ve0?TZOGje0Y1O0000000000001O1O000000001O000000010O001O000003M1O002N2N1O1O1O00001O0000001O1O2M2N002K;ROh0JYjY5\"}}, {\"image_id\": 24, \"category_id\": 1, \"bbox\": [145.81292724609375, 530.8291015625, 69.89476013183594, 146.5426025390625], \"score\": 0.9999990463256836, \"association_id\": 9, \"segmentation\": {\"size\": [768, 513], \"counts\": \"_[`3d0Sg0?B?G5J7I8H8J4K3N1O1O1O100O1O1N2N2N2N2O1L4I7M3N2M3\\\\O[Li\\\\Oi3Rc0e0O1O100001nKc\\\\O_3]c0_Lf\\\\O`3Zc0_Lg\\\\Oa3Yc0^Lh\\\\Ob3Xc0[Ll\\\\Od3Uc0ULR]Oj3bc00O10000000000O11O1O001O3M5J3M2M7I3M4eMY[OX1id0dNa[OR1bd0mNa[Oo0`d0ROe[Og0]d0YOf[Oc0[d0\\\\Ok[O<Wd0D_\\\\O_Oic0`0k11O1Vnn6\"}}, {\"image_id\": 24, \"category_id\": 1, \"bbox\": [309.6142578125, 439.03985595703125, 54.892852783203125, 71.327880859375], \"score\": 0.9999986886978149, \"association_id\": 13, \"segmentation\": {\"size\": [768, 513], \"counts\": \"SoX71mg08G5I9I8UOWOSZOl0de0_OTZOh0je0g0O101O00001O00000000O10000000000000000000002O2M3N1N1O1O10O02N001O1O100N6J3N3L5Jf0ZOfhc3\"}}, {\"image_id\": 24, \"category_id\": 2, \"bbox\": [281.5788269042969, 
481.5354309082031, 33.055938720703125, 11.53155517578125], \"score\": 0.9999984502792358, \"association_id\": 11, \"segmentation\": {\"size\": [768, 513], \"counts\": \"Voc65jg01N3O0O10000000000000000000O1000000000010O0000001O001O1OjPg4\"}}, {\"image_id\": 24, \"category_id\": 1, \"bbox\": [163.76834106445312, 477.7266845703125, 68.75070190429688, 120.3214111328125], \"score\": 0.9999980926513672, \"association_id\": 10, \"segmentation\": {\"size\": [768, 513], \"counts\": \"eYR41kf0T1N2N3M3N00O02N2O1M3M2O3K5N1K5M3N2K6M3M3N1O2O1N4L6J4L2O3M2N1O1O1O00001O1OO10000O100O100O100O1O1O1UMc[OS2^d0jMj[Oo1Wd0oMm[Oo1Td0oMT\\\\Oi1oc0TN\\\\\\\\Oa1fc0ZNe\\\\O^1^e0@6J9H3K]Vb6\"}}, {\"image_id\": 24, \"category_id\": 1, \"bbox\": [248.8246307373047, 250.23828125, 34.45939636230469, 62.44140625], \"score\": 0.9999979734420776, \"association_id\": 2, \"segmentation\": {\"size\": [768, 513], \"counts\": \"nPn56bg0:I5I8I7M4L3M4L4gYO^Noe0l1N0000000001O1O1O1N2O1O002N1O2N102M2N4K7I[W\\\\5\"}}, {\"image_id\": 24, \"category_id\": 1, \"bbox\": [371.3272705078125, 382.1788330078125, 57.581390380859375, 106.42059326171875], \"score\": 0.9999971389770508, \"association_id\": 0, \"segmentation\": {\"size\": [768, 513], \"counts\": \"omh8?Xg0a0UOf0J5L5M2M3N1N2M3L3O1O2M2N102N1O1N2O1O1O1O1O0010000O10O01O1O10O001O010O2O0N3N2N2M3N2TNT[O>Pe0AZ[O2jd0Me102MM^OjXOb0Xg020N3M2O10i[o1\"}}, {\"image_id\": 24, \"category_id\": 2, \"bbox\": [359.5708923339844, 591.1840209960938, 63.19671630859375, 10.712646484375], \"score\": 0.9999967813491821, \"association_id\": 7, \"segmentation\": {\"size\": [768, 513], \"counts\": \"gj`81ng0101N1O1M3O1000000O1001O00000000000000000000001O0000000O10000000000000000000001O000000000O101O000O2O000000000000\\\\UU2\"}}, {\"image_id\": 24, \"category_id\": 1, \"bbox\": [52.905029296875, 260.3485412597656, 47.86022186279297, 119.72381591796875], \"score\": 0.9999963045120239, \"association_id\": 0, \"segmentation\": {\"size\": [768, 513], \"counts\": 
\"^PX1333_g0b0nZO^Ooa0k0i]OWOUb0P1d]OSOZb0Q1b]OPO]b0S1_]OoNab0T1Z]OmNgb0X1S]OhNnb0\\\\1l\\\\OeNUc0_1f\\\\ObNZc0e1Z\\\\O`Nfc0l1i[OXNXd0h20000000000000hLg[Oi2kd0M3M4L3M4L2N1O2O1N2N2N3N0O3N2N1N2N3M3M3M4L4L2N100N2O2M7C[ed9\"}}, {\"image_id\": 24, \"category_id\": 1, \"bbox\": [254.97805786132812, 359.66302490234375, 57.529693603515625, 104.7017822265625], \"score\": 0.9999933242797852, \"association_id\": 0, \"segmentation\": {\"size\": [768, 513], \"counts\": \"YeR61mg04VXOMag0:01O0LFcXO9\\\\g0GhXO6Pg03PYOMkf0:QYOGlf0h0M3O2M5K4L2N2O0O011N2O1N3M3K4K7K8I3N2N3N1N101O00O101N1IjZObMYe0[27M4L4M4J5J6N5J9H4K3N4L4L7I2M3Nc[f4\"}}, {\"image_id\": 24, \"category_id\": 1, \"bbox\": [140.4371795654297, 388.7621154785156, 60.83866882324219, 85.811767578125], \"score\": 0.9999904632568359, \"association_id\": 8, \"segmentation\": {\"size\": [768, 513], \"counts\": \"Xl]36hg0:H01O00O2O0TYOB_O0bf0>oYO0he01RZO_OFm0le0IWZO`0fe0o0N1O1O1O1O1O001O001O1O0002OO01O1O010O100O1O1O1O1O00001O2N001O1O001O1N2N5K5K4J6E:EPc\\\\7\"}}, {\"image_id\": 24, \"category_id\": 1, \"bbox\": [301.5989990234375, 349.4369812011719, 25.289520263671875, 69.78433227539062], \"score\": 0.9999833106994629, \"association_id\": 15, \"segmentation\": {\"size\": [768, 513], \"counts\": \"^cS73ig0b0A9F6J9YYOeNO3Pf0k1L4N001O0000O1000001O1O3M4L3L4M3I6If0YOST[4\"}}, {\"image_id\": 24, \"category_id\": 2, \"bbox\": [230.8648223876953, 370.3554992675781, 29.778366088867188, 7.062255859375], \"score\": 0.9999608993530273, \"association_id\": 4, \"segmentation\": {\"size\": [768, 513], \"counts\": \"c[^54kg02O00000000000000001O000001N1000001O000000000000000O]Tn5\"}}, {\"image_id\": 24, \"category_id\": 2, \"bbox\": [207.28326416015625, 589.3436279296875, 111.82736206054688, 12.7647705078125], \"score\": 0.9999558925628662, \"association_id\": 10, \"segmentation\": {\"size\": [768, 513], \"counts\": 
\"bbl43lg0101N1O100O101O0O1000000000000000000000O10000000000000000001O000000000000000000000000000000000001O0000000000001O00000000001O0000001O001O0000000000001O000000000000000000000000000000001O000000000000000000001O00001OZmc4\"}}, {\"image_id\": 24, \"category_id\": 2, \"bbox\": [191.63485717773438, 464.1985168457031, 44.84056091308594, 8.806396484375], \"score\": 0.9999525547027588, \"association_id\": 8, \"segmentation\": {\"size\": [768, 513], \"counts\": \"d^`44jg02O100O10000000000000000001O0000000000001O0000000000001N100000000000000O2O0000\\\\Yb6\"}}, {\"image_id\": 24, \"category_id\": 1, \"bbox\": [347.38824462890625, 287.77508544921875, 34.268890380859375, 52.05633544921875], \"score\": 0.999906063079834, \"association_id\": 12, \"segmentation\": {\"size\": [768, 513], \"counts\": \"aQV88fg01O5I4O100O002M3N3L2O2O1O0010O00]YOTOSf0l0aYORO28]f0k0aYOUO_f0V11O1O2N1O2N1O2N2M3N2M6K5YOeXO162[]S3\"}}, {\"image_id\": 24, \"category_id\": 2, \"bbox\": [472.8122863769531, 346.8280944824219, 38.9403076171875, 14.51458740234375], \"score\": 0.9995197057723999, \"association_id\": 0, \"segmentation\": {\"size\": [768, 513], \"counts\": \"R[U;1ng01O2O000O101O0000000000000LL\\\\XO4dg0L[XO5eg04O00001O01G^XO2cg0M]XO3ig0O010O0010O0001O00001O00iT1\"}}, {\"image_id\": 24, \"category_id\": 2, \"bbox\": [374.2431945800781, 336.16900634765625, 31.60919189453125, 7.031097412109375], \"score\": 0.9991666078567505, \"association_id\": 12, \"segmentation\": {\"size\": [768, 513], \"counts\": \"cbi82ng0000O1000001O000000000000001O00000001O0O1000001O0000\\\\mb2\"}}, {\"image_id\": 24, \"category_id\": 1, \"bbox\": [427.1279296875, 316.7380065917969, 52.563018798828125, 101.63912963867188], \"score\": 0.9990072846412659, \"association_id\": 14, \"segmentation\": {\"size\": [768, 513], \"counts\": 
\"h[W:2c02[f01TYO10O71HMif00YYO3N251gf0HXYO?MKjf0HWYOj0jf0WOTYOi0mf05O1O0L6O0POhNU[O\\\\1hd0hN_ZOM>_1Se0iNmZOX1Qe0iNoZOW1Qe0jNmZOW1Se0kNjZOW1Te0iNlZOX1Te0hNiZO[1We0fNfZO\\\\1Ye0g0OO1O100L5K4N4K6I7J3N2N:B9YOc001O2O1N101MX]k0\"}}, {\"image_id\": 24, \"category_id\": 1, \"bbox\": [436.5090026855469, 415.8530578613281, 64.5980224609375, 65.47207641601562], \"score\": 0.9986409544944763, \"association_id\": 6, \"segmentation\": {\"size\": [768, 513], \"counts\": \"_UX:8eg04G;K4M4L2O1O1O000000O101I7O001O1002N0lXOWOof0P1M2OO11N1N2O1O00O101OOO2L4O10OIRYO\\\\Oof0e0QYOYOPg0e0TYOYOmf0e08O100000001O2N2M2N2L6J]XONhg0000ZX22bgM2O3N3O2L5LN2N3IPb8\"}}, {\"image_id\": 24, \"category_id\": 2, \"bbox\": [280.702880859375, 308.272705078125, 35.459808349609375, 4.22808837890625], \"score\": 0.9981787204742432, \"association_id\": 2, \"segmentation\": {\"size\": [768, 513], \"counts\": \"fQf61og01O000O1000000001O000000000000000000000000000000001O00Yfe4\"}}, {\"image_id\": 24, \"category_id\": 1, \"bbox\": [456.3909912109375, 541.409423828125, 49.533203125, 110.7247314453125], \"score\": 0.9979057312011719, \"association_id\": 0, \"segmentation\": {\"size\": [768, 513], \"counts\": \"jaf::Rg0JQYOn0bf0b0E7H8K3M6J;E7I2N1N2O001O1O001O00000000000000000001O1QMX[Od2Ue0M1O2N10O01N2O001O001O001O1O1O1O00100O1N6JPf4\"}}, {\"image_id\": 24, \"category_id\": 2, \"bbox\": [295.33428955078125, 449.5503234863281, 22.565185546875, 6.862518310546875], \"score\": 0.9976884126663208, \"association_id\": 13, \"segmentation\": {\"size\": [768, 513], \"counts\": \"Tfm63kg03O0000000000000000000000000000000001OmQd4\"}}, {\"image_id\": 24, \"category_id\": 2, \"bbox\": [452.70196533203125, 408.3517150878906, 60.29803466796875, 14.675872802734375], \"score\": 0.9961963891983032, \"association_id\": 14, \"segmentation\": {\"size\": [768, 513], \"counts\": \"lTg:1og01O001O000O2O000000001O000O1000000O01000000002O1N1O0000000001O001N10001O0000000000000O101O0000000000001O1OPS1\"}}, {\"image_id\": 24, \"category_id\": 
2, \"bbox\": [307.64739990234375, 413.7010192871094, 45.63848876953125, 7.416839599609375], \"score\": 0.9922298789024353, \"association_id\": 15, \"segmentation\": {\"size\": [768, 513], \"counts\": \"QU[71ng02O00000001O000000O1O2O0O1000000001O00000000000000001O000000000000000000000000Pcg3\"}}, {\"image_id\": 24, \"category_id\": 2, \"bbox\": [30.24979019165039, 409.3680419921875, 44.45646286010742, 8.25390625], \"score\": 0.9863542914390564, \"association_id\": 5, \"segmentation\": {\"size\": [768, 513], \"counts\": \"l\\\\h02ng01N100O10001O000000000001O00O2O0000000001O0000001O00001O01O000001N10000000000OS[Z:\"}}, {\"image_id\": 24, \"category_id\": 1, \"bbox\": [445.6754150390625, 456.373046875, 54.446990966796875, 116.4945068359375], \"score\": 0.9589871764183044, \"association_id\": 0, \"segmentation\": {\"size\": [768, 513], \"counts\": \"oo^:6cg0>F6K6]YOROje0f1K2M3O1O001O1M2O2O00N0M2301N21OO1N3O103L1O0O1O100O100O10O1L5N2O00001N1O101O2N2M1`NZZOa0me0XO]ZO;me0B]ZOOke0OR1M4004L01OPQ:\"}}, {\"image_id\": 24, \"category_id\": 1, \"bbox\": [139.1944580078125, 348.9319763183594, 42.07695007324219, 57.876953125], \"score\": 0.9079351425170898, \"association_id\": 0, \"segmentation\": {\"size\": [768, 513], \"counts\": \"`cX31lg08J4G503N3L1O3M2N2N2N2N3M2O00000000000000000000001O001O1OkN]YOm0cf0QOlYOc0Sf0]ORZON\\\\O<bf0E`ZO0ce0NZfn7\"}}, {\"image_id\": 24, \"category_id\": 1, \"bbox\": [332.6449890136719, 316.70892333984375, 44.645172119140625, 94.62002563476562], \"score\": 0.5341376066207886, \"association_id\": 0, \"segmentation\": {\"size\": [768, 513], \"counts\": \"aRj7m0Rg02M3N2M3O0O1O2N3M101N3L5XZOZNPe0l1^ZOeN\\\\e0T2L1N2N10000O1002N00O13M001O7I3M3M;Eb0UO]YOUOff0c0aYOZOaf0?e0Ded[3\"}}, {\"image_id\": 24, \"category_id\": 2, \"bbox\": [438.9300842285156, 409.2340087890625, 71.93905639648438, 66.03616333007812], \"score\": 0.22527559101581573, \"association_id\": 0, \"segmentation\": {\"size\": [768, 513], \"counts\": 
\"odW;3kg03N2O1N2O01N100O100O2O001O0O10000000000000000000000001OO2OQc2\"}}, {\"image_id\": 24, \"category_id\": 1, \"bbox\": [403.3455810546875, 306.2456359863281, 37.99322509765625, 131.01473999023438], \"score\": 0.1480116844177246, \"association_id\": 0, \"segmentation\": {\"size\": [768, 513], \"counts\": \"Rc^9j0;^Ole0e1I2N2N3L4L4N1N3M23NU[ObMQd0a2m[OnMbc0W2c\\\\O[Mac05k[OX2We0dMjZOZ2md0iMW[OX2hd0fM[[O[2Ue002M1O2M2B_ZOYNde0e1=O1M9F6Ej0ROPUm1\"}}, {\"image_id\": 24, \"category_id\": 1, \"bbox\": [101.85786437988281, 356.9609069824219, 52.66206359863281, 77.29296875], \"score\": 0.10321064293384552, \"association_id\": 0, \"segmentation\": {\"size\": [768, 513], \"counts\": \"^S_21ng01iXO1[Oa0Uf0F[ZOi0_e0WORZOH9\\\\1[e0VO^ZOl0ae0WO[ZOl0de0TO]ZOl0be0fN]ZOi1ce0:00000mM_ZOg1ae0WNbZOh1^e0YNaZOg1_e0ZN^ZOh1be0XN]ZOh1de0VN^ZOj1be092O00000000000O1000010O006J2N1O1O1VNQZO`1\\\\f0J3K8K6J2O2K5M5L3L;FRk_8\"}}, {\"image_id\": 24, \"category_id\": 2, \"bbox\": [0.0, 433.5149230957031, 19.366472244262695, 8.21087646484375], \"score\": 0.08416642993688583, \"association_id\": 0, \"segmentation\": {\"size\": [768, 513], \"counts\": \"c=2mg02O0000000000000001O0O1O^bf;\"}}, {\"image_id\": 24, \"category_id\": 1, \"bbox\": [285.35589599609375, 351.89654541015625, 44.94049072265625, 79.03024291992188], \"score\": 0.05947180837392807, \"association_id\": 0, \"segmentation\": {\"size\": [768, 513], \"counts\": \"Zkf6`02CXg0?YYOGoe06SZOKle00WZO3ie02mYO0CC`f0b0fYO1Zf0DdYOINj0^f0]ObYOH0n0^f0ZO`YOm0af0;`YOaNYf0_1fYOcNZf0a10:FO1N2O1O0000O2O0N3M200100001O000000000001O1N5L4L2M4K5D<nNTYOg0Zk[4\"}}, {\"image_id\": 25, \"category_id\": 1, \"bbox\": [513.1727905273438, 50.147220611572266, 107.94921875, 221.74786376953125], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [683, 1024], \"counts\": 
\"R]l:8Qe04M2N2N1N3WJ_OcFe0[9^O`Fe0_9]O^Fg0`9[O[Ff0g9\\\\ORFh0n9ZOnEh0S:YOkEg0V:YOhEg0Z:ZObEb0e:^OYE`0k:@SE`0o:AoD>S;BiD?Z;AdD?^;B_D?b;B[D?f;BVD?l;BRD=P<DmC=T<CjC>W<CfC?Z<AdC`0]<@bC`0_<@aC?`<A_C>c<B\\\\C=f<BYC`0h<]OYCc0h<ZOZCg0f<VO\\\\Cj0f<RO\\\\Co0^`02XMPOSAR1l>POQAR1P?nNo@R1R?oNk@R1V?oNh@Q1X?QOe@Q1[?POb@Q1^?SO]@n0c?UOX@m0i?SOU@o0k?SOQ@n0P`0WOi_Oj0X`0YOc_Oi0_`0VO]_Ol0e`0TOV_Oo0n`0YOc^Oj0_a0`12N3M4L3N1M7J4L2N2N3M2O0O01000O10N10001N100O10O10O01N2N2O100O1O1dNY]O3hb0KQ^OYOTb0d0S^OTOQb0j0Y1O1O1O2N1O3J6Klo]8\"}}, {\"image_id\": 25, \"category_id\": 2, \"bbox\": [61.5017204284668, 329.9403381347656, 489.279296875, 296.7381286621094], \"score\": 0.9968873858451843, \"association_id\": 1, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"]`Z1`0id07J4L3M2O2M2N2OO1O1O1O1N2O1O1O1O1O00100O100O10000O2N2O1N2N3M2JPh:JZXE3M4L5K4L4f[OG[c0<`\\\\OG`c0;Z\\\\OJdc0o001N100O1O100O01000O01000O01O1O100O01000O100O010O1O001O001O001O010O0001O10O010O010O0001O00O1N2N2O1O100010O010O01000O010O010O0000000O1000010O0010O0100O010O001O000000000000001O0010O01O10O01O1O00001O0O1O01000010O010O010O010O10O0010O00O00001O1O10O100010O010O01O010O001O0O100O1001O01O01O01000O010O1O01O01O000O100001O01O01O10O10O010O10O0001O0000O101O0O110O001O10O010O01O001O00O1M2N3O10O11O00010O0100O010O0010O0000OO2N2O1O2O001O010O10O010O010O01O0000N2O1O1O1000001O010O010O010O01O000000000001O010O0010O010O010O0010OO1O001O1O101O001O010O010O10O0100O001O000000001O000010O010O010O010O01O000O1O1O1N11001O00010O010O010O010O0000000O2O000000000010O001O0100O0O2O1O00100O1O010O10000O10O11N100N2N3M2N2N3L3L5Mh`d;\"}}, {\"image_id\": 25, \"category_id\": 2, \"bbox\": [388.23382568359375, 318.8189697265625, 558.3780517578125, 352.49609375], \"score\": 0.9840161800384521, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": 
\"jS`:8od08H6G9F9K5K5K4O2N2O1N2N101N2O000O101N100O1O100O1O1O1O1O1O2N100O100O10O10O010O01O01O000000001O0001O00001O10O0100O010O1O001O001N101N100000010O0010O010O010O01O001O001O0O2O0O100O1000001O0010O01O010O0010O01N10000M3M2O2N2O2O00001O001O001O001O1O0O1O2N1O1O1O1O2O00001O0010O010O010O01O001O00001O00O2O00010O0010O010O10O0100O001O1N101N100O2O000O1001O01O010O010O100O00100O0010O00000O0010O2O01O01O010O1O100O0O2O1O1O001O001OO2O0O2O001O001O10O010O1N2N101N10N2O10000001O0001O10O01O1000O1O010O0001O000000001N10O0011N101O10O010O010O01O010O00001O00000O101O001O00100O0010O00010O000000000O10000001O001O001O1O010O1O01O01O001O000O100O1O1010O01O010O0100O10O01O010O1N10001O0000O110O00100O010O10O100O1O1O1O1M3N2N2N2N2O0N3M3N00M4001N2N2L5KUQ]3\"}}, {\"image_id\": 25, \"category_id\": 2, \"bbox\": [52.294673919677734, 269.00970458984375, 854.046875, 372.63824462890625], \"score\": 0.9748896956443787, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"]mo14Ve06K5J2O1O2M2O1O2N1N2O2N1O1O0O2O001O0O2O00001O001N10001O000O2O00001OO01000000O10O10O10000O01000O100O0100000O10O1000O1000O0100000O010O100O001O1O001N2O001O001N2O000O2O001O01O01O0010O010O010O0100O1O1O1O2N100O1O1O1O1O1O1O1O1O2O1DZe01oZO3N0O101N1O100O100O1000O2N1O1O2N2N2O1N2NPZ;3feD=G7M2NM2N3O1N2O100O001O1O10O01O01O010N2O1N1N3N2N2O1N2O001N2O1O1O100O01O1O001O001O001O001O00000001O00001O0O2O001O0O2O000O2O1O0010O1O100O1O1O1O1N2O1O1N2M3N2N2N2O1O1O1M2M4N2O1O01O0M4K5O001000O1M4K4MZn]12fQbN4K4M3L4M1N3M2N2O2N1N2O1O2N1O100O1O100O10O010O010O001O10O01O010O10O0100O0100O01000O010O1000O0100O10O100O010O010O001O0O2O001O001O010O001O10O010O010O010O01000O0101N101N101N1O2O1N2O1N2N3M6K6IZd73[[H;M3N2M2O1N2N2N2N1O2N1O1O2N1N2O1N2N2N2N3M2N2O1N2O1O1O1O2O0O1O1O100O10000O100O1000O0100O10000O01000O100O10O10O100O01000O010O10O01000O1O1O1O1O001O1O1N2O1M3N1N3M3J6J5M4N2M201N1O2O001N100000O1012M3M4L4L5J7JVmZ8\"}}, {\"image_id\": 25, \"category_id\": 2, \"bbox\": [452.9819641113281, 446.7431945800781, 337.6976623535156, 215.27908325195312], \"score\": 
0.4140845239162445, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"[SV:3Ue09H5K4M3M2O1N2O0O2O001N101N101N1O2O0O101N101N10001N10001N10001N100O1O1O1O1O1O100O1O100O010O1O1O001O100O001O10O0001OO10000O00101O010O010O10O01O1O001O001O01O010O010O01O1O001O10O0100O10O010O0010O0N2O2O0O2O10O0100O0010O0000O100010O010O010O01O01OO1O1O1O101O010O0010OO10000N2N2O2O0010O01O01O00O100000000001O0010O01O01O0N2O1O10000010O010O1O01O00O1000010O0100O010O010O000O10000010O0100O010O00000001O0010O0100O010O000000N2N2O11O01O01O10O01N100O100O110O0001000O010O0000000001O00010O10O01O010O000000010O01O10O010O0000O11O01O0100O01000O1O100O1N2O1O1O1O100O100O2N1O1O1O1O1O1O1N2O2N1O1O2NWoT5\"}}, {\"image_id\": 25, \"category_id\": 2, \"bbox\": [61.3769416809082, 453.2402648925781, 283.0960998535156, 189.85037231445312], \"score\": 0.2625718116760254, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"XkY1k0\\\\d0=Z\\\\OoN]b0U1]]OTO^b0o0[]OWOcb0k0Z]OWOfb0h0Z]OYOeb0h0[]OYOcb0h0]]OXOab0j0^]OWOab0j0_]OVO`b0k0`]OUO_b0l0a]OUO^b0k0b]OUO]b0k0d]OUO\\\\b0k0d]OUO[b0k0f]OUOYb0j0j]OUOUb0h0P^OWOoa0f0V^OYOja0e0Z^OYOea0g0]^OXOba0g0`^OYO`a0f0b^OZO\\\\a0f0e^OZO[a0e0g^OZOYa0e0h^O[OXa0d0i^O\\\\OXa0a0j^O_OVa0?l^OATa0;Q_OC6CS?g0i@F0IU??m@HLLV?;o@IJMX?9o@IH0X?6QAJG0Y?4QALE1Z?2RAMD1\\\\?ORAOA3_?JRA3^O4b?CTA9XO6ja0IU^O9ja0GV^O9ja0GU^O:la0ET^O;la0ES^O<ma0DS^O<ma0DR^O=na0DQ^O<oa0DP^O<Qb0Do]O<Qb0Dm]O>Sb0Ck]O>Ub0Bk]O>Ub0Bj]O?Ub0Ci]O>Wb0Bi]O>Wb0Ch]O<Yb0Df]O<Zb0Fd]O:]b0Fb]O9`b0H^]O7cb0J[]O6gb0KX]O4ib0LV]O4kb0MS]O4mb0LS]O3nb0NP]O2Qc0Om\\\\O2Sc0R11N2O1O1O100O100O100O1O1O1O1O100O10O0100O001N101O1O001O01O0000O1O2O000001O010O000O1O10000001O010O1O0001O001N110O1O010O001O000001O01O010O010O0O100O10001O010O010O000000O1000001O00010O00O010O100O110O001O001N10O1001O0100O010O01O00O100010O010O0010O000O1O101N110O1O010O00000O11O00010O010O1O00000000010O010000O100O100O100O2N010O1O1O001N2O1O010O100O1O1O1O1M3N2N2O1O100O2N1O2M2N3M3M3L3O3LcSe>\"}}, {\"image_id\": 25, \"category_id\": 2, \"bbox\": 
[216.4913787841797, 277.83612060546875, 353.9451904296875, 263.94097900390625], \"score\": 0.25049740076065063, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"ila4j0`d05K2N2N2O0O1O010N2O100O1O10O0100O10O000001O000O10O101O000010O010O0O101O001O0001O01O01O01O001O010O010O00100O010O010O000O100001O01O0010O01O001O000O100O1O10010O0010O010O0000000O100000001O0010OO2O000O1O1O10001O00010O10O0010O0O10O1000010O010O010O00010N01000000010O010O0010O0000O100000001O010O010O0000000000O11O01O0010O010O000000000000001O010O00010O00O101N101N101O001O10O0001O0000000O10000010O010O00001O00000001O0010O010O010O00000O1O100O1000001O0010O00O100O100O100010O01O010O000O10O1000000000010O010O001O001O001O00100O00100N2O1N1O2N2N101N110O0001O010O01O001N1010O01O01000O010O0000001O001O00100O010O100O100O1O1O1O1O10O0100O101N1O100O1O1O1O2N1O1O101N1O2NmVQ:\"}}, {\"image_id\": 26, \"category_id\": 1, \"bbox\": [744.7945556640625, 168.00531005859375, 274.09039306640625, 655.5925903320312], \"score\": 1.0, \"association_id\": 2, \"segmentation\": {\"size\": [941, 1024], \"counts\": 
\"ck_e0l0\\\\l09I5L4K6J5K6F:F:E;G8J5M3M4J5H9F9H9H8E<B=H9K5L5J6J8I8G8I6I6K5I7H6J6K4L5L3M3M3M3M2N3M2M3M3M3M3N2M3N2N2N2N2N2O1O1N2O1O100O1O01000O100O100O10O01O0O000O0O02O1O1010001O001N2O0O2M2N3N1O1O10O0O1O2N1001N101O2M3N2N1O2N2O1N2N2N2N2N2M3N1O2N2N2O1O1O1N2O1O0O2N2M2O2N1O2O001O01O001O0UCQHY4P8^K^H\\\\4b7]KkH\\\\4W7aKoH[4R7cKSIY4n6eKVIY4k6cKZIZ4g6cK^I[4c6`KbI^4`6[KgId4Z6VKlIh4V6TKnIk4S6RKPJm4Q6PKRJn4Q6nJQJR5Q6kJPJU5T6eJnI[5V6^JnI`5Y6VJkIj5[6\\\\IYJd6n5eHbJ[7c5UHhJj7\\\\5oGhJQ8[5iGhJW8[5dGgJ\\\\8[5`GgJ`8]5XGgJg8_5PGeJP9c5aFcJ`9g5YEVKg:h;O10O10O1O000O2N1O1N2K5M3N1O1OO00LkNSFT_Ok9d`0_13102O101O1O2M3L4L3N3L4M3M3M3M2O2N2M3M3L4L4L4M3N2PHaB0a=NjBGX=8QC^OQ=`0UCYOn<g0WCQOl<n0[CgNj<X1]CZNk<f1eCbMc<]2hCTM]<l2fClL_<S3eCeL`<[3cC]Lb<c3bCRLe<m3bCbKi<^4\\\\DgIZ<Y6]5O100O100001O00001O00001O001O0000010OmKc[OVO]d0f0n[OSORd0j0V\\\\OQOjc0l0^\\\\OPOac0m0k\\\\OjNUc0T1T]OeNmb0X1]]O_Neb0]1c]O\\\\Nab0]1j]O[NYb0\\\\1U^O[NPb0Z1X5Cb0Ai0XO^Sc0\"}}, {\"image_id\": 26, \"category_id\": 1, \"bbox\": [45.800113677978516, 94.31060028076172, 284.9687194824219, 708.8414306640625], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [941, 1024], \"counts\": 
\"^aQ23jl0c0hNY1D=_Ob0F;[@hMc3a2cGUM^M`3Z8AjIhMXMP3i8]OfIPNVMi2P9[OeITNRMg2V9WOdIYNoLd2Z9WOcI\\\\NkLa2`9VO`IaNgL]2g9VO]IgN_LY2Q:VOXIoNVLT2^:TOQIR5m6UKeHT5Y7RK\\\\HU5b7PKVHU5h7PKPHV5n7mJmGV5S8mJfGW5Z8lJ_GX5a8[KgFj4X9VLbEQ4^:_700000O010000000O1O100O1O100O1O1N2N2ZOf0\\\\Od0YOg0hNX1E;G9K5M3N2M3M2L5K5J6K5M3M3M3N2N2N1N3MZMXClDg<T;dCcDZ<];QDYDo;f;XDUDf;l;]DQDc;o;`DoC_;Q<dDkC\\\\;W<gDdCY;^<jD]CV;e<mDTCT;n<RE^BV;i=j1=F:M3N2N2O1O1O001OUDUBV8j=^GhB]8W=]GSC`8l<]G[C`8d<]GbCa8]<]GhCa8W<^GlCa8S<]GPDc8o;[GUDd8i;[G[Dd8d;ZG_Df8`;XGdDg8Z;YGiDf8V;YGmDf8R;ZGPEe8o:[GSEd8m:[GSEf8n:XGQEj8Q;SGnDo8S;oFlDS9V;jFjDW9X;fFhD[9Z;bFfD_9[;_FeDb9];[FcDf9^;WFcDj9^;TFbDm9`;nEbDS:a;eEbD]:a;[EbDg:b;nDbDU;g>4L4L4N2N2O1N3M2N2M3L4K5M3M3N2N3O1O1O1O1O2N102M5K7I6PAaBf=g=aAoBZ>V>L5K4M4K2N2N2N100O001O001O001O001O0001O0001O01O00001O0010O0000000O1000000000000O10000000000000000000000O100000000`KZ@XKg?d4b@WK^?f4j@UKV?h4RARKo>j4[APKf>k4bAPK_>k4kAoJW>m4RBlJo=Q5ZBhJh=T5gB^J\\\\=_5VCPJk<n5_ChId<U6dCcI_<Z6iCYI_<d6hCfHj<W7h4L5J6K6J5L5K4L5K4L4K6I7Gb0aKSXOa2hi0Ba0@=gNW1^Oa0Ab_cd0\"}}, {\"image_id\": 26, \"category_id\": 1, \"bbox\": [484.26605224609375, 110.87329864501953, 275.80682373046875, 730.0535278320312], \"score\": 0.9999998807907104, \"association_id\": 3, \"segmentation\": {\"size\": [941, 1024], \"counts\": 
\"`g\\\\>=cl0e0@a0D8H6K4L4M2M3M2M4L3K6J5L5K4M3N3M2L5L4M2N3M3M3M3N2N2N1N3N1N3L4M2M4M3M3N1O3N1N2O1O1N2O1oZOeJna0\\\\5l]OoJma0R5o]OXKka0i4P^O_Kla0c4o]OeKka0^4n]OkKna0X4h]OSLTb0P4_]O_L\\\\b0]7K6K5K3M2M3N3L3M4M4K5m_OPF`<Y:hB\\\\FR=n9YBbFb=e9nAhFn=^9fAkFV>Y<K6K5K7J6I6K5K5K5K4M3M3M5K5fDf_OW9b`0kEW@R:da0N1N3N1O1O0000001O00000000001O0O10O0100O1O100O1O1N2N1N3M3M2OVLUHVBi7\\\\<XJRCg5i<dJSCZ5h<PKTCP5i<WKTCf4l<`KPC_4P=fKlBW4V=oKdBb3k=eLoAn2^>fMm@Y2U?m52N200O2N3M3M3L4L4L4M4L4M3M3N2M3N1O2N1N2O2N1O002N2M3N3M4L3M2N2N2N1O0000000000000000000000000000000000000000O1O1O1O1O2N1N2N2N2N3L5L4K5L5J8I6I7I6J5L5J7IR1[I`A\\\\LP?m2jA`Le>i2eBULa=b3PCoKW=i3WCjKo<o3]CeKi<W4`C_Ke<\\\\4dC[Ka<_4jCVK\\\\<c4[DfJk;S5\\\\6E:^Oc0E;K4M4L4N200O1O1O1O2O1N1O2N2O0010O00100O2N1O1O10O2N2M5L4L4L3M3M2N3M3K5K6J6I8I7I6J5K5K5Jdne7\"}}, {\"image_id\": 26, \"category_id\": 2, \"bbox\": [1.9157308340072632, 796.0271606445312, 224.92349243164062, 40.18365478515625], \"score\": 0.9999998807907104, \"association_id\": 1, \"segmentation\": {\"size\": [941, 1024], \"counts\": \"lc2<Pm02N1O101O0000001N1000001O0000000O2O0000000000001O0000001O0000001O0000000000001O0000001O00000000001O0000000001O01O00000000010O01]O_SOI0<bl0GkSO2Vl0JnSO6el0O001O1NTeU12iZjN8I2O2N001N102N1O001O0O11O000001O0000000000000000000000001O0000000000001O0000000000001O00000000010O00000000001O000000001O0001O0000000000000001O0001O0000000AbSOO^l01cSON]l01dSOO\\\\l01eSON[l02eSON[l01fSOOZl01fSO0Yl00hSOOYl00gSO0Yl0OhSO1Yl0NgSO2Yl0MiSO2Xl0MhSO3Xl0LiSO4Wl0LiSO4Xl0KiSO4Wl0LiSO4hl00001O0000001O0O101O001N2OkUPg0\"}}, {\"image_id\": 26, \"category_id\": 2, \"bbox\": [397.4413146972656, 803.0931396484375, 218.74264526367188, 44.8505859375], \"score\": 0.9999992847442627, \"association_id\": 3, \"segmentation\": {\"size\": [941, 1024], \"counts\": 
\"UYa;2Zm0101N2O00001N100000001O00000O10001O0000001O00000000001O0000001O00000000000010OO1000O100000000000000001O1O0000001O0000000O2O000000001O000000001O0000000000001O0000001O00001O0000000000001O000000001O00000000001O0000000000001O01O0001O000000001O01O0001O0000000010O0001O000000000001O0001O00000001O00000001O00000000000000000000001O00000000000000EfSOCZl0<hSODWl0;jSOEVl0:lSOETl0:nSOESl08PTOFQl08RTOFPl07d0O1O2O0OX\\\\e<\"}}, {\"image_id\": 26, \"category_id\": 2, \"bbox\": [666.4140014648438, 768.275390625, 282.744140625, 41.89154052734375], \"score\": 0.999996542930603, \"association_id\": 2, \"segmentation\": {\"size\": [941, 1024], \"counts\": \"ki_c02Ym03O00001O000O2O00000O101O00000O1000000O100N2O100O1O1O10O100O10000O0100000O10000000000O1001O0000000000000000010O000000000000001O000001O000000001O0000000000001O000000000000000000000000000000001O0000000000000001OO10000000000000000000000000000000000000001O00000000000000000000000001O0000000000000000001O000000001O0000000000001O00000000001O0000001O00001O01O00000001O00001O00001O01O0001O2N1O001O0003YO[SO`0jl0O2N5L3L1O1O001O002NRQn3\"}}, {\"image_id\": 26, \"category_id\": 1, \"bbox\": [345.9049377441406, 184.8400421142578, 140.27239990234375, 417.6285400390625], \"score\": 0.9911012053489685, \"association_id\": 0, \"segmentation\": {\"size\": [941, 1024], \"counts\": \"V]V:8Tm09G3M3N001O1N1QUO[OVi0f0fVOCTi0=hVOLSi04hVO6Si0KfVO>Wi0BfVOd03hNlg0e0mWOh00SOhg06VXOT1_OTOTh0H[XOh2_g0YM^XOm2^g0SM_XOS3_g0mL]XOX3`g0lLVXO[3jg0T10000O1000000000001O1O001O000O0100O1O1Ob\\\\OQK`>o4PAhKi>W4m@`Lg>_3m@\\\\Me>c2WAnM]>S2`ASN\\\\>m1bA[NX>f1fA_NV>b1gAbNW>`1fAcNX>_1eAcNZ>^1cAdN]>]1]AiNb>X1ZAkNf>U1WAoNh>V1PAmNP?Y1d@mN\\\\?Y1Z@lNe?^1l_OgNT`0Y70001O1O00PJ__OUO``0k61O1O1O100O1N2QOe_OjD]`0n:R@hDQ`0U;U1M3L4K5M3N2N2O1N2N2N2N2O1O1N2O1O1N2L4N2N2N2M3J6H8L5K4M3N3M2N4K5I?UHlZOd6Uf0H7H7I6J6K5L4M3M4L3L4K5jLZXO`0jg0^O\\\\XO9ig0F\\\\XO1ig0M\\\\XOJjg05[XOCig0;^XO\\\\Ogg0>aXO[Ofg09hXOXOdg0a0j2I6JVo_?\"}}, {\"image_id\": 26, \"category_id\": 2, \"bbox\": 
[344.564208984375, 579.359130859375, 64.92098999023438, 12.59051513671875], \"score\": 0.796180784702301, \"association_id\": 0, \"segmentation\": {\"size\": [941, 1024], \"counts\": \"Ycm95Wm02O1O000000001O0000000000000000000000001O000000000001O000000000001O000001O00000000000001O0000000001O000000001O0000001O0000000Odbda0\"}}, {\"image_id\": 27, \"category_id\": 1, \"bbox\": [301.1077575683594, 331.05645751953125, 132.26370239257812, 212.46234130859375], \"score\": 1.0, \"association_id\": 6, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"ZX_69od05hM1o^O1n`05m^OMPa0:i^OITa0g0\\\\^O\\\\Oba0m0S^OVOla0o0m]OUOPb0R1i]OPOTb0U2K5K6L3M3L4L4M3N3F9O3M2N2N2N1O1G9N3N1N2N2K5N29G6J2N2N01N1O100O2N1O2L4M3M3L3K5M4M2O1N2N2O1N3N1O1N200O10000O1000001O00000000000000000000000000000000O1000000000000000000000000]J_A\\\\4a>bKeAZ4[>eKhAY4X>fKjAX4W>gKmANOj2T>XMbCa2_<]MiC[2X<dMmCX2S<gMTDS2m;jM`Dj1a;UNeDl0[L^OQ?DfDg0aLBk>EfDg0aLBP?@bDj0bLCRc07S]ODSc05T]OEQc06V1Ka\\\\`<\"}}, {\"image_id\": 27, \"category_id\": 1, \"bbox\": [918.15283203125, 335.0902099609375, 105.84716796875, 210.16943359375], \"score\": 1.0, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"oSXc0=fd0?Bd0]O8K4L4E;K6K3G9E;F9N3N2O1G900O001N102N1O1N2J6O2N1O2N1N3N1N2N3L3N2O100O2O0O1O1N2N200O100O100O1N2N2N2L4N2O1N2M3N2O1N2N2N2O1O1N2O1N2O100O100000000000O100O1O1O1N2O1O1O100O1O1N2O100O100O1O1O1O1aNVA\\\\Lk>^3]A_Ld>\\\\3cA`L`>V3lAdLY>T3`G\"}}, {\"image_id\": 27, \"category_id\": 1, \"bbox\": [633.22998046875, 300.90850830078125, 126.3948974609375, 212.504638671875], \"score\": 1.0, \"association_id\": 4, \"segmentation\": {\"size\": [683, 1024], \"counts\": 
\"d[_=6eb0Ki]O2Y1?a`0g0k^O@Qa0f0i^O\\\\OSa0n0b^OXOTa0d2N2N2N3L3L4M3N2N2K5K5M3M3L4L4M3N2M3N2N2N2O1N2O1O1O1M3K5L4N2O1M3N2N2O1N2O1N2O10000O1000000000000000000000000000000000000000000000000000O100O100O1N2N2N2O1N2N2O1N2N2N2hJi@[4X?bKm@Z4U?dKn@Z4S?eKPAW4R?hKQAT4Q?kKRAR4o>nKRAP4o>oKSAo3o>oKUAm3l>QLYAj3i>TL[Ah3h>UL\\\\Ah3f>TL`Ag2aNXNda0c1b^OcM08ba0Q2n^OkMWa0P2m^OiMZa0S2V1E;F7J<E9F:Ef[b5\"}}, {\"image_id\": 27, \"category_id\": 1, \"bbox\": [740.6878051757812, 265.0802001953125, 149.653076171875, 233.4775390625], \"score\": 0.9999998807907104, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"eXe?W1Qd09H:F4M2N2M4L3N2M1O01O0100N3N2L3N3N2N2Q_O`MQ?b2m@dMk>`2SAdMf>a2YAaMd>`2\\\\AbMa>`2^AbM`>a2]A`Ma>^1X@kNT1I^>^1k@`Nd04]>^1TA]N;8^>\\\\1ZA[N6:_>[1\\\\A]N2:_>[1_A]NO9a>Z1aA_NBa0j>S1dA]N]Od0m>P1gA]NXOg0o>m0iA6W>JjA4V>NjA1V>OjA1V>OkA0T>1mANR>3mANR>4lAMR>5mALQ>6mAMR>4lAMS>5kALU>5gAMZ>5_A0a>\\\\3100O001O1O001N1O2N1O1O101O100O00100O100O1O100O1N2N2M2N3N2O1D\\\\@nJg?k4`0L4L4N2O1N101N2N21O1O1N101O0O2O1N3M2OUOg_OeLX`0Z3k_OdLU`0[3m_OdLS`0[3o_OdLQ`0[3Q@XLJMV`0j3R@ULMOP`0m3P11O0O100000O100O100F:O0O2L4K5M3N2O1N2O1O1O2M2M3N3L4L3L5]OU]O\\\\NTc0[1c0J7E<TOaii2\"}}, {\"image_id\": 27, \"category_id\": 1, \"bbox\": [66.94378662109375, 241.68759155273438, 177.33480834960938, 327.2596740722656], \"score\": 0.9999995231628418, \"association_id\": 1, \"segmentation\": {\"size\": [683, 1024], \"counts\": 
\"`d_1:ld0f0WO:H8jAbNT8f1hG_No7j1mGZNP8f1PHZNP8e1RHZNm7f1TH[Nk7c1VH^Ni7c1VH_Nh7b1`GXN`K=n<\\\\1]G_NeK1o<`1XGcNd9\\\\1ZFfNf9[1XFgNg9Y1YFhNg9W1YFjNf9V1ZFkNf9e0TBiNV4c0e9b0[BfNQ4i0c9`0_BdNo3l0b9?oFCP9<QGEn8;RGFm89RGIn86QGMn83PGOP90nF3R9KmF8S9\\\\OVGg0i8AmFe0P9AgFf0U9\\\\OgFk0T9XOnBlNQ3S2m9VOgB@h2`1\\\\:8_EO\\\\:2bE3Z:NdE8W:IfE>U:BiEf0Q:[OlEk0P:UOoEo0n9QOPFS1o9lNoEX1o9hNoE]1n9cNoEb1P:]NfEn1Y:RNaEV2]:iM`E]2_:bM^Eb2b:mJ^Dm1i0_3h:]JkDn17l3g;QLYDQ4g;nKWDU4i;jKVDX4j;gKTD[4n;dKlCa4V<_K`Cj4`<P21O2N1O1O1O1O1O0O2O00000O1000O01000000O1000000O0O101O000001O1N2O1N2O1O1O1N2N1O2O1O1O100O1000000O100000000000001O1N2O1O1O1O2N2N2N2O2M2N1O001O0000jGVDl6i;SI\\\\Di6d;UIdDe6];YIiDa6X;]IoD^6Q;`IUE\\\\6k:cIXE[6h:cI\\\\EZ6e:fI]EW6e:gI^EV6c:jIcEk5b:UJfEm4P;SKXE_4o:_KVEY4n:gKUET4m:lKVEP4k:QLYEg3j:YL`E[3a:fLdER3_:nLcEn2_:RMbEl2`:SMbEj2_:WMcEe2^:[MeEa2]:_MeE]2\\\\:cMfEZ2\\\\:eMeEY2\\\\:gMfEV2\\\\:gMjER2[:kMkEm1Z:oMnEh1W:UNoEd1U:XNQFa1S:]NRF[1T:aNTF6WK=cS]`0\"}}, {\"image_id\": 27, \"category_id\": 2, \"bbox\": [553.5386352539062, 564.7638549804688, 176.14227294921875, 25.6949462890625], \"score\": 0.9999992847442627, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"Q]k;310Qe0601O001N1000001O00001N100001O01O0000001O00000000000000001O00000000001O0000000000001O001O0000000000001O00000000000000000001O0000000000000O100000000O1000000O100000000000000000000000000O100000001O000000001O00001O00001O0000001O000000001O00000001O01O001O0000001O0000001O000000000000001O0000000000001O0001O0000000001O00000O2NYVT6\"}}, {\"image_id\": 27, \"category_id\": 2, \"bbox\": [136.67578125, 549.4083862304688, 327.2726745605469, 34.12591552734375], \"score\": 0.9999986886978149, \"association_id\": 1, \"segmentation\": {\"size\": [683, 1024], \"counts\": 
\"ZXS31We06M1N2N1000000O10001O0O100000000000000000001O0000000000000000000001O01O01O0000001O1O001N103KR[=LQeB3L7L3M2O1O001O0O10001O0000001O000000000000000001O00000001O00000000000000000000O101O00000001O0000000000000000000001O0000000000000000001O00000000000010O0000000000000001O00001O000000001O000000001O0000001O0000000001O01O000000000000001O0001O00000000000000000000000000000000000000000000000001O0000000000000000000000001O0000000000000000000000000O100000000000000000000000000000000000000000000000000O10000000000000001O000O10000000000000000O1000001N10000000000O1000000OnnP<\"}}, {\"image_id\": 27, \"category_id\": 1, \"bbox\": [445.3265075683594, 310.351318359375, 173.12753295898438, 260.1434326171875], \"score\": 0.9999983310699463, \"association_id\": 2, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"kd^9=ld04K4L4YNXOd^On0Ua0_OP^OV1ka0X1M3M4K3M4L4L3N3M2M4J5E<K4M4N1N3M2M4M3M2O2N2PNoJcCR5X<UKdCn4Y<WK[CR5e<SK^Bf5`=S101N2O1O2N1N2O1O001O1O001O00001O001O1O00000O10001O000000000000001O00001O00000001O00000000001O001O001O2N3M3M2O2M2N3M3M2N1O2N1O1O1O1O2N1O1O000000000000000O100000000O3N3L5fIaAI9o4h?oJo_O_4b`0jKU_Oe3o`0QLZ_Ol3Ta0M3N3M3M4M3L6J4K4M1N3N1N2_N_]O6db0Ga]O4bb0Jb]O0db0La]ONcb0Ob]OJbb04b]OGbb07b]OBdb0:Y1N1M3N1OY[OJ[d06f[OIZd07=00O10O1000O10TKJcD6\\\\;KVDMRL8g?MUDNRL5i?MTD0RL2k?MUDORL2l?MRD0TL1fc0N\\\\\\\\O000Sc0Om\\\\O30Lec03Y\\\\O3Vd0Nh[O3W[`8\"}}, {\"image_id\": 27, \"category_id\": 1, \"bbox\": [864.0389404296875, 306.92022705078125, 93.27288818359375, 208.01361083984375], \"score\": 0.9999977350234985, \"association_id\": 3, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"[fPb0^1bc0]1kN6K4K4K5J5J7L3N2M3N2N2L4L4M3M3N2O1M3K500O1O100O1O1O1N2H8L4N2M3L4N2N2O1O1O1O1N2N2O100O1O1O1O1N200O1O10000000O10000000000`NfAnK[>k3oARLQ>l3WBnKi=Q4[BmKd=f3ZAlKX1:_=f3QCWLP=a3a2L4O1N2Nk@eL^<Y3k2WMe^O]1`a0aNg^OX1[a0gNi^OU1Xa0kNn^On0Sa0QOY_Ob0i`0^O[_O<g`0Ba_OOi`0N[2O1O2N1OUga1\"}}, {\"image_id\": 27, \"category_id\": 1, \"bbox\": [49.82567596435547, 215.81982421875, 
117.19982147216797, 285.9864196777344], \"score\": 0.9999949932098389, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"^fQ1h1]c0:H5J5L4M3N3K4I8K5M3M3K5K4M4L4H7F;ITNf_OTOU`0S1h_OQOo?\\\\3H:H6K3L3N2N3M2N1O1O2N2N2LbN[A]La>a3fAbLT>^3nAcLo=]3TBSLGUOS>i4YBlKJYOl=l4\\\\BdKM@e=m4fBVKLL\\\\=o4Q21000001O000O1000N3L3N3N20O100000N2N2NiB`Kg9S4dFRLY9m3iFRLW9n3iFTLU9l3lFVLQ9i3RGVLm8i3TGWLl8h3TGXLm8f3TG[Ll8W3]C_Li3:j8W3bGiL^8W3bGjL]8V3dGiL\\\\8W3fGhLY8X3YGVMh8h2RFcLlMn0R<^2oERM`Mf0`<_ObCd1e0hM1f2h;lMfCc1<TN0`2n;gMkC`1:ZNFe2T<`MPD\\\\18aN\\\\Oi2[<YMVDY15Z2e;\\\\LYDW14\\\\2c;\\\\L[DT1U1`1a:WM_DU1S3C_8VOaDS1b3XOm7CdDP1g3XOe7IeDl0h3[Oa7LiDc0h3B\\\\7NnD=g3EZ70PE8g3HY7ORE6g3KX7NSE1i31V7KRK5P5IPK7P5HRK6o4JQK6n4KSK1o41QKIS58o70010000001O2N2N4Khb1NZ]N3M2O1N20Mlika0\"}}, {\"image_id\": 27, \"category_id\": 2, \"bbox\": [596.9928588867188, 502.22222900390625, 278.098876953125, 40.41754150390625], \"score\": 0.9999891519546509, \"association_id\": 3, \"segmentation\": {\"size\": [683, 1024], \"counts\": 
\"n[a<110Ve04O001O0O101O00000O2O000000001O000CGh[O9Wd0Hi[O8Wd0Ih[O7Xd0Ih[O7Xd0Ih[O7Xd0Ih[O7Wd0Ji[O6Wd0Ji[O6Wd0Ji[O6Wd0Ji[O6Wd0Jj[O5Vd0Kj[O5Vd0Li[O4Wd0Kj[O5Vd0Kj[O5Wd0Ji[O6Wd0Ji[O6Yd0Gh[O9ed001O000000000000000001O0001O00000000000001O0000000000000000000O2O000000000000000000000000000000000000000000001O0000001O00001O00000000001O0000001O01O000001O000000001O01O000000000001@DP\\\\O;oc0Ho[O8Qd0HP\\\\O7Pd0Jo[O6Qd0Jo[O6Pd0LP\\\\O3Pd0No[O2Qd0No[O2Qd0On[O1Rd0Oo[O0Qd00o[O0Qd00o[O0Qd01o[ONQd02o[ONQd02o[ONQd02o[ONQd02o[ONQd02o[ONQd02o[ONQd03n[OMRd03o[OLQd04o[OLQd04o[OLQd04o[OLQd04o[OLQd04o[OLQd04o[OLQd04o[OLQd03P\\\\OMPd03P\\\\OMPd03P\\\\OMPd03P\\\\OLQd04o[OLQd04o[OLQd04n[OMRd03n[OMRd03n[OMRd03n[OMRd03n[OMRd02o[ONQd02o[ONQd02o[OMRd03n[OMRd03n[OMRd03n[OMSd02m[ONSd02m[ONSd01n[ONSd02m[ONSd02m[OMTd03l[OMTd03l[OMTd03l[OLUd04k[OLUd04k[OLUd04l[OKTd05l[OJVd04l[OKTd05l[OIVd07?0000000000000000000001O000000000001O00000001O0000000000001O01O000000000000010O000000000000001O000001O0001O000000000000000000000001O000O2O001N102MVl\\\\3\"}}, {\"image_id\": 27, \"category_id\": 1, \"bbox\": [234.59022521972656, 227.94200134277344, 125.07011413574219, 205.41184997558594], \"score\": 0.9999403953552246, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"_PQ58Qe05K4M4K4hNYOh]Oi0Ub0[Oh]Oi0Tb0ZOh]Om0Qb0UOn]Om0Pb0TOo]Oo0na0ROP^OR1ma0oNR^OV1ia0kNU^O[1fa0fNX^O^1ea0cNY^Oa1da0`NZ^Od1ca0]N[^Oh1`a0ZN]^Om1^a0TN`^OP2]a0RN^^OU2^a0P1L4L3M2N2N2N2N2N9G2N2N3M6J3M2N1O1O1OTNf@hMY?U2l@dL_Ol0d?_2YAaMf>_2[AaMd>_2\\\\AeMa>[2_AkMZ>V2hAjMROjNg>Y3_BjMgNnNk>U3eBiM]NTOn>P3UChMl<T2ZCiMg<T2\\\\CkMg<S2YClMi<R2ZCkMi<R2YClMj<R2VCmMl<P2WCnMP=i1VCSNn<g1UCYNk<d1WC\\\\Nj<b1WC^Nj<`1XC_Nk<\\\\1WCdNk<Y1WCfNj<X1ZCeNh<X1ZCfNi<W1h3K5L3M32O1N1O0000O11O1O100O1O1O1OO5L3M2N10002O1O2M1O01N7JO1O1M4M3L5L_lW>\"}}, {\"image_id\": 27, \"category_id\": 2, \"bbox\": [412.137451171875, 439.92913818359375, 39.256988525390625, 10.071868896484375], \"score\": 0.9997121691703796, \"association_id\": 0, \"segmentation\": {\"size\": 
[683, 1024], \"counts\": \"^Wc82We04N1O1O0000010O00000000000000000001O000000010O000O2O00000O100O101O0O_]P<\"}}, {\"image_id\": 27, \"category_id\": 1, \"bbox\": [524.6802978515625, 299.424560546875, 119.545654296875, 239.079345703125], \"score\": 0.9997065663337708, \"association_id\": 7, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"QQ`;3Ue03N3M2O1^JIPF8n9NkE4S:0gE4Y:OkDk0S;YO^B5Bl0o=SOZB6Aj0U>SO\\\\BIGQ1l=YOaB[OJZ1e=]OcBQOMa1_=@\\\\D?b;F]D7b;N\\\\D1c;6XDaNlLn0k>i0fDlN[;Y1eDbNZ;`1iD\\\\NV;f1QBkMj1<U<k1oAnM\\\\ONP28d<n1mAaNYO_Oc12U=V4gBjKW=\\\\4eBdKY=`4eB`KY=d4dB]K[=g4_B\\\\K_=h4\\\\B[Kc=h4ZBYKd=k4XBWKf=V5lAmJR>W6N2O1O1O1N10M3L4O2O0O2O1O1N2O1N21O000000O1O1O1O01000000000O2M2O100O1000000O1N2N3M2N3N2M3G;cMj@SNe?oNi_OY2W1hMR`0n1m1\\\\Oa0G:F;Fn^R8\"}}, {\"image_id\": 27, \"category_id\": 1, \"bbox\": [384.3432922363281, 210.56666564941406, 81.89117431640625, 119.37580871582031], \"score\": 0.9519389867782593, \"association_id\": 5, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"[]T81Ye03M3kZOMhd0`0L3M3YN[Od^Oi0Xa0ZOd^Oj0[a0WO`^On0_a0XOY^Ok0ea0WOW^Om0fa0WOT^On0ja0UOP^OP1na0SOe]O[O2d1Yb0V1N101M2O2O001O1N10001N1000001O0O10000O10001O000O101O02N1O00O1O21N2N3M00O11O4L2O6I1O1O3M3M3M3M2N1O1O2N5K2M2O2N2M4M4J6Dg0^O5K:E_lc;\"}}, {\"image_id\": 27, \"category_id\": 2, \"bbox\": [455.0196533203125, 324.9091796875, 31.150543212890625, 9.923004150390625], \"score\": 0.8625460267066956, \"association_id\": 5, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"ei_92Ye01N1000000O101O00000010O000001O000001O000O20OO10001O1Od`X;\"}}, {\"image_id\": 27, \"category_id\": 2, \"bbox\": [237.46109008789062, 475.1031799316406, 84.82296752929688, 17.95623779296875], \"score\": 0.5278620719909668, \"association_id\": 6, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"lVS54Ve0101O0O100000000000001O0000001O00000O1000000000001O000O101O000000001OO100000001O00000O100O1O100O100O10000000O2O000000000000000000O10000O2O1N2N^hg>\"}}, {\"image_id\": 27, \"category_id\": 2, 
\"bbox\": [775.4871826171875, 485.45269775390625, 92.6728515625, 16.353179931640625], \"score\": 0.5255075097084045, \"association_id\": 4, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"ggf`01Ye02O0K501O000000000000000000000000000001O0001O01O000000001O0000000001O00001O01O000001N100000000000000O2O0000000001O00001O1O1O1O3LklX3\"}}, {\"image_id\": 27, \"category_id\": 2, \"bbox\": [607.7725219726562, 499.0306396484375, 52.63824462890625, 15.8922119140625], \"score\": 0.5231958031654358, \"association_id\": 2, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"lPf<2Xe02N101N1O1O2O0O1O1O1000000000000000000000000000O100000000O2O000O101N2O1NbYl7\"}}, {\"image_id\": 27, \"category_id\": 2, \"bbox\": [583.144775390625, 515.9005737304688, 235.85906982421875, 30.7103271484375], \"score\": 0.512158989906311, \"association_id\": 7, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"^f\\\\<1Ze02M101O0O3N2N1O000000001O000000000000001O00000001O00000000000000000000000010O0000000000000000001O000000000001O00000000000000000010O00000000000000001O0000000000000000000000000000000000000000000000001O00000000001O00001O0000010O000000001O0000000001O0001O00000001O000000010O0000HAd[O?\\\\d0Ad[O?\\\\d0Bc[O>\\\\d0Dd[O;\\\\d0Ed[O;\\\\d0Ed[O;\\\\d0Ed[O;[d0Fe[O:[d0Gd[O9\\\\d0Gd[O9\\\\d0Ge[O8[d0He[O8[d0He[O8[d0Id[O7\\\\d0Ie[O6[d0Je[O6[d0Je[O6[d0Je[O6[d0Je[O6[d0Je[O6[d0Je[O6[d0Jf[O5Zd0Kf[O5Zd0Le[O4[d0Le[O4[d0Le[O4[d0Le[O4[d0Kf[O5Zd0Kf[O5Zd0Kf[O5Zd0Kf[O5Zd0Kf[O4[d0Le[O4[d0Ld[O5\\\\d0Kd[O5\\\\d0Kd[O5\\\\d0Je[O6[d0Jd[O7\\\\d0Id[O7\\\\d0E_[O159]d0F^[O159]d0F_[OO5;\\\\d0F_[OO5;\\\\d0Fg[O:Yd0Fg[O9Zd0Gf[O9Zd0Gf[O9Zd0Gf[O8[d0He[O8[d0He[O8[d0He[O8[d0He[O7\\\\d0Id[O7\\\\d0Id[O6]d0Jc[O5^d0Kb[O4_d0La[O3`d0M`[O1bd0O^[O0cd00<0000001O000000000000000000000000000000000000000\\\\WZ4\"}}, {\"image_id\": 27, \"category_id\": 1, \"bbox\": [544.6445922851562, 307.94122314453125, 310.46875, 201.80410766601562], \"score\": 0.17188115417957306, \"association_id\": 0, \"segmentation\": {\"size\": [683, 
1024], \"counts\": \"i[i=3We04L3L2O2L3ZOB\\\\\\\\Oa0ac0EX\\\\O?ec0f0N100002N>B6J2M3N1O1O1N2O1N1000O2N10000dJAkEa0S:AlE?T:AlE?S:CkE>U:BkE>U:BkE>U:BkE>U:BjE`0U:_OlEa0T:_OlEa0U:^OjEc0W:[OjEe0]?1O100O101OO1000dJXOUFh0k9ZOSFe0n9\\\\OQFd0o9]OPFc0P:^OnEb0R:_OnEa0R:@mE?T:AlE>U:BkE>U:AlE>V:AkE>U:BkE=V:BkE=V:BkE=V:AlE>b?L3O1N4NZX=KhgB4O02Mb]g0O[bXO5O101N101N1002N2M3N2M2Oad82\\\\[G1O2N20N2N2O1NcnV4\"}}, {\"image_id\": 27, \"category_id\": 1, \"bbox\": [375.0379943847656, 305.7157897949219, 347.5675354003906, 248.01351928710938], \"score\": 0.09426606446504593, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"Seb92Xe03L4K5K4M3N2M3CWOU\\\\Oj0jc0YOS\\\\Oh0Yd0O2M2N3L6I9GU_81k`G3L5M2N2O0P\\\\OCQc0>n01O000O10000000000O100000000000000000O100000001O00001O0O10]]OMo?2n_O6o?Ho_O?n?^Ob^O3?e0@QOh`04W_O;>g0XOPOTa0KV_O`0<X1Pa0jNn^OX1^OWNTa0b0^_OY1YOYNXa0?^_OY1XOYN[a0=]_OZ1WOZN\\\\a0<]_Og1c`0XN^_Og1b`0ZN]_Of1c`0ZN]_Og1c`0XN]_Oh1c`0YN\\\\_Oh1c`0XN]_Oh1BcMe`0d0i_Oi1AdMf`0d0h_Oi1_OfMh`0a0j_Oh1]OhMi`0a0h_Oi1]OhMj`0a0f_Oh1_OhMj`0b0d_Oi1@gMk`0c0a_OT2c1PMi<P1^AS2a1UMP=U4oBlKQ=U4nBkKQ=V4nBkKR=U4nBkKR=V4lBkKT=U4lBkKT=V4jBkKV=U4jBlKU=T4kBlKU=S4lBmKT=S4kBnKV=P4kBPLU=P4kBPLU=o3lBQLT=o3kBRLU=`3YA[Lc15T=^3]AZL`19S=Z3aAZL]1<R=Y3dAXLZ1`0R=k2\\\\AjLR2;b<j2bAdLm1c0b<g2iCZMW<e2fC_MZ<_2dCeM]<V2eClM[<o1hCSNY<_1RDcNo;X1SDjNn;_O[A8m2e0i;nN`ALU3W1\\\\;jNcAKR3\\\\1\\\\;gNdAKP3_1];dNdALP3`1];cNcAMP3`1];bNdAOo2]1_;bNcA1o2\\\\1_;bNbA2Q3Y1^;dNbA3V3S1Y;iNaA4[3m0n;SOZD`0k;AXD6n;I`BNgNNP?5UB2]?N`@7^?HV@f0Xb00000000O100O001O00000010N2N1N4M2L9EZPc8\"}}, {\"image_id\": 28, \"category_id\": 1, \"bbox\": [341.5411071777344, 295.6297607421875, 55.96923828125, 168.217529296875], \"score\": 0.9999997615814209, \"association_id\": 1, \"segmentation\": {\"size\": [768, 1024], \"counts\": 
\"hjP8c0[g04K5M2N3L4L4M6K3M4K8H7G6L5K6G;F8X\\\\OjLZb0[3^]OkL^b0[3\\\\]OhLcb0^3U]OeLib0d3l\\\\OaLRc0W4O1O100O100N2O1O10000O10O10O10YOY]OULhb0l3T]OWLlb0^42PKV]Of4Uc0M3M1PNi\\\\OUOXc0b0S]O[Onb0a0X]O\\\\Ojb0a0Y]O]Oib0`0[]O]Ofb0=d]O^O]b0?T^OQOna0m0\\\\^OjNda0V1a^OcNWb0=mWh>\"}}, {\"image_id\": 28, \"category_id\": 1, \"bbox\": [180.80673217773438, 295.47698974609375, 154.41290283203125, 230.4326171875], \"score\": 0.9999990463256836, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"QXX41ig0<G6L1N2M3lMYOm\\\\Oj0Pc0YOn\\\\Oh0Qc0YOn\\\\Og0Rc0ZOn\\\\Oe0Sc0\\\\Ol\\\\Oa0Wc0_Oh\\\\O`0Zc0@f\\\\O=\\\\c0Dd\\\\O;_c0C`\\\\O<cc0B^\\\\O=ec0@Z\\\\O`0]`0@lBa0P=FmB9S=KjB3X=NgB2Z=NdB2]=0_B2b=N\\\\B3e=NYB2i=LXB3j=KVB5n=FTB:o=BQB>hb010OO2O0XN^OS\\\\Oc0kc0AR\\\\O?mc0EQ\\\\O:Qd0Do[O<Td0@m[O`0ie00O101O0O2O0O1mLXOn^Oi0Ra0YOl^Of0Va0[Oh^Oe0Ya0[Of^Of0Za0[Oc^Of0^a0\\\\O^^Of0ba0]OY^Oe0ga0\\\\OV^Oe0la0YOS^Oi0Sb0QOk]OQ1Wb0mNg]OT1bd01O1O00O01O1O100O2H8B>M2N3O00O100O20N1O1M3YOf0E;O110N2N10100O101O1O1O100O1O2N1N3M3N2N2N1N3[^OkK[?W4c@mKX?W4e@lKY?U4f@lKW?W4g@kKW?X4f@jKW?Y4g@jKV?Y4h@hKW?Y4h@hKW?Z4g@gKX?[4e@gK[?Z4\\\\@nKc?T4Y@oKg?Q4W@QLh?Q4V@PLj?R4Q@QLo?Q4g_OWLY`0a1`_Om0a`0POh_Oj0X`0TOk_Ok0U`0UOm_Oh0T`0XOn_Od0T`0]On_ONd`02__OIc`07^_OGc`08__OFb`0:`_OBc`0<`_O@b`0?a_O]Oa`0b0a_O\\\\Oa`04]^OnMS1n1a`0MT@1o?_Oa@=d?YOe@c0_?nNo@o0W?gNQAV1nb0N2N3L4L4L3N2N3LSb\\\\`0\"}}, {\"image_id\": 28, \"category_id\": 1, \"bbox\": [400.9846496582031, 291.6043701171875, 50.00164794921875, 136.06356811523438], \"score\": 0.9999984502792358, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"hc^91T13Xe0=`ZOHZd0c1][OeNXd0c1_[OdN]d0c1Z[ObNfd0\\\\200M3N2K4OH9]O=302O201O101O1O3m[OQMob0U3i\\\\OnLVc0Z3_\\\\OiLac0k301O000001O1N3N2\\\\OZ\\\\OjLic0P3i0@?H6L6K5L1M3N3oNQ1L6K3O1N4M2Mjm^=\"}}, {\"image_id\": 28, \"category_id\": 2, \"bbox\": [245.4810028076172, 555.7147827148438, 334.084716796875, 35.9326171875], \"score\": 0.9999980926513672, \"association_id\": 2, \"segmentation\": 
{\"size\": [768, 1024], \"counts\": \"\\\\iR61og0001O0000000000001O000000000000001O0000000000001O00001O0000000000001O00000000000001O01O01O000000010O000000000001O000001O000000000000000001O0000010O00000000001O00001O00001O0000001O0001O0001O000000001O0001O00000000000001O00000001O00000000000000000000000000000000000000000000000000000001O00000000000001O000000000001O0000000000000000000000001O0000000001O000001O0001O0001O000001O00000001O0001O00000001O00000000001O01O0000000001O00000000001O0000000000000000001O00000000000000000000000000000000000001O0000001O00000000001O0000001O01O01O000000001O0000001O01O00000001O0000000000001O000001O000000000001O0000000000001O00000000001O0000001N10001O000Oim]:\"}}, {\"image_id\": 28, \"category_id\": 2, \"bbox\": [670.3475952148438, 563.2933349609375, 309.441650390625, 34.56622314453125], \"score\": 0.9999979734420776, \"association_id\": 3, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"eah?2mg02O000000001O000000001O0000000000001O000000000000001O0000010O0000000000001O00000001O0001O0000000000000001O000001O00000000000000000000000000000000001O0000000000000001O000001O000000000000000001O000000000001O0000000000001O000001O0000000000000000000000001O000000000000010O0000000000000000000001O0000000001O00000001O01O000000000001O00000000001O0000000001O0001O00000000000000001O0000000000000000001O00000000000001O000000000001O01O00000000000000000000001O0000000000000000000000001O00000000000000000000001O0000001O00001O000001O000001O00000000001O0001O0001O000000001O0000000001O000001O00000000000000001O0000000000000O2OcmQ1\"}}, {\"image_id\": 28, \"category_id\": 1, \"bbox\": [139.16746520996094, 312.83935546875, 121.11329650878906, 142.44708251953125], \"score\": 0.9999929666519165, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": 
\"Ve[39eg04M1O1N10010O010O010000O1O1O1O2N2O1N4MeV2MZiM5I7M3M4L4M1O2oYOVOhd0l0W[OWOed0k0[[OWObd0k0\\\\[OWObd0l0\\\\[OUOad0V1V[OkNhd0Y1T[OiNjd0Z1T[OfNkd0]1S[OeNjd0]1U[OcNjd0^1V[OcNhd0_1W[OaNid0`1U[ObNid0[201O0O2O1M2O2N1O1_OQMl[OT3Rd0=O2O000100O1O1O1J6O1O100O2N1N3010O06J5K2O1RNnZOVOa0i0bd0JP\\\\O3Pd0JS\\\\O6mc0IU\\\\O6kc0HY\\\\O5hc0I[\\\\O6dc0J^\\\\O4cc0Ka\\\\OLfc01V2O1N2001O1O1P[OEWb0<f]OGZb09f]OE[b0=d]OB\\\\b0?c200N3L4Mid[b0\"}}, {\"image_id\": 28, \"category_id\": 1, \"bbox\": [519.7544555664062, 315.4275207519531, 37.4852294921875, 142.71603393554688], \"score\": 0.9999842643737793, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"R\\\\Y<e0Pg0d0UOi0fZOTNnc0]2a[OhMYd0R3M4L3O1O1N2N200O0O2O0O0GXOd\\\\OaMZc0_2n\\\\OWMWc0h2j\\\\OUMYc0k2f\\\\OWMYc0f2k\\\\OXMVc0h2i\\\\O\\\\MTc0b2P]OYMSc0_1W^O[Nma0\\\\1`200O1O12NO2mNaYOb0Rg0I8Hamm:\"}}, {\"image_id\": 28, \"category_id\": 2, \"bbox\": [474.9564208984375, 630.5291137695312, 538.8602294921875, 63.01824951171875], \"score\": 0.9999278783798218, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": 
\"gkg<1og0000000001O000000001O000000001O000000000000000000001O0000000000001O00000000000000001O000000001O001O001O000000001O00000000001O0000001O00001O000001O01O0000000001O00001O00001O01O01O000000001O000001O0001O000000001O01O000001O00000001O000001O000001O01O0001O0000000010O000000000000000010O0000001O00000000001O000001O00000001O0001O0001O000001O000001O00000001O0001O00000000001O01O000001O00000000000000000010O00000001O0000000000000000001O0001O00000001O000000000000001O0000000000000O1000001O0000000000001O00000000000000000000001O0000000000001O0000000000000001O000001O00000000010O0000000001O0001O0001O000001O01O0001O01O0001O00000010O000001O001O001O1O0000001O01O000001O000000000001O0001O000000000001O0001O00000000O2O00000000001O0000000000000000001O00000000001O00001O0000001O000000001O0000001O00001O001O00001O000001O0001O000000001O000000001O000000001O01O0000000000000000001O0001O000001O000000000000000000000000000001O0000000000000000001O000000001N101O3Kgb7\"}}, {\"image_id\": 28, \"category_id\": 1, \"bbox\": [812.86962890625, 288.38519287109375, 50.4732666015625, 138.72128295898438], \"score\": 0.9992570281028748, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"UjRc0?^g0m0SO4N4L4M2ZZO_Nhd0^2N2N3M10N13L3L3_O`002OO1N20000O01000O100O2N1001O00jLh[Od2Yd0XMn[Oe2hd0O1O10O0001O1\\\\NiZO9Ze0AQ[O8Te0CQ[O7Ue0EP[O4Te0IZ[ODnd09`1K7H[ej3\"}}, {\"image_id\": 28, \"category_id\": 1, \"bbox\": [494.9503479003906, 301.1673583984375, 187.09793090820312, 249.06292724609375], \"score\": 0.9961820244789124, \"association_id\": 2, \"segmentation\": {\"size\": [768, 1024], \"counts\": 
\"Whc;b0[g07K3M2O000O10000O010O00010O000100O010O100O10O0100O100O1000O010O100O001N2N2M2O2N1001O0001O00001N1N3O001O01O1O001O00000O10ULkNk@U1U?nNi@Q1X?QOe@o0Y?XOd@g0\\\\?A]@>a>mNW_Om0Q25i>POS_On0T1ROjNn0o`0TOP_OQ1n0VOQO:[a0@d^O_1D^N8`0G@39ka0K^^Oc1ZOcN8?M_O8Ina04Y^OY3\\\\OUM7]OSb07X^OR4CgKUb0;S^Oo3Wb0TLa]OU4[b0i0O00M3N2O100O1O1O11N101O00001O00O1N2O2M2N2O1000000kKS]OV3ob0eLU]O[3kb0aLZ]O^3fb0aL[]O_3eb0aL\\\\]O]3fb0bL[]O\\\\3fb0dL[]OY3gb0gLZ]OU3ib0kLX]Oo2nb0PMT]OCHi2^c0ZMc]O^2Td0L2O0:F3L3N1N1000UOW[O_Ngd0k0Q\\\\OTOoc0f0W\\\\OZOic0`0]\\\\O@bc0<c\\\\OD\\\\c0<c\\\\OF\\\\c09d\\\\OI\\\\c06c\\\\OK]c04c\\\\ON\\\\c01d\\\\O1[c0Od\\\\O3[c0Le\\\\O6[c0Hf\\\\O8[c0Ge\\\\O9\\\\c0Ee\\\\O;[c0Df\\\\O<[c0Ce\\\\O=\\\\c0Ae\\\\O??POl`0?f^Oa0>POn`0<e^Od0JUORb03U^Oh0E[OZb0GQ^On0A@^b0@R^OQ1^O@Wd0?g[OCYd0>e[OCZd0>f[OAZd0`0g[O]OZd0d0g[OXO[d0h0g[OPO^d0R1V12_YOmNSf0U1lYOmNSf0S1oYOjNRf0U1RZOgNoe06iYO63O`f0HfYO013[f0LYZONje02\\\\cf8\"}}, {\"image_id\": 28, \"category_id\": 2, \"bbox\": [354.4345397949219, 458.62994384765625, 46.220184326171875, 9.1669921875], \"score\": 0.9141924381256104, \"association_id\": 1, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"_fZ81mg03O1N10000001O0000001OO2O000000001N1O100000000001O1O1O1O1O001O0O2O000000O1O1O10O2O001O_Yd>\"}}, {\"image_id\": 28, \"category_id\": 2, \"bbox\": [986.66552734375, 444.00518798828125, 30.3409423828125, 6.814666748046875], \"score\": 0.8844165802001953, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"n]Ug01og01O001O0O1O1000000000000000000000000000000000O1000000000Tj4\"}}, {\"image_id\": 28, \"category_id\": 1, \"bbox\": [590.2638549804688, 343.2751159667969, 139.02203369140625, 223.43899536132812], \"score\": 0.8015046119689941, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": 
\"ba^>1ng02N2N1N2M4N1O1M3L4K5M3YLUOV@n0g?VOT@n0P?mNY^O:`2P1U?jNW^O9`2P1X?lNS^O6a2P1_?dNT^O;]2Q1V`0mNh_OU1Z`0iNc_OV1d`0bN`_O]1\\\\c0410[L^Nn@_1U?fNf@X1]?kN_@T1c?oNX@R1j?nNW@Q1i?oNW@P1j?POV@o0j?ROV@m0k?SOU@m0j?UOS@l0l?VOS@j0m?XOR@f0n?]Ol^O[OWOX1ma0^Oj^O]OXOT1na0Ag^O^O[OQ1ka0Eg^O\\\\O^Oo0ia0Hd^O]OCk0ga0K`^O^OJh0ca0M]^O@Oe0aa0N\\\\^O_O4e0Ya02_^O\\\\O8i0n`01f^OWO<[1X`0CX_OUO`0X1T`0HX_OSOc0V1V`0EW_OTOd0W1V`0EV_OSOd0X1V`0FU_OROe0X1W`0ET_OROf0Y1V`0DV_OROd0[1U`0^O]_OWO<\\\\1V`0`Nm^OFk0?04]OGj`0An^ON4Oo0g0E2IC_`0HR_OK6KP1m0\\\\O22@Z`0JT_OK5JR1o0VO18_OX`0KU_OK2LS1o0TOM?AV`0HS_OO1MR1n0TOKf0@R`0IR_O00NR1n0SOJl0]Oo?KQ_O2N0R1m0POJU1XOl?No^O20OQ1n0nNLY1UOY`01Q@h0cN0Tc0WO[^Ob0iN2ob0\\\\OY^O`0jN0Qc0_OU^Oa0kNNTe01oZOKSe05\\\\10000O101O0000001N3MWRg7\"}}, {\"image_id\": 28, \"category_id\": 1, \"bbox\": [700.1517333984375, 403.5250549316406, 297.74822998046875, 127.67965698242188], \"score\": 0.7340817451477051, \"association_id\": 3, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"Xe^b03lg03N1O1OO100O1O1N3NYRh01gmWO001N100001O0O1000am52[RJ4M2O1N101O00001O0000000O100000000001O0O2O1N2O1O1N2L\\\\hZ1OhWeN2M2O1N101O0O2O0001O0OS[OITb06k]OKVb04b]OLcM2kd02a]O5cb0F]]O<Se00O10000O100O1O1O1O10O01O100000O1O100O1O100O2N100O101N2O1Nim8J^RG1N100010N\\\\Rf0\"}}, {\"image_id\": 28, \"category_id\": 1, \"bbox\": [514.283203125, 303.4941711425781, 71.9569091796875, 215.07192993164062], \"score\": 0.710159182548523, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": 
\"mgR<7fg04M2O2O0O1O1O1O10000O1O1lLOX^O2ga00b\\\\O_Oi0a0eb01_\\\\OBj0=gb02\\\\\\\\OCl0<gb03Z\\\\OCn0;gb06U\\\\OBS19hb02Z\\\\OBn0=hb0N`\\\\OBg0a0ib0Hh\\\\OC?f0gb0Ba^O?]a0Ad^O`0Za0Ae^Oa0Ya0@d^Od0Wa0@i^Oa0c?mN\\\\_Oe0P1?]?VO^_O=T1>\\\\?XOa_O8R1a0[?ZOf_O0n0h0[?[Of_OLm0k0]?ZOb_O2l0d0c?^O[_O2o0a0f?^OZ_O4l0>m?XO[_O=c0=T`0QO\\\\_Oc0==]`0iNW_Om09:Ra0eNV^OU1d07Va0fNS^OU1f05Xa0fNR^OV1c05\\\\a0fNo]OV1b07`a0dNi]OX1g03aa0kN^]OV1R1N_a0h0b^OWO^a0k0]^OXOca0l0U^OXOka0k0Q^OVOPb0l0m]OTOTa0XNS_Og2FQOYb0iN[]OB5>b1d0Ra0oNh@l0Y?QOk@l0Q>ROW_O2h2j0R>WOU_ONi2h0Zc0N101O1G9NO201O1O2N1002M6J2O2NWiX:\"}}, {\"image_id\": 28, \"category_id\": 1, \"bbox\": [713.2286987304688, 404.7262268066406, 143.68194580078125, 78.04977416992188], \"score\": 0.45674365758895874, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"QUg`09bg09J3O2N1O1N2O6lXORO3Nbf0S1_YOmNaf0P19O1N1100000000PYOTOlf0P100000OaYOoNRf0o0oYOQOQf0o0nYOSOQf0m0oYOSOENYf0o0QZOSOFNYf0P1PZOSOEN[f0P1oYOVOQf0i0oYOWORf0h0nYOXORf0h0nYOXORf0h0nYOXORf0g0PZOROF1[f0m0oYOROG0[f0m0SZOSOme0m0SZOSOme0m0SZOSOme0m0c01O01O0O2O1H^YO^OTf0`0mYOBSf0<nYODRf0<nYOCTf0<mYOCSf0=nYOBSf0<i000010O01O000000001O0000000000010O001O0010N10O1000O1001O0001O0O010O10O1000000O1000000000O10001O0001O000001O0O101O1OO2O0O2O3L2Mgjd4\"}}, {\"image_id\": 28, \"category_id\": 1, \"bbox\": [396.4438781738281, 295.8898620605469, 29.361175537109375, 130.76101684570312], \"score\": 0.4088137149810791, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"Uc^93k1NUd05c[O3Qd0m0V[OWOPd0Dl[O[1KXOmc0^Oh[O23j2Ud0SMg[O6Oj2Ud0f03MN20O10OM2TOl00M31O2N2bMQ[Oa0Yf0]OlYO9\\\\f0FfYO7V2HXa03d\\\\O1S2NZa01b_ONT_Q>\"}}, {\"image_id\": 28, \"category_id\": 1, \"bbox\": [506.6829528808594, 364.3025207519531, 506.2359924316406, 190.04507446289062], \"score\": 0.3247929513454437, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": 
\"bU[=3mg01OO2Nl`o02S_PO5J2O1N101N100O101O0O101O0_N_Oc[Oa0\\\\d0Ba[O`0]d0BT[OK_Oc0\\\\e0DS[OJAc0Ze0ET[OI@d0[e0CT[OJAd0Ye0DU[OIAd0Ye0DU[OHAf0Xe0DU[OHBe0Ve0FW[OEBg0Re0IZ[OADg0id00c[OZOCg0hd01Y[ORO16Mh0id00X[OTO05Oh0hd0OZ[OSOO6Oh0hd0N[[OTON6Oh0id0MZ[OUON6Oi0id0J\\\\[OVOL7Oi0md0FX[OYOM8Nj0nd0nNR[O>5BM8Nj0Ue0[OQ[OCL8Nk0Ue0YOQ[ODL8Nl0Te0XOS[OCK:Mm0Te0UOT[ODK:Nn0Qe0SO_[OO@P1Pe0PO`[O0@Q1od0oNa[O0@T1md0kNc[O1@X1jd0fNf[O2AY1hd0dNg[O3AKMV1ld0iNg[O6AG0Z1kd0eNd[O:Lo0Xe0oNkZOn0We0POkZOm0We0ROV[O8Ue0GnZO4Ue0JoZO1Se0Na1OYb]62d]bI2N2N1O2N100O2OO2O0O101N3N1N2Omo`1\"}}, {\"image_id\": 28, \"category_id\": 1, \"bbox\": [963.6615600585938, 382.9342041015625, 32.76361083984375, 76.56878662109375], \"score\": 0.25958335399627686, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"gTdf0;Rg0LUYOe0]f0a0M3O4K3N4L3M2N1O2N1OO1O1O100N4N4L3L5L2M4M1O2M3N2O0O3L7UOc[e0\"}}, {\"image_id\": 28, \"category_id\": 1, \"bbox\": [260.41650390625, 319.8511962890625, 484.48828125, 241.242919921875], \"score\": 0.2024156004190445, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": 
\"kXd:1mg04M3M2N2N2N2O0O100O10000O10001O0O100000000O100000O1O010O1O1O010O10O010O0010O01000O010O100O0100O010O100O00100O1O100O01000O10O10O010O01O001O01O000010O01000O100O10O01O1O1000O010000O0100000O100O2O0O2O1N2IT_7MPaH6L4N2N2M3N2N2M2L5K5D;M4M2O1O1N3OO10O100000O01000000A[NcZOe1\\\\e0\\\\NdZOd1\\\\e0]NcZOb1]e0`NbZO`1^e0`NbZO`1^e0`NbZO`1^e0aNaZO^1_e0cN`ZO^1ae0aN_ZO_1ae0aN_ZO^1be0bN^ZO^1be0bN^ZO^1be0cN]ZO]1ce0cN]ZO\\\\1de0dN[ZO]1ee0cN[ZO]1ee0cN[ZO]1ee0dNZZO\\\\1fe0dNYZO]1ge0dNXZO[1ie0eNVZO\\\\1je0=00000000O2O000O101CiMoZOW2Pe0kMnZOW2Qe0kMmZOU2Re0mMmZOS2Se0nMkZOT2Se0mMmZOS2Se0nMlZOS2Re0oMlZOR2Te0>N101OO1RNoZOm0Qe0ROR[Ol0md0UOV[Oh0jd0WOY[Og0gd0XOZ[Og0gd0XO[[Og0ed0WO][Oh0cd0SOd[Ol0\\\\d0oNi[OP1Wd0QOi[Oo0Wd0QOj[Om0Vd0SOk[Ol0Ud0UOl[Oj0Sd0WOm[O:SO_OQe07m[O8UO_Omd09o[O7VO^Old0;n[O6XO^Okd0<l[O6ZO\\\\Okd0>k[O6ZO\\\\Okd0>k[O5\\\\O[Ojd0a0i[O4^OZOjd0a0g[O5id0KW[O5id0KW[O4jd0LV[O4kd0KU[O4ld0LT[O4ld0BXZO0l0<nd0AZZO3g08Se0CYZO4d06Ve0DXZO6b05ce0K]ZO5de0J\\\\ZO5fe0JZZO5kf0L3L`RY8\"}}, {\"image_id\": 28, \"category_id\": 2, \"bbox\": [297.8782043457031, 309.3108825683594, 606.467529296875, 272.6083068847656], \"score\": 0.15750867128372192, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"PPPh0\"}}, {\"image_id\": 28, \"category_id\": 2, \"bbox\": [315.22003173828125, 401.9051208496094, 27.286041259765625, 3.991119384765625], \"score\": 0.11443155258893967, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"ddb71og00O1000001O00000000000000000001O000O]So?\"}}, {\"image_id\": 28, \"category_id\": 1, \"bbox\": [136.62989807128906, 309.38037109375, 488.5186767578125, 202.968017578125], \"score\": 0.10365712642669678, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": 
\"[o^44gg06O1N2N1O2O0O100O010O0000000O01OO2M4N10001O00100O10O100O100O1O100O001M3N2O1N20GFlXO9Tg0KhXO6Xg0KgXO4Zg0JiXO5Xg0HkXO6Vg0IkXO7`g0N3M2NX^1OhaN3M3N2M3O1cXOERg0=jXOGTg0d0O1N10000000000O1000000O100O100O1O100001O2M5BjXOJXg03lXOIVg05nXOGUg05>LXc`6Ok\\\\_I000O10000O1000000O11O1O1OTXf:\"}}, {\"image_id\": 28, \"category_id\": 1, \"bbox\": [725.7587890625, 413.55596923828125, 219.453125, 86.26705932617188], \"score\": 0.06348564475774765, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"[eUb01og00O2O001O004K2O1O1N1O2N1O1O2N1O1hXO\\\\OSg0f0kXO[OVg0c040000O100O2O0O2O000O^ZOCdc0<[\\\\OEec0:[\\\\OGec09Z\\\\OHfc07[\\\\OIfc05Z\\\\OLgc02Z\\\\ONgc00Z\\\\O0gc0M[\\\\O3he01000000001O1O00000000000000001O0000000000O1000000000000000000001OaMOo\\\\O1Pc02n\\\\OMSc06Z21O1O1O0O10001O00O100000000000000000001O00010O000O2O0O100O100O2N]XV1LfgiN4M2O0000001O000001O3M2M`bk1\"}}, {\"image_id\": 28, \"category_id\": 2, \"bbox\": [488.8538513183594, 624.9339599609375, 341.6596984863281, 47.80828857421875], \"score\": 0.05967755243182182, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"b[Q<1ng02O0000000000001O00000000001O0000000000001O000000001O0000000000001O000000001O0000000000001O00000000000000001O0000000001O01O00000001O01O0001O00010O00010O000000010O0000010O01O00000010O00000000000001O0001O000000000001O0000000001O000000010O0000001O000000010O0000001O00001O01O0000000001O00000001O0001O00000000010O00000001O000000000010O000000000001O0000001O000001O000001O0000001O01O0000000001O0001O01O00000000000000010O00000001O01O00000000000000010O000000000000001O0000000010O00000000000000000001O00000O10000000001O00000000000000000000000000000000001O00000000000O100000000000001O0000000000001O00001O0000000010O0000000000000000001O001N2O2Lck`4\"}}, {\"image_id\": 29, \"category_id\": 1, \"bbox\": [369.8441467285156, 218.43994140625, 232.20608520507812, 190.65447998046875], \"score\": 1.0, \"association_id\": 0, \"segmentation\": {\"size\": [682, 1024], 
\"counts\": \"gSj77gd0g0]O=^O`0J5K5N1N1oNjMm^OX2Pa0lMm^OV2o`0nMo^OS2Pa0PNl^OR2Sa0RNg^OR2Wa0QNe^OQ2Za0QNc^OR2\\\\a0oM_^OU2`a0k0O1O1O1O2N100O100O1O1O001N2O100O1O1O1N2N1N3O1O1O100O1O100N2O1O100O1O100O1O1O1N2O1O1_OTKc@m4\\\\?]KZ@d4d?b0N2O001O1000000O10000000000000000000000000000000000001O0000001O001O1O1O1O1O2N1O001O001O001O000000001O000000001OO10000000000000000000000O1000000000000O10000O10000O1N2O1N2N2O1O1000000O100000000000001O0000000000000000000000000000000000jJe@]4[?_Kj@`4W?]Km@a4S?]KPAb4P?\\\\KSAc4m>[KVAd4k>ZKWAe4g?01N10001O0O10001N1O2N2N4L5L3L4M5J5K3N4J3N2N2O0O2O1N3M4M2M4L3M3M2O2M3M8IU1jN5J5J5L4Jcfj8\"}}, {\"image_id\": 29, \"category_id\": 1, \"bbox\": [595.915283203125, 140.01724243164062, 228.62298583984375, 156.02041625976562], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [682, 1024], \"counts\": \"h``<5Pe0?T[OBPd0U1YOe0J5M2N2N101N2L4[OgMS^O]2la0b0O0O100000000O100000000O010OO1000001O1O0010O1O0M4N101O00100N1N3L4O1O010O101N1N2N3M2O100O2O0O100O1O1O101N10000O100000000000O100000000000O10000000000000000000000O1000000O10000O10000O101O000000000O1000000000000000000000000000000000000000001O0000001O00001O001O0000001O000000001O000000001O0000001O001O000001O000O100O10001N101N1O2N1M3M4N1O101N1O101M3O0O2O1O001O101N1O100O10O01O3N3L2N100O1O100O2O1N2O1N100O1O1O2Nb0^O8H3M3M3M3M6I6Ii0eNgYT4\"}}, {\"image_id\": 29, \"category_id\": 1, \"bbox\": [838.4102783203125, 134.99252319335938, 182.17401123046875, 121.1015625], \"score\": 1.0, \"association_id\": 0, \"segmentation\": {\"size\": [682, 1024], \"counts\": \"ld_a0;ld04M4G8K7I7L3N2M3N1N4L9H4K2O1N2O1N2O1N2N100O100O1O1O100O101N100O1O100O1O001N2O00100O1L4K5N200O1O1O10O0100O100O100O1O100000000O10000000O100000000000000O100O10000O1000000O1000000O100O100O10O0100O010O10O010000O10000000O101O0000001O001O0001OO2O0000001O0000001O00001O001O001O1O001O010O00001O001O00O100000001N100O1N3M2N2O1O2N1O100O10001O001O1N2O001O1O1O1O2N1O1N3M4K?QNWl1\"}}, {\"image_id\": 29, \"category_id\": 1, \"bbox\": [111.7788314819336, 
387.5503234863281, 309.98370361328125, 250.48342895507812], \"score\": 1.0, \"association_id\": 2, \"segmentation\": {\"size\": [682, 1024], \"counts\": \"UR[2X3na07L2N3M2O1N2N101N1O2M2O2N1O101N100O1O1O1O1N2O1O1O1O100O1O2N1N2L5K4M4M2O2M3N2N2N2N2M3M4L<E4L3M3N1N2O1O001O1O0O101O00001N10000000000000000001O000000000000000000001O000O100O100O1O1O1O1O1O100O10000O10000O10000O100O00100O100O10O0100000O010O1O100O100O10000O10000000000O100000000000000001O0000001O00001O001O001O1O001O001O00001O0000000000000000000000O1000000O100O100O100O10000001O0000000000001O001O00001O0000001O000000001O0000001O001O00001O1O1O10O01O1O001O001O010O1O103L5L1N10000O10O0100O2N4M4K5L2M100O10O010O10O010O100O010O1O000001O000000000000000001O1N101O1O1VL]_Ok2d`0jLi_OS3X`0cLT@Z3Pa0N1O1O1N2O1O2M6K=B6K3J5L?@7J5J6SOS\\\\O2Vd0EdQi<\"}}, {\"image_id\": 29, \"category_id\": 2, \"bbox\": [617.9659423828125, 275.8699951171875, 197.50103759765625, 22.601287841796875], \"score\": 0.9999980926513672, \"association_id\": 1, \"segmentation\": {\"size\": [682, 1024], \"counts\": \"mVo<1Xe02N100O100O100O1N2O2N10000O10000O1000000O1000000000000000000O10000000000000O10000000000000010O0O1000000000000000000000000000000000000000000000000000O100000000000O10000000001O00000000000000000000000000000000000000001O000000000O2O1O002N00001O1O1O1N1000\\\\dd06Z[[OIS[O9md02000000000O100000000O101N101O000O2O001Ncbc4\"}}, {\"image_id\": 29, \"category_id\": 2, \"bbox\": [187.47715759277344, 606.3401489257812, 196.15919494628906, 35.3743896484375], \"score\": 0.9986854195594788, \"association_id\": 2, \"segmentation\": {\"size\": [682, 1024], \"counts\": \"jTY42Ve03K4N2O2N1O1000000O1000000O1000001O000000000000000000001O000000000000000000000000000000000000001O0000000001O0000000000000000000000000000000001O00000000000000000001O000000000000000000O100000000000000000000000000000000000000000000O10000000000000000000000000000000000000000001O000000001O001O1O002N003BS[O105[cm=\"}}, {\"image_id\": 29, \"category_id\": 1, \"bbox\": [0.0, 60.16845703125, 
142.07029724121094, 76.78974914550781], \"score\": 0.9940817952156067, \"association_id\": 0, \"segmentation\": {\"size\": [682, 1024], \"counts\": \"c\\\\136O8:gc0Im[Oj0Qd0=K1N2O1O2N1O0O2O1O001O1O1O00001O0000001O00001O000000001O000000001O00001O0000000000001O0000000000O100000000000000000000000000000000000000000000000000O10000O10000O100O100O2N1N2N2O1O2O0O1O2M2O100O1O1O2N1O2O0O1O1O10001O000000000000000001O001O001O00000001O0O2N1N3N1O2K6HTf^b0\"}}, {\"image_id\": 29, \"category_id\": 2, \"bbox\": [393.0168762207031, 370.8957214355469, 189.55587768554688, 34.717010498046875], \"score\": 0.3740839958190918, \"association_id\": 0, \"segmentation\": {\"size\": [682, 1024], \"counts\": \"YTZ82Xe0000000000O100000001O000000000000000000000000000000000000000000000000000O10000000000000000000000000000000000000000O100000000000000000000000000000000000000000000000000000000001O000000000000000000001O0000000000000000000000000000000O100000000000000000000000000000000000000O2O00cnS:\"}}, {\"image_id\": 29, \"category_id\": 2, \"bbox\": [147.95664978027344, 580.8339233398438, 252.98011779785156, 56.20391845703125], \"score\": 0.12025817483663559, \"association_id\": 0, \"segmentation\": {\"size\": [682, 1024], \"counts\": \"dT[46Re02O2M200O100O1000001O0O10000000000O2O000000000000000000000000000000000000000000000000000000000000000000000000000000000001O00000000000000001O0000000000000000000000000000000000000000000000000000O10000000000000000000000O10000000000000000000000000000000000000000000000001O00001O001O001O1O1O001O3L6KnXl=\"}}, {\"image_id\": 30, \"category_id\": 1, \"bbox\": [302.5799560546875, 240.3757781982422, 213.0223388671875, 96.01252746582031], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [450, 736], \"counts\": 
\"VVW43i=8K7F:J3N2N2O1N1N3M4K7J5L3M2N2N1N4L3M2O2O0O2O0O101N100O2O000000O10O1000O100O010O1O0010000O10000000000000O1000000000000000000000000000000000001O00000000000000001O0000001O0000001O0000001O001O1O001O001O1O2N1O1O1O0001O01O000000001OO100O10000O2O00001N100O101N10000O2O0O2O1O0O10001O1N2O001O001O0O2O2N2N1O0O2O001O2N1O1N2O1O001O001O001O1O00001O00000001O01O0000001O001O01OO101O0O101O000O2O000O2M2N2O2O000O2N2N3M2N1O2O0O2O1N6Fi\\\\P3\"}}, {\"image_id\": 30, \"category_id\": 1, \"bbox\": [429.4906921386719, 197.42239379882812, 56.499969482421875, 31.706924438476562], \"score\": 1.0, \"association_id\": 0, \"segmentation\": {\"size\": [450, 736], \"counts\": \"hcm58e=6K5N1N2O100O1O100N200O1O1000O010000O100000000000000000000000000000000000000000000000O101O00000O2O001N1O2H9J]e]3\"}}, {\"image_id\": 30, \"category_id\": 2, \"bbox\": [263.6672058105469, 317.7193298339844, 230.60720825195312, 21.65740966796875], \"score\": 0.9999998807907104, \"association_id\": 1, \"segmentation\": {\"size\": [450, 736], \"counts\": \"kVe31Q>1O0O2O000O2O00000000000O2O0000000000000O10001O000000000O10001O0O100000000O2O00000000000O101O00000000000001O00001O001O00001O1O1O1O0000000001N10000000000O100O1N20O0100000000O10000000000000000000000000000001O00000001O0000000001O000000000000000010O000000000001O0000001O0000001O3M1O0000001O001O3M1O001Oej4OZUK2O10000O10000O1O1O1O1000000000O010000000000000000O1000O100000000000O10O1000000000000000O01000000000001O00000O100000O1000O10001O1O1NoPZ3\"}}, {\"image_id\": 31, \"category_id\": 1, \"bbox\": [743.8051147460938, 107.19990539550781, 263.59716796875, 254.2284393310547], \"score\": 0.9999998807907104, \"association_id\": 1, \"segmentation\": {\"size\": [502, 1024], \"counts\": 
\"Xn\\\\;U1Y>a0A;H5L3M4L4L5J;E<E4M2N2N2O1N2N2M3M3N2M3N2N1O2N1O2M4M2M4M2M3N2N1O2N2N2N3M3L3N3M2N2N2O1N2N101N1O2N1O2N101N101N2O1N2O1O001N100O1O2N1O1N200O1O101N2O0O2O2M2O1N2N2N100O2N10000O2O00001O1O1N2O001O1O00001O0000000000000000000O100O100O100O1O1O1000000O10000000000O10000000000000000000000000000000000001O00000000000000001O0000001O00001N101O0O100N3N1O1O1O1O1O2O0O1O1O1O2N1O1O100O2O0O2O0O2N1O2N2N2N101O001O000O2O000000010O0000010O00010O001N3N1O1N2O1O1O1N2O1N2O3L4L4L2M4M2N1O2O1N3L3M4J5K5M3N1O2N2N2N2N2M3M2N3N2N1O2N1O2M3L5J6K5M2M3N3M3[N\\\\C3k<]OoCG\\\\>ElR8\"}}, {\"image_id\": 31, \"category_id\": 2, \"bbox\": [40.016971588134766, 98.44770050048828, 781.0533447265625, 374.16094970703125], \"score\": 0.9999997615814209, \"association_id\": 1, \"segmentation\": {\"size\": [502, 1024], \"counts\": \"Rkf07\\\\?6K4K6J5dA_OZ=b1K5L4N101N2N2O1N2N2O1N3M2N4L5L2M4L3M1O2N2N101N1O2O0O101N100O2O0O101N100O2O0O2N101N1O2N2N1O2O1N1O2O0O2N101N100O2O000O2O0O100O2N100O2N1O2N101N1O2N1O2N2N2N2N1O2N2N101N1O101N10001N101N100O2O0O2N2N101N2M3N2O1N1O2N2O0O2O0O101N101O0O101O0O10001N100O2N101N1O2N1O2N2N1O2N1O2N1O2O0O1O2N100O101N10000O10000O1000000O1000000O2O0000000O10000000000O100000000O1000000O100O10000O100O1O100O1O100O10000O10000O10000O1000000000000000000O10000000000000000O10000000000000000O10000O10000O10000O100O1O100O100O10000O100O1000000O100000000O10000000000O1000000O100000000O10000O1000000O100O10000O100O10000O10000O1000000O1000000O1000001O0O1000000000000O1000000000000000000O100000000000000O1000000000000O1000000000000000000000000000000000O100000000000000000000000001O00000000000000000O101O0000000000000000000000001O0000000O10000000000O1000000O1000000O10000O10000O100O100O1000000O1000000O100000000O1000000000000O100000000000000O1000O100000000O10O10O10000O10O10O100O100O010O1000O010000O01000000O10O1000O1000000O1000O1000O10000000000O10O1000000000O1000000O100000000O101O000O1000000O100000000O1000000O10000O100O101N100O1O10000O101N10000O1000001N100000000O2O00001N101O0O2O001N101N100O2N1O1O2O0O1O101N10000O2O0O10
00001N10000O2O000O2O0O2O0O2N101N2N1cJ\\\\Gh3e8VL_Gf3c8XLaGe3_8ZLeGb3]8\\\\LfGb3Z8]LSHV3o7hLWHS3i7lLYHR3h7nLYHQ3h7mLZHR3f7mL\\\\HR3d7nL]HP3e7nL]HQ3c7nL_HQ3a7oL_HQ3b7mL`HQ3a7nL`HR3a7lLaHS3_7lLbHS3`7kLbHT3^7kLcHT3_7kLbHT3^7kLdHS3^7kLcHU3^7iLdHU3]7jLdHV3_7fLcHX3`7fL`HY3d7cL^H[3e7bL\\\\H^3e7`L]H^3f7_L[H`3o9O0O2O0O2O000O2O1O001N2O001O1O1N101O001N10001O0O2O000O2O000O2O000O2O0O101N100O2O1N101N2O0O3N2M2O2M3M3N2M3N1N3M2O1N2N2N2O1N2O1N2O1N2O1N2O1N2O1N2O2M2N3M4L6J5K`Un3\"}}, {\"image_id\": 31, \"category_id\": 1, \"bbox\": [307.9883117675781, 112.18212127685547, 473.1570129394531, 296.85247802734375], \"score\": 0.0711296871304512, \"association_id\": 0, \"segmentation\": {\"size\": [502, 1024], \"counts\": \"Tkn94n>NbA6W>2dA2V>4gANW>5fAMX>6dANZ>h0N;F2N2M3N2M3N2M2O2N1N2O2N1N2O1O001O00001O0O101O00001O00001O001O00000001O000000000O2O0000000000001O00000010O000001O00001O00001O001O01O010O10O0100O1O101N1O101N1O100O1O1O1O1O1O2N1O100O2N1O2N1O2N1O2N1O010O0001O001O0010O02N1O1O1O1O2N100O1N3M3M3L4@Y1^NSB@]Zh3\"}}, {\"image_id\": 32, \"category_id\": 1, \"bbox\": [731.865234375, 66.7221450805664, 270.6826171875, 741.2510375976562], \"score\": 1.0, \"association_id\": 2, \"segmentation\": {\"size\": [858, 1024], \"counts\": 
\"lo]c0d1Si0>C8H7I6K5K7I8G;E9G6I5K5L4L3N2N2N2N2N2N2a^OeK\\\\<]4_CPLX<Q4eCSLX<P4dCTLY<m3fCULW<n3fCULW<m3hCULT<o3iCSLS<Q4kCRLo;d1_@RO^3]OP<T1WA[Oe2EQ<h0hA_OT2LR<b0RB@h11T<=[B_O_17T<8bB]O[1<R<3kB\\\\OT1b0P<MVC[Ok0i0n;HbCWOa0R1k;FlCPO;[1h;DSDjN7c1c;D\\\\DcN3i1_;DcD]N0P2Z;EjDTN0W2T;FoDoMN\\\\2Q;GREkML`2Q;FfH<X7FeH=Z7CcHa0\\\\7@_Hf0_7[O\\\\Hj0c7WOZHl0f7TOXHn0g7ROXHP1g7ROVHQ1h7POVHS1g7oN`E[NRNl2[<kNZEQObM_2P=bNTE^4h:eKoDf4k:]KQEj4i:YKTEm4g:VKVEn4f:TKXEP5e:SKWEP5g:SKTEP5k:TKoDP5o:UKhDQ5V;Z501N1O2O0O2aHPBR2Q>gMWBV2k=cM]B[2d=\\\\MfBa2\\\\=XMkBf2W=VMmBi2U=SMnBk2U=QMnBm2V=nLmBP3V=mLkBQ3Y=kLjBR3Y=kLiBS3Z=jLhBS3\\\\=iLgBU3\\\\=gLgBW3\\\\=cLiB[3Z=aLiB^3Y=^LjBa3W=]LkBb3W=\\\\LjBc3W=[LkBe3V=XLlBh3U=ULmBk3S=QLQCo3P=lKTCS4n<gKWCY4i<dKZC\\\\4g<`K\\\\C`4e<]K]Cb4f<ZK\\\\Cf4f<VK\\\\Ci4h<RKYCn4S=cJQC\\\\5Wa0O1N2O1N1O2M2N1N2M2M1N]Of0F;H9M3O1O0O1N1O00210O11O01010000O1000O10O10000O1N2N2M3M3L4M3N2N2N2O1N2N2M3M3L4L4M3N2N2N2M3L4J6J6L4M3M3N2N2N2M3K5M3L4N2N3M2N3M2M5K4H8I8UBgEl:`:bDSFV;T:PDgFi;Y<K4L5L3L5J6H9G9H8J5L4L3M3dJX_Ob0k`0UOe_Oa0^`0TOX@<l?[Oh@2a?Ij@Cc?;d@VOg?g0`@POd?n0a@lNc?R1b@hN`?Y1c@`N`?a1e@VN^?l1g@iM]?Y2aAaLe>a3Y43M3M2N2N2N2N3N1N4L4L4L4L2N3N01N101O1N3N3M1O2M101N3N2M4M4K7J4L3L3N2M3L6K9F=C=C:F9F8I7I6G:EPic0\"}}, {\"image_id\": 32, \"category_id\": 1, \"bbox\": [22.70050811767578, 133.49012756347656, 197.07052612304688, 541.7015991210938], \"score\": 1.0, \"association_id\": 3, \"segmentation\": {\"size\": [858, 1024], \"counts\": 
\"[Rf0=[j07I5J5K3M4L3N3M3M2N3N1N3M2O1M3N2M3N2O1N2O0010O0000O10O1OO4L3N2M4M2fZOnMka0S2P^O[Nea0h1U^O`Nga0e1Q^OcNia0e1k]OfNma0e1c]OlNQb0l5RO=C6K3M4M4L4K5L2WCcFP8`9lGdFR8^9jGfFT8\\\\9iGeFW8]9eGeF[8_9`GcF_8a9[GaFe8a9WGaFi8a9RGbFn8`9mFdFR9]9kFeFT9]9hFfFX9\\\\9dFfF\\\\9\\\\9`FfF`9\\\\9\\\\FfFd9^9UFeFk9]9QFeFo9]9kEgFU:\\\\9cEiF\\\\:[9YEmFg:W9_D_Ga;m;000O001O0O1N3K4N2O0O1N3M2O1O2O10O0O1cNVCjDk<V;Y14N101O2O1M3M3K5M3N2N2N2N2M3K5K5M3N2N2M3J6D<@`0H8L4N2M3M3J6L4M3N2O1O1O1O1O1O1O1N2M3L4L4K5K5M3N2M3L4L4J6M4L3N3M4K4K6J9PHW^Oa6Vb0oHS^Ol6Wb0fHQ^OV7kb0M4I6I7I7]Mn[OYNWd0d1P\\\\OTNUd0g1S\\\\OSNPd0h1Y\\\\OQNic0j1Q]O^MSc0]2^]OSMfb0h2h2L5K5L3N3L3N3M2N3M3N5K:E=C`0WO_Vbe0\"}}, {\"image_id\": 32, \"category_id\": 2, \"bbox\": [79.25618743896484, 661.0457763671875, 242.35296630859375, 80.0714111328125], \"score\": 0.9999998807907104, \"association_id\": 3, \"segmentation\": {\"size\": [858, 1024], \"counts\": \"kgS22gj02N2O0O100O100O100O1000000O10O10000O10O1000O1000O0100O10O10O1000O010O010O01000O10O0100O01000O1VOLnVO4Qi0NnVO1Si01jVO0Ui01kVOOTi03kVOLVi04jVOLUi06iVOKWi05iVOJXi06hVOJWi08hVOHXi08gVOIYi07gVOIXi09gVOGYi09fVOHZi08fVOG[i09eVOGZi0;dVOF\\\\i0:dVOF\\\\i0:dVOF\\\\i0:cVOF^i0;`VOF`i0;^VOFbi0;]VOEci0h01000000000O0100000O01000000O100000O010000O1000000O10O1000O100O00100O100O1000000O010O10000O1000000O010O1000O10O10000000000O10000O10O010000O100O1000000O1000O010000O10O10O10000O10O10O10000O10O1000O10O0100O010O10000O01000O01000O10O010000O010O100O10O1000000000000000000000001O0000001O0000001O000000001O01O0O1000001O0O2O000O2N1O6JoWbb0\"}}, {\"image_id\": 32, \"category_id\": 2, \"bbox\": [432.05828857421875, 709.349609375, 216.038330078125, 80.48583984375], \"score\": 0.9999997615814209, \"association_id\": 1, \"segmentation\": {\"size\": [858, 1024], \"counts\": 
\"YVR<1hj01N2M3L5N1O10O001OGEoUO`0gi0>000010^OROXWOm0hh0TOWWOl0jh0TOVWOk0kh0UOTWOj0nh0VOQWOj0Pi0VOPWOi0Qi0WOnVOj0Ri0WOmVOi0Ri0XOmVOh0Ti0XOlVOh0Ti0YOkVOf0Vi0ZOiVOf0Wi0\\\\OhVOd0Xi0\\\\OgVOd0Zi0]OdVOd0\\\\i0\\\\OdVOc0\\\\i0_ObVOb0^i0@`VO?ai0<1O1O010O100O0100O0100000000000O100000000O100000000O10000000000000000000O10000O10000O0100000O1000O0100000O01000O100O010000O010O1000O10O10O0100O01000O100O1000000O1000O0100O10O10O10000O100O1O100O100O10000O100O100O1O1O100O100O1O10YVO]OWi0c0gVO_OYi0a0fVOAYi0?fVOCYi0=gVODXi0<gVOEYi0<fVOD[i0;dVOF\\\\i0:eVOE\\\\i0:dVOE]i0;dVOC^i0<cVOB_i0=a0N1O0010O010000O01O01O010O00010O00001O000O2O0O2N101N2N\\\\Pl9\"}}, {\"image_id\": 32, \"category_id\": 2, \"bbox\": [845.2348022460938, 732.923583984375, 170.66900634765625, 73.1923828125], \"score\": 0.9999997615814209, \"association_id\": 2, \"segmentation\": {\"size\": [858, 1024], \"counts\": \"dnXf0131`j06N2O1N3N0O2AC^VO?`i0B`VOa0]i0@aVOb0^i0^ObVOb0]i0_ObVOc0]i0^ObVOb0^i0^O`VOi0[i0>M1N2O001O1O0O2O00000000O100000O1000000O10O10000O1000000O10O1000O10000O1000000O1000O10O10O1000O100000000O010000O0100000O1000O10O10000O0100000O10O100000O100000O10O10O100000000O1000000O100O10000O100O101O0O010O100O101N100O1O1O1O101N1N2N2O2O001N1O2N100O101ImUODTj0;nUOCTj0;600O100010O000000001O3M2N00000001O00001O0N3IXj6\"}}, {\"image_id\": 32, \"category_id\": 1, \"bbox\": [357.9371643066406, 130.3109130859375, 236.65890502929688, 645.7599487304688], \"score\": 0.9999997615814209, \"association_id\": 1, \"segmentation\": {\"size\": [858, 1024], \"counts\": 
\"[Y\\\\9R1Ui0X1QOc0B9I6L101N1O2N101N1O2ON2N101N1O2N1O1O2N1O1O1O2N1N2O1N2O02M3N3M2N3M2N3M2N5L5I9F>oZOQM^a0e3S]OhMYb0g5D:F9H6N2N2N4L7IX2gM7J4K4K4L5K6J8G:G5K4L3M3N1O3M3M4^EmAU8e>eFQBY9j?N2N1N1O1O2O0O1O1O100O1O1N2N2N2dM__O\\\\Jd`0b5f_OmIc`0P6e_ObId`0]6m1O1000000O10aJTJhB3n0i5T<gJTB3b1V5V<UKeA3P2h4Y<YMbCh2a<YM[Cg2h<[MRCf2Q=\\\\MjBd2Y=^MaBc2a=`MYB`2k=dMmA]2T>gMfAZ2[>jM_AW2c>kMWAW2j>lMPAV2Q?i41O1O1O1O1O1O1O001O1O1O001O2NN200O1O1O1O100O100O100O100O100O100N3M2M3K5K5M3M3N2O1M4L3F:@a0^D_Be9e=VFdBc9_=YFhBb9[=YFkBc9X=XFnBd9V=UFRCf9S=kE\\\\Co9m>C<YOf0G9I7J4M4J6I6H9C<C>H8K4M4N1O2N2N1O2M3N2N2O1N2O1O1O1O1O1N201N2O001O1O10O102OoMR[OPNQe0l1g[O^M]d0\\\\2Q\\\\O^Mlc0]2c\\\\OVM`c0e2b\\\\OZMcc0`2^\\\\OaMec0Z2\\\\\\\\OgMjc0P2X\\\\OQNmc0g1U\\\\O[NQd0Z1P\\\\OjNSd0n0o[OUOTd0c0n[OASd08o[OMQd0IU\\\\O<jc0VO_\\\\On0df04M2N01O1O1O1N2O1O2N2N2L4M2MUb^;\"}}, {\"image_id\": 32, \"category_id\": 1, \"bbox\": [555.94580078125, 166.71475219726562, 72.059326171875, 317.74005126953125], \"score\": 0.9162294268608093, \"association_id\": 4, \"segmentation\": {\"size\": [858, 1024], \"counts\": \"[Sc>3dj06WVOLch0:VWOMch0d0lVODjh0]1D:K5L4M6[IcMeD]2X;hMfDY2V;jMgDZ2U;hMgD]2W;dMhD]2V;eMoDW2o:kMPET2P;mMQER2n:nMm@AT1DjMj2U`0SNh@Fn0GRN`2X`0VNa@HS1CRNa2Z`0UN^@JT1ARNb2\\\\`0SN\\\\@NP1DPNa2c`0nMZ@11mNCV4R`0nLV@20QOCQ4W`0jLU@71QO]OQ4]`0gL`_Ol0f0^NUOU4e`0`L`_On0e0Y3k?jKe_Oh0=`3n?hKi_Oe05f3R`0fKc_Og0?a3n?hK]_OEG;R1V4j?jK]_OCJ:Q1@mNU4k`0]Lc_O_OE;R1^4f?gKh_O[OB;S1d4c?fKh_O[OC5X1i4]?hKf_OZOF0]1m4W?iKf_OZOFN_1o4U?gKh_O^OALa1Q5W?bKi_O1P1_4W?`Kg_O3S1\\\\4W?aKb_O7X1V4W?bK]_O<^1P4Z?ZK\\\\_Of0Z1Q4_`0oK`_OR4^`0oKd_Oc2eNnLea0a0j_OT2lNYMUa0g0S@U1XNaMg0a0o`0h0U@m0FZNU`0i0c@9_OmNn?j0aAWOfNNh?k0cDUO];j0eDUO[;l0eDSO\\\\;m0dDQO];P1eDmN\\\\;S1dDlN\\\\;U1eDiN`;S1`DkNj;n0TDROn;m0RDROP<k0RDTOo;j0SDTOo;j0SDTOo;k0PDWOQ<g0oCYOT<a0QD[OV<<jT_:\"}}, {\"image_id\": 32, \"category_id\": 2, \"bbox\": [553.05078125, 242.16036987304688, 97.1031494140625, 195.54998779296875], \"score\": 0.6014589667320251, \"association_id\": 4, \"segmentation\": 
{\"size\": [858, 1024], \"counts\": \"PPjj0\"}}, {\"image_id\": 32, \"category_id\": 2, \"bbox\": [111.91605377197266, 566.13671875, 68.76605987548828, 30.85504150390625], \"score\": 0.22616319358348846, \"association_id\": 0, \"segmentation\": {\"size\": [858, 1024], \"counts\": \"YP`33^j0:M2O1O10O010000O1000O1000O100000O10000000000O1000000O10000O10000O1000000O1000000O2O001O0O2NocRf0\"}}, {\"image_id\": 32, \"category_id\": 2, \"bbox\": [73.79548645019531, 564.5819091796875, 111.83100891113281, 40.38995361328125], \"score\": 0.13980896770954132, \"association_id\": 0, \"segmentation\": {\"size\": [858, 1024], \"counts\": \"iRQ23fj02N2O1N2N2O2NO1O1O1O1N3KfkT1:RTkN200N2O1000O10O1000000O0100000000O1000000O1000000O1000000O1000000O10000O1000000O101O000O2O000O4K`SPf0\"}}, {\"image_id\": 33, \"category_id\": 2, \"bbox\": [722.5217895507812, 512.9003295898438, 168.31195068359375, 140.5386962890625], \"score\": 0.9999996423721313, \"association_id\": 2, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"_iR?R1Vd05H71O1OO1O10O010N2N1O2N1O1O2N2M3Fa0]OVR`00Tn_O3M1N1K6I7L4N2O0N2L4]Ob001O10O00M1L502O1O1O0M4B>L4O1O100000000010O2N1O1O2N101N1O1O2N101N101M3N1O2N1O100N2O1O1O1O1O1O2WO^\\\\OEdc06k0O10001O1O001O0000001O000000010O1001OO10O010000000O10O010O11O00O10O01O2N100O001000001OO01O100O10000O010O1O2O0O10O02M3M2N`_k2\"}}, {\"image_id\": 33, \"category_id\": 1, \"bbox\": [447.93389892578125, 201.89834594726562, 249.609130859375, 475.3065490722656], \"score\": 0.9999985694885254, \"association_id\": 1, \"segmentation\": {\"size\": [683, 1024], \"counts\": 
\"eb`97md0>B=C9I7XOdNY]Oa1ab0h0I6G9D=VOi0H8L5M2N2O0O1N2N2MZNP@XNn?h1]@nMd?P2c@iM^?W2g@cM[?\\\\2i@_MY?`2l@ZMV?c2QAVMR?c25fMa@S2`?_NT@]1l?lNn_OS1S`0SOf_On0[`0UOa_Ok0a`0XO[_Oi0d`0[OW_Of0j`0]OQ_Oe0o`0k1O100O1O1O101O1O1O1O1O100oAjKl:W4kD[Ll:e3REbLh:_3WEdLg:]3VEgLh:Y3UEmLh:T3SETMi:l2jDhMo:Y2dDZNU;g1dDeNV;[1fDlNY2]Lh5h4jGQOV2`Lm5`4iGUOT2aLQ6\\\\4eGYOR2dLV6U4aG^OS2eLX6c8_IiGZ6h<lN=C2N2N1O1O00000000001O0000000000000000O1000000O1O1O1N2O1K5nMhImD\\\\6P;lIgDX6U;QJaDT6[;P2J6F:oNQ1E;K5M3M3K5H8J6N2O1N2O100O11O010O1O1O2O0O2N110O11O4L4L4L3N2M1O2N3M4N21Kg0YO8G5K4L2M3M3M5K6K5J3M3M3M2O1N4L5K;E?B6ImFWGi5d8oIfGR6X8mIjGR6W8lIlGR6U8lInGQ6T8mIPHP6Q8nIRHP6o7nITHo5m7PJUHo5k7QJVHm5k7RJVHn5j7RJVHm5k7SJVHk5l7TJVHi5l7UJ^HYO`N[4U9ZLcKZ2`4dMPLk1R4SNcLX1_3gNcLV1^3iNdLV1]3hNeLU1]3jNfLR1]3mNeLP1^3mNnLf0Z3SOVN\\\\OS2<XNXOm1d0\\\\NROg1l0`NkNd1S1[9O2O1N2O1O2M3N1O0000000000001O001N2O2M3N0O100000O101N2OO010O11O00O5K3N1N3L4Lc\\\\V7\"}}, {\"image_id\": 33, \"category_id\": 1, \"bbox\": [694.9076538085938, 140.26480102539062, 225.837158203125, 512.4344482421875], \"score\": 0.9999862909317017, \"association_id\": 2, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"_Wg>9md08F:G8H9C<D=G8L3M3M4L3L4K5S@ZMo<k2cBRNj<S2QCXNe<k1WC^Nb<d1[CaN`<d1[CaN`<f1XCbNa<h1SCXOm;l5VO=C6K5L2O2N2M5L6I9oE`Eg8e:lFlEk8];L3N1N2N2N1SNdCjJ^<R5kCgJW<T5QDgJP<U5VDhJk;U5WDkJj;R5YDnJg;P5[DoJf;o4[DQKf;m4\\\\DRKe;l4]DTKc;j4^DVKc;g4aDWK`;e4hDUKZ;i4lDPKV;o4PEkJR;T5REgJo:Y5VE`Jm:_5_210O01N2N2J6I7I7M3N2N2N2M3M3I6I8J6L4M3N2L4L4I7J6K5M3N2N2N2M3L4K5M3N2N2O1N2N2M3L4N2N2N2O1N2N2M3L4M3N2N2O1O1N2M3L4M3N2O1O1O1N2O1[HREj4Q;SKYEc4j:[K]E]4f:aK^E[4e:cK]E[4d:cK_EZ4c:dK`EX4d:dK_EX4e:cKaEW4e:cKaEW4d:eK`EW2nNdMf;0aES2UOgM\\\\;3bEP2\\\\OgMU;7`En1CdMP;<_En1o<QNSCm1o<PNSCn1o<QNRCn1P=PNQCo1P=PNRCo1o<oMRCP2n<SNQCl1o<UNRCj1m<YNQCf1n<^NRC_1n<dNRCY1n<iNRCU1n<lNSCS1l<nNTCQ1m<oNTCQ1j<oNWCR1h<nNYCS1h<jNZCV1e<hN_CW1`<hNbCW1^<gNdCX1^<eNcC[1a<^NcCb1d<RNaCn1f<eMaCZ2l?O0O0O1OO10O2O4M8I3K1O1OO01N3N2M3N2M2O0O101N201O51N8HM1OO00O1000O2O1O00O001N2N2N2N2N1O2N2M3M7I8GmXZ2\"}}, 
{\"image_id\": 33, \"category_id\": 1, \"bbox\": [210.49581909179688, 301.085693359375, 47.17193603515625, 36.16510009765625], \"score\": 0.9999821186065674, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"\\\\Q]49nd07L3L6K1N3N1O2N1O10000000000000000000001O000000000000O1000000O2O1O1N101N101O1O0O2O1O1O1O1N3Lbcn?\"}}, {\"image_id\": 33, \"category_id\": 1, \"bbox\": [243.2184600830078, 299.921630859375, 48.53221130371094, 37.09405517578125], \"score\": 0.999974250793457, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"lQS5=nd00N2O0O2M2O20001O0O100N2O2N1N3N2M2000001O00000000000000O1000000O100000000O101O0O3N1O1N2N2N3M3LomW?\"}}, {\"image_id\": 33, \"category_id\": 1, \"bbox\": [393.7965087890625, 288.8255920410156, 41.410797119140625, 36.642578125], \"score\": 0.9994866847991943, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"UhW8;md05M5J4M3M2N2N2O0O101O000000000000000000O100000000O1000001N2O001N2O1O1N2O2L4M7FQbX<\"}}, {\"image_id\": 33, \"category_id\": 1, \"bbox\": [715.5592651367188, 26.974258422851562, 159.1138916015625, 276.38128662109375], \"score\": 0.9980727434158325, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": 
\"SkQ?6od08L3N0WJBeF?Y9IaF6^91\\\\F0c93[FMd96ZFKe96ZFKd98ZFHf99YFHf9:XFGg9:YFEf9>WDPOTO`0d<b0VDSOUO7e<h0SDWO[OIc<R1lC[O[>f0YAGe>:WAJi>6SALo>4g@4Z?M`@8a?HX@NmNSOk`0P1S@1UOlNi`0U1m_O2\\\\OfNg`0]1f_OOEbNe`0n1T_ODZa0V201O01000001O0O\\\\_OULi?m3T@VLk?l3m_O^Ln?e3k_OeLP`0]3i_OjLT`0Y4N1O1O2N102M3SOcJgA^5W>eJeA^5X>gJdAZ5[>hJbAY5^>iJ`AX5^>jJ`AW5`>kJ]AW5b>iJ]AX5c>iJ[AX5e>h0O0O2O01O0000000000000000001O0O100O2O000N2O1N2O2N1O001O1O001O1O100O100O10000OTBoI`<Q6`CPJ_<P6`CQJ`<o5_CSJ`<m5_CUJa<j5^CXJb<f5_C\\\\Ja<b5^CaJb<\\\\5_CeJa<6\\\\BY4R1cKc<NcBZ4k0hKd<ElB^4a0lKe=Q4]BlKf=R4\\\\BjKh=T4\\\\BeKi=Y4e1O2M3N2M4Gf@ZL^=X3nBnLn<c2aC^M]<_2gC`MZ<]2lC_MT<_2oCaMQ<Y2UDfMk;Y2VDgMj;]2SDbMm;a2PD_MQ<b2nC]MR<f2kCZMU<h2jCWMW<j2gCUM[<m2Q32O1O5J5K5L00N5K4M2M:F6I=D=C9F5J7H?]OiVW3\"}}, {\"image_id\": 33, \"category_id\": 2, \"bbox\": [492.67608642578125, 523.2911376953125, 158.02972412109375, 121.6353759765625], \"score\": 0.994615375995636, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"h\\\\Y:`0id05K3N1O001K5O1O100O0O200O10O010O1O0M4M3M4J5O1N3K7In\\\\c0MYn[O2\\\\c0c1J5N3N1M3K5\\\\Od0N2N20000O10000O100FPMX^OP3ga0UMS^On2la09M2100N2N2M3M3O1O1M3L4gMb]Ob1ab0]Na]Oa1`b0_N`]O`1ab0`N_]O`1ab0aN]]O`1db0_N\\\\]Oa1db0b0101bM\\\\]OOOl1Yc0N4L3M1O1O1O1O6J5L6I>B5J6Jmcb0LV\\\\]O5K4N1O1O1O00001O0000001O01O0000001O001N2N5DTXh7\"}}, {\"image_id\": 33, \"category_id\": 1, \"bbox\": [51.342369079589844, 399.074462890625, 253.088134765625, 283.925537109375], \"score\": 0.9866470098495483, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": 
\"RZS1m0[d07I4M3M2N2N3K4F:J6L4N3N1O1O1N2N2oN]MS_Oj2b`0U1N2N2O1O1O1O1M3M3M3N2N2O100O1O1O1O1N2O1N2O100O1O1O1N2O1N2O1O1O100O1O100dMdJhD]5T;jJhDW5U;nJiDR5U;RKhDo4W;SKgDn4W;UKgDl4X;WKdDk4[;WKcDj4\\\\;WKcDj4\\\\;XKbDi4\\\\;ZKbDg4];ZKbDg4\\\\;\\\\KbDe4];\\\\KbDe4^;\\\\K`De4_;\\\\K_Df4`;\\\\K^De4a;]K]Dd4b;^K]Db4c;_K[Db4d;`KZDa4e;aKYD`4f;bKXD_4g;cKWD^4h;dKVD]4i;fKRD]4n;eKlC_4S<]200O100000000O10000000000000000000000000000000000000000000000000000000000000000000000000000000000000O1O1O1O1L4L4M3N2N2O1N2M3N2N2O1N2O1O200O010O1O2O0O100O10000cJ]BR3c=eLlBS11]NR=N_Co09POX<GgCe0f0Dc;CjCd0h0K];]OnCd0i0OY;[OPDc0j03U;WOTDc0j06`=FdB:\\\\=BhB>Y=ZORCc0S=nN[Cn0c`0N001O000O1O3M3M4L2N1O100O1O001O1N2O1O2N1O1O1O001N101O1N2O0O2OVXP`0\"}}, {\"image_id\": 33, \"category_id\": 2, \"bbox\": [472.0242919921875, 283.083984375, 250.62335205078125, 374.09735107421875], \"score\": 0.8328986763954163, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"oQX:<md06K2N2O1N101O001N10001O0O1O100O1N2O1O1N1101N3M8GVQh0;PnWOg0D9I6K5L2N101O0O10001O00000000O1000001O001O1O010N2O0O2N2O0O2O2N3N3L5K6J7I4L3M2N3L3M5J:DaWl8\"}}, {\"image_id\": 33, \"category_id\": 2, \"bbox\": [601.292236328125, 542.25, 102.01312255859375, 36.4864501953125], \"score\": 0.6119136810302734, \"association_id\": 1, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"bgR=`0kd02N002N1O00001O0010O0010O01O0010O00000001O100O1O7I1O001O0010O0001O00001O01O0001O01O0000010O0000010O0001O00001O01O0001O000000000O100000000O1O1000aag6\"}}, {\"image_id\": 33, \"category_id\": 2, \"bbox\": [506.75958251953125, 526.434814453125, 200.6759033203125, 69.17242431640625], \"score\": 0.12203115969896317, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": 
\"Rgb:>jd09J1N2OO100M_[O\\\\Obd0`06M3N2O2NZfd0MgY[O4N1O1O1GC_[O2N<^d0=A?L4M3O10000O1O1O1H]Ng\\\\Od1Xc0^Nf\\\\Oc1Zc0]Nf\\\\Oc1Zc0]Ne\\\\Od1[c06O10000000000001O00000VNe\\\\Od1[c0[Ng\\\\Od1bc0N2N2bNY\\\\OT1Sd0L1O1O001O2N2N4H6N1O1N5AT[O30MZdd09aP\\\\O8J3N001O0001O00001N10010O00001O0010O0002N4M2M1O010O1O1O001O001O01O0001O01O01O000001O00010O000001O0001O00000001O0000000000000O2O001N1OnVj6\"}}, {\"image_id\": 33, \"category_id\": 2, \"bbox\": [240.22406005859375, 331.3019104003906, 20.101959228515625, 4.892242431640625], \"score\": 0.058401092886924744, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"dgS51Ye0101O000O1000O101O000OZXo?\"}}, {\"image_id\": 34, \"category_id\": 1, \"bbox\": [0.0, 149.3645782470703, 1024.0, 270.01324462890625], \"score\": 0.9999994039535522, \"association_id\": 1, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"YP7:Pe04M2M4L3N000O2O0O101N100O2O0O101N1O1O2O0O1O2N1O1O2N1O2N1O1O2N100O2N100O101O0O100O2O000O10001N100O10001N100O1O101N1O100O2N1O1O2N1O101N1O101N10001N100O2O000O2O00000O101O0O10000O10001N100O100O101N100O100O2O0O100O101N10000O2O00000O2O00000O101O00000O1000000O10000O100O100O100O100O100O101N100O10000O1000000O1000000O10000000000O010000000O1000000O0100000O10000000O0100000000000O010000000000000000000000000000000000000000000000000000000000000000SMQL^Do3b;RL]Dn3c;SL\\\\Dm3d;TL[Dl3e;ULZDk3e;VL[Dj3e;WLZDi3f;XLYDh3g;XLYDh3g;XLYDh3g;XLYDg3h;YLXDg3h;XLYDh3g;XLYDh3h;VLYDj3g;VLZDi3f;VL[Dj3e;UL\\\\Dk3d;UL\\\\Dk3d;TL]Dl3a>000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001O0000000000000001O0000000000000001O00000000000000000000000010O00000000000000000001O01O0000000010O0000000000010O000000000010O000000000001O000001O0001O000000001O00001O00001O0O101O00001O000000001O0000001O00000000001O000O100000001O000000000000001O000O100000001O00000000001O0O10001O00000000001N100000000000000000000O100000001O00000000000O1000000000000000000000000O100000000000000000O100000000
0O10O100000000O1000000O1000O10O1000000OgN^KkAb4T>bKiA^4W>dKgA\\\\4Y>gKdAY4[>iKdAV4]>jKcAV4]>kKaAV4_>jKaAV4^>lKaAT4_>mK`AR4a>nK_AR4a>oK^AQ4b>PL]AP4c>QL\\\\An3e>RL[An3e>SLYAn3g>SLXAm3h>TLWAk3j>VLUAj3k>VLUAj3k>WLTAh3l>YLTAg3l>ZLRAf3o>ZLQAf3o>[LPAc3R?]Ln@a3T?`Lk@]3X?cLh@Z3[?gLc@X3_?hLa@V3a?kL]@S3f?mLY@S3h?mLW@R3k?oLS@Q3n?oLP@R3Q`0oLm_OQ3T`0oLk_OQ3V`0R10O1O1O1O100O100O1000000000000000000O10001O000000000000000000000000001N1UL__Ol2a`0TM__Ol2a`0TM__Ol2a`0TM`_Ok2``0UM`_Ok2``0UM`_Ok2``0UM`_Ok2``0UM`_Ok2``0TMa_Ol2_`0TMa_Ol2_`0UM`_Ok2``0UM`_Ok2``0UM`_Ok2``0UM__Ol2a`0TM__Ol2b`0SM^_Om2b`0n00000000000000000000000001O0001O000000000001O00000000000000001O000000001O0001O0001O000000001O0001O0000mK`_O[3a`0`Ld_O_3\\\\`0`Le_O`3[`0`Lf_O_3Z`0`Lg_O`3Y`0`Lh_O_3X`0`Li_Oa3V`0_Lk_O`3V`0^Lk_Ob3U`0^Ll_Oa3T`0^Lm_Ob3S`0^Lm_Ob3S`0^Ln_Oa3R`0^Lo_Ob3Q`0^Lo_Ob3R`0]Lo_Oc3P`0\\\\LQ@d3o?[LS@d3n?[LS@d3m?[LT@e3j`0O000000001O00000000001O0000000000001O000000000000001O00000000000O1000000O1000000O1000000O1000000O100000000O10000000000O10000000000O100000000O10O1000O10000O1000O0100O10000O01000O1000O010000000O0100000O010000000O10O1000O10O10O1000O0100O010O010O010O0O2N1N3N1O2N2O0O2O001N2J5N3O10O0100O01000O10O10O10O10O10O01000O0100O010O0010O010O010O10O01000O01000O10O100000O10O1000O10O10O1000O0100O0010O010O010O0010O010O01O10O10O010O01O10O01O10O0100001N10000O2O0O10001N100O10001N100O101N100O1O2O0UNY^O1ha0OY^O0ga0O\\\\^ONfa00\\\\^OOda00^^ONca02^^OMba02`^OLba03_^OcNJm0ga0?l^O@Ua0`0l^O_OUa0?l^OATa0?m^O_OTa0`0m^O@Ta0?l^OAUa0=m^OAUa0>k^OBVa0<l^OCVa09l^OFVa04P_OKQa01S_OMo`01R_OOo`0NT_O0o`0MS_O2n`0LU_ONQa0NW_OHSa01Tlc1\"}}, {\"image_id\": 34, \"category_id\": 2, \"bbox\": [0.0, 388.6619567871094, 845.71728515625, 55.310546875], \"score\": 0.9972288012504578, \"association_id\": 1, \"segmentation\": {\"size\": [683, 1024], \"counts\": 
\"fR33Ue05M1MHS[O9ld0300O10000MCX[O=hd0301O000000000O10000000000000000M^O][Ob0cd0^O][Ob0cd0^O][Ob0cd0_O\\\\[Oa0dd03000000000000000000000000000000000000000000000000000O101O00000000000000000000000000000000000000000000000000000000001O000000000O100000000000000000000000000000001O000000001O000000001O00000000001O0000000000000000000000000000000000000000000000000000000000000000000000000000O1000000O100O100O10000O10000O1O1O1000000000000O1000000000000000000000000000O1000O1000000000000O1000000000000000000000O1000000000000000000000000000000000000000000000000000000000O100000000000000000000000000000O100000000000000000000000000000O100000001O000000000O1000000O101O00000O100000000000000000000O100000001O00000000000000000000000000001O00000000000000000000000000000001O00000000000000000000001O00000000000000000000000000001O0000000001O0001O001O001O00001O000010O00000000000000000001O0000000000000001O000001O00000000000001O0001O000000000000000000001O00000001O000000000001O0000000000000000001O01O0000000000O101O0000000000001O000O100000001N1000000O101O000O10001O000O1000000O2O0O1O101N1O1Oaed0MbZ[O2N1O2O000O2O000O10000O1000000O1000000O10000O100O100O1O1O100O100O100O10000O1000000O10000000000O100000000000000O100000000000O1000000000000000000000000000O10000000000000000000O10000000000000000000000000000O1000000000000000000000O10000000O1000000000000O1000000000000000000O10000000000000000000000O100000000000001O000O1000000000000000000O1000001O00000000001O000O10001N101O001N100O2N10003L101O00Zfm5\"}}, {\"image_id\": 34, \"category_id\": 1, \"bbox\": [532.7373657226562, 152.50933837890625, 441.5972900390625, 112.967529296875], \"score\": 0.8596808910369873, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": 
\"d\\\\T;7Re03N2N2O0O2O1O0000O100000000000000O1000000000000000001O000000001O00001O00000000000O10001O0000000O10000000000O101O000O1000000O101N1O1OdPV10\\\\oiN2O2N0O2O0000000O2O00O1000000000O1000000000000O11O000000001N10000O2O000O101O0O10001O]oe03_PZO2N2N3N1O1O0O2O001O0O101O001O1O0O101O0000001O0O100000000001O00000000001O000O10000000000O10001N100O100O100O1O100O2O000O100000000000000000000000O10001O010O0000001O000000001O000000001OO100000000O1000a\\\\ODoa0<P^OFoa09P^OIPb06o]OMPb03n]OORb00n]O2Qb0No]O2Qb0Mo]O6oa0JQ^O8ma0IR^O6na0KR^O5na0LQ^O3oa0OP^O1oa00Q^OOoa03P^OMPb03P^OMPb03P^OMoa05P^OJQb06o]OJPb07P^OIPb08o]OHQb08o]OHPb09Q^OFoa0:Q]OZO`04B8mb0:Q]O[O=5E6mb0:d]OA^O4ob0;a]OC@2ob0:a]OE@0Pc0;`]OE@OQc0<_]OE@JVc0`0[]OF_OIWc0a0Y]OH_OGXc0`0Z]OI^OFYc0a0Y]OI^OFYc0`0Z]OJ]OFYc0`0Z]OJ]OFYc0?[]ODYO03MZc0>Z]OEYO03MZc0=[]OFXO03L[c0>Z]OFXO03L[c0>Z]OEYO13K[c0=Z]OGXO0ec08U]OFXO1cc08V]OGWO1dc06V]OHXO0cc07V]OIWO0cc07W]OHWO0cc06[1M4MVP:0boE4Q[OOm0Mob0<P]OFfb00`\\\\O=i0Cgb01]\\\\O>l0Beb0d0[]O\\\\OXOMWc0h0`]O[OXONWc0h0a]OZOWOOXc0h0`]OZOVO0Yc0g0`]O_O`b0b0^]O@`b0a0`]O@_b0a0`]O@^b0b0a]O_O^b0a0b]O@]b0a0b]OA[b0`0e]OCXb0=h]OFTb0<k]OFSb0:l]OHRb09n]OGRb0:m]OGQb0:o]OFQb0:P^ODQb0<P^OBQb0=Q^O@Qb0`0[^OmNla0S1V1000000O1VOQ\\\\O8oc0@m[O09<kc0C]\\\\O<cc0C_\\\\O<ac0D_\\\\O;bc0E_\\\\O:ac0Ea\\\\O:Zd0M3M3NR`h1\"}}, {\"image_id\": 34, \"category_id\": 1, \"bbox\": [460.1387634277344, 142.596923828125, 517.6859130859375, 234.94674682617188], \"score\": 0.24424073100090027, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": 
\"bSc97<O0<Yb0Y2I5K7I5L2O0O2O0O2OO10000000000000000000gNaLe@`3W?dLi@\\\\3T?hLk@X3T?jLk@V3U?kLj@T3V?nLi@R3V?oLj@Q3V?PMi@P3W?PMi@P3W?QMh@o2X?RMh@m2X?TMg@m2X?SMh@m2X?TMg@l2Y?TMg@l2Y?UMf@k2Z?UMf@k2Z?VMf@i2Y?XMg@h2Y?XMg@h2Y?YMf@g2Z?YMf@g2Z?YMf@g2Z?YMg@f2Y?[Mf@e2Z?[Mf@e2Z?[Mf@e2Z?[Mg@d2Z?\\\\Me@d2[?\\\\Me@d2[?\\\\Me@c2\\\\?]Me@b2[?_Md@a2\\\\?_Md@a2\\\\?_Md@`2^?_Mc@`2]?aMb@_2^?aMb@^2_?cM`@]2`?cMa@\\\\2_?dMa@[2`?fM_@Z2a?fM_@Z2b?eM^@[2b?eM^@[2b?eM_@Y2c?fM]@Z2c?fM]@Z2c?fM^@Y2c?fM]@Y2d?fM]@Z2d?eM\\\\@[2d?eM]@Y2d?gM\\\\@Y2d?gM]@X2d?gM\\\\@X2e?hM[@X2e?hM\\\\@V2e?iM\\\\@W2d?iM]@U2e?jM[@V2e?jM\\\\@U2d?kM\\\\@T2e?lM[@T2e?kM\\\\@U2d?kM\\\\@U2d?kM\\\\@V2c?jM]@V2c?iM^@W2b?iM^@W2b?iM^@W2b?iM^@W2b?hM_@X2a?hM_@X2b?gM^@Y2b?gM^@Y2b?gM^@Y2b?gM^@Y2b?fM`@Y2`?gM`@Y2`?gM`@Y2`?gM_@Z2a?fM_@Z2a?fM_@Z2a?fM_@Z2a?fM_@Z2a?fM^@[2b?eM^@[2b?eM^@[2b?eM^@[2b?eM]@\\\\2c?dM]@\\\\2c?dM]@\\\\2c?dM\\\\@]2d?cM\\\\@]2d?cM\\\\@]2e?bMZ@_2f?`M[@`2e?`M[@`2e?`M[@`2e?`M[@`2e?`MZ@a2f?_MZ@a2f?_MZ@a2f?_MZ@b2e?^M[@b2e?^M[@b2e?^M[@b2e?^MZ@c2f?]MZ@d2e?\\\\M[@d2f?ZM[@f2e?ZM[@f2e?ZM[@f2e?ZM[@f2e?ZMZ@h2e?XM[@h2e?XM[@h2e?XM[@h2e?XM[@h2f?WMZ@i2f?WMZ@i2f?VM[@j2e?VM[@j2e?VMZ@k2g?TMY@l2h?SMX@n2h?QMX@o2i?oLX@Q3i?nLW@R3k?lLU@T3l?kLT@U3Pa010000000000O2O0000001O000O2O00001O000O2O00001O000001O0000001O0000001O00000000001O000001O0000000001O000001O0000000001O000000000000000000000000000000000000001O000000000000001O000000001O000000001O00O01000000000000O010000000O1000000O10O10O100O10000O100000oMmLiAS3X>mLhAS3W>nLiAR3W>oLhAQ3W>PMiAo2W>RMiAn2W>SMhAm2W>TMiAl2W>TMiAl2W>TMiAk2X>UMgAl2X>UMhAj2Y>VMgAj2Y>VMgAi2Z>XMeAh2Z>YMfAf2[>ZMeAf2[>ZMdAf2]>ZMcAf2\\\\>[MdAd2]>\\\\McAd2\\\\>]MdAc2\\\\>]McAc2^>^MaAb2_>^MaAb2_>^MaAb2_>^MaAa2`>_M`Aa2`>_M`Aa2`>_M`Aa2`>_M`Aa2`>_M`Aa2`>_M`Aa2`>_M`A`2b>_M^Aa2b>_M^Aa2b>_M^Aa2b>_M^Aa2b>_M^A`2c>`M]A`2c>`M]A`2c>`M]A`2d>_M\\\\A`2e>`M[A`2e>_M\\\\Aa2d>_M\\\\Aa2d>_M\\\\A`2e>`MZAa2f>_MZA`2g>`MXAa2h>_MXAa2h>_MWAa2j>_MVAa2j>_MVAa2k>^MTAb2m>^MSAb2m>^MSAb2m>^MRAc2n>]MRAc2n>]MRAb2o>^MQAb2o>^MPAc2P?\\\\MQAd2o>\\\\MQAd2o>]MPAb2Q?^Mn@c2R?]
Mn@c2R?]Mn@c2R?]Mm@c2T?]Ml@c2T?]Mk@c2V?^Mi@b2V?_Mi@a2X?_Mg@b2Y?^Mf@b2[?^Me@b2[?^Md@b2]?_Ma@b2_?^M`@c2`?]M`@b2a?^M^@c2b?^M]@b2b?_M]@b2c?_M[@a2e?aMZ@_2e?cMY@^2g?bMX@_2h?bMV@^2j?eMR@]2n?dMP@]2o?eMn_O\\\\2S`0fMf_O_2Y`0Y1100O010O010O10O10O10001N100O10000O100O1O100O100O100O1000000O10000O1000000O10000O10000`Ln^Of2Sa0YMP_Oe2o`0\\\\MR_Ob2n`0_MQ_Ob2o`0_Mo^Ob2Pa0_Mn^Ob2Sa0k00O010000O010O100O10O0100O_Lo^Oj2Pa0WMP_Oi2Pa0i0O100O1O1O1O1O1O1N2O1O1O1O1O100O1O10O10O10000000O1000O10[N`_O^Na`0^1f_O_NZ`0i0c_OVN:n0S`0h0j_OSN9R1n?i0m_OPN:U1i?i0m@UOT?i0o@VOP?Dh_O2Y1:o>Bj_O3Y19o>@m_O3W1<m>]OP@4V1=l>]OQ@2V1`0k>ZOS@2U1c0j>WOaBg0Sa0O1N2O1N3N1N2O1N2N2N1O2N1O2N1O2NUR_1\"}}, {\"image_id\": 35, \"category_id\": 1, \"bbox\": [15.267549514770508, 59.91611099243164, 436.25372314453125, 166.333251953125], \"score\": 0.999995231628418, \"association_id\": 2, \"segmentation\": {\"size\": [900, 928], \"counts\": \"^P?b03BUk0k0K5K5L4K3N110O00001O001O001O1O00010O000010O100O10O100O100O100O100O010O000100O10XWO[NPg0d1oXO^NQg0b1eXO[NkN6`h0^1dXOjN]g0V1aXOlN_g0T1_XOnN`g0T1^XOmNbg0S1\\\\XOoNcg0S1ZXOPOdg0Q1[XOPOeg0P1YXOROfg0n0YXOTOfg0m0XXOUOgg0k0XXOVOig0j0VXOWOig0i0WXOWOig0i0WXOWOjg0i0UXOXOjg0h0UXOYOlg0g0SXOYOng0g0QXOZOng0f0RXOZOog0f0PXOZOPh0g0nWOZORh0f0nWOZOSh0e0mWO\\\\ORh0e0mWO[OSh0e0mWO[OTh0d0lWO\\\\OTh0d0lWO\\\\OTh0d0lWO\\\\OUh0d0jWO\\\\OVh0d0iWO]OXh0b0hWO]OYh0c0gWO]OZh0b0fWO^OZh0c0eWO]O[h0c0eWO]O\\\\h0d0bWO\\\\O^h0f0_WOZObh0f0]WO[Och0f0\\\\WOZOeh0e0ZWO\\\\Ofh0d0ZWO\\\\Ofh0d0ZWO\\\\Ofh0e0ZWOZOfh0f0\\\\WOXOeh0h0[WOWOeh0i0[WOWOeh0i0[WOWOfh0h0[WOWOeh0j0ZWOVOgh0i0YWOWOgh0i0YWOWOhh0h0XWOXOhh0h0XWOXOhh0h0XWOXOih0g0XWOXOhh0h0XWOXOhh0h0XWOXOhh0i0VWOXOjh0i0TWOXOmh0h0RWOXOnh0o10000001O0000001O00001O00001O0001O01O0000001O000000001O00000O101O00001N10001O000O2O00000O101N10000O101N10000O101O0O101O000O1000001O0O10000000001N100000000000000000000000O100000000000000000000000000001O0001O00000000O100000000000001N100000000000000000000001O000000000000001O0001O1O102M3M4L2OO01O1O010O001O0010O01O10O01O01000O010O10O01O010O1O001O10O01O0
010O01O010O010O010O000010O01O01O01O0001O01O01O01O01O00010O01O001O01O01O1O001O00001O00000O2O00000O101O0O100000001N101O00001N2O001N2O1O1N2O1N2O1O0000001O00001O001O001O001O01O01N101O0J7N101M]ai>\"}}, {\"image_id\": 35, \"category_id\": 2, \"bbox\": [235.86886596679688, 432.35009765625, 261.45867919921875, 47.07763671875], \"score\": 0.9999719858169556, \"association_id\": 3, \"segmentation\": {\"size\": [900, 928], \"counts\": \"bVS71Pl00QTO1nk0300O101N1000000O2O00O1000000000000000000000000000001O000000000O1000000000001O00001N101O0Oedl10Y[SN4N2N1aTOHQk09nTOIQk07oTOIPk08PUOIoj07QUOHPk08QUOGoj09QUOFPk0:=0000000000O10O1000000O10000000O10O10000000O1000000000000000000O10O10000000000000O1000O1000000000000O100000000O100O1O100O10001O000001O000001O000010O01O1O100O001O0001O0@eTO7[k0IfTO7Zk0GhTO8Xk0HiTO7Wk0HjTO9`k00001O001O010O1O00_OKSUO5mj0KTUO5kj0JVUO5]k00001O001O001N2Omkm;\"}}, {\"image_id\": 35, \"category_id\": 2, \"bbox\": [45.18974304199219, 146.04229736328125, 398.28607177734375, 93.64810180664062], \"score\": 0.9999672174453735, \"association_id\": 2, \"segmentation\": {\"size\": [900, 928], \"counts\": \"Q[i11Sl00O2O1N101N10000O2O000O10000O1000000000000O1000000000000000000O01000000O100000O010000000000000000000000O10O100000000001O0000000000001O000000000000000000O10000O10000O10000O10000O10OO2N2N2N1O2O01000O10O1000O010000O01000O010O1000O100000O1000O1000000O01000O1000000001O0O10001O002N2N001O0000000001O0000001O000O2K^OfTOb0Zk0_OeTOa0[k0_OeTOb0Zk0_OeTOa0Zk05000000000000O10000000000000000000O1O1N2O1O1O1O10000O10000000000000001O00000000000000001O00010O001O001O000010O0000010O0001O01O01O010O01O01O01O00001O01O01O10O0001O010O00001O01O00001O010O001O001O00000010O000010O01O010O00010O00001O01O0000000000001O0001O00000001O00001O000010O01O00001O01O00000001O0000000O1000001O0000000O2O1N2NmPm>\"}}, {\"image_id\": 35, \"category_id\": 1, \"bbox\": [0.0, 665.2225341796875, 139.964111328125, 73.30804443359375], \"score\": 0.9999443292617798, \"association_id\": 4, \"segmentation\": {\"size\": 
[900, 928], \"counts\": \"ce0g0Zk09H3M;D4L3L5N2OO0010M201000O0100000O101O001O001O0010O00000000O10000000000000001O001O1N2O1O1N2O001O001O00001O0000000000000000000000000000O10000O1000000000O1001O0000000001O000001O00001O000000000001O0O1O100O100O1O2O001N100N3N1O2O000O2O0O101N1N3O0O2N2N3M2O1N1O2M9Hgike0\"}}, {\"image_id\": 35, \"category_id\": 1, \"bbox\": [209.85678100585938, 363.40948486328125, 268.5741882324219, 109.63372802734375], \"score\": 0.9999362230300903, \"association_id\": 3, \"segmentation\": {\"size\": [900, 928], \"counts\": \"e\\\\j63ok03L3O1O101N100K5O100O1iTO]Omj0c0SUO]Olj0d0TUO\\\\Okj0e0UUO[Okj0e0UUO[Okj0f0TUOZOlj0f0SUO[Omj0f0RUOZOnj0i0oTOWOPk0n0100000000O10O1000000O10O1001N1000001O0O101O0000001O0000001O001O0010O01O001O2N1O1O1O1O1O2N1N101O00001O0O1000000O101N10000O100O010O1O100O1000000O10000000000000000000000000O101O0O1000000O100O2O0O100O2O0O1O1O0O2O1O1O1O100O10O10O1O010O010O0100O01000O10O01O010O010O1000O01000O10O01000O10O100000O010000O10000O10000O100000O1000O01O100O1O10O01O100O1O1M3N2O100O00101O00000000001N[OTVOoNFNVj0R1VVOmNF1Uj0P1VVOoNE1Uj0m0ZVOPOC2Tj0b0kUO^Oh00^i0:PWOFQi07QWOJnh04TWOLmh0MYWO4[j01O01OY`f<\"}}, {\"image_id\": 35, \"category_id\": 1, \"bbox\": [404.45880126953125, 281.7732849121094, 264.768310546875, 96.4945068359375], \"score\": 0.9998843669891357, \"association_id\": 1, \"segmentation\": {\"size\": [900, 928], \"counts\": 
\"Ynd;1Rl0`0A2M2O001O1O1N101O1O1O00001O000010O01O001O00001O00001O001O00001O0000000O101O0O10001O001O1O1_UOgNVj0Z1fUOkNWj0b10\\\\NeUO]1bj0O010N2O2N1O1O00001N100000001O0000001O000000000000O10001O00001O1O001O010O001O0000000000000O100000000O100O100O1000000O1000001O000O101N1O11`NbUOX1Yj0dNkUO1NZ1Uj0hNmUOL0[1Sj0iNmUOL0[1Qj0kNVVOT1ii0mN[VOo0di0RO]VOn0bi0RO^VOn0ai0SO_VOm0ai0SO`VOl0`i0SOaVOm0_i0ROcVOm0]i0ROeVOm0[i0ROgVOm0Yi0ROiVOn0Vi0ROjVOn0Vi0ROkVOm0Ti0TOmVOk0Si0TOoVOl0Qi0ROQWOm0Pi0QORWOn0Rj001O1O1O010O00001O00000000001O000000000000O1000000000000O101O0O100O10001N01000O100O100O100O010O0001M2N2M300100O001O00N3N11O001O010O01O1O1O001O1O1O1O101M3N3KUnk7\"}}, {\"image_id\": 35, \"category_id\": 2, \"bbox\": [833.869140625, 20.67694854736328, 49.50579833984375, 40.8717041015625], \"score\": 0.9998559951782227, \"association_id\": 9, \"segmentation\": {\"size\": [900, 928], \"counts\": \"`cWg01Ql02M3M3N2O0000001N10OO2O1J600O20ON[OgTO0Ob0\\\\k0_OgTO>[k0CbTO=]k0AdTOb0_k0O2N101N010O1O010O1O2N101N3LRYY1\"}}, {\"image_id\": 35, \"category_id\": 2, \"bbox\": [459.00604248046875, 329.7572021484375, 206.60723876953125, 52.819244384765625], \"score\": 0.9994243383407593, \"association_id\": 1, \"segmentation\": {\"size\": [900, 928], \"counts\": \"joW=4ok01O2O0O100N2O1O10000O10O01O00100O10O10O01N1010O010O10O1O1O1ONHZTO7gk0IYTO7gk0JXTO6hk021N2O100O10000O100LSTO1cf]10]UcN0O2O0000001O00001O01O0001O000010O000001O01O01O00010O000000001O0001O0001O0001O01O00001O00001O01O01O00001O001O00000000000O100000000O10000O1000O10OO2N020O01000O100O010O100O1O2NeiV7\"}}, {\"image_id\": 35, \"category_id\": 2, \"bbox\": [546.474609375, 115.5594711303711, 351.66033935546875, 117.24811553955078], \"score\": 0.9993245601654053, \"association_id\": 0, \"segmentation\": {\"size\": [900, 928], \"counts\": 
\"Wlf?1Rl02N100O2O000000O2O00000O2O00X`\\\\21a_cM7N2O0O2O000O101O0O1O1O2N1O101O0O100O1O1O1L4O00100O100O100O01HoTO[OSk0d07N1K600O100O10O10O101O0O100O101N100O2O000O101O0O101O000O2O001NaPm00nmRO1nVO1Pi03mVOMRi04nVOMPi05oVOKQi05oVOKPi07nVOKQi05oVOKQi05oVOLPi04PWOLPi04PWOLPi05oVOLPi04PWOLPi04PWOLPi04PWOLPi05oVOKQi05oVOLPi05oVOKQi05oVOKQi05oVOKQi06nVOJRi06nVOJRi06nVOKPi06PWOJPi07oVOIQi07oVOJPi06PWOJPi06PWOKoh06PWOJPi06QWOIoh07QWOJnh06RWOJnh07QWOIoh07QWOIoh07QWOIoh07QWOJnh06SWOImh07SWOImh07SWOImh07TWOIkh07UWOIkh07UWOJjh07VWOHjh08VWOHjh08VWOHjh08VWOIih07XWOHhh09WWOGih09WWOGih09WWOGih09XWOGgh09YWOGgh0:YWOFfh0:ZWOFfh0:[WOFdh0:\\\\WOFdh0:]WOFch09]WOGch09^WOFbh0:^WOFbh0:^WOGah09_WOGah08`WOI`h06`WOJ`h06`WOL_h02bWO1[h0OfWO1[h0LfWO4[h0JfWO7[h0BjWO>Wh0_OkWOa0Uh0^OlWOc0ji0000010O01O001O1O001O000000O100O100O1O100O100O10O0100O10O010000O10O1000O10000000000O10000O100O10000O2O0O2N3LkQk0\"}}, {\"image_id\": 35, \"category_id\": 1, \"bbox\": [0.0, 319.19537353515625, 378.67230224609375, 152.64590454101562], \"score\": 0.9989135265350342, \"association_id\": 10, \"segmentation\": {\"size\": [900, 928], \"counts\": 
\"U;W1ej0=I4L3N2M3M200O2O000O10000000000O100000000001O000000001O000000000O101O0000000O2O000000001O0O1000000000O100000000000000000000000001O0000001O0000001iWOQNWf0o1hYORNXf0n1gYOSNYf0n1eYOSN[f0n1cYOSN]f0n1bYORN^f0o1aYOQN_f0P2_YOQNaf0Q2]YOoMcf0S2ZYOnMff0T2XYOlMhf0U2WYOlMhf0V2UYOkMkf0Y2QYOgMof0]2mXOcMSg0`2jXO`MVg0d2eXO]M[g0d2dXO\\\\M\\\\g0e2cXOZM^g0g2aXOYM_g0h2`XOXM`g0j2QXOlL8:gg0P3WXOQMig0P3VXOPMjg0Q3UXOoLkg0R3SXOoLmg0Q3RXOPMng0Q3QXOoLog0Q3PXOoLQh0Q3oWOoLQh0Q3oWOoLQh0Q3PXOnLPh0R3QXOmLog0T3QXOkLog0U3RXOjLng0V3RXOiLog0X3QXOgLog0Y3QXOgLog0Y3QXOgLog0Y3QXOgLog0Y3RXOfLng0Z3RXOeLog0[3QXOeLog0Z3SXOdLng0\\\\3RXOdLng0[3TXOcLmg0]3=O10000O1`N[WOAeh0;_WODbh0;_WOEah0;_WOEah0:aWOD`h0;aWOE`h09aWOG_h08bWOG_h08cWOG]h09cWOE_h0:bWODah0:aWOD`h0<`WOCah0<aWOAah0>aWO@ah0>`WOZOhh0f0YWOWOjh0g0XWOWOih0i0WWOoN[OK_i0U1WWOPO[OJ^i0V1VWOQOQi0o0oVOQOPi0P1PWOPOPi0P1oVOQOQi0o0oVOQOQi0o0nVORORi0n0nVORORi0n0mVOSOSi0l0nVOTORi0l0nVOTORi0l0mVOUOSi0k0mVOUOSi0l0lVOTOTi0m0kVOSOUi0n0jVOROVi0n0jVOQOWi0P1hVOPOXi0P1hVOPOXi0Q1gVOoNYi0Q1gVOoNYi0R1fVOnNZi0R1fVOnNZi0S1eVOmNHE[i0^1mVOmNHE[i0_1lVOlNHF\\\\i0_1kVOkNIF[i0`1lVOiNKFYi0a1mVOhNJGYi0a1mVOhN\\\\i0Y1cVOgN]i0Z1cVOdN]i0_1aVOaN_i0a1`VO^N`i0c1`VO[Nai0e1?1O000O10000000O10000O10000O100000000000000000000O0100000000000000O100000000O100000000O1000O10000O00100O1O1O1O1O00100J7N100O1O1O1O2N100O2O0O7J1O001O0O2O001O0OUWOYO\\\\f0f0cYO\\\\O\\\\f0d0ZYO\\\\OQN1fh0b0XYO_OPN1hh0>XYODmM0kh0;WYOOjf0CVWO4P2;Zf0]ORXO0G8m1:[f0^OYXO6^1;Xf0@ZXO4_1;Xf0AZXO3^1<Xf0BZXO0`1<Wf0DZXON`1>Vf0DdZO;\\\\e0FdZO:\\\\e0FdZO8^e0HTZOMhM:Uh0JRZOLjM8Uh0LQZOLjM8Uh0LQZOLjM8Th0MRZOKjM7Uh0NQZOKjM7Uh0NQZOKjM7Uh0OPZOJlM5Uh01oYOJlM4Vh02nYOJlM4Vh03mYOImM3Vh05mYOHmM2Wh06lYOHmM2Wh08jYOFPN0Wh0=fYOCSN0Wh0d0_YO\\\\ORg0d0nXO\\\\OQg0f0nXOZOSg0e0mXO[OSg0e0mXO[OSg0f0lXOZOTg0f0U210O01O001O00010O000000000001O00000000jWOYOPe0f0eZO[O]M1mg0e0eZOZO]M2ng0d0eZO[O[M3og0b0fZO\\\\OZM2Qh0a0eZO]OYM3Rh0`0eZO]OYM3Rh0`0eZO\\\\OZM4Qh0`0fZO[OYM5Rh0?eZO[OZM6Qh0?kZOBTe0>lZOBTe0>lZOBUe0=kZOCUe0=kZOCUe0=kZOCUe0>jZOBWe0=iZOCWe0=iZOCXe0;iZODXe0<hZO
DYe0:iZOEXe0:hZOFYe08hZOHZe00lZO0eh0N10001O000O2O00YRo?\"}}, {\"image_id\": 35, \"category_id\": 1, \"bbox\": [316.9045715332031, 35.702579498291016, 313.1834411621094, 118.19815063476562], \"score\": 0.9989109039306641, \"association_id\": 0, \"segmentation\": {\"size\": [900, 928], \"counts\": \"ceV;4hk0O\\\\TO7^k0K`TO7_k07O101O000O10001O0O101O001O01O1O001O001O02OYOfTOe0\\\\k0OnTOBDMej0b0aUOM]j04`UON`j0l0O1O010O001O00001O0000O100O100O10000O1O1O10000000000O1000000000000001O00000O10000000000000000O10001O0000000000000000000O10000000001N100O100O100O100O10000O101N100O2O0O1O2N101N2O0O01000O100O010O100O10O1O2J5O1CQVOdNQj0V1h0F7I9\\\\OdTO1ak0L=NUe_:\"}}, {\"image_id\": 35, \"category_id\": 1, \"bbox\": [372.0208435058594, 121.30131530761719, 406.5538635253906, 140.13658142089844], \"score\": 0.9985489249229431, \"association_id\": 8, \"segmentation\": {\"size\": [900, 928], \"counts\": \"cWS;3Pl03M2N2N1O101N1O1O2N1O1O2N1O1O1O2O0O101N101N100O2N1O2N1O2O0O1O2O0O1O2O000O2N100O2O0O2O001N2O001O01O10O04M7H1O100O1O100O1O1O0010O01O00010O00010O010O01O010O0010O0001O010O001O010O1O1O010O001O010O100O1O100O1O100O001O010O1O00100O10O01O1O1O010O00001O0001O0001O0001O01O01O0010O00010O010O010O100O1O100O1O1O1O010O1O001O1O1O2N1O1O10O0001O000000001O01O000001O01O00100O001N10001N101O0O10000O2O000O100O100O1O100O1O0010000O1000O03N1O101N1O1O1O2N1O001O0010O0000010O010O010O010O001O010O001O0`NfNWXOY1hg0iNWXOW1ig0iNWXOX1hg0hNYXOW1fg0kNYXOV1fg0jN[XOU1eg0kN[XOU1dg0lN\\\\XOU1cg0kN^XOT1bg0lN^XOU1`g0lN`XOT1`g0lN`XOU1_g0kNbXOT1^g0lNbXOT1^g0lNbXOU1^g0iNcXOW1^g0gNcXOY1^g0eNdXO[1Ri0000010O01O01O01O0001O00000001O0000000000000000000001O00000000ZNeNdXOZ1[g0hNdXOX1[g0jNdXOV1[g0kNfXOT1Yg0nNfXOR1Yg0oNgXOQ1Yg0POgXOo0Xg0ROhXOn0Xg0ROhXOn0Xg0ROhXOn0Xg0ROiXOm0Wg0SOiXOm0Wg0SOjXOl0Vg0SOkXOm0Ug0SOlXOl0Sg0UOmXOk0Rg0VOoXOi0Pg0XOQYOg0of0YOQYOg0nf0QOYWO3j1k0mf0ROYWO3j1k0mf0ROYWO3j1k0mf0ROYWO3k1j0lf0\\\\OTYOd0lf0[OUYOe0kf0[OVYOd0jf0\\\\OVYOd0jf0\\\\OWYOc0if0]OXYOb0hf0^OYYOa0gf0_O[YO?df0B]YO=cf0C^YO<bf0D^YO<bf0D_YO;af0E_YO;`f0EbYO:^
f0FbYO:^f0FbYO:^f0GbYO8^f0HcYO7]f0IdYO6\\\\f0JfYO4Zf0LhYO1Yf0OjYONWf01kYOMUf03mYOKSf05nYOISf07oYOGQf09m20O101O000O2O0O2O1M2N3N1O2N1O101O000010O01O0010O01O100O010O0001O2M2O2M2M7GoeS4\"}}, {\"image_id\": 35, \"category_id\": 1, \"bbox\": [270.73193359375, 202.4466552734375, 232.04263305664062, 117.14935302734375], \"score\": 0.9983822107315063, \"association_id\": 6, \"segmentation\": {\"size\": [900, 928], \"counts\": \"eY_72fk0=M3O1O001N101O00001O00001O1O1O10O01O001O001O001O01O01O00001O1O1O1bUOPOfi0R1PVOVOPj0l0lUOWOSj0l0iUOVOVj0\\\\10001O001O001O001O1O002O0O001O1O00100O1O1O10O01O1O1O001O010O1O001O00O10001O0000001N100O1M4J5M3N2K501O0O100O101O000O10000O2O0O10001O000O1000001O00000001O0001O000001O00010O0010O000010N100000001O000O110O10O01O10O0001O001O0010O0001QOaNWWO_1hh0bNXWO^1jh0_NXWOa1fi00002N1O0000O01O0O100O2O0O100O2O001O00001O00000O100O2O0M3N2O100O010000O100O100O01000O100@ROjUOn0Sj0VOdUONNm0^j0UOdUOOMl0^j0WOcUOP1^j0PObUONOh0`j0[OaUOL1f0`j0^O`UOK2d0_j0A_UOJ4d0]j0B`UOH5e0Zj0DmUO:Tj0FnUO8Rj0HRVO3oi0MSVO1mi0NVVOOli00WVOJli05S1O1M`_R<\"}}, {\"image_id\": 35, \"category_id\": 2, \"bbox\": [440.678955078125, 177.98390197753906, 318.52874755859375, 107.92439270019531], \"score\": 0.9983262419700623, \"association_id\": 8, \"segmentation\": {\"size\": [900, 928], \"counts\": \"gYW<1Rl02O00000O2O001O00001O000001O001O1O001O0000001O0000RVl00niSO0O2O000O101N100O100O1O100O100O1000000O100000000O010O100O100O10O010000O10000O100O10O10O10000O010000000O01000000000001O001O2N2N001O1O001O004L1O0O2O1O00Xba0Oc]^O8M2ZTOG^k0;`TOG^k0a0O1N10N2M3M210O1000000000000O10001O001O00001O01O0001O00001O010O00010O0000010O0010O00100O001O01O0001O01O01O01O01O0000010O0000010O000100O01O01O0000010O00oMFXXO:eg0J[XO5dg0M[XO3eg0N[XO2cg00\\\\XO0dg01[XOOeg01\\\\XOOcg01]XOOcg01]XOOcg01]XOOcg01^XOOag01_XOOag01`XOO_g01bXON^g01cXO0]g0OdXO1[g0OeXO1[g0NfXO2Zg0KiXO6ci0O0001O01O01O10O0001O000001O0001O0000000000001O000000001O0000000000000000000000000001O000000001O0O10001O00000O2N1MTig4\"}}, {\"image_id\": 35, 
\"category_id\": 2, \"bbox\": [4.963890075683594, 697.5995483398438, 151.65090942382812, 49.56622314453125], \"score\": 0.9981281161308289, \"association_id\": 4, \"segmentation\": {\"size\": [900, 928], \"counts\": \"mo54ok02M2O1N2M4O0O1O100O10000000000000000000O10000000000000000000000O01000000000000000000000O1000000000000000000000O010000000000O100000000000000000000000000O1000001O00000000001O00000O01000000O100O1O1O100O1O1O1000000O1000O10O11O0O1000000000001O0O100000001N100000001O0O100000001N1000001N10001O0O101NV^Xe0\"}}, {\"image_id\": 35, \"category_id\": 1, \"bbox\": [804.5897827148438, 26.799327850341797, 50.69256591796875, 40.94210433959961], \"score\": 0.9973642230033875, \"association_id\": 9, \"segmentation\": {\"size\": [900, 928], \"counts\": \"g^Tf04ik07M3N3M2O1O1O100000000M3M3M3O1O1O010O0010O001O100O100O10000O0100001N1O10O1O01N200O1N2L5^ObTO9gk0N1O2KSTO0nk0OSTO0moP2\"}}, {\"image_id\": 35, \"category_id\": 1, \"bbox\": [445.03472900390625, 64.41265106201172, 433.62567138671875, 146.93963623046875], \"score\": 0.9972656965255737, \"association_id\": 5, \"segmentation\": {\"size\": [900, 928], \"counts\": \"WQY<1Pl05XTOKVk0=gTOFVk0>fTODZk0d0N101O00001O001O010O00001O01O01O0001O1O001O001O00001O001O001O001O000001O0000001O00010O001O001O0001N101N1O2N3M2N2N2Nabl1GoaRNOak0:O10000O101N10000O2O0O101M21O001O00000000000000001O000000000001O001O002N1O1O2N1O2N1O1O0000001O0000001O001N2O001O1O001O000101N010O10O010O00010O001O01O00O2O0000001N101O001O001N2O001O0010O0001O010O0010O01O0010O01O0010O000000010O000000001O000001O01O000O2O00001O00001O0000001O00000001O00001O001N1000001O0000001O00001O010O001O001O001O0010O0001O0010O0001O010O010O00010O001O01O0010O01O001O001O10O01O00001O001O001O001O1O001O001N101O001O001O00001O00001O00001O001O00001O01N10001O0O101O0O101N1O1O2M20000O1000000000000O1000000O1000000000O100O2O0O1O2N101fN\\\\UOQ1ej0nN_UOm0_k0[O6JThQ2\"}}, {\"image_id\": 35, \"category_id\": 2, \"bbox\": [126.50726318359375, 481.700439453125, 288.5484313964844, 85.9266357421875], 
\"score\": 0.9964189529418945, \"association_id\": 7, \"segmentation\": {\"size\": [900, 928], \"counts\": \"egU52Ql03N001N1000000O100000000O10000000000000O1000000000001O000000010O0000000000000000001O000000000000000001O0000000000001O0000000001O000000001O001N2O2NYeo00i^oN0bk00]TO3NO2NWk00iTO?NCWk0h0M3N3N1O2O0000O1O1O1M3N2O1O1O1O010O001O0O1L41O00010O10O1O001O0O1L4N3O010O100O1O100O1000000O1000001N2O0O]gg00cXXO2N1O1O101N1000001O0000000000000001O0000000O10000000000O101O0O`PU>\"}}, {\"image_id\": 35, \"category_id\": 1, \"bbox\": [82.51924896240234, 403.4828186035156, 346.6693115234375, 149.86441040039062], \"score\": 0.9962098598480225, \"association_id\": 7, \"segmentation\": {\"size\": [900, 928], \"counts\": \"[XZ2275J4Vk0KkTOd0ej0ZO_UOP1_j0RO_UOo0Xj0nNiUO5Mo0Yj0mNiUO5Mn0Yj0oNhUO\\\\1Xj0eNgUO[1Yj08O10001N10000O1M4O03M1O001O00001O01O000001O01O001O10O01O001nUOTNki0S2O00100O1000O100O1O2N2O1N2N2N001O001O0010O01O3M3M1O1O100O1O\\\\VOaNdh0^1\\\\WObNeh0^1ZWObNfh0`1XWO`Nih0`1VWO`Njh0a1UWO_Nkh0b1TWO^Nmh0a1SWO_Nmh0b1RWO^Nnh0b1QWO_NPi0`1mVOcNSi0^1kVOcNUi0]1kVO`NYi0`1g0O2M010O010O0001O01O0000010O000000001O01O00000001N10000O10000O10000O2O0000O100000000000000000000O10001O0O10000000000O1000001O0000000O10001O000000000O2O000O2O0O1O2N100O2N11O00TOVN^WOj1ah0XN_WOg1ah0YN_WOh1`h0YN_WOg1`h0ZN`WOf1_h0[NbWOd1]h0]NcWOc1\\\\h0^NPWOI=h1bh0aNoVOI>f1bh0bNoVOJ>d1bh0cNPWOI?d1_h0dNSWOF?f1^h0cNUWOE>h1\\\\h0dNmWO\\\\1Qh0eNoWO[1Qh0eNoWO\\\\1og0eNQXO]1mg0cNSXO^1kg0cNUXO^1jg0bNWXO_1gg0aNYXO`1fg0`NZXOa1dg0`N\\\\XOa1cg0_N]XOb1bg0_N^XOc1_g0]NaXOf1\\\\g0ZNeXOf1Zg0[NeXOf1Zg0`NaXOa1\\\\g0aNcXO`1\\\\g0aNdXO_1[g0aNeXO`1Zg0aNfXO_1Yg0aNgXO_1Yg0bNgXO^1Xg0bNhXO^1Xg0bNhXO^1Xg0bNhXO^1Xg0bNhXO]1Yg0cNgXO]1Yg0cNfXO^1[g0aNeXO_1[g0aNeXO_1[g0aNeXO_1[g0bNdXO]1]g0cNcXO]1]g0cNcXO]1]g0cNcXO]1]g0gN_XOX1bg0jN\\\\XOV1dg0jN[XOV1fg0kNYXOT1hg0lNWXOT1jg0mNTXOS1mg0nNQXOR1Ph0oNmWOR1Th0^11O1O100O1O1O1O1N2N2N2O1O1O1O100O101N10001N101N102M5L3L3NM3M3N2O001O10000O01000O11O1O1O1O1O1O1O1O3M1O1O00001O0000000^N^VOb0bi0\\\\ObVOa0`i0]OcVOa0]i0^OfVO
`0Zi0_OhVO`0Xi0_OkVO?Ui0AlVO>Ti0AmVO?Si0AmVO?Si0@nVO`0Ri0_OPWO`0Pi0_OQWOa0oh0^ORWOc0mh0\\\\OTWOd0lh0\\\\OTWOd0kh0\\\\OVWOd0jh0\\\\OVWOd0jh0[OWWOe0ih0[OWWOe0ih0ZOXWOf0hh0ZOXWOf0hh0ZOYWOe0gh0ZOZWOf0gh0XOZWOh0fh0VO]WOh0dh0WO]WOi0ch0UO`WOj0`h0VO`WOj0ah0TO`WOk0ah0UO`WOj0`h0UOaWOk0_h0UObWOi0_h0VObWOj0^h0VOcWOi0]h0WOcWOi0]h0WOdWOh0\\\\h0XOdWOh0\\\\h0WOfWOi0Yh0WOgWOi0og0WOjVO0X1h0mg0[OhVON\\\\1f0ig0GXXO8gg0IYXO7fg0I\\\\XO6cg0K]XO5cg0J_XO5bg0I_XO7ag0HaXO7_g0GdXO6^g0IfXO0_g0NeXOM]g02hXOHZg08Z2N1O5LnYl=\"}}, {\"image_id\": 35, \"category_id\": 1, \"bbox\": [817.3482055664062, 8.902290344238281, 49.592041015625, 54.86603927612305], \"score\": 0.97240149974823, \"association_id\": 0, \"segmentation\": {\"size\": [900, 928], \"counts\": \"fS^f0=ck05N1J6N2N2O100O10O10ZOTOSVOl0mi0UOSVOk0mi0UOSVOk0li0VOTVOi0mi0WORVOj0ni0VORVOj0ni0VORVOi0oi0WOQVOi0oi0XOPVOh0Pj0XOPVOg0Qj0YOoUOg0Qj0ZOnUOe0Sj0\\\\OlUOd0Tj0\\\\OlUOd0Tj0^OjUOa0Wj0_OhUOb0Xj0^OhUOb0Xj0^OgUOb0Zj0^OdUOc0]j0]OcUOb0^j0^ObUOa0_j0_O`UO?cj0A]UO=ej0B^UO;cj0DYUOJ1=gj0HYUOLJO2<lj0IXUOK1:hj0KWUOK^k02hl0NeSO0mgk1\"}}, {\"image_id\": 35, \"category_id\": 2, \"bbox\": [818.846923828125, 82.27985382080078, 29.3575439453125, 22.448074340820312], \"score\": 0.9698083996772766, \"association_id\": 5, \"segmentation\": {\"size\": [900, 928], \"counts\": \"Ym_f07mk00O101O0000000O100O10O10O01O010OO101OO1O2N101O1O1O1O2OaSW2\"}}, {\"image_id\": 35, \"category_id\": 2, \"bbox\": [4.8743205070495605, 390.72265625, 364.3806457519531, 70.78469848632812], \"score\": 0.9257470965385437, \"association_id\": 10, \"segmentation\": {\"size\": [900, 928], \"counts\": \"ee59ik03N1O2O0O1000000000000000000000000000000000000000O1000O1000000000000000000O1000O10000000000000000000000000000O100000000000001O0000001O0000001O000O3N2MlTWg0\"}}, {\"image_id\": 35, \"category_id\": 1, \"bbox\": [755.4631958007812, 69.94331359863281, 46.79229736328125, 23.962127685546875], \"score\": 0.8955965638160706, \"association_id\": 0, \"segmentation\": {\"size\": [900, 928], 
\"counts\": \"jimd03Pl02J6M2O1O1O101N101O00001O001O001O0000001O000000010O01O1O001O2N2N4L2N1O001O002MQe^3\"}}, {\"image_id\": 35, \"category_id\": 2, \"bbox\": [297.3521728515625, 247.65420532226562, 192.35903930664062, 76.878662109375], \"score\": 0.8854475617408752, \"association_id\": 6, \"segmentation\": {\"size\": [900, 928], \"counts\": \"Z[c81Sl00UUl04fjSO2N3M2O1N4L2O0O2N010O0OO30O1O0100O1N2O001O100O1M3N2O100O1000000O10000O1000001O01O000001O010O000000010O01O001O01O000010O00001O0000010O00001O00001O000001O00000000000000001O0O1M3O1OVX_=\"}}, {\"image_id\": 35, \"category_id\": 1, \"bbox\": [822.8900146484375, 0.8863402605056763, 42.59393310546875, 20.480728149414062], \"score\": 0.8701261878013611, \"association_id\": 0, \"segmentation\": {\"size\": [900, 928], \"counts\": \"YWdf05mk03M2O2N101N100O101O0000000000000000000000000001O0000000000^TOA_k0?aTOA_k0<dTOD\\\\k0;eTOE\\\\k08fTOHZk07gTOIYk05iTOKXk00hlj1\"}}, {\"image_id\": 35, \"category_id\": 1, \"bbox\": [541.92919921875, 2.25838565826416, 173.207275390625, 40.593589782714844], \"score\": 0.7898861169815063, \"association_id\": 0, \"segmentation\": {\"size\": [900, 928], \"counts\": \"UV\\\\?2Rl01N101O1O1O0000001O001O001O0000001O1O001O000010O0000001O00001O001O000000000O101O00O2O00001O001N100O101N2O00001N1000001O1O1O001eTOZOVk0g0iTOYOWk0g0iTOYOWk0j0000001O0000000001O0000000O100000000001O010O00010O0000010O01O00010O001O01O01O0010O000001O00000000000010O00001O00010O00000001O00100O001O00001O1O001O0O2O1O3M1KYTOJQl00bRS6\"}}, {\"image_id\": 35, \"category_id\": 1, \"bbox\": [13.923295021057129, 481.478515625, 143.0666046142578, 58.97198486328125], \"score\": 0.6563889384269714, \"association_id\": 0, \"segmentation\": {\"size\": [900, 928], \"counts\": 
\"\\\\i<o0Tk02N1000001N1001O000000001O00000000001O1O001O00000O10000000000000001O000000000000000000000000000001O00001O0000000000000000000000000001O0000O10000000000000000000000000000001O001O00001N101O0000001O001O0O1000001O000001O000000000000000O2O0O101O000010O001O0001O0001O001O1O2N001N3N1O001O1N4KRTUe0\"}}, {\"image_id\": 35, \"category_id\": 1, \"bbox\": [9.66063404083252, 615.3973388671875, 194.9244842529297, 57.95452880859375], \"score\": 0.5736616849899292, \"association_id\": 0, \"segmentation\": {\"size\": [900, 928], \"counts\": \"j\\\\9j0Xk07jTOPOnj0T1N10001O0O10001O00O2O000O101N100001O000000001O0000000000000000O10000O101O000O1000001O00000000000000000000000001O000000001O00000001O000000000001O00000000000000000O100000000000000000000000000000000000000000000000001O000000000O1001O0000000000O0100000O100000000000O101O00000000001O0000000O2O000000000000000000000O20O0000000001O00001O0000001O001O001N2O001O002^OeTO7fk0N0O101O000O4L2O0OhRPd0\"}}, {\"image_id\": 35, \"category_id\": 2, \"bbox\": [765.6930541992188, 88.17752075195312, 55.68548583984375, 11.412567138671875], \"score\": 0.49704238772392273, \"association_id\": 0, \"segmentation\": {\"size\": [900, 928], \"counts\": \"jRSe01Sl00O100010O0010O01OjX50VgJ001O0010O0001O01O00010O0000O1O1O00100O1001O1N10RQ7NPoH1O000\\\\nm2\"}}, {\"image_id\": 35, \"category_id\": 1, \"bbox\": [36.391483306884766, 351.6034851074219, 420.65008544921875, 158.45327758789062], \"score\": 0.4748031795024872, \"association_id\": 0, \"segmentation\": {\"size\": [900, 928], \"counts\": 
\"]lP1a0\\\\k0=F5N2N2N2O0O101O0O101O0O1O100O2O000O10001O01O001O1O2O2M2N2N2O0O1O1O100O010O0010000O101OXVOVOiNJUi0Q1nWO@Ph0c0jWOAUh0b0fWOBXh0`0dWOC[h0?oVOlN6i0kh0P2O1O001O00010O001O00001O000001O01O000000000000001O000000001XWOjL`h0V3_WOkLah0U3^WOmLah0T3\\\\WOnLdh0Z3O1O001O1O1O1O001O1O001O001O00000000001O0OoMlWO4Th0IQXO5og0FXXO8hg0GYXO9gg0F[XO9eg0G[XO9eg0F]XO9cg0G]XO9cg0G]XO9cg0F^XO:bg0F^XO:bg0F^XO:bg0F^XO:bg0F^XO:bg0F^XO:bg0F^XO:bg0E_XO;ag0E_XO;ag0E_XO<ag0C_XO=ag0B`XO>`g0B_XO?ag0A_XO?ag0@aXO?_g0AaXO?^g0BbXO>^g0BcXO=]g0CcXO=]g0CcXO=]g0CdXO<\\\\g0DdXO<\\\\g0DdXO<\\\\g0DdXO<\\\\g0DeXO;[g0EeXO;[g0DfXO<Zg0DgXO;Xg0FhXO:Xg0FiXO9Wg0FlXO8Tg0HnXO5Sg0JoXO5Qg0JPYO6Pg0JQYO5of0JSYO5lf0KVYO3kf0LVYO4jf0KXYO4hf0LYYO2ff00[YOOcf02^YON`f04aYOJ`f06aYOI_f06bYOJ^f06bYOJ^f05cYOK]f04dYOL\\\\f03fYOLZf03gYOMYf02hYONXf02hYONXf01iYOOWf01iYOOWf00jYO0Wf0OiYO1Wf0NjYO2Vf0NkYO1Uf0OkYO1Uf0NlYO2Sf0OmYO1Rf00nYO0Rf00nYO0Rf0OoYO1Pf00PZO0Pf00PZOUOoMd0bg0YOkXOn0j1IZe0ZOkXOn0l1GXe0\\\\OlXOl0m1HWe0\\\\OlXOl0m1ZOgM3_g0IlXOj0o1YOhM2]g0KlXOj0o1XOjM2Zg0MlXOj0P2WOkM1Yg0NlXOj0P2VOmM1Wg0OlXOi0R2VOkM2Wg0OlXOi0R2VOlM0Wg01lXOh0Q2WOlM0Wg01lXOh0Q2WOlM0Wg01lXOg0R2XOlMOVg02lXOg0R2WOmM0Ug02lXOg0S2VOmMOUg04kXOf0T2WOmMNTg05lXOe0S2WOnMOSg05lXOe0T2VOnMORg06lXOd0U2WOmMORg05mXOe0Wj0[OiUOe0Wj0[OjUOd0Vj0\\\\OjUOc0Wj0]OiUOc0Xj0\\\\OhUOd0Yj0[OgUOe0Yj0[OhUOd0Yj0[OgUOe0Yj0[OgUOd0[j0[OfUOd0Zj0[OgUOe0Yj0[OhUOd0lj0O2O00001N10001N100O4MgUOGlh03TWOOmh0ORWO3_j01N1O2O1N2N2N2M3L4M2O100O1000O100000O1000000ZUOUOSj0k0lUOVO90nh0k0hVOVO65Qi0d0iVOWO;Nnh0k0hVOVOTj0k0kUOUOUj0k0kUOUOUj0k0`UOTO51[j0Q1eUOoN[j0R1dUOnN\\\\j0R1dUOnN\\\\j0R1dUOnN\\\\j0R1;00001O01O01WUOmN_j0S1`UOnN`j0S1^UOnNbj0S1\\\\UOnNdj0Z1O010O001TVOeNmh0[1QWOhNnh0Y1_VObN:6Wi0a1fVO`NZi0a1dVO`N\\\\i0a1bVOaN]i0_1bVObN^i0^1cVO`N^i0`1bVO`N]i0a1dVO^N\\\\i0a1fVO^NZi0b1gVO]NXi0d1jVOYNWi0g1c0000O01000000000O1000000O01000HTNWVOm1ii0UNUVOk1ji0WNUVOi1ki08O10000O101O00000001O0000000000000001O000001O000PO[VOAfi0=]VOAci0?^VO@ii09XVOGgi08\\\\VOFei09\\\\VOFdi09]VOGdi08]VOGci09^VOFci08^VOHbi08_VOGai08aVOG_i0
9bVOF_i08cVOG]i08eVOG[i08gVOGZi0NkUONl03Zi0MlUONl03Yi0OmUOKl05Xi0NnUOLk05Xi0NSWO1mh0NTWO2hNLli01]WO2gNMli01]WO1hNOki0N_WO2eN1li0M_WO1fN3ji0LjWO3Vh0NkWO1Uh0OkWO1Th01kWOOUh01lWOMTh04m11O1O1O1WVOHeg0:ZXOFeg0;ZXOFeg0;[XOEdg0<\\\\XODdg0<\\\\XODdg0<R201O1N2O3M1O1O1O001O000O2O00000000d_30Z`L4L5K3N3N1N2iTO^Ojj0b0UUO@jj0?>O1O100O1O1N2O2N1O1O1O1O1O10ZgS=\"}}, {\"image_id\": 35, \"category_id\": 2, \"bbox\": [360.807373046875, 106.98210144042969, 219.5718994140625, 58.274017333984375], \"score\": 0.4624817371368408, \"association_id\": 0, \"segmentation\": {\"size\": [900, 928], \"counts\": \"hca:1Rl01O2N2N1001O00000O1000000001O00001O010N100O10001O0O100001O1O1O1O1O3M001O0OZlm06`SRO2UTOHgk0<O001O003M1O1O1N20O00010O00010O000001O000001O01O001O001O001O001fTOWOWk0k00000001O001O001O00001O0O101O0000001O001O00001O00001O1O1O1O1O1O1O1O1O1O2N001O000O3Nklk:\"}}, {\"image_id\": 35, \"category_id\": 2, \"bbox\": [827.560791015625, 79.83265686035156, 19.3665771484375, 17.97406768798828], \"score\": 0.28434130549430847, \"association_id\": 0, \"segmentation\": {\"size\": [900, 928], \"counts\": \"Zjgf08lk000000O100O0000001O00O1N20001O100OioW2\"}}, {\"image_id\": 35, \"category_id\": 2, \"bbox\": [5.253898620605469, 414.70672607421875, 410.6530456542969, 64.47079467773438], \"score\": 0.2790133059024811, \"association_id\": 0, \"segmentation\": {\"size\": [900, 928], \"counts\": \"XZ81Ql03N2N100000000O100000000000000000000000000000O10000000000000000000000000000O10000000000000000000000000000000000000000000000O1000001O000O2O00bg^82[XaG1O2O000O10000000000000000O100000000000000001N2O1LYZS>\"}}, {\"image_id\": 35, \"category_id\": 2, \"bbox\": [4.102137088775635, 658.1065673828125, 160.54469299316406, 111.84124755859375], \"score\": 0.26840975880622864, \"association_id\": 0, \"segmentation\": {\"size\": [900, 928], \"counts\": 
\"Ph77lk02O000O2O000O10000000000O10O1000O1000000O100000000000001O0000000O100000O1000O100000000000O1000O100000000001O0O1000O100000000000000O100000000000O10001O000000000O1000000000000O10000000O100000O10O100O1O1O1O10000O100O10O10000000000O1000000O1000000000001O001O0O101O0000001O00001O1N2O001N2M^VZe0\"}}, {\"image_id\": 35, \"category_id\": 1, \"bbox\": [0.0, 240.2299041748047, 30.027114868164062, 38.85795593261719], \"score\": 0.22688473761081696, \"association_id\": 0, \"segmentation\": {\"size\": [900, 928], \"counts\": \"j71nk0?E1N4M2O1O0001O0O1000001O000000000O2O0O2O0N3O001M200O2O1MaPeh0\"}}, {\"image_id\": 35, \"category_id\": 2, \"bbox\": [5.460431098937988, 512.5431518554688, 161.5483856201172, 32.92071533203125], \"score\": 0.20121216773986816, \"association_id\": 0, \"segmentation\": {\"size\": [900, 928], \"counts\": \"ce64Pl01N100O101O0000000000000000000000000000000000000000000000000000000000000000000000O10000000000000000000000000001O00000000000000000000001O000000000000000000O1000000000000000001O0000000000000000000000000000000000000000000000000000000000000001O00000000001O001N1000000000001O0000Re_e0\"}}, {\"image_id\": 35, \"category_id\": 2, \"bbox\": [15.482156753540039, 650.4852294921875, 199.9994354248047, 24.5203857421875], \"score\": 0.17204661667346954, \"association_id\": 0, \"segmentation\": {\"size\": [900, 928], \"counts\": \"[ZW11Ql04M1O1O10000N20000000000000O1O10000000001O00000O101O000O2O000000000000000O10001O00000000000000Ylj10gSUN0O10000000000000000000000000000000O10O10000000000O100000000O1000O1000000O1000cRPd0\"}}, {\"image_id\": 35, \"category_id\": 2, \"bbox\": [149.58631896972656, 439.18609619140625, 345.8563232421875, 60.285125732421875], \"score\": 0.11484146863222122, \"association_id\": 0, \"segmentation\": {\"size\": [900, 928], \"counts\": 
\"PRa:4ok02N1O2O000O1000000O10000O10000O01000O10000000000000000000O100000000000000O010000000O10O1000000000000000000O100000000000000000000O2O000000000000000000000000000000000000O100O1O2N1O1O1O10001KSTO0nk00STOOmk0040R^X<\"}}, {\"image_id\": 35, \"category_id\": 2, \"bbox\": [58.8206901550293, 433.138671875, 435.3207702636719, 100.39764404296875], \"score\": 0.09074690192937851, \"association_id\": 0, \"segmentation\": {\"size\": [900, 928], \"counts\": \"id]51Sl0001N2O00001N1000000000000O1000000000000O100000000001O00000000001O0000000000001N100000000O2O000O10eg^30ZXaL2O001O000O2O0000000O1000000O1UNKgWO6Wh0KiWO5Vh0MiWO3Wh0MiWO3Wh0MiWO3Vh0NjWO2Vh0NjWO2Vh0NjWO2Vh0NjWO1Wh0OiWO1Vh00jWO0Vh00kWOOUh01kWOOTh02mWOMSh03mWOMSh03nWOLQh05PXOJPh06QXOIog07o1000000000000O10000000O1000000000O1000O10000O1000O10O1000000000000000O10000000000000000000000000000O10000O2O000O101O0O10001O000O2O00kb^<\"}}, {\"image_id\": 35, \"category_id\": 2, \"bbox\": [302.2692565917969, 75.92167663574219, 339.4693298339844, 91.0025634765625], \"score\": 0.07881009578704834, \"association_id\": 0, \"segmentation\": {\"size\": [900, 928], \"counts\": \"\\\\lf:2Ql03M3M2O001N100O101O0O10001N100000001O0001O0000000001O01N2N2O1N2M5K3NV]Q14fbnN4M0O2O0O2O00000O2O001O1N2O001O01O01O001O001O010O000000010O01O001O001O0010O000001O0000001O00001O001O00001O001O1O001O1O001O001O00001O3MUR>NlmA1O10001O000O101O00001O000000000O1000000000O010000000O10000000001O00000000001O001NeiY9\"}}, {\"image_id\": 35, \"category_id\": 2, \"bbox\": [552.3796997070312, 6.939164161682129, 157.8131103515625, 39.960304260253906], \"score\": 0.07226508855819702, \"association_id\": 0, \"segmentation\": {\"size\": [900, 928], \"counts\": \"mg\\\\`04nk02J7O00000000000001O00000000001O001O00000O100O10000000000000O10O100O10O10O10000O1O100O100O100000001N`e91`ZF001O0000000O2O0001O0001O000001O01O000001O001O01O01O00jhc6\"}}, {\"image_id\": 35, \"category_id\": 1, \"bbox\": [0.0, 44.55895233154297, 836.9429321289062, 154.630859375], \"score\": 
0.07159395515918732, \"association_id\": 0, \"segmentation\": {\"size\": [900, 928], \"counts\": \"ZS72Ql02N2N1O2N1O2O0]TOE]k0;7O001N101O001N101N101NgQ70YnH2O0O2O0O2YTOJ^k06`TOL`k05^TOMak03^TONbk02^TOOak01_TOOak01_TOObk090000010O000010O00010O0001O100O0000000000010O00000000eVOCof0=QYODnf0<RYODnf0<QYOFof0:PYOFPg0:PYOGof09QYOGof09QYOGPg08oXOJPg07oXOIRg06nXOJRg06nXOKQg05oXOKRg05mVOCj18Yg05iVOGm15Zg0:fXOFZg0;eXOE\\\\g0:dXOF\\\\g0:dXOF\\\\g0:dXOF\\\\g0;cXOE]g0;cXOE^g0:aXOG_g0:`XOF`g0:`XOF`g0:`XOF`g0:`XOF`g0;_XOEag0;_XOFag09lVO\\\\O\\\\1;hg09kVO]O]1:hg09kVO]O]1:hg09kVO]O]1:ig08kVO]O\\\\1:jg09jVO]O\\\\1:jg09jVO]O\\\\1:kg07]XOIcg07^XOHcg07]XOIcg07]XOIdg06\\\\XOJdg05]XOKdg04\\\\XOLeg03\\\\XOLjg0MWXO2kg0MUXO3lg0LTXO4mg0JTXO6mg0ISXO6oi0O0O10000O2O000O1000^dh70a[WH100O101O0O10001O000O10000000001O0000000000000000000000000001O000000000000000000ZTOI_k07aTOI_k07aTOI_k07aTOI_k07aTOI`k06`TOJ`k06`TOJ`k06`TOJ`k05`TOM_k03aTOM`k02_TOOak01_TOOak01_TOObk00]TO1ck0O]TO1dk0M\\\\TO4ik02N1000001O00001O1O001O1O1O001O1O001O001O001O0000000000000000000000000000000000000001O0000000000000000001O0000000O2O0000001N100O100O2N100O101N1O1O2M2OePP50\\\\ooJ00000O2O00000000000000000000001O0000000000000000000000001O000000000000000000001N100000001O0000001N10000O100O2O00000O1000000000000001O00003M3M1O001O001O0O2O00001O00RiU3\"}}, {\"image_id\": 35, \"category_id\": 1, \"bbox\": [345.11810302734375, 91.25148010253906, 280.9296875, 80.24049377441406], \"score\": 0.05485030263662338, \"association_id\": 0, \"segmentation\": {\"size\": [900, 928], \"counts\": \"WVi;1Vk01mTO0b00[j00SUO2NOd00Zj00RUO7b0J\\\\j0;cUOIXj09cUOL\\\\j05aUOMZj0C^UOb00]O3?]j0?_UOD`j0P1O2O1OO100[ObUOD^j0;dUOC]j0=dUOSON;^j0a0jUO]OVj0a0mUO^OTj0b0mUO\\\\OTj0c0mUO\\\\OTj0d0f00000000O1000O1000MWOkTOi0Wk0100000000000O100000001O0000001O00000001O000000000000000O1001O000000010O000000000000001O0000001O00000001O01O000000001O00000000001N1O1O100O101O0O10000O1O1O101N104K4L`Xb:\"}}, {\"image_id\": 36, \"category_id\": 2, \"bbox\": [0.0, 156.38131713867188, 
122.07432556152344, 42.8570556640625], \"score\": 0.9999996423721313, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"cm0462Yg00eXO2Zg0NdXO5Zg0900O100O10000000O01000O01000O0100O010O10O10O1000O1000O10O1000O100000O10O1000O100O10O10O10000O10O1000O1000O100O10O01O01000O01000O10O01000O10000000O0100000000O10000O1000000O100000000000000O1000000O1000000O10001N1O100O10000O100O100O1O2O3KQ[Te0\"}}, {\"image_id\": 36, \"category_id\": 2, \"bbox\": [251.23512268066406, 499.1651306152344, 732.7410278320312, 212.95321655273438], \"score\": 0.9999994039535522, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"abW74jg04L3N2M3O0O2O0O2O000O100O00100O1O1O1O1O1O1O1O1O1O1N2O0O2O1O1O100O1000000O10O010000O00100O1O1N2N101N2O100O10O010000O10000O010000000O10000O01000O100O100O10O0100O100O10000O01000000000001N100000000000001O00000O1000000000001O000000000O2O001N10001N3N3L4L6Kbgl1AjXSN6J7K8H2O1N101O1N101O001N101O3M5K1O2N2N2N<`ZOmMDM\\\\d0^3L3M1O2N1O1O1O1N2O001O1O00001O00000001O000O101O00001O00001O0O101O00001O00001N2O1O001O1O1N2O1N2N2N2L5L3N5L6I3M4M2M4L4L3N0O2G801N100O2O0O1000000000000O1000000000000O100000000000000O10000000000001O0O10001O0O101O001N101O0O2O001O0O2Z[OQNYc0o1f\\\\OSNYc0n1f\\\\ORNZc0n1e\\\\OTNYc0n1e\\\\OSN[c0m1Y\\\\OoM@4Wd0m1X\\\\OPN@4Xd0m1W\\\\OPN@3Xd0n1X\\\\OnMB3Vd0P2X\\\\OlMB4Vd0P2X\\\\OkMC5Td0Q2e\\\\OoM[c0R2e\\\\OmMZc0U2e\\\\OkM[c0V2d\\\\OiM\\\\c0Y2d\\\\OfM\\\\c0[2c\\\\OeM\\\\c0]2d\\\\ObM[c0`2d\\\\O_M\\\\c0c2P11N1O4M2M2O2M101N100O101N1000O001N101O00001O0001N2O1N1O2O1O1O00100O100O1000O01000000O0100000O010000O010O100O10O1000O10000000O010000000000O100000000O1000000O100O10O10O1O1N2O1O1O1O100O10000O10000O100O100O1O1O100I8K4M3N2N2N2O1N2O1O100O100O101N100O2O0O2N2O2L3M4L9^O`[d0YO]e[O;XNHQ[Og0gd0A_ZOE8R1ld00kZO6Se0]1O0O2N100O2O000O100O10O0100N1O2N1O1ON2O02K5L4L400001O000100O01000O01000O10O01O010O001O001O00O1O2N2N110O10O010000O10O1000O10001O0O10001N10001N1O2M2O2M2N2O2N100O2N100O2O0O10000O2O0WObZOmNae0e0`ZOoN44be0l0lZOnNYe0P1l0O2N1O2N
3L4J7B^_T2\"}}, {\"image_id\": 36, \"category_id\": 1, \"bbox\": [70.40282440185547, 95.91014099121094, 499.89068603515625, 573.6597290039062], \"score\": 0.9999983310699463, \"association_id\": 1, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"nSS2<kf0l0C=G9H7I7I7I7J5K6YNiMk\\\\On2ib0\\\\1I6J7I6H9F:E;H8I7J6J6J5L4M3L4M3N1N3M3M2N3N1N3M3N1N3N2N2M2O2N2N2N1O2M3N1N3M3N2M3N3M2N3M3M3M4L4L4M2M2N3N0O100O100O100O010O100O10O010O10OO2M2G:H8O001O001O1O001O1O00100O1O001O1O1O1N2N2N2M3M2N3M3L4M3N2N2N2O1N2O1O1O1O1N2O001N2N2N2N2N2N2O1N101O1O1N2O1O1O1N2O1O1N2O2M2O1O1N2O1O1N2O1O1N3N1O1O100O1O1O100O100OnHoHXMo6g2SIYMm6d2VI\\\\Mj6a2YI_Mf6^2_IaMa6Z2dIfM[6Y2gIgMY6Y2hIfMX6Y2iIhMU6Y2kIgMU6X2lIiMS6V2nIjMR6U2PJkMo5U2QJlMo5R2RJoMm5P2TJPNl5o1UJRNj5n1VJSNi5l1YJTNf5k1[JVNd5j1\\\\JXNb5g1_JZN`5e1aJ\\\\N^5c1cJ^N\\\\5b1eJ]N[5b1fJ_NY5`1hJaNW5^1jJcNU5]1kJdNT5[1mJfNR5Y1oJhNP5W1PKkNP5S1QKoNm4P1TKQOk4n0VKTOh4k0YKXOd4g0]K[Oa4e0_K]O_4b0bK_O]4`0dKA\\\\4>dKC[4<fKEY4:gKHX48hKIW46jKKV44jKLV44jKMV42iKOW40jK1V4NiK4V4LjK5V4JiK7X4GiK:V4FiK<W4ChK?X4_OhKc0X4\\\\OhKe0Y4YOfKh0[4WOdKk0\\\\4SOdKo0\\\\4POcKR1\\\\4nNdKS1\\\\4kNdKV1]4jNbKV1^4jNbKV1_4jN`KV1`4jN_KW1b4hN^KY1a4hN^KX1c4gN]KY1c4hN[KY1e4gN[KY1f4gNYKY1g4gNYKY1h4gNVKZ1j4fNVKZ1k4fNTKZ1l4gNSKY1m4gNSKZ1m4fNQK[1o4fNPKZ1Q5eNnJ\\\\1R5dNmJ]1T5bNlJ^1T5cNjJ^1W5aNhJ`1Y5_NfJb1Z5^NeJc1\\\\5\\\\NcJe1^5[N`Jf1a5YN^Jh1c5WNSJT2m5kMoIY2R6gMjI\\\\2W6cMgI_2Y6aMfI`2[6b71O001O1O001N2O1O1O001O1O1O1O2N1N2O1O1O1O1N2O0O2O1N101N101N101O0O2O0O2O000O2O1N2O1N2O1O1N3N2N2M3N2M2O2N1N2O1O1O1N2O1O1O2N2M3N2N2N2N2M3N2N1O2M3N2N1N2O2M3M3N2M3M4L6J:F;E8H7I6K4K5K5L3L4M3M9G=C4L5K5K6J3M4L3M3M2N3M2N4L4L3M3M2N2N2XGdAh7g?C1O1O001O1O0010O01O001O001O000000000000000000001O01O000000000000001O001O001O001O001O001O1O1O001O001O1O001O0O2O001O001fMUH]Dk7U7XHZGd07O5_OS1g7i6mKkGbLZ1a7d6YLiG\\\\La1\\\\7b6_LeG[Lf1W7b6dM\\\\I]2a6gM]IY2a6kM]IV2a6mM]IT2a6oM]IQ2a6SN\\\\Io1b6TN\\\\Im1b6VN[Il1b6XN\\\\Ii1a6[N\\\\Ig1a6]N]Id1_6aN^Ia1^6dN_I`1[6fNbI\\\\1Y6iNdI[1U6VO]Im0\\\\6`8H8H8I7I8I;E;D?Ab0^O;E9H9G8I8H5L4K5J6K4L5I7I6K6J7@P`i:
\"}}, {\"image_id\": 36, \"category_id\": 2, \"bbox\": [188.13214111328125, 87.58528137207031, 308.3760681152344, 46.42979431152344], \"score\": 0.9973621964454651, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"ZS`41lg00VXO0jg00VXO0ig01VXO0hg0OXXO50Keg00[XO50Kdg08\\\\XOIcg0:000001N2O1O0O100O10001N1000000O2O00000000010O000000000000000000000O1000000001N1000000O101O0O10001O0MbXOD^g0<301L3O2M2LWXO0gg<3SPD1N3N1N5L3M2M2O001O1O0O101O00001N1000001O000000000O2O00000000001O0O101O1O1O1O001O0000001O0O10001O000000000000000000000000000000000000O10000O100O10000O1000000O1O100O10000O100000000000000000000000000000000000000000000000000000000000000000000O10000000000000000000000O1000000000000O1000000000000O1000000000000O1000001O0O10000O10001O0000000O10001O0O100000001O0O10000O1000001N10000000000O101O0O10001O000O2O0O2O3Jolj<\"}}, {\"image_id\": 36, \"category_id\": 2, \"bbox\": [244.25123596191406, 517.8212280273438, 251.2405242919922, 113.41754150390625], \"score\": 0.94483882188797, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"PSd64lg01N10000O1O1O1O1N2O10000O00100O1O1O0010000O1O1O100O1O100O100O1O1O1N2N101O100O1O1O1M3K5N2O1O100O100O010O10O010000O10O10000000O10000000O10O01O1O010O0010000000O0100000000O010O010O0010000O02O000O100O1O2O1O1N10000O100O1O10000O010O100N2O1N110O10O010O01O10O0100000O001O100O1O010O1O1O10O0100J501O1K5O1O2N2L4L4M3SOQYO5O2]g0Lb_h=\"}}, {\"image_id\": 36, \"category_id\": 2, \"bbox\": [698.1697387695312, 502.3824462890625, 238.63800048828125, 202.4383544921875], \"score\": 0.754215657711029, \"association_id\": 1, \"segmentation\": {\"size\": [768, 1024], \"counts\": 
\"aQ\\\\`0o1ce0l0_O6K4K;T\\\\OdLcb0\\\\3^]OeL`b0W3e]OlLXb0R3l]OlLSb0U3m]OkLSb0U3m]OlLQb0T3Q^OkLoa0U3Q^OkLoa0U3Q^OkLna0V3R^OjLna0U3U^OhLla0X3V^OfLja0Y3_100O100O100O101N100O101N100O1O010O001O1N101000O01000O1000000O100O101N100O1O10000O10001O00000O100000000000001O000000O10000O1000000O10O10O10O010O0001O1O2M2M3N2N2N2N3N2M4K6J7Cd0ROf0^OoXOKWg00g\\\\;0ecD2N3M2N2N1M4M3N1O2O1N100O1O0N3M2O2O1O1N3N1L4@?YOh0I7M3000001O0O1I6N2O2O001O2N1N2M4I601N1010O01O0N1O1010O10001O100O1O1O1N2O1O1O1O2N1O1N2M4D[[OYMgd0c2=M4N1O1N3M2O1O2N1O100010O1O1O2N100O1O1OO1000000000001N11O01O10O0100O10O01N1O2M3L3^OXZOhNme0Q1^ZOgNge0S1f0L4M4K6FX_Q2\"}}, {\"image_id\": 36, \"category_id\": 2, \"bbox\": [145.9436798095703, 507.1267395019531, 78.68814086914062, 165.74166870117188], \"score\": 0.4835440516471863, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"mWf36ig04M2N4L4L3M3M6J7I5K2N3M5K8I7H4L2N3M20021L?@5KbMW[Oj1dd0VN][OA5j1]d0nNa[OZ1[d0dNe[Oa1Xd0_Nf[Oe1Xd0\\\\Ne[Og1Zd0R1O01dLT\\\\O`2lc0ZMX\\\\Oh2`d001O001OO3NO010PN_[O?bd0ZOm[O=Sd0[OX\\\\Oc0de0O1N3N1O2M5K3N2M2O7GckQc0\"}}, {\"image_id\": 36, \"category_id\": 2, \"bbox\": [590.5850830078125, 223.5828399658203, 133.65460205078125, 18.880584716796875], \"score\": 0.14127175509929657, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"T_k=:eg0101O000000000001O000O10001O0000000000010O00000000000000001O00000000000O100000O10000O10000O1000000O1000000O1000000O10000000000000000000000001O00000000O100001O0000000O1000000000000000001O0000000O100000001O00000O1000000O100O10000000000LVXO1gh\\\\7\"}}, {\"image_id\": 36, \"category_id\": 1, \"bbox\": [7.550235748291016, 17.42833709716797, 168.7841796875, 155.78497314453125], \"score\": 0.1270575076341629, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": 
\"SR6k2jd0=D;00O01`Le[OY3ad010O001O1O1O1O1O001O00001O00000000001O0000O100O10000O100N3N1O1N3N1000000000O2OO10O0100O001N2O000000010O0N3N2N2O001O1O0O2O0000001N101N100010O10O01000000O11O00000000001O1O001O0O2O00001O1O1O001O0000000000000000O1000000O100O1O2O0\\\\OV\\\\OQMkc0m2X\\\\ORMhc0l2[\\\\OkLKNjc0S3`\\\\OmLHOhc0Q3d\\\\OmLG0fc0R3e\\\\OlLF2ec0R3o\\\\OmLQc0R3P]OnLPc0R3P]OnLPc0Q3R]OnLnb0R3P1O10001O0000001N2M2N3N1K6B=01O1O11O1NN3M3N1O2N2M3N2O010O010GjYOfNVf0Y1lYOfNUf0X1;L8G7K4L4M5Afflc0\"}}, {\"image_id\": 36, \"category_id\": 2, \"bbox\": [205.97354125976562, 514.80078125, 347.4640808105469, 177.445556640625], \"score\": 0.05215487256646156, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"bbT73lg04M1N100O101N100O100O2N1N2N2N201N1O100O100O100O1O1M3M3N2O1O1O1O10O01O100O10000O10O010000O10O10O100O01000O10O1000O1000O100000O010000O10O10O10O2O0O100O101N10000O101N10O01O1O1O1O1O00100O1O1000O00100O010O100O010O1O00100000000000000O2O000000000O2O1N2N3]OhYOYO\\\\f0=mYO@Vf07kXW2KUhhMa0^O>Cm0gYObNXd0h2K5K3M2O1O1O001O00100O1O1O010O1O0010N3L3N3\\\\Lk[OV3od0\\\\OffP;\"}}, {\"image_id\": 37, \"category_id\": 1, \"bbox\": [246.28208923339844, 82.64505004882812, 379.73858642578125, 1065.2269287109375], \"score\": 0.9999984502792358, \"association_id\": 1, \"segmentation\": {\"size\": [1272, 954], \"counts\": 
\"[g^:j0lV1`0@3dWOROj9R1jEDh9?jE;g9HnEi0i9ZOQFm0k9VOPFP1l9SOPFR1l9QOPFT1m9nNPFU1m9mNPFW1n9iNQFY1n9fNQF^1l9cNRF`1m9`NPFd1n9\\\\NPFh1n9XNoEn1n9RNPFR2m9nMSFU2j9lMTFX2j9hMUF[2h9fMXF]2e9cMZF`2d9aM[Fa2c9_M]Fc2`9]MaFe2\\\\9\\\\McFg2Z9YMfFj2U9XMiFm2R9VMiFQ3P9SMmFS3o8mLmF[3P9eLkFc3S9ZLjFn3S9QLlFT4R9kKmFX4R9hKlF\\\\4S9dKkF_4S9aKkFb4S9`KkFb4T9_KhFe4U9^KgFe4X9]K]Fn4_9VKkE_5Q:fJVER6f:SJdDb6X;cI\\\\Dh6b;[IUDk6j;YInCm6Q<VIkCk6V<XIgCh6X<\\\\IeCe6Z<_IbC`6^<dI_C\\\\6`<iI\\\\CV6c<oIZCo5g<VJUCh5k<^JPC_5Q=gJkBU5W=RKdBj4Z=`KcB\\\\4U=TLgBh3S=eLkBV3j<]MSC_2`<TN^Cj1Z<cNcCY1W<TOfCh0R<HjC4o;:nCAl;m0PDQOf;_1VD_N\\\\;V2_DjMR;i2jDVMP;V3jDkLQ;_3iDcLS;e3gD\\\\LV;l3dDVLY;Q4cDoKZ;Y4`DhK\\\\;a4_D_K[;l4_DUK[;V5_DjJ\\\\;b5\\\\D_J^;n5ZDTJe;S6RDPJm;X6eCoIZ<Z6jBZJV=S`001O0010O01O0000O1O000O0O201O001O2O1N3N1N3L4K5J6J6J6J5L5K5K5M3M3N1O2N2N2N1O2N2N2O0O2N2N2N2O10000O10001N1000000O101O0O11O00000O101O000000001O00000010O100O010O100O2O0O101N100O1O1O1O100O1O1O1O1O10O01O1O1O1O001O1O1O001O1O001O001O001O001O00001O001O001O001O010O2N1O2N2N2N2N3M3M2N2N101N1O1O001O1O010O001O001O010O001O100O1O2fEg]OROYb0j0n]OROTb0j0R^OROoa0j0Y^OROha0j0_^OQOda0i0d^OSO]a0j0j^OPOYa0k0o^OPOSa0k0Q_OSOQa0j0R_OUOn`0m0m^OVOTa0k0g^OXOYa0k0\\\\^O^Oca0e0P^OFoa0=c]OO\\\\b05S]O:kb0[:01N2N1O1N101O01O000O2O001O1O1O1N3N2N1O2N2O1O2N2N2O3M5Kn0o[Og\\\\OU<YObHhe0X6[[ObIld0P6`[OkIed0i5f[OSJ]d0c5m[OYJVd0_5S\\\\O]Joc0]5W\\\\OaJlc0W5[\\\\OhJhc0o4_\\\\OoJec0h4b\\\\OWKgc0iMXYOo3Y3VNXd0[LaYOn4^2fN`f0i0hYOVO`f0;hYOE\\\\f00kYONZf0GlYO:Wf0]OnYOc0Sf0WORZOi0ne0SOUZOn0ke0mNYZOT1he0fN\\\\ZO[1de0aN_ZO_1de0ZN`ZOg1ae0TNbZOn1`e0jMdZOW2_e0aMeZOa2ae0SMdZOm2ae0jLbZOW3be0`LcZOa3^e0XLgZOh3[e0QLkZOn3Ue0mKP[OR4]l00O10N2O1O2N21O1O1O001N1O2M2N3M1N2N2O1N101N2O1N2O1O1O001OM6A`0]Od0ZOg0ZNh1hNVhg<\"}}, {\"image_id\": 37, \"category_id\": 2, \"bbox\": [93.22038269042969, 942.5843505859375, 522.1148681640625, 195.9234619140625], \"score\": 0.9999881982803345, \"association_id\": 1, \"segmentation\": {\"size\": [1272, 954], \"counts\": 
\"gkZ42ZW1=N1O101N1O1O101N100O1O100O1O100iNYO\\\\kNh0dT1]OVkNe0hT1^OUkNc0kT1_OPkNd0PU1^OhjNh0WU1[OejNg0[U1ZOcjNg0]U1[O_jNg0aU1h000O10000000O10000O100O100O1O1N2O1N2O100O100O1000000O1000000O10000O10000O1000000O10000001O00000000001O000000000000001O0000001O001O001O1O3M3WkNUN_S1o1QlNeNgS1`1RlNeNkS1]1QlNfNnS1\\\\1okNeNQT1b2O001O001O00001O001O001O1O2N1O2N1O1O001O00001O00001O00001O001O1O1O1O2N1O2N1O001O1O0000O100O1O1O2M2O1M3K5K5L4N2N2N2O2N1O100O100O2N1O1O2G8M4lMVkNl0WU1QOljN`0bU1^ObjN:eU1ZOgjN`0_U1^OdjN>_U1_OgjN;[U1DjjN7^V1M4Lfef11WZYN;_O`0C;F7K5M2N2N2O0O2O001O0000O1000000O101O0O10000O1000001O0O10001O00001O001O00001O0000001N1000001O0000001O001O001O1O001O1O001O0010O01O001O001O001O001O001O1O1O10O01O00001O000010O0001O01O01O001O100O001O000010O0001O0001O01O00100O0010O010O010O00010O000010O001O010O1O010O001000O010O01O010O0010O0001O0010O01O0010O00010O010O0010O000010O000000010O0000000010O00000001O0001O00010O00010O0010O000010O00000001O01O00gjN]O]S1c0clN]O]S1c0clN^O\\\\S1b0clN_O]S1a0clN_O^S1a0alN_O_S1a0`lN@`S1`0`lN@`S1`0`lN@`S1`0`lNA`S1>_lNCaS1=_lNCaS1>^lNBbS1>]lNCdS1<\\\\lNDdS1<\\\\lNDdS1<\\\\lNDeS1<YlNEgS1;YlNEgS1;YlNEhS1:XlNFhS1:XlNFhS1;WlNEiS1;WlNEiS1;VlNFkS19UlNGkS1:TlNFlS1:TlNFlS1:TlNFlS1:TlNFmS19SlNGmS1:QlNGoS19QlNGoS19QlNGoS19QlNGoS19QlNGPT18PlNHPT18PlNHPT18okNIQT17okNIQT17okNIRT16nkNJRT16nkNJRT16nkNJRT16mkNKTT14lkNLTT15kkNKVT14jkNLVT14jkNLWT13hkNNYT11gkNOYT11gkNOZT10fkN0ZT10fkN0[T1OdkN1^T1NckN1]T1NdkN2]T1MckN3^T1LbkN4_T1JbkN6`T1H`kN8aT1G`kN8aT1F`kN:oU100000001O0000001N1000Qn]=\"}}, {\"image_id\": 37, \"category_id\": 2, \"bbox\": [438.0426330566406, 1108.881591796875, 394.5611877441406, 51.775634765625], \"score\": 0.8926070928573608, \"association_id\": 0, \"segmentation\": {\"size\": [1272, 954], \"counts\": 
\"ShVc01fW14M1N101N102M1000001O0O10001O0O2O0O2O0000000O10001O00000O1000001O0O1000001O0O100000001N100000000O10000000000O2O00000000000O2O000000000O100000000O2O000000000O1000000000000O10001O000000000O1000000000000000000000000000000000000000000000000000000000O1000000000000000001O00O01000000000O100000000O1000O100000001N100000000O1000000000000O10000000000000O100000O100000000O1000000O1000O1000000000O1000O1000000O010000000O10O100000O100000O10000000000O1000000000000000O10O100000000000000000000O10000000000O1000000000000O100000000000000000000O100000000000000000000000000000001O000000000000000000001O000001O0000000010O000000000000001O000000100O00001O00001O0O2O001O1O1O1O1O001O003L1O2FdhN5k\\\\k4\"}}, {\"image_id\": 37, \"category_id\": 2, \"bbox\": [104.4284896850586, 888.14501953125, 277.0462341308594, 248.1239013671875], \"score\": 0.2519327998161316, \"association_id\": 0, \"segmentation\": {\"size\": [1272, 954], \"counts\": \"eaY45_W16J6hjN@QS1f0dlNDWS1b0clNBZS1b0alNB]S1a0_lNB_S1`0^lNC_S1a0^lNAaS1`0]lNBaS1`0\\\\lNBcS1`0ZlNBfS1`0VlNBiS1a0RlNCkS1d0mkN_ORT1T2O1O1N2001O0O2N2O0O2O0O2O001N2O1N2N2M3M2M4N1N3N1O2N2O0O2N2O1N2O0O2O0O11O00000001O0000000001O0O10001O0000000O2O0O1O1N3M2N3L3N3L4N1N3N1O1O2N100O2N100100O1000O2O001_lNjL\\\\R1W3amNnL]R1Q3amNRM^R1o2`mNTM_R1l2^mNWMbR1i2\\\\mNYMdR1g2XmN^MgR1h2olN]MPS1h3N00000000O010O10000000000000O010O00100O1O101N2N2O0O2O001O000O10000O2NO1O2N1N2N2M3N3M2O1O1O2M3M3M3K5E:L5L3N3N1O2N1O1N3L3N2O2M2O2O0O3K4L3WNTkNd0oT1ZOVkN`0mT1]OYkN>iT1@YkN>hT1A[kN;hT1B[kN<gT1B[kN<fT1B^kN;dT1@akN<RV1K7GRfa1o0lX^N;J4L3N2N1O100O1O100O00100O10000000000000O10000000000O1000000O1000000O101O0O101O000O2O001N3M2N4KfiVf0\"}}, {\"image_id\": 38, \"category_id\": 1, \"bbox\": [704.5761108398438, 328.59527587890625, 55.10589599609375, 123.77825927734375], \"score\": 1.0, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": 
\"\\\\lg><nd0;E6K4L3M1O2N1O1O2N3M1O2N2N2N5_\\\\OXN11kb0`2H3N0O1O11OO1O01O01O011e]O^Mha0c2T^O`Mla0o21O1O1O1O2N1_NT^OAma0:Z^OCga08b^OC_a09h^OCXa0:T_O\\\\Om`0c0X_OVOk`0j0l1O1N2M4M2M3M4Jj\\\\b5\"}}, {\"image_id\": 38, \"category_id\": 1, \"bbox\": [894.4765625, 201.01612854003906, 104.89447021484375, 312.49609375], \"score\": 1.0, \"association_id\": 4, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"hmgb0Z1jc0>A=G;Ga0^O3MB>L5020O101N100O1N103n]OgMl`0c3h@[L]<U4oBRLn<U4gBRLW=R4bBQL]=e4gAcKX>h5O2M2J6ZOf0N2O1O1O1O100O1N20O01O010OO2jNRCmIP=k5fCbI_<Y6]1H7_Ob0G9L4N2M3N2O100O1O12O3O0O0O2N5K3M1N3L8I5Ia0_O=B6J5L2N2O1N101O00jIaB`4^=]KmB]4Q=^K]CZ4b<aKnDU3Q;gLaEl2^:SMnEb2Q:_MkFf1S9ZN^GW1a8iNdGS1Z8lNmGP1o7oNdHc0X7^OUI8i6G]I6c6GcI4^6KkIKW65PJ]OW6c0U72N3M22M3M3M3N_Ta0\"}}, {\"image_id\": 38, \"category_id\": 1, \"bbox\": [347.07159423828125, 346.7978820800781, 49.897247314453125, 117.02166748046875], \"score\": 0.9999998807907104, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"oiY7>jd07J2O2L4L4E;M4L6Z\\\\OdN8L\\\\b0m1`]OYN^b0f1c]O\\\\N[b0\\\\2N1O1O1O2N2P^ORM_a0R3W^OWMfa0U2d^OVN\\\\a0e1k^OXNWa0d1m^OZNUa0d1l^O[N_a0Y1d^OeN_a0X1c^OeN`a0Y1a^OdNaa0\\\\1a^OaNaa0^1b^O^Naa0a1U103N1O02N0009G5J3M;D2O1000O4L9IN1N3M3LlhQ=\"}}, {\"image_id\": 38, \"category_id\": 2, \"bbox\": [831.1922607421875, 474.6824035644531, 133.697021484375, 24.35260009765625], \"score\": 0.9999997615814209, \"association_id\": 4, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"Y]]a01Ve06M2O0O101O0O101O00000000001O00000000000000000000000000O10O100000001O0000000O1000000O100N3O0O10000000000000000000000000001O01O00001O00002N00000000000O3N1O3FS[O0md0MW[O1Se0OeZa03Xe^O1O1O1N3N00010O0000000001O0000001O001N1000\\\\aW1\"}}, {\"image_id\": 38, \"category_id\": 1, \"bbox\": [276.5770568847656, 349.0798034667969, 54.55657958984375, 122.31942749023438], \"score\": 0.9999995231628418, \"association_id\": 2, \"segmentation\": {\"size\": [683, 1024], \"counts\": 
\"STi5Q1Wd07K3M3M3L2O101O1O2M5K3K6C<K5P^O]MQa0j2e^O]MYa0f2b^O]M^a0d2\\\\^OaMda0S30000000ZO^^ObMca0R2l^OjMUa0T2o^OhMSa0V2R_OeMQa0Z2Q1N3N2N4L4L2O1O0101M100O3N3L100O2OXOW]OnNib0a0g]OBVb0MZ]OBe0d0Rb0A`^O?Vc0000001O2M2M3LYi]>\"}}, {\"image_id\": 38, \"category_id\": 1, \"bbox\": [504.7898864746094, 330.48052978515625, 44.276885986328125, 125.99114990234375], \"score\": 0.9999994039535522, \"association_id\": 3, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"Uka:<nd04L1O6][O\\\\OTd0Q1L9G4L5K4K4L5K4L4J6K5L5K7m]OXMXa0^30001O000000001O1002M1O3N2M2N002N2N1O3hNd^O[N]a0\\\\1P_O^NSa0]1R_O`NPa0[1d1L5GaTm9\"}}, {\"image_id\": 38, \"category_id\": 1, \"bbox\": [428.6502685546875, 328.7325134277344, 41.664306640625, 133.03475952148438], \"score\": 0.9999990463256836, \"association_id\": 1, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"RUo8<md05K4L4K4K4J8H9H9K4M7I5K3M2N2N2N2N2N3O2N02O01T^OXMPa0h2k^O[MVa0e2f^O]M\\\\a0Y30O1O001O1O1N1]N[^O@fa0:k^OZOVa0b0U_OVOn`0c0Z_OXOi`0c0]_OXOi`0b0]_OWO_Pc;\"}}, {\"image_id\": 38, \"category_id\": 1, \"bbox\": [634.9630126953125, 330.9215393066406, 46.1007080078125, 126.73263549804688], \"score\": 0.9999986886978149, \"association_id\": 7, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"el]=9kd0>TOi0K4L`0A4L2N5K4L2N2N2N1O2N3M3M2X^OTMn`0e30O5K3M3N2M2N3L4YNZ^OCja07\\\\^OEna0M\\\\^OMha0M^^ONcc0K4M4IneW7\"}}, {\"image_id\": 38, \"category_id\": 1, \"bbox\": [31.083650588989258, 351.59906005859375, 56.21002960205078, 125.53964233398438], \"score\": 0.9999986886978149, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"Yge07Qe06K3K4O1N2N20O00Y\\\\OZOib0h0Q]ODgb0>V]OGcb0c0U]ODbb0g1G9P^O]MRa0g2j^O]MSa0g2d^O`MZa0Z3N10000000mNn^OlMSa0P2R_OoMo`0k1Y_OQNi`0k1\\\\_ORNf`0i1__OUNc`0f1c_OWNa`0e1a_OYNa`0e1b_OUNe`0h1a1N2N200O10O2N2N3M101N1010O00O11]NW]Of0fc06KM2N2N2N5J3N2L5Kk``c0\"}}, {\"image_id\": 38, \"category_id\": 1, \"bbox\": [201.15853881835938, 354.37640380859375, 46.125152587890625, 119.246826171875], 
\"score\": 0.9999979734420776, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"S^X49od07I6K5J8d[OROoc0Z1G9G8J6K4N4K5L4L3M3n]OXM[a0^3N000001O001gN_^OfNda0V1a^OeNca0W1h^O^N\\\\a0_1f^O^N^a0`1d^O[N`a0d1T1000001OO2O1O1020M5M7Ga0B72KH3N1NZjU`0\"}}, {\"image_id\": 38, \"category_id\": 1, \"bbox\": [588.0947875976562, 335.8383483886719, 47.2264404296875, 119.18435668945312], \"score\": 0.9999978542327881, \"association_id\": 5, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"aVY<8ld0?A?E=F1N3N2N2N4L<D4L1O2O1N10101OaM\\\\]OU2Pc0NM5L4L`0AO00010O000fNR^OWOoa0d0X^OYOha0d0_^OWOca0f0c^OVO^a0e0i^OXOXa0d0l^O[OTa0c0P_O[OPa0`0W_O]Ok`0>]_O[Of`0`0U2N4K3N3NR^V8\"}}, {\"image_id\": 38, \"category_id\": 1, \"bbox\": [756.2059936523438, 325.89764404296875, 47.89178466796875, 122.73040771484375], \"score\": 0.9999955892562866, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"_Xi??hd05L4L4M001J8G8L4O7J3M002N2N1O2N3M2N3M3M5K3M3O1O01O4K2N100O00O10001O2O9EgNn]O[OQb0a0U^O]Ola0?X^O_Oja0=Y^OBia08\\\\^OGfa04_^OJca0Oc^O0`a0Fg^O:Vc0O3M2MUZb4\"}}, {\"image_id\": 38, \"category_id\": 1, \"bbox\": [92.81854248046875, 328.7689514160156, 47.99351501464844, 146.85357666015625], \"score\": 0.9999880790710449, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"_\\\\n1h0^d0:H6J5L4F;A?K6J7J5L4K3N4L3M5g^OjLS`0\\\\3a_OkL^`0Q4001O1O2lNU_OdMl`0W2]_OdMd`0V2c_OhM_`0T2f_OiM\\\\`0T2g_OjM[`0T2g_OiM]`0V2c_OhM_`0Z2^_OeMd`0\\\\2Y_OdMi`0^2R_OcMo`0_2m^OcMRa0[32N2N001O1O1SNi^ODXa08S_O_OPa0<f_OQO\\\\`0j0j_ORO]`0;V@[OS`0<RP`b0\"}}, {\"image_id\": 38, \"category_id\": 1, \"bbox\": [138.8375701904297, 339.0818786621094, 61.576812744140625, 139.1063232421875], \"score\": 0.999972939491272, \"association_id\": 6, \"segmentation\": {\"size\": [683, 1024], \"counts\": 
\"i]P39md09F7G7N101N1T\\\\OClb0<T]OJgb04]]ON_b04a]ONYb07d]OMTb0j1Q^OXMRa0o2j^OUMQa0Q3h^OTMUa0c3O1N101O00O1jNU_OlMl`0n1`_OjMa`0S2c_OkM_`0S2c_OlM_`0Q2c_OmM``0o1d_OnM_`0n1e_OnM_`0o1b1M1O1O1N3N1O1O010O1O2O00011N003OO30M2N6K2ON001OO0=FO:D4KnUUa0\"}}, {\"image_id\": 38, \"category_id\": 2, \"bbox\": [491.84539794921875, 451.0480041503906, 46.30523681640625, 8.662750244140625], \"score\": 0.9998759031295776, \"association_id\": 3, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"PnY:2Ye01N100000000000001O0O101O0000001O00000000000O10O10000000001O001O0O2O000O101O1OV\\\\V:\"}}, {\"image_id\": 38, \"category_id\": 2, \"bbox\": [423.572021484375, 454.2530517578125, 29.58758544921875, 7.146697998046875], \"score\": 0.9994543194770813, \"association_id\": 1, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"fbl83We0101O000000000O1000000000000000001O0000000001O1O3MYRm;\"}}, {\"image_id\": 38, \"category_id\": 2, \"bbox\": [566.9341430664062, 448.59600830078125, 89.3206787109375, 7.817138671875], \"score\": 0.991367757320404, \"association_id\": 5, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"jnm;2Ye00O100O100000000000000000000000000O10000000000000000000000000000001O0OmZ?OUe@00001N10000000000000000000000000000001O0000001N]Pe7\"}}, {\"image_id\": 38, \"category_id\": 2, \"bbox\": [292.6968078613281, 459.232666015625, 36.00390625, 6.418243408203125], \"score\": 0.9551923274993896, \"association_id\": 2, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"mV[61Ye03M1000000000O10000000000000001O0Oohc>\"}}, {\"image_id\": 38, \"category_id\": 2, \"bbox\": [650.1898193359375, 445.0792541503906, 150.8837890625, 10.35443115234375], \"score\": 0.954377293586731, \"association_id\": 7, \"segmentation\": {\"size\": [683, 1024], \"counts\": 
\"VZc=2Ye00XPb00fo]O3O000000000000000000000000O101O00000000000000000000000000000001O000O100000001O000000000000000001O2N1O00^e80`ZG3N10000000000O11O0000000000000001O01O00000000000O10000000000000000000000000000O1000001O000O10000OQof4\"}}, {\"image_id\": 38, \"category_id\": 2, \"bbox\": [159.43682861328125, 464.3799743652344, 33.43028259277344, 6.2900390625], \"score\": 0.5444602966308594, \"association_id\": 6, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"Qka32We020000O010000000000000000000000O1000001O00__Za0\"}}, {\"image_id\": 38, \"category_id\": 1, \"bbox\": [668.6369018554688, 328.2937927246094, 119.12957763671875, 125.30789184570312], \"score\": 0.25092679262161255, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"jah>4Ve03M2N2N2N101O2N3M1OO2O0O2O1N2O1N7I4LoZO1O0dZ11ZZO4N1O1EK[[O:dd07h\\\\O_Oea0<n\\\\OE\\\\1=iNHQb05k]OE\\\\1?dNJTb01`_Oc0``0\\\\Ob_Od0^`0[Oc_Od0]`0[Oh_Oa0\\\\`0[Oe_Od0^`0YOc_Of0^`0YOc_Of0^`0XOd_Og0cb0M6J3L4M2Kfof5\"}}, {\"image_id\": 38, \"category_id\": 1, \"bbox\": [629.7251586914062, 326.0874328613281, 108.63397216796875, 128.1832275390625], \"score\": 0.2165505588054657, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"]V_=6Te04K:H4K3N000000O10000000001O1O2OX^O\\\\Om>N\\\\^O8g2MX?Lh@5Ub03M2000000^MFS@:`b0O2N2N4Kfle00[SZO2M2N2O3L2ON2N2O1O2O1N1OkhY6\"}}, {\"image_id\": 38, \"category_id\": 2, \"bbox\": [569.1387939453125, 450.14495849609375, 52.4366455078125, 6.74285888671875], \"score\": 0.20439781248569489, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"Udn;2Xe01000000O1000000000000000000O10000000000000000000000000000000000001OlPe8\"}}, {\"image_id\": 38, \"category_id\": 2, \"bbox\": [348.0415344238281, 456.6853332519531, 40.62335205078125, 8.57177734375], \"score\": 0.14455094933509827, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"PbZ73Xe0001O0O1000000000001O000002MTP8NnoG1000000000000001OYhW=\"}}, 
{\"image_id\": 38, \"category_id\": 2, \"bbox\": [148.65444946289062, 346.2144470214844, 73.69949340820312, 128.89804077148438], \"score\": 0.1248013824224472, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"PP[e0\"}}, {\"image_id\": 38, \"category_id\": 2, \"bbox\": [633.8216552734375, 447.3450927734375, 88.54534912109375, 8.120849609375], \"score\": 0.11713068932294846, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"PZW=3Xe01O000000000000000000000000000000000000000000OmZ=NTeB3N10000000001OO100000000000000O1000000000000000000O101O000001O00000000000000000000000joX6\"}}, {\"image_id\": 39, \"category_id\": 1, \"bbox\": [340.5099792480469, 492.6719970703125, 453.1517028808594, 438.55474853515625], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [1365, 1024], \"counts\": \"h`P?5_Z18H6J2O1N2N2N1O2N2N2M3M3M3M3N2N3M2O1N2N2O1O001N101O1O001N101O001N101O0O2O001N1O2O0O2N2N2N101VmNdMXm0^2eROoMol0R2oRORNol0o1oROTNnl0m1QSOVNml0k1QSOWNnl0i1QSOZNll0g1TSOZNkl0f1TSO[Nll0e1TSO[Nll0e1SSO]Nll0c1SSO^Nml0b1RSO_Nnl0a1QSOaNml0_1SSObNml0^1RSOdNml0\\\\1RSOeNnl0[1QSOfNol0Z1PSOhNol0X1PSOiNPm0V1PSOlNol0T1PSOmNPm0S1PSOnNnl0S1QSOnNol0R1QSOmNPm0T1oROkNQm0V1oROiNRm0X1mROgNSm0[1lROdNTm0]1kROcNVm0^1iRO`NXm0b1gRO]NYm0e1fROZN[m0f1dROYN]m0i1bROVN^m0l1`ROSNam0o1]ROQNcm0Q2ZROnMhm0T2TROlMom0U2nQOiMUn0Y2jQOdMXn0^2nQOWMUn0k2i42N2M3]lNYLUP1i3hoN[LTP1h3joNZLSP1i3joNYLPP1n3moNULio0U4TPOmKho0X4UPOjKjo0X4TPOjKjo0X4SPOjKlo0X4RPOiKmo0Y4QPOhKno0Z4PPOgKoo0[4ooNfKPP1\\\\4noNeKPP1^4noNbKRP1a4koN`KSP1d4joN]KTP1g4ioNZKTP1l4hoNUKVP1o4foNSKXP1T5boNmJ\\\\P1Z5]oNhJbP1\\\\5YoNeJfP1`5UoNbJjP1b5RoN_JmP1c5PoN_JoP1d5nnN]JQQ1e5mnN\\\\JRQ1g5knNZJTQ1h5inN[JVQ1g5gnNZJXQ1j5cnNYJ\\\\Q1`7O2O1O0O2TQOiFXl0X9ZSOVGel0k8YSOVGfl0l8WSOWGhl0j8VSOXGil0i8USOXGjl0j8TSOXGkl0i8TSOWGll0i8SSOXGml0i8QSOYGnl0h8QSOXGol0i8oROXGQm0i8mROXGRm0j8lROWGTm0j8jROXGUm0j8hROWGXm0j8fROWGZm0j8dROWG\\\\m0k8`ROWG`m0j8^ROWGbm0l8YROWGem0k8YROVGgm0l8VROUGjm0l8TROUGlm0j8VROUGim0j8ZROUGfm0i8^
ROUGam0j8bROUG]m0j8gROTGXm0k8lROSGSm0m8oRORGPm0m8SSORG\\\\l0]9gSOaFUl0b9nSO]Fmk0f9UTOZFgk0h9\\\\TOWFck0i9_TOVF`k0i9bTOWF]k0h9fTOWFYk0i9hTOWFXk0h9jTOWFUk0h9nTOVFSk0h9oTOXFQk0f9RUOXFnj0g9VUOVFkj0e9[UOZFej0_9dUO^F]j0Q9[VOgFei0e8VWOTGkh0i8b4M3N2N101N2O1O1O1O1N2O1N2N2N2M3N2O1N200O1O1O10001N1kIVmN_4kR1XK`mNd4aR1UKimNh4XR1TKomNh4QR1VKVnNd4lQ1XKZnNe4gQ1WK_nNe4cQ1XKanNe4aQ1SKgnNl4cS1M2N3N1N2N2O1N2O1O0O2O1O1N2O001O1N101O1O0O2O1O1O1O1N101O1O001O001N101O000000000000O1000000O1000000O10000O100O100O100O1O100O100O10000O10O10O100000000000000000000O10000000000000000000000000000000000O1000000001O00000000000O101O000000000000001O000O100000001O0O100000001O0O10001O001O00001O001O001O00001O0000001O0O10000O2O0O10000O101N10000O100000000O2O0000000O10000000000O2O000O10000O2O000O100O2O0O100O2O0O2N2N2N2N2M4M2N2N2N2N2N2N101N2N2N1O3M2M4L6H9GkYc9\"}}, {\"image_id\": 39, \"category_id\": 2, \"bbox\": [172.21543884277344, 923.6845703125, 394.18719482421875, 63.8751220703125], \"score\": 0.9999995231628418, \"association_id\": 1, \"segmentation\": {\"size\": [1365, 1024], \"counts\": \"hld74_Z120001N100O101O000O1000000000000000000000000000000000000001O000000000000000000000000000000000000000000000000000000O100000000000000O1000000O1O1O1N2L4O1O100O1O100O10000O100O1O1O100O100O100000000O10000O10000O10000O10O100000O10000O10000O10000O100000000000000000O1000O100000000000000000000O10000000000O10000000O1000O10000000000O1000000000O1000O10000000O010000000000O01000000O10O1000000O010000O01000O010O010O10O10O10O1000O10O100O100O10000O10O1000000000000O01000000000O1000000000000O2O0000000000001N10000000001N101O001O001O1N4M00001UOXfNg0hY1300O01000000O10O1000O1000000O10O10000000O10O1000000000BWfNOjY1MVfNI1:iY1L\\\\fN3dY1M\\\\fN3dY1L^fN3bY1KafN4_Y1JdfN5nY1O2O000O101O0O1000000O100O10[WPf0\"}}, {\"image_id\": 39, \"category_id\": 2, \"bbox\": [411.1301574707031, 932.4970092773438, 272.2341003417969, 38.27178955078125], \"score\": 0.9004683494567871, \"association_id\": 0, \"segmentation\": {\"size\": [1365, 1024], \"counts\": 
\"`i\\\\b01cZ1100NO`eN1`Z1O`eN1`Z1O`eN1`Z1O`eN1`Z1200000000000000000O10000O100O100000\\\\O0cfN0\\\\Y11dfNOZY13ffNMYY14ffNMYY14dfNKC1hY15efNN[Y12efNN[Y12efNN[Y12dfNO\\\\Y11dfNO\\\\Y11dfNN]Y12bfNO^Y11bfNO^Y10cfN0]Y10cfN0]Y10cfNO^Y11bfNO^Y10cfN0]Y10cfN0]Y10cfN0]Y10bfN1^Y1NcfN2_Y1LafN3QZ1000000000000O100000000000000000000O100000O1000000O1000000000000O1000000O100O1000000O10000000000O10O10000000000000O10O10000000000000O1000O1000000000O1000000000000O10000000O100000000000O10000000000000O1000000000000000O10O10000000000000000000O100000O1000000000O1000000O10000000O10000000000000O100000000000O10000000000O10000000000000000O10000000O100000O10000000000000000O10000000000000001N10adU>\"}}, {\"image_id\": 40, \"category_id\": 1, \"bbox\": [679.10546875, 142.01724243164062, 298.0831298828125, 506.3899230957031], \"score\": 1.0, \"association_id\": 2, \"segmentation\": {\"size\": [680, 1024], \"counts\": \"e]T>1Re0>C;F6L2O1N2O1N2O1N2O1N2O1O10O0010O0i_O4S;KUDe1S;[NbDU2Z;kM`D_2[;aMbDe2[;[McDj2[;UMcDP3Z;QMcDS3\\\\;lLaDY3];hL`D]3^;cL]Dd3`;\\\\L]Dk3_;WL\\\\Do3b;RLZDS4h2gJ`4X1cHV4n1cK\\\\59aHX4l1fKa55_HX4k1iKc54PHb4X2^Kh5h6SJ]Ik5e6oIaIo5d6hIcIV6a6_IkI[6[6RIXJh6Z:K1N2O0O1O10O0000010O00000010O00000001O000000001O0000O10000O100O10000O10000O100O10000O10000O100000000O100000000000000O10000000000O1000000O100O100O100O1O1O1O1O1N2O1O100O1O100O100O10000000000O100000000000000000000O10000000000000O100O10000O100O1O100O1O1O1N2N2N2N2N2N2N2M3M3M3L4L4L5J5L4M3M3M3N2N2N2N2N2O1N3M2O1N2O1N3M3N2N1N3N3M2N3M2N2N2N2M3N1N3M3M2N3N1N3N1N2N3M2M4L3L5K5K4M4M3M3M4K4M3K5K5M3L4M2O2M3N1N3M2M4M2M4M3M2N3N2M2N2N3M2M4L3M4K5L4M2N4L4L4L4K6G9hJh@S4j`0YO<H8I5K4M3M3L4M3M4K5K5K4L4L4M2M3N3L4L4L4K7H9F:G8H`kR1\"}}, {\"image_id\": 40, \"category_id\": 1, \"bbox\": [353.0744323730469, 227.59689331054688, 310.6949157714844, 311.1014709472656], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [680, 1024], \"counts\": 
\"jVU83Re03M4M2O2O001N2O001O01N1O4L4LkU2J[jM1O2O1k\\\\ONQa03k^O9m`0Hm^O?Pa0Bl^Od0Qa0^Ok^Oh0Qa0ZOj^Ol0Sa0VOh^OQ1Ta0QOg^OU1Va0nNe^OV1l`0aN]^O=a0W1m`0@e^Oh0Ua0i1N1G9N2O1000001O0O10000UMjKbDX4Y;TL]Do3`;_LSDc3l;dLlC^3S<iLeCY3[<jL`CX3_<lL\\\\CU3d<nLXCT3h<mLUCU3j<nLRCT3n<oLmBS3R=oLkBR3V=PMfBQ3Z=RMbBo2_=SM]Bn2c=Z21I8@?O100O2O00000O1O0010O1O1O011O00001N2O0N3N2N2O0O2O1O1O101O0O2O1O1N101O001N1000001O000000000000000001O0O1O2O0O1N3N1O101N10O01000O10O1000O10O1000O10O100O010O10O01O1O010O01O10O01O100O01000O10000000000O100000O100000010O0000001O00001O00101N3YIPBP6U>eIUBX6`>M100O10O100O010O0010O01O010O01O01O01O0001O00010O00010O000001O0O100O2O0O1000001N100O1O1O1O2N1N2N2O1O1O100O2N1N2N2M3cLV@Y1n?dNV@X1l?gNW@T1l?jNX@R1j?mNY@P1h?oN[@n0g?oN^@l0d?SOb@f0a?XOd@b0^?\\\\Oh@8a?FR3N2M3N2N3L3N2N3M3L4L9GWRf7\"}}, {\"image_id\": 40, \"category_id\": 2, \"bbox\": [440.1448669433594, 423.7196044921875, 265.9076843261719, 118.14068603515625], \"score\": 0.9999997615814209, \"association_id\": 1, \"segmentation\": {\"size\": [680, 1024], \"counts\": \"g[U99nd02O0O2O0O101N1O1O1O1O100O100O1O10O01N2O100O1O100O100O10O010000O100O10O10O10O100000O01000O1O100O100000O010000O1O100O1000O01000O0100O010O1000O10O1000000000O100000O0100O0100O0100000O100O010O1O1O010O010O100O00100O1O10O01O100O00001O00100O10O1000O0100O1O1000O01000000O100O010O10001N101O1N101N100O101O0O10001O001O0O2OO100O01O010O10O10O10O100O00001O010O00100O10O01N1N3O001O100O10O01N2D<L4O1O1N2M3O1000000O1000000000O10000O100O10000O10000000000000000O1000000O10000O10000O10000O100O1O1O10000O100O100N2N2O1O1O10000O100O101N100O10001N3N7H7Gjgb6\"}}, {\"image_id\": 40, \"category_id\": 1, \"bbox\": [114.72663879394531, 253.37429809570312, 192.35337829589844, 189.760498046875], \"score\": 0.9999997615814209, \"association_id\": 4, \"segmentation\": {\"size\": [680, 1024], \"counts\": 
\"Z\\\\i29md03oMEf^Oi0Ta0^O_^OOmNe0[b0Fm]O_O24HU1Ub0]Om]O]O4`1la0WOl]O^O4^1la0DP^O?ba0_N]^OQ3_a0?L4M3O00O10000O100001O1O1O0010OO2O1O1N2O1N3N1N2O1O1O1O1N10001O0O100O100O100O100M3M3L4J6003M4L1O1O001O3M2N1N101O000O100O1N2O100O1O001O000O101O000000000O10001000O100O1O101N1O1N2O1O2M2O100O10001O00000001O1O1O2N101N1O1O001O001O001O1O1O1O1O1O001N2N2M3M3M4L3F;N1N3N1N2Ok^OlLW`0c0n^Oh1j0hMX`0=R_Oi1f0jMY`0N__OW2;gMo`0W2Q1I8N002UNk\\\\Oa1_c0M4M3M4L4M3L4M3H:H7FPSU?\"}}, {\"image_id\": 40, \"category_id\": 2, \"bbox\": [12.111346244812012, 451.96722412109375, 260.0657653808594, 69.46270751953125], \"score\": 0.9999996423721313, \"association_id\": 4, \"segmentation\": {\"size\": [680, 1024], \"counts\": \"_^8<lc0Im\\\\O;Pc0Gn\\\\O:Rc0Fn\\\\O;Qc0En\\\\O<Rc0Dn\\\\O<Rc0Dn\\\\O<Rc0Dm\\\\O=Rc0Dn\\\\O<Rc0Dn\\\\O<Rc0Dn\\\\O<Rc0Dn\\\\O;Sc0Em\\\\O;Sc0Fl\\\\O:Tc0El\\\\O<Tc0Dl\\\\O<Sc0Em\\\\O;Sc0Em\\\\O;Sc0Em\\\\O;Sc0Em\\\\O;Sc0Em\\\\O;Sc0Em\\\\O;Sc0Em\\\\O;Sc0Em\\\\O;Rc0Fn\\\\O9Sc0Gl\\\\O:Tc0Fl\\\\O:Tc0Fl\\\\O9Uc0Gk\\\\O9Uc0Fk\\\\O;Uc0Ek\\\\O:Vc0Fj\\\\O:Vc0Fj\\\\O:Vc0Fj\\\\O:Vc0Fj\\\\O:Vc0Fi\\\\O:Xc0Ei\\\\O;Vc0Fj\\\\O:Vc0Fj\\\\O9Wc0Gh\\\\O:Xc0Fh\\\\O:Xc0Fh\\\\O:Wc0Gi\\\\O8Xc0Hh\\\\O8Xc0Gi\\\\O9Wc0Gh\\\\O9Yc0Gg\\\\O8Zc0Hf\\\\O7[c0Hf\\\\O8Zc0Hf\\\\O7[c0Ie\\\\O7[c0Ie\\\\O7[c0Ie\\\\O7[c0Ie\\\\O7[c0Id\\\\O7]c0Hd\\\\O8\\\\c0Hd\\\\O7]c0Ib\\\\O8^c0Hb\\\\O8^c0Hb\\\\O8^c0Hb\\\\O8^c0Hb\\\\O7_c0Ia\\\\O7_c0Ia\\\\O7_c0Ha\\\\O9_c0Ga\\\\O8`c0H`\\\\O8`c0H`\\\\O8`c0H`\\\\O7ac0H`\\\\O8`c0H_\\\\O9ac0G_\\\\O9ac0G_\\\\O9ac0G_\\\\O9ac0G_\\\\O9ac0G_\\\\O9ac0G_\\\\O9ac0F_\\\\O;ac0E_\\\\O;ac0E_\\\\O;ac0E_\\\\O;ac0E_\\\\O:bc0F^\\\\O:ac0F`\\\\O:`c0F`\\\\O:`c0F`\\\\O9ac0G_\\\\O9ac0G_\\\\O9ac0G_\\\\O9ac0F_\\\\O;bc0D^\\\\O<bc0D^\\\\O;cc0E]\\\\O;cc0E\\\\\\\\O<dc0D\\\\\\\\O<dc0D\\\\\\\\O<Xd000O100000000O01000000000000O10O10000O0100000000000O1000000000000O010000000O01000000000000000O1000O10O1000O1000000000O10O1000O1000000O100000O10000O10O10000000O01000O10000000O10O1000O1000O1000O1000O100000000000O10O10000O1000O100000O100000000
0O1000000000O0100000O10000000000000O100000000000000O101O0O10001O0O101N1NfWh?\"}}, {\"image_id\": 40, \"category_id\": 1, \"bbox\": [0.0, 295.3447265625, 100.91675567626953, 107.45388793945312], \"score\": 0.9999979734420776, \"association_id\": 5, \"segmentation\": {\"size\": [680, 1024], \"counts\": \"`P1a0ed05ZN]O^^Oh0]a0]O_^Og0_a0YO`^Oj0]a0XO_^Om0^a0UOi]O6Fm0]b0nNh]Ol1Vb0TNi]Oo1Ub0RNk]Oo1Rb0RNn]OP2Pb0PNP^OR2na0oMo]OS2Qb0mMn]OU2Qb0a00000001O00001N1O2O0O100O1O1O1O10O01O00001O00010O0100O0100O1000000O10000000000O10000O100O1O1000`]O^MZb0b2e]OiMQb0j2MYOS^OTNla0j1V^OfML;ma0o1W^OWNia0j1V^OVNja0k1U^OUNka0k1V^OTNka0k0]^OPO10ba0l0f^OmNL5_a0i0d_OVO^`0FQ^Oh0f1A_`0<f_OB[`03Q@Ko?5Q@JQ`04Q@KP`03R@LQ`01d2N2O00001Oeb^c0\"}}, {\"image_id\": 40, \"category_id\": 2, \"bbox\": [7.330225467681885, 409.6004333496094, 106.6990966796875, 18.588134765625], \"score\": 0.9999971389770508, \"association_id\": 6, \"segmentation\": {\"size\": [680, 1024], \"counts\": \"a\\\\63od0OU[O1jd0700000000000000O1000000000000000000O1000000000O10000000000000000O100000000000O010000000000000O100000000000000000O10O1000000000000000000O01000000000O100000000000O2O00002N001O1O1N101O00001NfkQc0\"}}, {\"image_id\": 40, \"category_id\": 1, \"bbox\": [263.1099548339844, 227.42257690429688, 224.34689331054688, 258.5080261230469], \"score\": 0.9999964237213135, \"association_id\": 3, \"segmentation\": {\"size\": [680, 1024], \"counts\": \"lQa5;:Lnc0m0I8H:A;H8J5K5L2N3M2M4J5J6E<]OcLT_Ob3f`0c0M3O1N2M3L4J6L4K5N2M3M4L3O1N2O1O1O1O1O001O1O1O100O100O00100O00001O01O0010O0100O010O010O000O100O11O01O00000O100O2O00O20O000O101O001O1O100O10O01O100O100000O1000000000000001O01O000000000O10000O2O0O1N2O1M3N2\\\\Od0O1O1O100000000O10000000O10000000000O100O100O2O0O1O1SMbAPO_>j0jAROV>m0kAROW>l0kASOV>j0mAUOU>g0nAXOT>d0oA[OR>?TB_Oo=;VBDk=5[BKg=0^BMg=M]B0g=L\\\\BOm=IWBOS>ITBHb>KeAIg>1\\\\3]Od[O2`d0Dn[O3ni21ejM4L3M20L5JmeT<\"}}, {\"image_id\": 40, \"category_id\": 2, \"bbox\": [287.6131286621094, 453.6479797363281, 139.193359375, 
37.52783203125], \"score\": 0.9999719858169556, \"association_id\": 3, \"segmentation\": {\"size\": [680, 1024], \"counts\": \"W[Z62Ve0000O10000O1O100O100O100O1O1O100O100O01000O1000000000O10O100000000O010000000O100000O0100000000O010000000O01000000000O01000000000O10O100000000O0100000O100000O100000O100000O10O100000O010000000O10O10O100O100O010O1000O10O100000000000O100O1O2M2N5JUe\\\\<\"}}, {\"image_id\": 40, \"category_id\": 1, \"bbox\": [48.44716262817383, 282.23248291015625, 113.03213500976562, 142.0869140625], \"score\": 0.9999581575393677, \"association_id\": 6, \"segmentation\": {\"size\": [680, 1024], \"counts\": \"P`[1e0ad0;U\\\\OQOjb0Q1R]OTOfb0T1T]OROib0R1P]OSOnb0d1N3C=L3M4MTOm]OgNQb0Y1R^OeNna0[1T^OdNka0[1Y^OcNga0]1Y^OcNfa0_1X^ObNha0^1W^OcNha0_1U^OcNja0]1V^OeNia0Z1X^OfNha0X1Z^OiNda0W1\\\\^OkNba0W1[^OkNda0^2O0O2N1N3M^N^^OD_a09f^OGXa09i^OGVa02f^O\\\\N5d1Sa0OT_O2k`0MV_O4j`0KW_O6g`0KY_O5g`0KZ_O4e`0M[_O3e`0L[_O5e`0K[_O5e`0J[_O7e`0IZ_O9e`0G[_O8f`0HX_O:h`0FU_O=k`0CR_O`0o`0@n^Oa0Sa0@g^Oe0Za0d11O1O0O2O1O001O001O0001O1O1O100OXOf^OTMN6\\\\a0\\\\2\\\\_OaMc`0l1e^OSNR11X`0k1i^OoMR16T`0j1V@WNi?e1[@\\\\Nc?a1b@^N]?`1g@^NZ?a1i@[NX?e1U20100O1O10O01O00002N2O2L3QOW\\\\O;Rd0\\\\OP\\\\Oa0_d0I7GdhPb0\"}}, {\"image_id\": 40, \"category_id\": 2, \"bbox\": [137.92367553710938, 435.8825378417969, 125.09368896484375, 16.059112548828125], \"score\": 0.9900211691856384, \"association_id\": 0, \"segmentation\": {\"size\": [680, 1024], \"counts\": \"R]m21We00000000O1000000O100000000O1000000000000O10000000O1000001O000O2O`c>0_\\\\A1O100O100N2O100O1O100000000O1000O10000000000000000000000000000O1000O1000000000000O1000000000O0100000000000000000000000O10000000000O10dmh?\"}}, {\"image_id\": 40, \"category_id\": 2, \"bbox\": [7.231074810028076, 398.7140197753906, 77.74116516113281, 14.459625244140625], \"score\": 0.9652104377746582, \"association_id\": 5, \"segmentation\": {\"size\": [680, 1024], \"counts\": 
\"bf57Qe00O2O01O00001O000000000000O10000O00100O10000000000000000000O1000000000000000000000000O10000000001O1O1O1O1O1Ochmc0\"}}, {\"image_id\": 40, \"category_id\": 2, \"bbox\": [866.0518798828125, 442.4382019042969, 145.63775634765625, 165.12460327148438], \"score\": 0.9422325491905212, \"association_id\": 2, \"segmentation\": {\"size\": [680, 1024], \"counts\": \"^ZPb07Pe02N1N2I7I7O1N3N1M3L4O1O1O1O1O1I7O1O1N2O1O1O11O0000000000O1O1O100N2M3C=VOj0K5N2N2L4M3N3M2O001N2N`Nj^OROUa0n0m^OQORa0P1n^OPOQa0R1o^OmNo`0U1R_OjNm`0W1U_OgNj`0Z1W_OeNh`0]1X_ObNg`0a1W_O_Ng`0c1Z_O\\\\Nf`0d1[_O[Nd`0g1\\\\_OYNb`0h1`_OVN_`0k1c_OSN[`0P2g_OmMV`0V2k_OiMS`0Z2n_OdMQ`0]2P@bMo?_2R@`Ml?b2V@\\\\Mh?f2Y@YMe?i2\\\\@VMd?j2]@TMc?n2]@QMb?P3`@nL_?T3b@jL]?W3e@gLZ?Z3h@dLX?\\\\3j@aLW?_3PAYLP?h3X11O0000O100000000001O0000UMe^Oi1\\\\a0UNl^Od1Ta0[NQ_Oa1o`0_NR_O`1n`0_NT_O`1l`0`NT_O`1l`0`NU_O`1j`0`NV_O`1j`0`NW_O_1i`0aNW_O_1h`0bNY_O]1h`0bNX_O^1h`0bNX_O^1i`0aNX_O]1j`0cNU_O]1k`0cNU_O]1k`0cNU_O]1k`0cNV_O[1k`0eNU_OZ1l`0fNT_OX1m`0iNS_OV1o`0iNR_OV1n`0jNR_OT1Qa0kNP_OS1Qa0mNP_OP1Sa0oNn^Oo0Sa0QOn^On0Ra0ROP_Ok0Qa0TOR_Oi0o`0WOS_Oe0Pa0ZOQ_Od0Pa0\\\\OQ_Ob0Qa0]Oo^Ob0Sa0]Oo^O?Ta0_Oo^O=Ua0Al^O=Ua0Cl^O;Va0Cl^O2hNB`b08n^ONaa0L[aa0\"}}, {\"image_id\": 40, \"category_id\": 2, \"bbox\": [796.1142578125, 549.261962890625, 146.83648681640625, 124.012451171875], \"score\": 0.5499617457389832, \"association_id\": 0, \"segmentation\": {\"size\": [680, 1024], \"counts\": \"fkm`02Te02N2N3N100N2O1O1O1O1O1O1N2O100O1O1O1O1O1O100N2O1O100O1N2O1O1O001N1O101O1O1O1O1O1O001O1L4N2O001N2K5L3O2O0O2L3L5N2O0O2M3K4N201O0O2L3O2O1O1M2N2O1O110000O010O100O2N1O100O100N3O0O101N1O2N1O2N2N3N3K3N2N2N2N1O2N1O2N100O100O101N2O1N2N2N2O2N5J4M1O1O0O2N3L3N2N3M4HjVe1\"}}, {\"image_id\": 41, \"category_id\": 1, \"bbox\": [633.053955078125, 152.71617126464844, 207.33636474609375, 483.8509521484375], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [774, 1024], \"counts\": 
\"R^`?2o`0h0[E^O[:Q1YEVOa6C\\\\G`1m1oN]6N^G[1o1iN[68^GW1Q2cN_6<XG[1Q2[Nf6>SGb1j1UNQ7>iEhNBP3Y3nM[7>eEgNBV3U3iMc7=cEfN@[3T3dMk7:^EnN[OZ3W3aMP87[EVOUOY3Y3^MX81XEf3d1QMT9YOUEm3Z1SMa9POREY4j0QMU:eNPEd49oLi:[NmDn4NmLU;WNhDT5IjL^;WNaDk8_;m110000O10000O10001N10000O10O101O0O1RNZD\\\\Gf;`8bD\\\\G^;c8eDZG\\\\;e8hDVGY;j8lDQGT;o8REjFo:W9VEcFi:^9\\\\E\\\\Fe:d9_EXF`:j9m11O1O100O1O100O1O1O1N2N2N2M3M3M3M3K5H8E;I7K5M3L4L4K5I7E;I7L4N2L4L4J6J6K5L4N2O1N2O1O1O1O1O101O00100O1O1O102N2O1O2OUB^Go;g8`CoGV<W8[CTHc<j9N1O3N4K5M2M1N2N0N3N2N2M2O1N2N2N2N2O1N1O1O1O1O001O00001O000001O000000001O001O2N2N3L3N2N1O1N1mJXDhMh;n1WE\\\\Mj:[2kEYMV:b2WFTMj9i2_FPMb9n2eFlL\\\\9R3hFkL[9Q3jFjLY9S3mFeLZ9U3nFaLZ9Z3VHRKS8j4g5L4M2N3M3M3L6J6H7J5K5K6H<YNj1ZNX\\\\_4\"}}, {\"image_id\": 41, \"category_id\": 1, \"bbox\": [107.2240982055664, 188.15167236328125, 118.6124496459961, 396.28857421875], \"score\": 1.0, \"association_id\": 2, \"segmentation\": {\"size\": [774, 1024], \"counts\": \"iXP3=e26da08k]O=ga0Mk]Oc0ka0Bj]Ok0Pb0YOf]OT1Sb0TO]]Ob1Tb0k2\\\\O:EY1hN7J7I5K3M4K7J7G6H9Fb0\\\\Oa0_O=E:E9J5L4N2N1N3N1N2N2N2N2O1O100O100000000000000001O00000O101O2N3M3M2N001O1N2O2N7Ij0WMSCeHP=Q7ZCmHg<l6aCQIa<g6gCWI]<`6jC^IY<Y6oCeIS<V6RDhIP<R6VDlIk;m5]DQJc;j5dDTJ];h5gDWJZ;e5iDZJX;\\\\5SEcJn:R5\\\\EnJe:l4aESKa:f4eEYK]:[4nEcKV:R4UFkKl9n3]FnKf9m3_FQLb9k3cFSL^9n0oAR1g4oM\\\\9g0YBS1]4UN\\\\9c0`BQ1V4ZN^9?bBT1R4ZNf96]B^1Pe0N3M5K3L4M2M7H8Gejmb0\"}}, {\"image_id\": 41, \"category_id\": 1, \"bbox\": [496.4146423339844, 241.48362731933594, 102.38894653320312, 308.1268310546875], \"score\": 1.0, \"association_id\": 3, \"segmentation\": {\"size\": [774, 1024], \"counts\": \"SXj;?cg07K4K4M3oYOZO^d0i0][OBZd0a0b[OISd0=f[O6ic01o[O>2fN[b0Q1]]Oe1^b0W2]^OcJQ`0l6L4N100O1O2N1O1N2N3N010O10O01000OO2M2M3O1N0J3013I7A>G:L4O2N2N3O02O1O1O3M6S@XIP>T7YA_I_>W8M1O1O1O1O000001O0O2O003L3M3M2N1L5K4K6B`0B>C<K4L4jNW1K4L6J6I5L4M4K5K5L4M2N3M2N2O2M4K3O1O1N2O1PNh[OO\\\\d0Io[O1Ud0HR\\\\O3Qd0IT\\\\O2Sd0_O\\\\\\\\O7gaQ:\"}}, {\"image_id\": 41, \"category_id\": 1, \"bbox\": 
[256.0539245605469, 584.3531494140625, 88.58004760742188, 111.6563720703125], \"score\": 0.9999963045120239, \"association_id\": 0, \"segmentation\": {\"size\": [774, 1024], \"counts\": \"ZRR6<ig03N2N4K3N1O=C8H2N8H7I1O3M4L1O2N1O1O0000001O1O2N1O4L3M2N3M2N1O1O2N1O1O1O1O001O001O00001O1O0000000000000001O0O10000O1000000O101O001O3L5K2N100O2O001N100N2O011O0ZN^ZOh0be0VOjZOXOFR1ae0AU[O<md0AZ[O:gd0Dc[O3]d0Lh[O0Yd0NP\\\\OFUd08o1Lf\\\\Q`0\"}}, {\"image_id\": 41, \"category_id\": 1, \"bbox\": [366.692626953125, 651.3410034179688, 102.5732421875, 122.65899658203125], \"score\": 0.9999912977218628, \"association_id\": 0, \"segmentation\": {\"size\": [774, 1024], \"counts\": \"mQf8m0Wg09H2O1O0O2N4L5K2N3L4M4L3M3M6K3L2O3M5K4K3N7H6J2O1N2O1N2O00001O000000000001O0000O100000000000000000000000000000000000000001O00001O1O001O001O0000001O00001O00000000000O2O0N2N2M4I6C=N2N2L4M4N1N3hNXZOLle01W1N2L4L4M5JTkR=\"}}, {\"image_id\": 41, \"category_id\": 2, \"bbox\": [534.5018920898438, 515.5713500976562, 159.772705078125, 23.45977783203125], \"score\": 0.9999563694000244, \"association_id\": 3, \"segmentation\": {\"size\": [774, 1024], \"counts\": \"Xc^=5Qh01N2N010000O100000000O1000000000O1000O10000000000001O000O100000000O2O000O10000000000O101O00000O10000000000000000O10000000001O00000O100000000O100000000O100000000O1000000O10000000O10000000000O10000O10000O1N2N2L4O3MSaV8\"}}, {\"image_id\": 41, \"category_id\": 2, \"bbox\": [752.76904296875, 570.141845703125, 260.42388916015625, 46.94989013671875], \"score\": 0.9999254941940308, \"association_id\": 1, \"segmentation\": {\"size\": [774, 1024], \"counts\": 
\"ngia0:kg02N2N10000000000000000001O00001O001N2O1O1N2O001O1NS_m09Z`RO=M1N3O0000O010O100O10000000O0100000000O1000O1000O100000O10O1000O1000O100000O1000O100000O1000000O01000000000O100000O10000000O100000000O1000000000000000000000000O100000000000000000O10000000000000000000000000000O10000000000001O00000000000000000000000000000000001O00000000000000000000000000000000O1000000000O100000O1000000O10000000000O1000000O10000000000000000O1000000000000O1000001N10001N2NUh7\"}}, {\"image_id\": 41, \"category_id\": 2, \"bbox\": [214.895263671875, 550.6364135742188, 287.42706298828125, 36.96527099609375], \"score\": 0.9997685551643372, \"association_id\": 2, \"segmentation\": {\"size\": [774, 1024], \"counts\": \"\\\\jS54Qh02O0O1000000O100000000O10000000000O10O1000000000O10O100000000000000O10O100000000000000000000000000000000O100000000000000000000000O100000000000000000O1000000000000O01000000000000000O01000000O1000000O10O10000000000000O10000000O10O10000000000000O10000000O100000000000O010000000000O1000000O10YXOL\\\\g04cXOO[g00fXO1Yg0OgXO1Yg0OgXO1Xg00hXO0Xg00hXO0Xg00hXO0Xg01gXOOXg02hXONXg02hXOOWg01iXOOWg01iXOOWg01iXOOWg01iXOOVg02jXOMWg03iXOMWg04jXOJVg06<101O00000O101O000000000000001O0O10000000000000000000O1000000000000000000O10000000000O1O1000000O100000000O1000000000000O100000000000000000000O1000000000000000000000000000000000000000000000000000000O10000000000000000000000O1000000O101O00001N10o`[<\"}}, {\"image_id\": 41, \"category_id\": 2, \"bbox\": [32.002960205078125, 449.3648681640625, 95.0685806274414, 9.24884033203125], \"score\": 0.9351516962051392, \"association_id\": 0, \"segmentation\": {\"size\": [774, 1024], \"counts\": \"Tdh03Rh02O000000000000000000000O101O00000000000000000000O10001O001O1O0O101O0000000000000000O1O10000O100000000000000000000000000000000000000001O000O101OUVfe0\"}}, {\"image_id\": 41, \"category_id\": 2, \"bbox\": [91.06981658935547, 464.8844299316406, 45.22858428955078, 9.192840576171875], \"score\": 0.8653748631477356, \"association_id\": 0, \"segmentation\": 
{\"size\": [774, 1024], \"counts\": \"bPY23Sh01O1O00001O00001O00000000000000000000000000000O100000000000000000000000O101N2Nngnd0\"}}, {\"image_id\": 41, \"category_id\": 2, \"bbox\": [66.64583587646484, 450.8953857421875, 54.557640075683594, 7.443328857421875], \"score\": 0.09303886443376541, \"association_id\": 0, \"segmentation\": {\"size\": [774, 1024], \"counts\": \"XTi11Th01O10000O100000000000000000000000000000000001O00000000001O0OP^ee0\"}}, {\"image_id\": 41, \"category_id\": 1, \"bbox\": [57.39073944091797, 382.89801025390625, 17.702354431152344, 69.34039306640625], \"score\": 0.08524683862924576, \"association_id\": 0, \"segmentation\": {\"size\": [774, 1024], \"counts\": \"Q`^1:<Hjf0Z1mXOlNYf0c1N20O0000001O2N8H?A8TORYO0dg0Mok\\\\f0\"}}, {\"image_id\": 42, \"category_id\": 1, \"bbox\": [290.4698486328125, 135.2745361328125, 88.58697509765625, 52.325836181640625], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [246, 478], \"counts\": \"SRV2c0R72N1O100O001O1O0N2O1O2L5N1O1N2O101N1O2O000O2O1O001O1O00001O000000O100O100O1000000O10000001O00O1000000000000000000000000O100000000001O00010O00100O10O2O0O1O3M3M1O1N6K1N2NX[h0\"}}, {\"image_id\": 42, \"category_id\": 1, \"bbox\": [77.8326644897461, 115.2461929321289, 52.742576599121094, 31.094154357910156], \"score\": 1.0, \"association_id\": 0, \"segmentation\": {\"size\": [246, 478], \"counts\": \"U[c08U7JnH<o67N1000001N100O1O100O100O1000000O100000000O100000O1000000000000000001O00001O001O1O1O1O2M2O2N2M6IU_c2\"}}, {\"image_id\": 42, \"category_id\": 1, \"bbox\": [174.48977661132812, 133.13331604003906, 65.737060546875, 45.505889892578125], \"score\": 1.0, \"association_id\": 2, \"segmentation\": {\"size\": [246, 478], \"counts\": \"]mZ14a73M3L4BC\\\\If0[6ZO^I11l0_6WOaIi0_67O1O1L4O100O10O1N2O1O1N20000O100000000000000000000001O001O000000000000001O000010O02N010000O4L1O3M1O2N3M4K8G3McPi1\"}}, {\"image_id\": 42, \"category_id\": 2, \"bbox\": [293.5125427246094, 178.85877990722656, 88.82330322265625, 
10.790283203125], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [246, 478], \"counts\": \"ahW21d71000001O0000001O0000001O0001O1O100O00V^1OjaN1N3N1O10000O100001O01O0000000000000000000000000000000000000001O0000001O001O1O00001O000000000000O100000000001O000OWlf0\"}}, {\"image_id\": 42, \"category_id\": 2, \"bbox\": [171.71438598632812, 170.1929168701172, 71.39151000976562, 11.169052124023438], \"score\": 0.9999997615814209, \"association_id\": 2, \"segmentation\": {\"size\": [246, 478], \"counts\": \"e_Y13c70000001O0001O1OYU2OhjM1N100N2O1O1O101N10000O1000000001O0000001O00001O000000001O000000001O1O1O001O001O0000000000O100000000000OQYh1\"}}, {\"image_id\": 42, \"category_id\": 1, \"bbox\": [125.05729675292969, 127.49144744873047, 69.86616516113281, 42.297035217285156], \"score\": 0.9999992847442627, \"association_id\": 0, \"segmentation\": {\"size\": [246, 478], \"counts\": \"ddn01c7c0^O4M2N1O000O10000O10O01O1O001O1000O0011O000000001O1O001O1O1O1O1O3M00001O00DfI\\\\OZ6d0gI[OY66gIAO1334OS69SJGJ0S68TJHJNS6:TJGCN20X6:TJI@Oa68oIGW6:iIEX68d000O1N2O1O10000O2O0O^XV2\"}}, {\"image_id\": 42, \"category_id\": 1, \"bbox\": [153.67437744140625, 133.2769775390625, 39.6536865234375, 41.47383117675781], \"score\": 0.7740271091461182, \"association_id\": 0, \"segmentation\": {\"size\": [246, 478], \"counts\": \"nZV11e71kH2BMk63UINO>^6AXJ?g5AXJ`0h5@WJa0h5BfI08`0Q6_OiIN8b0Q6^OUJc0k5]OUJc0k5]OUJb0l5^OTJ>P6CnI:@Fb62mI3ALf61gI:X6FhI;W6EiI:Y6DfI`0X6@hIa0W6^OjIc0d60001O3K3N1M6IooU2\"}}, {\"image_id\": 43, \"category_id\": 1, \"bbox\": [741.4671020507812, 225.11097717285156, 306.26446533203125, 410.6658935546875], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [853, 1280], \"counts\": 
\"^Pgc01dj01N100O2N1L5G9N2M5L<D<D3N2M3N3L4dYOoMnc0T2i[OTNTd0P2`[OZN^d0i1^[OZN`d0h1^[OZNad0h1\\\\[OZNbd0h1[[O[Ncd0g1\\\\[OYNcd0j1Z[OWNed0k1Y[OVNfd0P2S[ORNld0U2mZOlMRe0l3O1N2O1O1O1N2N1O2N1O1N2O2M2O100000000010O01O0cLnJXAS5f>QKWAo4h>UKUAl4j>VKUAi4k>YKSAh4l>ZKRAf4n>\\\\Ko@f4o>^Km@c4S?`Ki@a4U?dKg@^4W?fKf@Z4Z?iKb@X4]?kK`@V4`?mK]@S4c?oKZ@R4g?nKX@Q4i?QLT@P4m?RLP@m3Q`0ULm_Oj3T`0YLh_Oh3Y`0[Lb_Oe3_`0_L\\\\_Oa3e`0bLW_O]3k`0fLP_OZ3Ra0iLe^O[3]a0hL\\\\^O[3ea0h22N2O2O1N101O0O2O1O001O1O1N2O100O1O100O100O100000000000000000000000001O001O1O001O1O1O1O1O10000O10000001O00000000O2O1N3N2M3M3N1N100O1O1000000001000O001O00O100O1O101N1O3M101N1O1O1O1O1O1O2O2M3M2N2N1O1O2N100O3M5K6J3N1N2N1O10O10O101O1O6J9G2N1O1O0O1O2O1N:F7I100O1O1ORLS@UMm?f2\\\\@UMd?h2d@SM]?h2i@VMX?f2n@WMS?d2TAYMm>h0Y@ZMS1j1e>4oCHS<MYD1h;I]D6d;G`D7a;GaD8`;EcD:];FdD9];FdD9\\\\;GfD7[;HgD6Y;JlD1U;MPEOP;1UEJl:5VEIj:7WEGk:8WEFj:9WEFj:9WEFj::VEEk:;UEDm:;TEBn:?SE^On:b0`EoNa:R1aEiNa:W1aEfN`:[1`EcNa:]1_EaNb:`1_E^Nb:b1^E\\\\Nc:e1^EWNe:i1T71O010O10O0000000O1O1O1O2N101N3M1O2N1O1N3M3M3M?[OTWc6\"}}, {\"image_id\": 43, \"category_id\": 1, \"bbox\": [1044.1353759765625, 115.17070007324219, 228.0882568359375, 492.754150390625], \"score\": 1.0, \"association_id\": 2, \"segmentation\": {\"size\": [853, 1280], \"counts\": 
\"[d[k0<Pj0`0F9H7I4M3M2M3N3L4L3N3L5L3N1N3N1O1N2O1O1N2O1O1O1O1O2N2N2N1O2S\\\\OmLT`0U3g_OoLX`0S3d_OPM[`0S3__ORM_`0R3Z_ORMe`0Q3T_OTMk`0o2o^OUMPa0m2l^OUMTa0l2i^OWMVa0l2e^OVM[a0m2`^OUM`a0Q3X^OQMga0`3f]OcLZb0m3e\\\\OdL[c0[500000000000O100000000010O1O0^_OQJ^<P6TChJa<Y5YCRKa<o4\\\\CUKb<m4ZCWKe<j4WCYKj<h4QC[Ko<i4hB]KX=g4UBgKm=a80010O000002N9G=C6K1N10N001O01O0001O0O11O0102N5J6KT1kN00O01O100O1O0010O01O0000000001hKaBXK_=d4jBVKW=h4mBVKS=h4PCWKQ=h4PCWKQ=h4PCXKQ=f4QCXKQ=e4QCZKQ=d4PC[KS=b4nB]KT=a4nB]KS=b4nB]KR=c4QCZKP=f4RCWKn<i4UCTKl<l4WCPKi<P5[ClJf<T5]ChJc<X5`CeJa<Z5aCcJa<\\\\5aCbJ`<^5bC_J`<_5cC]Ja<`5gCXJ\\\\<e5lCRJW<l5oCnIS<P6QDkIQ<U6PDiIQ<V6QDgIR<X6oCfIR<Z6PDbIS<^6PD]IS<b6WDQIm;n6n3000O0O100O010O010O10O20001O1000OO010O1N2N5Ka0_O9G6J3M2O2M3N4Lc0]O5K3`MWZOMke0G`ZO8ae0CdZO=\\\\e0]OjZOc0Ye0ROoZOm0Xe0`NU[O_1Tg0M3M1N2O00O01O00001O001O001O1O2O1N3M2N1O2N010O0010O00010O10O1O001O2N1O2O1N2M4J7HXP7\"}}, {\"image_id\": 43, \"category_id\": 1, \"bbox\": [143.41371154785156, 224.53570556640625, 217.23524475097656, 522.1143188476562], \"score\": 1.0, \"association_id\": 0, \"segmentation\": {\"size\": [853, 1280], \"counts\": 
\"cVn3j0`i0a0G7\\\\]OdNd;b1VDdNf;_1VDeNg;_1SDgNi;^1PDhNk;`1jChNR<_1eCgNW<_1bCeNZ<`1aCcN]<`1`CcN\\\\<a1aC`N\\\\<e1bC\\\\N[<i1bCWN[<Q2`CoM^<Y2\\\\ChMa<^2[CbMc<c2ZC]Md<g2YC[Md<i2ZCWMb<Q3ZCoL`<[3\\\\CfL[<f3aCZLX<R4cCoKW<Z4eCgKV<_4hCaKT<e4jC\\\\KR<h4mCYKo;l4oCTKm;Q5RDPKj;U5TDlJg;Y5XDgJe;^5YDcJc;b5[D_J`;g5^DZJ^;k5`DUJ];P6aDQJ[;T6cDmIV;Z6hDgIi:i6VEXIb:P7]EPI\\\\:X7cEiHY:Z7fEfHY:]7eEcHZ:a7bE`H^:b7_E_H`:e7[E\\\\Hf:h7UEXHl:l7oDUHQ;n7lDRHS;Q8kDoGT;T8kDkGT;X8jDhGV;Y8iDgGW;[8hDdGW;_8gDaGY;`8gD_GX;d8fD\\\\GZ;f8dDYG\\\\;k8`DTGa;Q9ZDmFi;k;4N1O2N2N2O2N10000O10001O0O2M2M4L4M2O2O0O2N2N1N3O1N2O0011O100O1O1O2N1O2N3M3L5L5K5K4L3M2O2M4aDlAU:m?^OYHRFgNl9U1\\\\FiNc9T1bFjN^9S1gFkNX9S1lFkNU9Q1QGlNo8Q1WGlNj8o0\\\\GoNe8m0`GQOa8l0bGSO_8k0cGTO^8i0eGVO]8h0eGVO]8f0gGXO[8e0iGXOY8e0lGXOU8d0oGZOT8`0QH^OQ8?QH@Q8_MmD]OU3S3Q8_MhD^OX3R3R8_MdD_O\\\\3Q3S85nGJV83kGLX80kGNX8bMaDkNX3b3Z8]MbDoNW3b3[8ZMaDROV3b3]8WM^DWOX3^3`8UMZD\\\\OY3\\\\3c8QMVDB[3Y3e8nLQDI^3UOlMm0i:[OdDb0n5Nd5VOdDk0k5Ll5hN_D\\\\1g5JT85mGJT85nGIS86oGHQ7dNWBb1k6Gm6iNYB_1l6Dl6mNZB]1m6Ch6RO\\\\BY1n6Cf6TO^BW1n6CR8;oGDT89mGFV87kGG[84gGJ]83cGKa82aGLb81aGLa82dGH^86eGH]85eGJ\\\\83gGL[80hGOZ8LjG3]a0O001N2O1O001O^fc00aY\\\\O20NdXVh0\"}}, {\"image_id\": 43, \"category_id\": 1, \"bbox\": [474.8777160644531, 172.70849609375, 171.14590454101562, 494.46807861328125], \"score\": 0.9999998807907104, \"association_id\": 3, \"segmentation\": {\"size\": [853, 1280], \"counts\": 
\"lRa<j0ei0:F9H8F:D=D:F:J6J5K6I6J8J5K6K4m^OjK`;[4VDnKa;]4TDlKa;a4WDdKd;d4SDcKi;d4oCaKm;e4nC]KP<g4jC]KS<j4bC^KW<n4UC`Kg<k4^BiK_=`4cAXLZ>o3WA\\\\Lf>k7M3M3O2M2O1O1O1N2N2M3M3N2M3^NTDWDP<d;dDiC`;R<d1M3N2O2M2O1O1O1O1000000O10000O1O1O1O1N2O10000000iL]E\\\\Fc:V9QFdFo9Z9XFaFg9]9aF^F^9`9jF[FV9a9SGZFl8b9_GXFa8e9eGXF[8f9iGXFX8d9lG[FV8`9oG]FT8X9VHgFl7S8YIlGh6f7gIXHZ6d7jI[HV6b7oI[HR6b7SJ\\\\Hm5`7ZJ]Hg5]7aJ_H`5\\\\7gJbHY5[7lJbHU5[7oJdHQ5Y7SKeHo4h2XDP1X7ULa4b2QE8o6TMQ4]2[E2n6_Mh3\\\\2`EMnb01W]OImb05X]ODkb0;Z]O_Ohb0`0]]OZOeb0e0_]OWObb0h0a]OTObb0i0b]OUO^b0i0g]OTOZb0h0P^OROPb0j0Y^OPOga0m0_^OPOba0m0c^OPO_a0m0h^OnNZa0l0m^OoN[a0g0j^OWO^a09k^OFSf0O1O00O2O1O1O1O0010N3MVk0NhTO7N1O1O2O0O1O2N2N2N4L2O0O1O2OO1O2O1N2O1N01O001O1O001000O1O1O10O01N3K4K7Ielc`0\"}}, {\"image_id\": 43, \"category_id\": 1, \"bbox\": [729.0110473632812, 597.9151000976562, 75.44384765625, 70.4425048828125], \"score\": 0.9999940395355225, \"association_id\": 4, \"segmentation\": {\"size\": [853, 1280], \"counts\": \"mkob0m0fi06I4J6N2M4L3O0O2N1O2O0O2O0O100O100O100O2O000O1000000O1000O10O100000000000000000000000000000000001O001O001O001O1O1O1O001O002N1O1N2O1N2O2M2O1N2N2N3L4Lol[<\"}}, {\"image_id\": 43, \"category_id\": 2, \"bbox\": [882.9365234375, 682.5585327148438, 148.224609375, 18.41595458984375], \"score\": 0.9999145269393921, \"association_id\": 0, \"segmentation\": {\"size\": [853, 1280], \"counts\": \"QgWg01dj01N1O2O000O101O0000000O2O0000000000000000000O1000000000000000000O10000000000000000000000O100000000000000000O1000000000000000000000000000000000000000O10001O000000000000000O100000O10000000000000O1000O100000000000000O1000000000000000000000000O101O0000000000001N1000000O101M3NY]`6\"}}, {\"image_id\": 43, \"category_id\": 2, \"bbox\": [270.1238708496094, 694.8565673828125, 645.3291015625, 31.96630859375], \"score\": 0.9998451471328735, \"association_id\": 0, \"segmentation\": {\"size\": [853, 1280], \"counts\": 
\"`hg81cj010000000000000000DOSVO2mi0ORVO1ni0ORVO1ni0ORVO1mi01RVOOni01RVOOni02QVONoi02RVOMni03RVOMmi04SVOLmi05RVOJoi06800000000000O1000000000000000000000000O1000001O000000000000000O100000000000000000000000000000001O0000000000000000000000000000000000000000000O10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000O10000000000000000000000000000000000000000000000000000O10000000000000000000O100000000000000000000000O100000000000000000000000O1000000000000000000000000000O100000000000O1000000000000000000000000O100000000000000000000000000000000O1000000000O1000000000000000O100000000000000000000000000000000000000000000O1000000000000000000000000O10000000000000000000000O100000000000000000000000000000000000000000000000000000O010000000000000000000000000000O1000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000O1000000000000000000000000000000O100000000000000000000000000O100000000O10000000000000000O1000000000000000000O10000000000000000O10000000000000000O10000000000O10000O10000000000O1000000O100000000O100000000O100000000O100000000000000O100000000000001O00oee:\"}}, {\"image_id\": 43, \"category_id\": 2, \"bbox\": [768.205078125, 612.3792114257812, 499.1395263671875, 47.3629150390625], \"score\": 0.9997648596763611, \"association_id\": 1, \"segmentation\": {\"size\": [853, 1280], \"counts\": 
\"akPe01aj00`UO1`j03N10002L200O1000000O1000000O10000O100000000000000000O10000000001N1000000000000000000000O1000000000000000000000000O100000O1000000000000000000000000000O1000000000000000000000O10000000000000000000O100000O1000000000000000O1000000000000000000O1000000000000O100000000000000001O000000000000000000001O000O101N101N4M^_P1N``oNNcUO6Zj05N00001N2O0O100O10000000000000000O100000000000000000000000O100000fVO@_h0`0_WOBah0>^WOCbh0=^WOCbh0=^WOCbh0=^WOCbh0<^WOEbh0;^WOEbh0;\\\\WOA\\\\O4Xi0;\\\\WOGdh09[WOHdh09\\\\WOGdh09\\\\WOGdh09[WOHeh08[WOHeh08[WOHeh08ZWOIeh08[WOHeh07\\\\WOIdh07[WOJeh06[WOJeh06[WOJeh06[WOJeh06[WOJeh06[WOJeh06[WOJeh06[WOJeh06[WOJeh06ZWOKfh05ZWOKfh05ZWOKfh04[WOLeh04[WOLeh04ZWOMfh03ZWOMfh03ZWOMfh03ZWOMfh03ZWOMfh03ZWOMfh03YWONgh02YWONgh02YWONgh02YWONgh02YWONgh02YWONgh02YWONgh02YWONgh02YWONgh02YWONgh02YWONgh02YWONgh02YWONgh02YWONgh02YWONgh02YWONgh02YWONgh02YWONgh01ZWOOfh01ZWOOfh01ZWOOfh01ZWOOfh01ZWOOfh01ZWOOfh01ZWOOfh01ZWOOfh01ZWONgh02YWONgh02YWONgh02YWONgh02YWONgh02YWONgh02YWONgh02YWONgh02YWONgh02YWONgh02YWONgh02YWONgh02YWONgh02YWONgh02YWONgh01ZWOOfh01ZWOOfh01ZWOOfh01ZWOOfh01ZWOOfh01ZWOOfh01ZWONgh02YWONgh02YWONgh02YWONgh02YWONgh02YWONgh02YWONgh02YWONgh02YWONgh02YWONgh02YWONgh02YWONgh02YWONgh02YWOMhh02YWONgh02YWONgh02YWONgh02YWONgh01ZWOOfh01YWO0gh00YWO0gh00YWO0gh00YWO0gh0OZWO1fh0OZWO0gh00YWO0gh00YWO0gh00YWO0gh00YWO0gh00YWO0gh00YWO0gh00YWO0gh0OZWO1fh0OYWO1hh0OXWO1hh0OXWO1hh0NYWO2gh0NYWO2hh0MXWO3hh0MXWO3hh0MWWO4ih0KXWO5hh0KXWO5hh0KXWO4ih0LWWO4jh0KVWO5jh0KVWO5jh0KVWO5jh0KUWO6kh0JUWO6kh0JUWO6kh0IVWO7jh0IVWO7jh0IVWO7kh0HUWO8kh0HUWO8kh0HUWO7lh0ITWO7lh0HUWO8kh0HUWO8kh0HUWO8lh0GSWO:mh0FSWO:nh0ERWO;di0O10000000000000000O100000O10000000O100000000000O10000000O1000000000000000O1000000000O10000000O10000000000000000000000000O1000000000000000000000000O100O2O0O3Nlk;\"}}, {\"image_id\": 43, \"category_id\": 2, \"bbox\": [792.6250610351562, 658.4698486328125, 101.47406005859375, 8.68206787109375], \"score\": 0.9961963891983032, \"association_id\": 4, \"segmentation\": 
{\"size\": [853, 1280], \"counts\": \"kQfd01dj0000O1000000O101N1000000O1000000000000O100000000000O100000000000O1000000000000000000000000000000000000000000000000000O1000000000000000000000000000000000000000000O100000000O2NclZ:\"}}, {\"image_id\": 43, \"category_id\": 2, \"bbox\": [281.7189025878906, 818.718994140625, 278.4450378417969, 25.48907470703125], \"score\": 0.9957563281059265, \"association_id\": 0, \"segmentation\": {\"size\": [853, 1280], \"counts\": \"Sg[7:[j01N100000000O1000000000000000000000000000000000000O1000000000000000000000000000000O10000000000000000000000000000000000000000O1001N10000000000000000O1000000O100000000000000000O1000000O10000000000O10000000O100000O10000000000000000O100000O10000000O100000O10000000000000000O1000O10000000000O10000000O1000O10000000000000O100000O1000000000000000000000000000000000000000000000000000000000000000O1000000000000000000000000O100000000000000000000000000O10000000000000000O100000000000000O1000000000000O1000000O1000000000000O10000O100O100000001N1O100OUdmb0\"}}, {\"image_id\": 43, \"category_id\": 2, \"bbox\": [952.3795776367188, 627.186279296875, 319.83465576171875, 36.01025390625], \"score\": 0.9913272261619568, \"association_id\": 2, \"segmentation\": {\"size\": [853, 1280], \"counts\": 
\"TY]j01dj00000001N10000O101O000]VOLeh04YWOLVO0ai04XWOOhh01XWOOhh01XWOOhh01XWOOhh01XWOOhh01WWO0ih00WWO0ih00WWO0ih00mVOLJ4Yi00mVOLJ4Yi00mVOLJ4Yi00mVOLJ4Yi00mVOLJ4Yi00mVOMI3Zi00mVOMI3Zi00mVONH2[i00lVOOI1[i00lVOOI1[i00lVOOI1Zi0OoVO0G1Zi0OoVO0G1Zi0OoVO0G1Zi0OoVO0G1Zi0OoVO1F0[i0OoVO1F0[i0OoVO1F0[i0OnVO2GO[i0OnVO3FN]i0NmVO4FN]i0MnVO6DM^i0MnVO6DM^i0MnVO6DM^i0MnVO7BM`i0LnVO7BM`i0LnVO?Ri0AmVO`0Si0@mVO`0Si0@mVO`0Si0@mVO`0Si0@mVO?Ti0AlVO?Ti0AlVO?Ti0AlVO?Ti0AlVO?Ti0AlVO?Si0BmVO>Si0BmVO>Si0BmVO>Si0BmVO>Si0BmVO>Si0BmVO>Si0BmVO>Si0BmVO>Si0BmVO>Si0BmVO>Si0BmVO>Si0BmVO>Si0AnVO?Ri0AnVO?Ri0AnVO?Ri0AnVO?Si0@mVO`0Si0@mVO`0Si0@mVO?Ti0AlVO?Ti0AlVO?Ti0AlVO?Ti0AlVO?Ti0AlVO?Ti0AlVO?Ti0AlVO?Ti0AlVO?Ti0AlVO?Ti0AlVO?Ti0AlVO?Ti0AlVO?Ti0AmVO>Si0BmVO>Si0BmVO>Si0BmVO>Si0BmVO>Si0BmVO=Ti0ClVO=Ti0ClVO=Ti0ClVO=Ti0ClVO=Ti0ClVO=Ti0ClVO=Ti0CkVO>Ui0BkVO>Ui0BkVO>Ui0BkVO>Ui0BkVO>Ui0BkVO>Ui0BkVO>Ui0BkVO>Ui0BkVO>Ui0BkVO>Ui0BkVO>Ui0BkVO>Ui0BkVO>Ui0BkVO>Ui0BkVO>Ui0BkVO>ei010000O10000O10000000000O1000000000000O100000000000O100000000O0100000000000O1000000O1000000000000000O100000O1000000000000000000O1000O100000000000000000000000O1000000000O1000O10000000000000000000000000000O010000000000000O10000000O1000O1000000O100000000O100000000O100000000O10001N1O2N]Q6\"}}, {\"image_id\": 43, \"category_id\": 2, \"bbox\": [1171.0985107421875, 594.7155151367188, 98.0767822265625, 10.1068115234375], \"score\": 0.9896023869514465, \"association_id\": 0, \"segmentation\": {\"size\": [853, 1280], \"counts\": \"PaQo02bj0100O100O101O000000000000000000000000000000000000000000O1000000000000000000000000000000000O100000O10000000000O01000000000000000000000000000000001O0O10X]9\"}}, {\"image_id\": 43, \"category_id\": 2, \"bbox\": [530.1456298828125, 630.4697265625, 181.381103515625, 22.98138427734375], \"score\": 0.962568998336792, \"association_id\": 0, \"segmentation\": {\"size\": [853, 1280], \"counts\": 
\"Zcj>4aj01N101O00000000000000O1000000000000000001O0O10000000001O000O100000000O11N100000000O100000000000000O1000000000000000000000000000000O10000000000000000O100000000000000000000000000000000000001N10000000000mUOHii08VVOIji07UVOJli05SVOMli03SVONmi02SVONmi01TVOOli01SVO0mi00SVO0mi00SVO0mi00RVO1ni0ORVO1ni0ORVO1ni0ORVO1ni0ORVO1oi0NQVO2oi0MRVO3oi0LPVO5Pj0KPVO5Pj0KPVO5Pj0JQVO6Pj0IPVO7Pj0IQVO6Pj0IPVO6Xj000000000000001N1000001N1Oa`m>\"}}, {\"image_id\": 43, \"category_id\": 2, \"bbox\": [279.9679870605469, 548.6541748046875, 495.3529357910156, 54.07916259765625], \"score\": 0.895256519317627, \"association_id\": 3, \"segmentation\": {\"size\": [853, 1280], \"counts\": \"_i^71cj03N1O000O2O000O10000O1000001O0000000O100000000000000000O1000000000000000000000000000000000000000000000O100000000000000000O100000000000000000000000000O100000000000000000000O1000001O0000O1000000000O10000000000000000000000O10000000000000000000000000000000000000O1000000000000000000000000000000O1000000000000O10000000O100000O1000000000000000000000000O100000000000000000000O10001O00000000000O100000000000000000000000001O00000000000O1001O1N101O0O2N2NmdT1LX[kN0MObUO1^j030001O00000O1000001O000O10000000000000000000000O100000000000000000000000000O100000000000000O1000000000000000000000O100000000000O100000000000O10000000000000000O100000O100000000000O101O00000000000000000O1000001O000000O10O10000000000000000000000000000O10000000000O101O0000000000000O10O1O1000000O10000000000000000000000000000000000000000000000000000000000000000000000000000000000000O1000001O000000001O00enc=\"}}, {\"image_id\": 43, \"category_id\": 2, \"bbox\": [279.3287353515625, 535.0271606445312, 448.52008056640625, 107.785888671875], \"score\": 0.356667160987854, \"association_id\": 0, \"segmentation\": {\"size\": [853, 1280], \"counts\": 
\"^ic72aj02O2O00001O0O10001O000000000000000O10001O000000000000000000O1000000000000O1000000000000000000000000000O10O100000000000000000000000001O0O10000000000O10000000000O101O000000000O1000000000001O0O100000\\\\oW11cPhN0000001O0O10001O000O10001N1000000O100000001O00000000O10000000000000O10O10000000000001O000000001O00000O101O0000000001N101N101MWjX1KoUgN4L2O0O2O000O100000000001N100000000O100000000O100000O2O00000000000O10000000000000000O10000000000O10001O000O1000000000000O10O10000000000000O10000000O100000000O10000000000O10000000000O10000O101O0O10000O2O000000000O1000000O1000000O10O10000000000001O000O100000000000000000Ze10fZN0000000000000000000O10000000000000000000000000000000000000000dSa>\"}}, {\"image_id\": 43, \"category_id\": 2, \"bbox\": [471.69488525390625, 534.0511474609375, 473.26318359375, 31.77581787109375], \"score\": 0.3508407473564148, \"association_id\": 0, \"segmentation\": {\"size\": [853, 1280], \"counts\": \"Zf_>2bj02O1N1000000O2O00000000O10000000000000000000000000O100000000000000000000O1000001O000000000000000O100000000000000O100000000000O1000000000000000000000000000000O010000000000000O1000000000000000000000000O100000000000000000000000O10000000000000O1000000000000000000000O100000000000O10000000000000000000000000O0100000000000000000000O100000000000000000000000000000000000000000O1000001O00000000000000000000O2O00001O001N3NjdY1NV[fN2O1O2N1000000000O01000000000000O100000000000000000000000000000000001O0O100000O10000000000000000O10000O1000000000000000000000000000000000000000000O10000000000000000000000000000000000000O101O000000000O10000O3N0O2NX[[9\"}}, {\"image_id\": 43, \"category_id\": 2, \"bbox\": [526.9522705078125, 544.7389526367188, 303.93060302734375, 20.5228271484375], \"score\": 0.23527657985687256, \"association_id\": 0, \"segmentation\": {\"size\": [853, 1280], \"counts\": 
\"[Vb>1dj00O10000O2O00000000000O10000000000000000O100000001O00000000000O010000000000000000000O1000000000001O00000O10000000000000000O10000000O0100000000000000000000O10000000O100000000000000000O100001N100000000000000O100000000000000000O1001O000000000000000000000000O100000000000000000000O100000000000000O1000000000O1000000000000000000000000000000000001N100000000000000O100000000000000O1000000000000000000O10000O100O3N00^Y]=\"}}, {\"image_id\": 43, \"category_id\": 2, \"bbox\": [801.0584106445312, 629.0403442382812, 86.096923828125, 26.68115234375], \"score\": 0.19178295135498047, \"association_id\": 0, \"segmentation\": {\"size\": [853, 1280], \"counts\": \"QfQe08\\\\j02O1O1N10000000001O0000000O2O000000000O10000000000000000000O100000000O100000000000000000000000000000000000000000000O1000000O1000000O100000000O1000000O10002MWhV:\"}}, {\"image_id\": 43, \"category_id\": 2, \"bbox\": [827.0778198242188, 657.555419921875, 54.7708740234375, 8.5509033203125], \"score\": 0.1416093409061432, \"association_id\": 0, \"segmentation\": {\"size\": [853, 1280], \"counts\": \"[aae05`j00O1000000000001O0O10000000000000000000000000000000O10000000000000000000000000000000000000000O101O0O2Nb\\\\]:\"}}, {\"image_id\": 43, \"category_id\": 2, \"bbox\": [510.751708984375, 597.9840698242188, 769.248291015625, 61.92852783203125], \"score\": 0.051258634775877, \"association_id\": 0, \"segmentation\": {\"size\": [853, 1280], \"counts\": 
\"Tmga01dj01N101N100000000000000001O001OohT30PWkL2N101N10000O101O00000000000000001O000000000000000000000000001O0O1000000000000000O1000000000O1000000000000000000000O1000000000000000000000000000O01000000000000000000000000000000000000000000000O1000000000000000000000000000000O100000000000000001O000O1000001O001O0O2N3M3M`T\\\\11^kcN6K2N1O101O0O2O00001O000O100000000000O1000000000000000000000O1000000cVOAdh0?ZWOEdh0;\\\\WOFch0:\\\\WOGdh09\\\\WOGdh09\\\\WOGdh09\\\\WOGdh09\\\\WOGdh09\\\\WOGdh09[WOHeh08[WOHeh08[WOHdh08]WOHch08]WOHch08\\\\WOIdh07\\\\WOIdh07\\\\WOIdh07\\\\WOIdh07\\\\WOIdh07\\\\WOIdh07\\\\WOIdh06\\\\WOKdh05\\\\WOKdh05\\\\WOKdh05\\\\WOKdh05\\\\WOKdh05\\\\WOKdh05\\\\WOKdh05\\\\WOKdh05\\\\WOKdh05\\\\WOKdh05[WOLeh04[WOLeh04[WOLeh03\\\\WOMch04]WOLch04]WOLch04]WOLch04]WOLch04]WOLch04]WOLch04]WOLch04]WOLch04]WOLch04]WOLch04]WOLch04]WOLch04\\\\WOMdh03\\\\WOMdh03\\\\WOMdh03\\\\WOMch04]WOLch04]WOLch04]WOLch04]WOLch04]WOLch04]WOLch04]WOLch04]WOLch03^WOMbh03^WOMbh03^WOMbh03^WOMbh03^WOMbh03^WOMbh03^WOMbh03^WOMbh03^WOMbh03^WOMbh03]WOMdh03\\\\WOMdh03\\\\WOMdh03\\\\WOMdh03\\\\WOMdh03\\\\WOLeh04[WOLeh04[WOLeh04[WOLeh04[WOLeh04[WOLeh04[WOKfh05ZWOKfh05ZWOKfh05ZWOKfh05ZWOKfh05ZWOKfh05ZWOKfh05ZWOKfh05ZWOKfh04[WOLeh04[WOLfh03ZWOMfh03ZWOLgh04YWOLgh04YWOLgh04YWOLgh04YWOLgh04YWOLgh04YWOLhh03XWOMhh02YWONgh02YWONgh02YWOMhh03WWONih01XWOOhh01XWOOih00WWO0ih00WWO0ih0OXWO1hh0OXWO0ih00WWO0ih00WWO0ih00WWO0ih00WWO0ih00WWO0jh0OVWO1jh0OVWO1jh0OVWO0kh00UWO0kh0OVWO1jh0OVWO1jh0OVWO1jh0OVWO1jh0OVWO1jh0OUWO2kh0NUWO2kh0NUWO1lh0OTWO1mh0NSWO2mh0NSWO2mh0MTWO3lh0MSWO4mh0LSWO4mh0LSWO4mh0LSWO4mh0LSWO3nh0LSWO4mh0LRWO5nh0KRWO5nh0KRWO5nh0KRWO5nh0KRWO5nh0KRWO5nh0JSWO6mh0JSWO6mh0JSWO5nh0KRWO5oh0JPWO7Pi0IPWO7Pi0IPWO7Pi0HQWO8fi0000000O100000000000O100000O10000000000000000000O100000000000000000000000O10000000000000O100000000000000000000000O1000000000000O100000000000000O1000000000001N10001NeV5\"}}, {\"image_id\": 44, \"category_id\": 1, \"bbox\": [256.8251647949219, 192.1514892578125, 36.66656494140625, 
74.15780639648438], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [480, 640], \"counts\": \"fTi39c>6L3L4M4M2jBVOS<m0iCVOU<Q1WClN27f<h1N200O1O001000B\\\\C`Nc<c1ZC_Ne<n1O2O0001O0000000O2N1O1M3L4YOUCmNX=o0?N3L8Gf^R5\"}}, {\"image_id\": 44, \"category_id\": 1, \"bbox\": [356.5623474121094, 146.1116943359375, 81.16250610351562, 145.97732543945312], \"score\": 1.0, \"association_id\": 0, \"segmentation\": {\"size\": [480, 640], \"counts\": \"a`W5a0[>7I8J5L3L5K4N2M4L3M2N3M3N1N2O1M4L3N2N2N3L3O1O2M2O1O1O2N2N1O2N2N1O1O3M2N101N2O010O1N1O2N2O0O1O2O002N2N5J3M0010O00001O2N1O1O002N2N001O1O2N3M2N2N3_NnDnNU;l0REPOZ;a0kD]Of;0aDKb;OcDNa;MeDO\\\\=IV[o2\"}}, {\"image_id\": 44, \"category_id\": 2, \"bbox\": [57.881492614746094, 279.2305908203125, 359.1332702636719, 81.1993408203125], \"score\": 0.9999995231628418, \"association_id\": 1, \"segmentation\": {\"size\": [480, 640], \"counts\": \"Sh^12n>0O100O2O0O100O1O101N100O100O2O0O100O1O001O1I7I7K5N2O100O100O1N101O1O1O1O1O100O01000000000O010000O1000O10O100000O01000O10O10O1000000O0100000000O01000O1000O01000000O0100000O010000O1000O10O100000O100000O0100000000000O01000000O01000O10O100000000O010000O01000000O0100000000000O01000000O10O1000O10000000O1000O10O10000000O2O001O1O001O000000001O00000000000O100[OYB@1<e=4`BL`=3bBL^=4bBL^=3cBM]=3dBL\\\\=3eBM[=3eBM[=2fBNZ=2fBM[=2fBNZ=1gBOY=OiB0X=OiB1W=OiB1V=OkB1U=OkB0V=0jB0V=OkB1U=OkB1U=OkB1U=NlB1U=OkB1U=NlB2T=NlB1U=OkB1U=OkB1U=NlB2T=NlB1U=OkB1U=OkB1T=OmB1T=NlB2T=NlB2T=NlB1U=NkB3U=MkB2V=NjB1Y=LhB3S>O1000O100O10O10000000O1000O1000O0100000O10O10000O1O1000000OWO0dB0\\\\=1cBO]=1cBO]=1cBO]=1cBO]=1cBO]=1cBO]=1cBO]=1cBO]=1cBO]=1k0OO000eY31ZfL000000000000000000O10000000[Si3\"}}, {\"image_id\": 44, \"category_id\": 2, \"bbox\": [162.04331970214844, 267.5562744140625, 70.07676696777344, 11.924713134765625], \"score\": 0.9998887777328491, \"association_id\": 0, \"segmentation\": {\"size\": [480, 640], \"counts\": 
\"bV\\\\23l>2N10000O10000000O1000000000O10000000O10O1000000000000000000001O0O10O100000000000000O1000000000O100000000O100000001N1000000O100000000000c^o5\"}}, {\"image_id\": 44, \"category_id\": 1, \"bbox\": [0.0, 226.15525817871094, 142.0872344970703, 204.11830139160156], \"score\": 0.9901199340820312, \"association_id\": 0, \"segmentation\": {\"size\": [480, 640], \"counts\": \"R9X3`:kLYFm3X9YLWFLNQ4g9VLUFX4k9=0001O1001N101O1O1O1O1O2O0O1O0O2N101N2O001N200O1O001N1O2O001O2M2N2N2O000L4L5N101N2I7K5M3O0O2N2N2N101N2N2K5M3N2O1N2N2N2O100O010O100001O0000001O1O2N2N2N1O2Nm0SO3M2N2N1O2N001O1O1O1O2N1O00001O0000000000000000O100O1O10000000000O1O1N2O1N2O1N2O1M3M3N2M3K5F:M3N2N2J6K5L4N2O1M3N2O1O2M4LmVY7\"}}, {\"image_id\": 45, \"category_id\": 1, \"bbox\": [434.66180419921875, 282.0588073730469, 147.50323486328125, 286.9382019042969], \"score\": 1.0, \"association_id\": 2, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"gcV9:Vd0n0A`0SO^N\\\\]OR2Tb0k0F9M4K4L5G9H8L3M3M3L5K6K5L2N2L5K7I5M2M3L5J8I6K4L4M4L3M3N2M2M2N3M2N2O1O2O0N2O2M2N2OO01O1O1O2M2N3L4N101OO001OO200010O0100O100000O100000000000000001O001O001O1O1O2N1O1O1O001O1O001O1O001O1O001O001O00000000O101N1N2\\\\HiCQ6Z<mIkC`5CfIf<h0lCY5f<eJ_CT5e<kJ^CQ5e<mJ]CP5e<oJ_Ck4e<RK`Cg4e<VK_Ce4e<YK^Cd4e<XK^Ce4g<VK]Cf4j<SK[Cb4P?E9J6L5K7I6I5K5M3O4K7I3L2O1M5K:F7I4L6H<Cn[V9\"}}, {\"image_id\": 45, \"category_id\": 1, \"bbox\": [773.4592895507812, 240.6722412109375, 184.31072998046875, 246.19363403320312], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"cZY`02Ue07K4K5L3K6I6L4M3M2M4J6]Od0J6K5L4L4Lg0XO9@<K4N2N2M3L4H8J5M4M3L3M2]Od0E;M3M201O1N2O1N2N2O1O1O001N2O1N2O00100O10O10O100O1O001N2O1O1000O01000O10000O1O1O1O100O1000000O1000000000000000000000000001O0000001O1O1O1O1O001O000000001O01O1O1O2N1O1O001N1O2N1D=M3M3L5J6I6L4N2N2N3L4I6I7M3M2O2M4I7[Od0\\\\Od0K5L4M3K6I7L3M3N2O1O2O2N3M2OO01O101O9HO0O10O00100000O1O1O010O1O1O2N0O10N3L8@Uf[1\"}}, {\"image_id\": 45, \"category_id\": 1, \"bbox\": [68.82193756103516, 
330.82781982421875, 220.2081298828125, 328.40142822265625], \"score\": 0.9999998807907104, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"jhc1c0cd0:H6K5L3L5J6J<Q]OB\\\\`0a3UO5L3M2N2N3M3K6K4L3N2O1N2N2N2M3M4PC^IQ;f6lD\\\\IR;f6lD\\\\IR;f6lD\\\\IR;e6mD\\\\IR;f6mDZIR;g6mD[IP;g6nD[IP;h6nDYIP;i6oDXIo:j6oDXIP;j6nDXIP;j6nDWIP;m6mDTIQ;P7lDQId:b7YE^Ha:j7\\\\EWHa:n7\\\\ESHb:P8\\\\EQHc:R8ZEoGe:S8XEoGh:Z9O10000O1000000000O10000000000O1000000O100O100N2O1O1O100O100O100000000O100000001N1oFSEd7m:WH_Eb7a:ZHeEd7\\\\:XHhEf7Y:XHjEg7X:UHkEi7W:RHoEl7i;N4L3M3M2N2O1N2O2N3L3N2N1O1N2O1O3M4L4L2N2N2N1N3N3M4L2N1O1O1O1O2N3M5K3M2M3N4L5L7Hh0XOi1WN8H2N2N1O2N3M6J6K1N2N1O001O0010O01O00100O2N2N100O1O10O010O2N001O1O10O00001O1O001O1O1O100O001O001O001O00000000000000001O000O2O001O001O00001N10001O0O5Kmc[?\"}}, {\"image_id\": 45, \"category_id\": 2, \"bbox\": [831.5974731445312, 469.3290100097656, 191.5291748046875, 15.631988525390625], \"score\": 0.9999997615814209, \"association_id\": 1, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"Q]ca03Ue04O0O10001O000000000O1001O000000000000000O10O1000000000O1O001N20000000000O100001O000000001O010O1O001O00001O0000001O00000000000000000000000000001O000000000000000000000001N1000O100000000000000000000000000000000000000000000000000000000000000000000000000000O10O1000000000000000000000000000000O1000000000000000000000000000O100000000000000000000000000O2O0000001Nd6\"}}, {\"image_id\": 45, \"category_id\": 2, \"bbox\": [487.7815246582031, 550.4876098632812, 286.9972839355469, 17.80767822265625], \"score\": 0.9999938011169434, \"association_id\": 2, \"segmentation\": {\"size\": [683, 1024], \"counts\": 
\"cfX:1Ye02O00001N1000001O0000000000003M0000001O000000000001O0O10001O0000001O1NiZ?NSP@0Se0401O1O001N10000000000000000000000O1000000000000000000000000000000000000000000O10000000000000000000O10000000000000000000000000000000O10000000000000000000000000000000000000000000000000000000O100000O100000000000000000000000O10000000000000000000000O100000001O0000000000000O1000000000000000000000000O10000000000000000000000O1000000000000000000O100000000000000000000000000000000000000000000000000000000000001O00000000000000001O00000O101OeVZ5\"}}, {\"image_id\": 45, \"category_id\": 2, \"bbox\": [143.07266235351562, 620.332763671875, 259.78973388671875, 31.45721435546875], \"score\": 0.16080716252326965, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"X[Y32Ye0000O1O10000O10000O10000O100O10000O1000000000000000000000000O10000000000000000000000000000O1000000000000000000000000000O2O0000000000001O00001O1O0000000O101O001O001O000O1000000000hZ90XeF1N101O00000O100000000O1000000000000O100000000000000000000000000O10000O1O001N2O1O10O10000000O010000O1000000000000O1000000O10000000000O10O010000000000O1000000O10000000000O100000000000000O10O10O100000000000O10O100000O100000000O100000O10O100000000000000000O100000000000000000000O2O00WXn<\"}}, {\"image_id\": 46, \"category_id\": 1, \"bbox\": [345.5720520019531, 171.39859008789062, 188.09652709960938, 345.3871765136719], \"score\": 0.9999982118606567, \"association_id\": 2, \"segmentation\": {\"size\": [683, 1024], \"counts\": 
\"^Te7?jd05L3OO1O1O010O10O00000O01000001M3M7IbZ3J]eL:J6M3M3N2M3L5L5K3M4M1O2N1N2N3M2N2O1O2_MTN`An1[>[N^Ah1^>`N[Ac1`>gNVA_1e>kNo@[1o>jNg@^1W?gNU@i1]3hLc8U5WGQKg8Q5QEeKHCU;i4fDWLHWO`;d4dDbL]OoNo;a4_DgM_;\\\\2]DfMb;R6N2N2N2M3M3M3TOl0N2O1O1O1000O1O1O1M3N2N2O1N2N2eNSETHQ;a7dEaGP;]8P1O1O2M4L3M6I8I6K5K6J6J4K4K4M3N2O0O2O1N2O0O3N1N2N2WKj@Z3X?dLj@Y3X?gLh@W3[?gLg@W3[?gLh@W3X?gLn@V3R?hL`Ag2`>YMRBU2o=iMWBS2i=kM[BS2e=kM]BT2e=hM^BY2d=_MaBb2b=SMfBl2m?1O101N110O108H6J0N1000102N3M2M1N1O105L7I7IN010O0O2N5K4L3M4L3lLh^OP2[a0eMQ_OU2Yb0J6J5K3M2N2N3L9H>B7H5L2M4KeeW:\"}}, {\"image_id\": 46, \"category_id\": 2, \"bbox\": [0.0, 524.8439331054688, 486.66436767578125, 38.69805908203125], \"score\": 0.999997615814209, \"association_id\": 2, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"Vl12Xe02O00000O1000000O1000000000000000000O10000000000000000000000000000O10000000000000000000000000000000000000000O10000000000000000000000000O1000000000000000O1000000000O1000000000O100000000000000000O10000000O10000000O10000000000000000000O100000O10000000000000O100000000000000O10000000000O10000000O100000000000000000O0100000000000000000000O0100000000000O1000O1000000000000000O1000O10000000O10000000000000O1000O10000000O100000O1000O1000000000000000O1000O1000000000O10000000O1000000000000O0100000000000O1000000000000000O10O1000000000000000000O100000000000000000000O100000000000000000000O1000000000000000O01000000000000O10000O1000000O10000000000000000O100000000000000000000O10000000000000EY[O1hd0OY[O0gd0OZ[O1fd0OZ[O1ed00\\\\[OOdd01\\\\[OOdd01\\\\[OOdd00][O0cd00][O0cd00][O0cd00][O0cd00][O0cd00][O0cd0O^[O1bd0O^[O1bd0O^[O1bd0O^[O1bd0O^[O1bd0O^[O1bd0O^[O1bd0O_[O0ad00_[O0ad00_[O0ad0O`[O1`d0O`[O1`d0O`[O1`d0O`[O1`d0O`[O1`d0O`[O1`d0O`[O1`d0O`[O1`d0O`[O1`d0O`[O1`d0O`[O1`d0O`[O1`d0Na[O2_d0Na[O2_d0Na[O2_d0Na[O2_d0Na[O2_d0Na[O2_d0Na[O2_d0Mb[O3^d0Mb[O3^d0Mb[O3^d0Mb[O3^d0Mb[O3^d0Mb[O3^d0Lc[O4]d0Lc[O4]d0Lc[O4]d0Lc[O4]d0Lc[O4]d0Lc[O4]d0Kd[O5\\\\d0Kd[O5id00000000000000000000000O10000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000000001O00000000000000000000000O2O00idY;\"}}, {\"image_id\": 46, \"category_id\": 2, \"bbox\": [589.5731811523438, 504.49652099609375, 434.42681884765625, 28.11175537109375], \"score\": 0.9999954700469971, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"V\\\\c<1Ze0000O100000001O000000000O1000000000000000000000000000000000000000O10000000000000000000000000000000000000O1000O10000000000000O1000000000000O100000000000000000000O1000000000000000O10O1000000000000O10000000000000000O1000000000000000000O10000000000000000000000000000O100000O100000000000000000000000O1000000000000000O10000000O1000000000000000000O10000000O100000O100000000000000O100000000000000O100000O10000000000000O10000000000000000000000000000000O0100000000000000000000000000000O10O100000000000000000O1000000000000000O100000O1000000000O10000000000000O1000000000O010000000000O10000000000000000O1000O1000000000000000O1000000000000000000000000000000O10000000000000000000000000000000000000000O100000O100000000000000001O0O100000000000000000000000000000O10O100000000000000000000000000000000000000000000000000O1000000000000000001N100000001O1NV`1\"}}, {\"image_id\": 46, \"category_id\": 1, \"bbox\": [536.5973510742188, 528.5909423828125, 15.05987548828125, 17.84747314453125], \"score\": 0.9999734163284302, \"association_id\": 3, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"VfV;:Pe03M10001O001O00000000O2N2O1MfYj9\"}}, {\"image_id\": 46, \"category_id\": 1, \"bbox\": [299.9187316894531, 189.91969299316406, 76.19537353515625, 166.1185760498047], \"score\": 0.999891996383667, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": 
\"UZX64nd0e0B5L3M2N0P]OWO^a0k0_^O[O]OJVa0k0[_O^OTO1`a0b0[_O^OQO4ca0`0Z_O7e`0NW_O2e`05W_OLe`0:Z_OEc`0>o_OPOn?S1X@gNh?Z1X@eNg?]1Y@bNg?`1W@RNeN6Ta0h1X@mMlN6l`0m1Q200100O1OZOS]OoNlb0o0X]OPOhb0n0[]OPOfb0m0]]OSOcb0j0`]OUOab0i0d]OSO]b0k0e]OTO]b0i0d]OXO_b0d0a]OXOgb0d0Y]O[Ojb0b0Q1O2N2M3M2O2M2OV]O0[`0Md_O8fb03M4M2O2M1O102N11O00N11O1OO3O0OO0O4L1O3M3M2N4L3M2M3NUad=\"}}, {\"image_id\": 46, \"category_id\": 1, \"bbox\": [369.60888671875, 166.26324462890625, 121.64877319335938, 231.0203857421875], \"score\": 0.9786441922187805, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"ZTg7P1Wd0;H100O100000001O0O11N2O0]OR\\\\OHQd0NZ\\\\OMhc02\\\\\\\\OGic06g0O3KXe02bZO8M3N1N000d[OKdc04^\\\\O0^c00b\\\\O3[c0Mf\\\\O5Wc0Li\\\\O6Sc0Ll\\\\O7ob0LQ]O6lb0JU]O7ib0IX]O8fb0G\\\\]O9bb0Ga]O9\\\\b0Gf]O9g?QOUBf0UN:a?\\\\OPB;`N9\\\\?AQB6eN9W?FPB2jN8T?IoA0nN7o>NPBJTO8j>1oAGYO7g>6o@D\\\\O1o05e>:i@`0b0VOd>c2^A\\\\Ma>d2_A]Ma>b2`A^M`>a2aA_M`>`2aA_M`>a2`A\\\\Mb>d2`AWMd>h2]AVMe>j2[AVMd>l2ZATMe>n2[ARMc>P3\\\\APMc>R3]AmLb>V3`AgLh=3QAZ3]1\\\\L]=`4e12N2O1O2M2O1O1O2N1O1O0000001fAgJ`<Y5i1O10001lMo_OlNQ`0U1n_OkNS`0U3fAlJ_<T5`CmJa<R5_CnJd<n4V2F`0A1N2O2`Mn^Oc0Sa0[OS_O`0n`0^OU_O`0l`0_OU_O`0m`0]OS_Oe0m`0ZOS_Of0n`0YOR_Oh0m`0XOR_Oi0o`0VOR_Oi0n`0XOR_Og0o`0XOQ_Oh0o`0XOR_Oh0o`0VOS_Oh0o`0VOU_Og0l`0WOY_Of0f`0YO^_Of0``0YOW@2h?M\\\\@1e?M^@0b?Oa@N`?1b@N_?0b@0^?Od@0^?Ld@4^?Ic@7^?Fc@;^?@f@c0nMUObc0b1I10M8lNeVS;\"}}, {\"image_id\": 46, \"category_id\": 2, \"bbox\": [72.34925079345703, 401.9026184082031, 334.3741149902344, 12.935791015625], \"score\": 0.9429988861083984, \"association_id\": 1, \"segmentation\": {\"size\": [683, 1024], \"counts\": 
\"[^P31Ze01O000000000000000O1000000000000000000000000000000000O10O10000000000000000000000000000000000000000000000O01000000000000000000000000000000O100000000000000O0100000000000000000000000000000000000000000000000000O1000000000000000000000O1000000000O100000000000000000000000000000000000000O010000000000000000000000000000000000000O10000000000000O10000000000000000000000000000000O10000000000000000000000000000000O10000000000000000000000000000000000000000000000O100000000O10Vj_=\"}}, {\"image_id\": 46, \"category_id\": 2, \"bbox\": [513.804443359375, 544.3756713867188, 28.455810546875, 3.6173095703125], \"score\": 0.731511116027832, \"association_id\": 3, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"m[S;1Ze0000O100000000001OenP:\"}}, {\"image_id\": 46, \"category_id\": 1, \"bbox\": [303.83251953125, 186.85598754882812, 116.26171875, 204.24362182617188], \"score\": 0.7285131216049194, \"association_id\": 1, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"ZoZ6a0ed0<G5Y]OQOk`0R1f^OROkN4[b0m0f^OKk`07S_ONh`04W_OMh`04Y_OKd`08]_OF[`0d0j_OVOS`0n0m_OPOR`0P1S@lNl?T1[@eNe?l0m^OlNb15`?n0R_OkNb12\\\\?T1Q_OlNj1GV?]1o^OPOQc0R1l\\\\OQORc0>T]ONkb0Od]OE]b08g]OFZb08i]OEXb0:k]OCWb0<o]O]OTb0b0[1B][O2Z1IUb07i]ONUb00l]O0ZY1JPYO0k]O0Ub01b11O1N20NgR23WmM5K4L4L2O11OO1O01011O3L0000006J7I01011M2M01O2N1O0O11O0O10O1O0O2O0O11N1O1O1O10N2N2Ck[OBWd0<o[O^OTd0`0<N2Lmo1I^PN00N3NoP21onM7iNGQ]O<oc0O001OeNJa]O6^b0Kb]O8Zb0Hf]O:Wb0Hi]O9Sb0Jm]O6Rb0Jo]O5Rb0JP^O4Qb0KQ^O2Qb0JW^O0ja01f13L4G81M^Wc<\"}}, {\"image_id\": 46, \"category_id\": 1, \"bbox\": [988.6725463867188, 402.102294921875, 32.0477294921875, 23.139312744140625], \"score\": 0.14055714011192322, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"]\\\\ed09Pe05M1O1N2O1O000001O01O01O001O1OO01000000O1000O01O01O0100O2N\\\\c1\"}}, {\"image_id\": 47, \"category_id\": 1, \"bbox\": [355.6671447753906, 178.50344848632812, 152.89031982421875, 83.51321411132812], \"score\": 1.0, \"association_id\": 1, \"segmentation\": 
{\"size\": [394, 583], \"counts\": \"nUY4m0[;3N2N2N1N2N200O1O1O0O20O0100O0010O0100O1O10000O100O2N100O2O000O1O1O2O0O1O1O1O2O0O101N101N101N101O2N1O001O1O2N1O1O00000000001OO100O1O100O100O1O100O100000000000000O100000000001O000000000000000000000000000000000000000000000000010O01O0000010O01O00010O01000O01O00100O001O002N2N1O1O1O2M4M4K3M2N2N3M<@mUn0\"}}, {\"image_id\": 47, \"category_id\": 1, \"bbox\": [48.71792221069336, 142.719970703125, 73.79486083984375, 43.66584777832031], \"score\": 1.0, \"association_id\": 2, \"segmentation\": {\"size\": [394, 583], \"counts\": \"ePc05Q<5L6L1O2N1N3L3N2O1O010O1O1O001O101N100O1O2O000O2O000000000000000000000000000000000000000000000000000000000001O0O1000001O1O0010O01O1O1O2N2N1O6J1OPWa5\"}}, {\"image_id\": 47, \"category_id\": 1, \"bbox\": [157.80194091796875, 165.99111938476562, 119.97708129882812, 97.70492553710938], \"score\": 1.0, \"association_id\": 3, \"segmentation\": {\"size\": [394, 583], \"counts\": \"o`n14?=d:JTEc0b:d0N2M3N2N101N1N2N2N2L5M3N3N2M2O1N2N2N1O1N2O2N1O1O1O1O0100N101O1O1O100O1O100O100O1000000000000000000000000000000000000000000000000001O000000000000001O0000001O01O01O0000010O00100O001O10O01O01O101N2N1O002N6J1O1O1O4L2N1O1O9G:F3M2M<DTTe3\"}}, {\"image_id\": 47, \"category_id\": 2, \"bbox\": [365.5315856933594, 244.4785919189453, 136.42626953125, 26.424240112304688], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [394, 583], \"counts\": \"\\\\^]42X<1O000O2O000000000000001O0000001O000010O000001O1O0000001O00001O01O01O01O1O4M0O001O2Nm^30RaL2O000O1N2O101N1O1O10000O101O00000001O01O00000000000001O0001O0000000000000000001O00000000000001O00000000000010O0001O001O00001O1N2O0000000O100000O10000000O1000000O3LWmn0\"}}, {\"image_id\": 47, \"category_id\": 2, \"bbox\": [179.94503784179688, 241.92581176757812, 119.603515625, 34.630584716796875], \"score\": 0.9999994039535522, \"association_id\": 3, \"segmentation\": {\"size\": [394, 583], \"counts\": 
\"iVX21W<2O2N1O1O1M3O2O0O1O101N100O10001O0O100000000O101O0000000000001O01O00000000001O00000000001O0000000000001O000000001O001O012M3M1O1O001O1O0000001O01O0001O0000000000O1000000O100M3O100O1M3O100001O001O002M2O1O001O001O1O0O2O1O0O4MXP]3\"}}, {\"image_id\": 47, \"category_id\": 2, \"bbox\": [57.655704498291016, 180.3670196533203, 68.16365051269531, 7.1911773681640625], \"score\": 0.9999990463256836, \"association_id\": 2, \"segmentation\": {\"size\": [394, 583], \"counts\": \"]Xg01Y<001O0000000000001O01O1O000001O0000000000001O000000000000000000001O0000000000000O1000000000000000000001O0000001O0000000000001O0O\\\\Q`5\"}}, {\"image_id\": 47, \"category_id\": 2, \"bbox\": [277.8927917480469, 231.83377075195312, 11.416107177734375, 7.709259033203125], \"score\": 0.9438832998275757, \"association_id\": 0, \"segmentation\": {\"size\": [394, 583], \"counts\": \"TV[36T<000000000001O1O0O2Oal`3\"}}, {\"image_id\": 48, \"category_id\": 1, \"bbox\": [0.0, 268.55487060546875, 89.404541015625, 108.80520629882812], \"score\": 1.0, \"association_id\": 0, \"segmentation\": {\"size\": [685, 1024], \"counts\": \"W9_1ic0:J4L4M2M2O3X]OiMTb0Z2g]OhMXb0\\\\2b]OgM^b0d2010O0000001O000000O10TOb]OiN^b0V1d]OiN\\\\b0V1g]OhNYb0W1j]OgNUb0Z1l]OeNTb0[1m]OdNSb0\\\\1n]OcNQb0^1P^OaNPb0_1P^OaNPb0_1P^OaNPb0_1P^OaNPb0_1Q^O`Noa0`1Q^O`Noa0_1R^OaNna0_1S^O`Nma0`1T^O_Nla0`1V^O_Nja0a1X^O\\\\Nia0c1Z^OZNfa0g1P101N1000000O100O100O2O00000O2O000O1O100O10000000O1O1000O1000O10000O10O01O1O001N2O001O1N1001O2H7D?KZVac0\"}}, {\"image_id\": 48, \"category_id\": 1, \"bbox\": [99.4918441772461, 273.0865478515625, 781.0330200195312, 241.9990234375], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [685, 1024], \"counts\": 
\"SnW21Ye05K5B>L4_[O[OTd0Q1M3M2O2N2N2N2N2N2M3F;M3N1N3M2N2O0O2N2aNZMU@f2k?[MS@g2l?YMT@g2l?ZMR@h2m?YMh_O4TOc2Ta0ZMg_OQ3X`0QMe_OP3[`0QMP_OM9T3e`0[MZ_Oe2f`0\\\\MX_Oe2h`0[MX_Of2g`0[MW_Of2i`0[MV_Oe2j`0\\\\MT_Of2k`0[MS_Of2m`0[MQ_Og2n`0l00001O001N10001O00001O00001O0000001O00001O0O11O00000001O0000000000000000001O000000000000000000000001O0000001O00001O2N3M2N<oKi^OT3ka0M3M1O1O1O001O1O001O001O00001O00001O001O1O010O1O1O1O1O1O1O100O001O001O001O001O00010O00001O00001O010O001O00010O0010O01O010O0010O01O0010O0001O01O0000000000001O000000000000001O00000000001O0O10000000001O000000000O100000000O10000000000O100000000000000O1000000000001O00000O1000000000000000001N10000000000000000O10001O00000O10000O10000O100O100O10000O1000000O10000O100000000O1000000O1000000O100O10000O1000000O10000000000O1000000000000O1000000O0100000O100O100O100O100O10O10O1000000O100000000O101O00000O101O000O101O0O10001N100O101O000O101O000O101O00001N10001j_OkKQ?V4o@kKP?V4n@lKQ?U4m@lKR?V4l@lKS?T4l@nKR?T4k@nKU?S4^@hKJ7f?R4_@[L`?f3]@\\\\Lb?f3\\\\@\\\\Lb?e3]@\\\\Lc?e3[@]Ld?f4O2O0O101O000O2O00001N100000001N10000O101O0O100O1000001N1000000O1000001O00000O100000000000000000001O000000000O100000001O00000000000000001N1000000000001O0000000O1000001O0000000O1000001O00000001O00000000010O000000000000001O01O00000000000001O01OO10001N100O2O000O2O0O2O0O2O0O2O0O2O0O101N100O2O0O10000O2O0O1O101N1N2N3L3L5I7N3M3M3M3N0O2N101N100O2O0O100O2O00000O101O00000O101O0000001N101O00001O001N10001O000000001N10000000000000000000000000001O00000000000000000000000O100001O00001O0000001O00001O001O001O001O1O1O1O1O1O1O1O1O1O001O001O001O1O001O1O1O1O1O2N1O2N1O2N0O2O00001O00001O00001OO100000000O1000000O10000O100O1N2N2N2L4UN`_OeNd`0Y1c_O_N``0_1f_O[N\\\\`0d1g_OXN[`0f1i_OWNX`0h1j_OUNX`0j1j_OTNX`0j1j_OlM_`0S2c_OiM``0V2b_OfMa`0Z2Y1O1O1O1O1O1O2N1O100O1O1O1O2N1O100O1O2N100O100O2O0O101N100O2O0O2O1N1O2O1N2N1O3M2O1N2N2N2N2N2N2N2N2O2M2N2O1M3KbeX3\"}}, {\"image_id\": 48, \"category_id\": 2, \"bbox\": [11.231396675109863, 448.4638671875, 795.0176391601562, 165.1954345703125], \"score\": 
0.9999995231628418, \"association_id\": 1, \"segmentation\": {\"size\": [685, 1024], \"counts\": \"obj07Pe07O1N2N2O1N101N10000O2O000O101O0O100O1O2O0O1O101N1O1O2N1O1O2O0O100O101O000O100000001O0O10O100000O10000O10000O10000O10O10O100000000O100000O100000O1000O1000000O1000O010000O100O01000O10000O100000O010000000000O2O00000O10000O100O1O2L3N20000O10001N10000000000O2O000000001O00001N101O001O1O0O2O1O001N101O001N101O00000O2O000000001O00000000001O0000000O2O00000000001O00001O00001O00O1000000O1000001N1000000000001O0O1000000000001O0000000000001N10000000001O0000000000000000001O000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001O000000000000000000000000000000000000000000000000000000000000000000001O00000000000000000000001O000000000000001O00000000000000001O00000000000000000000000O101oNZOY]Of0fb0]OX]Oc0gb0@W]O`0ib0AV]O?ib0CV]O=jb0DU]O<jb0FU]O:kb0FU]O:kb0GT]O9kb0IS]O8mb0HS]O8mb0HS]O8mb0IR]O7mb0JS]O6mb0JS]O6mb0JS]O7lb0IS]O8mb0HS]O8mb0HS]O8mb0HS]O8mb0HR]O9nb0GR]O9nb0GR]O:lb0GT]O9lb0FT]O;lb0ET]O;lb0ET]O<kb0DU]O<kb0DT]O=lb0CT]O>jb0CV]O=jb0CV]O=jb0CU]O?jb0@W]O`0ib0@W]O`0ib0@n\\\\O^O2R1Pc0@m\\\\O@1R1Pc0_On\\\\OA1P1Qc0_On\\\\OA1P1Qc0_On\\\\OB0o0Rc0_Om\\\\OD0m0Sc0_Om\\\\ODOo0Sc0]On\\\\ODOo0Tc0[Oo\\\\OEMP1Uc0ZOn\\\\OFMP1Vc0YOn\\\\OGKP1Xc0WOn\\\\OIJP1Yc0VOm\\\\OJJQ1Yc0TOn\\\\OJIR1Zc0ROn\\\\OU1Sc0iNQ]OU1Rc0fNQ]OX1bc01O1O2i\\\\OdNYb0^1Y]OVO_b0Q2K3M1O1O1O1O1O7I4L2N001O001O00100O001O00001O01O01O000010O0001O00010O0000010O0000010O0001O001O00001O0010O0001O001O00001O002N3M2N00001O0001O01O0000001O00001O0O101O001O1O1O1O1O1O1O0000001O000000001O0000000000000000001O0000000O1000001O000000001O000000001O000O10000000001O000000000000000000001O0000000O1000000000001O00000000001O00000O2O001O001O1O1O1O1O1O00001O00001O0O10001O0000001O0000001O001OoNVN`^Oi1]a0[Nc^Od1Za0_Ng^O`1Wa0cNi^O\\\\1Ua0fNl^OY1Ta0gNm^OX1Sa0iNm^OV1Ra0kNn^OU1Ra0kNo^OT1Qa0lNo^OT1Qa0lNP_OS1o`0nNQ_OR1o`0oNQ_OP1o`0POQ_OP1o`0POQ_OP
1o`0POR_Oo0n`0QOR_Oo0n`0QOR_Oo0n`0QOR_On0o`0ROR_Om0n`0SOR_Om0n`0SOR_Om0n`0SOS_Ol0m`0SOT_Om0l`0SOT_Om0l`0SOU_Ok0l`0UOT_Ok0l`0UOU_Oj0k`0VOU_Oj0k`0VOV_Oi0j`0WOW_Og0j`0YOY_Od0g`0\\\\O\\\\_O`0e`0@^_O=b`0Ba_O<_`0Db_O:_`0Fb_O9^`0Gc_O8]`0Hc_O7^`0Ic_O6]`0Jc_O6]`0Jd_O5\\\\`0Kd_O5\\\\`0Ke_O3\\\\`0Md_O3\\\\`0Md_O3\\\\`0Lf_O3Z`0Mf_O3Z`0Mg_O1Z`0Of_O1Z`0Og_O0Z`0Of_O1Z`0Og_OOZ`01f_OOZ`00h_ONY`02h_OLZ`03g_OKZ`05h_OHY`08i_OEX`0;^2N2N3L3N]id5\"}}, {\"image_id\": 48, \"category_id\": 2, \"bbox\": [1.54511296749115, 384.95855712890625, 29.699615478515625, 6.060028076171875], \"score\": 0.9661754965782166, \"association_id\": 0, \"segmentation\": {\"size\": [685, 1024], \"counts\": \"mf12Ze02O000000000000000000000000000001O00O1000000000000001O00Zlgd0\"}}, {\"image_id\": 48, \"category_id\": 1, \"bbox\": [92.41081237792969, 245.86167907714844, 230.31813049316406, 164.0552215576172], \"score\": 0.9255075454711914, \"association_id\": 0, \"segmentation\": {\"size\": [685, 1024], \"counts\": \"\\\\nY2b0ad0>I4G9M3K5H8B>D<F:L4N1N2M3J6VOj0E;O1O1O1O100O1O1O1O100O10001O000000001O0000001O0000001O00000000000000001O0000000000000000000000000000000000000000000000000000000000000000000000O100000000000001N101O2N1O0O2O1O1N2O6J9F4M1O1O1O002N3M5K3M2N1O1O1O2M5L8H6J2N1O2N1O1O2O2M7I1O2N100O2N3M3M3M1O1O001O01000O1O100O001O000010O01O001O1O0000000000O100000000O1000000O1000000O100000000O1000O100000000000000O100000O0100M3L42N10O000001O01O0AZ\\\\OVOhc0g0b0LoWe>\"}}, {\"image_id\": 49, \"category_id\": 1, \"bbox\": [433.78765869140625, 29.62174415588379, 247.31939697265625, 113.25519561767578], \"score\": 1.0, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": 
\"PbU:=`g0:H3L4M2O1O00001O0O101O000O01O0010O0001O1O010000O10000O10000O2N101N101N100O2O0O1O2O0N2O2N1O2N2N101N2O2M2O2N5J2O1N2O001O1N2O1O1O001O001O1O1O1O1O2N2N1O1O1O001O1O001O1O2N1O001O001O1O001O000000001O00000000000000000000000000000000000000000000000000O10000O1O100O1O1O1O100O1O1O10000O10000O10000O100000000000000000000001O0000001O0000000010O000000010O0000001O00001O000010O01O1O1O100O1O001O010O1O100001O0O10O0100O10O1O010O001O00001O00001O001O00001O000000001O00000O2O0M4SOm0M3N2N1N4M3Ma0oNhXO9edX8\"}}, {\"image_id\": 49, \"category_id\": 1, \"bbox\": [695.0618896484375, 49.73197937011719, 187.0350341796875, 135.2887420654297], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"o[Y`0h0Sg09I4M2O2M2J7M3L3M4N101O0O2O1O0O2O001O000O101O0O1O2M3M2O2O0O2N1O1O1O1O1O1O1O1N2O1N2N2O1O1O100O1O100O1N2O1N2O1O1O10000O100O10000O1000001N10000000000000000000000000O01000000000000000001O0000000000001O0000001O001O0O2O001O1O001O1N101O1O001O0O2O001O00000000000000000000O1O1O1N2O1M3M3M3K6M2O1O100O1O1O100O1000001O001O00000000001O00000O2O001N2N1O2O0O2N1N2H9M2N3N2N2M3M3N2N2N3M3M8Ee]Z3\"}}, {\"image_id\": 49, \"category_id\": 1, \"bbox\": [300.5460205078125, 62.16727828979492, 131.09970092773438, 90.33595275878906], \"score\": 1.0, \"association_id\": 3, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"fkQ71hg0?F7J2N3L4K4O2M3N1N3N0O2M2O1O101N100O1O1N2O010O10O10O000N3O1000O100000O100O100000001O0O1N2N3N1O1O2O001N101N10001N101O0O10001O0O100O10000O10000O100O10O10000000O1000000000000000O10000001O0000000000000001O000000000O10000000000O2O00001N1jMcZOi1]e0VNeZOi1\\\\e0UNfZOi1[e0WNfZOh1Ze0WNgZOg1[e0XNgZOf1Ze0WNiZOh1je0K5M4K7F6K<@Qel=\"}}, {\"image_id\": 49, \"category_id\": 1, \"bbox\": [177.53793334960938, 45.105499267578125, 179.62030029296875, 97.80351257324219], \"score\": 1.0, \"association_id\": 5, \"segmentation\": {\"size\": [768, 1024], \"counts\": 
\"ibU4;bg0e0\\\\O3N2L4L4I7N2N1O2N1O1M3M4O0O100O2O0O1O2N101N100O2N2O1O001N101N2N101O0000000O10000000O10O10O100O001O1O10000O1000000000000000000000000000000001O000000000000001O00001O00001O00001O00100O001O001O1O001O01O00000000000000000O10000O100O1O1O1N2O1000000O101O0O1O1O0O2L4H8O2N100O100O10aZOTNjd0i1i0L3N3N1O2O0O2O001N1O2N2N2N3M3M3GoXO]OUg0?;K3InmS`0\"}}, {\"image_id\": 49, \"category_id\": 2, \"bbox\": [710.9478759765625, 154.0299835205078, 179.58563232421875, 36.49989318847656], \"score\": 0.9999998807907104, \"association_id\": 1, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"[mo`02ng01O1N102N4L1O001O00001O00001O000000000000000010O0000000001O000000000001O000001O00000000000000000000000O2O0000000000000000000001O01O0000000001O0000001O001O001O001O2N1O001O00001O1O1O1O000000000O1000O100O1O1O100O100O1C=O00100O1O1N2O100O1O01000O10O10O1000O10000O10000O10O01000000O10000000000O100N2M300O100O100O1O10000O100O1O10001N10U[T3\"}}, {\"image_id\": 49, \"category_id\": 2, \"bbox\": [959.9636840820312, 175.85092163085938, 61.84515380859375, 34.93341064453125], \"score\": 0.9999997615814209, \"association_id\": 2, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"Pfaf01ng02N2O1O2N1O1O1O1O001O00001O0O10000O1000001O0O1000000000001O00000O100000000000001O00000000000000001O0O10000000000000O5KTZ1\"}}, {\"image_id\": 49, \"category_id\": 1, \"bbox\": [949.4347534179688, 61.65119171142578, 72.448486328125, 132.47714233398438], \"score\": 0.9999990463256836, \"association_id\": 2, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"_]Yf09^g0=I5E;G8L3M3K5E;K5N2O0N2O2N2O010O2N0N3N1O2O1N2L4N101N2N1O1O2O1O100M3M3L4N2001O0O1001O00O100000O10000O1000001O0O20O001O00010O1O1N2N2O3L6PM^[OX2`e0Ek0kMhYOj0PR2\"}}, {\"image_id\": 49, \"category_id\": 2, \"bbox\": [205.17654418945312, 132.59176635742188, 99.67819213867188, 14.276748657226562], \"score\": 0.9999980926513672, \"association_id\": 5, \"segmentation\": {\"size\": [768, 1024], \"counts\": 
\"]li44kg02N100O1000000000000000000000001O0O10000000000000000O100000000000000O10000000000000000000000000000O1000000000000000000001O0000001O1O002N1N102N1O1O0OQ`10o_N1O2O0O1O1O100O1N200O00100000000000O10001O5HgSk`0\"}}, {\"image_id\": 49, \"category_id\": 1, \"bbox\": [870.102783203125, 38.08018112182617, 117.583984375, 74.07212829589844], \"score\": 0.999994158744812, \"association_id\": 6, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"miad04jg09H4L3L4N1N1O2O1N4L3M2O001O1O00000O2O0000000O10001N101N10001O0000000000000O100000001O0010O01O1O1O1O001O1O01O01O0010O000001O0000000010O0O101O001N101N1O2O0O2J5O1L4K5N2N3N2O1N1O2L4N1N2O2O0O100O1O2O0O1000001N10001NRfn0\"}}, {\"image_id\": 49, \"category_id\": 2, \"bbox\": [253.7897186279297, 333.05694580078125, 723.049560546875, 180.2166748046875], \"score\": 0.9999938011169434, \"association_id\": 4, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"fVj77gg04L2O2N2N101N101N10001N100O2O0O100O1O101N1O1O1O1O1O2N1O100O100O10000O2O00000O100000000O100000000O100000001N1000000O1000000O100000000O10001O00000O100000000000001N1000000000000000001N100000000000000O2O000000000O100000000O101O00000O1000000000000O1000001O0000000O1000000000001O000O100000000O1000000O10O10O100000000O1000000000000O1000000000000O100000O10O10000O100O10000O1000000O100000000000O10O10000000O10000O010O10000O10O1000O10O10000000O0100000000000O10O100000O10000O10000O100000O010000000000O10000000O1000000000O01000000O10O10O1000000O01000000O10O1000O100000O0100000O01000O01000O10O1000O10O1000000O010000000O100000O10O1000000O10O10000000O1000O1000O1000O0100000O010000000O10O10000000O1000O1000000000001N1000001N10001N2O1O2M2O2N001N1000001O000O101O000000001N1000001O0O2O001N2O0O2O001O0O2O00001N10001OO100000O1000O1000O1000000O10O010000O1000000O1000O10000000O01000000O100O001N2M3J6N101O100O00100O010O10000O010O100O10O0100O1O1M2K6E;M3M210O10O0100O01000O10O1000000000000O100000000000000O100000000000O10O1000001O000O101O000O101N101N100O2O0O2O0O101O0O100O2O00000O10001O00000O101O00000O2O0
01O000O2O000O2O00000O2O00000O10001O0O10001O000O2O001O0O2O00001N1000001N1000000O101O000O101O00000O2O1O1O1O0O2O1O1O0O2O00001N10001O0O10001N100O2O0O101N103LkSg1\"}}, {\"image_id\": 49, \"category_id\": 1, \"bbox\": [100.49388122558594, 145.0594024658203, 856.6506958007812, 346.31219482421875], \"score\": 0.9999895095825195, \"association_id\": 4, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"bi\\\\2;`0JTf0[1nYOcN1=mc0[1k[OaNOe0dc0R1V\\\\ObNMb0gc0R3M4M2N2O1N2O1N2O1N2O1O001O1O001O001O00001O001O00001O00001O00001O0000001O0000001O002O1N0010O010O00100O6J7I2N2N2N3M3M1O1O1O100Ob]OgKVa0X4h^OkKXa0T4f^OnKZa0R4d^OPL]a0o3b^ORL_a0m3`^OTL`a0l3_^OULba0j3]^OXLba0h3]^OYLca0g3]^OYLca0g3^^OXLca0g3^^OXLba0h3^^OXLba0h3_^OWLaa0i3`^OVL`a0j3`^OVL`a0j3`^OVL`a0j3a^OUL_a0k3a^OUL_a0k3b^OTL_a0k3a^OUL_a0k3a^OUL_a0k3a^OUL_a0k3b^OTL^a0l3b^OTL^a0l3b^OTL^a0l3b^OTL^a0l3b^OTL_a0k3b^OTL^a0l3b^OTL^a0l3b^OTL^a0l3b^OTL^a0k3c^OUL]a0k3c^OUL]a0k3c^OUL]a0k3c^OUL]a0k3d^OTL\\\\a0l3d^OTL]a0k3c^OUL]a0k3c^OUL\\\\a0l3d^OTL\\\\a0l3c^OUL\\\\a0m3c^OSL\\\\a0n3d^ORL\\\\a0n3d^ORL[a0o3d^ORL[a0o3e^OQLZa0P4e^OQL[a0o3e^OQLZa0P4e^OQLZa0P4e^OQLZa0P4e^OQLZa0P4d^ORL[a0o3d^ORL[a0o3c^OSL\\\\a0n3c^OSL\\\\a0n3c^OSL\\\\a0n3c^OSL\\\\a0n3c^OSL]a0m3b^OTL]a0m3b^OTL]a0m3b^OTL^a0l3b^OTL]a0m3b^OTL^a0l3b^OTL]a0m3b^OTL^a0l3b^OTL]a0m3c^OSL]a0n3a^OSL_a0m3a^OSL^a0n3b^ORL^a0n3a^OSL_a0m3a^OSL^a0n3a^OSL_a0m3a^OSL^a0o3`^ORL`a0n3_^OSL`a0n3`^ORL`a0n3_^ORLaa0P4]^OQLca0o3\\\\^ORLca0o3\\\\^ORLba0Q4[^OQLba0R4]^OoKaa0Y5N2M3O1O1O1O1O001O100O1O100O010O100@ZIe_Og6W`0c0J7I6M3M3O100O100O101N1O1O1O1N2N2N2O1O100O1O100O100O101O0O010O100O010O10O01O0010O01O010O010O010O010O01O010O10O01O10O010O010O010O001O01O000001O00O10O10O10000O2O001O0010O01O010O10O0100000O010000O01000O010000O010O10O0100O01000O10O10O10O1000O0100000O0100O10O10O1000O0100000O010O1000O010000O10O010000O1000O01000000000000000000000000O010000000000000000000000O1000O10000000O1000000O100O10000O100O100O10000O1000000O010000000O100000000000000000000000000O100000001O0000000000000000000000000000000000000000
0000000O1000000000000000001O00000000000O1000000000000000000000001O00000000000000000O1000001O0000000000000000000000000O1000000O100000O010000O1000000O1000000O10O10000000O1000000000000000000O101O00000000000000000000000O1000001O0000000000000000000O100000000000000O1000000000000O100000000O101O00000O100001O01O000010O0001O000010O000001O01O0001O0000010O0000001O0001O00010O00001O0010O01O00100O1O10O01O10O010O010O010O010O010O010O1O1N2O1O1O1O2N1O2N1O2N2N2N2N2N2N2N2N2N1O1O1N101O1O001O001O00001N101N101N101N1O2O1N2N1O2M3L4M2M4L3N3M3M2O2N1N3N1O1O2N1O2N1O1O2N1O2N1O1N3M2N2N2M4L3L4L4K6L3N2N2N2O2M2O1O100O101N10000O1000000O1000001O000O100000000O10000000000O101O0000000O100000000O10001O0O100O10000O100O2O0O100O10000O101O0O1000000O2O00000O101O001N10001N101N100O2O0O2N1O2N1O101N1O2N1O2O0O1O2O0O2N101N1O2N1O2N1O2N2N1O2N2M3M3K5K5K5I7JoWY2\"}}, {\"image_id\": 49, \"category_id\": 2, \"bbox\": [314.4143371582031, 144.77627563476562, 74.209716796875, 12.58978271484375], \"score\": 0.9999493360519409, \"association_id\": 3, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"f\\\\\\\\72mg0101N1000000000001N100000000O101O0000000O100000001O000000000000000000000000000001O1O001O1O001O001O0000001O0O2NXcY?\"}}, {\"image_id\": 49, \"category_id\": 2, \"bbox\": [887.4486694335938, 98.76899719238281, 89.79693603515625, 11.731452941894531], \"score\": 0.9994227886199951, \"association_id\": 6, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"Ucnd06jg0000O2O0001O000000000000000000000000000000000000000000001O000000001O0000001O01O01O0000000000000000000001O0O1000000O100000000000000000O101O0O2Nh\\\\[1\"}}, {\"image_id\": 49, \"category_id\": 2, \"bbox\": [874.685546875, 125.64664459228516, 24.85882568359375, 7.931953430175781], \"score\": 0.997908353805542, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"P\\\\`d04lg0000O101N1000000000O2O00000000000000001O00001NPdm2\"}}, {\"image_id\": 49, \"category_id\": 2, \"bbox\": [80.20497131347656, 469.90716552734375, 
360.95452880859375, 255.0257568359375], \"score\": 0.9509072303771973, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"SZV2>]g07M3M2O1N3M2O010O1O1O001O1O001O1O10O01O10O01000000O100O100O10O01O1O010O10O0100O100O2O000O10O0100O2N101N2O0O2N100O100O100001O1O100N2O000O10O01O010O1O102M8H2O0O2O0O100O102M2N3N1N2O0O1O100O10O10O10O01O010O010O01O0001O01O0001N1O1L5L3M4M2N2N\\\\ZW3OgehL1O4L4L1N101O0000000000O10000000O1000O100000000O101O0O1000000O100O1O2M2O1OWal>\"}}, {\"image_id\": 49, \"category_id\": 1, \"bbox\": [437.59075927734375, 553.4384155273438, 148.32855224609375, 214.56158447265625], \"score\": 0.3099295496940613, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"]oY:=_g07I4L4K6K4M3H8D=D;L4M3N2N2M3J6M3N2N2N2dN[M[]Of2ab0eMV]O^2gb0iMS]OY2kb0_1N2O1N101O1L5dNPKa_OU5^`0TKX_On4g`0WKR_Ol4m`0XKn^Oj4Ra0R101O00000000O10000O1000000001OO1O1lJg^O`3Za0\\\\Ll^Oa3Ua0\\\\LP_Oa3Qa0]LS_Oa3m`0]LV_Oa3l`0[LY_Oc3h`0[LZ_O3_OT2Xa0gM\\\\_O2AS2Ta0iM]_O1CT2Ra0gM^_O3CS2Ra0eM__O5DR2Xa0ZMZ_Oa0Mf1]b0XNP^OZ1Rb0eNR^OW1Pb0hNS^OT1na0lNU^Oo0na0oNZ^Oh0ha0XOb^O;ba0D^3O01O001O10O11O0O100O01O100O2O1N1O1N3LQ\\\\c;\"}}, {\"image_id\": 49, \"category_id\": 2, \"bbox\": [499.2298583984375, 740.38330078125, 56.23077392578125, 20.61395263671875], \"score\": 0.23227396607398987, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"dWi;4gg05N3N1O1O1M3O10000O1000000000000000000000000000000O1001OO1001N100000000O101O1N4M1O3GYXO2kg0O3N]PU;\"}}, {\"image_id\": 49, \"category_id\": 1, \"bbox\": [93.14109802246094, 405.4095764160156, 374.9266357421875, 325.4922180175781], \"score\": 0.18671919405460358, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": 
\"fWY23dg0=I7J4L;E7I4L4L5jM_Ne]Of1Xb0]Nc]Ok1Wb0UNh]OP2Sb0RNk]OR2Rb0nMm]OU2Qb0kMn]OY2oa0gMP^O]2ma0dMQ^O_2ma0aMS^Oa2ka0_MU^Oc2ha0^MW^Oe2ga0[MY^Oh2da0XM\\\\^Ok2aa0UM_^Ol2`a0TM_^Om2aa0SM_^On2`a0RM`^On2`a0RM`^Oo2_a0QM`^OP3`a0PM`^OQ3_a0oLa^OQ3_a0oLa^OQ3_a0oL`^OS3_a0nL`^OR3`a0nL`^OR3`a0nL`^OS3_a0nL`^OR3`a0nL`^OS3_a0mLa^OS3_a0nL_^OT3`a0lL`^OT3`a0lL`^OU3_a0kLa^OU3_a0lL`^OU3_a0kLa^OV3^a0kLa^OU3_a0lL_^OV3`a0lL^^OT3ba0mL]^OT3ba0QMY^OP3ea0UMW^Ok2ia0XMS^Oj2la0YMQ^Og2oa0[Mo]Of2Pb0\\\\Mm]Oe2Sb0]Mk]Od2Tb0^Mi]Oc2Wb0d1O1O1N3N1O2N1O2N2N1O1O2N1O2N2N2N2N3M2N3M3M3M3M3M2N2N2N1O1O00001O001O00001O00001O000000000000000000000000001O00O100000000bIf_OV5[`0eJj_OZ5V`0bJo_O]5Q`0bJQ@]5o?bJS@]5m?bJU@]5k?bJW@]5i?cJX@\\\\5h?dJW@]5i?bJX@^5h?bJX@^5h?bJW@_5i?aJW@_5i?`JX@`5h?`JW@a5i?_JW@a5i?_JV@b5j?^JV@c5i?]JW@c5i?\\\\JW@e5j?ZJV@f5i?[JW@e5i?[JW@e5i?[JW@e5i?[JW@e5i?[JW@e5i?[JW@f5h?ZJX@f5h?ZJY@e5g?[JY@e5g?[JY@e5g?[JY@e5g?ZJZ@g5e?YJ[@g5e?ZJZ@f5f?ZJY@h5f?YJX@i5g?XJV@k5i?UJU@o5i?RJT@S6i?nIS@Y6i?hIT@]6i?o0N2N1O2N1O1O001O2M2O2N1O1O1O1O0O2O1O0000000000000001O0O1000000000000O10000O10000O01000O10001O0O1000001O0O101O000O2O001OOUJ_@X3b?gL`@X3`?fLc@Y3]?fLe@Y3[?fLf@Y3[?fLg@Y3Y?fLi@Y3V?gLk@Y3U?fLm@X3T?gLn@X3R?gLo@Y3Q?fLQAY3o>gLRAW3o>hLRAY3n>eLSA[3n>cLSA^3n>^LTAb3n>[LSAe3o>XLRAi3n>TLTAm3m>kJW@e0m0`4m>eJ_@e0f0g4k>`Jg@b0a0m4b?oJ_@R5a?kJa@U5a?hJ`@Y5`?eJa@[5a?bJa@]5b?^J`@b5Pa0O10O01O001O001O01O0001N1O1O1O1N2O100O1O1O1O1O1N3N1O1N3N100O2N1O101N1O1UMh\\\\Om0Zc0nNk\\\\OP1Vc0lNn\\\\OT1Sc0dNU]O[1lb0`NX]O`1hb0_NY]Oa1hb0^NX]Ob1ib0]NW]Ob1kb0\\\\NV]Od1jb0\\\\NV]Od1kb0[NU]Oe1kb0ZNV]Of1kb0YNU]Og1kb0YNU]Og1lb0WNU]Oi1kb0WNU]Oi1lb0VNU]Oh1lb0XNU]Oe1nb0YNT]Od1nb0\\\\NT]O_1Pc0`NR]OZ1Rc0eNQ]OV1Sc0iNn\\\\OR1Vc0nNl\\\\On0Xc0POi\\\\Ol0Zc0SOh\\\\Oj0[c0UOf\\\\Oh0]c0WOd\\\\Og0^c0XOb\\\\Og0_c0YOb\\\\Oe0^c0\\\\Oc\\\\Ob0^c0^Oc\\\\O`0]c0BQ]ONPc02U]OGlb0;\\\\2000O2O0O101O0001O001O1O001N2O001O001O0O2O0001N101O001O001N101O1O1O1OO2N2N1N3N3K4JZWZ=\"}}, {\"image_id\": 50, \"category_id\": 1, \"bbox\": [449.1925048828125, 4.092830657958984, 143.48663330078125, 
198.08421325683594], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"]o^9b0fd06K4L3M3L4L4M3M3M2M3M3M4N1N2N2N2O00100O100O010O1N1O2O1POgMm^OY2U`0nMS_O;f0h1R`0mNi_OV1Q`0SOi_Oo0P`0^Og_Of0W`0X2N2E:E<L4N2N2O100OTNf@hM[?W2h@gMW?[2g@eMZ?U41O1O1O1O100000000000000000000000000000000000000000000000001O00001O001O1O1N2O001N1YKX@j3j?ULY@g3h?XLZ@f3g?YL[@e3g?YL[@f3g?WLZ@h3m?kK[@T4_`0O001O2N1N2O1O1N100O1O1O1O2O0N4TOe^OcM]a0Y2m0N2N1O1O101N11O001O001001N101N010O0O2O001O002M2O1O1N2N2M8Ge\\\\o8\"}}, {\"image_id\": 50, \"category_id\": 2, \"bbox\": [11.939676284790039, 570.1791381835938, 228.02476501464844, 112.82086181640625], \"score\": 1.0, \"association_id\": 2, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"dZ;1Xe03L4L3L4O2O0O101N1O1O1O1O100O2O0O100O1O1O1O1O1000O01O1O001O100O010O1O1O1O101N100O10000O100O100O100O100O100O100O1000O10O10000000000O1O100O100O100O100O100O100O100O100O1O1O1O1O100O100O100O1O100O100O100O100O1O1O100O100O1O001O1O100O10O001O0O101O001O1O0O2M3M30O0100O010O000001M200O20O100O1O1QNj\\\\O2<S1jb0jNl\\\\O1<S1hb0mNm\\\\ON=S1gb0nNa]OR1]c0N102M2O1N2N101O000O10001O000000000O2O0000001O00001O000O1000000O10000000000O101O00000000000O101O000O2O0O101N100O101O0O1O2N1NSZd`0\"}}, {\"image_id\": 50, \"category_id\": 2, \"bbox\": [389.7422790527344, 176.94406127929688, 177.74343872070312, 58.14045715332031], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"Q[T85Te05L2N2O1N1O100O10001N10000O1000000O10000000000O100000000000000O1000O1000O1000O0100O010N2N101O1O0O2J5M4O001O10O0100000O1000101N1O1O001O1N4M2N1O001O00001N100O101N100O100O100O010O10O100O01000O001000O10O1000O010000O001O100O10O01O100O0100000O10O100000000O101O00000O100000000O2O000000000000O1000O010000000O0100000O1000O0100000O10000000O1000000O2O00001O0O101NZd`9\"}}, {\"image_id\": 50, \"category_id\": 1, \"bbox\": [39.3697624206543, 77.67926788330078, 276.1554870605469, 554.5895385742188], \"score\": 1.0, \"association_id\": 2, 
\"segmentation\": {\"size\": [683, 1024], \"counts\": \"lTm0g0bd07K1O001O00001N1000000O10O010O0100O01000kCoNP4Q1PLPOo3o0RLSOl3m0SLUOl3k0TLWOj3i0VLYOh3f0XL]Of3c0ZL@c3`0]LC`3=_LF_39cLG\\\\39dLF\\\\3;dLD]3;eLC\\\\3=eL_O^3a0bLYOd3f0a80O0100O1O1O]DZOc2d0TMGl28QMMn22SMOl21SM0m2OTM2k2MVM3j2MUM5j2JWM6h2KXM4i2LXM3h2LYM4g2LYM4f2MZM3f2MZM2f2N\\\\M1d2O\\\\M1d2O\\\\M1c20]M0b21]M1a2O`M2]20bM1]20cM1Z21fMOX23gMOV23jMNS24mMLR25mMMP24RNJm18TNGi1<WNCf1a0[N^OZ1m0gNRO^Oi2d0TMROW3Q1fLlN]3V1`LiNb3Y1\\\\LeNf3\\\\1YLbNi3]1XLbNi3]1YL`Ni3`1WL]Nm3a1ULZNo3e1RLXNQ4g1QLUNR4k1oKQNU4m1lKRNU4n1lKoMV4Q2jKnMW4T2gKjM\\\\4V2dKgM^4Z2aKcMc4]2\\\\K]Mk4c2TKWMR5j2mJPMZ5o2gJlL^5T3aJjLb5V3]JiLe5V3]JfLg5W3]JeLf5Y3^JbLf5Z3aJ^Lc5`3cJXLb5d3fJRL_5l3cJoKb5o3`JlKe5Q4]JlKh5Q4ZJkKj5S4WJkKl5S4VJiKo5T4SJhKS6T4oIgKY6U4hIfKb6S4bIeKm6P4VIjKV7m3mHnK\\\\7k3fHQLb7i3_HULf7g3\\\\HVLh7g3ZHULk7h3WHULl7i3WHQLn7m3VHlKP8Q4THcKV8[4lG\\\\K]8b4fGUKc8h4_GQKh8m4ZGmJl8Q5VGiJP9U5RGdJT9[5_3O1O1O001O001O001O001O001O001O1O1O010O00001O001O001O010O00001O0000010O000001dITKWLl4_3dK\\\\L\\\\4X3WL`Li3Y3gL^LZ3Z3UM^Lk2]3`M]La2_3fM^LZ2_3jM_LW2_3lM_LU2_3mM`LS2_3oM`LR2^3PNaLQ2]3RNaLo1]3SNbLm1]3VNaLk1]3XNaLh1]3\\\\NaLe1]3^NaLb1]3bNaL_1\\\\3dNcL\\\\1[3gNeLX1X3lNgLT1W3nNiLR1T3ROkLn0Q3WOnLi0n2[ORMe0k2_OTMb0i2@WM`0g2CXM=f2EZM;c2H]M8`2K`M5\\\\20cM1S28mMHh1c0XN]Od1g0\\\\NYOa1j0_NVO_1l0aNTO]1n0cNRO[1Q1dNoNZ1S1eNnNY1U1fNkNW1X1hNiNU1[1jNeNT1]1kNdNS1_1lNaNR1a1mN_NS1c1lN]NR1f1lN[NS1f1mNZNQ1i1nNWNP1l1oNUNn0n1QORNl0R2SOnMk0U2TOkMj0X2UOhMj0Z2UOfMj0\\\\2UOdMj0^2UObMj0`2TOaMk0a2SO`Ml0b2QO_Mo0c2oN^MP1d2nN]MP1e2oN\\\\MP1f2nN[MP1h2nNYMP1j2nNWMP1l2mNVMR1l2lNUMR1P3iNRMU1S3fNoLX1V3cNlL[1Z3^NiL`1\\\\3WNjLf1[3SNjLk1[3mMjLQ2Z3hMkLU2^3^MiL`2g3hLcLU3l:L3L4H9E:H8K6I7J5L5M3L4M2N3M3N2M3NW^k>\"}}, {\"image_id\": 50, \"category_id\": 1, \"bbox\": [680.0254516601562, 203.9717254638672, 330.10015869140625, 281.7423095703125], \"score\": 0.9998985528945923, \"association_id\": 3, \"segmentation\": {\"size\": [683, 1024], \"counts\": 
\"Zc\\\\>3Se07K5M3L5L2O1N2O1O101N101mM]O[_Oe0^`0E\\\\_O?^`0Ea_O=[`0Fd_O:Z`0Id_O8Z`0Jf_O6X`0Od_O2Z`0Lj_O3U`0Nl_O2Q`00P@0m?2U@Mh?5Y@Kb?9_@G]?;f@DV??k@BQ?`0PA@m>b0TA]Ok>d0VA\\\\Oh>e0YA[Oe>f0[AZOc>g0^AZO_>h0bAXO[>i0fAXOS>n0mASOa=^1`BcNX=b1iB_NQ=f1PCZNk<j1UCWNf<m1[CSNc<m1^CSN`<o1aCPN^<Q2cCoM[<R2fCmMX<U2iCkMT<W2nCgMQ<Z2QDeMl;]2WDaMg;`2[D_Mc;b2^D^M`;c2aD]M];d2dD\\\\MZ;e2gD[MX;f2gDZMY;f2hDYMX;g2iDYMV;h2jDWMW;h2jDXMU;i2kDVMU;k2kDUMT;l2lDTMS;m2mDRMS;P3kDQMU;P3jDPMU;Q3kDoLT;S3jDmLW;U3fDlLZ;V3iBlK1Kf0R1b<X3`B`Ln08b<5ZBk2T1PMj<^OWCm2OdMi>T2XAkMk>R2VAnMl>P2TAoMn>o1SAPNn>o1RAQNo>n1RARNm>n1SARNn>m1RASNn>m1RASNn>m1SARNm>n1SARNm>n1SARNn>m1RATNm>l1TASNl>m1TASNl>m1TASNl>m1TASNl>m1UARNk>n1UARNj>P2UAPNk>P2VAoMi>R2WAoMg>S2XAmMg>T2ZAkMf>U2ZAkMe>W2[AhMe>X2\\\\AgMc>Z2]AfMc>[2]AdMb>]2^AcMb>^2^AbM`>_2`AaM_>a2aA_M]>c2bA]M]>e2bA\\\\M[>i2cAWMZ>m2dATMZ>P3cAPM\\\\>R3dAnLZ>T3eAmLY>U3gAjLX>X3gAhLX>Y3iAfLU>\\\\3kAdLS>_3mA`Lo=d3RB[Li=k3XBRL`=W4aBhK[=\\\\4fBcKX=`4hB_KV=c4kB\\\\KS=_2RBkNk0eNR=]2ZBkNe0gNQ=\\\\2]BlNc0gNo<^2`BhNc0iNk<a2cBeNc0iNi<b2gBbNc0jNd<f2jB_Nc0kN_<i2oBZNd0lNR<T3]CmMb0oNo;V3`CiMc0POk;Y3cCfMb0QOi;[3gCVMlN@f1Oe;[3kCQMo0DU;\\\\3mCiLT1Kn:]3oCcLW10i:^3RD[L[15c:a3jF_LU9b3kF^LT9c3mF[LS9e3nF[LQ9f3PGXLP9i3PGVLP9k3W4100O11N101O1N101_Nm^OhNSa0S1U_OiNm`0T1W_OjNj`0S1Z_OjNg`0e0Z^OROR18X3\\\\OP9W1jBROQ18V3@n8T1oBQOo0:T3Bn8Q1SCoNl0>S3@R9n0`DAmb0O1O1O1O1O001O5K2OO0O1N2M3O10000O1000000O103M2M4f_OZOi;g0SD\\\\Om;e0PD]Oo;e0nC^OQ<c0mC^OS<c0kC^OT<d0kC\\\\OU<e0iC\\\\OV<f0iC[OV<f0iCZOV<h0iCXOV<j0iCVOV<l0iCTOT<o0lCQOR<R1mCnNP<U1PDjNm;[1QDfNm;\\\\1SDdNk;_1TDaNj;a1VD_Ni;b1WD^Ng;d1YD\\\\Nf;f1YD[Ne;f1ZD[Ne;f1[DZNc;h1]DXNa;k1^DVN^;m1aDTN];n1cDRN[;P2eDPNZ;R2eDnMZ;S2eDoMY;R2gDnMX;S2hDmMW;U2hDkMW;V2iDjMU;Y2jDgMS;\\\\2mDdMP;`2oD`Me:l2[ETMa:Q3^EoL^:U3bEkL\\\\:X3cEhL\\\\:Y3dEgL[:Z3eEfLZ:\\\\3eEdLZ:]3fEcLY:_3fEaLY:a3fE_LX:c3hE]LV:f3iEZLV:h3iEXLV:j3iEVLV:l3iETLV:n3iERLV:P4iEPLW:Q4hEoKX:Q4iEnKW:S4hEmKX:S4hEmKX:T4gElKY:T4gElKY:^1RD^Of1SOX:m0hDMo0VOY:i0nDOi0XOZ:f0QE1d0YO[:d0TE3a0XO[:c0WE4>YO[:b0YE5;YO]:?[E78ZO]:=^E85[O_:9`E;O]O
b:3eE>I_Oi:EgEj0@A[>=dAC^>;bAE_>:aAF`>:_AFb>9]AHd>7\\\\AIe>7ZAIg>6YAJg>6YAJh>5XAKj>4UALl>3TAMm>2SANn>1RAOo>1o@0Q?0o@0R?On@1R?On@1R?0m@0S?0l@1T?0j@1V?Oi@2W?Og@2X?0f@1Z?Od@3\\\\?Na@4_?M^@5a?M\\\\@5d?LY@6g?Kl_Oa0S`0@l_Oa0S`0Ak_O`0T`0Bi_O`0U`0Bj_O?T`0Dh_O?U`0Eh_O=U`0Gh_O;V`0Hg_O:W`0Hg_O:X`0Hf_O9Y`0Kb_O7]`0M[_O8e`0IY_O8f`0JX_O7g`0KW_O6i`0V2O001O1O1O100O10O2O1[Mj_O2X`0Jn_O1W`0HQ@O\\\\`0@Q@0W`9\"}}, {\"image_id\": 50, \"category_id\": 1, \"bbox\": [393.9391784667969, 49.238487243652344, 28.970245361328125, 31.9617919921875], \"score\": 0.9992979764938354, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"akV8;md04L2M5M2N200O1O2N1000001O0000000001O001O1O001O1N2J6M4M5Fji`<\"}}, {\"image_id\": 50, \"category_id\": 1, \"bbox\": [615.1367797851562, 149.86778259277344, 92.2598876953125, 91.57496643066406], \"score\": 0.9992387294769287, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"\\\\\\\\j<e0bd08J4I7L4J6M3L4M2N2M4M2N2N2N3M2N1O2O1N101N1O1O1O1O100O100000000O10000O10000O10000000000000000000000000000000000001O0000001O001O00001N101N2O0O101O0O1O2O1N2O1N2O1N1O2N3L2O2N2N3M2N3L4L4M_Xc6\"}}, {\"image_id\": 50, \"category_id\": 2, \"bbox\": [746.9783935546875, 371.5205383300781, 247.5831298828125, 126.02395629882812], \"score\": 0.9986673593521118, \"association_id\": 3, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"^fn?1Xe03O0O2O00001O002OO01O1O0001000O10O10O0010O01O001O001O0000001O01O00000010O0001O01O00001O0001O01O0000000000000010O000001O001O01O01O000001O001O00010O0000001O000000010O000010O0001O01O01O00000000010O0000000000001O0000001O00000O2O000000000O10001N1O1O2NgYg11_YZO\"}}, {\"image_id\": 50, \"category_id\": 2, \"bbox\": [915.9444580078125, 542.6365356445312, 93.94512939453125, 85.22283935546875], \"score\": 0.9976996779441833, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": 
\"_aWc03Se0g0[O6C<F:L3M2N3GSNn\\\\OQ2Pc06O2O0000O00101N10001N101N101N2N2N1O2M2O2N1O2N1O1O2N2N1O2O0O1O1O2O0O100O2^Om[OITd06Q\\\\OEPd09T\\\\ODnc0:b0O1O2N100O100000001O01O0001O001O00001N101N101N`^c0\"}}, {\"image_id\": 50, \"category_id\": 2, \"bbox\": [235.54501342773438, 566.1253662109375, 47.478668212890625, 75.02001953125], \"score\": 0.06041960418224335, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"PP[e0\"}}, {\"image_id\": 51, \"category_id\": 1, \"bbox\": [203.3945770263672, 451.0469970703125, 139.3329620361328, 143.31317138671875], \"score\": 1.0, \"association_id\": 0, \"segmentation\": {\"size\": [666, 1002], \"counts\": \"PjW43^d0g0A7I3N=C8H2M2O0O2O0O1O100O1N2O1O1O1O1O1N2O10O00N200000000L4O1O100O2K4M30001O1K5M3O1O001N101O01J5M2K601N101O000000001O1O001000O1O1O0010001N2O001O1N4Mg0XO2N2O1N101O1N2OO1O1O1O1O1O1[Oe0A?N200O1O1O1N2000000O100001O1O1O1O1O1O1c^OXMf`0c3K1O1O0000000000O1O1K5H8O1N2I8F9M3O1N3K5I7M3N3M5EWU\\\\=\"}}, {\"image_id\": 51, \"category_id\": 1, \"bbox\": [471.7633056640625, 449.6309814453125, 253.8260498046875, 178.598876953125], \"score\": 1.0, \"association_id\": 0, \"segmentation\": {\"size\": [666, 1002], \"counts\": 
\"S^c9k0kc0`0C3L4M3N1O1N4M2N3M2M2O001O001N2O1O2N2N1O1O1O1O1O1O3M8I1N2N2N1O011O0O2O000O100O0010O01O10O010O0010O0001O010Oe_OiLS?W3f@RMX?o2e@TMZ?l2d@WM[?j2c@XM\\\\?h2c@YM]?h2a@ZM^?g2a@YM`?h2\\\\@ZMd?h2Y@YMg?i2U@ZMj?g2S@[Mm?g2P@ZMP`0g2m_O[MS`0e2n_OZMR`0e2P@YMQ`0g2P@WMQ`0h2R@UMo?j2V@QMk?o2[@jLe?V3P1O1O100O10000O10000O10O1000000O10O010@o^OWMQa0h2R_OVMMMe`0m2m_OSMS`0l2n_OTMR`0j2P1O1O100O10[_O]MS?b2k@eMP?\\\\2PAeMo>[2RAeMm>[2SAfMl>[2TAeMk>[2UAfMj>Z2VAgMi>Y2WAhMh>X2YAgMg>Y2YAgMg>Y2YAhMf>X2[AfMf>[2YAdMg>]2ZA`Mh>a2]AVMf>j2e11O1O001O0000001O000000000O2O00001N101O1N2O000000000000000000000O0100O1O100O010O101O0O100O1O2N1O100O10O100000O10O1000000000000O1000000000001O0^_OZMP?g2k@^MT?b2i@cMU?^2h@eMW?\\\\2f@fMZ?[2d@gM[?Y2e@fM\\\\?[2b@eM_?]2^@cMc?^2[@bMf?e2R@ZMo?h31O1N2O1O1M3Bf_OWL_`0c3?M3N2O100O10000000000000000001O0010O01O2O0O1O2OO10010O2O09H3K2OSMe_OX1\\\\`0bNi_O]1Z`0ZNn_Oe1U`0lMY@Q2]a0L4K4L3N0O100N2O1M3Nicc5\"}}, {\"image_id\": 51, \"category_id\": 1, \"bbox\": [118.71844482421875, 373.0406188964844, 96.78286743164062, 68.3616943359375], \"score\": 0.9999996423721313, \"association_id\": 0, \"segmentation\": {\"size\": [666, 1002], \"counts\": \"Pj]28^d06L2K5O1O1O1O100O0O101O0O01O1000N120O0O1O100O1O2O001O001O010O1O10000O10001O2M2N2O6J6J0O1O0100N2E;L301O1O1O1000O0100000O100000001O0000000000001O002N00001O00000000001O1O100O100O1002N0000m0SO0O4K_^o?\"}}, {\"image_id\": 51, \"category_id\": 1, \"bbox\": [410.3345031738281, 330.1443176269531, 70.24478149414062, 51.611297607421875], \"score\": 0.9999995231628418, \"association_id\": 0, \"segmentation\": {\"size\": [666, 1002], \"counts\": \"WZ\\\\85Zd0=M1N3N2N1N2O10O01O1O001O1O1O001O01L3N2100O101O2M2O0O10K5N2O00100O01000000000000000O100000000001O001O1O4L2N2N1O2N1O001O0010[Oe\\\\OI\\\\c01S]OCQc07m0MW_d:\"}}, {\"image_id\": 51, \"category_id\": 2, \"bbox\": [634.0435791015625, 353.7788391113281, 106.71954345703125, 15.965606689453125], \"score\": 0.9999969005584717, \"association_id\": 0, \"segmentation\": {\"size\": [666, 1002], 
\"counts\": \"edn<1hd02O1O0000001O000000001O000000001O0000001O0000001O00010O0000001O01O00000001O01O00000000001O000001O000001O0001O0000000000000001O0000000000001O000000000000001O00000O100000001O00000O100000001N10Vc^5\"}}, {\"image_id\": 51, \"category_id\": 2, \"bbox\": [375.6976013183594, 407.3004455566406, 156.87875366210938, 23.905029296875], \"score\": 0.9999954700469971, \"association_id\": 1, \"segmentation\": {\"size\": [666, 1002], \"counts\": \"Ynd74ed02O1O0000001O00000000000000000000001O000000001O0000000000001OO101O000000000000000000010O00000000000000000000000001O000001O000000000010O0000000001O0001O00000000001O0001O00000001O00000001O0001O00000000001O01O0000000000000001O01O00000000000000010O00000000000010O00000001O0000001O00001O001O000000010O00000O10001O1ObXa9\"}}, {\"image_id\": 51, \"category_id\": 2, \"bbox\": [2.2501978874206543, 263.98968505859375, 77.11456298828125, 20.714447021484375], \"score\": 0.9999804496765137, \"association_id\": 2, \"segmentation\": {\"size\": [666, 1002], \"counts\": \"oa11fd05N1O001O001O1O001O001O0000001O0000000000001O0000010O0001O001O010O00001O001O01O0000000000001N100000000O10001O0O1001O000000O11O01O0000000001O000O2O001O2FeQhb0\"}}, {\"image_id\": 51, \"category_id\": 1, \"bbox\": [68.07256317138672, 201.4524383544922, 78.51496124267578, 87.77946472167969], \"score\": 0.9998753070831299, \"association_id\": 2, \"segmentation\": {\"size\": [666, 1002], \"counts\": \"j\\\\`11gd05J4L4N3M2O0O2M3N1O1O1O1O1M3O1iNQOX^OZ1ea0o0O1O1O100O100O100O10000000000001O1O001O002N4L2N6J7I1O2N3M1O2N2N2N6J2N1O1O1O1O001O00001O1O1O0000001O1N2O1O1N3M5G[j[a0\"}}, {\"image_id\": 51, \"category_id\": 2, \"bbox\": [4.206997394561768, 620.2653198242188, 187.34471130371094, 18.9287109375], \"score\": 0.9932957291603088, \"association_id\": 0, \"segmentation\": {\"size\": [666, 1002], \"counts\": 
\"ek36cd02O000O101O00000000000O01000000O100000000O100000000000000000000000000000000000000001O00000000000000000000000000000000000000000O2O000000000000001O000000000000001O00000000001O000000000O2O0000000000000000000000000000000000000000000000O100000000O10000O100N2N20000O100000000O1000000000000000000000000000000000001O000000000000000000000000001O000O1000001O0Ob[e`0\"}}, {\"image_id\": 51, \"category_id\": 1, \"bbox\": [368.1654968261719, 239.1644744873047, 24.80859375, 33.56614685058594], \"score\": 0.7712899446487427, \"association_id\": 0, \"segmentation\": {\"size\": [666, 1002], \"counts\": \"jW`7;]d07I2N2N2N2O1N2O00100000000001N101N2O1N3Jei]<\"}}, {\"image_id\": 51, \"category_id\": 1, \"bbox\": [535.4235229492188, 383.17877197265625, 47.0299072265625, 50.445587158203125], \"score\": 0.7270745038986206, \"association_id\": 1, \"segmentation\": {\"size\": [666, 1002], \"counts\": \"Uem:9`d07H5F7M4M10001N10M3O10001O001000O2O1O3L3N2N1O1OO1N2N2N200O100O1001O1O000000O1O2M3L4H8M3LUi`8\"}}, {\"image_id\": 51, \"category_id\": 1, \"bbox\": [760.7070922851562, 337.5847473144531, 37.071044921875, 35.80084228515625], \"score\": 0.6735468506813049, \"association_id\": 0, \"segmentation\": {\"size\": [666, 1002], \"counts\": \"WY_?>[d0100O01O0O12N2N2N2O4K1O100ON3O10O2O1O4L1O1O0000000000O100O100J7L3N4D<JooT4\"}}, {\"image_id\": 51, \"category_id\": 2, \"bbox\": [420.738037109375, 367.387451171875, 40.9493408203125, 17.723297119140625], \"score\": 0.1674252599477768, \"association_id\": 0, \"segmentation\": {\"size\": [666, 1002], \"counts\": \"ejb82hd00O11O01O00001O0000001N1N200O100O10000010O2N001O00001O000000HLj[O4Vd0Mi[O3Wd0Og[O1Yd00h[OLZd0550001NQ`o:\"}}, {\"image_id\": 51, \"category_id\": 1, \"bbox\": [29.346179962158203, 241.50103759765625, 28.32427215576172, 32.68389892578125], \"score\": 0.06684309989213943, \"association_id\": 0, \"segmentation\": {\"size\": [666, 1002], \"counts\": \"VRe04dd02O2M2N2N3N1O2O0O100O1O100O1O10O2O1O1N2N3CgZXc0\"}}, {\"image_id\": 52, 
\"category_id\": 2, \"bbox\": [147.27572631835938, 482.91827392578125, 338.99951171875, 72.47601318359375], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"fWS31We06M0O101N1000001N1000001N10000O100000001O0O1000001N1]O_O\\\\\\\\Oa0dc0AZ\\\\O?ec0B[\\\\O>ec0B[\\\\O>dc0C[\\\\O>ec0CZ\\\\O>Vc0^Oi\\\\O41>Vc0^Oh\\\\O60=Wc0^Oi\\\\O50=Wc0_Oh\\\\O5O=Yc0^Og\\\\O8N:[c0_Of\\\\O7N;\\\\c0^Of\\\\O8M:]c0_Oe\\\\O7M;^c0^Od\\\\O8N:^c0^Od\\\\O9M9_c0^Od\\\\O9L:`c0]Od\\\\OU1\\\\c0kNd\\\\OV1[c0jNe\\\\OV1[c0kNc\\\\OV1]c0jNc\\\\OV1]c0jNc\\\\OW1\\\\c0jNc\\\\OV1]c0<1O0000000000000000001O000001O00000000000000000000000001O0000001O000000001O00000000001O0000001O0000000000000000001O00000000000000001O0O1000001O00000000000000000O100000000O101OO10000000000000000000000000000O1000O1000O1000000O100000000O10O10O100000000000O2O00001O001O0O10001O00001N101O2N1O1N101O0000001O001N2O1O1O0O2O0000001O000000000000000000000O100000000O100O1000000O1000000O1O1O100O1O100O100000000O1000O10000000000O10000000O1000000000000000001O000000001O0000000001O01O000000001O00000001O0001O000000001O0000001O0000000001O0001O000000000000001O00000000001O000000000O2O0O2OmoV;\"}}, {\"image_id\": 52, \"category_id\": 1, \"bbox\": [189.76007080078125, 76.10951232910156, 291.1886901855469, 438.1640625], \"score\": 0.9999905824661255, \"association_id\": 1, \"segmentation\": {\"size\": [683, 1024], \"counts\": 
\"\\\\Qo3j4[`0:J6J4M3M3M1O2N1N3N1N3M2O2N1O10O000000O1ZJVJlKj5Q4_JfIL`Nf5g7eJ`I0aN^5m7gJ[I2cNX5R8iJVI4dNT5V8jJSI6dNQ5X8lJPI7dNo4[8lJnH9dNl4]8oJjH<bNf4d8QKeHc0]N_4m8RK_H\\\\7`7hHZH[7f7gHUH\\\\7j7gHRH[7m7gHPHZ7Q8gHmGZ7S8gHkGZ7U8gHhGZ7Z8fHcG\\\\7]8eH_G]7b8dHWGb7i8`HPGd7Q9Z22N1_LiFnJW9P5lFnJU9S5iFlJY9a82N2M3N2M3N2L4M3M3N1N3M3M2O2N100O2O00000O2M2N2M4L3M4M3M2O2N2O0O2L4eM`DgJc;W5bDbJc;Z5cD`Ja;[5eDaJ^;m2jDPM0MZ;R3oDdLM7U;U3RE^LM:S;W3RE]LM9S;[3nD^LN6U;\\\\3mD_LM1Z;a3gDaLLL`;d3cDaLLHd;h3^DcLLDg;j3[DdLMAi;m3XDcLN_Ok;o3UDeLN[On;R4RDeLNXOQ<T4oCfLNWOS<R4oCjLLSOV<R4oClLISOX<P4PDnLGROY<o3QDPMDQO\\\\<o3PDQMBQO^<m3QDSM@PO_<m3QDTM^OPOa<k3RDVM\\\\OnNc<l3QDWMZOnNe<k3QDWMZOnNe<j3RDYMWOnNg<g3TD]MROnNi<b3XD_Nh;^1[DbNe;[1^DfNa;VNYCh1[12h0lMb79`Fb1Y1c04QNk7IlF^1W1Q1BTNW8[OUGY1U1^1SOUNb8SOZGU1S1[4a7^J_GT1R1_4_7ZJaGV1Q1`4_7VJdGX1m0d4_7PJgGZ1l0g4]7fIoGb1d0j4P9TKQGm4n8SKQGP5m8oJTGR5l8lJTGW5j8iJVGX5i8gJXG[5g8dJXG^5h8bJUGa5j8`JTGc5k8]JSGe5m8ZJQGj5n8VJoFm5Q9VJiFn5V9UJ[FX6c9lITF[6k9hIbEh6^:T2N1O101N2O0O2O2M2O0O1000O1O100O1O010O001O10O01YHTFm3m9mK`Fk3a9nKhFo3Y9lKmFR4T9kKoFT4R9hKRGW4P9fKSGX4n8eKUGZ4k8eKVG[4k8bKXG]4i8_K[G`4e8]K_Gb4b8ZKbGe4^8XKfGg4Z8VKjGi4W8TKlGk4T8TKnGk4S8RKPHm4P8RKQHn4o7RKRHm4n7RKTHm4l7SKTHm4l7RKUHn4k7RKVHm4j7RKWHn4i7RKXHm4h7SKXHm4h7RKYHn4g7RKYHn4g7RKZHm4f7SKZHm4f7SKZHm4f7SKZHm4f7SKZHm4f7SKZHm4f7SKZHm4f7SKZHm4f7SKZHm4f7SKZHm4f7SKZHm4g7RKYHn4g7RKYHn4g7QKZHo4g7PKYHP5g7PKYHP5h7oJXHQ5i7nJWHR5i7nJWHR5j7mJVHS5k7lJUHT5l7jJUHV5l7iJTHW5m7gJTHY5m7fJSHZ5n7dJSH\\\\5n7bJSH^5n7`JSH`5n7^JSHb5m7]JTHc5m7[JTHe5m7XJUHh5l7UJVHk5k7QJXHo5j7lIYHS6j7gIZHY6\\\\;000000O10001N1O2N2M2N3N2M3N3M2N2N2N2N1O2M3L3N3M2N2O2N1O2N2M3M4K5K5L4L3M3M3N1N3N2M3M4M2M4M3L3N3M2N3K5K6I7Ic0lNR1YO^T_;\"}}, {\"image_id\": 52, \"category_id\": 1, \"bbox\": [152.97674560546875, 89.48750305175781, 212.08480834960938, 399.6817626953125], \"score\": 0.20751795172691345, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": 
\"T_`3`0mc0Z1@7K4L3M4K6I8H8I6K4L5K3K5I7oNQ1K4M4N1N3N1N2M3N2O1O1O1O10000000000O100O100O1N2N2UJRJWLP6e3VJXLk5e3ZJWLh5g3]JULd5h3cJSL^5k3hJkIDdNf5_7lJ_IOiNW5g7mJZI5hNP5m7nJWI8fNl4R8nJUI:eNj4V8nJQI=cNh4\\\\8nJkH`0bNf4c8lJfHc7Y7aHaHb7_7`H]Hb7c7_HZHc7f7_HWHb7i7`HSHa7n7bHlGa7T8eH`Ga7`8d21O100O1O1O1M3N2N2N2N2N2N2N2N2M3N3M2N2M3N2N2M3N1M4N2N2O2N1O1N2O1N2N2N2N2N2N2M3M3H8K5L4N2N2QK_D`MH?k;j1lDXMCi0c;k1SEkLIU1V;l1YEcLM8o;P3[DaL8Kd;c3VD_Ln=a3RBaLl=_3SBcLl=]3TBdLj=^3UBdLi=\\\\3WBeLh=[3WBhLg=X3YBiLf=V3\\\\BhLe=W3\\\\BhLf=V3\\\\BhLe=X3[BfL?Jj:_3iDcL>1h:\\\\3kD^L?9e:X3ZGjLe8V3[GjLe8U3oDdL86j:V3mDoL0JS;W3lDUMLDX;W3kDZMJ]O];W3jD`MEYOa;W3iDcMETOc;Y3hDdMDSOe;W3iDcMIPO`;[3jDYMY=e2h2N\\\\EiL\\\\3P3VLaMm3Y2RLjMQ4Q2nKRNU4i1jKZNY4`1gKeNY4V1gKoNY4l0fK[OX4a0gKFV47iKOV4MjK7U4FkK>T4_OoKa0P4^OSLa0l3^OWLb0g3]O^L`0a3_OdL>Z3CjL:U3FoL6R3HRMlNdHf0Z:=eME[2:dMH\\\\27aMQOnG=b:`0_MQOSH?_:>^MSOTH>b:;YMWOVH?c:7WMZOWH?c:5WMYOXHc0a:4iMNV21kMOU21jM0V2OjM2V2NiM3W2MhM3Y2LgM5Y2JhM5Z2IfM7[2IdM7^2GcM8^2HaM8`2H_M8b2H]M8e2GZM9h2FWM:P3@oL`0T3@hLa0Y3F_L:b3F\\\\L;d3FZL;f3FXL;i3DXL;h3FXL9h3GYL7h3IYL6h3IYL5h3LXL3h3MXL3i3LXL2P4DUL8c<@ehg=\"}}, {\"image_id\": 53, \"category_id\": 1, \"bbox\": [259.714111328125, 278.7991027832031, 118.12911987304688, 88.68377685546875], \"score\": 1.0, \"association_id\": 2, \"segmentation\": {\"size\": [480, 640], \"counts\": \"Smm35i>3TCLj:5SEOj:3UENi:4UENi:4UEMj:4VEMi:4UEMj:5TELk:6RELm:5RELn:5PELP;5nDKS;=aDG^;<_DEa;=]DBc;b0YD_Og;i0mC[OR<h0kCYOU<i0hCWOY<h10O1O100O1N2N2O1O2M3M3N101N3N1N102M3M2O0O2O1N2N1O101N1N2O1O2N1N3N100O2N1O1000000000000001O0O2O1O2N1O001O2N1N2O1O2N1O1O1QC^N^<f1VCcNi<j1010O0000001O0O100O100L4BPCfNV=U1>N1O2L4M200O2B=L5M3N2Ma\\\\l3\"}}, {\"image_id\": 53, \"category_id\": 1, \"bbox\": [92.19361877441406, 182.1015625, 141.15185546875, 175.0986328125], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [480, 640], \"counts\": 
\"na_12a>T1VO<F6I5Lc0\\\\O<D4L4N1N4L7oDfLo9\\\\4J2M3M3M3M2O1O0O2O001O001O0O10cN`FZM`9c2fFZMZ9d2jFZMV9d2mFZMS9c2RG\\\\Mn8_2XG`Mg8`2[G_Me8`2\\\\G`Md8`2\\\\G`Md8_2]GaMc8_2]G`Md8`2[GaMe8_2[GaMe8`2YGaMg8`2UGbMl8_2QGcMo8_2lFcMU9^2iFbMW9a2dFaM]9o32N2O001N2L4M3N101O1O1O1O1O12N2N2mEdK^9R5M2N1O1O001O000000000001N1SOgFRLZ9]3aGSLf8g3\\\\1C>L4K5L3M4N101N101O0O201O01N2O00O110O01O0000011O0N1001102NOO0010O2N1O00O02N2N1O100N2O4L3L3kNYCMh<N`CMb<NfCM_<Je^P6\"}}, {\"image_id\": 53, \"category_id\": 2, \"bbox\": [11.47582721710205, 331.2168273925781, 164.37493896484375, 30.362030029296875], \"score\": 0.9999996423721313, \"association_id\": 1, \"segmentation\": {\"size\": [480, 640], \"counts\": \"\\\\[71o>1N2O2N1O1O001O1O1O001O0000001O0000001O00000000001O0000000000001O000001O000001O000000010O0000000001O0000010O00001O0000000000001O00000001O00000000001O0001O000001O00001O0001O0000001O0010O02N2N1O1O1O1O01O0001O00001O001O0000000001O0001O00O2O000000000000000000000000000001O0000000000000000000001O0000000001O0000001O000000mRj6\"}}, {\"image_id\": 53, \"category_id\": 2, \"bbox\": [210.54238891601562, 349.10247802734375, 129.97811889648438, 15.4971923828125], \"score\": 0.9999990463256836, \"association_id\": 2, \"segmentation\": {\"size\": [480, 640], \"counts\": \"PaV31n>101O0000000000001O0000000001O00000001O0000000001O0001O00000001O0001O0000001O0000000O2O00001O0000000000O1O1O2O1NQm83lRG2O000O10000000O10000000000000000001O000001O000001O0000000000000000001O000001O00000000001OlX\\\\4\"}}, {\"image_id\": 53, \"category_id\": 1, \"bbox\": [614.0435791015625, 151.9246826171875, 25.435791015625, 121.7901611328125], \"score\": 0.9999071359634399, \"association_id\": 0, \"segmentation\": {\"size\": [480, 640], \"counts\": \"QRP9?[>7nNR1J5I7lNT1J6L4M3O1N2O1O0100000000O100O1N2O1oNPERNP;k1VEiMS;P2Z;\"}}, {\"image_id\": 54, \"category_id\": 1, \"bbox\": [769.8787841796875, 94.6421890258789, 164.74713134765625, 425.4259338378906], \"score\": 1.0, \"association_id\": 3, \"segmentation\": {\"size\": [659, 
1024], \"counts\": \"SYd?]1ob0>E6I6G9K5L4M3M2N3K4o@RMZ<Q3ZC_M`<d2\\\\CbM`<_2^CeM^<^2_CfM]<]2^CkM[<<lB[O:h0e<L_C\\\\OROZ1[=ZOjCQ2S<mMSDQ2k;nMWDR2i;jM\\\\DU2d;fMbDY2];dMhD[2X;bMkD^2T;aMnD_2Q;aMnDa2Q;^MoDd2o:]MoDf2n:\\\\MQEf2m:[MREg2l:[MREf2n:]MkDg2T;^MbDg2_;^MlCQ3U<VM]Co2d<WMSCk2o<YMkBi2U=Z2O2O0O2O0^FcHP6]7mIhHQ6W7jIPIU6o6eIYIZ6f6bI`I]6_6_IgI`6W6^ImIa6S6]IQJb6n5\\\\IUJd6j5XI[Jh6d5PIeJo6[5jHmJV7R5gHRKY7o4dHSK\\\\7n4^HWKa7l4VH[Kj7g4iGdKW8_4_GhKa8\\\\4VGiKi8]4nFfKQ9o7M2dN]1K4O10O2K4]Od0F9N3N2N2M3L4M3N2O1N2M3K5M3N2N2N2L4B>K5N2N2M3L4C=K5N2N2O1N2M3M3N2N3N2N2N2nHQD\\\\5S<\\\\JVD`5o;UJ[Df5]=J7F9G7B?G7L4K6I6L4N3M2O1O1N2O1N3M2O2N3N1N3M3M3N1M3N2M2N3M3M3N2O1N2O3L4M3M2N3L4M2M4L4K5J6J6K5K7GYfi1\"}}, {\"image_id\": 54, \"category_id\": 2, \"bbox\": [136.11065673828125, 474.91455078125, 125.35507202148438, 23.819061279296875], \"score\": 1.0, \"association_id\": 4, \"segmentation\": {\"size\": [659, 1024], \"counts\": \"kdh22`d0100O2N10000O1O01000O1O1M3O10000O10O100000O0100000000O01000O10000O010O10000O1000O10O10000O1000O10000000O100000000000O11O1O1O1O1O1NSe9NP[F0O1J6N2O0010O010O010000000O1000000O1000000O101O0000000O1000000O10000O10000OaT]?\"}}, {\"image_id\": 54, \"category_id\": 2, \"bbox\": [288.2535705566406, 489.6175231933594, 125.32684326171875, 30.337799072265625], \"score\": 0.9999998807907104, \"association_id\": 2, \"segmentation\": {\"size\": [659, 1024], \"counts\": \"SYP61ad01O1KNf[O5Wd0500O1O1O100O1O001O1M300O001O1O01O1N2N2O1O010O1000000O1000000O100000O01000000000O0100000000001O00000000000000000000000000000000000000000001O0O10001O00001O0O1000000O10000O101O0O10000000000000000000000O100000001O000OgY[<\"}}, {\"image_id\": 54, \"category_id\": 2, \"bbox\": [769.1239624023438, 493.0179138183594, 151.0015869140625, 28.273345947265625], \"score\": 0.9999998807907104, \"association_id\": 3, \"segmentation\": {\"size\": [659, 1024], \"counts\": 
\"\\\\\\\\_?3^d04N001N1000001O00000O101O00000O2O0L400O2O000000FS\\\\ONmc0<001O00000000000O100000000000000O10000000000O1O1L4N2N2Ne[OL]d00ZP90ioF3N1O1O1O001O2N;E3M0000000000000000O10000O101N100O101O001N1O101N1000000O100000000O100000000O10000000000O100O2O0000000O1000000000000000000O1000000000001O0O100O2OZgS2\"}}, {\"image_id\": 54, \"category_id\": 1, \"bbox\": [403.8702087402344, 83.62701416015625, 153.16189575195312, 446.45654296875], \"score\": 0.9999997615814209, \"association_id\": 1, \"segmentation\": {\"size\": [659, 1024], \"counts\": \"dY[8l0bc0i0XO;F4L4M3M3M3M3L5H8G9I8J5L2O2M2N3N1M4K7J6J?Af0[Oj0UO8H<E`0A6I5M2nFTHX5P8\\\\JZHa5l7UJ[Hi5i7PJ\\\\Hm5h7mI]HP6g7kI\\\\HS6h7gI\\\\HX6g7bI^H\\\\6g7\\\\I]Hc6g7TI_Hk6e7oH^HQ7d7jH_HV7d7cH`H]7[:O100000O1O1O0O2N2M1O2M2N1O1O1N1L2O4L411O01N1N3N11O1N2N3N1O2O1N2M3K5K5L4M3L4H8F:L4L4M3L4J6K5L4N3N2N2N2N2N3L7H:D`0B<aHUCb6b=K4K6I6I8J6J5K4G9eN[1G8K6L3N2N2O1O2N200O102M3M2O0O100000O10O101N2O2M3L4L4K6K7I5K6H`0^OYZ\\\\9\"}}, {\"image_id\": 54, \"category_id\": 1, \"bbox\": [126.18147277832031, 92.83087158203125, 130.0946807861328, 378.6291198730469], \"score\": 0.9999995231628418, \"association_id\": 4, \"segmentation\": {\"size\": [659, 1024], \"counts\": \"PUb27Xd09I4J6XKWOSBS1Q1KR<3UBi0`1XOU<_2gCbMU<e2hC[MS<P3fCQMU<Z3dCgLY<^3dCcLZ<a3bC`L\\\\<f3^C]LZ<W4PCRLf<S6H7F9I7N2N3O1O01M2N4N1O2M3]MeDiK`;Q4UE\\\\Ko:a4aEnJc:o4b2K5N2N2N2O1N1O2N2O1O010O01N2^Ob0C=M3N2M5J7J7J4M3N3M5K4L2N2N4K5L3M1O3M5K3M1O2N2N3M4L2N3M7I:F5L2M5YFhFi7S;N2N10O1O1O1O2N2N2N1O1O1mKdFmL\\\\9k2ZGfLh8l0]GhL`0Q2U8P1jGYLd0`2d7R1_JkMiKVOk9i2eJ`MbLoNn8Z3lJnLP7i2a5K5L4K5J6K4L6J7I7H7K5L3N1O2N2O2N2N2N1O100O1O100O1O2L7I8HZP`?\"}}, {\"image_id\": 54, \"category_id\": 2, \"bbox\": [424.5628662109375, 494.67596435546875, 129.455078125, 29.2298583984375], \"score\": 0.9999994039535522, \"association_id\": 1, \"segmentation\": {\"size\": [659, 1024], \"counts\": 
\"RPb86]d01O000O2O000000000O2O00000001O0Ii[O1Xd0M`^?3da@3M2N4M7I0O2OO10000000O100000000O01000000O0100000O1000000O10000O100000000O100000000O10000O1000000O10000O10000O10000O100Jk[OMVd03k[OKWd04400000000000001O0001O00001O0000000000O2O00001NSd^9\"}}, {\"image_id\": 54, \"category_id\": 1, \"bbox\": [263.4728698730469, 107.09162902832031, 144.09732055664062, 394.49603271484375], \"score\": 0.9999722242355347, \"association_id\": 2, \"segmentation\": {\"size\": [659, 1024], \"counts\": \"ZS\\\\54]d04L3M4L2M3M3N3M2O1000O010000N2N2M2lBQOd6Q1YITOc6n0[IUOd6j0ZIZOe6e0ZI]Of6b0XIAh6=XIFg68ZIIe68QEBn17P98lDLD]Oj0?e:9aDT2=dMQ;9WDc2c0TMU;:SDi2f0mLT;a0oCh2i0hLS;[5hDgJ`:X6VEkI[:^8M3N2N2L4K5N2N2N2N2^NQF\\\\HR:Z7jFjGa9T8_1000bNXHPFk7`9k1H9F9F;D<F:J6M3M3M3M3N2N2O100O100O11O001O001O002N3M5K5K3M4L6J7I4L2OO01O10O011O00V1iN7J1O0N2N2N1O2N2O1N2O2M4M1N3L3L4L4fKdBY1_=cNgBY1[=eNhBW1]=eNiBS1]=jNhBn0^=nNgBj0_=SOeBg0_=XOdBc0`=[OdB>a=_OcB:c=D`B7e=F]B6g=G]B5e=J^B2e=L^B0e=N]BOe=0]BMf=1\\\\BMg=O\\\\BNg=0\\\\BMe=2]BKf=2]BLd=2`BEh=7]3Gmdb<\"}}, {\"image_id\": 54, \"category_id\": 2, \"bbox\": [404.2482604980469, 224.16574096679688, 143.85177612304688, 269.5411071777344], \"score\": 0.06038331240415573, \"association_id\": 0, \"segmentation\": {\"size\": [659, 1024], \"counts\": \"PPcd0\"}}, {\"image_id\": 55, \"category_id\": 1, \"bbox\": [296.1820373535156, 229.79766845703125, 102.65625, 318.5133056640625], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"n\\\\X64md0m0[O`0@>B:G4K5L4K6K3M2N2N3N2N4K6K3M6J9G4L2YNlKRBY4h=QLPBR4l=RLPBQ4m=RLoAR4o=RLkAT4Q>RLdAY4V>kKbA]4Z>_1K=C9G3ZDWHd9n7UFVHi9P8oESHP:P8lERHS:Q8gERHY:Q8`ETH_:n7\\\\EUHd:l7ZEUHf:m7VEUHj:S8kDPHU;S90000000000000000000fMkE^IU:_6SF[In9e6UFXIk9h6WFUIj95PE\\\\5Y1[Jh96TE]5W1XJi96TEa5W=TJQCm5V>010O1O1O1O1O001O1O00001O01O100O2dNPAZLP?`3\\\\AZLd>_3gA_LW>]3nAcLQ>Z3RBfLm=W3WBiLi=R3\\\\BnLc=o2aBPM_=m2fBQM[=i2nBSMR=i2TCUMl<h2XCVMj<d2]CYMd<\\\\2\\\\3K5K7G9H7I9E?ZOdbR=\"}}, {\"image_id\": 55, \"category_id\": 2, 
\"bbox\": [343.4925842285156, 514.655517578125, 81.30520629882812, 30.79705810546875], \"score\": 0.9999998807907104, \"association_id\": 1, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"Zo_71hd03>?A10O100O100000O1000000000000O10000000O01000O010000O010000O10O0100O100O100O1O100O100O1000000O100O10000O100O100O100O100O10001NUf_<\"}}, {\"image_id\": 55, \"category_id\": 2, \"bbox\": [500.02935791015625, 468.5456848144531, 47.10687255859375, 14.365264892578125], \"score\": 0.9999998807907104, \"association_id\": 2, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"bYa:2Ye01O0O1O1M3O2M102O00O1000000000000O100O10000O100000000O100000000O2O000O10000O101N2OVfm9\"}}, {\"image_id\": 55, \"category_id\": 1, \"bbox\": [474.4131164550781, 283.76947021484375, 67.21420288085938, 194.68099975585938], \"score\": 0.9999995231628418, \"association_id\": 2, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"mTm9<ld05K4M3M3L4N100OO01O101O10001N2O1N3L6[\\\\OhNSb06P^Ok1ha0[NQ^Oj1la0o0L2N2Z@cLZ=_3`BkLZ=X3`BkL`=X3[BkLd=X3XBiLh=Y3VBgLj=\\\\3RBeLn=`3mA`LS>f3gAZLY>j3cAVL]>o3]ARLc>Q4ZAnKh>S4YAiKi>W4\\\\A_Kh>b4Q1010d0[O3M10OfNT_OUNj`0h1[_OWNe`0f1^_OYNb`0e1a_O[N_`0b1e_O\\\\N[`0b1i_O[NX`0d1k_OYNV`0d1P@WNR`0c1n1J6M4H8K5L4M3L4H8K6L8DijS:\"}}, {\"image_id\": 55, \"category_id\": 1, \"bbox\": [783.78564453125, 287.01336669921875, 54.173583984375, 145.75338745117188], \"score\": 0.9999995231628418, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"iW[`0b0hd04K6J3M4L3L3L6K4M6K4L7H6K6Jj0WO5I8A<J6M1N100000O11O001O0000100OO4N3M2M3L3N100N2O1N2O0O1N3ZM]^OU1Rb0gNU^Oi0Xb0SOQ^O6ab0Fi]OBib0<U1M4Lngm3\"}}, {\"image_id\": 55, \"category_id\": 1, \"bbox\": [900.4688110351562, 294.7135314941406, 35.59344482421875, 131.63485717773438], \"score\": 0.9999854564666748, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"gblb0;P17ECna0R2^]OeNUb0]2L6J2N5K5K2N1O1OO11O000100N5M6H4M3N2\\\\NU^OBna05_^ODfa0WOU^OOa0f0ac0K5K4K5JbYl1\"}}, {\"image_id\": 
55, \"category_id\": 1, \"bbox\": [976.81982421875, 309.3539733886719, 32.551513671875, 88.0675048828125], \"score\": 0.9963900446891785, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"hd\\\\d08od08J5J5K5K8I5I>B7J301N1000001O:F5K10O001N3M=A6aNg\\\\O1=]ORc08m\\\\O5ic0C_\\\\O7Sd=\"}}, {\"image_id\": 55, \"category_id\": 1, \"bbox\": [939.3831176757812, 312.0325622558594, 37.91058349609375, 89.82156372070312], \"score\": 0.9950856566429138, \"association_id\": 0, \"segmentation\": {\"size\": [683, 1024], \"counts\": \"cYcc05Se0:H:E5L6J5J5K4K6J7H6M3N1O1O0000O1N20000N2O202mMm\\\\Oi1[c010YOe\\\\OZO\\\\c0b0j\\\\OZOYc0a0m\\\\O[OUc0a0l0O2N5I6EVfQ1\"}}, {\"image_id\": 56, \"category_id\": 1, \"bbox\": [424.3655700683594, 250.07000732421875, 254.07595825195312, 119.59832763671875], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [601, 785], \"counts\": \"egi7h0la0;I2N101N1N3N2M3N2N2O1N101N100O2O0O2O000O2O0000000O101O0O10000O10O10000O010000000O0100O10O0100O0100O10O1O10OO200O100O1O1O1O1N2O2O0O100O101N1O1O1O2O0O100O100O101O0O1000000O10000O10000O10000000000000000000000O101O0000001O00001O0010O01O2N1O001O1O001O1O2N2N001O1O00001O0001O01O000001O000O100O10000O2O000O100O1N2N2O100O100O1000000O010O100000000000000000O10001O00001O0000001O0000010O00001O0010O000001O00000001O0001O01O001O1O001O00001O001O3M1O1O1N2O001O003M3M2N1O1O001O0O2O1O1N3N2M2O0O2O001N3N3L3N1N2O2M2O2M3M6Gl_n1\"}}, {\"image_id\": 56, \"category_id\": 2, \"bbox\": [344.65130615234375, 340.2510986328125, 301.32769775390625, 35.070709228515625], \"score\": 0.9999991655349731, \"association_id\": 1, \"segmentation\": {\"size\": [601, 785], \"counts\": 
\"cb\\\\62gb01O000O101O0O2O001O0000000000000O2O0000001O0000000000001O0000000000001O000000001O00000000001O000000001O0000000000001O00000000001O000000000000001O00000000000010O0000001O01O01O0000001O101N1O0010O0000000001O0001O000000001O000O1000000O2O0000000O1000000O10001O0O1000000000000000001O0000000001O0000001O01O00000001O00000000000000001O0000000001O0000000000000001O001O00001O000000010O00002N1O00001O0000001O001O5K1O1O001O00001O00000000001O0000000001N100O100O100000000O1000000N2O100O10000000O10000000000000000000000000000O10000000000000001O000O100000001O0000000O1000000000001O0000000O10001N10eZe2\"}}, {\"image_id\": 57, \"category_id\": 1, \"bbox\": [204.14007568359375, 109.88412475585938, 246.6812744140625, 240.8760986328125], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [480, 640], \"counts\": \"aYP3k0P>8H8G7B?C<H9G:Ec0C9G8I4L2N2M3L5K5L4L3N2N2O1N1O2N1O2N2N2N2O1O000O10000000000000000O10000O2O000O101N101N2O1N3N1N2O1O1O2N2N?A9G3M1N2O1O001O001O1O001O1O1O0000001O000000000000O1O100O1O1O1000000000000O1000000000000000000000000000000000000000000000000O1000000000000000000000000000001O00001O001O001O1O1O1O001O001O001O1O1O3M2N2N2N1O1O1O1O2N2N2N1O1O1O001O1O1O2N1O1O1O1O001O1O1O2N1O1O1N101O1O1O1O2N1O1O001O001O000O2O001O1N101O1N2O1N101N2O001N2O1O1O2N2M2O1O1O1O1N3N3M9G:E3N2N1N2N3M4K;D_1gMQCGX_l2\"}}, {\"image_id\": 57, \"category_id\": 2, \"bbox\": [101.343505859375, 258.8916015625, 329.2893371582031, 114.54922485351562], \"score\": 0.9999960660934448, \"association_id\": 1, \"segmentation\": {\"size\": [480, 640], \"counts\": 
\"lR`17f>6L4M1N101O0O2O001O1N4M2N001N2O001O001O1O1O1O6J2M2O001O00001O0O101N5L5K2N2M101O00001O0O2O009G4L1O001O00001O00001N2O001O001O00001O000O101O0O100000001O000O1000000000000000000000000000000O10001O000O100001O000000010O000001O01O00001O1O3N9F3M1O1O1O001O00001O001O00001O00001O0000001O0000000000001O0000001O1O001O1O1O1O001O001O1O002N4Ll0TO2N1O1O001O0000001O0000001O00001O0000O100000000O1000000000000O100000000O10000O10000000000000000000000O1000000000000000O100000000000000O100000000000000O100000000000000000000O100000000000000000000000000000000000000000000000000000000O100000000O1000000O100000O1000O1000000000000O101O0O1O1O1O1O1O1O2N1O1N\\\\eX3\"}}, {\"image_id\": 58, \"category_id\": 1, \"bbox\": [464.7535400390625, 161.6643524169922, 108.36590576171875, 269.33563232421875], \"score\": 1.0, \"association_id\": 0, \"segmentation\": {\"size\": [431, 640], \"counts\": \"g\\\\T6:Q=5K6I6M3M2K4M4M2N3M2N3N1O00M4M3N3N1N2OdFTOX6k0gI^OP6c0mIJj56SJ:`5F]Jd0\\\\5\\\\OcJi0Z5WObJT1W5lNeJP2b4QN[KU2a4mM\\\\KW2`4kM^JQOBY3f5PN\\\\JZ3V5TMdJR3W5e2K5M3O10O10O01N1000O0100M4L3O2N2M3N2O1O100000101002N3L3M3N1O10N4L3L3N0N4M3M1O001O0O1000000000001fLoIUOP6e0]JTOc5j0bJSO_5i0iJROX5j0WKjNk4Q1ZKmNh4Q1ZKmNg4Q1]KlNd4S1`KiNa4U1bKiN^4V1eKhN]4T1kKeNW4W1YL\\\\Nj3`1^LXNd3e1fLoM`3m1S4L4L5K9_Nlnn0\"}}, {\"image_id\": 58, \"category_id\": 1, \"bbox\": [357.50579833984375, 185.5033416748047, 32.615081787109375, 84.20384216308594], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [431, 640], \"counts\": \"QZg43Y=5N0N1L4M4N3M4N7Hf0YO4M4J<D5M2N2O001O0001O2N3M3K2O3M2QOjDAZ;:P1K8I[^Y3\"}}, {\"image_id\": 58, \"category_id\": 2, \"bbox\": [348.5805358886719, 263.786376953125, 28.71514892578125, 7.17791748046875], \"score\": 0.9999483823776245, \"association_id\": 1, \"segmentation\": {\"size\": [431, 640], \"counts\": \"hjd41^=1O000000001O00000O101O0000000010O002N1OZf_3\"}}, {\"image_id\": 59, \"category_id\": 2, \"bbox\": [128.35496520996094, 348.8900146484375, 277.80511474609375, 46.970306396484375], 
\"score\": 0.9999992847442627, \"association_id\": 1, \"segmentation\": {\"size\": [428, 640], \"counts\": \"ikj11[=0000000Xk55aTJ3O001O1O1O3RCBh<a000000001O0000000000000000O100000000001O000000000001O0000000000000000000O101O000O103IRCIcZ34\\\\eLM10O0k<a0M1N101O000O1000O1000000O1000O100000O10000O100000000O100000000000000O10O100000000000000000001O000000001O000000001O0O1000000000001O000000000000000000000000000O2O00F`CH`<8bCF^<9cCG]<9cCH\\\\<8dCH\\\\<8dCH\\\\<7fCHZ<8fCHZ<8fCIY<7gCJX<6hCKW<4jCNT<2lCNT<2nCMQ<3oCMQ<2QDMo;3QDMo;3RDLn;3UDKk;5WDIi;7f0O100000000000000000000000000001O01O0000000001O01O0000001O00000000001O0000001O00000000000010O0001O00000010O000001O00001O01O000001O0000010O00000001O01O0000001O0000001OdSS3\"}}, {\"image_id\": 59, \"category_id\": 1, \"bbox\": [27.19635009765625, 108.3047866821289, 426.774658203125, 311.8355407714844], \"score\": 0.9999985694885254, \"association_id\": 1, \"segmentation\": {\"size\": [428, 640], \"counts\": \"m[i0>l<4K5UE^Of8f0TGBf8a0UGFf8=WGGe8<YGEe8>PFTOn0>P9m0nFTOo8Q1nFPOQ9R1mFoNR9S1lFnNS9U1jFlNT9X1iFjNU9Z1gFgNX9_1bFbN]9c1^F^NR9QOXGf3d8cLVG^3g8hLUGY3h8g0N2O0NoN`G\\\\M^8b2hGZMU8g2SHSMi7P3^HjL^7Z3hH`LT7e3QIULi6Q4T1500O100000010O1O1O0O2O1N2N101hM_KcKc4]4^K`Kd4`4^K]Kc4b4`K[K`4f4bKWK_4i4mKjJT4V5nKhJR4W5PLgJR4X5oKgJQ4Y5QLdJQ4[5PLcJR4\\\\5PLaJR4^5PL_JR4_5QL]JR4b5RLWJS4g5PLQJV4n5e1O1O2N1N2RKdHj3]7SLfHl3Z7SLgHm3Y7SLgHm3[7oKgHQ4_7fKdHZ4W8000O100000000aMVK\\\\Lj4c3ZKZLf4d3^K_JKY1g4W4aKZJM^1b4W4eKTJNd1]4W4WLdJPOa0i4i4[L`JSOd0b4k4_LYJWOi0Z4k4XMQKi2l4\\\\MRKd2j4cMSK]2i4iMTKX2k4jMSKW2k4mMRKT2m4nMoJU2P5T3O1O100O100O1O100O1O100O1O10000O10000000000001O00001O00001O001O001O001O001O001O00010O0000001O01O00010O1mKUHc2k7ZMYHe2k7TMZHk2k7iL]HU3o8N2O0O100O10O0fHjL_4V3]KoLa4R3ZKSMe4m2]KPMd4Q3_KiLc4W3j20O001O0ZGcL]7]3bHeL]7\\\\3aHfL^7Z3`HhL`7Y3^HiLa7W3]HlLb7U3\\\\HlLd7T3ZHnLf7S3XHoLg7Q3YHnLh7R3ZHkLg7U3\\\\HgLe7Y3_HaLc7_3U10000000000000000000000O100O1000000O100000000O10000000000001O00000000000000001O000O10001N101N101N1O2N2N2O0O2N2O001N2O001O1O1N3N2N3M4L3M4K4M
2N3M0O2O001N1O2QIkJd5W5SJRKl5V3gIgM:VOm5k2_JcMAEn5b2kJgMTOIP6_2oJiMlNLS6[2SKoNm4o0VKQOh4o0YKQOg4n0[KROd4m0]KSOb4m0`KQOa4n0aKPO`4o0bKoN^4P1eKeMXN;T6n1gKXMjNb0_5T2RMgMo2V2XM\\\\MQ3a2o3L3M3000001O010O001O100N101N2O1O1O001O1O100O10O010O100O1O00100N110O1O010O1000001O100O001O1O001O010O000010O010O101O001O1O1O1O1O00000O100O10000O101O1O2M3N1O0O10O01O10O0001O1O001O1O1O2N3M2N1O2N1O1O1O1O0O2N3N1N2N3M<[ORjc2\"}}, {\"image_id\": 59, \"category_id\": 2, \"bbox\": [56.63185501098633, 358.8075866699219, 109.94316101074219, 46.431671142578125], \"score\": 0.999883770942688, \"association_id\": 0, \"segmentation\": {\"size\": [428, 640], \"counts\": \"gch03Y=00O[S2OWZ34PbJM^C6\\\\<NdC2Z<1eC0X<2gCOOHQ<:PDNOHP<;QDL0Io;;QDL0Jn;:RDL0Jm;:TDK0Kl;:TDK0Kl;:SDL1Jl;:SDK2Kk;:SDK2Kk;;RDJ3Kj;<SDI3Kj;>oCH8Ji;i0WDWOi;i0WDWOi;h0YDVOh;j0XDVOg;j0>000kCWOg;i0YDWOg;i0YDWOg;h0ZDXOf;h0ZDXOf;g0[DYOe;f0\\\\DZOd;f0\\\\DZOd;e0]D[Oc;c0_D]Oa;a0aD_O_;?cDA\\\\;>fDBZ;<hDEW;9kDGU;9kDGU;9kDGU;8lDHU;7kDIT;8lDHT;7mDIS;7lDJT;6lDJT;6lDJT;6lDJT;6lDJT;6lDJT;5lDLT;4lDLT;3mDMS;3mDLT;4kDMU;2kDOV;0iD0X;OiD1W;NiD2Y;KiD5Q<KSC6n<KPC6P=21O1O1O100O10000000001N10001O0OdRY6\"}}, {\"image_id\": 59, \"category_id\": 2, \"bbox\": [73.09777069091797, 196.880615234375, 18.878623962402344, 5.022735595703125], \"score\": 0.057201556861400604, \"association_id\": 0, \"segmentation\": {\"size\": [428, 640], \"counts\": \"gno02Z=1N10000000O10O10O10000O1000gXU7\"}}, {\"image_id\": 60, \"category_id\": 1, \"bbox\": [9.409642219543457, 234.85452270507812, 115.73822784423828, 216.20230102539062], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [471, 640], \"counts\": 
\"SW73a>4K6L3N3M2M2N3lDYOT8j0eGFP8;mGKo75lG3Q8NeG<Z8DbGb0[8_OdGd0Y8]OeGf0W8]OeGh0Y8YOdGk0Z8WOcGl0[8VOcGl0Z8WOdGk0Z8WOcGl0\\\\8UOcGl0[8VOdGk0Z8WOeGj0Y8XOgGh0W8YOiGh0W8YOgGh0X8YOfGi0Y8XOeGj0Y8bNkF]Ok0R2Y8aNmF^Oh0R2Z8^NRG@b0S2[8[NWGC;S2^8XNYGF7S2_8XNZGF4T2`8WN]GGOT2c7^McHh0LHIU2g7^M`Hf00Q3_7\\\\L]He04o2_7`LWHc08o2`7nM]HT2c7mMZHU2f7mMVHU2j7\\\\20000000O1000000N2N2L4M3N2L4L4M3M3K5M3N201N2N2N4L4L6J4L3N1O2N1O0001O001O100O0010O101N1O001O001O001N2O3L2M4L5Jk1UN9H4M4K7H6J5L;@m[^7\"}}, {\"image_id\": 60, \"category_id\": 1, \"bbox\": [263.4026184082031, 234.44102478027344, 15.250335693359375, 41.07145690917969], \"score\": 0.9999997615814209, \"association_id\": 4, \"segmentation\": {\"size\": [471, 640], \"counts\": \"VTj35>LC5g=j0K4N2O10000000O1HbBVOa=`0c0I5N6GhiU5\"}}, {\"image_id\": 60, \"category_id\": 1, \"bbox\": [307.2099609375, 230.86102294921875, 27.46966552734375, 62.025390625], \"score\": 0.9999996423721313, \"association_id\": 2, \"segmentation\": {\"size\": [471, 640], \"counts\": \"o[^4`0Q>:I5L2N5L1N2OKgBnNW=[1N4L4M4L2N2N0000O3N3L4L3XOZCYOj<b0h0M4N3LV`\\\\4\"}}, {\"image_id\": 60, \"category_id\": 2, \"bbox\": [52.578495025634766, 395.2403259277344, 255.87933349609375, 66.0196533203125], \"score\": 0.9999977350234985, \"association_id\": 1, \"segmentation\": {\"size\": [471, 640], \"counts\": 
\"]Ql01d>201N1O100000O010O100O1000O10O101N1O^Q52`nJ2N101O0O101N100O10000@H`B9_=HaB8^=IbB7]=KbB5^=KaB6^=L`B5`=N\\\\B2d=0ZB1e=a0O1000O0100hBQOh<o0WCSOh<m0WCUOg<l0YCTOg<k0ZCUOf<k0ZCVOd<k0[CVOe<j0[CVOe<k0ZCUOf<g0PCTO96g<f0PCTO86h<f0QCTO67i<d0RCUO57i<d0SCTO39i<c0UCUO18j<c0UCUO09k<b0UCVOO8l<b0UCFk<:UCFk<:TCGl<9TCGl<8UCHk<8UCHk<8UCHk<8UCHk<:SCFm<;RCEm<=RCCn<T1O00000000000000LSC]Nn<c1SC\\\\Nm<d1SC\\\\Nm<c1400LoBaNR=^1400O1O101L3N3N1O1O101O0O100000001N1000000001O00000000000000000O10O10000000000000000000000O10000000000000000000000E\\\\BCd=<^BCb==^BCb=<_BDa=<_BDa=;aBD_=<aBE^=;bBE^=;bBE^=:cBF]=:cBF]=:cBF]=9dBG\\\\=9dBG\\\\=9dBG\\\\=8eBH[=8eBH[=8dBI\\\\=7dBI\\\\=7dBI\\\\=7dBI\\\\=7dBI\\\\=7cBJ]=5dBK\\\\=5dBK\\\\=5dBK\\\\=5cBL]=4cBL]=4cBL]=4cBK^=4cBL]=4cBL]=4cBK^=5aBL_=4aBL_=4aBL_=4`BM`=3`BLa=4_BLa=5]BKd=5[BKf=6YBIh=8VBIi=:UBFk=b010000O100O10000O10000O1O1N2O10000O1000000O100O1000000O1000000O101O0O100000000O10000O101O00001N2O2LlYh4\"}}, {\"image_id\": 60, \"category_id\": 1, \"bbox\": [290.0899353027344, 234.53099060058594, 18.66949462890625, 42.67057800292969], \"score\": 0.9999955892562866, \"association_id\": 0, \"segmentation\": {\"size\": [471, 640], \"counts\": \"aaV47]>8H9G8J8I1O001O01O1O2M4K4[Of0Jgmh4\"}}, {\"image_id\": 60, \"category_id\": 1, \"bbox\": [382.0120849609375, 240.34469604492188, 15.713043212890625, 30.6732177734375], \"score\": 0.9999531507492065, \"association_id\": 3, \"segmentation\": {\"size\": [471, 640], \"counts\": \"gn_55b>O10O1N1N3G9L7L2O1O0001O2N1@_a_3\"}}, {\"image_id\": 60, \"category_id\": 2, \"bbox\": [274.228271484375, 273.0682373046875, 23.9891357421875, 3.24737548828125], \"score\": 0.9998472929000854, \"association_id\": 4, \"segmentation\": {\"size\": [471, 640], \"counts\": \"eeo31e>10000000000000000000000000000001O00fnm4\"}}, {\"image_id\": 60, \"category_id\": 2, \"bbox\": [315.4507141113281, 289.6347961425781, 86.15570068359375, 13.105316162109375], \"score\": 0.9997546076774597, \"association_id\": 0, \"segmentation\": {\"size\": 
[471, 640], \"counts\": \"Wff42d>1000000001O000001O000000000001O00000000001O0000000001O00000000001O00000001O000001O000000000000001O0000000000000000000000000MMbA3^>30O10000L`A0a>0b__3\"}}, {\"image_id\": 60, \"category_id\": 2, \"bbox\": [393.1884460449219, 267.2611999511719, 28.889434814453125, 3.968048095703125], \"score\": 0.9993129968643188, \"association_id\": 3, \"segmentation\": {\"size\": [471, 640], \"counts\": \"P]f51f>1N10000001O000000000O10eoY3\"}}, {\"image_id\": 60, \"category_id\": 2, \"bbox\": [328.3642883300781, 278.17413330078125, 49.803619384765625, 7.977569580078125], \"score\": 0.832843005657196, \"association_id\": 2, \"segmentation\": {\"size\": [471, 640], \"counts\": \"Ucg42e>001O000000000000001O000000000000000001O01O0001O0000000000001O00000O10lVn3\"}}, {\"image_id\": 60, \"category_id\": 2, \"bbox\": [272.18609619140625, 273.92022705078125, 35.4150390625, 2.906829833984375], \"score\": 0.21080201864242554, \"association_id\": 0, \"segmentation\": {\"size\": [471, 640], \"counts\": \"nVo31f>0000000000000000000000000000000000000fnm4\"}}, {\"image_id\": 60, \"category_id\": 1, \"bbox\": [307.5927734375, 232.60826110839844, 14.417388916015625, 53.81443786621094], \"score\": 0.08518064767122269, \"association_id\": 0, \"segmentation\": {\"size\": [471, 640], \"counts\": \"W\\\\^43c>4^AKQ>OVBb0]=`0L3N2OK500:FHhBTOW=;_C]Od<a0j00YQb4\"}}, {\"image_id\": 60, \"category_id\": 2, \"bbox\": [299.5592956542969, 275.9848937988281, 14.429107666015625, 3.46771240234375], \"score\": 0.0627155601978302, \"association_id\": 0, \"segmentation\": {\"size\": [471, 640], \"counts\": \"US\\\\42d>10000001O00jQg4\"}}, {\"image_id\": 60, \"category_id\": 1, \"bbox\": [287.3069152832031, 233.28712463378906, 43.46136474609375, 49.71995544433594], \"score\": 0.051068175584077835, \"association_id\": 0, \"segmentation\": {\"size\": [471, 640], \"counts\": 
\"`aV47^>5J9H9H8H3N0010O2N1O2M2M3F:N2N1CQB2Z>1CMRB8i=?J8J00O04M10McBPOX=[1M1O1O000000TOlB5U=JlB5T=g02N2^OlB\\\\OW=?d0Jel]4\"}}, {\"image_id\": 61, \"category_id\": 1, \"bbox\": [80.99006652832031, 171.64312744140625, 148.221435546875, 276.6790466308594], \"score\": 1.0, \"association_id\": 2, \"segmentation\": {\"size\": [473, 640], \"counts\": \"miY1?V>7I7mCTOQ:P1dE^OV:e0eEBV:`0fEFV:=cELX:6aE4Y:0`E8[:M_E8^:OYE6`0[NT9g3gF_LU9e4M3M4L2M4L3M2N3N2N2N4K5K3N3M2N4L9H4K3L3M5K6K5K3M3L3L4M3M2O2N1N101N2O10O1O0O2N2N2N2M3I7J6N2N201N100O1010102M3N3L2N3N0O0O2O0O1N2N3M2N2M3M3M2L5M2lMbGkMa8U43K6J6K4L3N3M4K3M4L3O0O2O1O2N1O1N2O0O2N2N2M3N3L4M3N3L5J5J5L5L3M2N2O1N101N1O2M2O1O2N1O2O2M3M2TOoCmNS<o0WDiNk;S1P1N2N2O1N3L7HSbm5\"}}, {\"image_id\": 61, \"category_id\": 2, \"bbox\": [600.4284057617188, 314.3984375, 38.2662353515625, 18.576263427734375], \"score\": 0.9999998807907104, \"association_id\": 1, \"segmentation\": {\"size\": [473, 640], \"counts\": \"\\\\me81h>2N001N2O010O001O010O001O00001O01O000010O00010O010O01O01O01O01O010O0010N2Ne4\"}}, {\"image_id\": 61, \"category_id\": 1, \"bbox\": [580.66748046875, 159.95236206054688, 51.063720703125, 158.11105346679688], \"score\": 0.9999991655349731, \"association_id\": 1, \"segmentation\": {\"size\": [473, 640], \"counts\": \"PQ]8:]>8Hk0TO5M4K4K3M5Ii0ZO7I;E?@8H6K3N3M3N1O0O2O00O01O01O1O1O1O3N1N102M4K<A8G9D:F:M3M220K5HoCjMl;R2`0M4hN[C3>]Ob;9[D24Ga;5dDJM1`;3fEK[:OQQ4\"}}, {\"image_id\": 61, \"category_id\": 2, \"bbox\": [81.02239990234375, 432.12042236328125, 130.49742126464844, 40.87957763671875], \"score\": 0.9999985694885254, \"association_id\": 2, \"segmentation\": {\"size\": [473, 640], \"counts\": \"aZV11f>4M2O1N2O000O2O001O00001O0O1000000000000O10000O10000O1O100O1O1O10000O100O1O100O100O100000010O000000000000000O10000O1O1O1O1O100O1N200O1N101O10000000000000000000000000000001O;E1O1O001O1O1O2N002N1O1O0000001O0000001O0000001O001O1O002N2N001O1N2O001NX_X6\"}}, {\"image_id\": 61, \"category_id\": 1, \"bbox\": [532.1072387695312, 
327.3283996582031, 43.50531005859375, 136.38128662109375], \"score\": 0.9999911785125732, \"association_id\": 0, \"segmentation\": {\"size\": [473, 640], \"counts\": \"k`g7P1f<\\\\OlCm0i;W1XOh0I7F8M4M4M2O1N10000000000O10000000000000000001O00000O3L2N3N4L3K8JY1dN[Qo0\"}}, {\"image_id\": 61, \"category_id\": 1, \"bbox\": [298.2314453125, 66.21898651123047, 249.889892578125, 406.781005859375], \"score\": 0.9999797344207764, \"association_id\": 3, \"segmentation\": {\"size\": [473, 640], \"counts\": \"kh[49Z>8C>F:K6J5K4M3L4M2N2M3N3L4L4L4M3L4`E\\\\Mf8f2VGWNn7m1nGXNm7n1lGZNm7n1hG\\\\NP8]4I8G7I7J4L4M1N3M2N2N2M4M2O1N20O01O0001O000O10000O0O2L4L3M4O000001O0K5[Oe0O0101O010O11O0O2N2N101O001O1000001O0O2O001O00001N101O0O2O0O2N101N101N3N1N3N2M3N1N2O2M2N3M3N9F9H5J4M2M4M2M4L4M4K4L2M4L3M2L4M4K4M3M3M4M2N2O1O10000O10O10O1001O1N2O2M3N2M2N3N2M3G9F;\\\\Od0D;I7K6K3N3L3M4L4K4M4K5L5K3M3M3N2N1O2N1O2N1O1O2O0O2O001O001N101O1O2O0O2N100O1O010O1O1O100O2N2N2O1N1O1O1O1N3N3L=Dc0]O3L3N2N3L4M2N2M3N1_NTGRMn8l2UGRMl8k2XGSMi8k2ZGRMi8l2ZGPMh8n2\\\\GoLf8o2d1M2O2M2O1O1O1O101N3M2O1N101N0O2N2N2N3L3N3M4L3K6D<DcU[1\"}}, {\"image_id\": 61, \"category_id\": 2, \"bbox\": [235.2852325439453, 296.7163391113281, 39.79237365722656, 34.38531494140625], \"score\": 0.9994719624519348, \"association_id\": 0, \"segmentation\": {\"size\": [473, 640], \"counts\": \"^k\\\\34b>4I7N1O2M2N2O1O2N10001SBVOj=n0N1O00000001O1O00001O001N2O0O1O2L4N8H3N1O0O100000OO3NThX5\"}}, {\"image_id\": 61, \"category_id\": 2, \"bbox\": [349.9568786621094, 397.03314208984375, 232.24325561523438, 73.923583984375], \"score\": 0.9994509816169739, \"association_id\": 3, \"segmentation\": {\"size\": [473, 640], \"counts\": 
\"fSR5:]>4L3N2O01O1O1O1O001O1O00001O1O001O000O2O00001N3Nbl<MXU53Tn]O3N2O0O1N2O1CJVB7i=KUB6k=JTB7k=KTB5l=<0000UOUOhCk0U<YOjCg0T<[OlCe0R<]OnCc0R<]OnCc0R<]OnCc0R<\\\\OoCc0S<[OnCe0S<YOnCf0S=00O100O101O00000O10000000O10000O2O001O001N101DTOhBm0W=ZObBh0\\\\=;N2O101N10000O2O0O1O1O2O0O100O100O1O1O1O100000000000000000000000000000000001O1O2Nh0XO7I3M1O00001O003M2N1O001O0000001O00001O000000O100O10000O1000000O1O1O1O1O10000O10O10O100O1O1001O2L5KV[j0\"}}, {\"image_id\": 61, \"category_id\": 2, \"bbox\": [327.5413513183594, 439.8914489746094, 63.188018798828125, 33.108551025390625], \"score\": 0.9887808561325073, \"association_id\": 0, \"segmentation\": {\"size\": [473, 640], \"counts\": \"]ng4=Y>4M6K2N2O00000000000001O00000000000000O10000O1000000001O1O001O1O1O1O1O2N1O1O1O2N001O1O0000001O0GeA4[>KfA4[>LgA1iah3\"}}, {\"image_id\": 62, \"category_id\": 1, \"bbox\": [145.02052307128906, 141.27272033691406, 154.11412048339844, 186.96546936035156], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [375, 500], \"counts\": \"oah1<b1Ho7i0[GKU8Q2J6L3N1^HPMZ6S3aISM\\\\6S3TIYMi6m3N1O00000O1000000000000000000001O000000000000001O00000XNiIPNW6m1RJlMP6Q2VJkMj5S2YJkMh5T2[JiMg5U2\\\\JeMh5Z2l1O1N101O00001O00000000000001O001N101O0O3M2M2O2N1O2M3N2N2N3N0O3N2N8G5L4L4L5`IoKP5Z4`JTLZ5Q4^JRLa5P4ZJSLe5P4XJRLg5P4RJULm5k400O100O10O00100N2N10100O100O100O1O1O100O1O1N2N2L4^OfIcK^6Y4b0J6TOl0M3N2O1N2L4M3N2O1O1nMeGe0^8YOWH2k7L[HMh72[HJg75[HHh76[HCj7<i1O1O1O2N2Mn_]2\"}}, {\"image_id\": 62, \"category_id\": 1, \"bbox\": [17.20522689819336, 162.56524658203125, 45.89680862426758, 117.60202026367188], \"score\": 0.9999903440475464, \"association_id\": 0, \"segmentation\": {\"size\": [375, 500], \"counts\": \"[n78n:e0F7@a0A=I8F9I7L4N101M3N1M3F:10000O100O100O100000000O10000O1M3@a0]MhGc0EEm94Q1I4M10101N100NYVP5\"}}, {\"image_id\": 62, \"category_id\": 2, \"bbox\": [94.46088409423828, 237.15139770507812, 164.93328857421875, 85.85562133789062], \"score\": 0.9985195994377136, \"association_id\": 1, 
\"segmentation\": {\"size\": [375, 500], \"counts\": \"\\\\]b17Z;KiD7V;6O1O3M7H3N1O1O1O3M2N1N20O10O110O1O00O2M6J4K3K6M4BiD03Mj\\\\84cnG4M2[EGg9=UFEi9=UFDj9>TFCl9>SFCl9?PFCP:R1000000001O00001O0O20O01O1O001O001000O100O0010000O101N1O00100O11O0O1O1N5[ObEI`:4eEBb:;`0K[TQ3\"}}, {\"image_id\": 62, \"category_id\": 1, \"bbox\": [35.46624755859375, 174.1934814453125, 100.07417297363281, 118.0001220703125], \"score\": 0.9523448944091797, \"association_id\": 0, \"segmentation\": {\"size\": [375, 500], \"counts\": \"n]=?U;5N000O11O1N2N2N2N2N8HTo30mPL2N2M2O1O2N0100O100O0001O001O10000O10O010O00100O1O0010O001O100O1O00100000O0010O001O1000O10O010O1O1O001O01O002O0O0010O10O010001N0010O001O100O1O0O101O2Mg]U4\"}}, {\"image_id\": 62, \"category_id\": 2, \"bbox\": [410.345458984375, 249.78573608398438, 29.6278076171875, 4.3937225341796875], \"score\": 0.8511143922805786, \"association_id\": 0, \"segmentation\": {\"size\": [375, 500], \"counts\": \"\\\\kg41f;001O00001O0000001O0000000000000000000000000000000Zge0\"}}, {\"image_id\": 62, \"category_id\": 1, \"bbox\": [444.21343994140625, 188.82040405273438, 12.351104736328125, 65.18075561523438], \"score\": 0.4000343382358551, \"association_id\": 0, \"segmentation\": {\"size\": [375, 500], \"counts\": \"`VS59P:^1F:0000000000001QNbFe07Gdb`0\"}}, {\"image_id\": 63, \"category_id\": 1, \"bbox\": [229.1555633544922, 155.49215698242188, 111.36759948730469, 162.0784912109375], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [327, 500], \"counts\": \"Tm[21Q:6M3M2M3M3N3I6I7O2M2N3J9J5L7I3K5M2jHQNQ6Q2kISNS6P2iITNS6o1jIVNQ6n1lIUNR6Q3N4M4K4M2L4M2N2N1O1H8L4N200O101OO1000O1000000000O1O1VORKXLQ5e3VKULl4i3j000O1O2O0O1N2O1O2[NPJPOT6i0RJTOQ6h0SJTOQ6j0QJSOW6e0PJROX6h0f1N1O1O3M100O2O2M1O2O2N2OO01N2O2M1O101N2OO2OO101O0O10O010O01N4L4KYid1\"}}, {\"image_id\": 63, \"category_id\": 1, \"bbox\": [81.42132568359375, 111.30996704101562, 157.07696533203125, 200.69509887695312], \"score\": 0.9999996423721313, \"association_id\": 2, \"segmentation\": 
{\"size\": [327, 500], \"counts\": \"W]j0Q1Q97M1N2M3H8N2N2O1N2K4B>O2O0O01CaHQN[7P2>M5O101O1M3L3M4O1O0O1O1N20001O00100O1O10O3N1O1O1`H`NW6`1gIdNW6]1eIhNW6\\\\1aITOT6S1YIA^6W2M2N2N1O0N3MnNSJoMl5o1\\\\JmMb5o1hJkMW5W2TKXMb4X3o0701O0100000O10001O00001VKeLf2[3XMiLf2X3XMjLg2W3WMjLi2Y3SMhLm2g3bL\\\\L]3V4oKlKQ4[4eKiKZ4R51N101bJPLb4R4XKULe4i4N1WObJfLa5V3fJdL\\\\5T3QKbLU5l2\\\\1M4L2N3N2M2NoIaM_4]2bKfM[4Z2dKjMZ4U2eKmMZ4T2cKnM^4Q2`KRN`4n1]KUNc4k1[KVNe4k1YKVNh4j1VKWNj4j1RKYNn4i1nJYNS5h1iJZNW5h1fJYNZ5i1cJXN]5k1\\\\JYNd5T3100O1O1K5G9G9N2gNXIROj6l0^IkNd6T1gI_N\\\\6`1V100O1011O002N000O100O2N1O10OO2O00000001O00001N1WOPH[OQ8`0VH]Om7>XH^On7:XHBn76WHEdmf2\"}}, {\"image_id\": 63, \"category_id\": 2, \"bbox\": [195.9064483642578, 284.0616149902344, 87.10633850097656, 40.379730224609375], \"score\": 0.9998939037322998, \"association_id\": 1, \"segmentation\": {\"size\": [327, 500], \"counts\": \"dVo1110U:000j]51jaJ6XFLd9=O1N2O1ON21aF]OZ9b0gF_OX9`0iFAU9?mFBP9>QGCn8<RGFm89TGHk87VGIj87VGJh87XGIh87XGIh87XGIh88WGHi88WGHi89VGGj8:VGEj8;XGBi8?WG_Oj8a0>1N101O1O001O0010O10000O5L0O103M0O106I2O0O2O1O0OQSZ2\"}}, {\"image_id\": 63, \"category_id\": 2, \"bbox\": [79.97476196289062, 257.72576904296875, 130.6625518798828, 48.062255859375], \"score\": 0.99979168176651, \"association_id\": 2, \"segmentation\": {\"size\": [327, 500], \"counts\": \"Pcn01T:2O2N6J2O1O00001O1O001O004L1O1O001O001O0000000000000001O001N2MSl2LRTM5[F]O^9f0O2O2N1O00010O01O01OO101J5N3N1O3K\\\\l25`SM3N2O0001O0000001M3M3M2O2MVe0JQ[O3M1O101N100O1O1O2O0001O00001N3N2N2N1OU`o2\"}}, {\"image_id\": 63, \"category_id\": 1, \"bbox\": [403.3948669433594, 229.60702514648438, 61.774566650390625, 51.7747802734375], \"score\": 0.896142303943634, \"association_id\": 0, \"segmentation\": {\"size\": [327, 500], \"counts\": \"U_T42T:1O2E;M3B=N3L3N200O11O1O00001O00000000000000O10000O1O100O1O100O100000000001O001O1O1I8L3M3K7K6I4MXb;\"}}, {\"image_id\": 63, \"category_id\": 1, \"bbox\": [347.1922302246094, 192.5021514892578, 124.87750244140625, 
76.85789489746094], \"score\": 0.3693290650844574, \"association_id\": 0, \"segmentation\": {\"size\": [327, 500], \"counts\": \"liR43T:1O0O2N1O1jGI\\\\67cIK[66dIK\\\\65cIL]65aIL^65aIL_65_ILa64^IMb62PIJQO;o7IoHb0Q7\\\\OPIe0P7ZOQIf0P7XOQIh0V81O1001O1O001O00000FeFHZ99gFFY9:gFFY9:gFFY9:fFGZ9c00000000EiFFW96nFIS95nFKR95nFKR96mFJS98kFHU99eFC24Y9=fFC[9=dFC\\\\9d01N101M4H:GRa<\"}}, {\"image_id\": 63, \"category_id\": 1, \"bbox\": [126.4034423828125, 148.75880432128906, 342.5605773925781, 157.89109802246094], \"score\": 0.2304760068655014, \"association_id\": 0, \"segmentation\": {\"size\": [327, 500], \"counts\": \"_k_13P:5N101N2O1O0O2O1O001O000O2O00001O0O101O01O1O1O001O1N2O1O1O1O1O001M3N[hi00VQVOLj2<PMFe2f0YMZOd2i0[MYOb2i0^MWO`2k0`MUO_2m0`MSO_2n0`MTO_2l0aMTO^2m0bMTO]2l0cMTO]2l0[MoNUM7^5k0\\\\MPOUM5_5k0RMlNeM5I4_5l0SMkNdM6J3_5l0SMjNfM4K5\\\\5m0^MkNYM7Y5m0iMPOY2P1hMnNY2Q1iMlNY2S1^3O1O100N200O2N1O2N1O2M3N1O3N2M2O1N2N2O0O2N2NlhS10RWlN5M3M2eHKa46]KNa42^K0a41^KOb40^K1b4O^K1b4O^K1c4M^K4a4K`K5`4J`K7`4I`K7a4G`K9`4F`K;`4E`K;b4B^K?e4^OZKc0V7O1N101O1N101L4J`e`0\"}}, {\"image_id\": 63, \"category_id\": 2, \"bbox\": [154.65472412109375, 278.8456115722656, 112.3035888671875, 42.079254150390625], \"score\": 0.10630273818969727, \"association_id\": 0, \"segmentation\": {\"size\": [327, 500], \"counts\": \"]Zd1:l92O1O001O1O001O1O1OO1M4IhY1KbfN1O2N1O2O0O1N3N100O2O001O01O2N100N3N4L001O2NmT41QkK1L4G:N2N2N1O10JdFA\\\\9?eF@Z9?hFCU9=mFCQ9=PGDo8<QGDo8;RGFm8:SGFm89UGFk8:UGFk8:UGGj89VGGj8:UGFj8;VGEj8;VGEj8<VGCj8>UGAl8?UG@k8`0VG^Ok8d0;001O001N20O01O0100O2O4K100O1O6LO0O02N4L00egZ2\"}}, {\"image_id\": 63, \"category_id\": 2, \"bbox\": [204.25807189941406, 163.84664916992188, 95.93006896972656, 152.2987060546875], \"score\": 0.09154308587312698, \"association_id\": 0, \"segmentation\": {\"size\": [327, 500], \"counts\": \"`TX22S:7KN101O01N200O10O001001N2O1O1N2O0O101O1O1O1O0O3N1O1100O1N4L4L2N4JXe\\\\2\"}}, {\"image_id\": 63, \"category_id\": 1, \"bbox\": [296.4076843261719, 133.59217834472656, 
178.66769409179688, 125.90510559082031], \"score\": 0.07807169109582901, \"association_id\": 0, \"segmentation\": {\"size\": [327, 500], \"counts\": \"ddo21mg50YbJ0O101N1O1O2N100000000000000001O1N2N3NZd73b[H3jFJV89hGGX8:gGFY8<h01N2O1O001iF_Of8a0ZG_Of8b0YGBc8d0WG\\\\Oi8e0VG\\\\Oi8e0VG[Oj8f0UGZOk8f0SG\\\\Om8d0RG]On8n0000O10000001O0000000000001O0000001O001O1O001O1O00001O00001O000RHbNS7^1lHcNT7]1lHdNS7\\\\1kHfNU7Z1jHgNV7Y1jHgNV7Y1jHgNV7X1kHgNV7Y1jHgNV7Y1iHfNY7Y1hHgNX7Y1iHdNY7\\\\1i0O1O100O1O10000O100O1O1O100O1000000O100O1O1N2N2O1O100O1O1M3HcFD`9:7O1O1N2O1Mkn<\"}}, {\"image_id\": 64, \"category_id\": 1, \"bbox\": [19.355716705322266, 124.60393524169922, 124.01779174804688, 142.96075439453125], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [480, 640], \"counts\": \"[V?2k>6L3M2N3lBD_;d0UDDg;b0RD@m;c0PD_On;d0oC]OP<f0lC\\\\OS<g0jCZOT<i0iCYOU<j0iCWOU<m0gCUOV<l1O1O1O1N200O1O1O1N2O100O100O1O1O1O1O100O1O1O1O100O100O1O100O1O1BdLXE\\\\3f:jLUEW3i:mLTET3j:a00K5M3N2O1O10100000O1000O10000000000000001N1O1O2N2N2O2M2fN]DUOd;K]DH3:h;DZDO1:o;\\\\OUD7N:Q<YOUD<K9j<EXC:i<EXC9j<FWC9j<FXC7i<JWC4k<KVC3k<MVC0m<OXCKi<5Q1000010O00001O001N2O1N2MRa\\\\7\"}}, {\"image_id\": 64, \"category_id\": 1, \"bbox\": [286.1286315917969, 70.74881744384766, 127.2987060546875, 191.92987060546875], \"score\": 1.0, \"association_id\": 3, \"segmentation\": {\"size\": [480, 640], \"counts\": \"^YV4;a><F5LO01O001O000000000100O0O110000O2N10000YN^OPEb0n:NcD4];3XD1g;5RDNk;c10001O100O10N2000O10N3N1O2O0N2L4O1O100O1M4N100M2O1010O2J6^Ob0O002N1O1N3N102N1O1O1O1O1O2N2M200O2O1O1O00005J6K1O004M9kEjKl8W4kFPLU9n40O2O1cLhFn0Y9POlFl0T9TOnFk0R9TORGg0o8XOWGc0i8]OZG`0f8@[G?f8@\\\\G>d8B_G<`8DiG^NXOd0P9n0iHQOW7n0lHPOT7o0nHPOQ7P1RInNn6R1TIkNm6T1UIkNl6Q1XInNh6n0\\\\IROf6i0^IVOc66`F^OP3;`65dF]Oo2<]66WJIj54YJKg54[JKe54\\\\JLe53\\\\JLd53_JJc55`JH`57cJF^59g4N6HfQ^3\"}}, {\"image_id\": 64, \"category_id\": 1, \"bbox\": [446.23406982421875, 103.20403289794922, 54.386138916015625, 80.0809555053711], \"score\": 
0.9999979734420776, \"association_id\": 0, \"segmentation\": {\"size\": [480, 640], \"counts\": \"`Tb62:OZ>0Z\\\\1f0XcN5N3L3\\\\BVOn<m0mBVOQ=m0kBUOT=[11FjBiNV=W1lBgNU=X1lBhNS=[1jBfNV=b10001N3N7I00O10000O1L401N101N2cNiBn0X=oNmB`00ZOL0Z=3lB>j0]OX<;nCGQ<8]1N3N1O1O001N1000001NoYT2\"}}, {\"image_id\": 64, \"category_id\": 2, \"bbox\": [2.8237643241882324, 259.7663269042969, 58.10858154296875, 12.42681884765625], \"score\": 0.999987006187439, \"association_id\": 1, \"segmentation\": {\"size\": [480, 640], \"counts\": \"Xe12m>2O0O100000000000000O10000000000O101O00001O0000001O00000000001O0001O001O2N1O001O001O001O00001O00001O010O00aPa8\"}}, {\"image_id\": 64, \"category_id\": 2, \"bbox\": [392.38311767578125, 177.13951110839844, 104.27044677734375, 7.7676239013671875], \"score\": 0.9999635219573975, \"association_id\": 2, \"segmentation\": {\"size\": [480, 640], \"counts\": \"c]o51o>00001O0000000000001O00000000000000001O00000000000000000000000O10000000000000000000000O10000001O0O1000000000000000000000000000000000001O000000001O0OQl10PTN00001O0\\\\VU2\"}}, {\"image_id\": 64, \"category_id\": 1, \"bbox\": [497.5140686035156, 110.48709106445312, 63.859588623046875, 75.65469360351562], \"score\": 0.9996305704116821, \"association_id\": 2, \"segmentation\": {\"size\": [480, 640], \"counts\": \"SbY73l>3M2O00M3O1O2O0Ocn08VQO6fAEd=a0TBAk=k0O1O001O001O01O1N2O1O0O2M2O00O2YOCjB?U=BhB`0W=f0O1N2N1O100O1O00VOnB^O2CS=o0mBPO00^=S164N3O1O0000O11O00OPChNZ<W1eCmNY<S1fCoNZ<o0fCRO[<n0cCSOg<d0TC@n<W13M4I7eN\\\\BQ1S>F5J_lT1\"}}, {\"image_id\": 64, \"category_id\": 2, \"bbox\": [62.291290283203125, 242.3994598388672, 307.03497314453125, 20.508132934570312], \"score\": 0.9644847512245178, \"association_id\": 3, \"segmentation\": {\"size\": [480, 640], \"counts\": 
\"jTf11n>101O000O100000000000000000000000000000000000000001O0000000000000000000001O00000000000000000000000000000000000000000000000000000000000000000001O0000000000O10000000000000000000001O0000000000000000001O0000001O000000000001O0001O0000000000000001O01O000000000000000001O01O00000000000000001O01O00001O10O000001N102NmU<OSjC2O0000O10RVn4\"}}, {\"image_id\": 64, \"category_id\": 1, \"bbox\": [337.323486328125, 77.38774108886719, 104.5257568359375, 88.40925598144531], \"score\": 0.7411049008369446, \"association_id\": 0, \"segmentation\": {\"size\": [480, 640], \"counts\": \"]Rn4>1Ea=;fBg0k<]ORCe0m<g0O10000000000O100000O10000O100O100O11O1O1O0001O0000101O10O02M2N1O01O1O0PC\\\\Nd<e1XC_Nh<l101O01O0O1O2O000O1`NVCj0j<VOXCg0j<WOXCg0i<YOXCf0h<ZOYC0F4R=L\\\\CEM;h<OmCOS<1oCLS<3oCJR<6oCIQ<7PDGQ<9\\\\1O000010O01OdBIo;8mCLR<4mCMS<3mCNS<2lCNT<2lCOT<0lC1T<OkC1U<OlC0U<0mCLT<4nCIT<6lCJT<6lCJU<5kCKU<6jCJV<6kCIU<8jCHV<9iCFY<:V1O100O101M3N2N2Ncg33[XL10NPVm2\"}}, {\"image_id\": 64, \"category_id\": 1, \"bbox\": [607.3923950195312, 120.86125183105469, 12.89508056640625, 11.739639282226562], \"score\": 0.5819493532180786, \"association_id\": 0, \"segmentation\": {\"size\": [480, 640], \"counts\": \"mSm83k>4M2O0O2O01N101N2NUV:\"}}, {\"image_id\": 64, \"category_id\": 1, \"bbox\": [432.8223876953125, 92.66644287109375, 24.908111572265625, 77.52438354492188], \"score\": 0.2278883159160614, \"association_id\": 0, \"segmentation\": {\"size\": [480, 640], \"counts\": \"n`[65j>2O1ZAJ`>>M3]BB^<a0]CF]OIg<d0aC3a<4VCNj<V1N2N11O7H1O1O0O3QOnB\\\\O3OT==`C^Oc<5eBK`>0iRg2\"}}, {\"image_id\": 64, \"category_id\": 1, \"bbox\": [466.2640380859375, 106.94813537597656, 54.81658935546875, 78.55052185058594], \"score\": 0.14861249923706055, \"association_id\": 0, \"segmentation\": {\"size\": [480, 640], \"counts\": 
\"_aj6<f03e<8jB\\\\OLf0V=LUC5i<IYC8f<F]C9c<F^C:b<F^C9c<G^CB^Oc0T=LaC2`<NaCOa<1dCI]<8hB@i05`<?`C_Oa<3jB1V>LnA2S>KoA6R1Kl;7PDKo;6QDIP<5`1L3N1O2ORj21lUM2O010O1O2M4L4N0O12O2M3K2OXALb>;N6L0fAAR>i0K4L3VBnNd=X1O1001N0O2Lgdg1\"}}, {\"image_id\": 64, \"category_id\": 2, \"bbox\": [16.785663604736328, 232.85671997070312, 80.38825988769531, 37.085601806640625], \"score\": 0.1285533905029297, \"association_id\": 0, \"segmentation\": {\"size\": [480, 640], \"counts\": \"VW84k>2O0O101O0O10000O2O001O000O10000001O1O2N1O2N1O1O001O001O1Ocfe8\"}}, {\"image_id\": 65, \"category_id\": 1, \"bbox\": [205.52716064453125, 172.59619140625, 64.42001342773438, 182.6160888671875], \"score\": 1.0, \"association_id\": 3, \"segmentation\": {\"size\": [427, 640], \"counts\": \"oSf2Q1W<6H8J5I5N3L6K6K6K8G4K;F5K6J4M2N3M3Lb0_O2L3L3E\\\\KlGh4o7nNnGcMo7Z2[HbMc7_2PInLQ7Q3VIgLl6Y3c1002N3M3M7I<WGnKa7Q5M1O1O11N6cJXHk4W8L3L2F_GhKd8j3e0L5K6F:A>WNXFHn96UFGo96ZF^Ok9`0ZFUOo9i0]1N0020O000O02OO1ON4M2KaQj4\"}}, {\"image_id\": 65, \"category_id\": 1, \"bbox\": [463.7024841308594, 205.11264038085938, 106.53036499023438, 114.57861328125], \"score\": 1.0, \"association_id\": 2, \"segmentation\": {\"size\": [427, 640], \"counts\": \"mbR63U=3O2N1O1O100O10000000O1O1O1O10000O10O1O000010OTDIf:7YELe:3ZE0d:0\\\\E1d:N\\\\E4b:L^E5`:M`E3`:L`E5`:K^E6S9DYG7B7S9HWG1D9S9IXGND:R9LVGME9T9MTGKZOf0`9BTGIZOg0b9ARGJYOf0f9AmFX1R9kN`F`1a9U10O1_Oa0N201M2F;L4N3M5J5L2O2N10O010O1O1O1000;E1N10O02OO100O001O1O100[OkDROV;l0mDROT;l0nDSOT;j0nDVOS;f0oD[OQ;c0PE]OQ;a0QE^OR;>oDBT;9oDGQ;7PEIR;4PEKS;1nDOT;NnD1S;MnD3U<0001O00001O001O0100O2N1O1O2O00Llll0\"}}, {\"image_id\": 65, \"category_id\": 1, \"bbox\": [378.8447570800781, 174.14569091796875, 71.404296875, 126.77249145507812], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [427, 640], \"counts\": 
\"lRo46Q==D9D9K3L3O1O1O1O1O1O2O002N2M1N3M2C?M2M2O1O2O00010Oh0XO3N4L1O00000>B0mN[F_Ne9`1]F^Ne97VF59^Oc9;VF7l:HUE7k:HWE8i:FYE:g:D[E=d:A^E`0a:_OaEa0]:_OgE`0W:_OjEa0U:^OoE`0P:@QFa0m9^OUFc0j9[OZFd0d9\\\\O\\\\Fd0d9]O^F`0a9AbF<^9CcF<_9CcF:]9GcF8]9IbF7^9JbF5_9KaF3a9L`F3a9LaF1b9LP2NW`a2\"}}, {\"image_id\": 65, \"category_id\": 1, \"bbox\": [148.72097778320312, 196.11988830566406, 41.62196350097656, 67.59361267089844], \"score\": 1.0, \"association_id\": 4, \"segmentation\": {\"size\": [427, 640], \"counts\": \"S\\\\n14U=4M2M2N2O1N01L4O100O02M2L4M3L5M2N2N1N2JSOPDn0o;71O10000001O1\\\\DlNo:i1O10O2WNbD2>i0o;M2CeCF_<2kmm5\"}}, {\"image_id\": 65, \"category_id\": 2, \"bbox\": [413.3306579589844, 295.37158203125, 67.11563110351562, 11.95355224609375], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [427, 640], \"counts\": \"[P_55V=001O0000001O0000000000001O0O100000000000000001O001O1O0O101O00000000000000000000000001N10000000000004KdUV2\"}}, {\"image_id\": 65, \"category_id\": 2, \"bbox\": [224.35482788085938, 343.0352783203125, 84.62579345703125, 28.361968994140625], \"score\": 0.9999998807907104, \"association_id\": 3, \"segmentation\": {\"size\": [427, 640], \"counts\": \"V]o23X=0O3N1O1O3M100O0010O10O0000010O00001O0O101O001O00001O0010O000000010O000000001O00000000001O000000001O00001O00010O0010O01O010O000000000000N2M30000O11O00001N3N5J4J`eY4\"}}, {\"image_id\": 65, \"category_id\": 1, \"bbox\": [46.0318603515625, 158.16920471191406, 93.78480529785156, 218.9900665283203], \"score\": 0.9999997615814209, \"association_id\": 5, \"segmentation\": {\"size\": [427, 640], \"counts\": \"oVd04P=l0XO6L3K6K5L3N2N3L3N2L5K5K3M6YOk0F7K7H?B7cH`K`5f4YJ^Ke5l4nIWKR6P5dITK[6n4aIUK^6m4_ITKa6T5UIoJj6j5O00000001O00001M4gMgIZM`6mNiHU3P1kL]O4Z7l2_IhLc7T3]1M3N1O0010O1O3N1N2O2M2O1O01O01N2O000O10kL[Fc2e9XMfFc2V:L2L6K6I>C?@=D2N2O00O0101O00O10O100103N1M1O1N10000000O1N2M3L4Igb`6\"}}, {\"image_id\": 65, \"category_id\": 2, \"bbox\": [64.37731170654297, 361.7147216796875, 81.05387115478516, 
39.181365966796875], \"score\": 0.9999996423721313, \"association_id\": 5, \"segmentation\": {\"size\": [427, 640], \"counts\": \"VZl05U=2N2M2O2[OEXD;h;FUD<j;HRD9n;IPD8o;b000001O000000001O001O011N1O2N2OO01O0010O01O001O00001O000000000O010000O010O10O010O10O10O100O10000O010000000O0100000O0100000A^C4c<K_C4a<L_C4a<L_C3b<M_C2b<L_C3c<K_C4l<OWQ^6\"}}, {\"image_id\": 65, \"category_id\": 1, \"bbox\": [588.0860595703125, 191.67724609375, 28.44195556640625, 57.73857116699219], \"score\": 0.9999995231628418, \"association_id\": 7, \"segmentation\": {\"size\": [427, 640], \"counts\": \"ldf72W=5M0O1\\\\OMPD3P<NnC2S<0iC2W<a001fCZOm;S1O001O14L2N1O1O1AkCEY<3kCMV<LoC6b<2N1O10O2Nmh9\"}}, {\"image_id\": 65, \"category_id\": 2, \"bbox\": [501.5058898925781, 312.96343994140625, 97.39730834960938, 14.941619873046875], \"score\": 0.999998927116394, \"association_id\": 2, \"segmentation\": {\"size\": [427, 640], \"counts\": \"`da61Z=0O1000000000000O100000O0100O10000000000000000000000001O00000000001O000000000000000001O00000000001N10000000001O00001N2O1O1O1O000000001O0000000000000001O00001O0000001O0O10001O0O100O2O0O2NUQb0\"}}, {\"image_id\": 65, \"category_id\": 1, \"bbox\": [100.69927978515625, 189.2644805908203, 49.02070617675781, 112.64857482910156], \"score\": 0.9999657869338989, \"association_id\": 6, \"segmentation\": {\"size\": [427, 640], \"counts\": \"_W]11<5Z<NdC<GGR<OVDi0d;XO[Di0d;WO\\\\Dm0`;QObDQ1`;kN`DW1^;mN]DU1m;0N2M1O201O0000001O010O11O0O100001O1O1OO3bNXDT1j<UO4M01O01N1O1O11N10000O10LRR\\\\6\"}}, {\"image_id\": 65, \"category_id\": 2, \"bbox\": [159.80490112304688, 261.01446533203125, 35.71917724609375, 4.680511474609375], \"score\": 0.999941349029541, \"association_id\": 4, \"segmentation\": {\"size\": [427, 640], \"counts\": \"dlU21Z=0000000000000000001O0O1000000000000000000000000001NTgi5\"}}, {\"image_id\": 65, \"category_id\": 1, \"bbox\": [552.6354370117188, 193.9220733642578, 24.69818115234375, 52.617279052734375], \"score\": 0.9998759031295776, \"association_id\": 8, 
\"segmentation\": {\"size\": [427, 640], \"counts\": \"kcW76U=1`C0c;n0N2N2N3N22L2N1N1EnC[OU<a0oC[OU<a0k0_OR`22l_M1cQj0\"}}, {\"image_id\": 65, \"category_id\": 2, \"bbox\": [108.35946655273438, 296.0379943847656, 49.606353759765625, 10.24407958984375], \"score\": 0.999396800994873, \"association_id\": 6, \"segmentation\": {\"size\": [427, 640], \"counts\": \"cZb12Y=001O1O1O1O001O000000O10O10O1000000O01000000000O1000000O100M300000i[Z6\"}}, {\"image_id\": 65, \"category_id\": 2, \"bbox\": [613.852294921875, 245.7083740234375, 24.203857421875, 4.467559814453125], \"score\": 0.9993544220924377, \"association_id\": 7, \"segmentation\": {\"size\": [427, 640], \"counts\": \"iXP81Z=1N100001O000000000000000000000000000000001O00nb0\"}}, {\"image_id\": 65, \"category_id\": 2, \"bbox\": [569.0889282226562, 242.91432189941406, 24.3843994140625, 4.189300537109375], \"score\": 0.9806766510009766, \"association_id\": 8, \"segmentation\": {\"size\": [427, 640], \"counts\": \"YP`71Z=00000]e10cZN00000O2O000001Okhc0\"}}, {\"image_id\": 65, \"category_id\": 1, \"bbox\": [341.3095703125, 8.891812324523926, 152.833740234375, 189.52645874023438], \"score\": 0.4206327497959137, \"association_id\": 0, \"segmentation\": {\"size\": [427, 640], \"counts\": \"eg`4746Z<f0G6J5I8F:H:H8I5M5J5L3M2M2O00000000001O2N2Ng0YO3M1O1O1O1O001OO1O1N2M3N2O1O100O1O100O1M3K5N2O1O1O1YOZFiMf9U2^F_MLOf9a2`F]MONb9d2`F]M0N`9d2k0N2O1O1O1000000O100O11O1O000000001O001O001O001O0000001O00001O0000001O0000001O00000000001O1O001O1O1O00001O00001O0010O01O01O2O1N100O101N3M2N2O0O2N2N2N2N1N2N3M3K5UOfDZOb;e0aDoNi;o0=K5J7J6J6Kdgo1\"}}, {\"image_id\": 65, \"category_id\": 1, \"bbox\": [86.34165954589844, 194.594482421875, 88.46600341796875, 89.8868408203125], \"score\": 0.23827332258224487, \"association_id\": 0, \"segmentation\": {\"size\": [427, 640], \"counts\": \"id]11Z=1O0O2O0_Y1NafN4J5N10001O1O001O1N2O2N1O1O3M1O101N2c0\\\\O0O100O11N0100000O010O01O1001ZOJTD7h;KYD4d;0\\\\D0c;0^DO`<O1O1O001O1O1NaQW6\"}}, {\"image_id\": 66, 
\"category_id\": 1, \"bbox\": [19.11474609375, 108.1940689086914, 150.7677001953125, 111.62439727783203], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [559, 559], \"counts\": \"TX=;l`0b0g_OYOT?n0e@XOV?b1M4L;D4M2O2N1O1M3N2O1O1N2N2M3O1O100O1O100O1O010O100O10O0100O100000O10O0100000000000000000000000000000000O10000000000000000O100O1000000O1000000000000000000000000000000O101O0000001O1O001O00001O0010O01O1O10O01O2O0O100O1O1O101N2N1O1O2O1N1O1O100O3N2M2N101N2N6J1N2O1O2N8H3M1O002M4M4J4L2N3L5K`Ud6\"}}, {\"image_id\": 66, \"category_id\": 1, \"bbox\": [238.11375427246094, 197.5048828125, 224.4815216064453, 315.2889404296875], \"score\": 0.9999998807907104, \"association_id\": 2, \"segmentation\": {\"size\": [559, 559], \"counts\": \"Y^Z4l0h?\\\\1\\\\O:G5K3M4M2K5I8A`0\\\\Of0C<H6K5K4M3L4L4K4K6L4L3O2N2N2N2N2O0O2O100O2O1N3M2O0O1O1O2N1O4Le0[O6J3M3M100O101O1N2O1O1O0O010O10O01O10O01O1O2O0O1O1O010O00000001O00001O00000000O10000000000O1000000O10O10N101M1G:M2O2O00100000O2L3M4L3N3O1N2O1N2N2O1N2O1O1O1N2M3M3M3N2N2N2SL`FIb96dFC^9;jF\\\\OY9c0lFUOX9i0nFlNY9S1kFfNY9Y1jFcNX9\\\\1jFbNW9]1kF`NW9_1lF^NU9a1nFZNU9f1nFRNW9m1nFfM[9Z2gF`M]9_2fF\\\\M]9d2eFXM]9h2fFSM]9k2lFcL^9]3Z201O000O2fNVBmNk=Q1[BiNh=U1ZBhNi=U1\\\\BfNg=X1^BaNg=[1Y1N4L4K6K2O2M2N1O2N100O1O1N2O1O001O1N101N3M3M9FQdU2\"}}, {\"image_id\": 66, \"category_id\": 2, \"bbox\": [27.376434326171875, 190.7593536376953, 143.4217071533203, 45.4903564453125], \"score\": 0.9999912977218628, \"association_id\": 1, \"segmentation\": {\"size\": [559, 559], \"counts\": \"U`?1[a05M2O001O1O010O00001O000O2O00000O2O000O10000O10000000000000000000000000001N10000000000000000000000O10000000000O100000000000000O100000000000000O10000000000O100000000000000000000000000000000001O0O1000001O001O0000000000001N10001O00000O10001O000000000O100000000O10000000000O2O0O101N102M^Td6\"}}, {\"image_id\": 66, \"category_id\": 2, \"bbox\": [267.8365173339844, 427.0531311035156, 245.38375854492188, 117.12918090820312], \"score\": 0.9999799728393555, 
\"association_id\": 2, \"segmentation\": {\"size\": [559, 559], \"counts\": \"Rjc48Va03M2O001O100O1O100O10O1O1N3L\\\\[W1NgdhN3L4M2N2N1N2L5M2N2O2M2O1O1N3M2M4J5O2N1O2N100O10000O2O00000010O0100O010O0010O01O1O100O1O0TARNX>n1eAUNZ>k1eAVN[>k1cAWN]>h1bAYN^>h1`AZN`>Z201O0001O00001O001O2N1N2O001O001N2O1O1O1O1O001O1O0O2O1O1N2O001O0O10000O1000000O10000O010O10O0100O010000O010O010OkNa@;`?Dc@:\\\\?Ge@7\\\\?Hf@7Z?If@7Z?Hh@6X?Kh@5X?Ki@3X?Li@4W?Lj@2V?Nk@1W?Nj@1V?Nk@1W?Mk@2U?Mm@1U?Mo@MU?1V1N101N2N1MnmW1\"}}, {\"image_id\": 66, \"category_id\": 1, \"bbox\": [454.2298278808594, 300.30267333984375, 86.46066284179688, 127.47854614257812], \"score\": 0.9999487400054932, \"association_id\": 0, \"segmentation\": {\"size\": [559, 559], \"counts\": \"agh7d0h`04M3O0O2N1O2N101O0O2N1N2O2O1O2N2M3M3M2N2O1O1N10000O2O0ATNXAo1f>ZNQAh1l>?OO2O01O1000O100002N2N10O01O1O001O1O2N1O1O1O4cAUMn=W30O001O3iLSBf2^>M100O2N1O1O2N1O1O2M2O1Bn@]NR?[1[A_Ng>i0[1M3L4N2O101O0O2N2N2M4LdS;\"}}, {\"image_id\": 66, \"category_id\": 1, \"bbox\": [471.375, 258.00982666015625, 37.095428466796875, 61.30902099609375], \"score\": 0.986035943031311, \"association_id\": 0, \"segmentation\": {\"size\": [559, 559], \"counts\": \"`nQ86Va05M2N2O0O2M2O2N1N2N2ODd_OF\\\\`08i_OGT`09m_OJo?5S@Mj?3V@0h?0W@3f?OW@4i?JV@:g?EZ@<d?D^@<b?D^@<a?Db@:^?Ee@;Y?Dl@6U?LV1O200N2O1O1O1O2Niel0\"}}, {\"image_id\": 66, \"category_id\": 1, \"bbox\": [25.340808868408203, 363.8006896972656, 255.65489196777344, 115.2672119140625], \"score\": 0.12387631088495255, \"association_id\": 0, \"segmentation\": {\"size\": [559, 559], \"counts\": \"PR>;b`0j0D:F6M7J00O2O0N3M3N101N2O000O2O0O2O0O1O1O2N1O010O10000O01000O1000O01O01O0100O010000O0100O01000O10O11N1O1O1O1N3M2N2O1O2O000O2O0O1O1O2O0O101O0O3M2MPgj7\"}}, {\"image_id\": 66, \"category_id\": 2, \"bbox\": [457.2190856933594, 371.89190673828125, 61.688751220703125, 48.078643798828125], \"score\": 0.10796555131673813, \"association_id\": 0, \"segmentation\": {\"size\": [559, 559], \"counts\": 
\"`em71^a0000Re70nZH1j^O3h`0NW_O3h`0MW_O4i`0NU_O3j`0NU_O2k`0OS_O2n`08OGT_OOl`01V_OMj`03V_OMj`02W_ONi`02W_ONh`03Y_OLg`04Y_OLh`03Y_OJi`05:O1O1ObXo0\"}}, {\"image_id\": 66, \"category_id\": 2, \"bbox\": [528.5070190429688, 413.3683776855469, 21.07586669921875, 22.884735107421875], \"score\": 0.06927859783172607, \"association_id\": 0, \"segmentation\": {\"size\": [559, 559], \"counts\": \"RVQ92\\\\a02N1O101O001N1102M1O1O1O10N2O1O0O1001NaQ5\"}}, {\"image_id\": 67, \"category_id\": 1, \"bbox\": [481.2207946777344, 78.82611846923828, 170.19515991210938, 106.37999725341797], \"score\": 1.0, \"association_id\": 0, \"segmentation\": {\"size\": [250, 1000], \"counts\": \"Pie36b74M2O0O2N3M2O2M2O1N6J9G3M3M1O2N2M3K6M2O0O2N2M2M4O1N2O001O00001O00001O1O001O001O00001O001O001O000^K^Ml3b2RL`Mn3a2PL`MP4`2PLaMo3`2oKaMR4^2nKbMS4^2kKcMV4]2iKcMW4]2iKcMX4]2fKdM[4[2eKeM\\\\4[2bKfMe4U2XKlMi4b21O00001O1O1O00000O100000000O2O0O10000O10000O1O100O1O10000O100O2O001N101O00001N10001O001N101O0O2O1O001N2O001N2O1N101N2O1N2O1N10001N101N101O00001O0O101O1O001O000O2O00001N2O1O001O002N1O001O001O2N2N001O1O001O2N1N101N5IPRe2\"}}, {\"image_id\": 67, \"category_id\": 1, \"bbox\": [812.9723510742188, 17.779216766357422, 149.2760009765625, 68.33343505859375], \"score\": 1.0, \"association_id\": 0, \"segmentation\": {\"size\": [250, 1000], \"counts\": \"\\\\hW6<[75M2O1N2M4I7M2N1O2M2I8N1O2O0O1M4O0O2O0O100O101N10000O100000000O100000000000000000000O1000O10O10000000000000000000000000O100000000000000O1000000000001O00001O0000001O0O101O0O10001O0O10000O100O1O1O10000O101N1O100O1O2O0O100O1O1O1O100O100N2O2N10001N2O001O2M2N3L3K5N2N2N2Lle;\"}}, {\"image_id\": 67, \"category_id\": 1, \"bbox\": [23.431720733642578, 87.74626922607422, 213.94253540039062, 76.7682113647461], \"score\": 1.0, \"association_id\": 0, \"segmentation\": {\"size\": [250, 1000], \"counts\": 
\"Sn75d72N2O0O2N1O2M201N10000O001N1N2010O10O1O0O1N2000010O10000O010O100O11O00000001O01O0000100001N100O0100O1002N2M10O01O00001N2O1O00001O0O2O1O1N101N10000O1O1N2O100O100O1O2M2O1O100O100O100O1O100000O01000000O1000000O100000O1000000O10000O0100000O10000O100000000O10000O100O1000000O10000O10000O10000O100O100000000O100000000O100O100O1O10000O10000O1O10000O2O000O1O1O100O2O0O2O1M5POWIa0NBT7:8N1N3N2NeRn5\"}}, {\"image_id\": 67, \"category_id\": 2, \"bbox\": [359.92266845703125, 45.58211135864258, 137.72024536132812, 57.4363899230957], \"score\": 0.9999998807907104, \"association_id\": 1, \"segmentation\": {\"size\": [250, 1000], \"counts\": \"Sej2214`74N2L5K3O100O1O0O10100O0O2O1O100O1N2N2O1O1O1O1N2O100O10000O100000000O100000000O01000000O10O10000000O10000O10000O100O10000O100O10000O10000O1000000O1000000O101OO10O1000000O10000O1000O01000O010000O10O0100O1O001O100O1L4N2O100N2N200O100O1O1O100O100O1O101NXhj3\"}}, {\"image_id\": 67, \"category_id\": 1, \"bbox\": [663.39697265625, 173.38983154296875, 137.6875, 75.1922607421875], \"score\": 0.9999998807907104, \"association_id\": 2, \"segmentation\": {\"size\": [250, 1000], \"counts\": \"nYR55c73N3M3M3L5L2O1N4M5J3M2N2O1L5M3M2N1O2J6L3O2N1O1N20001O0O100000000000O1000000000000000000000001O000000001O001O00001O00001O0000001O000O100000000000000O100000000O100000000O100000000O100O100O2O000O2N1O100O1O2M2N2O1O100M4L3O2O0O1N2N2N200O10O0100N3N1N2N2M3O2M3N2N1O2Ngka1\"}}, {\"image_id\": 67, \"category_id\": 1, \"bbox\": [309.642333984375, 16.638696670532227, 84.45709228515625, 92.4363784790039], \"score\": 0.9999996423721313, \"association_id\": 1, \"segmentation\": {\"size\": [250, 1000], \"counts\": \"WW\\\\2?V77M2N3L3N2M2M4CnNoIT1P6nNkIU1U69O0O2O1O1N2O2N1N3N=B3N1N2O0O1O1O2N1O1O1000O100000000O100000000001O00001O0010O6J2N1O2N1O1O1O1YOgJkNY5S1jJkNW5T1lJiNU5V1nJhNS5V1PKgNQ5X1j0N2M200O2N2N2N1O2N1O2N3L2O2N3L4M2M[dd4\"}}, {\"image_id\": 67, \"category_id\": 2, \"bbox\": [747.05615234375, 183.97862243652344, 81.195556640625, 56.226715087890625], \"score\": 
0.9998922348022461, \"association_id\": 2, \"segmentation\": {\"size\": [250, 1000], \"counts\": \"\\\\^l55d70O2O100O001O1O1N101M3N2N2K401N1I8000O01000O2O001N101N10000O1O2N1O1O1O2N1O100O1O100N2O1O1O1O1O100O100O2N101M\\\\YZ1\"}}, {\"image_id\": 67, \"category_id\": 2, \"bbox\": [159.03189086914062, 101.19139099121094, 99.62301635742188, 41.15997314453125], \"score\": 0.31481054425239563, \"association_id\": 0, \"segmentation\": {\"size\": [250, 1000], \"counts\": \"Tfa11i70N101O1O1N1O200M3O010N2O100O0100000JCnH=Q7EoH;Q7EnH;S7FkH;T7510O010O0100O100O10O10O10000O1O010O1O1O1O1J6O10O10O1O011N101O^Ye5\"}}, {\"image_id\": 67, \"category_id\": 2, \"bbox\": [881.5335693359375, 33.703792572021484, 98.11102294921875, 46.33915328979492], \"score\": 0.2746637165546417, \"association_id\": 0, \"segmentation\": {\"size\": [250, 1000], \"counts\": \"gSS71f74O1N0O2O10O0001O010O00100JGiH:T7800N2O1O10O0100O100000O2O000O100O10001O0O100O101O0O100O100O1O2O0O2N3Llj4\"}}, {\"image_id\": 68, \"category_id\": 1, \"bbox\": [279.1902160644531, 204.61331176757812, 78.23959350585938, 141.89669799804688], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [427, 640], \"counts\": \"eje38R=2O1N2O1M2gCE`;>YDMa;5\\\\DO_;S1K5iDZNb:i1YE\\\\Nc:X2M3N1N3M2O2N2M3K6J5M4K3N3N10000000O100O10000001O1O3M6K011O20M4L1O1N1O1O00001O1O2N001O0]ObFYM_9e2eFWM]9g2e0N3M6J4M4K4M1O2N2M2`NjDh0Z;ROVE<Q;@VE8Sch3\"}}, {\"image_id\": 68, \"category_id\": 2, \"bbox\": [327.5010986328125, 340.7140197753906, 141.37130737304688, 16.255462646484375], \"score\": 0.9999978542327881, \"association_id\": 1, \"segmentation\": {\"size\": [427, 640], \"counts\": \"fk\\\\43W=1O101O0O100O1O1000001N100000FQC8o<HQC8Q=00000O10000000000O101O0000000000000000000000000000000000000010O0000000000000010O000000000000000001O0001O000000000000000001O000000000000000O2O0000000000000001O00000000000001O000000000000001N100000001O001O000O10001O000O10aQY2\"}}, {\"image_id\": 69, \"category_id\": 1, \"bbox\": [154.66207885742188, 
181.99424743652344, 48.453094482421875, 100.93324279785156], \"score\": 0.9999998807907104, \"association_id\": 2, \"segmentation\": {\"size\": [480, 640], \"counts\": \"ciZ27d<5kD5S;6YD6d;d1O1O100O1N10@QDQNm;n1=51O1100mCkMZ;W2cDkM];V2`DlMa;T2]DmMc;i2O01O1O2N2N2N2N1N102N1O1O2N1O2N5Ke0[O3M9G5K6I5K:_Om`]6\"}}, {\"image_id\": 69, \"category_id\": 2, \"bbox\": [190.42901611328125, 269.0072937011719, 83.40890502929688, 16.163177490234375], \"score\": 0.9999998807907104, \"association_id\": 2, \"segmentation\": {\"size\": [480, 640], \"counts\": \"iTl21n>2N101N1O100O1000O100000O10O10000O1000O0100O10000O010000000000O10000O01000000O10000O1000O1000O100000000000000O100000000000000O100000000Oc`_5\"}}, {\"image_id\": 69, \"category_id\": 1, \"bbox\": [318.29815673828125, 168.36607360839844, 169.09274291992188, 192.73744201660156], \"score\": 0.9999997615814209, \"association_id\": 1, \"segmentation\": {\"size\": [480, 640], \"counts\": \"bge4V1_=?J5M1O2M3N2N2N2N1003N:E10WOWNXDi1f;XN[Dg1e;ZN[Df1c;[N]D9\\\\Ok0V<nN^D1CP1n;oN`DKIT18mNb:5RFm0YOSOc:NTFn0]OPOb:1PFo0Q;nNoDR1n;TOoBm0P=UOoBj0R=VOmBk0R=WOmBi0R=XOnBh0Q=ZOnBf0R=ZOnBf0R=[OmBe0R=\\\\OnBd0R=^OjBd0U=e0O001N1010O10O10O010O010O100O1000N2O2M3N2M3N0O2M2N3mNoMRET2k:UNlDn1S;Q1N2N2M3N2N2N2O1O1O1O2N1TFYL]8h3^G^L`8b3^GbL`8`3[GdLd8^3WGgLg8]3QGgLo8f4O001O0000000001O002N4]JPGn4b9O1O1O1N2O2N1N2N2M3L4I8I6L4M3N2O1N6lLoDo1U;kMoDS2R;kMPES2R;kMPET2R;iMQEU2Q;dMUE[2i;O001O001O1O2N1O1O1O00001O01O01O101N10O1000O002N100O1O1O002N2N1O100O001O2N1O1O1O010O001O010O1O2N1O1N2O1O1N4K`]W2\"}}, {\"image_id\": 69, \"category_id\": 2, \"bbox\": [393.3335266113281, 323.64495849609375, 134.49630737304688, 66.24179077148438], \"score\": 0.9999994039535522, \"association_id\": 1, \"segmentation\": {\"size\": [480, 640], \"counts\": 
\"\\\\Qi59g>1O0000001O01O001O1O0000000O1000O100O100O1O1O1CKnA6n=`0N2M3J6O1N20O0100O100001N100O2O0O101N10001N10001N101O001O0000100O00010O0010O1O01O01O10O00010O01O1O00010O0010O000001O00001O010O0010O00010O0010O010O01O1O00001O10O00010O0001O1O00010O1O1O001O1O1O1O001O100O1O001O1O1O001N3M[Td1\"}}, {\"image_id\": 69, \"category_id\": 1, \"bbox\": [310.80029296875, 179.6612548828125, 108.18035888671875, 138.13253784179688], \"score\": 0.9868587255477905, \"association_id\": 0, \"segmentation\": {\"size\": [480, 640], \"counts\": \"aoa4j0m=<L2N3M4L3L4M5J7J2O1O3M2N1O3M4K6K2M3M5K4M2M3M3M5K3eDaLQ;j3L2O001O1O1O10O1O1oNVEiMl:S2ZEjMh:S2]EgMP;n1UEkMo:R2R1N100O1O101O001O0O100O010O1O3M101M2O1O1O0011N2N2N2M3M2N1jNnM[EV2a:QNWES2h:QNSEQ2l:RNPEP2P;Q1O0000000000O1001O001O00O1O1000000001ON2N2N2L51O17H6J0O1O001N2N2M3M3L6Fg\\\\W3\"}}, {\"image_id\": 70, \"category_id\": 1, \"bbox\": [255.3519287109375, 101.42466735839844, 175.69012451171875, 332.5482177734375], \"score\": 0.9999985694885254, \"association_id\": 1, \"segmentation\": {\"size\": [457, 685], \"counts\": \"UTg31S>7K4L4N1O2M2N2N2N2M4M2N2mMROgFR1V9MbE?Y6fNoLR3k2VMQMl2n2VMPMk2o2VMoLl2P3VMmLl2S3TMkLn2T3TMiLn2W3SMgLn2Y3RMfLo2Z3SMcLn2]3SM`Lo2`3SM[LP3e3QMkJOmNX3X6jLgJ`4Y5aKeJ`4[5aKbJ`4f3bJkLP1]O^4f3gJkLl0[O^4j3kJhLh0YOe3[OdKj46_Lb0XOl2d5gLQK`0UOX2O_LP6l0jJd0nNQ2g6]MYJY4g5jKVJU4l5nKoIQ4S6ULeIk3]6YL\\\\Ii3f6ZLTIg3n6[LnHg3Q7h1100O010O1O001N1O10001O2O001O1N101OO1O1O2N1O1O1O2O1O2N2N2M2O1O1O2N3MmI`I_4^6[KjIe4T6YKoIh4o5XKRJg4o5WKUJg4j5YKYJf4e5ZK^Jd4a5[KaJf4]5ZKdJf4\\\\5XKfJi4Z5UKgJk4Y5SKjJl4W5RKkJl4V5RKnJl4R5SKoJl4R5SKoJm4Q5QKQKn4P5QKPKo4Q5PKPKo4Q5PKoJP5R5nJPKQ5Q5nJPKQ5W5fJlJY5T7N3M2PL^GU2c8iMaGT2`8jMcGT2^8jMeGT2]8iMgGT2[8iMiGT2Z8hMjGU2X8hMkGQ1QOTOU9IlGn0UOWOQ9HmGm0TO\\\\Oo8CSHm0PO@Q;;PEFT;4nDLT;1lD0U;MlD5R;KoD4R;KnD6Q;IQE6P;IQE6o:JSE5l:KVE3k:KWE4j:JXE6i:GYE8S;ZOoDf0[<0O01O0001O0000000100000101OO1O0O0001O0000010O000010O000001O001O001O1N6IT^a3\"}}, {\"image_id\": 70, \"category_id\": 2, \"bbox\": [128.4033966064453, 322.8200378417969, 
173.15834045410156, 114.112548828125], \"score\": 0.9999983310699463, \"association_id\": 1, \"segmentation\": {\"size\": [457, 685], \"counts\": \"Sij11U>4N2O2N1N101O001O1O001O1O001O1O1O001O001O1O1O1O1O1O1O1O1O1O4L100O101O1N2O0O001O100O0010O1O01O01O00001000O0100O01O01O1O010O10O1O10000O100O010O2O0O2O0O01000O2N010O010O10000O001O00100O1O001O010O100O010O001O1O1O0010O0YOaCE^<:eCD\\\\<;fCEX<:iCGW<8jCIOZOm;m0UDJK[OP<j0UDKJ\\\\OQ<i0WDJG^OQ<h0YDIF@P<g0ZDIEBP<e0\\\\D2c;O]DFEDo;e0\\\\DFHBm;h0\\\\DDo;<RD]ODH[<l0PDZOW<f0jCXOW<i0iCTOZ<k0fCSO\\\\<n0`0O1O0000010O01O1O1O10O0001O010O00O2M2M4N2M3M4L;Cb_c5\"}}, {\"image_id\": 70, \"category_id\": 1, \"bbox\": [628.3551635742188, 159.63902282714844, 26.7794189453125, 59.01283264160156], \"score\": 0.9999959468841553, \"association_id\": 2, \"segmentation\": {\"size\": [457, 685], \"counts\": \"amh82V>2N2M2M3M4K4K5H8M4G8I7K5N201O0001O=C3M4L?A5K4K4M2NlU=\"}}, {\"image_id\": 70, \"category_id\": 2, \"bbox\": [245.7714385986328, 373.129638671875, 186.19813537597656, 65.73916625976562], \"score\": 0.9927523136138916, \"association_id\": 0, \"segmentation\": {\"size\": [457, 685], \"counts\": \"lT^3?b0Be<?YCH`<:[C@F8n<8\\\\CL3Gm;>oC^OH9>Il;`0QDGb<9_CFa<:_CFa<:_CEb<;^CEb<;_CDa<<bC@_<`0e0000000000000000000001O000001O0000001N2CZB3o[9JTRG4M3N0000001O000000000000000000000010O00010O000000010O0000001O01O00010O000001O010O0000010O001O0000010O00010O01O01O00010O00010O0000010O010O000000010O000001O010O00001O010O001O001O00010O0010O01O01O00100000O0001000O10000O0010O0010O01O01O01O001O1O000O2O1N3M]Uc3\"}}, {\"image_id\": 70, \"category_id\": 2, \"bbox\": [601.8197021484375, 211.82093811035156, 29.3250732421875, 5.084442138671875], \"score\": 0.9915209412574768, \"association_id\": 2, \"segmentation\": {\"size\": [457, 685], \"counts\": \"`h]82W>0000001O00000000001O0000000000000001O001O001OZWi0\"}}, {\"image_id\": 70, \"category_id\": 2, \"bbox\": [146.7076873779297, 351.5276184082031, 285.927490234375, 87.63714599609375], \"score\": 0.20781943202018738, 
\"association_id\": 0, \"segmentation\": {\"size\": [457, 685], \"counts\": \"RdT21X>2N001N100000001O00001O1O1O2N3M001O00001O001O001O2N001O00001O00001O1O1O001O1O00001O001O2N2N1O1O1O00001O001O003M1O1O00001O01O01O1O2N1O10O0001O001O010O2N1O001O010O000010O0[OaCA^<?cC@]<`0dCAZ<>gCCY<<hCDW<<iCD3_Og;m0VDE2^Oh;m0WDD0Ah;k0YDCOBi;j0YDCNDh;i0ZDCNEh;g0[DCMFh;g0\\\\DBLGh;g0\\\\DCKGi;e0]DCJHi;f0\\\\DCJHi;e0^DBJHh;f0^D^OOJd;h0^D\\\\O0Kb;i0_DZO1L`;j0`DXO1N`;j0VETOk:l0Y100000010O00000010O01O010O00001N1O2N9]Ok\\\\92]cF2N2O000O1000000000000000000010O000000000000000010O0001O0000010O0001O01O000010O000001O000001O01O010O0010O00000010O0010O000010O000001O01O010O01O010O001O00010O01O01O01O01O001O01O01O100O010O001O00010O0010O0010O001O01O001O010O1O001O010O000O2O1O1N2O0Nkcc3\"}}, {\"image_id\": 70, \"category_id\": 1, \"bbox\": [396.9951171875, 164.863525390625, 20.996734619140625, 51.85296630859375], \"score\": 0.0604829378426075, \"association_id\": 0, \"segmentation\": {\"size\": [457, 685], \"counts\": \"cja52T>4K4J6D<J6I7F:01O00O202M9F4N?@5J5L2Nl\\\\g3\"}}, {\"image_id\": 71, \"category_id\": 2, \"bbox\": [213.3860321044922, 407.5727233886719, 275.09356689453125, 135.66720581054688], \"score\": 0.9999998807907104, \"association_id\": 1, \"segmentation\": {\"size\": [559, 559], \"counts\": 
\"eWj31Ua01P_O1o`09N100O10O0K41M301O001O1O10O001000O0100O10O1000O010O010O010O010O1000O100O00100O010O01000O10O01O100O010O1000O010O01O010O10O10O10000O001N2N2O10O010O0100O001O1O1O00100O100O0O2O0O2O01O0100O10O01O1O1N2O001O100O010O1N1O2N2O1O01000O101N1O1O101N10001N100O2O0O2N1O2N1O2O1O00000000001O0000000001O1O2O1N2N1O1O1O00010O001O100O001O010O00000000000000O1000001O001O1O1O1ODb@hN]?W1d@iN\\\\?V1f@jNY?V1g@kNX?U1h@lNW?T1i@lNX?S1i@mNV?S1j@nNU?R1k@nNU?R1l@nNS?R1m@oNR?R1m@oNR?Q1o@nNQ?Q1PAoNP?Q1QAoNn>Q1RAoNn>P1TAoNm>P1SAQOl>o0TAQOl>n0VAROi>n0WAROi>m0XATOg>l0YATOg>l0ZATOe>l0[ATOf>j0\\\\AVOc>j0]AVOc>j0]AWOb>i0_AVOb>h0_AYO`>g0`AZO_>f0bAYO^>g0bAZO]>e0dA[O]>d0cA\\\\O]>c0dA]O\\\\>c0eA]OZ>b0gA^OY>a0hA_OX>a0hA_OY>`0hA@W>?jAAV>?jAAV>?kAAT>>mABT>=lADS>;oADR>:oAGQ>8oAHQ>7PBJP>5QBJo=5RBKo=4QBMn=3RBMn=3RBMo=1SBOm=0SB0n=NSB3n=JTB6l=ITB7m=GUB8l=GTB:k=FUB:l=EUB:l=DUB<l=CTB>m=_OTBa0o=\\\\ORBc0]?001O0010O000O2O1O1O1O1O1O001N2N2N2JZXV1\"}}, {\"image_id\": 71, \"category_id\": 1, \"bbox\": [177.97023010253906, 41.92325210571289, 318.9251708984375, 477.9553527832031], \"score\": 0.9999984502792358, \"association_id\": 1, \"segmentation\": {\"size\": [559, 559], \"counts\": \"PbV3e0e`06J6I7L3N3L4M2O1N3M2J6I7A`0bN]1I7J6N2O1N3N1nKYK[Kh4a4fKSK[4h4TLmJn3l4`LjJc3d4WMQKl2V1UK^O\\\\2VOb2U1bKmN]2JS2T1lK_N^2:h1S1TLWN\\\\2c0a1R1[LSNY2i0]1R1aLjMY2Q1X1R1nLVMV2f1l0S1j1iNWNU1n1gNSNX1S2_NQNa1Z2nMlMR2g81hN[MeCh2Q<fMgC]2U<lMcCW2Z<oMbCS2[<SN_CP2^<a1M4L4K5I6H9H8M3N2O1N2O001N2N2M3M3N2N2O1O1O1O1N2O1N2N2M3N2N2O1O1N2O1O1O1O1N2O1N3N1O1O100O100O10001N1000000O101O0O200O2N2N2N2N1O2N1O1O100O1O1O2N3M2N2O0O2N1O1O1O101O1N3N3M3M2N2N100O000000001N101N102N1N101N1000001O1O10001N100O000O2O001O2M7J?A9G4L1O01O010O0101O1O100O01O000O010O01O2O1N101O1O1N1O00001O0O2O1N3N2M3M3N1N2O0O2N1O2N5K6Jc0aKUDT2g=L4L2N3M1O2N2N1O1N3N1O1O1O001O1O1O001O00100O1O1O1O1O1O1O0000O10000O100O10000O100O2O1N2O1N2O1O2M2O1O1N2O010O0000000010O0000001O00100O1O1O1O1O10O01O1O001O001O00001O001O001O1O001O1O1N3N2N2N1N3N1O1N3M4K4Li`R1\"}}, {\"image_id\": 71, 
\"category_id\": 2, \"bbox\": [92.74150085449219, 523.4208984375, 27.704795837402344, 12.444091796875], \"score\": 0.9999759197235107, \"association_id\": 0, \"segmentation\": {\"size\": [559, 559], \"counts\": \"cjc15Ya02N100O100O101O0O10000000O10000000000O101O000O101N\\\\T_7\"}}, {\"image_id\": 72, \"category_id\": 2, \"bbox\": [0.0, 153.13449096679688, 538.8805541992188, 62.46214294433594], \"score\": 0.9999927282333374, \"association_id\": 2, \"segmentation\": {\"size\": [236, 592], \"counts\": \"RZ71[70O3[IOj54TJMk53TJOj53TJNl52RJ0m52PJ1o50PJ0P61nI0R61lI0S61jI2V6NjI2V6OiI1W6OhI2X6NhI2X6NhI2X6a00000O1000O1000000O100000000000O01000000000000O1000000O1000000O10000000000000000O1000000000000O1000O10O100000000000000O100000000000000000O01000000000000000000000000000O10000000000000000000000000O1000000000000000000O100000O10HaIB`6=bIB^6>bI^O00^6b080ZI^O0010X6b0gICOLZ6b0eIC1LX6b0gIGY6i0M1O0O2O1O0O2O0000000O2O0000000000O10O1000O1000000O10000000O010000000000000000O1000000O1O010O1000000000000O100000000000000O1O1K5N2O100000000000000O100000000000000O100O10000O1000000000000O100000001O0O10000K6M2M3O2J500O2O00000000000000O2O000000000iHMU76O000000000O10000O101N1000001O3M2Mb0WInNV6U1O1O002N1O0O10001OO100000000O1O1O1000000000000O1000000000000O10000O100O1O1N2N2O1O10000O10000000000O10000O1O100O100O1000000O100000000000000O1000000000000000000O100000000000000000000000000000000O100000000000000000000000000000000000000000000000000000000O10000000000O1000000O2O0000000O1000000000001N1000001N100O1O10000O10001O00000O100000000O10001N101Olnh0\"}}, {\"image_id\": 72, \"category_id\": 2, \"bbox\": [11.713224411010742, 161.3423614501953, 278.5499267578125, 56.25689697265625], \"score\": 0.9997962117195129, \"association_id\": 1, \"segmentation\": {\"size\": [236, 592], \"counts\": 
\"jn51[70O2L3O1O1N2N3YOIWJ7i5JUJ7j5JVJ6j5JVJ6j5JVJ6i5LUJ4l5LTJ4k5MUJ3k5MTJ3l5OSJ1m5OSJ1m5ORJ1o50PJ0o51QJOo51QJOo52oIOQ61oINS61mIOS62jI0V60hI1Y6a0000000O01000000O10000000000O10000000000O10000000O10O1000000O100000000O100000000O100000O10O100000O01000000O1000O1000000000000000000000000O1000000000000O100000000000000000O1000000000000000001N10001M3[OdINJ1]`2OkfM0O2hHNT75O101O1O4L7SI1o5h0N0O2O00000000O10000O10000000000O10000O1O100O1000000O10000O10000O100000000O100000000O100000000O1O1O1000000O1000000O1O1N2O1O10000O100O1O100O100O10000000000000000O1000000000000000000000000O1000001O000000000O100000000O101N1IUILl60RI03Nl62QIOV7O40XTV2\"}}, {\"image_id\": 72, \"category_id\": 1, \"bbox\": [3.2110157012939453, 5.965528964996338, 126.88665771484375, 192.723876953125], \"score\": 0.9996312856674194, \"association_id\": 1, \"segmentation\": {\"size\": [236, 592], \"counts\": \"no08R74N0O2O0O2O1N3N2M4L3YNZOcLh0X3D^Lb0[3<hK4^1TOMi2I_M4e2F^M:m2TOZMk0d401N1000000000O10000O100POnNeKS1V4T1N2N20000000000001O0000000000000000000000000000000000000000O10000000000O100O1O100000000000000O1SNXMVOh2g0]MWOc2e0lMPOT2l0RNQOo1m0UNQOk1n0XNPOh1o0[NoNe1P1^NnNb1Q1aNmN_1S1bNlN^1S1eNkN[1T1gNkNY1S1jNlNV1S1lNlNT1T1mNkNS1T1POjNP1V1ROhNn0X1SOgNm0Y1TOfNl0Y1VOeNk0[1VOdNj0\\\\1WOcNi0\\\\1YOcNg0]1YOcNg0]1ZObNf0?WMOS2Bf0<[M1o1Cf09_M3l1Dd09aM1l1Fc09bM0k1Gc08cM0l1Ga09dMNl1I`08eM@C6Y22?8oMDc13>9PNBd14<9RNAd15::SN_Oe168;j0DWO:j0EWO;j0DVO<j0DWO:k0DVO<m3O1O1N3K_fZ3\"}}, {\"image_id\": 72, \"category_id\": 1, \"bbox\": [104.40789031982422, 12.37454605102539, 198.81411743164062, 168.7961883544922], \"score\": 0.969273567199707, \"association_id\": 2, \"segmentation\": {\"size\": [236, 592], \"counts\": 
\"f_i06U72O001N101O01O00001O001OO10000O100000000000000000O1000O101O00000O1000001`MHjM8S2LlM5R2MmM3R20lM1S2OlM3S2NkM3T2OjM3U2NgM6X2LaM<\\\\2F`M=_2D_M?hNlN]3f0hM`0kNjN\\\\3i0dM`0POgN[3k0aMb0SOcNZ3o0]Mb0YO_NY3Q1[Mc0ZO]N[3R1XMb0]O\\\\NZ3U1VMa0]O\\\\N]3T1TM`0T3BjL2BiNf3X1eLOe33XLNg35ULLl35QLLo36nKLR4g1O1WOkL[MU3c2PMYMR3d2RMZMn2f2SMXMn2g2TMXMl2h2UMVMm2i2TMVMl2i2XMSMi2m2k0000000000001O1O1O0O2O0000001O000000000000000000000000000000000000000000000000000000000000000000000000000000O10000000000O100O100O100O1000000O100000000000000000000000000001O00001O001O2N3M8H3M1O1O1O2N1O2N1O001O0000000000hNoMVMP2g2UNWMl1f2WNYMi1b2^N\\\\Mb1a2dN\\\\M\\\\1]2nN`MR1\\\\2TObMl03TNU1S1fNk02^Nm0i0POi02aNk0h0ROh00dNj0g0TOk0KiNe0h0TOZcR2\"}}, {\"image_id\": 72, \"category_id\": 2, \"bbox\": [200.07528686523438, 149.16148376464844, 327.2558288574219, 53.93617248535156], \"score\": 0.4907070994377136, \"association_id\": 0, \"segmentation\": {\"size\": [236, 592], \"counts\": \"TX^1m0\\\\65N1O1N1000000000O10000O10000000000O01000O100N2O1O100NcIUO]6j03000000O100N200O1000000000000O10000000000O100000O10000000000O1000000000000001O0O2O1O1O0O4M0MSIGm65SIL0On60RI1000No61QI010Q7OoH100U7OlH0^e33daL4N2O001O001O1O001O9G=C4L4L1O00000000000000000000000000000000O100O1N2O1O1000000O10000O1O1N2O1O10000O100000000O100N200O10000O10000O100O1N2O1O10000O10000000000O10000000000O1000000000000000000001O0O100O10000O1O100O2O0000000000000O100000000000000000000000000000000000000001O000000001O1O00001N1000001O0O2O001N1000001N1000001O0O100O2O000O10000O2O000O1O1Olo2NVPM1O10000000000000000000000001OnXe0\"}}, {\"image_id\": 73, \"category_id\": 1, \"bbox\": [470.485107421875, 76.63056182861328, 128.53375244140625, 73.56464385986328], \"score\": 0.9999998807907104, \"association_id\": 1, \"segmentation\": {\"size\": [457, 685], \"counts\": 
\"oka6;g=a0E1O2O001N101O00000000000000000O100N2O1O1O2K4F:O1O2N1N2O2O0O100O1O2O000O101N10000O1000000O10000O100000000000000000000000O1000000O100000000001O000000000000001O0000O2O001O001Od0\\\\O001O001O1O000O101O0000000O1000000O1N10100O1O1O1O1O1O2N1O2N100O2M2M4M3MY`X1\"}}, {\"image_id\": 73, \"category_id\": 2, \"bbox\": [338.10870361328125, 304.7688293457031, 178.14251708984375, 55.449462890625], \"score\": 0.9999997615814209, \"association_id\": 4, \"segmentation\": {\"size\": [457, 685], \"counts\": \"V]i41X>001N2O000000O1000001Nh[92UdF6K2N2O000O100000000O1aBBS=>lBCT==kBET=;lBET=;kBGS=:mBFS=:mBFS=:lBGT=9lBGS=:mBFS=;kBFU=:kBFU=:kBFU=;iBFW=f00O10O1000000O1O[OoB4n<NSC2m<NTC1k<0UC0j<0XCOg<2ZCLf<4[CLe<4\\\\CKd<5\\\\CKd<5\\\\CKd<5\\\\CKc<6]CJc<7\\\\CId<7\\\\CId<7\\\\CId<8[CHe<8[CHd<:ZCGf<;WCFi<<QC[O09o<m0000O100000000000000CQC@o<?RCAn<>TCAl<>UCBk<>UCBk<>VCAj<?VCAj<?VCAj<?WC@i<`0WC@i<`0WC@i<`0WC@i<`0XC_Og<b0YC^Og<c0XC]Oh<c0XC]Oh<c0XC]Oh<c0YC\\\\Of<e0ZC[Of<e0ZCZOg<f0ZCYOe<h0\\\\CVOe<j0=1N100000000O10000000O10O10000000000O100000O10000O010000O010O10ZOjB8W=HiB8V=IkB6U=ImB6S=JnB3S=MoB1R=OnB1R=OnB0S=0nBOR=0oB0Q=0QCKR=5a0O10000000000O1000001MQT[2\"}}, {\"image_id\": 73, \"category_id\": 2, \"bbox\": [147.90463256835938, 358.3350830078125, 128.81072998046875, 52.10223388671875], \"score\": 0.9999954700469971, \"association_id\": 2, \"segmentation\": {\"size\": [457, 685], \"counts\": \"fYS21T>6M2N101OO010O1O10O100O101O0000000000001O000000001O00001O001O0oBCV<=iCEV<<hCEY<;bCJ]<7_CLa<4^CMb<5[CMd<4XCOh<i000000000010O000001O000001O00000000000000001O00001O0O10000000000O10001O0000001O000000000O100O1O100O101N1O100O2N1O100O100N2O101O0O1O1O1O2O0O1O1O1O1O2N100O100O1O2O000O3LX`g5\"}}, {\"image_id\": 73, \"category_id\": 1, \"bbox\": [0.0, 229.73214721679688, 205.44540405273438, 200.01708984375], \"score\": 0.9999935626983643, \"association_id\": 2, \"segmentation\": {\"size\": [457, 685], \"counts\": 
\"[8l1d0a0d8g2K3N2O0O10000O2O000O10000O100O1O100O100O1000000O100000000000000O100O100O1O1O1O1000000O10000000000000000O10000000000000000000000000000000000001O000000000O1000000O100O1O1O100O1O1O100O101N1N2O100O2O0O1000000O10000O100O1O2N101N1000001O0O10000000000O2O0O1O1O100O101N100O10000O1O1O1O1M3O1N2M3M3L4M3N2O100O1001O0gE^Lk9c3RFaLl9o3O1O1O1O1O1O001OdNcFlM^9R2dFnM[9Q2iFlMW9T2mFhMS9W2QGeMP9Z2SGdMm8[2UGdMk8[2VGeMj8Z2XGdMi8[2XGeMh8[2XGdMj8[2VGdMk8[2VGdMk8\\\\2UGdMl8[2TGdMm8\\\\2SGcMo8[2RGeMn8[2SGcMo8\\\\2SGaMn8_2TGYMR9g2Y1O00000O101N100O1O1O1O10000O1000000O2O0O3Lmne6\"}}, {\"image_id\": 73, \"category_id\": 1, \"bbox\": [245.2340850830078, 12.999058723449707, 201.63282775878906, 298.142333984375], \"score\": 0.9999916553497314, \"association_id\": 0, \"segmentation\": {\"size\": [457, 685], \"counts\": \"P^^3=j=4N0O2O0O100O2O000001O000O2O000000000O2O0O100O1O2N1M4L4LSR2?_mM4K4L4M2N2M2N3N2O1eDnN^9S1_FQOm0Nk5Q1VITOm0Nk5n0XITOk01l5k0YITOj02m5j0XIUOj03m5h0YIUOi04m5i0YISOi05n5h0WITOk05n5g0TIXOl03o5f0PI\\\\Oo00P6e0nH^OQ1NP6e0mH_OQ1NR6c0lHAP1LT6e0jH@P1MU6d0iHBP1KV6e0hHBo0JY6e0fHBQ1JY6d0dHDR1IZ6d0aHDU1JY6b0`HFU1J\\\\6`0\\\\HHW1J\\\\6`0YHHY1J^6?VHJZ1H`6U2_IlMb6S2^InMc6P2\\\\IRNd6m1[IUNg6h1XIZNh6e1WI]Nj6b1TI`Nl6^1TIdNl6[1TIfNl6Y1TIhNl6V1TIlNl6R1UIoNl6m0VISOk6g0YI[Og6c0YI^Oh6a0VIAj6?TICm6<oHIP76oHLQ74mHNT71kH0U70jH1V70gH3X7OdH5Z7NaH6]7M_H6_7L]H8[1RMY4j2WJ6]1SM[4j2UJ4\\\\1VM^4j2PJ3_1UMa4R6YKSJf4P6jJ]JV5i5[J`Je5R701O00000000000000000000000000iNVJoIj5o5\\\\JmId5Q6bJkI^5T6dJkI\\\\5T6fJkIZ5T6gJlIY5S6iJlIW5S6kJlIU5S6lJmIT5S6mJlIT5U6jJkIV5V6hJkIY5V6eJjI[5W6dJiI]5V6cJjI]5W6aJjI`5m4PJbK?Ab5j4RJeK:Bf5b4UJmK3Bi5\\\\4YJXLF]OQ6X4\\\\J[Me5`2`J_Ma5^2aJbM_5]2bJbM_5\\\\2dJcM\\\\5\\\\2fJcMZ5Z2jJdMX5Y2kJfMU5W2oJgMR5W2QKgMP5X2RKfMo4Y2SKcMP5XO_In1e1gNm4YObIl1e1hNj4[OdIi1f1jNf4[OgIi1e1jNe4\\\\OiIf1f1lNa4]OkIe1f1mN_4]OmIe1f1lN]4_OoIb1f1mN\\\\4@RJ^1d1ROZ4_OVJZ1c1VOW4@YJU1c1YOU4A[JR1b1]OS4A]Jo0b1^OR4B^Jn0a1@Q4A`Jm0b1_Oo3D`Jk0c1@m3DcJh0d1Aj3GeJb0g1Dd3JgJ>h1Fc3JgJ>h1Ga3KhJ<i1Ga3KhJ<i1H`3JjJ:h1L`3GkJ7j11c3^OoNb
0]7N2O002M3L5K]hc3\"}}, {\"image_id\": 73, \"category_id\": 2, \"bbox\": [5.901841163635254, 59.524696350097656, 111.03607177734375, 11.529754638671875], \"score\": 0.9999616146087646, \"association_id\": 0, \"segmentation\": {\"size\": [457, 685], \"counts\": \"mU32W>000000000000000000000000000nAOl=2SBNm=2SBNm=2RBOn=1RBOo=0QB1n=ORB1n=OQB2o=NQB2o=OPB1P>OPB1P>40000000000000000001O00000000000000000000000000000O1000000000000000O01000000000000O10O1000000000O10O1000O100000000O100000O10O1000000O1000000000000O10000000000O10PWo7\"}}, {\"image_id\": 73, \"category_id\": 2, \"bbox\": [250.99554443359375, 188.38955688476562, 254.0333251953125, 71.28695678710938], \"score\": 0.9963550567626953, \"association_id\": 3, \"segmentation\": {\"size\": [457, 685], \"counts\": \"[Xd33U>2O001O0O1000001N10000O10000O100O100O2O0O10000O100O1O1O1O100O2O0001O0000001O0000001O001O2N001O00001O001O1O1O2N1O1O001O001O001OhYf0MZfYO2K8K2N2O0M4O0O2O0O2O1L4K4N3N1O1O2N1O1N2M3L301000000000000000O10000000000O1000000000000000000O100000001O0000001O00000000001O0O100O2O0O100O2O000O10000O1O2O0O1O1000001N100O10000O101O000O101O001O000O101O001O1O1N2O1O1O001N101N3LVUh2\"}}, {\"image_id\": 73, \"category_id\": 2, \"bbox\": [563.4136352539062, 130.9241180419922, 38.01641845703125, 17.700592041015625], \"score\": 0.9949758052825928, \"association_id\": 1, \"segmentation\": {\"size\": [457, 685], \"counts\": \"hko71V>3N1O1O100N20O0100O10O010O00001O010O001O1O100O1O1O10hiU1\"}}, {\"image_id\": 73, \"category_id\": 1, \"bbox\": [201.03041076660156, 0.0, 124.68833923339844, 219.83348083496094], \"score\": 0.985238790512085, \"association_id\": 3, \"segmentation\": {\"size\": [457, 685], \"counts\": 
\"ohi27g=a0nEAl5e0oI^Ol5i0oIXOl5n0SJROe5X1XJiNe5[1ZJeNd5^1[JbN`5d1_J\\\\N\\\\5k1aJVN]5n1aJSN]5o1bJQN]5Q2aJPN]5U2_JlM_5Y2]JhMb5[2[JfMd5\\\\2ZJeMf5`2SJbMm5\\\\3SIfLl6`3hHgLX7`3oGRMP8d400O1N2O1N2O1N2O1N2N2O1OPOcHVK[7k4hHSKW7n4kHPKS7R5oHlJP7l3dHgL=\\\\Oo6k3hHdL=@k6i3aJVL_5e3fJ[LZ5a3kJ^LU5^3oJbLQ5Y3TKgLl4R3[KnLe4o2_KPMa4n2aKRM_4n2aKRM_4m2bKSM^4m2bKSM^4m2aKTM_4l2aKTM_4k2bKUM^4a2eHUMn2:]4`2jHRMi2>]4_2nKaMR4^2oKbMQ4^2oKbMR4\\\\2oKdMQ4P2cHiM;Ge2`0]4P2fHeM9Lc2?^4o1\\\\LQNd3n1]LRNc3n1^LQNb3n1_LQNb3o1^LQNb3n1_LRNa3n1_LRNa3n1_LRNa3n1_LRNa3m1aLRN`3m1`LSN`3m1`LSN`3l1aLTN_3k1bLUN_3i1bLWN_3g1bLXN`3e1bL[N_3c1bL]N`3NZHV1W4kNb3J]HV1T4oNm3m0VLPOn3m0TLQOl3o0VLQOh3o0ZLQOe3n0]LRO_3Q1bLnNa3o0`LPOe3l0\\\\LROg3k0o4AiBEZ=7<1O100O11HZG1j82N2O1M4O3M00F[B1d=N]B2o=N2M3Ne8OlW10R]S5\"}}, {\"image_id\": 73, \"category_id\": 2, \"bbox\": [24.20587730407715, 371.61187744140625, 131.0983428955078, 80.67471313476562], \"score\": 0.9814085364341736, \"association_id\": 0, \"segmentation\": {\"size\": [457, 685], \"counts\": \"]Rd02V>2O0O10000O100O1O100O001O100O10O0100O10O01O100O100000O001000O001O01O01O01O010O01N1010O0010O1O0010O01O010O010O000100O01O000100O10O01O10O1O01O1O010O00100O010O1O100O010O1O10O010OO2O00100O01000O102KWna7\"}}, {\"image_id\": 73, \"category_id\": 1, \"bbox\": [282.0435791015625, 162.30233764648438, 185.54629516601562, 185.78280639648438], \"score\": 0.9048416018486023, \"association_id\": 4, \"segmentation\": {\"size\": [457, 685], \"counts\": 
\"omn3;k=5M2N2M2O2N2N1O100O1O100O2O0O100O2O0O101N101O1N2O1N4M3L2O1gF_Ng5c1lIlNQ6U1iISOR6o0mISOn5R1PJPOm5S1QJnNo5S1oInNQ6S1nImNR6S1mInNS6S1kInNU6R1jIoNV6R1iInNV6S1iInNW6R1iInNW6S1hImNX6S1gInNX6T1gIlNY6U1fIkNZ6V1eIiN\\\\6W1dIiN\\\\6X1bIiN^6W1bIhN_6Y1`IgN`6Y1_IhNb6X1]IgNd6Y1\\\\IgNc6[1[IeNf6\\\\1WIfNi6[1SIhNm6\\\\1mGFS8=`GLa8n200000001O000000001O001N101O00010O0000001O00O10000000000VL`Gj1a8TNbGj1_8UNdGf1_8XNeG[1f8cN]GZ1f8dN\\\\GY1f8fN\\\\GX1d8iN\\\\GV1e8iN\\\\GU1f8iN]GU1d8jN]GU1d8iN_GV1a8iN`GV1a8iN`GV1a8hNaGW1`8cNfG\\\\1[8`NiG_1X8^NlGa1T8^NmGa1T8^NmGb1S8\\\\NPHc1P8[NTHc1l7ZNZHb1g7[N_Hb1b7[NaHd1_7YNeHf1[7WNhHi1X7UNkHi1V7UNlHj1U7UNmHh1U7XNlHg1U7WNmHg1T7YNmHe1m9O1N3N2M3M2N1O2O1N4M5K1O1N2O0O2O1N2M3N2O1N2N2M`]b3\"}}, {\"image_id\": 73, \"category_id\": 2, \"bbox\": [269.7185974121094, 175.1387939453125, 233.989013671875, 139.8521728515625], \"score\": 0.2968777120113373, \"association_id\": 0, \"segmentation\": {\"size\": [457, 685], \"counts\": \"ZUi36R>2N2N10000O100O100O100O2O000O10000O10000000000000010O000001O001O010O1O001O1O001O1O1O1O10OO2O1O1O2N1O1O0000bdg01T[XO<K4M3M2N1O1N3M2N2O2N1O1O1O1N3M2M3N2O1O2N10O0100O0100O010O101O0000001O0O1000001O0000000000001O00001O0000001O000O1000000O2O000O2O0O2O0O101O0O10000O1O1O100O100O101O0O100O10001O0O101O1O001O00001O001O001N2O1O001O0O2O1N`ch2\"}}, {\"image_id\": 74, \"category_id\": 1, \"bbox\": [206.78282165527344, 135.53118896484375, 100.36112976074219, 130.07696533203125], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [314, 500], \"counts\": \"kco1f0Q95M2M=C8I2N6Ig0ZO3N1O>B;E1O1O2N2N002N1O1O001O001O1O1O001O000000001O001O1O1O00000000000000000000O10000O100O100O100000000O1O1N200O10000O1O1N200O1O1O1O1N2M3O1N2N2M3O1N2J6L5M3M3F:L3O2G:I6N2N1L5M4M3K5J6L4LPQk1\"}}, {\"image_id\": 74, \"category_id\": 2, \"bbox\": [290.3431396484375, 87.06549072265625, 183.81344604492188, 167.3248291015625], \"score\": 0.999994158744812, \"association_id\": 1, \"segmentation\": {\"size\": [314, 500], \"counts\": 
\"Y^i2e0S93L4C<L3N3M2N3O0O100O1O1G9O1O101N1O2N2N101000O1O1O001O1000O0010O00100O0010N101O1O10O010O0O1000010O010N2O001O10O1O1O001O100O10O0O0100010O10O0000O10010O10O10O1O0010O010N101O10O01O000000000100O000000000010O01O00000000010O01000O0010O0O1O2N101O0010O1O1O001O100O1O1O1O1O1O100000000O10000O100N2O100O100O1O1O1O1O1O100O1O100N2O100O100O1O1O1O1O100O2O000O100O100N3N1O100O3M5JWl7\"}}, {\"image_id\": 75, \"category_id\": 2, \"bbox\": [61.94807815551758, 218.14373779296875, 332.8915100097656, 258.1975402832031], \"score\": 0.9999990463256836, \"association_id\": 1, \"segmentation\": {\"size\": [566, 643], \"counts\": \"fdS11ca06K5K2N2N1O1N2O1N2O2N1O1O1O1O1O1O01O01OO1O1000O10000000001N101M2_OUOS@Q1l?<0100O10O10000000000O100000000O0100000001O000000000000010O00001O001O1O1O1O1O1O1O0010O01O001O101N1O1O10O01O0010O00010O01O10O01O1O100O001O1O100O2N1O1O1O1O0010O01O1O1O1O3M2O0O2N1O1O001O101N1O1O2N2N1O1O0010O01O100O100O2O0O10O01O010O010O100O2O1O1O1O000O1001N10100O01O1N10000O0010O01O001O101N1O1O10O01O0100O1000000001O00O10O10O10O010O100O2O0000O100O10O010O10003M4M01N0O010O01O0010O01O1O1O1O100OVNdB[O[=d0gB[OZ=d0gB\\\\OZ=b0eB_O^=?aBB`=<`BEb=8]BJf=2ZBOf=Q201OZN_BUOb=f0cBZO]=c0fB]OY=a0jB^OW=b0iB]OW=d0hB]OX=c0hB\\\\OY=d0gB\\\\OY=c0hB]OY=a0hB_OX=`0iB@W=?kB@U=`0kB@U=?mB@S=?nB@R=`0oB@Q=?QC_OP=a0PC_OP=`0QC_OQ=`0oB@Q=?PCAQ=>oBBR=<PCCP=<QCDP=;QCDo<;RCEn<:SCEn<;RCEm<;TCDm<;TCEl<;TCDl<<UCCl<=UCAl<?TCAl<?UC_Ol<`0UC@k<`0UC_Ok<b0UC^Ok<a0UC@j<a0VC_Oj<`0WC_Oj<`0WC_Oi<a0XC_Oh<a0XC^Oj<`0WC@l<<UCDQ=5oBMP=3PC0m<OTC1l<NUC2j<NVC2k<MVC3j<KWC4k<JWC5j<IWC7j<FYC9S?O001O1N101O00001N2O1M3N5J4M3L3NaWd4\"}}, {\"image_id\": 75, \"category_id\": 1, \"bbox\": [223.55657958984375, 67.7779312133789, 317.34259033203125, 420.70269775390625], \"score\": 0.9999985694885254, \"association_id\": 1, \"segmentation\": {\"size\": [566, 643], \"counts\": 
\"c]P46_a0<D2N2N2N1O2N2O0O1N2O1N1O2N2O1N101O1O001O0000O10O1000000010O001O01O01O001O0O101O00000001O001O001O010O10O01000O10O10O1000O0100O10O011N1O1O2M2O1O2M2O100O2O0O101O001O001N100WBeNW;\\\\1fDjNV;V1hDoNU;R1iDROS;P1jDUOS;l0iDZOT;h0gD]OV;i0^DA_;g0nCFP<S3L3N3L3N1N2N3M2N3M4L5K5K5K4L4L4M2L4M3M3L7^Of0ZOd0B=I6J5K6J4L5L1O2N2N2N2N2M3N3M3M2N2N3M3M3M4L5Kf0ZO`0@6J5K1O1O1O1O001O001O00001O0000000000000000000000000001O001O1XMXL`Gi3[8cL]G^3^8jL^GW3^8PM^GS3^8QM_GR3]8QMaGR3[8QMcGQ3Z8QMeGQ3Y8PMfGS3W8oLfGW3U8kLiG[3P8gLnGa3k7bLRHc3i7_LTHf3h7_LRHf3j7^LQHd3n7`LmGc3Q8R3O1O010O10O0100O10O0100O100XK[F\\\\1g9`N^F]1e9^NbF]1`9`NfF\\\\1[9aN_Gf0c8WOZHMh71`HHa75fHE[7:iHCX7<jHBW7<kHBW7>jH_OX7`0iH_OX7a0iH\\\\OZ7c0hHXO]7e0iHTO\\\\7i0UIcNQ7Z1h4M3M2O0O101N2O1N2O2M2O1O0O100O10001N101N2O001N100O100O01O10O01O010O00010O010O0000O2O0000001O001O001N101N2N2O1N2N3L3I8IngX2\"}}, {\"image_id\": 75, \"category_id\": 1, \"bbox\": [437.2041015625, 78.35118103027344, 169.912109375, 166.77484130859375], \"score\": 0.9992073178291321, \"association_id\": 0, \"segmentation\": {\"size\": [566, 643], \"counts\": \"^mh71da03M20N1O2NiS10WlN101O0O2O000h^O1j`0MW_O4i`0JZ_O4Ta010O01O0O3M3M4L3N1O1O01O1O1O1O1O0001O00001N11O000010O01O0000O101O000O100O1O0O2O1O1O1O1N2M201N2O0O0N2N02O20OO01M201O2000O00\\\\Ob0H:O2K5N2M3K4N3O101O000M2N3O11O00O1O1O1O2O00100O0000010O00010O0010O100N2O1O10O0100O1O1N200O100O1O1O2N1O1O100O1O1O2N1O2N2N2M2O2N1O2N1O2L4M3M3N2N3L4K4M3M3N2K=@dic0\"}}, {\"image_id\": 75, \"category_id\": 1, \"bbox\": [81.71314239501953, 358.4747619628906, 20.55614471435547, 19.112213134765625], \"score\": 0.5364384651184082, \"association_id\": 0, \"segmentation\": {\"size\": [566, 643], \"counts\": \"ge]15`a04L2N3M2O001O000000000000O101N3M4L2NWW[9\"}}, {\"image_id\": 76, \"category_id\": 1, \"bbox\": [205.36361694335938, 204.52911376953125, 263.9605407714844, 147.14324951171875], \"score\": 0.9999951124191284, \"association_id\": 0, \"segmentation\": {\"size\": [479, 640], \"counts\": 
\"a^P4<b>2N2N2N2E<E<J=E4L3RCTNe<U2M2`ChMW<X2jCfMV<[261O1O2N1O1O1O10O001N1O1O2N1001O0100O010O1O001O1O010O010O01000O010O1O10O0100O1O1O1O1O1O100O1O100O1O001O1O1O10O010O01O10O01O100O010000O100O1O100O100O10000000000O1BSNjCm1U<UNjCk1U<WNjCi1W<UNkCi1X<SNkCl1e<O1O3L3N1WOiBGY=6kBGW=5TCBn<:WCCk<:T1HWog3\"}}, {\"image_id\": 76, \"category_id\": 2, \"bbox\": [407.4389953613281, 335.3905944824219, 215.92947387695312, 143.60940551757812], \"score\": 0.9990407824516296, \"association_id\": 2, \"segmentation\": {\"size\": [479, 640], \"counts\": \"goQ67g>6I4M2O1O001N2O1O00001O0000000000000O1001O00000000000000000000000000000001O00001O001O1O00010O00001O010O00102M<D3M2bBcNW=c1O1O1O108Gi0XO2M100O0100012M2N1O00O10O101O3M6I4M2M1O1O1O100O001O0001O1O101N1O1O1O001O1O1O2N1O001O1O002N1O1O001O0000000000000000001O000000001O0000001O000000001O0000001O001O00001O00000000001O00001O001O001O001O1O001O2N1O001O001O2N1O2N1O001O1O1O3M7I3M2N2N2N2N2N2N2N2N2N1O1O1N3N3M2N2N1O1O1N2O1O1O2N2L3K6LQ^8\"}}, {\"image_id\": 76, \"category_id\": 2, \"bbox\": [83.8902816772461, 294.55908203125, 28.05162811279297, 6.665283203125], \"score\": 0.9970194697380066, \"association_id\": 1, \"segmentation\": {\"size\": [479, 640], \"counts\": \"coX11m>2O00001O00000001N100000O1000000000O10O1000O01XUg7\"}}, {\"image_id\": 76, \"category_id\": 1, \"bbox\": [86.8269271850586, 283.5348205566406, 24.711288452148438, 16.229339599609375], \"score\": 0.994147539138794, \"association_id\": 1, \"segmentation\": {\"size\": [479, 640], \"counts\": \"Z^Y16h>1O2O0O1000000O2O0O10O1O10O10000001O1O1O1N10_Ug7\"}}, {\"image_id\": 76, \"category_id\": 1, \"bbox\": [471.7800598144531, 198.48739624023438, 161.90756225585938, 187.789306640625], \"score\": 0.9514859318733215, \"association_id\": 2, \"segmentation\": {\"size\": [479, 640], \"counts\": 
\"gRm6b0T>=K3N2N3M3M3N1N2N010000O10O100O3M1O1O1O00010O00001O001O001O1O010SNZN_Ff1^9bN^F^1^9fNaF[1]9hNbFX1]9iNcFW1[9mN`FV1_9oNZFT1d9oNXFT1g9nNUFU1i9mNTFV1k9lNQFW1m9kNQFW1n9kNmEX1R:mNhEV1V:mNfEV1X:nNdES1\\\\:i1O0O1000001N1O1O1O2N1O101N100O2OhKSF^3l9bLWF\\\\3i9dLWF]3g9dLYF\\\\3f9dL[F\\\\3d9eL\\\\F[3d9eL\\\\F\\\\3b9eL^F[3a9fL^F[3a9fL_FZ3`9gL_FZ3_9iL_FX3`9iL`FW3`9T1O2N100O1O1O1O100O100O11O01O000000000O10O1000000000000000000VOiFkKX9o3PGoKP9o3SGPLm8h0fFj1a0\\\\Mi8j0SG\\\\15jMh8P4WGPLi8P4XGoKh8R4WGnKi8R4WGmKj8T4UGlKk8T4XGhKi8X4_G`Ka8`4o0O1000000001O00001O0000000000mKlE[3T:iLhEW3X:i0O1O001O1O1O1O1O2N1O10O001O1O2O0O1O2[KlE[4^:M4M3aMXEa0k:XO\\\\Ed0o:lN[Em0h<VOl`2\"}}, {\"image_id\": 76, \"category_id\": 1, \"bbox\": [13.876736640930176, 298.4383850097656, 48.30670166015625, 38.059356689453125], \"score\": 0.7425930500030518, \"association_id\": 0, \"segmentation\": {\"size\": [479, 640], \"counts\": \"gZ75h>3N1N2O100O1M4M2O1M202N12M2OO1O010O101N100O2N100O100O01O01O0100O1\\\\OPB8R>FPB8R>ERB9o=ESB9[>N3Mhia8\"}}, {\"image_id\": 76, \"category_id\": 1, \"bbox\": [427.9767761230469, 211.36642456054688, 76.44015502929688, 118.47134399414062], \"score\": 0.6005334258079529, \"association_id\": 0, \"segmentation\": {\"size\": [479, 640], \"counts\": \"WZZ61j>8J4N1UCEo:>mDGo:;]DFRO9_<a0mCMR<`10O1000O10O01000001O0O1O10001N100D]M\\\\Dc2b;aM[D`2b;dM]D\\\\2a;gM^DY2a;iM^DW2b;jM]DV2b;lM]DT2c;d001O001O001O000000O1fM`DU1`;kNcDQ1^;nNgDm0Z;TOeDk0\\\\;VO[DP1g;S11mMXDVO4J0X1e;G[DoNl0d0POAi;k0aEAhNAh;m0cE@eNBi;m0dE@a:?aE@_:?bEA^:=eEYO\\\\N2P<h0aEUO`N3o;m0\\\\EPOeN4OLj;R1`EnNgN4NNj;n0lEnN^:P1i1O11O2M10O10102M3N2L4I`0_O`\\\\o1\"}}, {\"image_id\": 76, \"category_id\": 2, \"bbox\": [143.5285186767578, 305.91741943359375, 256.88946533203125, 75.41409301757812], \"score\": 0.48996052145957947, \"association_id\": 0, \"segmentation\": {\"size\": [479, 640], \"counts\": 
\"Sjk31m>2O0O2O1N2N2O1N1O101N10000000000000O1000O11O001O001O1O010O000000^AG^>=0000O101O00000O01O00010O000100O1000O10O10000000O011O0N2O1E`A5YZ2IdTN0bY_4\"}}, {\"image_id\": 76, \"category_id\": 2, \"bbox\": [244.92974853515625, 295.7829284667969, 190.864990234375, 51.259857177734375], \"score\": 0.2962338328361511, \"association_id\": 0, \"segmentation\": {\"size\": [479, 640], \"counts\": \"eTg31n>0iZ20VeM101M3K4O2O0O10000O100O101O0O0100O00100O0010000O1000000O2O000O2O0O2O0O101O000MWA080W>ObA063Z>MeA4W>MdAO45X>OhA1W>1dA3Z>:00O1000O0100OO1001000O01000O1O01001O1N2AbA;h>HY\\\\1LkcN1N3N100O3N0O00100O100O10O100O1N2O1O100N2O1O1O10hln00WSQO101N1000000000000001N^QP3\"}}, {\"image_id\": 76, \"category_id\": 2, \"bbox\": [162.72756958007812, 328.0289306640625, 181.17254638671875, 52.589202880859375], \"score\": 0.25354984402656555, \"association_id\": 0, \"segmentation\": {\"size\": [479, 640], \"counts\": \"d[g21n>000O100000000000000000000000000O10001O00000000000000001O0000000000000000000000000000000000000000000000000000000000000000000000000000001O000001N1000UOMjB2V=0jBNV=3jBMU=5i01M4N1O2O0000000O100000O100N2O100O100O100O1O100N20000O101O000000000000^A1T>OhANK6\\\\>LhA7X>IgA8Y>HfA9Z>510O01000000O100O10000O100N2O100O100O10000O10000O2O00001NTY_4\"}}, {\"image_id\": 76, \"category_id\": 2, \"bbox\": [178.65199279785156, 318.4645690917969, 188.8472137451172, 45.8365478515625], \"score\": 0.14700016379356384, \"association_id\": 0, \"segmentation\": {\"size\": [479, 640], \"counts\": \"gcQ31n>0h\\\\g02UcXO2O0O1O1O1M300O2O0O10000O100O1000O1O001O1O001O10000000000000000O2O000O2O0O10001O`ANS>1lA2S>KfA075S>KfA066T>NkA2T>OfAL17X>;0100O100O010OO1001000O1O100O100000000O3M:F4LmTa4\"}}, {\"image_id\": 76, \"category_id\": 2, \"bbox\": [153.77781677246094, 349.1291198730469, 197.8074493408203, 27.4383544921875], \"score\": 0.0620230995118618, \"association_id\": 0, \"segmentation\": {\"size\": [479, 640], \"counts\": 
\"iad21n>0O2O000O11O0000000000000000000000000001O0000000000000000000000000000000000000000000000001O00000000000000000000000000000000000000000000000000001O000O100001O000O100O10001Mob^5\"}}, {\"image_id\": 76, \"category_id\": 2, \"bbox\": [140.43417358398438, 380.3664855957031, 115.95477294921875, 7.251953125], \"score\": 0.05513828620314598, \"association_id\": 0, \"segmentation\": {\"size\": [479, 640], \"counts\": \"`iR22m>0000001N10000000000000000000000000000000000000000000001O0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000O10000000001O0000000000000000000000001O1Oe\\\\h5\"}}, {\"image_id\": 77, \"category_id\": 1, \"bbox\": [102.55587768554688, 72.25706481933594, 252.74417114257812, 200.5012664794922], \"score\": 0.9999995231628418, \"association_id\": 1, \"segmentation\": {\"size\": [343, 500], \"counts\": \"V[b14T:`0J6I6K5N200O100O100000001O001O0O101O00100O1O1O2N1O2N1O1O10O10O101O0000000O010000010O001O000O1000001O1O:E6K2N1iGgMl7c201O1O9Fa0eH]La6R4YIUL64\\\\5j4L1O10O00100O1O1O1O1O001O001O1O1O2N1O0SNYJYM9l0_5g1RKWNn4f1VKYNk4c1ZK[Ng4a1^K]Nc4^1cK_N_4[1kK_NW4Y1[LXNi3g1e2N1O2N101O001O1O1N2O1O0000001O0000000001O01O1O1O1O100O001O00001O1O001O1O1O1O00001O000O101O00001O0O100O1O2M2N2O1O2N001O010O010O10O010O1O001N1M4M2O101O1O001O100O010O10O1000000000000000000000001O001O001O001O1N101N1O2N2O1N2N2N3M4L[Sa1\"}}, {\"image_id\": 77, \"category_id\": 2, \"bbox\": [37.686763763427734, 151.23118591308594, 293.2585144042969, 134.7152862548828], \"score\": 0.9999268054962158, \"association_id\": 1, \"segmentation\": {\"size\": [343, 500], \"counts\": 
\"\\\\cc01f:2N2M101O00000O10000O10000O1O100O1O1O1O1N2N1O2N2N2O02O001O0010O01O0000001O00101N2N2OO10O0100O001O001O1O1O1O1O1O001O00001O1O001O001O001O00001N101O001O00001N1000001O00010O001O00001O000000001O00000000001O01O01O1O001O1O1O0010O01O10O0100O3N2M1O1O010O1O3M3M2N3N1N1O1O1O000000001O0000000O20O0O2O2N2N>A<E;D6J4L5JR[8HhdGa0N3M1O2O1N10O10O0010O0100000O0100000O100O100O10000O1000000O1000000O1000000O1O100O1O100O1O1O100O010O1O1O0O101O1O001O1O2N1Mh_S2\"}}, {\"image_id\": 77, \"category_id\": 1, \"bbox\": [68.42778778076172, 86.83348846435547, 82.23790740966797, 45.520362854003906], \"score\": 0.9945936799049377, \"association_id\": 0, \"segmentation\": {\"size\": [343, 500], \"counts\": \"Ulf09[:6M1M3N101M2O2O0O2N1O1O2O000O10000O10000O1000O10O10O10000000001O0O10001O0O1000001O2N002N2N0O4M3M003M1O002N1O1O001O1O001O010O000OQ[12PeNLhih3\"}}, {\"image_id\": 78, \"category_id\": 2, \"bbox\": [0.0, 321.38232421875, 175.86129760742188, 35.776611328125], \"score\": 0.9999998807907104, \"association_id\": 2, \"segmentation\": {\"size\": [453, 604], \"counts\": \"Y:e0_=2O1O000O2O00000000000O10000000000000O1000000000000000000000000000000001O00000000001O0000000O10000000O100000000000000O0100000000000000000000000001O0000000O1000000000O100000000O01000O1000000O1000000000O10O100000000000000O1000000000001O0000000000001O000000001O001O001O00010O007I1O1O1OO010N200O1O1O1O0010O01000O101N100O10000O2O0O10000O1000001O000O101N1OaPm5\"}}, {\"image_id\": 78, \"category_id\": 2, \"bbox\": [165.84542846679688, 292.5390930175781, 153.35879516601562, 22.476715087890625], \"score\": 0.9999998807907104, \"association_id\": 1, \"segmentation\": {\"size\": [453, 604], \"counts\": \"Sn[22S>001N101O006J0O10001O000O1000000O100000000O1000O100000000000000000000000O2O00000000001O00000O10000000000000000000000000000000000000000000000000000000000000000000000001O01O00000000000001O000000000000000O100000001O0000001O0O10000000000O1000000000000000001O0000001N100000001O001O0000001O0O10001O1NRim3\"}}, {\"image_id\": 78, 
\"category_id\": 2, \"bbox\": [0.24938666820526123, 294.3852844238281, 72.07349395751953, 16.249267578125], \"score\": 0.9999996423721313, \"association_id\": 4, \"segmentation\": {\"size\": [453, 604], \"counts\": \"]g0325e=601O0000000000000O10000000000000000000000000000000000000000001O001O6I1000000O01O001M2010O10000O1000000O2O000O10000000000O100000001O0000001NkQ[7\"}}, {\"image_id\": 78, \"category_id\": 1, \"bbox\": [418.019775390625, 92.56617736816406, 164.935546875, 229.7785186767578], \"score\": 0.9999996423721313, \"association_id\": 5, \"segmentation\": {\"size\": [453, 604], \"counts\": \"emi53R>7I001O1O1O2OO3L4Lb>4ZAc0^O01M4M5]OdB4k=Kkj01SUO6J5L2M4M2N2O2M5L4K;F7H6K4K2O1O1O1N2O2N2N1O0O2O000O1000001N100000000O101O00000O2O001O1O1N101O1O2M2O3jG`M`4b2ZKeMd4]2XKfMg4\\\\2\\\\JkLiNo0k6h2_IXNa6o1hHaNX7U4010N2L4M2I8C=N2BkGjJX8S5>O2M2O1N3N1O1O2N1O1O1N2N2N2M4M2O1M3L4J6N2N2O1O1N2O1O1eMcEn0]:oNgEo0Z:nNiER1W:nNjEP1W:oNjEQ1V:oNjEQ1W:nNjEQ1V:oNjEP1V:POkEP1U:POlEo0T:QOlEo0T:QOlEo0T:QOmEn0T:QOmEn0S:ROnEm0R:SOoEk0R:UOoEj0R:UOoEj0Q:VOPFi0P:XOoEh0R:WOnEi0R:WOnEi0R:WOnEi0R:WOnEh0S:XOmEh0T:WOlEi0T:WOlEi0U:VOkEj0U:VOkEj0U:VOkEk0U:TOkEl0U:TOkEl0U:TOkEm0T:TOkEn0S:ROmEo0R:ROmEP1R:oNnES1P:nNoER1Q:nNoENF0Z:3PFJLOT:7PFI1Nm9:QFG7Ne9;TFF:Na9<VFD=^Ol9m0hEC\\\\;=eD@];`0dD^O];b0eD[O];c0fDZO\\\\;c0iDYOZ;`0QW9\"}}, {\"image_id\": 78, \"category_id\": 2, \"bbox\": [37.92069625854492, 356.17596435546875, 506.1803283691406, 96.82403564453125], \"score\": 0.9999996423721313, \"association_id\": 0, \"segmentation\": {\"size\": [453, 604], \"counts\": 
\"UTQ154M09S=h0J4L2O1N2O001O0O2O1O1O0O2O001N101N2O0O2O001N10001O00000O101O00000O1000000000000O101O0000000000000O100000000O100000000O10000000000000000O100000000O100000000O100000000000000000000000000000000000000000000O10000000000000000000000000000000000000000000000O10000000000O100000000O100000000O100O10000O1000000O1000000000000O1000000O100O100000000O10000000000000000O1000000000000000000000000000000000000000000000000000000000000001O000000000000001O0000000000000000001O0000000000001O00000000001O0000000000000000000000000000000000000000000000001O000000000000000000000000000000000000000000001O000000001O0000000000000000000000000000O1000000000000000000000000000000001O0000000000000000001O00001O001O1O1O1O3M1O1O001O00001O0000000000001O00000000000000000000000000001O0O1000001O00001O0O1000001N100000000O2O00000O2N1M3O2N1O100O10001O000O1000001N10000N3M3N100O2O00001N1000001O0O101N1O2N1O2M4M2O0O2O0O2O000O2O000O2N2O1Nf]S1\"}}, {\"image_id\": 78, \"category_id\": 1, \"bbox\": [187.30628967285156, 148.92787170410156, 241.54515075683594, 151.85902404785156], \"score\": 0.9999988079071045, \"association_id\": 1, \"segmentation\": {\"size\": [453, 604], \"counts\": \"ZVh22R>3M3N1N2O2N2M2O001O1N1O2O1N2O1O1N2O1O000O01000O100O1O100O1O100O100O100000000O1000000000000O100000000000000000001O001O001O00001O001O001O001O001O1O001O001O001O1O001O001O001O000000001O0000001O00000000001O0000000000000000000001O000OiDZNQ:f1oE[NSOOc:e1[F\\\\NQO2b:b1\\\\F^NPO3b:^1_F`NmN4c:\\\\1`FnN_9R1aFoNQO\\\\OU:d1kFPOoN^OU:b1lFQOmN^OW:`1mFROmN]OV:a1nFQOmN\\\\OV:b1UGjNT9c0cEXO\\\\14Q9b0gEVO[17n8b0dG]O\\\\8b0fG]O[8a0gG^OY8a0lGYOV8g0e2N1O100O2O0O1O100000000O2O000000000000010O000001O0000010O00001O010O002N1O6J1O2N1O1O1O00001O00001O0001O00001O00001N101O001O1O1O1O1O1O1O1N2N2N3JShh2\"}}, {\"image_id\": 78, \"category_id\": 1, \"bbox\": [29.06230354309082, 87.72991943359375, 211.51304626464844, 212.0584716796875], \"score\": 0.9999923706054688, \"association_id\": 2, \"segmentation\": {\"size\": [453, 604], \"counts\": 
\"[n<2P>d0]O7J3M2N101N101N10001O001O1N101O001O001O001O001O001O001O001O0O10001O001O0O2O001N2O1N2N2N2N3N1O6I4M1O00001O001O000000000000000000O10000O100O1000O100000001O0000O0ZOg0N2O1O001O001O01O010O0100O11N2O001O1O001N101N101N2N3UDUNW;_2N1O1N2O1O9G1O1O2NkNWEnNi:i0cERO`:h0hEiNd:U1W1O2N1O2N101O00001O0000010O01O001O100O002N100O1O1O101N101O0O2O1N100O1O000001O000O100001O1O1O2N1OiNmC=R<BQD<o;DQD=o;BPD?P<BnC?R<k0O0000010O000100O0100O10O00001O000000001O001^OlCQOT<m0PDQOQ<m0RDQOo;m0SDROn;m0RDSOo;k0SDSOn;l0g0M2O1O2N100O2N2M4JVmP5\"}}, {\"image_id\": 78, \"category_id\": 2, \"bbox\": [72.45886993408203, 276.52886962890625, 120.36341094970703, 14.973419189453125], \"score\": 0.9999369382858276, \"association_id\": 6, \"segmentation\": {\"size\": [453, 604], \"counts\": \"YeS11T>1N1000001N2O0000001O00000O2O0000000000000000000O10001O000000000000000000000000000000001O0000000001O00000000001O1N6K1NYl00eSO1PB0n=5M3O0O10001O00000000000000O10000000000O10001O000O101O001O000000000000O10O10000O10001O0O10000O2Ocoe5\"}}, {\"image_id\": 78, \"category_id\": 2, \"bbox\": [430.12310791015625, 292.0935974121094, 106.02001953125, 21.32720947265625], \"score\": 0.9999237060546875, \"association_id\": 0, \"segmentation\": {\"size\": [453, 604], \"counts\": \"j_R635Ng=3WB0f=2XBOh=8O00001O001O001O00000000000000000000000O100000000000000000000000000000000000000000O1000001O000000001I7N2O1OSBNg=1ZBOe=:N1O2O0000O101O00001O000000000000001O0000000000000O101O000O10000O2O000O2O0O]Wn0\"}}, {\"image_id\": 78, \"category_id\": 1, \"bbox\": [370.0599060058594, 233.9275665283203, 42.12982177734375, 47.69990539550781], \"score\": 0.9998553991317749, \"association_id\": 3, \"segmentation\": {\"size\": [453, 604], \"counts\": \"djT59d=I_B:]=9K5J6M3M3M111O10000000000000O10000000010O01N101O0O100O100O1000N2O1O2Oi0UO5K6Iged2\"}}, {\"image_id\": 78, \"category_id\": 2, \"bbox\": [300.541259765625, 321.44451904296875, 217.27642822265625, 40.734832763671875], \"score\": 0.9828428626060486, \"association_id\": 0, 
\"segmentation\": {\"size\": [453, 604], \"counts\": \"hPY43R>1N101O0O100O2O000O101O000O1000000O1000000O2O0O10000O10000O2O000O10001O00000O101O00000000000O1000000000000000000000000000000000000000001O0000001O0000000000001O0000000000000001O000000000000001O0000000O2O001O0000000O10001O0O1O101K^BDSo37d^L6K4N1O100O100O1O010O10000000000001O0000000O1000001O0000000000000000001O000000000000001O000000000001O0001O000000000000000O10001O00000O100O100O100O10001O0O1000000O10001O0NUgU1\"}}, {\"image_id\": 78, \"category_id\": 2, \"bbox\": [388.0151062011719, 293.8659973144531, 51.406585693359375, 14.536285400390625], \"score\": 0.97208571434021, \"association_id\": 5, \"segmentation\": {\"size\": [453, 604], \"counts\": \"S\\\\\\\\54P>2O001N10000O10001O00000O10000000000000000000000000001O000001N101NRi_2\"}}, {\"image_id\": 78, \"category_id\": 2, \"bbox\": [376.65850830078125, 279.314453125, 24.171234130859375, 7.46759033203125], \"score\": 0.9718494415283203, \"association_id\": 3, \"segmentation\": {\"size\": [453, 604], \"counts\": \"j_W55P>000O2O0000000000000000000001O001O1O0O2OV]j2\"}}, {\"image_id\": 78, \"category_id\": 1, \"bbox\": [30.285646438598633, 222.2249298095703, 25.769575119018555, 75.89927673339844], \"score\": 0.9558180570602417, \"association_id\": 4, \"segmentation\": {\"size\": [453, 604], \"counts\": \"Rh?f0Q=?L300009G4]OiB0Y=JQC0o<Od00VONbC0]<4j0N2NQYe7\"}}, {\"image_id\": 78, \"category_id\": 1, \"bbox\": [14.2272367477417, 147.9637451171875, 65.9687271118164, 169.60537719726562], \"score\": 0.8716526627540588, \"association_id\": 6, \"segmentation\": {\"size\": [453, 604], \"counts\": \"Qi6b0b=0100O10O010000000000N2O100O10001O02O0nBEY<<WC7e<g0QFlNX7b0eHC\\\\79eHI\\\\70jH0h:L5L4IYc=3h\\\\BO32G6J68CVdW7\"}}, {\"image_id\": 78, \"category_id\": 2, \"bbox\": [437.0533142089844, 276.9887390136719, 98.79367065429688, 14.624542236328125], \"score\": 0.7855353355407715, \"association_id\": 0, \"segmentation\": {\"size\": [453, 604], \"counts\": 
\"d[S62R>10001O0000001O0000000000001O0000000000001O0000001O000000000000000000000000000000000000000001N10000O2NTS30mlL1O1O00001O0000000000000000001N1001O0O1000000000000000000O1000001N100Olim0\"}}, {\"image_id\": 78, \"category_id\": 2, \"bbox\": [265.58795166015625, 276.4828796386719, 82.52987670898438, 11.03643798828125], \"score\": 0.7517799735069275, \"association_id\": 0, \"segmentation\": {\"size\": [453, 604], \"counts\": \"Zne34P>1000001O000O10000000000000O1O2Nnd20R[M3N1O1O000010O000001O0O10000000000001O00000000000000000000000000000000000000000000000000001O001O1O001O0O2O1Nigb3\"}}, {\"image_id\": 78, \"category_id\": 1, \"bbox\": [158.41795349121094, 188.91566467285156, 62.82637023925781, 19.02978515625], \"score\": 0.6254695653915405, \"association_id\": 0, \"segmentation\": {\"size\": [453, 604], \"counts\": \"^X`23P>3L4O0O1000001O0000000010O00000010O0000001O00000000000000010O0002N1O001N101O1OcWY5\"}}, {\"image_id\": 78, \"category_id\": 1, \"bbox\": [418.2832946777344, 152.4264373779297, 24.726654052734375, 262.44525146484375], \"score\": 0.5997180938720703, \"association_id\": 0, \"segmentation\": {\"size\": [453, 604], \"counts\": \"]Uj5P1R<W1SOj0TOk0H8D;XNcLXHn3_7]1M6E4Ac0OL8jNW1^Oc0YK`G_3i8YLPHf2V:^MlDQ1WQZ2\"}}, {\"image_id\": 78, \"category_id\": 1, \"bbox\": [340.6418151855469, 39.113243103027344, 238.68020629882812, 317.36370849609375], \"score\": 0.445709228515625, \"association_id\": 0, \"segmentation\": {\"size\": [453, 604], \"counts\": \"i]^54P>5K3M101O0O101N10000O10000O2O0O10000O1O101N1O1O1O100O2O0O2N2N2N2N2N2N2N3M5K5L4K3M3N1N2O1O1N3N2N1N2O1O0O2O010O00001O0001O0001O00001O00001O00001O0000001O0000001N2O1N1O2N2N2N2O0O2O2N2N2N3L5L2N2N1O2M3SFTL]9\\\\4N1O2N2N4L2NO10000000000O10000O100O2O0O100O1O2N1O1O2N1O2N1N3L4M2N2O2M200O101N100O1O1O2N1O1O1O1O1O1O1O1O1O1O1O1QNTEd0m:[OUEd0k:]OSEc0n:]OQEd0P;X10O10000O100O1O1O2M2M3N2J6H8H8@`0N3N1N2O1N2O2M2O2L6J;@[n:\"}}, {\"image_id\": 78, \"category_id\": 1, \"bbox\": [64.67779541015625, 195.8501434326172, 
39.802825927734375, 79.49794006347656], \"score\": 0.4301452040672302, \"association_id\": 0, \"segmentation\": {\"size\": [453, 604], \"counts\": \"fgn05n=5K3M4NO03N4K4PCEo;d0eCAY<W1B=OC\\\\DZNe;^1fD^NZ;a1f0O100001O3M3M5K2N_NlCS1R<iN[DP1e;lN_D7E8e<EaC8^<EiC8W<EPD5R=N1O100N3MTjm6\"}}, {\"image_id\": 78, \"category_id\": 1, \"bbox\": [23.272090911865234, 183.9818115234375, 38.8756103515625, 118.55026245117188], \"score\": 0.17098398506641388, \"association_id\": 0, \"segmentation\": {\"size\": [453, 604], \"counts\": \"^h?2l=c0@5M2O11O;E6Iclg7\"}}, {\"image_id\": 78, \"category_id\": 1, \"bbox\": [95.93607330322266, 118.78131103515625, 384.6614685058594, 199.5977783203125], \"score\": 0.1603643298149109, \"association_id\": 0, \"segmentation\": {\"size\": [453, 604], \"counts\": \"V[h17l=5L3M2O1N2O1N101O001N2O001O1O1O1O00000000O2O001N10001N101N1O1O2N1O2O0O2N1O2N5J\\\\P;1boD4O1O001N2O1O1O1O2N2[C^Ob;e0ZD^Oe;c0XD@g;b0UD@k;a0SDAl;a0PDAP<b0lC_OT<d0hC]OX<e0eC\\\\O[<W101O00O1000000000000000001O0000000O2O001N3N1O1N3N1N2O1N101N2O010O001O1O2N1O001O1O0010O00000001O00000010O2N5K1O2N100O1O10O010O01O010O1O100O1O10N100O100O10000O10000OhDjNc9V1]FjNc9V1\\\\FlNc9T1]FlNd9R1^FlNd9S1g1O2O0O1O2O0O101O000O10000O101N100O1O100N2O100O100000001O0001O01O0000000010O00000O10000000000O100000000O1001O000001O0000001O0010O0001O0000001O000000000000001O01O001O001O1O00100O10O10O0HaBJ`=5aBJ_=5bBL]=4cBL^=3bBM^=3bBM^=3bBM^=3bBM^=3bBM^=3bBM_=2aBN_=2aBN_=2aBN_=1bBO^=1aB0]<JaD5RO2\\\\<JaD3UO1[<LaDOXO4W<MmD1T;OnDOR;1`12O0000000000000O2OO10001O00000O100000000O101OO10000O2iBMZ<3fC0W<0hC2W<OhC3U<NkC2U<NjC4U<MjC3V<MjC3V<MiC5V<LiC4W<L]CI2;a<LiC4W<LiC4W<KjC4X<KiC4W<JkC6R=01O1OlNI\\\\D5c;M^D1b;O_D0`;1aDN^;3bDL`;3[1MVfn1\"}}, {\"image_id\": 78, \"category_id\": 2, \"bbox\": [255.19668579101562, 277.40185546875, 52.609283447265625, 10.276092529296875], \"score\": 0.13804824650287628, \"association_id\": 0, \"segmentation\": {\"size\": [453, 604], \"counts\": 
\"bgc31S>2O00001O000O100000001O0O10000000000000N3Nod2OR[M3N2N0000001O04LO101M200O1000000001OaeR4\"}}, {\"image_id\": 78, \"category_id\": 1, \"bbox\": [421.64178466796875, 69.85974884033203, 49.76593017578125, 327.50439453125], \"score\": 0.1155070886015892, \"association_id\": 0, \"segmentation\": {\"size\": [453, 604], \"counts\": \"\\\\fk54S>MRIk0f3XOeE1m0P1X9=eE1o8fNoFS4n7e1MXNdHhL]7l2jIWL]6U3f2WOh0nNW1jN]N1fi5<[XJ7I5I6L4M3N00L3N4O0@`0K6I8H61N3M21Lhl0L]ji1\"}}, {\"image_id\": 78, \"category_id\": 2, \"bbox\": [298.75164794921875, 310.0595703125, 225.4991455078125, 84.41091918945312], \"score\": 0.08549186587333679, \"association_id\": 0, \"segmentation\": {\"size\": [453, 604], \"counts\": \"bRU4;i=3N1N2O001O1O00000O2O000lN]OfDc0Z;_OdDa0[;AdD?[;BeD?Z;BeD>Z;CfD=Z;DfD;Z;EfD;Y;FgD:Y;FgD:X;GiD8W;HiD9U;HkD8U;HkD8U;HkD8U;HkD8U;HkD8U;HkD8T;HmD8S;HmD8S;HnD7R;InD7R;InD7R;InD7R;InD7R;InD7R;InD7Q;JoD6Q;JoD6Q;JoD6Q;JoD6Q;KnD5R;KnD5Q;LoD4Q;LoD4Q;MnD3R;MnD3R;MnD3R;MnD3R;MnD3R;MnD3R;MnD3R;MnD3R;MoD2Q;OnD1R;OnD1R;OnD1Q;0oD0Q;0oD0Q;0oD0Q;0PEOP;1PEOP;1PEOP;1PEOP;1PEOP;1PEOP;1PEOP;1PEOP;1PEOP;2oDNQ;2oDNQ;2oDNQ;2oDNQ;2oDNQ;2oDNQ;2oDNQ;3nDMR;3nDMR;3nDMR;3nDMR;3nDMR;3nDMR;3nDMR;3nDMR;3nDMR;3nDMR;3oDLQ;4oDLQ;4oDLQ;4oDLQ;4oDLQ;3PEMP;2QENo:1REOo:ORE1n:NSE2m:NSE2m:NSE2m:MTE3l:MTE3l:MTE3l:MTE3l:MTE3l:MTE3l:LUE4k:LUE4k:LUE4k:KVE5j:IXE7h:HYE8g:HYE8g:BPD2Y1<h:@PD3Y1=l:@UE`0Y<N7GUBL^R7Oc[I2O2WNN]E2_:3`EM_:5_ELa:5_EJa:6_EJ`:7aEH_:9aEF_::aEF_::aEF_::bEE^:;bED_:<f1O000000000000001O00001O0000000001O0001O0000000001O000000000000001O0O10001O000000000000001N100001N1000000O101O000O100O2O0O1O101NkmW1\"}}, {\"image_id\": 78, \"category_id\": 1, \"bbox\": [236.9393310546875, 10.540247917175293, 367.0606689453125, 69.05278778076172], \"score\": 0.06366244703531265, \"association_id\": 0, \"segmentation\": {\"size\": [453, 604], \"counts\": 
\"bYY35o=2O1O1O00001O0000001O00001O000000001O000000001O0000001O00001O00000000001O0000001O0000001O000000001O00001O0000001O000000001O001O1O2N1O1O00001O00001O001O002N2N2N001O001O00001O001O001O3M2N0000001O000000001O00001O1O1O1O001O00000000O1000000000000O1000000O10000O100O100O1O1O100000000O10000O100O100O10000O10000000000O100000000000000O1NTCkNl<U12RORCd0o<\\\\OSCb0m<]OTCc0l<]OTCMMa0o<AVCMKb0o<AVCMKb0o<AVCMKa0P=BUCMKa0P=BUCMK`0Q=CTCMK`0Q=CTCMK`0Q=CTCMK`0Q=CTCMK?R=ERCLL?R=FQC>o<BQC:M@S=7oB9T=GkB:U=FkB9V=HiB8W=HiB8W=HiB8W=;0000000000000000000001O00000000O100000000000000O10000000000000000000000000000000000O100000000000000000000000000000000001O000000001O0000001O001O001O001O00001O0000001O00001O001O1O0000001O00000000001O001O00001O000000001O001O001O001O00001O00000000000000O10000000000000000O100O100O1O1O1O10000O1000000O100O1O1O1O10000O100000000O10000000000000000O100000000O2N3LD\"}}, {\"image_id\": 78, \"category_id\": 2, \"bbox\": [267.92584228515625, 369.4463806152344, 259.674560546875, 83.55361938476562], \"score\": 0.06274376064538956, \"association_id\": 0, \"segmentation\": {\"size\": [453, 604], \"counts\": \"amf3]2f;4M10001O0000000000000000000000000000000000000000000001O000000000000001O0000000000000000000000000000000000000000000000000000000000001O00000000000000000000000000001O00000000000000000000001O00000000000000000000000000000000000000000000000000000000000000001O0000000000000000000000000000000000001O1O1Od0\\\\O1O1O1O00O100O10000O1000000O1O1O1L4M3O100O100001O001O001O0000001O0O101O001O0O2O00001N10001O0O10000O10000O10000O100O10001N10000O1O2N1O1O2O000O101N1O1O2N1O101N10001N1O2N1O2N100O101O0O2O1N101N10001N101O0O2N3MWhT1\"}}, {\"image_id\": 78, \"category_id\": 2, \"bbox\": [387.23382568359375, 325.6577453613281, 131.2679443359375, 31.83709716796875], \"score\": 0.05043499916791916, \"association_id\": 0, \"segmentation\": {\"size\": [453, 604], \"counts\": 
\"i`[511:c=:L001OO10000001O001O000000O100O100IdBE\\\\=:eBF[=9fBE\\\\=2_B55I\\\\=2_B5h=KXB5h=32FVB7j=IVB6k=JWB4i=LXB4g=LZB1LOQ>0oA0c`74RmHO]B7`=9N0000O1000000001O0000000000000000O10000000000000000000000000000000000000001O000000001O000000000001O000001O00010O00000O1000000O100O100O100O100O100O10000O1000000O2OSgU1\"}}, {\"image_id\": 79, \"category_id\": 1, \"bbox\": [137.75784301757812, 92.87965393066406, 71.1510009765625, 245.55165100097656], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [356, 297], \"counts\": \"\\\\ja1h0Z:5Kb0\\\\O=F7Jj1UN7Je0[O?A7I[1eN2N4K4M1O00001O2N2N1O1O001N1000000001O0000000000000000000O1000000O101N100O1N2O1O1O1O1K5M5I6`N\\\\LnJW4h2_KfNg5Fh3_Oc0BSWP1\"}}, {\"image_id\": 79, \"category_id\": 2, \"bbox\": [193.6814422607422, 263.6546325683594, 92.83775329589844, 71.08255004882812], \"score\": 0.999992847442627, \"association_id\": 1, \"segmentation\": {\"size\": [356, 297], \"counts\": \"aST22l:6B>O1N2I7N2O1000000000O010O1000O010O10O100O1000O1O1000O100O01000O10O01O0100O10O01000O01000O1000O010O010000O100000O100O10001N100000O10O1O01O10O01O010O1O1O100O1O1O1J6O1N2O2N1O1O2N1Oka3\"}}, {\"image_id\": 80, \"category_id\": 1, \"bbox\": [0.0, 116.61481475830078, 286.114013671875, 172.24945068359375], \"score\": 0.9999997615814209, \"association_id\": 1, \"segmentation\": {\"size\": [391, 640], \"counts\": 
\"`Z1356_;`0I4L2M3L4L3N3N2N2N2N2N101N2O0O2O1N2O001N2O1N2O1N101N2O1N101O1O1N3N1N3N2M3M2N3N2M3N2M2O0O2O0O2M2O2O0O2O1N101N1O2L3M4N2N1O2O0O2O0O2N2N2O1N2N3N1O1N2O1O1N2O1O2M2N2O1O1N100000001O0O1000000000000000000000000000000000000000000000000001N10000O1O100O1O100O1O101N1M3L4N2N2O2M2O1O2M2O1M4M2N2O1O2O0O1O101N2N1N5L6jLVGd2Y9M2N3M102N1N3N1N3N0O2N2N1N3M2N20001N101N10kFVNn7h1PH\\\\No7c1oGbNo7]1lGkNS8T1kGoNT8P1kGSOT8g0oG\\\\OQ8d0lG@T8>jGFU8:iGIV86iGMV82kGOU80jG2U8MkG4U8KlG6S8ImG8S8GmG:S8DnG=R8@PHb0o7^OQHb0o7]OQHd0o7\\\\OQHd0o7\\\\OoGg0P8YOoGh0Q8WOPHi0P8WOPHi0P8WOPHh0R8WOnGh0S8XOmGg0T8YOlG?YOiNm8g0jG2k8NUGLT91lFMY90gFEe9:[FEf9;ZFDh9;XFEh9;XFEh9;XFDi9=VFCk9<VFCj9=WFBi9>\\\\F]Od9c0Q1OPE^O[:b0f0O0D]D4d;GaD7i;O1N2N2NnSm4\"}}, {\"image_id\": 80, \"category_id\": 1, \"bbox\": [140.80386352539062, 131.5014190673828, 477.3214416503906, 201.8990020751953], \"score\": 0.9999983310699463, \"association_id\": 2, \"segmentation\": {\"size\": [391, 640], \"counts\": \"]jf11Q<6L4M3M2O2N1O2O0O100O2O000fD\\\\OP;f0lD]OT;l0O1N101O001O1N101O6J3L2O1O1O2N4K6K2N3M2M2O1O1O1N101O1N101N2O1O1N2O2M2O1N2N2O1N2N1O2O001O1O001O1O1O1O1O1O2N1O1O2N1O1O2N1O1O2N2N2N2N2M101O00001O0000001O00000000000001O0000000000000000001O0000O100000000000000000000000000000000O1000000O1000000O1000000O100000000O1000000O1000000O10000O10000O100O100O1O1O1O1O1N2N2N2N2M3M3N2O100O100O100O1O1O1O1O100O1O100O10000O10000O010O10O010O0I8K5L3N3N2O00100O10000O010O100O1O1O100O100O010O1000000O1000000000O100000000000000000O10000000000000000O1000000000000O100000000O10000000000O2O00000000000O1000000000000000000000000000O1000000000000001O00000000000000000000000000000O10000000000000001N100000000000000O100000000000000000000000000000000001O0000001O00000000001O00000000001N101O001O1O4K6K3M2N1N2O001O0O20O0001O010O1O001O010O001O000010O0000000000000000000000000000001O000000001N100O2O0O2N2N1O2O1N1O1O2O0O100O2O0O1O2N1N3N3J9dMTIhNZ7k0X2eNaE1J9m:AlE1^Pd0\"}}, {\"image_id\": 80, \"category_id\": 2, \"bbox\": [116.7235107421875, 289.0360107421875, 458.030517578125, 
101.3074951171875], \"score\": 0.999981164932251, \"association_id\": 2, \"segmentation\": {\"size\": [391, 640], \"counts\": \"_P]1l0Y;5L3M2N2O1O1N10000O2O0O1O1O1O2O0O1O1O10001N1000000O2O000O100O101N100O100O1000000O1000000O1000000O10000O1000000O100000000000000000000O10000000000000000000000O100000000000000O1000000001O00001O00001O001O001O001O0000001O00001O2N3M=C3M1O1O001N101O00001O001O0O2O005K5K1N2O00001O0000000O2O00000O101O0O101N10000O2O00000O10000O010O100O100O10O1000O1000000000O1000O10000000O1000O100000O01000O100O10O01O1O1O1O1O0100000O100000O010000O0N3K5N2O1O100O010O100O100O010O00OM3]Oc02O01O01000O1000O10O1000O01000O010000O10O10O1000000000O100000O01000000O100000000000O10O1000O10000O10000O1000000O10O1000O100O01000O100O0100000O100000000O10O100000O10O1000O01000000000000O1000O100000000000O10000000000O10000000000O1000000000000000000O10000000000O10000O100000000O1000001O000000000O101O0000001N1000001O0O101O001O2N4L3M1O3M1N2O0000001O0000001N101O1N3N001O0O10001O00000O10000000000O10000O1000001N10000000000O101O00non0\"}}, {\"image_id\": 80, \"category_id\": 2, \"bbox\": [1.2145333290100098, 261.21282958984375, 178.5194854736328, 39.36865234375], \"score\": 0.9999790191650391, \"association_id\": 1, \"segmentation\": {\"size\": [391, 640], \"counts\": \"kd0d0b;2N2O0000000000000000000000000000000000000000000000000000000O1000000000000O10000000000O100000000000O0100000000O1000O10O1001O001O00001O001O3M3M0000001N3N2N1O00001O001O00000O10001O000000000000000000000000000000000000001O000O10001O001O1N1000i`10\\\\kN1hC2L4N2O100O1N2M3O100000O01000000000000O1000O1000000000O10000000000000000O10000000000000000000000000O2O3KX\\\\_5\"}}, {\"image_id\": 80, \"category_id\": 2, \"bbox\": [3.8398637771606445, 254.53810119628906, 48.12286376953125, 10.307601928710938], \"score\": 0.543256402015686, \"association_id\": 0, \"segmentation\": {\"size\": [391, 640], \"counts\": \"mh16P<2O00O10000000O1000000000000000000000000000000000000000000001O001O1O1N2O1OeRT7\"}}, {\"image_id\": 80, \"category_id\": 
2, \"bbox\": [279.6725769042969, 280.2405090332031, 283.4997863769531, 93.783203125], \"score\": 0.06392386555671692, \"association_id\": 0, \"segmentation\": {\"size\": [391, 640], \"counts\": \"n`b31U<10001O0O10000O1O100O1O1O100O1000000O100O1O100O100000O0100N2O1L30100O01000O1O1N1H9I7L4N1O2O1N10000OO10020O10O10O1000000O0100000O1000O10000000000000000000O1000O10O100O1000000000O100000O10O10000O10000O1000O0100O01000O10000O100O010O10000O1000000O10000O1000O0100O10O01000O10O100000000000O1000O10000000O10000O100000000000000O100000000000000000000O10000O10000O1000000000000000000000001N10000O2O000O101O000000001O000000001O0O2O1O1O4K7J2M2O1O001O1O1O1O1O1O1O1N101O0000001O0O101O0O2O00000O1000000000000000000O101O0O100O100O2Olon0\"}}, {\"image_id\": 81, \"category_id\": 2, \"bbox\": [0.365234375, 339.6561584472656, 290.1236877441406, 114.432373046875], \"score\": 0.9999997615814209, \"association_id\": 1, \"segmentation\": {\"size\": [533, 400], \"counts\": \"gl0:Z`02O1N2N2O1N101O1N2O001O0O2O001O001N101O1O1N2O1O00001N10001O1O001O0O101O00000O100O2O000O2O00000O0100O0100O010O1000O100O1^Oo@^OS?`0b0O1000O0100000000000000000000000000000000000000000000000O101O0O101N101O000O101N100@VOTAl0i>XOSAj0m>`0N1O2O000000001N1000000000010O01O100O10O00010O000001O001O001O001O00010O00000100O00010O00001O00001O010O001O001O000O2O001O2N1O1O0O2O00001O000O2O1O001O000000000000000000[OjAkNV>T1oAhNQ>W1VBbNk=^1f000O10000OeAcNa=\\\\1^BiN^=V1bBmN\\\\=S1dBoNZ=Q1fBPOZ=n0gBROY=n0jBnNX=P1kBnNV=Q1X1N7Hom:UOhRE9L6J2O0O100N2O2N1O1O10000000000001O00001O00000000001O000001O0000001O00010O001O100O10O01O0001O01O0001O01O00010O01N2N3L6Iglh1\"}}, {\"image_id\": 81, \"category_id\": 1, \"bbox\": [63.19682312011719, 112.58100891113281, 303.1639404296875, 332.06781005859375], \"score\": 0.9999986886978149, \"association_id\": 1, \"segmentation\": {\"size\": [533, 400], \"counts\": 
\"WXW1?R`0>B6K5M3M2N3M3M7I:F:F6I6K4L5K4M3M3L3N3M3M4K4M3M2N2N1N101O001O000O2O00000000O100O1O1N2O1N2O1O1O1O1N2N2M3M3M3K5L4M3N2O1N2O1O1N2N2N2N2N2N2O1O1O10000O100O10000O100001O1O1O1O1O1O1O1O001O1O2N1O3M2N3M3M2N2N1O2N1O1O2N2N2N2N2N2N2N1O1O1O1O1O1O2N1O2N2N2N1O1O1O1O1O2N3^F]Kk6i4lH`Ko6d4iHeKR7_4fHgKX7]4bHfK]7]4^HeKb7]4[HeKc7_4YHbKf7b4UH`Kj7e4PH^Kn7e6N2O1N2O2O0O100O1O010O001N[NWHaJh7]5cH[J]7b5jHZJV7e5oHVJR7j5QIQJQ7n5SInIn6Q6VIjIl6T6j1N2[OkEfJW:X5`0O3O31N3N2O1N3M2L5L4M201O001O01000000000O10001O000000000000000000001O0000000001O000nKjEe1W:iM^FS2b9hMfFU2[9eMnFW2R9fMRGY2o8dMTG[2l8dMVG[2k8cMWG\\\\2k8`MWG`2l8\\\\MVGc2m8XMUGg2P9PMUGP3X;0001N10000O1001O1oDlL_8U3^GnLa8S3[GQMd8P3YGSMf8o2UGUMj8l2TGVMk8l2RGVMm8l2PGVMo8l2nFVMQ9j2oFVMQ9i2PGWMP9i2PGVMQ9i2QGVMo8h2SGWMn8h2SGXMm8f2VGXMk8g2WGXMi8g2YGWMh8g2\\\\GWMd8h2^GVMc8i2`GTMa8l2bGoL`8P3iElLS<R3oCnLQ<Q3PDoLP<Q3PDPMo;o2RDRMm;m2SDUMl;j2UDZMg;e2YD`Mc;_2^DbMa;^2^DcMb;]2\\\\DeMd;[2[DfMe;[2WDhMi;X2UDkMj;U2UDlMk;T2TDmMl;S2TDmMl;T2RDnMm;R2SDnMm;R2RDoMn;Q2RDoMn;Q2RDoMo;Q2PDoMP<Q2PDnMQ<R2nCoMR<R2mCnMT<Q2lC`MIM[<c2kC_MNLW<f2iC^M2KV<f2gC_M5IU<i2eC]M9HS<j2cC^M=FQ<k2bC_M?CQ<m2_C`MV=_2jBaMW=^2hBbMZ=l21N101N101A?K5I7I8Dc_a0\"}}, {\"image_id\": 81, \"category_id\": 2, \"bbox\": [204.1455078125, 348.2021789550781, 123.74114990234375, 57.794677734375], \"score\": 0.24720646440982819, \"association_id\": 0, \"segmentation\": {\"size\": [533, 400], \"counts\": \"hbh3f0n?2O1N1O10000O100000000000000000001O0001O01O000000000001O01O01O00010O001O01O010O010O001O10O0001O10O01O010O1O0010O0100O1OO101]O\\\\@2e?N[@2e?N\\\\@1e?N[@Oh?1Z@Lg?4[@Ej?;9O000100ON2N3KcQ`1\"}}, {\"image_id\": 81, \"category_id\": 1, \"bbox\": [208.23336791992188, 167.53427124023438, 169.292724609375, 244.0931396484375], \"score\": 0.08209706097841263, \"association_id\": 0, \"segmentation\": {\"size\": [533, 400], \"counts\": 
\"Sd]338]1`>j0C1O100O2N1O1kKcMPJ^2l5gMRJZ2i5kMWJT2g5oMbJh1\\\\5ZNdJe1[5\\\\NfJc1Z5]NgJb1X5_NjJEgK]1_9POXKl0h4UO[K]OXL2\\\\8b0[61N2N2M3N2WAQOj=P1UBSOj=m0UBUOj=k0UBWOi=j0VBWOj=h0VBYOj=g0TB\\\\Ok=d0TB]Ol=c0SB_Ol=`0SBBm=>RBDm=:TBHk=5UBOj=0TB4k=KSB9l=HPB<o=DoA?P>AoAb0o=_OoAc0P>]OoAg0n=ZOPBj0b1^Nb:d2WE`Mi:d2QE_Mn:d2nD^MQ;f2iD\\\\MW;g2dD[M\\\\;g2_D]M`;e2_DZMa;j2]DTMc;S3VDmLj;T3VDkLj;V3UDjLk;V3VDjLi;W3VDiLj;W3VDiLj;X3UDhLk;X3UDhLk;X3UDhLk;X3UDhLk;Y3SDhLm;Y3QDhLo;o30000001O1N2fKXDe3d<H2N2M2O2M2O2M2N2N1000000001O0000000TERMQ8n2lGXMQ8i2jG]MT8c2jG`MU8`2jGaMV8a2fGaMZ8`2bGcM^8^2^GdMc8^2YGdMf8^2XGbMi8`2SGbMm8`2oFaMQ9c2hFaMW9T3bE^M^:]4O1O0010O001O1N2O1N2L4[Oe0H8N1O2N2N2cNaDbMa;Z2jD]MX;`2nD\\\\MS;d2nDZMS;f2nDXMS;i2mDVMT;i2mDVMS;k2SEmLn:T3Y1002`CRMU;P3dDWMZ;T4O1N2N11O0100O1N2O1N1TEUKl9m4lE]KQ:f5M1O0100O2N100O1O1O0O2N1oMUFWMm9a2`FWMc9e2cFVM`9g2dFVM_9f2eFVM^9f2Z2L5K5L3N3N3Ja0ROZW;\"}}, {\"image_id\": 82, \"category_id\": 2, \"bbox\": [6.356079578399658, 276.2588195800781, 138.1328582763672, 53.270599365234375], \"score\": 1.0, \"association_id\": 2, \"segmentation\": {\"size\": [333, 500], \"counts\": \"mW2g0c96L100O100O10000O10000O101O0O10000O10000O10000O1O1000000O1000000001O7I2N1O00O1O100O1O1J6K5N2O11O0000JcN]G\\\\1c8dN]G\\\\1c8dN\\\\G]1d8dN[G\\\\1e8dN[G\\\\1e8dNZG]1f8500000000000000000000000bNYGV1h8eN]GY1d8fN]GZ1k801O00000000000000000000O10000000000000000000000000O10O1000O10001M2O10000O1O100O2O0O2M3O0O2O4K2M3N2OO01N1N210O00N1N210O20O1O1N3N3N2Lf_c3\"}}, {\"image_id\": 82, \"category_id\": 1, \"bbox\": [19.714609146118164, 207.4920654296875, 137.17111206054688, 94.84329223632812], \"score\": 0.9999998807907104, \"association_id\": 2, \"segmentation\": {\"size\": [333, 500], \"counts\": 
\"`g6`0k97J2J5N3N2N2O0O1O2O0O10001O1O1O001O00l0SO3N1O0O2O0O100000O1N2N2O1O1G9E;N2000000000000001O1O1O1O1O4L1O0O2OO100O1O1O1M3O0O2O100O1O1O10001O00001O0001O00001O000000000000000000000000O100O100000O100000000000001O1fGaNd7_1WHiNf7X1XHkNf7V1XHlNg7U1VHmNj7U1RHnNm7i100O10O1O1O1O001N2N2O1O1O1N2N2O1O100A?L4O1O100O2N2N101N2MbZ_3\"}}, {\"image_id\": 82, \"category_id\": 2, \"bbox\": [225.48587036132812, 267.7069091796875, 274.5141296386719, 62.460784912109375], \"score\": 0.9999977350234985, \"association_id\": 1, \"segmentation\": {\"size\": [333, 500], \"counts\": \"`f\\\\2?8Jm8Q1L3O1N2O0O101N2O0O2O00001O000O10000000000000000000000000000000000000000000000000000000000001O0000000000001O0000000000000000001O000000001O000000001O0000001O001O001O00001O00001O001O1O0000001O001O2N2N001O00001O001O001O00001O00000000000000001O0000000000O10000000000000000001O000000001O000000000000001O000000000000000000000000ZOTOkGl0T8VOjGk0U8VOkGj0T8XOjGi0V8ZOfGg0Z8f000O10000000000000000000000O11O0000000000001O0000001O00001O001O00001O0000001O1O001O1O0000001O002N3M2N1O00001O00001O1O1O1O00001O00001O1O1O1O1O000O101O0001O01O1O1O0000001O0000001O1N101O00001O0O2N2MXF\"}}, {\"image_id\": 82, \"category_id\": 2, \"bbox\": [242.7687225341797, 209.7034454345703, 41.32963562011719, 6.0883026123046875], \"score\": 0.999995231628418, \"association_id\": 0, \"segmentation\": {\"size\": [333, 500], \"counts\": \"[W_21[:2O0O10000000000000001O00000001O00000000000000000000000000000O1000000000000O10b[V2\"}}, {\"image_id\": 82, \"category_id\": 1, \"bbox\": [231.6154022216797, 222.97979736328125, 268.38458251953125, 73.13272094726562], \"score\": 0.9999924898147583, \"association_id\": 1, \"segmentation\": {\"size\": [333, 500], \"counts\": 
\"lk`23X:4M3M3N1O1O1N10001O001O1O2N2N0hFZOi8h0SG]OJJl8j0VGHe89YGHg8n01000000000000000000000000001O000000000000001O00000000000000001O000000001O00O100lN[G`0e8_O]G`0c8@^G?b8@`G?`8AaG>_8AcG>]8BcG>]8BcG>]8BcG>]8CbG?\\\\8AeG?Z8AfG`0Y8@gGa0X8_OhGa0X8_OhGb0W8^OiGb0W8^OiGc0V8]OkGb0U8^OkGc0T8]OmGc0R8]OnGd0Q8\\\\OoGe0P8[OPHf0o7ZOQHf0o7ZOQHf0o7YORHh0m7XOSHh0m7XOTHg0l7YOTHg0l7YOTHh0k7XOUHi0j7VOWHk0h7TOYHl0g7TOYHm0f7RO[Hn0e7SOZHm0f7SOZHn0e7RO\\\\Hm0d7TO[Hl0e7TO[Hm0d7SO]Hl0c7TO]Hl0c7TO]Hm0b7SO^Hm0b7SO^Hm0b7SO^Hm0b7SO^Hm0b7SO^Hm0b7SO^Hm0b7SO]Hn0c7SO\\\\Hm0d7SO\\\\Hm0d7SO[Hn0e7RO[Hn0e7RO[Hn0e7SOYHn0g7ROXHo0h7ROVHo0j7ROQHR1o7POmGR1S8d00000000000000000000000000000000000000VOPHWOP8e0VHYOj7f0XHYOh7g0YHXOg7h0ZHWOf7h0[HXOe7h0\\\\HWOd7i0\\\\HWOd7h0^HWOb7h0`HWO`7i0bHUO^7j0dHUO\\\\7j0eHVO[7j0fHUOZ7k0fHUOZ7l0fHRO[7n0fHQOZ7P1Q12N1O1nFgNn8]1O0000O1G9N2K5O1O1O10000O1000000O1000000O10001O00000000001O00000010O0001O0010O000001N101O000O101O00000O10000O2O0O10000000001O0O10000000000O2O0000000O101O0000001N10001N1Ol[2\"}}, {\"image_id\": 82, \"category_id\": 2, \"bbox\": [381.13226318359375, 210.48568725585938, 63.761688232421875, 5.328277587890625], \"score\": 0.9999610185623169, \"association_id\": 3, \"segmentation\": {\"size\": [333, 500], \"counts\": \"keo31\\\\:000000001O00001O00000001O0000000001O0000000000000000000000000O100000000001O00001O0000000000000000mdb0\"}}, {\"image_id\": 82, \"category_id\": 1, \"bbox\": [246.2325897216797, 220.4413604736328, 131.96693420410156, 66.62120056152344], \"score\": 0.9947804808616638, \"association_id\": 0, \"segmentation\": {\"size\": [333, 500], \"counts\": 
\"ba`21Z:4K4O1N2O1N2O0O2O0O2O001N102N1cFCh8>VGEh8<SGNg8m0O1O1O0000000000O100O100O1000000000O10000000001O001O00000000001O0001O01O00000000O1UO\\\\GMd8O`G0a8OaGO`81`GO`80bGO^80cGO^80cG0]80cG0]8OcG2]8NcG2]8NcG2]8NcG3\\\\8LeG4\\\\8KdG6[8JeG7Z8IfG:W8FjG9V8GjG:V8DkG=T8ClG>R8CnG>Q8APHb0m7^OSHb0m7^OSHc0l7\\\\OUHd0k7\\\\OUHd0k7\\\\OUHd0k7\\\\OUHd0k7\\\\OUHd0k7\\\\OUHd0k7\\\\OUHd0j7]OUHd0k7\\\\OUHd0k7\\\\OVHc0j7\\\\OWHd0j7[OVHe0j7[OWHd0h7]OXHc0h7]OXHc0h7]OXHc0h7]OXHc0h7]OXHc0h7]OXHc0h7]OXHc0h7]OXHc0h7]OWHd0i7[OXHe0h7[OYHd0g7\\\\OYHd0g7[OZHe0f7ZO\\\\He0d7[O\\\\He0d7[O\\\\He0e7ZOZHg0f7YOZHg0f7SOeG1e0l0g7ROdG1f0m0f7QOfG0e0o0e7QO_HP1_8000XOTOoGl0o7YOmGh0P8^OkGd0U8]OiGd0W8i0001O000GbGcN_8X1aGcN13c83gG<M@hfW1\"}}, {\"image_id\": 82, \"category_id\": 1, \"bbox\": [385.20806884765625, 168.4164276123047, 51.808258056640625, 43.61407470703125], \"score\": 0.9602708220481873, \"association_id\": 3, \"segmentation\": {\"size\": [333, 500], \"counts\": \"obQ42[:1O001O00000000001N11O0000000001O000O100000001O00001O001O01O0000kFHU87R1O1N2OQod0\"}}, {\"image_id\": 83, \"category_id\": 1, \"bbox\": [80.51625061035156, 180.64178466796875, 122.84771728515625, 141.580078125], \"score\": 1.0, \"association_id\": 3, \"segmentation\": {\"size\": [434, 650], \"counts\": \"XWT1j0d<=F3L5I7J4N3N2M4L4L4L3N6TEgMR:Q3L3M1\\\\FgLm8^3jFgLU9k301N1000000O100O10000O10O10O01O1M3E;M3O1O1000000003M4L5K3M1O001O000000O1O2O0O1N3L3M4L4J6@`0J5N4N2N2N2N001O100O1O0000010OO10O010O010iNSEGo:6W1O2O10000O1000O10001O00O101O0000O1000000000O101O0N2O1N2O1O100O2O0O3G^Xm5\"}}, {\"image_id\": 83, \"category_id\": 2, \"bbox\": [118.40254974365234, 294.8625793457031, 110.6423110961914, 37.243621826171875], \"score\": 1.0, \"association_id\": 3, \"segmentation\": {\"size\": [434, 650], \"counts\": \"_Zb11^=5N0O10000000000000000001O0O01000000O1O1N20OEHTC168c<>O100O0100000O10O1000000O1000O0100000000O1000O101O00000000001O001O00001O1O1O001O2N001O0010O000001O000000000001O000O2O1O1O1N103M2N001N3N1O0O2O1O0O2O00001N10001O000O101O1N2N_ab5\"}}, 
{\"image_id\": 83, \"category_id\": 1, \"bbox\": [559.6797485351562, 199.95989990234375, 67.30426025390625, 53.25883483886719], \"score\": 1.0, \"association_id\": 2, \"segmentation\": {\"size\": [434, 650], \"counts\": \"lW_73]=4M1O2O1O2M1O3N1VCNQ<4mCOP<j001N1O100000000O10001O00O100O1_OWDYOi;g0ZDUOg;k0`00001OO2O00001O0O100000001O0jCPOj;Q1SDROl;P1PDROP<Y10O000001O0000010O000001O2M9oNXDGl;2hj:\"}}, {\"image_id\": 83, \"category_id\": 1, \"bbox\": [312.4535827636719, 193.41888427734375, 54.4842529296875, 48.74900817871094], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [434, 650], \"counts\": \"^kT4;T=<E4M<D2N2O0O1000000O010000O1F:O1O1O101O0001N110O:F2M2O000000001O010O1O0000O101DoCTOR<g0`0N1N3N2O1N2O2M3M5L1Mjbh3\"}}, {\"image_id\": 83, \"category_id\": 1, \"bbox\": [374.02606201171875, 192.24203491210938, 50.029388427734375, 73.900146484375], \"score\": 1.0, \"association_id\": 5, \"segmentation\": {\"size\": [434, 650], \"counts\": \"XTo46Y=:H9F7J7I7I4M2M2O2N3M6J3M001O000000001O001O10O1O1O2O0O3L3N2N1O1O1O1O1O1N3N1O2M2M3L8POmCKE0W`R3\"}}, {\"image_id\": 83, \"category_id\": 2, \"bbox\": [584.7735595703125, 245.92921447753906, 53.2547607421875, 7.764129638671875], \"score\": 0.9999454021453857, \"association_id\": 2, \"segmentation\": {\"size\": [434, 650], \"counts\": \"Sli72`=0O100000000000000000001O000001O00001O00001O0001O000000000000000O1O01000001O000000000000000OSf5\"}}, {\"image_id\": 83, \"category_id\": 2, \"bbox\": [163.35614013671875, 249.04270935058594, 59.29217529296875, 8.539291381835938], \"score\": 0.9995929598808289, \"association_id\": 4, \"segmentation\": {\"size\": [434, 650], \"counts\": \"mkW23^=3N001O0001O00000000001N01000000000000000000000000001O0O100000000000000O010000001O0O100000O100000O10QPf5\"}}, {\"image_id\": 83, \"category_id\": 1, \"bbox\": [113.46813201904297, 187.6605682373047, 117.9831771850586, 78.63594055175781], \"score\": 0.9993346333503723, \"association_id\": 4, \"segmentation\": {\"size\": [434, 650], 
\"counts\": \"R``13^10_:6[EOXOIg:9QFNXOIg:9QFO_:0bE0^:0bE4Z:KgE5Y:KgE6X:IhE;lN@Q;5RF>jN_OS;2RFb0jN\\\\OT;3QFa0kN\\\\OT;3PFb0lN[OT;3PFb0lN[OT;3oEc0mNZOT;3oE>lNFT;LPF=nNFR;MPF<oNGQ;NoE:QOIo:MPF:QOIo:NnE;SO_OL3S;3mE<TO]OO2P;6lE<TO\\\\O10P;8jE=UOZO21o:9iE<]O\\\\Oi:9hE<^O\\\\Oj::fE:@\\\\Oj::eE;A[Oj::eE;BYOj:<dE;CXOi:<eE<BXOj::dE?BWOj:=`E>m:AoDf0n:[OkDl0U;SOiDP1V;POkDP1V;nNjDV1T;hNmDY1S;eNmD\\\\1U;aNlD^1T;cNlD\\\\1T;dNmD[1T;fNlDX1S;kNkDU1U;lNgDW1Y;iNgDW1Y;jNfDV1Y;kNfDV1Z;mNbDT1^;`00O0MdDRN\\\\;m14A`0H8O001O1O10000000O100000001O0000001lCjNi;X1UDiNk;W1VDhNj;Y170kCgNQ<Y1oCgNQ<Y1400O2O01O010O1O00001O1O00100O1O1O@oC@Q<?PDCm;<VDCj;<WDDh;<XDDi;<WDCj;>TDBm;?RD@n;?YDZOj;b0ZD\\\\Ok;>WDAl;:XDCk;:iVe5\"}}, {\"image_id\": 83, \"category_id\": 2, \"bbox\": [346.99346923828125, 237.50970458984375, 32.441070556640625, 6.429901123046875], \"score\": 0.9929159879684448, \"association_id\": 1, \"segmentation\": {\"size\": [434, 650], \"counts\": \"`]e41a=1M20000001O000000010O0000000000001O000001O000001O0Omcb3\"}}, {\"image_id\": 83, \"category_id\": 2, \"bbox\": [409.6454162597656, 247.0316619873047, 24.806121826171875, 18.067489624023438], \"score\": 0.8892927765846252, \"association_id\": 5, \"segmentation\": {\"size\": [434, 650], \"counts\": \"\\\\^^52`=0O2N1O1O1F:O10000000000001O001N2O3M1N101N1NcYk2\"}}, {\"image_id\": 83, \"category_id\": 2, \"bbox\": [324.40936279296875, 235.9277801513672, 56.40594482421875, 7.4693756103515625], \"score\": 0.4654364287853241, \"association_id\": 0, \"segmentation\": {\"size\": [434, 650], \"counts\": \"ae[41`=1000000000000nm5OSRJ0O2O0O100001O000000010O00000000001O00000000000000001O00ZVb3\"}}, {\"image_id\": 84, \"category_id\": 1, \"bbox\": [143.7554473876953, 54.71254348754883, 161.5736846923828, 142.42645263671875], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [600, 521], \"counts\": 
\"dgi23bb0<@:U_OER?`0^@0\\\\?4_@1_?0_@3]?1^@5^?NY@<`0]N^>T3ZAQMd>b3O1O2N100O100O1O1O1O1O100O1O100O1O10lNlAdMS>[2RBbMn=\\\\2WBaMi=\\\\2_B^Mb=^2eB_M[=_2iB_MW=_2oB\\\\MR=c2b1O1M3M201O1O1O1N2O1O1O100O100O1000001O00000001O0f@SNi=o1SBVNj=k1SBYNk=h1PB^Nn=c1iAfNV>\\\\1eAgN[>Z1aAiN_>h20000000O1N2]OWARMn>m2?O1O1001O001O001O1O4L8H8H1O1O1O100O1O10O1N1O2O0O2O0O1O1H8@m@UMY?U2TAhMX?R2k0N2N2O0O2O0010O2i_ORN`?h2K3M101N2O1O2N2M2O1O001O1O00O010O2N1O100O1O1O4L2N1N2O1O1N6J3N1N2N3L5K5VO\\\\_OWOi`0`0l0M4L4KY^n3\"}}, {\"image_id\": 84, \"category_id\": 1, \"bbox\": [308.3033447265625, 266.2545471191406, 153.82205200195312, 277.6302795410156], \"score\": 1.0, \"association_id\": 2, \"segmentation\": {\"size\": [600, 521], \"counts\": \"gid5f0aa0Y2TN6K4M3M2N2M4N1O2M3N2M3M3M3N2M3M2O2M3M3N3L6K4REPKm7S5lGTKP8P5kGTKS8P5gGSKW8T5aGPK]8\\\\5UGgJj8e5gF_JY9d5bF^J^9f5[F]Je9l5kE[JU:T7O1000000001O00000O2N2N2mNfEbI_:[6Q1L4M3J6K5L3N30001N101O02^DVJP:n5gEXJX:S6SEYJk:j601O1M2N2O1O1O1O10O0100O100N01oN_EkIb:X5m1H8M3N3L4L4J5N3N2N1O2M3N1N30O3M3ZCWKX;l4bD_KV;d4eDbKW;a4cDeK[;m5N10001N1000O100O1O1O1O1M3M3H8L4O1N2N2M3L4M300O1O1O1O11O6J8Hi0WO3N0O0100O100O1O1N1^N^DnKc;f3UElKm:o3\\\\EkKe:S4_EiKd:S4bEfKc:V4R2A?I8K4L4L3L5L3N3N2L6H[YR1\"}}, {\"image_id\": 84, \"category_id\": 1, \"bbox\": [47.23221206665039, 289.5937194824219, 241.21255493164062, 160.55438232421875], \"score\": 1.0, \"association_id\": 3, \"segmentation\": {\"size\": [600, 521], \"counts\": \"dXU1<oa0f0bNVOT@Y1b?nNW@X1d?P1O1N2N2O1N4M8G4L2N2O1O1O001N2O001O001O0001O001O001O001O00001O00001O001O001O0010O00100O001O0010O01O1O1O1O001O00000000001O000000000000O10001O0000O010000O100O10000O100000000O100000000O10O1000O10000O10000O100O1000O0100O1O00100O010O10O10O10000O010O1O10000O10000O100000000O1000000O10000000000000000O1001O00010O001O0010O0100O2O0O10O01O1O100O01O010O1O1O010O0O101O001N2O1O001O1N101O1O1O1O1N2O0O2O2M3N4K2O1N2O1N103L6J:F9G5jNa^Oi0ia0N2N2N2_Oo]O8Zb0N2N2O1MPe[4\"}}, {\"image_id\": 84, \"category_id\": 2, \"bbox\": [128.390625, 429.7531433105469, 
238.26242065429688, 123.92245483398438], \"score\": 0.9999996423721313, \"association_id\": 2, \"segmentation\": {\"size\": [600, 521], \"counts\": \"mW]23bb05N1O1O001O0O2O001O001O001O10O01O001N101O1O1O2M101O001N101N1O3N1N2O1N2O001N2N2N2O2M101N101N2N101O0O2O001O0O2O001O000O2O0O100O10000O101N100O100O1O100O010O1O100O0O2O0O_Oo_O]NR`0b1T@XNm?h1a0001N101N1O2N1010O01O1O1O1O1O001O100O101N1O101O0O10001N1000001O0O10001O001N10001O00001N1000000000000O10000000000000000000000000O01000000000000000000O1000000000O1000000000000O1O2N1N2L4N2N2O1O101N100O1O2O0O101O1N2O1O1O2M4M1O1O0O2O1O5J6J5L2M2O2M3M6IobY3\"}}, {\"image_id\": 84, \"category_id\": 2, \"bbox\": [89.30059051513672, 156.35304260253906, 179.71270751953125, 47.6806640625], \"score\": 0.9999963045120239, \"association_id\": 1, \"segmentation\": {\"size\": [600, 521], \"counts\": \"hPP24db01N101O1O1O2N1O001O001O3M;k]OUOga0P1N1O001O00000000001O000000001O00000O2O0000010O00001O00010O00010O01O002N2N<RO\\\\^O0Xb0N001O0O`U83\\\\jG5K3O2M101O2N2N1O0000000001O00000001O00001O00001O01O010O01OO2O0ER^OLna0O[^OLi]63\\\\TJ1BNU^O3ha02U^O0ia02V^ONja03T^ONla04R^OMla05S^OKma0`000O2O01O00001O01O01BV^OLka02W^OMma0MW^O1Zb0N2N001O]e49ZZK1N2N1000000000O101O0O3Md_c4\"}}, {\"image_id\": 84, \"category_id\": 1, \"bbox\": [324.33221435546875, 112.82845306396484, 57.130584716796875, 58.50354766845703], \"score\": 0.9999881982803345, \"association_id\": 0, \"segmentation\": {\"size\": [600, 521], \"counts\": \"Sbn58_b02O001N101N101O3K6J3M3N2L3N2O1O1O1N2O1O2N2N1O2O1N10001O001O001O001O1O1O00000001O1O0000001O1POT_O2l`0I]_O3d`0Gc_O7^`0Ei_O7Y`0ET@0Za0OjVd2\"}}, {\"image_id\": 84, \"category_id\": 2, \"bbox\": [3.2157704830169678, 373.4750671386719, 93.50072479248047, 66.96005249023438], \"score\": 0.9999406337738037, \"association_id\": 3, \"segmentation\": {\"size\": [600, 521], \"counts\": 
\"dT22cb06L2O1O1O1O100O1O1O2N1O1O1O1N2N1K6M2O2M2O2N1O1O100O2O1O0O101N101O001O00001O1O1O1O1O001O2N1O001O001O2N1O00000000001O01N3N2N1O8G6J2O2M3N3L2N9B8L3O2M3N001N3Nl\\\\Q8\"}}, {\"image_id\": 84, \"category_id\": 2, \"bbox\": [168.32559204101562, 152.22039794921875, 107.22787475585938, 45.70121765136719], \"score\": 0.12986548244953156, \"association_id\": 0, \"segmentation\": {\"size\": [600, 521], \"counts\": \"VdY35ab05M1N3N1N2O001O0001O0001O00010O00001O0000001O0000001O1N11O000001OFU^OEnb78T]HIja09R^OKma06S^OJla06T^OJla06T^OJma05S^OKma05S^OKma06S^OJla06T^OJla06T^OJla07T^OHma07S^OJma05T^OKla04T^OLma02U^OM]b0N1O001O`e43[ZK201N4M2M11O1O001O0010O01O001O001O001O100Nnn_4\"}}, {\"image_id\": 85, \"category_id\": 1, \"bbox\": [19.55573272705078, 89.13557434082031, 66.10740661621094, 111.24020385742188], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [358, 359], \"counts\": \"Vj75o:4L4L4M2M5PF_OB7P9?XG0c8]1K5[Od0L4N2O1N101O002N001O0000000000000000000000000001O001O1N4L:F3M4M1N3M2N1O100YO\\\\GkNe8f0WHiNm7V1R1O1000000O101N3L4[Og0G_RQ3\"}}, {\"image_id\": 85, \"category_id\": 2, \"bbox\": [50.37279510498047, 189.4444122314453, 115.31574249267578, 28.245758056640625], \"score\": 0.9999998807907104, \"association_id\": 1, \"segmentation\": {\"size\": [358, 359], \"counts\": \"Wgb03R;1O2O000O1O1O2O0O1O1O100O100O100000001O0000001O00000001O000000000000001O0001O00000000000000000001O000001O000001O00000000000000000000O100000001O00000001O01O0000001O0000001O00000010O00001O0000001O000O2O1O0O101O0O101O0O2O0O2O1MkXS2\"}}, {\"image_id\": 86, \"category_id\": 1, \"bbox\": [17.5874080657959, 0.0, 77.04512786865234, 155.2582550048828], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [371, 483], \"counts\": \"Qm62[;c0@8I3N<D7J5K7I2N3M3M2N4K5L3K`0A5K5L9bH^LX6f3bI^L]6h3ZI[Le6Z4N2O100O100000000000UNnI`MAMb6`2mJZMS5e2n1O1O10000001O1O1O001O0dHTMQ6m2lIZMo5h2mI[MR6g2jI\\\\MV6h2cIZM]6i2^IZMb6h2XI[Mj6f31O00001O001N101O0O1O1N2O00000O2M3L3L4iMW2N3mNYF7l9E`Z]4\"}}, 
{\"image_id\": 86, \"category_id\": 1, \"bbox\": [185.28880310058594, 8.175943374633789, 94.00941467285156, 197.77658081054688], \"score\": 0.9999963045120239, \"association_id\": 2, \"segmentation\": {\"size\": [371, 483], \"counts\": \"\\\\hS23_;6K1N2O1O1N2O001O1O1O2N1O1O1QF@i8b0mFc0V8^OfGm0R8UOiGQ1T8d1C6J4L2N2N1O3M3M2N2N1O1O101N2O1O01O10O01O3M1O1N2N010O02O1eIhKW5Y4`JPL_5R4WJWLh5j41O101N1O00001O00000000001O001N3L4]Ob0L4K5I8K5L5E:K5L4F9H9K6I6F;H7J8_OQ1UOTg\\\\2\"}}, {\"image_id\": 86, \"category_id\": 2, \"bbox\": [214.995849609375, 155.5582733154297, 242.7327880859375, 188.41294860839844], \"score\": 0.9999895095825195, \"association_id\": 2, \"segmentation\": {\"size\": [371, 483], \"counts\": \"fV_24Q;OYEa0Y:]OhEc0X:]OhEc0e:O000CRE5n:JSE65F`:1^E81Ih:7WEJi:6WEJh:692O01O10cFK\\\\74dH3V7LkH6R7JnH8Q7HoH9o6HQI8o6HPI:n6GRI:l6GTI:g6JYI7c6M\\\\I3c6N]I3a6N_I3_6MaI4^6LcI5Y6MhI3R62oIOl55SJLh58YJI`5<aJDW5c0iJ^OP5i0PKXOj4l0XKSOf4n0[KSO`4o0bKQO[4Q1gKoNU4R1mKnNQ4R1QLnNn3R1SLoNk3Q1VLoNj3o0XLQOg3o0ZLQOe3o0\\\\LQOc3n0_LROa3m0`LSO_3m0bLSO^3k0dLTO\\\\3k0fLUOZ3j0gLVOY3k0fLUOZ3l0eLTO[3m0eLRO[3o0dLQO\\\\3P1cLoN^3Q1cLnN]3S1bLmN^3S1cLlN]3T1cLkN^3V1bLhN_3Y1aLfN_3Z1bLdN_3]1aLaN`3`1`L^Na3c1_L[Nb3f1^LXNc3i1\\\\LUNf3k1\\\\LRNe3n1]LoMe3Q2]LjMe3V2l21O01O01O01000O10000O010O010O010O100O10O01O0100O0100O10000O01O01O010O00100O1O1O010O0100000000O0100O0100O100O100O01000000010O01O000O10O1001O9G4L3M1O00O0100O1O2O0O1O0010O01O010O01000O010O010O00010O00001O01O000010O01O1O010O010O01O1O010O001O010O10O100O10O01O1O010bNiG^OY8=nG_OS8>RH_Oo7?SH@m7>VHBj7<XHCh7<YHDh79[HFf79[HFf78[HIf75[HJf74\\\\HKe74\\\\HKe73\\\\HNc72^HMc72^HMc72^HMd72\\\\HMg70[HNh7OYH0j7NVH1l7OSHOR8MnG3U8JlG5j9O001N3M2O2LYm:\"}}, {\"image_id\": 86, \"category_id\": 2, \"bbox\": [9.574712753295898, 174.66334533691406, 198.83612060546875, 173.70652770996094], \"score\": 0.9999387264251709, \"association_id\": 1, \"segmentation\": {\"size\": [371, 483], \"counts\": 
\"bk33o92UF2`0>]OBo8m0VGROMk0k86SG7l8X100000001O0[GZM]8f2bG\\\\M]8e2`G`M]8m2N2N2N1O1O100O001000O100O2O00000O010O010O0100O100O1000O10O10000O1000O2N101O0000O1O001O001O001O1000O100O2O0O0010O001lNgLlIZ3S6gLmIY3R6hLmIY3R6gLnIZ3Q6gLoIX3P6iLPJW3P6jLRJS3n5mLUJQ3j5oLXJo2h5QMZJm2f5TMZJk2f5UM[Jj2e5WM[Jh2e5YM\\\\Je2d5[M]Jd2c5]M]Jc2b5]M_Jb2a5_M_J`2a5`M`J_2a5`M`J`2_5aMaJ_2^5aMcJ^2]5cMdJ\\\\2[5kM`JS2`5mMaJS2^5nMbJQ2`5mMaJR2a5cMSIO^1\\\\2a5`MUI3\\\\1[2i5dMZJY2g5gMZJX2e5hM]JV2d5jM\\\\JU2c5nM\\\\JQ2d5QN\\\\Jm1d5XN[Jd1e5]N\\\\Ja1d5`N\\\\J_1d5bN\\\\J]1d5cN]J\\\\1c5eN\\\\J\\\\1c5eN\\\\J[1d5gNZJZ1e5lNVJT1i5nNUJS1j5oNTJQ1l5QORJP1l5ROTJm0l5TOTJk0l5VOTJi0l5YORJh0m5YOSJf0l5\\\\OSJd0m5]ORJc0n5]OSJb0d5TN]I[1n0a0c5XN\\\\IY1Q1>a5]N\\\\IV1S1<_54`JK`56`JI_59aJG^59bJG^5:bJE^5;bJE^5<bJC^5?aJ@^5b0aJ_O^5a0cJ^O]5c0cJ\\\\O]5d0dJ[O\\\\5e0dJ[O\\\\5e0eJZO[5f0eJZO[5g0eJXO[5h0eJXO[5h0fJWOZ5j0eJVO[5j0gJTOY5m0gJROY5o0fJQOZ5P1fJoNZ5Q1gJnNY5S1fJmNZ5S1gJlNY5T1hJjNY5V1hJiNX5W1jJgNV5Y1kJfNU5Z1lJeNT5[1mJdNS5\\\\1nJcNR5]1oJaNR5_1oJ`NQ5a1oJ^NQ5b1oJ^NQ5b1PK]NP5c1QK[NP5f1oJZNQ5f1QKWNP5i1UKQNl4P2ZKhMg4Y2ZKcMh4]2ZK_Mh4b2T2001O1O1O1O2N001O00001O1O002N000000O1000000N3M3[O\\\\_S3\"}}, {\"image_id\": 86, \"category_id\": 2, \"bbox\": [180.18466186523438, 14.294025421142578, 206.09686279296875, 42.87614822387695], \"score\": 0.9999294281005859, \"association_id\": 0, \"segmentation\": {\"size\": [371, 483], \"counts\": \"Vaa25];2O001N101O2N1O1N1000001O00100O001O000010O01O1O010O0000100O1O0001O00001O01O0001O0O100000001O0000000000001O0000000001O0000000000000000000000000000000001O0000000000001O000001O00000000010O0000001O01O0000000001O01O00000000000000000010O0000000000000000010O00001O0000100O00001O0001O0010O000001O001O1O1O001O001O1N3N1O0O101M]jS1\"}}, {\"image_id\": 86, \"category_id\": 2, \"bbox\": [9.395824432373047, 152.93331909179688, 361.8849792480469, 201.74945068359375], \"score\": 0.9709063768386841, \"association_id\": 0, \"segmentation\": {\"size\": [371, 483], \"counts\": 
\"T`7:S;:I5L3M3G9N1O2N101N1DfN]F]1]9lN^FU1\\\\9e0L3N3N1O2M2O2O0O101O001O001O1O001O1O000O2O01O01O1O010O1O1O100O001O1O01O01O100O1O010O1O10O01O10O010O100O1O100O100O001OVNPMQKQ3m4RMSKl2l4WMTKg2l4ZMUKe2i4]MXKb2f4`MZK_2e4cM[K]2d4dM\\\\K[2c4hM\\\\KX2b4kM^KS2b4nM_KQ2`4oMbKo1^4QNdKm1\\\\4TNeKk1Y4VNkKf1U4ZNnKc1R4]NoKb1Q4^NQL`1o3`NRL_1n3aNTL^1k3bNXL[1h3eNZLY1f3gN[LX1e3gN^LX1a3hN`LW1`3iNaLV1_3jNbLV1]3jNcLV1^3jNbLU1^3lNaLU1^3lNbLT1]3mNcLR1]3oNQKUO;l1d4oNnJYO>h1c4POkJ[Oc0d1c4POhJ@d0_1d4ROeJBg0\\\\1c4SOdJCi0Y1c4TOcJEi0W1e4TOaJFj0U1e4VO_JGk0S1f4XO\\\\JGn0Q1e4a0[K^Oe4c0[K\\\\Of4d0ZK[Of4g0YKYOf4h0ZKWOf4j0YKVOh4i0YKWOf4j0ZKUOf4k0[KUOe4j0[KVOe4k0[KTOe4l0\\\\KTOc4l0]KTOd4l0\\\\KSOd4m0]KROd4n0[KROe4o0[KPOf4P1ZKoNf4R1ZKmNg4S1YKmNf4S1[KlNf4T1ZKkNf4V1YKjNg4V1ZKiNg4W1YKhNg4X1YKhNg4Y1YKeNi4[1WKdNi4\\\\1XKcNh4^1XKaNi4_1WK`Ni4`1XK^Ni4c1WK\\\\Ni4d1XK[Nh4e1YKYNi4g1WKXNi4h1XKWNh4i1XKVNi4k1WKTNj4k1VKTNk4m1UKRNk4o1TKPNn4P2RKoMn4R2QKmMP5T2PKjMQ5W2nJhMT5W2mJgMT5Z2lJdMU5]2jJbMW5_2iJ_MX5a2jJ\\\\MW5e2kJVMW5j2j10O1O1O2N1O1O2N001O1O0010O01O2O0O2N2O1NTNQH7m7FYH8e7G^H8b7FaH:]7EdH;]7DcH=]7AdH`0]7^OcHc0]7ZOeHh0\\\\7SOfHo0Z7kNjHV1X7fNhH]1X7_NjHb1W7[NjHg1U7WNlHj1U7SNlHn1\\\\8002M2O1N2O1O1O0O2O1O1O001N2OcNVG1b85cGjNJd0b8`0hGgNIi0^8?THBk7;XHFf78]HIa74dHL[72gHOW70kH1S7NoH2P7NQI3l6NUI2j6OVI2i6MYI2f6N[I2d6M^I3a6LaI4^6KeI5P63RJMS5_O]I=j14g4e0ZK[Od4f0]KZOb4f0`KYO^4h0cKXO[4i0fKWOX4k0hKVOU4k0lKUOS4k0oKTOP4l0QLTOn3k0ULTOk3k0WLTOh3l0YLTOg3j0\\\\LUOd3j0^LUOa3l0_LTOa3k0`LUO`3k0aLTO^3l0cLTO]3l0cLTO]3l0dLRO]3n0cLRO]3o0cLPO]3P1cLPO]3P1dLoN\\\\3Q1dLoN\\\\3R1dLlN]3T1dLkN\\\\3U1eLjN[3V1eLjN[3W1eLhN[3X1eLhN[3X1fLfN\\\\3Z1cLfN]3[1bLdN_3\\\\1bLcN_3]1aL`Na3`1`L]Nb3d1aLUNb3l1S3O010O01O10O010O010O0010O01O0010O01O1O00100O001O1O00100O1O1O010O01O010O1O100O1O100O10O01O01O010O10O010O01000O100O010O01O1O00100O1O101N1O100O002M2M4L4L6oNbEa0\\\\fX1\"}}, {\"image_id\": 86, \"category_id\": 2, \"bbox\": [65.34174346923828, 115.60130310058594, 165.72894287109375, 219.3403778076172], \"score\": 0.10441171377897263, \"association_id\": 0, 
\"segmentation\": {\"size\": [371, 483], \"counts\": \"Qjg0V1]9T1ZMWNoJW2k4oMnJV2P5lMoJU2n4nMQKR2o4oMPKR2n4oMRKQ2m4QNQKQ2n4PNPKQ2P5PNnJQ2R5PNkJR2V5nMhJS2X5nMfJS2[5mMbJU2^5lMaJT2W5TNkJk1R5WNQKf1m4\\\\NUKb1i4`MYJ:o0T2f4aM_J:l0T2c4cMbJ9l0S2`4eMfJ8j0S2]4gMiJ7j0R2[4hMlJ5j0S2W4iMPK5h0T2U4hMTK4g0T2T4hMUK4h0S2S4iMUK4i0R2R4jMUK5j0o1Q4kMWK5i0P2o3kMXK5j0o1n3lMXK5k0n1n3lMWK7k0l1o3mMTK8n0k1o3mMPK9R1i1P4QNgJ9Y1f1P4WOPLi0Q4VOPLj0P4VOPLj0P4UOQLj0Q4UOPLj0Q4VOPLg0Q4[OoKb0R4@mK?S4AnK=T4BmK<T4CoK;R4hN]JFb1`1Q4iN_JFa1`1Q4iN_JFa1a1o3iNaJF`1`1o3jNaJG`1^1o3kN`JI`1\\\\1P4lN_JIa1Z1P4mN_JK`1W1Q4nN_JM_1T1S4oN\\\\J0`1P1U4oNZJ4`1l0W4POWJ8_1h0\\\\49bKG_49aKF`4;^KEb4<]KDc4>[KBe4?ZKAg4?XKAh4`0XK_Oh4b0WK^Oi4c0VK]Oj4e0UKZOl4f0TKYOl4h0UKVOk4k0UKTOl4k0UKTOk4m0TKSOm4l0TKSOl4n0SKROn4o0PKQOQ5o0oJPOQ5Q1nJoNS5P1nJoNR5R1nJmNS5R1oJlNQ5T1PKkNP5V1PKiNP5W1QKhNP5W1PKiNP5X1PKgNP5Y1PKgNP5Z1oJfNQ5[1oJdNQ5]1oJbNQ5_1oJ`NQ5a1oJ^NQ5b1PK]NQ5b1oJ]NR5d1nJ[NR5e1nJ[NS5d1nJ[NR5e1nJ[NS5e1mJZNT5e1lJZNV5e1kJZNU5f1lJYNT5g1nJWNR5j1oJSNQ5n1PKQNP5o1QKPNn4Q2SKnMm4S2SKkMn4V2SKgMo4Z2QKaMR5`2PK[MS5e2m1001N2N3M4M3L2O2O1N3N2N1O0O1O2O1N2N100O10001N1000O2O0O1000000eMdH>\\\\7]OjHb0W7ZOmHd0U7YOmHg0S7VOQIh0P7VOSIi0m6UOVIi0k6VOWIh0k6UOWIk0i6RO[Il0f6PO^I_OTOm0^7@XJ>h5@[J?e5_O^J>e5_O]J`0e5]O^Ja0d5\\\\O^Jb0e5ZO^J3`M9S;M3M4JTPk2\"}}, {\"image_id\": 87, \"category_id\": 1, \"bbox\": [14.784979820251465, 4.481450080871582, 95.54330444335938, 171.306396484375], \"score\": 1.0, \"association_id\": 2, \"segmentation\": {\"size\": [305, 405], \"counts\": \"b]5a0m85L3M5K4L3N3K5UJhNX3[1cLiN[3Y1bLjN]3V1aLlN^3W1]LlNa3W1\\\\LkNc3W1ZLkNe3W1ULnNj3T1RLoNl3T1QLnNm3W1mKlNR4V1jKmNU4U1iKkNW4X1eKiNY4]1aKcNa4_1\\\\K^Ni4c1SK\\\\NP5o24N2O0O1000O11N1O200O100001O<D2M3N2N1O00000000O10O10XORKmLo4m2[KmLf4Q3_KkLb4S3aKkL_4T3n01001O1jMYJa0h5]O[Ja0e5_O^J=d5A`J9d5DaJFR6:PJCQ6<RJCm5;WJDg5<\\\\JCc5<`JC_5<dJC[5=gJBX5?hJAW5?jJAU5`0lJ_OS5b0mJ^OS5b0nJ]OR5c0oJ\\\\OQ5d0QKZOo4f0RKYOn4g0SKWOn4j0l100O1000000O1O1O1O2M3N4J`kg2\"}}, {\"image_id\": 87, \"category_id\": 1, \"bbox\": 
[292.1504821777344, 0.0, 21.8375244140625, 29.656509399414062], \"score\": 0.9999996423721313, \"association_id\": 1, \"segmentation\": {\"size\": [305, 405], \"counts\": \"alg21\\\\96]O1^G1^8c0O10000000H_G@a8?aG@_8=fGAZ8?gG@Y8?=00O1M3I7Nn\\\\k0\"}}, {\"image_id\": 87, \"category_id\": 1, \"bbox\": [200.01290893554688, 17.197437286376953, 86.12261962890625, 180.88232421875], \"score\": 0.9999995231628418, \"association_id\": 3, \"segmentation\": {\"size\": [305, 405], \"counts\": \"ocn14\\\\95J4oNHcH;Y7JdH8Y7McH5[7NcH3Z7OeH3Y7OfH3W7NiH2V70hH2W7OgH2Y73aHO;]OY6g1dI\\\\N[6h1_I\\\\N^6]2N2O1N1O2O2M5L3M4M7QKbL[3a3`LdL^3]3_LeLa3`3XLaLi3a3RLbLo3`3iKdLl4f32N001O001O2N2N1O001O1O2N2N1cNjJ]NX5^1QK\\\\NR5^1VK]Nk4a1XK]Ni4b1XK]Ni4a1YK^Ni4`1YK^Ni4_1ZK_Ng4`1[K^Nf4_1]K_Ng4]1\\\\K`Ng4\\\\1i1N2N4H7J6L5I5M4L7G6KVZV1\"}}, {\"image_id\": 87, \"category_id\": 2, \"bbox\": [237.25411987304688, 155.33383178710938, 153.77011108398438, 42.03202819824219], \"score\": 0.9999990463256836, \"association_id\": 3, \"segmentation\": {\"size\": [305, 405], \"counts\": \"WZ]22]93O1O0O10000oFMd83\\\\GMc84\\\\GMd83\\\\GMd82]GNb83]GNc81^GOb81^GOa82_GNa81`GO`81_GOa82_GNa82^GOb81^GOb80_GOa82^GOb81]GOd81[G0e8O\\\\G1d8O[G1f8OZG1e80ZG1f8OYG2g8NYG2g8NYG1g8OYG2g8NYG2g8NYG1h8NXG3g8NYG1h8OXG1h8OXG1h8OWG1j8OVG0k80UG0k80UGOl81SG0m80SG0m80SGOn81RGOm82RGOn891000O1000000O100000000000000000000O10000O100O10O10O100000000O100000000O1000O10O10000O0100000O1000000000O10O10000000000000O1000O10000001AXG2i8M\\\\GOd80^GOb81^GOb80_G0a80`GNa82bGK^84`00000O101O0000cc4\"}}, {\"image_id\": 87, \"category_id\": 2, \"bbox\": [27.651203155517578, 135.7010955810547, 285.67431640625, 56.800140380859375], \"score\": 0.9999934434890747, \"association_id\": 2, \"segmentation\": {\"size\": [305, 405], \"counts\": 
\"W_>3\\\\94L200O2O00O1O100O00100O10000O01000O10^OLgG4W8OhG1W80iG0W81hGNX83gGNY82gGNY82gGNY82gGNY82gGNY82gGNY81gG0Y80gG0Y80gG0Y80gG0Y80gGOZ81fGOZ81fGOZ80fG1Z8OfG1Z8OfG1Z8NgG2Y8NgG2Y8NgG2Y8MhG3X8MgG4Y8LgG4Y8LgG4Y8KhG5Y8JgG5Z8JgG6Y8JgG6Y8JgG6j800O10000000O10000^OJiG6V8LhG5X8KhG4Y8LgG4Y8LgG4X8MhG3X8MhG3X8MgG4Y8LgG3Y8NgG2Y8OeG2[8NdG3\\\\8MdG2]8NcG2]8NbG3^8MbG3]8NcG1^8ObG1^80`G1`8O`G1`80^G0b82YG2g8<0O1000000000O100000000O1000000000000O2O001O0O101O00000O10000O2O00000000O001O1O10O0100000000O1O1000000O1000O10000000O1000000O10000000000O1000000O10000000O10O100O1O10000O10000000001O0000000000001O0O101MdW9EihF0O101O0O100O100000000O10000001N1000_GJh75WHLi73XHNg72XHOh71XHOh70YH0h7NXH3c80000000000000O1000000O100O1000000000000000O3L3NTak0\"}}, {\"image_id\": 87, \"category_id\": 2, \"bbox\": [305.3192443847656, 27.44654655456543, 36.31884765625, 3.1601409912109375], \"score\": 0.9999103546142578, \"association_id\": 1, \"segmentation\": {\"size\": [305, 405], \"counts\": \"RRl21`90000000000000000000000000O100000000000000000O100000000000Vdc0\"}}, {\"image_id\": 87, \"category_id\": 2, \"bbox\": [268.9867248535156, 25.588151931762695, 28.88262939453125, 3.146085739135742], \"score\": 0.9949316382408142, \"association_id\": 0, \"segmentation\": {\"size\": [305, 405], \"counts\": \"jg`21_9100000000000000000O1000000000000000000000fZQ1\"}}, {\"image_id\": 87, \"category_id\": 1, \"bbox\": [259.9665222167969, 0.0, 13.772308349609375, 29.51697540283203], \"score\": 0.6562979817390442, \"association_id\": 0, \"segmentation\": {\"size\": [305, 405], \"counts\": \"\\\\^]2482a8c0M201O000000FbG@_8?:O2L7K4LmYW1\"}}, {\"image_id\": 88, \"category_id\": 1, \"bbox\": [87.40778350830078, 172.2215118408203, 233.645263671875, 474.5682373046875], \"score\": 0.9999979734420776, \"association_id\": 1, \"segmentation\": {\"size\": [924, 520], \"counts\": 
\"RmR3d0nk0>G7J5M4K5L4K4L4_FQNoGS2m7VNhGQ2T8YN_Gm1^8[NYGj1b8]NXGf1f8_NTGe1i8_NRGd1m8_NoFd1o8_NjFg1T9]NbFk1]9ZNUFo1j9WNhES2W:SNTD]3j;mLcC]3]<kLYCX3e<lLUCX3j<jLRCX3n<lLlBV3T=nLeBU3[=QM\\\\BR3d=VMPBn2o=ZMgAi2Y>ZMaAi2_>YM]Ai2c>YMXAj2h>YMQAk2o>YMf@n2Y?\\\\5O0000001N10000O100O100O100O010O010O010O01O1O001O1N2O001O1O1O1O1O1O1O1N2O1N2N2M4L3N2M3M4L3O1O2O1O1N2O001N101O001O00001O0O10001O00001O00001N10001O1O2N2M5L4L3M2N3M3L4M4L5K4K5L3M2N2M3N3M9Gg1XN9H2N2M3N2N2O3L3M2N2N100O1O101O0O2O2N1N1001O0O2O1O1O1O0O20O\\\\LjIa@V6\\\\?oIc@P6[?TJe@j5X?ZJi@d5U?`Jj@_5T?dJl@[5R?hJn@X5P?iJPAW5o>iJSAU5n>jJTAU5k>kJ[AP5d>PKcAj4]>UKiAf4V>ZKmAc4T>\\\\KoAb4P>^KRBa4n=ZKWBd4l=UK\\\\Bh4f=QKbBk4mb0J6K5J5K3N2M3M4M2M5K4M3L3N2M3N2M4L5L3L4M2M3N2L7J<C:F8H:YOP_h5\"}}, {\"image_id\": 88, \"category_id\": 1, \"bbox\": [321.3459777832031, 429.75543212890625, 141.4765625, 292.5054931640625], \"score\": 0.9999451637268066, \"association_id\": 2, \"segmentation\": {\"size\": [924, 520], \"counts\": \"k``9e0Tl05L4L4L4cZOFQ>`0[@T1a?oNU@[1h?gNo_Ob1o?`NV_OZ2i`0gMo^Ob2Pa0_Mm^Oc2Ra0`Mj^Oc2Ua0^Mi^Oc2Wa0^Me^Oe2[a0\\\\M`^Oh2`a0YM[^Ok2ea0VMW^Ol2ja0UMS^Om2ma0TMo]Oo2Qb0RMk]OQ3Ub0QMd]OT3\\\\b0nL^]OV3bb0lLY]OW3gb0mLQ]OW3ob0VM\\\\\\\\OR3dc0S300000001O001O00000000001O1O1O1O001O0000001O0000000000000000000000000000000000000000000000000000000000000000O1O1O1O1N2M3L4@`0H8M3L4SOm0^Ob0K5M3L4F:^Ob0F:L4L4kNU1B>J6M3L4D<L4M3M3N2O1O1O1O10000000001O000O101O2L=ZOZPc1\"}}, {\"image_id\": 88, \"category_id\": 2, \"bbox\": [61.51653289794922, 422.9011535644531, 75.51775360107422, 169.87368774414062], \"score\": 0.7654669284820557, \"association_id\": 1, \"segmentation\": {\"size\": [924, 520], \"counts\": \"`[h1e0Wl01N2O001O1O2N1O1O1O1O2N1O1O1O2N2N1O001O1O2N2N00001O001O2N2]UOgN_i0Z1^VOkN_i0V1^VOPO^i0c2D5K`0@5K3M1O1O2N1O2N2N3M1O00O1N2M3N2M3L4M3J6H8N2J6M3K6I6L5VOi0@b0J6D<H:Gf\\\\P;\"}}, {\"image_id\": 88, \"category_id\": 2, \"bbox\": [308.00897216796875, 434.7742004394531, 118.69024658203125, 312.2831726074219], \"score\": 0.7483850717544556, 
\"association_id\": 2, \"segmentation\": {\"size\": [924, 520], \"counts\": \"fUj8Q1ek0;G6J6K6J6J4L4K4fJgMm_O^2P`0jMd^O7iLS2cd0fMa^O?cLo1ld0bM`^O`3aa0_L^^Ob3ba0^L]^Od3ba0]LZ^Of3fa0[LT^Oj3la0XLn]Ol3Rb0VLj]Ol3Vb0VLf]Ol3[b0SL\\\\\\\\OJ\\\\OU4\\\\d0nKi[Ob0Bb3jd0hKZ[OT6id0jIS[OY6Qe0jIdZOZ6\\\\e0b01O2N1N2TOcZOQJee0^4U2H9I7@`0XOi0G9L2O1O1O1O3K5K5J5L5K6J5K5J6K6GgU]4\"}}, {\"image_id\": 88, \"category_id\": 2, \"bbox\": [58.62630844116211, 400.0406188964844, 206.03045654296875, 175.39303588867188], \"score\": 0.08045275509357452, \"association_id\": 0, \"segmentation\": {\"size\": [924, 520], \"counts\": \"Vee11cl0;K5L2N2O1N3N2M2O001O1N102N2N2M3N1O1O1O1O1O2N1O1O0O2O1O1O1O1O1O001O1mUOZNnh0g1kVOgNmh0Z1jVOD`h0`0VWO4\\\\h0X2N2N100O001O00001O00O1O2O0N2I7O1O1O1O1H9F9D<lNZVOPOii0n0ZVOlNji0R1YVOYNN4oi0a1aVOYNei0[1T1L4F;J;E6J6Lc_o:\"}}, {\"image_id\": 88, \"category_id\": 1, \"bbox\": [87.40962982177734, 500.986083984375, 233.86968994140625, 208.10308837890625], \"score\": 0.06638431549072266, \"association_id\": 0, \"segmentation\": {\"size\": [924, 520], \"counts\": 
\"fSQ33gl05AIQTOc0ck0=L4E;K5M3L3O2M2N2N2M3I7I7D<O10O10O10000000O01000000000000000000000001O0iXOQMad0o2^[OTM`d0m2^[OVM`d0j2_[OYM_d0h2_[O\\\\M^d0d2a[O_M]d0b2b[O`M\\\\d0`2c[OaM]d0`2^[OeMad0[2[[OiMed0W2Y[OlMfd0U2X[OlMhd0T2W[OmMid0S2W[OnMhd0R2W[OnMjd0R2U[OoMkd0P2U[OQNkd0o1T[ORNld0n1S[OSNmd0m1R[OSNod0l1Q[OUNod0k1P[OVNPe0i1Q[OWNod0h1R[OXNnd0g1R[OZNnd0d1T[O\\\\Nld0b1V[O^Njd0^1Z[ObNfd0T1c[OmN]d0P1f[OPOZd0o0g[OQOYd0m0i[OSOWd0k0l[OTOTd0i0o[OWOQd0e0T\\\\OZOlc0`0Z\\\\O@fc0<^\\\\ODbc0:`\\\\OF`c08c\\\\OF^c0;a\\\\OF^c0;a\\\\OE_c0=_\\\\OCac0>]\\\\ODbc0=]\\\\OCcc0>\\\\\\\\OBdc0>\\\\\\\\OCcc0>\\\\\\\\OBdc0>]\\\\OAcc0`0\\\\\\\\O@dc0b0[\\\\O]Oec0d0Z\\\\O\\\\Ofc0e0Z\\\\OZOfc0g0Y\\\\OYOgc0h0Y\\\\OWOgc0j0X\\\\OVOhc0j0X\\\\OVOhc0k0W\\\\OUOic0k0W\\\\OUOic0l0V\\\\OSOkc0n0T\\\\OROlc0o0S\\\\OQOmc0P1R\\\\OPOnc0Q1Q\\\\OoNoc0Q1Q\\\\OoNoc0Q1Q\\\\OoNoc0Q1R\\\\OnNnc0R1S\\\\OmNmc0S1S\\\\OmNmc0S1T\\\\OlNlc0T1T\\\\OlNlc0T1U\\\\OjNlc0V1T\\\\OjNlc0W1S\\\\OiNmc0W1T\\\\OgNmc0Y1S\\\\OgNmc0Y1S\\\\OeNPd0Z1P\\\\OeNQd0[1o[OeNQd0[1o[OdNRd0\\\\1n[OcNSd0]1n[ObNRd0]1o[ObNRd0^1o[O`NRd0`1o[O\\\\NTd0d1o[OWNSd0i1P\\\\OQNSd0o1]3000000O101XN]UOU1cj0jN`UOT1`j0jNdUOS1]j0lNfUOR1Zj0mNjUOP1Vj0POkUOn0Vj0ROjUOn0Vj0ROkUOm0Uj0SOkUOl0Vj0UOiUOk0Wj0UOjUOj0Vj0VOjUOi0Wj0WOjUOh0Vj0XOjUOh0Vj0YOhUOh0Wj0YOhUOh0Xj0XOhUOh0Xj0XOhUOh0Xj0XOhUOh0Xj0XOhUOh0Xj0XOgUOi0Yj0WOgUOi0Yj0XOfUOi0Yj0WOfUOk0Yj0VOeUOl0Zj0VOcUOl0\\\\j0l0N1O1O1O101N10nMlUO\\\\1kj00O1N101OO001O001O1O1N2O1O00O1O1N2O1O1O100O01000000O1000O010O0O2O001O001FSNdUOn1[j0UNcUOj1hj001O2M2O1N7I4M2M2N2N2M;EQQc5\"}}, {\"image_id\": 89, \"category_id\": 1, \"bbox\": [126.7133560180664, 16.896007537841797, 312.4027404785156, 284.52685546875], \"score\": 0.999998927116394, \"association_id\": 2, \"segmentation\": {\"size\": [725, 525], \"counts\": 
\"\\\\ni2Q3`c0:H4L4L3N2O1N2O0O2O1O0O2O001O0O2O00001O001O001O001O1N101O010O00001O00001O0000001O00010O00000010O0001O001O100O4L3M6\\\\^O`Kg`0\\\\5L0O2N1O101N2N101N2N1O2O1N1O1O0000001O0001O00000001O00001O001O0010O01O001O1O001O2N1O1O2N1O1O2N1O1O2N1O3M3M7I6J5K2O0O1O1O001O001O1O1O001O1O2N1O1O2N1O2N2N10O01O001O000000001O000001O00000000001O0000001O00001O0000001O000000001O000000000000001O000000000000000000000000001O0000000000000000000000000000000000000000000000000000001O000000000000000000001O00001O00001O001O1O00001O0000001O0000001O00000000001O000000000O10001O0000000O100000000O2O0O1N3fMiAPL[>g3TBoKP>\\\\3ZCgKo<P4o2N3N1N3N2N1O2L3M4K4L4N3M2N2N3N1N3M3M2N3I7F;C=H7C>J5L4BXeo1\"}}, {\"image_id\": 89, \"category_id\": 1, \"bbox\": [254.63552856445312, 389.09088134765625, 196.57443237304688, 235.569091796875], \"score\": 0.9999887943267822, \"association_id\": 1, \"segmentation\": {\"size\": [725, 525], \"counts\": \"cXn53ee0^1QOd0H4L3N2N3M4L7I5L1N2N3N2M7SMbLWBd3b=`LZBc3c=`L[Ba3c=bLZB`3e=aLZBa3d=`LZBb3e=_LZBb3e=_LZBa3f=`LYBa3f=`LXBa3h=`LWBa3h=`LWB`3i=`LWB`3i=aLVB_3j=aLVB_3j=bLUB^3k=bLUB^3k=cLTB]3l=dLRB]3n=cLRB]3o=cLoA^3Q>cLmA^3S>cLlA]3T>eLiA\\\\3W>eLgA\\\\3Y>eLdA]3\\\\>eL`A]3`>fLYA^3h>W20001O000001O0^IV@Y6T`0O1O1O0001O001O100O1BdI`@]6_?gI\\\\@[6d?<O0O101O000O1000000000001O0000000O10001O1O001O1O1O1O0000001O0000000000000000001O0000000000000000aIj@_5V?`Jm@^5S?aJPA]5P?bJQA^5o>aJSA^5m>aJTA_5l>`JVA_5k>^JXAa5h>\\\\J[Ad5e>ZJ^Ae5b>XJaAh5_>VJdAi5]>TJeAl5\\\\>QJfAo5[>nIgAR6^?01O0000000O1000000000001O1N2N3M3M2N1O2N2O0O2O2N1N2O1N2M3N2M3N3M2N2N2N3L6XLi^Oi1]a0mMk^Oo1[a0iMj^OT2nb0N1N3M2M4L4L3N3M2M4L4L6I6J5L3N3M4Kkad1\"}}, {\"image_id\": 89, \"category_id\": 2, \"bbox\": [106.63499450683594, 337.5964660644531, 213.9137420654297, 268.3600769042969], \"score\": 0.9999083280563354, \"association_id\": 1, \"segmentation\": {\"size\": [725, 525], \"counts\": 
\"Umc27Zf06M4L4L3M2N2O1N1O2O1N3M3M3M2N101N2O2M3N2M2O1N2N2N2N3M3M3M2M2O2N2N3L4M3N2M2O1N2O3M4L9G9G5K1O1O2N2N3M2N2N1O100O1O2N4M2M3M2O^L]^OQ2ca0eMh^OZ2Za0`Mj^O`2Wa0\\\\Mm^Oe2Sa0VMR_OP3i`0kLZ_OW3e`0dL__O[3b`0bLb_O\\\\3^`0aLe_O_3ja0O9G10OXMc\\\\OX2]c0dMi\\\\OZ2Vc0bMP]O\\\\2Qc0`MX]OY2lc0G4L3M3N2NRM[Nj@d1P?bNQA]1k>gNVAX1i>iN[AS1c>oNaAm0^>UObAk0\\\\>WOcAj0Z>ZOeAe0Z>]OeAc0Z>_OeAa0Z>_OgAa0Y>^OgAc0X>^OgAc0Y>\\\\OgAf0X>YOhAi0W>VOiAk0V>VOiAk0W>TOiAm0W>SOhAm0Y>ROgAo0Y>QOfAP1Z>oNfAR1Z>nNeAS1Z>mNgAT1X>kNhAV1Y>iNeAY1[>fNeAZ1\\\\>fNbA[1_>dN`A^1`>aN_A`1a>aN^A_1c>`N\\\\Aa1d>_N\\\\Ab1c>^N\\\\Ac1d>]N[Ad1f>[NYAf1g>ZNXAg1h>ZNUAh1k>XNSAj1m>VNQAl1P?SNo@n1Q?SNm@o1S?QNl@o1T?RNj@o1W?RNf@P2Z?UN`@l1_?XN\\\\@j1d?WNY@k1f?VNY@k1g?VNV@k1j?WNS@k1l?WNP@k1Q`0WNk_Ok1T`0WNh_Ok1X`0XN__On1b`0VNU_Oo1j`0n100000000001O00001O001O001O0dNe^O^M\\\\a0^2j^O_MVa0\\\\2T_O^Mn`0j1n_OoMT`0_1i^OhMf16g?P2f2O2N100O2O001N10001N1010O00010O0001O001N2N3M2M3M3N2N3L4M4K4K7J9EcVi4\"}}, {\"image_id\": 89, \"category_id\": 2, \"bbox\": [32.11723327636719, 60.01995086669922, 107.66714477539062, 108.8606185913086], \"score\": 0.9270167350769043, \"association_id\": 2, \"segmentation\": {\"size\": [725, 525], \"counts\": \"Phf024l0Se0k0C3OO0O20OO0N1K6N20J700001000O1001O01O001O1O1N101O1O1O1O001N101O1O1O00001O1O1O1O0000001O100O010O0000001O1O1O010O000000001O1O1O0000001O00010O00100O01O1O1RNn[OBO^1Yd0bNi[OM:_1ld0J3M8H2O0O3M4M4L0O2N4M0O2M2O2N1O1MPnh8\"}}, {\"image_id\": 89, \"category_id\": 1, \"bbox\": [186.26861572265625, 342.4626770019531, 248.13031005859375, 237.86911010742188], \"score\": 0.573329508304596, \"association_id\": 0, \"segmentation\": {\"size\": [725, 525], \"counts\": 
\"W_U43`f05K5M1000O10000Oh[O3ja0KT^Ob0ba0^O\\\\^Od0ca0\\\\O\\\\^Og0ca0XO\\\\^Oi0da0WO[^Ok0ea0TOY^On0dNkNfb07b^OS1fNhNhb04a^OV1dNiNkb00`^OX1dNjNlb0M_^O[1bNkNob0NZ^OW1fNnNRc0HW^Ok1Xc0000O1O100000N3O000O1O1O10@YNS\\\\Og1mc0[NR\\\\Od1mc0_NQ\\\\Oa1Pd0`No[O`1Pd0bNn[O^1Sd0dNj[O]1Vd0b0O0000001O000001N2N1N3N2dNe[O;\\\\d0Ch[O:Zd0Eg[O9Zd0Fh[OAK9_d04h[O_OO:Yd07Z\\\\OFhc08\\\\\\\\OEec0:]\\\\OCec0;]\\\\ODdc0:_\\\\OCdc09^1M2O3M2MnP:1RoE3]NO\\\\\\\\O6_c0<a[O@Mb0i`0YOXB^1hL]Oc`0D^BS2X=WNcBT2Q=PNlBS2Q=oMmBS2Q=nMnBS2Q=oMnBR2P=oMPCQ2o<QNPCo1o<SNPCm1o<UNPCk1o<WNPCi1o<ZNoBf1P=\\\\NoBd1Q=]NnBc1Q=^NoBb1Q=_NmBb1R=`NmB`1R=bNlB_1S=cNlB]1S=eNkB\\\\1U=fNhB[1W=gNhBY1W=hNhBY1X=hNfBY1Z=gNfBY1Z=hNdBY1\\\\=hNcBX1\\\\=jNcBV1]=kNaBV1_=kN`BU1`=mN]BT1c=mN\\\\BS1d=oNYBR1g=oNWBR1i=POTBQ1l=QOoAR1Q>ROeAT1[>[On@m0R?[30000000000000000000000000000000000000000000000000000000000000000000000001O0000000000001O0000001O1O1O001O1O001O00001O1O002N1O1O001O1O1O2N2kJZ@W3h?cL]@\\\\3d?bL]@^3d?`L^@_3c?`L]@`3d?^L^@a3b?^L_@b3c?[L^@e3e?VL]@j3h?mK\\\\@S4m?aKV@_4Qa0O0000001O001O1O1O1O1O00001O00001O000000001O1O1O1O2N1O1O1O001O1O1O2N2N2eM[]Ob0gb0XO`]Oe0ab0XOc]Of0^b0UOh]Oi0Zb0oNP^Ol0Sb0oNU^Ok0oa0nNa^OH_N>WbP2\"}}, {\"image_id\": 89, \"category_id\": 1, \"bbox\": [271.5157470703125, 345.77252197265625, 112.58779907226562, 74.3787841796875], \"score\": 0.4721482992172241, \"association_id\": 0, \"segmentation\": {\"size\": [725, 525], \"counts\": \"UnP6c0oe07J3M4I7M1O2M4L5L2O0O2N2N101N1O2O1N10000O100O1000Ee[O]NZd0c1g[OZN[d0f1:0000000000O11O001O001O0000001O1O1O1O0000000000001O0000000000000000001O00001b[OPNUd0R2h[OoMXd0W201O000001O001O000000001O0000001O001O00001O001O001O001O1O001O001N2O001O0N5L2N2O0O3N1N2N3LP_S3\"}}, {\"image_id\": 89, \"category_id\": 2, \"bbox\": [426.7703552246094, 75.0234603881836, 98.22964477539062, 179.24691772460938], \"score\": 0.23276911675930023, \"association_id\": 0, \"segmentation\": {\"size\": [725, 525], \"counts\": 
\"X\\\\c91[f0a0B;C;I6L4M3M3M3N2M2O1N3N1O1O1O1O3M2N1O102M100000001O0O101001O0O10001N1O1O11OO1O2N2O000O0010O1001O1OO100O2N01O1O01O1O01N2O101OO00O2O2N010O2N00002O00O1dMPNQ@P2b?]N^@c1X?gNh@Y1V?iNi@W1U?lNk@T1S?nNm@S1P?oNo@Q1P?POPAo0P?ROPAo0o>QORAn0n>QOSAn0n>POUAn0l>POXAe0bM`NYa0`0dAh0fb0gNXJ\"}}, {\"image_id\": 89, \"category_id\": 1, \"bbox\": [128.8612518310547, 333.73101806640625, 205.8957061767578, 202.56103515625], \"score\": 0.11743901669979095, \"association_id\": 0, \"segmentation\": {\"size\": [725, 525], \"counts\": \"^nl33<?We0CgZO`0We0AgZOb0We0_OgZOd0We0]OgZOe0Xe0\\\\OfZOf0Ye0<O10O;F0O100O01000O1OlZOTOn0OQb0k0P]OZOc06]b0>o\\\\OC5;kb00P]OV1nb0hNS]O]1jb0`NV]Oe16mMda0=V^Oj1OPNha07V^Ok10PNka07P^Ol13mMna0U3n]OkLTb0W3h]OjLXb0W3e]OjL\\\\b0U3d]OlL\\\\b0T3c]OmL]b0R3c]OnL^b0Q3b]OnLbb0n2`]OQMbb0m2^]ORMfb0j2^]OSMeb0g2e]OQM]b0l2g]ORMZb0m2g000001O001O000000O101N100O2O000O2O000O2O000O101N100O100O2N100O2N2N2N2hNe[O3]d0Jg[O3[d0Kg[O3[d0Aa[OE7h0Zd0AR\\\\O<Pd0B[\\\\O3hc0Ib\\\\ONdc0Ka\\\\O1eh9OYlF8n]OI^=:]BN^=4_B0_=0RBBgLa0Ua0OoAFhL=Ya0NdAl0Z>VOdAk0\\\\>UObAm0^>TO`An0^>TO`Am0`>TO_Al0a>UO]Al0Y>`NQ_Og0d2i0Y>dNQ_Od0e2h0Y>FeA:Z>HeA8Z>JdA7[>JeA6\\\\>fNU_Oc0_2h0]>cNX_Ob0Z2l0_>_Na_O=P2T1`>]Nk_O5d1_1b>ZNm_O4b1b1e?\\\\NZ@e1g?YNZ@g1f?XN[@h1Yb001O000000iKZN`Ce1`<ZN`Cg1g`00O1O100N2O1N1O3M2N2O1N2EmMR\\\\OU2b`0fMeBd2f`0O2O1O7H2O0O1N3M3M2N3N2N1^NZ[OT1hd0iN][ONM`0LFjd0Jn[O:ZOJje01[ZOJTTX4\"}}, {\"image_id\": 89, \"category_id\": 1, \"bbox\": [50.89351272583008, 196.1040496826172, 157.27088928222656, 177.38755798339844], \"score\": 0.0736502856016159, \"association_id\": 0, \"segmentation\": {\"size\": [725, 525], \"counts\": \"lYT1T4[b0a0D7X^O^KPa0f4k^O\\\\KUa0R5000O10000001O000O10000001[OW_O\\\\Ki`0c4[_OZKe`0f4\\\\_OYKd`0e4g0M3N2O1O1O100O1O1O1O1O10000O10000N2O1O1N2O100O1N2O1N2O1M3L4N2O1N3N100O2N2O1N3oLe\\\\Od2\\\\c0ZMf\\\\Od2gc0O1O1O1O1N3N1O2M2O001O1O1O2N1O001O001O001O1O2N2N1O2OO02N2O3L3N1jNoZOe0Ue0ROS[Oi0_e0N1O2M2O1O0O100000001N1O2N2N2N4L2N3Neca0O[\\\\^O2N1O2N3M2N2OO2O0O101N2N2Ne_T7\"}}, 
{\"image_id\": 90, \"category_id\": 2, \"bbox\": [170.63009643554688, 307.47076416015625, 391.9585266113281, 149.01565551757812], \"score\": 1.0, \"association_id\": 2, \"segmentation\": {\"size\": [613, 820], \"counts\": \"kZo35ob0101O0O101O000000001N100000001O00001N10001O00000000000000000O10001O000000O010000O10O1000O10O10000O0101O000O10000O100O100O10000O2O0O100O101M2M3N200O1O2O000O100O100N2O1N2N2N2O1O1O101N100O100O1O1O1L4B>K5L4BdMo_Oa2i?a0O1O1N2O1N2M3N1O2I7O100O100O10000O100000O010000O10000O1000000000O1000O0100O100O1000000O100000000O1000000O100O10000O10000O100000000O1000000O10000O10000O100O1O100O1O100O2O0O1000O010000O100O100O100O100O100000000O100000000O10000O100O1000000O1000000O10000000000O1000000O100O100O10000O100000000O10000O100O1O10000O10000O2O00000O2O0O2O2M2O1N2O001N10001O0O2O1O0O5K`0@4M2M3N1N2O1N101O1N2N3M6J<DRPk5\"}}, {\"image_id\": 90, \"category_id\": 1, \"bbox\": [123.08502960205078, 231.94398498535156, 186.17507934570312, 204.8363494873047], \"score\": 1.0, \"association_id\": 2, \"segmentation\": {\"size\": [613, 820], \"counts\": \"US^2k0Wb09H4M3M3L=P_OeN^?l2D6K2N2N1O3L3M7J4L2N2N2M3M4K5L3N2N2O1N2N3L3N2N2N101O2N2M2O1O000O2O0O1O2N1O2O1N2O1O0O2O000000000000001O000000001O00001O0000001O001O1O001O00001O0000000000000000000000000000000000000000000000000000000001N1000001O1O1O1O1O001N10000O101N100O2O1N3N1N2O001N2O1O2N1N2O1O1N2O1N3M2O1N2O1N2O3L2O1O1N2O2M4M4K2N2N2O0O2N3L3N3L2O2N2N2O2M3M2M3N2L4L5M4K4M2N3M3J7GVca9\"}}, {\"image_id\": 90, \"category_id\": 2, \"bbox\": [509.51458740234375, 274.6485900878906, 304.1263427734375, 143.04373168945312], \"score\": 0.9999998807907104, \"association_id\": 1, \"segmentation\": {\"size\": [613, 820], \"counts\": 
\"gj]:5ob01O101O000O2O0000001N1000001O0000001O0O100000000O100O100O100000000O1000O10O10O10O10O0100O100O100O1O1O1O1O10O0100O100O1M2N3M201O010O100O010O1O1L4N200O100O100O100O1M3N101O100O10000O100O1L4J6O1O1O100O100O100M3J6A?I7EiLf@Y3Y?jLd@W3[?kLc@V3]?kLa@V3^?900O1O100O100O10000000000000000000000000000000000000000000O1000000O100O1000O0100000000O10000000000000000000000O10000O1O010O1000000O100000O0100O100O10000O1000O010000O100O10000O2N100O2N1O2N1O10001N101N101N2J6M200O2O0O101N101N1N3N2M200O2O0O10001N100O2O0O2N2O0O101O000O2O001N101N2O0O2O0O2O1N2N7EYY3\"}}, {\"image_id\": 90, \"category_id\": 1, \"bbox\": [468.0863952636719, 192.08050537109375, 207.22396850585938, 204.86123657226562], \"score\": 0.9999996423721313, \"association_id\": 1, \"segmentation\": {\"size\": [613, 820], \"counts\": \"Y]Q9m0Tb08J4M3M2M4L5Db0E5L2N2O1N2M4L7I<E4L3M3M3M4M4K6J2O2N0O2N2N2M4M3M2N2O0O2N2K5K5M2O2O0O2N1O1K5L4N200O1O1O1O1N2O1O10000O10000000000O1O1O1O1O1O100O100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000O100O1O1O2N1O1O101N2M3N2M3O0O1O2N1O1N2O1O2N101N2N2M3N2N2N2O0O1O1O2N1O1O2N102M2N2O0O2N2N1O2N2N3L4L4M3M2N2N2N4K:@a0C8J5L3M4K6K6G>\\\\O<J6K5K]`l2\"}}, {\"image_id\": 90, \"category_id\": 2, \"bbox\": [219.43096923828125, 284.3282470703125, 600.5690307617188, 153.6695556640625], \"score\": 0.15461638569831848, \"association_id\": 0, \"segmentation\": {\"size\": [613, 820], \"counts\": 
\"`Rf41Sc02O1O1O1O001N10000000001O000O1000000000001O000O1000000O10000O1N2N2O1N2O1O1O1O100O10000O10000O100O100O100N2N2D<I7M3M3N2XOTNU@n1i?VNS@l1l?WNP@k1o?f0O1N2M3O1O100O1O1O100O1K5N2N2O1N20000O10000O100000000000000O100000000000000000000O1000000000000000000000000000000000000000000000000000000000O100000000O10000O1O100O10000O10000O1000000O10000O100O100O1O100O1O10000O2O00000O10000000O01000000O100O100O10000O1000000O1000000000000O10000O10000O100O10000O10000000000O101O00001N10001O0O2N2O1N1O2O0O101O0O10001N1000001N101O2M2O2N3Mo0PO8I3M1N2O1N2O001N2O0O2O0O2O0O2O1NXhi00iWVO1N100O101O0O101O0O10000O2O00000O10000O100O100O100O1O10000O10000O100000000O1000000O10O10O1N2N2N2O1O1O1O1O10O10O1000000O10000O1000O0100O1K5M2N300O10O01000000O0100000O10000O100O1L4I7E;N2N2O1O100O1000000O10000O10O10O100N2M3L4]Oc0O1O1O100O1O10000O100O100O100O10000O1O10000O10000000000O100000000000O100000000O100O10000O10000O10000000000O10000000000O10000O100O1O1O1O1O100O10000O100000000O10000O2O0O100O1O1O1O101N1O100O100O2O000O100O2O0O1O1O1N2N3M2O1O1N201N10000O10001O000O101O0O101O0O100O2N1O100O1O2O0O10001N1000001N101O1N2O1N2N5L]F\"}}, {\"image_id\": 90, \"category_id\": 2, \"bbox\": [0.0, 58.59722137451172, 255.90667724609375, 180.43972778320312], \"score\": 0.1098068580031395, \"association_id\": 0, \"segmentation\": {\"size\": [613, 820], \"counts\": \"e3\\\\3^?a0H5L3M1O2O0000001N100000000001O00000001O0001O000O101O0000000000000O100O1O1O2O0O10000O100O100O1O1O10O10O1000O01000O010O10O1O001O100O10O0100O1O1O1O0O200O1O1O010O1O0010O001O001O0000001N10001O0O2N1O2N1O100010O01O1O001O00000001O010O100O100O1O1O2N100O010O100O100O1O00100O2O000O100O100O2O0O00100O100O10000O100O2O0N200O2N100O101N100O1N2O2O0O100O100O1O2N1O1O100O100O101N1O1O1O1O101O0O2N100N3O0O1O2O0O100O100O1O2N1O100O101N101N1N3N2N101N1O100O2N1O1O100O101N100O1O1O1O1O2O0O101N101N1O2O0O2O000O101N100O2N1O1N3N1O5KiZc:\"}}, {\"image_id\": 91, \"category_id\": 2, \"bbox\": [514.4518432617188, 293.771728515625, 50.95220947265625, 9.108306884765625], \"score\": 
0.9999996423721313, \"association_id\": 2, \"segmentation\": {\"size\": [453, 640], \"counts\": \"kaT75o=2O00001O00000000000000000000000000000000001O000001O001O00001O00000000000000001O0000001O000O101O[lP1\"}}, {\"image_id\": 91, \"category_id\": 1, \"bbox\": [75.61931610107422, 95.7388687133789, 514.8619384765625, 310.3203430175781], \"score\": 0.999998927116394, \"association_id\": 1, \"segmentation\": {\"size\": [453, 640], \"counts\": \"[jQ1[2d;?E4M2N2N3N1N1O2O1O1O0O2O1O1O001N2O001O1N101O0O2O1O001O001O001O00001O0001O01O000000001O0000000000001O0000000000000000001O0000000O2O0O1O2O1O0O2O1N2O1O0O2O001O0O2O00001N101N101N2O1N2N2M3N2N1O2N2N1O2N1O2O0O1O2M3M2N3L6UOk0M3L4L4M1O2M2O1N2O1O1O1O0O2O1O1O1O1O1O1O1O1O1O1O1O001N101O0000000000000000000000000000000O10000000000O10000O100O010O1O10000O010O1000000O100000000O10000000000O10O100000O10001O0O100O10000O1O100O100O2O000O1000000O1000001O00000000000O1000001O00000000000000000000000000000000000000000000000000000000000000000000000000000000O01000000000O1000000O1000000O10000O100O100O10000O10000O1000000O10000000000O2O00000O10000O10000O10000O100O1O100O10000O100O1000001N1000000O1000000000001O00001N10001O001O1O001O001O00001O000000000O101O000000000O1000001O000O11O00000000001O01O0000000001O00000001O001O001O001O1O001O100O001O001O00001O001O001O1O0O2O3M4gIVIo4n6]JgI^5e7K3M2N2VKWGT4k8gK\\\\G1Jc3l8WLPHg3Y9N2M4M8H4K2O2M2N3N1M4L3M4dNaDKd;IREIQ;3YE\\\\OR;`0\\\\1M4K4J^cb1\"}}, {\"image_id\": 91, \"category_id\": 2, \"bbox\": [67.96660614013672, 308.2793884277344, 459.61724853515625, 128.53253173828125], \"score\": 0.99997878074646, \"association_id\": 1, \"segmentation\": {\"size\": [453, 640], \"counts\": 
\"do\\\\1:k=0O2O000O2O000000001O00001O1O3M2N1O100O1O1O001O0010O010O01O010O1O1O10O0001O0001O01O0001O10O0100O100O000010O000000010O0000010O01O22OOOO0010O01O0010O0001O001O100O1O6K1O01O0O1O01O01O01N1000001M3K7JcS70^lH4M3M2O001N10001O0O101O0O101N3M2N3L4M2O1N101O0000001O0000001O001O1O001O001O001N1001O01O0010O0100O102M2N10O01O010O00000010O00010O0001O01O0000000001O00000000000000000000000000000000000000000000000000000000000000000000000000000000O1000000000000000000000000O10000O010O100O10000O1000O01000O010O010O01O01O01000O0100000O0100000O10000O010O100O10O10O1000O1000O10000O010O10000O1000000000O01000000O10000O10000O010000000O10O1000000O100000000O1000000000000O100000000000000000000000000000001O00000000001O0000001O000010O000001N10001O0O2O1N5L1O1N2O1N2O001N101O0O2O0O2N2O2L4Lbho1\"}}, {\"image_id\": 91, \"category_id\": 1, \"bbox\": [508.0448913574219, 247.93887329101562, 63.950225830078125, 50.871490478515625], \"score\": 0.9953656196594238, \"association_id\": 2, \"segmentation\": {\"size\": [453, 640], \"counts\": \"`hR72o=8H:H6L1N2N101O0000001N10000HSOXCm0Q=O1O1O1N100O1O2O0O1001O001O2N1O2N0010O01O0000000001N100O1M3N2M3O1O1000001O000O1M3O1N2O2NYjm0\"}}, {\"image_id\": 91, \"category_id\": 2, \"bbox\": [85.47412872314453, 303.5263366699219, 178.34979248046875, 100.3468017578125], \"score\": 0.6311177015304565, \"association_id\": 0, \"segmentation\": {\"size\": [453, 640], \"counts\": \"[^Y13Q>2O1O10O01O1O1O1O001O001O1O1O0O2O001O1N101O1O1O1O1O1O100O001O100O001O00100O1O01O010O010O01O0010O001O10O01O010O010O1O00010O00100O01O010O000100O010O01O010O01O10O0100O100O10O0101O1OO01O100O3O0O00O10O003M9F3MUh9NnWF4L1N3N001N2O001N101N100O1O100L5N1O100O100O1O1O1O10000O10000001O00000000000000001N103M1N`0]ObnU5\"}}, {\"image_id\": 91, \"category_id\": 1, \"bbox\": [559.3385009765625, 337.22674560546875, 23.46380615234375, 115.77325439453125], \"score\": 0.18683794140815735, \"association_id\": 0, \"segmentation\": {\"size\": [453, 640], \"counts\": 
\"QUh7?\\\\<NQDg0U:_2O100000000000000000000000000000O2Ab0kMbji0\"}}, {\"image_id\": 91, \"category_id\": 2, \"bbox\": [141.60348510742188, 214.1307373046875, 430.1964416503906, 229.48788452148438], \"score\": 0.05771816521883011, \"association_id\": 0, \"segmentation\": {\"size\": [453, 640], \"counts\": \"gbo16n=8H6J2O1N101O001O010O001O010O001O0010O0001O01O00101N2N2O1N1O100O10O10O10O01O1N2N4K5JgS3N^lL1N11OQ^41laK3M2N3O00001O0010O01O1O1O1O001O1O00001N101N101O0O2O001N2O1O001O0O2O00001O001N101O1O1O1O1N101O1O001O001O00100O1O1O2N1O100O1O001O1O00010O001O00001O000010O0001O00001O01O01O00010O000001O01O0001O00000000000000000001O00000001O000000000O100000000000000000000000000000000000000000000000O100000O10O10O010O010O1O010O100O010O1000O01000O0100O010O0100O10O010O010O10O01O1O1O001O100O010O100000O010O100O1O10O0100O10000O01000000O10000O1000000O100000000O100000000000000000000O01000000000000000000000000000000000000000000001N100000000O101O000O1000001N100O1O1O1O2N100O1O2ZOSC1S=FTC2ecT2\"}}, {\"image_id\": 92, \"category_id\": 1, \"bbox\": [3.5546510219573975, 391.2414855957031, 311.0545349121094, 242.42202758789062], \"score\": 1.0, \"association_id\": 5, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"Z_3^3_d08J3L4M2N3M2M4L4L4L3N2O1N2N2O001N1O2N2N1O2M3N2N1O1O2O0O1O1O1M3M2N3N2N2O1O10O01O1N1O2M3M201O1O010O10O0100O001N1O1010O00001O0O100N200010O010O01000O10O01O1O1O1O100O1000000O100000000000000000O100000000O1000O10O10000O100000001O00000000000O10000O100000000O100000000000000O100000000000000000000000000000000000000000000000000000000000000000000000000001O000000010O0000001O001O001O001O001O00001O000000000000001O0000000000000000001O01O000001O001O010O1O1O100O1O010O00100O1O10O01O1O010O001O00001O0010O01O1O1O001OO2O001O1N2O1N2O1N101N101N1O2M3N2L5L3M3N2N2N2N2M3N3L5K6J6J5L3M2N2O1N2O1N2N3N5J5L2M2O1N2O1O0O2O2M4M3L4M2M3M3M2N2N2N3M3M3LY1YNgWc`0\"}}, {\"image_id\": 92, \"category_id\": 1, \"bbox\": [525.0134887695312, 388.63421630859375, 61.08599853515625, 55.174468994140625], \"score\": 1.0, 
\"association_id\": 3, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"od[<?^g09I4K3N2K5I6M3O1O100O100O1000O001000O10000000000000O10000000000O10000000000O10000O100O1N200O1O2N1N3L4E;L5J6FkS[:\"}}, {\"image_id\": 92, \"category_id\": 1, \"bbox\": [586.2799072265625, 375.4522705078125, 155.8052978515625, 223.54107666015625], \"score\": 1.0, \"association_id\": 2, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"Ydh=V2fe08D:L201O00001O0O0S]OeMW`0[2__OQN_`0o1[_OYNc`0g1Z_O^Nd`0b1[_O`Nd`0`1Z_OcNd`0^1[_OdNd`0\\\\1Z_OgNe`0Y1X_OjNh`0V1W_OlNh`0T1V_OnNj`0S1S_OoNm`0S1n^OPORa0R1j^OPOVa0V1b^OlN^a0k3000000000O1010O000001O00O010000O100O101N109F10K5L5O000000000O2O00001O1N3N1O2N2N1N2O2N2N3M3M2N2N1O1O1O001O00001O001O001O001O0000000000000O10000000O1O1O1O1O1N2O1M3M3N2M3N2N2N2N2L4L50O001O00001O2N3M1O1O1O001O00001O00O2N100O2N1O2M3eNR^O`LQb0W3^^O`Lea0\\\\3b^O^Laa0_3d^OXLba0e3_1L4L5oNi[OSNZd0h1n[ORNUd0j1V\\\\OlMmc0o1T1M4M3N2N3L7EfSc6\"}}, {\"image_id\": 92, \"category_id\": 1, \"bbox\": [744.9751586914062, 389.2906494140625, 279.02484130859375, 198.41754150390625], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [768, 1024], \"counts\": 
\"RWba0:bg0;F5K6I8G:E8K5L3M4M2N2N2N2N2N3M2N2N2O1N101N10001N101N101N101N2O0O2O0O2O1O1N101O1O001O001O1N101O1O001O1O00001O001O00001O00001O001O1O001O001O001O00001O00001O0010O01O001N10001O00001O0001O000000000000000000000000000000000001O00000010O0001O00001O00001O0000001OQO\\\\\\\\ObMdc0]2_\\\\OaMac0]2c\\\\OaM]c0_2c\\\\ObM[c0^2f\\\\ObMZc0]2g\\\\OcMYc0[2i\\\\OfMVc0Y2k\\\\OgMUc0W2m\\\\OjMRc0T2P]OlMPc0R2R]OoMmb0P2T]OPNlb0o1U]ORNjb0n1V]ORNib0n1W]OTNhb0l1X]OTNgb0m1Y]OTNeb0l1[]OUNdb0l1\\\\]OUNbb0k1_]OUN`b0l1_]OVN`b0i1a]OWN^b0i1c]OXN\\\\b0h1b]OZN]b0h1`]O[N_b0e1^]O_N`b0c1\\\\]OaNbb0a1Z]OcNdb0b1V]O`Nib0\\\\3O2N100O100O1O1O1O1O101N100O10000000000O1000000O100O1O1O1O100O100O100O100O100O100O1000000O1000000O1000000O10000O100O1O100O10000O1000000000000000000000000000000000000000000000000000000000000000000001O00000O1000001O00001O1O001O0O2O2TOR^OaKQb0\\\\2o]OcNc0dNga0S2i^OUNQ6\"}}, {\"image_id\": 92, \"category_id\": 1, \"bbox\": [371.6615905761719, 384.8929138183594, 143.40481567382812, 218.55795288085938], \"score\": 0.9999998807907104, \"association_id\": 4, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"YPl8>\\\\g0;H5Ji0WO5M4M2N0O1iLXNU@k1f?_NT@a1l?eNn_O\\\\1Q`0lNg_OT1Z`0SO[_OP1e`0^OS^O\\\\1na0Y22O1O001O100O11O1O1O1O1O1O1O1O001O0000000000000000000000O10000000000000000001O1O1O1O1O1O2N1O2N1O1O1O1O1O1O1O1O001O1O1O2N2N2N2N2N2N3M2N1O00001O0000000000000000O1O1O1L5L3L4H8F:L4N2O1O10000O11O001O1O001O001O010O1O001O10O001O1O2VKS^O\\\\3Pb0`LU^O]3ma0]LY^Oa3la0SL^^Oi3Vc0F4K5K5J7I:gM[[Oe0Qnm;\"}}, {\"image_id\": 92, \"category_id\": 2, \"bbox\": [844.280029296875, 596.2447509765625, 165.00030517578125, 37.15606689453125], \"score\": 0.999998927116394, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": 
\"kZmc01mg02O3N1N2O001O1O1O1O00001O010O0O101O001O0000001O0000001O01O00000001O00000001O01O00000000000001O0000000001O00000000000000000O10010O000000000000000000001O0001O000000001O10O01O001O001O1O0010O02N1O001O00100O2N001O001O2N1O001O1ORhe00mWZO1O100O1000000O100O10000000000001N1Obd:\"}}, {\"image_id\": 92, \"category_id\": 2, \"bbox\": [611.433349609375, 577.1104125976562, 100.7381591796875, 25.82305908203125], \"score\": 0.9999986886978149, \"association_id\": 2, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"\\\\Ze>3mg01N1GLfXO5Yg0KgXO5Yg0LfXO4Zg0LfXO5Yg0KfXO6Zg0KeXO5[g0LdXO4[g0NcXO3]g0801O0001O0000000000100O001O001O001O001O00100O1O001O1O1O2M3NP`c00P`\\\\O0O100COkXO1Tg01;200000000000000001O1N2OlmZ7\"}}, {\"image_id\": 92, \"category_id\": 2, \"bbox\": [51.47351837158203, 585.671630859375, 237.86309814453125, 72.358154296875], \"score\": 0.999997615814209, \"association_id\": 5, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"YSb12ig0<Gf0[O2O1O0O2O000O1O1O101O0O100000000000000000000000000000001O0000000000001O000000001O0000000000000O100000001O000000000O1000000000001N10000000000000000O1000000O1000000000000000000000000000000000000000000000000000000001O0000001O000000001O0000001O0000001O000000000000001O0000001O000000001O000000001O0000001O00000O2O00001O001N102N5K1O1O1O1O0O2O1O4K4M1O001N101O1O1N2N2N2O0O101N100OoV>OWQA1fg08J5L101O1O1O1O0100000001N4J[eVa0\"}}, {\"image_id\": 92, \"category_id\": 2, \"bbox\": [733.1008911132812, 545.8584594726562, 129.03875732421875, 25.69244384765625], \"score\": 0.9999960660934448, \"association_id\": 1, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"ZiWa01ng01O2O1N1000001O00001O0000001O0000001O00000000000000001O0000000000001O000001O0001O000000001O000001O0001O0000000000000000001O0O100001O00001O0000001O010O0000000000001O00010O0000001O00001O0000001O00000010O00000001O00001O00001O00000000001O00001O1N10Xnk3\"}}, {\"image_id\": 92, \"category_id\": 2, \"bbox\": [384.54119873046875, 571.9820556640625, 100.88247680664062, 
30.791259765625], \"score\": 0.9999706745147705, \"association_id\": 4, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"mYT91ng02O1O000000001O0000001O000bXOKQg05lXO0Rg00nXO1Qg0OnXO2Rg0OmXO1Sg0OmXO1Sg0OmXO1Sg0OlXO2Tg0NlXO1Ug00iXO1Wg0OiXO1Wg0OiXOOYg01gXONZg02fXONZg02fXONZg03eXOM[g03dXOM]g03cXOM]g03bXON^g02bXON^g0:000001O001O00001O00000000001O00001O01O010O1O00010O001O01O02^OcXOO1;dg0M2H^XO0VmZ=\"}}, {\"image_id\": 92, \"category_id\": 2, \"bbox\": [525.0784912109375, 440.0428771972656, 51.2152099609375, 7.702056884765625], \"score\": 0.9999395608901978, \"association_id\": 3, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"imZ<2mg011O00010O01O0001O0000000000O2O00000000000000001O00000000000000000000O10000000000000000000001NVRa:\"}}, {\"image_id\": 92, \"category_id\": 2, \"bbox\": [562.3251953125, 465.574462890625, 52.906982421875, 28.06591796875], \"score\": 0.9995126724243164, \"association_id\": 6, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"inX=2mg02O2N1N2O100O1O1N101O001N1010O1O010O00100O1O100O10O1O0O101N1O1O101N1000000001O0O2O2KaXOEeg03QQf9\"}}, {\"image_id\": 92, \"category_id\": 1, \"bbox\": [580.1782836914062, 375.01751708984375, 86.4212646484375, 124.89053344726562], \"score\": 0.6729169487953186, \"association_id\": 6, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"Z]c=180]f0Z2mN5L4L3N2N100O1O2M3N1O1OkNZ[OQOfd0o0\\\\[OPOdd0h0V[O_N7i0cd0`0W[OeN213k0dd0NU[OZOO1>JJl0ed0Ma[OWO<k0Td0L^\\\\O4bc0JZ\\\\O\\\\OkNi0ld0K_\\\\O5ac0K`\\\\O3ac0M`\\\\O1ac0Na\\\\O0`c00f\\\\OH\\\\c07f\\\\OD^c0<c\\\\OA_c0?b\\\\O^O`c0b0P2N100O10001O0000000000001O000001O00O10000O10001O000001N101O000000O1O11N2O1OO1N2N2001O1O0O3M2O1`ZO\\\\Obc0e0[\\\\OC_c0>\\\\\\\\OHbc09[\\\\OIec0:X\\\\OFhc0<V\\\\OCjc0?T\\\\OBlc0?nZOSOn0?Sd0?lZOVOn0<Wd0;oZOVOl0;`h\\\\8\"}}, {\"image_id\": 92, \"category_id\": 2, \"bbox\": [579.79541015625, 562.9134521484375, 56.5059814453125, 23.170166015625], \"score\": 0.17849160730838776, \"association_id\": 0, \"segmentation\": {\"size\": 
[768, 1024], \"counts\": \"eac=2mg02O2N002N1O001O1O000000001O00000100O3M3N0O2N001O1OO2O01OO100O1N2O1O100O100001O01O00000000O2O00000O01000000001NU^R9\"}}, {\"image_id\": 92, \"category_id\": 1, \"bbox\": [865.1171875, 467.4556579589844, 156.0057373046875, 151.67013549804688], \"score\": 0.09119829535484314, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"iWYd07cg0?YO`0N0[YOlN^f0T1bYOmNRf0ORZOU1JmNPf03UZOP1KmNPf03VZOo0IPOQ1Kkc06[[OW1h0fNoc0OZ[O[1h0eN[d0[1f[OcN[d0]1T10O02O3L3N1M2oNVYOk0Pg00J50O2O0O2O0O200OjZOUOZc0i0f\\\\OYOYc0f0g\\\\O\\\\OXc0d0g\\\\O]OXc0c0]\\\\OWOgN;kd0<`\\\\OM^c04b\\\\OMkb0If[O9`1Oib0b0X]O^Ogb0b0Z]O_Oeb0a0[]O@db0`0\\\\]OAcb0>]]OEab0;_]OF`b09a]OH^b08a]OI_b07_]OL`b05T]O8jb0Hl\\\\Oc0Sc0]Ok\\\\Of0Sc0[Ol\\\\Og0Sc0YOk\\\\Oi0Uc0WOj\\\\Ok0Vc0TOh\\\\Oo0Wc0QOg\\\\OR1Xc0nNg\\\\OT1Xc0lNh\\\\OT1Xc0kNh\\\\OV1Xc0jNg\\\\OX1Xc0gNh\\\\OZ1Xc0fNg\\\\O[1Yc0eNf\\\\O\\\\1Zc0cNg\\\\O]1Yc0cNf\\\\O^1Zc0aNg\\\\O`1Xc0`Ng\\\\Oa1Yc0^Nh\\\\Ob1Xc0]Nh\\\\Od1Xc0[Ni\\\\Oe1Wc0ZNj\\\\Og1Uc0YNk\\\\Og1Uc0XNk\\\\Oi1Uc0VNl\\\\Ok1Sc0TNn\\\\Ol1Rc0SNo\\\\On1Pc0RNP]Oo1ob0PNR]OP2nb0PNR]OP2nb0oMR]OS2mb0mMS]OS2mb0mMS]OT2lb0lMS]OV2lb0jMT]OW2kb0iMT]OY2kb0gMU]OZ2jb0fMU]O[2kb0eMU]O\\\\2jb0dMU]O^2jb0bMV]O_2ib0aMW]O`2hb0`MW]Oa2ib0_MW]Oa2ib0_MW]Oa2ib0_MW]Oa2ib0_MV]Oc2ib0]MW]Oc2ib0]MW]Oc2ib0]MW]Oc2ib0]MV]Od2jb0\\\\MV]Od2jb0\\\\MV]Od2jb0\\\\MV]Od2jb0\\\\MV]Od2jb0\\\\MV]Od2jb0\\\\MV]Od2jb0\\\\MV]Od2jb0\\\\MV]Od2jb0\\\\MV]Od2jb0\\\\MU]Oe2lb0ZMT]Of2lb0ZMS]Og2nb0XMQ]Oi2ob0nLh\\\\OF9]3ob0kLm\\\\OD4b3nb0iL]]OW3cb0gL^]OZ3bb0dL_]O]3ab0aLa]O_3`b0^Lb]Ob3^c00000000000000O100000000000000000000000000001O1O1O001O00001O001O1O1N3N1O3M;PMP[OY2_e0Lc0ZOff1\"}}, {\"image_id\": 93, \"category_id\": 1, \"bbox\": [1261.6923828125, 916.9313354492188, 161.5718994140625, 131.91094970703125], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [1536, 2048], \"counts\": 
\"e_Wk11f_10_`N5]_1:``NDP_1l0C=C<K5L4K4N3K5M3N1N2O2N1N2O1N2O1O001O1N2O001N1O2N101O100O010O10O01O10000O1000000O10O10O10000O10O010000000O01000000000000000000000000000000000000000000000000000000000000000001O00000000001O0000001O0000000000001O000000000000000001O0001O01O001O01O010O0010O001O1O100O1O010O2O000O1N9lLTcNc2W]1N6I2O1N3N3L5K4L3L3N5K7H;F6K3F:Ac`N3S`[m0\"}}, {\"image_id\": 93, \"category_id\": 1, \"bbox\": [783.2294311523438, 922.7030639648438, 156.49395751953125, 90.2366943359375], \"score\": 1.0, \"association_id\": 3, \"segmentation\": {\"size\": [1536, 2048], \"counts\": \"Y^jT15i_15L4Kb0^O8I3N2M2O3L3N0O101N1O1M3O100O1O001O1O1O10O0100O001O10O010O1N1010O010O0100O01N2O1O10O10O1000000O100O2O0O1O2N1000000000000O1000000000000000000000001O00000000000000000000000O10O101O00000O1000000O1000000O10000O100000000O10000O10000O010O1O1N2N2O1O1O1O1L4O1N2O1M4K4K5M3O1N2L5N1N2O1O1O2O1N2N2O1N2M3M4LjRSd1\"}}, {\"image_id\": 93, \"category_id\": 1, \"bbox\": [873.7597045898438, 915.3320922851562, 158.15203857421875, 332.28375244140625], \"score\": 1.0, \"association_id\": 2, \"segmentation\": {\"size\": [1536, 2048], \"counts\": \"ldWY1X1\\\\^1`0B>I7G8G8I8I6L3M3N1N3N1N2N3M2L4SNaLSgNe3eX1[MXfNP3dY1i1M2M3N2N2N2O1N2M2K6J6M3M3N2M2I8L4N1O1O10O0001M3N2N1L5A>M31O1001O00O2O001N1O2O00001O101N100O10000O2O000O1010O10O100O1O2N2N100O1O1O1O100O1O1O2N2N2M2O1O1O1O1N3N2cKggNo0ZX1kNngNR1SX1fNXhNU1iW1fNahNT1aW1jNghNo0ZW1oNlhNk0UW1UOnhNf0UW1XOnhNe0TW1ZOnhNb0UW1]OnhN=VW1AmhN;VW1DlhN7XW1HihN4\\\\W1LchN0bW1O_hNJhW15ZhN@QX1>UhNTOTX1j0RhNmNUX1P1ngNiNYX1S1mgNdN\\\\X1W1^3H4L2O0O2N100O2O0O2N2N2O1N1O2N2M4L3L4M3M2MX_V`1\"}}, {\"image_id\": 93, \"category_id\": 1, \"bbox\": [539.6461181640625, 898.8899536132812, 114.70159912109375, 151.82440185546875], \"score\": 0.9999998807907104, \"association_id\": 4, \"segmentation\": {\"size\": [1536, 2048], \"counts\": 
\"kn]i07e_1:YNJ\\\\bNH8i0V]1FYbNM5c0^]1[1N3J6K6I6L3N4L7I3O2M7I1O1O010O0001O0001OYOQdN\\\\MQ\\\\1a2RdN^Mo[1`2TdN^Mm[1P2kcNmM<2k[1o1ddNoM^[1P2bdNPN`[1n1`dNRNa[1n1^dNRNc[1m1]dNRNe[1m1[dNSNf[1l1ZdNTNg[1j1ZdNUNh[1j1XdNVNk[1i1SdNWNP\\\\1g1ocNXNW\\\\1e1fcN[N^\\\\1c1acN]N_\\\\1c1acN\\\\N_\\\\1e1bcNYN^\\\\1i1bcNUN^\\\\1l1ecNPNY\\\\1T2f04H9L7K1O00O0101N3M2N3N2N2N2M3N1O1O2N1O010O010O010000O100O2N1O1O0010001N2N2N1O2N101N101N1O9XMhbNV2g]1J4L2N101N5K2N1O1O2N1O2N2N1N2N3M3M3M3L<BYPZQ2\"}}, {\"image_id\": 93, \"category_id\": 2, \"bbox\": [884.6754760742188, 1206.718505859375, 174.87091064453125, 64.052734375], \"score\": 0.9999998807907104, \"association_id\": 2, \"segmentation\": {\"size\": [1536, 2048], \"counts\": \"ZWkY16i_12N1O2N1O1O2N100O100O100O101OO0100O1000O0100O100O100O100N2O1O1M300O1O1N2N2O1O100O100O10000000000O1000000000000O100000O010000O10O0100O100O010O1O0O11O1O01000O1N2L4N101O100O001O10O10O010000O01000O0100O100000000O10000O10000O1O100O10000O1O1O1O100O100O100O10000O100O10000O100O2O0O1O2O00001O001N1000001N100000001N10001O00000LX`NOkYf^1\"}}, {\"image_id\": 93, \"category_id\": 1, \"bbox\": [1488.5106201171875, 898.9622192382812, 65.6121826171875, 165.05108642578125], \"score\": 0.9999977350234985, \"association_id\": 0, \"segmentation\": {\"size\": [1536, 2048], \"counts\": \"n^mU21k_1:A?E:I3N2UOhNjbN\\\\1Q]1nN`bN[1\\\\]1g0M3N2N1O1O2M2O2N2N2M2N3M3N3L3XdNfLgZ1\\\\3VeNfLhZ1]3QeNjLlZ1V4K5O10000O1O11O000O001N3cKedNo3`[1mKedNn3^[1RLadNl3c[1SL]dNk3m[1O2N2N1fNXdNhMD9T\\\\1n1fdNiMa[1U2bdNiM`[1U2bdNhM`[1W2U1O2N3J6VOi0M3L5M4N0Ik`NAV_1=:M1M2OjbUg0\"}}, {\"image_id\": 93, \"category_id\": 1, \"bbox\": [1107.072509765625, 904.2382202148438, 71.31005859375, 141.65814208984375], \"score\": 0.9999942779541016, \"association_id\": 5, \"segmentation\": {\"size\": [1536, 2048], \"counts\": \"SPRd14k_13N0O1N3N2N1O0O101N1N2_NOScN2h\\\\1e1OK21O3UOj0OO4O100O1N2O2O010O1O1O100001OO10000000001O0O2M3L5M4L3oNibNRO]]1f0kbNSO[]1f0T1M3K5L5JRcaY1\"}}, {\"image_id\": 93, \"category_id\": 2, \"bbox\": 
[1267.75146484375, 1021.2815551757812, 152.4683837890625, 28.94342041015625], \"score\": 0.9999626874923706, \"association_id\": 1, \"segmentation\": {\"size\": [1536, 2048], \"counts\": \"]PQl12m_1101N101N10001O0O100O10000O101O000000000O101O0000000001OO10000000001O00000001OO10000O10000O10000O101O0O1000000000001O0000000000000000000001O00000001O00000001O00001O0000001O00001O00010O000000001O00000000000000000000001O000000000000001O0O10001O000O1O1O10000O100O011N2Ndoem0\"}}, {\"image_id\": 93, \"category_id\": 2, \"bbox\": [795.2489624023438, 1003.4429321289062, 115.3756103515625, 14.05780029296875], \"score\": 0.994976818561554, \"association_id\": 3, \"segmentation\": {\"size\": [1536, 2048], \"counts\": \"eoiU12m_12O000O100000000O100O10000O1000000O1000000O1000000000000000000000O100000000000000000000000000000000000000000O100000000000000000000000000000000000000O1000000000000000O1000000000000000000O1000000O2N103Kb`^e1\"}}, {\"image_id\": 93, \"category_id\": 2, \"bbox\": [1103.9361572265625, 1029.6904296875, 70.3460693359375, 17.20361328125], \"score\": 0.9825137257575989, \"association_id\": 5, \"segmentation\": {\"size\": [1536, 2048], \"counts\": \"W`jc12n_11O00001O01O1OS`NOo_R11monN10001N10000O10001O0O1000001O000O1001O000001O000001O000O101O001N2Oc_UY1\"}}, {\"image_id\": 93, \"category_id\": 1, \"bbox\": [1580.1478271484375, 975.8912963867188, 308.929931640625, 397.54473876953125], \"score\": 0.7009730935096741, \"association_id\": 0, \"segmentation\": {\"size\": [1536, 2048], \"counts\": 
\"f^X[26i_16J6K1N3N2M3N2N3L4M2N2N1N3N1O1O1O2O1N4L4L4L6J5_bN\\\\N\\\\\\\\1h2L2N2N3M3M:Fe0[O6J6J4L3M3M2O2M2O1N3N2M4M4L2N2N1O1O10O01O2N2N4L6K3L1O0000001O1O002N3L3N3M2M3N1O2N2igNhHaW1Z7UhNPIiW1f7N2N2O0N2O00000O01O1O010O1O1O010O1O101N1O2N2O1N2N3M2N100O1O1O1O001O1O00100bKegNU1[X1hNWhNi0jW1SO^hNh0cW1VOchNe0^W1[OhhN=[W1BohN3RW1MViNJlV15YiNDiV1;[iNAfV1?]iN]OdV1c0_iNXOcV1g0aiNUO`V1k0aiNROaV1m0aiNPOaV1P1aiNlNaV1S1aiNhNcV1W1`iNbNfV1]1]iN[NiV1c1ZiNVNlV1h1XiNoMnV1Q2a3O1O1O100O2N1O1O2N100O1O1O001O100O100O101O0O100O1000O01O00001O001O00001O001O000100O5L6I4L3N4L5J3N10N2O2N2N2M5L4SNibNh0Y^1O000000O2N2O2M3M3M2N2N101N1O00010O01O1O001O1O001O1O10000O10000O1001N101O0001N2O1N2O1O1O1O100O001N101O2M3N5K3M1O0000O14I6I7IPiQ9\"}}, {\"image_id\": 93, \"category_id\": 2, \"bbox\": [581.1474609375, 1037.47412109375, 61.0836181640625, 11.0313720703125], \"score\": 0.5175880789756775, \"association_id\": 4, \"segmentation\": {\"size\": [1536, 2048], \"counts\": \"a`^k01n_1101N10001O001O0001O0000000000001O1O000001O01O000000001O00001O0000[_nR2\"}}, {\"image_id\": 93, \"category_id\": 2, \"bbox\": [1505.371337890625, 1053.70068359375, 41.831787109375, 9.51953125], \"score\": 0.43231189250946045, \"association_id\": 0, \"segmentation\": {\"size\": [1536, 2048], \"counts\": \"PafW23l_1101O0O1000001O0000O101O0O101NQ_ag0\"}}, {\"image_id\": 93, \"category_id\": 1, \"bbox\": [1416.906982421875, 725.7244262695312, 89.9805908203125, 125.26348876953125], \"score\": 0.13383974134922028, \"association_id\": 0, \"segmentation\": {\"size\": [1536, 2048], \"counts\": \"cW^R2n0o^17K2M2O2O1O000O2N1N3M3N2M3N2N3M3M2N2M3N3M102N1N2N2N3M2O1O1O<D1O001O1O1O000000O1O10000O100O101N101N2O1N2N2O1N2M3N2M4J5M3M4L3N2N2M4M2N1N3N2N2O1N2O1M3M4M1O2O0O2O0O2N10O1O0102N1O2M9CRhZi0\"}}, {\"image_id\": 93, \"category_id\": 2, \"bbox\": [1593.4368896484375, 1003.8264770507812, 266.2591552734375, 347.90179443359375], \"score\": 0.06628716737031937, \"association_id\": 0, \"segmentation\": {\"size\": [1536, 2048], \"counts\": 
\"dif`2>^_14K6J6L3M4L3N2N3M2N2000000O10000000000000000000000000000002N2N2N3L4M4K4M4K5L2M2O2HbVZ=\"}}, {\"image_id\": 94, \"category_id\": 1, \"bbox\": [521.681640625, 51.67573547363281, 572.0240478515625, 785.1101684570312], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [855, 1280], \"counts\": \"]Th>:Uj09F:K5N2N2N2N2O00001O0000010O00001O0001O01O000010O0000010O100O2O000O10000000O101O0O2O0O100O010O1O010O1O00100O1O00100O1O1]^O6W8LhFZ1R9fNjFc1P9^NkFk1P9WNlFo1P9SNkFS2R9nMkFV2T9kMhFX2W9kMdFY2Z9iMcFY2[9kM`FY2^9jM]FY2b9jMYFZ2e9jMTF[2i9jMPF[2n9jMjE[2T:lMaEZ2\\\\:QNWEU2f:WNiDQ2T;ZN]Dm1_;^NnCm1P<]NTBW3j=l4N3N1O1N2O1O1N2O0O2N1O2N1O2M2O2M3N1N3M3N1O2N2N2N2N2N2N2N1O2N1O1O1N2N3M2N2M3L4L4L5K4N2M3M3N2N3N1N2O1N2O0O2N2O1N2M3N1O2M3M3M3N1O2N2O1N2O1O001O00001N100O2O0O1O2M2N3M2O2N1O2N2N101N101O0O101N1N2O1M3N2M2N3L4L300000010O10O1000O101O0O10O01N2O1M2O2N1O1O100O01000001O01O1O001O000O1O1N1O1UOl0L4N2N2N3N1O2O1N200O1001O00001O001N101N1O2N101N101O1O001O10O01O10000000000O100000O1000000000O1000000000000O1O100O1O1O1O1O1N2O1N2N2O1N2O1O1O1O1O1O100O10000O10000O101O0O10000O10000O10000O101N1O2N2N2N2M3M3N2M3M4L4M3M3M2N3M2N2N2O1N2O2M2O1O2M2O2M3N3L3N3L3N2M3N2M3M2O1N2N2N2N3M2N2M4M3M3M5K6J5K6J6K5J3M3N1N3N1N2O1N2O1O1O001O1O2N1O1O2N1N3N2N2N2N2N1O2N1O1O1O1O001O001O001O1O001O1O001O1N2O001O1O1O1O1O1O1O2N1O1N3N1O1O1O001N2O1O001O001N101O001N1O1O1O1O1O1O1N2O1N2N2O1O1O1O1O1O1O1O100O2N1O2N1O2N2N2M2N3M3M3K5J7J5ZIR@^2R`0_MZ@S2l?iMW@T2l?iMW@S2m?kMU@R2o?jMT@S2o?kMS@R2P`0lMR@Q2Q`0lMR@Q2Q`0lMS@P2Q`0mMQ@P2R`0mMQ@o1T`0mMP@o1T`0lMP@P2U`0lMo_Oo1V`0lMo_Oo1V`0kMP@o1W`0jMo_Ol1^`0kMh_Ol1g`0iM__Ol1Pa0iMY_Ok1Ta0jMW_Ol1Qe0F;G9F=D;E<DSmc5\"}}, {\"image_id\": 94, \"category_id\": 2, \"bbox\": [32.435115814208984, 706.8740234375, 815.4296875, 104.3167724609375], \"score\": 0.9999901056289673, \"association_id\": 1, \"segmentation\": {\"size\": [855, 1280], \"counts\": 
\"jZo12dj02M2O2N2O0O2O0000001O0000001O000O101O0000001O0000000000001O000000000000001O000000001O0000001O0000001O00000000001O000000000000001O000000000000001O0000000000000000001O00000O10000000000000001O00000000000000001O000000000000001O0000000000000000001N10000000000000001O0O100000001O0000001O00001O0O10001O000000001O000000001O0000001O0000001O000000001O0O10000000001O0000000000001O000000001O00000000001O00000000000000001O000000000000001O000000001O000000001O000000001O000000000000001N100000001O000000001O00001O00001O000000001O0O10000000000000001O0000000000001O000000001O000000001O000000000000001O0O100000000000001O000O1000001O0000000O1000001O000O10000000000000000O2O0000000000001N100000001O000O100000000O1000001O0000000000000O100000001O00000000001O00000O100000001O000000000000000000000000000O2O0000000000000000000000001O00000000000000001O000000000000000000001O000000000000000000000000000000001O00000O100000000000000000001O00000000000000001O00000000000000000000000000000000001O000000000000000000000000000000000000000000001O000001O0000000000000000000001O000000000001O000000001O00000010O01O002N1O5K2N6J1O1O1O2N1O1O001O000000001O0000001O00002N1O2N1O1O6Jf0ZO7I2N1O2N2N001O001O001O00001O00001O000O2O001O002M3N2M2NPlj?\"}}, {\"image_id\": 95, \"category_id\": 1, \"bbox\": [1652.5340576171875, 833.4521484375, 118.9619140625, 368.083984375], \"score\": 1.0, \"association_id\": 5, \"segmentation\": {\"size\": [1536, 2048], \"counts\": \"Poa]2>]_1:B;E;E:ROZNXcNo1Z\\\\1R1J7L3M4L5M3L5L4L3L6jeNYLiW1o3hgN^LRX1i3agNbLZX1f5L4L2N3M1000000O1O2N1N2N2N2N2O2M2O2L4J6K6K5J6J6K5K5K6K5K5L3N2N:FT1lN2N1O000O1000000000O1O1N2L4J6H8I7J6M3O2N100O1001O1O1O01001O01O2O1N3M3L3N2M4L4L3M2N1O2N2M4\\\\NaiN_IdV1T4XiN^Lj0POUV1\\\\4PjN[K?OfU1b4SlNWKRT1e4h3G9I8I6J5K5G9F;I6J8A?E:I8D<Da0_O]cQ=\"}}, {\"image_id\": 95, \"category_id\": 1, \"bbox\": [699.7796020507812, 769.2072143554688, 190.070068359375, 499.76275634765625], \"score\": 0.9999998807907104, \"association_id\": 4, \"segmentation\": {\"size\": [1536, 2048], \"counts\": 
\"]^[Q14h_1:G:E8H6K5K5K4K6H8H9I7K5J5H8E<D=F;F9H8I7H;Dc0\\\\iNUJQR1b6UmNkIcR1^6QmNlIiR1Z6QmNlIjR1X6RmNlIkR1W6QmNlIlR1Y6PmNjIkR1^6nlNgIlR1d6ilN`ISS1i6elN[IWS1k6blNYI[S1l6_lNWI`S1m6ZlNWIdS1o6QlNWInS1^9O1O1O1O01O00O01O1O1O02O0O2N2O0XOVDZmNl;bR1`DTmNb;jR1k0M3N3L3M4M2N3N4L7I9G[1eN4M1N2O001O00001O0000000000000000000000000000000000O100000000O1O1O1O1O1O1M3L4L5J5L4M3N2O1N2N2N3M2N2N3M4L3N3M3M4L2O2N3M2N4K6K4L4L4L4L3M2M3N2^MjlNVHWS1f7olNWHRS1e7VmNUHlR1f7_mNSHcR1f7jmNQHXR1i7RnNRHPR1h7XnNSHjQ1i7[nNTHhQ1g7fnNlG^Q1n7]3C>fNY1^Ob0H8L5J6H9I7I6L3M4L4L4L5K6I7J5L3M5J9Gl0TOj0VOc0nNebhf1\"}}, {\"image_id\": 95, \"category_id\": 1, \"bbox\": [1285.3179931640625, 881.1209716796875, 67.9019775390625, 180.982421875], \"score\": 0.9999998807907104, \"association_id\": 0, \"segmentation\": {\"size\": [1536, 2048], \"counts\": \"Vnel1`0V_1>G8C=D<E9@`0F:K5N3L3N2N2N4L4M4K4M2M5L2N4K4M<D;D4L3M2O001O000001O002N1O2N1O2N2N3M3M4L2N1N3N2N2N1O1O3bMkdNKV[11WeN_N]O`0^[1o0oeNmNSZ1P1QfNmNRZ1n0UfNlNPZ1D[dNl0j1[OgZ17fddP1\"}}, {\"image_id\": 95, \"category_id\": 1, \"bbox\": [894.892333984375, 807.6602172851562, 173.68701171875, 434.97235107421875], \"score\": 0.9999998807907104, \"association_id\": 3, \"segmentation\": {\"size\": [1536, 2048], \"counts\": \"Z^UZ13g_1d0\\\\O8I7L4K4M2M4K5M3M2O2N1N2N1M4M3N2N2L4K6K4M4M2O1N2L3N3N2N102N3M4K3L5L3M3N2N3N2M4L4M3M2M5L6khN[KcR1_5_lNgJ^S1d5VlN`JhS1f5PlN]JnS1i5lkNYJQT1n5gkNVJVT1S6`kNQJ\\\\T1Z6VkNnIfT1Z6PkNkImT1]6ijNiIUU1c6]jNaI`U1b8M3N1N2O1N3K4K7@a0F>E;E9G8H6K4L3N2N2O00000000001OO10000O10000O1O100O1O1O1O11O0000001OO1O1M3N2N2M3N2L4G9F:I7K5M4M3M3K6K5K7J4M4L6K9F9H3L3M3M3L5J5K6K4M2VNPiNaJRW1W5_iN]JdV1Y4QkNXKRU1R4UlNZKoS1]4o3L4N3L4M5J4M3L4N1O3M3M4L2M3N3K5L6J9G6J7I8H7I7H7I8H8CQRY^1\"}}, {\"image_id\": 95, \"category_id\": 1, \"bbox\": [154.07652282714844, 849.5879516601562, 161.33473205566406, 441.05767822265625], \"score\": 0.9999997615814209, \"association_id\": 2, \"segmentation\": {\"size\": [1536, 2048], \"counts\": 
\"cn]7;__1?@:J5K5L3M3L4L4M3M3O2M2O2M2O1N2N2M2N3M3L3O2N3N2mdNSMnX1Q3TfNoMgY1Y2geNTNUZ1n3N2N3M4L3L4M2N3L5L5J6K5J4K6J8HT1kNc0^O:F7I7H:dNa2`N=D5L3L4M2O2M2O101N10000O100000000000000000000000000000000000000001O0000001O1O1N2O1O2N2N2N3M2M3N2M=Cg0YO3L3M3M3M3M3L3M4L3M4M2M3SMPlNfIUT1l5olNPI^S1k6n2M4K4L5J5K6K5M2M4SMffNmN`Y1j0YgN]NPY1^1]gNSNjX1i1_gNlMfX1T2d2N2O0O2O001O001O001O1O1O1O2N3M2M3N2N3L5L5J4L4K6Ia0VOiQ^a2\"}}, {\"image_id\": 95, \"category_id\": 1, \"bbox\": [406.450439453125, 835.9827880859375, 169.2236328125, 391.378173828125], \"score\": 0.9999997615814209, \"association_id\": 1, \"segmentation\": {\"size\": [1536, 2048], \"counts\": \"Y]Yc0c0R_1=K5L4M2N2O1M4M2N3L3M4M2N3M2O2M3N3L6K;ET1SeNbLPW1d3ghNaLWW1b3chNcLZW1a3`hNbL`W1b3XhNcLgW1c3ogNaLQX1e3cgNaL]X1b5N1O1O2M2N2N3N1O2O1N2O1N2O1O2N3L4M;[hNmHgU1P9E4M1M3N2L3N3N1O2O0O10000O100O100O100O1O1O1M3N2O1N200N2O1O1N2M3O1N1000O2N1O1O0100O1N20000010O001O1O10O010000O10000000000001OO1O1O1O1N2O1O1N2N2K5H8_Oa0K5J6J6J6H8I7K5G9^Ob0_Oa0F:M3N2N2N2O1M3N2N3N1O2N1O2iJgfNe3ZY1QLUgNh3mX1TL\\\\gNe3hX1UL_gNe3jX1PL^gNj3aZ1I6J5L4K5K6E<D=_O?D=Dd0ZObbYU2\"}}, {\"image_id\": 95, \"category_id\": 2, \"bbox\": [1686.5478515625, 1207.884033203125, 139.94189453125, 25.614990234375], \"score\": 0.9999994039535522, \"association_id\": 5, \"segmentation\": {\"size\": [1536, 2048], \"counts\": \"Pfk_22n_11O001O1O000O2O0O2O001N1O1O2N1000001O000O2O0000001O00000000001O00000001O0000000000001O000000000000001O00000000001O000001O00O2O000000001O01O000001O0000001O000000000000000000001O0000000000001O0000001O000000000000001O000O2O0001O01O00001M3Ogia:\"}}, {\"image_id\": 95, \"category_id\": 2, \"bbox\": [123.15959167480469, 1286.3902587890625, 165.9355926513672, 36.9620361328125], \"score\": 0.9999991655349731, \"association_id\": 2, \"segmentation\": {\"size\": [1536, 2048], \"counts\": 
\"lXc62n_100001O00000000000000000000001N10000O2O000O1O2N1O2O0O2O0O2O0O2O001N10000O2O00000O100O2O000O100O100O10000O1O100O1O1000000O100000000001O000O100000000000000O101O0000000O10000000000O100000000O10000000000000000O100000001O00001O00001O00001O001N3N001O001O1O1O001N101O001O00001O0000001O001N100O2O1N2OSW`b2\"}}, {\"image_id\": 95, \"category_id\": 2, \"bbox\": [405.7601318359375, 1222.1982421875, 155.80987548828125, 23.0592041015625], \"score\": 0.9999988079071045, \"association_id\": 1, \"segmentation\": {\"size\": [1536, 2048], \"counts\": \"aVmc02n_11N2O001N10001O0O10001N100000000O101O00000000000O100000001O000000000000000000000001O00000000000000000000000000O101N100O1O10000000000O2O0000000O10000001O00000000000000000000000000000001O0001O000000000000001O001O000O2O1O001O000000001O0O10000000001N10000000000O2O0O100O102M_ifU2\"}}, {\"image_id\": 95, \"category_id\": 1, \"bbox\": [1149.758056640625, 901.0848999023438, 76.4541015625, 181.58441162109375], \"score\": 0.9999980926513672, \"association_id\": 6, \"segmentation\": {\"size\": [1536, 2048], \"counts\": \"Xn\\\\f13f_1<abNIhZ1;SeN0cZ15QeN:hZ1IhdNh0S[1\\\\OfdNj0W[1[OadNk0][1ZO^dNh0`[1]OZdNg0b[1m1N2M3N2N3M4L3M3M2N2M4L4M2M3O1O1N2O1O1O100O100O0100O1O1O1N2M3_O]eN_KiZ1V4f0N2O101N1O1O2M3M3M3L5L3L6I7J<C<D;D?_O`0D<_OQbgV1\"}}, {\"image_id\": 95, \"category_id\": 2, \"bbox\": [917.575439453125, 1237.0733642578125, 165.3187255859375, 27.217041015625], \"score\": 0.9999802112579346, \"association_id\": 3, \"segmentation\": {\"size\": [1536, 2048], \"counts\": \"Wgb[11n_1101N10000000001N10000000000000000O1000000O1O1O1O100O1O1O1O1O10001O000N20001O0000]Oe`Na0]_100000000001N100O1000001O000O100000000O10001O00000000000000000000001O000000000000000000000001O00001O0000001O00001O00001O0000001O0O2O00001O00001O00001O0O101O0000001O00001N1000001O000O10001O0000000O10001O000O10001MkX_]1\"}}, {\"image_id\": 95, \"category_id\": 2, \"bbox\": [711.4661254882812, 1271.9305419921875, 178.5069580078125, 36.874755859375], \"score\": 
0.9999768733978271, \"association_id\": 4, \"segmentation\": {\"size\": [1536, 2048], \"counts\": \"]XjR12l_13C=O0O2O0O101O000O101O00001O0000001O0000000000000000001O00000001O001O01O0001O000001O0000001O001O0O100O10001N100O100O100000000O1000000000000000000000000001O00000000001O0000001O0000000000001N10001O00000O101O0O10001O001O0O2O00001O1O1O1O0O2O001O001O00001O0000001N100000001O000000001N10000O2O1NbgZf1\"}}, {\"image_id\": 95, \"category_id\": 2, \"bbox\": [1153.279541015625, 1077.2542724609375, 338.4462890625, 25.677001953125], \"score\": 0.9991543292999268, \"association_id\": 0, \"segmentation\": {\"size\": [1536, 2048], \"counts\": \"oaRf18d_16M1O2O0O1000000000000O1000000000000000000000000000000000000O100000000000000000000000000000000000000000000000000000000O10000000000000000000000000000000000000000001O00000000000000000000000000000000000O010000000001O00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000O100000000000000000000000000000000000000000000O10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001O000000000000000000000000000000000000001O00000000000000000000000000000000000000001O00000O10000000000000000000000O1000000000000000000000001O0000000O10001O00000O2O00000000000000000O100000001N101O1MTnSj0\"}}, {\"image_id\": 95, \"category_id\": 1, \"bbox\": [1857.680419921875, 795.0296020507812, 53.6365966796875, 286.04058837890625], \"score\": 0.8663716316223145, \"association_id\": 0, \"segmentation\": {\"size\": [1536, 2048], \"counts\": \"i`Xg29__1>G<B9K2M2N2M4N1N3J5]IhNajNZOi1R2bS1Z1kkNgNRT1f1hiN\\\\Ld0Q2bU1a3TiNRMlV1Q6O100O100000001O00000000000001O0O101O0aKXiNiK4W3fV1>\\\\kNbNfT1X1lkNZNVT1_1YlNWNiS1`1`lN^NaS1Y1hlNeN\\\\S1@ohNEZ4h0mR1mNonNm0[X1F<C5Gonc6\"}}, {\"image_id\": 95, \"category_id\": 2, \"bbox\": [1077.0654296875, 1078.5516357421875, 154.7255859375, 23.8223876953125], \"score\": 0.5454972982406616, \"association_id\": 6, \"segmentation\": 
{\"size\": [1536, 2048], \"counts\": \"mQbb1:e_12O1O0000000O101O000000000000000000000O101O000000001O000O100000000000000O100000000000000O1000000000000000000000000000000000001O000000000000000000000000O100000000000000000000000000000000000000000000000000000000O10000000000000000000000O100000000000000000000O10000000000000001O000000002M10001O001O000000002E[`N5imYV1\"}}, {\"image_id\": 95, \"category_id\": 2, \"bbox\": [1830.406005859375, 868.6961669921875, 100.527587890625, 171.818603515625], \"score\": 0.21302726864814758, \"association_id\": 0, \"segmentation\": {\"size\": [1536, 2048], \"counts\": \"Zmih2a0V_1W1nNb0_O3K4L3002J6L3O2OJ52O1O11O000002N1O2M100000001O1O1O001O0O1O101N1O2N=^OYc^5\"}}, {\"image_id\": 95, \"category_id\": 1, \"bbox\": [564.8685302734375, 239.18850708007812, 1216.90771484375, 684.8870849609375], \"score\": 0.05460033193230629, \"association_id\": 0, \"segmentation\": {\"size\": [1536, 2048], \"counts\": \"bh]k03k_14L4M3L4L5beN[OYU1i0cjNYO[U1l0`jNWO]U1n0^jNUO_U1P1]jNQOaU1T1ZjNoNcU1V1WjNnNfU1W1UjNkNkU1X1oiNlNoU1X1mhNZNSNa0oX1Y1ghNeNkM5^Y1Y1ahNJ]W1;\\\\hNIcW1d0mgN_ORX1Q1[gNTOdX1Y1kfNlNTY1P4O2O1O0O2O001N2O001O1N101O1O0O2O001O1O0O2O1O001O001O001O001N101O001O001O001O001O001O001O001O001O001O001N101O001O1O001O001O001O001O001O001O001O001O001O001O001O001O001O001O00001O001O001O00001O001O00001O001O00001O001O00001O00001O001O001O1O001O1O002N1O2N2N1O2N2N2N3M2N3M3M2N2N1O2N2N1O2N1O1O1O1O1O1O1O1O1O1O001O1O1O1O2N1O1O2N1O2N1O2N1O2N1O2N1O2N1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O1O0000O10000O10000O10000O10000O10000O10000O10000O1000000O10000O10O1000O1000000O1000000O100O100O1O100O1O1O1O100O1O1O1O1N2O1O1O1O100O1O1O1O1O1O100O1O1O1O100O1O100O1O100O1O100O1O100O100O100O1O1N2N2N2M3N2M3L4L4N3N1O1O1N2O1O1O1O1N2O1O1N2O1O1N2O1N2O1O1O1O1O100O1O1O100O100O1O100O1O100O1O1O100O1O1O1O1O1O1O1N2O1O1O100O1O100O1O100O100O100O10ikN\\\\HRP1d7loN`HRP1_7moNeHQP1[7noNhHPP1W7ooNmHoo0S7PPOPIno0P7QPORIno0m6QPOWImo0i6RPOZIlo0f6SPO]Iko0c6SPOaIko0_6SPOfIjo0Z6UPOhIjo0X6TPOkIko0U6SPOnIlo0
R6RPORJlo0n5SPOTJlo0l5SPOVJlo0j5RPOZJlo0f5SPO\\\\Jlo0d5SPO_Jko0a5SPObJlo0^5SPOeJko0\\\\5RPOhJlo0X5SPOkJko0U5SPOoJko0Q5TPORKjo0n4TPOVKjo0j4UPOYKio0g4UPO]Kio0c4VPO`Kho0`4VPOcKio0]4VPOfKho0Z4WPOhKho0X4WPOkKgo0U4XPOmKgo0S4XPOoKgo0Q4XPORLfo0n3YPOTLfo0l3YPOVLfo0j3ZPOWLeo0i3ZPOYLeo0g3ZPO[Leo0e3ZPO]Leo0c3[PO^Ldo0b3[PO`Ldo0`3\\\\POaLco0_3\\\\POcLco0^3[POdLdo0\\\\3\\\\POeLco0[3\\\\POfLdo0Z3\\\\POgLco0Y3\\\\POiLco0W3]POiLco0W3\\\\POkLco0V3\\\\POkLco0U3\\\\POmLco0T3[POmLeo0S3ZPOoLeo0Q3[POPMdo0Q3ZPOQMeo0o2ZPOSMeo0n2YPOTMfo0l2YPOVMfo0k2XPOWMgo0i2XPOYMgo0h2VPO[Mio0f2TPO]Mko0c2SPO`Mlo0a2QPOcMmo0]2QPOfMno0[2ooNhMPP1Y2moNkMQP1X2ioNnMTP1X2doNlMZP1[2]oNiMaP1_2UoNeMiP1k8N2N2N2N2N1O2N2N1O001O1O1O1O001O1O001O001O001O001O001O1O001O001O1O001O1O1O001O1O1O1O001O1O1O1O1O1O1O1O1O001O1O1O001O001O1O0010O01O00001O001O001O001O001O001O001O1O001O001O00001O010O001O00001O00001O00001O00001O0000000000000000010O000000000000000000000000000000000000001O01O0000000000000000000000000000000000000000000000000000000O10001O00000000000000000000000000000O100000000000000000000000000O10000000000O01000000000O10000000000O100000000O1000O1000O100000000O100000000O10000000000O10000000000000000000O010000000000000000000000O10000000000000000000000O10000000000000000000000000000000O100000000000000000000000000000000000000000000000000000000000000000000000001O000000000000001O000000000000001O0000000O10001O0000000000001O000000000000001O00000000001O000000001O00000000001O00000000001O00000000001O00000000001O0000000000001O000000000000000000000000000000000000000000O100000000000000000000000000000000000000000000000000O10000000000O100000000O100000000O10001O0O100000000O10000O1000000O1000000O10000O100O1O1O1O1O1O1O1O1O1O1N2O1O1O1N2O1O1N2O1N2O2O0O1O1O1O1O100O1O100O1O100O1O100O100O100O100O100O100O1O101N100O1O100O1O1O1O1O100O2N1O1O1O1O1O1O100O1O1O2N1O1O1O1O1O1O1O1O1O2O0O1O10000O1000000O100000000O100000000O100000000O10000000000O1000000000000O100000000000000O1000001O00000000001O00000000001O000000001O000000001O0000001O001O1O001O001O001O00001O001O0000
0O2O000O101O000O101O0O101O001N10001N101O0O2O1O0O2O1O0O2O1O1N2lEXmNn6jR1oHYmNo6gR1PI[mNn6gR1PI\\\\mNn6dR1QI_mNm6aR1RIamNl6aR1RIbmNl6^R1SIemNk6[R1UIgmNh6ZR1WIhmNh6YR1VIjmNg6WR1XIlmNf6UR1XInmNe6SR1ZIPnNbMAX8aR1TJmnNe5VQ1XJPoNc5RQ1[JSoN_5PQ1^JWoN\\\\5kP1aJ\\\\oNX5gP1fJ]oNU5fP1hJ^oNT5eP1gJboNT5aP1gJfoNS5^P1gJjoNT5ZP1eJnoNU5VP1dJSPOW5QP1aJYPOX5lo0`J^POY5fo0_JgPOX5WU1H9FR1nNd0mLhcN5h\\\\a<\"}}, {\"image_id\": 96, \"category_id\": 1, \"bbox\": [78.85570526123047, 285.26629638671875, 80.83307647705078, 73.1051025390625], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [400, 431], \"counts\": \"PUo0o0`;3M3L6K3L3N3M2N2N3N0O2N1O2O0O2N101O00001O0O2O000O1000000000000000O100000001N100001OO2O00000000000000O10O100O1O2O0000000O2N101N101N2N2O1N2M4L3N3K4N2N3J6L3M9_OP[Z3\"}}, {\"image_id\": 96, \"category_id\": 2, \"bbox\": [103.114501953125, 312.9515380859375, 84.33572387695312, 47.35003662109375], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [400, 431], \"counts\": \"d`c12]<010O100O01O0100O1O10O0100N110O1N10100M2O2O1M2O2N2E:O2O00000101N1O1O1O100O1O1O100O1O1O2N1O1O1O1O100O1O100O1O2OSPo2\"}}, {\"image_id\": 96, \"category_id\": 1, \"bbox\": [303.2627258300781, 85.12206268310547, 36.633544921875, 42.7774658203125], \"score\": 0.9999998807907104, \"association_id\": 8, \"segmentation\": {\"size\": [400, 431], \"counts\": \"e^f35X<:H4L5K3N1N2O1N2O001N1O1O1N2O20O001O001O0O2O1O0O2O1O001N2O2N1M5L4J6IXWT1\"}}, {\"image_id\": 96, \"category_id\": 1, \"bbox\": [266.345703125, 136.57794189453125, 46.9459228515625, 38.4742431640625], \"score\": 0.9999998807907104, \"association_id\": 6, \"segmentation\": {\"size\": [400, 431], \"counts\": \"P^X37U<9J3M4M3L3N3M1O2N001O1O001O0000000000000000000000000000000000000001O001O001O0O2N2L4N2L4N4K^n]1\"}}, {\"image_id\": 96, \"category_id\": 2, \"bbox\": [179.29815673828125, 192.12197875976562, 40.49102783203125, 21.421051025390625], \"score\": 0.9999998807907104, \"association_id\": 4, \"segmentation\": {\"size\": 
[400, 431], \"counts\": \"c`V22]<1O100N101O1O001O1N2J5O200000O100O1O10000O10001N1O100O1O100O10000O1O2O0Oo\\\\c2\"}}, {\"image_id\": 96, \"category_id\": 1, \"bbox\": [8.137001037597656, 195.47486877441406, 76.18352508544922, 66.70268249511719], \"score\": 0.9999996423721313, \"association_id\": 2, \"segmentation\": {\"size\": [400, 431], \"counts\": \"fZ3S1\\\\;2N3L3O1N3M2O0O2O0O2O0O101O00000O101O0O2O0000000000000O100000000M3003M1OO100000000O10000O10000000O10000O2O00000O2O0O2O0O101N2N10001N2N3N2M2N3M2N2L5I6KiZW4\"}}, {\"image_id\": 96, \"category_id\": 1, \"bbox\": [127.2607421875, 161.43319702148438, 64.60787963867188, 53.8609619140625], \"score\": 0.9999995231628418, \"association_id\": 4, \"segmentation\": {\"size\": [400, 431], \"counts\": \"cbb11m;1TDe0`;:K5M2N2N2N2N1O2N2O00001O0O10000000001OO1001O0000000000000001O00000000000000000O1000000O10000O101N1O2O0O2N2N2N2M3N2M4K4M4KTVm2\"}}, {\"image_id\": 96, \"category_id\": 2, \"bbox\": [59.855228424072266, 230.77206420898438, 55.01560592651367, 31.296234130859375], \"score\": 0.9999986886978149, \"association_id\": 2, \"segmentation\": {\"size\": [400, 431], \"counts\": \"dkh01^<2O0O01000O010O1O10O01O10O01O1O010O0O101E:N3O0100O10000O100O100O1O100O10000O100O100O100O100N200O1O1O3MhVk3\"}}, {\"image_id\": 96, \"category_id\": 1, \"bbox\": [154.2906036376953, 111.5272216796875, 46.4940185546875, 38.5751953125], \"score\": 0.9999967813491821, \"association_id\": 5, \"segmentation\": {\"size\": [400, 431], \"counts\": \"hjm1164n;>H3M7J1O1O0O101O00000O1O1000000O1000000O100000000000000001O00001O002N2N1N2O1N3M4L3ITgi2\"}}, {\"image_id\": 96, \"category_id\": 1, \"bbox\": [183.3777618408203, 55.19865417480469, 33.786407470703125, 25.323654174804688], \"score\": 0.9999960660934448, \"association_id\": 3, \"segmentation\": {\"size\": [400, 431], \"counts\": \"cZX25U<:K3M2N2N1O10000O100000001O00000001O0O101O001O1O0O2N2N2N2MSZd2\"}}, {\"image_id\": 96, \"category_id\": 1, \"bbox\": [59.407405853271484, 145.02923583984375, 
68.08714294433594, 60.00823974609375], \"score\": 0.9999862909317017, \"association_id\": 0, \"segmentation\": {\"size\": [400, 431], \"counts\": \"cVg07V<6L3L3N2M3N2M3N2N2N2N2O001O1N2O1N2O1O1O1O1O1O1O1O001O001O00001O000001O00001O000000001O001O00001O00001O0000001N101N1O2K5N2N2N2K6I6L8ClC0b[g3\"}}, {\"image_id\": 96, \"category_id\": 2, \"bbox\": [194.17991638183594, 137.66334533691406, 22.303314208984375, 11.6695556640625], \"score\": 0.9999661445617676, \"association_id\": 5, \"segmentation\": {\"size\": [400, 431], \"counts\": \"mk]233OR<900000O1000000O100O100O101N1O3MTkc2\"}}, {\"image_id\": 96, \"category_id\": 1, \"bbox\": [309.12115478515625, 64.66596984863281, 31.870849609375, 30.177047729492188], \"score\": 0.9999611377716064, \"association_id\": 0, \"segmentation\": {\"size\": [400, 431], \"counts\": \"iZj37V<6K4M2N3M100000O10000001O1O00000001O001N101N2N2O1M4KZ_S1\"}}, {\"image_id\": 96, \"category_id\": 1, \"bbox\": [74.57259368896484, 62.250816345214844, 34.84037780761719, 25.355545043945312], \"score\": 0.9999476671218872, \"association_id\": 0, \"segmentation\": {\"size\": [400, 431], \"counts\": \"[hm04Z<6G7L2O2N1O2O0O101O0000000000000000000000000001O0O2O1O1N2N3M3L6FhC0QSn3\"}}, {\"image_id\": 96, \"category_id\": 1, \"bbox\": [157.2089080810547, 28.027971267700195, 35.937652587890625, 36.0980224609375], \"score\": 0.9999344348907471, \"association_id\": 0, \"segmentation\": {\"size\": [400, 431], \"counts\": \"i[m1=R<2N2N3N2M2M3N100O101O0000000000O01O1O1O01O1O100O1O010N2K5O2N0O2N3I6L5MTnl2\"}}, {\"image_id\": 96, \"category_id\": 1, \"bbox\": [124.22957611083984, 73.66885375976562, 55.21373748779297, 51.39149475097656], \"score\": 0.9999310970306396, \"association_id\": 0, \"segmentation\": {\"size\": [400, 431], \"counts\": \"```1?o;3N2N2N2N2O1O1O1O001O1O2N3M2N1O3M1O1O1O1O001O0001O001O1O001O001O001O1O1O1L3N2O2N1O2O1O1N2O2M2N2M5GgYT3\"}}, {\"image_id\": 96, \"category_id\": 1, \"bbox\": [399.5053405761719, 97.86788177490234, 31.494659423828125, 
33.063270568847656], \"score\": 0.999875545501709, \"association_id\": 0, \"segmentation\": {\"size\": [400, 431], \"counts\": \"a[l45V<9I6K3N2N1O2N1O2O0000001O00000000001O00000000000001N102M3M6GhL\"}}, {\"image_id\": 96, \"category_id\": 2, \"bbox\": [330.0085754394531, 117.26908874511719, 20.82159423828125, 9.328475952148438], \"score\": 0.9998277425765991, \"association_id\": 8, \"segmentation\": {\"size\": [400, 431], \"counts\": \"kbR43Z<3O1O1000000000O10001O0O1O2NYmo0\"}}, {\"image_id\": 96, \"category_id\": 1, \"bbox\": [352.28564453125, 60.35091781616211, 32.60382080078125, 36.104312896728516], \"score\": 0.9997925162315369, \"association_id\": 7, \"segmentation\": {\"size\": [400, 431], \"counts\": \"`bY44Y<7J3N2M3M4K4N2L3N2O100000000000000000000000O100O1O1O1\\\\OeD2\\\\;KiD2X;Ki0MUfb0\"}}, {\"image_id\": 96, \"category_id\": 2, \"bbox\": [202.4305877685547, 75.20559692382812, 23.690185546875, 7.5879669189453125], \"score\": 0.9963496923446655, \"association_id\": 3, \"segmentation\": {\"size\": [400, 431], \"counts\": \"`a`21_<1O0O1O1O100O100000O100000001N100O10c\\\\`2\"}}, {\"image_id\": 96, \"category_id\": 2, \"bbox\": [301.99542236328125, 161.7443389892578, 24.73974609375, 13.6746826171875], \"score\": 0.9944895505905151, \"association_id\": 6, \"segmentation\": {\"size\": [400, 431], \"counts\": \"^fg31]<2O0O2M3N2N20000000O100O10000O1O101N2N]kX1\"}}, {\"image_id\": 96, \"category_id\": 2, \"bbox\": [361.6542663574219, 75.8857650756836, 26.09454345703125, 19.308639526367188], \"score\": 0.7865585684776306, \"association_id\": 7, \"segmentation\": {\"size\": [400, 431], \"counts\": \"mnb42]<1O1O1O10000001O0O2OUSa0\"}}, {\"image_id\": 96, \"category_id\": 2, \"bbox\": [142.03834533691406, 185.83119201660156, 71.81655883789062, 29.438201904296875], \"score\": 0.3332970440387726, \"association_id\": 0, \"segmentation\": {\"size\": [400, 431], \"counts\": \"dgU22]<10O01O1O00100N200N1O2O1J501O1O0100O2O0O10000O2O0O10000O1O100O100O101N_bd2\"}}, {\"image_id\": 
96, \"category_id\": 1, \"bbox\": [353.87567138671875, 50.58484649658203, 31.45501708984375, 32.69189453125], \"score\": 0.22642850875854492, \"association_id\": 0, \"segmentation\": {\"size\": [400, 431], \"counts\": \"[[Z48W<1N2N2M3G9L4O1N200000000000000000000000000001O001CdDD^;8gDEZ;8iDFZ;6gfb0\"}}, {\"image_id\": 96, \"category_id\": 2, \"bbox\": [289.1419677734375, 158.3328399658203, 34.92449951171875, 17.924835205078125], \"score\": 0.1668722778558731, \"association_id\": 0, \"segmentation\": {\"size\": [400, 431], \"counts\": \"^fg31]<2O1M2N3O1O1O1000O10000O10000O1O2O]dY1\"}}, {\"image_id\": 96, \"category_id\": 2, \"bbox\": [376.31768798828125, 68.65253448486328, 14.0845947265625, 6.9476776123046875], \"score\": 0.16643327474594116, \"association_id\": 0, \"segmentation\": {\"size\": [400, 431], \"counts\": \"gSd43]<0O1O101O0000000001N[n?\"}}, {\"image_id\": 96, \"category_id\": 2, \"bbox\": [370.2494812011719, 83.42232513427734, 18.06378173828125, 12.288955688476562], \"score\": 0.16511698067188263, \"association_id\": 0, \"segmentation\": {\"size\": [400, 431], \"counts\": \"^bb41^<100N2O2N10O10001O0O2O0Off`0\"}}, {\"image_id\": 96, \"category_id\": 2, \"bbox\": [320.8762512207031, 83.79053497314453, 24.2779541015625, 8.135406494140625], \"score\": 0.09971107542514801, \"association_id\": 0, \"segmentation\": {\"size\": [400, 431], \"counts\": \"hhQ42^<00O01O100O01000000O10klQ1\"}}, {\"image_id\": 97, \"category_id\": 1, \"bbox\": [0.0, 388.02471923828125, 713.4043579101562, 400.16876220703125], \"score\": 1.0, \"association_id\": 2, \"segmentation\": {\"size\": [1617, 2048], \"counts\": 
\"ba0i5X\\\\1Z1PO>C6L4L4M2N3M2O2O0O1O2O0O1O1O100O1O1O1N2O1O1L4M3M4M2M3O1O100O100O100O100O10000O10000O10000O100O100O10000O1O100O100O1O100O100O100O100O100O100O1O100O100O100O100O1O100O100O1O100O1O100O100O1O1O1O100O1O1O1O1N2N2N2N2O1N2O1O1O1O1O1O1O1O1O1O100O1O1O1O1O1O1O1O1O100O100O100O10000O10000O1000000O100000000O100000000O100000000O100000000O1000000O10O0100O100O100O100O01000O10000O0100000O1000O1000000O1000O0100O100O1O100O1O100O1O100O1O100O100O100O1O100O1O0O2M3N2M3M3N2M3N2M3M3M3O100O1O010O100O10000O100O10O10O1O100O1O001O1O1O1N2O001N2O1O001O100O0010O01O010O1O010O00100O10O0100O00100O100O010O100O100O100O1O100O100O100O010O100O100O100O100O100O100O100O100O100O100O1O100O100O10000O100O10000O10000O10000O10000O10000O100O100O1O100O1N2O1O1O1O1O1O1O1O1O100O1O1O100O100O100O10000O100O100O10000O10000O10000O10000O1000000O10000O1000000O10000O100O10000O100O100O10000O100O1000000O10000O10000O1000000O1000000O100000000O10000000000O10000000000O10000000000O100000000000000O10000000000000000000000000000000000000000000000000000O10000000000000000000000O10000000000000000000000O100000000000000O1000000000000O100000000000000O1000000O10000O10000O2O0O100O100O1O1O1O1O1N2O1O1O1O1O1O1O1O1O2O0O100O100O1O100O2O0O1O100O100O2N100O1O100O2N100O1O1O101N1O1O1O1O2N1O1N2O2N1O1O1O1O2N1O100O101N100O1O2O0O1O1O2N1O1N2O1N3N1N2O1O1N3N1O1N2O1O2N1N2O1O1O2N1O1O2N1N2O2M2N3M2N3M3M2M4M3L4M3M2N3L4L3M4M3M2N3N1O2M3N2M3M3M3M5C=E;I7HQUSS2\"}}, {\"image_id\": 97, \"category_id\": 2, \"bbox\": [9.470832824707031, 808.9403686523438, 233.78012084960938, 134.349609375], \"score\": 0.9999998807907104, \"association_id\": 2, \"segmentation\": {\"size\": [1617, 2048], \"counts\": 
\"mQ?43X1S`1]1^O6M2N2O00000000001O000000O1000000O101O000O100000000O100000001O000000000O1000001O000000000000000000000000000000000000000000000O10000O10000O1000O100000O10O1000000000000O1000000000000O0100000O10000O01000O100000O010000O0100000O100O00100O10000O100O100O100O01000O100000000O10O10O100O0100O0100000O10O100000000O10O01000O010O1000O010000000O100000O10O10000O10O10O10O1000000O10O100000O10000000000O10000000001N10000O10000O2O000O3N1O1O1N2O2N3L5Lc0\\\\O:F4L4M3L3N2M3M8G4^OV^N6Vb1KnYYi2\"}}, {\"image_id\": 97, \"category_id\": 1, \"bbox\": [98.68263244628906, 264.6192932128906, 1786.0738525390625, 1046.092529296875], \"score\": 0.9999997615814209, \"association_id\": 1, \"segmentation\": {\"size\": [1617, 2048], \"counts\": \"P]m71]b15K5K4L5K4L5K5L3L5L4K5L3M4L4N2M3M3N2M3N2M3M4M2M4M2M4M2M3N2N2M3N2M3N2M3N2N2M3N1N2O1N2O2N1N2O2M2O2M2O2M2O2N2M3N2M3N2M3N2M4M3L6K5J6K5J6K6I6K4K4L4M3L4M3L4M3L3N3L3M4M2M6K5J6K5J5K6K4K5K3M4M2M3M3M3N2M3N2M3N2M3N2N2M3N2N3L3N1N2O2N1N3N1N3N1O2M3N2M3N2N2M3N2M4M2M4M3M2M3N2M4M2M3N1N3N2M3N1O2M3N1N3N1N2O2M2O1N2O2M101N2O0O2O1N1O2O0O2O0O2O001N10001N10001O001N10001O0O2O00001O0O2O001O0O2O00001N101O001N101O001N101O001N101O0O2O1O001N101O001N2O001N101O1O0O2O1O0O2O1O0O2O1O00001O0O2O00001O001O0O101O00001O0O2O00001O001O0O101O001O00001N101O00001O001N10001O001O001O0O101O001O00001N101O00001O001O0O101O0000000000000000001N10000000000000000000000O10001O000000000000000O100000000000001O000O1000000000000000000O100000001O0000000O10000O2O0O101N10001N100O2N100O2O0O2N100O2O0O1O2O0O2N100O2N100O101N1O101N1O101N1O100O2N100O1O2O0O1O1O2O0O1O101N1O100O2N1O100O1O2O0O1O100O1O2N1N2N2M3M3M3N3L3WN]\\\\OaTOfc0^k0`\\\\OZTOcc0ek0d\\\\ORTO_c0mk0g\\\\OlSO[c0Sl0l\\\\OdSOWc0[l0Q]O[SOSc0cl0^1O1O1O1O1O1O2N1O1O1N2O1O1XNoYOnVOTf0Pi0QZOkVOPf0Ui0TZOfVOme0Yi0XZOaVOje0^i0ZZO^VOge0bi0]ZOXVOee0gi0_ZOTVOde0ki0_ZOQVObe0ni0bZOmUO`e0Rj0cZOjUO_e0Vj0^1O100O2O0O100O100O2O000O100O10001N10000O100O2O000O2O0O2O0O101N101N100O2O0O2N101N1O2O0O2N1O2O0O2N2N1O2N1O2N2N101N1O2N2O0O2N101N1O2O0O2O0O2O0O101N101O0O101N101O0O101O0O
101O0O101O0O101N10000O2O000O2O0O100O2O0O101N100O2O0O101N1O101N1O101N1O2N100O2N1O1O2N100O2N1O2O0O1O2O0O2O0O2O0O101N101N101N101O0O101N101O001O000O2O001O00001N101O00001O0O101O00001O0O101O00001O0O10001O000O101O00001O0O10001O00000O101O0000001O0O1000001O000O101O00000O101O00000O101O000O101O000O101O000O2O000O101O000O2O000O2O000O2O0O101O0O2O0O101O0O2O0O2O0O101N101O0O2O0O2O001N101O0O2O000O2O00001O0O100000001O0O1000000000001O0O1000000000001O0O100000000000001N10000000000000000O2O00000000000000000O2O0000000000000000000O2O0000000001O000O1000000000000000000000000000000000000000000000001O000000000O1000000000000000000000000000000000001O00000000000000000O100000000000O10O100000000000000O1000000000000O10000000O1000O10000000000O10000000000O100000000O10000000O0100000000O100000000O10000O10000O10000O10000O1000000O101O0O1000000O10000O1000000O1000000O1000000O1000000O100000000O10001O000O100000000O100000000O100000000000000001O0000000000000000001O00000000000000001O00000000000000001O000000000000001O000000000000001O000000000000001O00000000001O00001O010O001O1O010O001O10O01O001O10O01O010O001O010O00010O001O01O01O01O010O00010O0010O0001O010O001O010O000010O01O00010O00001O000000001O0000000000001O00000000001O0000000000001O000000000000001O000000000000001O00000000000000001O00000000000000001O000000000000000O10000O101N10000O100O100O100O100O100O1O100O1O2N100O1O1N2J6K5J6J6J6J6N2N2O1O1N2O1O2N1O1O1O100O1O1O1O100O1O100O1O100O100O100O101N100O100O1O100O1O100O1O1O1O100O1O1O1N2M3M3M3L4M3L4L4L4L4K5H8I7H8I7I8N1O1N2O1O1O1O1O1O1O1O1O1O100O1O1O100O1O100O100O100O100O100O101N100O100O100O1O100O1O1O100O2N1O1O1O1O1N2N2O1N2O1N2N3N1N2O1N2N2O1N2O1N2O1N2N3N1N2O1N200O1O1O100O2N100O100O1O100O10001N100O10001N10000O2O000O2O000O2O001N10001N101N10001N101N101O0O2O0O2O1N101N101N101N2O0O101N101N101N1O2O0O101N1O101N1O101N1O101N100O2O0O101N100O2O0O2O000O2O0O2O001N101N101O1N101N101O1N101N2O1O0O2O1O1N2O1N2O1O1N2O2N1N2O1O1N3N1O1N2O001N2O1O1N2O001N2O1O0O2O001N2O001N101O1N101N2O0O2N2O1N101N2N2O1N2N2N3M3L5L3L5L4K4M4K5L4K6K4K6K6I:F9H8G9G7I3N2M3M4L3
N2O1N1O2N2N2O1N1O2O1N2O0O2O0O2O1N101N101N101O0O2O001N101N101O0O2O0O2O1O1N101N2O1N101N2N2O1N3N1N3M3N2M2N3M2N3N1N3M2N3M2N2N3L3N2N3L3N2N2N3M2N2N2N2N2N2N1O2N2N101N2N101N1O2O0O2O0O2O0O2N101N1O2O0O2N2O0O2N1O2N2N1O2N2N1N3M3N2M3M2N3M3M3M3M3M3M3L4M3M3L4M4K4M4L3M4L4L3M3M4M2M4L3N2M3N2N3L3N2N2N2M3N2N2N2N1O2M3N2N2M3N2M2O2M3M3N3L3M4L3M4L4L3M4K5L4K5K5J6J6J7I6I8H8Hjh[8\"}}, {\"image_id\": 97, \"category_id\": 2, \"bbox\": [417.7973327636719, 903.4050903320312, 1630.20263671875, 670.9420776367188], \"score\": 0.9999996423721313, \"association_id\": 1, \"segmentation\": {\"size\": [1617, 2048], \"counts\": \"kmgi02]b13M3L4M3M3L4M3L4L4M3N2O2M2N2N2N2O1N2N2O1N2O1N101N101N100O2O0O100O2O000O2O0O10001N1000010O0000001O0001O01O00000010O000001O00001O01O01O00001O000010O0001O00001O001O01O01O001O001O001O0010O01O001O001O1O001O001O010O001O1O001O1O001O1O1O001O100O1O1O1O1O1O1O1O1O2N1O1O1O100O1O1O1O001O1O1O1O1O1O010O1O1O1O001O1O001O1O1O00100O001O00100O00100O00100O1O10O01O100O100O100O100O2N100O100O00100O1O100O001O100O00100O1O00100O001O010O1O001O010O001O1O010O001O001O001O0010O01O001O001O001O0010O01O001O001O001O010O001O001O001O0010O01O001O001O001O010O00001O001O00010O00001O000010O000010O01O01O01O0010O01O0010O01O0010O00010O01O00010O000010O0001O01O01O0001O01O0001O01O0000010O000001O01O0001O0000010O000010O0001O010O0010O0001O010O01O01O010O01O010O001O010O0010O01O0010O01O000010O0001O00010O00001O01O0001O00000010O000000001O000000001O0000001O0000010O00001O00001O00001O001O00001O001O001O010O001O1O001O00001O001O00001O00001O01O01O00001O000O101O00001O00001O00001O00001O001O00001O001N101O1O001O001O1O001O1O001O1O001O001O001O001O0O101O00001O00001O00001O00000O1000001O0O1000001O000O1000001O0O1000001N1000000O2O000O10001O0O10001N10000O2O0O10001N10000O2O000O101O0O101O00001N10001O00001O0O2O001O00001O001N10001O00001O00000O2O00001O0000001O0O10001O000000001O000O10001O00000000001O000O1000010O000000010O0001O01O0001O01O01O01O01O0010O00010O010O01O010O010O2O0O2O0O100O2N100O100O1O2O0O1O100O1O10O01O0010O01O000010O0001O001O00001O0010O0001O
001O00001O001O010O001O001O001O001O0010O01O001O001O1O001O00100O001O001O001O00001O01O01O000000001O00000001O0001O00000000000010O000000000001O000001O00000001O00000001O0001O0000000000010O000000000000010O00000000O10000O100O10000O100O100O100O100O100O10O01O100O1O1O1O100O1O1O100O1O100O100O100O100O100O100O10000O100O10000O10000O1000O0100O100O100O100O100O100O001N2N2O1N2N2N2M3N2M3N2K5G9G8TOm0K5K5K5K5M3M3N2M3N2N2N2N2O1N1O2O1O1O1O100O100O1O100O100O1000O0100O1O100O100O1O010O1O1O100O1O1O001O1O1N2N2L4L4L4K4L5K5K5L4K5L4N200O00100O100O1O10000O010O100O10000O1000O01000000O10000O10O10O10000O100O100O010O100O100O1O10O01O1O1N2L4N2M3M2N3M3M3O1O1O010O1O100O10O010000O100O01000O1000O01000000O0100000O10000O10000O10000O10O0100O100O100O100O1O100O1O1O1O1O1O001O100O1O100O1O100O100O1O10000O100O10O010000O10000O10000O1000O010000O10O0100O100O10O01O100O1O1O001O1O1O1O1O001O1O1O1N2O001O1O1O1O1O1O00100O1O100O10O0100O100O1000O010000O10000O01000O10O1000O1000O0100O01000O10O10O10O0100O010O10O010O010O0100O0010O01O0010O01O10O01O10O0100O010O10O0100O10O0100O010O10O0100O010O010O10O01O01O010O01O0010O001O01O01O001O001O001O001O010O001O010O1O010O010O10O010O0100O01000O010O1000O010000O100O10O0100O100O100O0O2O1O1O1N2O0O2O1N2N2N2N1O2N2M3L4M2N3M3N2M2O2N2O1O100O00100O1O100O10O0100O100O100O01000O101N10000O101N100O100O101N100O1O2N100O2N1O1N3N1N2O2M2N2O2M2O1N3M2O1N3M2O1O2O0O1O2O0O101N100O101N100O2O0O101O0O10001N10000O10000O100O2O000O100O100O100O1O2O0O1O100O1O1O1N3N1N2O1N2N2O1N3N1N2O1O1N2O2M2O1O100O2N100O1O101N100O101N100O101N101O0O2O000O101N100O2O0O100O2O0O1O2O0O1O2O0O1O2N1O2N1N2N3N1N2O2N1N3N1N3N1N2O2N1N3N1O2M2O2N1O2N1O2N1O2N1O2N2N1O2N1O2N2O1N3M4L3M3M4L4K4L5K5J7J6I7J7H7Ic0]O`0_O8G9G9G:F9Dc0^Oe0UOc1iMce3\"}}, {\"image_id\": 97, \"category_id\": 1, \"bbox\": [410.30401611328125, 1194.7232666015625, 51.71490478515625, 53.6697998046875], \"score\": 0.7329941987991333, \"association_id\": 0, \"segmentation\": {\"size\": [1617, 2048], \"counts\": 
\"acXd0:Rb19K3L2O2N3M2O1N2O2N001O1O1O1O001O1O1O1O1O1O2N2N1O1O1O000000000001O001O2N5K1O1O2lNR_Na0`a1O3M3M3M2N2N1N2O0O1Me]N0e\\\\Z^2\"}}, {\"image_id\": 97, \"category_id\": 1, \"bbox\": [228.8681640625, 479.2129821777344, 1074.6639404296875, 919.6820068359375], \"score\": 0.529293417930603, \"association_id\": 0, \"segmentation\": {\"size\": [1617, 2048], \"counts\": \"Zmc;;]`1R2C=C;E=De0ZOa0@6J6J5L5J5L4M3L4M3M4K4M4L3MV1jN5K5K5K5K6J6J7I7I8H7I7I7I4M2M4L3M4M2M3M3M3N2M2N3N2M2N3M2O1N2N2O1N2N2O0O2N2N2O0O2N2N2O1N1O2N2N2N2O0O2N2N2N2N1O2N2N2N2O0O2N2N2N2N101N2O1N2O0O2O1O001N2O001O0O2O1O001O0O2O001O00001N101O001O0O101O00001O0O101O00001O0O101O00001O00001O000O2O001O001O001O001O0O2O001O001O1O1O001N2O1O002N1O1O1O001N2O1O1O000000001O00000000001O00000000001O00000000001O000000001O0000001O000000001O0O101O001N2O001O0O2O1O0O2O001N2O001N2O0O2O1O0O2O1N101N2O0O3N1N2O1N2N2O1N1O2O1N2O0O100O101N100O10000O100O101N100O10000O100O10000O100O10001N10000O10000O10000O1O100O1O1O100O1O1O1O1O100O1O1O1N101O1O1O1O1O1O100O1O1O100O1O100O1O100O1O100O100000001N1000000000000O101O0000000O1000000000001N1000000000000O1000001O00000O100O100O100O2O0O100O100O100O2O0O100O1O100O101N1O100O1L4L4L4L5K4L4L4L4O1O1O2N1O1O2N1O1O2N1N2O2M2N2M4M2M4K4L4J7J5L5K4M4L3N3M2N3M3N101O0O2O001N101O0O2O001N101O000O2O00001N10001O0O101O000O2O00001O0O10001O000O101O00000O2O0000001O000O1000001O00000000000O10001O000000000000001O000O10000000001O000000000000001O0O10000001O0001O000000000000001O000000000000001O0000000000001O0000000000001O0001O01O001O001O001O1O001O1O1O001O001O1O001O001OT[OlUO[a0Tj0`^ORVO_a0ni0]^OWVOca0hi0Y^O\\\\VOga0di0U^OaVOka0^i0R^OfVOma0Zi0P^OjVOPb0Ui0m]OoVORb0Qi0k]ORWOUb0nh0k]OSWOUb0lh0j]OVWOUb0jh0k]OVWOUb0jh0j]OXWOVb0gh0j]OZWOUb0fh0j]O[WOVb0eh0j]O\\\\WOUb0dh0j]O^WOVb0ah0j]O_WOVb0ah0j]O_WOVb0ah0j]O^WOWb0bh0i]O^WOWb0bh0i]O^WOWb0bh0i]O^WOWb0bh0h]O_WOYb0`h0g]O`WOYb0`h0g]O`WOYb0`h0g]O`WOYb0`h0g]O`WOYb0`h0g]O`WOYb0`h0f]O`WO[b0`h0e]O`WO[b0`h0e]O`WO[b0ah0d]O_WO]b0`h0c]O`WO]b0`h0c]O`WO]b0`h0b]OaWO^b0_h0b]OaWO^b0_h0b]OaWO^b0_h0b]OaWO^b0_h
0b]OaWO^b0_h0a]OaWO`b0_h0`]OaWO`b0_h0`]OaWOab0^h0_]ObWOab0^h0^]OcWObb0]h0^]OcWObb0^h0]]ObWOcb0^h0]]ObWOcb0^h0\\\\]OcWOdb0]h0\\\\]OcWOdb0]h0\\\\]OcWOdb0]h0[]OdWOeb0\\\\h0[]OcWOfb0]h0Z]OcWOgb0\\\\h0X]OeWOhb0[h0X]OeWOhb0[h0W]OgWOhb0Yh0X]OgWOhb0Zh0V]OgWOjb0Yh0V]OgWOjb0Yh0U]OiWOkb0Vh0T]OkWOlb0Uh0T]OkWOlb0Uh0S]OmWOlb0Sh0T]OmWOlb0Sh0S]OnWOnb0Qh0R]OoWOnb0Qh0Q]OQXOnb0Ph0Q]OPXOob0Ph0P]OQXOPc0og0o\\\\OSXOQc0lg0o\\\\OTXOQc0lg0n\\\\OUXORc0kg0n\\\\OUXORc0kg0n\\\\OVXOQc0jg0n\\\\OWXOSc0ig0l\\\\OWXOTc0ig0k\\\\OXXOUc0hg0k\\\\OYXOTc0gg0k\\\\OZXOVc0eg0j\\\\O[XOVc0eg0i\\\\O\\\\XOWc0dg0i\\\\O]XOVc0cg0i\\\\O^XOXc0ag0h\\\\O_XOXc0bg0g\\\\O_XOXc0ag0g\\\\O`XOYc0`g0g\\\\O`XOZc0_g0e\\\\ObXO[c0^g0e\\\\OcXOZc0]g0f\\\\OcXO[c0\\\\g0d\\\\OeXO\\\\c0\\\\g0c\\\\OdXO]c0\\\\g0b\\\\OfXO^c0Yg0c\\\\OeXO^c0Zg0d\\\\OeXO]c0Zg0d\\\\OdXO]c0\\\\g0d\\\\OcXO]c0\\\\g0d\\\\ObXO]c0^g0d\\\\OaXO]c0^g0d\\\\OaXO]c0^g0d\\\\O`XO]c0`g0d\\\\O_XO]c0`g0d\\\\O^XO^c0ag0c\\\\O^XO]c0bg0d\\\\O\\\\XO^c0bg0d\\\\O]XO]c0bg0d\\\\O\\\\XO^c0cg0c\\\\O\\\\XO]c0dg0d\\\\OZXO^c0eg0c\\\\OYXO_c0fg0b\\\\OYXO^c0gg0c\\\\OWXO_c0hg0b\\\\OWXO_c0hg0b\\\\OVXO_c0jg0c\\\\OSXO_c0lg0b\\\\OSXO^c0mg0c\\\\OQXO_c0mg0c\\\\OQXO^c0og0c\\\\OPXO^c0og0d\\\\OnWO]c0Rh0d\\\\OlWO]c0Th0e\\\\OiWO]c0Vh0k\\\\ObWOUc0^h0S]OYWOob0fh0Y]OQWOhb0oh0]301O00001N100000000000000000010O00000000000000000000000000T_OlVO[8Ti0dGmVO\\\\8Si0cGoVO\\\\8Qi0dGPWO[8Pi0dGRWO[8nh0cBlVOe17h;mh0bBnVOd17i;kh0bBoVOe17h;jh0bBPWOe18h;hh0bBQWOe18i;gh0aBRWOe19j;dh0`BTWOf19i;ch0`BVWOe19j;ah0`BWWOe19k;`h0_BXWOf19j;_h0_BYWOf1:j;]h0^B[WOg1:j;[h0^B]WOf19k;[h0]B^WOg19k;Yh0\\\\B`WOh19k;Wh0[BbWOi18l;Wh0XBdWOj17m;Uh0WBfWOj18n;Rh0VBhWOk17o;Qh0TBkWOk16o;Ph0SBmWOl16P<mg0RBoWOc0mNZNX1Q?lg0RBPXOa0ROVNT1V?kg0QBPXOa0YOPNn0^?ig0QBPXO`0_OjMk0d?fg0RBQXO=FfMe0i?eg0SBQXO<N_M>R`0cg0SBRXO9h1c=Vf0SBSXO8i1e=Uf0RBRXO7l1f=Rf0SBSXO4n1h=oe0SBTXO3o1i=ne0TBTXOOR2l=je0TBUXONS2n=he0TBVXOJV2Q>ee0TBUXOG[2T>`e0TBWXOD\\\\2W>^e0UBWXO@_2Z>Ze0VBWXO^Oa2\\\\>Xe0UBYXO[Oc2_>Ue0UBXXOZOe2a>Se0UBYXOWOg2b>Qe0VBZXOUOg2e>od0VB[XOROi2g>ld0WB[XOPOk2i>kd0UB\\\
\XOPOk2j>id0VB]XOmNl2l>hd0WB]XOkNm2m>fd0WB_XOjNl2o>ed0WB`XOhNm2P?dd0WB`XOhNm2o>dd0XB`XOiNl2o>dd0XBaXOgNm2o>cd0ZBaXOfNm2o>bd0[BaXOfNm2n>cd0[BbXOeNm2o>bd0[BbXOeNm2n>bd0]BaXOdNn2o>ad0\\\\BcXOdNm2n>ad0^BbXOdNn2l>ad0`BbXObNn2n>ad0_BaXOcNo2l>ad0`BbXObNo2l>`d0bBaXObNo2l>`d0bBbXO`NP3l>_d0cBbXOaNP3j>`d0dBaXO`NP3l>_d0dBaXO`NQ3j>`d0eBS^OZ=na0dBS^O[=na0eBS^OY=oa0fBR^OY=na0gBR^OX=Pb0fBR^OY=oa0fBR^OY=na0gBR^OX=Pb0gBQ^OX=oa0gBS^OW=oa0hBQ^OX=oa0hBR^OV=Pb0iBQ^OV=oa0iBR^OW=oa0hBR^OV=oa0jBR^OU=oa0jBQ^OV=oa0jBR^OT=Pb0jBR^OU=na0kBR^OU=oa0jBQ^OU=Pb0jBQ^OV=Pb0iBP^OW=Pb0iBP^OW=Qb0gBP^OY=Pb0gBP^OY=Qb0fBn]OZ=Tb0dBm]O\\\\=Sb0dBm]O\\\\=Tb0cBl]O]=Ub0aBl]O_=Tb0aBl]O_=Ub0`Bj]O`=Xb0^Bi]Ob=Wb0^Bi]Ob=Xb0\\\\Bi]Od=Xb0[Bh]Oe=Yb0YBh]Og=Xb0YBg]Og=[b0WBf]Oi=[b0VBe]Oj=\\\\b0UBd]Ok=]b0SBc]On=]b0QBd]Oo=]b0PBc]Oo=_b0oAb]OQ>_b0nA`]OS>ab0kA`]OU>ab0jA_]OV>]k0100O10O10O100O1001O2M3N2N2N3M3L3N1O2N2N2M3N3M2N2N3L4M2N3M4KS1nN4L4K5L5K4L5J6K2N2M3N2N2M2O2N2M2O1O0O10000000000000000O2O0000000000000O1000000000001OYJYFdROf9[m0_FbROa9]m0bFaRO^9]m0fFaROZ9^m0jF_ROV9`m0lF_ROT9_m0PG_ROo8am0TG]ROl8bm0VG]ROj8lk0[FcPOn0_3g8mk0]FbPOn0`3e8mk0`F`POn0a3b8nk0bF_POn0b3`8ok0cF^POm0c3`8nk0eF]POm0d3^8ok0fF[POn0e3\\\\8ok0hF[POm0e3[8Pl0jFXPOm0f3Z8Ql0kFWPOl0h3Y8Ql0lFUPOm0i3W8Ql0nFUPOl0i3V8Rl0oFSPOm0j3T8Rl0RGQPOl0l3R8Sl0SGooNl0n3Q8Rl0UGnoNl0n3P8Tl0VGkoNP1l3j7Xl0XGjoNT1i3d7]l0YGioNX1e3`7al0ZGgoN]1b3Y7fl0\\\\GfoNc1]3Q7ml0^GcoNg1[3k6Qm0aGaoNg1\\\\3h6Sm0bG_oNh1]3f6Tm0jJkROV5Tm0mJjROS5Vm0nJhROS5Xm0mJhROS5Wm0oJWQOWKGj9Ro0PKTQOYKIg9So0QKRQOZKJf9Ro0QKRQO\\\\KKc9So0RKPQO]KLa9To0RKoPO_KL_9To0TKnPO^KN^9To0TKmPO`KN\\\\9Uo0UKkPOaKOZ9Uo0VKkPOaK0Y9Uo0WKjPOaK0X9Vo0WKiPObK1X9To0WKkPObK0W9Uo0XKiPObK2V9Uo0XKiPObK2W9To0VKlPObK0Y9So0UKmPObK0Z9Ro0TKnPOaK1\\\\9Po0RKQQOaKO^9on0QKRQOaKO_9on0oJRQObKO`9nn0mJUQOaKNd9kn0kJWQOaKNe9jn0jJYQO`KMg9in0hJ[QO`KMi9gn0gJ]QO_KLk9fn0eJ_QO`KLk9dn0eJaQO^KLn9bn0cJdQO^KJQ:`n0aJfQO]KKS:^n0_JiQO]KIU:]n0^JkQO[KIX:[n0\\\\JmQO\\\\KHY:Zn0ZJPRO[KG]:Wn0WJTRO[KE_:Vn0VJVROYKEb:Tn0TJYROXKEd:Qn0SJnSOn5Ql0QJPTOQ6nk0nISTOS6mk0kITTOW6jk0fIYTO\\\\6ek0aI
_TO`6_k0^IcTOc6\\\\k0ZIgTOh6Wk0VIkTOl6Sk0RIoTOo6Pk0oHSUOR7Sg0UETXOg3k0V7if0^EWXOZ3R1Z7_f0fE[XOn2Y1\\\\7Vf0oE]XOc2_1`7Qf0oE_XOa2`1a7Qf0lE`XOe2]1`7Sf0iEaXOj2Y1^7Vf0fEbXOn2V1\\\\7Yf0dEcXOQ3R1\\\\7[f0aEdXOU3o0[7\\\\f0`EeXOW3m0Y7_f0^EeXO[3k0W7`f0]EeXO^3i0V7bf0ZEfXOb3f0T7df0YEgXOf3b0R7gf0WEgXOi3`0P7if0WEhXOj3=P7kf0TEiXOm3;o6lf0TEiXOn3;n6lf0REjXOQ49m6mf0REjXOS47l6of0PEjXOU47j6Xe0_DjXOa0h1W46i6We0`DjXO`0i1X45h6Xe0aDjXO>i1Z46e6We0cDjXO>j1Z46c6Ve0fDiXO<l1\\\\49^6Qe0kDjXO:l1^49\\\\6Qe0lDjXO:l1^49\\\\6Qe0mDiXO9m1_49[6od0nDkXO8m1`4QO[K]Nn:jg0PEkXO5n1c4lNaK]Ng:ng0PEkXO5n1d4hNeK`Na:og0QEkXO5n1e4eNgKbN_:og0QEkXO4o1e4cNjKdN[:og0RElXO3o1f4_NlKgNY:og0RElXO2P2h4\\\\NmKiNW:ng0REmXO2P2h4ZNoKkNU:mg0TEmXO0Q2i4XNoKmNT:mg0TEnXOOP2k4VNPLoNQ:mg0UEnXOOP2k4UNQLPOQ:lg0TEoXOOP2l4SNQLROP:lg0TEPYOMP2n4RNPLTOP:kg0QEUYOLm1n4YNkKPOW:gg0PEQ]Oi4cLdKmN_:\\\\m0hIQTO^KkNf:Wm0eIjUOX6Xj0cIoUOY6Sj0\\\\IZVO_6ii0RIiVOh6[i0iHUWOR7nh0eHkWOh6ei0aGVWOj7\\\\i0]GRWO]8gQ1H9G8H9G=Bl0kMidN\\\\Ki\\\\1Y2g\\\\gT1\"}}, {\"image_id\": 98, \"category_id\": 2, \"bbox\": [3.2584176063537598, 757.888916015625, 1200.430908203125, 457.238037109375], \"score\": 0.9999992847442627, \"association_id\": 1, \"segmentation\": {\"size\": [1355, 1943], \"counts\": 
\"ji4j3TU1h1SOh0C:G8K5K5L4K4M4L3O1N101N101N101O0O101O00001O00001O001N101O001O001O1O001O1O1O1O1O1O1O2N1O1O1O1O1O1O1O1O1N101O1O001O1O001O1O001O001O010O1O001O1O1O010O1O1O1O1O2O1N2N2N3M2O2M2N1O010O1O0010O01O010O00010O000010O000001O0001O01O00001O00001O001O001O001O003M3M3M4M3L1O001O1O001O001O00001O00001O00000010O000001O0000001O0000001O01O01O001O001O001O1O00<D5L0O1O1O001O1O001O001O0010O0001O00001O0000010O0001O01O01O010O010O010O010O11N100O101N1O100O2N100O1O100O1O01O01O001O01O01O00010O00000001O01O01O001O00001O010O001O1O003M4L5L4K1O2N1O1O1O1O100O001O1O001O10O01O001O00001O010O001O010O1O10O01O100O100O100O101N1O6K6I=D=B9G5L3L4L4L4M3L4L3M3N0O2N1O1O2O0O1O1O1O1O100O1O1O1O1O2N1O1O2N2N1O2N2N2N3M3M3M3M5K<Dc0]O5K4L4L3M001O1O001O001O001O000010O0001O0000001O0000001O000000001O00001O00001O00001O00001O001O00001O001O00001O00001O0000001O000000000000O10000O10000O10000O100O100O100O10000O10000O1000000O100000000O100000000O10000000000O1000000O1000000O10000O100O10000O100O1O100O1O100O1O10O0100O100O10000O10000O1000000O100000000O10000O10000O10000O10O0100O100O100O100O100O100O10000O10O1000O1000000O1000000000000000O100000000000000000001O000O100000000000000000000O101O0000000000000O10000000001O000O2O00001O00001O001O0O2O001O00001O001O0O10001O0000001O000O101O00000000001O000O10000000001N10000O10000O10000O10000O2O0O10000O100O100O100O2O000O100O1O100O100O2O000O10000O10000O100O10000O100O100O1O100O1O10O01O1O1O1O1O1O100O1O100O10000O100O10000O1000000O10000000000O100000000O100000000O1000000O10000O10000O10000O100O100O10000O10000O1000000O2O000000000O10000000000000000000000O1000000000000000000000000000000O1000000000000000000000O100000000000000O1000000000000O100000O100000O100000000000000O1000000000000O10000000000O1000O010000O010O10O0100O010O0010O01O010O01O010O010O010O010O01000O0100O10O10O10000O01000O1000O010000O010O10O10O10O1000O0100O00100O10O01O100O10O010000O100O0100000O1000O10O1000000000000000000000000O1000O10000000000000000000O100000000000O1000000000O100000000000000O1000O010000O10000O10000O10O0100O100O10000O10O10O1000
000O1000O10O1000000000O010000000O1000000O100000000O100000000O10001O00000O1000000000000O100000000000000O100000000000000000O100O10000O100O100O100O1O1O100O1O1O1O1O1O1O1O1O1O100O1O100O100O1O10000O100O100O100O2O000O2N101N101N1O2N101N2N2N3M2M3N2N3L3M3L5L3M4L4L8I6H_ShQ1\"}}, {\"image_id\": 98, \"category_id\": 1, \"bbox\": [58.961021423339844, 308.2771911621094, 1304.2681884765625, 713.8193359375], \"score\": 0.9999991655349731, \"association_id\": 1, \"segmentation\": {\"size\": [1355, 1943], \"counts\": \"Van21XZ16K4XgNFWW1?ahNH[W1=^hNI_W1;ZhNKcW19VhNNeW17UhNMiW16QhN0lW1Z1M3L3N3M3M2N3M5J8I7F:@?B;I6N3L4L3M4L3N3L4K4M4L4L3M4K5J5L5J5L5K4L4L5M2N2N2N3M2N2N2N2O1N2N2O1N2N2O0O2N2O0O2N1O2N2N101N1N3N1O2N1N3M2N2M4M2M4M2M4M2M3N3M2O2N1N2O2N1O1O2N1O1O2N1O1O2N100O2N1O100O2N100O2N100O2N1O2O0O2N100O2N101N2N101N1O2O1N1O2O0O2O0O2N101N101N100O2N101N101N100O2O0O101N100O2O0O2O0O2N1O2O0O2N2O0O2N2N2O0O2N2N2N2N2O0O2N2N1O2N2N1O2N1O2N1O2N1O2N1O101N1O101N100O2O0O101N10001O0O101O00001N101O001O1N101O001O1O1N2O3M5K4L5J6K5K6J6J5K6I5L4L4L4L3L4M3M3M2N3M2M3N2N2N2N2M3N1O2N2N1O2N2N2N2N2N2N2N3M2N3M3M3M4L5K4L4L5K4L4L3M4L3M3M3M2N001O1O0O2O001O001O001O001O00001O000000001O00000000000000000000000000001O0000000000000O1000000000000000001O00000000000000000000000000000001O0000000000000000001O000000000000000000001O00000000000000000000001O000000000000000000000000000000000000000000000000000000000O10000000000000000000000000001O00000O10000000000000000000000O100000000000000O1000000000000O10000000000O10000000000O100000000O1000000O1000000O1000000000000O10000000000000000000000O10000000000000000000000O10000000000000000000000O100000000000000001O00001O0000001O00001O0000001O0000001O000000001O000000001O00000000001O00000000001O00000000000000000000000000000000000001O00000000000001O000000000000000000000000000000000000000001O000000001O0000001O00001O001O00001O001O001O1O001O001O1O1O1O001O1O1O001O1O001O001O001O00001O001O0000001O000000001O00001O0000001O0O101O0000001O00001O001O00001N101O00001O001O001O001O0O2O001O001O00001O0O101O00001O000O2O
00001O00001N1000001O0O101O00001O0O10001O0O101O00000O2O0000001N10001O00O010000O1000000O100000000O1000000O10O100000O10000000000O1000O100000O10000000O10O1000000000O1000O100000000000O1000O100000O1000000O0100000O10000O010O100O1O10O01O1O1O1O1N101M3N2Ml_OfXO]=Vg0fBkXOY=Qg0lBPYOS=kf0QCWYOn<hf0RCYYOn<gf0RCZYOm<ff0SCZYOm<ff0SCZYOm<ff0SCZYOm<ff0SCZYOm<ff0SCZYOl<gf0TCYYOl<gf0TCYYOl<hf0SCXYOm<hf0SCXYOm<hf0SCXYOm<if0QCXYOo<kf0nBUYOR=nf0kBSYOT=Pg0iBPYOW=Tg0eBlXO[=Yg0`BgXO`=me0\\\\@oZOo1TOe=le0a@Q[Oe1SOj=ke0d@S[O_1ROl=ke0h@T[OY1QOo=ke0h@X[OU1mNS>je0j@\\\\[Oo0jNW>ie0k@`[Ok0hNY>ge0m@e[Of0dN]>fe0o@g[Ob0cN_>ee0PAi[O`0bN`>de0QAj[O?bN`>de0QAk[O>aNa>ce0RAl[O<bNb>ae0SAm[O;cNa>_e0UAo[O9bNb>^e0VAP\\\\O8bNb>]e0WAR\\\\O6aNb>^e0YAP\\\\O5bNb>]e0ZAR\\\\O3bNb>[e0\\\\AS\\\\O2bNb>Ze0]AT\\\\O1bNb>Ye0^AV\\\\OOaNc>Xe0_AW\\\\ONbNb>Ve0aAY\\\\OLaNc>Ve0aAY\\\\OLaNc>Ue0bAZ\\\\OJcNc>Re0dA\\\\\\\\OHbNd>Qe0eA]\\\\OGbNd>Pe0fA_\\\\OEbNd>od0gA_\\\\OEbNd>Pe0eAa\\\\ODaNe>Pe0eAa\\\\ODaNf>od0dAc\\\\OBaNi>md0cAe\\\\OA_Nl>md0aAf\\\\O@`Nn>kd0`Ah\\\\O^O`NQ?id0_Aj\\\\O\\\\O`NT?gd0^Al\\\\OYO`NY?ed0\\\\Am\\\\OWObN[?cd0\\\\AY^Od>ia0YAY^Of>ha0WA[^Oh>ga0UAZ^Ok>ga0SA[^Ol>ga0QAZ^Oo>ga0o@[^Oo>ha0m@Z^OS?ha0j@Z^OU?ga0i@Z^OW?^f0O000O2O001O1O001N2O1O2N1O2N3M3L5L6J1O2N1O1O2M2O2N1O1O1OO10O010000O01000O100O10O10O100O10O0100O100O100O010O100O1O1O001N2O1O1O1O100O00100O100O1O100000O2O00001O0O10001O00000O10001O000O1000000O10001O000O1000000O10000O1000000O10000O2O000O10000O100O2O0O2O0O101N101N100O2O0O2O000O2O0O2O001N2O1O1N2O1O1N2O001O1N2O001O1N101O001N2O001O001N101O1O1O1N2O1O2N001N101O001O0O2O001O0O2O001O0O2O001O0O101O0O2O001N10001N10001N101N10001N100O2O0O101N100O100O2O000O101N10000O2O0O10001N100O2O000O2O000O2O001N2O2M2O1O0O2O1O1N2O001N2O001O0O2O002M2O2M3N1O2M3N2M2O2M3M2O2M3N1M4M3M3M2M4M3M3M4L3M4L3M3M4L3M4M2M4L3M4L3N2M4L3N2M3M2O2M3K5J5K5J6K5K5L4M3M3M4K4M3M3M3L4M3M3M3N2N2N2N2N2N2N2N3M6K5`NRiNXOTW1b0nhN\\\\OXW1=lhN@YW1;ihNCZW1:hhND[W18hhNGYW17jhNFYW17ihNGZW15ihNIYW15ihNJYW13jhNJXW13khNKXW12jhNLXW12jhNMXW10jhNNXW1OkhNOXW1NjhN1WW1MkhN1XW1LjhN2XW1L
jhN3hX1Nlhoh0\"}}, {\"image_id\": 98, \"category_id\": 2, \"bbox\": [1176.906982421875, 730.9932861328125, 686.2689208984375, 199.8531494140625], \"score\": 0.9999969005584717, \"association_id\": 2, \"segmentation\": {\"size\": [1355, 1943], \"counts\": \"dffd13SZ15M3N3I6L5N1N200O1O2N1O100O100O1O1O100O1N3M2N2M3M3L4O1O100O1O100O100O10000O2O000O1000001O0O1000001O0O10000000001N1000000000000000000000001O00000000000000000000000000000000000O10000000000000000000000000000O100000000000000000000O100000001O000O100000000000000O100000000000000000000O10000000000000000000000000000O1000000000000000000001O0000000000000000000000000000000000001O0O1000000000000000000000001O000000000000000000001O0000001O00000O10001O000000000000001O000O1000000000000O100000000O10000000000O10000000000000000O1000000000O1000000000000000000O1000000000000O100000O1000O10000000O1000O1000O1000000O1000O01000000O10000000000000000O100000O1000000000000000000000O100000000000000O100000000O1000000O10000O10000O10O1000000000O10000000O1000000000O01000000O100000000O10O100000000O1000000O100O01000O1000000O1000000000000O10O1000000000O1000000O1000000O100O10000O100000O1000O10000000000000O10000000O1000O10000000000O100000O100000000O01000O100O10000O10000O0100000O1000000000000000001O0O100000000000000000000O2O000000000O2O00001N101N101O0O101O0O2O001N101O0O2O00000O2O00001N100O2O0O2O0O2N2N3M5GWaX5\"}}, {\"image_id\": 98, \"category_id\": 1, \"bbox\": [659.3248291015625, 255.4942169189453, 1250.928466796875, 589.3428955078125], \"score\": 0.9999054670333862, \"association_id\": 2, \"segmentation\": {\"size\": [1355, 1943], \"counts\": 
\"X_VS17gY1>L4M3M3M3N2M3N2M3N1O1N2O2M2N2O1N3M2O1N2N2N3M2N2M3O1N3N1O1O1O1O1O2N1O1O1O1O1O1O1O2O0O1O1O1O1O1O101N1O100O100O101O0O1000001N10000O101O000O101O0O102N1O3L3N2N3M2M4M3M1N101O000O2O00001N100XoNcKbk0]4\\\\TOfKck0[4ZTOhKdk0Y4YTOjKgk0V4WTOmKhk0T4UTOoKjk0Q4TTORLkk0n3TTOSLkk0n3TTOSLlk0m3STOULlk0k3STOVLmk0j3RTOWLmk0j3RTOXLmk0h3QTOZLok0g3nSO[LRl0e3lSO^LRl0c3mSO^LSl0b3kSOaLTl0_3kSObLUl0^3jSOcLUl0^3jSOdLUl0\\\\3iSOfLWl0Z3hSOhLWl0X3iSOhLVl0Y3iSOiLVl0W3iSOjLWl0V3hSOlLVl0V3hSOlLWl0T3iSOlLVl0U3iSOmLVl0S3jSOnLUl0R3jSOoLUl0R3kSOoLTl0Q3lSOPMSl0P3lSORMRl0o2nSORMQl0n2oSOSMPl0m2oSOWMmk0k2RTOWMlk0i2TTOYMjk0g2UTO\\\\Mhk0e2XTO]Mfk0c2YTO`Mdk0a2\\\\TO`Mck0`2]TObMak0^2^TOeM_k0\\\\2aTOfM]k0Z2bTOgM^k0Y2bTOhM\\\\k0Z2cTOfM]k0Z2bTOhM]k0X2cTOhM]k0X2bTOjM\\\\k0X2cTOhM]k0X2bTOjM]k0W2aTOjM_k0V2aTOkM^k0V2`TOlM_k0T2`TOnM_k0S2`TOmM`k0S2_TOoM`k0R2^TOPNak0P2^TORNak0o1^TORNak0n1^TOTNak0m1]TOUNbk0l1\\\\TOWNbk0i1\\\\RObJX1g3[l0h1XROfJ[1d3[l0h1UROhJ_1c3Zl0e1RROnJb1_3[l0d1nQORKe1^3Zl0Z2eSOjMWl0W2gSOnMUl0S2iSORNSl0n1mSOUNPl0l1nSOYNnk0h1PTO\\\\Nmk0e1RTO_Njk0a1UTOcNhk0^1VTOeNhk0\\\\1WTOgNfk0Y1YTOkNck0W1[TOlNck0U1\\\\TOmNQ2]JXf0g6eWOnNQ2\\\\JZf0f6dWOQOo1ZJ]f0f6cWOQOn1ZJ_f0e6bWOSOm1YJaf0e6`WOTOm1YJbf0d6`WOTOl1YJdf0c6_WOVOl1WJef0d6^WOVOk1WJgf0d6\\\\WOWOk1WJhf0b6]WOWOj1XJhf0c6\\\\WOWOj1WJjf0b6[WOYOj1UJkf0d6YWOWOl1UJkf0j6RWOROS2TJkf0o6mVOmNX2TJjf0U7hVOhN^2SJjf0c=VYO]Bjf0c=WYO\\\\Bif0d=WYO\\\\Bhf0e=XYO[Bhf0e=XYO[Bhf0e=XYO[Bhf0e=XYO[Bgf0f=YYOZBgf0f=ZYOYBff0g=ZYOYBef0h=[YOXBef0h=[YOXBef0h=[YOXBdf0i=\\\\YOWBdf0j=[YOVBdf0k=\\\\YOUBdf0k=\\\\YOUBdf0k=]YOTBbf0m=^YOSBbf0m=^YOSBaf0n=_YORBaf0n=_YORBaf0n=_YORBaf0n=_YORB`f0o=`YOQB`f0o=`YOQB`f0o=`YOQB`f0o=aYOPB_f0P>aYOPB_f0P>aYOPB^f0R>aYOnA_f0R>aYOnA_f0R>aYOnA_f0R>aYOnA_f0R>aYOnA_f0R>aYOnA_f0R>aYOnA^f0S>bYOmA^f0S>bYOmA^f0S>bYOmA^f0S>cYOkA^f0U>cYOjA]f0V>cYOjA]f0U>eYOjA[f0V>eYOiA\\\\f0W>eYOhA[f0X>fYOgAZf0X>hYOgAWf0Z>m20000O1000000O1000000O0100000O1000000O100000O010000O1000000O10000O0100000000000001O00000000001O000000001O0000000O101O000000001O000000001O000000001O000000001O0000001
O000000001O000000001O000000001O000000001O0000001O00000nTO`AYj0`>dUOdA[j0]>aUOhA]j0X>aUOkA^j0U>`UOnA_j0R>`UOPB_j0P>`UOSB^j0n=`UOTB_j0l=`UOVB`j0i=_UOYB`j0\\\\<_UOnC0H`j0X<cUOoCMJ_j0V<eUOQDJK`j0R<iUORDGM_j0P<kUOYETj0d9dUOnD:^1Qj0b9jUOkD7d1ni0a9lUOjD6e1ni0a9lUOjD6e1ni0a9lUOjD6f1mi0`9mUOjD6f1mi0`9mUOkD5e1ni0`9mUOkD5e1ni0`9mUOkD5f1mi0_9nUOkD5f16aMRi0n;cVOkD4\\\\2Yi0i8cVOkD4\\\\2Yi0h8dVOlD3]2Xi0g8fVOkD2^2Xi0g8fVOkD2^2Xi0g8fVOlD1]2Yi0g8fVOlD1]2Yi0g8fVOlD1]2Yi0g8fVOlD0^2Zi0f8fVOmDO]2[i0f8fVOmDO]2\\\\i0e8eVOnDO]2\\\\i0e8fVOnDM^2\\\\i0d8gVOnDM^2\\\\i0d8gVOoDK^2^i0c8gVOoDK^2^i0c8gVOPEJ]2_i0c8gVOQEI\\\\2`i0c8hVOQEF^2ai0a8iVOREE]2bi0a8iVO`HWi0`7iVO`HWi0`7jVO`HUi0`7kVO`HUi0_7lVOaHTi0_7mVOaHRi0_7nVOaHRi0_7oVOaHPi0_7PWObHoh0^7RWOaHnh0_7RWObHmh0^7TWObHkh0^7VWObHih0^7XWOcHfh0\\\\7\\\\WOeHch0Z7^WOgH`h0Y7aWOiH\\\\h0Z7WWOnHkh0Y<0O10000O1000000000000000000000001O0000000000000000000000000000001N10000000000000000001O01O0001O00001O01O01O00001O010O001O000010O000000000010O0000000001O01O00000001O000001O0001O0000001O00010O00001O00001O00000010O0001O00001O00001O0000010O00001O0000001O00001O000010O0001O001O001O001O001O001O001O1O001O00001O001O001O001O00001O001O000010O0001O001O00001O00001O00001O001O1O001O001O1O1O1O001O1O1O001O1O1O001O1O001O001O1O001O001O1O001O001O001O001O1O001O001O001O1O001O001O001O001O001O1O010O001O001O001O001O001O1O001O001O001O001O1O001O001O001O1O001O001O1N101O1O001O1O001O1O1O1O001O1O1O1O1O001O1O001O1O001O001O001O001O0O2O00001O001O0000001O000000001O0000001O0000001O00001O0000001O00001O00001O0000001O00001O0000001O0000001O0000000000001N1000000000001O0000000O1000001O00000000000O10001O000000000O1000001O0000000O100000000000001O000O10000000000000000000000O2O00000000000000000O10001O000000000O100000001O00O10O100000O1000O1000O10000000O01000000O1000O10O1000000O10O10O1000000O1000O010000O100O1000O100000000O100000000O2O0000000O10000000000O100000000O1000001O0O100000000O10000000000O10001O0O101O000O101O0O101N10000O2O0O101O0O101N10000O2O000O2O00001N1000001N10001O0O101O000O101O001O1O0O2O001O1O1O0O2O1O
1O1O1O1N2O1O1O1O1N2O1O1O2N1O1N3N1O2N1O1N3N1O1O2M2O1O1O1O1N2O1O1O1N3N1N3N2M3M2N3M4L3L4M4K7I7J7H7I7I7H8G:Di0VOd1cM_S[1\"}}, {\"image_id\": 99, \"category_id\": 1, \"bbox\": [1337.210693359375, 398.0515441894531, 213.2142333984375, 544.3681640625], \"score\": 1.0, \"association_id\": 4, \"segmentation\": {\"size\": [1536, 1555], \"counts\": \"ddho12h_1d0]O;G5K4L4K5K5J6I6M4M3iiNhMbn0Z2YQOlMen0V2RQOSNkn0o1oPOXNnn0k1nPOXNPo0j1`nNnMbM<jS1j1XnN_NbMIST1k1UnNhN_MAXT1k1SnNnN]MZO]T1n1mmNS1oQ1UOemNS1VR1XOZmNR1_R1]OalN[1XS1UOVlNT1eS1YOmkNm0nS1e5M3M3L5LgF]lN[6^S1hIilNS6QS1QJUmNk5fR1XJ`mNd5\\\\R1_JfmN`5TR1gJgmN]5TR1iJgmN[5TR1kJfmNZ5VR1mJcmNW5XR1[4L4N2N2N2O1N3N1N2N2N20001N100000000O1N2O1N101N2O1O1O01O1N2O0N3N2M3NO0O1^N^BfPOb=\\\\o0lBSPOb:dR1S1:M3O1O2N2M3K4N3N2N2O10O10000000000000000O2O001O1O002N2N3M4L3M3M2N2N2N3M4L4M2M2N3M2N2N2N2N101N1O1O010O00100O010000O101O5K3M2O1Nf0ZO00O100O10O01000O100O1000O10000O2N5K9Hk0TO9G5L2L3N1N2O1N2O1N101OgJ^oNSKbP1b4RPOVKno0c4cPOSK]o0f4SQOPKnn0i4aQOoJ_n0m4iQOnJXn0n4nQOPKRn0m4SROPKom0l4VROQKkm0k4\\\\ROPKgm0k4`ROPKcm0k4fROmJ^m0n4kROkJXm0P5PSOjJSm0R5SSOhJSm0n4XSOjJUm0a4\\\\SOTKZm0j1nnNiLd4h0`m7\"}}, {\"image_id\": 99, \"category_id\": 2, \"bbox\": [536.6045532226562, 715.7157592773438, 165.76513671875, 43.640869140625], \"score\": 0.9999998807907104, \"association_id\": 1, \"segmentation\": {\"size\": [1536, 1555], \"counts\": \"\\\\f`i03m_11O2N1O1O001O1O001O1O1O1O001O1O001O1O001O001O001O00001O001O0000001O001O00001O00001O00000010O0001O01O0000001O01O0001O0000010O0000001O000000000010O00000001O00001O001O1O01O0001O00001O00001O00001O000001O00000001O0000001O0000000001O0001O00001O00000001O00000001O0000000000000000010O1O1O2N1O001O001O1O1O1O00002M2ObhTX1\"}}, {\"image_id\": 99, \"category_id\": 1, \"bbox\": [285.9134521484375, 373.37646484375, 124.66851806640625, 444.84912109375], \"score\": 0.9999998807907104, \"association_id\": 2, \"segmentation\": {\"size\": [1536, 1555], \"counts\": 
\"eeo=<\\\\_1c0@9J5K4L4L5L4J5G8eGiMcROa2Pm0RNdROS2Wm0YN_ROk1^m0_NXROd1em0`NWROd1em0^NYROg1bm0[N]ROg1`m0_NYROd1fm0dNUlNVN]10i1Y3dP1ZOblN^Na2\\\\2lP1[OXlNeNb0bNj0d3[R1ZOnkN4[1f0gR1h2PmN\\\\MnR1g2llN^MRS1e2glNcMUS1_2elNhMXS1[2`lNlM^S1\\\\7O1O2N1O3M5K5K3M2N4K7J6J4M1N1O1O1O001O00001O0000000000000001O001O001O1O1O1O3M4SCdmNn;bR1eChmNX<RS1N1O2N1O001O1O1O3M2N1O2N001O1O002N1O1O2fKikNgLYT1d2flNPM]S1e2ZmNdJoN>jS1a4doNVK^P1a4ooNYKSP1`4UPO\\\\Koo0^4WPO_KPP1W4VPOeKWP1k3RPOoKTP1i3TPORLoo0h3WPOSLlo0k3XPOPLlo0l3V6N3L;G<D7J7H>A7H8I6I7H>]O[nde1\"}}, {\"image_id\": 99, \"category_id\": 2, \"bbox\": [1083.4835205078125, 1011.291748046875, 471.5164794921875, 201.3675537109375], \"score\": 0.9999997615814209, \"association_id\": 0, \"segmentation\": {\"size\": [1536, 1555], \"counts\": \"^Pkb1h0U_17J5L2M4M2O1N2O1N101O1O2M101O1O1O1N101O1O001O1N10001O001N101O1N101O1N2O001N10001O000000001O0O10001O00001O00001O00001O00001O00001O000000001O0001O0001O0000001O001O001O001O00010O0000001O0001O0000001O01O0001O00000000010O00000001O00001O00000010O000001O000000000001O000000001O00001O001O0000001O01O000001O000000000000001O0000000O10001O000O2O00001N101O0O2O000O101O000000000O2O0000000000001O0O1000001O00001O001O001O001O00001O0000001O0000001O001O0010O01O001O00001O01O010O00100O2N101O0O10O010O01O00010O001O001O101N3M2N1O1O0010O01O000010O0001O100O3N3L1O100O1O10O01O001O1O1O1O1O2N2N5K5K4L2N2N1O2N001O00001O001O001O001O010O001O001O000000001OWMQdNa1o[1^NUdN_1k[1`NYdN]1g[1bN_dNY1a[1fNcdNW1][1hNedNW1[[1gNhdNX1Y[1fNidNY1W[1fNkdNY1U[1gNldNX1T[1gNmdNY1S[1fNodNY1Q[1gNPeNX1P[1gNQeNY1oZ1gNReNX1nZ1hNReNX1nZ1hNSeNV1nZ1jNReNV1nZ1jNReNV1nZ1jNSeNT1nZ1lNReNT1nZ1lNSeNR1nZ1mNSeNS1mZ1mNSeNS1mZ1mNTeNQ1nZ1nNReNR1nZ1nNSeNQ1mZ1oNSeNP1nZ1POReNP1nZ1POSeNo0mZ1QOSeNo0mZ1QOSeNo0mZ1QOSeNo0mZ1POUeNo0kZ1QOUeNn0lZ1ROTeNn0lZ1ROTeNn0lZ1ROTeNn0lZ1ROTeNn0lZ1QOUeNo0kZ1QOVeNn0jZ1ROVeNn0jZ1ROVeNn0jZ1ROVeNn0jZ1QOWeNo0iZ1QOWeNo0iZ1QOWeNo0iZ1QOWeNo0iZ1POXeNP1hZ1POYeNP1fZ1POZeNP1fZ1POZeNP1fZ1POZeNQ1eZ1nN\\\\eNR1dZ1nN\\\\eNS1cZ1mN]eNT1bZ1lN^eNT1bZ1lN^eNU1aZ1kN_
eNU1aZ1jN`eNV1`Z1jNaeNV1^Z1jNbeNV1^Z1jNbeNV1^Z1jNbeNW1]Z1iNceNW1]Z1iNceNW1\\\\Z1jNdeNV1[Z1kNeeNV1YZ1kNfeNV1YZ1kNgeNU1XZ1lNheNU1VZ1kNkeNU1TZ1lNleNT1SZ1mNmeNT1QZ1mNoeNS1oY1oNQfNR1mY1oNRfNS1lY1nNTfNR1kY1oNUfNR1jY1nNVfNR1iY1oNWfNQ1iY1oNWfNR1gY1POWfNQ1iY1oNWfNQ1hY1QOVfNQ1iY1oNVfNR1iY1POUfNR1iY1POUfNR1iY1oNVfNR1iY1oNUfNT1iY1nNUfNT1iY1mNVfNT1iY1nNTfNU1kY1kNTfNV1kY1lNTfNT1lY1lNSfNV1lY1kNRfNV1mY1lNQfNU1oY1lNPfNT1PZ1lNmeNW1SZ1jNieNY1WZ1hNfeNZ1ZZ1gNdeNZ1\\\\Z1gNbeNZ1^Z1gN`eNZ1`Z1gN^eNZ1bZ1hN[eNY1eZ1Q200O10000000000000000000000000000000000000000001O001O001O1O001O1O8H2N1O1O2N1`KgdNV4d[1N1O1QLYdNc3h[1ZL[dNe3f[1WL^dNh3o[1M3[LlcNZ3`\\\\1L3Lf0YOn0gN[lN\"}}, {\"image_id\": 99, \"category_id\": 2, \"bbox\": [97.48886108398438, 791.124267578125, 245.6373291015625, 70.43292236328125], \"score\": 0.9999997615814209, \"association_id\": 2, \"segmentation\": {\"size\": [1536, 1555], \"counts\": \"Tii42n_11N2N3M2O001N10001N10000O1000000O10000000000000000000O10000000000O10001O01O00000000001O00001O0O101O001O0000001O001O001O00000O2O00001O0000001O0000001O00001O0000001O001O001O00001O001O001O001O001O00001O1O1O1O1O000010O01O1O001O001O01O01O0010O01O0001O01O01O010O01O01O000001O01O000000001O000010O01O0000001O000000000000001O0000000000000000001O0000000010O00000001O001O001O0000001O1O1O1O1O1O001O00001O001O00001O0000001O00001O00001O1O3M8H5K2N1Nbeki1\"}}, {\"image_id\": 99, \"category_id\": 1, \"bbox\": [662.386474609375, 409.4927978515625, 134.443603515625, 346.93414306640625], \"score\": 0.9999996423721313, \"association_id\": 1, \"segmentation\": {\"size\": [1536, 1555], \"counts\": 
\"PWno03b_1=J6J5G:@?ZOg0I6lLTNogNP2hW1[NPhNi1jW1aNogNc1iW1iNngN\\\\1lW1mNngNV1WV1h0ciN\\\\OnU1U1liNoNlU1`1eiNhNPV1i5I8G8J6M3M3N3N1N2N2N200O2O00000O100O0100dNnjNjGRU1T8VkNfGjT1W8]kNeGcT1X8ckNeG]T1X8hkNeGXT1Z8lkNdGTT1[8PlNaGQT1^8YlNWGiS1i8g1O1O1O100O1O1O1O1O1N2O100O1O100O100O10000O1O1O2M2N2oH\\\\hNe5fW1WJ`hNe5aW1YJihN^5XW1_JohN]5RW1aJSiNZ5nV1dJViNZ5kV1aJ_iNX5nX1G3L3dLjeNj0WZ1SOoeNh0SZ1VORfNe0PZ1YOYfN<kY1B]fN5fY1\\\\NUeNn0Z1a0QZ1_ORfN=oY1CUfN8mY1FZfN3gY1M\\\\fN0dY10^fNMeY11^fNJeY15`fNDdY1;cfNSOhY1l0h2N100O2O1N2O1N2N2N2OO01N101O1O3J7IlngS1\"}}, {\"image_id\": 99, \"category_id\": 1, \"bbox\": [1128.4937744140625, 417.7798767089844, 135.8258056640625, 256.8785095214844], \"score\": 0.9999996423721313, \"association_id\": 3, \"segmentation\": {\"size\": [1536, 1555], \"counts\": \"`Snd1e0Y_16K2O0O100O10OO2N1O2N2O0000O01O01O1O1N2N20O0001O1O10O1O1O1O10O10O001O100O1O1O00001O2M2N3N1O1N10O1100O100N1bNfNXdN[1b[1POXdNP1b[1\\\\OXdNe0e[1DncNc0o[1g1010O001O01O1O0O1O2O1O100O2O1N2N101O1N2O2N1N2N3M3M3N2N4M4Ln0RfNcJ]W1h5lgNhJPX1_5agNkJ\\\\X1_6N1N3N1N2O00100O1O0O101O0000XLahNoN`W1j0ihNSOWW1i0RiNSOnV1g0ZiNVOgV1e0`iNXO`V1e0fiNXO\\\\V1d0jiNWOYV1c0QjNdMVN<jW1i1ZjNcMVN>bW1g1hkNXNZT1\\\\OdgNj1[4gNcT1l0ekNPO_T1h0jkNTOWT1c0SlN[OoS1=XlNAmS13]lNIZe[>\"}}, {\"image_id\": 99, \"category_id\": 2, \"bbox\": [1057.72021484375, 862.1170654296875, 363.7568359375, 81.94525146484375], \"score\": 0.9999967813491821, \"association_id\": 4, \"segmentation\": {\"size\": [1536, 1555], \"counts\": 
\"^[ha16i_12O0O10001O0O101O0000001O0O100000001O0000000000001O000000000O10001O00000000000000000000001O00000000000000000000001O00001O001O001O001O00000010O0001O001O000010O000001O01O0001O0001O01O0000001O01O0000001O01O000001O00000000001O00001O01O0001O000000000001O000001O000001O000001O0000000000001O000000000000001O000000001O000000000000001O0000000000001O000001O01O0000000001O0001O0000000001O01O0000001O01O0000000010O00000010O000001O01O0000001O0001O01O00001O01O000001O01O0000010O00001O0000010O0001O01O010O00010O0000010O00001O2O0O001O01O01O01O0001O1O001O1O1O1O10O01O001O0O2O1N2MUSm9\"}}, {\"image_id\": 99, \"category_id\": 1, \"bbox\": [932.5507202148438, 392.6613464355469, 106.35479736328125, 214.65127563476562], \"score\": 0.999994158744812, \"association_id\": 5, \"segmentation\": {\"size\": [1536, 1555], \"counts\": \"caX\\\\15]_12e`N>Q_1:iLTOZgNh0bX1BXgN>eX10QgNOPX1XOVfNm0i1IQX1ZOTfNQ1k1CQX1\\\\OQfNV1m1]ORX1^OneNY1P2WOQX1CmeNX1P2ROUX1HieN1Ao0_2UOXX1M`eN1NP1X2nN\\\\X12ZeN60l0Y2fNaX1d2Y25L2N2N2O1N2N2N3M5K5L5J6K5TeNTLZY1n3cfN]LRY1e3kfNmLeX1T3XgNQMeX1P3XgNUMeX1l2WgNXMhX1V3ffNnLZY1k410O101O001O0000O1000000O100O2N1M3M3I7G9M4M2M4M3N2M3M3J5M6K5I6J5L5K>@9I7J6K4M2M3N3M4K3M7K5K9RNnaN[1^^1O0Mb0_O4L2N2M4L6K5Jdo\\\\h0\"}}, {\"image_id\": 99, \"category_id\": 2, \"bbox\": [1296.1707763671875, 645.5621948242188, 69.1026611328125, 22.4647216796875], \"score\": 0.9998434782028198, \"association_id\": 0, \"segmentation\": {\"size\": [1536, 1555], \"counts\": \"YdQm11n_12O1N101O001O00001O0000010O0000001O01O00000001O00000010O0001O00000010O01O00001O00010O001O01O0000010O0O2O2N1N_[X9\"}}, {\"image_id\": 99, \"category_id\": 2, \"bbox\": [881.3692626953125, 580.9677124023438, 99.819091796875, 27.82965087890625], \"score\": 0.9996966123580933, \"association_id\": 5, \"segmentation\": {\"size\": [1536, 1555], \"counts\": 
\"\\\\b[Y15j_12O1O000O1000000O1000001N10000000000N2O2O00000000001O1O001O001O00001O001O0000001O00000000000000000000001O00000000@i`N7W_1Gm`N7T_1Dh`N06<R_1DQaN;[_1O01O0O2O0000001O0000000001O00000000010O000000000000001O001O0O3N1O2N2MUmPk0\"}}, {\"image_id\": 99, \"category_id\": 2, \"bbox\": [402.7577819824219, 1289.4136962890625, 1035.54833984375, 243.39111328125], \"score\": 0.9995349645614624, \"association_id\": 0, \"segmentation\": {\"size\": [1536, 1555], \"counts\": \"Ummb0U2]]1d0G5K4L5L3L4L4K5K4_OVLkdNk3U[1VLidNl3U[1VLidNk3V[1WLidNi3W[1XLgdNi3Y[1XLedNi3Z[1YLddNi3[[1WLedNi3Z[1YLddNh3\\\\[1>00O10000O100O10000O1O1O1N2O1O1O1O100O10000O1000001O0O10000000000O10000000000000000000000O100000000000000000000000000000000000000000000O10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000O10000000000000000000000O1000000000000000000O1000000O10000O100000000O100000000O100000000O100000000000000O100000000000000O1000000000000O100000000O1000000O1000000O100000000O10000000000O100000000000000000000O10000000000000000O100000000000000O10000000000O10000000000O1000000000000O10000000000O10000000000O100000000O100000000O100000000O1000000000000000000000000O100000000000000000000000000000000000000000000000000O100000000O10000O10000O1000000O1000000O100000000O100000000O100000000O10000000000000000O1000000000000000000000000O1000000000000000000000000000000O10000000000001O00000000000000000000000000000000001O0000000000000000000000000000000000000000000000000000000000O10000000000000000O1000000000000O10000000000O1000000000000000000000000000000O10000000001O0000000O1000000000000O1000000001O1O001O2N1O1O1O001O001O001O1O001SJWfNa5iY1\\\\J[fNc5fY1[J\\\\fNd5dY1ZJ_fNe5bY1YJ_fNg5nY1O1O1O0dJheNP5YZ1mJjeNR5WZ1kJkeNU5UZ1iJneNV5^Z101O001O00001O0000000000O10000000000O10000000000O100DjJneNV5RZ1kJmeNU5SZ1kJmeNU5SZ1lJleNT5SZ1nJkeNS5UZ1mJkeNS5UZ1nJjeNR5VZ1nJjeNR5VZ1oJieNQ5VZ1PKieNQ5WZ1PKheN
P5XZ1PKheNP5XZ1QKgeNo4YZ1QKgeNo4YZ1<O100000000O100O10000O1EXJ^fNh5bY1YJ\\\\fNh5dY1ZJZfNf5eY1_JVfNb5jY1:000000O10000O1000000O100O100O100O100O100O1000000000000001O2N1O2N2N1O1O001O001O001O001O001O1O001O1O001O00001O001O0000001O0000001O0000001O00001O000O2O001O001O001O1O001O001O001O00001O001O00001O0000001O0000001O00001O00001O001O001O001O001O1O1O1O1O1O001O1O001O1O00001O00001O0000001O0000000O2O000000001O0000001O000000001O0000001O0000001O000000001O000000001O00000O2O00001O001O001O001O1O1O001O1N2O1O1O001O1O001O001O001N10001O0000001O00001O0O101O001O001O1O1O1O2N2N2M2O2N2N1O2N2N2N3M2N2M3N2N2hMibNc1k]1O001O001O0O2O001O001O001O0O2O1O1O1O1O1N2O2N1O1O1N2O1O1O1O1N2O1O1N2M4M2L4L4M2M4MTal9\"}}, {\"image_id\": 99, \"category_id\": 1, \"bbox\": [1.5804253816604614, 620.52734375, 84.37232971191406, 118.95330810546875], \"score\": 0.9995343685150146, \"association_id\": 0, \"segmentation\": {\"size\": [1536, 1555], \"counts\": \"fc3a2[]16M2N2N2N2O1N1000O1002N001O2N1O1O00O11O1O1O1O1O001O1O00001O000000000000000000000000O100O1O1O1O1N2O1N2N2O1N2N2O1N2O2O0N3M3N2M4M2N3M3K5L4L6J4M2M:F5K5K7G6K6IYkSU2\"}}, {\"image_id\": 99, \"category_id\": 2, \"bbox\": [1048.9342041015625, 640.4913940429688, 175.623291015625, 26.44390869140625], \"score\": 0.9989995360374451, \"association_id\": 3, \"segmentation\": {\"size\": [1536, 1555], \"counts\": \"Sdca12m_12O1O1O001O0000001O000O11O01O000000001O000000000000001O0000001O0001O0000000001O00000000000000001O0001O00001O0000001O1O1O001O001O1O1O1O1N10Q`:0o_E0O1000000O2O000O10000001O01O01O000000000001O000000001O0001O0001O000001O0001O000000000001O000001O000000001O01O00000000000001O0000010O00000001O0000001O001O00001O001O0O10Xke?\"}}, {\"image_id\": 99, \"category_id\": 2, \"bbox\": [1156.7354736328125, 813.4572143554688, 335.793701171875, 75.0902099609375], \"score\": 0.9852584600448608, \"association_id\": 0, \"segmentation\": {\"size\": [1536, 1555], \"counts\": 
\"fYhg11o_10O2N101O0O2O00001O000000001O0000001O000000001O0000001O00001O00000000001O000000001O0000001N1000001O00001O00001O000000001O00001O00010O00001O000000001O00000000001O00000000001O0001XOm`Nb0R_1]Oo`Nc0Q_1]Oo`Nc0X_1O010O000001O0001O000001O000000001O00001O0000001O000000001O01O0001O00000000010O00010O00001O01O0001O0001O01O0001O0000010O000001O01O000001O01O000001O0001O0000010O0000000000010N10001N3N3KT`T3CZ`kL1O10000O2O000000000000000000000000001O000001O000000001O01O00001O10O000MX`NNWee3\"}}, {\"image_id\": 99, \"category_id\": 1, \"bbox\": [916.41259765625, 370.8796691894531, 76.2957763671875, 231.74197387695312], \"score\": 0.519130527973175, \"association_id\": 0, \"segmentation\": {\"size\": [1536, 1555], \"counts\": \"\\\\`o[1d0Z_15O04MO2M=B8J02O6H2M1N2O3M2O1N2LQMAcfN:^Y1LafNO`Y14bfNFSY1NXdN=i^1a0K6J5I4WOkNabN\\\\1Y]1j0I=F8H5K5L6JO0K5@gfNSMdV1l2ViN_MfV1_2TiNkMiV1Q2TiNWNkV1c1TiNeNkV1W1SiNoNoV1n0QiNSOQW1j0PiNVORW1h0ohNXOoV1h0UiNTOjV1n0YiNoNfV1Q1]iNmNeV1R1\\\\iNlNfV1R1[iNmNdV1S1]iNlNbV1U1^iNlNaV1T1ViNVOlV1?^fNZNP3l0eV1h0\\\\fN\\\\NP3h0iV1f0giNUOhY]j0\"}}, {\"image_id\": 99, \"category_id\": 1, \"bbox\": [1412.4307861328125, 382.5848083496094, 90.5430908203125, 145.82870483398438], \"score\": 0.2096959948539734, \"association_id\": 0, \"segmentation\": {\"size\": [1536, 1555], \"counts\": \"k\\\\nR22l_1>B;D3N3M4M6J5K5gaNcN8In\\\\1i1ebNaNh]1d1YbNVNg]1j1\\\\bNTNc]1n1[bNTNb]1V2O2N2N1O1O2N101N10N2N2L4001O1O00L4M3O1001O0000O1O1O1N2M300000000O10000001O2NO2M200O101O0O2O0O100O2M2N3O0O2O2C=H;YOTTe2\"}}, {\"image_id\": 99, \"category_id\": 1, \"bbox\": [980.4418334960938, 1150.2691650390625, 66.04534912109375, 280.58642578125], \"score\": 0.16229195892810822, \"association_id\": 0, \"segmentation\": {\"size\": [1536, 1555], \"counts\": \"QiV^1e0P_1`0PNJXcNR1X\\\\1b1J7J6I7G?C8J4L4K5I6L4M3M4J2O111O4afNWJ_X1k5^gNWJaX1j5\\\\gNXJcX1i5YgN[JhX1_600O0O10001O0001O0O2O004L3L3N4L2fLdfNK`Y1\\\\OcgNdNK\\\\OiX1e1S4K5H:Cb0kN]aN2PXbh0\"}}, {\"image_id\": 99, \"category_id\": 1, \"bbox\": 
[1353.1275634765625, 411.18359375, 94.6920166015625, 307.72113037109375], \"score\": 0.11768306791782379, \"association_id\": 0, \"segmentation\": {\"size\": [1536, 1555], \"counts\": \"aTdo16g_19F?_O:G8G8F;H7K4L4M2M4M2L4M3M3L5K4N2N2O0N3K5J5L4hL_LQjNb3mU1eLliN]3PV1PMYiN^3dV1mLPiNX3nV1oLihNR3XW1VM^hNk2cW1[MVhN`12mLiW1g1RhNX1<mLbW1R2kgNo0`Y1UO]fNh0fY1ZOXfNd0kY1\\\\OSfNc0PZ1b2bgNYJlV1b5WiNbJfN1[W1Z5SjNgJ[N1dW1T5UjNYKmU1c4XjNZKlU1`4XjN`KiU1[4[jNfKfU1T4]jNoKbU1m3ajNdKhMLgW1]4cjNPLeU1m3\\\\jNTLdU1j3\\\\jNXLfU1a3]jNaLeU1X3ZjNoLfU1m2UjN[MlU1`2SjNcMPV1W2QjNiMTV1S2kiNnMYV1n1fiNSN`V1h1\\\\iN[NkV1^1SiNcNUW1W1ihNjNZW1U1`hNmNeW1S1RhNPOTX1o0igNTOXX1l0egNTO^X1m0ZgNjNWY14keNSOKA>X1PZ11meNHI7^Z1LWfNlNWO88l0ZZ1MlfNTOmNk0YZ1NofNTOlNj0VZ1OUgNPOlNl0PZ1O]hNLdW11bhNK_W13ehNJ\\\\W15ihNDYW1<V4N5N2M2N1O1M3N2NQQ\\\\5\"}}, {\"image_id\": 100, \"category_id\": 1, \"bbox\": [199.24354553222656, 159.6803741455078, 62.07417297363281, 43.94859313964844], \"score\": 1.0, \"association_id\": 3, \"segmentation\": {\"size\": [404, 639], \"counts\": \"_k_26\\\\<5K>D<D001OO10000O1I7L400O101O0001O00000000000000O100O10O1000001O2N1]DXOX;R101O00000000O1DkDXOU;h0<N101O1O1M300N2O1O1O2O1O001O1NTod4\"}}, {\"image_id\": 100, \"category_id\": 1, \"bbox\": [302.0860900878906, 157.38580322265625, 68.0076904296875, 80.38169860839844], \"score\": 1.0, \"association_id\": 0, \"segmentation\": {\"size\": [404, 639], \"counts\": \"`fg31c<0M6hDMi92XF1f9L\\\\Fh0Q9WOoFk0\\\\:0XOTOgEm0e9VOnE5;f0g9WOiE?Nd0W:R11O00O100O10O0100O1O[O[F[Ng9b1f001O10000O1000001O00001O001O0O20O01O01O003M8SE^NW:R2M100O2O0O1000000000000000O1O1N3QOiEfNP;U1:M4M3L4ZO\\\\D6]gZ3\"}}, {\"image_id\": 100, \"category_id\": 1, \"bbox\": [375.27899169921875, 169.91172790527344, 112.366943359375, 69.8536376953125], \"score\": 1.0, \"association_id\": 2, \"segmentation\": {\"size\": [404, 639], \"counts\": 
\"e`d47\\\\<3N004L2M3O0O001O00000000O11O000O2O1O1O1O1O1O4L2N1O2N1O0O2O001N2O001O3nDhNb:f1O1O0000001OO010O10O10L3@a0O010O100000O100000001O0000001O001O0001O0O100000000O101N1O100O10001O00000001O1TEnNT:T1dETOY:o0cESO]:b11O0000000O100H`E[Na:e1_E[Nb:d1^E\\\\Nb:d1]E]Nd:b1\\\\E^Nf:a1YE`Nf:g12N1_N`El0a:nNkEk0V;J4K2N2LQhk1\"}}, {\"image_id\": 100, \"category_id\": 1, \"bbox\": [19.79524803161621, 164.19544982910156, 64.3768081665039, 59.78327941894531], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [404, 639], \"counts\": \"Tk8>U<4K;G6I8bDjNP;_1N1O001OO100O1CVEoNk:o0ZElNh:R1>O1O101O000001O00001O00000001O00000O10000O100001O1O003M5eDhNo:b1M1O1O0O100O10000000O1O1A?N2O1L4M3K6Mfij6\"}}, {\"image_id\": 100, \"category_id\": 2, \"bbox\": [234.06564331054688, 225.948974609375, 105.1055908203125, 11.649520874023438], \"score\": 0.9999998807907104, \"association_id\": 5, \"segmentation\": {\"size\": [404, 639], \"counts\": \"eZm22b<1N2O00001N1000000000001O00000000000O1000000000000000001O0000000000001O00000000000000000000000000000000000000000000000000O2O00000000000O2O0O2N1Ocg4N_XK2O1N2O00000000O100000000000000O2OZTf3\"}}, {\"image_id\": 100, \"category_id\": 1, \"bbox\": [127.52275848388672, 164.26528930664062, 34.76331329345703, 43.15098571777344], \"score\": 0.9999997615814209, \"association_id\": 4, \"segmentation\": {\"size\": [404, 639], \"counts\": \"URc12]<>E6SD[Ob;R1I2N10O0O2L4N2003M5K1O0O10FlDSOU;m091O3\\\\DTOZ;S1O1000000000010O00O2CnDUO];>b0GePl5\"}}, {\"image_id\": 100, \"category_id\": 2, \"bbox\": [1.1578482389450073, 215.29458618164062, 64.09584045410156, 7.2476348876953125], \"score\": 0.9999982118606567, \"association_id\": 1, \"segmentation\": {\"size\": [404, 639], \"counts\": \"SP11b<2O00000O10000000O11O000000000O100000000001O003MZh22bWM110000000O100001O0000000000000000000001O00000000000001O1NcdR7\"}}, {\"image_id\": 100, \"category_id\": 2, \"bbox\": [174.57469177246094, 197.4468536376953, 59.966461181640625, 4.165863037109375], \"score\": 
0.9999141693115234, \"association_id\": 3, \"segmentation\": {\"size\": [404, 639], \"counts\": \"kPV21c<000000001O00000000000O10000000000000000000000000000001O0Oab10`]N0O10000000000001O000000000000Y[R5\"}}, {\"image_id\": 100, \"category_id\": 1, \"bbox\": [470.0401611328125, 155.869873046875, 54.43109130859375, 51.817962646484375], \"score\": 0.9992443323135376, \"association_id\": 0, \"segmentation\": {\"size\": [404, 639], \"counts\": \"hbi5:Y<2O00001N2O3M1O2N010O1O1O10O02N1O00001O01O0000000000000002N1O?A1O0000O1O1001O1O0000_OSEZOm:b0e0O0N3O0N3O1O1O100O10nV]1\"}}, {\"image_id\": 100, \"category_id\": 2, \"bbox\": [322.6537170410156, 227.91943359375, 101.10635375976562, 9.025741577148438], \"score\": 0.9980260133743286, \"association_id\": 2, \"segmentation\": {\"size\": [404, 639], \"counts\": \"Seo33`<10000O1000000000001O000000000000000001O1Odf6MmlH1^<3O1000000O100000000000000000001O0000000000000000000000001O000001O000000000000000000000000001OnSj2\"}}, {\"image_id\": 100, \"category_id\": 1, \"bbox\": [293.2900390625, 158.90249633789062, 29.85162353515625, 78.09591674804688], \"score\": 0.9837781190872192, \"association_id\": 5, \"segmentation\": {\"size\": [404, 639], \"counts\": \"SUd32a<2O1M5L4I:RDAZ;o0M3N3M3M2N3M3M5LcNjEg0i9VO[Fj0f9RO]Fm0e9PO]Fo0e9kN`FT1a:O1O2XO_DN2Ma;1mDOV;O]D=e;_O^Da0a;_O_Da0l;NEBZD002N7_<HY^l3\"}}, {\"image_id\": 100, \"category_id\": 2, \"bbox\": [120.66368865966797, 203.2773895263672, 27.482810974121094, 3.87127685546875], \"score\": 0.8184593915939331, \"association_id\": 4, \"segmentation\": {\"size\": [404, 639], \"counts\": \"dZ`11b<2O0000000000000001O00000000000000001O00c_S6\"}}, {\"image_id\": 100, \"category_id\": 2, \"bbox\": [405.61273193359375, 230.54620361328125, 54.148284912109375, 9.097702026367188], \"score\": 0.31686294078826904, \"association_id\": 0, \"segmentation\": {\"size\": [404, 639], \"counts\": \"o\\\\P58\\\\<0000O2Mk`5NX_J1O10001O0000O11OO101O001O000000000001O01O01O001O0001O0001O0001O001O_\\\\V2\"}}, 
{\"image_id\": 100, \"category_id\": 2, \"bbox\": [220.2187042236328, 226.73843383789062, 207.89549255371094, 10.832412719726562], \"score\": 0.06769958138465881, \"association_id\": 0, \"segmentation\": {\"size\": [404, 639], \"counts\": \"Qnl23a<0O2O0000001O000O10000000001O000000000000000000000000001O00000000000000000000000000000000001O000000000000000000000000O10000000000O101O00000O10001N^m50bRJ2N1000000000000000000001O000000001O00001NSZ6OmeI2O10000O10000000000000001O00000000000000000000000000000000001O00000000000000000000000001O000000001NRnh2\"}}, {\"image_id\": 101, \"category_id\": 1, \"bbox\": [729.0618896484375, 392.6394958496094, 48.36456298828125, 134.24185180664062], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"YgTa08bg07I6J8L3L4M3L4oNQOmZOR1nd0YOfZOo0Pe0X1J5J5M2N1O2M3N2O000O1O1N2O1M4L3N200O2OO1000000O100O1M3N2J7G9C>Eg0]O9G9FSci5\"}}, {\"image_id\": 101, \"category_id\": 1, \"bbox\": [841.464599609375, 619.0872802734375, 51.89404296875, 96.4573974609375], \"score\": 1.0, \"association_id\": 6, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"Q\\\\jc0j0Qg0^1dN8I3N1O1O2OO00010000O10000O10001N1000001O000000000O10000O10O10O0100O101N1O3M3M2K6J6_Oa0iNfYOD49NFhbT3\"}}, {\"image_id\": 101, \"category_id\": 1, \"bbox\": [451.149169921875, 425.40631103515625, 88.073486328125, 272.6978759765625], \"score\": 1.0, \"association_id\": 4, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"lPe:b0Vg0;[Oc0^Ob0K5M4I6K5M3L4M2b]OXMc?j2Y@`M^?c2]@cM_?a2o_OSNk?P2a_OeN\\\\`0b1S_OjNk`0W4N101N10000O100O001N2O1O100O2N100O2M2J7G8L5K5M3O0O100O101O1OO100O100000000O10000O100000000O101N100O1N2N3K5H9C=K4H8G:J6G>mNQ1H5K6J8I9F8H7F>CPY[;\"}}, {\"image_id\": 101, \"category_id\": 1, \"bbox\": [293.861328125, 625.0145874023438, 52.46063232421875, 92.243896484375], \"score\": 1.0, \"association_id\": 2, \"segmentation\": {\"size\": [768, 1024], \"counts\": 
\"V\\\\o6b0_1CRd0[2M2O1O1O100O1O1O1O10000O100O10000O10000000000000000000000001O001O1O0000001O0O2O1N2M4K6F>mNnYOSOejn?\"}}, {\"image_id\": 101, \"category_id\": 2, \"bbox\": [942.2615356445312, 684.2945556640625, 81.121826171875, 19.37164306640625], \"score\": 0.9999998807907104, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"b]Uf04kg010001O0000000O10001O000000000000001O000O100001O00000001O00001O00001O00001O00000000001O00000000000O100000001O000O10000000000001O000010O000001N10000000;DUj0\"}}, {\"image_id\": 101, \"category_id\": 1, \"bbox\": [811.8316040039062, 431.534912109375, 81.5181884765625, 202.88409423828125], \"score\": 0.9999998807907104, \"association_id\": 3, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"_[Rc03kg07H4M2N3M2N2N2N2N2M2N2O1K4L5L3G8M300O02O1O0N3OPNgNT\\\\O:7m0cc05V\\\\OKlb0oNS]O_1IBTc0R3011N2N3M3N2N2O1O1O1N2001O1O1O1O1O103M1O2O1O0O1100O1N4N1N1N30N0O10g0ZOhMS]OXOob0=a]OZOcb0=k]O[OWb0`0o]O\\\\OTb0VOU]O5m0c0oa0QO_]O1j0l0Rc0iNW]OU1od0K7H5L6I8DflV3\"}}, {\"image_id\": 101, \"category_id\": 1, \"bbox\": [945.9089965820312, 463.656982421875, 69.15533447265625, 180.41253662109375], \"score\": 0.9999992847442627, \"association_id\": 5, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"_kVf0<bg03N2M1O2N2O1O1O1O1O1L4M3bLWOT]OOa1n0Ya0XOe\\\\O=h1`0aa0;T^OIja0=i]OJWb0<`]OHYb0R3M20O1N3O0N3L4O1N3M3M2M4N2K5N2M3N2N2N2N2O1O100O1000000001O00100O010O12O1O0N2N100010N1O10XOa[OmMad0h2k\\\\OfLea0d2U]ObMMK<0C`0Pc0g1`]OiMEN3P1nb0Q1j]OnMWOR1dc0NoZ6\"}}, {\"image_id\": 101, \"category_id\": 2, \"bbox\": [35.43082809448242, 651.5335693359375, 107.96896362304688, 16.00030517578125], \"score\": 0.9999979734420776, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"glm02mg02N1L4N2O10000O100001O0000000000000O1000000O2O0000000O100000000000000000000000000O2O1O1O001O1O00001Ooo50QPJ00000O10O10O1O1000O1000000OPP90QPG0000001O0000000000000001O_Sgd0\"}}, {\"image_id\": 101, \"category_id\": 2, \"bbox\": 
[745.4063720703125, 632.9774780273438, 87.7965087890625, 11.31964111328125], \"score\": 0.9999959468841553, \"association_id\": 3, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"mk`a02mg010001O00O1001O000000000O101O0000000000000000001O00000O10000000000000001O000000000000000000000000000000000000000000000001O00000O2O00001O001N100000000O1000000000000000RT`4\"}}, {\"image_id\": 101, \"category_id\": 2, \"bbox\": [478.27960205078125, 712.8129272460938, 110.25213623046875, 11.926513671875], \"score\": 0.9999948740005493, \"association_id\": 7, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"_VZ;2mg010000O100000000000000000000O10000000000000000000000O10000000000000000O100000000000000000000O10000000001O0000000000000000001O000000000000000000000000000000000000000000000001O00001O4L1O001O1O^i]:\"}}, {\"image_id\": 101, \"category_id\": 2, \"bbox\": [365.82989501953125, 675.89990234375, 122.65676879882812, 10.20672607421875], \"score\": 0.9999939203262329, \"association_id\": 4, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"WUf82ng00O10001O00000000000000000000000O10000000000000000000001O000000000000000000000O100000000000001O00000000000000000000000000O1000000000000000000000000000000000000000000000000000000000001O0000001O000000001O000000001O000O101Ogbf<\"}}, {\"image_id\": 101, \"category_id\": 2, \"bbox\": [895.3091430664062, 632.0670166015625, 80.7467041015625, 11.1436767578125], \"score\": 0.9999864101409912, \"association_id\": 5, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"mcPe03mg0000O101O000000000000000000001N1000000000000000000O100000000000000000000000000000000O10000001O000000001O0000O11O00001O00001O1N101O0000Q\\\\\\\\1\"}}, {\"image_id\": 101, \"category_id\": 2, \"bbox\": [809.0515747070312, 708.605712890625, 43.63409423828125, 7.9222412109375], \"score\": 0.9999474287033081, \"association_id\": 6, \"segmentation\": {\"size\": [768, 1024], \"counts\": 
\"WnPc02mg02O00000000000000001N1000000000000000000000000000000000000000000001O001O1O2NfiQ4\"}}, {\"image_id\": 101, \"category_id\": 2, \"bbox\": [261.29498291015625, 713.2484741210938, 42.331939697265625, 6.7291259765625], \"score\": 0.9998887777328491, \"association_id\": 2, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"[nU62ng01N1000000000000000000000001O0O10000000000000000000000000000001O00001O1O1Ndam`0\"}}, {\"image_id\": 101, \"category_id\": 1, \"bbox\": [402.5309143066406, 400.89794921875, 47.7598876953125, 129.6453857421875], \"score\": 0.9995330572128296, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"[f^96dg08\\\\Oe0L5H6M2K7I7L2M3@j0E5J3N3M3Mc0]O1O00000001N011N2O00001O1OO12M2N1O2L4G9J5M4L4I8\\\\Od0H7J402N6J7F9EdR^=\"}}, {\"image_id\": 101, \"category_id\": 2, \"bbox\": [686.3170166015625, 517.7073974609375, 60.29876708984375, 6.48223876953125], \"score\": 0.9988328814506531, \"association_id\": 1, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"XPY`01og000001O000O10000000000000000000000O10000000000000001O0000000000000000000000000000000000O1000001O1Ni_a6\"}}, {\"image_id\": 101, \"category_id\": 1, \"bbox\": [623.1304321289062, 398.551513671875, 53.8870849609375, 170.7537841796875], \"score\": 0.9983206391334534, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"oec>a1Zf0=F5K5J7K5J5K5L>A6J5L1O00L2O1001N4M4[\\\\OXLob0j3i\\\\ObLPc0c3k\\\\OaLRc0W4N3O1N3N0000001OO1001OO1N2N2VOj0K5F;J6J8I;G9H5K5J6K5J8I7I4M5K4L3K7HaiS8\"}}, {\"image_id\": 101, \"category_id\": 2, \"bbox\": [189.1905059814453, 579.740234375, 200.8250274658203, 15.08721923828125], \"score\": 0.9976913928985596, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": 
\"^je41ng02O0O1O2N100O10000000000000000000000000000000000000001O0000000000O2O00000000000000001O00O1000000000000O100000000O10000000000000000O100000000000000000000O1000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001O001N10001O0000001O1O001O0000001O0O2O00bmb?\"}}, {\"image_id\": 101, \"category_id\": 2, \"bbox\": [873.9302978515625, 578.1658935546875, 67.531494140625, 7.8365478515625], \"score\": 0.982315182685852, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"Sjcd03mg01O000000000000001O00000000000000000000000000000000000000000000000000000000000001O00O1001O000001O0000000000MomR2\"}}, {\"image_id\": 101, \"category_id\": 1, \"bbox\": [570.6818237304688, 619.7634887695312, 54.09234619140625, 100.5224609375], \"score\": 0.8809082508087158, \"association_id\": 7, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"ol\\\\=`1^f03N1N2O1N2N2N2L4_Oa0M3O100N2O1O1N2O1O10000O1000000O1001OO100000000O11O000000001O1O001O003M1O4_MV[Od1ge0L2N2N2N1O2N2MnbZ9\"}}, {\"image_id\": 101, \"category_id\": 1, \"bbox\": [43.8681755065918, 309.0262451171875, 206.08074951171875, 176.27224731445312], \"score\": 0.32904377579689026, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"nYQ1S4kc0:G5K3M101N100O10000O10O100O1O1O1O0O2000000O1000000O100O1O1Fb\\\\ORL^c0k3h\\\\OQLYc0m3k\\\\OPLVc0m3`0N2N2M3M3L4N2O1N2O1N2O1O1O1O1O100O100O1O100O1N2O1N2O1O1O1O100O1N2N2N2O1N200O1O1N2O100O1O100O10000O100000000000000O10001O01O000001O001O1O001OR\\\\OSNia0m1U^OUNla0j1P^OZNPb0e1P^O]NPb0c1o]O\\\\NRb0d1Q^OYNoa0g1S^OWNna0h1S^OWNna0h1R^OXNoa0g1R^OXNna0h1V2O001O0O10001N10001O000O101O001N2O001N101O000O2N1O1O1O2N101N101N1O2O0O101O00001O0000001O0O10001N1000001N101O000010O00000001O0001O01O0000001O001O1O0010O010O001O010O000000000001O001LUeUb0\"}}, {\"image_id\": 101, \"category_id\": 1, \"bbox\": [924.9247436523438, 406.4245910644531, 45.73699951171875, 213.69253540039062], \"score\": 
0.12256059795618057, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"`ege03fg0:K5N2L10N3MO2M4TYOAUf0g0aYOBXf0V1M2N1000N3N2N3N1N12O0O0OhNcYOP1Zf0QOjYOl0Vf0?0fNgYOm0a4lNn<8S_O<m3_OQ=3m^Oc0Q4[OS=1l^Od0o3^OT=LU_Oa0d3EX>7hAJX>5iAK^>NaA3_>N`A2[>7bAHb>2`AMi>F[A;_>LaA3Vc00N20O1O2MUmW1\"}}, {\"image_id\": 102, \"category_id\": 1, \"bbox\": [190.4459991455078, 750.2996826171875, 72.51481628417969, 156.256591796875], \"score\": 1.0, \"association_id\": 4, \"segmentation\": {\"size\": [1371, 2048], \"counts\": \"[le88bZ12eeN0b0OTX14VgNV1f0[OXV1Y1_iNmN_V1V1UiNUOjV1`2OO01O02N2N000O2N1O2O00O101[iN_LnU1e3hiNbLWV1Q4O2OO0100O0100O2O0O1O101N2N2M3N2M3N3M2WN`iNTObV1g0iiNPOZV1l0QjNjNSV1Q1RjNkNRV1k0ZjNoNjU1g0`jNSOfU1d0_2@fUQ[2\"}}, {\"image_id\": 102, \"category_id\": 1, \"bbox\": [1049.19677734375, 679.255615234375, 173.22509765625, 425.4708251953125], \"score\": 1.0, \"association_id\": 3, \"segmentation\": {\"size\": [1371, 2048], \"counts\": \"mQ\\\\\\\\1;ZZ1`0@9K4YMWOVkNm0\\\\T1D]kN?ZT1N`kN5[T12`kN2[T14`kNO[T17akNL[T18bkNKZT1:bkNG[T1`0^kND_T1b0WkNDgT1b0PkNCoT1V3N1O2O1O2N3L4SmNVKco0m4SPO^Kio0c4RPOdKjo0`4ooNgKno0\\\\4hoNoKTP1`7K3N3M2N2N3M4L4L4L3M4L4L6J4K3N2N001N10000O101O000O1000000O1O1O100O100O10000O10000N101O1N2N2N2N1O0001N1O2N100O1J5D>L3O2N3M2O1O1O101N1100O01O010O03M3M2N2O1N2000O10001O001O01N2O00000001N2N3M2O1N2N1O1O2N2N2N2N3L3N3M3mIcoNa1aP1XNhoNc1ZP1YNloNc1VP1[NnoN_1UP1`NooN[1TP1dNooNV1UP1hNQPOo0SP1POQPOk0RP1TOPPOg0TP1WOooNe0UP1XOooNb0UP1\\\\ORPO:TP1CUPO0PP1OUPOHQP16QPODUP1:moNBXP1;koN^OaP1:doNnN[Q1f0`oNeMdQ1Q2j3I5J7I7H;D:C:EaSeR1\"}}, {\"image_id\": 102, \"category_id\": 1, \"bbox\": [526.1403198242188, 740.4656372070312, 67.8597412109375, 107.65106201171875], \"score\": 1.0, \"association_id\": 5, \"segmentation\": {\"size\": [1371, 2048], \"counts\": \"nQaf0>c1EQW1`0ihNESW1c0fhN_OWW1g0bhN\\\\O]W1h0_hNZO`W1l0WhNWOhW1o0ngNWOPX1l1M2N2N3M2O2N2N1000000000000000O100O100000000O1000000000000O100O1O1N2L4L4L5J6L4J7J5K6J6I7D<I9EPell1\"}}, {\"image_id\": 102, 
\"category_id\": 1, \"bbox\": [759.8173217773438, 703.2431030273438, 302.36468505859375, 221.798828125], \"score\": 1.0, \"association_id\": 2, \"segmentation\": {\"size\": [1371, 2048], \"counts\": \"TPmo08^Z1:G9H7J7I7I7E:G:H9H4L4M3M2N2N2N2N2M4M4K3N2N2N1O2N1O2N2M3N2L7I7I5L3N2N2O0O2N2N4K5L4K3N2O1N2N100O1O1O1O1O1O1O1O100O100O1O010O10000O10O1000O0100O1O1O010O100O10O1000O100000O1000000O10000O1000O1000O1000000000000O100000000000000000000000000000000000000000000000000O1000000000001O0000000000000000000000000000000000000000000000000000000000000000000000000000001O0000001O00001O00001O001O001O001O001O00001O00000000O1000000000000O100O1O1O2M2O1O1O100O1O1O1O1N2N2N2N2O1O1O100O10000O10000O10000O100O1000000O1000001O000000000O2O0000001O00001O1O0O2O001O1N101O001N2N2O1K6J5H9XOi0G9I6E;K5L4I9E;Im^gY1\"}}, {\"image_id\": 102, \"category_id\": 2, \"bbox\": [987.4256591796875, 1069.4814453125, 202.650390625, 70.8720703125], \"score\": 1.0, \"association_id\": 3, \"segmentation\": {\"size\": [1371, 2048], \"counts\": \"bXeY16dZ11O2N101N1O1O2M2O1K5O1M3O1O100O10000O10000O1000000O10000O10000O1000000O10000O10001N100O100O2O00001N100O101O0O100O1O1O1O2O00O10O10000000O100000000000O02O0000000O100000000000000000O10001O000O101O01O002N2O2M3M100O1O2N3N0O1O1O01OO2O1O001O00000000001O000000000O10O10O1000O010O1O001O010O1O001M2H8N2O110O01O0O2M3O00100O0_OafNG`Y18cfNE^Y1:efNC\\\\Y1<b00O10O010O10O0100O010O100O1O010O100O100O1000O0101N100O1O2N101N3MR\\\\nS1\"}}, {\"image_id\": 102, \"category_id\": 1, \"bbox\": [1852.6259765625, 672.3858032226562, 108.1409912109375, 241.3536376953125], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [1371, 2048], \"counts\": 
\"i[d]23^Z1c0@:J4K4F:H8K5M100001N1M3N201O101N101OO100O1O2O2M6fiN]NRS1h1ilN\\\\NQS1j1llNYNnR1o1klNVNhR1Z2QmNmMcR1]2YmNfMaR1c2YmN`MbR1m2RmNVMkR1T3jlNPMSS1U3hlNnLWS1V3blNmL]S1Y3UlNQMiS1Q50001O0000000000000000000100O3N1N2N2N6I4M6J6I3N2M3L4TNPlNdLWT1\\\\1WkNVOhV1d0hiNjN_V1S1g1L5L4M2O1O100O2N2M3N2O1O1O2O0N2N3L4M3N5LO3M10O20N3M1N1O100O2O10O1O0O1N2O1O8FhQe3\"}}, {\"image_id\": 102, \"category_id\": 2, \"bbox\": [733.1256713867188, 882.95947265625, 281.59527587890625, 49.77032470703125], \"score\": 0.9999996423721313, \"association_id\": 2, \"segmentation\": {\"size\": [1371, 2048], \"counts\": \"[Znn01iZ12N100O2O0O100O100000000O1000000O10000000000O1000001N100000000O2O00000O10001O0O1000000O10000O2O00000000000O100000000001O001O0010O01O001O000000000000000000000000O10000O2N100O10000O10000O1O1O100O10000O10000O10000O100O1000000O1000000000000O10000000000000000000000000000000O1000000000000000001O0000000000001O00000000001O000000000000000O10001O0000000000O10000000O1000000000O01000O10O1000000O1O100O1O1O011O002M3N3L2O1N2N2O000O2O001N3N[_Y17]`fN2N2O1N1000O1000O10O1000000000000O100000000O1000001N101Nj`W[1\"}}, {\"image_id\": 102, \"category_id\": 1, \"bbox\": [1385.8134765625, 718.7628784179688, 78.8477783203125, 180.137939453125], \"score\": 0.9999932050704956, \"association_id\": 6, \"segmentation\": {\"size\": [1371, 2048], \"counts\": \"\\\\]Tj1d0QZ1=VOg0I;H4K8I>B001O000000001N1000000000001O01O0002N3M1O3M=C5K:F8iiNQLRU1n4N2N100O0010O1O011M2O2N3L6J3M3N2N2M2M3M3M4I6A?\\\\Oc0F;G:F9J7I6M3N3M4L4K4L4J5M6K7J2M5Jli_h0\"}}, {\"image_id\": 102, \"category_id\": 2, \"bbox\": [154.47633361816406, 901.3245849609375, 76.06277465820312, 14.109130859375], \"score\": 0.9999734163284302, \"association_id\": 4, \"segmentation\": {\"size\": [1371, 2048], \"counts\": \"X^h63hZ10O101O0O10000000000000O1000O100000000000000O1000O100000000000O10O1000000000O1000O10000000O1000O10000000001N10O10000000000001N10SaV\\\\2\"}}, {\"image_id\": 102, \"category_id\": 2, \"bbox\": [1857.3460693359375, 892.8594360351562, 
73.98876953125, 16.7293701171875], \"score\": 0.9997441172599792, \"association_id\": 1, \"segmentation\": {\"size\": [1371, 2048], \"counts\": \"k_h]25fZ11O0O2O000000001O0000000O1000000O10000O10000O10000000000001O000000000O2O0000000O2O001O000000001N10000000O100000O10O100001O001N10Z_V5\"}}, {\"image_id\": 102, \"category_id\": 2, \"bbox\": [1232.39111328125, 925.6837768554688, 250.1171875, 49.83203125], \"score\": 0.9980708956718445, \"association_id\": 0, \"segmentation\": {\"size\": [1371, 2048], \"counts\": \"PTic12iZ100000O10000O100000000000000O2O0000O1000000000000000000000000O1000000000000000000000000000000O010000000000000O1000000000000000000000000O10O100000O100000000O100000llj10TSUN000O101O0O100000O0100000O0100000O1000O0100000O10000PTP1MSloN1OO100000hZ11WeN0O2O00O1O101Onjki0\"}}, {\"image_id\": 102, \"category_id\": 2, \"bbox\": [504.1309814453125, 839.6655883789062, 53.51654052734375, 12.141845703125], \"score\": 0.995887815952301, \"association_id\": 5, \"segmentation\": {\"size\": [1371, 2048], \"counts\": \"UdWe03gZ11000001O000O100000000000000000000O100O10000000000000001O00O1001OO10000000000O1000000000001N10002Mg][n1\"}}, {\"image_id\": 102, \"category_id\": 2, \"bbox\": [1376.3536376953125, 887.8003540039062, 54.93212890625, 11.57659912109375], \"score\": 0.9817744493484497, \"association_id\": 0, \"segmentation\": {\"size\": [1371, 2048], \"counts\": \"j`li11jZ11N1O1000000O10001O000O1000000000000O1000000000000000000000000000000001O001O1O1O1OhWQj0\"}}, {\"image_id\": 102, \"category_id\": 2, \"bbox\": [1443.3541259765625, 809.143310546875, 54.069091796875, 28.3232421875], \"score\": 0.9692940711975098, \"association_id\": 6, \"segmentation\": {\"size\": [1371, 2048], \"counts\": \"ljhl13eZ13O1O1O100O1O1O1O100O010NDeeN<[Z1200000O10000000000000O10001O001O3M1O010O1O001O01O00000O101N2N`mPg0\"}}, {\"image_id\": 102, \"category_id\": 2, \"bbox\": [1466.462646484375, 873.8192138671875, 66.0819091796875, 29.9061279296875], \"score\": 0.5276219248771667, 
\"association_id\": 0, \"segmentation\": {\"size\": [1371, 2048], \"counts\": \"fZbm15dZ14N1MGbeN<\\\\Z10100O10O02O00001O000000000010O10O1O02OO02N001O0O10000O10O100O010O1O1O01000O101N1000001N100000000O10000O100O10aede0\"}}, {\"image_id\": 102, \"category_id\": 2, \"bbox\": [1473.6968994140625, 858.9769897460938, 224.591796875, 63.54351806640625], \"score\": 0.3927166163921356, \"association_id\": 0, \"segmentation\": {\"size\": [1371, 2048], \"counts\": \"bQmm15fZ10O2O0O2O0O20O00001O00000000000000O010O1O100O10000O01000O1000000001N100000000O100000O100000O10O10000O1O1O1O1000000O10001O0O110O001O100O1O001O000O2O0O1N3O00001O001O001O1O001O0O10001O000000001O00000000001O0000000000UO[fN`0eY1@]fN>dY1@]fN`0cY1_O_fN`0aY1@_fN`0aY1A]fN`0cY1@]fN`0cY1AZfNa0eY1<O00000010O00001O0000001O000000000000001O2N00001O000O2O001O001O00100O00100O2N4L2N1O2O0O001O00001O001O1O0O1M`eNKcZ12^eNMcZ1140idb`0\"}}, {\"image_id\": 102, \"category_id\": 1, \"bbox\": [1506.5185546875, 694.1224365234375, 34.5537109375, 183.88165283203125], \"score\": 0.3083517551422119, \"association_id\": 0, \"segmentation\": {\"size\": [1371, 2048], \"counts\": \"PWYo13\\\\Y1^1C;E<I6iN^NkhNl1bV1`1I700N2O100K53MI71O1OM35K2N3L3[LiiNXO2k10mNYV1IcjNc0FQOUV14fjNKmV16l1O1O2Id]We0\"}}, {\"image_id\": 102, \"category_id\": 2, \"bbox\": [1436.45849609375, 806.5186157226562, 65.5611572265625, 16.60089111328125], \"score\": 0.18615680932998657, \"association_id\": 0, \"segmentation\": {\"size\": [1371, 2048], \"counts\": \"Wkll15eZ11O100O1N2M300O1000000000000O1001OO100000000000000000000001O00001O001O00000000001O1O1N2O1NRbkf0\"}}, {\"image_id\": 102, \"category_id\": 2, \"bbox\": [1399.183349609375, 805.8217163085938, 106.5628662109375, 33.91253662109375], \"score\": 0.1752244383096695, \"association_id\": 0, \"segmentation\": {\"size\": [1371, 2048], \"counts\": \"fadj11_cQ22WgoM3M2O1O100O1O1O001O1O1O100000O0100O10O100000000000O100000000001O01O1heN_OTZ1f0N1O10O0001O0001N1000001N1O1]OjeN?ZZ1NfeNC]Z15ceNKgakf0\"}}, {\"image_id\": 102, 
\"category_id\": 2, \"bbox\": [1455.099365234375, 805.1170043945312, 45.7608642578125, 57.551513671875], \"score\": 0.0628526359796524, \"association_id\": 0, \"segmentation\": {\"size\": [1371, 2048], \"counts\": \"]kll11eZ19K1000O00O2O1O010000000M30000O100000000O1O11OO1O1keN]OSZ1j0K1O2N01O00001N2O1O1O0000000O2N1O3K5Mlllf0\"}}, {\"image_id\": 103, \"category_id\": 1, \"bbox\": [295.68011474609375, 231.90638732910156, 56.470184326171875, 36.52378845214844], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [480, 640], \"counts\": \"o\\\\\\\\44a>McA;W>:M3M3N2O0O2N100O2O0000000000000000000000001O000000000000000000000000000000000000000001O001O1N2O1N2N3KchV4\"}}, {\"image_id\": 103, \"category_id\": 1, \"bbox\": [491.330078125, 257.5763854980469, 75.57073974609375, 46.04791259765625], \"score\": 0.9999997615814209, \"association_id\": 2, \"segmentation\": {\"size\": [480, 640], \"counts\": \"ajW711=[>:I8H4N2N2O0O1O101N10000000O1000000000000000000001O0000000000000000000000000000001O0000000000000000000000000000000000O100000000O10000O100O1O1O3MmnQ1\"}}, {\"image_id\": 103, \"category_id\": 2, \"bbox\": [29.928363800048828, 343.7335205078125, 105.25825500488281, 12.552459716796875], \"score\": 0.9999996423721313, \"association_id\": 3, \"segmentation\": {\"size\": [480, 640], \"counts\": \"j\\\\>2m>2N2O00001O00000000000000001O000000000000000000001O0000000000001O000000000000000000000000000000000000001O000000000000000000000000000000000000000001O0000000000000001O000000000000000000000000000000000001O00000OW\\\\\\\\7\"}}, {\"image_id\": 103, \"category_id\": 2, \"bbox\": [410.9363098144531, 292.84283447265625, 92.97393798828125, 9.765899658203125], \"score\": 0.9999895095825195, \"association_id\": 2, \"segmentation\": {\"size\": [480, 640], \"counts\": \"U]Q62n>001O000000001O00000000000000000000000001O0000000000000001O000001O0000000000000000000001O00000000000000000000000000001O00000000000000001O00000000001O0001O000000001O1N2O1OdXR2\"}}, {\"image_id\": 103, 
\"category_id\": 2, \"bbox\": [266.523193359375, 261.8103332519531, 43.540130615234375, 6.369476318359375], \"score\": 0.9998517036437988, \"association_id\": 1, \"segmentation\": {\"size\": [480, 640], \"counts\": \"W[n31o>000000000O2O000000000000000000001O000000000000001O0000000001O00001O1O1OfYl4\"}}, {\"image_id\": 103, \"category_id\": 1, \"bbox\": [130.3877410888672, 265.86181640625, 140.5601348876953, 101.54678344726562], \"score\": 0.8984933495521545, \"association_id\": 3, \"segmentation\": {\"size\": [480, 640], \"counts\": \"jco15U>i0K3N1K6[Od0L5O0O1O1L4L4O101N1N2O00100O1N2O1O100O1000000O10000000000000000000001O0O2O00001O00000000000000001O00000000001O000000001O0000001O000000000O100000000000000000000O0100000000O1O100O10000O100O1O1FPDcMQ<]2PDbMP<]2RDaMo;^2:0000O100001O001O001O001O1O1O01N2N2O1N1O2O2L7I4L2O2N3M5H6L3N2M5Ine\\\\5\"}}, {\"image_id\": 104, \"category_id\": 1, \"bbox\": [546.9259033203125, 507.3783874511719, 69.33282470703125, 183.33505249023438], \"score\": 1.0, \"association_id\": 2, \"segmentation\": {\"size\": [768, 892], \"counts\": \"\\\\bQ=3ig09I3N1O2M3M3nYO_Obd0b0Y[OHIDic0e0U\\\\O=[OYO]d0<Q\\\\OW1mc0kNn[OX1Qd0lNf[OZ1Zd0V1O10O00^NcLb^O^3[a0gLa^OZ3]a0jL_^OY3`a0jLZ^OZ3da0jLS^O\\\\3la0iLk]O\\\\3Ub0jLb]OY3_b0S11O1O1O1O1\\\\O_]OgKcb0U4d]OdK`b0[4c000001O2N4L5K2N5K5K2N2N3N1N2N01WKd]Ol3]b0oKi]OQ4Wb0lKm]OR4Tb0jKP^OU4Tb0dKQ^OY4nb0M3M5K=CZ2fM:F8H;D5KjZ^6\"}}, {\"image_id\": 104, \"category_id\": 2, \"bbox\": [255.04876708984375, 683.5050048828125, 150.08035278320312, 19.703857421875], \"score\": 0.9999998807907104, \"association_id\": 5, \"segmentation\": {\"size\": [768, 892], \"counts\": \"deP64kg0101O00001O001O002N001O000000001O000000000000000000O1000000000O1000O1000000O10O10O1O100O1000O10000000000O10000000000000000O10001O000O1000000000000001O0000001O1O00001O00001O00010O00000000000001N100000000N2N20E_XO7eg000O2OIIeXO7[g0IeXO7[g0IeXO7[g0IeXO7[g0IeXO7bg0O1000000O1000000O1000000O10000O1000001N10\\\\jg;\"}}, {\"image_id\": 104, \"category_id\": 1, 
\"bbox\": [814.2061157226562, 526.9005126953125, 69.88714599609375, 161.28717041015625], \"score\": 0.9999998807907104, \"association_id\": 4, \"segmentation\": {\"size\": [768, 892], \"counts\": \"T]Xc0:^g09TNC[[OV1kc0l1TOmL[\\\\O0MZ3^c0o0J6K5K5N200O100000000000000000000000000000000000000000000000000000000000000000000000O100O10001O1O2M4SMW^O[OVb0lNd_Og0Pd0I^S9\"}}, {\"image_id\": 104, \"category_id\": 1, \"bbox\": [422.6926574707031, 481.88812255859375, 86.60324096679688, 223.73681640625], \"score\": 0.999998927116394, \"association_id\": 1, \"segmentation\": {\"size\": [768, 892], \"counts\": \"]en9b0[g04M2N3K4J6L4YLoNY@W1c?ROo_OW1P`0lNh_OZ1X`0iNk]O@e0o1`a0Hg]Og0Xb0a20O1O1N1010O01O001OO000O01N1M4M3ONNO1M6G93M3O200O100O100001O2N1O3M2N2N2N4L3M2N3M3M3M5K=C4L2N2N000001O1O2jIV_OR5n`0eJZ_OY5ja0K7eMV]OTOmb0KY^OMka0J_^O1na0YOf^O<na0fN`^OV1Yd0K5L3M3K5J8HkjS9\"}}, {\"image_id\": 104, \"category_id\": 2, \"bbox\": [388.86077880859375, 694.0181274414062, 97.54962158203125, 16.995361328125], \"score\": 0.9999980926513672, \"association_id\": 1, \"segmentation\": {\"size\": [768, 892], \"counts\": \"SVX92mg0101N1O1O100O100O1000000O10000000000O1000000O1000000001O001O00001O0000000000000O10000000O10000O0100000000O10000000000O100000000O01000O100O1000O01000000O10000O101OVZg9\"}}, {\"image_id\": 104, \"category_id\": 2, \"bbox\": [527.752197265625, 684.6319580078125, 76.3966064453125, 16.26763916015625], \"score\": 0.999993085861206, \"association_id\": 2, \"segmentation\": {\"size\": [768, 892], \"counts\": \"i]]<2mg02N100O2O000000000O100000000O1000000O10000O1000000000O010000O1O1000O100000000000000O101O000000001O0O101O0000000O1000000000O10O0100O1000000O10O2O002N_Zg6\"}}, {\"image_id\": 104, \"category_id\": 1, \"bbox\": [40.1811637878418, 393.8293151855469, 414.42315673828125, 277.9131164550781], \"score\": 0.9999315738677979, \"association_id\": 0, \"segmentation\": {\"size\": [768, 892], \"counts\": 
\"m^n05dg0`0D7L3L2N2O0O2O00000000001O0000000000000000000000001O000000000000000000000000000000000000000000000000000001O0000001O01O0001O00001O000000000000000001O00000000000001O010O00001O01O01O0001O0010O01O1O2N1O2N2N001O001O01O01O00001O0000000000001O00000000001O01O0001O001O010O1O1O010O001O01O01O00001O001O001O001O1O1O1O001O000010O00000010O0000000010O00000000000001O0001O00000001O0000001O1O010O1O001O001O00001O0001O01O00000000001O0000001O0001O010O010O100O100O1O10O01O010O00001O0000001O000O2O001O00001O00001O0000001O0000001O00000001O00000000000000000000001O000O10000O100O100O2[NUNR]Ol1kb0_Nl\\\\Ob1Qc0cNm\\\\O]1Pc0hNm\\\\OY1Sc0iNj\\\\OX1Uc0jNj\\\\OV1Uc0mNh\\\\OT1Xc0nNe\\\\OT1Yc0POc\\\\OQ1]c0QO`\\\\OQ1^c0VOZ\\\\Ol0fc0h1N2N2O0O2N1O2N1O2N101N1O101N2N1O2M3M3M7j]OWKi`0R5l^OUKo`0S5h^ORKU1WOe>i6XAYIe>j6ZAVId>m6ZATId>o6ZARIe>o6\\\\APIb>R7^AnHa>T7_AkH`>V7aAhH`>X7`AhH_>Y7T10O101O000O101O0000N2O1N2N2M3mMW_OTL2^Oj`0\\\\4X_ORLba0l3b^OaKF1ia0\\\\4e^O]KH4da0^4g^OXKK6`a0W4a_OfK``0Y4c_OdK^`0[4d1O10001O010O1O004M3L5L6I100O100O001N2O1N2M4L4L3M4L4L3N2M4L3M4K4H9J6J=_OPRP;\"}}, {\"image_id\": 104, \"category_id\": 2, \"bbox\": [662.513427734375, 661.0299072265625, 51.46978759765625, 11.33001708984375], \"score\": 0.999891996383667, \"association_id\": 3, \"segmentation\": {\"size\": [768, 892], \"counts\": \"kdb?1ng02O1N100000000O1000000000000O1000001O0000000O10000000000010O1O1O001O00000O1000001O0O101O0OUkW4\"}}, {\"image_id\": 104, \"category_id\": 1, \"bbox\": [677.640380859375, 517.4634399414062, 76.26007080078125, 148.22210693359375], \"score\": 0.9995219707489014, \"association_id\": 3, \"segmentation\": {\"size\": [768, 892], \"counts\": \"WXU`07ig01O001N2O;S[O>f`0LW^On0ha02h\\\\O>Xc0U201O0O100000000000000000000000000000000000000000000000000000000000000000000000000000000001N10000N3^L_]O8RO0cc0@o]O\\\\O`N8:f0RS\\\\3\"}}, {\"image_id\": 104, \"category_id\": 1, \"bbox\": [301.9971618652344, 378.0814514160156, 122.90087890625, 311.2752990722656], \"score\": 0.9994145631790161, 
\"association_id\": 5, \"segmentation\": {\"size\": [768, 892], \"counts\": \"Zfe7:]g0d0B8H5L3`_OeNU:`1gEeNU:^1hEeNV:]1fEgNX:[1eEgNY:[1eEgNY:[1fEfNX:]1fEdNX:_1fEbNY:`1dEbN\\\\:a1WBjMV1h0b<a3ZCbLf<_3WCcLi<^3UCcLk<_3PCdLo<^3mBeLS=]3gBgLY=[3cBgL]=[3^BhLa=\\\\3mASMS>P3[A_Md>e2QAcMn>g2c@^M\\\\?d5N1O000OOC>N1110N3K5H9M3M2L5L4M3O1O1O1N2O11O1O1O1O2O1N3M3M201N4N;D5KUMQAcLQ?U3\\\\AcLk>Q3[AmLk>i2\\\\AUMe>g2_AWMb>g2dATM]>k2jAoLV>P3mAmLT>R3nAlLS>S3PBjLP>V3TBfLm=]3TB^Lm=S3[@oKg1j0P>k2f@[LZ1e0S>e2PAiLj0=Z>e2SAlLd0:[>h2TAoLb04]>k2RARMc0N^>P3n@SMg0G^>U3j@WM]a0i2b^OZM]a0e2b^O]M`a0_2c^O^M`a0_2c^O_M_a0_2b^O_Maa0^2a^O`Maa0^2b^O\\\\Mda0b2R2J6K3K6I7G;G6J7G^an:\"}}, {\"image_id\": 104, \"category_id\": 2, \"bbox\": [791.644287109375, 681.1090698242188, 40.87042236328125, 10.6322021484375], \"score\": 0.9965110421180725, \"association_id\": 4, \"segmentation\": {\"size\": [768, 892], \"counts\": \"^Udb03mg00O10000O101O00000O100000000000000O101O00000000000001O1O001O1O1O1O00000OaR]1\"}}, {\"image_id\": 104, \"category_id\": 1, \"bbox\": [603.6599731445312, 508.70904541015625, 52.2158203125, 140.533935546875], \"score\": 0.06230230629444122, \"association_id\": 0, \"segmentation\": {\"size\": [768, 892], \"counts\": \"TPZ>1kg0:I3M2O2M3N1O2N6J7]ZOKmb09V\\\\Of0dc0m1N4L2N1O1O2N00001O000000001O000000000000O10000000000O1]M[\\\\Ok0ec0cN\\\\]On0eb0hNX_O@l`0<_3M4ITPb5\"}}, {\"image_id\": 105, \"category_id\": 1, \"bbox\": [203.55447387695312, 806.3938598632812, 202.29702758789062, 113.60516357421875], \"score\": 1.0, \"association_id\": 2, \"segmentation\": {\"size\": [1145, 1565], \"counts\": \"akP81dS14O2jlNKeR17RmN2lR10RmN1nR1>N2O000O101O00000O2O2N1N2O0O2O1O1O1O1O001O1O001N101O2N2N8H7I2N2N3M3M3M3M9G2N2N2N5K1O1O1O001O0000001O0000000000000000000000O10000O10000O100O1O1O100O10000000000000000000000O10001O0000001O0000001O0O100000000O1000010O001O001O00100O1O1O1O001O1O1O001O001O010O001O1O001O0010O0000010O01O01O01O00100O001O001O1O001O1O001O1O1O2N1O001O1N101O1O001N2O1N2N4JkS_X1\"}}, {\"image_id\": 105, 
\"category_id\": 1, \"bbox\": [807.986083984375, 765.9952392578125, 291.4744873046875, 261.8128662109375], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [1145, 1565], \"counts\": \"ZbZl0=YS19G9H7J6I8H9F=C`1aN6K3L4M3M2N2N2N3M4L4L4K3N3M2N2N2N2N2N2M3H8I7L3N3M3M2N2N3M2L4L4M4N1O1O101N00100O00100O1O001O10O0100O010O0100O010O010O01O000010O0100O01000O10O100000O10O100000O100O100O1O100O010O10000000000O10000000000000000000O100000O10000000O10001O0000000000000O10000O10000O1000000O1000001O0000001N100000000000000O1000001O00001O001O00001O000000001O000000000000000000001O00000000000000000000000000000000001O0000000001O01O0000001O001O001O001O1O00001O01O01O01O010O00100O0100O1O2O1N100O1O1O100O2O1O1N2O10O01N010O1O1O2N2N4L4L2N1O2N1O1O1O2N4K6K9G6I5L1N2O1N2N3N2M8H:Fb0^O7I4L4L3M3M3L6I:lNonNoNoUY`0\"}}, {\"image_id\": 105, \"category_id\": 2, \"bbox\": [753.6386108398438, 968.2631225585938, 328.35028076171875, 75.00640869140625], \"score\": 0.9999998807907104, \"association_id\": 1, \"segmentation\": {\"size\": [1145, 1565], \"counts\": \"VS\\\\j06bS14L1O2N101N100O100O2N1O1O1O100O1O100O2O0O1N2L4N2O1O100O10000O100O100O1O1O1O100O10000O100O1O1N2O1N2O100O100000000001O00000000MZNSnNf1mQ1ZNSnNf1QR1O001O4_NimNX1bR1J2N1O1O001O00001O001O1O1O1O1O001O001O000000000000001O00000000000001O000000000O100000000000000O100000000O10000000000O100000000O100000000O101O00000000000O1000000000000000000O100000000000000000000O100000000000000000000000000O1000000000000000000O1000000000000000000O1000000000000000000O10O1000000000000000O1000000000000000O10000000000000O1000000000000000O10000000000000000O1000001O000000000O1000001O000000000O2O000000001N10001O00001O000O101O0000000O2O000O2O00001N1000001O0O1O2N100O2O0O101N100O2N2N2NT\\\\Wa0\"}}, {\"image_id\": 105, \"category_id\": 1, \"bbox\": [165.20565795898438, 817.5594482421875, 110.22665405273438, 177.30096435546875], \"score\": 0.9999998807907104, \"association_id\": 3, \"segmentation\": {\"size\": [1145, 1565], \"counts\": 
\"dmm51gS15K3L2N2N3N100O2N100nLCfRO?Xm0DeRO>Ym0EeRO<Ym0GeRO9Ym0KeRO6Xm0MfRO4Xm00fROOZm04dROMXm07fROIXm0<dROF[m0<cROD^m0?_RO@cm0d0URO\\\\Onm0h0kQOYOWn0j0eQOWOZn0m0cQORO\\\\n0S1`QOmN`n0V1\\\\QOkNcn0Y1[QOgNcn0]1[QOcNdn0`1YQOaNfn0c1VQO]Nkn0e1RQO\\\\Nmn0h1PQOYNPo0i1lPOYNSo0l1dPOXN]o0R3OO2N2O1O1N2N2M3N2M3M3N2O1O1O2N1O1001O00011N1O1O2N2N2N2N2N2N2O1NgN[PO_Ndo0]1fPO_NXo0^1oPO_NPo0]O]POd1g0oNjn0[OgPO^1c0VOjn0TOjPO`1?\\\\O\\\\o0`0gPO^O\\\\o0=gPOCao02bPONao0GhPO:Zo0^OlPOb0UQ1:F1O10O1O0O2N2N2N2N3N3L2N1O1N2O1O2M3M4LUVX]1\"}}, {\"image_id\": 105, \"category_id\": 2, \"bbox\": [240.8037872314453, 905.0767211914062, 150.7874298095703, 19.72967529296875], \"score\": 0.9999969005584717, \"association_id\": 2, \"segmentation\": {\"size\": [1145, 1565], \"counts\": \"i]h86_S16M101O00001O00000010O000010O0001O1O1O001O1O1O2OO0001O001O00000000000000O100O1O1O1O1O1000000O100000000000000000000000000000000000000000000O100000000000O10000000000000O100000000000000000O01000000000000001O0O100000000000000000000O101O0000000O1000001O000O100000000000000O10001O0O101OejoX1\"}}, {\"image_id\": 105, \"category_id\": 1, \"bbox\": [57.399044036865234, 801.6732788085938, 90.22593688964844, 40.95782470703125], \"score\": 0.9999723434448242, \"association_id\": 4, \"segmentation\": {\"size\": [1145, 1565], \"counts\": \"dlS2>XS14N1N2O2O0O1M3O1O1O1O100O100O2O0O100O10000O100O10000O1000000000O10000O1O10000O100O100000000000000O100001O01O000100O1O00100O0010O0000001O00000O2O0H[mN\\\\OeR1b0_mNZOcR1d0;N1O1O1O2O0O1O2N101N3M]h`a1\"}}, {\"image_id\": 105, \"category_id\": 2, \"bbox\": [124.99240112304688, 982.3408203125, 128.03070068359375, 13.49517822265625], \"score\": 0.999911904335022, \"association_id\": 3, \"segmentation\": {\"size\": [1145, 1565], \"counts\": \"Vo^41gS1101O0O10000000000000001O0000000O1000000000000000000000000000000000000O1001O0000000000001O0000001O00000000000000O100000O0100O1000000O1000O100000000000000O1000000O1000000O10000O101O`VQ_1\"}}, {\"image_id\": 105, \"category_id\": 2, \"bbox\": 
[56.23470687866211, 836.9912109375, 103.39060974121094, 9.5213623046875], \"score\": 0.998526930809021, \"association_id\": 4, \"segmentation\": {\"size\": [1145, 1565], \"counts\": \"Tbc21gS11001O00cY<1\\\\fC00000O10000O2O00000O100000000000000000000000000001O01O01O001O00001O00001O00O10000O100O10000O1000000000000000000000O2O00001O1OPmXa1\"}}, {\"image_id\": 105, \"category_id\": 2, \"bbox\": [1289.0208740234375, 897.4515380859375, 206.2684326171875, 10.97845458984375], \"score\": 0.8833901882171631, \"association_id\": 0, \"segmentation\": {\"size\": [1145, 1565], \"counts\": \"ci]^11gS11000000000000000000000000000000000000000000001Oncn10R\\\\QN1O000O101O00000O1000000000000000000O1000000000000001O00000000000000000000000000000000000000001O00000000000000000000000000000000000000000000000000001O000000000000001O0OV`^2\"}}, {\"image_id\": 105, \"category_id\": 2, \"bbox\": [1297.0484619140625, 896.1871948242188, 173.73974609375, 8.37451171875], \"score\": 0.4953448474407196, \"association_id\": 0, \"segmentation\": {\"size\": [1145, 1565], \"counts\": \"ci]^11gS110000000000O10000000000000000000000001O0000000O10neX20RZgM0000000000001O000000000000000000000000000000000000000001O0000000000_Wk3\"}}, {\"image_id\": 105, \"category_id\": 2, \"bbox\": [140.44024658203125, 897.7755126953125, 128.346435546875, 92.70660400390625], \"score\": 0.4855527877807617, \"association_id\": 0, \"segmentation\": {\"size\": [1145, 1565], \"counts\": \"jYW71`h]11U[cN3O1O2N1O1N10001O0000000100O001N2O1N2Nd_Z]1\"}}, {\"image_id\": 105, \"category_id\": 1, \"bbox\": [231.509765625, 801.5303955078125, 104.66000366210938, 127.1729736328125], \"score\": 0.053315117955207825, \"association_id\": 0, \"segmentation\": {\"size\": [1145, 1565], \"counts\": \"Tda82`02cR1OZmN:_R1a0O1O0O1O2O0O2N3M3N1O1O1N100O3M3M3N7I2N3M3L6Kd0[O6K2N1O1N2O2N1N2O2N00001O000000O10000O10000O1O100O2O0O1O1O1000000000001N1000O11O000000000000O11O0O101N100O100000000000000001O01O010O3Jm]mZ1\"}}, {\"image_id\": 105, \"category_id\": 1, 
\"bbox\": [32.94330978393555, 808.0944213867188, 200.13143920898438, 34.80303955078125], \"score\": 0.051965221762657166, \"association_id\": 0, \"segmentation\": {\"size\": [1145, 1565], \"counts\": \"nlS21]S1>I4O2N1O2O0O1O2N100O1O10000O100O100O1000000000000000000000O100000000O100000000O10000O100O100O100001O00000000000000001O000000000UmNTOiR1l0WmNTOiR1l0WmNTOiR1i0WmNXO0OiR1h0XmNXO00hR1h0[mNXOeR1h06N2O2N1O1000000000000000001O00002N002N001O00mlNDjR1;VmNEjR1<TmNEJOQS1?PmNCONQS1d0olN\\\\OQS1f0001MnlN]ORS19QmNGM3US12llNK^S13501OTdl`1\"}}, {\"image_id\": 106, \"category_id\": 2, \"bbox\": [106.31080627441406, 462.2701721191406, 104.8701171875, 49.812469482421875], \"score\": 1.0, \"association_id\": 4, \"segmentation\": {\"size\": [639, 735], \"counts\": \"aWS26ic01O1N4M1O1O1N101O001O0010O01O001O1O5K10O01O1O0010O01O000010O0001O001O00001O00000O100O2N1M300O10001N10000000000O100000000000000001O001O1O001O1O1O001O1O001O2N001O001O1O1O001O1N10001N100O100O1O1N2L4N3N1O2L4OYTW:\"}}, {\"image_id\": 106, \"category_id\": 2, \"bbox\": [578.063720703125, 509.48114013671875, 151.8924560546875, 43.4344482421875], \"score\": 0.9999998807907104, \"association_id\": 0, \"segmentation\": {\"size\": [639, 735], \"counts\": \"YjY;4jc010001O001N2O001O001O1O00001O001O00001O001O0O20O000001O000O101O0O100O100O2O0O100O100O2N1000000O101O00000000001O00000001O00000000001O00001O0000001O0001O0001O0001O01O000001O01O0001O01O01O01O000001O000001O000001O0001O000000000000000000O10000000000O100O100O010O10O10O10O100N201O0O100O2N2O001N100O2O1N2O1N^c2\"}}, {\"image_id\": 106, \"category_id\": 2, \"bbox\": [289.08642578125, 504.9707336425781, 134.00201416015625, 69.56552124023438], \"score\": 0.9999997615814209, \"association_id\": 3, \"segmentation\": {\"size\": [639, 735], \"counts\": 
\"d_e54jc02N2N2O1N2O0O2O0O2O1O001O2N2N1O0O2O2N1O1N101N1O2O0O101N10000O1N2N2N200O2M2O101N1000000O100000001O000000001O000001O0000000001O05K4M3L100O0100O010O001O001O01O0000001O01O01O0000001O1O010O001O001O001O1O1O1O001N2O2N001N101O1O001O0O100O100O2O0O1O101N101N100O2N2N2N2N2NmhR6\"}}, {\"image_id\": 106, \"category_id\": 1, \"bbox\": [17.754064559936523, 139.79803466796875, 84.771240234375, 274.00091552734375], \"score\": 0.9999997615814209, \"association_id\": 1, \"segmentation\": {\"size\": [639, 735], \"counts\": \"Z^;c0Wc0;F8H8I5K5M4h^OVNT`0P2Y_OeN_`0c2J3L4K6K6K3M6l@WMg<^5B5J4ZOnHRD\\\\7d;d0L5K4M3O1O100O10O01000O1O1N2M3M3N2M3N20000O11O00001O1O1O1O2O0O00010O0101O0O2N1O2M4M>A6I5J5K6J5mNhBPKf=i4k0I7[Od0C>C>C=oL`@X1\\\\a0E9K6J7GS`^<\"}}, {\"image_id\": 106, \"category_id\": 1, \"bbox\": [181.75350952148438, 113.03704071044922, 110.84176635742188, 331.02386474609375], \"score\": 0.9999997615814209, \"association_id\": 5, \"segmentation\": {\"size\": [639, 735], \"counts\": \"jZd3f0ob0`0C;]Oc0^Ob0_Oa0J5L5K5i_OaLe?o3L3M4L4M4J5J6K5K4K3O1O1O100N3M3N110O01O0100O10000O0O10001000100O1N3M3M3M2O1O2O2`NUJfDP6T;RJkDV6l:lISEZ6g0VIY8a0PGd6DUICOh99PGP8o8QHPGo7o8SHPGn7o8SHPGm7P9SHPGm7P9THnFm7R9SHmFo7R9RHlFo7T9QHlFP8S9PHnFY2JT2X9cKnFh1>b2d8fKnFd1d0d2^8hKnFa1j0d2Y8kKkF_1Q1c2U8PLgF[1Z1b2o7ULdFW1d1`2h7YLdFT1k1_2b7XLgFS1o1c2[7VLjFQ1Q2h2V7SLmFc0`2S1\\\\MYO^9gNjFd0f2o0fM]Om9D_Hd0YN\\\\OX90cH=dNWOf8?jHD[LE_22b8=jHDYOLW85eHIYOOX80dHMXO1PhT9\"}}, {\"image_id\": 106, \"category_id\": 1, \"bbox\": [518.4848022460938, 147.9786376953125, 173.863525390625, 367.88458251953125], \"score\": 0.9999995231628418, \"association_id\": 2, \"segmentation\": {\"size\": [639, 735], \"counts\": 
\"dfV:3jc03N2m\\\\OI[b09`]OM^b03a]O0]b00d]ON^b0Oe]OOaa0Oc^O0ONd`0n0]_OROYa0T1d05L4M3M3L4M3L5L3N[1dN=e_O`L_?V4I7J8F9F7J5K4L5_O`0M4K3M4H8F:M3M3O1O001O00001N1O1N2O1N2M3N210O10O0100O1M4K6K5J5J7SOYGXFm8f9VGTFP9h9h0L5K5K2M3N2N1O2M3N3M3M1O2N1O1O1N2O1O1O1O1O1O001O01O0000000O1000001O000O101M3J6TOl0J;F9I5K5J4L4I7]Oc0G9H7_Ob0_O`0F;I6L6J7G8G9K5L4L3O4K3N3L3N2N2M4M3L3N3L3M3N3L4L6J7J4K4M3M3M3M4L2M3N1O1N2N2N2N2N3L6I9Ea0XO`]Q1\"}}, {\"image_id\": 106, \"category_id\": 2, \"bbox\": [29.41819190979004, 401.6120300292969, 71.9155502319336, 28.450958251953125], \"score\": 0.9999986886978149, \"association_id\": 1, \"segmentation\": {\"size\": [639, 735], \"counts\": \"STc05ic01O2O000O1O2L3O1O2N100000001O000000010O0001O0000100O000001N1O1O1O100O2O0000000O100000000O100000000000000001O001O1O1O1O1N2O1O001O1N2N1O2O1M3NUW[<\"}}, {\"image_id\": 106, \"category_id\": 2, \"bbox\": [408.5962829589844, 452.7307434082031, 150.35604858398438, 52.660064697265625], \"score\": 0.9999984502792358, \"association_id\": 0, \"segmentation\": {\"size\": [639, 735], \"counts\": \"cYP81jc05O1O001O1d\\\\OMjb04T]OOkb00U]O1kb0OS]O3lb0Oo\\\\O4Rc0=O0000010O0000010O1O0010O01O0010O0001O00001O0000001O000O2O00000O1000001O000000001O0IkNd]OU1[b0lNf]OS1Yb0oNf]OQ1Zb0oNf]OQ1Zb0oNf]OQ1Zb0oNf]OQ1Zb0oNg]OP1Yb0POg]OP1Yb0POg]OP1Yb0POg]OP1Yb0POg]OP1Yb0POg]OP1Yb0POg]OP1Yb0POg]OP1Yb0POh]Oo0Xb0QOh]Oo0Xb0QOh]Oo0Xb0QOi]On0Wb0ROi]On0Wb0ROi]On0Wb0ROi]Oo0Vb0QOj]Oo0Vb0QOk]On0Ub0ROk]On0Ub0ROk]On0Ub0ROk]On0Ub0SOk]Ol0Ub0UOj]Ol0Ub0TOk]Ol0Ub0TOl]Ok0Tb0VOk]Oj0Ub0VOk]Oj0Ub0WOk]Oh0Ub0YOj]Og0Vb0YOj]Og0Vb0YOj]Oh0Ub0YOk]Of0Vb0YOj]Og0Vb0YOk]Of0Ub0ZOk]Of0Ub0ZOl]Oe0Tb0[Ol]Oe0Tb0\\\\Ok]Od0Ub0\\\\Ok]Od0Ub0]Oj]Oc0Vb0]Oj]Od0Ub0\\\\Ok]Od0Ub0]Oj]Oc0Vb0]Oj]Oc0Wb0]Oi]Ob0Wb0^Oi]Ob0Wb0^Oi]Ob0Wb0SOd]O85e0[b0ZOe]Of0[b0ZOf]Oe0[b0ZOe]Of0[b0ZOe]Of0[b0ZOf]Oe0Zb0[Of]Oe0Zb0[Of]Oe0[b0ZOe]Of0[b0[Oe]Od0[b0\\\\Oe]Od0\\\\b0[Od]Oe0\\\\b0[Oe]Oc0]b0\\\\Oc]Od0^b0[Oc]Od0]b0\\\\Oc]Od0^b0ZOd]Od0^b0[Ob]Oe0mb0O0O100O2O1N2O0O2O001N2O000O2O1O1NmNM^^O0ca00V101O0000Ul_3\"}}, 
{\"image_id\": 106, \"category_id\": 2, \"bbox\": [201.40603637695312, 434.5367126464844, 104.10513305664062, 48.24139404296875], \"score\": 0.9999983310699463, \"association_id\": 5, \"segmentation\": {\"size\": [639, 735], \"counts\": \"XPQ42lc02N1O2O000O100O10000O1000000O2O0O1O1O1^OCb]O>\\\\b0Da]O>^b0D`]O=`b0C_]O>bb0A^]O?Qc00O101O000O100O101M21O0000001O000000O101O0000000000000000O10000000O100000O10O10000O1O1000000O101N1O1O10001N100O100O2O000O1O1001O000N2BX]ODjb03f0N1M4H[\\\\O0]\\\\^8\"}}, {\"image_id\": 106, \"category_id\": 1, \"bbox\": [339.4096984863281, 137.2775421142578, 207.20138549804688, 316.14947509765625], \"score\": 0.9999977350234985, \"association_id\": 6, \"segmentation\": {\"size\": [639, 735], \"counts\": \"fb[71c03bb05S]O2d>HWEl0f:VOXEm0e:TOZEo0c:SO\\\\En0c:SO[EP1a:TO\\\\EP1_:WO[El0`:_OWEc0g:ASEc0l:]OVE>m:CSE8P;IRE3P;MQE0Q;1PEKQ;6PEHQ;8QEDQ;=aCkNYOi0V=<`C]OfN:k=8^CJXN3Y>5\\\\CS1d<POXCR1h<QOSCP1m<ROPCo0Q=QOmBQ1R=nNnBU1mNbMc=W1aCY1eNjMe=l0fCW2S<eMPDh2c;UM_DT3Z;YLTCMc1o3T;SL\\\\CL_1V4Q;oK^CM`1V4o:aLoDb3_9ZK]FV13b3^9[K[FU15d3\\\\9[K[FT17d3[9ZK\\\\FT16f3Z9YK]FS17h3Y9WK^FQ1F`NNY5m9WK_Fm0HcNK[5m9VK`Fi0IhNIY5m9XKaFe0JjNHZ5l9WKcFNUONf0DDZ5n9WKcFJYONc0GC[5m9VKfFGYOOb0J@[5o9VKfFC\\\\O0`0M^OZ5P:WKeFA_O0>L@]5m9WKeF_OB0n0Y5k8YKdF^ODNn0\\\\5h8ZKfFZOFOm0\\\\5g8]KdFXOHOm0\\\\5g8_KbFUOLNm0^5d8`KcFSOLNO18^5Z9bK^FTO2JN28^5Z9cKYFXO6EO27_5[9cKTF\\\\O:@025a5]9bKnEAT1\\\\O@b5^9\\\\LSGQN]Oe5`9[LRGPN\\\\Og5b9YLRGoMYOl5e9VLQGoMXOm5f9TL\\\\FnM241d5a9ZL[FoM254_5_9^LZFoM159[5\\\\9aLYFPN15>V5X9eLYFPN15d0P5R9lLWFQN14m0h4k8SMVFRN23n0g4j8UMRFVN3OQ1f4j8WMmEYN5KS1f4k8jMQF`MQ1i4n8iMnE`Mo0j4T9WNjFk1V9UNhFm1X9SNfFn1[9RNcFP2^9oM`FQ2b9PN\\\\Fk1k9TNTFd1V:eKWEm1c0T2a:iKTEP2:n1i;PNWDm1m;RNSDm1o;SNPDk1T<SNkCm1W<RNiCl1Z<SNeCj1_<WN`Cd1f<[NYCb1k<^NTC_1P=bNmB^1U=cNgB`1Y=b210O010O001N5K4aCZI_;l6UDcIa;e7F7I6K5L3N2M2N3M2M4N101OO100O1O100O101`GXEf7i:XHfEZ7]:aHnEV7V:dHQFW7_;L3M3M2M10100N2O1O1O1O11O100O100O0010O1O2M2O1O1O2N`0kIYBQ5n=_JbBZ5\\\\>D`0TLe@W2m`0Cc0]O9G:C=BdVk3\"}}, {\"image_id\": 106, 
\"category_id\": 1, \"bbox\": [77.54034423828125, 120.60894775390625, 120.91410827636719, 349.6290588378906], \"score\": 0.9999957084655762, \"association_id\": 4, \"segmentation\": {\"size\": [639, 735], \"counts\": \"Yhg17_c0k0oNe0G<D5L4L6m^O`M\\\\`0W3J5l_OcL^?c3U@kLe?l3Jb0_O6lCUKh8T5nFWKg8Q5PGVKk8P5aFcK[9b4WFkKe9]4oEiKo9d4_EaKa:o6O1N3L4L3M4M2N1N0100011O00001OM1N2O3J6K5K6J5K5M3O1O1O001001O10O1O10iEeGj8\\\\8cFaHR9_7dFSIT9o6fFZIT9i801O001O0010O0010O0001O011M3N2N2N1O2N2[FdFc8`9VGiFd8]9RGiFl8W:M3M3L4_N]1M3000004L4K4K6YKlBbN0k0i0K^<e0RCYNOQ1l0GW<l0iCeNS?W1m1L5L7J6J9C8EbTe:\"}}, {\"image_id\": 106, \"category_id\": 1, \"bbox\": [250.01556396484375, 157.2789306640625, 155.19882202148438, 352.42047119140625], \"score\": 0.9999823570251465, \"association_id\": 3, \"segmentation\": {\"size\": [639, 735], \"counts\": \"b\\\\T5i0cb0n0C<D4L4L4L3_Oa0I6I8J5L4[@WLS?j3k@XLU?i3j@WLU?k3j@ULU?l3j@ULU?m3h@UL`0Kn=_4mAnKHK^=W4jBkLm<T3SC[M_<e2aC`MZ<`2fCdMV<]2hCgMU<Z2kCfMU<Z2mCaMV<_2kC]MX<d2iCTM]<l2dClLc<T3^CdLg<h3kBPL_=n491O1jD]Jo7e5lGkJi7W5RHQKk7P5PHWKm7j4oGYKQ8i4kGZKS8i4hGYKY8h4dGZKZ8k4aGVK\\\\8R5[GSK`8Z5QGlJj8e5]FhJ_9n7J6J5N300O01000O0100O010O1O1gGSG_5n8aJSGkMIT7U9PKSGfM0W7n8SKSGeM0V7n8TKcGm4\\\\8SKeGn4Y8RKgGP5W8PKjGP5U8PKmGo4R8PKSHm4l7SKRHo4n7QKQHP5o7oJRHQ5n7oJRHQ5n7oJQHS5n7nJQHR5P8nJnGS5R8oJkGR5U8TKdGn4[8WK^Gk4c8VKnFW5R9k2001YHmF`4S9_KnFa4R9_KnF`4V9SIhFd11Y5\\\\9m28hMjFXIZ9`6QGYIQ9c6ZGSIi8i6aGlHe8P7dGdHd8U7Y2F9H6L4L3M3I7F:fKSB]2R>`MSB[2Q>aMTBm1FQMX>n0YBm1DoLU>6eA=h0Y2CPMQ>2nA?e0Z2`>`MgA0nNh1^?SNiANROl1X?oM`BP2j?<FK2O1O1O001N10000O2N2N4L3M1O2L3L5^Ob0H8JiZf6\"}}, {\"image_id\": 106, \"category_id\": 2, \"bbox\": [685.5648803710938, 371.0058288574219, 40.54083251953125, 16.180694580078125], \"score\": 0.9987316727638245, \"association_id\": 7, \"segmentation\": {\"size\": [639, 735], \"counts\": \"YR]=2mc01N1O1000001OO0101O000O100000000000000010O0001O010O001O00001O0001O010O001O1OmW5\"}}, {\"image_id\": 106, \"category_id\": 2, \"bbox\": [593.286376953125, 459.9601135253906, 
108.58819580078125, 38.852081298828125], \"score\": 0.9968787431716919, \"association_id\": 0, \"segmentation\": {\"size\": [639, 735], \"counts\": \"bkU<9dc03N1O1M3M3N2M3M4O0O1001O00000000001O00001O0000001O0000001O0001O0000001O01O00010O0000001O0001O0001O000000001O01O0001O1N10001O0O101O0O2N1O2N101N2N102L3Nj[e0\"}}, {\"image_id\": 106, \"category_id\": 2, \"bbox\": [123.30032348632812, 400.3969421386719, 106.21865844726562, 40.34173583984375], \"score\": 0.9915879368782043, \"association_id\": 0, \"segmentation\": {\"size\": [639, 735], \"counts\": \"[m`24hc03H9M200O1O1O2OO0\\\\Oh\\\\Oc0Xc0]Oh\\\\Oc0Xc0]Oh\\\\Ob0\\\\c0LaW`0H`h_O>J3N1N2N2O1O1O1O1000000000O1000O1001O0O10000000000000000000000O10001O00O10O10000O10000O10000Em\\\\OJSc05n\\\\OKRc05o\\\\OIRc06o\\\\OJQc05P]OJQc06P]OHQc08o\\\\OHQc07Q]OGQc08;O3M4J`cl9\"}}, {\"image_id\": 106, \"category_id\": 2, \"bbox\": [46.222373962402344, 368.456787109375, 56.521141052246094, 36.257659912109375], \"score\": 0.960213303565979, \"association_id\": 0, \"segmentation\": {\"size\": [639, 735], \"counts\": \"Tid18dc06]\\\\OC]c0c0L4L1000000000000O10O2O002M3M3N2L`PZ<\"}}, {\"image_id\": 106, \"category_id\": 1, \"bbox\": [30.39198875427246, 126.57569122314453, 231.75022888183594, 322.44830322265625], \"score\": 0.9439724087715149, \"association_id\": 0, \"segmentation\": {\"size\": [639, 735], \"counts\": 
\"Yfi1`0dc0J6Ik\\\\28`bMa0M2N1O1O100S@SOg<o0UCYO\\\\MLh>l0kCKS<6lCLVOGT:<hFMnNOX:2PGQOYM:\\\\1g0\\\\:KbG[OjMS1f:mNXDG]37]M_1a?`N\\\\@d1c?]NZ@g1e?XNY@k1f?UNY@m1g?QNi_O7Dj1Za0mMl^OQ2ca0J5K5G9E;J6N2N2O1N3M3KZKKkE1V:n0PElNQ;\\\\1_CkNc=[1VBhN];CmDg1CgN^;FmDd1DfN];HnDc1FdN[;KnDb1FbN\\\\;NmD`1GbNZ;0oD_1FaNZ;2oD^1F_N[;4oD^1E]N];5nD^1F[N];7mD_1EYN_;8lD_1EYN_;8lD_1FVNU1MZ7=jGa1GSN7M_Nb0h9MkGa1f1ZO^6UOkGc1i0RNiMZ1c9QOkGc1g0WNaM]1m9hNjGe1f0[N\\\\M\\\\1T:dNiGf1F`NbNl1P:lMhGj1A[1h8jLeGm1SOmMVO_3S:eLdGQ2QOi1[9TLdGT2POi1]9RLaGX2POg1`9oK`G\\\\2nNf1c9lK^Ga2mNd1g9iK\\\\Gc2mNc1d;]N[Dc1f;TMSD^O7^3g;PMYD]O1b3g;\\\\LfCKh04Jd3Z;RLXD9Y10UO`3^;ZLRD5_1NQO`3_;`LPD1c1LnNT3o;oL`C0f1KlNS3P<RM^C0g1IlNR3S<TMZC1h1HlNo2U<XMXC0g1HoNk2U<]MUC0h1GPOg2W<`MRC2g1GROb2X<eMoB2g1GDm1i;ZNlB2g1FLe1d;bNiB3h1EN^1g;gNfB5e1F4R1h;nNdB:T42o9NR501?@0010O4J6J8G6KcSe:\"}}, {\"image_id\": 106, \"category_id\": 1, \"bbox\": [676.5221557617188, 157.51425170898438, 50.94091796875, 235.14263916015625], \"score\": 0.9281381368637085, \"association_id\": 7, \"segmentation\": {\"size\": [639, 735], \"counts\": \"alW=l02[Oab0X1J8H4K6L4K4L3TBQNg9Q2PFYNn9i1fEcNX:`1_EhN_:\\\\1WEkNh:U2nCUNR<R2eCQNZ<R2bCoM^<V2ZCmMf<V2UCkMl<d401OO1O100O1N2O1O1000000001O001O2O0001N2N2N1O1O3TLoBd0S=SOVCl0j<ROTCP1m<oNVCP1j<iN`CS1b<fNhCT1\\\\<cNeDEaM^OVX5\"}}, {\"image_id\": 106, \"category_id\": 2, \"bbox\": [484.2050476074219, 440.1424560546875, 98.79800415039062, 50.220611572265625], \"score\": 0.882262110710144, \"association_id\": 6, \"segmentation\": {\"size\": [639, 735], \"counts\": \"nf_95ic01O1N2O1O1O1O0100O0011O1_\\\\OE\\\\c0`0O01O0Ie\\\\OJ\\\\c03g\\\\OLZc06b\\\\OL]c04b\\\\OM_c0:X]OBma0>S^OBna0>Q^OBoa0>Q^OBPb0>c]ODNN_b0>c]ODNN_b0?b]OCON_b0`0a]OBOO`b0?a]OBOO`b0?a]OB0N_b0`0b]OAON`b0b0a]O_Ohb0a0Y]O^Ogb0b0:1J^Oo\\\\Oc0Pc0^On\\\\Oc0Rc0^Om\\\\Ob0Rc060001O001N1000000000001O000000000001O01O01O1O1O2N0010O01O001O1O1O000000001O01O0001O010O001O1O1O001O00g\\\\O_OUc0`0j\\\\OAVc0`0h\\\\OAYc0c01^Of\\\\OM1:ec0H\\\\\\\\ONfc00ado2\"}}, {\"image_id\": 106, 
\"category_id\": 2, \"bbox\": [193.0583953857422, 368.0992126464844, 43.625457763671875, 13.11492919921875], \"score\": 0.8690177798271179, \"association_id\": 0, \"segmentation\": {\"size\": [639, 735], \"counts\": \"bih35ic02N1O100000001O000000001O000000000001O000001O01O00000001O000001O000OcPl9\"}}, {\"image_id\": 106, \"category_id\": 2, \"bbox\": [326.1557922363281, 474.78387451171875, 89.30572509765625, 34.461212158203125], \"score\": 0.731048047542572, \"association_id\": 0, \"segmentation\": {\"size\": [639, 735], \"counts\": \"lSl6=ac04M7I3M1O0000000001O01O00010O0001O0001O01O0000001O001O00000000000000000O100000000O00100O1O2O000O100000001O001N101O1N2O2M3HY\\\\O1_^X6\"}}, {\"image_id\": 106, \"category_id\": 2, \"bbox\": [69.01560974121094, 369.6012268066406, 34.788230895996094, 26.88818359375], \"score\": 0.6481780409812927, \"association_id\": 0, \"segmentation\": {\"size\": [639, 735], \"counts\": \"Tid18ec05L8H3M10000O10000001O0O2O1N102M101N2Na\\\\Y<\"}}, {\"image_id\": 106, \"category_id\": 2, \"bbox\": [623.8387451171875, 375.014404296875, 79.63372802734375, 32.5247802734375], \"score\": 0.5700833201408386, \"association_id\": 2, \"segmentation\": {\"size\": [639, 735], \"counts\": \"hXX<112gc05K5M2L4N101L301000O1000000000000001O00000000]Om\\\\O:Tc0El\\\\O;Tc0Dn\\\\O;Rc0En\\\\O;Sc0Bo\\\\O>Yc00001O01O01O00010O00001O01O01O00001O000000000000O10000O10000000000000O2O00001N101O001N100O2N2Nebc0\"}}, {\"image_id\": 106, \"category_id\": 1, \"bbox\": [682.1752319335938, 235.60328674316406, 42.3983154296875, 140.1981658935547], \"score\": 0.5172083973884583, \"association_id\": 0, \"segmentation\": {\"size\": [639, 735], \"counts\": \"gi[=7^c0=[Oc0J7L3nMlNUAW1g>mNWAT1f>QOWAP1i>@d@c0[?A`@b0_?A\\\\@b0f?^OV@d0h?]OW@d0h?^OU@c0l?i1OO0010D;010000000000000100O1OcMa@:c?B]@?W`0mNh_OS1\\\\`0hNe_OX1]`0T13MM3002cM__OX1Qa0QN`_Oe1WS7\"}}, {\"image_id\": 106, \"category_id\": 2, \"bbox\": [434.358642578125, 444.3570861816406, 178.78411865234375, 55.6702880859375], \"score\": 
0.38936394453048706, \"association_id\": 0, \"segmentation\": {\"size\": [639, 735], \"counts\": \"WY_8a0Vc0:M2N2O0000001O00001O00001N10001O000000001O000HmNd]OS1[b0nNf]OQ1Yb0POg]OP1Xb0QOi]On0Wb0ROi]On0Vb0SOj]Om0Vb0SOk]Ol0Ub0TOk]Ol0Ub0TOk]Ol0Tb0UOm]Oj0Sb0VOm]Oj0Sb0VOm]Ok0Rb0UOn]Oj0Sb0VOn]Oi0Rb0WOn]Oi0Rb0WOn]Oi0Rb0WOo]Oh0Qb0XOo]Oh0Pb0YOP^Og0Pb0YOP^Og0Pb0YOP^Og0Pb0YOP^Og0Pb0YOQ^Of0oa0ZOQ^Of0oa0ZOQ^Of0oa0ZOQ^Of0oa0ZOQ^Of0oa0ZOR^Oe0na0\\\\OQ^Od0na0]OR^Oc0na0]OR^Oc0na0]OR^Oc0oa0\\\\OQ^Od0oa0\\\\OQ^Od0oa0\\\\OR^Od0na0[OR^Oe0na0[OR^Oe0oa0ZOQ^Of0oa0[OP^Oe0Qb0ZOP^Oe0Sb0XOm]Oh0Sb0XOm]Oh0Sb0XOn]Og0Rb0YOn]Og0Sb0XOm]Oh0Sb0XOm]Oh0Tb0WOl]Oi0Ub0VOk]Oj0Ub0WOj]Oi0Vb0WOi]Oj0Xb0VOg]Oj0Zb0UOg]Oj0Yb0VOg]Oj0Yb0VOg]Oj0Yb0VOg]Oj0Yb0VOg]Oj0gb0000001O00000000001N1000000O2O001N2O0O101O1N2O1OBEY]O:fb0HY]O8fb0I[]O5_b0GW]O0O5;3_b0HV]O9<NXb0K\\\\]O0O7=NTb0I`]Oc0=CRb0a0n]O_ORb0a0o]O]OQb0d0P^O[OPb0f0d00000001O01O1O10O0001O001O0001O01O00001N1Jg\\\\OD\\\\c0:f\\\\OD[c09c\\\\OHbc07^\\\\OI70Rc06Q]OLnb03R]OOmb00R]O2Pc0JQ]O7[c0000001O0010O01O1N4L[lk2\"}}, {\"image_id\": 106, \"category_id\": 2, \"bbox\": [509.5771789550781, 449.3981628417969, 190.77835083007812, 50.129852294921875], \"score\": 0.36893782019615173, \"association_id\": 0, \"segmentation\": {\"size\": [639, 735], \"counts\": \"_V`:3kc02O0001N10ZOO^]O1bb01e01IL]\\\\O8bc03HHh\\\\O:Vc0Hh\\\\O9Xc0Gh\\\\O:Wc07O1001O01O01O0000010O00000001O01000O100O00100O010001O00O04YOg\\\\O`0_c0M3M4K5KbZh0<odWO5O1N2O0N2O1N101O100000000001O000000001O01O0001O00001O01O0001O0001O01O000000010O00010O00001O00000000001O00001O0000001O0O101N2O001N10001N101N1O2O0O2Nl[e0\"}}, {\"image_id\": 106, \"category_id\": 1, \"bbox\": [250.89939880371094, 149.28895568847656, 408.6229248046875, 330.79144287109375], \"score\": 0.13943123817443848, \"association_id\": 0, \"segmentation\": {\"size\": [639, 735], \"counts\": \"kl_7;bc09F9H6J4M3OK6I7G:H8E[[;1hdD6K6K5L4L6ML7G9Dfj\\\\18XUcN3M2N3M3M2N3M2N2N2N2N2N2N101O0O2O001O0O2O1O1O1O1O1O00O2L4I7K:H8H5K5K5JTdo3\"}}, {\"image_id\": 106, 
\"category_id\": 1, \"bbox\": [111.69092559814453, 127.4715805053711, 217.29522705078125, 293.8143310546875], \"score\": 0.08109432458877563, \"association_id\": 0, \"segmentation\": {\"size\": [639, 735], \"counts\": \"Ude22lc02O000O100O100O1O11O1O1O001N101O000000TD6c3JlKk0P4UOeKX1Y4hN]Kc1b4^NVKj1i4VNnJS2S5kMiJZ2_;O1O1N2N2N1N3M4K;E:F6JmJYOXGa0`8NcGC]8l0V58g^OlNY?T1g@mN[?P1e@oNb?j0`@TOg?f0n1JciQ1_OjVnN:H:I5J3N1O001O1O1N2O01O[_OjNa>U1`AmN^>S1bAnN]>Q1dAQOZ>o0fASOX>l0iAUOW>j0iAWOW>g0jAZOW>c0iA^OY>?hAA[>7jAIY>0kA0Ra00000001O000000000O100O1O1O1O1O1O1O12M3L6F]Yj8\"}}, {\"image_id\": 106, \"category_id\": 2, \"bbox\": [538.9344482421875, 363.0436096191406, 162.35247802734375, 65.46157836914062], \"score\": 0.05124688893556595, \"association_id\": 0, \"segmentation\": {\"size\": [639, 735], \"counts\": \"P[a:4ic06L00O2O2M2N1O^jc13lT\\\\NMk\\\\Of0nb09KPOX]OQ1gb04L4O1M3O]Oe]OD[b0;g]ODYb0;i]ODWb0;j]ODWb0<j]OCVb0<k]ODUb0<l]OCTb0=l]OBUb0>l]OAUb0>m]O_OUb0`0f0O0O2O103L2N0010O01O1O10O01O000010O0001O01O000000001O0000000000000000O100000000O1000000O2O000O2Mbff0\"}}, {\"image_id\": 107, \"category_id\": 1, \"bbox\": [386.4233703613281, 86.48715209960938, 71.324462890625, 204.65179443359375], \"score\": 0.9999997615814209, \"association_id\": 0, \"segmentation\": {\"size\": [384, 592], \"counts\": \"`Tc47b;=E8J5K6J5L3UFdNh8f1QGfNb8c1UGfNd8_2L6K4Ki0VO=F5J5L2M4L3M2M4M4L2O1O3M5K3M4L3M2N001O01O001O2N1O1nNYJiKh5R4dJeK^5V4jJfKX5V4nJfKT5V4SKdKP5W4`1L5J6J5L5J4M3L4L5L5K3N1O2O2L4M3L4N4L4TNYFi0U;YO>@h^b1\"}}, {\"image_id\": 107, \"category_id\": 1, \"bbox\": [326.6506042480469, 65.31743621826172, 52.297576904296875, 137.6842041015625], \"score\": 0.9999997615814209, \"association_id\": 0, \"segmentation\": {\"size\": [384, 592], \"counts\": \"Ygj3U1i:3M2N1O010O1O3M200N2N2_FbN_8`1]GgN_8\\\\1ZGlNa8^2I6L2M2J6N2N2O1O10001O001O1000O4K8G4L3I7K5K5M3L5K4K6K5\\\\Od0J5ZObEDe:6f0KnPb2\"}}, {\"image_id\": 107, \"category_id\": 1, \"bbox\": [20.037065505981445, 129.33677673339844, 82.1885757446289, 
254.66322326660156], \"score\": 0.9999997615814209, \"association_id\": 0, \"segmentation\": {\"size\": [384, 592], \"counts\": \"Ub9>`;<D8\\\\FTOd7X1jGVOo7d2I4N1N4L4J6K6K4L5L4L3L5L8G5J6I8H5K4D=H9I:H`0@6K5L1M3N1O2O00000O10000001O0O1000000O1XOQL^IP4[6aLZIa3a6R1Jb0@7H7A`0I7H5I8K4H8H8K3L5J7M4kN_GeNg8m0mGfN[8R1kGgN\\\\8T1gGgN_8W1X1M1O3N4K6I8F7I^eh5\"}}, {\"image_id\": 107, \"category_id\": 1, \"bbox\": [148.1110076904297, 61.24943161010742, 74.36386108398438, 170.22215270996094], \"score\": 0.9999996423721313, \"association_id\": 0, \"segmentation\": {\"size\": [384, 592], \"counts\": \"Tog12j07WON`:a0cELZ:o0N2N2N2O000O2O3L5L4L1N2N4L4L4ZG]Mh7n2eG]MY8X3M2M101O1N2N3L3N2N2N=UOc0K100000O011L3bN^1I8O1jLeGe2n8M02N5gG\\\\Mm6f2hHeMU7e3N2N2N1NWOfHnLZ7P3nHjLR7T3SIjLk6V3XIgLh6Y3[IeLd6[3_IcLa6[3S1O2N1M3N4K6E;G7N4J5L5L4J6L6J?@=Cl[[4\"}}, {\"image_id\": 107, \"category_id\": 1, \"bbox\": [216.979736328125, 69.38666534423828, 36.61567687988281, 140.47052001953125], \"score\": 0.999998927116394, \"association_id\": 0, \"segmentation\": {\"size\": [384, 592], \"counts\": \"b_a26b;d0C5Lf0XO5[F_Nc8m2\\\\O;ZOc0N0O10O1M3K6G9I7L4N2O2N2010O00O3M1N2L5AhFnM]9n1=L2M4N202UNUF`0G9o:VOUEi0U;G[\\\\o3\"}}, {\"image_id\": 107, \"category_id\": 1, \"bbox\": [61.15206527709961, 73.57051849365234, 68.95866394042969, 164.73800659179688], \"score\": 0.9999680519104004, \"association_id\": 0, \"segmentation\": {\"size\": [384, 592], \"counts\": \"i[g0e0Y;2L3M3M5M2N001O101O101O1O3M2N1O1O3L3M2N3iFYNP8i1PHWNn7k1QHUNm7l1VHPNf7U2o05K5mFaMa8b2WGhMf8Q3H4cGbLP8_3nGbLR8_371O=QH[Li6j3oH]Lm6]4N2N1O0000oN[IgLg6T3dIdL^6W3hIdLe6n2dIhLe6P3W1N2N2O1N2O1M3WOh0M4N2N3O2N1O3M3N2M2O0N4M4K4L6J9EoS]5\"}}, {\"image_id\": 108, \"category_id\": 1, \"bbox\": [0.0, 188.77391052246094, 345.8164978027344, 191.00318908691406], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [768, 1024], \"counts\": 
\"`n0d0Yg09I4L3XYOnN]f0]1N1d[OfNRN0`c0]1Y^OTOfa0m0W^OWOfa0n0T^OUOka0o0o]OTOoa0o0m]OTORb0n0g]OXOWb0k0d]OXO\\\\b0o0P]OeMM_1Qc0o0o\\\\OfML\\\\1Uc0o0m\\\\OhMK[1Wc0n0l\\\\O^OTc0b0k\\\\O@Sc0a0n\\\\O]OSc0c0m\\\\O\\\\OTc0d0l\\\\O[OUc0e0k\\\\OhMKW1[c0Q1j\\\\OhMKU1]c0S1i\\\\OfMLR1_c0X1e\\\\OeMNo0`c0\\\\1c\\\\ObM0n0`c0`1a\\\\O_M4k0]c0g1o\\\\OUNSc0k1o\\\\OQNSc0o1o\\\\OmMSc0S2n\\\\OkMSc0U2o\\\\OgMSc0Y2n\\\\OeMSc0[2o\\\\OaMSc0_2W10O100000O100000O10O10O1000O01000O01000000000000000001O000O100000000000000000O1000000000000001O00001O00001O00001O001O001O1O1O1O001O1O001O001O1O1O001O1O1O1O1O1O1O1O001O1O1O1O1O2N2N2N1O1O1O1O001O00001O00001O001O1O1O001O1O1O1O001O00001O0000001O1O001O1O9G1O2N1O2N1O1O1O101N3M3M100O1O01O010O000010O000001O00000000001O00000000001O00001O00001O00001O00001O000000001O0001O000000001O01O01O00001O0001O00001O00010O1O00001O00001O01O000001O000001O01O00001O001O1O1O1O1O1O001O1O001O100O1O1O100O100O001O1O1O1O1N101O0O101O000O101O00001O00001O001O001N10001N100O2N100O1O1O2O0O100O2O0O2N1O2M3M3J6J5N3N1O2FjZOfMXe0d1gZOiNde0U1^ZOgNee0W1`ZOaNfe0Z1c0L5L3N2N2N2M3N2N2N2M4Lg_n?\"}}, {\"image_id\": 108, \"category_id\": 1, \"bbox\": [388.2453918457031, 156.0906524658203, 147.26943969726562, 291.11114501953125], \"score\": 1.0, \"association_id\": 3, \"segmentation\": {\"size\": [768, 1024], \"counts\": 
\"UeU92lg06K6I3N2N2M3M2N2O2N1000001O0O1O1O1M3M3O1O000O1O1O1O01N3MPMTOP_Oi0Ra0]Oj^Oa0Xa0Be^O>[a0Cc^O>^a0B`^O>aa0C^^O;ea0D[^O;fa0FY^O9ha0HW^O9ga0IW^O9ga0IU^O:ja0GT^O<ka0ES^O<ma0EQ^O<Pb0Cn]O>Sb0Bj]O`0Wb0@f]Ob0[b0]Od]Oe0\\\\b0[Od]Oe0[b0\\\\Od]Oe0\\\\b0[Oc]Od0_b0]O]]Od0eb0\\\\OZ]Od0gb0]OX]Oc0hb0^OV]Od0hb0^OV]Od0gb0_OV]Oc0ib0@R]Od0lb0^OR]Oc0nb0_Oo\\\\Ob0Pc0Bj\\\\O`0Sc0W2iMnK_@U4_?TLY@n3e?XLV@i3i?ZLS@h3m?[Ln_Of3S`0]Lg_Oe3Z`0]Lc_Oc3^`0`L^_O`3d`0bLY_O^3h`0fLR_O\\\\3n`0hLg^O^3Za0c11N2N2N2N2O1O1O1N2N2N2O1O100O1O1O1O2O000O2O002N7iJd]O\\\\4Wc0I2M2O_\\\\O^Leb0a3Z]OdLbb0\\\\3^]OhL_b0`4M2O0O00001O2N2M2O0O2O3L2O1OTNU^ObMla0P2b^OPN^a0f1l^O[NSa0a1Q_OQO]`0k0g_OTOY`0i0k_OVOU`0g0m_OYOU`0b0o_O^OT`0;P@ER`05S@Jn?0Z@Mj?K^@0i?E`@6jc0K3M3M3M4L4L4L3M2N3L8H7I5Kej_;\"}}, {\"image_id\": 108, \"category_id\": 2, \"bbox\": [167.98764038085938, 346.1837158203125, 225.04183959960938, 54.568511962890625], \"score\": 0.9999997615814209, \"association_id\": 1, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"gko36gg04KG`XO<^g04K401O0O101N100O1O2O0000000O1000001O0O100000000000000001O01O00000001O0001O00000001O0001O000010O0000001O01O0001O0001O000000000000001O0001O01O0000000000001O000000000001O0001O0000000000000000000001O000000000000000000000001O0000000000000O100000000O10000000000O100000000000000000O10000000000000001O0000000000001O000000000000000000001O0000000O2O000000001N100000001O0000001O00000O10001O000O10001O000O101N1000000O2O0O2N101O0O10001N1O2M4LUdl>\"}}, {\"image_id\": 108, \"category_id\": 2, \"bbox\": [325.03863525390625, 303.435791015625, 63.9400634765625, 20.855377197265625], \"score\": 0.9999980926513672, \"association_id\": 3, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"aQd71ng02O1O2N1N4M3M4L1O0000000001O01O000000000001O00000000000010O00O1000001O000000001O000O101O0000000O2O00000000000O2O00000O2O0O2NXfl>\"}}, {\"image_id\": 108, \"category_id\": 2, \"bbox\": [544.23974609375, 396.6148986816406, 246.33563232421875, 52.414764404296875], \"score\": 0.9999938011169434, 
\"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"eTd=1og01O001O1O001O001O2N000010O000001O001O000010O000000000001O0001O00000010O0000000000001O0000000001O0001O00000000001O01O000001O0001O0001O01O01O001O01O0000010O000001O01O00001O010O00010O0000001O00010O0000001O01O01O000000010O0000001O01O0001O000010O0000010O000001O01O0000001O0001O0001O0001O0001O0001O0001O0001O01O000001O01O00000001O0001O000000000010O0000000000000000000000001O0000000O10001O0O100000001N101NYje5\"}}, {\"image_id\": 108, \"category_id\": 2, \"bbox\": [400.8140869140625, 429.23040771484375, 450.17376708984375, 101.541015625], \"score\": 0.9999464750289917, \"association_id\": 2, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"mmU:1og0001N101O1O00001O0000000000000000001O0000000000000000001O0001O0001O00001O0001O0000000001O01O000000000001O01O000001O0001O0000001O01O000001O00000000000000000000001O000000000000000000000000001O0O101OBOnXO0Pg03PYOLPg04PYOLPg05=0000000000000eXOKkf05UYOKlf04SYONlf02TYONlf02TYONmf01SYOOmf01SYOOmf02RYOOmf01SYOOmf01SYOOnf00RYO1mf0ORYO2nf0MSYO4Sg0JgXO7Yg0JeXO7Zg0KdXO6\\\\g0KbXO7]g0600000001N1000001O01O100O1O010O01O010O001O01O0001O01O001O001O010O00001O0001O01O00000001O01O0001O01O01O00001O01O000001O0001O0000010O00010O000001O0001O000001O01O0010O010O0001O01O01O0000001O01O01O000010O0001O01O01O0000001O00001O100O001O001O001O1O001O001N3NWP?0ho@1O2O0O2O1O0000001O000O2O00001O001O0000001O0000000000001O00000010O000001O0000000010O00001O0001O01O0001O01O0001O0001O0010O01O10O010O000010O000000010O00000100O010O1O10O0001O01O0000001O01O0001O0000010O000001O01O00001O00010O00001O00001O01O0001O0000000000001N100000001O0000001N2O0N3NhWT4\"}}, {\"image_id\": 108, \"category_id\": 1, \"bbox\": [753.0447998046875, 328.45269775390625, 28.7042236328125, 86.9974365234375], \"score\": 0.9813358783721924, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"fRea0]1`f04M3M201N011XZOYNTe0h1eZOaNYe0W2K101N100O10000001O9G3M3M3M1O002N1O2N2N3FoTe5\"}}, 
{\"image_id\": 108, \"category_id\": 2, \"bbox\": [158.38540649414062, 560.163330078125, 57.03153991699219, 23.46435546875], \"score\": 0.979993462562561, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"eQg38eg05M101N10001O00001O001O00010O001O00010O01O010O001O001O00001O01O00O2O01O01N11O01O001O001O001O001O1O001O0O2O1O2Mmmnb0\"}}, {\"image_id\": 108, \"category_id\": 2, \"bbox\": [833.382080078125, 445.1294250488281, 100.59173583984375, 24.083648681640625], \"score\": 0.9682335257530212, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"oUgc02ng00O2O1O00001O000000010O00000000010O0000001O01O0000010O0000000010O000001O000001O01O000001O01O0001O000001O0001O0001O0000010O0000001O01O00010O000001O0000001O00ca]2\"}}, {\"image_id\": 108, \"category_id\": 1, \"bbox\": [527.2059326171875, 197.38589477539062, 42.06689453125, 162.841552734375], \"score\": 0.89564049243927, \"association_id\": 2, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"RP]<;P`1FgWO;DFnXO8Pg0LoXO4nf00mXO4jf02^YOEie0V1ZZOgN_e0a1>1O2i[O^Noa0c1P^O_NZNLVc0f1`^O_NVN2Vc0_1b^ObNWN0Vc0^1c^OPO`a0R1]^OaNQb0a1m]O_NSb0j1a]OYN^b0h1d]OWN[b0i1f]OUN[b0m1d]OQN]b0P2d]OmM]b0T2d]OgM_b0Z2c1100U\\\\OgMma0[2P^OfMPb0X2m1O2O00mZOkM[d0h0W[O;>nN\\\\d0b0`[O84UO_d0?f[O<ETOi0Ofb0=S]O>\\\\OTOl02gb07^]O6`f0A__f:\"}}, {\"image_id\": 108, \"category_id\": 1, \"bbox\": [525.9775390625, 292.7801513671875, 39.019287109375, 107.40164184570312], \"score\": 0.8670139312744141, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"Zj`<9bg07L4L5L4L3nYOYOfd0l0Q[O[Omd0m1N2N1O10O10N210O0O20O000gNY[OYOkd0`0\\\\[O[O`d0EeZOn0P1ZO[d0GgZOm0R1YO]d0=`ZOBU1N^d07[ZOH8NPg04c0M10WXO0cg0?JMdeh:\"}}, {\"image_id\": 108, \"category_id\": 1, \"bbox\": [817.2685546875, 372.19207763671875, 30.73114013671875, 75.054443359375], \"score\": 0.35617828369140625, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": 
\"h]Wc01ag0c0G4I8L3M3N2L4L4K5L4H8N200O100O1001O00000O2N1C=J7ROjYOJ\\\\f00P]T4\"}}, {\"image_id\": 108, \"category_id\": 1, \"bbox\": [777.7946166992188, 281.9329528808594, 39.9332275390625, 101.35214233398438], \"score\": 0.3316429555416107, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"ViWb0151bg09K6J7Id0^O2O1N3N3M1O1O002N001O2N4L2N1O4L1O1O1O8H1O1O0000FeZOoM[e0P2jZOkMWe0U2:00O10000O1L4K5J7H\\\\Wj4\"}}, {\"image_id\": 108, \"category_id\": 2, \"bbox\": [486.1359558105469, 403.8760986328125, 327.1097717285156, 84.16946411132812], \"score\": 0.3199433386325836, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"Uf];3lg02O0O101O0000000000000O1000000001O0000001O0001O00000001O0001O000000001O000000000001O001O010O0000AKTYO5jf0MUYO3kf0MUYO3kf0NTYO2lf0NUYO1kf0OUYO1kf00TYO0lf00TYO0lf00TYO0lf00UYOOkf02TYONlf02TYONlf02TYONlf02TYONlf03SYOMmf03SYOMmf03SYOMmf03SYOMnf02RYONnf03QYOMof03QYOMPg03oXOMRg03mXOMSg04lXOLTg04mXOLSg04lXOLTg04lXOLTg05jXOMVg02jXONVg02jXOOVg01iXOOWg01hXO0Xg01gXO0Yg00eXO1[g0OeXO2Zg0;O000002N1O100O0010O0010O0000010O001O01O01O01O01O0001O000000000001O000001O000000010O00000bN_O^[Oa0ad0_O_[Oa0ad0_O_[Oa0`d0A_[O?ad0A_[O?ad0A_[O?bd0A^[O>bd0B^[O?ad0A_[O?ad0B_[O=ad0C_[O=bd0B^[O?ad0A`[O>`d0B`[O>`d0B`[O>`d0B`[O?_d0Aa[O?`d0@a[O?_d0Aa[O?_d0B`[O>`d0Ba[O=_d0Ca[O=_d0Cb[O<^d0Db[O=]d0Cc[O=]d0Cd[O<\\\\d0Dd[O<\\\\d0Ec[O;]d0Ed[O;[d0Ee[O;[d0Ee[O;[d0Ef[O:[d0Fd[O:\\\\d0Fd[O:\\\\d0Gd[O8\\\\d0Hd[O8]d0Hb[O8^d0Hc[O7]d0Jb[O7]d0Ic[O7^d0Hc[O7]d0Ic[O6^d0Ka[O5_d0Kb[O4_d0Kb[O4^d0Ld[O2\\\\d0Nf[OO[d01f[OM\\\\d03c[OL^d04g1000010O00000001O00010O00001O000001O0000001O0000001O01O0001O0010O001O01O00000000001O00001O00000001O01O000001O1O00001O001O01O01O0001O000001O000000000010O0001O000001O01O00000001O000000000000010O0000000O10000000000000001O001O00000O2O000000Wbc5\"}}, {\"image_id\": 108, \"category_id\": 2, \"bbox\": [780.0050048828125, 414.39544677734375, 38.95947265625, 8.4371337890625], \"score\": 0.2918998599052429, \"association_id\": 0, 
\"segmentation\": {\"size\": [768, 1024], \"counts\": \"od[b01og0001O0000001O00001O000000001O01O0000010O00000000001O000000001O0000mjj4\"}}, {\"image_id\": 108, \"category_id\": 1, \"bbox\": [466.4383544921875, 318.44525146484375, 75.8419189453125, 107.78970336914062], \"score\": 0.1374628096818924, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"Vk_;<dg07H7I3L5SZOoNbd0U1\\\\[OlNZd02iZOU1j0jN\\\\d02iZOV1j0iN\\\\d02iZOV1h0jN`d0OgZOZ1g0gNbd00fZOZ1f0gNdd0c1W[O_Njd0f1hZObNXe0S20`MfZOZ2be0N2N3M3M1O1O1O001O1O4L1N5L6K1N3M3M2N4L3M2N3M4L4L6J2M10Pc`;\"}}, {\"image_id\": 108, \"category_id\": 1, \"bbox\": [8.646862983703613, 177.7847442626953, 275.1508483886719, 401.66290283203125], \"score\": 0.06774753332138062, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"en6j0l4h4h7gKWE4nNg4f;`KlDo6Q;WIdDQ7Y;TI_DR7_;QI\\\\DR7c;QIVDU7h;nHmC[7R<Z2O1O1O100O10000O1000000000000O10000000000000000000[FYDQ7g;nH`Dl6`;TIbDj6^;UIeDi6Z;XIfDh6Z;XIgDg6Y;YIgDg6Y;XIiDg6W;YIiDg6W;YIjDf6V;ZIjDf6V;ZIkDe6T;\\\\IlDd6T;\\\\ImDc6S;]ImDc6S;]ImDc6R;^IoDa6Q;_IoDa6Q;_IoDa6Q;_IPE`6o:aIQE_6o:bIPE^6P;bIPE^6o:cIQE]6o:dIPE\\\\6P;dIPE\\\\6P;dIQE[6n:fIREZ6n:gIQEY6n:hIREX6n:hIREX6n:hISEW6m:iITEV6l:jIUEU6k:kIUEU6k:kIVET6j:lIWES6i:mIXER6h:nIXER6h:oIXEP6h:PJYEo5h:oI[Eo5e:QJ]Em5c:SJ_Ek5b:TJ`Ej5`:VJcEg5]:XJfEf5[:YJfEf5Z:ZJgEe5Y:[JhEc5Y:]JgEc5Y:]JgEc5Y:^JeEc5[:]JeEd5Y:]JgEc5Y:^JeEc5[:]JeEc5[:^JdEb5\\\\:_JbEb5]:`JbE`5^:aJaE_5_:cJ_E]5a:eJ^EZ5b:hJ]EW5d:kJYEU5g:nJVER5j:PKUEo4k:RKTEn4l:TKSEk4m:VKREi4o:WKQEi4o:WKQEi4o:XKPEh4P;XKPEh4o:YKPEh4P;XKPEi4o:XKPEh4P;XKPEh4P;XKoDi4Q;WKoDi4Q;XKnDg4S;ZKlDf4T;\\\\KkDc4U;_KiDa4W;cKfD\\\\4Z;gKdDX4[;jKfDT4Z;mKjDm3W;TLlDh3T;XLPEd3P;\\\\LREb3n:^LUE_3k:aLYE[3h:cL\\\\EY3e:gL^EV3b:jLaES3_:mLeEo2[:PMiEm2W:SMlEj2T:VMoEf2R:[MPFb2P:_MRF]2P:cMRFY2o9hMTFS2m9mMWFn1j9SNYFg1i9ZNYFb1i9]NYF`1h9aNYF\\\\1h9eNXFY1i9gNXFW1j9iNVFU1k9lNVFQ1k9POUFn0l9ROUFk0n9UORFi0o9XOQFf0P:[OPFc0Q:]OPFa0R:@mE>T:ClE;U:GkE6V:LjE1W:2hELX:6iEFY::hECY:?fE_O[:c0dEZO^:g0bEWNeMoMj<k3a
ERNmMkMe<T4^ElMWN^Mg<g4SESMnMWNl=g4WDmL]<T3dCfLa<Y3cC`L`<a3aCYLc<h3_CRLd<n3]CnKf<S4[CiKg<W4ZCfKi<Z4^30010O0100O010O01O010O1O1O001O001O0100O1O10O01O10O01O00001O00000001O0000000001O000000000001O00000000001O00000000001O1O1O1O2N1O1O1O1O010N10001O00001O001O001O1O1O1O2N2O1N1O1O1O0010O01O0010O01O101M4K7H:Fa0TOngZa0\"}}, {\"image_id\": 109, \"category_id\": 1, \"bbox\": [329.956787109375, 147.6610870361328, 148.808837890625, 257.4708251953125], \"score\": 0.9999995231628418, \"association_id\": 1, \"segmentation\": {\"size\": [533, 799], \"counts\": \"]ga5;X`04M101N2O1O1O1O2N001O1O001O00001O1O002Ni0WO>B2N101N2N;E4L2N1O3M3M3L4nChLa:Y3\\\\ElLa:V3YERMc:o2VEYMh:j2oD]Mo:h2iD]MV;f2eD]MY;P4N2M3N2N2O1N2K5L4N2O001O0001N10O1O10O10001N2M3K5YOf0L5N1O00O1eNdK`F[4a9hK\\\\FX4d9kKYFV4g9nKTFS4l9YLfEi3Y:Z1000O1O1O1O100O100000000O2O1N3NRF`Jl8Z5QGmJP9n4oFWKV9a4iF`Kc9U4ZFnKh9P4TFSLn9l3lEYLU:i3eEZL]:o40O2O0O001O1N3L4M3L3N101O0O10001N3M3L4L4L3N2N4J<]M]2I6K5K6I`0XOVa[5\"}}, {\"image_id\": 109, \"category_id\": 2, \"bbox\": [242.75662231445312, 289.06640625, 184.43753051757812, 137.20513916015625], \"score\": 0.9999853372573853, \"association_id\": 1, \"segmentation\": {\"size\": [533, 799], \"counts\": \"\\\\bT41b`03M3N101O001N2O1O001O001O001O1O100O1O1N2O2N3M1O1O1O1O1O1O1O1O1N2O1N2N2N2N2O0O2O1M3N2N2N2N1O2M2M3YNoMhDT2T;PNiDR2V;QNgDQ2X;oMgDR2Y;oMdDS2[;oMYD]2f;cMoCh2Q<YMlCi2T<XMjCi2V<XMhCi2X<XMfCi2Z<i00O10O10O10000O101O000O010O100O1O1O100O100O1N2O100O1N2J6L4O100O100O100O1O10001N100000001O1O5K;E8G3N1O1O7I3M9F3N1N2N2N2M4L4K4M4M2M4L6I`n:1]QE7M2M3N1N2N2O2N1001O0010O00010O0001O000O2O1O2M4K_XQ6\"}}, {\"image_id\": 110, \"category_id\": 1, \"bbox\": [525.562255859375, 4.29957914352417, 103.9659423828125, 96.12369537353516], \"score\": 1.0, \"association_id\": 2, \"segmentation\": {\"size\": [640, 640], \"counts\": 
\"UhX:<dc02N000000001O4L6I2O1P^O[Oc`0f0W_OBf`0d0R_O@l`0c0k^ODTa0e1O1O1O1O001O001O001O1O1O1O001O1O001O00000000O100000000O100000000O1O100O100O1O1O10001O000001O001O1O001O010O001O000001O01O0001O0000001O001O1O1O2N2N10O000100O2N1O000O2O0O2O1N3L3M:DPk6\"}}, {\"image_id\": 110, \"category_id\": 1, \"bbox\": [135.91619873046875, 425.3937683105469, 336.8470764160156, 204.94119262695312], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [640, 640], \"counts\": \"jQU35hc05M3L3O1N1O2O0O2N2O1N3c]OATa0a0f^OHUa0:i^OITa09i^OJVa07h^OJWa07h^OKWa06h^OKWa06f^OLZa04a^O2]a0Z1O1O3M6J3M2N2N001O1O1O1O1O1O3M2N3M1k@_Li=a3WB`Lh=a3WB_Li=a3VB`Lj=a3UB_Lk=a3UB_Lk=b3UB]Lk=c3UB]Lk=d3TB[Ln=e3QB[Lo=f3U1100O10O010O10O01000\\\\@`Lh>_3VAcLj>_3RAbLP?^3l@dLU?`3e@bL[?Q41O001O1O002N1O3M2N1O10O01O00001O00000O10000000000000001O001O0O2O1O1O0O2O001O00000001O1O00100d@VL`>k3\\\\A]L_>c3_AaL_>`3^AcLa>^3]AdLb>]3\\\\AeLd>[3ZAfLf>[3XAgLh>Z3UAgLl>Z3PAhLQ?T40O0010O01O010O001N101O1O1O1N101O000O2O0O2N1O2N2M3jNQAUMT?h2T1M3N1O2N101N101O1N2O001N10001O0O2O00001O1O100O001O00001O00001O001O1O001O100O00001O00001O001O001O1O1O001O001O00001O001O1O1O001O1O001O001O001O2N1O1O001O001N11O01O001O001O1O1O001O000000010O0001O001O001O1O01O0001O01O000010O01O1O00001O0000001O01O000001O0000001O010O00100O0010O010O0001O00001O00001O001O001O1O1N2O1N2O2M2N3M2O1N2N2NYnZ3\"}}, {\"image_id\": 110, \"category_id\": 2, \"bbox\": [9.3846435546875, 568.623291015625, 403.88665771484375, 67.9180908203125], \"score\": 0.9999996423721313, \"association_id\": 1, \"segmentation\": {\"size\": [640, 640], \"counts\": 
\"Xj6a0_c01N100O2O000000001O001N101O1O0000001O000000001O00001O001O001O00000000001O000000001O001O1O00001O0000000000001O00001O000000001O0000000000001O0000001O000000000000001O0000000000001O00000000000000000000000000001O00000000000000000000000000001O0000000001O00000000000001O00000001O000000000000001O000000001O01O0000000001O000000000000001O0000000000000000000000000000000000000000001O0000000000000000001O001O001O001O010O00001O0000001O000000000000001O0000000000001O0000001O00000000001O00000000001O000000000001O000001O00000000001O1ZOU]O5lb0HW]O7Zc0O001N100000000000000000000000000000000000000000O100O1N2O1O1N2O100O10000O10000000001N100000000000000010O0000010O000000010O0000000001O0001O00000001O00000001O000000000000000000000000000000001O000000001O000O10001O000000001O0O2O002Ldhn4\"}}, {\"image_id\": 110, \"category_id\": 2, \"bbox\": [72.49165344238281, 332.4353942871094, 271.48980712890625, 20.948638916015625], \"score\": 0.9999988079071045, \"association_id\": 3, \"segmentation\": {\"size\": [640, 640], \"counts\": \"`n]12mc02O1O0O100000000000000000000000001O000000000000000000000000000000001O00000000001O00000000000000001O00000000000000000O1000001O00000000000000000000000000001O000000000000000000000001O000000000000000000000000000000000000000001O000001O0000000000000000000000000000000000001O00000000000001O0000000000000000000001O000000000000001O000000000000000000001O000000000000000000000000001O000000000000000000000000001O000000000000000000001O000000000000000000000000000000001O0000000000000000000000000000000000000001O0001O000000000000000O10000000001O000O10Wmi5\"}}, {\"image_id\": 110, \"category_id\": 2, \"bbox\": [221.8938446044922, 239.5956268310547, 399.3778076171875, 64.12452697753906], \"score\": 0.998374342918396, \"association_id\": 0, \"segmentation\": {\"size\": [640, 640], \"counts\": 
\"RTa52nc0001O0O10001O0000000O100000001O00000O10001N10001O00000`\\\\OFYc0<d\\\\OIYc0`0O001O00000000000000000O2O00000000000000001O000000000000000000000000000000000001O000000000000000001O00000000000000001O000000000000000000000000000000001O000000000000000000000000000000000000000000000000000000000000000000001O00000000000O10000000000000000000000000000000000000000000000000000000000000000O100000000000000000000000O10000000O100000000000000000000O10000000000000000000000000000000000000000000000000000000000000000000000000000000000000001O00000000000000000000000000000000001O00000000000000000000000000000000000000000000000000000001O0000000000000000000000000000000000000000000000000000000000000O2O0000000O2O0O3JVd;\"}}, {\"image_id\": 110, \"category_id\": 2, \"bbox\": [130.81004333496094, 203.6165008544922, 507.78271484375, 42.66656494140625], \"score\": 0.9955007433891296, \"association_id\": 0, \"segmentation\": {\"size\": [640, 640], \"counts\": \"dRb29ec03O0000001O00000O100000000000000000000000000000000000000000000000000000000000000000000000000000000000010O0000000000000000000000000000000000O10001O000N200O1NZ\\\\OJfc053O100000000O2MT\\\\O0lc0020Pd:0P\\\\E3L6K1O1O00001O00000O2O000000000O1000000000000000000000000000000000000000000000001O00000000000000000000001O000000000000001O0000000000000000001O0000000001O01O0000000000000001O001O00001O01O0001O00000000001O00O1000000000000000000K@j\\\\O`0Uc0Ak\\\\O?Uc0Ak\\\\O?Uc0Ak\\\\O?Uc0Aj\\\\O`0Vc0@j\\\\O`0Vc0@j\\\\O`0Uc0Ak\\\\O?Uc0Bj\\\\O>Vc0Bj\\\\O>Vc0Bj\\\\O>Vc0Bi\\\\O?Wc0Ai\\\\O?Wc0500000000000001O0000000000000000000000000000000000000001O0000001O0000001ZOg\\\\Oc0Yc0]Og\\\\Oc0[c000O10000000001O00000000000000000000000000000000000O10O11O000000000000000O100000000000000000000O1000000000000000000O1000000000000000000000000000000000000000000O100000000000000000000001O00000001O000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000O1000000000000000000000000000000000000O2O00000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000Ga\\\\O1_c0Ob\\\\O0^c00c\\\\OO]c00d\\\\O0\\\\c00d\\\\O0\\\\c0Of\\\\O0Zc00f\\\\O0[c0Oe\\\\O1[c0Of\\\\O0Zc0Og\\\\O1Yc0Og\\\\O1Yc0Oh\\\\O0Xc0Oi\\\\O1Wc0Oi\\\\O1Wc0Oi\\\\O1Wc0Nj\\\\O2cc0Noh4\"}}, {\"image_id\": 110, \"category_id\": 1, \"bbox\": [202.1602020263672, 206.8860321044922, 178.8246612548828, 138.3310089111328], \"score\": 0.9920613765716553, \"association_id\": 3, \"segmentation\": {\"size\": [640, 640], \"counts\": \"lcV4?cb0ET^Oa0fa0FU^O>ga0DW^O>ga0DW^O>ha0CV^O`0ga0Q1K4M1N2O001O2N5K1O1O010O101O1O0O1000O2O000000001N5L2N1Oi^OjMc`0V2Z_OmMf`0T2X_OmMg`0T2W_OmMj`0e20O1O1O100O100O2O0O010O01001O0O010O001O2M101N2O0O2L4G9L4M3O0O2N2N2N2N2O0O2O1O2N1O1O1O10O01O1O10O01O1O2O1O000O010O1O2N3M2N010O1O1O2N1O1O100O2N1O2N10O010O010000001N1000010O0O100O10O001O10O01O001N101N101N3M3M`ia5\"}}, {\"image_id\": 110, \"category_id\": 2, \"bbox\": [445.3407287597656, 89.22557067871094, 157.34011840820312, 9.49908447265625], \"score\": 0.9874173402786255, \"association_id\": 2, \"segmentation\": {\"size\": [640, 640], \"counts\": \"njP91oc00O1000000000000000000000000O100000000000000000000000000O1000001O000000000O10001O000000001O00000O1000000000000000000000000000000000000000000000000000000000000000000000000000000001O1O001O000000000000000000000000O10000O10O10O100000000000000000000000O1000001O002N1O002NQ]k0\"}}, {\"image_id\": 110, \"category_id\": 2, \"bbox\": [172.0583953857422, 206.51707458496094, 453.34173583984375, 122.31010437011719], \"score\": 0.9760921597480774, \"association_id\": 0, \"segmentation\": {\"size\": [640, 640], \"counts\": 
\"VlS64kc01O101N100O2O000O10001O000000001O00000000001O01O0000000000000001O000000000000001O00000000001O000O1000001O000O1000001O0000000000001O000O10000000001O0000000000001O000000001O00000000000000000000000000000nNXOl^Oh0Ta0YOk^Og0Ta0[Ok^Oe0Ua0[Ok^Oe0Ua0\\\\Oj^Od0Ua0]Ok^Oc0Ua0]Ok^Oc0Ua0]Ok^Oc0Ta0_Ok^O`0Va0@j^O`0Va0@j^O`0Ua0Ak^O?Ua0Aj^O`0Va0Ai^O`0Va0@j^O`0Va0@j^O`0Va0@j^O`0Ua0Ak^O?Ua0Ak^O?Ua0Ak^O?Ua0Ak^O?Ua0Ak^O?Ua0Ak^O?Ua0Ak^O?Ua0Ak^O?Ua0Ak^O?Ua0Ak^O?Ua0Ak^O?Ua0Ak^O?Ua0Ak^O?Ua0Ak^O?Ua0Ak^O?Ua0Ak^O?Ua0Ak^O?Ua0Ak^O?Ua0Ak^O?Ua0@l^O`0Ta0@l^O`0Ta0@l^O`0Ta0@l^O`0Ta0@l^O`0Ta0@l^O`0Ta0@l^O`0Ta0_Ol^Ob0Ta0^Ol^Ob0Ta0^Ol^Ob0Ta0]Om^Oc0Sa0]Om^Oc0Sa0]Om^Oc0Sa0\\\\On^Od0Ra0\\\\On^Od0Ra0\\\\On^Od0Ra0[Oo^Oe0Qa0[Oo^Oe0Qa0[On^Of0Ra0YOo^Og0Qa0YOo^Og0Qa0YOo^Og0Qa0YOo^Og0Qa0YOo^Og0Qa0YOo^Og0Qa0YOo^Oh0Pa0XOP_Oh0Pa0XOP_Oh0Pa0XOP_Oh0Pa0XOP_Oh0Pa0YOo^Og0Qa0YOo^Og0Qa0YOo^Og0Qa0YOo^Og0Qa0YOo^Og0Qa0YOn^Oh0Sa0WOm^Oi0Sa0WOm^Oi0Sa0WOm^Oi0Sa0WOm^Oi0Sa0WOm^Oi0Ra0YOm^Og0Sa0YOm^Of0Ta0ZOl^Of0Ta0ZOl^Of0Ta0ZOl^Of0Ta0ZOl^Of0Ta0ZOl^Of0Ta0ZOl^Of0Ta0ZOl^Of0Ta0ZOl^Of0Ta0ZOl^Of0Ta0ZOl^Of0Ta0ZOl^Of0Ta0ZOl^Of0Ta0ZOl^Of0Ta0ZOl^Of0Ta0ZOl^Of0Ta0ZOl^Of0Ta0ZOl^Of0Ta0[Ok^Oe0Ua0[Ok^Oe0Ua0[Ok^Oe0Ua0[Ok^Oe0Ua0[Ok^Oe0Ua0[Ok^Oe0Ua0[Ok^Oe0Ua0[Ok^Oe0Ua0[Ok^Oe0Ua0[Ok^Oe0Ua0[Ok^Oe0Ua0[Ok^Oe0Ua0[Ok^Oe0Ua0[Ok^Oe0Ua0[Ok^Oe0Ua0[Ok^Oe0Ua0[Ok^Oe0Ta0\\\\Ol^Oe0Sa0[Om^Oe0Sa0[Om^Oe0Sa0[Om^Oe0Sa0[Om^Oe0Sa0\\\\Ol^Od0Ta0\\\\Ol^Od0Ta0\\\\Ol^Od0Ta0\\\\Ol^Od0Ta0\\\\Ol^Od0Ta0\\\\Ol^Od0Ta0\\\\Ol^Od0Ta0\\\\Ol^Od0Ta0\\\\Ol^Od0Ta0\\\\Ol^Od0Ta0\\\\Ol^Od0Ta0\\\\Om^Oc0Sa0]Om^Oc0Sa0]Om^Oc0Ra0^On^Ob0Ra0^On^Ob0Ra0^On^Ob0Ra0^On^Ob0Ra0^On^Ob0Ra0^On^Ob0Ra0^Oo^Oa0Qa0_Oo^Oa0Qa0_Oo^Oa0Qa0_Oo^O`0Ra0@n^O`0Ra0@n^O`0Ra0@n^O`0Ra0@n^O`0Ra0@n^O`0Ra0@n^O`0Ra0@n^O`0Ra0@n^O`0Ra0@n^O`0Ra0@n^O`0Ra0@n^O`0Ra0@n^O`0Ra0@n^O`0Ra0@n^O`0Ra0@n^O`0Ra0@n^O`0Ra0@n^O`0Ra0@n^O`0Ra0@n^O`0Ra0@n^O`0Ra0@n^O`0Ra0Am^O?Sa0Am^O`0Ra0@n^O`0Qa0Ao^O?Qa0Ao^O?Qa0Ao^O?Qa0Ao^O?Qa0Ao^O?Qa0An^O`0Ra0@n^O`0Ra0@n^O`0Ra0@n^O`0Ra0Am^O?Sa0Am^O?Sa0Al^O`0Ta0@l^O`0Ta0@l^Oa0Sa0_Om^Oa0Sa0_Om^Oa0Sa0_Om^Oa0Sa0_Om^Oa0R
a0@m^Oa0Sa0_Om^Oa0Sa0_Om^Oa0Sa0_Om^Ob0Ra0^On^Ob0Sa0]Om^Oc0Sa0]Om^Oc0Sa0]Om^Oc0Sa0]Om^Oc0Sa0]Om^Oc0Sa0]Om^Oc0Sa0]Om^Od0Ra0\\\\On^Od0Ra0\\\\On^Od0Ra0\\\\On^Od0Sa0[Om^Oe0Sa0[Om^Oe0Sa0[Om^Oe0Sa0[Om^Oe0Sa0[On^Od0Sa0ZOn^Of0Sa0WOP_Oh0Pa0TOT_Ok0Rb0O1N2M4MPT9\"}}, {\"image_id\": 110, \"category_id\": 2, \"bbox\": [91.437255859375, 210.27403259277344, 548.562744140625, 63.39561462402344], \"score\": 0.5025287866592407, \"association_id\": 0, \"segmentation\": {\"size\": [640, 640], \"counts\": \"RTn13mc01N2LK[\\\\O5ec04O00000O100000000O100000000O10000000000000001O000000000000000mNCc^O=]a0Cc^O=\\\\a0Dd^O<\\\\a0Dd^O<\\\\a0Ed]ONl0=`a0Ec]OOm0<`a0Ec]OOm0<`a0Ec]OOm0<`a0Ec]OOm0<`a0Ec]OOm0<`a0Ec]OOm0;aa0I_^O7aa0I_^O7aa0I`^O6`a0J`^O6`a0J`^O6`a0J`^O6`a0J`^O6`a0J`^O6`a0J`^O6`a0J`^O6aa0I`^O6`a0J`^O6`a0J`^O6`a0J`^O6`a0J`^O6`a0J`^O6`a0J`^O6`a0J`^O6ba0H^^O7fb001O00000000000000001O00000O101O0000001O00001O000O2ORXa2Oog^M1N101O00000hNMb^O5]a0Kc^O5]a0Kc^O6\\\\a0Jd^O6\\\\a0Kc^O5]a0Kc^O5]a0Kc^O5\\\\a0Ld^O4\\\\a0Ld^O5[a0Ke^O5[a0Ke^O5[a0Lc^O5]a0Kc^O5]a0Kc^O5]a0Kc^O5]a0Lb^O4^a0La^O5_a0Ka^O5_a0Ka^O6^a0Jb^O6^a0Ka^O5_a0Ka^O5_a0Ka^O5^a0Lb^O5]a0Kc^O5]a0Kb^O6^a0Jb^O6^a0Jb^O6^a0Jb^O6^a0Jb^O6^a0Jb^O6^a0Ka^O6^a0Jb^O6^a0Jb^O6^a0Jb^O6^a0Jb^O6^a0Jb^O6^a0Jb^O6^a0Jb^O6^a0Jb^O6^a0Jb^O7\\\\a0Jd^O6\\\\a0Jd^O6\\\\a0Jd^O6\\\\a0Jd^O7[a0Ie^O7[a0Ie^O7[a0Ie^O8Za0He^O9[a0Ge^O9[a0Ge^O9[a0Ge^O9[a0Ge^O9Za0Ie^O7[a0Ie^O7[a0Ie^O7[a0Ie^O7[a0Ie^O7[a0Ie^O7[a0Ie^O7[a0Ie^O7[a0Ie^O7[a0Ie^O7[a0Ie^O7[a0Ie^O7[a0Ie^O7[a0Ie^O7[a0Ie^O7[a0Ie^O7[a0Ie^O7[a0Jd^O6\\\\a0Jd^O6\\\\a0Jd^O6\\\\a0Jd^O6\\\\a0Jd^O6\\\\a0Jd^O6\\\\a0Jd^O6\\\\a0Jd^O6\\\\a0Jd^O6\\\\a0Kc^O5]a0Kc^O5]a0Kc^O5]a0Kc^O5]a0Kc^O5]a0Kc^O5]a0Kc^O5]a0Lb^O4^a0Lb^O4^a0Lb^O4^a0Lb^O4^a0Lb^O4^a0Lb^O4^a0Lb^O4^a0La^O5_a0Jb^O6^a0Jb^O6^a0Jb^O6^a0Jb^O6]a0Kc^O5]a0Kc^O5]a0Kc^O5]a0Kc^O5]a0Kc^O5]a0Kc^O5]a0Kc^O5]a0Kc^O5]a0Kc^O5]a0Kc^O5]a0Kc^O5]a0Kc^O5]a0Kc^O5]a0Kb^O6^a0Jb^O6^a0Jb^O6^a0Ic^O7]a0Ic^O7]a0Ic^O7]a0Ic^O7]a0Ic^O7]a0Ic^O7]a0Ic^O7]a0Ic^O7]a0Ic^O7]a0Ic^O7]a0Hd^O8\\\\a0Hd^O8\\\\a0Hd^O8\\\\a0Hc^O
9]a0Gc^O9]a0Gc^O9]a0Gc^O9]a0Gc^O9]a0Gc^O9]a0Gc^O9]a0Gc^O9^a0Fb^O:^a0Fb^O:^a0Fb^O:^a0Fb^O:^a0Fb^O:^a0Fb^O:^a0Fb^O:^a0Fb^O:^a0Fb^O:^a0Ec^O;]a0Ec^O;]a0Ec^O;]a0Ec^O;]a0Ec^O;]a0Ec^O;]a0Ec^O;]a0Ec^O;]a0Ec^O;]a0Ec^O;]a0Ec^O;^a0Db^O<^a0Da^O=_a0Ca^O=_a0Ca^O=_a0Ca^O=_a0Ca^O=_a0Ca^O=_a0Ca^O=_a0Ca^O=_a0Ca^O=_a0Ca^O=_a0Ca^O=_a0Ca^O=_a0Ca^O=_a0Ca^O=_a0Ca^O=_a0Ca^O=_a0Ca^O=_a0Ca^O=_a0Ca^O=_a0Ca^O=_a0Ca^O=_a0Ca^O=_a0Ca^O=_a0Ca^O=_a0Ca^O=_a0Ca^O=_a0Ca^O=_a0Ca^O=_a0Ca^O=_a0Ca^O=_a0Ca^O=_a0Ca^O=_a0Ca^O=_a0Ca^O=_a0Ca^O=_a0Ca^O=_a0Ca^O=_a0Ca^O=_a0Ca^O=_a0Ca^O=_a0Ca^O=_a0Ca^O=_a0Ca^O=_a0Ca^O=^a0Db^O<^a0Db^O<^a0Db^O<^a0Db^O<^a0Db^O<^a0Db^O<^a0Db^O<^a0Db^O<^a0Db^O<^a0Db^O<^a0Db^O<^a0Db^O<^a0Db^O<^a0Ea^O;_a0Ea^O;_a0Ea^O;_a0Ea^O;_a0Ea^O;_a0Ea^O;_a0Ea^O;_a0Ea^O;_a0Ea^O;_a0Ea^O;_a0Ea^O;_a0Ea^O;_a0Ea^O;_a0Ea^O;_a0Ea^O;_a0Ea^O;_a0Ea^O;^a0Fb^O:^a0Fb^O:^a0Fb^O:^a0Fb^O:^a0Fb^O:^a0Fb^O:^a0Fb^O:^a0Fb^O:^a0Fb^O:^a0Fb^O:^a0Fb^O:^a0Fb^O:^a0Fb^O:^a0Fb^O:^a0Fb^O:^a0Fb^O:^a0Fb^O:^a0Fb^O:^a0Fb^O:^a0Fb^O:^a0Fb^O:^a0Fb^O:^a0Fb^O:^a0Fb^O:^a0Fb^O:^a0Fb^O:^a0Fb^O:^a0Fb^O:^a0Fb^O:^a0Fb^O:^a0Fb^O:^a0Fb^O:^a0Fb^O:^a0Fb^O:^a0Ga^O9_a0Ga^O9_a0Ga^O9_a0Ga^O9_a0Ga^O9_a0Ga^O9_a0Ga^O9_a0Ga^O9_a0Ga^O9_a0Ga^O9_a0H`^O8_a0Ia^O7_a0I`^O8`a0H`^O8`a0H`^O8`a0H`^O8`a0H`^O8`a0H`^O8`a0H`^O8`a0H`^O8`a0H`^O8`a0H`^O8`a0H`^O8`a0H`^O8`a0H`^O8`a0H`^O8`a0H`^O8`a0H`^O8`a0H`^O8`a0Ga^O9`a0F`^O:`a0F`^O:`a0F`^O:`a0F`^O:`a0F`^O:`a0Fa^O9_a0Ga^O9_a0Ga^O9_a0Ga^O9_a0Fb^O:^a0Fc^O9aa0B`^O>`a0Ba^O=`a0^Og]OOj0b0`a0^Od^Oa0_b0N3M4L7HiG\"}}, {\"image_id\": 110, \"category_id\": 2, \"bbox\": [66.45074462890625, 220.10989379882812, 350.4375305175781, 124.03866577148438], \"score\": 0.13230600953102112, \"association_id\": 0, \"segmentation\": {\"size\": [640, 640], \"counts\": 
\"aRf11oc00O2O000O10000000001O0bMLPA4o>NPA1P?0PA0n>3QAMo>4QAKo>5QAKn>7QAIo>7QAIo>8PAHP?8PAHP?8PAHo>9RAFn>;QAEo>;QAEo>;QAEo>;QAEo>;QAEo>;QAEo>;QAEo>;QAEo>;QAEo>;QAEo>;QAEo>;QAEo>;QAEo>;QAEo>;QAEo>;QAEo>;QAEo>;QAEo>;QAEo>:RAFn>:RAFn>:RAFn>:RAFn>:RAFn>:RAFn>:RAFo>9QAGo>9QAGo>9QAHn>8RAHn>8RAHn>8RAHn>8RAHn>8RAIm>7SAIm>7SAJl>6TAJl>6TAJl>6TAJl>6TAJl>6TAJl>6TAJl>6TAJl>5UAJl>6TAJl>6TAIm>7SAIm>7SAIm>7SAIm>6TAIm>7SAIm>7SAIm>7SAIm>7SAHn>8]2000000000000000i^OHg>7YAIg>7a200O2O1N2O1O0OUl\\\\21jScM1O2N1O2O1O00001O001N100000001O00000000001O000000001O00001O01O0O1000000O2O00001N1000000000001O00]^OH^?8a@I_?7`@K_?5_@Ma?3_@Ma?4^@Ma?3^@Nb?2^@Oa?1_@Oa?1_@Oa?1_@Nb?2_@Ma?4^@Lb?4^@Lb?4^@Lb?4^@Kc?6]@Ic?7]@Ic?7^@Hb?8_@Fb?:^@Fb?:R21O00000000000000000001O00000001O00000000001O01O0O2N101N2N1Ohcb4\"}}, {\"image_id\": 110, \"category_id\": 2, \"bbox\": [496.5609436035156, 89.1240005493164, 123.60018920898438, 7.7095489501953125], \"score\": 0.05539294332265854, \"association_id\": 0, \"segmentation\": {\"size\": [640, 640], \"counts\": \"lZg93mc00O2O00000O100O101O000000000000000000000000000000000000000000000000000000000000000000000000000000000001O1O001O001O000000000000O10000000000O1000000O010O1000000000000000000000000001N101O001O1O001O00Rij0\"}}, {\"image_id\": 111, \"category_id\": 1, \"bbox\": [128.70802307128906, 284.3067626953125, 230.94908142089844, 329.7520751953125], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [640, 418], \"counts\": 
\"P\\\\l23P1h0a`06m^O0k`0>k^OFPa0h1I6L4K6eNZ1H8K6L3K4J7J5L5K4O101O0000O1O0O2O00001O2O00cMaJZF^5g9hJSFW5n9PKjEo4Y:TKcEk4^:XK^Eh4c:[KYEf4h:\\\\KUEd4l:]KREc4o:_KnDa4S;bKhD^4Z;eKaD\\\\4`;hKZDX4h;o1001O1O1O1N2O1O1O1N2O100O1O1O100O100O100nIaCb4`<\\\\KhC\\\\4Z<bKjCZ4X<cKlC[4U<cKnCZ4T<dKnC[4S<dKoC[4Q<dKQDZ4Q<dKPD\\\\4P<cKQD\\\\4P<dKQD[4o;dKRD[4o;eKQD[4o;dKSD[4m;eKSDZ4n;eKSDZ4n;fKSDY4m;gKSDX4n;gKTDX4l;hKTDW4m;hKUDW4k;iKVDU4k;jKVDU4k;kKVDT4j;kKWDU4i;kKWDU4i;kKWDU4i;kKWDV4h;jKYDU4g;kKYDU4g;jKZDW4e;iK[DW4e;iK[DX4d;gK]DZ4b;eK`D[4_;eKaD[4_;dKcD\\\\4\\\\;dKdD]45`KP92mF^4LkKR9GRG^4HRLT9^OUGa4BWLW9XOWGb4^O[LY9ROZGd4YO^L]9mN[Gf4ROcLa9fN^GP5cNaLn9]N`G]7]8bHdG_7\\\\8`HdGa7\\\\8]HeGd7\\\\:O1N2N3M3L4M2N2N2N2N2M4L5J5K4M3M3M2M4K5J7J6J5L5L2O2N1O101N1O100O2O000000001N1000000010O001O000010O01O001N2O2N1O1O1O00001O01O0000000000001O001O0O101O1O1N2N1O2N2N2N1O2O1N3gL]@Q2g?cMh@U2j`0H6I3N2M2O1N2N3N3L4K5K4M2M3M4K=^Od\\\\T1\"}}, {\"image_id\": 111, \"category_id\": 1, \"bbox\": [317.2243347167969, 0.0, 88.90414428710938, 167.10093688964844], \"score\": 1.0, \"association_id\": 2, \"segmentation\": {\"size\": [640, 418], \"counts\": \"kj[6b0Qc0U1S]OnNYa0P2K5K4M4L2QN_M[Bf2c=bMTB`2l=gMkA[2T>i10000000000000O1000000O1O100O100O10000O1O1O1N2O1O1N2N2N2N2I7N2N2F:M3M3J6L4N2L4J6N2G9B>L4O1O100O2O0O11000O20O1O1O10b0_O1N03K>C6I9G9G2M6Ji0TOZe8\"}}, {\"image_id\": 111, \"category_id\": 2, \"bbox\": [258.012451171875, 130.28929138183594, 74.49447631835938, 33.785400390625], \"score\": 0.9999992847442627, \"association_id\": 2, \"segmentation\": {\"size\": [640, 418], \"counts\": \"TXS51nc02O1O001N10001O00001O001O00001O000000001O010O1O3M10O01O0010O0001O01O01O00001O000010O0001O001O001O010O00001O00010O1O001O0010O0000003An\\\\OIffh1\"}}, {\"image_id\": 111, \"category_id\": 2, \"bbox\": [103.18479919433594, 421.230224609375, 198.4577178955078, 172.94415283203125], \"score\": 0.9998998641967773, \"association_id\": 1, \"segmentation\": {\"size\": [640, 418], \"counts\": 
\"X]V27ic01N101O1O1N3Z]OOYa02e^O0Za00e^O2Za0Oe^O2Za0Ne^O4Za0Md^O5[a0Kc^O<Xa0Ef^O?Wa0Ai^Oa0Ua0@i^Ob0Va0^Oj^Ob0Va0^Oi^Od0Va0]Oi^Oe0Ua0\\\\Oi^Oj0Ra0YOf^OU1Qa0o0O1N1O2O0O2O2N2N000OO2N1O2BU_OcMn`0W2Z_OeMh`0X2b0N3Lb0]OUU=aN\\\\lB2N100O1001N2N2N]]9OdbF2N2M2N2L5L3N201N1O1O2N1O2N101O0O1000000O10001O1O1O2N2N1O100O1O2N2N3M1O1O1O1O1O1O2N1O001O00O2N100O1O1M3SOc]O4ab0Id]O0`b0Nj0N3L4I_m;:[RD4L3N1N2N2M3N2N3N100001O010O001O0010O01O1O1O1O002M2M?\\\\OhnW2\"}}, {\"image_id\": 111, \"category_id\": 2, \"bbox\": [144.72479248046875, 480.4729309082031, 174.27981567382812, 112.64578247070312], \"score\": 0.10325063765048981, \"association_id\": 0, \"segmentation\": {\"size\": [640, 418], \"counts\": \"[[[35hc04N20O01O01O0K[\\\\OOec01]\\\\OMdc00_Q:2enE2N1L5I6M3O1O100O1N2O2N100O1O100O2O0000000O100000001O1O2O1N1O1O002O1N6J3N0O1O2N1O1O1O101NO1000001O000O1gNl]Od0Vb0\\\\Ok]ODO6hb05[]OBTc0>701N2N2M3Lh]9L\\\\bF4E:H9N1O1O2M2N2O1O1O101O010O0010O00001O100O1O001O1O1O1O1O0O2O1O1O1N3N1O0O3M3LeRQ2\"}}, {\"image_id\": 112, \"category_id\": 1, \"bbox\": [278.443115234375, 832.611572265625, 184.2767333984375, 146.35009765625], \"score\": 1.0, \"association_id\": 2, \"segmentation\": {\"size\": [1372, 2047], \"counts\": \"XRh;;^Z1>C6I4L5I7I6G9M2oNWNlhNm1SW1UNfhNP2YW1RNZhNH5W2aW1YN\\\\hNi1bW1YN\\\\hNh1`W1]N^hNd1`W1n0M2N2N2N3N1O100O1N2O1O1O2N1O100O100O10000O1O1O1O100O011O0O10000000000O1000000O10000O10000O1000000O1000000000000000000000000000000000000000000000000000001O000000000000000000O1000000O100000000O10000O1O1O100O100O100001O00000000000000000000000000000001O0000000O10001O0O2O0O101N10000000001O0000000O101O000O2O001O001O001O0000000O2O00002K5Kb0hMQhN7^gZR2\"}}, {\"image_id\": 112, \"category_id\": 2, \"bbox\": [371.40728759765625, 952.9046630859375, 190.5440673828125, 23.4847412109375], \"score\": 0.9999953508377075, \"association_id\": 2, \"segmentation\": {\"size\": [1372, 2047], \"counts\": 
\"Pk``01kZ11O001O00001O000O10001O00001O00000000000O100000000000000O10O10000000O1000000000O1000000000O10000000O10O10000000000O10000O01000O100N2O10000O010000000O01000000000000O100000000O10000000O10O1000000000000O1000000000000O10000000000O100000000O100000000000000O100000000O100000000000000000000000000O1000000000000000000000000O10000O100O10WhXn1\"}}, {\"image_id\": 112, \"category_id\": 1, \"bbox\": [535.4780883789062, 81.01852416992188, 1473.572998046875, 1186.795166015625], \"score\": 0.9999740123748779, \"association_id\": 1, \"segmentation\": {\"size\": [1372, 2047], \"counts\": \"Y`Xh0k0[Y1l0E<C=D=A>C>A>A?B>B?QK^JmSOP6ek0iKcROb4Qm0TLZROU4\\\\m0`L^QO\\\\4Xn0Z4G9H7I8L3L5L3M3L4M3M3M3M2N3M3M5K5K5K5K5L4K4L5K5K6K3L3M3M4L3M4K5L4L4L4K5L5QWOh_OPg0^`0`XOS@Yg0S`0WXO_@bg0da0H9F:FP1QOQ1nN?B=C<C6K4L5K4L4L4L4L4L4M3L3M4L3N3M2N2N3M2N2N2N2N2O1N2N2O1N2N2O1N1O2N2O1N2N2N2N2N2N2N2N2N2N1O2M3N2N2M3N2M3M3N2M3M3M3N1O2M3N2N2N2N2N2N2O1N2N2N2O0O2N2O1N2O1N101N2O0O2O0O100O2O0O100O100O1O101N1O100O1O1O1O2N1O1O1O1O1O1N2O2N1N2O1O1O1O1O1O1O2N100O1O100O1O100O2O0O1O10000O100O1000000O10000O1000000O10000O1000000O10000O100O10000O10000O100O1000O0100O100O100O100O100O100O1O100O100O1O1O100O2N1O1O101N1O100O2N100O101N100O100O2O0O100O101N10000O10001N10000O10001N10000O10001O0O1000000O101O000O10000000000O10000000000O100000000O1000000O1000000O1000000O1000000O10000O2O00000O10000O10000O100O10000O100O100O1O1O100O1O1O1O1O1O1O100O1O1O100O1O100O100O100O100O100O100O100O10000O10000O10000O10000O1000000O10000O2O00000000000000000000000000000000000000000O10000000000000000000000000000000000000000O10000000000000000000000O10000O10000O10000O100O100O10000O100O1O100O100O1O100O1O1O100O1O1O1O1O1O1O100O1O1O1O100O1O100O100O100O100O100000000O100000000O10000000000O10000000000O10000000000O1000000000000O1000000000000O10000000000000000O10000000000000000000000000000O1000000000000000000000000000000O10000000000000000000000000000O1000000000000000000000000000000O10000000000000000000000000000000000O100000000000000000000000000000000O100000000000000
00000000O100000000000000O1000000000000O1000000000000O10000000000O10000000000O10000000000O100000000O100000000000000001O00000000001O00000000001O0000000000001O0000000000001O000000000000001O000000000000001O0000000000000000001O00000000000000000000000000001O000000000000000000000000000000001O000000000000000000000000000000001O00000000000000000000000000000000000000001O0000000000000000000000000000000000000000001O0000000000000000000O10000000000O1000000O100000000O1000000O10O10O1000000O10000O10000O10000O100O10000O100O100O100O100O100O100O1O1O100O1O1N2O1O1N2O1O1N2N2O1N3M2N2N2N2N2M3N2M3M3M3M3N2N2N2N2N2N2O1N2O1N2O1O1O1N3N1O1O1O1O100O1O1O1O100O1O2N100O2N1O2N1O2N1O2N1O2N1N3N2N1O2N2N2N1N3N2N2N2N2N2N3M2N2N3M2N2N2N2N2O1N2N2N2N2N1O2O1N1O2N2N101N1O2N101N1O1O2O0O2N101N1O2O0O2O0O2N2O0O2O1N2O001N2O1N2O1N2O1O1N2O2M2O1O2M2O1O1N2O1O1N2O1O1N101O1O1N101O001N101O1O0O2O001O001O0O2O1O001O1O1N2O001O1O1N3N1O1O1O2N1N3N2N2N2M3N3M2N1O2M3N1O2N1O2M2O1O1O1N2O1O1O1N2O1O1O0O2O1O1N101N2O1N1O2O1N2N2O1N2N3M2N2N3M2N2N3M2N2O2M2N2N3M2O1N3M2O2M2N2O2M2N3N1N3N1N3N1N3N1N2O1O1N101N2O1N2O001N2O001N101N2O1N101N2O1N2O0O2N2O1N3M2O1N2N2N3M2O2M3M2N3M3L4M3M3M2N2N2N2N2O1N2N2N2O1N2N101N2N101N2O0O2O0O2O0O2O001O0O2O001O001N101O1O001N2O1O1O001N2O1O2N1N2O1O2N1O2M3N1O2N1N3N1O1O1N2O1O1O1O1N2O001O1N101O1O0O2O001O1N101O0O2O0O2O001N2O0O2O1N2O1N101N2O1N2N3N1N3M3N2M3M4L3M3N2M3M3M2O2M3M3M2O2M2O2M2N3N1N3N1N2O2M2O1N2O1N2O1N101N2O1N2O1N2O1N2N2O1N2O2M2N2N3N1N3M3M2O2M3M3M4L3M8H`0@`0_O`0A?PNjoN]HeP1o6RPO_HYP1Q7S2D:E<E:F:E:E<D<D`0A?A>B>TO`^R3\"}}, {\"image_id\": 112, \"category_id\": 2, \"bbox\": [714.420166015625, 923.8572387695312, 1332.579833984375, 448.14276123046875], \"score\": 0.9999113082885742, \"association_id\": 1, \"segmentation\": {\"size\": [1372, 2047], \"counts\": 
\"\\\\TiQ11kZ11O2N4K6K3M1O00001O00001O000O2O00001O0000001O0000001O0000001O0O10001O00001O0000001O001O00001O00001O001O001O001O001O001O00001O001O00001O001O00001O00001O0010O0001O001O010O001O00100O001O010O001O010O001O00010O001O000010O0001O001O000010O000001O00001O000010O0001O00001O001O001O001O001O001O0010O01O001O001O001O001O001O001O001O0010O01O1O1O2N1O2N1O1O1O1O10O01O1O1O0010O01O000010O0001O01O01O0001O01O01O0001O01O010O000010O0001O01O0001O01O0001O01O000001O000001O0001O000001O000001O0001O00000001O01O00000000010O000000000001O01O0000000001O0000000000001O00000000001O0000001O000000001O0000001O00001O00001O00001O0000001O000000001O0000001O000000001O000001O01O0000001O00000010O000001O00001O0000001O01O01O001O00001O00001O00001O01O01O00001O0000000000000000000000000O100000000000001O00000000000000000000000000000000000O10000000000000000000000000000000000001O000000000O10000000000000000000000000000O101O000000000000000000000O10000000000O10000O10000O10000O1000000O10000O100000O01000000O100000000O100000000O100000000O100000000O10O1000000000000000000000000000000000O100000000000000000000000000000000000000O10000000000000000000000000000000000001O000O1000000000000000000O2O0000000000000O10000000001N100000000O1000000O10001O00O01000000O10000O10O10O10000O1000O0100O10000O010O100O100O010O100O10O0100O10O10O1000O0100O10O1000O100O010O10O0100O010O0010O01O1O001O000O2O001O000O2O00000000000001O001O10O01O00100O00100O10O0100O10O0100O010O10O010O01O0010O01O001O00000O2O000K5I7J6J6L4O2N1O1O2O0O101O001O001O00100O010O010O10O010O10O10O01O010O0010O01O001O010N1O1N3M2N2N3M2N2O1N2O1N3O0000001O010O00010O010O10O010O10O10O01000O01000O0100O010O010O0010O01O001O10O01O001O1O001O1O001O1O001O100O010O100O010O100O01000O1000O10O100000O1000O1000O100000O010000O10O10O10000O010O100O01000O10O10O10O10O1000O0100000O0100000O0100000O010000O01000O010O100O010O010O1O0010O01O1O010O0010O00010O010O00010O010O010O010O010O01O10O01O10O01O1O010O1O1O001O001O1O0O2O001N2O0O2O0O2O0O1O1I7I8O00000000001O0001O000010O01O00010O010O10O10O0100N101O0O2O1N101N1O2N101O0
01O001N10001O0O10000O10O10O01O100O10000O1000010O00010O010O010O010O1000O01000O0100O10O10O10O010000O010O100O010O1O010O1000O0100O010O100O01000O010O10O10O100O01000O010000O01000O10O01000O0100O10O10O10O0100O01O010O010O0010O01O001O010O00001O0O2O0010O010O010O0100O02O000O100O2O0O100O101N1O100O2N1O100O2M2N2N3N1N3M2O2M2N3M2M4L3N3M2N3M2K6J5K6L3M4M2O2N1N3O0O2N101Mhjl2\"}}, {\"image_id\": 112, \"category_id\": 2, \"bbox\": [588.722412109375, 454.26397705078125, 1430.023681640625, 917.7360229492188], \"score\": 0.10889168083667755, \"association_id\": 0, \"segmentation\": {\"size\": [1372, 2047], \"counts\": \"eShS11kZ11N3N1O1O2M2O2N1O2M3N2N2N2M4M2N3M3M1O1O010O1O1O1O001O100O1O1O1O00100O1O1O1O1O100O1O1O1O100O1O100O1O1O101N1O1O2N1O100O1O1O1O1O1O1O100O1O001O001O000010O01O00001O001O00010O00001O00001O000010O0001O00001O00001O00010O0000001O00001O0000001O01O0001O00000010O000001O0001O01O0000001O01O0001O000000010O00000000001O01O000001O00000001O0001O0000000000010O00000000000001O0000000000001O0000000000001O0000000000001O00000000001O00000000001O00000000001O000000001O00001O00001O001O00001O001O001O001O001O1O001O1O001O001O001O001O001O001O001O00001O00001O001O0000001O0000001O0000001O000000001N1000001O000000001O0000001N1000001O0000001O0000001N1000001O00001O0000001O0O10000001O0000000O1000001O000000000O1000001O000000000O100000001O00000000000O100000001O0000000000000O100000000000000001O000000000000000000000000000O100000001O000000000000000000000000000O101O00000000000000000000000000O10000000000O10000000001N100000000O10000000000O10000000000O100000001O0O10000000000O1000000000000000000000000000000000000000000000O10000000001O000000000000000000000000000000000000000O101O00000000000000000000O100000O1000000000000000000O1000O100000000000O10000000000000000O100000O10000000O1000000000000O10O1000O10O10O1000O10O01000O010O010O010O0010O0010O01O001O001O1O001O001O001O1O010O001O1O00100O001O10O01O10O000001O0000001O000O1O100O100O100O1O001O001O1O0O2N3N1N2N201N10001N101O001O001O1O00100O001O100O010O1O010O0010O00010O000001O000000
0O1O1O001O001O1O0O2O1N2O1N2N11000O2O000001O000001O01O00010O0010O01O010O010O00100O010O00010O010O01O010O001O001O1O001O001O001O1O001O010O1O00100O010O10O0100O010O100O01000O01000O10O10O10O100O010O10O0100O010O010O1O010O001O010O00001O010O0001O01O01O001O0010O01O010O10O01O010O10O01000O01000000O01000000O0100000O10O1000O10O1000O1000O010000O01000O10O10O1000O010000O010O100O10O010000O00100O010O1O0010O01O001O001O00001N10001O0O100O100O1O2O0O1O1O1O2O0O100O2O00001O00001O00001O010O00010O0010O010O01O01O010O01O00010O00001O00000000001O00000010O000001O01O00010O00010O00010O010O0010O01O010O010O010O10O01O010O01O01O010O00010O000001O00010O0010O00010O01O010O010O10O010O010O0100O01000O010O10O10O10O10O10O10O10000O010O100O100O100O10O01O100O1O1O10O01O1N2O1O1O001O1O1O1O001O1O1O00100O1O010O1O10O01O10O0100O010O010O2O0O101N100O2N101N101N1O2N1N3M3N2M2N3M3M3M3N3L3M3M4L4L4L4Lf`S3\"}}, {\"image_id\": 112, \"category_id\": 2, \"bbox\": [1381.026123046875, 926.6226806640625, 614.6102294921875, 406.8443603515625], \"score\": 0.05069386214017868, \"association_id\": 0, \"segmentation\": {\"size\": [1372, 2047], \"counts\": 
\"SYni1?[Z13L4N2M3M2O2N1O2N11O1O001O001O00001O0O101O000000001O00000000001O000000000O2O00000000001O00000O10001O00000000000O010000000000000000O10O100000000000O10O10O00010O001O001N2N1O2N2O1N1O2O1O1N200O1O010O1O1O010N2O001N2O0L5E;J5L5L301O0010O01O01O0100O010O001O001O1N1O2L4L3N3N2O1O001O100O010O10O01O010O00001O0O1O1N2O1N3N1O2O001O0010O0100O10O01O001O001O0O2O0O10000000010O01O010O0100O010O10O01O010O1N10001O010O0001O0010O0100O010O010O01O0010O01N10001O01O01O01O00100O0010O010O010O10O01O1O001O1O1O00100O1O010O100O01000O010O0010O01O000000000000010O010O010O01000O01000000O10O10O100O10O0100O10O01O10O01O100O10000O101O00000O101O00000O101O00000O1000001O0000000O101O00000000001O000O10001O000000001N01000000000000O100000000000000O1000000000001N100O100O1O1O1O1MSN`iND\\\\V1=iiNARV1a0SjN]OiU1e0[jNYObU1g0bjNXOZU1j0ijNUOUU1l0mjNSOPU1o0SkNoNkT1R1VkNnNiT1R1YkNmNfT1T1[kNkNdT1U1^kNjNbT1V1_kNiN`T1W1akNiN^T1X1ckNgN\\\\T1Y1ekNgN[T1Y1fkNeNZT1[1gkNeNXT1\\\\1hkNdNVT1]1lkNbNST1_1mkNaNQT1`1QlN_NkS1e1UlN[N_S1P2blNoMPS1_2RmN`MfR1g2[mNYM_R1l2bmNTM]R1l2dmNTMZR1m2hmNRMWR1o2imNPMWR1P3jmNPMUR1P3lmNPMTR1P3mmNoLRR1Q3omNoLPR1R3PnNmLQR1R3PnNnLoQ1S3RnNkLoQ1U3QnNkLnQ1V3SnNgLoQ1X3RnNgLnQ1Z3SnNcLnQ1^3RnNaLnQ1`3SnN]LnQ1d3SnNYLoQ1f3SnNVLoQ1k3`2100O1O01000O100O1000O01000O10000O00100O1O1O010O1O1O10O0100O10O01000000O100000000O1000O10O100000000O10000O10000O100O1O100O1O1O100O100O101N100O10000O10000O10000O10O01O100O1O001O1O1O1O010O100O01000O10O10O010O01O010O010O01O00001O000010O0010O01O001O0010O01O0O101O000O01000O1O1O100O2O000O2O1O010O01O010O1O001N2J6J5K6L4L4M2O2O1N2O1O001O100O2N1O1N2N2O1N3M2N2O1N2M4M2M3N2N3M2N2N3N1O2M3N4K6J5K]^[2\"}}, {\"image_id\": 113, \"category_id\": 2, \"bbox\": [183.3209991455078, 794.589599609375, 566.308837890625, 61.75030517578125], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [1553, 2048], \"counts\": 
\"Y\\\\i82\\\\`17J3N101N101N100O2O001O0O101O0000001O0000000O10001O000000000000001O00000000000000000001O00000001O000000000001O0000000000001O00000000000000001O00000001O0000000000000001O000001O0000000000000000000000000000000001O000000000000O101O0000000000001O00000O10000000000000000000001O000000000O10000000001O00000O1000000000000000000000000000000000000000000O100000000000000000000O10000000001O00000000O01000000000000O100000000000000O100000000O100000000O1000000O100000000O1000000000000O1000000O10000O10000O1000000000000000000000000O10000000O100000000000000000001O000000001N100000001O000000001N100000000000001N100000000O100000001O0O1000000000000000000O100000O10000000O100000000O100000O10000O10O10O1O100O1N2O1O1O10O1000O100000000O1O1O1O1O1O1O01000O10000000000000O1000000000000000000000000000000O1000000000000000000O1000000O100000000O100000000000000O1000000000000000000O10000000000O10000000000000000O100000001O0000000000000000000O10001O000000000O10000000001O0O100000000000001O000O100000001O00000O10001O00000000000O10000000000O1000000000000O1000000000000000000000000O10000000001O0000000O100000000000000O100000000O1000000O10000O10001N100O2N2N5IoYdm1\"}}, {\"image_id\": 113, \"category_id\": 2, \"bbox\": [1588.9737548828125, 1207.33251953125, 450.7025146484375, 306.1051025390625], \"score\": 1.0, \"association_id\": 3, \"segmentation\": {\"size\": [1553, 2048], \"counts\": 
\"geb[21X`19K5N1O2N1O2N2M2O2N2N2N2N2O1N1O2O1N101O00001O00001N101O00001O001O001O001O001O001O001O001O00001O00001TgN[NjR1e1UmN]NjR1c1UmN`NiR1a1UmNaNjR1_1UmNcNjR1^1SmNeNlR1\\\\1RmNgNkR1Z1SmNiNlR1X1QmNkNnR1V1PmNkNPS1V1nlNlNQS1U1mlNlNRS1V1llNkNTS1U1llNlNSS1U1klNlNTS1V1jlNkNVS1U1jlNkNVS1V1hlNlNWS1T1hlNmNXS1S1hlNmNYS1S1elNnN[S1R1dlNnN^S1R1`lNoN`S1R1_lNnNaS1R1^lNoNbS1R1]lNnNdS1R1ZlNoNfS1R1XlNoNhS1Q1XlNoNhS1R1WlNnNjS1R1TlNoNlS1Q1TlNoNlS1R1RlNoNnS1Q1RlNoNoS1P1QlNPOoS1Q1PlNoNPT1Q1okNPOQT1P1okNPORT1o0nkNPOST1Q1lkNoNUT1P1jkNQOVT1o0jkNQOVT1P1ikNPOXT1o0gkNROZT1m0fkNSOZT1n0ekNRO]T1l0bkNUO`T1i0`kNVOcT1i0[kNXOgT1f0YkNZOhT1f0VkN[OkT1d0UkN\\\\OlT1d0SkN\\\\OnT1c0RkN]OnT1d0QkN[OQU1e0mjN\\\\OSU1d0mjN\\\\OTU1d0kjN\\\\OUU1e0jjN[OVU1f0ijNZOXU1f0fjNZO[U1f0ejNZO[U1g0djNYO\\\\U1h0cjNXO^U1h0ajNXO_U1h0ajNXO_U1i0`jNWO`U1j0_jNUObU1l0]jNTOcU1m0egNoMb1T1hV1P1_gNVNe1i0lV1S1YgNZNi1c0nV1U1RgN^No1=oV1^2ohNbMQW1_2nhN`MSW1a2mhN^MSW1d2khN\\\\MUW1e2jhN[MUW1g2jhNYMVW1h2jhNWMVW1j2ihNVMWW1j2ihNVMWW1k2hhNUMWW1m2hhNRMYW1n2ghNRMXW1P3hhNoLWW1R3ihNnLVW1T3ihNlLVW1V3ihNjLVW1W3jhNiLVW1X3ihNhLVW1Z3ihNeLWW1]3hhNcLWW1_3hhNaLXW1`3ghN`LXW1a3hhN_LWW1c3hhN]LXW1c3hhN]LWW1d3ihN\\\\LVW1f3ihNZLWW1f3ihNYLWW1h3ihNXLVW1i3jhNWLVW1j3hhNWLWW1j3ihNVLWW1j3ihNVLVW1k3jhNULVW1k3ihNVLWW1k3hhNULWW1l3ihNTLWW1l3ihNSLXW1m3hhNSLWW1n3hhNSLXW1n3ghNRLXW1o3hhNQLXW1o3hhNQLXW1o3hhNQLWW1Q4hhNnKXW1S4hhNmKXW1S4hhNmKWW1U4hhNjKXW1W4hhNiKXW1X4ghNhKXW1Z4ghNeKYW1\\\\4ghNdKYW1]4fhNbK[W1_4dhNaK[W1`4ehN_K\\\\W1b4chN^K]W1b4chN]K]W1e4bhN[K^W1e4bhN[K^W1f4ahNZK_W1f4ahNZK_W1g4`hNXKaW1h4_hNXKaW1i4^hNWKbW1i4^hNWKbW1j4^hNUKbW1l4]hNTKcW1l4]hNTKcW1m4\\\\hNRKeW1o4ZhNQKfW1o4ZhNQKfW1P5YhNPKgW1Q5YhNnJgW1R5YhNnJgW1S5XhNlJiW1T5XhNkJgW1W5YhNhJgW1X5YhNgJhW1Y5YhNfJgW1[5YhNcJhW1]5XhNcJhW1]5YhNbJgW1_5YhN_JhW1a5YhN^JgW1b5YhN]JhW1d5XhN[JhW1e5YhNZJgW1f5YhNYJhW1g5YhNXJgW1i5YhNTJiW1l5^100001O00001O00001O001O001O1O002N1O1O100O2N1O1O1O1O1O001O001O00100O1O1Ob0^O5K1O100O1O00100O001O001O1O00100O1O001O1O001O001O001O001N2O001O1N2O2N1O1N2O1O001O1O1N101O1O2N1O1O1O1O2N1O1O1O1O001O1O001O0000001O00000
01O00000000001O0000001O001O01O01O001O001O01O01O0001O001O1O1O001O10O01O1O001O001O001O001O001O001O1O1O1O1O00100O1O1O1O001O001O001O001O000eHShNR6nW1lI^hNi5bW1UJbhNi5_W1UJdhNi5\\\\W1UJhhNi5XW1VJkhNh5VW1VJlhNi5TW1VJohNh5QW1WJQiNh5oV1lI^iNS6cV1kI_iNT6aV1kIaiNT6_V1kIdiNS6]V1kIeiNT6[V1kIgiNT6YV1kIjiNS6WV1kIkiNT6UV1lImiNR6SV1mIoiNR6RV1lIQjNR6oU1nISjNP6mU1oIUjNP6kU1PJVjNo5VX101O00001O001O00001O0O101O001O00001O000O2O001O0O2O001N2O2N5J6K4J8I5J8Hi0WO;D`0\\\\OnX<\"}}, {\"image_id\": 113, \"category_id\": 1, \"bbox\": [578.5813598632812, 502.6149597167969, 254.1282958984375, 174.53573608398438], \"score\": 1.0, \"association_id\": 2, \"segmentation\": {\"size\": [1553, 2048], \"counts\": \"cT`k0g0X_1e0K4L3O1O0O2O1O0O2O00001O001O1O00001O001O00000000001O0000001O000000000000001O0000001O00001O00001O00001O001O0TdNUNRY1l1ffN\\\\NYY1d1dfN`N[Y1a1bfNbN]Y1_1afNbN_Y1_1_fNaNbY1`1UfNhNkY1Y1RfNiNnY1Y1neNiNRZ1X1keNjNUZ1W1geNlNYZ1U1deNlN]Z1U1aeNlN_Z1T1`eNmN`Z1T1^eNkNdZ1W1YeNiNhZ1Z1SeNdNQ[1c1^dN_Nj[1n26O1O1O100O1N2M3L4N2N2O1O1O1O1O1N2O1O1O1O10000O100000000000000000000001O000000001O001O1O1O2N3M2N2N1acNULV\\\\1l3gcNWLX\\\\1R4O1O2mcNhKi[1c4M3M3M2N00O1PO`dNmLb[1m2ldNbL][1X3Q1L4O1O1O1O1O1O1O100O1O10000O1000000001O000000000000100O1O001O001O0001O0000001O01O0000O2O00000000000000000O1000000O101N11O001O00001N100010O01O001O2N1O1O2N1PcNfL22:OZ[1o3bdNTL][1m3`dNTL`[1o3YdNULh[1Y4100O010O10O02N1N2N2M3M3O1O1O1O1O1M3K6K4K5F:O1O1O2N101N1N3N1N3N1cN_bNIc]14bbNHf]1ZOPbN7?;f]1WOSbNNm0=[cfi1\"}}, {\"image_id\": 113, \"category_id\": 1, \"bbox\": [30.999704360961914, 534.9515991210938, 529.54150390625, 299.94854736328125], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [1553, 2048], \"counts\": 
\"Qcb1i0b_18K4M4N1N2N2N2O1N3N1N3N2M3N3L6K6I8H?B5J7J9G2M2aeNULVX1l3ggNYLVX1h3ggN^LTX1d3igN`LUX1a3igNeLRX1]3jgNiLRX1X3`gNXM]X1o2UgNZMiX1Y5N1O2N1O1O2N2VhNkHTV1V7jiNmHSV1V7jiNlHUV1U7jiNlHUV1V7hiNmHVV1T7hiNnHWV1S7hiNnHWV1S7giNoHXV1R7giNoHXV1P7jiNlHYV1S7jiNiHXV1V7kiNdHYV1[7_1O100O100O10000O10000O1000000O10000000000000001N1000000000001O000O11O0010O01O001O1O010O1O1O1O10O01O1O1O001O0010O01O1O1O2N1O5K4L4L5K3M1O1O1O1O1O1O001O1O1O1O1O1O1O1O1O1O001O1O001O1O00000000000000O010eMnhNWKRW1f4XiNSKhV1i4_iNTKaV1j4ciNTK]V1k4fiNRK[V1n4giNPKYV1o4iiNPKWV1o4miNnJSV1R5QjNjJoU1U5UjNgJlU1Y5VjNeJjU1[5XjNbJhU1_5YjN_JhU1a5ZjN\\\\JgU1d5[jNYJfU1g5\\\\jNVJeU1j5a20000O1000O10000000000000000O100000000O100000001O0O1000000000000000000000000000001O00000000000000000000000O100000000000O100000000000000000000000000O1000000000000000000000000O100000000O10000O10000O1O100O100O10000O1000000000ZgNoIlV1Q6TiNQJjV1o5UiNTJiV1l5ViNVJiV1j5ViNWJjV1i5UiNYJjV1g5UiN[JjV1e5TiN]JlV1c5SiN^JmV1b5RiN`JmV1`5RiNaJnV1`5PiNaJPW1_5ohNbJQW1^5ohNcJPW1]5ohNdJQW1\\\\5ohNdJQW1\\\\5nhNeJRW1[5ohNcJRW1^5mhNaJTW1_5lhN_JVW1b5jhN\\\\JWW1e5ihNYJXW1g5hhNWJZW1j5jhNoIXW1R6c11O1O1O1O1O1O1O006K5J1O2N1O1O1O2N1O2N1O001O1O0010O01O001O00001O1O1O1O3M3N3L3M4L3M2N1O1O1O1O10O01O0010O00001O0010O0001O01O0001aNlgN`JSX1\\\\5RhNcJnW1Z5VhNeJjW1Z5XhNfJgW1Y5[hNfJfW1X5\\\\hNgJdW1X5^hNfJcW1Y5_hNeJbW1Z5ahNbJaW1]5bhN`J`W1]5dhN_J^W1`5ehN\\\\J]W1c5fhNYJ\\\\W1f5ghNPJaW1P6a1N100O101N100O101O0O2O0O2O1O1N2N3N1N3M2O2M2O0O2O1O0O2O000O2O001O001O1O1O1O2N2N1O1O2N1O1O0O2O1O010O10O010000O1010O5L3M1O000_dN\\\\KR[1e4fdNcKY[1l4O1O0ReNjJ`Z1W5ZeNnJeZ1`5000001O010O1035J9E1N100O1O00O01000O01O001O001O1O1O1O2N2N3M1O2N1O2N001O1O001O0O2O0O2O1O1N2O1N2O1N2O0O2_NdcNkN]\\\\1n0lcNnNU\\\\1n0RdNoNo[1n0VdNnNk[1P1XdNoNh[1o0\\\\dNmNg[1o0]dNlNg[1R1]dNhNh[1U1\\\\dNdNi[1Z1[dN_Nk[1]1g1B=L5K5XOW[[W2\"}}, {\"image_id\": 113, \"category_id\": 1, \"bbox\": [962.0658569335938, 505.0049743652344, 732.9113159179688, 899.2008056640625], \"score\": 1.0, \"association_id\": 3, \"segmentation\": {\"size\": [1553, 
2048], \"counts\": \"a]]_13]`14K5L4K6J5K6J7H8I4K4L5K4K5L4M3L4M3M3M3M3N1N3N2N1O2N1O2M2O2N1O2M2O2L3L5K4L5K5J6SMYLihNl3kV1hLehNa3XW1jLngNh3PX1^2N2N2N2N3M2N2O1N2N2O1N001O1O001O001O00001N10001O001N10001O0O2M2O2M2O2M2O2O1O0O2O1O001N2O001N2O1N101O1O1O010O1O10O01O100O010O100O1O10O0100O100O100O100O10O0100O100O100O100O1000O0100O1000000O1000000O1000000O1000000O10000O1000000O10000001O001O0010O0001O001O00001O0000001O0000001O0000001O00O101O00000000001O00000000001O000O10001O000000001O001O00100O1O1O1O2O0O2N2N3M2N3N1N3M2N2N2N2N2O1N2N1O2N2N1O1O2N2N2N2N2N3M3M3M3M5K8H=C=C6J5K5K4L4L4L3M2N1O2N100O1O1O2N1O2N1O2N2N2N2N3M2N3N4K4L5K4L3M3M3M3M3M3M3M3M3M3M3M4L3M4L;Ek0UO4M3L5K5K5K6J3M3M2N2N2N2N1oXOS\\\\O\\\\a0oc0`^OU\\\\O^a0lc0`^OV\\\\O_a0lc0]^OW\\\\Oba0jc0\\\\^OX\\\\Oca0ic0[^OY\\\\Oda0hc0Z^OZ\\\\Oea0gc0Y^O[\\\\Ofa0gc0V^O[\\\\Oja0fc0T^O[\\\\Ola0gc0Q^O[\\\\Ona0fc0o]O\\\\\\\\OQb0fc0l]O\\\\\\\\OSb0fc0i]O\\\\\\\\OWb0gc0d]O[\\\\O\\\\b0hc0_]O[\\\\O`b0gc0[]O\\\\\\\\Oeb0gc0U]O\\\\\\\\Okb0gc0o\\\\O]\\\\OPc0fc0i\\\\O^\\\\OWc0ec0b\\\\O_\\\\O^c0dc0\\\\\\\\O_\\\\Odc0dc0V\\\\O`\\\\Oic0cc0Q\\\\O`\\\\Ooc0cc0k[O`\\\\OUd0ec0d[O]\\\\O\\\\d0hc0\\\\[O\\\\\\\\Ocd0hc0W[OZ\\\\Oid0jc0Q[OX\\\\Ood0kc0lZOW\\\\OTe0lc0fZOX\\\\OZe0jc0WZOb\\\\Oie0ac0iYOj\\\\OWf0Wc0cYOn\\\\O]f0cf000000000000000000000O10000000000000000000000000000000O10000O1000000`LdZOj[O]e0Td0jZOg[OVe0Wd0P[Oe[OPe0Yd0W[Oa[Ojd0^d0\\\\[O][Odd0ad0c[OZ[O]d0ed0h[OV[OYd0hd0m[OT[OSd0U`0dYOQC]2fLPd0V`0iYOPC[2gLlc0V`0oYOoBX2iLic0U`0UZOnBV2iLfc0V`0YZOnBT2jLcc0V`0^ZOlBR2kLac0W`0aZOkBP2mL_c0V`0eZOkBn1lL^c0X`0fZOjBn1mL\\\\c0W`0jZOiBm1mLZc0X`0mZOhBk1nLYc0X`0P[OgBj1nLWc0Y`0S[OfBh1oLVc0Y`0W[OcBg1PMTc0[`0Z[OaBdh0^=aWO]B`h0a=fWOYB\\\\h0f=jWOPB[h0n=lWOgAZh0X>mWO\\\\AYh0c>UXOi@Rh0V?cXOo_Odg0P`0_4O1O00100O1O100O1O100O100O11O0000001O000000001O000000001N10000000001O00000000001O1O0O2O1O1O1O1O1O1N3N1O2N2N2N2oXO]_Oo=f`0Q]O\\\\_Oe08Xb0^`0o\\\\Of_O84hb0W`0m\\\\OP@K2Vc0P`0k\\\\OW@B0bc0j?i\\\\O[@_OOgc0g?g\\\\O_@]OMjc0f?g\\\\O`@[OMmc0d?f\\\\Ob@XONQd0a?d\\\\Oe@XOLSd0`?c\\\\Og@VOKWd0_?a\\\\Oi@
TOKZd0\\\\?a\\\\Ol@QOJ]d0\\\\?_\\\\On@POHad0[?]\\\\OPAnNGed0Z?Z\\\\OSAlNFjd0Y?W\\\\OUAiNEPe0W?T\\\\OYAfNDUe0T?R\\\\OkDnc0W;m[OlDSd0V;e[OPEZd0S;[[OVEed0l:oZO^EQe0Qc001O00000O100000001O0O1000000000000000000000000000000000000000000000000000000001O0O2O0O2O0O2O0O2N1O2O0O2dM_YOeZObf0Qc0b\\\\OT\\\\O`c0Qb0f^Ob]O\\\\a0oa0Z_Oh]Og`0ma0j_Ok]OY`0Qb0P@h]OQ`0Vb0V@c]Om?Zb0V7N3N1O2N2N1O2N1O1O2M2O1O2M2O1N3M2M3N3I6J6J7J5J6L4N3M2N2O2M2O1O2M2O3MR1nN2N3L3N2M4L3M4L3L5F9_Ob0WOh0[Of0K5J5M4K5L3M4L4M3L3N3M2M3N3M2N2M4L4L3L5K5L4J6K5J7I7I7J6K5K5L4K5L5K4L4L5K3M2N2N2O1N3M2N3M3L4M3L4M4K5J6H9E=D<E:E:G:F8H9G;F9G8I7H8D<A?AXf^b0\"}}, {\"image_id\": 113, \"category_id\": 1, \"bbox\": [826.242919921875, 505.2323303222656, 371.7904052734375, 260.0967102050781], \"score\": 0.9999997615814209, \"association_id\": 0, \"segmentation\": {\"size\": [1553, 2048], \"counts\": \"\\\\\\\\UW1c0[[1I]hNW1aV1DSiNe0dV1GPiN>kV1HohN<nV1HnhN;nV1JjhN=SW1FghN>WW1FfhN:ZW1IchN6]W1N`hN2`W11]hNNcW17YhNHiW1=QhNBPX1b0lgN]OVX1f0fgNZO[X1h0bfNnL:0FZ2^Y1W1ffNVO[Y1m0_fNUObY1l31O001M3N2L4N2N2M3N1M4M3M300O100000000000000O100000000001O001O2N3M4L5K5K:F3hfNbIWX1_6egNhIWX1Z6egNiIZX1X6cgNjI]X1W6`gNlI_X1S70000000001O0000000000000000000000O1000000O10000YOcgNYI^X1b6igN\\\\IWX1a6mgN]ITX1a6PhNZISX1d6QhNWIRX1e6o0M3M3M3N2N2O1O100O1O1O1O1O1N2O1O1N2O1O1O100O100O0100000O100000000O100000000O10000000000O1000000O10000000000000000000000000000001O000000001O000000001O000000000001O00000001O00000000000000001O0000000O1000001O00O101O00000000000O1000000000000000000O10O10000000000000O100000001O000000000000000000000000000001O0000000000000000O1000000O100O100O1O1N2N2N2L4K5K5K5I8J5M3O1N2N2O1O2N1O1N3N1N2N3N1N3M2O1O2N1O1O2N1O1O1O2N1O1N3M2N2N3N1N2O2N2M2O2M3M3M2O2N2N2O0O2N100O2N1O2N1O2N1O2N101N2O001N2O1O1O1N2O1O1N2O1O1O1N2O1N2O2M4L3M3M6J5Kgn\\\\Y1\"}}, {\"image_id\": 113, \"category_id\": 2, \"bbox\": [1727.0523681640625, 720.8770141601562, 306.5916748046875, 83.828857421875], \"score\": 0.9999990463256836, \"association_id\": 4, \"segmentation\": {\"size\": [1553, 
2048], \"counts\": \"Smka2;Q`18K3N2N101N100O2N1O2O0O100O2O0O100O1O2O0N2O1N2O1N2O1O2O000O1000000O2O000000000O2O0000000000001O0000000000000000001O0000000000000000010O0000000000000000000001O00000000000000001O00000000000000000000001O0000000000O1000000000000O1000000000000000000O10000000000000000O100000000000001O01O000000000000000010O000000010O000001O0001O01O000000010O00000010O0001O01O000001O01O000001O01O0001O01O0000000010O000001O0001O00000000000001O0000000000000000000O1000000000001O0O1000000000000000000O100000O1000000000000000001O0O10000000001O000O2O1O00001N10001O001O1O1O001O1O001O001O00001O1O2N1O1O001O4L1O001O1O2M2O1O1O1O1O001N3N2L4Fo_NM\\\\Pi0\"}}, {\"image_id\": 113, \"category_id\": 1, \"bbox\": [1496.1806640625, 469.3450622558594, 259.5400390625, 249.44619750976562], \"score\": 0.9997084736824036, \"association_id\": 4, \"segmentation\": {\"size\": [1553, 2048], \"counts\": \"`ZmV2\\\\1g^1g0D6K4M3M2O1N2N101N2O0O2O001N2O1N2O001N2O1O1N101N2O1N2O1N2O1N101O2N2N1O2O0O1O1O1O1O1O001N2O001O1O001O002N1O2N1O1O1O1O001O001O001O001O001O00100O1O2N101N1O100O1O00001O0000001O00001O2N1O2N1O2N1O2N1O1O1O1O1O1O1O001O1O1O1O1O1O1O1O1O1O1O1O000010O000000000000000001O0000000000000000000001O000000000000000000000000010O01O001O001O100O0O2O00001O001O00001O00100O1O1O1O1O1O001O1O1O2N5K8G8I3M2O1N1O100O001O01O010O1O1O3M2N3M2N2N1O1O2N2N3M4L3M1O1O1O1O1O1O1O6J5K2N2N2N1O1O2N2N3M9G3M2N1O1O1O2N7I7H>C3M3M2N2N1O1O2N2N2N2N2M2O1O1O2M4L4XOZ1eNfWj=\"}}, {\"image_id\": 113, \"category_id\": 2, \"bbox\": [630.625244140625, 657.126220703125, 153.1202392578125, 23.79559326171875], \"score\": 0.9679056406021118, \"association_id\": 2, \"segmentation\": {\"size\": [1553, 2048], \"counts\": \"[gVn01``11N2N10O0100O1O1O1O1O2N100O10000O100001O00001O0000000001O0001O0000001O0000001O0000001O000000000000001OO1000000000000000O10001O0000001O001O002M1O_o[11aPdN1N10001O0O101O000000001N100000000000001O000O100000000001N1000001O0O2O0O101O1NVmSl1\"}}, {\"image_id\": 114, \"category_id\": 1, \"bbox\": [245.34320068359375, 
325.2181396484375, 474.3475341796875, 339.47705078125], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [929, 1600], \"counts\": \"d`c7>bl02N3M2N3M3L4M3L5K5G:EP1VO5K5K4L5L2M3N2M3N1N2O1N2O2M2N3M4L3M3M3M2N2O1N2O1O0O2O1O1O1O001O1O1N101O1O001O0O2O001O1N101N2N2O1N2N2N2O1N2O1O1N2O1N2O001O1O1O1O1O1O2N2N6JS1mN;D6K5K1O1O1O1O001O1O001O1n\\\\OeG\\\\a0\\\\8_^OiG`a0X8Z^OmGfa0U8S^OQHka0S8m]ORHSb0S8a]OUH^b0W901O00001O00000O10000000000000WN^]OSIab0X6_]OaH7T1Zb0Z6Z^OeIfa0Z6]^OdIca0[6_^OdIaa0[6a^OdI_a0[6c^OdI\\\\a0\\\\6e^OdI[a0\\\\6f^OcIZa0\\\\6i^ObIWa0]6k^ObIUa0^6l^OaITa0^6o^O`IQa0_6Q_O`Io`0_6T_O_Il`0`6W_O]Ij`0b6X_O]Ih`0c6Z_O[If`0d6\\\\_O[Id`0e6]_OZIc`0e6__OZI``0g6a_OWI``0h6a_OXI_`0h6b_OWI^`0i6b_OWI_`0h6b_OVI_`0k6a_OTI_`0l6b_ORI``0n6`_OQI``0P7`_OnHa`0R7g2100O2N1O2N100O2N1O1O1O001O1O100O2N2N2N4L7I4L1O1O1O1O1O00100O1O1O1Oj0VO1OYMS]O[Kmb0b4[]OYKdb0d4d]OXK[b0f4h]OYKXb0f4k]OYKTb0e4o]OZKQb0e4Q^O[Koa0c4R^O]Kna0b4T^O]Kla0c4U^O]Kka0a4W^O^Kia0b4X^O]Kia0b4X^O]Kia0a4Y^O^Kha0a4Z^O]Kga0b4Z^O]Kfa0c4[^O\\\\Kfa0b4[^O^Kea0b4\\\\^O]Kea0b4\\\\^O]Kda0c4\\\\^O]Kea0b4\\\\^O]Kda0b4^^O]Kba0c4^^O]Kca0b4^^O]Kba0c4^^O]Kba0c4^^O\\\\Kca0d4^^O[Kba0e4_^OZKaa0f4_^OYKba0h4^^OWKba0i4_^OUKba0k4`^OSKaa0l4`^ORKaa0o4_^OPKaa0P5a^OmJ`a0S5a^OlJ_a0T5b^OjJ_a0W5a^OhJ_a0X5b^OfJ_a0Z5b^OdJ_a0]5a^OaJ`a0_5a^O`J_a0a5a^O]Jaa0b5`^O\\\\Jaa0e5_^OYJba0h5_^OUJca0j5^^OTJda0l5\\\\^ORJea0n5]^OnIfa0R6[^OjIha0U6c200O1O10O01O0010O00000010O01O0000100O001O100O001O10O001O100O1O1O001O10O01O001O000010O0001O1O001N2O3M3M3M2N2N1O1O1O001O001O1O001O1O1O2N2N3M3L3N2N1O1O1O1O001N101N2O1N2O2M3N1N2N2O1N101N2O000O100O101N1O100O1O1O1O2N1O1O1O100O2O000O10000O2O0O100O101N1O101N1O1O100O1O100O1O100O1O1O100O1O1O1O1O100O2O000O100O2O00000O2O00000O10001O001N101O001O1O1N101O1O1O1O1N2O1O001O1O1O001O2M2O2N2N2N1O1O2N1O1O001O1O1N2O1O1O1O1O1N2O2N1O1N2O1N2O1O1N3L3M4KU]Ri0\"}}, {\"image_id\": 114, \"category_id\": 1, \"bbox\": [677.0433959960938, 246.6441650390625, 814.1001586914062, 418.54150390625], \"score\": 0.999998927116394, \"association_id\": 2, 
\"segmentation\": {\"size\": [929, 1600], \"counts\": \"mZjd0R1fj0]1XOg0H8I7J5L4M3M3M3N1N3M2O2M3M3N1N2O2M2N3N1N3M2N2N2M4M2N2N2M3N3M2M3N2N7I8H4L101N2O1N101N2O000O101N10000O100O100O2N100O1O1O1O100O1O100O2O0O100O10000O10O1O010O0010O01O001O01O@`001O00000000001O001O010O1O00100O001O10O01O001O1O00010O00000000001O0001O010O010O10O01000O01000O10O1000O0100O10O0100O1O001O100O00100O1O01000O010O1000O10O1000000O0100000O10000O10O010000O100O1O100O010O100O10000O1000000O1000Oj^OPI]<P7bCRI]<n6cCTIZ<m6eCUIZ<k6fCVIY<j6fCXIY<h6gCZIW<f6hC\\\\IW<d6iC]IU<d6jC^IU<b6kC_IT<a6kC`IU<`6kCaIT<_6kCbIU<^6kCbIT<_6kCbIU<^6kCcIT<]6kCdIU<\\\\6kCdIU<\\\\6jCeIV<[6jCeIV<[6iCgIU<Z6kCfIU<Z6kCfIU<Z6jCgIV<Y6jCgIV<X6jCiIV<W6jCiIV<W6iCjIW<V6iCkIU<V6jCkIV<U6iClIW<T6hCmIX<S6hCmIX<S6gCnIY<R6fCoIZ<Q6dCQJ[<P6dCQJ\\\\<o5cCSJ\\\\<m5cCTJ]<l5bCUJ^<k5`CWJ`<i5_CXJ`<i5^CYJb<g5]CZJc<e5]C\\\\Jc<d5\\\\C]Jd<c5[C^Jd<c5[C^Je<b5ZC_Jf<a5YCaJf<_5YCbJf<_5YCbJg<^5XCcJh<^5VCcJi<^5VCcJj<]5UCdJj<]5UCdJk<]5RCeJm<\\\\5QCfJo<Z5oBhJQ=Y5lBiJS=X5kBjJU=V5jBkJV=U5hBmJX=T5eBnJZ=S5dBoJ\\\\=R5aBPK_=P5_BRKa=o4[BTKd=n4YBTKg=l4VBWKj=j4SBXKm=i4QBXKo=h4oAYKQ>i4mAXKS>i4kAXKU>h4jAYKV>h4hAYKX>h4fAYKZ>g4eAZK[>g4cAZK]>g4aAZK_>f4aAZK_>g4_AZK`>g4`AYK`>h4^AYKb>h4]AXKc>h4\\\\AYKd>h4ZAYKf>h4XAYKh>g4WAZKi>g4UAZKk>g4SAZKm>f4RA[Kn>f4PAZKQ?f4n@[KR?f4l@[KT?f4j@[KV?e4i@\\\\KW?e4g@\\\\KY?d4f@]KZ?d4d@]K\\\\?c4c@^K]?b4b@_K^?b4`@_K`?a4_@`Ka?a4]@_Kd?a4[@`Ke?a4Y@`Kg?`4X@aKh?_4X@aKh?^4Z@aKf?_4Z@`Kg?`4Z@_Kf?a4[@^Ke?a4\\\\@_Kd?a4]@]Kd?c4\\\\@]Kd?c4]@\\\\Kc?c4^@\\\\Kc?d4^@[Kb?e4^@ZKc?f4^@YKb?f4_@YKb?g4_@XKa?h4_@WKb?i4_@VKa?i4`@VKa?j4_@VKa?j4`@TKa?l4_@TKa?l4_@SKb?l4`@SK`?m4`@YJWORNZ`0e7_@UJ\\\\OUNU`0f7`@PJ@XNQ`0g7`@mID[Nl?h7`@kIG[Nj?j7`@hII]Ng?j7b@fIJ^Ne?k7c@eIJ_Nc?k7e@bILaN`?l7f@`INaN]?m7i@^IMcN[?n7j@\\\\IOcNX?o7m@YId`0f6__OVId`0g6a_OSIb`0l6b_OnHa`0Q7c_OiH``0U7e_O]Hf`0b7]2N200O100O100O100O10000O10000O2O000O10000O10000O100O100O101N100O100O1O100O100O101O0O100O1000001N10001O000O2O001O000O2O00001O0O10001O0O10001O000O1000001N10000O100O100O100O1O100O100O10000O100O1000000O2O00000O100000
0O100O100O100O100O1O1O100N2O100O1O1O101N100O100O10000O10000O100O100O1O2O0O1O1O1O1O100O1O101N1O100O10000O100O2O000O101N101N101N1O2O0O2N1O2N101N100O2N101N100O101O0O100O2O000O100O101N1O100O1O2O0O1O101N1O101N100O101N101N100O2O0O2O001N101N1O101N100O2O0O101N100O2O0O10001N100O10001N1000000O2O00000O10000O10000O10000O100O100O100O100O100O100O10000O10000O10000O1000000O10000O10000O10000O10000O10000O10000O1000000O10001O000000000000000000000000000000000000000000000000001N10001O000O2O001O001O1N101O1O001O0O101O001O000O2O00001O00000O2O00000O2O00001N10001O0O101O0O2O001N3N2M3N1O2M3M2K5J7JfX_3\"}}, {\"image_id\": 114, \"category_id\": 2, \"bbox\": [246.6239471435547, 631.8240966796875, 355.48468017578125, 60.4345703125], \"score\": 0.9999903440475464, \"association_id\": 1, \"segmentation\": {\"size\": [929, 1600], \"counts\": \"cdQ79fl03O1N101O000O101N100O101N10000000000O100000001N100000000O1000000000000000000O100000000000O1000000000O100000000000000O10O100000000O1000O100000O1000000O100000000O10000000000000O1000000000000000000001O01O0001O00001O00001O001O2N1O3M1O1O1O1O001O1N2OWZj0KmeUO3N1N100O100O2O0O1O1O1YOK`TO7`k0NYTO4gk0MSTO8lk0?00000000000000000000000000001O00000000O100000000000000001O00000O2O00001O00001O00000O10000O100000000O100000000O1000O0100O0010O10O100O010O10O10O10O1O1O0O2O1O1000000O10000O100O10O010000000000001O0000001O0000003SOmSOf0Sl0ZOnSOe0Rl0[OnSOe0Sl0ZOnSOe0Rl0ZOPTOe0Pl0[OQTOd0Wl0O1O1O000000000000000O10O10000O100000000O100O100O100O1000O100000O1001O0000000000000000000000000000001O000O10001O00000000001O0000001O001O00001O0O101N1O3MheYl0\"}}, {\"image_id\": 114, \"category_id\": 2, \"bbox\": [682.3458251953125, 631.2145385742188, 689.0045166015625, 59.509765625], \"score\": 0.9999624490737915, \"association_id\": 2, \"segmentation\": {\"size\": [929, 1600], \"counts\": 
\"g]dc03nl00O2O00001O0000001O0O101O00001O000O2O000000000000001O0O100000000000001O00001O00001O0000001O000000000O110O000000001O00001O0010O01O001O1O00001O000001O00000001O00000000010O000000001O0001O01O000000000001O01O000000000000010O00001O00010O0000001O000001O0001O00000000001O01O0001O00001O001O0000001O000001O000000000000000001O00000O10000000001O000000000000000000000000000000000000000O1000000000000O100000000O11O000000000000000[OUTO4kk0KWTO4jk0IYTO6gk0G]TO8ck0H^TO7bk0H`TO7`k0I`TO6bk0GaTO8Tl00000000001O00000000000000001O00000000001O01O0000000000000001O000000000O1000000O2O0000000O10000000000O1000000000001N10jZn21TeQM2L4O0O2M2O101O0O10000O2O000000000000000000001O000000000000001O000000000001O0001O0000000000000000000000001O0000000000000000001O0000000001O00000001O0000000000000000001O0001O0000000000000000000000000000000001O000000000000000000000000000000000000000000000001O0000000O1000000000000000000000000000000000000O10000000000000000O1000000000000000000O1000000000O10O10000000000000000000000000000O100000000000O1000000000000000000000O10000000000000000000000000001O000000000000000000000000000000000000001O0000000000001O000000001O000000000O101O0000000000001O0000001O001N10001O001N101O1Ojik6\"}}, {\"image_id\": 114, \"category_id\": 2, \"bbox\": [885.7000732421875, 642.48291015625, 484.4842529296875, 40.34356689453125], \"score\": 0.9475799798965454, \"association_id\": 0, \"segmentation\": {\"size\": [929, 1600], \"counts\": 
\"e[Ui07gl04CGPTO:hk00RTO4lk0MQTOE0>ok0a0O101O00000000[OSTOD0?mk0MSTOD0?mk0MSTOC1`0lk0MVTO3jk0JSTOG4>jk0JRTOH5=ik0KRTOG7=gk0L[TO4ek0IRTOK9<ek0IRTOK9<ek0IRTOK9<ek0ISTOJ8=ek0I_TO6ak0J_TO6ak0J_TO6ak0J_TO6ak0J_TO6ak0J_TO6ak0J_TO6ak0J_TO6ak0J_TO6bk0I^TO7bk0I^TO7bk0I^TO7bk0I^TO7bk0IRTOK9<ek0IRTOK9<ek0IRTOK9<ek0IRTOK9<ek0IRTOK:;dk0J_TO6ak0J_TO6bk0FQTOO>9bk0HaTO8_k0HaTO8Ul0O001O001O001N2O1O1O0Oeme31WRZL5M4M1O1O1000001O00000O10000000000000000O2O0000000000000000000000001O01O000000000001O00000000000000001O00000000000000001O00000000001O00000000001O0000000000000000001O000000000000000001O01O000000000000000000000000000000000000000000000000000000000000000000O10000000000000000000000O1000000000001O000000O1000000000000000000000000000O100000000000000000000O1000000000000O100000O10000000O1000000000000O10000000000000000O10000000000000000000000000000000000000000000000000000000000000001O0000000000001O00000000000000001O000000001O000010O000001O000000001N101O001N2O1O1N1O2Oiik6\"}}, {\"image_id\": 115, \"category_id\": 1, \"bbox\": [18.78205680847168, 91.70921325683594, 332.5877380371094, 483.83453369140625], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [799, 533], \"counts\": 
\"\\\\Qf06[h0h0_O>C8I7K5L4L3T[OYN\\\\b0j1^]O_N\\\\b0d1`]ObN[b0b1`]OeNZb0_1`]OjNYb0_1\\\\]OjN]b0R4J8H4L3M2N3M2O1O1N2O2N2N2N2M3N1O1O1O1N20O01O2N1O2O1N2N2N1O1O1O1O001O1O1O1O2N2N2N1O2M2O1O1O1N2O1O1N2O1N3N2M2O2M2N2M3N3M2O2M2N3N2M4M3L4M2N2N3M4L6I8I6J3M3M2N1O2M2O1O1O2M4RDXE_9k:PFjEi9Z:oEkEo9Z:iEkEV:^:]EgEb:\\\\<N2O1N1O100O1O1O100O1O1O2O0O100O0100oLSFPGm9j8lFcFS9W9WGfFi8T9_GjFa8Q9fGlFZ8Q9kGnFU8n8QHPGo7j8XHUGh7g8]HWGd7i8^HUGa7k8bHRG_7n8dHnF]7Q9hHjFY7V9kHeFV7[9oH\\\\FU7d9R4100000O1001N100000000000000O10000000001O00000000000O10000010O000001O000000001O00010O2N1O1O2N7J:Eb0^O:F8H2N2N3N2M3M;UDnCS:a=N1O1O1O100O00000010O00001O00001O00001O2N1O2N1O1O1O1O1O001O001O000000001O0000001O00001O1O2N1O2N2N2N1O1O1O1O001O1O1O1O1O2N1O2N2N2N1O1O1O1O1O2N2N2N3M2N1O1lKmCZKT<b4XDUKi;h4\\\\DUKe;g4cDSK`;g4iDTKX;f4REUKP;^4_E^Kc:V4jEgKY:m3TFnKo9k3YFRLi9i3]FTLd9i3`FULa9h3cFUL_9h3eFVL\\\\9h3fFVL\\\\9g3hFVLZ9f3lFVLW9e3oFVLU9d3UGPLS9j3ZGfKP9R4S6K5K5M2M3N2N2N3L4L7I8E;F7H8I6K5L3M4M2M4K7I:@nSg4\"}}, {\"image_id\": 115, \"category_id\": 2, \"bbox\": [170.22518920898438, 424.1558837890625, 315.79376220703125, 154.3421630859375], \"score\": 0.9999973773956299, \"association_id\": 0, \"segmentation\": {\"size\": [799, 533], \"counts\": \"llU4c0[h05K3N1N3N2M4M2M3M2O3L3N2M2O2M3N1O1O1O001O1TYORNef0U2O1O001O0000000001O0000001N1000001N1O2H9L5Go0hNlWOMTbm1:TVSN<E5L3N2M3N1O2O1M4M2N6J;E4M2M3N1N101O1O1O1O4Lb0^O5K2N1O001O00O10000O10000O10000O10000000O10O1000000000000O10001N100O1O100O100000000O1000000O10000O100O10000O10001N100000000O1000000O10000O1O10000O100000O10000000000000000000000000001O000000001O001O001O00001O0000000010O000000010O01O000010O001O001O1O01O01O00001O0O2O001N2O1N2O1N101O1N101O1O1N3N2N2M2O1O1N101N101N2N2O1N2N101N1O2O0O2O1N3M5HgVT1\"}}, {\"image_id\": 115, \"category_id\": 2, \"bbox\": [350.2601623535156, 345.45281982421875, 82.83270263671875, 29.99554443359375], \"score\": 0.9999005794525146, \"association_id\": 1, \"segmentation\": {\"size\": [799, 533], \"counts\": 
\"U^a87gh03M1O101N100O10000O101O0O10001O0O100O10001O00001O001O001O0001O01O01O00001O0000001N1000001O001O00001O0000001O001O0000001O10O01O001O00001O00001O1N2O1O2M3MW[b2\"}}, {\"image_id\": 116, \"category_id\": 1, \"bbox\": [66.2243423461914, 144.08018493652344, 160.8203125, 394.4599609375], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [608, 738], \"counts\": \"TQ[1j0Sb07J5L2O0O2O1O0O2N2M4K5L4L4L3O0O2N100O2N1M3L4J6L4M3N2M3M4K4M3N3M3L5J7J5K5M2N2M3L5K4L5M4M3L4M5K4K4M2N3M8Hk0UO5K3M4L5K8H4K3N2N1YGRHZ6Q8]IWHa6o7RIZHl6j7nHYHQ7j7iHZHV7h7gHYHY7k7`HXH_7o7XHTHh7b9O1aNlERJT:l5PFQJQ:n5RFPJn9o5VFmIk9R6ZFiIg9W6fF\\\\IZ9c6b1000000000000000000000000000000000000000000000000000O1iNaImE_6j9nIRFR6l9RJQFo5l9XJnEj5n9dJgE]5S:k1L4N2O1N2L4L4M3N3]GTGn6n8oHWGm6l8PIWGh6P9UITGc6T9ZIoFa6V9\\\\IlFb6X9RISGj6d:K5_O`0I8J7K5J6H8G8K5L4L5J6G:D;kN_A_Mk>U2W1G:I8H>@?@?Colc9\"}}, {\"image_id\": 116, \"category_id\": 1, \"bbox\": [478.2345275878906, 79.07388305664062, 186.69772338867188, 421.9306945800781], \"score\": 0.9999998807907104, \"association_id\": 2, \"segmentation\": {\"size\": [608, 738], \"counts\": \"lRY99ab0c0BLo]OFea0:_^OD`a0;a^OFd0Dm?g0__OG?LKOU?=b@I:m0P?ZOf@K6Q1o>UOk@L2S1P?SOk@M3R1o>0o@1n>4o@Mo>T2M2M3O10O1O1O1O1fMTA>n>@WA<g>GXA<d>G[A<a>F]A>_>C`A?^>C`A`0]>T2M3M5K8G<Db0^O7I5K5L6J[1eNc0]O;D9H5J7H9C`0B:H5M3M1010O0000000000000000000000O1O1O100O1O1fNkDhJV;l4XERKg:m4]EPKc:P5`EmJa:R5bElJ]:T5fEiJZ:W5iEfJg8OSG\\\\57cJg82PG\\\\5:`Jk80lF_5a0WJj84fFe5P;VJREj5Q;PJREo5P<0001O1O001O001O000000001O0001O00001O001O001O2M3N3L3N2M4QNoIhFU6j8[JoFo5]8jJlFg5b8e2lNT1H6K6J6J6I6J5K6K4L4K4M4XOUCTKQ=`4n0H8K5N3000O1O2N1O002N2N2O1O01O0O1N101N2nKcA^3]>[LkAc3m>L4L6J6J7I6J4L3M4L9F9G8H:E9G9D=BPX\\\\1\"}}, {\"image_id\": 116, \"category_id\": 2, \"bbox\": [303.7420349121094, 514.3583984375, 163.66473388671875, 28.832763671875], \"score\": 0.9999918937683105, \"association_id\": 0, \"segmentation\": {\"size\": [608, 738], \"counts\": 
\"Tce51nb02O000000001O01O00000001O0001O0001O0000000000010O0000000000000000000000001O000000000000001O00001O000O10001O01O01\\\\]OH^b0=01O000010O0000001O0000001O00000010O0000000001O0000000000001O000000001O000O10001O000000001O000000010O00000000000001O01O00000001O0001O000001O000001O01O000001O01O000000010O0001O001O0000001O001O000O2NWnS5\"}}, {\"image_id\": 116, \"category_id\": 1, \"bbox\": [351.938232421875, 99.67792510986328, 170.1822509765625, 354.492919921875], \"score\": 0.9999910593032837, \"association_id\": 0, \"segmentation\": {\"size\": [608, 738], \"counts\": \"Pal64jb04M20O10O011O4L03KkY25SfMD^i30jVL6_MLmA8i=1TB3e=2ZB0c=2[B0b=2^BN`=5^BM^=7aBJ[=:cBG_7JeKk0kL\\\\O^7KcKo0jLXOb7KaKR1hLVOe7IiJ@aLe1l0UOi7HeJDbLb1l0UOl7FfI[OkMa0A\\\\1o0UOl7FdIAgMg2e0TNP8DcIU3[NYMQ8BcIX3XNYMT8_OcI\\\\3PN\\\\M[8YOdIf3`MZMj8POPHB9W6e7WJPHF8V6g7SJQHH5W6j7QJQHH4X6l7PJoGH3Y6o7oInGI1Y6R8mInGKLZ6W8jImGNIY6[8hIlGOHY6^8eIlG0HY6a8bIiG3HZ6i8VIcG?EY6R:dIoE\\\\6R:cIlE_6T:bIZFn5h9SJXFj5h9YJWFP4QOkLj:WOWFi3WOcLe:J[FX3WOeL[:;^Fm2[OcLU:e0`Fg2]OaLP:m0QFT3OnKn9P1PFV31jKn9R1nEX33fKl9T1PFX34cKk9V1lE]39]Ki9X1kE^3<ZKh9k6WFTIi9m6WFSIh9o6WFQIi9P7WFoHh9S7WFlHi9W7TFjHl9Y1dEX4<`JP:U1kE\\\\40`JT:P1SFb4C_JZ:k0YFW5g9fJ_FW5a9gJbFX5]9hJdFY5Z9fJiFY5U9hJlFX5S9hJnFY5o8iJQG]3mNSMQ:@TGZ3POTMj9CVGY3POTMj9CVGX3QOVMg9DWGV3ROVMf9EXGT3SOXMd9DYGS3TOZMa9D[GQ3UO\\\\M`9BZGR3YO]M[9kNaFEj0S4]O]MX9fNgFGc0V4A^MX9]NgFN2d42RMW9UNhF4Ne47RMi9XNnEg4;RMf9TNQFj49SM[:n2dETMZ:\\\\NPEn3e0iMX:UNXEQ4`0lMV:QN^ER4;PNQ:oMhEP45TNm:k1UESNl:m1UERNk:m1WEQNk:n1XEnMl:n1WEoMl:n1VEQNg:Q2[ElMe:U2]EiMb:Y2aEcMh8aN\\\\Gl30_Mk7bNVG:LG=o3h0[Mg7mNPG7OFa0k3k0YMf7mNQG63C>o3k0WMh7jNSG8n0f3;QM_8RO[Gl3d0ZLU:d3e2N2O1N2M3K5WMc@O3I3;\\\\?[ORA;h0JX?2m1O1O1N2Oj]OL`a04_^ON`a02_^O0ba0M_^O3Yb0M3LkRW4\"}}, {\"image_id\": 116, \"category_id\": 2, \"bbox\": [20.203659057617188, 494.6642150878906, 267.903564453125, 46.243377685546875], \"score\": 0.9999889135360718, \"association_id\": 1, \"segmentation\": {\"size\": [608, 738], \"counts\": 
\"kn<5jb02N10001O0000001O00000O2O01O00000001O000000001O00000000001O0O1000000O101O0O1O10000O2O0000000O1000000O2O00000000000000001O01O00000000001O0000000O10001O001O000O101O0000000000000001O00000000001O0000001O000000000000001O001OO1O1L4M3K6M2O3M10jU41UjK1O000010O0001O1O000001O000010O0001O0000000O2O00mN5R_OJl`0<P_ODPa0=h0101N1000001O010O00001O000000000000O100000001O0000000000001O00000000001O000000001O00000001O0001O000000001O0001O001O001O00001O00001O1O2N1O10O000001O1O1O3M5K1O00000001O1O=C8I2M1O2N001N101O00001N10VUd8\"}}, {\"image_id\": 116, \"category_id\": 2, \"bbox\": [396.8648986816406, 464.97216796875, 171.78756713867188, 62.43170166015625], \"score\": 0.9999862909317017, \"association_id\": 2, \"segmentation\": {\"size\": [608, 738], \"counts\": \"kYc72lb04M2O00001O1O1O00001O010O0000010O01O1O0000001O00001O00000000000O100000HJe]O8Xb0Kf]O6Yb09O1000001N100000001O002N1O001O00001O1O00001O001O00000010O010O001O00100O1O001O010O0010O0000010O0010O01O001O001O1O10O0001O100O1O00001O1O10O01O0001O01O00010O001O0010O0000010O00000001O00010O0001O1Oe0[O2N010O1O1O2M3N1O001O0O2NjnZ3\"}}, {\"image_id\": 116, \"category_id\": 1, \"bbox\": [219.64097595214844, 112.74363708496094, 161.7368927001953, 394.42681884765625], \"score\": 0.999982476234436, \"association_id\": 0, \"segmentation\": {\"size\": [608, 738], \"counts\": \"QmY4j0?XOSa0`1K3L5L6J5J6K3L4M4L5K4^BjMT:Y2bERNY:Q2cETNZ:o1`EVN^:n1[EYN`:o1RE[Nk:T2oCfNn;W4O2N2M4L6K5J5K3M4K6J7I7J4L4M3L:G;SHdF`5d9QJeFm5^9lIgFR6\\\\9iIhFV6\\\\9bIhF^6m:01N1001O001O001O1O001O001O1O1O1O1O1O1O1O0O2O0O2N1N2O2L4J5gNZ1[Oe0WHVFU6P:]IhFU6`9cIeFY6c9^IbF[6U;UNbC`LR=Y3ZCSLT=j3TCdKZ=X4j0M4M1N3N3L4L3M3L6K:F6J3M2N3M4L3M7I3L5G8H8H8J8I7I6@^P_7\"}}, {\"image_id\": 116, \"category_id\": 2, \"bbox\": [331.90478515625, 343.85113525390625, 77.1318359375, 16.449066162109375], \"score\": 0.9995728135108948, \"association_id\": 3, \"segmentation\": {\"size\": [608, 738], \"counts\": 
\"k^U67hb02O00000O100000000000000001OO010000000000000000000000000000000000000000001O0001O0000000000010O000000000001O000000000001N1N3MUZ[6\"}}, {\"image_id\": 116, \"category_id\": 1, \"bbox\": [288.74224853515625, 92.97071075439453, 93.53253173828125, 257.50067138671875], \"score\": 0.9891285300254822, \"association_id\": 3, \"segmentation\": {\"size\": [608, 738], \"counts\": \"U]f5;cb04K4j@Ag;b0QDGn;8RDHn;8RDHo;7QDHS<4oCKW<NQDJVMOh05h:=kD]OQ3o1f7iNSI[1i6fNYEWOX39RL\\\\1\\\\;UOYEXOg38gKi0X;GYEZOk3MkKP1Q;KVEZO\\\\4h0_60QE[O`4d0_69fDWOk4?_6l0`ISOa6n0^IROb6n0^IROb6o0]IQOc6P1^HYNoLd0d:U1UH]NYM;c:Y1SH]NZM8d:]1nG_N]M2f:a1jG_NaMLg:h1cG_NhM\\\\Oo:X2WG\\\\Nh9f1WFYNi9j1UFUNk9U2kEkMU:X2iEhMV:Y2iEgMW:Z2iEfMV:Z2jEfMV:Z2lEdMT:]2lEbMT:_2mE]MU:e2mERMX:Q3j22\\\\AkLY=T3Z1O100O1O1dMX@_1j?^N^@[1e?^Nb@^1b`0K6I7J6J6L5K7H7H6IZ_20j`M7K11N1O0O00001OO2O1O2kfc6\"}}, {\"image_id\": 116, \"category_id\": 1, \"bbox\": [403.805419921875, 89.50951385498047, 139.43841552734375, 285.43060302734375], \"score\": 0.6995548605918884, \"association_id\": 0, \"segmentation\": {\"size\": [608, 738], \"counts\": \"dRc76cb0>I76F2NN3M=B2M100O00ZOW^O7ia0JU^O7ja0`0O1NGX^O^Oea0d0`^OXO`a0h0`^OYO`a0f0]^O^Oca0o010000O10002020L3J4K4M2O2aLYNbEi1]:YN_Ei1a:YNVEo1h:UNlDU2R;PNfDU2Y;mMbDV2\\\\;mMbDT2];nMaDR2_;QN]DQ2a;TNZDn1e;TNYDl1g;VNXDj1h;VNXDi1h;YNWDg1h;ZNXDe1h;\\\\NXDb1i;^NWD`1k;`NVD_1j;`NiCnNTO`2T=ZNPDYOjN\\\\2V=[NQDYOiN[2V<bM^Dj0S1b1a:gMYDg0W1_1b:PNQDa0]1^1c:UNlC=b1[1c:[NiC:e1Y1d:[NhC;d17nMOf<_OhC:d13\\\\NLX<GiC8c12aNNS<GjC9S1\\\\OdNd0`0Oo;GlC9P1_OdNb0a0OY<ZOfCe0m0]OgNe0;0U=MZD4]N2X=J]D4WN5\\\\=E_D6SN7^=BaD5QN:]=AeD0QNa0X=^OjDITNk0Q=ZOoEe0P:ZOTFOWLa0X=@RGe0mKRO]<7lGS1Q8mNWHn0g7PO[HQ1e7kN^HV1b7hN^HX1d7eN\\\\H\\\\1c<100N2O01N10O110O01N1N2J6O2E>]OWX[4\"}}, {\"image_id\": 116, \"category_id\": 2, \"bbox\": [622.7000732421875, 315.66046142578125, 110.4609375, 90.8792724609375], \"score\": 0.4867560863494873, \"association_id\": 0, \"segmentation\": {\"size\": [608, 738], \"counts\": 
\"mjb;:eb02M3L3J7O1O1O1O0O2N1O2O0O2O0O2O1O0O2O001N2O0O2O001N101O001O001O00001O001N101O1N101O00001O000001O001O010O010O010O1O100O01000O00100O0100O0010O001O01O1O10O00001000O0100O1O1O2O0O0010O010N1O1M4XO[Ni_Oo1R`0WNk_Ok1T`0WNg_Om1Y`0=O2O00001O00O2gMg_O12V1X`0fNi_O25S1T`0gNj_OO9X1Wa0Igc2\"}}, {\"image_id\": 116, \"category_id\": 1, \"bbox\": [228.7382049560547, 108.1387939453125, 341.86553955078125, 315.46051025390625], \"score\": 0.3592883050441742, \"association_id\": 0, \"segmentation\": {\"size\": [608, 738], \"counts\": \"PY_41ma05j^O5Pa06c^O1Wa0o0M5K5L01oNl^ON]a0Hm^O0Xb0Hjd:Ia[E0O20N1000_a?2X^@7M4M3L4M2eM^OUBf0f=_OXBb0i=]OWBc0j=\\\\OWBc0k=ZOWBe0j=ZOVBe0l=ZOUBe0l=ZOTBf0T`0N2O001N102MnXk1B_gTN2O2N2fALe95ZF1a90^Fe0l8]ORGi0i8YOSGj0k8XOVFCbMW1W<XOTFCcMV1Z<VOSFDcMV1[<UORFEcMV1[<UORFDdMV1\\\\<VOoEDeMV1\\\\<VOoEDeMV1\\\\<VOoEDeMV1]<UOnEEeMU1^<WOlEDfMU1_<VOkEDhMU1]<WOkEDhMT1^<YOjEBhMU1_<XOfFg0[9YOeFe0]9[OcFb0`9]OaF`0b9@_F9g9GYF5k9KUF3m9LUF1n9MSF1o9ORFNP:1RFLP:3RFJQ:3_4MeAO_>1N1O2N3bN0g_O2U`07d_OKX`0<c_OGj?n0Q@TOk?S1o_OQOl?S2N30N3nNU@iNo?P1\\\\@bNn?:__O;ca0@c^O8ca0Cd^O5Yb0IVU:2kjE2M3H8N7I2ZOYOo^Oi0o`0\\\\Om^Of0b=VO]E7nLd0b=YO^E6lLc0d=ZO^E5lLb0c=]O_EW1_:lN`ET1a:jNbEOmL27C\\\\=;bEJTMOn=5QEFi;9a3M2O1N3NhPV4\"}}, {\"image_id\": 116, \"category_id\": 1, \"bbox\": [331.5833740234375, 104.15526580810547, 114.59368896484375, 279.75323486328125], \"score\": 0.16475322842597961, \"association_id\": 0, \"segmentation\": {\"size\": [608, 738], \"counts\": \"Rbi62ng47kjK02MO1O010O00001O0010O01O00010O10LbX1MbgN201gJNbG2Z89_GG^8?^GBb8>]GDb8;^GGW73mD6l3HT76mD3Y1_O?9Z98jD5Y1\\\\Oc06Y9>fD4X1\\\\Oi02W9b0dD3R1AS1KU9d0cD4m0DZ1CU9f0cD5k0F[1@T9g0fD4i0G\\\\1^OT9i0fD4e0K^1XOV9j0gD3c07V1lNa9f0mDLm0JQ=6fBZOl0Nc<e0c3N3O002N2N4J:FUl7MfT[5\"}}, {\"image_id\": 116, \"category_id\": 2, \"bbox\": [329.4548034667969, 87.97712707519531, 230.01443481445312, 384.83514404296875], \"score\": 0.15637421607971191, \"association_id\": 0, \"segmentation\": {\"size\": [608, 738], \"counts\": \"PVf=\"}}, 
{\"image_id\": 116, \"category_id\": 1, \"bbox\": [479.561767578125, 88.87681579589844, 78.18035888671875, 241.79652404785156], \"score\": 0.10324934124946594, \"association_id\": 0, \"segmentation\": {\"size\": [608, 738], \"counts\": \"RkS95gb05@?VOj0K6O000J6N3N10000O1O1O1nLlMXET2h:kMYEU2i=1lLlMYEU2f:nMWES2h:PNTER2j:QNWEm1Z:bNgE]1V:gNkEW1T:kNjEV1U:lNiEU1V:lNiEU1V:kNlET1S:kNQFS1n9kNUFT1k9mNQFW1o9hNQFY1n9fNSF[1l9aNZF^1e9`N_F3VM3Z<J]EXOlNi0LJk;3WE4\\\\NEIGQ19d;6UEP1CiNY;3ZET1[OhNU>T1kAkNW>R1jAmNS>U1PBjNV;IjE_1SOcNU;1iEX1R<jNYDa0n;Ah31O1000N2M3M4M\\\\Rc3\"}}, {\"image_id\": 116, \"category_id\": 1, \"bbox\": [85.8964614868164, 121.55133819580078, 387.2181396484375, 398.6656494140625], \"score\": 0.07970187067985535, \"association_id\": 0, \"segmentation\": {\"size\": [608, 738], \"counts\": \"Tj\\\\21ob00O101N1O2M2N3M3L4M3M2O2M2O2RFBV1`0eNF0MfH?T7IO8bH1[7L0:^HM\\\\7O48\\\\HJ]716^1FfN9[1CiN<W1@nN?S1\\\\OROc0o0XOVOg0k0RO\\\\Om0d0mNCQ1?lNER1<lNFS1;jNHU18jNJU17jNJU17hNLX14dN0\\\\11]N4d1LXN8h1IUN8l1IPN:P2FlM=U2DfM`0Z2@^Mg0b2[ORMP1n2QOdL[1]3eN^L_1c3QOhKS1Y4oN`KU1a4lNYIZLW1m4b5iNVIZLV1V4VN\\\\L`7T3TIZLU1U4\\\\NYL]7W3RI[LS1S4eNVLX7\\\\3PI[LR1Q4lNSLU7`3lH]LR1l3VOPLP7f1YH5>ZNR1e3[7iMZG@G@b0B6K1d3^8lMXG_OBCa0C6LO]3h8nMVG@]OE`0D5NNV3Q9PNSG@ZOH?D5OMP3X9QNQGCVOI?D41Lk2_9QNoFDUOJ=E32Lh2c9QNPGDQOM<D43Jd2j9RNlFFPOM<E25I`2Q:oMkFHnNO;E16I^2U:mMkFJkN0:G15Ia1KdN`:1iFMhN29G07HZ18`N[:5gF0eN38H08GU1d0YNW<?UC49HN9GP1m=jNUB67INn1j=RNQB76LLi1Q>RNnA:3MLe1W>RNjA=2OJa1]>PNiAa0NW2k>jMSAU2n>mMo@T2R?l01N5mLm@Q2Z?eMSAKJg00]O[?FoA2@Nige0`0YiZO<E9J5K8H:G9F6J3M3M2O1N2N2N2O0O2O0O101O0O1QHWMFk25aMC_29gMD[27jMHV24oMJS22PNNP2bMlLaMW1l4n1XM^MkKDQ1T1j5l1UMVNnLOm5j1UMYNkLOQ6f1UM\\\\N_L8\\\\6\\\\1TM^NZL;c6U1TMaNUL>g6P1TMdNPL?l6l0UMiNfK`0V7f0TMX1m2fNSM[1m2eNSM[1n2dNRM\\\\1o2bNRM^1n2bNRM^1o2aNQM_1o2`NRM`1o2_NRM`1n2_NSMa1n2]NTMb1l2]NVMeN_K_1\\\\7KgMoMXKMH[1[7g0lMfMUL[1o5n0NlN3R12jNOT16gNLW18fNILZH>P8DGIbH<j7IL@bH`0f7ONWOcHd0c74g0J[O4f0J^O3c0K@2c0KAOl9IUUb0Lgj]Oe0F8H4M2O01O010O1O001O001O001O001O002N1O2M2O1N2O1N2O1N2
M`dl6\"}}, {\"image_id\": 116, \"category_id\": 1, \"bbox\": [233.9989471435547, 122.74370574951172, 182.65611267089844, 242.3050537109375], \"score\": 0.07130661606788635, \"association_id\": 0, \"segmentation\": {\"size\": [608, 738], \"counts\": \"RT[4=Rb0LP^Ob0Ua0Q1K3M4M4L6J5K4L4fBWMW:k2bE`MX:b2XEPNd:S2UEUNf:P2kD_NQ;e1hDfNP;fNgCo1S1_OT;`NQDP2f0BX;^NSDT2:Db;XNVDX2JIP<oMWDT5g;lJ[DS5e;mJ\\\\DR5d;mJ^DR5b;mJ_DS5a;lJaDS5`;kJbDT5^;jJeDV5[;hJhDV5Z;fJhDZ5Z;bJiD]5g:cJbD2f0[5g:eJbD1f0Z5h:eJbD1g0X5g:iJaDOh0X5g:iJ`D1h0V5g:jJaD0i0U5f:kJaD0i0U5f:kJaD0i0U5g:jJ`D1i0U5g:jJ`D2i0S5g:kJ_D3j0R5h:kJ]D4j0Q5i:kJ\\\\D6j0o4j:jJ]D7i0o4k:hJ]D9h0n4l:hJ^D9f0[4QOkKl;A]D9e0Y4XOjKf;C^D:d0X4]OcKR<5jCH;_4_<jKPCM`0Z4^<VLdCi3Z<XLgCj3U<WLlCm3o;SLTDl3i;UL^De3`;[LbDe3^;ZLcDc3`;\\\\L`Db3c;\\\\L_D`3f;^L[D_3h;`LXD_3k;_LVD_3l;^LVDa3o;WLVDh3o;PLUDo3a=M6J2M101O001O001O001O1O00101N2O2N1M3M5L4K5K6K2M3M3N1N2N2N2N1O1N2M4TOk0O1O2N4L4L3M2N3J5MXW>0ihA5L100O01O00101N2OO100O2N3M10O1N2O5KbZa6\"}}, {\"image_id\": 116, \"category_id\": 2, \"bbox\": [659.7732543945312, 274.49139404296875, 40.97442626953125, 11.87188720703125], \"score\": 0.05358471721410751, \"association_id\": 0, \"segmentation\": {\"size\": [608, 738], \"counts\": \"ggX<5kb01N2O00000O10000O1O100000O1O10000O10000O100O10000O1000000O10001N10\\\\Xi0\"}}, {\"image_id\": 117, \"category_id\": 1, \"bbox\": [129.72640991210938, 490.4679870605469, 103.672119140625, 83.01248168945312], \"score\": 1.0, \"association_id\": 5, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"P`V36ig04L3M3L?A<D5K4K8J4L1O2N1O101O0O2O0O101O0000000000000O1000000000000000O100000000000000000000001O00000001O00000000000001O010O01O001O001O2N1O1O2O0O1O2N1O001O1O0O2O000bNQZOk0oe0SOVZOj0je0UOZZOh0fe0WO\\\\ZOg0fe0WO]ZOf0ee0WO_ZOf0`f0N1N3M4K4M3Mjnbb0\"}}, {\"image_id\": 117, \"category_id\": 1, \"bbox\": [779.6044921875, 468.2260437011719, 123.447509765625, 78.39456176757812], \"score\": 1.0, \"association_id\": 4, \"segmentation\": {\"size\": [768, 1024], \"counts\": 
\"^_Yb0a02@Xg0g0M2N4K3N101N1O2O0O2N100O10000O1O100O10000O1O101N101N1O2O001N2O1N101O1N2O001O001O0000000000000000000000001O00000000000000000000000001O0000000000000000000001O001O0000000000000000000000000000001O001O00100O0010O100O10O01O0O2O001N2O3L2N1O1XOoYO_OSf0>i0O3JmPk2\"}}, {\"image_id\": 117, \"category_id\": 1, \"bbox\": [554.2620849609375, 637.6264038085938, 165.22900390625, 130.37359619140625], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"[eT=T1if04N2N1O2L4ZOe0L5N2N100O1N2O1O100O100O1O100O2O0000001O00001O3M2N5K1O1O1O:F9G2N1O001O001O1O001O0000000000O100O10000O1000000000000O1000000O100000000O1000000O10000O100000000000000O2O1O1O001O00001O001O1O001O001N101O002N1O001O001O1O1O1O1O010O4L2O0O1O10003M1O0O010O1O1O10O0010O00001O001hMgZOg1[e0UNjZOg1je0N2M2O1O0O2O0O2O1N1O2N2O1N2N2N2N2L5AnQT7\"}}, {\"image_id\": 117, \"category_id\": 1, \"bbox\": [800.7103271484375, 378.900146484375, 61.74945068359375, 39.2635498046875], \"score\": 1.0, \"association_id\": 6, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"Qlib0295Rg0`0M100O101O0O2O2N1O2N1O001O000000000000O100000000000000000000000000000000000000000001O001O001O1N20O01O2N001O1N4L3M3J6KeSi3\"}}, {\"image_id\": 117, \"category_id\": 1, \"bbox\": [329.5439453125, 390.515869140625, 67.24600219726562, 51.142791748046875], \"score\": 1.0, \"association_id\": 3, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"fdh79bg0c0^O5M4M1O2N2O1N2O1O1O000000000000000000000001O00000O1000000000000O10000001O01O0000001O00001O1O1O3M1O2N001O001O001N2O2N9G3M2M4K4Mebf>\"}}, {\"image_id\": 117, \"category_id\": 1, \"bbox\": [639.5061645507812, 388.2107849121094, 84.7645263671875, 77.60610961914062], \"score\": 1.0, \"association_id\": 2, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"ZmQ?9dg08J2N2N3M1O2N2O0N3N2N1M4G8N3M3M2O2N1N2O1O1O2O00000O10001O00000000001O0000000000000000000000000000001O0000000000001O0000000000000001O00001N3N1N2TOnYOGUf03RZOHQf05TZOFoe08SZOERf06o0K5K^kR7\"}}, 
{\"image_id\": 117, \"category_id\": 1, \"bbox\": [234.10501098632812, 382.2258605957031, 60.133758544921875, 48.722747802734375], \"score\": 0.9999998807907104, \"association_id\": 7, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"^\\\\a5h0Ug04N3L4M1N2O1O0O2O2OO01000001O0O011O0000O100000000001O00000001O0000001O001O1O001O0O2O1O1O1O1O1O1O1O1N2O4VOmXO?\\\\g0N3M1O3JRkSa0\"}}, {\"image_id\": 117, \"category_id\": 1, \"bbox\": [524.3457641601562, 329.343994140625, 51.12994384765625, 37.988739013671875], \"score\": 0.9999998807907104, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"fRZ<>_g04N2M3O0M3O1O2O1N2O1O0O2O00001O000000000000000000000000000000000010N11O01N10001O001O2M2O5H8B_eb:\"}}, {\"image_id\": 117, \"category_id\": 1, \"bbox\": [153.30740356445312, 359.37310791015625, 58.95036315917969, 44.205108642578125], \"score\": 0.9999996423721313, \"association_id\": 8, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"P\\\\e33bg0a0H8I2L4M2O1O1O100O10O0100000000000O0100000000000000000000000000001O0000001O1O001O1N2O3M1O1O1N2N101N2O;D5KlSRc0\"}}, {\"image_id\": 117, \"category_id\": 1, \"bbox\": [873.6727294921875, 459.1334533691406, 66.73687744140625, 60.692779541015625], \"score\": 0.9999949932098389, \"association_id\": 10, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"en_d03jg03O2N100O3M2O0O2O1N2O1O1O0O2O2N2N3M5K5WYOnNZf0]1O1O010O1O1O000000001O00001O0000000000000000000000000001O00000001O0000O2O00001O001N2O1O1O4LYan1\"}}, {\"image_id\": 117, \"category_id\": 2, \"bbox\": [819.7991333007812, 529.734619140625, 97.34173583984375, 26.04888916015625], \"score\": 0.9999939203262329, \"association_id\": 4, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"hXXc01og0001O1O0O3N4L1O0010O01O001O00010O00001O00001O000000001O00000000000000001O0001O00000000001O0000000O11O01O001O001O1O1O001O1O001O000000010O0000O10000O1N1L5N2O100O11O001O000O2O00001O00000O101O0OUg_2\"}}, {\"image_id\": 117, \"category_id\": 2, \"bbox\": 
[665.3744506835938, 451.9518127441406, 68.0977783203125, 14.630859375], \"score\": 0.9999663829803467, \"association_id\": 2, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"YVc?2lg03O00001N1001O0001O00010O00000000001O0000000000001O0001O000000000000000000000001O0000000O10O1000O10000000000001O000O2O001N10001NfQl6\"}}, {\"image_id\": 117, \"category_id\": 2, \"bbox\": [239.28863525390625, 422.6950988769531, 61.635009765625, 9.709320068359375], \"score\": 0.9998937845230103, \"association_id\": 7, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"\\\\Ue51ng02O000000001O000000000000000000000001O00000000000001O00000000000000000O100000000000000000O10000000O100O100O001000O103Khjm`0\"}}, {\"image_id\": 117, \"category_id\": 2, \"bbox\": [167.05084228515625, 391.44366455078125, 57.35552978515625, 14.27288818359375], \"score\": 0.9995741248130798, \"association_id\": 8, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"b\\\\n31og01N1000000000000000000000000000001O000O11O0O1000000000O10001N1000000O1O100O10O01O100O1000000O11N100O1O2N2NfSib0\"}}, {\"image_id\": 117, \"category_id\": 2, \"bbox\": [469.3340148925781, 352.5643310546875, 52.020843505859375, 11.478759765625], \"score\": 0.9987049102783203, \"association_id\": 9, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"VSP;2mg02O1O001O00000000000000001O00000000000000000000000000O10001O00O100O1000000000000000000001N10001OjTk;\"}}, {\"image_id\": 117, \"category_id\": 2, \"bbox\": [812.6298217773438, 408.942626953125, 58.42938232421875, 10.563385009765625], \"score\": 0.998458743095398, \"association_id\": 6, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"nlXc02ng00O1000001O00000000000000001O0000000000000000000000000000000000001O0O10000001O0O101O001O0O2OPkc3\"}}, {\"image_id\": 117, \"category_id\": 1, \"bbox\": [251.19924926757812, 326.2054443359375, 43.0186767578125, 32.948486328125], \"score\": 0.9968403577804565, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], 
\"counts\": \"aRn5a0^g04L1N2O2N100O1O100O100000000000000001O000000000O10001O001O1O1O1O001O1O1O1O1O3L3LZUSa0\"}}, {\"image_id\": 117, \"category_id\": 2, \"bbox\": [898.5999145507812, 511.0973205566406, 43.27716064453125, 11.818817138671875], \"score\": 0.9947550296783447, \"association_id\": 10, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"RXTe01ng06K1O0010O002N001O000001O0000000000000001O0000000000000000O1000000000O2Ojoo1\"}}, {\"image_id\": 117, \"category_id\": 1, \"bbox\": [764.2319946289062, 342.4320068359375, 46.772216796875, 47.1517333984375], \"score\": 0.9925227761268616, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"kjna05gg09]XOCO0Vg0m0I3L2O4L2N1O1O1O000000000000000000000000O10000001O000010O0001O01O2N1N2lN[YOk0Rg0H6L4M3L4H`lP5\"}}, {\"image_id\": 117, \"category_id\": 2, \"bbox\": [348.1075439453125, 432.3746032714844, 57.538543701171875, 9.657318115234375], \"score\": 0.961330771446228, \"association_id\": 3, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"gmV81og0000O1000000000000000000000001O0000000000000000000000000000000O10000000000000000O1000O01O1O100O1001N100O1O`b`>\"}}, {\"image_id\": 117, \"category_id\": 2, \"bbox\": [150.60995483398438, 545.4053955078125, 96.11398315429688, 25.98162841796875], \"score\": 0.9521000385284424, \"association_id\": 5, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"fid31og000001O00001O000000001O00000O1000000000000000001O00000000000000000000000001O0000000000000000001N1000000000001N1000og30QXL0O100O1N200_OMTYO4lf0MPYO6Pg0LiXO8Xg0800O010000O10000O1O2N1O1N2N2O6Hl^Xb0\"}}, {\"image_id\": 117, \"category_id\": 1, \"bbox\": [193.64105224609375, 302.73944091796875, 27.45947265625, 22.18536376953125], \"score\": 0.9506617784500122, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"hYc4410bg0;M2N101N001O10001O0000000000000001N2M3O1O0O2J6M]nib0\"}}, {\"image_id\": 117, \"category_id\": 2, \"bbox\": [703.52587890625, 738.5338134765625, 
30.29620361328125, 27.4725341796875], \"score\": 0.8823862671852112, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"kob`02mg01I7N3O0O11O0000N1L5M3O100001O1O1N3N1O1N2O1N2MbPk6\"}}, {\"image_id\": 117, \"category_id\": 1, \"bbox\": [461.03460693359375, 327.8712158203125, 43.554779052734375, 30.539703369140625], \"score\": 0.8345006108283997, \"association_id\": 9, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"^jj:5jg03M9G3N5J100000000000000O1000000000000001O000000000000000000000000001O100O1N2O1O3L5F_mT<\"}}, {\"image_id\": 117, \"category_id\": 2, \"bbox\": [598.6007080078125, 743.1683959960938, 134.031494140625, 22.5599365234375], \"score\": 0.6495947241783142, \"association_id\": 1, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"jg^>3lg01O1O10000O1000001O0O100000000000000O100000000000000O1000000000000O1000000000000000000000000000000000000000000000000001O000000001O0000001O00001O001O0O2O1O1O1O1O0000000000O100O100N2N2N2M3O10000N2O100N200N200O10000001N102N1O000O2O1N5FYXOOYXj6\"}}, {\"image_id\": 117, \"category_id\": 2, \"bbox\": [210.2303466796875, 539.8461303710938, 31.9486083984375, 28.0076904296875], \"score\": 0.24631507694721222, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"giT51ng010000O100XO2]YOObf04ZYONff04TYOOmf0b00000000O2N101N1O2O0N3N2M3MPoYb0\"}}, {\"image_id\": 117, \"category_id\": 2, \"bbox\": [531.115966796875, 360.6203308105469, 41.38507080078125, 7.638153076171875], \"score\": 0.19610396027565002, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"Z[_<2ng00Rh3OPh00o_K00000P`10Ph60PhG001N1000O010Ofdb:\"}}, {\"image_id\": 117, \"category_id\": 2, \"bbox\": [255.2752227783203, 351.3141174316406, 45.58482360839844, 9.589691162109375], \"score\": 0.12428076565265656, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"V[S61og00000000000001O00000000001O0O1000000000000O100000O2O0000000O1000O001000NolPa0\"}}, 
{\"image_id\": 118, \"category_id\": 1, \"bbox\": [953.2904052734375, 491.6644287109375, 68.0333251953125, 62.84283447265625], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"ZX^f012<Xg0<K4L4L3O3L2N1O2N1O2N1O100O1O0O11O0010O00O2000O010O2O001O001O1N2O1O001O1N101O000010O00000O100O10000O101N1O1O100O1001O010OO3M3M^h1\"}}, {\"image_id\": 118, \"category_id\": 1, \"bbox\": [385.86517333984375, 491.0289001464844, 94.7147216796875, 233.21817016601562], \"score\": 1.0, \"association_id\": 2, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"^QR9Q1jf09J8H8H5K6J9G5L4L:D<E;F7Ij0VO5J3M3M2O1N2M3N2N2O1N1N3L4L4N1O2M3O1O101N2O1O3M6J8H8H5K1O1O0000000001O0O2N2N3K5I6G:K5M4M2M2O1O2O0O10000O02N10O010O1O1O2N100O001N2`Na^O\\\\Laa0`3f^OZL^a0g2o]OWMec0c2P1K3M4M4M2M5K4M4K6Ja0^Ob0VOXVh<\"}}, {\"image_id\": 118, \"category_id\": 1, \"bbox\": [525.2111206054688, 486.6246032714844, 96.82696533203125, 242.59426879882812], \"score\": 1.0, \"association_id\": 3, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"ia_<=_g09H5J6J6K5J8F<G6K5J:_[OUNWOJZb0m4L2L3M2O100O1N3N1M4J5M3O2M3O2M2N5L6J5L4K6K3M1O001O00000000000000000000O100N2J6K6J5ZOf0M4O2M3N1N2O0O2N2O2M3N1N3N1kNl]O[LVb0b3n]O\\\\LTb0`3P^O]LTb0X3X^OdLja0S3a^OhLca0P3e^OkL`a0k2P2I7K6K5K5L4M3M4L3L5L;D6J4K8H9Eo]^9\"}}, {\"image_id\": 118, \"category_id\": 2, \"bbox\": [449.9440002441406, 712.6235961914062, 115.03549194335938, 18.48297119140625], \"score\": 0.9999992847442627, \"association_id\": 3, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"[Vh:1ng02O001O001O0O101O00001O000000001O01O000O1000000000000000000O1001O000000000000000000001O000000000000000000001O00000000000000001O00000000001O000001O0001O000000001O000001O0001O000001O0001O0000000001O0010OO2N2Kcii:\"}}, {\"image_id\": 118, \"category_id\": 2, \"bbox\": [318.4745788574219, 716.8383178710938, 124.97018432617188, 19.4739990234375], \"score\": 0.9999991655349731, \"association_id\": 2, \"segmentation\": {\"size\": [768, 1024], 
\"counts\": \"_n_74lg0001N10000000000000000000001O00000000000O100000000000O100001O0000000000001O000000001O00000000001O000000001O0000001O00001O001O00001O001O00000000001O0000000000000000000000O10000001O000001N1000000NaXOC_g0<3N2O101N1O1O10aaP>\"}}, {\"image_id\": 118, \"category_id\": 2, \"bbox\": [950.4072265625, 548.7345581054688, 67.4974365234375, 8.63720703125], \"score\": 0.9997612833976746, \"association_id\": 1, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"ViYf03lg01000001O000000010O000001O001O00000000000001N1O1O10000000000000001O0001O00001O1O001O00001O0000000000000000O1000000O10000O1000000001Nin3\"}}, {\"image_id\": 118, \"category_id\": 1, \"bbox\": [126.1532974243164, 469.41632080078125, 42.635536193847656, 190.39544677734375], \"score\": 0.45090770721435547, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"UoQ33ng0NN5N0Q\\\\O8h?E__OW1X`0SO^_OX1\\\\`0mNX^O]2ga0eMc]OP3[b0RMf]O_O\\\\OT3nb0_Mo]O_2Rb0`Mo]O_2oa0bM]^OS2ba0nM_^OQ2ca0lM^^OT2`a0nMa^OQ2^a0PNb^OP2`a0nM`^OR2_a0oMa^OR2]a0PNc^Oo1^a0QN`^OP2_a0RN`^Oo1_a0RN`^On1ba0SNZ^OQ2ea0kM\\\\^OV2fa0fMZ^O]2ja0`MR^Oc2la0aMQ^O_2ma0eMP^O[2Tb0cMj]O_2Ub0aMk]O^2Vb0`Ml]O]2Zb0_Mf]Ob2fb0PM^]Ol2Yb0ZMQ^O`2Xe0nM\\\\YOC[\\\\Td0\"}}, {\"image_id\": 119, \"category_id\": 2, \"bbox\": [644.0005493164062, 813.1437377929688, 123.99945068359375, 106.28729248046875], \"score\": 0.9999998807907104, \"association_id\": 2, \"segmentation\": {\"size\": [1024, 768], \"counts\": \"eiTd07co09L2O2N1O1O001001O1N100O5K3N1O001N1000000O100O0100O1O1O0010O0100O001N1000001O00100O001O0000001O1O1O1O1N2N2O001N2O1O1O000O2O1N2N101N2O1N2O1N1O2O001N2O001O1O001O1O00001O001O1O100O1O100O100O100O1O1O2O0O101O1N1O100O1000O010O01O1O001O001O10O001N2O2M>]OYUO\"}}, {\"image_id\": 119, \"category_id\": 1, \"bbox\": [381.4837646484375, 546.093017578125, 257.814697265625, 477.906982421875], \"score\": 0.9999998807907104, \"association_id\": 3, \"segmentation\": {\"size\": [1024, 768], \"counts\": 
\"\\\\eY<:_o0:I6L2N3L4M3L3L5L4L3M4M1O2L3O2M2O2N101N1O2N2N1N3N2RWOfMTd0[2[[ODVc0>e\\\\OMSc05j\\\\O2Pc00m\\\\O6nb0KP]O:lb0HQ]O>kb0BS]Oe0gb0\\\\OV]Ol0eb0TOV]OX1bb0iN[]Oa1^b0^Nb]Oi1Xb0TMmZOoNm2R4dN]K_a0a1T]OiNk2^4]NdKYa0U1a]OhNh2b4[NbKZa0W1_]OlNg2^4\\\\NaK]a0X1[]OQOe2[4_N]K_a0`2m_OY2]NZKda0c2f_O]2^NSKja0f2Z_OQ3XN]J\\\\b0i2Q_OX4m`0lKm^OW4Ra0mKi^OU4Va0nKg^OS4Ya0oKc^OT4\\\\a0oK_^OS4`a0RLY^OR4fa0U5O001O00001N100O100O100O100O1O1O1N2N2O2M2O1O1O1O1N2N2M3M3N2O1N2O1000000000000O0100000000O100O100O100O10000001O00001O0000001O00001O000000001O0000000001O0001O000001O001O001O001O1O1O2N2N2N3M2N2N3M2N4L6J6J8H8H4L3M2N1O1O2M2O2N2N2M3N2N1N3N1M4L4I8hNZ1ZOh0D;F7J4M4L3N3M2M4M3M4K4M3ZORXOUIQh0i6oWOWISh0g6lWOZIVh0d6jWO\\\\IXh0U72N1O2N3M2N2000000101O00O0O100N2O1O2M6K8G5L2N1N2OO01N2O1O1O1O1PN[VOcMfi0Z2_VOcMoi0m1VVOoMQj0i1UVORNoi0i1XVOPNmi0j1^VOkMfi0P2jVOaM[i0e1m2PO\\\\RO[Omm0?m0K5K[W]4\"}}, {\"image_id\": 119, \"category_id\": 1, \"bbox\": [214.46194458007812, 538.5863647460938, 153.17312622070312, 440.36529541015625], \"score\": 0.9999997615814209, \"association_id\": 1, \"segmentation\": {\"size\": [1024, 768], \"counts\": \"_eW7Y1`0ZOol0P1fRO_Onl0Q2F9D<C=C?C9H7J6K5I8I8I5M2N2N2lZOjI[`0W6^_ORJ_`0P6Y_OYJd`0h5Z_O[Jd`0g5W_O]Jg`0f5S_O`Jj`0d5k^OeJRa0_5e^OiJYa0[5[^OPKca0S5Q^OWKna0l4g]O]KXb0e4_]OcKab0_4X]OfKgb0\\\\4T]OhKlb0Y4o\\\\OkKQc0X4e\\\\OoKZc0U4S\\\\O[Lmc0l700O1O1O1N2L4M3N1N3K5_Oa0L4N20O0000001O0O1L4L3L5M202M2M4K5M2N3N3M2M4L3M3O1XNT\\\\O[Gmc0^8^\\\\O^Gbc0Z8h\\\\OcGYc0Y8l\\\\OeGUc0Y8n\\\\OfGTc0V8P]OgGSc0V8o\\\\OgGUc0V8n\\\\OfGUc0X8n\\\\OXGbc0e8n1N2L5J6L6J6J7I?@=@8I7J6K5J6I8F:G8K4L4K4L5L4L3O3M3N4K4L3N2N3N2bMUTOa0Qn0E4K3N7H:F5L2M10N3N2M4M4K3O0O0100O010O1O1N3L3NcXc<\"}}, {\"image_id\": 119, \"category_id\": 1, \"bbox\": [638.6751708984375, 563.8497314453125, 125.76043701171875, 303.0902099609375], \"score\": 0.9999992847442627, \"association_id\": 2, \"segmentation\": {\"size\": [1024, 768], \"counts\": 
\"kgWd03co0e0_O9I7I=Dc0\\\\O8F8H9I9H7J5K4L3L4L4L4O1N3M4K4\\\\OdK[UOb4_j0b0J6J7iNdJcWOa5Xh0V1I7J6M3M3N2L4M3N2N2M3M3N2O1O1N2O1N2N2O1O100O1000000000O1000000O100O1O1O100O1O100O100O10000O1000000O100000000000000000000001O0000000000001O2N2N2N001O1O00001O0000001O00000000O100O2M4bL]ZOVK[N>fU4\"}}, {\"image_id\": 119, \"category_id\": 2, \"bbox\": [220.01124572753906, 859.533447265625, 113.67008972167969, 74.0882568359375], \"score\": 0.9997163414955139, \"association_id\": 1, \"segmentation\": {\"size\": [1024, 768], \"counts\": \"lkm68go02N1M3N3N1N1L5N01001N2O100PQOVOjn0U1J6J3N1O1N2O1O000O101N1O101N1O2O0O100O1001N1M3O1M4C>C<L4KnPk0^OgoTO2K6L3K4M3K6O0O10001O010O00010OJlPO@Uo0?kPO@Wo0>kPO@Vo0`06OjPO@mn0?;O10O01O1O1O1O1O1O1O002MXTj=\"}}, {\"image_id\": 119, \"category_id\": 2, \"bbox\": [349.9884948730469, 714.11865234375, 29.38629150390625, 6.3975830078125], \"score\": 0.9784950613975525, \"association_id\": 3, \"segmentation\": {\"size\": [1024, 768], \"counts\": \"\\\\fo:3mo01N100000000000000000000000000001N10000O10dYZ<\"}}, {\"image_id\": 119, \"category_id\": 1, \"bbox\": [561.13818359375, 551.1696166992188, 95.218994140625, 197.74114990234375], \"score\": 0.6728553771972656, \"association_id\": 0, \"segmentation\": {\"size\": [1024, 768], \"counts\": \"]bca0d0Yo06K6J5L1O01002N2N3M2M2VOeNmRO]1Pm0hNlROZ1Rm0gNmROa1jl0aNUSO`1jl0`NWSO`1hl0aNVSOa1il0bNSSO`1ll0cNPSO`1nl0bNoROa1nl0h01O1O11bSOSMgk0k3H6J2N3M2O0O1O2NRLcTOa30TLlj08XUOn3gj0lK_UOS4Wk0100O010O100O1O001O01N1NaTOhKZk0X4dTOjK\\\\k0V4aTOnK_k0Y40GdTOnK]k0P4eTOPL\\\\k0l3fTOTL[k0j3fTOWL2Mij0l3UUOWLZk0h3fTOXL7M`j0l3YUOVL7Naj0i3ZUOYL31bj0d3^UOYLM7ej0_3_UOZLI:hj0\\\\3_UO\\\\LE;lj0X3^UORMdj0m2ZUOTMhj0l2VUOTMjj0j2WUOWMjj0h2TUOZMlj0e2TUO\\\\Mmj0c2RUO^Mnj0b2TUO\\\\Mlj0e2SUO[Mmj0g2QUOYMPk0k31O1O2N1O1N2POjTOTMYk0m0cTO74iN^k0a0QUO=MeNVk0g0WUO?EgNXk0a0[UOe0^OiNXm0U1hROjNZm0T1j0O1M4L5J5N2M3NWZ`3\"}}, {\"image_id\": 120, \"category_id\": 1, \"bbox\": [210.05291748046875, 9.38147258758545, 66.26260375976562, 132.02749633789062], 
\"score\": 1.0, \"association_id\": 2, \"segmentation\": {\"size\": [599, 506], \"counts\": \"cWm34bb02N2M201OOj_OMi=4UBNk=1TB2l=LSB6m=JRB7m=IRB9n=ESB<m=CSB>m=ASB`0l=ARB`0m=BRB?m=BRB?m=BPB?R>AjAb0U>AgAa0Y>@aAd0`>]O^Ac0c>_OXAc0R>PO^A>=b0S>XO[A8JT1m>hNUA7GT1T?iNQAP2P?h04N2M2O2N2O1O101O01O0101O1O;F0O1O0ZOR@bNo?[1T@dNl?Z1V@gNj?V1W@lNi?R1W@SOf?k0[@XOc?f0^@\\\\O`?c0a@]Ob?>`@Ao?0R@OQ`0NP@1S`0Kn_O4U`0Im_O5V`0Hm_O6V`0Em_O8[a0N5J]PW4\"}}, {\"image_id\": 120, \"category_id\": 2, \"bbox\": [271.11602783203125, 195.10992431640625, 63.4505615234375, 83.12576293945312], \"score\": 1.0, \"association_id\": 4, \"segmentation\": {\"size\": [599, 506], \"counts\": \"Shn45_b06K3M4[^OBk`0b0Q_OBb`0k0[_OWOc`0k0Z_OWOd`0`1O1N100O1O1O1000000O10O012M2N3M3M2N100O1O10000O100O0010O001O1N101O0O10[OT_O@l`0=Y_OAg`0?[_O@e`05f_OJZ`05g_OJZ`06g_OIY`07h_OHX`08i_OGX`07j_OIV`05k_OKV`04j_OMV`00l_O1T`0Mm_O3T`0Jn_O6S`0Go_O9Xa0O010O003LSoT3\"}}, {\"image_id\": 120, \"category_id\": 1, \"bbox\": [293.2082824707031, 56.07792282104492, 68.36715698242188, 155.0182342529297], \"score\": 0.9999998807907104, \"association_id\": 4, \"segmentation\": {\"size\": [599, 506], \"counts\": \"\\\\b[59[b08I1O20N2O0000O0101O01jM9\\\\AG_>b0]A^Oa>e0]A[O`>j0_AVO`>l0^ASOc>P1ZAPOg>Q1WAoNh>T1VAlNi>W1VAhNj>[1TAdNm>a1m@_NT?d1h@\\\\NY?g1a@\\\\N_?i1Y@ZNf?]2O2M3L5K5K2B?M3N1O2OO0O2O100@`AhLa>R3iAdL]>U3f0M3M3I8K5oMR@T1R`0jNR@R1P`0kNT@S1m?jNW@S1m?gNY@W1g`0L4L6J3N3M2N1O3M1N3M2N3L7IQ^f2\"}}, {\"image_id\": 120, \"category_id\": 2, \"bbox\": [259.6142272949219, 468.55059814453125, 87.76678466796875, 96.6173095703125], \"score\": 0.9999997615814209, \"association_id\": 6, \"segmentation\": {\"size\": [599, 506], \"counts\": \"cch4d0Rb04L4K3N2N2_OQOS_OU1i`0>00O1O1O1N2K5M3K5N2O1N2L4L4M3N2O10000O100O1O10O1N2O1N2M3N2mMn_O`1S`0ZNT@b1P`0[NS@b1o?]NS@a1n?_NS@]1P`0bNR@[1Q`0cNQ@[1Q`0cNQ@[1g`0M200N3N1O1O1N110O1O1O1O000O110O0E;O001O1000010OO10000000000000O2O001O1O1O1O1O1N6Ikfm2\"}}, {\"image_id\": 120, \"category_id\": 2, \"bbox\": [329.4403076171875, 
458.9969787597656, 111.1011962890625, 100.31118774414062], \"score\": 0.9999997615814209, \"association_id\": 5, \"segmentation\": {\"size\": [599, 506], \"counts\": \"]XS64bb02O0O100O1O10O01e^O1n?NP@9l?GS@<k?CT@f0e?ZOW@n0f?QOX@T1e?lNZ@V1e?jNY@X1g?hNU@]1j?cNQ@b1o?^NP@d1o?]No_Oc1S`0]Nd_ON0g1\\\\`0=10O100O1O10OM3O1O2L4O1O00100O100O100000000000000000000000000000O1AS@UNl?i1W@VNi?h1[@VNe?i1]@VNc?i1^@WNb?h1_@XNb?e1`@[N`?d1a@[N`?d1a@\\\\Nb?a1^@`Nf?Z1[@fNg?X1Y@hNh?W1X@iNj?U1V@jNn?S1R@mNn?T1Q@kNQ`0T1P@kNQ`0U1n_OkNS`0T1n_OjNT`0V1k_OjNU`0V1l_OhNU`0X1l_OgNT`0Y1f00O010000O01O0001O001O1O2N001N101N1O1000O02O0O1N2O2N2L4J7L8HjSV1\"}}, {\"image_id\": 120, \"category_id\": 1, \"bbox\": [211.82891845703125, 435.25360107421875, 72.2025146484375, 64.44253540039062], \"score\": 0.9999996423721313, \"association_id\": 1, \"segmentation\": {\"size\": [599, 506], \"counts\": \"i^l38^b03L9H4L2N3M2O1N1O2O1N2N2O0O2O1N2N10000O1O2O0O10000O1O1000O1000O01O10O0100O1O1O101N101N1O1O100O101N101N1O2N1O2O0O1O2N1N2N2O2M2O2N2N2N2M4LcUS4\"}}, {\"image_id\": 120, \"category_id\": 1, \"bbox\": [369.88323974609375, 301.69866943359375, 88.96554565429688, 164.970947265625], \"score\": 0.9999994039535522, \"association_id\": 5, \"segmentation\": {\"size\": [599, 506], \"counts\": \"^kh63^b0:K0O2O1kNCo_O<n?KP@3o?1o_OOQ`04m_OKQ`09n_OFR`0<m_ODQ`0?m_OBP`0b0n_O^Oo?h0m_OZOQ`0j0k_OXOS`0m0h_OUOS`0j1M3N3I6L4M4J7K6K4l@dLf>l3I5M2N2L3N[OmAjLQ>V3VBgLe=Z3^BfL_=[3cBeLZ=]3gBdLV=\\\\3lBdLR=]3oBcLP=\\\\3RCbLo<^3QC]LT=c3mBYLV=g3iBYLX=g3hBYLX=h3dB[L\\\\=d3l0L4N200OXAcLX>\\\\3hAiLT>W3kAjLU>W3iAfL[>\\\\3aAcLb>h3100aLbAe2_>VMgAh2Z>TMjAk2R?E^@_Me?N`@S2KnMe?Ii@V2EoMa?Mo@l1CVN]?OYA`1]O_NZ?2ZA\\\\1^ObNV?4^AW1]OgNQ?5dAo0]OVOd>LPBl0]OXOc>LPBj0^O\\\\Oa>JRBh0^O^O`>ITBg0]OAJBT>4gBf0]OFDCi?a0f@2ZO@P`08k@d0U?\\\\Ol@`0W?@j@;[?Df@9\\\\?Id@4]?Oa@0`?0`@Na?1a@M`?3b@Hb?6b@Db?9b@Ac?<_1L4Ig^m0\"}}, {\"image_id\": 120, \"category_id\": 2, \"bbox\": [199.0960693359375, 481.4548645019531, 69.697998046875, 53.627105712890625], \"score\": 0.9999990463256836, 
\"association_id\": 1, \"segmentation\": {\"size\": [599, 506], \"counts\": \"aWg35Yb0=K2N4L3N0O2N1O2O1O1N3M2O100O02O0O100O00100O00010O01O01O00001O00000000000O100O101N101O0O2O000O2O1O0O2N2O0O2N2O1N1O2O002N1O1N2N4KSgZ4\"}}, {\"image_id\": 120, \"category_id\": 2, \"bbox\": [185.70877075195312, 130.51577758789062, 77.4248046875, 61.487457275390625], \"score\": 0.9999988079071045, \"association_id\": 2, \"segmentation\": {\"size\": [599, 506], \"counts\": \"Va]32db02N2N10000[^OJi`06V_OMh`03V_O0i`00V_O1j`0OV_O2i`0NV_O3k`0LU_O5j`0KU_O6k`0KS_O6m`0JR_O7o`0Io^O7Ra0Il^O9Ta0Hj^O9Va0c0100O100O10000000O101N100O1N2O1O1M3O010000O010O1O101N001O1O1O1O001N2YO_^O7ba0H`^O6aa0Ia^O5_a0Kc^O3^a0Mb^O2_a0Mb^O2^a0Nc^O2]a0Nc^O1^a0Ob^O0_a00a^OO`a00c^OM^a03b0010O10000O10O010O10O12M`P^4\"}}, {\"image_id\": 120, \"category_id\": 1, \"bbox\": [333.33721923828125, 153.91409301757812, 56.203643798828125, 144.2725830078125], \"score\": 0.9999988079071045, \"association_id\": 3, \"segmentation\": {\"size\": [599, 506], \"counts\": \"X]U64Q`04UB4f=NVB5i=MSB7h=NUB4g=2VBOf=5WBMf=7WBLf=7WBLh=5VBLj=7PBKQ>7jALU>9dAKZ>:^AJb>?o@FR?n101O10O01O1O10O1000N2O00100010M3O1WOSAeMP?Z2g000O10000O1O1N2O1N2M3O1N3M3M4^N]_Oi0^a0J5G;[OeXT2\"}}, {\"image_id\": 120, \"category_id\": 2, \"bbox\": [308.5776672363281, 292.6407775878906, 49.2783203125, 44.283905029296875], \"score\": 0.9999864101409912, \"association_id\": 3, \"segmentation\": {\"size\": [599, 506], \"counts\": \"hif5145Vb0`0F3M2O2N1N2O1N102M1O2O0000001O0000O100O01O0100N2O1O1O100000000O11O1N101O001OPOc^Og0]a0UOi^Oh0fa0N1N2N7I3DbXf2\"}}, {\"image_id\": 120, \"category_id\": 1, \"bbox\": [258.61993408203125, 332.01470947265625, 102.20053100585938, 154.3912353515625], \"score\": 0.9999334812164307, \"association_id\": 6, \"segmentation\": {\"size\": [599, 506], \"counts\": 
\"PgP56`b02OM3O101MQY51[fJ1_^O3_a0c0O2N1M3O1N3N0O2YOXO\\\\_Oi0d`0e0OF[_OdNb`0^1651OQOdNe@]1W?Q11100O1000010N101O1O1O101N2N2M3O0N3M3N3N1N3N4L3M2N3M3M2N1O3M2N1O1O10O0kNmAgMR>T2[BdMe=X2cBdM]=Z2gBdMZ=Y2hBgMX=i0YB@c0FW=EiAb0b2Ig;AlA?b2O[?01O1O1O0000001O000O2N^nd2\"}}, {\"image_id\": 120, \"category_id\": 1, \"bbox\": [397.43499755859375, 414.28662109375, 55.23626708984375, 72.08255004882812], \"score\": 0.9475911259651184, \"association_id\": 0, \"segmentation\": {\"size\": [599, 506], \"counts\": \"oWY7e0Qb04L4L2O2N1O1N2O1M2I8N2N1O2O1N1O2O1N101N1000000O1000000001O1O1O1O1N10000O001O0N2N3N1O2M4N3L3M3K5L4M3M2N4L4JYUo0\"}}, {\"image_id\": 120, \"category_id\": 2, \"bbox\": [449.6134033203125, 0.5076665282249451, 45.65576171875, 23.629161834716797], \"score\": 0.9234335422515869, \"association_id\": 7, \"segmentation\": {\"size\": [599, 506], \"counts\": \"gWW88^b02O002N1O1O1OJk]OKTb04n]OKQb05P^OKoa06Q^OJna06S^OJma05T^OKka05U^OLja03X^OMha02Y^ONga01Z^OOfa00[^O0ea0M^^O3Qb01100O01O01OO1011M2O02XV?\"}}, {\"image_id\": 120, \"category_id\": 1, \"bbox\": [453.3144836425781, 1.370643973350525, 44.59844970703125, 20.76555633544922], \"score\": 0.8691022396087646, \"association_id\": 7, \"segmentation\": {\"size\": [599, 506], \"counts\": \"WhZ83db01O1O0O2OOKKi]O5Xb05O1LEk]O<Tb040000000O01001O000N1O200000000001O001O00000000Mj]OEO0Ub0:m]OHSb08n]OHQb08o]OHQb07P^OIPb06R^OJna05S^OJma05<Mge4\"}}, {\"image_id\": 121, \"category_id\": 1, \"bbox\": [84.7850570678711, 516.0736083984375, 37.18622589111328, 93.734375], \"score\": 1.0, \"association_id\": 2, \"segmentation\": {\"size\": [768, 782], \"counts\": \"gZP2:Zg0<H9K4K5K5L4K5J6L4J6J6J6N2N2N2N200O100O10O1O1M4F9C=K5K5L4M3N2M3L5F;I8HhW_?\"}}, {\"image_id\": 121, \"category_id\": 1, \"bbox\": [44.842830657958984, 340.4462585449219, 57.62393569946289, 158.26165771484375], \"score\": 0.9999998807907104, \"association_id\": 1, \"segmentation\": {\"size\": [768, 782], \"counts\": 
\"cTR1m0mf0;D;I4K5L5J6M2M2N3L6J4M3L3L6I7J7K7H;G3N6J7I4L4L1O00001O0000000001O1O2M4M<D2N3L3M2L4N2N3M2oMP\\\\ONRd0OT\\\\OMoc0NX\\\\ONic00[\\\\OLic0O\\\\\\\\OYOVOLcd0f0U]OROPc0k0V2L3K8FlSo?\"}}, {\"image_id\": 121, \"category_id\": 2, \"bbox\": [263.0644836425781, 427.4900817871094, 105.56539916992188, 16.1204833984375], \"score\": 0.9999997615814209, \"association_id\": 3, \"segmentation\": {\"size\": [768, 782], \"counts\": \"e]V64kg01000000O1000000O100O100O1O1O1O100O101OO100000000000000000000000000001O0000000000000000000000000000O101GW`73n_H5K1O001O000000000000O1000000000000000000000000O10000000000000001O0H^XO2cg0L^XO4hg0O001O1N[jf9\"}}, {\"image_id\": 121, \"category_id\": 2, \"bbox\": [108.26409149169922, 580.5167236328125, 38.87604522705078, 26.91973876953125], \"score\": 0.9999996423721313, \"association_id\": 2, \"segmentation\": {\"size\": [768, 782], \"counts\": \"eZb29eg02N2O01O10O0010O01O010O01O010O000010O010O0100O001O100O1O100O1O100O2N2NjUm>\"}}, {\"image_id\": 121, \"category_id\": 2, \"bbox\": [86.75146484375, 476.955322265625, 46.50251770019531, 20.121063232421875], \"score\": 0.9999940395355225, \"association_id\": 1, \"segmentation\": {\"size\": [768, 782], \"counts\": \"[gQ23lg02N1O010O01O01M2O2O10O002OO1000O100000000000O010000000O100000001O00001O0000O101N1O2N10QiW?\"}}, {\"image_id\": 121, \"category_id\": 1, \"bbox\": [402.0332946777344, 291.2071228027344, 186.27676391601562, 379.0262145996094], \"score\": 0.9999778270721436, \"association_id\": 4, \"segmentation\": {\"size\": [768, 782], \"counts\": 
\"oZc9`0^g04M2N1N3N1N3M2N2N3N1O1N2N2M4M2N2N2N2M3K5M2O2N2M3K5F9L5M3M2oLQMQAP3o>aM_@_2c?mMn_OS2T`0oMj_OP2W`0SNe_On1[`0TNc_Ol1]`0VNa_Oj1^`0XNa_Oh1_`0ZN__Oe1b`0]NZ_Oc1h`0_NU_O`1m`0aNQ_O^1Ra0cNl^O]1Sa0fNk^OZ1Sa0iNl^OV1Ta0mNj^OS1Ua0oNk^On0Va0UOh^Oj0Xa0YOf^Of0Za0\\\\Oe^Oc0Ya0Ae^Oa0Q>ZMWDZ2fM=S>\\\\MQD\\\\2iM9V>^MkC]2mM6X>`MaCb2UNO[>aMPCo2bNA^>cMjBR3eN[Ob>fMcBW3dNSOk>hM[BY6g=iISBY6n=jImAX6T>jIgAX6Z>iIcAX6^>jI^AW6c>nIVAS6k>UJk@l5V?]12M3N1O2N2N2L4M3N2N2N2O1O1N2N2N2O1O2O01O000000001O2N3M2N2N101N2O2M4M2N1O1002O3N5J4NM00O2O001O10O3L3M2O5J9H2N2N2ObHPAh5o>PJZAn5f>iIfAU6Z>dIoAY6R>cIVBY6j=cI]BY6d=dIaBV2^N\\\\1R?YLfB9QN\\\\1>o1l>ZLjCo0`Md2ab0\\\\Lo]Ob3\\\\c0K9G5K3N1N1O1O1O1O1O1O001O001O0O2O00O01O00000N2O2N2M3L4ROn0M3N2N2O2N1O1O1M4M3M8F9IZVf4\"}}, {\"image_id\": 121, \"category_id\": 2, \"bbox\": [394.2860107421875, 528.2649536132812, 205.352294921875, 130.66168212890625], \"score\": 0.999958872795105, \"association_id\": 4, \"segmentation\": {\"size\": [768, 782], \"counts\": \"VcY99eg03O1O00001O0001O00101N1O1O1O10O01O1N3LZgk09^XTO2N2N1O1O1N1L5M3N101O001N1O2N2N2O0O2O1O1O2M2O1N2O100O2O1N2O1O1O001O002N1O1O2N001O00100O1O1O0010O100O1oNSZONQf0LRZO3ne0KUZO4ke0KVZO4ke0KVZO5je0JVZO6ne0FSZO:Sf0_OmYOb0Wf0YOjYOh0Vf0WOiYOj0JSOVf06lYOi0IXOXf0OoYOR1_f000100O2O0O1O01O2O0O3WO[YO1if0I_YO2df0I_YO5df0G^YO9Tg001O1O1N101N101N1Ob\\\\13ZcN4L4M2N2O0O2O2N1O1O000000O2O0O2N2M4L6IUWU5\"}}, {\"image_id\": 121, \"category_id\": 1, \"bbox\": [569.7214965820312, 318.8000793457031, 143.80615234375, 298.8018493652344], \"score\": 0.9997256398200989, \"association_id\": 0, \"segmentation\": {\"size\": [768, 782], \"counts\": 
\"TQ\\\\=R1Qf0S1C9H5M3ZOfMd[O]2Wd0h0N20OVLeLj@9;o2m>QM_@<8^2_?ZMT@>7W2g?^MP@>5T2k?aMk_Oa05n1Q`0eMc_Oc09e1W`0lMT_Ol09l0Va0^34M4L3K5N2O2N010O1N2O2N111N100000h_O`JQ>_5nAcJQ>^5nAbJR>_5mAaJS>_5nA`JR>a5mA_JT>a5kA_JV>`5iAaJY>^5fAcJZ>\\\\5eAfJ[>Y5eAhJ[>X5dAhJ\\\\>[5bAbJa>a5m15L2M20O1O1fMl]OfNTb0S1U^OlNia0o0^^OPOaa0j0m^OoNSa0n0T_OnNl`0n0W_OSOj`0j0W_OXOh`0e0Z_O\\\\Of`0a0[_O@g`0<X_OHQa0Lo^O6Va0Cj^O`0Wa0\\\\Oi^Og0Xa0VOg^Ol0Ya0TOg^Ol0Za0SOe^On0[a0QOe^Oo0]a0POc^OP1]a0oNc^OR1^a0kNd^OU1]a0hNe^OX1`a0^Ne^Ob1aa0nMi^OS2\\\\a0`Mk^Oa2Yc02N2N3L4N110O1O<C2N2L4M1O2N1N3OOaL]LaBd3X=iLbBW3X=QMeBP3X=TMgBl2W=WMgBh2Y=ZMeBf2Z=^MdBa2[=bMdB]2[=fMcB[2[=fMfBX2\\\\=fMfBX2[=dMjBY2X=cMnBV2X=gMkBT2Y=lMhBR2Z=mMiBm1Z=TNiBc1]=\\\\NmBU1X=kNRCg0R=WOUCc0l<\\\\OmCJU<5UD[OR<d0e501N200O1O100O01000O0100O1O3L5K7I5Y\\\\OYOn?l0n_OVOn?P1n_OROo?\\\\1e_OdNX`0`1f_ObNV`0a1j_O`NS`0a1o_O^No?c1T@[Nk?d1X@[Ng?e1\\\\@YNb?g1a@XN_?e1g@UN`?X1cac1\"}}, {\"image_id\": 121, \"category_id\": 1, \"bbox\": [629.0203247070312, 314.56475830078125, 104.99481201171875, 264.94903564453125], \"score\": 0.9987389445304871, \"association_id\": 0, \"segmentation\": {\"size\": [768, 782], \"counts\": \"SPi>g0Vg05L4L4M3N11N3N1NO3L3N3ChNmYO[1me0a0L5K4K5K5K6J4O0O010O0O100O10000cL]M]Ac2`>bM^A^2a>dM^A\\\\2`>gM`AX2]>lMbAT2\\\\>oMcAQ2[>RNeAm1Y>VNgAi1W>ZNjAd1T>^NmAa1Q>dNlA\\\\1S>gNjAZ1U>iNiAW1V>nNfAR1X>VOaAk0[>LQA5m><d@D\\\\?e0\\\\@ZOc?m0Z@POf?R1]@iNc?Y1`@bN`?a1c@ULROf1]`0Z2SAYMn>k2[A_Lo>b3g21N2O1N2M3M3N3M2O2N3M1O2M2O3L3M3K5L5L4L4M3M6I5KeM^^ObNZa0T1Z_OhN_`0U1h_OlNS`0Q1R@POl?>h@BV?1[ALc>3aAK_>2fALY>1mANR>0UBKk=3\\\\BHd=5eBE\\\\=]OiC;W<CnC:S<DQD9P<EVD7j;HZD4g;J]D3c;M`D0a;NcD0];NhDNY;OkD0V;KoD3U;GQE4S;GRE5_a0MRWT1\"}}, {\"image_id\": 121, \"category_id\": 1, \"bbox\": [260.2741394042969, 312.90185546875, 36.840484619140625, 116.50347900390625], \"score\": 0.9568195343017578, \"association_id\": 3, \"segmentation\": {\"size\": [768, 782], \"counts\": 
\"Y]S62gg09M1O1UOJfYO<ke0<iYOGYe0FgZOk1Ve0a0N2O1N2O100O100O1N2O100O1001UOY[O[Nhd0^1g[OXNZd0h1P1O0O2N102K4Aa0Hk0XOiT_;\"}}, {\"image_id\": 121, \"category_id\": 1, \"bbox\": [390.4406433105469, 364.44677734375, 45.8465576171875, 126.66897583007812], \"score\": 0.612104058265686, \"association_id\": 0, \"segmentation\": {\"size\": [768, 782], \"counts\": \"SWW95ig04PN?Z[ODcd0c0X[O^Ofd0f0W[O\\\\Ofd0i0T[OZOhc03o[Ok0MYOQd03m[Og1Rd0T1O1O1O1000000001O0UOm[OiMTd0S2P\\\\OkMSd0Q2Q\\\\OmMRd0R2h[OSN\\\\d0f26J5K3M1O1O4L2N1O3M2N2M2O0\\\\OiZO]NZe0Y1R[OaNPe0]1i0gNbYOQ1af0oN`YOo0af07N35cNaYOo0mf0KfRS8\"}}, {\"image_id\": 121, \"category_id\": 2, \"bbox\": [468.37677001953125, 534.8682861328125, 186.36614990234375, 72.03173828125], \"score\": 0.3469315767288208, \"association_id\": 0, \"segmentation\": {\"size\": [768, 782], \"counts\": \"cYP;S1lf02N2O1O0O101O00000O2O001O0O101O00000000001O00000001AbYOZO^f0f0cYOXO^f0g0eYOUO]f0k0cYOUO]f0k0dYOSO`f0i0bYOUO`f0i0>M3M2O1O1O3M4L1O1O1JXXO6gg0Lgfe03YYZO5K3N2N2N1N3N1O001OO101O0O101O0O2L3O3IZYd0IWf[O7^YOK_f08_YOJ`f07_YOIaf07_YOIaf08]YOIcf08\\\\YOIcf07]YOIbf09\\\\YOHdf09[YOGff08[YOHdf07^YOHbf08_YOGaf09`YOG_f09bYOF^f0:bYOF^f0:cYOE]f0;cYOE^f09dYOF\\\\f0:eYOE\\\\f0:eYOE[f0;eYOE[f0:fYOF[f09eYOG[f09eYOHZf08fYOIZf06eYOMZf01fYO1Zf0NeYO4[f0KeYO5]f0IdYO7Sg000O01O001O1O001O1O001O01O01O100O1O10O010O00100O010O001O1O010O1O3L2M]]o2\"}}, {\"image_id\": 121, \"category_id\": 2, \"bbox\": [475.943115234375, 521.9661254882812, 129.00225830078125, 196.666259765625], \"score\": 0.3219183087348938, \"association_id\": 0, \"segmentation\": {\"size\": [768, 782], \"counts\": \"gaU;P1mf08I4N1N2N2O1O1O1O001O010O100O2O0O001O001O001O2NSOnYOMQf0OVZOOje0MZZO3fe0K\\\\ZO4fe0I\\\\ZO6ee0H]ZO8de0F\\\\ZO;de0D]ZO<de0A]ZOb0ae0]O`ZOd0be0XO^ZOj0[f01O3M3L3M3M2N4M2N1002N0001O01O;F?BO0O001O1O1O1O002M5L1O00002N1O10OXLPOa@o0Y?WOo@d0k>_OWA`0e>CZA>d>C]A<c>C_A>^>CdA<[>CfA=Y>DgA;Y>DjA:W>EkA9U>GmA7S>InA7Q>HQB7P>GRB7P>GSB7n=GUB7m=DWB;ib0N2N6I5K^ce06[\\\\ZO3M3O0001N101N_fU4\"}}, {\"image_id\": 
121, \"category_id\": 1, \"bbox\": [295.08306884765625, 321.9877014160156, 36.459075927734375, 115.3787841796875], \"score\": 0.07522043585777283, \"association_id\": 0, \"segmentation\": {\"size\": [768, 782], \"counts\": \"P[T7339Ug0>K4O0N8I5K4L=aYOZNle0m102N000100OjZOnMad0Q2Z[OTNfd0o1Q[OWNod0n0mZOUON9Ue0b0oZORO53md0g0T[OoN58id0e0l[OZOVd0c0k[O]OWd0>P\\\\O^OVd08o[OFXf0JeRd:\"}}, {\"image_id\": 121, \"category_id\": 1, \"bbox\": [479.8101806640625, 287.86187744140625, 218.14306640625, 351.21282958984375], \"score\": 0.060181859880685806, \"association_id\": 0, \"segmentation\": {\"size\": [768, 782], \"counts\": \"]iR<1bg0a0J9H6I6K4L4L3N1N2N3M3L4M9Gb0_O4M2N2O2ON1O2N1O1N1O00100O001O001O2N2N2N3M2N3M1O1O1O1O001N2O1O1O0O1O1N2H8F:G9D;M2O1O1M4NkNO_ZO2[e07bZOJZe0<cZOE[e0=eZOB_d0Dh[Oh0M@[d0Kh[Oa02@Vd03k[O63BSd0<R\\\\OERe0a0mLCi@f0n>@m@c0P?Bk@>U?Eg@<Y?Fd@;\\\\?Gb@:]?H`@9`?H_@9`?I_@6`?L_@5a?K_@3b?O]@Oe?1\\\\@Lf?4Z@Jg?7Z@Ei?<V@Bl?>U@@l?`0U@]Ol?d0S@^Ol?b0Q@Bn?=P@Go?9n_OKQ`05h_O3W`0Mb_O;]`0E`_O?_`0@a_Ob0^`0^Ob_Ob0_`0]Oa_Od0_`0[Ob_Oe0^`0ZOb_Og0_`0VOb_Oj0``0TO`_Om0a`0QO`_On0b`0PO^_OQ1b`0mN`_OS1a`0kN`_OU1``0jNb_OV1]`0hNf_OX1Z`0eNh_O]1W`0aNj_Oa1W`0ZNj_Oh1Wc01N3L4M3M2N2N2N20N4L3N2M3N1N2N2OO010O1O2O1N4L2N1N001O00O100001O1P\\\\OgMYb0Y2d]OkM[b0V2b]OmM^b0R2`]OQN_b0Q2\\\\]OSNcb0o1X]OUNgb0m1U]OUNkb0m1Q]OVNob0DW\\\\Ok1e0bNec0`1U\\\\OaNmc0a1o[O_NTd0a1h[O`NZd0`1c[OaN^d0_1_[ObNed0[25K7iNP[OoN_e0`0lZOVO]e0`0\\\\1DYg_2\"}}, {\"image_id\": 122, \"category_id\": 1, \"bbox\": [617.847900390625, 558.0972290039062, 165.5115966796875, 433.96649169921875], \"score\": 1.0, \"association_id\": 2, \"segmentation\": {\"size\": [1200, 1129], \"counts\": 
\"QRff02YU1;H4L4L4K5K4M4L2N3M3M2M4M3N1O2N2N1bnNCQOjNWm0e1SSO<SObNbm0T1RSOg0mNdNjm0i0PSO[2nl0iMgRO`2Um0P3L4L4M2N2M2N3L3L4M3M3O0O2O010O100O1OlN^SO\\\\Ibl0c6dSOYI\\\\l0f6iSOUIXl0m6jSOkHZl0Y7`SOgHcl0P82O101N14M4L2M3N0O2O0O2N1N2O2nUO\\\\GTg0f8jXOaGPg0a8mXOcGQg0_8kXOeGRg0]8iXOhGVg0Z8eXOkGXg0W8dXOmG[g0T8bXOoG]g0S8`XOoG^g0U8]XOnGbg0X8UXOlGjg0o:M2O1O01N2N2M4L3N2M4K5J5]Od0]Ob0M3000O100O2O1N5L5aUOjEXi0X;0000O1N1N3N1O1O1O2O01O0ZKZXObKfg0W4mXO]KSg0^4WYO]Kif0^4_YO_Kbf0[4hYO`KXf0[4YZOYKhe0c4iZOoJWe0n4R[OkJPe0R5U[OkJld0S5W[OkJjd0S5Z[OiJhd0T5^[OhJcd0V5k[O\\\\JWd0a5S\\\\OVJoc0h5W\\\\ORJkc0h5_\\\\OQJdc0k5S6L3M4L4L4O1O1O100O2N2L4L5K5K6K5K3M3\\\\OjnNiLZQ1T3onNbLUQ1[3c0L3M2H8I6K5M4WOVmNfNnR1Y1ZmN^NiR1^1f0L5K4K6J6L4N2N2N1O1O2LcUg<\"}}, {\"image_id\": 122, \"category_id\": 1, \"bbox\": [336.0552062988281, 641.8236083984375, 101.44436645507812, 150.74224853515625], \"score\": 0.9999998807907104, \"association_id\": 1, \"segmentation\": {\"size\": [1200, 1129], \"counts\": \"nY`<3YU1;F<Df0\\\\O4K5H=D7I:F6K3N3K4K5M3O11O0O2OO0000O1O2O1O1N20O100O10000O2O000O2N1000000O10O10000010O001O010O00000001O0O010O1O2M2O2M2N1O2O00101O000001O1N4N2M3O11M0O010O2OO0LjLPnNT3PR1nLnmNS3XR1N2]OjmNgMYR1R2g0mMjlNg1[S1TNklNg1iS1A:I7J6K7I8Hl\\\\[i0\"}}, {\"image_id\": 122, \"category_id\": 2, \"bbox\": [697.5151977539062, 982.4775390625, 132.47113037109375, 34.460205078125], \"score\": 0.9999997615814209, \"association_id\": 2, \"segmentation\": {\"size\": [1200, 1129], \"counts\": \"gSdi05[U111N2N01O10O2N000000koX15oofN2O1N2O3M3VkNB\\\\T1`0akNB^T1a0^kN@bT1g001O00001O0001O000001O0000000001O000000000000000000000000001O0000000000000O101O00000000001O0O1000000000000000001O000000000O100DZkNNfT11\\\\kNNeT11\\\\kNNdT11]kNNdT11^kNNcT11^kNMcT13=Nc^T;\"}}, {\"image_id\": 122, \"category_id\": 1, \"bbox\": [806.7945556640625, 606.8187866210938, 127.29864501953125, 287.49554443359375], \"score\": 0.9999990463256836, \"association_id\": 0, \"segmentation\": {\"size\": [1200, 1129], \"counts\": 
\"ggkm0h0_T1c0B?C=nlNPNLO[Q1W2dnNmM1I[Q1`2^nNlMnQ1R3H4L3N1N3N1O1O1N2M3L3M6Ic0^O<D=C8I8H8J6YQOaIRn0S7K3N3M1N2O2N1O3eROiHWl0Z8N1O1O0000000000000000000000000000000000000000000001O001O1O1O2N2N2N1O2N2N3M1O2N3L4M2N1N2N1dMeROgJ]1NPl0S5c2G:I7L3L5L3N1O2M4L3N23N2M1O0[oNoKio0R4SPORLmo0l3SPOVLlo0j3SPOXLlo0h3TPOYLlo0e3TPO]Llo0`3WPO^Llo0Q3dPOnL^o0k2iPORM[o0e2lPOZMVo0`2oPO^MTo0]2QQO_MTo0\\\\2oPObMUo0X2V2L3L7J8H:QOgQ\\\\7\"}}, {\"image_id\": 122, \"category_id\": 2, \"bbox\": [133.4597625732422, 918.7905883789062, 80.06996154785156, 19.64825439453125], \"score\": 0.9999977350234985, \"association_id\": 3, \"segmentation\": {\"size\": [1200, 1129], \"counts\": \"PZW542MUU19L4N1O2N1000000000O1000000O100O100O1O1000000O1000001O0000000O1000000000000000000000001O000000000O100000000000001O0O10000O101O1NTbgQ1\"}}, {\"image_id\": 122, \"category_id\": 1, \"bbox\": [57.11307907104492, 613.487548828125, 137.0243377685547, 343.7410888671875], \"score\": 0.999997615814209, \"association_id\": 3, \"segmentation\": {\"size\": [1200, 1129], \"counts\": \"Vib2:lR1f0fmN7kQ1T2E9H4L4K4M4L3N2N1N2N1N3O1O2N2N2O2M1O000O1N3O1O2N3M2O2M2N3L3M4I7K5J5L4L4N2L6I6L6J7J4M4L6Jc0]O5K3M3M3M3M3M3M5^TOXGRj0m8fUOWGYj0R9ZUORGfj0l9010O1O001O2L5J7I7J6K3J6B?I8G7J5N2M2L5J6K4K5I8H7L5L4L4L5K5L3N2N2N1N3M3N3O1O2N0O2O01O001O01O2O1O2N1N2N3L5K`0A:E4L5K9G9G7I:QMhmNR2WS1D7I5J5L3K<AalVR1\"}}, {\"image_id\": 122, \"category_id\": 2, \"bbox\": [744.17138671875, 885.8558959960938, 250.711181640625, 24.03704833984375], \"score\": 0.9999798536300659, \"association_id\": 5, \"segmentation\": {\"size\": [1200, 1129], \"counts\": 
\"^mYk03ZU13O1O2O000001O001O0000000000000000000000000000001O0000000000000000O10000000000O1000000000000O10000000000000000000000000000001O0001O00000O100000001O000001O000000000000000000001O000000000O2O1O0NPPR1NSPnN2N10000O1N2M3N200O10000O0100000O100000O10O100000000000000000000000000000000000000O10000000000O1000O10O100O10000000000O100000000000000000O100000000000000000000000000000000O1000000000000O10000O1O1O100O1O100O2O0O100O2O00000O2O1MVlW5\"}}, {\"image_id\": 122, \"category_id\": 2, \"bbox\": [903.0752563476562, 814.85107421875, 188.49847412109375, 48.70709228515625], \"score\": 0.999957799911499, \"association_id\": 4, \"segmentation\": {\"size\": [1200, 1129], \"counts\": \"TXbQ11_U11O1N10000O2N101N100O2O0O2N3N1N101O001N2O1O001O001O001O1O001O00001O00001O000000001O0001O0000000001O0000000000O10001O01O01O001O00001O1O1O1O100O001O001O001O001O002N2O0O00001O1O2M2O2N0Lfgb09TX]O3M2O0O2N2O0O2O0O100O100O100O100O10000000O10000000000000000000001O1O003M0\\\\OWkNa0lT1_OUkN=SU1L4K`UZ2\"}}, {\"image_id\": 122, \"category_id\": 2, \"bbox\": [357.3853454589844, 773.7626342773438, 81.907958984375, 15.83770751953125], \"score\": 0.999911904335022, \"association_id\": 1, \"segmentation\": {\"size\": [1200, 1129], \"counts\": \"n^U=2^U10O2O00000O100O10001O001N10001O000O10000000001O00000001O0000000000000O10O100000000000000O100000O100000001O00001O001O00001O000O10O1O100O1O1000O2O0O2O2Me`\\\\i0\"}}, {\"image_id\": 122, \"category_id\": 1, \"bbox\": [509.9471435546875, 608.7777709960938, 40.0943603515625, 132.94061279296875], \"score\": 0.9998286962509155, \"association_id\": 0, \"segmentation\": {\"size\": [1200, 1129], \"counts\": \"PYfb0b0]11^Q18SnN5bQ17PnN6kQ1j1M2N1N4N0O1O2OO02M200O100O101N01O001YOg0N101M3N1N3M2N3N3K4L6I7J5G9G8Ni0ROdWXe0\"}}, {\"image_id\": 122, \"category_id\": 1, \"bbox\": [889.3232421875, 662.6831665039062, 200.2728271484375, 202.07806396484375], \"score\": 0.9993621706962585, \"association_id\": 4, \"segmentation\": {\"size\": [1200, 1129], \"counts\": 
\"]lfQ1b0lT17I2N3M2O1O1O2N100001O1O1O2N1O0ON3M3N110O010N2N2N2J6M4M2O1O1O1N2N1O2N2O1O1^OTN_mNn1WR1_NcmNc1\\\\R1j0N101N1O2O001O1O1O1O1O1N3N1O2M3N2M2O2N1O001O1O1O001O1O1O1O1N2O2N1O1N2N2N2EbK`oNa4]P1<M2O1N2N3N1N3M3N2M2O1O0O2O001O000010O001O10O01O001N101O0O2N1O2N2M4C=J7J4L4L4N2M3M3K5J7L2N3N2O0O2N3M2O1N2O1O1O1O1O2N1N101O1O1O2N2MkmNgMVQ1X2inNPNQQ1o1mnNTNRQ1k1onNVNQQ1i1nnNXNSQ1f1nnN[NSQ1c1lnN_NTQ1_1mnNbNSQ1]1lnNfNRQ1Y1onNhNQQ1W1onNiNQQ1V1QoNjNoP1U1QoNkNPQ1S1RoNmNnP1R1RoNoNnP1P1SoNPOmP1o0SoNSOlP1k0VoNTOkP1k0UoNUOlP1i0VoNWOkP1g0VoNXOkP1f0XoNXOjP1f0WoNYOjP1e0YoNYOgP1f0\\\\oNWOfP1g0^oNQOiP1j0_2PORo\\\\1\"}}, {\"image_id\": 122, \"category_id\": 2, \"bbox\": [429.24310302734375, 746.3671264648438, 32.865631103515625, 9.30413818359375], \"score\": 0.9817236661911011, \"association_id\": 6, \"segmentation\": {\"size\": [1200, 1129], \"counts\": \"]dh?3]U11N2OO10O11O000000000001O0000000000000000000000O101O0O1Od]`h0\"}}, {\"image_id\": 122, \"category_id\": 1, \"bbox\": [432.26361083984375, 624.246826171875, 50.969696044921875, 112.85858154296875], \"score\": 0.9806120991706848, \"association_id\": 0, \"segmentation\": {\"size\": [1200, 1129], \"counts\": \"kSl?319mT171N5L2KSj50SUJ6fkN9UT1c0N2N2O1OOO8I3M1\\\\lN[N\\\\S1Q2J5L3M4J9H3M2N200O10000000O1O100O2N1N4K5J:G4L4N1O1O4M3M1N7ZOVlNRO0Na`eg0\"}}, {\"image_id\": 122, \"category_id\": 1, \"bbox\": [750.0094604492188, 600.5925903320312, 76.10394287109375, 301.21429443359375], \"score\": 0.9767674803733826, \"association_id\": 0, \"segmentation\": {\"size\": [1200, 1129], \"counts\": 
\"hk`k0=iT1c0D<Fb0`lN_NPR1b1hmNeNXR1[2N2M2N2O000O1O3L3H8I5nJZL[XOg3\\\\g0dL`XO^3`g0cL^XO^3ag0dL]XO^3ag0eL[XO\\\\3fg0hLRXO\\\\3ng0hLlWOY3Uh0hLiWOY3Vh0cLkSOFa3g3eh0bLgSO0\\\\3`3nh0]MlVOd2Ui0fM_VO[2ai0gMZVO[2gi0eMgSOWNR2X4Wj0PNcUOR2_j0nMaUOQ2`j0oM_UOP2bj0RN[UOo1ej0RNWUOP2ij0RNPUOT2Qk0oMkTOQ2Uk0SNnTOe1Sk0\\\\N^TOWMZO\\\\4Xl0]4O0101N2M3L3N3M3J6[Oe0I7K5J7lLoQObNUn0Z1nQObNXn0[1iQO_N^n0[1hQO_N^n0\\\\1gQO^N`n0_1cQO\\\\N_n0c1aQO^N^n0c1aQO]Nan0b1aQO\\\\Nbn0b1bQOWNdn0f1^QOUMoNb0go0h1iQOeMcN?lo0P1TROCTn0NQRO4iQ1oNTlN3UT1EoaZ;\"}}, {\"image_id\": 122, \"category_id\": 1, \"bbox\": [421.069580078125, 626.21533203125, 38.42303466796875, 115.896240234375], \"score\": 0.8321348428726196, \"association_id\": 6, \"segmentation\": {\"size\": [1200, 1129], \"counts\": \"daa?c0gT1f0[O=E5IXNblNk1kR1e0L4N3M1010O2OO01N3nNgmNhN[R1o0Y1N00002N01O2O0O2O01YOnkNLTT14mkNITU1DmjNO^kih0\"}}, {\"image_id\": 122, \"category_id\": 1, \"bbox\": [760.1818237304688, 622.9113159179688, 249.1295166015625, 259.030517578125], \"score\": 0.6934115290641785, \"association_id\": 5, \"segmentation\": {\"size\": [1200, 1129], \"counts\": \"^[bm0i0aT18L4L4L<D5K6K4L4L4L3N3L4M1N3M2O3L5K3N2MO1M3M201O01N10O001202M5K8I3L3N1O2N3LM4^O]nNTMfQ1i2bnNoLaQ1n2b0D<L4M3N2F:000000O1000000O1M31XROUNig0k1SXOYNmg0g1RXO[Nmg0e1SXO[Nmg0e1RXO]Nmg0c1RXO^Nng0b1QXO_Nog0b1PXO^NPh0b1oWO`NPh0`1PXO`NPh0`1oWOaNRh0^1mWOcNSh0]1kWOeNUh0\\\\1hWOfNXh0Z1gWOgNYh0Z1dWOgN]h0Z1aWOgN_h0Z1`WOfNah0W1aWOiN`h0U1aWOkN`h0S1aWOmNbh0o0_WOQOeh0j0\\\\WOVOhh0f0XWOmNkJMPn0S1WWOoNjJMQn0R1UWOPOkJNQn0Q1XWOgNoJ6jm0S1gWOhN[h0Y1TROgN\\\\S1Z1_lNmN^S1e1N1N2O1N10002N001O100O2N4SoNhMYn0]2`QOgM^n0\\\\2foN_M72?5do0U2^POfML6fo0P2cPOmMmN0:4WP1m1dPO]NPOG]P1j1dPObNkNEcP1f1cPOWO^o0g0dPOXO]o0f0dPOZO\\\\o0f0dPOZO]o0d0dPO\\\\O]o0c0cPO\\\\O_o0b0aPO_O`o0?aPO@bo0<`POCfo05]POKho0K]PO4fo0I[PO7fo0G[PO8ho0EYPOOYN\\\\O`Q1c0ZPONTP1NPPOObP1^OaoN`0iR1N2N2N1O2N2HfbR7\"}}, {\"image_id\": 122, \"category_id\": 2, \"bbox\": [356.36956787109375, 715.759765625, 80.19525146484375, 72.12188720703125], 
\"score\": 0.24922886490821838, \"association_id\": 0, \"segmentation\": {\"size\": [1200, 1129], \"counts\": \"m_\\\\=3]U1001O001N1O1O20O00O1000000O2O000000000001OO100001O00000000O1000000000000O10000001OO100001O0000001O0000001N2O1O00O100O1O10O0100000O2N1O4LTf]i0\"}}, {\"image_id\": 122, \"category_id\": 2, \"bbox\": [440.16314697265625, 725.02587890625, 50.219268798828125, 7.61322021484375], \"score\": 0.24602144956588745, \"association_id\": 0, \"segmentation\": {\"size\": [1200, 1129], \"counts\": \"geV`03]U10000000000O11O00O1000O100000000000000000001O0O1000O1000000000000000000O1000000O2OjTcg0\"}}, {\"image_id\": 122, \"category_id\": 1, \"bbox\": [712.6212768554688, 567.9708862304688, 96.59332275390625, 333.104248046875], \"score\": 0.14667364954948425, \"association_id\": 0, \"segmentation\": {\"size\": [1200, 1129], \"counts\": \"oe^j08UU16K4WkNEXT1k00O00001O000004Lf0ZO:F;F1O1O2L9Hb0bmNmL[Q1]4XO1OO2MgoN]Kjo0_4VPOcKko0\\\\4SPOgKlo0Y4QPOlKmo0Z2noNlN4\\\\O_o0b1ePOQOFBio0Z1\\\\PO?io0]ORPOg0RP1UOloNm0YP1cN]POS1go0`NlPOW1To0TNbQOV2Qn0gMQROY2Qn0fMmQOR2Pm0`M[ROj06b1ml0\\\\NmRO25b1ml0\\\\NoRO\\\\On0[2kk0XNZSOZOS1_2[k0WN\\\\RO]Oe3V2li0nNUSOkNo1S2ZNPNoj0S1fTOoNR2k1aMSO]k04oTOQOT2h1WM[Ock0OnTOROX2f1PM]Ohk01hTOPOa2V3fh0PO[WOP1ch0QO^WOn0ah0SO_WOn0`h0SO_WOn0_h0TOaWOl0]h0UOcWOk0]h0TOcWOm0]h0SObWOn0^h0ROaWOo0`h0PO^WOR1bh0POXWOU1gh0lNUWOX1kh0gNTWOZ1nh0dNRWO]1oh0`NRWOa1oh0]NQWOc1Qi0ZL`TOV1Z2c2Wi0PLkTOX1i1k2cj0RMXUOQ3jj0nLSUOU3lj0mLnTOW3Rk0lLgTOY3Yk0iLbTOUOdNo2ml0nMZTOROoNj2kl0TNRTORO[Ob2fl0^NjSOPOE]2dl0eNfRO_N8d0<W2hl0]OdROaN>m1Xm0n0bROfNom0Z1oQO[N]n0e1cQOTNcn0l1^QOoMgn0Q2YQOjMmn0U2RQOhMRo0d1fPOUM6V1Xo0l0cPOnM6MLU1bo0e1aPOiNho01UPOS1PP1hNSPOU1QP1SNmoNYO4a2aah;\"}}, {\"image_id\": 122, \"category_id\": 1, \"bbox\": [452.53167724609375, 619.421875, 40.1077880859375, 112.73516845703125], \"score\": 0.1456715613603592, \"association_id\": 0, \"segmentation\": {\"size\": [1200, 1129], \"counts\": 
\"a_c`0U1YT12N9I4VlNiNRS1X1olN`NJ4RS1`1QmNPO_R1Y1_mNiN\\\\R1X2M2O101N100000O100N3N1O2M4L2L7L4L2N2O1O002OO01N7J2M2N9G5K7WOkkNJZT1NokNLST1JgkNNdoZg0\"}}, {\"image_id\": 122, \"category_id\": 1, \"bbox\": [472.4115295410156, 618.5206298828125, 25.869873046875, 106.73333740234375], \"score\": 0.08654049038887024, \"association_id\": 0, \"segmentation\": {\"size\": [1200, 1129], \"counts\": \"eX]a0Z1ST1;G8B:K300O01OO2O000O11OO12M1O2N5J5Gk0QOcZVg0\"}}, {\"image_id\": 123, \"category_id\": 1, \"bbox\": [530.8508911132812, 194.93408203125, 89.739990234375, 349.9918212890625], \"score\": 0.9999990463256836, \"association_id\": 1, \"segmentation\": {\"size\": [1200, 1600], \"counts\": \"^ogc02WU1:G:J5J7J4K5J8I7I5J7H8I8dQO]M_i0i2SVOeMhi0`2QVOfMki0_2mUOhMoi0\\\\2bUORNZj0U2WUOXNej0n1TUOVNij0o1RUOTNlj0o1nTOVNoj0n1mTOUNPk0o1mTOSNQk0Q2lTOPNQk0U2kTOmMSk0W2hTOlMSk0\\\\2fTOhMQk0d2iTO_Moj0k2lTOXMlj0R3nTORMlj0W3kTOoLRk0i6001O1O001O0_NRUO`Hnj0[7ZUObHgj0Y7_UOeHaj0Y7cUOeH^j0X7fUOfHZj0[7eUOeH[j0[7eUOeH[j0[7fUOdHZj0]7dUOdH\\\\j0\\\\7dUOdH\\\\j0\\\\7dUOdH\\\\j0^7aUOcH_j0`7\\\\UObHdj0R3mTO5;jLhj0l2\\\\UOjMVOe0a0gNlj0e2cWOZN]MSOPk0_2PXOZNhLZOXk0Z2VXOBjg0<YXOCgg0;]XOCbg0;cXOC]g0;iXOAWg0>lXO@Tg0>PYOAof0?RYO@nf0`0SYO_Olf0a0VYO^Ojf0b0XYO\\\\Oif0b0[YO[Off0b0_YO[Oaf0d0bYOZO^f0b0hYO[OYf0b0lYO\\\\OUf0`0RZO\\\\Ooe01bTO]Ng5^1he0IU[O3ld0F^[O6ed0Cb[O:ed0YOd[Od0gl0M4K4M3L4KemkS1\"}}, {\"image_id\": 123, \"category_id\": 1, \"bbox\": [759.427001953125, 0.0, 304.9796142578125, 1018.5346069335938], \"score\": 0.9999985694885254, \"association_id\": 2, \"segmentation\": {\"size\": [1200, 1600], \"counts\": 
\"ciZl02\\\\U16J6I7I6J7G8H9F9F:J5L4K5M2N101O0O2O001O001N101O001N101N2O0O2VGSMo^Om2W`0WN__Oj1W`0gN`_OZ1\\\\`0PO^_OQ1^`0XO\\\\_Oh0a`0@Z_O`0a`0I[_O7``04Z_OM_`0>\\\\_OB\\\\`0l0^_OTO]`0Y1[_OgNa`0d1X_O\\\\Ne`0l1V_OTNg`0S2U_OmMi`0X2T_OiMi`0\\\\2T_OdMj`0a2S_O_Mj`0g2S_OYMj`0o2R_OPMi`0Z3R_OfLi`0f3P_OZLf6[OgKe4_MoKe61YKW4nMhKe6b0mJl3[NbKe6j0iJh3aN^Kc6o0gJg3dNZKb6U1eJe3gNVKb6Z1bJd3kNQK_6c1`Ja3nNlJ_6k1[J_3SOfJ]6U2WJ]3XO^J]6`2oI\\\\3_OTJ^6j2gI]3FiI`6Q3bI]3KbIa6V3^I^3O[Ia6\\\\3ZI^33VIa6`3XI^35RIb6c3UI_37nHc6f3RI_3<iH`6m3nH^3b0dH^6S4jH^3h0]H\\\\6[4cH_3Q1THZ6d4UHe3a1eGW6Wb0jIf]OS6`b0mI]]OP6ib0oIU]Om5Sc0QJk\\\\Ol5\\\\c0RJb\\\\Ol5bc0RJ^\\\\Ol5fc0QJ[\\\\On5hc0oIY\\\\Oo5kc0nIV\\\\OQ6mc0lIT\\\\OS6oc0iIS\\\\OV6Pd0gIQ\\\\OW6Sd0eIo[OZ6Td0bIn[O]6Ud0_Im[O`6Vd0]Ik[Ob6Yd0YIi[Oe6\\\\d0WIe[Og6bd0RI`[Ok6ld0iHW[OT7Ue0^HP[O_7\\\\e0PHlZOk7_i0M3M3N2N2N2N2N2M3N2MVEbHhG[7Y8iHeGS7\\\\8SIaGi6a8ZI^Gc6c8aI[G]6f8fIXGY6j8eIWGZ6m8aIUG^6n8^ITGa6P9ZIRGe6R9TIRGk6jc000O1O100O1O1N2O1N2N2N2N2M3M3M3N2N2N2O1N2O1O1O1O1O1O100O1000000000000O1000000000000000000000000000001O00000000000000000000000000000000000000000000000000000000000000jB`J\\\\J`5\\\\5nJ^JR5X5^KbJb4Q5SLgJm3g4PMnJP3]4oMYKQ2_4^NZKb1`4jNZKV1b4SOYKm0c4[OYKe0d4AYK?d4HXK8d40WK1d4:VKF`4k0YKUOZ4b1\\\\K^NV4^2WF[IZOW4V:o3\\\\DTIP1m2]:c9WE]Fc:n9XERFd:U:YEkEc:\\\\:ZEdEb:a:]E_E]:h:bEXEU:R;jEnDl9];SFcD_9k;aFUDQ9Z<nFfCf8g<YGYC\\\\8S=cGmBX8Y=gGgBT8^=lGbBP8c=oG]Bm7h=RHXBj7m=UHSBe7S>[HmA]7\\\\>aHeAQ7j>nHVAb6[?]Ie@m5R`0RJn_O[5f`0dJZ_OV5m`0iJS_OR5Sa0lJn^Oo4Xa0PKh^Ol4^a0RKb^Oh4ga0UKZ^Od4oa0XKR^O`4Yb0]Kg]O]4cb0_K]]O\\\\4kb0`KV]O[4Rc0bKn\\\\OZ4Xc0dKh\\\\OY4]c0dKd\\\\OY4ac0eK_\\\\OX4ec0gK\\\\\\\\OU4ic0gKY\\\\OU4mc0hKT\\\\OV4Pd0fKR\\\\OW4Td0dKo[OY4Vd0bKl[O\\\\4Yd0_Ki[O`4dj0N3N1O1N3N2N2M3M3M4K4L4K6I7K5K7J7I6J7I7I7H9H=C>A=Cmlic0\"}}, {\"image_id\": 123, \"category_id\": 2, \"bbox\": [925.2705688476562, 816.8483276367188, 301.03021240234375, 238.13543701171875], \"score\": 0.9999985694885254, \"association_id\": 2, \"segmentation\": {\"size\": [1200, 1600], \"counts\": 
\"QQYT1d1dS1<J5L2N2O1N2N1N3N1M4M3M2N3O1N1O2N100O1N2O2M2O100O1O100O1O1O1O1O001O1O100O100O010O1O1O1O1O1O1000000O101O00000O2O00000O2O0O2N1O2N101N101N101OO100O1O1O1O1N2O1O1O1O010N2O1N2K4M4O1O1O00100O100O1O1O001O1O100O010000000000O10000000000O100000000000000O100000O10000000O100000O10000000O01000000O10O10O100O100O10000O10000000000O100000000000000O101N1O100N3O0O1O100O101O0O100O1O1O1O2N100O101N101O001O1N101O001O0O100O1O1O1O1O2N100O100O2O0O2N1O2N2N2N2N1O2O0O2O1N2N1O3L3N2O1N2N2O1N2N2N;@V^e=\"}}, {\"image_id\": 123, \"category_id\": 2, \"bbox\": [545.3087158203125, 526.8527221679688, 201.19879150390625, 37.18438720703125], \"score\": 0.9999855756759644, \"association_id\": 1, \"segmentation\": {\"size\": [1200, 1600], \"counts\": \"iaQd05ZU13N00001O1O00010O000000000001O00001O001O001O00001O001O10O000001O000000000000N200O100O10000000000O1000000000000000O100000000000001O0000001O00001N100000_nj0OaQUO100N22M10C482N10000O100000O10O1000000000O10000000000000000000000000000O1000O1000000000000O1000000000O10O100000000O1000000000O01000000O10O100000000O10O1000O1000O10O10000O1000O01000000O1000000O2O0O4LZXXo0\"}}, {\"image_id\": 123, \"category_id\": 1, \"bbox\": [1139.74951171875, 336.5537414550781, 58.4517822265625, 84.43594360351562], \"score\": 0.9998723268508911, \"association_id\": 0, \"segmentation\": {\"size\": [1200, 1600], \"counts\": \"VejY17TU1<F:F4H8L4M3@fNalNa1YS1>N2L4O1O1O1000000000001OO1001O00001O001O00000000001O0000000000O1000000O2O1N2N2I8cNamN]OlR12cjm>\"}}, {\"image_id\": 123, \"category_id\": 2, \"bbox\": [1152.3541259765625, 413.6571350097656, 50.4591064453125, 10.105316162109375], \"score\": 0.061350397765636444, \"association_id\": 0, \"segmentation\": {\"size\": [1200, 1600], \"counts\": \"_mYZ12]U12O00000000001O00001O0000000010O001O001O001O1N11N100O101N10000000O1000O10000000O2O00000O101NPPb>\"}}, {\"image_id\": 123, \"category_id\": 1, \"bbox\": [26.83175277709961, 34.63849639892578, 371.0574645996094, 358.5837097167969], \"score\": 0.058724116533994675, 
\"association_id\": 0, \"segmentation\": {\"size\": [1200, 1600], \"counts\": \"ieo0S9[l0:G6J4L3N1N3N00001O001O00001O0000001O0000001O0000001O00001O00001O00001O00001O00001O0000001O00001O001O1O001O1O001O1O1O1O001O1O001O001O001O00001O00000000000000000000001O000000000000000000000000000000000000000000000000000000000000000000000000O10000000000000000O10000O100O100O100O1O100O100O1O10000O100O100O100O10000O100O100O10000O100O100O10000O100O10000O1000000O100000000O100000000O1000000000000000000O100000000000000O10000O10000O100O100O1O100000000O1000000O100000000O100O100O100O10000O10000O100O1O100O1O1N2N2N2N2N2M3O1O1O1O1O1O1O1O1O1O1\\\\O`SOWHal0f7fSOUH[l0h7h0M3M3O1N2O1O1O1O1O1O100O10000O10000O1000000O1000000O1000000O100O2O0O100O1O1O1O1O2ZJ`QOW4cn0gK`QOU4cn0iKaQOP4en0mK^QOn3gn0nK^QOj3in0QL]QOe3ln0RL^QOc3^P1K6J5K6J5K5M3L4M2N2M3N3L4L4K6J6J6J6J7J6J6J<D[nU]1\"}}, {\"image_id\": 124, \"category_id\": 1, \"bbox\": [445.37823486328125, 72.73228454589844, 175.938720703125, 367.84967041015625], \"score\": 0.9999998807907104, \"association_id\": 2, \"segmentation\": {\"size\": [533, 651], \"counts\": \"S`\\\\71]`0?^O`0A<K4L4L3M4L3M3M3N2N2N2O1N3N1N4M3L<E:Fc0\\\\O8H:ZGULP4T4_KVL^4n3ZKXLd4l3TKZLi4k3PKYLn4n3gJYLW5P4WJZLh5n3gI\\\\LX6h3aI\\\\L_6g3ZI]Le6h3jHeLV7]60001O0O100000000O10000O100O00100N2O1N2O1O1O10O01O01OO10000N1O1M4H8J6N2O2O2M3L3M4M3N2O1N3N2N2M2O001O000O1000000000000000cIQKT1o4jNXKQ1h4lN`KABhLn4c3iK]OEgLb4X3dLHPOlL\\\\4S3PMLiNmLX4U3SMJjNnLS4V3VMJkNmLo3X3XMGoNlLk3\\\\3ZMCPOmLg3^3^M@]4>jK[OX4c0mKYOT4e0PLXOQ4h0RLSOP4n0SLlNo3U1ULcNn3FfHIa39k3LkHG\\\\38k3OoHEY38i32QIEV37j33RIGU31k35UIIV3Hh3<VIMb3iNd3X1mH0g;N\\\\D4b;J`D7^;IcD8\\\\;FhD9W;FkD:T;DoD<Q;BQE?n:^OTEc0l:ZOVEf0l:UOWEj0m:lN[ES1V=M2O1N1O010O001O1O1O1O3M3M1O2N100O1O2O2M3N1N100O2O013L3L10O0001O00002MS^`0\"}}, {\"image_id\": 124, \"category_id\": 2, \"bbox\": [389.4351806640625, 380.7092590332031, 130.4066162109375, 68.02294921875], \"score\": 0.9999998807907104, \"association_id\": 2, \"segmentation\": {\"size\": [533, 651], \"counts\": 
\"d\\\\[68\\\\`02N2N2M2O2N2O0N3N1O2O0O2O1N1O2O001N100O2O001O1O1N10@g@EX?;j@CV?<m@CQ?>o@CP?=QABo>>QACn>=SADk><UAEj>;WADh>=XADf>>YACf>=ZADe><\\\\ACc>>]ABc>?\\\\ABc>>]ACb>>^ABa>>_AC_>>bAA^>`0bA@]>b0cA\\\\O]>e0cAZO]>f0dAYO\\\\>g0dAYO\\\\>h0gATOX>m0kAPOU>P1lAoNT>R1lAlNU>T1f01O00001O001O00001O001O001O0000100O0000100O00001O0000001O001N11O1BTAPOm>7X1K`1LePh2\"}}, {\"image_id\": 124, \"category_id\": 1, \"bbox\": [99.51834106445312, 104.97064971923828, 222.45852661132812, 365.2761535644531], \"score\": 0.9999998807907104, \"association_id\": 1, \"segmentation\": {\"size\": [533, 651], \"counts\": \"V_d1P1b?4L5M2M2O1O1O1O1O1O100000000O1000000000000O10000O100O1O0O2N2_JlN_KV1[4SOaKm0^4WO_Kj0`4XO^Ki0a4YO^Kf0c4[O[Ke0e4^OXKa0j4BRK=P5GjJ8X5LcJ4_5O\\\\J1f51VJ0j53TJKm57QJHo5<oIBQ6b0mI]OS6f0kIXOU6m0gISOY6Q1dImN]6U1bIjN_6X1_IgNb6[1[IdNg6^1VIaNl6b1PI^NQ7U2ZHkMg7f2fG\\\\M[8k2\\\\GVMe8l2WGUMj8m2SGSMm8P3PGPMP9T3kFmLU9_3]FbLd9V501O1O11O0100O1000O0100O1O001N1N3N100000O1000O0O2O2N10001O00000O00001000010O100000O010000O1O010O1O1N2N20O000O2N1M4M2O2N2N2L4I7VOi0J7M3N2N200O010O10O01000N3N3J5K5J7K4M2N3N2N3M3L8G:]HWIj4o6nJXIn4h3eIYNX1TNP5b3PJWNj0[NU5\\\\3UJYN`0aNY5T3[J[N5gN^5k2dJi0Z5SOlJl0S5QOTKm0j4QOZKn0d4QO`Kn0_4QOcKo0\\\\4POfKo0Z4oNhKQ1X4mNlKQ1T4mNoKR1Q4lNRLS1n3lNULR1k3mNXLQ1h3nNZLQ1f3nN\\\\LQ1d3oN^Ln0c3RO^Lm0b3SO`Lk0`3UObLh0_3XObLg0^3YOdLe0\\\\3[OfLb0[3@dL`0[3CdL;\\\\3HcL7\\\\3NbLO^3<ZLBe3`0\\\\L]Od3d0]LZOc3g0]LXOc3h0_LVOa3j0bLbMbM\\\\Ok5R3iLTMfMHa5T3lNkLT1U3nNiLR1W3oNhLQ1Y3oNfLQ1Z3POeLQ1Y3ROeLn0[3TOcLl0]3UObLk0j0\\\\J<l4hNh0h0eJ;d4lNh0e0kJ:^4QOg0c0PK9Z4SOg0a0SK9X4UOf0?VK8V4XOf0<YK9S4ZOf07]K;P4\\\\Oj0N]Kc0k3^On0]OeKR1_3_O\\\\3>hL^O\\\\3>lLZOW3c0`6M3M4L_V_5\"}}, {\"image_id\": 124, \"category_id\": 1, \"bbox\": [326.8683776855469, 85.31805419921875, 151.09524536132812, 271.496826171875], \"score\": 0.9999997615814209, \"association_id\": 3, \"segmentation\": {\"size\": [533, 651], \"counts\": 
\"VaZ54W`0`0G6K3M2N200O2N1O1O001N2RLPOVHP1g;0cJkN[KU1b4PO\\\\KQ1`4TO^Kl0b4VO]Ki0c4XO[Kj0e4VOZKk0f4VOUKm0l4SOQKP1n4QOPKP1R5oNlJS1T5mNjJS1Y5lNdJU1_5jN_JT1g5jNVJW1m5hNQJX1Q6hNnIW1U6hNjIW1Y6hNeIY1\\\\6fNcIZ1`6eN^I\\\\1d6cNZI^1g6aNYI^1k6^NVI`1P;O001O000000100O01OO1M4M3O10lLiNkFV1V9kNhFU1Z9lNbFT1a9mN\\\\FS1e9nNZFR1h9mNWFS1j9mNTFT1n9jNQFV1Q:iNnEV1h<10000O2dMnNRET1n:nNPER1Q;oNlDR1U;nNjDQ1Y;nNfDQ1\\\\;oNbDR1_;nN_DS1c;lN[DU1f;lNXDU1h;lNVDT1k;mNQDT1Q<mNkCT1X<mNcCU1]<nN_CS1b<POXCS1g<VOhBS1X=U1O1O1N4M3M2N3N1N2O2YNYL\\\\Fm3\\\\9hLPF`3h9cLVF_3g9dLVF^3i9cLTFa3j9`LRFe3l9\\\\LoEk3n9WLeEV4Y:T1N2O2N2N2N1O1O001O001O002O0O3M1OQNgFdLW9Z3QGaLn8\\\\3XGcLf8Y3`GhL]8U3hGmLU8VOnFi2R1QN_OdN`7`0SHg2R1TNSOmNh75WHf2Q1\\\\NfNnNS8MZHd2Q1ZOf6fMgHj2g0_Oj7=YHBi78\\\\HFg74^HKe7nNaIo0i6aNaI[1o:L3N2N3M2O2N2M2O2M3L8H8H4K5K5Jbkl2\"}}, {\"image_id\": 124, \"category_id\": 2, \"bbox\": [602.4940795898438, 296.8146057128906, 44.9871826171875, 28.352783203125], \"score\": 0.9999996423721313, \"association_id\": 0, \"segmentation\": {\"size\": [533, 651], \"counts\": \"dVk92b`02M3O1N2O00000000001O0O2O001O0O2O1N2O0O100O2O000O100001O00002N3M5K3N0O2N10O01O10O0gh1\"}}, {\"image_id\": 124, \"category_id\": 2, \"bbox\": [96.8724594116211, 363.7240905761719, 182.93063354492188, 122.71356201171875], \"score\": 0.9999985694885254, \"association_id\": 1, \"segmentation\": {\"size\": [533, 651], \"counts\": \"hSg12b`02003MO001O001O01NgP>`0fnA:I3M2N1O1O1O1O1O100O10O01N20O0100O1O100N3O0O1O010N110O1O010O1O1O0O2O100O00000000001O01O01N100010O000100O010O10O10O0010O00100O10O0100O0010O010O0001O001O010M2H9M20100O100O100O1O10001O00000O2O01O001O10O0000000O1O1N2O1O2N3M4IZX^6\"}}, {\"image_id\": 124, \"category_id\": 2, \"bbox\": [298.6781311035156, 307.9054260253906, 163.67800903320312, 61.1434326171875], \"score\": 0.9999984502792358, \"association_id\": 3, \"segmentation\": {\"size\": [533, 651], \"counts\": 
\"mQn43a`02N101O0000001O000000000000001O0000001O01O01O0O1000000000000000O1000000O1O010O10O001O001O010O1N1O10100O010O00010O10O01O0DH\\\\@8d?KY@6f?<01O00000100O100000000O1000000000000000001O0O10010O01X@ZO_?g0_@ZOa?m0001O0O2O1N101O001O1N101O0O2O0O2N2O000O2O1N1O2N100O101O0O2O000000001O00001N101O002Nbb_3\"}}, {\"image_id\": 124, \"category_id\": 1, \"bbox\": [367.6532287597656, 80.41571044921875, 204.76254272460938, 339.9873046875], \"score\": 0.5245128273963928, \"association_id\": 0, \"segmentation\": {\"size\": [533, 651], \"counts\": \"hio5e0h?=I5L2N01N2O001O001O000O0O1O1O200O101N2M3N1O2N2N1O2O0OO2M2M4M3O2O001O0O2O0bNYOnBh0m<@`BIEf0l=EYBl0h=WORBl0n=m00001O1O002N3M4K7I6K5K4L3VN^L\\\\Ff3`9fLTF^3i9eLSFc3e9aLVFl3^9WL]FP4_9RL\\\\FU4`9mK\\\\FW4a9mKZFV4e9mKVFV4i9nKQFU4n9Z1O001N2O01O0010ORNlF[LU9^3VG]Lj8`3\\\\G^Ld8KTFi2\\\\1ZMa8IaF`2Q1gM`8CeFa2o0jMc8YOgFg2j0oMc9k1cFTN^9f1hF[N_NXOR:Y2cGaNQN]O]:n1fGWOY88]EgN\\\\2P1Y84_EkNZ2P1Y8oN\\\\E9=ER2P1m9nNYFl0i9RO[Fk0f9RO\\\\Fm0h<N1OiLXOdFd0Z9@hF=U9GnF4Q9NSGMT8IQE=m2FR81kD=V3]On7;fD<l>i0L4M3N3M3M3M2N2N3M4L5[B\\\\NP<Z3nFZLj4l3lJZLQ5k3iJZLT5i3gJZLX5j3bJYL]5k3[J[Lc5b4YIfKf6U7O100O100000000O10000O1O1N2N2O1O1O100000000O100000000O001O1O1O001N2M3M3L4A?L4M3N2O1O2N1O101N100O1000000000000000O1000000PJYJR1g5iNdJP1]5jNnJP1S5iNZKP1g4aNlKU1Z4iNkKe0d4YOaK=f4B^K9d4F_K7b4HaK5`4JbK4_4KeK1\\\\4NiKLY43lKFW49lKCV4<mK@U4`0mK]OT4b0oK[OR4e0QLWOP4i0SLSOn3m0WLlNk3T1YLeNj3MZHDo3;i3O^HBk3<i3ObH@h3=i3OgH\\\\Oe3b0g3LnHXOf3b0eTY1\"}}, {\"image_id\": 124, \"category_id\": 2, \"bbox\": [89.1178207397461, 470.1244812011719, 35.611900329589844, 18.212249755859375], \"score\": 0.08467832952737808, \"association_id\": 0, \"segmentation\": {\"size\": [533, 651], \"counts\": \"QZ_11``07L1O2O0O2O00000000000000O0101O0011O001N1O1O001O1O00001O2N2N2NO1Z[b8\"}}, {\"image_id\": 125, \"category_id\": 2, \"bbox\": [257.3742980957031, 461.2758483886719, 525.773193359375, 236.96981811523438], \"score\": 0.9999994039535522, \"association_id\": 1, \"segmentation\": {\"size\": [768, 
1024], \"counts\": \"\\\\mR63jg05K5M3M2O1N100O2O000O10000O100O100O00100O100O10000O10000O0100O001O00001O00000001O0010O0100O010O10O00010O00001N1O101O00001O01O0100O010O0001O000000O01O100O101O0010O010O01O010O00001O0N2oNVOiZOo0ld0W1N2O20O010O10O0100O010O1O010O001O0O11O00010O010O10O10O10O10O10O010O01O0O1O2N1O2O00100O0010000O010O1O100O1N101N2N1O2O1O010O10O10O01001O0O10001N100O2O0O100O2N100O10001N10000000000000001N10000000000000000O10001O000O100O2O0O100O1O2N100O2O0O2O000O2OO1000000O10O1000O1000O10O1000O100O100O010O100O100O10000O10O0100O1000000O100000O10000O100O100O1O100O10O010000O10001O0O1000000O101O0O100O1O101N100O10000O2O001O000O2O000gMi[Oi0Wd0WOl[Of0Ud0XOo[Od0Rd0[OQ\\\\Oc0oc0\\\\OS\\\\Oc0nc0\\\\OS\\\\Oc0mc0\\\\OT\\\\Oc0mc0]OT\\\\Ob0mc0\\\\OU\\\\Oc0kc0]OV\\\\Ob0jc0^OV\\\\Oa0kc0^OV\\\\Ob0kc0]OT\\\\Od0lc0\\\\OT\\\\Oc0nc0[OS\\\\Oe0nc0WOT\\\\Oi0nc0oNY\\\\OQ1hc0mNY\\\\OR1ic0lNX\\\\OT1ic0jNW\\\\OV1jc0iNW\\\\OW1jc0gNW\\\\OY1jc0eNW\\\\O[1Ve0010000000O010O100O001N2O0O1O2_O`N]ZOd1be0=01N101O0O2N2N101O1O1O001O1O001O0O10001O010O0010O10O1000O01000O0100O01O01O01O1O010O10O10O1000000O10O10O100O1O100O1O100O10000O1000000O1000O0100O100O1O10O0100O100O0100000O010000O10000O1000000O1000000000000O0100000000000000000O10000000000000O01000000000O10000O2O0O10000O1000000O100000000O10000000000O1000000O1000000O10001N10000O1O100O2O0O100O101N1O100O101N100O2N101N3M2M4M3L8DfXd5\"}}, {\"image_id\": 125, \"category_id\": 1, \"bbox\": [387.9173889160156, 207.75753784179688, 414.5703430175781, 336.8439636230469], \"score\": 0.9999974966049194, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": 
\"enV91og01N2O01N10g[56RdJd0[O3M3N2N2M3M4L3M4J9G<A:I7L3M3M3M2N2O0O2O1N2N2N3L4L4K4L5L3M2O2N2N1O2N1O2N2M2N3L5D;^Ob0L4L4L4D;O2N101N101O0O2O1O1N2O2N1O1O0O2O001O00001O001O001O00^JX_Om3g`0jKe_OS4[`0jKj_OU4U`0jKn_OU4Q`0jKR@U4m?jKV@U4i?jKY@U4h?iKZ@W4e?hK]@W4c?hK^@Y4b?eK`@Z4`?eKb@Z4^?cKe@^4[?XKo@g4Q?TKTAl4m>PKWAo4i>oJYAR5g>kJ\\\\AT5h`001O0001Q_OPKl>P5n@VKR?k4l@WKR?j4m@XKR?i4l@XKT?h4j@[KU?f4i@[KW?f4f@\\\\KY?f4e@\\\\KZ?f4b@\\\\K^?e4`@\\\\K`?e4]@^Kb?b4]@_Kc?b4\\\\@^Kc?d4[@]Ke?c4Z@_Ke?b4Z@]Kg?c4X@^Kg?d4X@\\\\Kh?e4V@\\\\Kj?d4U@]Kj?e4T@\\\\Kl?f4P@\\\\KP`0Z6O1O100O100O1O01000O10O10OhNo_OcJQ`0[5X@]Ji?b5[@[Je?d5`@XJ_?h5e@TJ\\\\?k5i@QJW?o5n@lIR?S6QAjIP?V6RAhIn>W6^100O100O1O1O100O010O10000O10000000000000000000o_O^Jd=b5XBcJg=]5UBhJj=X5TBjJl=V5TBkJk=U5TBlJl=T5SBnJl=S5RBnJo=Q5PBPKP>Q5nAPKR>Q5kAQKU>P5iAQKW>Q5eAQK[>Q5aAQK_>Q5]AQKc>P5[AQKe>Q5WAQKi>P5UARKj>o4TARKl>P5QAQKo>S5l@nJT?U5i@kJW?Y5d@gJ]?n600000O10000O100O010O100N2N3L3N2L4N2N2N2M3N3L3N2N2N2O1N3N1O1N2O2L3N2M4M2N3M2O2N1O1O2N1O1O2O0O1O2O0O100O2O000O10000O100O2O0O1O1O1O1O1O1O100O100O1000000O100000000O1000000O10000O100O1O1O1O1O2O0O1O10000O10001N101O1O1O1O1O1O1O0O2O1O00001O0O2O0O2N1O2N2N2N2N2O1N2N2O001N100O101N1O101N1N2O2N1O1O2O0O101O1O1O1O2N1N2O1O1O1O1O1O1O1O1O1O1N2O1O1O1O1O2N1O1O2N001O1O001O1O1N2O2N1O2N2N1O1N101O1O001N101N2N2O2M3M3N2M3N2M2O2M2O1O1N2N3M3M3M9CoSV5\"}}, {\"image_id\": 125, \"category_id\": 1, \"bbox\": [324.8216552734375, 23.06771469116211, 412.1341552734375, 537.4181518554688], \"score\": 0.9456055760383606, \"association_id\": 1, \"segmentation\": {\"size\": [768, 1024], \"counts\": 
\"WhV81lg08H7I7I7H6J5M3M3N1N3N2OO01O000000000O1O1O001O1O1O1O001O0O10000O010O2O0O2N2O100O100000000O00001O0O110O100O011O01O01O1N101N2N1O2i]OaNk=a1iAUOk=l0TBZOg=g0XB\\\\Od=f0ZB]Od=c0\\\\B_Ob=b0]B@a=a0^BAa=>_BD_=<aBF]=:cBH[=9cBJ[=6eBMW=4hBOV=2gB3U=NiB8R=IlB;R=FlB>Q=BmBb0Q=_OnBc0P=]OoBf0o<[OPCg0n<ZOPCi0n<XOQCi0n<YOPCi0n<XOQCi0n<YOoBi0o<ZOoBh0o<YOoBi0n<[OPCg0m<\\\\OQCe0m<^OQCd0m<_OoBd0n<_OPCb0o<AnBa0P=BmB`0Q=BmB`0Q=CkB`0T=BiB`0U=BiB`0U=DaBd0]=_O[Bh0b=m4N2M2N3M3M2O2N1O2N1O1O2N1O1N2N2N3L3N2N2N3N1N2O1O2N1O2N2M3N2M3N3L3N3M2N2N2N101NhIfDMY;FZE4e:GhE2X:LPFOn9OVF0j9MZF3e9J_F5`9IdF7[9EjF:U9EnF;Q9CRG=l8CUG=k8BWG>g8BZG?e8@]G?b8A_G`0`8_OaGb0^8\\\\OdGe0[8ZOgGf0X8XOjGi0U8UOmGk0S8ROPHo0o7lNVHU1i7gN[HZ1d7cN_H^1`7^NdHb1\\\\7\\\\NfHe1Y7YNjHg1U7WNmHi1S7VNnHk1R7RNPIn1R7oMoHQ2R7lMoHU2R7hMPIX2R7cMQI]2P7`MRIa2o6XMVIh2k6RMZIn2h6kL]IU3e6dL`I\\\\3b6_LaIa3`6[LbIf3a6ULaIk3a6QLaIo3`6oKaIQ4a6kKaIU4b6fK`IZ4b6aKaI_4c6ZK`If4h6dJfI\\\\5_<O100000000000000O10000000000000000000000O10000001O001N2O1O001O1O001O1O1O001O1O1O2N3M3M2N3M3M3M5K4L4L3M2N2N00N2N2O1N2O1N2O1O1O1O1O1O1O100N2O1O1O1O1O1O100O1O100O100O100O1O100O1O1O1O1N2O1N2O2N1O1O100O10000O1000000O1000000O1000000O101N100O1O100O1O100O2N1O1O1O2N2N2O1N3M2O2M2O1N2O1N2O1N101O000O1000000O2O00000O10001O0O100O101N100O2O0O1O2O0O2N1O2O001N2O1O1O1O1O1O001O1O1O001N101N3L3L5K5K6Fd0XOnmf6\"}}, {\"image_id\": 125, \"category_id\": 1, \"bbox\": [325.789306640625, 38.46137619018555, 276.6663818359375, 354.9137878417969], \"score\": 0.10954668372869492, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": 
\"mXl71kg07I7K3L5M1O2N1O1O1O1O2N1O1O1O0O1O1O1N2N3M20001O0000O01O000O200O1000000000OO101O100O101O000O1N1O2M2O2O1000001O000O1O1M2O10001O100O1O1O1O0O100`]OPNR?Q2j@ZNo>f1o@_Nn>a1QAcNl>]1TAeNk>[1SAhNk>X1TAlNh>T1XAQOd>n0ZAYOa>g0_A@Z>`0gAES><lAIP>7PBLm=4TBNj=1VB1h=OXB3f=MZB6c=J\\\\B;`=D`Ba0\\\\=^OdBe0Y=[OgBh0W=XOhBk0U=VOkBl0S=TOmBn0R=ROmBo0R=QOnBP1Q=POoBQ1o<POoBS1P=lNoBW1P=hNnB\\\\1P=dNoB_1o<bNPC`1o<`NPCb1o<^NPCd1o<]NoBe1o<\\\\NPCf1o<ZNoBi1o<XNmBm1P=TNjBT2T=lMkBW2S=iMmBY2R=gMmB[2R=eMnB\\\\2Q=dMoB]2Q=cMnB^2Q=bMnB`2Q=aMmBa2Q=`MkBe2S=]MeBk2Y=VMeBm2Y=UMdBn2[=RMdBP3[=PMeBQ3Z=oLeBS3Z=nLeBS3Z=mLdBV3[=jLcBY3[=hLcB[3[=gLaB]3]=dLbB^3\\\\=cLcB_3\\\\=bLcB_3[=bLeB_3[=bLcB_3\\\\=cLbB^3\\\\=eLaB]3^=eL\\\\B`3`=b3M3M3N2N2N2O1O1NYLgBkLX=m2SCQMj<m2\\\\CRMb<l2cCSM[<m2hCRMW<l2mCSMR<l2PDTMo;l2SDSMl;l2VDTMj;k2WDUMi;i2YDWMg;f2\\\\DZMd;c2_D]Ma;`2bD`M^;^2dDbM\\\\;]2eDcM[;\\\\2fDdMZ;[2gDeMY;Z2hDfMX;Y2iDgMW;W2kDiMU;T2nDlMR;o1SEQNm:i1YEWNg:g1[EYNe:f1[E[Ne:d1\\\\E\\\\Nc:d1^E\\\\Nb:c1_E]Na:b1`E^N`:`1bE`N^:^1cEcN]:Y1gEgNY:R1nEnNR:n0RFROn9l0SFUOm9j0TFVOl9i0UFWOl9g0TFZOl9d0VF\\\\Oj9b0WF_Oi9?XFBh98^FHb92cFO]9NeF3[9LeF5[9JeF7[9HfF8Z9GgF9Z9DhF<X9CiF=X9@jF`0W9oNYGQ1h8aNeG_1\\\\8^NfGb1[8[NgGe1[8XNfGg1\\\\8VNfGj1[8TNfGl1\\\\8PNeGP2^8_MQH`2S8PMZHo2\\\\>O1B>N101N2O2O0O2N2O0O2N2N2O1N1C<0001O0000010O010O100O1O1O1O1N2I7L4M4M2O11O001O001O00001O00000O100000000000O101C>L4K9G[Wl9\"}}, {\"image_id\": 126, \"category_id\": 2, \"bbox\": [396.6848449707031, 770.5930786132812, 136.96786499023438, 49.0048828125], \"score\": 1.0, \"association_id\": 3, \"segmentation\": {\"size\": [960, 1280], \"counts\": \"fle;1nm02O1O1O001O001O100O1O00011N1O1O010O4L2N001O010O1O001O00001O00001O00001N1000001O000O1000000O10000O1000O10001O00001N101O000O01000O0100O010O100O001O001O1O1O1O00100O001O001000O10O10O100O101N10001N1O101N100O100O2O000O1000001N100000001O001O001O0O2O1O0O2O001O1N2O1N2M\\\\Yoe0\"}}, {\"image_id\": 126, \"category_id\": 1, \"bbox\": [936.7381591796875, 476.8986511230469, 109.05517578125, 320.7835388183594], \"score\": 1.0, 
\"association_id\": 1, \"segmentation\": {\"size\": [960, 1280], \"counts\": \"Rn_k0>\\\\m0b0@;D8G9G6M4L4M2L9H7J4M2L?A8lXOiLSc0a3\\\\\\\\OjL`c0]3U\\\\OkLic0b3eZOdMZe0b5O1O1O2N2N2N001N1000001O00000O01N2O0O01N10NUOQHk[Oj7Zd0dHX[OS7Re0TIeZOh6ae0g05K5E;F9K6L4J6M3N200O1O10010O000102N<D4M2Ne0[O4K3N2N3O1N0N1N2O0O2O2N2N2M2N101O0O1O001O0002M3M2M4M2N2O1_MfZOYL]e0_3nZO[LVe0Z2Z\\\\OYMnc0V2P]OTMXc0c2X]OnLQc0m2V]OcLXc0X3k3L4L4K7F>B;E:Hehk6\"}}, {\"image_id\": 126, \"category_id\": 1, \"bbox\": [829.038818359375, 482.0425720214844, 110.73492431640625, 325.4235534667969], \"score\": 0.9999998807907104, \"association_id\": 2, \"segmentation\": {\"size\": [960, 1280], \"counts\": \"W`]h0k0Pm0;G7H9F:G6K5K6K6J5K3K6J8I6L3L5J7J5L3M4M5J3M3M5J6J5UOjJbXO[5Rg0YK_XOQ5[g0P1I6L3M6I7J4L2N4L5K3M2N4L5K3M3Ma0V[OXGZc0i9N1O001O0000000001O001O001O2N4L7I:F5K3XNX[ORJjd0`5n[OTJTd0g5R\\\\OVJPd0d5X\\\\OXJjc0[5e\\\\OaJ\\\\c0V5n\\\\OhJSc0S5S]OkJnb0m4\\\\]OPKeb0j3d^ORL\\\\a0f3n^OWLTa0f3P_OXLPa0g3Q_OYLo`0g3R_OWLPa0h3P_OXLPa0h3Q_OVLRa0h3o^OVLUa0h3j^OXLYa0g3f^OWL\\\\a0i3_42O0000O002M2N3L4L4L3M2N1N3M2M5J6I8D;C>F=SOjmQ:\"}}, {\"image_id\": 126, \"category_id\": 2, \"bbox\": [953.3153686523438, 779.9848022460938, 133.24456787109375, 79.48828125], \"score\": 0.9999998807907104, \"association_id\": 1, \"segmentation\": {\"size\": [960, 1280], \"counts\": \"^jSl04km02O2N2N3M2N1O002N3M1O1O2WSO[OSl0b1H3L1O100O2N100O10O010O00010O2N001O101O2M00100O10O010O01O1O001O001O2N1bN`SO[1cl0O1O5K5K00001O0001O01O001O00001O00001O00001O0000010O00001O001O1O1O001O0000001O001O1O001O001O1O001O001O1O000O2O0O2O0O2NckV6\"}}, {\"image_id\": 126, \"category_id\": 1, \"bbox\": [404.75531005859375, 533.5248413085938, 112.56396484375, 255.30072021484375], \"score\": 0.9999997615814209, \"association_id\": 3, \"segmentation\": {\"size\": [960, 1280], \"counts\": 
\"_X[<<Zm0;I7L4M3M3N2M3\\\\VOROSf0P1bYO\\\\O\\\\f0e0]YOBaf0a0YYODef0b0lWOfN@[1bh0[301M2N2M3N2O1N3M2M3N3L3M4N1O2O1O1O001O2N2N3M3eYOaIod0c6eZOfIZe0c6VZOeIie0Y701O0000001OO100000000000000O100O1O2N1O2N3M2N3L3M3M4oNbYOXJef0`5R1L3N2N2N2M3N4J6K5mNRWOmLQi0e2hWOlL]h0j2c1I7I6L4M4L3N2M3N1O2M3N4L5J5K4L4M6J5K2N3HafZf0\"}}, {\"image_id\": 126, \"category_id\": 2, \"bbox\": [853.0718383789062, 793.0816040039062, 106.49676513671875, 55.0347900390625], \"score\": 0.9999681711196899, \"association_id\": 2, \"segmentation\": {\"size\": [960, 1280], \"counts\": \"e]Qi08fm05M1N3M2O1N2O0O10CjRO1Um0NnRO0Rm0NQSO2nl0MSSO3ml0MTSO2ll0NTSO3kl0MUSO3kl0MUSO4jl0LVSO4il0MWSO3il0MWSO3jl0LVSO4jl0LVSO4jl0ORSO3ml0MSSO3nl0LQSO5ol0LmRO7Tm0:01O0001O00001O00001O00010O001O000010O0001O000000001O00000000001O001O1O010O001O001O001O001N101O00001O000O10000O10O0100O01O1K5N2O10000000O10001N101O2M5KQn^9\"}}, {\"image_id\": 126, \"category_id\": 2, \"bbox\": [513.8314819335938, 541.9705810546875, 80.50823974609375, 61.28936767578125], \"score\": 0.9997643828392029, \"association_id\": 0, \"segmentation\": {\"size\": [960, 1280], \"counts\": \"`^R?8cm06M3N10001O0O10000N2N2O1O1O1O100O100O100O1O0000001N100O01O10001O1O010O1O0O1010OO1010O010O00010O01O01O100O10O0010O0001O1N101O1N2O0O2O1O1O100O100O1O1O2N1O2HZRO1im0NTaSd0\"}}, {\"image_id\": 126, \"category_id\": 2, \"bbox\": [712.758056640625, 541.3974609375, 126.9169921875, 61.2613525390625], \"score\": 0.9938222169876099, \"association_id\": 0, \"segmentation\": {\"size\": [960, 1280], \"counts\": \"nlmd03mm01O001O001O001O00001O001O1O1O001O2N001O001O1O1O001O1O1O1O001O1O1O001O1O1O001O1O1O1O001O1O00001O010O001O010O1O1O10O10O1O010O10O10O010O010O000100O1O0010O01O00001O000000001O01O01O000001O01O2N1O1O003N0O1O100O1O1O00100O010O0010O01O1O1O1O001O2M3N2M3Ldgn<\"}}, {\"image_id\": 126, \"category_id\": 2, \"bbox\": [13.067980766296387, 16.99658203125, 486.24420166015625, 857.7053833007812], \"score\": 0.08664681017398834, \"association_id\": 0, \"segmentation\": {\"size\": [960, 
1280], \"counts\": \"bY>a0Rm0T2mMd0gNW1D<D:ZWObK]g0g5k]OgIX<_6dCdIS2Jb2g6XKaIU2I_2l6XK]IX2HZ2S7ZKXIY2HS2[7_KQI\\\\2Gk1c7dKiH^2Ie1g7gKfH`2I_1j8jJcGb30U1c8fI\\\\FEX1[5KQ1l8hI[FBZ1[5Bl0Y9QJUF]OZ1_5ZOe0h9WJRFVOU1i5TO3^:eJQHU5dMKh:gJkG[5_MKl:`JlGc5[MJ\\\\?3f@K[?4g@J[?4g@JY?9d@F^?;_@Fa?<]@Dd?=Z@Cf?>Y@Bg?`0V@Aj?a0T@^Om?c0Q@^Oo?c0P@]On?g0o_O[Oj;aJTGW6oLZOn;_JnF\\\\6nLZOT<\\\\JhF`6oLWOZ<YJeFb6nLWO^<XJbFb6nLWO`<XJ`Fc6nLVOc<XJ]Fd6nLVOd<VJgDKLk6cNYOj<RJdDMKY=`;kBcDOIX=c;jBbD2GW=f;hB`D6DU=l;gB\\\\D9BT=Q<eBXD?^OQ=X<cBUDg0WOj<d<bBnCS?Q<SAdCR?\\\\<Z1N2O001O1N100O1O1O1WJ^_OSNc`0\\\\1[@VNf?Hb^O`MS2g2Z?Fk^OWMU2P3Q?B\\\\_OiLn1d3f>YOVCe0k<PO`CP1`<kNfCS1[<hNkCW1U<WMc^OO\\\\5i2Q<VMi^OJY5P3n;UMn^ODX5U3j;WMS_O^OU5[3h;VMX_OYOS5`3f;UM]_OTOo4g3d;SMfEm2\\\\:PMdEQ3^:mLbET3`:hL`EY3b:cL`E]3b:`L^Eb3d:ZLZEi3h:SLVEQ4m:jKQEZ4Wb04L3M3M3K5I7K4L5J3M20O2M3N4N2O11O01O\\\\Oe0G91O1O1O1O001OO10O10000O1O1O1O1A?D<M3O2N100O101O2N2M2O2M3N`G\\\\KgGa4Y8bKgG[4Y8hKgGU4Z8mKeGP4]8RLcGk3^8WLbG_3e8dL[Gf2Y9\\\\MgF^2^9cMbFZ2_9gMbFV2_9lMaFQ2`9PN`Fo1`9SN_Fl1b9TN^Fj1e9UN[Fj1f9VNYFj1h9VNXFh1j9WNVFi1k9WNUFg1m9YNSFe1P:ZNoEf1R:ZNnEd1T:\\\\NkEc1W:]NhEb1Z:]NgEa1\\\\:^NcEc0];]ObD?c;A]D<f;DYD;i;EXD6l;JWDOl;2WDFm;<UD\\\\Oo;e0e81O2N1O2N1O1O2N1O1O1O2N1O1N3O000O100O10000O100O10000O1000000O100O1O1O100O100O1O100O100O100O100O10000O1000000O10000O1000000O10000O100O1O1O1O1N2O1N2N2N2O1N2O1O1O1O1O1O100O100000000O1000000O100000000O1000000O010O1000O01O100O010O1O010O1O001O1O001N10001O001O00000010O00001N101N2N2M3N2N2M3N3M2M3N2N2M3M3N2M4M2N2O1N2O1N2O1O1O1N2O1O1O1O1O1O1N2M3N1O4J5J7J6I7JUbPk0\"}}, {\"image_id\": 126, \"category_id\": 1, \"bbox\": [26.26204490661621, 112.17646026611328, 468.91143798828125, 650.8078002929688], \"score\": 0.0614142008125782, \"association_id\": 0, \"segmentation\": {\"size\": [960, 1280], \"counts\": 
\"STV1X1^l0a0F:eTOUNPj0Q3K6K5K5K5K5L4K4L3L4L3N3cLYK\\\\]Ok4bb0XK[]Oi4cb0[KY]Oh4db0\\\\KX]Of4eb0_KX]Oc4cb0dKY]O]4cb0jKX]OY4`b0QL\\\\]OP4bb0VLY]Ol3db0YLX]Oh3gb0[LW]Oe3gb0_LV]Oc3hb0_LV]Ob3hb0bLT]O`3kb0bLR]O`3lb0dLP]O_3mb0eLo\\\\O]3ob0hLk\\\\O[3Sc0iLi\\\\OY3Uc0lLf\\\\OV3Wc0SM`\\\\OP3]c0cMQ\\\\O_2mc0k3N2N2O1N2O1O1N3N1O1N2O1N2O1O1N2O1N2O1N2O1N2O1O1N2N2N2M3N2N2N2N2O1N2N2N2O1N2O1N2O1N2O1N2N2N2N2M3M3M3M3N2N2N2O1N2O1O1O1O1O1N2O1N2N2M3M3L4M3M3N2M3N2M3O1O1N2O1O1N2O1O1N2O1O1N2O1N2O1O1N2L4M3M3L4K5L4M3M3N2M3N2M3N2M3N2M3N2N2YM`_OlGb`0Q8c_OjG``0S8e_OiG]`0S8j_OfGZ`0V8m_OdGV`0X8R@hFj`0T9^_O^Fl`0`9k1O1N2O1O1O100O1O10000000000O10000000000000000000000000000000000O10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000O100000000000000000000O1000000O10000O100O100O100O100O1O1O1O1N2N2N2N2N2O1O1N2O1O1O1O1O1N2O1N2N2O1N2O1N2O1O1N2O1O1O1O1O1O100O1N3N1O1L4M3L4L4L4L4N2N2N2O1N2O1O1eLWD^Dj;];aD\\\\D`;^;kD\\\\DV;\\\\;WE^Dj:\\\\;`E_Da:\\\\;oEXDR:d;dFhC^9U<lFcCU9Z<SG`Cn8]<ZG\\\\Ch8`<^4K6I6J6K5O1N2O1N2O1N2O1N3M2N2N2N2N2N2N2N3N1O1N2N3M2N2N2M4L3N2N3M2N3M2O1O2N1O2N10001N101N2O1O0O2O1N101O0O2O0O2N010N2O1O1O1O001O1O1O1O001O1O10O02N2O1N2N2N101N2N1O2N2N1O2N1O2O0O2N1O001O1O1N101O1O001O1O001O1N101O1N3M4K4M4K4L5K5J5K6H7H8oN\\\\XO]Kkg0`4m0L4M3L4M3M4K5K6F:A?kN\\\\1nNVgPg0\"}}, {\"image_id\": 127, \"category_id\": 2, \"bbox\": [375.3800048828125, 245.9574432373047, 199.7757568359375, 156.5547332763672], \"score\": 1.0, \"association_id\": 2, \"segmentation\": {\"size\": [520, 644], \"counts\": \"jWo53P`0<X@DL3o><o@0P?e0O1O1O1O001O001O0O100000001O0001O00000001O00O2O001O1O001N11O0010O01O1O1O00100O1O1O1O10O01O100O100O1O001O100O1O1O001O001O1O1O1O001O0000001O0000001O1O1O001O001O0000001O00001N101O1N101N100O10001N10000O101N1O2M3N2N1O1O2M2O1N2L4M3N2N2M3L4L4H8M3L4O1010O000001O0001O2N2N1N2O1O001O1O1O0O2O001O001O1O1O001O0O101O000O101N100O2O1N2O1O0O2O001O0O3M1O2N2O0O2O0O3N1N2O1N1O2O1N2O1N3N1N2O1N2N2N2N3M2O1N2N2N4J]gR1\"}}, {\"image_id\": 127, \"category_id\": 1, \"bbox\": 
[339.2513732910156, 82.66181182861328, 180.33071899414062, 253.08221435546875], \"score\": 1.0, \"association_id\": 2, \"segmentation\": {\"size\": [520, 644], \"counts\": \"oY\\\\5c0[?c0F4K4M3M3K8J5K3M3M2N2M4L8jBjMl;^3L3L3N1N4Mf0YO8I2M3M1O1O2N1O100O2O000O100O100N2O1N2O1O10O01O1O1M3N3M2O1O1O1N2O1O100O100O1O100O1O1O2N1O2O0O2O1N2O001O0O101O00001N1000000O2O0000000O1000000O100000000O1000000000000O1000000000000001O00000000O2O0O100000000001O0000000000O10000000O100O10O01O10000O1hIPGX5P9dJ]GS5c8kJaGR5`8lJcGS5]8lJeGQ5]8nJeGP5\\\\8oJhGl4Z8SKiGj4X8VKiGg4Y8XKiGd4Z8\\\\KfG]4b8aK_GY4g8fK[GR4l8mKUGk3T9SLmFi3W9ULkFh3Y9TLjFj3X9ULjFQ3cN_Me:^OjFn2kN\\\\M_:DhFk2j;M3N3M1O2M3M2N2O2M2O2N2L4E;G:K5L4K5FbZo1\"}}, {\"image_id\": 127, \"category_id\": 1, \"bbox\": [186.7914276123047, 4.446969509124756, 81.02772521972656, 129.67144775390625], \"score\": 0.9999997615814209, \"association_id\": 3, \"segmentation\": {\"size\": [520, 644], \"counts\": \"UQP38h?`0F4J7H;I3kNiNPCa1j<dNnBc1n<aNmBc1Q=o0K3M2N2N2N1O1O2N1O2N1O2N3M2N001O00001O00000000000000000000O100000000000000000000000001O00001O5K:F5J2O1O1O2N1O3XMjBV2i=K4L7J3M3L6I4M2O3M2M4J6H8J8@]kn5\"}}, {\"image_id\": 127, \"category_id\": 2, \"bbox\": [4.759197235107422, 276.9757080078125, 180.98013305664062, 95.0443115234375], \"score\": 0.9999991655349731, \"association_id\": 1, \"segmentation\": {\"size\": [520, 644], \"counts\": \"ni2U2S>1N2O000000O10000000000O100O10000O101O0000001O2N1O1O10O01O1O101N100O1O1O1O3M3M1O00100O2N2OO10O00001O2N1O10O01O001O1O001O001O1O1O1O001O010O10O01O10O01O001O001O00010O010O01O01O00001O010O000001O010O0000001O00000000000010O010O01O0001O0000000001O00001O00001N100O100O2O000O1O1O100O100O100N3N1O100O001O1O1O00100O0O1M2011O01O001J301101O001O1N2N2O002O0O1O2N2N1O2N2N2N4KQYX7\"}}, {\"image_id\": 127, \"category_id\": 1, \"bbox\": [8.535550117492676, 214.83816528320312, 170.6560516357422, 136.27490234375], \"score\": 0.9999618530273438, \"association_id\": 1, \"segmentation\": {\"size\": [520, 644], \"counts\": 
\"eZ51U`03N2M4K5M2N2O1O1N2O001O00001O001O1O1O001O1O2M4M1O1O001O1O1N2O1O001O1O2N1O001O001O2M3N1O1N2O1N2O1O1N2O1O1N3N1N101N1O1O2N101N1O2N2M2N3N1O101N1O2N2N2N2O0O1O2N101N2O1O000000001O000000000000001O001O1O1O0000000001O000000010O001O00001O0000000O10001O00000O1O1O101N10001N2O0O2O0O1O1O1O1O1O2N100O2M3M3N1O2N2N2M3L4M3N1O2M3M:E8H7J4L9G=_OXm^7\"}}, {\"image_id\": 127, \"category_id\": 2, \"bbox\": [208.17294311523438, 28.605806350708008, 63.092254638671875, 120.30870056152344], \"score\": 0.6794257760047913, \"association_id\": 3, \"segmentation\": {\"size\": [520, 644], \"counts\": \"QZ]33T`04M1N3O0O1O001N100O10000O1O100000000000000000000000000000000O100O10000N2000000O1O100O1N2N2N110O0N2N2O000002L6TOg@>\\\\im5\"}}, {\"image_id\": 127, \"category_id\": 2, \"bbox\": [137.76556396484375, 291.2951965332031, 53.09922790527344, 70.62643432617188], \"score\": 0.05417988449335098, \"association_id\": 0, \"segmentation\": {\"size\": [520, 644], \"counts\": \"n\\\\V2k0\\\\?1O2O000O1O1O100O1O1O1O1O100N2O1O1O1O1N2N2N2O1M3M3N1O1O2N1O02O002N1N3N1O2M2O2M3N1O2N2M3N3N1M4L4M2M3M4K:C^gU7\"}}, {\"image_id\": 128, \"category_id\": 1, \"bbox\": [430.284423828125, 271.22930908203125, 148.5384521484375, 110.1824951171875], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [600, 800], \"counts\": \"fUo74bb04M2N2N1O1^_OHd>9SA0k>1l@9S?Hj@;U?Eh@>X?Ce@`0Z?@d@b0\\\\?^Ob@d0^?]O`@d0`?\\\\O_@d0a?^O]@b0d?_OZ@a0g?@V@4C@X`0>Q@2^`0O`_O1a`01[_O1d`0Q110O10O02O1a_OZNi?j1P@\\\\Nm?Z2M2O00000O1000nNX@TOh?j0\\\\@YNMe0g?m0l@nNT?m0TAoNm>P1VAnNj>Q1WAoNi>P1\\\\AjNf>U1^AfNd>Y1^1O1O10000000000001O001O00001O001O1O0000001O0000000000O1000000001N1000001V_OPOi?R1n_O[Ol?l1M1O1O000O010000O100mNm@`NT?^1PA]NS?c1Q1O1000000001O1O3M4K3N1O1N3N3LmNg_OKW`02n_ONo?OV@Nl?O\\\\@Kd?2d@I\\\\?7h@FW?:k@DV?<m@@T?`0QAWOS?i0]11000001O001O1N20O01O2N1O01O1O3M6J3M1O2N3M2M2N2N2NUiQ4\"}}, {\"image_id\": 128, \"category_id\": 1, \"bbox\": [0.0, 296.61724853515625, 105.27369689941406, 113.71231079101562], \"score\": 1.0, 
\"association_id\": 2, \"segmentation\": {\"size\": [600, 800], \"counts\": \"i9_2W`08I3M101O0OL5N1M4N1O2M3N2O1O100O1001N4M?A2N2M101O0O2N101N101O000000O10000O1O1F^@ZMf?c29L4L4N2M3C=M3O1O100O2O0010O01000O3N011N;F4K1N6K1O0O0100O001O001O00001O001O0UNk_OR1W`0lNl_OQ1U`0oNm_Oo0T`0POn_Om0T`0SOn_Od0Y`0]Oa10O2N5K2N2O0O3N0O10O0010O02N2M4M6I5Iidh<\"}}, {\"image_id\": 128, \"category_id\": 1, \"bbox\": [257.8204040527344, 309.53570556640625, 151.63290405273438, 128.37860107421875], \"score\": 1.0, \"association_id\": 3, \"segmentation\": {\"size\": [600, 800], \"counts\": \"]il48^b09F3M3O1O01O1O0010O0n_O@m=a0oADl=`0jAJS>9gAMU>7jAJS>9kAIT>:hAHW>;dAG]>;]AIb>:XAHj>:RAGn>b0i@^OU?m15N3O2O10O10O1000O101N2O2M8H4M1O1OfNUAhNj>R1bAhN]>X1fAgNY>X1jAgNU>X1PBcNQ>[1VB`Nj=_1[B\\\\Nf=c1^BYNc=g1_BWNa=i1bBRN`=m1f10000000001O01O00000000001O01O000O10000001O01O01O0O2O1O0`@UNS>l1hAYNW>h1QAQN7:g>g1o@VN35m>h1k@VN72m>[2QAeMo>R30001O\\\\Oo@`MR?[2VA`Ml>Y2cA^M^>S2Z1O2O1O1O100O101O0001O000100002M4M2N001O3M7H4M1O002N2N1O0O11N2O1N2N1O1O2N5K3M1O1N2O6I6J2N2N2O2MT1kNgQW7\"}}, {\"image_id\": 128, \"category_id\": 1, \"bbox\": [195.79623413085938, 295.60546875, 95.21420288085938, 97.50375366210938], \"score\": 0.9999998807907104, \"association_id\": 4, \"segmentation\": {\"size\": [600, 800], \"counts\": \"Wmd3b0na0V1PO8G:F6L3N0O1O1O1000O1N1VOV@aNQ`0V1l0N3O100O11O1O1O1N6K7I2N1O4L4L1O1O3M100O2N1O1O10PO[@lNd?S1_@lN`?T1b@jN^?U1e@iN[?W1f@iNY?W1j@bN[?]1P1000000001O000000001O000k_OaNQ?d1e@cNY?_2O2N001O0O101O0000O1N2N2O1O1K5O2J5M3N3N1O2M3M2[Oa_OPOa`0m0f_OmN\\\\`0Q1g0N3M2N2N3L4L5Kf`Z9\"}}, {\"image_id\": 128, \"category_id\": 1, \"bbox\": [140.1146697998047, 305.95318603515625, 64.97372436523438, 120.02374267578125], \"score\": 0.9999992847442627, \"association_id\": 0, \"segmentation\": {\"size\": [600, 800], \"counts\": \"f]b21ab0>D5H7I7\\\\Od0L3AXNi_Om1o?`0N20N32M5K4M2N2N3O2M010O1O2O0001Oj@nLf>T3SARMl>^30O1O000000000000000000001O000O1O1O2O0O2N2QOm@WNS?c1VAXNl>f1[ARNi>k1S1M3M4K6K3N3M3O1O1N1O1N5J6Gnll:\"}}, 
{\"image_id\": 128, \"category_id\": 2, \"bbox\": [487.4102478027344, 342.0941162109375, 119.68649291992188, 30.04998779296875], \"score\": 0.999998927116394, \"association_id\": 1, \"segmentation\": {\"size\": [600, 800], \"counts\": \"UmU91fb01O1M3M3O10O1O10O01N5KoP70nnH7M3N001O0O2O0000000001O001O2L3Nom2KURM4N0O1K40100O01000000000000000000O100000000O10O0100N110O100O10O1000O100O100O010O1000O10O10O10000000000000O100000001NiZa3\"}}, {\"image_id\": 128, \"category_id\": 2, \"bbox\": [366.40435791015625, 387.2430725097656, 114.67953491210938, 38.590301513671875], \"score\": 0.9999842643737793, \"association_id\": 3, \"segmentation\": {\"size\": [600, 800], \"counts\": \"Q^g6=Zb02O0000001OO2O00001O3M6J001OPl<0aaB2Sb02i]O0Vb08010000O1000O10000O010O1000O010000O1O01000O1O100O010O100000000O1000O100000O01000000O1000O10000000O010000O100O010O10O10O10O0100000000O100000001N10002LQaj5\"}}, {\"image_id\": 128, \"category_id\": 2, \"bbox\": [73.51619720458984, 375.7032470703125, 78.34803009033203, 29.20733642578125], \"score\": 0.9981350898742676, \"association_id\": 2, \"segmentation\": {\"size\": [600, 800], \"counts\": \"lR^13db02N2N1J6O1O10000O10000O0100O010O01000O10O10000000000000000000O10000000O10O1000000O10000O10000000O1000000000O10001N10000O1Fd]O8`b0L[hQ<\"}}, {\"image_id\": 128, \"category_id\": 2, \"bbox\": [189.23541259765625, 382.0226745605469, 102.87576293945312, 27.144378662109375], \"score\": 0.9672003388404846, \"association_id\": 4, \"segmentation\": {\"size\": [600, 800], \"counts\": \"mf_34cb0100O2O0O10000O10O1000O010000O1O100O1000O10001O000O10O10O1000000000000000000000001O00001O001O001O000O2O00000O101O0O100000000002N3M1O1O010O0001N10001N010O00100O001000O100O101O0O4M2M101L`a^9\"}}, {\"image_id\": 128, \"category_id\": 1, \"bbox\": [148.93202209472656, 304.1228332519531, 301.72637939453125, 124.81973266601562], \"score\": 0.4676942825317383, \"association_id\": 0, \"segmentation\": {\"size\": [600, 800], \"counts\": 
\"^XR31fb01N2O1O1O1O1N200001O3Jb\\\\=NbcB1O2N101JLe]O5Zb0Me]O3ab0O1N3M[\\\\`00fc_O0\\\\i80bVG4M4M1g]OIna09o]OJPb07o]OIQb07n]OKRb0=0001S_O]Oc?e0Y@^Of?c0S@Dm?;P@Io?8n_OKR`05l_OMS`03k_OOW`00f_O2Z`00Z_O:g`0j0100O101N1000000O20O0R@bNb>_1[AeNc>[1YAiNh>[1g@RN0g0X?X1g@TNKg0^?V1e@RO[?o0a@SO_?S2010OO10O1WOa@YN_?f1c@XN^?`1a@nM4`0\\\\?a1UATNl>k1YAnMj>Q2o00O10O2N1O1O1O100O10000O101O0000000000001O0001O0000001O0000001O0000XNW_Oa1i`0_NX_O`1h`0`NW_Ob1h`0_NW_Oa1i`07000001O00001O001O00000000001O00001O001O01N1000001O00001O0002O0000001N2O1O3M0O10O100Od_OkMW`0T2f_OoMZ`0X2O2O3MO010O010O002N1O2N3M;E4L2N1O1O1O1N102M4JWmU7\"}}, {\"image_id\": 128, \"category_id\": 2, \"bbox\": [69.96349334716797, 372.1238708496094, 170.2822265625, 32.607513427734375], \"score\": 0.07737964391708374, \"association_id\": 0, \"segmentation\": {\"size\": [600, 800], \"counts\": \"cZ\\\\12eb03M1N201O0N2L400O2O0O1000000O10O10O10O100O10O1000O10000000000O1000000000O1000O1000000O101O0O1000O1000O1000000O1000O1000O100000000O2M[\\\\j0LicUO3O001N100000000000O100O10000O100O2N1O100O10WS40ilK000O101O00000O10000000000001N2O000000o_Y:\"}}, {\"image_id\": 129, \"category_id\": 1, \"bbox\": [106.20794677734375, 3.82605242729187, 321.16131591796875, 566.591796875], \"score\": 0.9999961853027344, \"association_id\": 1, \"segmentation\": {\"size\": [800, 616], \"counts\": 
\"XSc2j4Pd0V1kN;F8I7J5L3M2N3M2O1N2N101N2O001N101O1O1O2N1O2M3N2N1O2N1O1O00001O0O101O0000001O0O101O001N2O001N2O001N2O1N101N2N2N2SJ]GZKd8c4bGYK`8c4fGYK\\\\8c4jGYKW8e4mGYKT8d4PHYKQ8e4SHYKn7d4VHZKk7d4XHYKi7f4ZHXKg7e4\\\\HZKd7f4]HYKd7e4^HZKc7d4_H[Ka7d4aHZK`7f4aHYK`7e4bHZK^7f4cHXK_7g4bHWK_7i4bHVK^7j4dHSK]7l4fHQK\\\\7n4gHnJZ7R5hHjJZ7V5gHhJ[7W5gHfJZ7Z5gHcJ[7]5fH`J\\\\7`5eH^J]7a5dH\\\\J_7c5bH[J_7e5bHXJa7g5aHUJb7j5`HRJd7l5_HoId7P6`HjIc7U6bHdIa7[6bH_Ib7`6aHPHm8o7UGgGS9X8QG^GV9b8d300000000000000000000O100O100O101N1O100O1O1O1O001O0010O000010O01O00100000O01000000O0100000O1O1N2O1N2N2N2N2M3N2O1N101N2N2M3M3L4L4L4N2M3N2N2N2M3M3M3L4M3L4M3N2O1N2O1O1O1O1O1O1N2O1N2O1N2O1O1O1O1O1O100O1O100O1000000O100000000000gGiBi4W=UKlBj4T=TKoBk4Q=TKPCl4P=TKPCl4P=SKRCl4n<SKSCm4n<QKSCo4m<PKTCQ5k<nJVCR5j<mJWCS5i<kJYCU5g<iJ[CW5f<fJ\\\\CZ5d<dJ^C\\\\5b<dJ_C[5b<cJ_C]5a<cJ_C]5b<aJ_C_5a<aJ`C^5a<`J`C`5a<_J_Ca5b<]J_Cc5a<\\\\J`Cd5a<[J`Ce5`<YJaCg5`<VJbCj5^<UJcCk5^<RJdCo5[<PJfCP6[<mIgCT6X<kIiCU6W<iIkCW6V<fImCZ6R<dIPD\\\\6P<bIRD_6n;_ISDb6l;]IUDd6j;[IWDf6i;YIVDi6i;WIWDj6h;UIYDl6f;TIZDm6e;RI\\\\Do6d;PI\\\\DQ7c;oH\\\\DS7c;mH]DT7b;kH_DU7a;kH_DV7`;jH`DV7_;kHaDU7^;lHbDT7];mHcDT7^7]GgJ_1kMT7\\\\7aGfJ\\\\1nMS7Y7eGgJY1PNS7V7gGhJW1RNR7T7jGhJU1TNQ7Q7nGiJR1VNP7X4]GeLd0k0P1XNP7T4`GeLd0m0m0ZNo6Q4dGcLe0Q1i0ZNo6l3iGcLe0U1d0\\\\Nn6m2iH]MJX1a0]Nm6h2nH]MJ\\\\1<`Nl6a2TI^MJ^17cNl6Z2ZI_MIc12cNl6V2_I_MHe1NfNl6S2aI^MJf1KhNk6P2dI_MIg1IjNk6n1fI]MIj1GjNl6k1gI_MHj1FlNm6g1iI`MFk1FmNl6e1lI`MDk1HnNj6c1WJXMYOU2InNh6`1ZM_OTLmNe6X1bMHmKnNb6l0PN1dKPO^6>^N?XKPO\\\\6:dNb0TKROY67iNd0PKSOY65lNd0oJUOW63oNd0lJXOV60TOc0kJYOT6OXOc0nJTOd?f0g@PO_?h0VUV5\"}}, {\"image_id\": 129, \"category_id\": 2, \"bbox\": [158.84254455566406, 484.6670227050781, 351.51641845703125, 224.71530151367188], \"score\": 0.9999948740005493, \"association_id\": 1, \"segmentation\": {\"size\": [800, 616], \"counts\": 
\"i_X49fh03N3L3N1O1N2O2M3N4L5K1N3N001O1O001O00100O3M4L2N1O100O001O01O01O100O2O3L2O000O10O0001N101O1O1N2O1O1]OiM[ZOX2ce0kMZZOW2ee0jMZZOW2de0kMZZOV2ee0lMWZOW2ie0jMSZOZ2le0>N101N3N2M3N2M2O1N101N1O2N1N3M3M3L5K4N1O2N101N1O2O0O2N1N3M4K4M3N1N3N1O2N1O2N1N3N2L4M3M3M2N3N1O2N101N101O1O1N2O1O1O0O2O001O001O001O1O1O001O1O00001O0000[Kj]O`2Vb0^MQ^OUNKo3Sb0kM^^OT2ba0jM`^OW2_a0hMc^OW2]a0gMf^OY2Ya0fMh^OZ2Xa0eMj^O[2Ua0eMk^O\\\\2Ta0cMm^O]2Sa0bMo^O]2Qa0cMo^O^2Pa0aMR_O^2n`0aMS_O_2m`0aMT_O^2l`0aMU_O_2k`0_MX_O`2h`0^M[_Oa2e`0^M\\\\_Ob2d`0\\\\M__Oc2a`0\\\\M`_Od2``0[Mb_Od2^`0[Mc_Oe2]`0ZMd_Of2\\\\`0ZMe_Oe2[`0[Me_Oe2[`0ZMf_Of2Z`0ZMg_Oe2Y`0ZMh_Og2X`0XMh_Oh2X`0XMi_Og2W`0XMj_Oh2V`0XMk_Og2U`0YMk_Og2U`0XMm_Og2S`0YMn_Of2R`0ZMn_Of2S`0XMn_Oh2R`0XMn_Oh2R`0XMn_Oh2R`0XMn_Oh2R`0XMn_Oh2R`0WMP@h2Q`0WMo_Oi2Q`0WMo_Oi2Q`0VMP@j2P`0VMP@j2Q`0UMo_Ok2Q`0TMP@l2Q`0SMo_Om2Q`0SMo_Om2R`0RMn_On2S`0QMm_Oo2T`0oLm_OQ3T`0nLk_OS3V`0lLj_OT3X`0jLh_OV3X`0iLi_OW3X`0hLh_OX3Y`0fLh_OZ3X`0fLh_OZ3Y`0eLf_O\\\\3Z`0dLe_O]3[`0cLd_O]3^`0bLa_O_3_`0aL__Oa3a`0_L^_Ob3c`0]L[_Oe3e`0[LZ_Of3g`0YLX_Oh3i`0WLW_Oi3i`0WLV_Oi3m`0ULR_Ol3o`0SLQ_Om3o`0SLQ_Om3Pa0QLP_Oo3Qa0QLo^Oo3Ra0PLm^OP4Ua0oKj^OQ4Wa0oKi^OQ4Xa0mKh^OS4Za0kKe^OW4Qc010ObNjKS^OW4la0kKS^OU4ma0kKS^OT4Pb0jKo]OW4\\\\c0001O0N2O10000O101O001N101M2N2E;1O1O010O1O01000O2O0O2N101M3N2N1O2N1O2N1O2O0O2N2N2M3N2M2N3N1O2N100O2O1N1O2M4K8G7F8N2N2N2O0O2O1O0O3N1O2M4M2M101N2N101N101O001O1O2N1O1O001O001O001O01000O01000O010O0010O000001O001O001O010O1O1O001O0O2N2N2N3KZWb2\"}}, {\"image_id\": 129, \"category_id\": 1, \"bbox\": [41.30678176879883, 250.47300720214844, 403.341796875, 408.86431884765625], \"score\": 0.42174282670021057, \"association_id\": 0, \"segmentation\": {\"size\": [800, 616], \"counts\": 
\"Xk\\\\1P1mg08H6K5L4K4M3M3L5L4K6J6J7IZ1fN`0@7J5K3M3N1N2N2O2M2N3M4M4J5L5K4K4M2N2N2N101N2O0O2O0O2O1N2O0O2N2O1N2N2O1N2N2O1N101N2N2O1O1N2O0O2O1O1N2O1O1O1O0O2O1O001N101O00001N101O001O1O1O1O1O1O1N2O1O1O1O001O0O10001O001O0O101O000O2O001O1O010O1O1O1O001O001O0010O0001O000000000000001O000000000000000000000001O0001O0000000000000O1000000O10000O100O100O11O0000000O10000000000000000000000O0100000O01000O10O0100O100O101O0O100O2N1O1O1N3M2M4K4O1O0O2N2N1O2M3G9G8N3M3M3N2N2O1O1O1O1N2N3M2N2M3N2N3N1O^@bId<]6XCiIh<U6UCPJj<o5VCRJj<n5VCSJh<m5XCUJg<k5XCWJg<h5ZCYJd<g5\\\\C[Jc<d5^C]Ja<b5_C`J`<_5aCcJ\\\\<]5eCdJZ<[5fCgJY<X5hCjJV<U5jCmJU<R5lCnJS<S5mCnJR<R5mCPKR<o4oCQKR<n4nCRKR<n4nCRKR<n4mCSKS<m4mCSKS<l4nCTKR<l4mCVKR<j4nCVKR<j4nCVKR<j4mCWKT<g4mCYKS<g4mCYKS<g4lCZKT<f4lC[KS<e4lC\\\\KS<f4lCZKT<f4kC[KU<e4jC\\\\KV<d4jC\\\\KV<e4hC\\\\KX<d4gC^KW<c4hC^KX<b4gC_KY<b4fC^KZ<b4eC_K[<a4dC`K\\\\<a4cC`K\\\\<`4cCaK]<_4bCbK^<_4aCaK_<_4`CbK`<_4^CbKb<^4^CbKb<_4\\\\CbKd<_4ZCbKf<^4YCcKg<^4WCcKi<]4VCdKj<]4TCdKl<]4RCdKn<\\\\4QCeKo<\\\\4oBeKQ=\\\\4mBeKT=Z4kBgKU=Z4jBfKV=Z4iBfKX=[4fBfKZ=Z4eBgK[=Y4dBhK\\\\=Y4bBhK^=X4aBiK_=X4_BhKc=W4]BiKc=X4[BiKe=W4ZBjKf=V4YBkKg=V4WBkKi=T4XBkKi=7XAT1P1eNi=gM]An1Lf1n0eNi=`MeAS2Ff1n0fNX>Bk@g1n0gNX>_Om@h1l0hNY>YNc@<9b2l0iNX>XNf@:8c2k0kNX>UNh@;6d2k0lNX>SNj@:5d2k0nNX>QNk@;3e2k0oNX>oMm@92h2j0POX>mMn@92i2h0QOY>kMPA80k2h0QOZ>iMRA15S3a0ROY>hMXBT3@SOZ>fMYBU3_OTO\\\\`0k0f_OSO]`0j0e_OUO\\\\`0h0g_OVO[`0h0f_OXO[`0e0h_OYOZ`0d0h_O[OZ`0b0i_O]OX`0a0i_O_OX`0?j_O_OW`0a0i_O_O\\\\=RNZC]2\\\\O_O[=TNZC[2\\\\OAZ=TNZCZ2^OAV`0>k_O@X`0=i_OCX`0:k_ODX`09j_OFY`05j_OJZ`0Ko_O2V`0@U@?o?VOY@i0o?jNX@S1lc0O1O1N2O1N3N1N2OjGPO^Hm0a7VOfHb0Y7@lH9S7JP92N2N2N101N2O0001O1O1N2O1N3M3Ld__4\"}}, {\"image_id\": 129, \"category_id\": 2, \"bbox\": [149.28382873535156, 234.00973510742188, 348.3753662109375, 549.7911376953125], \"score\": 0.24342721700668335, \"association_id\": 0, \"segmentation\": {\"size\": [800, 616], \"counts\": 
\"Q^c41lh05K4N3L3O1O2M2O2N2N3M3M2N2N1O1O1O1O1O1O1O1O1N3N2N2N1O2N1O1O1O2N100O2N2N2N3N1N2N1O2N1O001O001O1O1O001N2O1O1O002M2O2N2M2O1O1N2O1N2O1N101O0O2O1O1N2O1N3N1O2M3N1N2N2M2N3M2M3M4J5L5K4L4K5L5N1O1O2N1O2N1O2M2N3M2M4L4K5J6I7J7L3M3M3M3N2N1N3N2N1O100O101N100O1O1O1O1O10\\\\Kj^O^1Ua0aNm^O_1Ta0\\\\NR_Ob1o`0YNW_Oe1j`0WN[_Og1f`0UN__Oj1a`0iMn_OT2S`0`MZ@^2g?`M\\\\@^2e?`M^@^2c?`M`@^2a?aMa@]2_?bMc@]2^?bMd@\\\\2]?bMd@^2]?`Md@`2]?^Md@b2^?[Mc@e2_?XMb@h2`?UMa@l2`?PMc@o2b?kL_@U3f?dL\\\\@\\\\3i?^LX@b3l?XLV@h3ab00001O00001O00001O001O00001OO10000O100O10000O10000O1000O0100O100O100O1O00100O1O1O1O1O001N2O1N1O2M3N2M2010O2OhFeKkMZ4T2lKhMS4W2SLeMl3[2XLaE_OW7W4Y3]LSEL^7f3_3iMULV2l3lMSLR2m3PNRLo1o3RNPLm1P4VNnKi1R4YNXK[2h4gMWKX2i4kMUKU2j4mMUKR2k4QNSKn1m4UNQKk1n4XNoJh1Q5\\\\NlJc1T5`NjJ_1V5cNiJ]1W5dNhJ[1Y5fNfJZ1Z5gNeJX1]5gNbJY1`5gN_JX1c5gN]JX1f5fNZJY1h5gNWJW1m5hNQJX1Q6gNoIW1T6iNkIU1X6kNgIT1\\\\6jNdIU1^6kNaIT1a6lN^IS1d6kN^IS1c6mN]IR1e6mN\\\\IP1g6oNZIo0g6QOZIl0i6ROYIk0j6TOWIh0l6XOUIe0o6YORIe0P7ZOQIc0S7[OnHc0U7[OkHd0X7ZOiHe0X7ZOiHd0Z7ZOfHf0[7YOfHe0\\\\7ZOeHe0\\\\7ZOdHe0]7[OdHd0]7\\\\OcHc0]7]OcHb0_7]ObHb0^7^OcHa0^7^ObHa0_7_ObH`0_7^OcH`0_7_ObH`0_7^OdH?^7@cH?_7^OdH?^7@cH?^7_OdH?^7@cH>_7AaH?`7_ObH?`7@aH>a7A_H>a7BaH=]7EdH:[7GeH9Y7HiH7U7KlH4R7NoH0P71RINm63TILj66WIIh67[IGd6:]IEb6;aIC`6<aIBa6<aIBb6;`IDa69cID`68cIFb65`IJd60_INg6J^I3g6G\\\\I7j6BYI=o6XOUIf0U7oNoHn0c?O2L7I9GQnm2\"}}, {\"image_id\": 129, \"category_id\": 2, \"bbox\": [220.41940307617188, 218.84173583984375, 266.51763916015625, 188.93759155273438], \"score\": 0.18934687972068787, \"association_id\": 0, \"segmentation\": {\"size\": [800, 616], \"counts\": 
\"UoU84ih05L3N2M2O3L3N2M3N3L3N1O2N2N1O1O2M2O2M2N2O2N1O1O2N1N2O1O2N1O10001N10001O1O001PZOZNdd0f1[[O]Ncd0d1Z[O`Ndd0`1[[ObNdd0_1Z[OdNdd0]1Y[OgNed0Y1X[OmNed0T1X[OPOfd0Q1W[OTOfd0m0X[OVOfd0j0X[O[Oed0f0Y[O\\\\Ofd0e0X[O^Ofd0_2O1O000000001O000000001O1O001O1O1O001O001O0001O01O00010O0010O0100O01O10O00010O01O001fLj[Oh1Wd0UNR\\\\Od1oc0XNZ\\\\Ob1fc0[N_\\\\Oc1bc0XNc\\\\Og1]c0XNe\\\\Og1\\\\c0WNg\\\\Oh1Xc0XNh\\\\Oh1Yc0VNh\\\\Oj1Xc0UNi\\\\Ok1Xc0TNi\\\\Ok1Xc0SNi\\\\Om1Xc0QNi\\\\Oo1Xc0PNh\\\\OP2Yc0nMi\\\\OQ2Xc0mMi\\\\OS2Wc0mMi\\\\OS2Xc0kMh\\\\OV2Yc0hMh\\\\OX2Xc0gMi\\\\OY2Xc0dMi\\\\O]2ld001000O100O1000O01000O0010O010O001O0O2O001N1N3L32N3N1N2N2N1O2N2N101N4L4L8G9Ee_T3\"}}, {\"image_id\": 130, \"category_id\": 1, \"bbox\": [58.28911590576172, 508.1010437011719, 305.4063415527344, 158.91952514648438], \"score\": 1.0, \"association_id\": 3, \"segmentation\": {\"size\": [853, 1280], \"counts\": \"Zl`1S1Zi0>G4N2N2N2N101M3N2M4L4M2M3N1O2N1O2N1O2N2M4L5K3M3O1N1O2O0O2O0O2O1N3N1N2O1O1N101O001O001O001O001O00000000001O0000000000000000000000000000O1000000000000O10000000000O10000O10000O100O101N100O10000O10000O10000000000O100000000O1000000O010000000001O0O100000000O10000O100O101N10000O1000000O10000O10000O10000O100000000O1000000000O100000O10000000000000000O0100000O10000O011O001O000O2O0000001O00010O010O100O001O010O000010O0010O010O01O00010O0000001O0000000000000000000000000ZNTZOcNle0R1dYOQNc0l0ie0P1kYOnM?Q1fe0o0fZOQOZe0l0jZOSOVe0j0oZOSOQe0m0R[OQOnd0n0Y[OlNgd0R1b[OfN^d0Z1g[O`N[d0_1i[O\\\\NXd0d1\\\\201N2O00100O1O1O1M3I7G9O1O1O100O1O1O1N2N2L4L4M3N2O10000O101N10000O10febh0\"}}, {\"image_id\": 130, \"category_id\": 1, \"bbox\": [716.5862426757812, 474.7649841308594, 499.67584228515625, 266.3624572753906], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [853, 1280], \"counts\": 
\"dffb02;8lh0f1jNg0I7J5L3M4M2M3N2N2N3M4L4K7J6kYOhKae0P5E5K5L2N3M2M3O0O2O0O2N101N2N1O2M3N1O2M3O0O2N1O101N100O100O100O1O1N2O1O1N2N2O1O100O100O10000O10000000O10O1000000O100000000O10O1000O1000O1000O10O1000O1000O1000O10000000O2O00000000000000001O000000000000000O10000000O1000000000000000000O10O1000000000O10O100000000O10O100000O100000000000O1000O10000000000000O0100000O01000O10O01O100O100O01000O1000000000000O0100000000000000000000000O10000000000000000000000000000000000000O10000000000000000000000000000000000001O0O10000000000000001O00000000000000001O000000001O0000001O00001O001O0000001O0000001O000001O01O00000000000000000000000000001O0000000000000001O0000000000000001O0001O00000000000001O000000000000001O00000000010O00000O2O00001O0000001O00000000001O00000000000000000001O00000000001O0001O01O001O1O001O1O1O2N1O1O1O1O1O001O001O00001O001O001O001O1O001O1O001O001O0000001O0000001O000000001O0000001O00001O001O001O001O001O001O00001O0000001O00001O001O001O1O1O1O001O001O00001O00001O001O001O1N3N4L4L3M2N1O2N1O5K4K3N2N1N3N3aLZZOh1ne0^MQZOG<d2ne0SMg[Oj1Sg0[O[_e1\"}}, {\"image_id\": 130, \"category_id\": 1, \"bbox\": [251.4075469970703, 492.3001403808594, 539.3131103515625, 209.90304565429688], \"score\": 0.9999998807907104, \"association_id\": 2, \"segmentation\": {\"size\": [853, 1280], \"counts\": 
\"[V]79Yj06J8G9E7M2mMkNdZOX1Xe0oNbZOS1\\\\e0SOUZOY1ae0TOYZOo0ee0VOUZOl0je0XOQZOk0me0XOPZOi0Pf0ZOlYOh0Rf0\\\\OjYOf0Tf0]OiYOd0Wf0_OeYOc0Yf0@dYOb0[f0l1O2N2O1N2O1N2N2O0O2N2O0O2O0O101O0O101O001N101O001N101O000O101O000O101O0000000O10000000001N1000000000000O2O00000000O100000O1000O10000000000O100000O10000000O1000000O10000O10000O100O10O10O10000000000O10000000O1000000000O1000O1000000000000000000000000001O0000000000000000000000000000000000000001O0000000000000000000O1000000000000000000000000O1000000000001O0000000O10000000001O00000000000O101O0000000000001O000O100000001O000000001N10001O00001O00001O0O101O00001O00001O00001O0000001O0000001O0O1000001O00000010O0001O00001O001O00001O0000000010O0000000000000000000000000000001O0000000000000000000000000O10001O000000000000000O10000O10000O1O100O100O10000O2O000O10O10O100O1O1O1O1O1O1O1O1O1O100O10O010001O0O10000O2O000O2O0O101N10001N10000O2O0000010O001O00001O1O010O1O001O1O01O01O001O001O010O1O1O1O2N1O1O1O1O001O010O00001O0O101O001O001O1O1N101O1O000O2O001O0000001N1000001O00001O001O0000001O0O101O000O101N101N1O2O1N3M2N3M2N8mLaXOe1MPOPh0i0VXOTOmg0@nWOf0a0]OAK`h00dWO1X\\\\a=\"}}, {\"image_id\": 130, \"category_id\": 2, \"bbox\": [66.1993408203125, 653.3777465820312, 230.212646484375, 21.06378173828125], \"score\": 0.9999836683273315, \"association_id\": 3, \"segmentation\": {\"size\": [853, 1280], \"counts\": \"g^h12bj02O1O00000O1000001O0000000000000000000O10000000000000000001O000000001O000000001O01O0001O000000000000001O000000000000000000000000000000000000O10000O100O101O0O100N2O10000O1000000000000O1000000000000000000000000000000000000000000O10000000001O000000000000000000000000000000000000000O100000000000000000000000000000000000000000000000000O1001O00000000000000001O001O1O001O00001O001O1O1N3N0000001O00001O000000001O000O10001Od`Tj0\"}}, {\"image_id\": 130, \"category_id\": 2, \"bbox\": [302.2686767578125, 669.8311157226562, 425.12225341796875, 34.6890869140625], \"score\": 0.999974250793457, \"association_id\": 2, \"segmentation\": {\"size\": [853, 
1280], \"counts\": \"^Rm73`j03N2O0O10000O110O000000000000000000000000000001O00001O001O000000001O000000001O00000000001O00001O000O101OO101O00000O1000001O0O10000000000O1000001N100O1O1O1O10000O2O000O10000O100O100O1O1000000O1000000000000O10000000000000000O10000000000000000000000000000000000000000001O0000000001O00000000000001O0000000000000001O00000000000000000001O000001O00000000000000000000000000000000000000001O0000000000000000000000000001O000000001O00001O001O001O0000001O001O00001O1O1O00001O001O00000000001O001O0000001O00000000001O00001O0000001O001O0O2O0000001O0OeT\\\\10[kcN2N2N10000O1O010O001O1F:M30000O10000000O10O100000O10000O01000000000O1000000000000O100O1000001O000O10000000000000001N10000000000000000O100000000000000000000000000O100000000O1000000000000O10000O2O000O2N2N^j\\\\>\"}}, {\"image_id\": 130, \"category_id\": 2, \"bbox\": [732.0606079101562, 707.861328125, 452.02410888671875, 51.9927978515625], \"score\": 0.9989759922027588, \"association_id\": 1, \"segmentation\": {\"size\": [853, 1280], \"counts\": \"RZSc06^j01O2O0O100O1010O000000001O0000000000000000001O01O00000001O00000000000001O01O0000000000000000000000001O00000000000000000001O0O10000000000000000000001N100000000O10000000000O2O00000000000O1000000O1000001O0O100000000000000O100000001N100000000000000O10000000000O2O00000000000O100000000000000O10000000000O100000000000001O00000O1000000000000000000O1000000000000O1000000000000O10000000000000001O00000O1000000000000000000000001O00000000000001O000000000001O000001O000000000000000010O0000000001O0000000000001O00000000000000000000001O000000000000000010O000001O00000000001O0000000000001O00001O00001O01O000001O000000001O0000001O0000001O0000000000001O000000001O00000000001O000000000000000000000000000000000000001O000000000000001O000000000000000000001O0000000000001O0000000000001O00001O001O00001O00000000001O001O001O0000001O0000001O00000000000O2O0000000000000000001O00000000001O000O10001O0OnQj2\"}}, {\"image_id\": 130, \"category_id\": 1, \"bbox\": [99.70369720458984, 
500.8666076660156, 441.7601013183594, 184.95956420898438], \"score\": 0.860161304473877, \"association_id\": 0, \"segmentation\": {\"size\": [853, 1280], \"counts\": \"djc2`3Sg03M3N2N2N101N1000001O00000O10001O0000000000001OO1000000O1000000O2O00000000000O10000000000O1000O10O100000000O10000O10000O10001N100000000O1000000000O1000000000000000000O1000000000000O100000000O10001N10000O10000O10000O10000O10O10O10000O10000O100O1000000O10000000O010000000000000001O000001O00000000000001O01O01O010O01000O10O01O100O1O000001O00000000000O100000001O00000oN`YOmM_f0o1fYOQNZf0S1hYOZN5d0Rf0R1kYOUN6i0oe0Q1oYOPN6n0ke0R1eZOnN[e0Q1fZOoNZe0P1gZOPOYe0o0hZOQOXe0n0jZOROUe0l0mZOTOSe0k0nZOUOQe0k0Q[OTOod0k0R[OUOnd0k0R[OUOnd0j0S[OWOld0i0T[OWOld0h0T[OYOkd0h0T[OZOkd0f0U[OZOkd0f0T[O[Old0d0U[O]Oid0d0V[O]Ojd0b0V[O@id0`0W[O@id0?W[OBhd0>X[ODgd0;Z[OFdd0:\\\\[OGdd08][OIbd06^[OKad07][OKbd08U[ONkd06mZOORe02jZO2Ue0OhZO4Ve0OZZOSN1Q2de0NVZOb0ie0V2O001N2O001O1O001O00000O101O0000000000001O0000000000000O10000000000O10001O000O1000000000000O100000000000O1000O100000000000O10000O01000O100O10000O100000000O10000000O0100000000O10000000001N100000000O100000000000000000000001O0000000001O0001O00000000001O00000000000010O00000000000000000000001O000000000000000000000000000000000001N1000000000000000000000001O000000000000001O000O101O000000001O0O1000000000000000000000000001O0000000000001O1N1RMiZO>[e0XOnZOc0Xe0UOoZOf0\\\\e0hMcZO1c0P2UgWc0\"}}, {\"image_id\": 131, \"category_id\": 2, \"bbox\": [52.24679946899414, 551.8626708984375, 359.9081726074219, 182.1451416015625], \"score\": 0.9999994039535522, \"association_id\": 2, \"segmentation\": {\"size\": [800, 600], \"counts\": 
\"oXY19_h0?F6J5M1N3N1O2O0O2N1O101N1N3O1N1O2O0O2O0O2O0O2N1O2N2N2O0O2N2O0O101N101N101N2O1O1N2O1O0O2O001N101N2O0O2N10001N10000O2O0000000O1000000O100O10000O100O1000000O1000O01000000O10000O010O100O10O0100000O10O10O1000O010O01000O010O1000O010000000O10O10O100O11O1N2O001O1N101O0O2gZO`LPe0`3nZOcLPe0g301N10001O001N101O1O001N1000001O0O100O100O100O101N1000000O10000O1000000O100O10000O100O100O1000O010000O10000O10O010O0010O0100O010O10000O10O01O1N2N2N2O10O0100O0100O01O010O1O001O010O10000O10000O010O1O1O100O10000O2O001O1O1O1O1N2O001O1O0O101N101N2O1O1N2O1O001O1N101O00001O0000001O00000O101O00001N101N1O2O0O2O0O2O0000000O101O0000000O2O00000O2O000O1O100O10000O10000O0100000O1O100O001O1O1O100O1O010O10O01O000O101O001O010O100O10O01O1O1N2O1O1O001O1O101N2N2N2M7H]Yb4\"}}, {\"image_id\": 131, \"category_id\": 1, \"bbox\": [113.32093811035156, 58.33974838256836, 236.8241424560547, 399.6275939941406], \"score\": 0.9998819828033447, \"association_id\": 0, \"segmentation\": {\"size\": [800, 600], \"counts\": \"mS[33kh04c\\\\O;Z=HeAl1e=WNbAe2X>^M`Ak2]>WM^An2_>TM]AQ3a>QM[AS3c>nLZAV3d>lLWAZ3f>fLVAa3f>aLWAe3e>\\\\LXAh3f>XLYAk3e>VLYAl3f>TLYAn3e>SL[Am3e>SL[An3c>SL\\\\Ao3b>RL^Ao3a>RL^Ao3`>RL_AQ4^>PLbAQ4]>oKbAT4\\\\>mKbAU4]>mK`AV4^>lK^AW4b>jKYAZ4f>hKVA[4i>gKSA\\\\4l>eKRA]4m>dKRA]4l>eKQA]4n>eKPA^4m>eKo@^4P?dKl@`4Q?i2N2N2O1O1N10000000001O00001N101O1O1N3N2N1O2N2N2M4M4L4L2N2N1O2N2N5K6I6K2N2N1O1O001O001O1O1O1O100O1O2N1O1O001O1O0001O000000000000001O0000001O00001O1O2O1N3M2N3M2N2N3RG`BV6d=^IeBa6_=TIjBj6[=iHnBV7`?N1O1O100O10000O1001O00000O1O10O010O010O002N1O2N3M7I6J3M2N001O0010O01O1O001O1N2O001O001O0O2O000000000000001O00O01O1O1UNh]O\\\\L7POSb0_4[^OoKWb0i3S^OSLna0k3W^OQLka0m3Y^OPLha0Q4V^OPLja0Q4T^OPLla0S4P^OoKoa0Z4g]OgKYb0_4`]OcK_b0b4Z]O`Kfb0]501OO1N1ZOW]OVKjb0h4]]OPKfb0o4e001O10O04M2N3L4M2fLV\\\\OZ1mc0QNg\\\\Ol1bc0fMe\\\\OY2bc0\\\\Mc\\\\Ob2od0^OjYOVNYf0f1d0J7GVPS6\"}}, {\"image_id\": 131, \"category_id\": 2, \"bbox\": [86.90064239501953, 419.88580322265625, 178.06231689453125, 49.674560546875], \"score\": 
0.9998579025268555, \"association_id\": 1, \"segmentation\": {\"size\": [800, 600], \"counts\": \"U]T2111ih08K2N1O2O0O2O0O10000O2O0O10001N1000000O100000000O10000000000000000O10O1000000O0100000000O0100000O10O10O1000000O010O1000000O10000O10000000001N100000000O100000000O10000000000O10000O100000000O10001O0O1000000O10000O100000000O1000000O01000001O00001O001O1O1O4K2O1O00001O2N1O1O0O101O0000001O00000000001O0O1000000000001N10000O1O1O2O000O2N1O101N101N10SoY8\"}}, {\"image_id\": 131, \"category_id\": 1, \"bbox\": [136.71620178222656, 58.817665100097656, 324.48370361328125, 549.0750732421875], \"score\": 0.9988163709640503, \"association_id\": 1, \"segmentation\": {\"size\": [800, 600], \"counts\": \"id]3;^h0h0YO`0]O>[Ob0G9E:G:F9I7M4L3M3N2M4M2cLSLcAQ4Z>RLbAR4[>QLaAR4]>QL^AT4_>PL[AT4c>oKYAT4f>oKUAT4i>PLRAS4l>QLo@R4P?PLm@R4Q?PLm@Q4S?QLj@Q4U?PLi@Q4W?QLf@P4Z?QLd@Q4Z?RLc@o3]?SL`@o3_?SL[@Q4e?PLT@W4j?mKk_O[4U`0Y2O1O0010O01O001O10O01O001O1O1O1O2N1O2N1O2N2N2M3N3M4L4L5K4L3M2N2N1O2N2N1O1O2N1O2N1O2M2O2N1O1O1O1O1O001O100O1O001O1O1O100O2N1O100O1O1O100O1O1O100001O10102N2kE`Bd8a=fFWCV9^>M3NM3N2M3M4L5K4K6K7I3M2N2M2O1O1N102N2N101N2O1N2O0O1O0O1O1O1O1O1O1O1O1O001O1O1O1N2N2O1N2N3M2M3N2N2N1O2M2N3N1M3M4L3[Oe0M3M4M2N2N3M2M3M3L4L5K4K5L5K4M3M3N2N2M3N2N2O1N2O1O1N2O1O1O2M2N2N2N2O2N1OSFlGP3S8gLYHW3f7gL^HY3a7fLaHY3_7gLbHX3]7hLeHV3\\\\7jLeHU3[7jLgHU3Y7kLhHT3X7kLjHT3V7lLkHR3V7nLkHQ3U7nLnHP3Q7QMPIn2P7QMRIn2n6RMSIm2m6SMTIk2m6UMSIk2m6UMTIj2l6UMUIk2l6TMUIk2k6UMUIk2k6UMVIi2k6WMUIi2k6VMVIj2j6VMWIi2i6WMWIi2i6WMWIh2j6XMUIi2k6WMUIi2k6WMUIi2k6XMTIh2l6XMSIh2n6XMRIh2o6WMQIi2o6WMQIi2o6WMPIj2P7VMPIi2Q7WMoHi2R7fIcGi0Z1b5S7bIjGi0R1e5T7^IPHk0l0f5V7[ISHm0f0i5W7^HcGXOd0_2b0k5X7XHQHQO9l2<l5Z7SH[IP2[Ol5\\\\7PH]IS2VOn5^7kG_IX2ROm5`7hGaIZ2nNn5b7YGoIi2_Nn5d:QJ[En5h:PJXEP6i:oIWEP6k:oIUEP6m:PJSEn5o:QJQEm5R;RJnDm5T;RJlDm5U;TJjDk5X;TJhDk5Y;UJgDj5[;VJeDh5[;YJfDf5Z;[JeDe5[;[JfDc5Z;_JfD`5Z;aJeD_5Z;bJgD]5Y;dJgD[5X;gJgDX5Y;iJhDV5X;kJgDU5X;lJiDS5W;mJjDQ5W;nJkDQ5V;mJmDP5T;oJnDP5S;nJPEo4Q;PKREn4n:QKUEl4m:RKVEl4j:SK[Eh4f:XK^ETNXNW5[<cLbEnM^NW5Q
<kLdEgMeNW5f;SMjE^MiNY5\\\\;[M`G_2_8bMgGW2W8mMmGm1n7ZNTHb1g7dN[HY1l6BVI;f6J[I3i5dKVF^4R4Lg5RLjET4a4He5Q1\\\\JmNf5R1[JlN\\\\6>fI_O]6>fI_O]6?eI]O_6a0dIZO`6c0dIXO`6f0cIUOb6h0aIROe6k0]IROg6SMTDR3X5G^76dHGa75aHHe71_HKj7L[HOm7IYH1Q8CYH4j8bNgGd0We\\\\3\"}}, {\"image_id\": 131, \"category_id\": 1, \"bbox\": [197.86322021484375, 263.5705261230469, 266.7005310058594, 338.5920104980469], \"score\": 0.9796765446662903, \"association_id\": 2, \"segmentation\": {\"size\": [800, 600], \"counts\": \"WPk4R2^f0f0C;iNQMP\\\\OV3lc0oLm[OV3Rd0kLk[OX3Td0hLj[O[3Ud0fLg[O^3Yd0bLd[Ob3Zd0_Lc[Od3\\\\d0b0N2N2N2N1O1O2N2N2M3N1O1O1O1OWKY\\\\OY4hc0eKZ\\\\OZ4fc0eK\\\\\\\\O[4cc0cK`\\\\O\\\\4`c0cKa\\\\O]4`c0`Kc\\\\O`4\\\\c0^Kf\\\\Ob4Zc0\\\\Ki\\\\O`0_Ob3Zd0\\\\Li[Oc3Wd0]Li[Od3cc0PLi\\\\O=Bc3fc0TLc\\\\O;Fa3fc0XL_\\\\O]4ac0`00100O10O1bK[\\\\Oa3dc0n001O00001O001O001O001N101O00001O00001O1O1O1k]OgJa`0[5W_ORKb`0P5i^OjKPa0Y6J4L3M1O1O2N1O2N3M6J6J3M1O1O1O1O1O2N2N3M2N1O1O1O001O2N2N2N2N2N1O1O001O1O1O2N2N1O1O1O1O00001O001O001O001O001O1O001O001O001O001O000001O01O000001O1O1O1O3M4L4QFZA\\\\9V?O1O1O001O00001O000010O0000O10001O000000000000000O100000O10O10000O1O2N1O1O100O1O100O1000000O100000000O100O1O1O1O1N2O1O2N100O1000001N1000001O1N2N1O2N2N1N3N1O1O2N10000O2YIn_O^4R`0^KY@Z4h?cK]@[4c?bKa@]4`?aKb@]4_?bKc@]4^?`Ke@^4]?`Ke@^4^?^Ke@_4b?YKc@c4oa0M5L4L3M2O2M3M4K5K6G9G7K5M3M2N2N3L6J6J8C=D;I4L3N2N3M3L6JViY3\"}}, {\"image_id\": 131, \"category_id\": 2, \"bbox\": [289.35260009765625, 524.4447021484375, 157.99905395507812, 112.93035888671875], \"score\": 0.09832057356834412, \"association_id\": 0, \"segmentation\": {\"size\": [800, 600], \"counts\": \"\\\\[R79Xh0f0G4M2000000O1O10O1O3N1N1O010O00100O10O0100000O10O01O010O010O10O01O010O00100O0010O010O000100O0000000000000010O00001O001O00001O000000000000001O0000000000O1000000000000O100O100O100O1000000O1O2N100O100O1O1O100O10000O101O0O1O2N1O1O1O2O0O010O1O1O1O01_OlXOTOUg0k0?O00210N1N3YOXXOOng01a0O1O0O101N110O010O1O00001O4L\\\\[P4\"}}, {\"image_id\": 132, \"category_id\": 1, \"bbox\": 
[121.49668884277344, 136.25856018066406, 145.67286682128906, 86.43403625488281], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [428, 535], \"counts\": \"[Uc1k0]<5N2N2O2M2L3O2O0O1O1N2N2O1O1N2N200O01O100O1000O10000O1O1N3I7N2O0O2O001N4M2N001N2O00001O0000000O1000O100000O1000O10O10O10000O1O100O100O100O100O10O10O1000000O1000000O1000000000000O100000000000000O101N10000000001O0010O0010O00010O21N3M0O1001O1N1O001O2N1O100N2O1O1N2N101N2M3I6O2N101N2N1O101N1OYk_3\"}}, {\"image_id\": 132, \"category_id\": 2, \"bbox\": [69.6600341796875, 207.254638671875, 173.09095764160156, 25.871932983398438], \"score\": 0.9999997615814209, \"association_id\": 1, \"segmentation\": {\"size\": [428, 535], \"counts\": \"h\\\\P13X=1O2O0O2N1O2O000O101N10000O101O00000O1000001O0000000O10001O00000000001N100000001O000000000001O001O001O000000001O000000O100000000O1000000O1001O00001O0O10001O4L2N1O00001O1O1O1O00000O1000001O00O1000000O1000000O100O100O1O100O100O10O1000O100000000O100000000000000000000001O000000001O00001O0000001O00000000001O0000000000001O0000000O2O2Maji3\"}}, {\"image_id\": 132, \"category_id\": 1, \"bbox\": [70.76872253417969, 137.4821319580078, 84.23919677734375, 62.79777526855469], \"score\": 0.9999992847442627, \"association_id\": 4, \"segmentation\": {\"size\": [428, 535], \"counts\": \"d`o06R=5M4M3L4L4M2O1O0O1O2N1O11oNgCm0c;WOoDi0P;YO^DJ?l0S;]OaDJNNOl0b;\\\\O`DLMN0j0c;]O^DP1b;>O001O001O00000000001O00O10000B`DPO`;P1aDoN_;Q1aDoN_;^11000000O1000000O10000O100O1N2_O[DoNO101g;n0a0N2N2N2N2M3N2N2O10000O1M3O100N2O1O10000O10001N2N3M_So4\"}}, {\"image_id\": 132, \"category_id\": 2, \"bbox\": [37.66673278808594, 191.901123046875, 76.78233337402344, 11.619949340820312], \"score\": 0.9999990463256836, \"association_id\": 4, \"segmentation\": {\"size\": [428, 535], \"counts\": \"i_`02Z=0O2O001N10001O0O100000000O1000001O0000000000000000000000000O1000000000000001O00001O001O001OO100000000O100001O001O1N101O00000000000000000000O11N101Oeh_5\"}}, {\"image_id\": 132, \"category_id\": 1, 
\"bbox\": [31.96462631225586, 133.3946990966797, 78.19050598144531, 55.12042236328125], \"score\": 0.9999979734420776, \"association_id\": 3, \"segmentation\": {\"size\": [428, 535], \"counts\": \"Wi>211U=6M0O1O2N3L3M3M4M2O1N101]OUObDm0];TOaDn0^;SO`Dn0`;SO\\\\DP1d;ROYDo0g;;000O10000000000O100CVDXOj;h0VDWOk;U100000ATDQO37j;g0ZDXOf;W100001O00000001O001O1TDbNh;e0[D2d;JcD4\\\\;GlD6U;IlD6T;JmDD\\\\O?g;MoD3Q;LWEMi:2]EId:6]EGe:8X100N2O1O2O000O2O00iee5\"}}, {\"image_id\": 132, \"category_id\": 2, \"bbox\": [13.516432762145996, 179.06918334960938, 61.26203918457031, 9.806655883789062], \"score\": 0.9999606609344482, \"association_id\": 3, \"segmentation\": {\"size\": [428, 535], \"counts\": \"nP63Y=1N1000000O10001O000000001O000000000000000000000000000001O1O2N001O001N10000000^P51`oJ2O0O100000O10V`P6\"}}, {\"image_id\": 132, \"category_id\": 1, \"bbox\": [12.401753425598145, 131.45684814453125, 60.0075569152832, 42.92884826660156], \"score\": 0.9966290593147278, \"association_id\": 2, \"segmentation\": {\"size\": [428, 535], \"counts\": \"Pb52Z=0000000[C7g;JVD:h;FXD=e;CZD?e;A[D>f;BZD>f;BXD`0h;@XD`0h;AkCM9b0l;ERD<n;EPD=o;EoC<P<EnC<R<EmC;S<ElC<T<>0O1UOkCF1b0X<GkCGM0O=Y<MoC2S<KoC5Q<KnC6R<IoC8P<HPD8P<FRD:n;FRD:n;FRD;m;DUDNC8Y<JUDMB86Hl;0^D<DER<3RD8LER<3RD8KFS<2RD9IFU<1oC=KBV<1oC=KBU<1QD>IAV<0QDe0o;ZOSD?HBU<NWD7MHk;0UfW6\"}}, {\"image_id\": 132, \"category_id\": 2, \"bbox\": [0.0, 170.58677673339844, 41.80787658691406, 6.72637939453125], \"score\": 0.993598222732544, \"association_id\": 2, \"segmentation\": {\"size\": [428, 535], \"counts\": \"]52Y=2O0O2O00O1000000000000000000000000000000000001O0000001O001O00000000O1000O10cT_6\"}}, {\"image_id\": 132, \"category_id\": 2, \"bbox\": [3.4435811042785645, 319.8175964355469, 77.84544372558594, 104.638916015625], \"score\": 0.2993059456348419, \"association_id\": 0, \"segmentation\": {\"size\": [428, 535], \"counts\": 
\"jl26U=2eMJ^F1D6k92[FND0Q:4XFODNU:3YFLC0U:4ZFHC2U:6SGHP97oFGU98kFFW9:fFH[98bFK^96^FLc93\\\\FOd91YF1h90UF2k9NUF1k91SF1l9OTF1l9OTF1n9NRF1o9MQF4Q:JoE5S:JnE4S:LmE5X:EhEa0Q:BRF9g;I1O\\\\NLRF1P:Ng14NO0010O001O10O000001O1N201mBIl<>N0001O001O000001O00010O1N2O2M13N0O1N2N2N1O2N2N2MS^o5\"}}, {\"image_id\": 132, \"category_id\": 1, \"bbox\": [50.667236328125, 132.996826171875, 161.0455780029297, 67.48881530761719], \"score\": 0.1618509441614151, \"association_id\": 0, \"segmentation\": {\"size\": [428, 535], \"counts\": \"hjh01ce64aZILO1i<:UCIh<?O1O2N1SOZOREg0m:ZOREf0n:[OREd0n:\\\\O_E7a:J_E5a:KaE3^:OcEO]:1eEM[:3_1001O000000000000000000000000000001N100000nBMU1N\\\\:4`DO1Nj0Oe:4aDL22g0Nf:4jDNW<1jCNV<1kCOU<1kC0U<OlCOV<0jC0_<542nCJl:6QENm:3PE0P;1oD0P;3mDNR;3mDNR;4lDMS;5kDLT;4lDLT;3iDA@=h;OiD5X;GkD9S<100O100O1O1O100L4QOUO[Em0e:SOZEn0f:ROYEo0f:ROZEn0f:SOQENCR1\\\\;POPE0CP1];POoD2Bo0_;oNmDY1S;gNlDZ1T;fNlDZ1T;fNkD[1U;eNbD2O\\\\1_;;0000000000000000O10000001O000000O100000000000aNgDk0Z;SOhDl0X;POmDo0S;POoDo0Q;POPEP1P;nNRER1n:nNSEQ1m:oNSEQ1m:nNVEP1j:POWEo0j:POVEP1j:POWEo0i:POXEP1h:POXEP1h:POXEP1h:POXEP1h:POXEP1h:POXEP1h:POXEP1h:POXEP1h:POXEP1h:POXEP1h:POXEP1h:POXEP1h:POXEP1h:POYEo0i:SOSEm0m:SOSEm0o:QOQEo0f;0O10000002M6ZOWkV4\"}}, {\"image_id\": 133, \"category_id\": 1, \"bbox\": [467.3451843261719, 323.8680419921875, 209.22537231445312, 187.76358032226562], \"score\": 1.0, \"association_id\": 4, \"segmentation\": {\"size\": [683, 782], \"counts\": \"Q`l9f1bc0c0^O9H5K3M3M3M5K=C7I4L3N3L4L3N2M4M4L2N1N01O1O10000O10000O1O1O001O100O0100O10O0100O1000000O010000000000001O00000O101O1O1O2N2N2N1O2N1O2N3M7I4L2N1O000000001O00000000000000000000O100O1O1O1O1O1O1O1N2M3M3N2O1O100O100O1O1O1O100000000O100000001O0000000000000000000000000000001O000000001O0001O00000010O01O1O10O01O1O1O00100O1O1O1O1O1O001O010O001O001O000000000001O0O1O2M2N3M3K5L4M3M3L5J=ULe^Oo2Wb0E5K4K5K5K7I7C=]Od0_Oa0GiYX2\"}}, {\"image_id\": 133, \"category_id\": 1, \"bbox\": [153.3727264404297, 344.8234558105469, 150.8870086669922, 
97.97686767578125], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [683, 782], \"counts\": \"k]X32We0:C;G8J;D4J5N2N2M2N3N1O100O010O1O1M20100O010O100O100O1O100O10001O001N101O001O0O2O2N1N102N2M3N1O1O1O0000001O000O10000000000001N100000O01O1O1O1O1N2N2O100O1O100O100O10000O100O100000000O100000000000000000001O000000001O01O0001O01O001O010O001O011O1O0000O2N2O2M2O0O1O2N2N2M3N002L4L5]Ob0Ah[OB^d07`0K[^R:\"}}, {\"image_id\": 133, \"category_id\": 1, \"bbox\": [20.371198654174805, 324.81884765625, 119.94125366210938, 134.19949340820312], \"score\": 1.0, \"association_id\": 2, \"segmentation\": {\"size\": [683, 782], \"counts\": \"Q[>T1Vd02N4L8_\\\\O_Nhb0[2G3N1N3M7I3M2O1Nd0]O3L2O1O001O00000O0100O100000000O010O100O100O1O1000000O100000000O10001O0000O10O100000000O1000000O1000000000001N1000000O10000O100000001O0000000000010O01O0010O01O1O2O0O2N100O1000O1O2O2M6J2L3M4B`0H6L6lNT]O\\\\O\\\\c0Cja`=\"}}, {\"image_id\": 133, \"category_id\": 2, \"bbox\": [1.1036012172698975, 443.5357971191406, 122.6036376953125, 30.5604248046875], \"score\": 1.0, \"association_id\": 2, \"segmentation\": {\"size\": [683, 782], \"counts\": \"lh1=nd01N3N1O1N2O0000001N10000N2000000O01000000000O11O00001O1O7I1O1O00001O00000O1000O100000000O100O10O0100O1O1O1O1O1O1O1000O010000000000O100000O01000000000O10000000001O00000O1000000000001N10000000001O0O101O000O101O000O10001N10000O2O1O000O2O1NP^h=\"}}, {\"image_id\": 133, \"category_id\": 2, \"bbox\": [151.05950927734375, 421.6512451171875, 136.95138549804688, 27.919403076171875], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [683, 782], \"counts\": \"eiW32Xe02O1O00001O1O001O00001O001O00001N101O00001O001O001O001O00001O1O00001O010O0001O2O0O001O1O8H1O010O1O1O1O0000001O0000000000O10000O100O10000O100O100O1N2K5O100O1N20000O1000O0100000000001O0O10001O00000O2O0000000000001O000O101O0000001O000O2O00001O0O10001O00001N100O2Nj\\\\Z:\"}}, {\"image_id\": 133, \"category_id\": 2, \"bbox\": [474.20428466796875, 480.5483703613281, 
202.57073974609375, 60.393768310546875], \"score\": 1.0, \"association_id\": 4, \"segmentation\": {\"size\": [683, 782], \"counts\": \"_dP:2Xe02N4L2M2O2O0O1O2O0O2O001O0O10001O0O10001O001O001O0O101O00001O1O0000001O010O1O001O00001O001O001O01O01O1O1O0010O001O10?@4MO010O01O1O2O0O1O01O00010O000010O00O2O000O2O0O101O0O1O2I6M3O2O0O101N1M3M301N10000O10001N1000000000000000000000000001O0000000000001O000O101O001O0O2O0000001O000O101O0000001N101O00001O0O2O001O001O000O10001O0O10000000O010000O1000000O10000O10001O001O00001O0O101O1O1N101O000O4L6Ho`U2\"}}, {\"image_id\": 133, \"category_id\": 1, \"bbox\": [674.939208984375, 372.830810546875, 85.78021240234375, 123.92825317382812], \"score\": 0.9999997615814209, \"association_id\": 3, \"segmentation\": {\"size\": [683, 782], \"counts\": \"QYS>2T21l`01R_O2k`02Q_O0n`03m^O1Ra0Ol^O4Sa0Lf]OHb0`0ia0E\\\\]O;=8Xb09b]OIja0ZOU^OW2ja0lMV^OS2ia0RNo]OR2oa0f0K50001O000O010O100000000000000000000000000000000000000000001O00001O1O2N3M3M1O1O0010O1O1N101O001O2N1O1N1O2N2N2M2N3N2K:G5L3SOe]OcNgb0U1i0hNS\\\\Oo0oc0oNS\\\\Oo0nc0POT\\\\Oo0lc0QOT\\\\Oo0lc0ROR\\\\Oo0nc062O5[Ok[OIYd0Fk[OO39jd0KoW>\"}}, {\"image_id\": 133, \"category_id\": 2, \"bbox\": [669.482177734375, 480.65753173828125, 100.95172119140625, 23.82183837890625], \"score\": 0.9999997615814209, \"association_id\": 3, \"segmentation\": {\"size\": [683, 782], \"counts\": \"e[Q>2Xe02O0000000O100000001O0O10000O10000O2O000O10001N10001O00001O000001O01O000000000000000010O00100O100O3N000O1O00000000000000000000O100O1000OMGT[O;kd03O10000001O00001O001O0O101O1O001O1O001N1000001O1M[k8\"}}, {\"image_id\": 133, \"category_id\": 1, \"bbox\": [308.7117919921875, 349.9316101074219, 54.209808349609375, 94.04522705078125], \"score\": 0.9999727010726929, \"association_id\": 5, \"segmentation\": {\"size\": [683, 782], \"counts\": 
\"o^^64Re0:J2N100O2N3M2N4L4M6I2O2M?B=B3M10hNS]O3kb0MZ]OOeb01\\\\]OOcb00W]O8hb0FZ]O;eb0FY]O<cb0X100O1000O10000001O1O1O001O1O1000O101N2N3mM\\\\]OY1fb0dN[]O[1gb0cN\\\\]O[1\\\\c0M3M3E;K5TOg[O?ld0BR]h8\"}}, {\"image_id\": 133, \"category_id\": 2, \"bbox\": [359.39208984375, 390.616455078125, 58.116363525390625, 19.9151611328125], \"score\": 0.9999339580535889, \"association_id\": 6, \"segmentation\": {\"size\": [683, 782], \"counts\": \"ija72Xe02N101O000000001O0000000000001O0O10000000001O000000000000001O1O001O001O1O0010O001O011N002N1O1O002N2N3LjQe7\"}}, {\"image_id\": 133, \"category_id\": 2, \"bbox\": [311.5537109375, 431.54876708984375, 41.119781494140625, 13.085845947265625], \"score\": 0.999861478805542, \"association_id\": 5, \"segmentation\": {\"size\": [683, 782], \"counts\": \"YVa61Ye0101O001O0O2O0000010O00000O101O01O1O001OO100N200001O2N2N_QU9\"}}, {\"image_id\": 133, \"category_id\": 1, \"bbox\": [120.29212951660156, 325.7503967285156, 57.62309265136719, 71.52099609375], \"score\": 0.9961603879928589, \"association_id\": 7, \"segmentation\": {\"size\": [683, 782], \"counts\": \"QQa2b0hd04L2N3M2O2M3N2h[OnNSd0V1N2O4L1O4L0O2O1O1O0000001O0000000000O10O10O100O100O2O000O1010O1OO1N20010M2M4O1N10002N1N2O1L5L3K9@Z[OJiUc<\"}}, {\"image_id\": 133, \"category_id\": 2, \"bbox\": [436.9842224121094, 405.8433532714844, 32.240753173828125, 14.145538330078125], \"score\": 0.9949742555618286, \"association_id\": 0, \"segmentation\": {\"size\": [683, 782], \"counts\": \"_aT93Xe00O1O2N1000000O1N3O00000000001O000000000000000000000001O3M2Nf[`6\"}}, {\"image_id\": 133, \"category_id\": 1, \"bbox\": [373.271240234375, 329.0401611328125, 106.29327392578125, 82.06704711914062], \"score\": 0.9759078025817871, \"association_id\": 6, \"segmentation\": {\"size\": [683, 782], \"counts\": 
\"nii77^d0Kd[O50L1=nc0BS\\\\O`0M0mc0CT\\\\OS1ic09OQOY\\\\O:fc0Ag\\\\O8Yc0Gk\\\\O6Tc0GZ\\\\OIc0`0Rc0GR]O:mb0ET]O<lb0AV]O?jb0@W]Oa0jb0\\\\OW]Oe0gc01O2CYOQ\\\\Oh0oc0YOP\\\\Oh0oc0]Ol[Oc0Sd0_Ol[Oa0Pd0VOR\\\\OU1mc06O0O2O0010O000001OdNV\\\\OW1kc0gNV\\\\OY1nc0100O1O10O02O2M2N2O1N10O10O01O001O1O1O1O1O1O001O00000000000000O100O010O10000N2N1010O01N2O1O010O10000O2L3O1O1L4O100000^Ok\\\\OnNUc0P1P]OmNQc0R1Q]OlNob0S1U]OiNmb0e0i\\\\O]OQd0a0>K5M3L3N3M6JXh[6\"}}, {\"image_id\": 133, \"category_id\": 2, \"bbox\": [299.3256530761719, 407.8459167480469, 16.805206298828125, 7.9630126953125], \"score\": 0.9385692477226257, \"association_id\": 0, \"segmentation\": {\"size\": [683, 782], \"counts\": \"cjW62Xe02N2O00000000000000O10001O1N2OfRg9\"}}, {\"image_id\": 133, \"category_id\": 2, \"bbox\": [130.87466430664062, 399.0621032714844, 34.85655212402344, 10.903594970703125], \"score\": 0.8989092707633972, \"association_id\": 7, \"segmentation\": {\"size\": [683, 782], \"counts\": \"ahg27Se02O0000001O000000O1000000000000000000000000000000000001N3Mnim<\"}}, {\"image_id\": 133, \"category_id\": 1, \"bbox\": [322.5816345214844, 342.82977294921875, 36.416168212890625, 58.0408935546875], \"score\": 0.1778963804244995, \"association_id\": 0, \"segmentation\": {\"size\": [683, 782], \"counts\": \"big67Qe03O1ONR[OHmd0<N4M1O3L3K4H9N2O2N1O1O001O1OO101O001O2N1O5L1N3NO0e[OSOYd0P1h[OoNVd0S11VOP\\\\O8ad0O5K1N3KYhi8\"}}, {\"image_id\": 134, \"category_id\": 1, \"bbox\": [445.7167663574219, 626.2116088867188, 265.3136291503906, 268.93341064453125], \"score\": 1.0, \"association_id\": 2, \"segmentation\": {\"size\": [1033, 1198], \"counts\": 
\"[hR>>io09G5L3M3M2M3L3I8M2N3N1O2N1O1M3J6K5J6RNWNlTOZ2oj0PNcTOX2[k0\\\\1N200O1O1N2N2N2N2N2O1O1O100N2M3L4M3N2O1O1O1O1O0O2M3K5M3O1O1O1O1O1O001L3K6N1O10001O001O1M3H8L3O2M3O1O1N2N1N3M3N2N2O1N2O1O100O1O100O01000O1000000000000000000000001O00000000000000000000000000000000000000000000000000000000000000000000O1000000000000000000000000000000O10000000000O1000000O10000O10000001O0000000000001O00001O0000001O00001O1O001O1O2N001O1O0001O01O00000000000001O000O2O00001N10000O2O000O100O10000O1000O1000O1000O11O001N2O1O001O1O001O001O2N1O8Hd0UIjWO[4[j0G4L3M3M2M5L7IR1mNb0_O4K2O1N2N2N3L3M:ET_Z?\"}}, {\"image_id\": 134, \"category_id\": 2, \"bbox\": [11.567121505737305, 628.947021484375, 218.7196807861328, 101.10968017578125], \"score\": 0.9999994039535522, \"association_id\": 4, \"segmentation\": {\"size\": [1033, 1198], \"counts\": \"oh<2510N[o0Y1cPOgNdn0b1N1O100O100N2O1O1O10000O2O000O1000000O10000O01000O10000O1000O0100O010O100O010000O01000O0100000O0100O010O01O1O0O2000000O1000000O1O1O1O0N3N2O010O10O010000O10000O010O10O01O010O01000O10000O100O10O0100O010O01O001O1000O0100O10O01O001N10100O010O010O01O100O10O010000O1000000O1000000O100O10000O1000000000000O1000000O2O0O10000O10000O2O00000O2O1O0O2O001O001O0O2O001O1N101O0O2O001O1O1N101O1O1O1N2O2N2N1N3M2O2L`PQo0\"}}, {\"image_id\": 134, \"category_id\": 2, \"bbox\": [836.1307983398438, 646.609130859375, 289.56256103515625, 214.78765869140625], \"score\": 0.9999979734420776, \"association_id\": 1, \"segmentation\": {\"size\": [1033, 1198], \"counts\": 
\"ZTgj06ho0?K3L5M1N3M2N3L3N2N3M2N2O1N2O001O1O1N2O1O3M4L3M1O100O2O1N5L9F4M1N2O0O100O001O2N1O3M2N2N1O1O10O010O1000O010O100O10O10010O02N3jSOeLXk0W4O010O000001O2N3M9G1O001N010O1O2N1O3M2N3M2N1O100O1N2O1O2N2M4M2M3N2N1N2O1O2M3N2ZNYROf0jm0ROdROe0dm0jNjROP1Yn0O1O000000O001O1N2M3N2_OiNoQOX1Pn0jNnQOW1dm0hNZRO3OW1gm0hNVRO50T1im0ZORROi0nm0i0O010O1000O010000O10000O100O100O100O100000O10O10O1000000O1000O10000000O100000000000000000O10O10000000O10000O100O1O0100000O10000000O100000000O10000O10000O10000O10000000000O2O000000001O0O2O001O00001O000000001O0000001O0001O01O001O1O1O101N1O1O2O0O00101N101N3eN^QOi0fn0nNfQOl0on0L3M1O2N001O1O1N2O1N2N2N1O2N2N2Nmmd2\"}}, {\"image_id\": 134, \"category_id\": 1, \"bbox\": [835.4163818359375, 589.1477661132812, 329.36328125, 218.84259033203125], \"score\": 0.9999656677246094, \"association_id\": 1, \"segmentation\": {\"size\": [1033, 1198], \"counts\": \"]n\\\\j09oo02N3M2O0O2O1O1N3N1N2O2N1N2O1N2O1O001N2O1O4Lf0ZO3M3M2N1N2O0000001O0000000000001O000000000000000000000000001O001O1O1O1N3N1O1O1O001O1O1O2N1N4M=C4L1O2N0O2O1O1O1O2M3N3M3L4M2N1O1N2O001O1O2M3N4L3M1O2N1O001O0O2O1O2N3M3M2N1O1O1O1O001O002N2M3N3M2N2N2N2N1O00O1O100O1O100O10000O101N1O1O1O2N1O1O2N1O2N1O2O0O2O0O101O000O101O00000O2O001O001O1O1O001O1O00001O001O1O1O1O1O1N101O00001O001N2O1O1N2O1N101N10001N101O001O1O1N2O001O001O00001O1O001O1O100O001O00001O001O00001O001O1O001O1N101O1O001O1O001O0O2O1O001N101O0O101O0O101N101N101N2O2N1O1N101O001O1O002N1O1N3N1O1O1O1O001O010O1O100O2N10YROPNSm0Q2hROUNWm0_2O100USOSMXl0e3KO010O100O1O1O100O001O001\\\\N[SOBfl0:^SOEcl07aSOH`l06bSOI_l05dSOI]l05eSOK[l04fSOK[l03hSOKXl05hSOKYl03iSOLXl03iSOLWl03jSOMWl02jSOMWl01kSONVl00lSOOUl0OnSOORl00PTOOQl0OQTO0Pl0OQTO0Pl0NRTO1ok0MRTO3nk0MSTO1ok0NQTO2Pl0LRTO3ok0KTTO2nk0LWTO0kk0K]TOOj^R1\"}}, {\"image_id\": 134, \"category_id\": 2, \"bbox\": [1017.3074951171875, 492.4048767089844, 174.5643310546875, 21.140533447265625], \"score\": 0.9998792409896851, \"association_id\": 3, \"segmentation\": {\"size\": [1033, 1198], \"counts\": 
\"`_YP14TP1101N101O001N2O00000000001O0000000000000000000000000000001O001N100001O01O0000000000000000001O0001O001O00000000000O10000000O100000O100000000000000000000O100001O000000000001O001O002N0O2O1O0O3N1ON2O2M200N3O0O1O100O100000000000000000000000O10000000001O00001O000000001O0000000000000000000000O1000001O00000XPOEbo0;]POFco0:]POFco0:\\\\POGdo09\\\\POGeo08ZPOIfo0=00001O0IYPOLho03ZPOKgo03ZPOMgo01ZPOOgo0OZPO1PP1OYb7\"}}, {\"image_id\": 134, \"category_id\": 1, \"bbox\": [1060.1417236328125, 284.9732360839844, 79.3544921875, 217.58285522460938], \"score\": 0.9998294115066528, \"association_id\": 3, \"segmentation\": {\"size\": [1033, 1198], \"counts\": \"^h^Q1`0go02O1O1M4M2O2N1N2N3M4K4K5I9H8G7E;D<F8G9L4K5G8M3M4J6M3O1O2N1L4L5M3L3L5M3N3M2O2N2N1O1O101O1O0O101O010O1O5Ld0\\\\O01O1N1O1O2N2ROVVOYKki0b4`VOVKbi0e4eVOXK[i0f4mVORKWi0k4V1M4K6_K^TOf1k0_NG3Ul0LoUO1Vm0O0N3N2NQTQ2\"}}, {\"image_id\": 134, \"category_id\": 1, \"bbox\": [729.4673461914062, 184.84715270996094, 101.40057373046875, 447.7584228515625], \"score\": 0.9997679591178894, \"association_id\": 0, \"segmentation\": {\"size\": [1033, 1198], \"counts\": \"VmPg0]1dn0?^O>UOj0D;G9E;ZOe0@`0G9H8L4L4M3K6L3L4J6]Oc0E;D<[Oe0D<I7K5K5K5M3M3N2N2M3K5M3N200O100N2N2N2L4N2N3M2N2M3L4M3O1N2O1O100O2M210O2N2O1N2N2N10002N4K=U\\\\OkCjb0j<2N2N2M2fNn\\\\OXEVc0b:Z]OQEhb0j:a]OoDbb0l:g]OkD`b0m:c1J5K4N2M2O3N3L5K5K5hGbYO^6cf0aH^[OFI]4_j0XO6I8I;D6J4L8H:F7I4L7H7J4J=ZOnfa;\"}}, {\"image_id\": 134, \"category_id\": 2, \"bbox\": [4.81978178024292, 464.0628356933594, 242.15957641601562, 278.9602966308594], \"score\": 0.995854914188385, \"association_id\": 0, \"segmentation\": {\"size\": [1033, 1198], \"counts\": 
\"Qh85;4So0g0I:E5G8J5O100O1O2O0O101N101O0O101O0O1O001O1O1O1O10O010O1O1O1O1N20O01O1O10OO2O1O001O010O0100000O100000000O010O00001O001O100O010O10000O10000O00100N101O1O001O1000O10O1000000O010O01O00O2O001O010O1000O10O1000O1000O01O001N100001O00100O010000O01000O1O1O0O2N2O1N101N2000O2O000O100O1O2O0O100O10000O10000O10001O0000001N1000000000000000001O000O2O001O001O0O2O1O1O001N101O001N101O1O1O2N2N1O1O1O0O2O1N2N3N2M2O1N2O1N2N2N3LaPQo0\"}}, {\"image_id\": 134, \"category_id\": 2, \"bbox\": [442.7663879394531, 777.189453125, 241.16567993164062, 110.2010498046875], \"score\": 0.9887874722480774, \"association_id\": 2, \"segmentation\": {\"size\": [1033, 1198], \"counts\": \"_gQ>1TP1?A4O00N11K6L3J7O1N2O0000L4N1N2M401O0L6M3M:AWdof0\"}}, {\"image_id\": 134, \"category_id\": 1, \"bbox\": [0.0, 356.9320983886719, 293.88983154296875, 343.8246154785156], \"score\": 0.8799530267715454, \"association_id\": 4, \"segmentation\": {\"size\": [1033, 1198], \"counts\": \"Sc1[1gn0h0\\\\O7J5K4L2N3L3L4L4N3M2O1O1O1O1OO100000O1001O00001O1O001O1O1N2O1O1O00100O100O100O1O100O1O100O10O01O100O10O10O100O01O1O1O1O1N2O100O1O100O1O1O1O001O1N2O001O100O001O1N2O1N1O2O1O1O001O000O100O2O0O20O01O100O1O100O1O1O1O1O1O0010O001O010O001O1O001O1O010O010O010O01O001O001O00010O000001O000001O1O001N2N1O2N2O0O2N2N2M3L3M4K5K5G9J6TNaJiXOd5Vg0aJbXOc5^g0aJ\\\\WOa6ch0c000O2O000O1000001O000O100000000000000000000000001O00000000001O010O2N2N4L4TXOoGXg0e8L3M4M0000O102N6J3L3N1O2ZMZXOdLgg0X3_XOdLdg0V3aXOiL`g0R3eXOmL]g0n2fXOSM\\\\g0g2gXOYM\\\\g0a2fXO`M]g0[2eXOeM]g0V2fXOjM\\\\g0XOPXOi0e00^g0PORXOm0b02_g0lNRXOP1`05ag0eNSXOR1?8bg0_NTXOV1;;eg0VNXXO[15?gg0hM^XOg1La0[i0^OfVOa0[i0]OfVOc0[i0[OgVOd0Yi0\\\\OgVOd0Zi0ZOhVOe0Yi0ZOgVOf0Yi0ZOgVOg0Xi0XOjVOf0Wi0ZOjVOe0Wi0YOkVOf0Ui0YOmVOf0Si0YOoVOf0Ri0YOoVOf0Qi0YOQWOf0oh0XOSWOh0nh0VOTWOi0lh0WOTWOi0mh0UOUWOj0kh0UOWWOi0jh0WOXWOg0ih0XOXWOf0ih0YOZWOe0gh0ZO\\\\WOb0eh0]O^WO`0ch0@_WO>bh0A`WO<ah0CaWO;`h0EbWO8`h0FcWO6_h0JeWOO^h00gWOI\\\\h06l3O1N3M2O1O1O1O2O1N1O3M2NRdSm0\"}}, {\"image_id\": 134, \"category_id\": 1, 
\"bbox\": [1183.119140625, 424.9111328125, 14.880859375, 66.62539672851562], \"score\": 0.2960563898086548, \"association_id\": 0, \"segmentation\": {\"size\": [1033, 1198], \"counts\": \"VkZU13ko0?B_OePOj0Wo05O1O1O100002M3J8ZO^POOnb2\"}}, {\"image_id\": 134, \"category_id\": 1, \"bbox\": [113.79430389404297, 271.9477233886719, 143.81317138671875, 309.1982116699219], \"score\": 0.061087075620889664, \"association_id\": 0, \"segmentation\": {\"size\": [1033, 1198], \"counts\": \"d_c3T1Ro06L3N1O1O1O1O1ON2O1N2O0O3O0O1O101N001M2O101O01O001O1N1011M2N3N1N20M4L5I7G8J6L3K5L4M3M3N4H7]Nc1G9K5K4K5A?F:L4L4E;\\\\Oe0I6M4M1N2M3N2O1O2N101O0010O001N2N2N2N2nNeWOTJ]h0h5jWOSJXh0l5kWOiI^h0W6cWOfI_h0Y6i01O2N3M3M4M<C101O00O101N2N2UJSWOV4oh0eKYWOW4gh0eK`WOY4fh0WKkWOb4Yj0F5K2N1N3L6K5K3L2O3<EN2M0O1O2M3M3N1N2N7IX1gN4M1O2M2N4K4N2M2N2O1N101N2N101N2N2N2N2N2N2M3N2N3MSiem0\"}}, {\"image_id\": 135, \"category_id\": 1, \"bbox\": [391.1319274902344, 208.44888305664062, 322.3011169433594, 281.0777282714844], \"score\": 0.9999998807907104, \"association_id\": 1, \"segmentation\": {\"size\": [544, 800], \"counts\": \"beV8>W`0c0F5K4M3L3^AaN`=a1]BbNb=`1YBdNe=_1UBfNi=_1nAhNP>U2N2O2M2O1N2nFTMn3m2nK\\\\Ml3e2QL^Mm3d2PL^MP4c2mK_MR4c2kK`MT4a2iKaMV4b2eKaMZ4h2ZK\\\\Me4Q3lJSMR5P3iJSMV5o2eJUM[5l2[J]Md5e2ZJ\\\\Mf5e2XJ\\\\Mh5e2VJ\\\\Mj5d2UJ]Mk5d2RJ^Mn5d2nI]MS6h2cI]M]6[3gGhMZ8_50O100O1O1O1O1O1N2N2N2YOg0K5H7L5M3N3M2L5E;L4I7E;N2M3N101N1000001O01O0001O01O01O0010O1001OO5LU1_EkIl7b6\\\\GiIc8[700O010O000001O00001O00001O001O000O100O1O1TLZJYLj5`3o3O1O1O1O1O2O0O2N1O1N3N1N2O2O0O1O10001O0000000000000O1001O010O1O2N1O3M3M2N2N2N1O1O1O2N2N5K:F=ZDgKk9b4jEbKT:f4bE]K]:g4]E\\\\Kb:b5O1N2O1O001N_NTFQLk93RFE6R12UOe92[FVOd0U1@D`9NbH_O]Nc0P9M]J4a5M_J3a5L`J4_5LcJ3]5MdJ2[5OgJNZ51hJNX52jJKV56mJFT5:nJDR5;PKBQ5?QK^OP5b0QK[OQ5e0PKYOQ5g0oJXOR5h0oJUOS5k0a50000000000O100000000000000000000000O100000O100000000000000000001N2N1O2M4Jko]1\"}}, {\"image_id\": 135, \"category_id\": 1, \"bbox\": [59.46066665649414, 95.33409881591797, 314.2319030761719, 
368.3032531738281], \"score\": 0.9999992847442627, \"association_id\": 0, \"segmentation\": {\"size\": [544, 800], \"counts\": \"bjY13f`09I7L4M3M2N3M3M2N101O1N101O001O001O00001O0001O0001O01O00000010O00001O010O1O001_LcN]G^1`8nNVGR1h8TOUGl0i8WOUGj0i8ZOTGf0j8_OSGa0l8DQG<k8JRG6l80PG1m86oFIo8;oFEP9=PGCn8`0PG@o8a0RG_Ol8c0SG]Ol8e0SG[Ol8g0TGYOj8i0UGWOj8k0UGVOh8n0VGSOh8o0XGPOg8S1WGnNg8U1WGlNg8V1YGjNe8X1ZGiNe8X1ZGhNe8Y1[GhNc8Z1\\\\GgNc8Y1\\\\GiNa8Z1^GfNa8\\\\1^GeN_8]1`GeN]8^1bGbN]8a1aG`N]8b1aG_N^8c1aG^N]8d1bG\\\\N^8e1`G]N^8e1aG[N_8f1_G\\\\N_8f1_G[N`8h1]GZNa8h1]GZN`8j1^GVNa8n1[GTNb8P2[GRNc8P2[GQNd8R2XGQNg8P2WGRNg8P2VGRNi8P2SGSNl8P2mFVNP9n4O1M4M2O1O1O2N1O1O1O1O1N2N2N2N2O1O1O1O1O1O1N2N2M3M3N2N2N2O1O1O1O1O1O1O1O1O1O1O1N2O1O100O1O100O00100O100O10000O1O100O1O1O1O001O10001N10000O1000000O1000000000O10000000000O10000O100O100O1N2O1N2N2O1N2N2J6J5K6K5K5K5L4M3N2O010O01O1O1N101N2O001O1O10O01N101N1O101O001O00100O1O1O1O1N2O1O1O_MYHkKf7T4^HjKb7U4aHhK`7V4dHfK]7Y4gHaK]7\\\\4iH^KZ7_4mH[KU7b4RIXKP7d4WIWKj6h4[ITKf6i4^IUKc6i4aITK`6k4bISK_6l4cIRK^6l4eIRK\\\\6m4gIoJ[6o4hInJZ6P5iImJY6P5kIiJ[6U5hIeJ]6Y5f2O1O1O1O1N2O1N2L4K5L4M3M3N2N2N2N2N2^NXCiNk<l0iChNZ<T1k1M3M3M3N2N2O1O1O2N1N2O1N2N3M2O1N2O2N2M3M>^OeXS7\"}}, {\"image_id\": 135, \"category_id\": 2, \"bbox\": [40.92845153808594, 412.9146423339844, 605.844970703125, 97.77423095703125], \"score\": 0.9999969005584717, \"association_id\": 1, \"segmentation\": {\"size\": [544, 800], \"counts\": 
\"\\\\gf095H?9n>2_@H`0:o>4k@2S?0d@^O3c0X?6e@LZ?n0N100O101O0O1000000O100000001O0O1000000000000O2O000000000000POPA0P?0RANn>2SAMm>2TANm>1TANl>2UAMk>3VAMi>3WAMi>2XANh>2YAMg>3YAMg>3YAMg>3YAMg>3YAMg>3YAMg>3ZALf>3[AMe>3[AMe>3[AMf>2ZANf>2ZANf>2ZANg>0ZA1e>O[A1f>NZA2f>M[A3f>L[A3f>K[A5f>JZA7f>G[A9g>DZA<g>BZA>h>[O]Af0^?00001O01O000001O0O1000001O0000001N10001O00001O001O001N1010O01O001O0010O01O011O000O010O00001O00001O01O000000001O0000001O001O1O1O1O0000001O0000000000001N100000000O10001N100O1O2N1O101O0O100O1000000O010O1O100O10OO2N200O1000O100000O010000O101N10000O10000O101N100M3O10001N1000000O1000000O2O000O100O101N100O10001O0O100000001N10000O2O0O100O2O0O2N100O1O2O0O10000O101O0O1000000O100O2O000O100O101N100O10000O100O2O00000O0100000O10000000000O10000000000000O010000000000000O10O10000000O100000000000000000000O10O1000000000000000O1000000000000O1000O10000000000O1000000O10000O1000000O100000001N0100000000000000000000000O10000000000000O10000000000000O10000000O10000000000O10000000000000000O10000000000000000000000O2O0000000000000000000O1000000000000O1TOSAEm>9WAEj>9WAGi>9WAGi>8XAHh>8XAHh>8YAGg>8ZAGg>9YAGg>9YAGg>9YAGg>8ZAHf>8[AGe>9[AGf>8ZAHf>7[AIe>7\\\\AHd>8\\\\AHd>8\\\\AHd>8]AGc>9]AFe>8\\\\AHd>8\\\\AHd>8]AGc>9]AGc>9]AGc>9]AGc>9]AGc>9^AFc>9]AGc>9]AFd>9]AGc>9]AGc>9]AGc>9^AFb>:_AEb>9_AFb>9`AF`>:`AFa>8aAG`>7aAI`>5bAJ_>4bAKa>3_AMc>0_AOi?0001O0O10SSa4\"}}, {\"image_id\": 135, \"category_id\": 2, \"bbox\": [20.519283294677734, 432.26959228515625, 284.0537109375, 79.38302612304688], \"score\": 0.9686620831489563, \"association_id\": 0, \"segmentation\": {\"size\": [544, 800], \"counts\": 
\"bb;Z1e?3N1O1O0O10000000001O0000000000000000O100000000UOc@3]?Mf@0Z?Oh@0X?0h@0Y?Nh@2X?Mj@2V?Mk@3U?Lm@3S?Mm@3S?Ln@4R?KPA4P?LPA4P?KQA5o>KQA5o>KQA5o>JRA7m>ISA7m>IRA8n>HRA8n>HRA8n>HRA8n>GSA9m>GSA9m>GTA8l>HTA9k>GUA9k>GUA9k>FVA:j>CYA=g>CYA>g>AYA?g>@ZA`0f>@ZA`0f>@ZA`0e>A[A?e>A[A?e>A[A?e>A[A?e>A[A?e>A[A?e>A[A?e>A[A?e>A[A?e>A[A`0d>@\\\\A`0d>AZA`0f>@ZA`0f>@ZA`0f>@ZA`0f>@ZA`0f>@ZAa0e>_O[Aa0e>_O[Aa0e>_O[Aa0e>_OZAb0f>_OYAa0g>_OYAa0g>_OYAa0g>@XA`0h>@XA`0h>@WAb0h>_OWAa0i>_OWAa0i>_OWAa0i>@VA`0j>@VA`0j>@VA`0j>@VA`0j>AUA`0j>@VA`0j>@VA`0j>@VA`0j>AVA>j>BVA>j>BVA?j>AUA?l>@TA`0m>_OTA`0m>_OSAa0n>_OQAa0P?^OQAa0P?^OPAb0P?^OPAb0P?^OPAc0P?]Oo@c0Q?]Oo@c0R?\\\\On@e0Q?[Oo@f0P?ZOPAh0o>XOPAi0o>WOQAj0n>VORAj0n>VORAj0o>UOQAl0n>TORAm0m>SORAo0n>PORAQ1m>oNRAT1m>kNQAX1o>gNPA[1o>eNPA]1o>cNl@1O]1U?aNl@d1X?10O001O10O00001O01O001O0000001O000000001O00001O001O00001O001O001O001N101O0F[@XOg?c0^@\\\\Ob?d0^@[Od?c0^@\\\\Ob?d0^@[Od?d0\\\\@[Of?d0<O2N2N1O1O1O0001O0000100O1O00001O00000O010O100O10000000O100O10000O101N100O1O101N100O101N1O1O1000N10100O1O1O010O10000O101N1O2O0O100O2O000000000O100000000O100000000O100000000O1000001O00001O0O101N1N200O1O1O1O1O1O2O000O20M5PO\\\\@<[`0HUbW8\"}}, {\"image_id\": 136, \"category_id\": 1, \"bbox\": [613.4199829101562, 557.905029296875, 256.6279296875, 222.2769775390625], \"score\": 1.0, \"association_id\": 3, \"segmentation\": {\"size\": [1200, 1600], \"counts\": 
\"[[af0i0]T1?I4L3L4I8G9G8I7L3N3M3N1O1O1O1O1N2O1O2N1O100O1O1O2YoNlLon0U3oPORMkn0o2RQOWMjn0j2UQOZMgn0h2WQOZMgn0g2XQO[Mgn0f2XQOZMgn0g2XQOZMgn0h2WQOYMgn0j2WQOXMgn0i2WQOYMhn0i2UQOYMjn0h2UQOYMjn0i2SQOYMmn0g2RQO[Mln0f2SQO[Mmn0f2PQO\\\\Mon0f2oPOZMRo0g2jPO\\\\MUo0`4O1O100O100O100000000O10000000O1000000O010O1O1M3O1L4I6O2N2M3O0O2O1O0O2N1L5H7L5M3N101O1N2O1N2K5K5O1O1O1O001O1N2N101O1O1O1O001O1O1O1N101O1O10O01O010O10O10O10O0100O1O1O1O1O10000O10000000000000001O0000001O00001O001O01N10001O1N2O1O1O001N101O1N3N3M`0@4L3L2O001O000O100000000O00O2O0000001O010O1000000O10000O001003M4L5K5Kd0\\\\O7I3M2N3M000000000000000000O1O1O1G9L4M3N2O1O2N1O1N2N2M4L3N2O1O2N1O2N2L4K5I7K5M3N2N2N2M3L4J7F9L4M3N2N3M2N3K8Hflfj0\"}}, {\"image_id\": 136, \"category_id\": 1, \"bbox\": [1086.7593994140625, 461.21685791015625, 168.0262451171875, 432.2303466796875], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [1200, 1600], \"counts\": \"PhSX1c0lT12N2N2N2N2N10001N10000O1O1O1N2N2M3O1UTOjN[c0W1_\\\\OQO^c0P1X\\\\O\\\\Oec0e0n[OIPd08f[O3Wd0OfZOU1Xe0lNeZOY1Xe0hNfZOZ1We0iNeZO[1Ye0gNbZO_1[e0cNYZOi1ee0YNUZOn1he0TNUZOo1je0RNSZOR2ke0oMQZOV2me0jMkYO`2Sf0aMSYOZ3kf0gLQYO^3lf0dLQYO`3mf0aLQYOb3lf0aLPYOb3of0_LlXOg3Rg0ZLjXOj3Ug0WLiXOk3Wg0ULfXOo3Xg0SLeXOo3[g0RLaXOQ4_g0PL[XOU4eg0lKVXOX4ig0jK^WOkMiN_6ii0hKYWOo4gh0TKSWOP5lh0UKkVOo4Ti0YKUVOW5ki0W3O2O0O1N2O0O2N2N2O100O010O100O00nMaVOTH^i0g7kVOVHUi0]7\\\\WO_Hch0W7kWOfHTh0Y7QXOcHog0a7oWO_Hog0j7hWOXHWh0Y8VWOjGih0a8iVOcGUi0]:O2N1O1N2N2O1N2O101O1N101O001O001O010O01O1O3M2O0O2N1O2N2N3M4L3M3L5K5K4L3M2M3M3M3L4cMjUO[IZj0V6dVOWIai0c6hVOTI^i0f6kVOnH]i0k6h2E;WOi0A?J5K6G8G9M4M2O2N101O1O1N3N2N2N2N2N1O1[MknNm0VQ1oNWoNg0jP1UO_oNe0cP1VOdoNe0`P1UOeoNi0eP1gNdoNV1]R1N2M4L4L3L5K3M5J;Cojd<\"}}, {\"image_id\": 136, \"category_id\": 1, \"bbox\": [199.26876831054688, 581.5131225585938, 378.7746276855469, 219.06402587890625], \"score\": 1.0, \"association_id\": 2, \"segmentation\": {\"size\": [1200, 1600], \"counts\": 
\"dlY7>iT1a0E7J3N2N2O0O100O1O1O1N2N2M2O2O1O1O1O10O01O01O0000001O00001O00100O01000O010O01O010O0TnN_Nko0`1RPOiNho0X1WPOlNfo0S1ZPOQObo0o0^PORObo0m0^POUOao0k0^POVOao0k0_POUO`o0k0`POVO`o0i0_POZO_o0f0`PO\\\\O^o0e0`PO^O\\\\o0d0UPOMfo04WPO3eo0NYPO7bo0K^PO8[o0MePO5Wo0MiPO6To0KmPO5Ro0KnPO6Qo0KnPO5Ro0KoPO5on0MPQO4on0MPQO3on0NQQO3ln00SQO0ln02SQOOkn02TQONmn03QQONon02PQONRo02mPONSo02nPOMSo03lPOD^o0<aPOBao0`0^PO\\\\Ofo0e0XPOZOjo0h0SPOWOno0k0PPOSOSP1T1doNiN`P1P34N101O0010O010O100000O01000000O10000000001N101O1O2M2O7I8G5L2M3M2O1O1O0O2O001O001O0O10QNPQO_Mon0U2^QOjMbn0T2`QOlM`n0S2bQOmM]n0Q2eQOoM[n0P2gQOoMYn0P2iQOnMXn0P2kQOoMUn0n1nQOQNSn0k1SRORNnm0j1[ROQNem0k1gROlMZm0Q2PSOgMQm0W2VSObMll0\\\\2YSO^Mjl0b2R3O1O010001O0000001O0000001O00001O00001O01O0001O0000001O00000O2O00001O1O001O1O001O00001O000000000000000000000000000000000000000000000000000000001N10001O001O1O1N2O1O001O001O00000O100000001O0000000001O000000001O000O1000000000001O0000000001O0koNhLRn0X3kQOlLTn0T3jQOPMTn0P3kQORMTn0n2kQOTMTn0l2kQOVMUn0j2jQOWMUn0i2jQOXMVn0i2hQOYMWn0i2eQOYM\\\\n0h2`QO[M_n0g2\\\\QO\\\\Men0R3ePOVMZo0`41O010O1O01O01N100O2O0O1O1dMYPOnNio0l0bPOnN^o0o0^QOWNcn0f1cQOVN_n0g1fQOUN[n0j1gQOUNYn0k1hQOSNZn0k1hQOTNXn0l1iQORNYn0l1iQORNYn0m1iQOPNYn0o1lQOkMVn0S2RROeMom0[2d2O1O1N101O1N101O010O1O001O100O1O1O1O010O001O010O100003M2N6J2N1N100O1O102M3N2N1O202M4L0O1O0O100O100O10O0100010O000O010O10O0001O001O1O3M3M3N3L2N1O1O1O1O1N2N2O1kMnlNi1fS1Dd0WOmn\\\\U1\"}}, {\"image_id\": 136, \"category_id\": 2, \"bbox\": [971.1786499023438, 724.4884643554688, 186.88226318359375, 143.16888427734375], \"score\": 0.9999986886978149, \"association_id\": 1, \"segmentation\": {\"size\": [1200, 1600], \"counts\": 
\"UTfS14ZU15H7L5L2N2N101N100O2N101O000O2O0000001O00000000001O0000001O001O1O001O001O001O1O1O001O001O001O010O1O010O01O010O0010O00100O01O2O0O1O2OO010O100O5K100O100O101N2O0O1O100O1O1O1O2N2OO010O1000O2O0O1O100O1O100O010O10O0100O1O00100O100O100O010O6K2M2VmN^MbR1i20O01O10O0100O01O010O0000001O0kN\\\\mN^OdR1a0amN[O`R1b0cmN]O]R1b0fmN\\\\O[R1b0hmN\\\\OYR1`0kmN_OUR1>omNoN\\\\OOPS1n0hmNnNeR1g0STda0\"}}, {\"image_id\": 136, \"category_id\": 2, \"bbox\": [277.48760986328125, 709.0021362304688, 248.8106689453125, 87.61346435546875], \"score\": 0.9999972581863403, \"association_id\": 2, \"segmentation\": {\"size\": [1200, 1600], \"counts\": \"^kX:4ZU14N2M10000000001O0000000001O0000001N10001O1J^a30f^L3N10LPch0>o[WOFSlNb0gS1BUlNa0hS1BUlN?kS1BRlNa0lS1a01O1000O01000000O10000O1O1O1O1O01000O100O1O010O00100O10O0100O1O1O010O10O10O0100O100O01000O2O0000000O101O00001O1N2O1O1O001O001O000000001O000000000001O0000003M1O0O101O1O1O1N2O1OO01M2000ON31O001O10O0100O1O001O100O10O1000001O00000000000O101M2O2L3N3L3N3N2M3K5LPSe02olZO8H6K3N2M101O000000001O000O010O10O0100O010000O100O2O0O1O1O1O101N100O2N1O101N1O2O1N2MUR[W1\"}}, {\"image_id\": 136, \"category_id\": 2, \"bbox\": [649.629638671875, 691.603759765625, 165.9891357421875, 86.9031982421875], \"score\": 0.9969887137413025, \"association_id\": 3, \"segmentation\": {\"size\": [1200, 1600], \"counts\": \"bhgh02\\\\U13O00001O000001O0O1O101Nmj53kTJ9K4K4M3H7M3O1O1O10O0100O10O01O0100O010O010O001O0010O010O0100O1000O01000000O01000O010O100000O10O100000000000000O01000000000000000000O1000000O1000001O00000000O01O100O1001O001N2\\\\OhkNNYT10ikNOWT11ikNOXT10hkN0XT11fkNO\\\\T1b02N001N100N3N1N2O2O0O1O101N1000000O2O00000O2O0O2O1O0KUbil0\"}}, {\"image_id\": 137, \"category_id\": 1, \"bbox\": [359.1248474121094, 474.97442626953125, 119.43197631835938, 126.94818115234375], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [799, 547], \"counts\": 
\"Sik86ch0<Db0mNk0J;F6I3L3N200O1O1N1L30G;L3M2L5K5O100N2N20100000dYOTNee0l1XZO[Nde0f1ZZO]Nce0d1[ZO`Nce0d1WZOaNfe0a2K2M2O1O0O2OO100M2I8N2K5K5N2O1O1O1N3N1O1N2N2O1O2N1N2O1000001O01N3N8H4L1O001O1O002O0O3M5K1N2O0O100000O01000O1O1O1O01O0000O101O1]Oc0N2N2O1K5L4O1O1N2J6M3O1M3I7O1O1O1N2O2M3N9_Ogbd1\"}}, {\"image_id\": 137, \"category_id\": 2, \"bbox\": [0.6827170848846436, 567.9701538085938, 335.0064392089844, 169.28570556640625], \"score\": 0.9999998807907104, \"association_id\": 2, \"segmentation\": {\"size\": [799, 547], \"counts\": \"P^1c1Zg04M3M2N2N2O0O100O100O100O101N100O10000O01000O100000000O1000000O10O01O100O010O1000O10O01000O1O010O1O100O01000O100O100O001O100O1000O01000O01O010O1O010O100O10O10O100O100O1O10O010000O10O100O010O010O0100O0100000O0100O100O00100O10000O0100O10O01O100O0100O010O10000O1000O10O100000O100O010O0100O00100O1000O01000000O10000O01000O100O010O01O10O10O10O01000O10O0100O1O0O2N2O010O10O010000O100O10O01O1O100O10000O100O100O1N2O1O100O100O100O1O1O1N2N2O100O100O01000O100O1O1M3N101O100O10O10O01O010O01O001O1O010O0010O01O0O2O001O00100O1000O0100O1O001O10O0100O011O0O100O1O2N1O101N10001N2O1O1N2N3M2M3N3M5Kh10XN1O2O0O1O1O1O100O10000O100001O0O2OZYh5\"}}, {\"image_id\": 137, \"category_id\": 2, \"bbox\": [272.4627685546875, 586.534423828125, 176.36666870117188, 76.47119140625], \"score\": 0.9999998807907104, \"association_id\": 1, \"segmentation\": {\"size\": [799, 547], \"counts\": \"^el65ih02O1M3N2O0O100O2N1O2N1O101N1O1O2N1O100O1O1O2O0O1O100O1N20O0100O1000O01000O10O010O100O010000O1O010O10O010O0001O010O10O01O10O10O010O1O010O010O1O010O1O10O100O01000O01000O00100O100O0100O100O0100000O10O10O10000O010000000O100O01000O1000000O10000O010O1O10O01000O0010O0010000O00100O10O10O100O100O100O1O0M4O1O100O2O0O10001N100O2M4LYi_2\"}}, {\"image_id\": 137, \"category_id\": 1, \"bbox\": [271.5890808105469, 263.26190185546875, 145.61904907226562, 317.7529296875], \"score\": 0.9999997615814209, \"association_id\": 2, \"segmentation\": {\"size\": [799, 547], \"counts\": 
\"mTn68ah07J6\\\\IHVDb0c;JoC>n;IkC;Q<JlC7P<0jC3P<6iCNT<9eCJY<;bCG\\\\<<bCE\\\\<?`CC]<c0\\\\CAb<f0TC_Oj<e0QC^On<e0nB]OP=g0lB[OQ=m0gBVOW=Q1bBQO\\\\=V1]BlNb=W1ZBkNe=X1VBkNj=X1QBjNn=Z1lAiNS>[1gAhNX>[1cAgN\\\\>\\\\1bAeN\\\\>_1aAbN]>b1`A_N_>e1]A\\\\Nb>g1[AZNe>h1XAYNg>j1UAWNl>l1k@YNV?l4d@UH[?i7n@PHS?o7PAoGo>o7SARHl>l7WATHh>k7ZAVHc>j7_AVH`>j7aAVH^>i7eAUH\\\\>i7gAVHX>g7mAWHT>h7a1O20O0_@XHm=f7TB[Hl=d7UB]Hi=b7XB_Hh=_7[B`Hd=`7n1O0010N1O2O1O001000001O00001O0O10001O001O1O1O1e@VH_=l7[BYHd=g7ZB[Hf=f7VB]Hj=d7iAiHV>Q5_AgKODb>a4gAhKCJf>[4mAjKYOMj>U4TBmKnN0n>P4\\\\B_Md=_2eB[MZ=d2kBXMU=g2nBVMT=i2mBUMT=k2nBQMT=n2nB_Ld=a3]BTLm=^Oo_Oa3U2hLU>DP@[3k1oLY>CR@Y3f1RM\\\\>@R@\\\\3b1TMa>ZOQ@_3_1VMf?h2[@XMf?e2\\\\@XMj?b2Y@SMW`0f2l_OUM\\\\`0S1f]O^OQ2]O^`0k0n]O_Og1Db`0a0S^OAa1Mje0O101O0O3M^Sf3\"}}, {\"image_id\": 137, \"category_id\": 2, \"bbox\": [17.828289031982422, 313.8961181640625, 255.85618591308594, 18.72210693359375], \"score\": 0.9998739957809448, \"association_id\": 0, \"segmentation\": {\"size\": [799, 547], \"counts\": \"a[>8fh0100O1000000000000000000000000000000000000000000000O10O1000000000000000O100000000000O10000000000000000000000000000000000000000000000000000O1000000000000000000000000000000000000O100000000000O10000000000000000000000000000000000O1000000000000000000000000000O100000000000000000O1000O1000000000O10000000000O10000000000000000000000O01000000000000000000000000000000001O00001O0JYWO4ih000000001O000000000000O100N2N20000000000000000000000000000000O100000000000000O10000000000000000O10000000000000000000000O10000000000000001Nd_e6\"}}, {\"image_id\": 138, \"category_id\": 1, \"bbox\": [1436.8759765625, 167.0560302734375, 159.1396484375, 246.01693725585938], \"score\": 1.0, \"association_id\": 3, \"segmentation\": {\"size\": [1067, 1600], \"counts\": 
\"iW[_1:mP17TQOH]l0=kRO_OQOa0lm0<aRO<Zm0o1K8H;E5L2N2M5L4L3M3M2O2N1O001O001O1O1O2N2N1O00001O0000001O1O1O1O1O1O1O1O00001ZUObJ]i0^5cVOcJ\\\\i0^5cVObJ]i0_5bVObJ]i0^5bVOcJ^i0_5`VObJ_i0_5`VObJ_i0`5]VOdJai0]5]VOfJai0\\\\5\\\\VOfJci0[5[VOfJei0\\\\5WVOgJhi0Z5WVOfJii0\\\\5UVOdJli0\\\\5SVOeJli0\\\\5SVOdJmi0\\\\5RVOeJoi0[5PVOeJQj0[5mUOfJTj0Z5hUOiJ[j0R62N2N1O4L:Fa0_O5K2N1O1N2O2M2O1N2O00100O0010O001O1O1O1O1O1O2N1O1O1O001O00001N10001O000000000O000010O1O1O1N3M2N2N2M3M2M4L3O2O2M2N3M2N2N201O01O00001OO2N2M2N3K5J6]Oi0eNRo3\"}}, {\"image_id\": 138, \"category_id\": 1, \"bbox\": [148.9115447998047, 328.4837341308594, 202.59754943847656, 459.2179870605469], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [1067, 1600], \"counts\": \"]UU55jP1f0A<D;Ca0Bb0@7I6K4L3L4K7H8I6J6K5L4K5J6J9H8H8I4K5L4K4K6G:G8eMjIWYO]6cf0nIQYOX6jf0RJiXOV6Rg0UJ_XOS6[g0R2_Oa0I7M3M3N3L4M5b[OTESb0R;b]O^ERb0h:c]ObEXb0e:Y]OhEcb0b<N2O000O2O000O2O00000000000000000000000000000000000000000000000001O00001O001O2N2N3M3M2N2N2M3N6J`0@:F6J3L3N3M4M2M2N2N2N001O001O1O001O1O1O001N2O2N1O1OO10000O010O1O10O000O1kMlZORHVe0i7^[OeGed0X8e[O^G]d0a8k[OWGWd0h8U20O101O01O001O3L3N2M3M3N3M2N3M4M2M7J7I7H5L3M3K5J6H8H8I7J:H:Eb0\\\\O>B?A?Cj0VO7F9G:C>[Ol0ZO`hYY1\"}}, {\"image_id\": 138, \"category_id\": 2, \"bbox\": [475.92620849609375, 899.131591796875, 669.5831909179688, 157.792236328125], \"score\": 1.0, \"association_id\": 6, \"segmentation\": {\"size\": [1067, 1600], \"counts\": 
\"UnX`07RQ14M2O1N2O1O1N101O001N10001O00001O01O01O00001O0000001O000000001O000000000000001O0000000O100000000000000O101O0000000O100000000O1000000O10000O101O000O10000O10000O10000O1000001N10000O100O101N1O1O1O1N2O1O2N100O1O100O10000O2O0O10000O2O0O1O101N1O2N1O101N1O100O2O0O1000001O0O100000001N1000001O0O10001O000O10001O0O1000000O100000001N10000000000O10000000000O10000000000O1000000000000O1000000O1000000O1000000O100000000O1000000000O10O10000000O1000O10O1000000O10O1000O1000000O1000O010000O100O1O100O10O010000O1000000O10O100000O10O01000O010000O010O10000O01000O1000O10O100000000O10O100000O100000J\\\\MeQOc2[n0700O1FTMUROk2jm0WMVROi2hm0ZMVROg2im0[MVROd2km0\\\\MUROd2jm0^MTROc2lm0]MTROc2lm0]MTROc2km0^MUROb2km0_MSROb2mm0^MSROa2mm0`MSRO`2mm0?O100O1O1000000O010000000000000O100001N100000001N100_MSROd1Sn0TNQROi1Qn0TNTROi1lm0VNWROh1im0VN\\\\ROf1fm0XN]ROf1cm0YNbROb1_m0]NcROb1]m0^NdROa1]m0]NeROa1\\\\m0_NdROa1\\\\m0^NfROa1Zm0_NgRO`1Ym0`NgRO_1[m0`NfRO_1Zm0`NhRO_1Xm0aNiRO^1Wm0aNkRO]1Wm0bNjRO]1Vm0bNkRO^1bn0N2O0O100O1O2O0O1O1O101N100O101O0O10001OO010O10O01O1O001O1O0O2C=E:O2H8O00100O010O10O0100O10O01000O01000O010O100O010O1000O01000000O1000O10000000O10O1000O10000O1O10O10O100O1000000O1000O10O100000O0100O010O010O00010O1O0100O01000O1O010O100O100O010O1000O01000O100O10000O0100000O100000000O1000000O1000000O10000O10O100000O10000000000O10000000000O100000000O100000000O10000000000O1000000000001O000O100000000000000O1000000000000O10000000000O2O000000000O10000000001O000O101O00001O001O000O10001O000O10000O101N100O100O10001N10000O101O001N101N2O001N2O0O101N101N2N1O2M3N3L4KjjX?\"}}, {\"image_id\": 138, \"category_id\": 1, \"bbox\": [1035.920166015625, 238.8645782470703, 207.7674560546875, 410.20257568359375], \"score\": 1.0, \"association_id\": 5, \"segmentation\": {\"size\": [1067, 1600], \"counts\": 
\"]VgR1_1go09H5L5L3L4mROlMhj0[2RUOnM`j0]2ZUOjMZj0a2^UOhM]j0]2]UOgM`j0^2ZUOfMcj0^2XUOeMfj0_2UUOdMhj0c2iTOhMSk0Z4M3N3N1O1O1N2N2M3M3O1N2O1`LjIj[OW6Rd0nIl[OR6oc0VJm[Ol5oc0YJn[Oi5Qd0YJk[Oi5Td0ZJj[Og5Ud0ZJi[Og5Xd0ZJf[Of5[d0[Jb[Of5_d0ZJ_[Og5bd0ZJZ[Oh5fd0ZJV[Oh5kd0YJR[Oi5nd0XJoZOi5Re0YJkZOi5Te0ZJgZOh5Ye0[JaZOi5^e0[JUZOn5ke0WJZYO`6ef0T2O1O00000000000000002N1O2N1O001O1O00000000000000001O001O001O001O1O1O2N2N1O1O100000O101O1O1O001O1N3M5K9H5J2N2N3L5L6I6K5J3N2M3N1N3N3L3N3L3N1O1N3N2M3cNbWOiI`h0n5kWOmIYh0n5lWOnIVh0o5oWOmISh0o5YXOgIig0U6i1K4M4N1N3N2N2N3M3M4K4K4N2N1O1O100O2O0O100N2O1N2L4N2O1N3N2N3M3L5K4K5L3N1N3M3L4I7J6K6K5K7H9E`0WOjki;\"}}, {\"image_id\": 138, \"category_id\": 1, \"bbox\": [1247.318359375, 22.89158058166504, 142.0716552734375, 188.29356384277344], \"score\": 1.0, \"association_id\": 2, \"segmentation\": {\"size\": [1067, 1600], \"counts\": \"YSSY1b0bP1=G5M2M4K6J6K4N1M3M3M3M4M2M3K5J6M3M3M4K4H8J6L4L4`SOYLWk0n3XTOcL\\\\k0k4M2O2N1N2O2N1O1000000000000000000000000000000000000000000000000000000000000001O001O1O1O2N2Nh0XO1O1O1O3M1O1N2O2N1O2M2N2O1N3M2N2N2N1O2O0O2N2N2O1N2N101N101N2SO[RORNgm0i1_ROSNcm0j1aROSN`m0k1dRORN^m0l1eROPN]m0n1R1N1N3N2N3N1N2O001N2O2N2N1N3M5K4L4L3L8Hggk6\"}}, {\"image_id\": 138, \"category_id\": 2, \"bbox\": [936.6337280273438, 587.7769165039062, 235.89093017578125, 112.75225830078125], \"score\": 1.0, \"association_id\": 5, \"segmentation\": {\"size\": [1067, 1600], \"counts\": \"ScQo03VQ13N2O1O001O001N2O001O1O0000001O001O001O1O01O01O00001O010O1O00010O1O100O1O10O01O1O1joN]Obo0Y1N0O1O0010O01O1O2N2N1O1O001gQOVNol0k1PSOUNPm0l1oROUNPm0l1oROTNQm0n1lROSNTm0P2iROPNWm0Q2gROPNYm0Q2dROQN\\\\m0P2cROQN\\\\m0o1cRORN]m0o1bROQN^m0Q2_ROPNam0S2[ROnMem0l200000O11O00001O0O10001O00O100O1000000O10000000O10001O000000000000O100000000000000000000001O000000O2O00000000001O000000000000000O2O0000000O101O0O10000O100O10000O1O2O0N2N2N2N2N2O2N1O1O1M4M2O100O100O2O0O1N2N200O100O100O100O1O100O10000O1O1O1O1O100O100O2M2O2M3M2O2O0O2O0O2M2N3N1O2O0O101N2N2M3M5K4LXaQ>\"}}, {\"image_id\": 138, 
\"category_id\": 2, \"bbox\": [22.925029754638672, 669.2191772460938, 230.2233428955078, 125.5089111328125], \"score\": 0.9999998807907104, \"association_id\": 1, \"segmentation\": {\"size\": [1067, 1600], \"counts\": \"Wno03PQ1:M3M1O2N10000O101O0O1000000O10000O10000O100O10000O100O1000001N101O001N101O01O01O100O00100O00001O100O1O1O00100O0010O010O01O010O1O001O1O1O001O001O001O1O1O001O001O001N2O1O001O00001O001O1O1O0O2O00001N2O1O001N101O001O1N2O1O001N101O1O1N2O1O1O0O2O001O1O1O001O001O000000001O00000001O01O00001O010O0001O0000010O001O001O1N101O0O2O0O2N2M3L6K4D<I6L4M3N2K6I9CQn]]1\"}}, {\"image_id\": 138, \"category_id\": 1, \"bbox\": [278.330078125, 237.35299682617188, 432.9208984375, 406.4624328613281], \"score\": 0.9999998807907104, \"association_id\": 4, \"segmentation\": {\"size\": [1067, 1600], \"counts\": \"o^_97PQ16K5L3N3N1O2N1N2O1O1O0O0100000000O1000O10O100000000001O1O001UTOQOXg0P1eXOTOYg0m0eXOWOXg0j0eXO\\\\OWg0d0gXOBTg0`0gWOjNRNj0Tj0<hWOROlMf0Zj09gWOVOjMf0\\\\j05gWOZOdMi0cj0MhWOU1Vh0lNgWOh1gg0XNWXOk1ig0TNWXOl1ig0TNWXOl1ig0SNXXOAlMT1li0[OXXO@oMS1ji0[OXXOAQNR1gi0]OYXO^OTNS1ci0_OYXO\\\\OWNT1ai0^OYXO]OXNT1_i0_OZXOZOZNV1]i0^OZVOoNX1:TOW1[i0@XVOQO\\\\12TOZ1[i0AVVOUOf2W1kL[O\\\\i07SWOYO>0k07SN:42]i02SWO^O87l0G_N=M6^i00RWO@3<o0]OgN?G:]i0LTWOB1;h1KgM=\\\\i0HVWOE08k1LcMa0Zi0EZWOD16k1NaMd0Yi0B[WOF13n1M^Mk0Wi0]O^WOF13P2G_MU1Pi0ZOaWOF22`2P1iLlNch0:TXOF22^2d1\\\\e0bNUXOF42\\\\2g1Ze0`NbYOGT1k1Ze0]NW]Of1gb0YNZ]Oh1fb0VNZ]Ol1eb0TN[]On1db0PN]]OQ2bb0nM_]OT2`b0kM_]OW2jh02M3N3L4M3L4L2O1N2O1N2N2YOnLSSOS3hl0TMTSOn2gl0XMVSOi2gl0\\\\MVSOg2gl0[MXSOf2fl0\\\\MXSOf2gl0\\\\MWSOe2hl0Q1N1N101N2O1O1N101N2O0O2O0O1O101N100O2O0O101eLiJ_ZOX5ae0hJTXO0]1X5^f0jJSXO5X1S5cf0iJTXO<R1k4if0jJTXO=Q1k4if0jJUXO;b0GmNT5\\\\h0jJUXO;a0\\\\5Yg0ZJUXO;b0\\\\5Xg0YJVXO;a0]5Yg0YJTXO:c0_5Xg0WJUXO9d0`5Wg0WJUXO9c0a5Xg0VJUXO8c0d5Wg0UJTXO8e0c5Wg0UJTXO8d0d5Xg0TJTXO8d0d5Xg0TJTXO7d0g5Wg0RJUXO7c0h5Xg0QJTXO8d0g5Wg0SJTXO6d0h5Yg0PJTXO8b0j5Yg0nIUXO9`0j5\\\\g0kIUXO<=j5_g0iITXO=;l5bg0eITXO`04P6ig0^ITXOg0IP6Th0WITXOQ8ch000000000000000000
0000O1000000000000000000000000O1000000O10000O1]OkGPXOT8Qh0mGnWOS8Rh0oGkWOR8Th0PHkWOP8Uh0QHiWOP8Wh0RHgWOn7Xh0UHfWOk7Zh0VHdWOk7\\\\h0VHcWOj7]h0VHcWOj7]h0VHcWOj7]h0VHcWOj7]h0VHcWOj7]h0WHbWOi7^h0WHbWOi7^h0WHbWOi7^h0WHbWOi7_h0VHaWOj7_h0VHaWOj7_h0WH`WOi7`h0WH_WOj7ah0VH_WOk7`h0UH`WOk7`h0`0O10001O0O10001O0O10001N100O101O01O1O1O010O1O1O1O100O2N100O1O01O1N101O1O001O0O2O0PNZG[[Og8dd0`GU[O`8jd0dGS[O]8ld0eGR[O[8nd0fGQ[O[8md0gGR[OY8nd0eGT[O[8ld0cGW[O\\\\8jd0`GZ[O`8ed0^G][Ob8cd0[G`[Oe8`d0YGc[Of8]d0YGd[Oh8[d0VGh[Oi8Xd0VGi[Oj8Wd0UGk[Oj8Ud0UGl[Ol8Td0SGm[Ol8Sd0SGn[Om8Rd0SGn[On8Qd0QGQ\\\\On8oc0SGP\\\\Om8oc0UGP\\\\Ol8oc0UGP\\\\Ok8Pd0VGP\\\\Oi8oc0XGQ\\\\Oh8oc0YGP\\\\Oh8nc0ZGQ\\\\Of8oc0ZGR\\\\Of8mc0[GR\\\\Oe8nc0\\\\GQ\\\\Od8nc0]GR\\\\Od8mc0\\\\GS\\\\Od8mc0]GS\\\\Oc8lc0]GT\\\\Oc8kc0_GT\\\\Ob8kc0^GU\\\\Ob8kc0]GW\\\\Oc8ic0\\\\GW\\\\Oe8hc0ZGZ\\\\Of8ec0ZG\\\\\\\\Of8cc0YG^\\\\Oh8ac0XG`\\\\Oh8_c0WGb\\\\Oj8^c0UGc\\\\Ok8\\\\c0TGe\\\\Om8Zc0SGf\\\\On8Yc0QGi\\\\Oo8Vc0QGj\\\\OP9Uc0oFm\\\\OQ9Sc0nFm\\\\OS9Rc0mFo\\\\Ok5dMUM\\\\e0oLQ]Oj5fMVMYe0PMR]Oh5gMWMWe0QMR]Of5jMYMSe0QMT]Oc5lM[MQe0PMU]Ob5mM]Mnd0QMU]O`5QN^Mid0RMW]O]5SN`Mfd0SMX]OZ5UNbMcd0SMZ]OX5WNdM^d0TM\\\\]OU5YNfM[d0UM^]OR5ZNgMYd0VM^]OQ5[NhMWd0WM`]On4[NkMTd0VMc]Om4[NlMRd0WMe]Oj4ZNoMQd0WMf]Oh4[NPNRg0n1PYOQNPg0n1RYORNnf0l1SYOTNnf0i1UYOVNlf0h1VYOWNkf0g1WYOXNkf0;aWOVNf1^1kf05hWOXN`1a1kf00mWOZNZ1f1jf0KTXOYNU1j1jf0ITXOZNT1l1jf0HTXOYNT1n1if0GVXOXNR1Q2if0FWXOVNR1S2gf0FYXOUNR1T2ff0FXXOWNQ1S2gf0GWXOVNS1R2ff0IUXOVNU1Q2gf0IRXOXNV1o1hf0JQXOWNX1n1gf0MnWOWNZ1l1if0MkWOXN]1j1hf00iWOVN_1j1hf01gWOWNa1g1hf03fWOVNb1g1if04bWOWNd1e1jf05`WOWNg1c1jf06^WOXNg1a1lf08[WOXNj1_1kf0:ZWOXNj1^1lf0:ZWOXNk1]1kf09^WOWNh1`1jf07bWOUNg1b1gf08fWOSNe1d1ef07kWOPNb1i1cf06nWOnMa1k1`f06lZOHcc0TNlYOR2d2I_c0XNjYOo1i2H[c0]NiYOk1m2GZc0`NhYOh1P3GVc0eNgYOd1U3FSc0hNfYOb1X3EQc0lNeYO_1\\\\3Dnb0oNdYO]1_3Cmb0ROcYOZ1b3Cjb0TOcYOZ1c3Ajb0WObYOX1d3@jb0YOaYOY1e3\\\\Okb0\\\\O_YOX1f3[Okb0_O]YOW1h3XOlb0DYYOU1l3UOkb0IWYOR1n3TOkb0MTYOP1R4POkb03oXOo0V4mNkb07lXOm0Z4jNjb0<hXOm0]4fNkb0?eXOm0a4aNkb0V3U]OiLjb0Y3W]OdLjb0]3W]
O_Ljb0c3X]OXLkb0g3\\\\]OPLfb0Q4Y5010O0010O010O01O010O10O011N2N101L4\\\\L[SO^2hl0^M^SO^2dl0^MaSO^2cl0]MbSO_2al0]MdSO`2em0M3N2N2N2N2O0O3M2N2N2N2N2N2M3N2N2N2N3M4M4K3M5K4K5K5K6JfQPm0\"}}, {\"image_id\": 138, \"category_id\": 1, \"bbox\": [691.5281982421875, 433.5757751464844, 422.0103759765625, 556.1751708984375], \"score\": 0.9999997615814209, \"association_id\": 6, \"segmentation\": {\"size\": [1067, 1600], \"counts\": \"QVkg04VQ13L4L4L4L4M3L3M4M2Q^OoN`3U1XDoNl1Oj9T1RDVOP2Il9S1lC]OT2Cn9R1jC@U2_OQ:R1gCCV2[OR:S1eCHU2VOV:S1aCMT2ROZ:S1]C3T2kN_:S1WC>P2bNh:Q1jBT1R2nMT;o0\\\\Bi1U2YM`;o0RBV2W2lLf;o0mA`2W2bLl;o0jAe2U2]LQ<o0hAg2T2[LS<o0gAi2U2XLT<P1eAk2T2VLW<o0dAm2T2ULW<o0cAn2T2TLX<P1bAn2U2RLY<Q1`Ao2V2PLZ<R1]AR3V2mK]<R1ZAU3W2iK_<T1VAW3X2gKa<S1SA\\\\3Y2aKd<U1d@j3d2RKh<V1Y@T4l2fJk<W1V@W4l2cJm<Y1S@W4o2`Jn<Z1P@Z4o2^JP=Y1o_O[4P3\\\\JQ=Z1m_O\\\\4P3[JS=Z1k_O]4Q3YJT=[1i_O^4Q3XJV=\\\\1e_O_4S3VJX=\\\\1c_O`4S3TJ[=^1^_Ob4T3QJ^=`1Y_Ob4W3oI`=a9]B_Fd=d9XB]Fh=e9TB]Fl=e9QB[FP>f9nA[FR>g9kAZFU>g9iAYFX>h9fAYFZ>h9dAYF\\\\>h9bAXF_>i9^AXFc>i9[AWFf>j9XAVFi>l9SAUFn>m9n@TFS?R:d@PF]?W:U@oEl?_:Y_OmEh`0Z=1O1O1O1O1O1O1O2N1N3N1M4L3M4K5L3M4M3M3N2M4M2O1N2O1N2N2O1N1O2O1N2O1N2O0O2N2O101N1O4L5L5J8H8I6I4L3M`\\\\OaDVa0^;^^OQE`a0m:\\\\^O[Eaa0d:[^OdEca0[:Y^OjEfa0V:U^OoEla0Q:n]OTFTb0j9f]O\\\\F\\\\b0c9]]ObFib0Z9P]OkFWc0b;6I3M2O2M1O2O0O1O1O1N2O2M4M4K5K6J6I4M3M2N2N2N2M3N2M3M3M4L3L6I7I8H7I7A>nNR1M3L4M2N3L3N3L5K5J6J7J5K5L3L3N2N3M2O0O2O1N3M2O2M3M3M2N3M2M3N2N1O2N2N20O0101N1O2O1N2O1N100O100O100O1O1N2O1O1O2M3N2N2M3N1O2M2O001O1N2O1O2M2O2N2N3L2O2N1O1N2O1O1O1O1O1O1O2N2N2N1O2N1O1O1O1O001O1N101O001O1O1O0O3N1O1O2N1O1O2M110O1O010O001O010O10O01O010O010O1O1O1O2N2N2N1O2N1O1N2O1O001O0100O101N101N100O10O1000000O100O1O001O1O001O1O00001O001O00001O000000001O000000001O0000001O001O1O1O1O1N3N2N1O2N1O2M2O1O1O1N101N3M2O1N3M8H9GVUX`0\"}}, {\"image_id\": 138, \"category_id\": 2, \"bbox\": [1350.363037109375, 390.85546875, 245.124267578125, 57.6929931640625], \"score\": 0.9999996423721313, \"association_id\": 3, \"segmentation\": {\"size\": [1067, 
1600], \"counts\": \"n\\\\V\\\\12WQ13N2O0O101O0O10000O1O1O10000O10000O100O1O1O10000O100O1O1O100O1000O010000O10O01000000O10000O10O1000O10000O1000000O10O100000O2O000000000O100000001O0000000O100000001O000O10001O00001O00001N1000001O000O2O001O0000001O000000001O0000000000O01000000000000000O10000000000O100000000000O100000000000000000000000000000000000O100000O1000000000O2O000000000000000O101O000O10001O0000000O101O00001N1000000O10000O10000000000000O0100000000O1000000000000O1000001O000O10000000001O000O101O000O2O1NSj4\"}}, {\"image_id\": 138, \"category_id\": 2, \"bbox\": [1197.6788330078125, 189.98399353027344, 149.9217529296875, 46.282318115234375], \"score\": 0.999998927116394, \"association_id\": 2, \"segmentation\": {\"size\": [1067, 1600], \"counts\": \"XfTW15UQ1101N100O100O2O0O100N2J6O1O100O10O0100O10000O10O10O100000000O0100000O1000000O100000000O10000O10000O10000000000O1000000000O010000000000O1000000O100001O001O010O3L9H3M1O001O001O010O0O101O0000000000001O000000000O10000000000000000O100000000000000O10O1000000000000000O0100O1O010O01O1N2N2N11P`Y8\"}}, {\"image_id\": 138, \"category_id\": 2, \"bbox\": [319.66986083984375, 556.1968383789062, 341.25921630859375, 102.39056396484375], \"score\": 0.9999464750289917, \"association_id\": 4, \"segmentation\": {\"size\": [1067, 1600], \"counts\": 
\"[S`:9kP1JWoN9aP1FaoNa0]P1AaoN`0^P1BaoN>ZP1^OgoNk0oo0WORPOo0jo0<M200O1O2O0O100000000O10000000000000000000000000000000001O000001O000000100O1O1O100O001O0000100O001O1O1O100O1O001O01O01O001O00100O1O1O001O00000010O0000000010O01O0000000010O000000000000010O00O10001O00000O2O000000001N100000001O001O00001O0000000000001O0000000000001O00000010O0000010O0000001O01O0001O01O0001O01O0001O00001O01O01O00001O0000001O01O01O00010O0010O0000010O0001O010O010O10O0001O0010O0001O000010O0001O00010N10001O0000001O0000001O0O101N1O101N1O2N100O2N10001O00001O00001O001O001O00001O01O01O00001O00001O0000001O01O0001O0000001O0000000000000O1000000000000O100000000000000O10000000000O10000O100O100O1000000O100O2N1O2N][:0ddE0000001O000ZThn0\"}}, {\"image_id\": 138, \"category_id\": 2, \"bbox\": [1022.8829345703125, 857.0707397460938, 191.08203125, 62.343994140625], \"score\": 0.22133634984493256, \"association_id\": 0, \"segmentation\": {\"size\": [1067, 1600], \"counts\": \"VRZR11ZQ11O1N2O1O001O000O2O1goNH]o09bPOKYo07ePOKZo06ePOJ[o07dPOJZo07fPOH[o08ePOH[o09ePOF[o0:ePOFZo0<fPOCZo0=fPOB[o0>gPO@Yo0a0gPO^OYo0b0hPO]OXo0d0g00O2O00001O00001O0010O01O001O0010O00100O100O010O1O00100O1O010O00100O1O010O01O01O001O10O00010O01O1O00XoNBdP1c0OJ]oNDcP1;_oNDbP1:_oNFaP1:`oNEaP19`oNG`P19`oNG`P19aoNF`P18:0001N2O00001O01O010O000000000001O0001O01O000000010O000001O00010O0000010O000001O1O001O001O010O1OhV50XiJ001O1O0000001O00001O01O0000001Ofch<\"}}, {\"image_id\": 139, \"category_id\": 1, \"bbox\": [88.9266357421875, 193.65892028808594, 163.35455322265625, 183.0630340576172], \"score\": 1.0, \"association_id\": 3, \"segmentation\": {\"size\": [1066, 1600], \"counts\": \"mnl2=fP1d0A8I3M3L4L2O2N2N1O1N2N2O1O1O001L4K5O1O1O1N2I7N2O1O1O2M2M3N2N200O1O1O1N2O1O1O1O1O1O1N200O1O1O100O1O1O1O1O10000O100000000O2O0O1O1O1O2O0O2O1O1O001N1O2N1N2O2O0O1O2N1N2N3N1O1O1I7G9N2O2M200O100O101OO10000000000001O0000000000O100O100O1000000O100O1N2N2N2O1O1N2N3L3N2O1O2M4M3I7K4M4M3M5J8H4M2O1N3L4L3M3M3M3O2M2N2N2N2N2N2L5L3L4M3M4L5JdQk[1\"}}, {\"image_id\": 139, 
\"category_id\": 2, \"bbox\": [321.0020751953125, 452.2283630371094, 296.4354248046875, 70.55892944335938], \"score\": 1.0, \"association_id\": 2, \"segmentation\": {\"size\": [1066, 1600], \"counts\": \"Wmd:1YQ11O000O100O10000O10000O100000000O1000000000000O10000O10000O100000O0100000O01000000O10000O1O10O0100O1000000O10000O1O010O10000O10O10O100O1O010O10000O10O10O100O100O10O10O10000O100O010O100O10O100000O1000000O10000000000O100O100O100O10000O100O100O00100O10000O1O10O01O100O1O10000O01000O100O0100000O01000000000000O0100000O1000O100000O010000000O10O10000000O1000000000O100000O1000O1000O10000000000O1000000000000O100000000O1000000000000O1000000O1000000O10000000000O1000000O1000000000000O1000000O10O1000000000O1000000000000O10000O10000O1000000O10000O10000O100O100O2O0N2N2O1M3N200O100O2O1NZmoo0\"}}, {\"image_id\": 139, \"category_id\": 2, \"bbox\": [646.4703369140625, 842.248046875, 422.01416015625, 87.43817138671875], \"score\": 0.9999998807907104, \"association_id\": 1, \"segmentation\": {\"size\": [1066, 1600], \"counts\": \"`Sde02XQ11O1N10000O2O0O1000000O101O0O1000000O101O0000000O2O000000001N10000000000O101O0O10001N100O10001N100000000O10001N100O101O0O100O101O000O1000000O1000000O2O00000O100000000O2O0000000O10000O1000000000000O10001O0000000000000000000000000001O00000000001O000000001O00001O010O1O001O1O001O010O00001O0O2O1O001O2N3M2M3N3M1O1O0O10000M3O2N1ORZ8NPfG9G2N2L5M3L2M3M4N100O2O0O10001O1N2O1N100O2M3M3N1O2N101O0O2O001O0O101O0O2O001O0O101O00001O00000000000000000000000000O100000000000000000000000000000000000O1000000000000000000000O10000000O1000000000000O1000000000000000000000O10000000O10O10000000000O1000000O10000O1000000O10001O0000000O101O000O10001O0O10000000001N100O10000O101O000O1000001O0O100O1O1O100O100O2O0000000O10000O100O101N100O100000001N100000000O10001N100O10000O100000001N100O101O0O100O2O000O10001O000O2O1O1LZoXa0\"}}, {\"image_id\": 139, \"category_id\": 1, \"bbox\": [633.777099609375, 607.730224609375, 312.18707275390625, 322.89300537109375], \"score\": 
0.9999998807907104, \"association_id\": 1, \"segmentation\": {\"size\": [1066, 1600], \"counts\": \"hjdd0S1SP18I5L3N2N10001N10000O100000000O10000O10000O1O010O1O1O1O1O100O100O100O100O1000O01O1O1N2M3N2N1O2N2O1O1O001O001O1O001O001O001O1O001N2N1O2N2O1O1O00100O1O1O1O1N2N2O1O1O1O010O1O1N2O1N2N2O001O100O10O01O0O101N1000001O010O001O1O1N101N2O100O00100O1O1O1N2N2O1O1O1O100O1O1O1O1N2O1O1O1O1O100O100O1O100O1N2O1O1O1O10000O10001O0O1000000O101N1O101M3N1O2N2N2N2N2O1O1O1O1O2N2N2M3N2N1O2N1N2N3N3K6K5J3N3M2N3N1N101N1O2N1O2M2O2NmNiVOUJVi0f5UWOVJih0g5_WOUJ`h0k5cWOSJ]h0k5fWOTJYh0l5hWOTJWh0l5kWOTJRh0l5QXOSJng0m5TXORJjg0n5XXORJeg0o5^XOPJag0P6aXOoI^g0Q6cXOoI\\\\g0Q6fXOnIYg0R6hXOnIWg0S6jXOlIUg0T6mXOjISg0V6oXOhImf0]6i16D<O100O100001O01N10001O00010O01O010O1O1O1N2O1O100O1O100O100O1O1O1O1N2O1O100O1O100O1O1O1O1O1N2O1O1O1O101N1O2N2N2M3aJlTOd4[k0QKWUOb4Pk0TKVUOi4hk0M2N3N1N3M3M5K6I9G7H8J5J6K3M3M3L5K;D`0_OXbce0\"}}, {\"image_id\": 139, \"category_id\": 1, \"bbox\": [290.5438232421875, 232.71432495117188, 236.38067626953125, 290.8856506347656], \"score\": 0.9999998807907104, \"association_id\": 2, \"segmentation\": {\"size\": [1066, 1600], \"counts\": \"R]_9S1TP14L5L3N1O1O1O001O1O1O1O1N2O1N101N2N101O1O0O2O000O2N1O2O001O1O0O2N2N1O2O100O1O1O1N1O2O1N200O1O1O1N2N1O2O1O1O1O1O0O2N2N2N101O1O000O2N101N101O001O1N2O001O1O100O1O001O1N2N101O001O00000O1N2N2N201N10100O1O1N2O001N2O10O01O0010O00001O01O1N2O1N110O1O100O1O001N2N2O1O1O1O1O001N2N2N2O1O100O1N101YOg0F:L3N3M3O0N2N2M3M3O1O1O1010O0O2O0O2M201O1N2O1O1N2N2N2O1O1O1O1O1N2O1O100O1O100O1O1O1N2N2O1O100O1O1O1O1O1N2O1O1O1O1O1O2ZKTUOj2nj0SMWUOi2kj0UMYUOh2hj0WMZUOg2hj0WMZUOg2gj0WM\\\\UOg2fj0WM]UOf2fj0UM`UOg2ej0QMcUOi2dl0K3L3M3N2O1N2N3L5K5K4L3N2N3M3M4K6Ej0oNfnSS1\"}}, {\"image_id\": 139, \"category_id\": 2, \"bbox\": [119.92560577392578, 353.230224609375, 227.73257446289062, 48.16900634765625], \"score\": 0.9999998807907104, \"association_id\": 3, \"segmentation\": {\"size\": [1066, 1600], \"counts\": 
\"TZn34UQ13M2O001O1O001O1O001O0000001O001O001O01O0001O0010O01O001O010O01001N102MN3N3N2M4M2M2O1O1M^fa03_Y^O3N2O1N2O00000O101N10000O2O0000000000000O100N3K4N2N2N200O2O000O100O100O1000001O00000000000000000000000001O0000000000001O0000000000001O000O10000000000000000000001O0000000000000000001O00000O10000000001N10000000000O100000000O10000000000000000O1000000O10000O1000000O100O1O10001N100O1O1O101N100O101O0O101N10001O1N2Na[PY1\"}}, {\"image_id\": 139, \"category_id\": 2, \"bbox\": [635.87646484375, 765.5029296875, 347.21942138671875, 160.8104248046875], \"score\": 0.0617329478263855, \"association_id\": 0, \"segmentation\": {\"size\": [1066, 1600], \"counts\": \"jPbe01YQ11O001N101O1O001O1O001O000O101O00000000000O10000000000O2O000O10000O2O000O2O001O0O101O00000O1000000O100O101O0O10000O100000000O100000000O10000O101O0O100O1000000O101O000O10001N100O2O000O10001O0O10000000000O101O0000000000000000000000000000000001O00000000001O00001O001O001O001O1O001O01O01O1O001O1O1O001O1O1O1O001N1O2M2N3M9E8Ij\\\\:4ScE7I6I3O2M201N101N1O2N1O2L4L8J3M3M2N2O0O2O1N101N2O1N101N100O2O000O10000O100O10000O10000000000000000000000000000000000000O10000000000000000O10000000000000000000000O1000000000000000000000000O10O10000000000000000O10000000000O1000000O0100000000000O1000000000000000O10000000001O0O1000000O101O001N2N2N2N3JW_Qd0\"}}, {\"image_id\": 140, \"category_id\": 1, \"bbox\": [337.50128173828125, 154.30398559570312, 307.28302001953125, 342.3630676269531], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [768, 1024], \"counts\": 
\"bY^82mg03L2O2N1O2N1N2M3M4K4M3M3O2N1O1O1O2N1O1N3M3L3M4N2M3N1O2N3L3L5K6Ib1`N9G3M3N2O1N2N2N1N4J5I8J6M2N2N2N2N101N2N2M3M4L3N3M2N2N2O0O2N2N2N2M4L6K4L3M3M3N1N3N2M4M3L<D5K2N3M2N2N101N2O2M3N2N2N1N2O1O1O1O0O2O00001O1O001O000O10O0010O01O01O001O0010000O010O1000000OdNeAUI[>i6hAUIY>j6iAUIW>j6kAUIU>j6lAVIT>i6nAVIR>i6oAXIP>g6RBXIn=f6TBZIk=f6VBZIj=e6XBZIh=f6XBZIh=e6YB[Ig=e6ZBZIf=f6YB[Ig=e6YB\\\\If=d6ZB\\\\Ie=e6ZB]Ie=c6[B^Id=c6[B]Ie=c6ZB_Id=b6\\\\B_Ic=a6]B`Ia=a6_B`I`=a6^BaI`=`6`BaI^=b6_B_I`=d6^B]I`=e6^B\\\\Ia=g6\\\\B[Ic=h6YBYIf=b8N101N101O0000000000VN]BWIc=g6_BYIa=f6aBYI_=f6bBZI^=d6eB[IZ=e6gB[IY=c6jB\\\\IV=a6mB_IS=_6PC`IP=_6QCaIo<^6SC`In<^6TCbIl<]6VCbIj<]6WCcIi<\\\\6YCcIg<\\\\6ZCdIf<[6[CdIf<[6\\\\CdId<Z6_CeIa<Z6`CfI`<X6cCgI]<W6fCgI[<W6hChIX<U6lCjIT<U6nCjIR<T6QDjIP<U6RDjIn;V6RDjIn;U6SDkIm;U6SDjIn;U6SDkIm;U6RDkIo;U6QDkIo;T6RDkIo;U6QDjIP<V6PDiIR<U6nCkIS<U6mCkIS<T6nCkIS<T6nCkIS<T6oCkIQ<T6PDkIQ<T6QDkIo;T6SDjIn;U6SDkIn;S6TDkIm;T6TDlIl;S6VDkIk;U6UDkIk;T6WDjIk;U6VDiIl;U6VDiIl;U6VDiIm;T6Q3N2N2M3N1O2N1O2O1N101O1N3N2N1N2O1N2N1O2N2M3L4L4K6L3M3N2N2O1N2N2N2M3K6B>L3L4N2M3N2N2M3M4K5Jca\\\\9\"}}, {\"image_id\": 140, \"category_id\": 2, \"bbox\": [378.9508972167969, 487.5520324707031, 297.9927673339844, 208.74270629882812], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"f\\\\n8:cg04M3N2N1O2N1O1O2N1O2M2O2N1O1O1O1N2L4K5I8M2N2O1N2O1O1O1N2J7QOn0kNU1L4N2O1O1O2N1O1N3M3N1N3N2O1N1O2O0O1O2N1N3N1N2O2N101N1O2N1O2N1O2N1O1O1O2O0O10000O100O100O10000000O100000001O0000000000000000000O100000000O101O000000000O100000000O10000O1000000O100000O10O10000O100O010O1O100O01000O10O100O0100000001O00001N1000001O000O2O0O101N101O0O2O1O001O1O1O100O001O1O10OO2O001O001O000O2O001O001O0O2O1O001O0O2O00000000000O10001O00000O101N1O2N2N2N2O0O2N100O2O0O1O1O101N1O10001N101O0O2O1O0O2N101N100O20O001000O100O010O1O00010O010O010O10O1000O010O1O010O1O100O1O010O1O00100O1O2O0O100O1O010N101O1O2N1O1N3N001O1N2N3M2M5KmeS8\"}}, {\"image_id\": 141, \"category_id\": 1, \"bbox\": [1136.710693359375, 
439.48187255859375, 375.81591796875, 589.9530639648438], \"score\": 1.0, \"association_id\": 2, \"segmentation\": {\"size\": [1102, 1600], \"counts\": \"Wg^V1:nQ1<C>A>@?C?D<E9G9H6I8J4M3M3L3N3M3M2M4L4M4J5L5J5M3L3N3M2N2N2N2N3M2N3L4M3L4M3L3M4M2N2N1O2N2O1N3N1N2O2M2O2M3N1O2N1O001N2O1O1O1O001O1O001O1N101O1O1O2M2O2N2N1N2O1O1N2O1N2O1N2O2M3M3M4L4L3M4M1N3M2O1N3N2M3N3L6K5J6K4L3L3N2N1O1N2O2N1O1O2M2O2N2N2N2N1N3N1N1i[OWDha0k;T^O[Dga0g;U^OaDfa0a;U^OfDea0];V^OkDda0Y;V^OPEca0T;Y^OSEaa0o:\\\\^OUE`a0o:\\\\^OVEaa0l:[^OXEba0k:[^OXEca0i:[^OZEba0i:[^OZEca0g:[^O[Eda0f:Y^O]Eea0e:Y^O]Eda0f:Z^O]Eaa0g:]^O[E^a0j:`^OXE[a0m:d^OTEWa0Q;h^OPEUa0S;j^OoDSa0T;l^OlDSa0U;l^OlDRa0V;m^OkDRa0V;n^OjDQa0W;n^OjDQa0Y;k^OiDTa0[;h^OfDWa0Y>O2M2N2N2N2N2N2O1O1O1O100O100O1O100O1O1O1O1O1O001O1N2O1N2O1O1O100O1O100O100000000000000000001O000000000000000OmL[@_Ce?X<l@`CT?X<\\\\A`Cd>]<bA`C^>^<fA`CZ>^<jA`CV>_<lA`CT>_<oA_CQ>`<QB_Co=`<SB_Cm=_<VB`Cj=_<XB`Ch=_<ZB`Cf=^<^B`Cb=]<cBaC]=]<gBaCY=]<lB`CT=_<oB_CQ=_<SC_Cm<`<UC_Ck<`<WC_Ci<a<XC^Ch<a<ZC^Cf<b<[C]Ce<c<[C]Ce<b<]C]Cc<c<^C\\\\Cb<d<_C[Ca<e<`CZC`<f<bCXC^<h<cCWC]<h<fCUC[<k<gCSCY<m<iCQCW<o<jCPCV<P=kCoBU<P=mCoBS<Q=nCmBS<S=nClBR<T=PDiBQ<W=PDgBQ<Y=PDeBQ<[=QDbBP<^=RD_Bo;`=TD]Bm;c=_400000000O10001N100O100O100O100O1O100O1O2O0O101N100O2bEmZO_7Se0^HS[O_7nd0^HW[O^7jd0aHZ[O\\\\7gd0aH][O\\\\7ed0bH^[O\\\\7cd0aHb[O[7ad0aHd[O\\\\7`d0]Hg[O^7`d0YHi[O`7_d0kGV\\\\On7Zg0J3M3M3M2N2M3N2M2N3M3L5K4L5J6J6K5L4M2M3N2N1O2N1O1N3M2M4L3N3M2N3N1N3N2N2N3M3L4L5K5H7K5J6L4K5L3N3L3M5K5J7H9G?_OXQj3\"}}, {\"image_id\": 141, \"category_id\": 1, \"bbox\": [448.5544128417969, 56.03545379638672, 399.3804626464844, 612.2911987304688], \"score\": 0.9999998807907104, \"association_id\": 1, \"segmentation\": {\"size\": [1102, 1600], \"counts\": 
\"hgm`01XQ1_1POR1hNS1G8G9I4K4M3M3M2N3M2M5K4L5J7J:F9G8I3M3M2N3M2O1N2N2O1N2N2N3L4M2M4L4L4M2N2N2N2O1N2N101N2N2O0O2N2O1N1O2N2N2M4M2K6J6J5K6J5K5M3N2M3N2N2N2M3N3L3L5K5J6I7J6K5L3L5M2M3M3M4K5K5bZOlCZd0e=]O6F:D;G9L4M3N2M2O2M2O1N2N2M3H8SOm0D<L4M3M3N2N3N1N2N2N2M3M3M3M3L4L4N2N2O1O1O1O10000O1000000O100000000O100000000000O100000000000000000000000000O100000000O1000000O100\\\\Ng@S@Y?l?j@R@V?m?n@P@R?o?QAn_OP?Q`0TAl_Ol>R`0XAl_Oh>S`0\\\\Aj_Od>U`0_Ai_Oa>V`0cAf_O^>Y`0fAd_OZ>[`0iAc_OW>\\\\`0lAb_OT>]`0oAa_OQ>]`0RBa_Oo=_`0SB__Om=``0UB^_Ol=a`0WB]_Oi=b`0ZB[_Og=e`0ZBY_Og=f`0Q2O1O100O1O100O100O11O0001O00000000000000Y@__Ob=Y2QAU;[1eBc=S2VAW;V1gBd=o1[AX;P1kBe=j1^A[;j0mBi=d1bA];d0QCj=]1hAa;;TCn=V1mAe;2WCS>m0SBk;F[CX>a0]BQ<XO`Ch>J\\\\Bc<hNfCQb0Y<j]OkCWb0T<f]OnC[b0R<c]OoC_b0P<^]ORDcb0n;[]OTDeb0l;X]OVDib0j;U]OWDlb0j;Q]OWDPc0i;m\\\\OZDSc0g;i\\\\O[DYc0e;b\\\\O^D_c0c;\\\\\\\\O`Dgc0R=3M3N2M2N2N2N100O2N1O2N2N2N2N1O2N1O2N1O1O1O1O100N3N2N2N2N2N1O2N1O1O1O1O1O001O1O1O1O2N2N2hLP[OnHQe0g6[[OWIgd0`6b[O^I_d0^6e[OaI\\\\d0]6g[OaIZd0\\\\6i[OcIXd0[6j[OdIWd0Z6k[OeIVd0Y6l[OfITd0Y6m[OgITd0V6o[OiIRd0U6o[OkIQd0S6R\\\\OlIoc0Q6T\\\\OnImc0o5U\\\\OQJlc0k5X\\\\OTJic0h5Z\\\\OXJgc0d5]\\\\O[Jdc0b5_\\\\O]Jcc0_5_\\\\O`Jcc0^5_\\\\OaJcc0\\\\5_\\\\OcJbc0Z5`\\\\OfJbc0W5`\\\\OhJac0U5b\\\\OjJ`c0R5c\\\\OlJac0o4b\\\\OPK`c0g4i\\\\OWKZc0\\\\4Q]OcKRc0j3_]OTLeb0]3h]ObL[b0X3i]OfLZb0U3j]OjLXb0R3l]OlLVb0P3m]OnLUb0P3m]OoLTb0o2n]OoLUb0m2o]OPMSb0n2o]OPMTb0l2P^OQMSb0k2Q^OnLWb0k2Q7@f0YOd0SOooUj0\"}}, {\"image_id\": 141, \"category_id\": 2, \"bbox\": [118.52821350097656, 291.4232177734375, 550.3909912109375, 383.617919921875], \"score\": 0.9999942779541016, \"association_id\": 1, \"segmentation\": {\"size\": [1102, 1600], \"counts\": 
\"ShT42YR16K5WOGUoN=hP1HRoN<kP1HPoN;oP1EQoN;oP1FPoN:PQ1GnnN9SQ1GmnN9SQ1HlnN7UQ1a00O1O1O100N3N1N2N2M3O1O1O100N2N2N2K5K5N2O1O100000O10O[UOOob01ZYO8SO5_g0DZYO<SO5`g0@\\\\YO=RO7]g0^OaYO;QO:Ug0BiYO6oN<Vg0^OjYO8nN<Wg0\\\\OkYO8mN>Wg0TOQZO?hN>Zk0CdTO?Zk0BfTO>Zk0BeTO`0Zk0AeTO`0Yk0AgTO?Yk0BeTO`0Zk0@fTOa0Yk0_OfTOc0Xk0_OgTOb0Xk0^OgTOd0Xk0]OgTOc0Xk0^OhTOc0Wk0^OgTOd0Xk0]OgTOe0Wk0[OhTOg0Vk0[OiTOf0Vk0[OhTOg0Wk0YOiTOh0Vk0YOiTOg0Wk0ZOhTOf0Xk0[OhTOd0Xk0]OhTOb0Yk0^OgTOa0Yk0@fTO`0Zk0AfTO>Zk0CfTO<Zk0EeTO;[k0FeTO:Zk0GfTO8[k0IcTO7]k0JcTO5]k0MbTO2^k00`TO0`k01`TON`k04_TOKak06^TOJbk08]TOGdk09\\\\TOFdk0;[TOEek0;\\\\TOBfk0>[TO^Ohk0b0ZTO[Ogk0f0YTOXOhk0h0ZTOUOgk0k0ZTOROhk0n0ZTOoNgk0Q1[TOlNfk0U1\\\\TOfNfk0Z1^TOaNck0_1bTOZN`k0f1eTOTN\\\\k0m1b20000001O0001O00000O101O00000O101O0000001N10001O0O2O0N3N2M4M2M4L4K5I8GnXd0Bcg[O7J6J5K4L3N2N1O2N101N10001N10000O2O00000O2O00001N1000001O0O101O10O01O1O1O1O2N1O1O2fQOPORk0Q1dTOYO[k0h0^TO@`k0a0ZTOEek0<VTOIik08RTOMmk04oSO0Pl02mSO0Rl01lSO1Sl00kSO1Ul00iSO2Vl0OhSO3Wl0NgSO4Yl0MdSO5[l0MbSO5]l0L`SO8^l0NZSO6dl0l2N1O3M2N2N1O010O1O001O0010O010O10O10000001O01N1000000O10O0100O01O10O10O10001O01O100O001O001O00000000O010O010O00100O010O0010O0100O0010O01O010O10O010000O1000000O10O10O1O100O100O1O1O010O00100O010O100O010000001O000O01000O010O0000001O0000010O0000001O0000YVO`J`f0`5_YObJaf0^5]YOcJcf0]5\\\\YOdJdf0\\\\5[YOeJef0[5[YOeJff0Z5ZYOfJff0[5ZYOdJff0\\\\5ZYOdJgf0\\\\5YYOcJgf0]5ZYObJgf0^5YYOaJgf0_5YYO`Jif0`5S3000O101OO0100O1O010O0010O010O010O10O1000001OO3N1O1O1N101N100O101N1O2O2N1N3N7I?A^1bN7I3M00O10O10O010O010O01O100O1O101N2O1N2O1N2O0O2O00000O10O0100O010O100O100O2O001N10000O1000O100O2O0O0001O0O2O1N2N1O2O2M4M2N2M2O2N1N3N1N3N1N2N2N3M2N3M3L<Eh_PR1\"}}, {\"image_id\": 141, \"category_id\": 2, \"bbox\": [120.7617416381836, 755.61474609375, 722.4974975585938, 330.30322265625], \"score\": 0.9999929666519165, \"association_id\": 0, \"segmentation\": {\"size\": [1102, 1600], \"counts\": 
\"`U_55TR17J6L4L4L4L4K4M3M2N3M3]QOeNVNOSm0_1cTOiNQN0Wm0Y1dTODXk0=dTOJWk08`TO4[k0MaTO:[k0HcTO<Zk0DeTO`0Wk0BgTOd0Sk0]OlTOh0Pk0YOnTOj0oj0WOQUOj0nj0WOPUOk0oj0VOoTOl0Pk0UOnTOm0Qk0TOmTOn0Qk0TOmTOn0Rk0TOjTOo0Uk0ROhTOR1Vk0POfTOS1Yk0nNdTOU1[k0QO\\\\TOS1ck0m2N3N1O1O1O1O1O1N2O1O001O001O000O2O000O1000001N10000O1000001N100000000O101O000000000O2O00000000001O0000001O0000001O00001O0000001O00000000001N1000000000000000000O10000000000000001N10000000000000SO[UORJdj0V5cUOYJN`0^j0U5YVOkJgi0R5]VOnJbi0o4aVOQK_i0m4dVOSK[i0j4hVOVKXi0h4kVOXKSi0f4PWOZKPi0d4RWO]Kmh0b4UWO]Kkh0b4VWO_Kih0`4XWO`Kgh0a4ZWO_Keh0`4\\\\WOaKch0^4^WOcKah0]4_WOcKah0\\\\4aWOdK]h0]4cWOcK]h0\\\\4dWOdK\\\\h0\\\\4dWOeK[h0Z4fWOfKYh0[4hWOdKXh0\\\\4hWOeKWh0[4iWOeKWh0[4iWOeKVh0\\\\4jWOeKUh0[4kWOeKUh0\\\\4jWOdKVh0\\\\4kWOcKUh0]4kWOdKSh0]4mWOcKSh0]4mWOcKSh0^4lWObKTh0^4lWOcKSh0]4mWOcKSh0]4nWObKQh0_4oWObKPh0^4PXObKPh0^4PXObKPh0_4oWOaKQh0_4oWObKPh0^4PXObKPh0^4PXObKPh0^4QXObKmg0_4SXOaKmg0_4SXOaKmg0_4SXObKlg0_4SXOaKmg0_4SXOaKmg0_4SXOaKmg0_4RXObKng0^4RXOaKog0_4PXObKPh0^4PXObKPh0^4oWOcKPh0^4PXObKPh0^4PXObKPh0^4oWOcKQh0]4oWObKRh0^4nWObKRh0^4mWOcKSh0]4mWOcKSh0]4lWOdKTh0\\\\4lWOdKTh0\\\\4lWOdKTh0\\\\4kWOeKUh0[4kWOdKUh0]4kWOcKUh0]4jWOdKVh0\\\\4jWOdKVh0\\\\4jWOdKVh0\\\\4jWOdKVh0\\\\4iWOeKWh0[4iWOeKWh0\\\\4hWOcKYh0]4fWOdKZh0\\\\4fWOdKZh0]4eWObK\\\\h0^4cWOcK\\\\h0^4dWObK\\\\h0_4cWO`K^h0`4aWOaK_h0`4`WO_Kah0b4^WO]Kch0c4\\\\WO^Kdh0c4[WO\\\\Kfh0e4YWOZKhh0g4VWOYKkh0g4UWOYKkh0h4TWOWKmh0i4SWOVKmh0l4QWOUKoh0k4QWOTKPi0m4oVORKRi0n4nVORKRi0o4lVORKTi0n4lVOQKUi0o4kVOQKUi0P5jVOPKVi0P5iVOPKWi0Q5iVOoJWi0R5hVOnJXi0S5gVOlJZi0T5eVOmJ[i0T5dVOlJ[i0V5cVOjJ^i0W5aVOiJ_i0X5_VOiJai0X5^VOgJbi0[5\\\\VOfJdi0[5[VOdJfi0]5XVOcJii0^5UVOcJji0_5SVObJni0^5oUOdJRj0]5jUOeJWj0\\\\5fUOfJYj0[5fUOeJ[j0g6000O010O10000000000000001O0000000000001N100000001O000000001O000000001O00000000001O0000000000001O000O10000000001O000000000000000000000000000000001O00000000000000000000001O00001N10001O001O00001O001O1O001O1O2M2O2N1O2N1O1O1O001O00100O001O1O1O1O101N1O1O2N1O1O1O2O0O1O1O001O1O001O010O1O00100O1O00
100O2N100O2O0O1O10000O100O010O0001O01O0001O00001O000010O01O00001O001O001O010O1O001O00000000001O0000000000000000000000000000001O0eMiI_XOW6`g0jI`XOV6`g0jI`XOV6`g0kI_XOU6ag0kI_XOU6ag0jI`XOV6`g0jI_XOW6ag0iI_XOW6ag0iI_XOX6ki00O1000000O101N10000O100O2O000O10000`M_IRYOb6nf0_IQYOb6mf0_ISYOa6mf0_ISYOa6nf0^IQYOb6Pg0]IQYOc6of0]IQYOc6]i0O1000O0100O100O10O01nN`TOUKak0e4W1M3M3M201O1O1O1O00100O10O1000O010000O010O10O10O10O10O10O100O010O100O1O001O1O1O1O1N2O100O1O10O0100O1O100O100O100O100O1O010O10000000000O100000000O10000O100O10O01O1N2N2N2O10000O100O10000O2O0O100O1O100O1O1O2N1O100O1O1O100O100O2N10000O1000001OgQOSMQm0m2nROXMnl0g2RSO[Mml0e2SSO\\\\Mll0d2SSO_Mll0_2TSOcMkl0]2TSOfMjl0Y2WSOiMgl0V2YSOlMfl0S2ZSOoMfl0P2ZSOQNel0n1[SOTNdl0l1[SOUNfl0i1[SOXNdl0g1\\\\SO[Ncl0e1]SO[Ndl0c1\\\\SO_Ncl0`1\\\\SObNdl0]1[SOfNel0Y1ZSOiNel0V1ZSOlNgl0R1[SOmNel0Q1_SOmNbl0Q1`SOnN`l0P1cSOoN^l0o0cSORO\\\\l0l0gSOSOZl0k0hSOTOXl0k0iSOUOXl0h0jSOXOWl0f0kSOYOUl0f0lSOZOUl05iRObNS1Z1Tl01]TOOdk0N_TO1bk0L`TO4ak0UOfROHj1S1bl0gN`SOY1jn00001O01O00010O01O1M3M4L3M4L5J7G8G[Pbi0\"}}, {\"image_id\": 141, \"category_id\": 2, \"bbox\": [706.0523071289062, 588.5679321289062, 608.5349731445312, 357.3638916015625], \"score\": 0.9999701976776123, \"association_id\": 2, \"segmentation\": {\"size\": [1102, 1600], \"counts\": 
\"m^Th06SR16K4N3M2N3N1N3N1N2000001N100000000O101O0000O100O1O1O100O1O1O1O1N2O1O1O2N1O1O100O100O100000001N10001O2N2N2N1N3N1O1OSgY10iXfN5L5K4M4M2O1O1N3N1O7H8I1PPOfNjn0[1TQOhNjn0Z1RQOjNln0W1RQOmNkn0U1QQOoNmn0R1PQOSOln0P1QQOTOln0n0PQOWOmn0X2M4L5K5K3M2N1O1O1N2O001O001O1O010O1O001O2N2N2N3N1N3M1O1O1O00100O001O010O010O0100000000000000000O100000000000O01000O010O10O10O100O01000O1000O010O100O010O1O010O001O10O01O100O1O100O2N1O100O1O010O1O01O01O00010O00001O000010O01O001O10O01O1O1O0010O01O0010O01O001O010O1O001O100O1O1O1O1O010O001O0000001O0000001O000010O0001O001O00001O001O1O001O00100O1O00100O2N1O101N1O1O10O0100O00010O01O0001O01O0001O01O01O00010O00001O0010O01O001O001O1O001O001O010O00001O001O001O001O001O00001O1O001O001O001O001O00001O00001O0000001O0000001O00001O001O1O001O1O1O1O010O001O001O0000000010O0000001O000010O01O001O001O1O010O001O00100O1O00101N101N:F>jIjSOY5gl0N2O1N2O0O100O2N2O1N1O2N2N2O1N2N1O2N3M3M>B7J4K1O1O1O1O1O1O001O00100O001O1O1O1O2N1O2N2N1O2N2N1O1O001O1O001O001O001O1O010O1O1O1O2N1O2N100O1O001O1O1O010O001O00101N1O2O1N2N2O4K5L4L4L4L1O10O000O2O001NO2N2M3M3M3L4K;CRQk;\"}}, {\"image_id\": 141, \"category_id\": 2, \"bbox\": [50.06702423095703, 278.5232238769531, 869.1302490234375, 823.476806640625], \"score\": 0.7044169306755066, \"association_id\": 0, \"segmentation\": {\"size\": [1102, 1600], \"counts\": 
\"PVm56TR17I7J5K6J6K4ooNjNdn0[1QQOPOjn0T1mPOVOnn0o0iPOYOTo0j0ePO^OWo0o1M3M3mEdLPE^3n:eLnD^3P;cLoD_3o:cLoD_3o:cLoD_3o:dLnD_3o:eLmD^3P;iLiDZ3T;mLdDX3X;iLgDZ3V;fLjD^3R;bLnDb3n:_LQEe3k:[LUEi3g:XLXEk3e:VLZEm3c:SL]EP4`:QL^ES4_:mKaEU4]:lKbEV4\\\\:kKcEX4Y:jKfEX4X:hKhEZ4V:gKiE[4U:fKjE\\\\4T:eKkE]4S:dKkE^4T:cKkE^4T:cKkE_4S:bKlE_4S:bKlE_4S:bKlE_4T:_KmEb4T:XKoEj4X:kJkEU5\\\\:aJfEa5a:SJcEm5^c02O001N101O0O2O001O0O2O001O0O101O001O0O101O00001O000O2O001O1O0O2O1O1O1N102N1O1N2O001O1N2O1O0O2O001O1N101N10001N101N100O1O2N1O1O100O2N1O100O2N100O1O2O0O100O2O0O100O2O000O2O0O10001O0O1000000O100fAiG]2W8`MYHS2g7kMeHj1\\\\7TNhHj1X7UNkHi1V7TNnHj1R7UNQIh1P7XNRIf1n6YNUIe1k6ZNXId1h6\\\\N[I`1f6_N]I_1c6aN_I]1a6bNcI[1]6eNfIW1[6iNgIU1Y6kNiIS1W6lNlIR1T6nNnIo0T6POnIn0R6ROoIm0Q6SOQJk0o5TOSJj0n5WORJh0n5XOTJe0m5\\\\OSJc0l5_OUJ>l5CTJ<l5EUJ8l5HVJ5k5LVJ2i50XJMi54XJIi57YJFh5;ZJBf5?[J^Oe5d0]JXOd5i0^JSOc5m0`JoNa5R1aJjN`5W1bJfN]5\\\\1eJ`N\\\\5a1fJ[N[5e1hJXNX5i1hJUNY5l1hJRNW5P2iJnMX5S2hJkMY5U2hJjMX5W2hJgMY5Z2gJeMY5[2hJdMW5^2iJ`MX5a2hJ^MX5b2hJ]MY5d2gJ[MX5g2hJXMX5h2hJXMX5i2hJVMX5k2gJUMX5m2hJRMX5n2iJQMW5P3iJoLV5S3jJlLV5U3iJkLW5U3jJjLU5X3kJgLU5Z3lJdLT5\\\\3mJcLS5^3mJaLR5a3nJ^LR5b3PK\\\\LP5e3QKYLn4i3RKVLn4j3SKULm4l3SKSLl4n3UKQLk4P4UKoKk4R4UKmKk4S4VKlKi4V4WKiKi4X4VKhKj4X4WKgKh4[4XKdKh4\\\\4XKdKh4]4XKbKg4_4YKaKg4`4YK_Kf4b4ZK^Kf4c4ZK\\\\Ke4f4[KYKe4g4[KYKd4i4\\\\KVKd4j4]KUKb4m4]KSKb4o4^KPKb4P5_KoJ`4S5_KmJ`4U5`KjJ`4V5aKiJ^4Y5bKfJ]4[5dKdJ[4^5eKaJ[4_5fK`JY4b5gK]JX4e5hKZJX4f5jKXJU4j5kKUJU4k5lKTJS4n5mKQJS4o5nKPJQ4R6oKmIQ4S6PLlIo3V6PLjIP4V6QLiIn3Y6RLfIn3Z6SLfIl3Z6TLfIl3Z6ULeIj3]6VLcIi3]6WLcIi3]6XLcIg3]6ZLbIe3`6[L`Id3`6]L`Ib3`6^L`Ib3`6_L`I`3`6aL`I]3b6cL]I]3c6eL\\\\IZ3d6gL\\\\IX3d6iL\\\\IU3e6lL[IS3f6nLXIR3h6oLXIP3h6RMWIm2i6TMWIj2j6XMUIg2k6[MTId2m6]MQIc2o6^MQI`2P7bMnH^2R7cMnH\\\\2R7eMnHZ2R7gMmHY2T7hMkHV2V7kMiHU2W7lMiHS2X7mMgHS2Y7nMfHR2Z7oMeHQ2\\\\7oMcHQ2^7oMbHP2^7QNaHo1`7RN^Hn1b7SN]Hm1d7TN[Hk1e7VNZHk1f7VNXHj1i7WNUHi1k7YNTHf1m7[NQHe1P8[NoGe1R8\\\\NlGd1U8]NjGb1W8_NgGa1Z8_NeGa1\\\\8`NcG_1_8aN_G_1b8aN]G_1d8bNZG^1g8bNYG]1h8dNVG\\\\1l8cNSG
]1n8dNQG[1P9eNoF[1R9fNlFZ1U9fNkFY1V9hNhFX1Z9gNeFY1]9fNcFZ1^9eNaF[1a9dN_F[1c9dN\\\\F\\\\1g9cNXF\\\\1n9_NRF`1U:ZNjEf1]:TNcEk1f:mMYES2k:jMUEU2P;hMoDW2V;eMiD[2\\\\;aMdD^2a;^M_Da2g;ZMXDf2k;XMUDg2n;XMQDg2R<WMnCh2T<WMlCh2W<VMhCj2Z<UMfCj2[<VMeCi2]<WMbCh2`<WM_Ci2b<WM^Ch2c<XM]Cg2e<XMZCh2g<XMYCh2g<XMXCh2i<XMWCg2j<YMUCg2l<YMTCf2m<ZMRCg2o<XMQCg2P=YMoBh2Q=YMnBf2T=YMkBg2V=ZMiBf2X=YMgBg2Z=ZMeBf2\\\\=ZMbBf2_=[M_Bf2b=ZM]Be2d=\\\\MZBe2g=\\\\MWBc2k=]MSBd2n=\\\\MQBd2o=]MoAc2S>]MkAd2V>[MjAe2W>ZMhAf2Z>ZMeAf2[>ZMdAg2\\\\>YMdAg2]>WMcAj2]>VMbAj2_>VMaAj2_>VM`Ak2`>TMaAl2_>TM`Al2a>SM_An2a>QM_Ao2b>PM_AP3a>oL_AQ3b>nL^AS3b>lL^AT3c>lL\\\\AT3e>kL\\\\AU3e>iL[AW3f>hLZAX3h>fLXA[3h>dLYA[3i>cLWA]3k>aLUA`3l>^LTAb3n>\\\\LRAd3P?ZLQAf3P?XLPAh3R?VLn@j3T?TLl@m3T?RLm@m3U?QLk@o3W?oKi@R4W?mKi@S4X?lKi@S4Y?kKg@V4Y?iKg@W4Z?iKe@W4]?gKd@Y4\\\\?fKd@Z4]?eKc@[4^?dKc@[4^?dKb@]4_?`Kb@`4`?^K`@c4b?ZK_@e4c?YK]@g4f?VKZ@k4g?RK[@m4h?PKX@P5j?nJV@S5l?jJT@V5n?gJT@Y5m?eJS@[5P`0bJP@_5Q`0_Jo_Oa5S`0]Jn_Oc5R`0\\\\Jn_Oe5S`0XJn_Oh5S`0WJm_Oj5S`0UJm_Ok5T`0TJm_Ol5S`0SJm_On5S`0QJm_OP6S`0nIn_OR6R`0nIn_OS6R`0lIo_OT6Q`0kIo_OV6P`0jIP@V6Q`0iIo_OX6P`0gIQ@Y6P`0fIQ@Z6n?fIR@Z6o?dIQ@]6P`0aIQ@_6P`0_IP@c6P`0[IQ@e6P`0YIP@h6Q`0UIQ@k6P`0SIP@n6Q`0oHQ@Q7P`0mHP@T7Q`0iHQ@W7P`0fHQ@[7P`0bHQ@_7^e01O1N2O1O1O1O2N1O1O1O1O1O1O1O1O1O1O100N2O1O001N101N2M2O2N1O2N100O01VN`VOeJai0Y5hVO`JYi0\\\\5PWO^JQi0_5VWO[Jlh0b5[WOXJgh0d5aWOVJah0g5eWOTJ]h0l5fWOnI^h0P6P20001O00001O010O1O100O10O0100O100O100O100O1O10O01O100O1O100O1O1O00100O1O1O1O1O3M2O1N1O2N2O0O2N1O2O1N1O2O0O2N2O0O2O0O2N101N101N101N101N100O2O001O001O00001O0010O01O001O00001O001O0010O01O001O1O001O001O1O100O1O1O101N1O1O2O0O2N101N1O2O0O2O1N2O0O101N100O10000O1000UIjM]]OV2`b0lMa]OT2]b0mMd]OS2Yb0PNf]OP2Xb0RNi]Om1Ub0UNk]Ol1Rb0VNn]Oj1Pb0YNo]Oh1kLkLZb0_1l@f1hLnLXb0^1PAe1fLoLWb0^1SAd1cLQMWb0^1VAb1`LSMVb0]1ZAc1[LSMXb0\\\\1]Ac1XLSMXb0\\\\1`Ad1SLSMZb0\\\\1cAc1oKSM]b0Z1dAg1iKRMab0X1fAk1bKPMfb0W1hAm1[KoLlb0T1iAS2SKlLRc0R1kAX2kJiLYc0o0mA\\\\2cJhL^c0n0nA_2]JfLcc0l0PBd2TJcLlc0i0QBT4m=mKSBT4l=mKSBT4k=mKVBR4j=nKVBS4h=nKXBS4g=nKYBQ4g=oKYBR4e=oK
[BR4d=nK]BQ4c=PL]BP4b=QL]BP4b=PL_Bo3b=QL^Bo3a=QL`Bn3`=SL`Bm3_=SLdBk3\\\\=ULfBh3Z=XLhBg3W=ZLkBc3V=\\\\LmBa3S=`LPC]3P=bLSC[3m<fLUCW3k<iLYCS3h<mLZCQ3e<oL\\\\CP3e<PM\\\\Cn2d<RM]Cm2d<SM\\\\Cl2d<UM]Cj2c<UM^Cj2c<VM]Ci2c<WM]Ci2d<WM\\\\Ci2c<WM^Ch2c<XM]Cg2c<YM]Cg2d<YM\\\\Cf2d<ZM\\\\Cg2d<YM\\\\Cf2d<ZM\\\\Cf2e<YM\\\\Cf2e<YM[Cg2e<YM\\\\Cf2e<YM[Cg2e<ZM[Ce2f<ZMZCf2g<YMZCf2f<ZMZCf2g<YMZCf2f<ZM[Ce2f<[MYCe2g<[MZCd2g<[MZCd2f<\\\\MZCd2g<[MZCd2g<\\\\MYCd2f<\\\\M[Cc2f<\\\\M[Cc2e<]M\\\\Cb2e<^M[Ca2e<_M]C_2d<`M^C^2b<bM`C\\\\2a<dM`CZ2`<fMbCX2_<gMbCX2^<iMcCU2^<jMcCU2]<kMdCT2]<kMdCT2\\\\<kMhCQ2[<jMkCS2V<iMPDS2R<hMUDT2n;fMYDV2i;eM^DW2e;bMcDZ2P<oLXDh2me0F;C=_Na1@aTof0\"}}, {\"image_id\": 141, \"category_id\": 2, \"bbox\": [31.527084350585938, 662.95166015625, 1195.2506103515625, 422.3507080078125], \"score\": 0.6112897396087646, \"association_id\": 0, \"segmentation\": {\"size\": [1102, 1600], \"counts\": \"[hf55SR1;E;enN[OXP1e1SPOZNkm0LWROT2JTNmm00jQOV24mMQn0i2kQO[MRn0h2iQO\\\\MVn0f2fQO]MXn0_3N1O1N2O1O1O2M2O1O2M2O2N1N3N1O2M2O1O1N2O1O1N2O1O1N2O1N2O1O1N2O0O2O0O2O0O101N101O0O10001O0O101O0O101O00001N101O000O2O001O1N101O1O0O2O1O1N101O1O0O2O1O001N101O001N101O0O101O000O2O000O2O000O2O001N101O1N101O1N101O1N2O001N2O1O0O2O1O1N101O1N101O0O2O00001N10001N10001N1000000O2O000O10000O10001N1000000O10000O2O00000O1000000O101O000O1000000O101O00000O100000000O100000000O10O100000O1000000O100000O01000000O1000000O1000O10O100000000O1000O1000O100000O100000000O1000000O10000O1000000O10000O10000O100O1O100O1O100O1O1O100O1O1O1O1O1O100O10001N100000001O000O101O0000001O000O2O00001O00001O00001N10001O0000001O00000O101O0000001O00001O001O00001O001N101O1O001O1O1O1O001O1O1O001O1O001N2O001O001O001O1O001O001O001O001O010O0010O01O0010O01O010O010O010O1O0100O010O10O010O01O0010O01O010O000010O01O00010O000000001O000000001O0001O01O0000001O00001O001O00010O00001O00001O00001O01O01O00001O0000001O000O10001O0000001O000000001N1000001O000000001O0000001O00000O100000001O0000000000000O1000000O10000O10000O10000O1O100O1O100O1O1O100O1O100O1O100O100O1O100O100O
100O10000000aNfUOjJZj0V5gUOiJYj0V5iUOiJXj0U5jUOjJVj0U5mUOiJSj0W5nUOgJSj0X5oUOgJQj0X5RVOfJoi0X5SVOgJmi0X5VVOfJji0Y5XVOfJhi0Y5[VOeJei0Z5]VOdJei0Z5]VOeJci0Y5`VOeJai0j4eUOgJl0>_i0i4SWOWKmh0h4UWOVKmh0g4VWOXKjh0g4WWOYKih0e4ZWOYKgh0e4[WO[Keh0c4^WO\\\\Kch0a4_WO^Kbh0`4`WO`K`h0_4bWO`K^h0_4cWO`K^h0_4cWOaK]h0^4eWOaK[h0]4gWObK[h0\\\\4fWOdKZh0\\\\4fWOdKZh0[4hWOcKYh0\\\\4hWOdKXh0[4iWOeKWh0Z4jWOfKVh0Y4lWOeKVh0Z4jWOfKVh0Y4kWOgKUh0X4lWOhKTh0X4lWOgKUh0X4lWOhKTh0W4mWOjKRh0V4nWOjKRh0V4nWOjKRh0U4oWOlKPh0T4PXOlKQh0R4oWOPLPh0P4PXOPLPh0o3QXORLng0n3RXORLng0n3RXOSLmg0l3TXOTLlg0l3TXOULkg0j3UXOWLkg0i3UXOXLjg0h3VXOXLjg0g3WXOZLhg0f3XXO[Lhg0c3YXO^Lfg0b3ZXO^Lfg0b3YXO_Lgg0`3ZXO`Lfg0`3ZXO`Lfg0_3[XObLdg0^3\\\\XObLdg0^3\\\\XObLdg0]3\\\\XOdLdg0\\\\3\\\\XOdLdg0\\\\3\\\\XOdLdg0[3]XOfLbg0Z3^XOfLcg0X3^XOhLbg0X3]XOiLcg0W3]XOiLcg0V3^XOjLbg0V3^XOjLbg0V3^XOkLag0U3^XOlLbg0S3_XOmLag0S3_XOmLag0S3_XOmLag0S3_XOmLag0S3^XOnLcg0Q3]XOoLcg0P3]XOQMdg0n2\\\\XOSMcg0m2ZWOVLSOn0di0k2XWOYLROm0fi0j2WWO\\\\LQOj0ii0i2UWO_LPOi0ki0h2TWObLnNg0oi0f2RWOeLlNg0Rj0d2QWOhLhNg0Xj0`2oVOlLdNg0^j0\\\\2lVOQM`Ng0dj0X2kVOeNVi0Z1hVOhNYi0W1fVOjNZi0V1dVOlN]i0S1_VOQObi0n0[VOUOei0k0XVOXO]d0fMZ_OQ3WL[O_d0dM[_OP3SL@ad0aM[_On2iKMkd0VM]_Ol2cK3Pe0QM]_Ol2`K6Se0oL]_Oj2_K8Se0oL__Oh2]K:Te0oL^_Og2]K;Ue0nL__Of2[K=Ve0mL__Of2[K=Ue0oL`_Oc2ZK?Ve0nL`_Oc2ZK?Ve0nLa_Ob2YK`0Ve0nLa_Ob2XKa0Ve0oLb_O_2XKb0Ve0oLc_O]2XKd0Ue0oLc_O]2XKd0Ue0PMc_O[2XKe0Ue0PMd_OZ2VKg0Ve0oLe_OY2UKh0Ue0PMh_OV2SKj0Ue0QMh_OT2SKk0Ue0QMi_OS2RKl0Ue0QMj_OR2QKn0Te0PMl_OQ2PKo0Te0PMm_OQ2mJP1Ve0PMm_Oo1mJQ1Ve0PMn_Oo1kJQ1We0PMo_On1iJS1Xe0oLP@P1gJoN0R3Ye0oLQ@o0fJPO0R3Ye0oLR@n0eJQ3Ye0RLR@l0fJQ3Xe0SLR@k0gJR3We0SLS@j0fJS3We0SLT@i0fJS3Ve0TLT@i0fJS3Ve0ULT@f0gJU3Ue0ULU@e0fJV3Ue0ULV@d0eJW3Ue0ULV@c0gJW3Se0VLW@b0fJX3Se0WLW@`0fJY3Se0WLX@>fJ[3Re0WLX@C`J56Q4Re0WLY@@bJ73R4Re0XLY@\\\\OeJ81T4Qe0XLZ@YOgJ:OT4Pe0YLZ@WOjJ:MV4od0ZLZ@SOmJ<JW4od0ZL[@POoJ>GX4od0ZLRA]OPJY4md0[LTA[OoIZ4md0\\\\LTAXOPJ\\\\4ld0\\\\LUAWOoI^4kd0[LVAVOPJ_4jd0\\\\LVATOQJ_4id0]LWAROQJa4hd0^LVAPOSJb4gd0_LVAnNSJc4gd0`LUAlNUJd4fd0`LVAjNUJf4ed0aLUAiNVJf4ed0bLjA^3V>
cLiA]3W>cLiA]3W>dLhA\\\\3X>eLhAZ3X>gLgAY3Y>hLfAX3Z>iLfAV3Z>kLeAU3[>kLeAU3[>lLdAT3\\\\>mLdAR3\\\\>oLcAQ3]>PMbAP3^>PMcAo2]>RMbAn2^>SMaAm2_>TMaAk2_>UMaAk2_>VM`Aj2`>WM`Ah2`>YM_Ag2a>ZM^Af2b>[M^Ad2b>]M]Ac2c>_M[Aa2e>`M[A_2e>cMYA]2g>dMYA[2g>fMXAZ2h>fMYAY2g>hMXAX2h>iMXAV2h>jMXAV2h>kMWAU2i>lMWAS2i>mMWAS2i>nMWAQ2i>oMWAQ2h>QNWAo1i>QNWAo1i>RNWAm1i>SNWAm1i>TNVAl1j>TNVAl1j>TNWAk1i>VNVAj1j>VNVAj1j>VNVAj1j>VNVAj1j>WNUAi1k>WNUAi1l>VNTAj1l>WNSAi1m>WNSAi1m>XNRAh1n>XNRAh1n>XNRAh1n>YNQAg1o>YNQAg1o>ZNPAf1P?ZNPAf1P?[No@e1Q?[No@e1Q?\\\\Nn@d1R?\\\\Nn@d1R?]Nm@c1S?]Nm@c1S?^Nl@b1T?^Nl@b1T?_Nk@a1U?_Nk@a1U?`Nj@`1V?`Nj@`1V?aNi@_1W?aNi@_1W?bNh@^1X?bNh@^1X?cNg@]1Y?cNg@]1Y?cNg@]1Y?dNf@\\\\1Z?dNf@\\\\1Z?eNe@[1[?eNe@[1[?fNd@Z1\\\\?fNd@Z1\\\\?gNc@Y1]?gNc@Y1]?gNc@Y1^?gNb@X1^?hNb@X1^?hNb@X1^?hNb@X1^?iNb@V1^?jNb@V1^?jNb@V1^?kNb@T1^?lNb@T1^?lNb@T1^?mNb@R1_?mNa@S1_?nN`@R1`?nNa@Q1_?oNa@Q1_?PO`@P1`?POa@o0_?RO`@n0`?ROa@m0_?TO`@l0a?SO_@m0a?SO`@l0`?UO_@k0a?UO`@j0`?WO_@i0a?WO`@h0`?YO_@g0a?YO_@g0b?YO]@g0c?ZO]@e0c?[OP[OmLj4h3V`0\\\\OlZOPMm4d3W`0\\\\OiZOSMQ5`3V`0^OfZOUMS5]3X`0^OcZOWMT5[3Y`01g_OOY`02g_OMY`04f_OLZ`04f_OLZ`05e_OK\\\\`04d_OL\\\\`04e_OK[`06d_OJ\\\\`06d_OJ\\\\`06d_OJ\\\\`06d_OJ\\\\`07c_OH^`08b_OH^`08c_OG^`08b_OH^`09a_OG``08`_OH``08`_OH``09__OGb`08^_OHb`08__OGa`0:^_OFc`09]_OGc`0:\\\\_OFe`09[_OGe`0:Z_OFf`0:[_OEf`0;Y_OEg`0;Y_OEh`0;W_OEi`0<V_ODj`0=U_OCl`0<T_ODl`0=T_OBm`0nMXZOk0j4W1n`0mM[ZOk0f4X1Pa0lM[ZOl0d4X1Qa0lM]ZOk0a4Y1Ra0nLWZO889H2f4^2Ua0lLWZO;86G5f4]2Ta0kLZZO<65G8d4\\\\2Va0hL[ZO>64F;b4[2Wa0gL\\\\ZO?61H>^4[2Ya0dL^ZOa03OKb0Z4Z2Za0cL_ZOb03IOi0T4Y2\\\\a0aL_ZOc04B4R1l3X2]a0`LaZOc03\\\\O9Y1g3W2]a0_LaZOe0Q1f0P3V2^a0^LbZOf0P1g0o2U2Rb0SMP[Oh0n2U2Sb0RMnZOk0n2S2Tb0RMnZOk0o2R2Sb0SMkZOo0Q3n1Tb0SMiZOQ1S3k1Ub0TMgZOS1T3h1Vb0TMdZOW1U3e1Wb0SMeZOY1S3d1Xb0SMdZOZ1U3b1Wb0TMcZO\\\\1U3`1Yb0SMbZO^1T3^1[b0SMaZOa1T3[1[b0TMaZOb1S3Z1]b0SM`ZOd1S3X1]b0SMaZOf1R3V1^b0SM_ZOj1R3Q1`b0UM^ZOk1Q3P1bb0SM^ZOm1Q3o0bb0SM]ZOo1Q3m0bb0SM^ZOQ2P3j0db0SM]ZOT2o2h0eb0RM\\\\ZOW2P3f0eb0QM\\\\ZOZ2o2d0fb0PM\\\\ZO]2n2a0Wd0_Oj[O`0Vd0Aj[O>Vd0Bk[O<Vd0Ej[O8Xd0Ih[O
5Yd0Kh[OYOcKlNgh0l1e[OVOiKjNch0Q2d[OROnKhN`h0V2c[OoNQLgN^h0[2`[OmNVLcN\\\\h0`2_[OjNYLbNZh0e2\\\\[OhN]e0Z1cZOcN_e0]1aZObN`e0_1`ZO_Nae0a1_ZO]Nce0d1]ZOYNee0h1ZZOWNge0i1ZZOUNhe0k1WZOSNke0m1UZORNle0n1UZOPNle0Q2SZOnMne0R2SZOlMoe0S2QZOkMQf0V2oYOhMRf0X2nYOgMTf0X2mYOdMVf0]2jYO`MXf0`2hYO^MZf0c2fYOYM^f0f2cYOVM`f0j2b410O010O00010O010O00100O010O10O0100O10O01O1O01^NXM]SOg2cl0YM]SOg2cl0YM^SOf2al0\\\\M^SOe2al0[M`SOd2`l0\\\\M`SOd2Un000001O000000001O000O101O00001O000O2O00001N101N1O2O0O2O1O001N2O1O001O001O1O00QMQQO`2nn0_MSQOa2ln0`MUQO_2kn0`MVQOa2jn0[MYQOe2gn0XM]QOg2dn0SMaQOm2Qo0N101O001N2O001O1O1N2O1O1O1N2O1O2M3N2N1N3N1N3M2N2O1N3M6J6K4K2N1O2N2O0O2N2N1O2N101M3N2N1N3N2M3M3N2Mm_m<\"}}, {\"image_id\": 141, \"category_id\": 2, \"bbox\": [212.7700653076172, 399.77093505859375, 1153.5274658203125, 616.7501220703125], \"score\": 0.3947480320930481, \"association_id\": 0, \"segmentation\": {\"size\": [1102, 1600], \"counts\": \"f\\\\Z7c0_Q1h0]O;G9TQOiNTl0_1eSOfNVl0a1cSOcNYl0d1`SOaN^l0c1[SObNcl0k1nROXNQm0d3N1O2N1O2N100O2N100O1O100O1O1O1O100O1O1O1O1O1O1O1O1O100O1O100O100O100O100O100O100O10000O10000O100O10000O101O0O100O100O100O100O1O100O1O100O100O100O100O10001N10000O10000O10000O10000000000O10000000000000000000000O1000000000000000000000000O100000000000000000000000000000000O10000000000000000000000000000000000000000000000000000000000O1000000]EWIaIi6_6bIVI^6i6jIPIV6o6lIPIT6Q7lInHT6R7lInHT6R7mInHR6R7oImHQ6S7oImHQ6S7PJlHP6T7QJkHo5U7RJjHn5W7RJhHn5X7SJgHn5X7SJgHm5Y7TJfHl5Z7UJeHk5[7WJdHh5]7YJaHg5_7ZJ`Hf5`7]J]Hc5c7_J[Ha5f7`JXH`5h7bJVH^5j7dJTH\\\\5m7eJQH[5o7gJoGY5Q8jJlGV5T8lJjGT5V8oJgGQ5Z8QKdGn4\\\\8TKbGl4^8UKaGk4_8VK`Gj4`8VK`Gj4`8WK_Gi4a8XK^Gh4b8YK]Gg4c8ZK\\\\Gf4d8[K[Ge4f8ZKZGf4f8[KYGe4g8\\\\KXGd4h8\\\\KYGc4g8^KXGb4h8^KXGb4h8_KWGa4j8_KUGa4k8`KTG`4l8`KTG`4l8aKTG^4l8cKSG]4n8cKQG]4o8dKPG\\\\4P9eKoF[4Q9fKoFY4Q9hKnFX4S9iKkFW4U9kKiFU4W9mKhFR4X9PLfFP4Z9RLdFn3\\\\9TLbFl3_9VL_Fi3a9YL]Fg3c9\\\\LZFd3f9_LWFa3i9`LWF_3i9bLVF^3j9cLUF]3l9cLTF\\\\3l9eLSF[3m9fLRFZ3n9fLSFY3m9hLRFX3n9iLRFV3n9jLRFV3n9kLRFT3o9kLQFU3o9lLQFS3o9mL
QFS3o9nLQFQ3o9oLQFQ3o9PMQFo2o9QMQFo2o9QMRFn2o9RMPFn2P:RMPFn2Q:QMPFn2Q:RMnEn2R:RMoEm2R:RMnEn2S:RMmEm2T:RMlEn2T:SMlEl2U:SMkEm2V:SMjEl2W:SMjEl2W:TMhEl2Y:SMhEl2Z:SMeEm2]:QMdEn2_:PMaEo2a:PM^EP3d:oL\\\\EP3g:nLYEQ3i:nLVER3l:nLSEQ3P;mLPER3R;mLmDS3U;mLjDR3X;mLhDR3Y;nLgDQ3[;nLeDQ3\\\\;PMcDo2_;PMaDo2`;QM_Do2c;PM]Do2d;QM\\\\Dn2e;RMZDn2g;RMYDm2h;SMXDl2i;TMVDl2k;TMUDk2l;UMSDk2n;UMRDj2o;UMQDk2P<UMoCk2Q<UMoCk2R<UMnCj2S<VMlCj2U<UMkCk2V<UMiCk2X<TMiCk2W<VMhCj2Y<VMfCj2[<UMeCk2\\\\<UMcCk2^<TMcCk2^<UMaCk2`<UM_Ck2b<TM^Cl2c<TM\\\\Cl2e<TM[Ck2f<TMZCl2g<TMXCl2i<TMVCl2k<SMUCm2l<SMTCl2n<SMQCm2P=RMPCn2Q=RMnBn2S=RMlBn2U=QMlBn2U=RMjBn2W=RMhBn2Y=RM_\\\\OkM`5S5R>iMmAW2T>iMkAW2V>jMiAU2X>kMgAU2Z>kMeAU2\\\\>kMcAU2]>lMbAT2_>kMaAU2`>kM`AT2`>mM_AS2b>lM^AT2c>kM]AU2c>kM]AU2d>jM\\\\AV2e>hM\\\\AX2e>gM[AY2f>eM[A[2f>dM[A[2f>cM[A]2f>bMZA^2h>_MYAa2h>]MYAc2h>\\\\MXAd2j>YMWAg2j>XMVAh2l>[L[[OROj5c4l>YL_[OoNf5h4m>VLb[OnNa5l4o>SLe[OlN]5Q5P?oKh[OlNY5U5P?mKl[OiNV5Y5P?jKnAV4V>eKkA[4X>`KjA`4Z>ZKhAf4\\\\>TKfAl4]>oJeAQ5^>iJeAW5ie0000001O00000000000000001O0000O10000000000O10000000000O1000000000000O100000000000000O10000000000000000O1000000000000O100000000O100000000O1000000O100000O010000O10000O10000O10000O100O10000O100O1O10O010000O01000000O01000O100O01000O100O100O010O1N2L4hN\\\\SOaLil0^3^SOXLfl0g3`SOoKel0P4l001O100O1O1O100O100O10O02O0O1000000O2O000O100O101O0O100O10001N100O100O2O0O100O100O2O000O10001N10000O10001NaJiLi[OW3Vd0jLk[OU3Ud0lLj[OT3Ud0nLk[OQ3Td0QMk[Oo2Ud0QMl[On2Sd0TMl[Ol2Td0UMl[Oj2Sd0XMl[Oh2Sd0ZMm[Oe2Rd0]Mm[Oc2Sd0^Mm[Oa2Rd0bMl[O^2Sd0dMm[O[2Rd0gMm[OX2Td0iMl[OV2Sd0mMk[OS2Td0oMl[OP2Sd0SNk[Om1Td0UNl[Oj1Sd0YNk[Og1Td0[Nl[Od1Rd0^Nn[Ob1Qd0`No[O_1Pd0cNo[O]1Pd0eNP\\\\OZ1nc0hNR\\\\OW1mc0lNR\\\\OT1lc0oNT\\\\OP1jc0ROV\\\\On0hc0UOX\\\\Oj0fc0XOZ\\\\Oh0ec0ZO[\\\\Oe0cc0^O\\\\\\\\Ob0bc0@_\\\\O?`c0B`\\\\O=_c0Fa\\\\O9^c0Hb\\\\O8\\\\c0Kc\\\\O5\\\\c0Le\\\\O3[c0Nd\\\\O2[c0Of\\\\OO[c01e\\\\OO[c02d\\\\ON\\\\c02e\\\\OMZc05e\\\\OK[c05f\\\\OJZc07e\\\\OI[c07g\\\\OGYc0:g\\\\OEYc0;h\\\\ODWc0>i\\\\OAWc0?j\\\\O@Vc0a0k\\\\O\\\\OVc0e0j\\\\OZOVc0f0k\\\\O
YOUc0h0l\\\\OVOTc0k0l\\\\OTOSc0m0o\\\\OQOQc0P1o\\\\OoNQc0R1P]OkNQc0U1P]OjN]LXNjd0o2j^OgN\\\\LZNjd0o2l^OeNZL]Nid0o2m^OcNZL_Ngd0o2P_O`NYLbNgd0o2P_O^NYLdNfd0n2R_O]NXLfNed0m2S_O\\\\NYLgNdd0n2S_OZNYLiNcd0m2U_OYNWLlNcd0l2V_OVNXLoNad0k2W_OUNYLQO_d0j2Y_OTNXLSO]d0k2[_OPNYLVO[d0j2\\\\_OoMYLYOZd0h2^_OmMYL\\\\OXd0h2^_OkM[L_OUd0f2a_OhM\\\\LCRd0e2b_OhM[LFQd0c2d_OeM\\\\LIoc0b2e_OeM\\\\LKmc0`2h_OcM\\\\LOjc0^2j_ObM\\\\L1jc0^2j_O_M]L3ic0^2k_O\\\\M]L8hc0\\\\2j_O[M^L:hc0[2k_OXM_L=fc0[2l_OUM_Lb0dc0Z2m_OQM`Lf0dc0X2m_OoLaLi0bc0Y2m_OkLbLn0`c0W2o_OgLcLS1^c0W2ZBjMg=U2YBkMg=V2WBlMh=U2VBlMj=T2UBmMl=S2SBmMm=T2QBnMn=R2QBoMP>Q2nAPNR>Q2lAPNT>Q2jAPNW>P2PAbLjI`1Ue0o1PAhNQ?X1n@hNR?Y1m@gNS?Y1m@gNT?Y1k@hNT?Y1j@hNV?X1j@hNW?X1h@hNX?Y1g@gNY?Y1g@gNY?Z1f@gNZ?X1f@hNZ?Y1e@gN[?Y1e@gN\\\\?Y1c@gN]?Y1c@gN]?Y1c@gN]?Z1b@gN]?Y1c@gN^?Y1a@gN_?Y1a@gN`?X1a@gN_?Z1`@fN`?Z1`@fNa?Z1_@eNa?[1`@dN`?]1_@cNb?\\\\1_@cNa?^1^@bNb?^1_@aNa?`1^@`Nc?_1^@`Nb?a1]@_Nc?b1]@]Nd?b1\\\\@^Nd?c1\\\\@\\\\Nd?e1[@[Nf?e1Y@[Ng?f1Y@YNg?h1X@XNi?g1W@YNi?h1W@WNi?j1V@VNj?k1U@UNl?j1T@VNl?k1T@TNl?l1T@TNm?l1R@TNn?m1Q@SNo?m1R@RNo?n1P@RNP`0n1P@RNP`0o1o_OQNQ`0o1o_OQNR`0o1n_OPNR`0P2n_OPNR`0Q2m_OoMS`0Q2m_OoMT`0Q2k_OoMU`0Q2l_OnMT`0R2l_OnMU`0R2j_OnMV`0R2j_OnMV`0S2i_OmMW`0S2i_OmMX`0S2g_OmMY`0S2h_OlMX`0U2g_OkMZ`0T2f_OlMZ`0U2e_OkM\\\\`0T2d_OlM\\\\`0U2c_OkM]`0V2b_OjM_`0V2a_OiM_`0W2a_OiM``0W2__OiMa`0Y2]_OgMd`0Y2[_OgMe`0Z2Z_OfMg`0Z2Y_OeMg`0\\\\2X_OdMh`0]2W_OcMj`0]2U_OcMk`0^2T_ObMl`0_2S_OaMn`0_2R_O`Mn`0a2Q_O_Mo`0b2P_O^MQa0b2n^O^MRa0c2m^O]MSa0d2l^O\\\\MTa0e2l^OZMUa0f2j^OZMVa0g2i^OYMWa0h2h^OXMYa0h2f^OXMZa0i2e^OWM[a0j2e^OUM[a0k2e^OUM[a0l2d^OTM]a0l2b^OTM^a0l2c^OSM]a0n2b^ORM^a0o2b^OPM^a0P3b^OoL`a0Q3`^OnL`a0S3_^OmLaa0S3`^OlL`a0U3`^OjL`a0W3`^OhLaa0X3^^OhLba0X3_^OgLaa0Z3_^OeLaa0\\\\3_^OcLaa0^3^^ObLba0_3^^O_Lda0a3\\\\^O^Lda0c3[^O]Lea0d3[^O[Lea0g3Z^OXLfa0i3Z^OVLga0j3X^OVLha0k3X^OTLha0n3W^OQLia0P4V^OoKka0R4U^OmKla0S4T^OlKla0U4S^OkKma0V4S^OiKma0X4S^OgKma0Z4S^OeKna0[4Q^OeKoa0\\\\4Q^ObKPb0_4P^O`KPb0a4o]O_KQb0b4o]O]KQb0c4o]O]KRb0c4n]O\\\\KRb0e4n]OZKRb0g4n]OWKSb0j4n]OTKRb0l4o]OSKRb0m4n]
ORKRb0o4n]OPKSb0P5n]OmJSb0S5n]OlJRb0U5n]OjJSb0U5o]OhJRb0Y5n]OfJSb0Z5n]OdJRb0\\\\5o]ObJSb0^5m]OaJTb0_5m]O^JTb0b5m]O]JTb0c5\\\\500O10000O1000000O10000000O010000000O100000O010O10O10O01O00001O0000001O00001O00001O0O101O001O00001O00001O001N10001O001O001O00001O0O2O001O0010O010O010O10O10O10O0100O1O100O010O100O100O100O1O100O2Of0YO010O1O010O010O0010O01O010O001O001O1O1O001O1O1O100nK_ROU3bm0eLeROY3]m0`LiRO_3Xm0[LoROc3Pn0N2N2N2N3M2N2N4L5K4L4L3M3M1O1O1O001O001O1O001O0010O0001O001O1O001O001O1O1O001O1O1O1O1O2N1O2N2N1O2N1O2N1O1O1O2O0O1O1O1O1O001O2N1O1O2O0O1N2N2N3L3N3L3M4K5K5JkU[<\"}}, {\"image_id\": 142, \"category_id\": 1, \"bbox\": [327.703125, 15.423477172851562, 39.8248291015625, 84.3184814453125], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [300, 400], \"counts\": \"YgP36S94L3N2N2L5M?A5L5^HgNh6^1nHiNP7j100O1OO00OM1320O2O002N1O00000000O2N1O3N6UOiHTOY7g0QIPOS7m0h0N2M3O2K`d9\"}}, {\"image_id\": 142, \"category_id\": 1, \"bbox\": [104.16839599609375, 104.1822280883789, 173.524169921875, 164.35104370117188], \"score\": 1.0, \"association_id\": 2, \"segmentation\": {\"size\": [300, 400], \"counts\": \"^eP11X99I2M3N2N4Le0[O4M2M1O1N3N1O1O1O1O1N2N1O2O1O1O0O1O1O101O001O101N100O101O1O0jJoMQ3R2lLRNR3o1kLTNT3l1kLVNT3k1jLVNU3l1iLVNU3m1gLUNY3n1bLUN\\\\3n1`LTN`3m1^LUNa3l1]LUNc3m1ZLTNf3o1VLRNj3g3100000000O10000O10000O1000000O11O00001O0000001O00001O0000001O0O10000000O100O100O1O1O0N3M3M2O2M3G9G8L5N2O001O0N3N2N2O0O2N2N2N2N2M3N2N2O1N2O1N2M3N2N2N2M3M3N200O10000O100001O1O001O001O2N2N3M5Ka0_O7I1O1O1O1O0000O1YOQJoMQ6`1eJYN]5l0QJoNa7j0d0M3O1N2N2O2N1O1O2N<_OYdS1\"}}, {\"image_id\": 142, \"category_id\": 1, \"bbox\": [287.7491760253906, 11.312516212463379, 38.68157958984375, 83.64576721191406], \"score\": 0.9999997615814209, \"association_id\": 0, \"segmentation\": {\"size\": [300, 400], \"counts\": \"fce2a0f87K5L3N3L2O1M4J5M2N3O1N1000O000O2OO01O2O2J4N3O01H71O2O1O1000001^OSHBn74Vfe0\"}}, {\"image_id\": 142, \"category_id\": 2, \"bbox\": [83.86072540283203, 227.781982421875, 
147.81146240234375, 71.0631103515625], \"score\": 0.9999996423721313, \"association_id\": 2, \"segmentation\": {\"size\": [300, 400], \"counts\": \"Rak04V92O101N1O1O1O1O1O100N2M3O100O100M3O1O100O1O1O1O100O100O1O10000O1M3O100O1O1N2N2O100O1O1O1O1000000000cNVHX1k7fNXHX1o7O1O2N001O001O000000000000O1O10000O0010O010O10ON3N110O1O1O1M3O1O2O1M2NTO]OTIa0m6@XHOf0a0R7@XHNg0a0R7@SI>o6ARI<P7DQI9R801N100N2N3N1OTi37eVL3M1L3N1110O010O1O100O1000000001O001ONUGDj8;WGEi8;WGE85P85iGG49P8OnGF3;P8LZHOPaa1\"}}, {\"image_id\": 142, \"category_id\": 2, \"bbox\": [315.14788818359375, 89.79661560058594, 39.61663818359375, 12.736587524414062], \"score\": 0.9999995231628418, \"association_id\": 1, \"segmentation\": {\"size\": [300, 400], \"counts\": \"fnl25W91O1O001O000010O000001O000000001O01O003M2O0O00000000O10000000000000O1O1OX\\\\=\"}}, {\"image_id\": 142, \"category_id\": 2, \"bbox\": [127.92792510986328, 213.59921264648438, 132.35556030273438, 74.0408935546875], \"score\": 0.8448809385299683, \"association_id\": 0, \"segmentation\": {\"size\": [300, 400], \"counts\": \"UhU1j0_83O2N1O1O100O100O1O10000001O3M1O2N2N1O1O00001O001O00O10000O100O100O100O100O1O10000O10O1000001LXOVOTIi0h6]OWIb0i6@WI?h6BXI=h6FWI6k6KVI3j6OUI1k6NWI0j60VINl61UIMl64TILm62UILl63UIMP7HWI5R8L]e42[ZK:L4N1O1M2K50001O000100O100001O01O01O2jGYOc7\\\\1L2N01O00000000000M3N2N2N2O1SOPH:T8DPH8R8GoG8R8HoG6R8IPHKI3X81i0O010O2N1O100O1NeXZ1\"}}, {\"image_id\": 142, \"category_id\": 2, \"bbox\": [188.09938049316406, 209.58834838867188, 73.49595642089844, 58.944183349609375], \"score\": 0.11205655336380005, \"association_id\": 0, \"segmentation\": {\"size\": [300, 400], \"counts\": \"ibl15S96M1L4M3M2M4N1O1N210O0002O1O001O10O2Nd0\\\\O2N1O00000O100001O0O1O1O1N2O1O1O1O1ROoGN03U8NnGL21R82nGL01S82[HLf73[HLf73i00100O1O2N1OeXZ1\"}}, {\"image_id\": 142, \"category_id\": 2, \"bbox\": [286.5443115234375, 88.53289031982422, 19.5706787109375, 8.795875549316406], \"score\": 0.05728929117321968, \"association_id\": 0, \"segmentation\": {\"size\": [300, 
400], \"counts\": \"]Ud22Z900001O001O0010O10O0010O2N01OO2OQQl0\"}}, {\"image_id\": 143, \"category_id\": 1, \"bbox\": [454.9520568847656, 587.5072021484375, 80.34402465820312, 78.275634765625], \"score\": 1.0, \"association_id\": 2, \"segmentation\": {\"size\": [800, 691], \"counts\": \"_QT;5hh05N001O001O0QXO1mf00PYO4nf0MnXO8Pg0JhXO>Vg0EfXO=Yg0f0O1O10000O02N101N6K1O1N5K1O2O1OO10O001000O001O00001O10O0100O10O000010O010O01O0000000O100O2N2M3N1O1O2N1O0O2N2N3K4N2M2O2N_YP4\"}}, {\"image_id\": 143, \"category_id\": 1, \"bbox\": [92.07718658447266, 628.4334106445312, 76.33654022216797, 108.658203125], \"score\": 0.9999997615814209, \"association_id\": 4, \"segmentation\": {\"size\": [800, 691], \"counts\": \"U^\\\\26ih05K3M2N2M3N3M4L2N2O2M5K3M2N8I:D4L4L3M2ZOf0N2N2000O1O1O100O10000O105QMPZO3NZ2gf0G3O3N02NO1N002O0O1O0O101N3M1O2N1O1O1N2O1O2N2N2M3N1O3M3M3M5I7I8HYjk<\"}}, {\"image_id\": 143, \"category_id\": 1, \"bbox\": [34.89460372924805, 374.4600524902344, 102.9422607421875, 92.2525634765625], \"score\": 0.9999991655349731, \"association_id\": 0, \"segmentation\": {\"size\": [800, 691], \"counts\": \"Qhk0T1jg04M2K4B>L4N3E:N2N200O100000001O001O00001O0O10000000000001O0000000000000000000000001O001O0000001O0O100001O0001N1001O0001O01O001O010O00001O001O1O00001O00000001O00000000001O000000001O0000001O1O1O0O2dNgYOO[f0_OnZOHTe0J[YO2j11Qg0Nb[`=\"}}, {\"image_id\": 143, \"category_id\": 1, \"bbox\": [197.45361328125, 605.0577392578125, 93.90420532226562, 106.9561767578125], \"score\": 0.9999982118606567, \"association_id\": 1, \"segmentation\": {\"size\": [800, 691], \"counts\": \"noo4:bh0;G6K6SOPO^YOX1[f0nN_YOV1^f0mN^YOX1^f0iNZYOa1bf0`0O001O00001O1O001O0010O02N10O9H2M10010O0O100000O101O1N1000O10O01O001O1O1O1N3N2N1O1O2N2N001O2N1O001O2N001O0O2O1N101N1O101N1N2O1N2O2M2M3N3M4J5J<FTZm9\"}}, {\"image_id\": 143, \"category_id\": 1, \"bbox\": [373.1270751953125, 585.2549438476562, 51.141693115234375, 61.03887939453125], \"score\": 0.9999957084655762, \"association_id\": 0, \"segmentation\": 
{\"size\": [800, 691], \"counts\": \"ihT92eh0?I5J9H2N2N2N2O1O1O2N1O1O1O1O1O1003L8I1N2N1O001O1O00001O010O0000001O1O00001O1O002M2O1O1N3M2N5Laaa6\"}}, {\"image_id\": 143, \"category_id\": 1, \"bbox\": [316.20733642578125, 604.2109375, 81.8931884765625, 78.8387451171875], \"score\": 0.9999953508377075, \"association_id\": 3, \"segmentation\": {\"size\": [800, 691], \"counts\": \"SXh78gh02N2N100O100O2O0XXOEkf0;SYOHkf0:SYOHlf09gXO4Wg0l0O1O2N1O1O1O1O1O00001O01O10000O3M8I1N5L000O2OO2aXOgNTg0Y1fXOlN[g0\\\\101O0001O001O0O2O000O2O0O101N101N1O1O2N2L5J5L5K5I7LiZb7\"}}, {\"image_id\": 143, \"category_id\": 1, \"bbox\": [606.4660034179688, 542.1737670898438, 42.4586181640625, 47.41552734375], \"score\": 0.9999943971633911, \"association_id\": 6, \"segmentation\": {\"size\": [800, 691], \"counts\": \"ahj>;ch03M4L3C=O2M2O2N1O000001O9G1O2N1000O10O01O100O01O1O1O1O1N2O2N2N1O2M2O1O1N101O001O1NoWP1\"}}, {\"image_id\": 143, \"category_id\": 1, \"bbox\": [50.33446502685547, 617.2096557617188, 48.43584442138672, 93.33349609375], \"score\": 0.999966025352478, \"association_id\": 7, \"segmentation\": {\"size\": [800, 691], \"counts\": \"jfW1f0Vh07K6J:F4M2M4lN^NdZOd1[e0aNRZOK0g1me0n002O1O000000000000O2N1O7I7I4L3M3SNUYO`1\\\\g0J1N101N3N001N2M3M2O2N1N2O2N2N2N2AiWO1fh0Jnga>\"}}, {\"image_id\": 143, \"category_id\": 2, \"bbox\": [271.16949462890625, 671.9201049804688, 53.000946044921875, 31.3692626953125], \"score\": 0.9999517202377319, \"association_id\": 1, \"segmentation\": {\"size\": [800, 691], \"counts\": \"hUe66ih01O2N1O1M3O1N101O1O100O010O1000O010000O001O0010O1O0010000O100O10001N103L2O1N2O001O00001N10001N101Ncco8\"}}, {\"image_id\": 143, \"category_id\": 1, \"bbox\": [144.9449462890625, 566.2371215820312, 45.3587646484375, 83.29986572265625], \"score\": 0.999919056892395, \"association_id\": 8, \"segmentation\": {\"size\": [800, 691], \"counts\": \"n]c37fh09H6J4nN^O`YOf0^f0]O\\\\YOh0af0[OZYOj0df0j0O2N1O0010O011O2N;E3M1N20OO1O2N100O1O1O2N1O2M2O3L3M4M4K5K4K4L_fZ<\"}}, {\"image_id\": 143, 
\"category_id\": 2, \"bbox\": [365.1879577636719, 648.8123779296875, 53.9066162109375, 31.74395751953125], \"score\": 0.9998468160629272, \"association_id\": 3, \"segmentation\": {\"size\": [800, 691], \"counts\": \"SRT91lh04J5I7N2I7O1O10O10001O1[OkWO=\\\\h001O0O100000O10OO2O100O01000O10O1000000O101O0AeWO8bh0O2O0O100O1O2O1NZ]e6\"}}, {\"image_id\": 143, \"category_id\": 1, \"bbox\": [569.676513671875, 586.7656860351562, 105.70489501953125, 80.71490478515625], \"score\": 0.9996392726898193, \"association_id\": 0, \"segmentation\": {\"size\": [800, 691], \"counts\": \"Qmm=d1Xg06N11N2N010O8I2M1O101N1O1O1O2N001O001O001OcNQOjZOn0Ve0UOhZOj0Xe0VOiZOi0We0XOhZOh0Xe0YOhZOf0Xe0[OgZOe0Ye0\\\\OfZOd0Ye0^OgZOa0Ye0_OgZOa0Ye0@fZO`0Ze0CcZO=]e0EaYOIc0c0le07lYOJTf0a100000000000000001O<D2N1O2N1O1N2O0N3N2L300O100O100O10000O100O1000001O001O010O000O2O001O00001O0O101N101O000O101N1YOUXO8ng0CXXO:Yh0L6IQfc0\"}}, {\"image_id\": 143, \"category_id\": 2, \"bbox\": [144.45094299316406, 699.3541870117188, 72.89688110351562, 28.12750244140625], \"score\": 0.998529314994812, \"association_id\": 4, \"segmentation\": {\"size\": [800, 691], \"counts\": \"cle34kh01N2O1O1O1O1O1N2O1O100O1000O10O10O100O100O1000000O10000O010000O010000000O10000000O10000O10000O100O10000O0100O0100O10O0100O1O1O1O1MT]b;\"}}, {\"image_id\": 143, \"category_id\": 1, \"bbox\": [550.5505981445312, 539.9439086914062, 53.03338623046875, 73.722412109375], \"score\": 0.9967445135116577, \"association_id\": 0, \"segmentation\": {\"size\": [800, 691], \"counts\": \"_P_=:`h09F8J7M2O2N1O2`XOiNSg0Y1hXOmNUg0_101O1O1O2NO2O101O00O10O0108HO002N1[OoXOPO0NRg0m0aYOoN`f0P1j0O1N2O000O1O100O1O1O1O1O1O102M2N1O1O1N3N2M6J2MfUT2\"}}, {\"image_id\": 143, \"category_id\": 2, \"bbox\": [632.3359985351562, 580.4964599609375, 20.03485107421875, 10.60150146484375], \"score\": 0.9742336869239807, \"association_id\": 6, \"segmentation\": {\"size\": [800, 691], \"counts\": \"Ze`?2nh01O001O0O1000000000O1000O100001Nhlm0\"}}, {\"image_id\": 143, \"category_id\": 2, 
\"bbox\": [180.2847137451172, 640.3017578125, 25.203872680664062, 9.0247802734375], \"score\": 0.9252648949623108, \"association_id\": 8, \"segmentation\": {\"size\": [800, 691], \"counts\": \"T\\\\`43mh000000O100O10000O10000000000000O100OPdl;\"}}, {\"image_id\": 143, \"category_id\": 1, \"bbox\": [551.0932006835938, 563.4996337890625, 79.43634033203125, 108.71685791015625], \"score\": 0.9073452949523926, \"association_id\": 5, \"segmentation\": {\"size\": [800, 691], \"counts\": \"ei_=5jh02O2N2N1WYOKed08U[O2dd00PZOFh0=Ve0OoYOFg0>Ze0<cZOH\\\\e09bZOI]e07bZOJ^e06_ZON`e03]ZO0be01XZO5ge0MTZO6le0a1N1O0010O000UMXZO_2Uf0000O_MgYO[2`f0O010fMlYOg1]e0ZNeZON>Z1kd0iNgZOL`0[1hd0jNgZOJd0Z1ed0oNdZOGh0Y1dd0YOZZO_OR1X1dd0^O][Ob0bd0^O^[Ob0bd0_O^[O`0dd0^O][Ob0dd0]O[[Oc0gd0[OZ[Oe0fd0[OZ[Od0gd0[OZ[Od0hd0[OW[Oe0kd0ZOU[Oe0nd0YOR[Of0od0ZOQ[Oe0od0\\\\OQ[Od0od0[OR[Od0nd0]OQ[Oc0Pe0\\\\OQ[Oc0od0^OP[Ob0Pe0O`ZO0`e02^ZONae0:oYOgN0Q1Qf0g1001O00001O001O1YNTZO8oe0CVZO:ne0^OXZO?Rf0XORZOd0Qf0XOQZOf0Rf0XOoYOe0Xg0HXOC\\\\YO;ff0D[YO:gf0E[YO8hg0NoNJZYO5ff03mXOGB5bg06iXOGD2dg07hXOHB1gg07gXOL\\\\g0MVXO0\\\\j_1\"}}, {\"image_id\": 143, \"category_id\": 2, \"bbox\": [76.72412872314453, 683.9122314453125, 31.89788818359375, 32.228271484375], \"score\": 0.8467811942100525, \"association_id\": 7, \"segmentation\": {\"size\": [800, 691], \"counts\": \"l`Q25gh05M3N2M3N3K\\\\OjWOd0Vh0\\\\OjWO2O>_h00N1O2O000O100001O1O1O1N3N2M3M3NPbW>\"}}, {\"image_id\": 143, \"category_id\": 2, \"bbox\": [587.5387573242188, 609.0047607421875, 78.5560302734375, 58.1568603515625], \"score\": 0.8356890082359314, \"association_id\": 5, \"segmentation\": {\"size\": [800, 691], \"counts\": \"fTX?4kh02N1O1O1000O2O0O1O1O1O1N]Re01fcN\"}}, {\"image_id\": 143, \"category_id\": 2, \"bbox\": [488.022705078125, 621.4125366210938, 50.97705078125, 46.9207763671875], \"score\": 0.7097519040107727, \"association_id\": 2, \"segmentation\": {\"size\": [800, 691], \"counts\": 
\"d_V<3mh01O1O001O001O0000000O1O1O1O1O1O1O100O1O001O010]OAeXO?Zg0CUXON>?\\\\g0GcXO8]g0HeXO7[g0GgXO8Yg0HhXO7Zg0FiXO9og000O100O101N1Nfeg3\"}}, {\"image_id\": 143, \"category_id\": 2, \"bbox\": [96.11740112304688, 673.0316772460938, 97.11123657226562, 53.139892578125], \"score\": 0.5673344135284424, \"association_id\": 0, \"segmentation\": {\"size\": [800, 691], \"counts\": \"he[29dh06M1O000002O1N1O004L4Ka\\\\R11^cmN1O1M3O100N2SOFRYO<mf0FQYO;nf0FRYO:nf0FQYO;of0EQYO;Pg0DoXO=Qg0CoXO<Rg0CoXO=Qg0CoXO=Qg0BPYO>Pg0BPYO=Qg0CoXO=Qg0CoXO=Pg0DPYO;Qg0EoXO;Qg0EoXO;Qg0EoXO:Rg0FnXO:Qg0GoXO9Qg0FPYO:Pg0FoXO:Qg0GoXO8Rg0HnXO8Rg0HnXO8Rg0GnXO:Rg0FnXO9Rg0HnXO8Rg0HnXO8Qg0HPYO8Pg0GQYO9of0GQYO9of0GQYO9Pg0FoXO;lg0Nn[T<\"}}, {\"image_id\": 143, \"category_id\": 2, \"bbox\": [199.1387939453125, 643.06005859375, 109.46328735351562, 62.933349609375], \"score\": 0.21533656120300293, \"association_id\": 0, \"segmentation\": {\"size\": [800, 691], \"counts\": \"j\\\\d64kh01N2N200O1M3N2O100XOCkXO?Tg0EhXO<Xg0FeXO;[g0FcXO:^g0G`XO:`g0H\\\\XO:eg0EZXO<fg0B\\\\XO>dg0@^XO?dg0@[XOa0eg0^O\\\\XOa0dg0@\\\\XO`0dg0_O]XO`0dg0@\\\\XO?eg0@\\\\XO?eg0@\\\\XO`0dg0@[XO`0fg0_O[XOa0gg0]OYXOb0Sh0O1O100O100O1O2NmaZ9\"}}, {\"image_id\": 143, \"category_id\": 2, \"bbox\": [151.1339111328125, 671.8919067382812, 60.661773681640625, 58.42333984375], \"score\": 0.20936919748783112, \"association_id\": 0, \"segmentation\": {\"size\": [800, 691], \"counts\": \"a^g37hh01N3O0O1O1O1N2O1O0010OXOAQYO?of0APYO?Pg0BPYO>Pg0BPYO>Pg0AQYO>Pg0BPYO=Rg0BnXO>Rg0BnXO>jg0O10000OWOEoXO;Qg0FnXO:Qg0GoXO8Rg0HnXO8Rg0HmXO9Rg0HnXO7Sg0ImXO7Sg0HmXO9Sg0GmXO9Rg0HnXO8Rg0GoXO9Pg0HPYO8Pg0HPYO8of0HRYO8nf0HRYO8mf0HTYO7mf0ISYO7nf0GSYO9lf0GUYO8lf0HTYO7nf0GSYO9jg000O1000O10O10O0100O10O02N1OPZf;\"}}, {\"image_id\": 143, \"category_id\": 2, \"bbox\": [119.903564453125, 649.6293334960938, 89.81236267089844, 74.9974365234375], \"score\": 0.06911987066268921, \"association_id\": 0, \"segmentation\": {\"size\": [800, 691], \"counts\": 
\"aPi33jh03O1O1N2O1TOFQYO;of0FoXO;Qg0EoXO;Qg0EnXO<Rg0DnXO<kg00O100000000000O100O10000O1OWOFnXO:Rg0FmXO;Sg0EmXO;Sg0EmXO;Rg0FnXO9Sg0GlXO:Tg0FlXO:Tg0FlXO:Tg0EmXO;Qg0FoXO;Qg0DPYO<kg0O000000O0100O100O1000000O0010000O011NPlg;\"}}, {\"image_id\": 144, \"category_id\": 1, \"bbox\": [360.3734130859375, 229.3598175048828, 37.48675537109375, 58.17945861816406], \"score\": 0.9999995231628418, \"association_id\": 1, \"segmentation\": {\"size\": [535, 800], \"counts\": \"lkl55a`02M2O1L3M3N3N2O1O1O1X@[O]?g0_@^O_?b0`@_O^?b0c@^O\\\\?a0g@^OV?d0l@ZOR?h0PAVOk>P1VAnNj>S1<0010O100001O0001N1N2Ne@TOR?Ne@f0:^OW?=k@BX?9j@FZ?3j@Mbjc6\"}}, {\"image_id\": 144, \"category_id\": 1, \"bbox\": [457.445556640625, 230.27200317382812, 51.14337158203125, 56.186614990234375], \"score\": 0.9999995231628418, \"association_id\": 5, \"segmentation\": {\"size\": [535, 800], \"counts\": \"`a_73d`00O1O100O100O1O1O2M2O1N2N101O2N1V@_O]?c0`@A^?a0]@Ca?j000OBe@A[??g@@Y??j@^OW?b0k@[OT?f0>1L4N2O1O1O100000N2O1O1O2M2N3M3LZ`l4\"}}, {\"image_id\": 144, \"category_id\": 1, \"bbox\": [484.8775329589844, 192.95396423339844, 70.45211791992188, 69.77583312988281], \"score\": 0.9999994039535522, \"association_id\": 6, \"segmentation\": {\"size\": [535, 800], \"counts\": \"eTn73d`00O2O000__OM]`0700000M4N2Ncc2O^\\\\M2O1O001N100O1O100O1O1O101N1YOGo@:j>0SA1j>1UA0MAa>b0_AONAb>a0_ANOAb>a0_AN0_Ob>d0\\\\ANo>2QAMP?4n@MR?4j@OV?h02O003M1O1O2O0O21N2M10Ai@^OU?a0n@^OQ?b0RA\\\\On>a0UA_Oj>`0YA^Og>`0[A_Og>?i0D<O10001N10^lQ4\"}}, {\"image_id\": 144, \"category_id\": 2, \"bbox\": [359.61370849609375, 285.34686279296875, 35.893341064453125, 13.11663818359375], \"score\": 0.9999994039535522, \"association_id\": 1, \"segmentation\": {\"size\": [535, 800], \"counts\": \"Rnm54a`0201M2O100O00100000O1000000O0100001O0O10000O100O100O100O2Nc[d6\"}}, {\"image_id\": 144, \"category_id\": 1, \"bbox\": [175.947265625, 194.1656036376953, 28.864120483398438, 44.00537109375], \"score\": 0.9999994039535522, \"association_id\": 3, \"segmentation\": {\"size\": 
[535, 800], \"counts\": \"Zfl23b`02D<M200O101N2L4M3O2M203M3M1O00Gd@VO]?i0f@UOZ?j0:00O2O0[OX@:i?DZ@:h?D[@9S`0Lefg9\"}}, {\"image_id\": 144, \"category_id\": 2, \"bbox\": [261.29443359375, 277.35504150390625, 28.885711669921875, 10.792083740234375], \"score\": 0.9999992847442627, \"association_id\": 2, \"segmentation\": {\"size\": [535, 800], \"counts\": \"keY42d`02N2N100O2O00001O0O100000000000O100O1000001N101N1OcfZ8\"}}, {\"image_id\": 144, \"category_id\": 1, \"bbox\": [257.2013244628906, 227.08888244628906, 45.56488037109375, 53.31501770019531], \"score\": 0.9999992847442627, \"association_id\": 2, \"segmentation\": {\"size\": [535, 800], \"counts\": \"WfZ47]`04L7J4L4M3L4M3M1M3O2N2O2N00001O0001O01O1001N000Lj@gNV?n0>N2O3M2M5L101O2N1O1O3K7Jj]S8\"}}, {\"image_id\": 144, \"category_id\": 1, \"bbox\": [555.806396484375, 229.23275756835938, 46.6561279296875, 57.57958984375], \"score\": 0.9999990463256836, \"association_id\": 4, \"segmentation\": {\"size\": [535, 800], \"counts\": \"fjT91f`01N1O10001O0O2O0N3N1O3k_OHf?9X@Jf?e0L5M1OO2Ce@[Om>1YAc0S?\\\\Ok@f0T?[Ok@g0S?[Ol@e0T?]Og@g0X?[Oe@g0Z?YOf@Q1P?<O001O\\\\NPAa1T?O0@PAYOQ?e0SAXOm>f0WAWOj>h0XAUOk>i0c0M2M4N1AV@NnXY3\"}}, {\"image_id\": 144, \"category_id\": 2, \"bbox\": [459.0621337890625, 283.617431640625, 42.444061279296875, 14.24102783203125], \"score\": 0.9999970197677612, \"association_id\": 5, \"segmentation\": {\"size\": [535, 800], \"counts\": \"dfb75b`00O100O1O100O00100O1000O0100O1000O100000000000000O10000000000O100O2O0O2NPnk4\"}}, {\"image_id\": 144, \"category_id\": 2, \"bbox\": [174.4015655517578, 235.9443817138672, 23.4996337890625, 6.7639007568359375], \"score\": 0.9999932050704956, \"association_id\": 3, \"segmentation\": {\"size\": [535, 800], \"counts\": \"oUl21e`02O1N100000000O10000000O2O001O0000000O2Onij9\"}}, {\"image_id\": 144, \"category_id\": 2, \"bbox\": [562.3320922851562, 284.822998046875, 40.694091796875, 13.838409423828125], \"score\": 0.9999864101409912, \"association_id\": 4, \"segmentation\": 
{\"size\": [535, 800], \"counts\": \"enV91e`01O2N1N2O1O1N101000O010000O010000000000O100000000O1000000O1000000O2N100NUfW3\"}}, {\"image_id\": 144, \"category_id\": 2, \"bbox\": [504.5165100097656, 255.98114013671875, 44.027618408203125, 7.354095458984375], \"score\": 0.9999576807022095, \"association_id\": 6, \"segmentation\": {\"size\": [535, 800], \"counts\": \"RSX83d`01O0O0100000O100000000O10000000000000000000000000000000000000000000000000O10001Nb^T4\"}}, {\"image_id\": 144, \"category_id\": 1, \"bbox\": [256.24176025390625, 199.4889678955078, 49.86669921875, 43.452178955078125], \"score\": 0.9996707439422607, \"association_id\": 0, \"segmentation\": {\"size\": [535, 800], \"counts\": \"abX41e`03M101O0O1O1O1O2N100O2N1N2O1N3N0O2O1O100O2N101O0O1000001O0O10000000002NI8N1N2O1O1O1L4G:MklQ8\"}}, {\"image_id\": 144, \"category_id\": 1, \"bbox\": [566.8699340820312, 208.42205810546875, 25.49310302734375, 37.811920166015625], \"score\": 0.9978087544441223, \"association_id\": 0, \"segmentation\": {\"size\": [535, 800], \"counts\": \"WoX9:1IV`0=N2O100O0O2N2N2N200O10000001O001O4L2N1O1O12ONc^\\\\3\"}}, {\"image_id\": 144, \"category_id\": 1, \"bbox\": [562.9188842773438, 208.8020782470703, 32.642578125, 66.56968688964844], \"score\": 0.07319187372922897, \"association_id\": 0, \"segmentation\": {\"size\": [535, 800], \"counts\": \"X^W93c`0101kNOaA2]>8[AIa>;[AId>6\\\\ALb>4]AOa>2]A3_>N`A3^>0`ANE]Oi>g0aALa>7]AJb>6_AJa>7]AJc>5^ALa>4^ANa>3]AOb>0_A1a>O]A3c>L\\\\A6f>HYA9h>j01O1ISAaNo>]17M3iNl@i0b?M3N7GU[Z3\"}}, {\"image_id\": 145, \"category_id\": 1, \"bbox\": [1420.466552734375, 121.09798431396484, 166.429931640625, 149.30801391601562], \"score\": 1.0, \"association_id\": 2, \"segmentation\": {\"size\": [1060, 1600], \"counts\": 
\"i[V^19iP13L5J6H;E8VOoN[QOT1cn0POXQOS1gn0oNTQOU1jn0e0N1O1N2O1O1O2N3M1O1RRO[M[m0f2YRO_MMNjm0d2VROfMjm0j20O10000O1O1N3N100O101O1O1O1O001O011O0O100O10O02O0O1O10O100000O1000O0100010N10O0101O100N3N001N200O;E0O10O0O2O2N1N101O001O0O2N2O1O00001O00000O101O0000001L4N2N101N1O1O2N2M2O2N2N2N2O0O2O1N2O1O0O2O1N101N2O100O1O10O01O10000O010O10003M1N010O100O101N10O0100O101N1O002N3M3L3N1N2M9G]X>\"}}, {\"image_id\": 145, \"category_id\": 2, \"bbox\": [3.3683371543884277, 473.3489990234375, 93.36787414550781, 50.30621337890625], \"score\": 0.9999998807907104, \"association_id\": 6, \"segmentation\": {\"size\": [1060, 1600], \"counts\": \"cc4:iP12N3N1O1N101O0010O10O0O2O00001000O01O1O100O100O1O1O1O10O0001O01O01O00000000000000001O000000O2O00000000000000O100000000O10O11O0000O100O100O2O0O100O100O2N1HfoND\\\\P1;7N2O101N2M3Nbdk`1\"}}, {\"image_id\": 145, \"category_id\": 1, \"bbox\": [662.9135131835938, 256.7046203613281, 192.39703369140625, 212.22488403320312], \"score\": 0.9999998807907104, \"association_id\": 7, \"segmentation\": {\"size\": [1060, 1600], \"counts\": \"oe[f03iP1n0SO<G6J5L4M2G:M3M2EeMlQO]2Sn0:O2O0O101N100O1O10O01O1O1O1O2M3N3L3N1O2O2M2O1N2O0O2O0O1N3M2M3O1N2O1O100O100O1O100O1M3N101N2O010O0001M2L5F:M2O100001O1O010O10000000O100000000000O10000000000O100O100O1O2N1O101N101N2N6J<C4M3M2N2N2M2L5J5N3N1O1O1N2O2NVOQUOaKnj0]4VUObKjj0[4ZUOdKfj0Z4]UOeKcj0Y4aUOeK_j0X4fUOeK[j0V4lUOhKTj0U4QVOiKoi0U4TVOiKmi0V4UVOiKki0W4VVOhKji0W4XVOhKhi0V4_VOdKbi0[4d1N2O1O1O1O1O1N2O1O1O1O1O1O1N2N200O2N2N2mLoROo1Xm0cMXSOQ2Rn0K5M2N3L4L6G;@?B=F^oUh0\"}}, {\"image_id\": 145, \"category_id\": 1, \"bbox\": [470.4881286621094, 276.6415710449219, 251.26998901367188, 321.0642395019531], \"score\": 0.9999995231628418, \"association_id\": 5, \"segmentation\": {\"size\": [1060, 1600], \"counts\": 
\"ikX?5nP16RXO3Z>1bA2[>1bA0^>1`A1_>1]A1c>0[A1d>2XA0h>2SA1m>1n@3Q?Ok@3U?Nh@4X?Mf@4Z?Ke@7[?Ib@:^?E_@?a?@X@h0h?WOm_OU1S`0jNi_O[1W`0eNf_O_1Y`0`Nf_Ob1Z`0_Nd_Ob1\\\\`0_Nb_Ob1^`0_N__Oc1a`0_N[_Oc1e`0^NV_Of1j`0[NR_Oh1n`0YNo^Oi1Qa0XNlYOTOj3e2Zb0XNaYO^OS4[2\\\\b0WNeYOZOn3`2]b0TNPZOROb3l2^b0PNj^OR2Va0lMh^OX2Xa0eMf^O`2Za0_Mc^Oe2]a0ZMa^Oi2_a0VMa^Ok2_a0TM`^On2`a0QM`^OP3`a0PM^^OR3ca0mLZ^OV3fa0iLX^OY3ia0gLR^O^3na0bLP^O`3Pb0_Lo]Oc3Rb0\\\\LT[O@<U4`d0[LS[OB;T4bd0YLS[OE9S4ed0VLS[OH6S4hd0SLR[OL3S4ld0oKR[OONU4gg0lKnWO^4Qh0dKlWO^4Th0dKiWO]4Wh0eKeWO]4Zh0eKdWO\\\\4\\\\h0eKbWO\\\\4_h0dK_WO]4ah0dK]WO]4ch0dKZWO^4fh0cKWWO_4ih0cKRWO`4nh0bKnVO_4Si0cKiVO_4Wi0bKfVO`4Zi0aKdVO_4]i0bK`VO_4ai0bK\\\\VO_4ei0aKYVO_4ii0cKSVO]4oi0eKmUO[4Uj0Y10O2O1O1O0O2N1O2N1O1O00111N100O2SJ`UOT5`j0^JoUO`5lj0O1O[NkJ\\\\WOT5ch0oJ\\\\WOo4eh0SKXWOn4gh0`KlVO`4Si0aKmVO_4Ri0bKnVO^4Qi0cKoVO]4nh0gKQWOY4nh0hKRWOW4nh0jKRWOV4mh0kKRWOV4mh0kKRWOV4mh0kKRWOV4kh0mKSWOU4jh0mKTWOV4kh0^KiVOTO:`5lh0[KPWOQO3e5lh0ZKSWOPO0g5lh0YKUWOPOOg5mh0YKRWOROOf5oh0XKQWOTONe5Ri0XKkVOZON_5Xi0WKeVO_5[i0T17J7I2N2MM4G9L3O2O1N2N1O2N110O1000O010000O10000O2N1N3M2M4MZK[WOQ1bh0QO`WOo0^h0QOeWOm0Zh0SOgWOm0Xh0TOiWOk0Uh0VOlWOk0Rh0VOoWOgLEa3\\\\h0GPXOfLFc3Yh0HRXOcLGd3Wh0HSXObLHg3Sh0HUXO`LJg3Qh0HWXO_LIi3Ph0HXXO\\\\LKk3mg0JiXO5Xg0JhXO7Wg0IjXO6Vg0JkXO5Ug0KlXO4Tg0MlXO2Ug0MlXO2Tg0NmXO2Rg0NnXO2Rg0MoXO3Qg0MPYO2Pg0NPYO2Pg0NPYOQNVOJig0T2SYOoM_OD^g0]2SYOmMCDZg0_2TYOkMDFWg0_2UYOjMFFUg0b2TYOfMIGUg0c2QYOcMNHRg0g2nXO_M3HPg0j2kXO\\\\M9Hnf0k2iXOZM=Jjf0m2gXOXMb0Ihf0o2eXOXMd0Hhf0o2eXOWMe0Igf0AbWOQ3Q1dMh0Iff0@bWOS3P1dMh0ISg0c2UXOcMj0Jof0c2YXOaMi0Lnf0c2YXO`Mk0Llf0d2YXO_Mm0Lif0e2\\\\XO\\\\Mn0Oef0e2]XOZMQ10bf0f2]XOXMT11_f0g2^XOUMV13`f0e2YXOXMW13cf0d2SXOYM\\\\11df0e2oWOYM_10ef0g2jWOXMc1Off0R3\\\\WOnLP2Mhf0W3TWOkLV2Lif0Y3nVOjL\\\\2Khf0[3kVOhL`2Kgf0]3hVOgLc2Ihf0_3fVOfLc2Jif0_3eVOdLbl0Z3^SOeLel0Y3e0N2N2N2nMSROi0om0SOUROk0nm0RORROn0Qn0oNoQOQ1Tn0lNlQOT1Vn0iNkQOX1Xn0bNjQO^1Qo0MH[NVQOa1hn0`NYQO_1gn0aNZQO_1Uo0O2O1O001O001N101O0O2O0O2N2N2M4H8F;I6Kelhl0\"}}, {\"image_id\": 145, \"category_id\": 1, 
\"bbox\": [1024.3389892578125, 144.4161834716797, 162.6737060546875, 85.62812805175781], \"score\": 0.9999995231628418, \"association_id\": 3, \"segmentation\": {\"size\": [1060, 1600], \"counts\": \"SXWQ18iP14M4N2M2O1O1O1O1O3M2N1O2N2N3M3M1O1O1O1O3M3M1O1O1O1O1O2N1O1O1000O0100O0010O00010O01O001O0010O01O1O1O3oPOnMjn0X2N1O1N101O1O00000000000000O100O100N2O1O1N3M2O1O1O100O1O101N1O101O0O1O101N10001N2N100O2N101M3N1O2O0O2N2N2N2O001N2O2N001N101O1O1O1O00001O1O001O0000O1000O1000001O001O1O001O1002NO010O001O00001O0O100FZoN6jP1M3O2Ofcc=\"}}, {\"image_id\": 145, \"category_id\": 1, \"bbox\": [121.71186065673828, 210.93589782714844, 233.81399536132812, 355.46734619140625], \"score\": 0.9999991655349731, \"association_id\": 4, \"segmentation\": {\"size\": [1060, 1600], \"counts\": \"VSY42QQ13L2O2M2M3L5M2N2O2N1O1N2N2N2O1O1O1O1O1O1O001M3N2O1N110O100O1O010O1O001N1O2M2N3L3O2N101O1O1O1O1N2M3N2O1O1O2N2M3M3M3M2N3N2N1O1N2N3L3N2O1O2N1O1O2N1N3L3O2N2N2O1N2O1O1O001O1O0O100O2O0O1O1O1O1N2O2N2O0O2N2O1N3N2N2M3bTOlJhj0X5mTOVKkj0g5SNjIbXOX6Wg0oIfXOS6Vg0RJgXOP6Ug0TJiXOn5Tg0UJjXOl5Ug0WJgXOk5Wg0YJeXOi5ke0`IZZOk0Fi5ne0aIWZOl0Bg5Vf0bIRZOm0^Og5_f0`InYOo0WOi5jf0[IkYOY8Uf0hGhYOZ8Xf0hGdYO[8[f0gG`YO\\\\8_f0hG[YO[8ef0R1000001O0000O10000`N\\\\YO`Hdf0^7`YO_Haf0_7cYO_H]f0`7fYO^HZf0a7hYO^HXf0`7lYO]HUf0a7oYO]HQf0a7RZO]Hoe0a7UZO\\\\Hle0c7VZO[Hke0d7WZOZHje0e7XZOYHie0g7WZOWHke0h7WZOUHke0j7WZOSHke0l7WZOQHke0n7XZOmGle0Q8d1N2O1O1O100O2O0O10001N100O2M3M2N3M3N2N2N1O00000O2N1O2N102N1O2N4K5K4L3M4M2N4L4K6J=B:F6oMmSOhNXl0m0VTOlNnk0o0YTOlNhk0S1aTOcNbk0[1kTOQN^k0n1m1N2O1aNSQOd0on0[OQQOe0Po0YORQOf0Po0WORQOg0Qo0UORQOj0Po0POVQOo0`o0O1O2N1O2N2O11OCloNIRP14SPOImo05UPOLko02VPONjo01XPOOho00XPO0ho0NZPO2`P1JY`ZX1\"}}, {\"image_id\": 145, \"category_id\": 1, \"bbox\": [1146.0771484375, 102.00130462646484, 140.8135986328125, 108.61681365966797], \"score\": 0.999998927116394, \"association_id\": 0, \"segmentation\": {\"size\": [1060, 1600], \"counts\": 
\"hTfU12PQ15K3N2N1O101N1000O0100O0001O0O2N2O00O1O1L5N101O1N2O1N101O1O1O1O1O1O1O1N2O1O1O1O1O1N200O1O1O1O2M200O1O100O1O1O100O2N1O2O0O2O1O1N2O1N2N101O1N100O1O1O10O1O001K5O0O10001O01O000001O00010O00000001N10000000001O01O001O1O1O1O1XOlPO_OUo06_QO@bn07mQO^OXn0>Y1N2O100O2N1O3MSoU:\"}}, {\"image_id\": 145, \"category_id\": 1, \"bbox\": [1034.5771484375, 244.02127075195312, 368.026123046875, 474.5900573730469], \"score\": 0.9999980926513672, \"association_id\": 1, \"segmentation\": {\"size\": [1060, 1600], \"counts\": \"eVfQ12QQ18H7I4L2N2N2N1O2N2N1O2O0O1O2N1N2O1N2O1O1O2N100O100O1O100O1O1O1N2O1N2N2N2O1O1O100O1O1N2O1N2M3O1N2O1O1O1O100O10O0100O100O10O01O1O100N10001O000000O2O001O001O010O01O1O1O1O001N2N2N2O1O001O100O1O1N2O1N2N2N2N2O1O1O1O10000O1000000O10000O10000O001O1O1O1N2N2O1O1O1O1O100O1kHjK[AW4c>lK\\\\AT4c>oKZAR4d>QLZAP4e>RLYAn3g>TLVAn3h>ULUAm3j>ULSAm3l>ULQAm3n>ULPAl3o>ULo@m3P?ULn@l3Q?VLn@i3S?WLl@j3S?XLk@j3S?XLk@i3U?XLh@j3W?XLg@i3Y?XLd@j3[?XLP_OInK`4Re0gKj^Ob5Va0_Je^Oe5Za0]Ja^Og5_a0YJ^^Oj5ba0WJ\\\\^Oj5da0VJZ^Ol5ea0VJY^Ok5ga0VJW^Ok5ia0WJS^Ol5la0UJR^Ol5na0VJn]Ol5Rb0VJi]Om5Wb0VJc]Om5]b0UJ[]OR6db0oIT]OX6lb0jIm\\\\O[6Sc0fIj\\\\O\\\\6Vc0eIh\\\\O\\\\6Xc0eIf\\\\O]6Yc0dIe\\\\O]6[c0dIc\\\\O^6\\\\c0dI`\\\\O_6^c0cI`\\\\O_6_c0cI]\\\\O`6bc0cIX\\\\Ob6fc0`3N2N3M1O2N1N3N1O002N1O104K5K5\\\\\\\\OoBkb0f=O1O1O1O1O1O0O0100000000O1000O1000O1000000000000000000000000O10000000000000000000000000000O100O10000O1]No\\\\ORERc0l:Y]OjDib0S;_]OfDbb0Y;b]OcD_b0\\\\;d]OaD]b0^;f]O_D[b0`;h]O]DYb0b;i]O\\\\DXb0c;j]OZDYb0d;i]OYDYb0g;h]OWDYb0h;j]OTDYb0j;a1O1O2M2O2O0O2N101O1O1O1O1O1O001O1O1O010O001O01O010O10O010O10O0100O10O01O1O010O1O10O01O1O1O100O1O000O1000000bMbZOVI^e0f6iZOWIWe0e6oZOXISe0d6R[OZInd0d6U[O[Ikd0c6X[O\\\\Ihd0c6Z[O\\\\Ifd0b6][O\\\\Ied0a6_[O\\\\Ibd0b6a[O\\\\Iad0a6c[O\\\\I^d0a6j[OYIXd0a6R\\\\OTIUd0X6c\\\\O[Iec0\\\\6g3J6M2O2M3N1O2N1N3N1N3M2N3M3M2N3M2O2N1N3N2M3M2N3M3L4M3M3M4L3L5M4K7I7I6I8H;^O_d\\\\6\"}}, {\"image_id\": 145, \"category_id\": 2, \"bbox\": [1426.1805419921875, 259.122802734375, 
166.5333251953125, 13.1458740234375], \"score\": 0.9999920129776001, \"association_id\": 2, \"segmentation\": {\"size\": [1060, 1600], \"counts\": \"RSi^11SQ100000000001O000O1000000000000000000000000000001O00000O1000000000000000000000000000000000000000000001O000O100000000000O1000000000000000O100000001O0Oa]<0_bC1O101OO10O100000000000000000000000O100000000O1000000O1000000000000000fjo0\"}}, {\"image_id\": 145, \"category_id\": 1, \"bbox\": [0.0, 256.2538757324219, 111.86620330810547, 250.93841552734375], \"score\": 0.9999897480010986, \"association_id\": 6, \"segmentation\": {\"size\": [1060, 1600], \"counts\": \"T8;i0l3nj0cL^TOQ4>cKei0b6L2N2N2N0O100000O100O100O100O100O10000O1001O000000001O00001O1O1O1O001O0100O2N3M1O2N3M3M1O3M3M3M1O1O100O2N2N1O1O1O2M4M2N001O0000000000001O1N2O2N2N001O1O000O101N1O3O2M2N0O2N1fN[TOTMfk0g2gTOPM[k0m2jTOnLYk0n2mTOmLVk0n2e1K3M3L3N3M3N3K7G7J5L5IaX\\\\`1\"}}, {\"image_id\": 145, \"category_id\": 1, \"bbox\": [488.6742858886719, 102.61451721191406, 178.14523315429688, 170.38121032714844], \"score\": 0.9999845027923584, \"association_id\": 0, \"segmentation\": {\"size\": [1060, 1600], \"counts\": \"o]k?4nP16K2N1N2O2O0O100O10O0100O00100O010O10O010O01N101O001O100O1N2O100O1O010N2O0O10100O1O1O1O1O100O101O0O1O1O2O0O2O001UQOaNnm0`1fQOaNJ3^n0]1fQOcNI1an0\\\\1eQOeNH1bn0[1eQOdNI0cn0]1bQOeNIOen0]1aQOmN^n0U1`QOkNan0W1\\\\QOjNdn0k10O0100OJ]QOmMcn0S2_QOkM`n0V2`QOjM_n0W2cQOgM]n0X280O100000000O010000000000O1O1H7O2N200001N100^NRNnSOn1lk0YNRTOh1nk0ZNPTOe1Pl0^NmSOb1Tl0bNgSOW1al0nNZSOQ1fl0TOUSOl0ll0WOPSOi0Qm0YOlROh0Tm0[OeROi0[m0ZO\\\\ROl0dm0Z10[OZROmMem0S2\\\\RObMN0fm0]2^ROaMN0em0_2]ROaMN0em0_2]RO`M1Ncm0b2\\\\RO`M3Kbm0e2[RO_Mom0a2QRO_Mom0a2QRO_MPn0a2nQO`MRn0j20O1O1O10oQOTMim0j2:EhQOfM\\\\n0V2jQOeMZn0n1cQOSNjn0j1;L3N2O1N2I7O1N2O001O0O2O001O001O2O2M2O11N3MO2GmoN]OXP1>:M2N1O0005K1O001O00000O101O000000N3N101N1O2N4IRgWn0\"}}, {\"image_id\": 145, \"category_id\": 2, \"bbox\": [1016.890869140625, 655.8394775390625, 261.361328125, 85.98858642578125], \"score\": 
0.999980092048645, \"association_id\": 1, \"segmentation\": {\"size\": [1060, 1600], \"counts\": \"h^mP19iP14M2N2O001N100O10000O2O00000O10000O1000000O2O0000000O100O10000O10O10O10000O10000O10O10O100O1O010O100O01000O1O100O100O0100000O10O01O001O100O10O1000O100O1000O0100O010O1O010O10O1000O010O100O01000O01000O010O100O010O10000O100O01000O1000O010000O100O100O10O100000O10000O100000000O010000O10000000O01000000000O100000O1000000O100000000O100000000000000O10000000000000000O10001O0000000000001O0O10000000000^O]POCco0;`POD`o0;bPOD^o0;dPOD\\\\o0;ePOE[o0;fPOC[o0<gPOCYo0<iPOCXo0:kPOCWo0<i0O1O2N101N2NY[n;\"}}, {\"image_id\": 145, \"category_id\": 2, \"bbox\": [117.2367935180664, 507.38232421875, 160.24462890625, 57.256591796875], \"score\": 0.9999614953994751, \"association_id\": 4, \"segmentation\": {\"size\": [1060, 1600], \"counts\": \"Ufj31SQ12M101N1O101N100000000O2O00000O1000000O10000O1O10000O10000O100000000001OO10O1000000000O1000000000000000O100000000O1000000O_OE[PO;do0F[PO;do0G[PO8eo0I[PO7eo0I[PO7eo0JYPO6go0KXPO6go0LXPO3io0MVPO4io0NUPO3jo02QPONPP1c0O1O01000O10O10000O0100000000O10O10000O1000O1000O1000000O10O10000000O1000O010001O0O100O1O2N3[Og\\\\Q\\\\1\"}}, {\"image_id\": 145, \"category_id\": 2, \"bbox\": [985.7501831054688, 218.26458740234375, 164.55792236328125, 18.688201904296875], \"score\": 0.9995989203453064, \"association_id\": 3, \"segmentation\": {\"size\": [1060, 1600], \"counts\": \"ac_Q11RQ1101O0O100000000000000O100000001O000000000000000O1000000000000001O000000001N100000001O0O10Pb`00P^_O1N10000O10000000000000000O1001N1000000000000O10000000000O1O`]<0abC00000oig>\"}}, {\"image_id\": 145, \"category_id\": 1, \"bbox\": [285.4359130859375, 158.09274291992188, 108.3951416015625, 143.08917236328125], \"score\": 0.9942240118980408, \"association_id\": 0, \"segmentation\": {\"size\": [1060, 1600], \"counts\": 
\"bhf93PQ14QoNNdP1;M2M4M2O2M2O2N1O105J2O0O2N1O1N2O2N1O1O1O2N1O1N3O0O2O001O0O1O101N1O00100O2M2O2N1O1O1O2N1O2M4L4M3M0O1O2O1O1N2O2N0O101O001000O1O1O1O100O1O1O1O1O1O1O1O1N2O1N2N2O1N2L4N2N2O1L4L4M3VOoPO[OWo0:o0N3N3IlWPW1\"}}, {\"image_id\": 145, \"category_id\": 2, \"bbox\": [443.8020935058594, 425.9079284667969, 100.18154907226562, 16.639404296875], \"score\": 0.9907733201980591, \"association_id\": 8, \"segmentation\": {\"size\": [1060, 1600], \"counts\": \"^^h>1RQ110001O0O100000000000000001OO100000001O00000000001O000000001O00001O0O10001O0000001O000000001O0O1000001O0000000000000000000000001OO100O101N1O3K4M`WdR1\"}}, {\"image_id\": 145, \"category_id\": 1, \"bbox\": [276.5506286621094, 74.537841796875, 74.42803955078125, 75.43693542480469], \"score\": 0.9607856273651123, \"association_id\": 0, \"segmentation\": {\"size\": [1060, 1600], \"counts\": \"ajn8h0ZP14L4M3M2K6M1N4M2N2M3N101O001O001O001O1O001O00001O1O0106RNgPOb1`o0N2M3M102N1O2N1O0O2O1O1O002N001O1O1O1O1N2O2O0O2N1O1O2N1O1O3M1O2MjWjX1\"}}, {\"image_id\": 145, \"category_id\": 2, \"bbox\": [469.3965759277344, 415.2380676269531, 227.48062133789062, 166.82009887695312], \"score\": 0.9183478355407715, \"association_id\": 5, \"segmentation\": {\"size\": [1060, 1600], \"counts\": \"VRX`03PQ14M1N100000000000000000000000O2N1N3MeXQ12ZgnN1O2N1O1O1O1O1O1O1O1O100O100O100O1000000O10000O100000000000O10000000O010000O10000O0100000O100000000O1000000O10000O100O100O101O0O1000001N102N1N2O1Nod^o0\"}}, {\"image_id\": 145, \"category_id\": 2, \"bbox\": [676.314453125, 441.122802734375, 133.69677734375, 30.4371337890625], \"score\": 0.8495500683784485, \"association_id\": 7, \"segmentation\": {\"size\": [1060, 1600], \"counts\": \"YaUf01QQ13N1O100O1N2N2O1001O0000000O102N000O01O100000O100000O01000000O2O0000000O10000000000O1000000000O010000000000000O01000O10000O1O1O1IM\\\\oN4cP16O200O10000O010000000O1000000000000000000000000000O100000000000001O000O1000001N101HZoN0fP1L`oN1jP101N[joi0\"}}, {\"image_id\": 145, \"category_id\": 2, \"bbox\": 
[421.88543701171875, 478.790771484375, 69.44683837890625, 10.749267578125], \"score\": 0.7602599263191223, \"association_id\": 0, \"segmentation\": {\"size\": [1060, 1600], \"counts\": \"mYe=2QQ1101N10000000000000000O10000000000000O100000O1000000000000000000000000000O10000000000000000000O10000001O000000001O000OgjTT1\"}}, {\"image_id\": 145, \"category_id\": 1, \"bbox\": [463.2852783203125, 259.3684997558594, 218.9986572265625, 203.86141967773438], \"score\": 0.51548171043396, \"association_id\": 8, \"segmentation\": {\"size\": [1060, 1600], \"counts\": \"aao>1eP1f0G3M2N2O1O1O1O1O1O001O001O001O1O2N4L2N2M3N2cPOcNon0S2F6J3M2N1O100O2N2N3M2N1O1O1O1O1O2N2N3M1O100O1O100O10000O10000000O1000000O100O2O3L3N2oMWROb0im0[OZROSOLW1im0E\\\\ROROLZ1hm0B]ROTOK[1im0_O]ROTOK]1jm0\\\\O]ROTOL`1gm0YOiROg0Xm0XOhROi0Xm0UOkROi0Vm0VOnROg0Qm0YOPSOf0Pm0ZOPSOf0Pm0ZOQSOe0Pm0ZOPSOf0Pm0[OPSOd0Pm0\\\\OPSOe0ol0[O^RO[O4Z1]m0[O_RO[O4]1Zm0XObRO[O4]1Zm0YOaROZO5]1Zm0ZOTSOf0ll0[ORSOf0ol0ZOPSOf0Pm0\\\\OmRO5XO]Okm0a0jRO1\\\\O_Oim0a0_RORONn0K^Ogm0d0kROL@^Ofm0f0kROIC_Obm0i0lRODjm0<\\\\ROQOPn0P1R1000000001O00001O1O3MO1M3O2ORROnNjk0Q1UTOQOlk0n0STOTOlk0l0STOUOnk0j0RTOVOok0i0PTOYOPl0e0QTO[OPl0d0PTO\\\\ORl0a0nSO@Sl0?mSOAUl03mQOKn13Vl00oQOKj16Xl0MRROIf1;Yl0JPTO8Pl0FQTO;Pl0CoSO?[n0000O1000000O1000000O100O10000O10O10O10000O100O100aMXObTOh0]k0ZObTOf0^k0ZObTOf0]k0[OcTO4eM7hm0EcTO1jM7cm0IdTOMmM8^m0LTUOLnj07h28N2N2O1N2N3N1N2O1O1O2O00O1000iROROXj0n0gUOTOXj0l0^TOPO[O1^O6fl0i0aTOROYO1^O6gl0f0cTOTOiNN04K6hl0d0dTOTOiNN04K6hl0e0cTOSOjNN04K5hl0g0cTOROjNN13I7il0g0bTOQOkNN13I7il0g0bTOPOlNO04H6jl0g0bTOPOlNO13F8jl0g0cTOoNlN00b0al0`0aTOPOlNO1c0al0`0_TOPOmN1Mc0fl0>]TOQOlNQ1gl0:oSO6Pl09_SOIal0^2O1O1O1O0001gMZSO2fl0M`SOO_l01cSOM]l02eSON[l01fSONZl03dSO0Zl0_2O1O2M3O1O1O00O0O1QNoSOiNRl0R1VTOjNlk0Q1YTOmNik0o0[TOoNfk0m0`TOPOak09eSOiNP1l0_k0oN`SOf0l16Tn0I^mfm0\"}}, {\"image_id\": 145, \"category_id\": 2, \"bbox\": [0.0, 536.0315551757812, 36.487388610839844, 12.24444580078125], \"score\": 0.4711763262748718, \"association_id\": 0, 
\"segmentation\": {\"size\": [1060, 1600], \"counts\": \"Rb14oP12O0000001O0000000000000O10O100000000O011OO10O2OO11N1000\\\\ehb1\"}}, {\"image_id\": 145, \"category_id\": 2, \"bbox\": [111.18588256835938, 326.95318603515625, 193.1358642578125, 223.34771728515625], \"score\": 0.1540309339761734, \"association_id\": 0, \"segmentation\": {\"size\": [1060, 1600], \"counts\": \"ZWX51SQ10SZ91keF2M2O2O00OFKgoN5XP1OdoN2[P11coNO]P12boNN]P13boNM^P1>O0O1O2N1O1O10000O1000000O100000000000000O10000000000000000000000000000O100000000000O1000O100001N1O1M4L4L4K5HXVS1MQjlN3N3L8I3L2O1O0000000000001O0000000O100001O0O101O1N1O2O2L4L5JjQmY1\"}}, {\"image_id\": 145, \"category_id\": 2, \"bbox\": [1000.4912109375, 204.6900634765625, 164.5811767578125, 32.10520935058594], \"score\": 0.06828732043504715, \"association_id\": 0, \"segmentation\": {\"size\": [1060, 1600], \"counts\": \"TidQ13QQ1000O101O0000000O1000000000000O10000000000000000000000000000001O0O10001O0O2O\\\\ec01bZ\\\\O100O10001OO100000000000000O100000000O1000001N1000]`\\\\?\"}}, {\"image_id\": 145, \"category_id\": 2, \"bbox\": [520.4398193359375, 530.3577880859375, 114.14080810546875, 60.25152587890625], \"score\": 0.06639561802148819, \"association_id\": 0, \"segmentation\": {\"size\": [1060, 1600], \"counts\": \"ahea03PQ11O1M201N2N2M2O2O1N1N3N101O1O1000O0100O100000000000000000O10001O0O100O2O00000O101N10O10000O010O01000O1000O100000O010001N10001N2O1O1N2O1O001N1000001O0O2O001N2OQf_o0\"}}, {\"image_id\": 145, \"category_id\": 2, \"bbox\": [1416.9010009765625, 222.93426513671875, 49.761962890625, 9.110671997070312], \"score\": 0.06231611967086792, \"association_id\": 0, \"segmentation\": {\"size\": [1060, 1600], \"counts\": \"[[T^12RQ100000O2O00000000000000000O1000000001O0000001O000000000O1001O01N0100000O10ma\\\\4\"}}, {\"image_id\": 146, \"category_id\": 1, \"bbox\": [515.62939453125, 104.13467407226562, 122.87579345703125, 299.0652770996094], \"score\": 1.0, \"association_id\": 3, \"segmentation\": {\"size\": [422, 656], 
\"counts\": \"ajd6U1m;9H6I8H9I5K4K5K7J5L3jH]Md3f2VLbMd3d2mKlMl3[2gJQOR5V1eJQOX5T1`JQO_5U1UJQOk5U4O100000O10000O0O2N2N1001M4M2N3N3L4K4L5L3K6J7K5L2N2O0O1O2N1N3M2O2N2O1O1O1O1OO1N2N3M2N2M3eMYLgJX4k4]KQJ`0?e4]5nJSJ>;j4a5eJVJd01n4h5YJ\\\\Ji6Q60002N\\\\OXIlJe6Q5_IoJ`6n4cIRK^6j4dIWK_6d4cI\\\\Kd6\\\\4\\\\IdKg6X4[IgKg6V4ZIkKg6S4YImKj6Q4UIoKn6n3RIRLo6m3PITLQ7j3QITLP7l3QIRLQ7l3RIPLP7P4QImKR7S4S10gGdKd7]4WHfKj7[4PHiKP8f41O010O1O2N3L4K5D<J6jMnF6U9[OZGe0b:0O00001O0001O1N3M2N1O202M2N1O1N2N3M2N1N4J]T7\"}}, {\"image_id\": 146, \"category_id\": 2, \"bbox\": [75.11219787597656, 307.6036682128906, 309.2965087890625, 65.03619384765625], \"score\": 0.9999997615814209, \"association_id\": 1, \"segmentation\": {\"size\": [422, 656], \"counts\": \"kVo01S=:G2N2O1N101O00001O002N1O001O0000001O001O1O1O1O1O00001O00010O02N3N5J100O0010O01O1O1N2O8GYi5LiVJ5M2N5L0O2N2N2O1N10001N100O1O10000O101N1000000O1000000000000000000O1000000001O00000000000000000001O000000000000000000000000000000000000000000000000000000000000000O1000000O1000000000000O10000000000O1000000000000000000O10000000000001O0000000000000000000000001O000000000000000000000000001O00000000000000000000000000000O100000000000000000000000000000000000000000000000001O000000001O0O10000000001O0000001O0000001O001O00001O0000001O001O1O1O1O1O0O2O001O00001O001N2O001O001O00001N10001O4LZ^c3\"}}, {\"image_id\": 146, \"category_id\": 1, \"bbox\": [397.5284423828125, 110.06561279296875, 70.40243530273438, 169.89462280273438], \"score\": 0.9999996423721313, \"association_id\": 2, \"segmentation\": {\"size\": [422, 656], \"counts\": \"hVT5R1S<101O0O1O1N2ROVO_El0]:[O]Eg0^:U1G:I5J8K4M3L7J4K8J2M3M2M3M3N1M2N200N200001O100001O2N3M2N4L2O1N1101M2O7G3M1O2M3N3K5`M]HSOi7j0]HPOg7l0\\\\2M4N2M2O6K6J7I2N1O1N1O1O1N3L6IUT^2\"}}, {\"image_id\": 146, \"category_id\": 2, \"bbox\": [390.41546630859375, 262.04534912109375, 43.81903076171875, 11.8587646484375], \"score\": 0.9999996423721313, \"association_id\": 2, \"segmentation\": {\"size\": [422, 656], \"counts\": 
\"b\\\\Q53R=4M00001O000000000000000000001O00000000000O2O01O002N001O0000000000O12N00001O001O1N]dk2\"}}, {\"image_id\": 146, \"category_id\": 2, \"bbox\": [405.4457092285156, 384.97650146484375, 161.98342895507812, 32.12591552734375], \"score\": 0.9999986886978149, \"association_id\": 3, \"segmentation\": {\"size\": [422, 656], \"counts\": \"XfW54R=1O0O2O001O000000001O0000001O000000001O00000O101O00000000000001O00000O10000000000O100000000000000001O0000000000001O00000000001O0000000000001O000000000000001O0000000000001O00001O00001O001O00001O001O1O00001O001O1O00010O001O10O000000010O00001O010O0000001O0000000000000000000000000O10001O001O1O1O1O1O3M1O2N1O001O1O1N3M^`U1\"}}, {\"image_id\": 146, \"category_id\": 1, \"bbox\": [78.34237670898438, 118.51734924316406, 332.40093994140625, 240.8878631591797], \"score\": 0.9998672008514404, \"association_id\": 1, \"segmentation\": {\"size\": [422, 656], \"counts\": \"lgP1V1f;?F8J4K6J7J6K5K4L3M4M2N3M3M3N1N2N2N3M2N4L3N3L6J4L4M3L3N2M2O1N3M4L4L6K4K4L4L2N2O1N2N2M3M3M4L4M3M3N2M2O1O1O0O101O00001O00000O2O000000000000O1000000000000O100000000000000O10000000000000000000000O10000000000000000000000000000O10000001O000000000001O00000O100000001N1000000O2O00001O00001O0O2O001O001O001O001N10001O00001O000O110O0000001O0000010O00000000001O00000000001O0000000001O00001O00001O00001OO10000O2O000O101O000O1000000010O0000000000000001O0O2O001O0O101O00001O00001O1O1O1O100O1O001O010O000010O01O10O01O100O1O1O101N1O3N2M3N1N1O100O10O0100O2O1O1O1O1N100O10O01O10O0101N101N101N100O1O001O1N2O1N2O1O1N2mKUGi3k8QL]Gl3Q9N1N2N2N3M3M5J6E9F;F9I>B8_O`0E<ZOTZ[3\"}}, {\"image_id\": 146, \"category_id\": 1, \"bbox\": [245.40359497070312, 92.33637237548828, 58.637664794921875, 51.100074768066406], \"score\": 0.3870564103126526, \"association_id\": 0, \"segmentation\": {\"size\": [422, 656], \"counts\": \"Z_U3`0e<1O101N1O2M2O1N4N1N2O1O2N2nCmNm;W1O004L1O00O1L4N2N20000O10000000000001O000000000000001O1O00001O0000O2LQDnNP<T1001O1O1O2N2M4K=C`n`4\"}}, {\"image_id\": 146, \"category_id\": 2, 
\"bbox\": [69.25408935546875, 233.92703247070312, 313.717041015625, 138.10513305664062], \"score\": 0.3835083246231079, \"association_id\": 0, \"segmentation\": {\"size\": [422, 656], \"counts\": \"gVo02S=3M2M4N2M3lNA]E`0`:B_E`0^:CaE>]:CbE>]:CcE>\\\\:CbE>`:@`Ea0`:]O`Ee0`:ZO_Eh0a:WO_Ej0a:UO^En0`:RO_EP1Z;1O2N1O2]OjNTEV1^;010O01O010000000O00100O1O101N6K`0@2M1000O010O01O1N3MZR2LjmM2O1O1O1O1O4L2N1O1O1O001N2O001O0O2O0O101N101N1000001N10000O100O100O10000000000000000000000000001O0001O0000000000001O00000000001O0000000000000000000000000000000000O1000000O10000O1000000O10000000000000000O100000000000000O100000000000000001O0000000000000000000000000000000000000000000000001O000000000000000000000O010000000000001O0000000000000000000000000000O101O01O00000000000000000000001O00001O000O2O00001O000000001O00001O00001O001O001O001O0O2O1O001N101O000O101O001O0O2O1O1O1O1O1N2O1O1N2OfXd3\"}}, {\"image_id\": 146, \"category_id\": 1, \"bbox\": [526.1785278320312, 238.0642852783203, 77.7449951171875, 172.6913604736328], \"score\": 0.2952742576599121, \"association_id\": 0, \"segmentation\": {\"size\": [422, 656], \"counts\": \"XSi6P1Q9_OaIj0Z6XO`Im0_6WOYHn1e7UNWHn1h7UNRHn1n7f1O0000001O001O00O1001O001O1O001O00001O3M2N1O1O1O1OYLlG\\\\2T8aMWHVOGb2Q8WNkHg1U7XNmHg1S7XNoHh1P7WNRIi1m6VNUIk1i6RNZIQ2c6gMeI^2V6`MlIa2S6\\\\MSIELo2Q7[MSIIJl2S7ZMTIKIk2S7YMUIJIm2R7XMWIJTOO;n2Z7YMXIGTO4:k2Z7ZMXIGUO3:k2Z7ZMWIHUO3<h2Y7\\\\MaIJZO0@]2e7hMcIJHV2h6kMeINGm1P:B?[Od0J9AZQ2KSoM4M1O2N0O1O00O\\\\<JhC6K6J8E7M3O1Xbe0\"}}, {\"image_id\": 146, \"category_id\": 2, \"bbox\": [63.712013244628906, 214.6074981689453, 43.55015563964844, 79.37193298339844], \"score\": 0.20367641746997833, \"association_id\": 0, \"segmentation\": {\"size\": [422, 656], \"counts\": \"hbj03R=2`COi;1VD1i;1SD2l;NTD3k;NTD3k;NTD4j;MUD4k;LSD6m;KoC8Q<>XDXOT;f0kD\\\\OX;a0gD_O];=cDC];=cDC_;;aDE_;<`DE_;<`DDa;;_DF`;IZD86O`;I\\\\D721c;F\\\\D:O2e;A_DQ1k;000001Oc0^O2N3M3M10N102M2N2N2N001O00O1O2MZoQ7\"}}, {\"image_id\": 146, \"category_id\": 2, \"bbox\": 
[51.2874641418457, 211.14247131347656, 34.3879508972168, 26.80169677734375], \"score\": 0.19076259434223175, \"association_id\": 0, \"segmentation\": {\"size\": [422, 656], \"counts\": \"VWe02S=3N1O1O2N1O1O1O001O000000000001ObCER<:nCFT<8lCHV<6iCLV<4jCLW<4gCMY<4fCLZ<5eCK[<9_CIb<O^C8j<N1N2N1000001O002MeeZ7\"}}, {\"image_id\": 146, \"category_id\": 2, \"bbox\": [58.63633346557617, 217.30636596679688, 38.93049240112305, 53.9647216796875], \"score\": 0.1267954260110855, \"association_id\": 0, \"segmentation\": {\"size\": [422, 656], \"counts\": \"Rnh03R=101O001^CMo;3oC1o;OPD3o;MQD5m;KSD6l;KSD6l;KSD6l;JSD8l;ISD8m;HQD:n;HnC::AW;k0hDWO[;e0eD[O^;b0aD_Oa;HVDe0:Bb;>^DBb;1TD3:Lc;NUD77Kc;OWD65Ke;MWD84Ke;MXD73Le;MXD91Jg;MYD9NKj;KXDh0i;VOYDi0R<00@kCOi<M2N2MQ[W7\"}}, {\"image_id\": 146, \"category_id\": 2, \"bbox\": [96.49656677246094, 150.31411743164062, 320.71697998046875, 184.26504516601562], \"score\": 0.08655712753534317, \"association_id\": 0, \"segmentation\": {\"size\": [422, 656], \"counts\": \"jmY15o<5M1N1O2N101O1O0O2O1O1O01O1O9ESjm6\"}}, {\"image_id\": 146, \"category_id\": 2, \"bbox\": [574.8822021484375, 404.1445617675781, 19.10430908203125, 7.575592041015625], \"score\": 0.07878125458955765, \"association_id\": 0, \"segmentation\": {\"size\": [422, 656], \"counts\": \"fh]72S=10000O1000000001O000000001O0000Tbi0\"}}, {\"image_id\": 146, \"category_id\": 1, \"bbox\": [77.92949676513672, 79.33590698242188, 31.062461853027344, 162.1958770751953], \"score\": 0.061362817883491516, \"association_id\": 0, \"segmentation\": {\"size\": [422, 656], \"counts\": \"hYP1d1_;8hN^N`Fd1]9aN_Fc1[9aNfFZ1o8UORG`0^OeN\\\\9n0YG;\\\\OhNV9o0`G7[OjNT9P1bG5[OlNn8R1iG0XOnNo8T1fGO_OlNg8X1iGLg87TGLj8DeF_Oa0h0k8HhFA9h0n8FnF@8b0m8KRG]O6b0j81WHKk73aHA`7<eHB\\\\73QIJo68SIEn65T3O012Ln\\\\S7\"}}, {\"image_id\": 147, \"category_id\": 1, \"bbox\": [144.0771026611328, 148.04214477539062, 454.6514892578125, 801.66748046875], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [1071, 1600], 
\"counts\": \"[RV5<cP1g0_O`0C=D;E9I6L4L4L3N3M2N2O1N2N2O1N2N2O1N2N2O2M2N2O2M2O1N2N2N2N2O1N2M3N2N2N2M3N2N1O2N2N1O2O0O100O100O100O10000O100O10001N10001N101N100O2O0O2O0O2O1N101N2M2O1N3M2N3M2O2M2O2M2O2N1O2O0O1O2N1O2N1O2N1N2N3M2N3L3N2N3M2N2O1O2N1O1N2O1N2N3RDkH^MW7_2UIUMn6i2\\\\IlLg6Q3eIbL_6\\\\3fI^L]6_3iI[LZ6c3jIYLX6e3lIVLW6h3mITLU6k3nISHXMoNl8k8oIPH]MPOf8n8PJkGdMQO^8Q9SJfGiMSOV8U9UJ^GRNUOl7Z9WJ^FTOMj6b9WJYFC^O_6W:SJRFe0\\\\Nb5_;QJhE[;U:VEoDX;n:f5M4L3M3N2N2N2O1O1N2O1N2O1N2O1N2N2N2N2M3N2M3N2N3N0O2O1N2O1N2O1N2N2O1N2N2N2O1O1N20000000000000000000000000000000000000O1000000O1000001O0O10000O10001O1O1O1N2O1O1O001O1O0O2O001O1O000O2N1O101N1O1O2N1O1O2N1O1O101N1O10001O0O101O000O2O001O000O2O001O1O1O1N2O1O2N2N2M2O2N1O1O1O1O1N2O1O001O1O001O1O1O1O001O1O1O1N3N1O1O2N2N3M4L3M5K4L5K7I7I6J5J6K4L4L3M3M4L4L4L5K4L3M3M3M2M4M3M2N2N2N2N2N1N3N2N2M3N1O2M4M2M3N2M4M2M3N1N3N1N2O1O1O2N1N2O1O1O1N2O1O1O1N2O1O2M3M3M3M3M3M2N3N1N2N3N1N2O1N2O0O2O1O1N3N1O2N2N2M3N2N1O2N1N2O1O1O1O1N2O1N2O2N1N2O2M3N2M3M4M1N3N1N3M2N2N3M2N3M3M3L5L4L4L5J5L4L4L3M3M4L3eK[YOgMif0c1nYOXNVf0U1_ZOdNme0a0iZOYOde04iZODme0@aZO9Zg0dMVYOS2[k0G8G9ZOUiSQ1\"}}, {\"image_id\": 147, \"category_id\": 2, \"bbox\": [229.94708251953125, 726.6469116210938, 1317.326904296875, 203.56500244140625], \"score\": 0.9999984502792358, \"association_id\": 1, \"segmentation\": {\"size\": [1071, 1600], \"counts\": 
\"ak`=1]Q11O100O1O1N3N1000000O1000001O0O10000000000O1000001O000000000O100000000000000O100000000O1000000O100O100O1O1O100O1O100O100O100000000O1000000000000O100000O1000000000O10000000O10O1000000O10O1000O100O0N3N2N110O010O10O1000O0100000O01000000O010000000O1000O1000O100000O10O1000O1O010O10O01O10O01O0N3K4N3N2M201N2O001O1O100O01000O1000O010000O100000000O01000O1000000O010O1O100O010O1O1O000N2N2M3K5XNg102O0O2O1O010O10O010000O0100000O10O1000O100000O1000O10000000O1000O1000O1000000O10O100000O100000000O1000O100000O100000000O1000000000O010000000000O10000000000000000O10O100000000000000000O1000000000O100000000000000000O100000O100000000000000000000O100000O1000000000O1000000000O1000000000O1000O10000000000O100000000000O0100000000000000000000O100000000000000000000000000000000000000000000000000000000000O10000000000000000000000000O100000000000000000000000000000001O0000000000000000000000000000000000000000000000000000000O100000000000000000000000000O1000000000001O0O10000000000O10000000000O100000000000000O101O0000000000000000000000000000000O10000000000000000000000000000000000000000000000000000000000000001O00000000O100000000000000000000000O1000000000000000000000O100000000000000000000000000000000000000000000000000000O10000000000000000O1000000000000O100000000O100000000000000O1000000000000000000O10001O000000000000000000001O0000000000000000001O0000000000000000000000001O00000000000O100000000000000000000000001O000O1000000000000O100000000O10000O100000000O100000001O000O10000000000000000O10000000000000000000000O1000000000000O100000001O0O1000000O1000000O1000000000000O1000000000000000000O1000001O0000000000000000000000000000001O00000000000000000000001O0O10000000000000000000000000001O000000000000000O10000000000000000000000O10000000000000001N100000000000000O10000000000000000000000000000000000O100000000000000000000000000000000000001O000000000000000000000000000000000000000O1000000000000000000000000000000000O100000000000000000000000000000000000000000000000000000O101O00000000000000001O0000000000001O0000000O2O0
000001O000000001O00000000001N10000000000000001O00000O100000001O000000001N1000001O00001O1O1O1O2M2O1O001O001O001O00001O000O2O0000001O0000001O00000O2O0000001O00001O0O101O001O001O0O2O001O1O1N2O1O1O1O1N2O000O2O001N101O0O101O000O101O0O10001O0O2O1O1N1O2M4M2MfWT2\"}}, {\"image_id\": 147, \"category_id\": 2, \"bbox\": [329.2384948730469, 728.5775146484375, 705.7774658203125, 230.356201171875], \"score\": 0.18271185457706451, \"association_id\": 0, \"segmentation\": {\"size\": [1071, 1600], \"counts\": \"YhY;1^Q10O101N10000O2O000O10000O2O000O1000000O2O000O100O1O1O100O001O1O1O100O10000O1000000O100000000O100000000000O10O100000000O1000O10O10000O10000O0100000O10O10O1000O10O10000O10O10O100O10000O10O01000000O1000000000000O1000O1000000000O100000000000000000000O0100000000O10O10O10000O10O10O1000O0100000O10O1000O10000O01000O10000O01000O100O010O10O01000O01000O010000O01000O010O1O010O1O001N2O001O0010O01O10O10O10O1000O10O01000O01O10O01O0O2N2N1O2O0010O10O01000O10O1000O10O10O100O010O10ON3E:G:M3O0000100O00100O010WNiMYTOX2ek0jMZTOW2ek0kMZTOT2fk0nMXTOS2fk0PNXTOQ2gk0QNVTOQ2ik0RNTTOn1lk0TNRTOm1mk0WNnSOk1Ql0YNjSOi1Vl0[NcSOg1]l0_N[SOd1el0b110O0100O10O010000O10O1000O100000000O100000000O1000000O10000O10000O100O10000O10000000000O100000O100000000000O10000000O10000000000000O01000000000000O10O10000000000O10O1000O1000000O1000000O10000000000O1000000000O100000O10000000000000000000000000000000000000000000O100000000000000000000000000000000000000000000000000O10000000000000000000O1000000000000000O1000000000000000000000000O100000000000000000O10000000000000000000000O1000O100000000000O10000000000000000O10000000000000000O1000000000000000000O2O00000000000000001N1000000000000000000000000000001O0000000000000000000000000000000000000000000000000000000O10001O0000000000000000000000000000000000000000000000000000000000000000000000001O0000000O10000000000O1000000O1000000O10000O1000000O1000000000000000000000O10000000001O0000000000000000000000000000O1000000000000000000000001O000000000000000O101O0O2O0O101N3N3L4LWV^b0\"}}, 
{\"image_id\": 148, \"category_id\": 1, \"bbox\": [99.74705505371094, 670.9982299804688, 459.64752197265625, 159.8046875], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [1728, 2304], \"counts\": \"i^Y5T2jc15K4M2N2O1N2O0O2O001N101N2N102M2O1N2O1N101O1N2O1N2O0O2O1O1N101N2O1N2O0O2N10000O2O000O100O100O100O100O10000O1000000O10000O100O100O100O100O10000O1000000O0100000O10O10O01O10O0100O100O00100O10O01O1O001N2O0010O01O100O010001O00000O100000000000000O2O0000000O2O000O2O000O101O000O2O0000001O000000000O101O000000000000000000000O10O10000O1000000O100000O010000000000O10000000000O10000000000O1000001O0000000O10000000000000001O0000000O100000000000000000000000000000001O000O100000000000000000001O0000000O101O000000001O0000001O000001O0001O01O00000010O0001O000010O01O0010O01O100O0010O01O0010O0001O010O001O010O0010O00010O0000001O01O000001O000000001O0001O0001O00001O1O001O1O1O1O1O001O00001O0O1000000O2O000O10001O0O10001N10001O0O101O0O2O0O101O0O101N10001N10001O0O101O0000001O0000001O0000001O00001O001N101O00001O00001O00001O00001O0000001O0000001O00000001O01O0O10001O001O001N101O00001O00000O2O00001O0O101O1O0O2O0O2O001N10001O0O101N2O1N2N3Jhnok2\"}}, {\"image_id\": 148, \"category_id\": 1, \"bbox\": [1062.6199951171875, 394.4599914550781, 281.353271484375, 794.965087890625], \"score\": 1.0, \"association_id\": 3, \"segmentation\": {\"size\": [1728, 2304], \"counts\": 
\"VVTi1T1`d1h0A6J6K4L3M3M3L4K6J5J6hkNjLae0Z3\\\\ZOjL_e0[3]ZOiL_e0Z3^ZOjL_e0Y3^ZOiL_e0[3^ZOfLae0]3\\\\ZOdLbe0`3[ZObLbe0c3ZZO^Lce0f3[ZO\\\\Lae0i3\\\\ZOXL`e0m3]ZOUL`e0P4]ZORL`e0Q4]ZOQLae0Q4]ZOQLce0P4YZOSLfe0n3UZOXLie0i3SZO[Lle0f3oYO_LPf0c3jYObLVf0^3[QOiK\\\\3o0Wk0Y3YQOPLW3l0`k0U3UQOUL\\\\1dNbLX2lP1P3SQOZLW1hNaLR2UQ1m2PQO]LT1lN^Lo1]Q1j2nPO_LP1SOZLi1gQ1h2iPOcLm0YOVLb1SR1e2dPOhLg0a2dn0k0XPOTMc0\\\\2To0e0boNnMc0h1jo0c0PoN[Na0\\\\1]P1c0bnNbNb0Q1kP1d0XnNfNb0k0TQ1f0PnNiNd0d0ZQ1e6^nN^I_Q1j6XnNZIfQ1a=N2O1N2O1O1O1O1O1000O0100O0100O010O00O1OO0UN_nN[_O`Q1T`0\\\\2H9N3N2O1O2O001M2K5G8E;H6M2O2O20OO1NN1L4N15K502O100010O2O000O1O1O1O1O1O2O0000000001O00001N1020O2N1O1O100O001O10O01N2O001O001O001O001O0100O100O100O2O0O2OO2O1O1N4M2M3N1N2O1N101N2O2M3N4K<E8G8I5J4M4K6K6I5L4Ke_ObnNVOCR:hQ1bFdoNYNZOk:oP1iFXPOgMTO`;aP1eFiRO\\\\9Tm0bFPSO^9ol0^FWSOb9gl0[F]SOf9al0VFeSOj9Yl0QFoSOn9ok0lE[TOQ:ek0iEdTOT:\\\\k0fElTOX:Tk0cERUO\\\\:nj0`EWUO_:lj0ZEXUOf:cKZD[l0i0VXOm:XKaDel0;XXOT;iJjDTm0HYXO`;VJQEeU1T;giNYE]V1l:fhNgE^W1\\\\<3M2N2N1O2M3M4L4L4L4bF^hNV4eW1bKmhNQ4VW1hKViNP4mV1jKYiNS4jV1hK[iNU4hV1fK\\\\iNX4gV1bK_iN[4dV1_KbiN^4aV1[KhiN`4[V1WKRjNb4QV1VKcjN[4aU1]KPkNV4SU1dKUkNW4oT1bKWkN[4PU1\\\\KUkNa4RU1UKSkNi4UU1mJPkNP5T[1L5Kd0\\\\O]1cN6J4L4L2N3M2N4L3M3M3M2N1O2N1O1O1O1N2O1O1O2M3N2N3M2M2O1O2M2O1O1N2O2M2O2M6H8H]Wfb1\"}}, {\"image_id\": 148, \"category_id\": 1, \"bbox\": [1545.8912353515625, 614.0046997070312, 307.5145263671875, 109.5462646484375], \"score\": 1.0, \"association_id\": 2, \"segmentation\": {\"size\": [1728, 2304], \"counts\": 
\"Uaaa2`0[e1c0@3M3N2N3M3N2N1O1O1O00001O1O001O01O000O2O00000000001O0001O000000000000000O100000000000000O1000000000000000000000O100000N2O001N2O1O01000O010000O010O1O010O001O00100O010O10O10O10O01O10O010000000O1000000O2O0O100O101N101N2O001N100O2O000O100O100O1O100O101N100O010O100O1O10000O1000000000000O10O10000000000000000000O10000000000000O10000000001O0O10000000000000000000000000000000000001O000000010O00001O001O0000001O00001O00001O01O000001O0001O00001O00010O0000000000001O00000000001O00000O0100000000000000000O2O000O2O0O10000O2O000O100O101M2O1N200O2N100O100O001O1N2O1O100O100O100O2O000O100O1O101N100O1O2O0O2O0O2N100O2N1O2IS[NXOod1d0<Hi]hg0\"}}, {\"image_id\": 148, \"category_id\": 2, \"bbox\": [646.2662963867188, 1183.763671875, 549.8082885742188, 66.7669677734375], \"score\": 0.9999978542327881, \"association_id\": 3, \"segmentation\": {\"size\": [1728, 2304], \"counts\": \"noTR12ie17L3N3N000O101O0000000O2O00000O2O001O0O2O00001O0O10001O000000001N1000001O0O2O00001N10001O00000O1000001O00000O101O00001O000O101O00000000001N10000000000O2O00000O100000001N100000000000001O000000000O10001O000000000O100000001O000000000000000000000O100000000000000000000000001O0000000000000000000000000000001O0000000O100000000000001O0000000000000000000000000000001O0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000O0100000000000000000000O10000000000000000000000000000O10000000000000000000000O100000000000000O100000000000000O10000000001O00000000000000001O0000000O2O0000001O1O3M2dN^[NU1id1N2N001O0000000O2O000000001O000000001O0O100000001O0000000000000000000O101O000000000000000000001O00000000001N100000001O000O2O00001O0O101O00001N101O0O2O1K9JQbS30k]lL5K6N1O2M200O2O000000000O10000000000000000000000000000000000000O010000000000000000000000000000000000000000000000000000000O1000000000000000000000000000000000O1000O100000000000000000001O0O1O2N2N5G[ZNNhm_j1\"}}, {\"image_id\": 148, \"category_id\": 2, \"bbox\": [76.82017517089844, 816.6709594726562, 460.5953369140625, 
22.8131103515625], \"score\": 0.999984622001648, \"association_id\": 1, \"segmentation\": {\"size\": [1728, 2304], \"counts\": \"QhR42le14M101O0N2000001O0000000000O1000000000O10000000000000000000000000000000000000000000000001O00001O00001O000000001O000000001O00001O2N1N10kYd13Qf[N2N101N1000000O1000000O10000O10O01O10000000000O1000000000000000000O1000000000000000000000000000000000000000000000O1000000000000000000000000000000000000000000O100000000000000000000000000000000O10000000000000O100000000000000000000001O000000000000000000O010000000000000000000000000000000000000000O10000000000000000000000000000000000000000000000000000000000000000000001O0000000000000000000000000000000000000000001O0000000O101O001O00001O1O00001O0N20Q^n10oaQN1N1O100O100O100O2O000000O10O10000O10000O100000000000000O1000000000000000000000000O100000000000000000O1000000000000000000000000000000000000O100000001O00000000000O100000000000001N1000001O1O0O10Zh[m2\"}}, {\"image_id\": 148, \"category_id\": 2, \"bbox\": [1525.6817626953125, 710.9771728515625, 335.6971435546875, 17.22265625], \"score\": 0.9997832179069519, \"association_id\": 2, \"segmentation\": {\"size\": [1728, 2304], \"counts\": \"bld`22ne11N100000000O10000O100000001O0000O10001O00000000001O00001O1O001Omil20SVSM000O1000000O100000000000000000O10001O0000P^80PbG00000000000000000kWP40UhoK1O00000000000000001O01O00000000001O0000c_Tm0\"}}, {\"image_id\": 148, \"category_id\": 2, \"bbox\": [873.1630859375, 1188.0550537109375, 512.644287109375, 52.3272705078125], \"score\": 0.9949323534965515, \"association_id\": 0, \"segmentation\": {\"size\": [1728, 2304], \"counts\": 
\"QRT^1`0Re1DoZNN2g0hd1AV[Na0hd1<00O2O0O100000000000000001O000000000000000000000000000000O1000O10O1N2O100O100N2000O1000000000000000000000001O00000000000000001O6eNY[NQ1kd103M0000001O0000001N100000000000001O000000000000001O0000000O100000000000000000000000000000000000000000000000000000001O000000000000000000000000001O00000O101O00001O00001O0O10001O1O001N10002M3NR`n2GR`QM6M4M2O000O2N100O10000O100O100O1O100O10000O10001O0O1000000000000O1000000000000000000O100000000000000000000000000O10000000000000O1000000000000000000000000000000000000000000000000O100000000000000000000000000000000000000000000000000000000000000000000000O10000000000000000000001O000000000000000000000O101O00000000000000000000000O100000000000000000000000000000000000O10000000001O0000000000000O100000O10000000O100000000000000O10O100000000000O1000000O10O1000000000O10001O00001O00001O000O2O001O001O00001O001O0O2O001O1N2N3N4JRZUb1\"}}, {\"image_id\": 149, \"category_id\": 1, \"bbox\": [156.12020874023438, 231.7835693359375, 1059.902099609375, 645.1203002929688], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [1073, 1599], \"counts\": 
\"iVc65UQ17I8J6N2N2N2O1N2O0O2N2O1O0O2O1N101O1N2O001N2O1N2O1O0O2O1O1N3N1O1N2O2N1N3N1O2N2M2O2N2M3N3M2N2M3N2N2M3N1N1O2O1N2N2N2O0O2O1N2O0O2O0O2O1N2O001N2O1O1N2O1O1N2O1O1O1N3N1O1O2M2O1O001O0O10000O10000O2O0O10000O100O100O1O100O1O1O1O2N100O1O1O100O1O100O10000O100O100000O01000O10O10O100O010O1O1O010O10O01O10O10O10O01000O01000O010O1000O0100O100O010O1O1O1O1O1O1O1O1N2O001N2N2O1N2N2N2N2O1O1O001O100O1O100O100O100O100O01000O100O100O100O010O1O1O100O1O1O0O2K5K5K5L4L4O1O100O001O100O100O100O100O10O10O10000O100O100O100O1O100O1O1O1O1O1O1O1O1O1O1O1O100O1O100O100O100O10000O1000001N10001O0000001N10001O0000001N1000001O000O101O0000001N1000001O0O10001O0O10001N101N101N2O0O2N2O0O2N2O1N5L4K5L4K5L6I:G9F9H8H8G8I7I6I6K4L4L3Y\\\\OVC`b0n<W]O[Cfb0h<R]O_Clb0c<n\\\\OdCob0^<j\\\\OiCSc0]=N2N1O1O1N2O1O1O1N2O1O000O101O000O10001N10000O101O0O100O101O0O100O100O2N100O1O100O100O1O00100O1O1O001O1O1O1O1O001N2O1N101N2N2M3N1O2N2N2N100O100O10000000jNm^OoATa0o=Q_OlAPa0S>U_OhAl`0W>Y_OdAh`0[>^_O_Ad`0_>b_OZA``0e>f_OUA[`0i>W1RO]]OmBeb0Q=j]OaBWb0\\\\=P1N1N3M2N3^Oa0K5N2O1N3M2N2N3K4M4L4L400O0100001O000000001O00001O1O001O001O1O001N2N2N1O2N2N1O2M3N2M2N3O1O1O1O001O1O1O010O1O100O10O0100O100O10001N10001N100O2O0O101N101N101N1O2O0O2O0O2N2O0O2O1O0O2O0O2O1O0O101O0O2O00001O001O001O001O001O001O1O001O1O1O1O1O1O1O1N2O1O1O1O1O001O1O001O1O001O00001O010O00010O00010O010O010O10O010O010000000O1000O1000000O01000O0100O010O0010O01O001O010O1O001O1O001O1O1O1O100O2N2N1O2N1O2N1O1O2N1O2N1O1dXOhF^f0Y9UYOXGef0o9K5Kj0VO1O2N1O1O2N1O1O1O1O1O1O1O1O1O2N1O1O1O1O1O001O001O001O1O001O001O001O1O001O001O001O1N101O001O1N101O1N2O001N2O1N2O1N2O1NYLW\\\\OiIhc0U6\\\\\\\\OjIbc0T6d\\\\OjIZc0U6j\\\\OjITc0T6Q]OjIob0R6W]OmIgb0Q6^]OnI`b0o5f]OPJXb0P6j]OPJTb0P6o]OoIoa0R6S^OnIka0Q6W^OoIga0Q6\\\\^OnIca0R6^^OnI`a0R6b^OnI\\\\a0R6f^OnIYa0R6h^OnIVa0R6l^OnISa0R6n^OnIQa0Q6Q_OoIm`0R6S_OoIl`0P6V_OPJh`0Q6Y_OnIg`0R6Z_OmIf`0R6\\\\_OmId`0S6]_OlIb`0T6`_OjIa`0V6`_OiI``0V6c_OhI\\\\`0X6f_OgIZ`0Y6g_OeIZ`0Z6i_OdIV`0\\\\6l_ObIU`0^6l_OaIT`0^6n_O`IR`0`6P@^IQ`0a6Q@^Io?a6S@]Im?c6U@\\\\Ik?c6W@[Ii?e6Y@
ZIg?d6\\\\@[Ic?e6^@ZIc?e6_@ZI`?g6a@WI`?h6b@WI]?i6d@VI\\\\?j6f@UIZ?k6g@TIX?l6i@SIW?m6k@RIU?n6l@PIT?P7m@PIR?Q7o@nHP?R7RAlHn>U7RAkHn>U7SAjHl>V7UAiHk>X7VAgHj>Y7VAgHi>Y7YAeHg>\\\\7ZAcHf>]7ZAbHf>^7\\\\A`He>`7bAXH^>h7kAoGU>R8SBeGn=[8WB_Gi=a8]BYGc=h8bBRG^=n8hBlFX=U9a5000O2O000O1O2E:G9M3O1N2O100O2N101N10001N2O0010O0100O010O10O10O10O1000O0100O01000O1O10O0100O1O10O01O1O1O1O010O1O100O1O10O0100O10000O100O100O2O0O100O1O1O101N1O1O1O1O1O2M2O1N2N2O2N1O1O1O1O2N1O1O101N1O101N1O101N101N101N1O2O1N2M3N2M3M3M3M3M3L5K4L4L4K5L5K4L4L5L3L4N2M3N2M3N2N2N2N1O2N2N1O2O1N1O2N2N2N2N2M3N2K5J7I6J:D=@`0@d0hNP2aNZdP?\"}}, {\"image_id\": 149, \"category_id\": 2, \"bbox\": [23.12898063659668, 771.348876953125, 596.525390625, 128.826171875], \"score\": 0.999194324016571, \"association_id\": 1, \"segmentation\": {\"size\": [1073, 1599], \"counts\": \"okh0:RQ1:J3M2N2O0O2O0O101O0000001O0000001O0000001O0O100000001O0000000000000000000001O0001O0000000000000000000000000000001O00000000001O0000001O000001O000000O2O0000000000000O100000001O00000000000O10001O000000001O0000001O000000000O101O000000000000001O000000001O00001O0000001O00000000001O01O000001O00001O1O1O1O2N001O1O001O001O01O0001O00001O01O01O1O00100O2N010O001O000001O01O000000001O00001O00001O1O1O010O001O001O00001O00001O00001O1O1O2N2N2N2OO01O0000001O00000000010O00000010O01O1O001O0010O01O00001O00000001O01O000001O01O01O0001O00010O0000001O000001O000000001O0000010O00001O00000000001O000000000000001O000000001O0000001O01O00000001O000O10001O0000001O00001O00001O000000000O2O000000000000000000001O0000000O10000000001O0O10001N100O2O000O2N100O2O000O2O000O101O0O10001O0O101N100O2O1N2O2M2O1N10000O2O0000000O101O0000001N10001O001N2O001O001N101O0000001O0O10001O001O001O3M2N2N1O1N2O0000001O000000001O000O2O00001O001O001O00001O0O1000001O00000O10001O001O000O2O00001O0O1000001Olkc02QT\\\\O100JMmnN4XQ101O1O1NY^QS1\"}}, {\"image_id\": 149, \"category_id\": 2, \"bbox\": [604.6474609375, 741.8146362304688, 261.52130126953125, 59.87603759765625], \"score\": 0.9970704317092896, 
\"association_id\": 0, \"segmentation\": {\"size\": [1073, 1599], \"counts\": \"da]d0e0jP14M2N1O2N100001O00000000000000001O0000000000000001N100O1000000O10001O0000001O0O1000001O00000000001O000000000000000000001O0000000000000001O00O100000000O100O10000O10000O10000O10O10000000000000000000000000O1000000000000000000001O0000000000000000001O000000010O00000010O0001O0001O010O001O010O0000000001O000HjoNUOVP1k0koNUOTP1k0loNUOTP1j0noNUORP1k0noNUORP1j0ooNVOQP1j0PPOUOPP1j0QPOVOPP1i0PPOWOPP1h0RPOWOno0i0RPOWOno0i0RPOWOno0h0TPOWOlo0i0TPOVOmo0j0SPOVOmo0j0=00000001O000O10001O00001N101O00001O00000O1000000O2O1O1N3N2N1N2O1O0000001N10000000000O1000000000000000010O00O100000001O00000000000001N101N10acUh0\"}}, {\"image_id\": 149, \"category_id\": 1, \"bbox\": [261.1226501464844, 647.7169189453125, 60.31939697265625, 27.6004638671875], \"score\": 0.8958747386932373, \"association_id\": 0, \"segmentation\": {\"size\": [1073, 1599], \"counts\": \"l[g87XQ14M2N1N3O1N2N1O2O0O2O00001O000000000000000000001O000000000000000000000000000000000001O00001O1O1O00001O1O001O_`jY1\"}}, {\"image_id\": 149, \"category_id\": 2, \"bbox\": [278.942138671875, 717.2009887695312, 610.2890625, 95.41326904296875], \"score\": 0.2654532194137573, \"association_id\": 0, \"segmentation\": {\"size\": [1073, 1599], \"counts\": 
\"gUe<1_Q12N3M101O0O1000000O2O0000000O10001O000000001O0O2O001O001O0000001O0000000000001O000000001O00001O000O101O000000001O00000000001O001O001O001O001O000000001O000000000000001O0000001O0000001O00000000001O0000000000001O000000001O00001O00001O00001O00000000000001O01O0000000000000000000000001O000000000001O00000001O00010O00YOkoN3TP1JSPO2no0GioNL<<jo0GmoNJ;?go0EaPO:`o0DaPO=^o0BdPO=YP10O10O1O0O2O1O1N2O1N2O]jW1MeUhN3M2N2N2N2N2L4N2N2N101N1L401N10001N2JnNkoNR1UP1oNjoNQ1VP1oNjoNQ1UP1QOjoNP1UP1POjoNQ1VP15O1LfNQPOZ1no0500O100O1000000O2N100O1O1000000000O2O00000000001O00000000000000000000000000001O0001O0000000000010O0000000000000010O0000001O001O001O1O001O001O00001O00001O00000000001O0000000000000001O00000000000000000000000001O00000000000000000000001O00000000000000000000000000001O00000000000000001O0000001O0O1000000O101N100O10000O101O000O1000000000001N10000O1000001N100000001O0000001N1000001O0000001O00001O000O2O001O00001O00001O001O000O2O00001O0000001O000000001O000000001O001O1O1O001N101O1O00k^Rh0\"}}, {\"image_id\": 149, \"category_id\": 2, \"bbox\": [251.91595458984375, 658.988037109375, 68.1756591796875, 20.4361572265625], \"score\": 0.250881165266037, \"association_id\": 0, \"segmentation\": {\"size\": [1073, 1599], \"counts\": \"ciZ8313VQ15O001O0000000O10000008H1O2Nco`[1\"}}, {\"image_id\": 150, \"category_id\": 1, \"bbox\": [430.7857971191406, 102.89115142822266, 370.4824523925781, 466.6655578613281], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [768, 1024], \"counts\": 
\"hZc:=bg06J2N1O2O0O2O0O2O0O2O000O101O000O10000O101O0O10000O1O001O1O1O001O1O10O01O10O10O10000O10O10O10000O10O10O2N1O2M2N3K4L5L3L4N3M2O1O2N1O2N2N1O3L3M3M4hMRMX_OR3e`0QMX_OR3e`0PMX_OS3f`0PMW_OR3g`0PMW_OR3g`0oLY_OR3e`0oLZ_OS3d`0nL\\\\_OT3``0oL__OR3_`0oLa_OS3[`0oLd_OT3W`0oLi_OT3R`0nLn_OT3n?nLQ@U3l?lLT@V3i?kLW@V3g?kLY@V3e?kL[@V3d?kLZ@W3d?jL[@X3c?iL]@X3a?iL^@Z3^?hL`@\\\\3]?eLb@`3X?cLe@d3T?^Li@k3o>WLo@o3l>RLRAS4j>nKTAV4i>lKUAW4h>jKVAY4h>hKWA[4f>fKYA\\\\4f>eKXA^4d>eKWAa4f>aKTAg4h>[KRAo4f>UKVAS58]I_=Q:B:F8QDZEW:k:_E]E^:g:\\\\E]Eb:g:XE]Ef:f:UE^Ei:e:SE]El:l;O1O1N3M2O1N2O1O100O1O100O100O001O1O001NbLSFlHl9P7_FkH`9R7hFiHX9V7nF^GPOFQ:l8SGWGROJk9o8VGPGUOOf9P9YGhFYO5_9R9]G]F_O:X9W9lH`FY7`9kHWF[7g9`3N2M3N101N2O1O1000000O1000000000O10000000O1000000000000001O0000000000000001O00000001O00000000001O1O002O0O2N6K6I5K2O0O2O0O10O101O000001O1O2N2M2O1TCdEQ<]:hCPFP<R:jCXFP<g9RD[Fk;d9VD`Fg;]9\\\\DgF_;V9dDoFW;m8mDUGQ;_8\\\\EbGb:e7WF]Hh9`7ZFbHd9\\\\7^FeHa9Z7`FhH^9W7dFjH[9T7fFmHY9R7hFPIW9n6kFRIU9l6mFTIS9j6PGUIP9i6SGVIn8f6VGYIl8b6XG]Ii8_6ZGaIf8\\\\6^GcIc8Z6_GeIb8Y6`GgI`8W6aGjI_8T6cGkI^8T6cGlI]8R6dGoI]8o5dGPJ]8n5eGQJ]8l5fGSJ[8j5gGUJ[8g5jGWJX8d5lG[JU8a5oG^JR8^5RH`JQ8\\\\5QHdJP8Y5RHgJn7W5UHhJk7V5WHiJj7V5VHkJj7S5XHlJj7Q5YHmJh7R5YHmJi7P5ZHnJh7o4\\\\HoJf7m4]HQKf7j4^HTKd7i4^HVKd7f4_HYKb7d4bHZK_7d4cH[K^7c4dH\\\\K]7b4dH_K\\\\7_4fH_K\\\\7_4fH`K[7^4hH`KY7]4kH`KX7\\\\4lHbKV7Z4nHcKU7X4PIfKQ7W4SIfKo6W4UIgKl6W4`6N2N1O2N2O1N2N2M4M3L4L4M2N2N2N2N1000001O000000001N100000001O1N2O1N3M3M4L3L4L3M4M2M3N3M2N3M4L4L5J;CZg`5\"}}, {\"image_id\": 150, \"category_id\": 2, \"bbox\": [13.70261287689209, 407.8431091308594, 726.1297607421875, 172.77798461914062], \"score\": 0.9999986886978149, \"association_id\": 1, \"segmentation\": {\"size\": [768, 1024], \"counts\": 
\"nn:^1`f05L2N2N1O2O0O10001N10000O100000000O1000000O10000O1000000O101O0000000O10000000000O100000000O10001N100O10000O10001N100000001O0O1000000000000O10001O0O10000O1O100O100O10000O10000O1000000O10000O10000O100O100O1O2O0O1O1000000O1000000O1000000000000O10000000000O100000000O10000O1000000O100000000000000O1000000000000000000000000O101O000000000000000000000O100000001O00000O10001O000O10001O00000O2O00000000001O0O100000001O000000001O0O101O0000001O00001O0O10O100000O10O1000O100O0100000O10O1000O10000000O1000O1000O10000O1000O01000000O010000000O010000000000O0100000001O0000000000000000000O100000000000000000001O00000000001O0000000000001O00000001O000001O0000000000000]Mf[O`1[d0^Nf[Ob1Zd0]Ng[Oc1Yd0]Nh[Ob1Xd0]Ni[Oc1Xd0[Nj[Od1Vd0[Nk[Of1Td0YNn[Of1Sd0XNo[Og1Qd0XNQ\\\\Og1Pd0VNS\\\\Oi1oc0TNU\\\\Oi1lc0SNZ\\\\Oj1gc0QN]\\\\Oo1kd0O010O00001O000001O01O0000010O0000010O001O10O01O001O010O001O01O01O0001O01O000001O01O000001O0001O0000010O0000001O00010O00000000001O01OXOQZO_Ooe0<WZOCie0<XZODhe0<YZOCge0<ZZODge0;ZZODfe0<[ZODde0;]ZOEce0;^ZODbe0;`ZOD`e0<aZOD^e0;cZOE]e0;dZOD]e0;cZOE]e0:eZOFZe0:fZOFZe0:fZOFZe09hZOFXe0:hZOFXe0:hZOFXe0:iZOFWe08jZOHVe08jZOHVe08jZOHVe08kZOGUe09kZOGUe09kZOGVe08kZOGUe09kZOGUe09lZOFUe09kZOGUe09lZOFTe0:lZOFTe0:lZOFUe09lZOFTe0:lZOFTe09mZOGSe09mZOGSe09nZOFRe0:nZOFSe09mZOGSe09mZOGSe09nZOFRe0:nZOFRe0:nZOGQe09oZOGQe09oZOGQe09oZOGQe09P[OFQe09oZOGQe09oZOGQe09P[OFPe0:P[OFPe0:Q[OFnd0:R[OFnd0:R[OFod09Q[OHnd08S[OGmd09S[OHld08T[OIkd07U[OIld07S[OImd07T[OIkd07U[OIld06T[OJld06T[OKkd05U[OKkd05U[OKld04T[OMkd03U[OMkd03V[OLjd04V[OMjd02V[ONjd02V[ONjd02V[ONjd02V[OOid02V[ONjd02V[ONjd02V[ONjd02V[OOid01W[OOid01W[OOid01W[O0hd00X[O0id0OW[O1id0OW[O2hd0NX[O2hd0NX[O3gd0MY[O3gd0MY[O3gd0MY[O4fd0LZ[O4fd0LZ[O5ed0K[[O5ed0K[[O6dd0J\\\\[O6dd0J\\\\[O6ed0I[[O7ed0I[[O8dd0H\\\\[O8dd0H\\\\[O8dd0H\\\\[O8dd0I[[O7ed0I[[O8dd0H\\\\[O8dd0H][O7dd0H\\\\[O8dd0H\\\\[O8dd0H\\\\[O9cd0G][O9cd0G][O9dd0F\\\\[O:dd0F][O:bd0G][O9dd0F\\\\[O:dd0F\\\\[O;dd0D\\\\[O<dd0D\\\\[O<dd0D][O<cd0C][O=cd0C][O=cd0C][O=dd0B\\\\[O>dd0B][O>bd0B^[O>cd0B\\\\[O>
dd0B\\\\[O>dd0B][O=dd0A][O?cd0A][O?dd0@][O?dd0@\\\\[O`0dd0@][O?dd0@\\\\[O`0dd0@][O?dd0@\\\\[O`0dd0_O^[O`0cd0_O][Oa0cd0_O^[O`0Qf00001O001O00001O001O1O001O001O0O101O00001O0000_Vd0Ldi[O5K1O2O0O101N101N10001O00001N100000001O00000000001O000000000000001O000O1000000000001O01O00000001O000000001O01O01O00001O000010O000001O0001O0000010O001O0010O01O1O010O0100O010O1O100O0010O0000002N2N2N1O1O0O2O1O001O1N3KU_V7\"}}, {\"image_id\": 150, \"category_id\": 2, \"bbox\": [576.0939331054688, 502.9196472167969, 180.828857421875, 66.52218627929688], \"score\": 0.922468364238739, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"XXj=6jg0001O000000000001O000001O01O01O00010O000010O0001O01O01O00001O01O000001O0O10000000000O1000001N100000000O101O0000001O00001O000010O01O010O0000001O000O100O1O1O2N1O1O101O1O1O2N1O1O1O1O102MYOXYO6hf0F^YO8cf0F_YO9bf0DbYO:Sg00O01O001O2N001N101O001O1O001N2O000O3MYh<2gWCOL0XXO0hg00XXO0hg00XXO0Sg00]YO1@OSg02[YOOCNRg04VYOMG2ONTg06SYOJJ5Sg02RYOJJ4Tg03PYO3Pg00lXO0Tg0>000GlXOEUg0:mXOCUg09jXOD^g0<30000O100O101O0O10000O100O100O5LQWY6\"}}, {\"image_id\": 151, \"category_id\": 1, \"bbox\": [154.7798309326172, 222.94595336914062, 372.30340576171875, 155.93310546875], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [768, 1024], \"counts\": 
\"hZe3f0of0>ZOe0J5J6L4N1N2XOgMh[OZ2Xd0gM\\\\[ONO]2dd0fM\\\\[O1K[2hd0fMZ[Od2fd0^MW[Oc2hd0;O001J6O1O100O000I8N1O2N1O2N2O001O1O1O001000O010O00100O010O010O10O10001O000O10001O000000000O2O00001O001O001O00001O00000000001O00000000001O0000001OO100000000000000O1000O1001O000O10000O10000O10000O10000O1000000O10000000000O10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001O000000000000000000000000000000000000000000000000000000000000001O000000O100000000000000000000000000000O10000000000000001O0000O10001O00000000000000001O01O01O001O010O1O001O000010O0001O001O001O001O01O0001O00000001O000000001O00001O000O2O001N10000O2O0O101N10000O2O001N2O1N1O2O1N2O0O2O001N101O001O1O0O2O1O1O1N101O001O1N2O1O1O1N4M4L1O1O001O1N101O3M4L3L:G;E:F3L8I2N1N2O1N2N5K4L3L]mn;\"}}, {\"image_id\": 151, \"category_id\": 1, \"bbox\": [538.3433837890625, 254.7857208251953, 103.9537353515625, 229.50779724121094], \"score\": 1.0, \"association_id\": 2, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"`Rf<9`g0=G5L3M4M4L2d[OmNha0V1U^OmNha0V1T^OnNga0W1U^OmNha0W1T^OjNka0X1X^O^Nma0e1U24L2L2O2O1O1O1M3N3O5J9H7H5K6I7I5K5L6I9G6H8Cg0]O>E7I4K4N3M2O2N1N2O1O001O001O001O001O1O1O1O0010gMY_OWMg`0j2W_OXMi`0i2U_OWMl`0h2S_OYMn`0l42N2N1O2N4L5K3M2N2M4M2N001O1O1O2N1O1N2N2N2L3XOi]OdK\\\\b0i3i]OiK52_b0Z3i1_O>L4aNjZOK_e0Fg1F9KmUT9\"}}, {\"image_id\": 151, \"category_id\": 2, \"bbox\": [596.4871215820312, 461.21063232421875, 178.276123046875, 33.475006103515625], \"score\": 0.999996542930603, \"association_id\": 2, \"segmentation\": {\"size\": [768, 1024], \"counts\": 
\"RW_>1og00O2O0O1O2N2O0O1O1O100O100O100001O0000000000001O0000000000001O00000001O000000001O0O100LDdXO=Zg0DfXO<Yg0FfXO:Zg0FfXO:Zg0601O00001O01O000001O01O000000000001O00O10000000000000001O0000000000000000000000000000000000001O00000000000O10000000I_OPYO`0Pg0AoXO?Qg0AoXO?Qg0BnXO=Sg0CmXO=Sg0CmXO=Sg0CmXO=Sg0CmXO=Sg0CmXO=Sg0CmXO<Tg0DlXO<Tg0DlXO<Ug0CkXO=Ug0BlXO>Tg0BlXO>Tg0BlXO>Tg0BlXO=\\\\g0000O100O100O100O100O1000000001O00000O101O00000000000000001N10001O0O3MVYj5\"}}, {\"image_id\": 151, \"category_id\": 2, \"bbox\": [159.44647216796875, 359.6733703613281, 396.097412109375, 21.1708984375], \"score\": 0.9999924898147583, \"association_id\": 1, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"c[f42mg02O1N10001O0O100000000000000000000000001O000000000000000000000000000001O00000000000000001O00000000000000000000O100O10000O100O1000O10000000O10000000000O1000000000000000O1000000000000000000000000000000000000000000000000000001O0000000000000000000000000000000000000000000000000000000001O0001O0000000000000000000000000O1000000000000000000000O2O000000000000000000000000000000001N10000000000000000001O00000010O01O0000001O000000001O001O1O001O0000000000000000001O00000000000000000000000000O10000O10000000000000000O10000O1O100O10000O1000000000000000000000000O10000000000000000000000000000000000000000001O0000000000000000000000000000000000000000000000000000000000000000000000O1001O0000001O001O0O4M002N0O2O00ZlS;\"}}, {\"image_id\": 151, \"category_id\": 1, \"bbox\": [138.42568969726562, 275.37518310546875, 42.56298828125, 102.65789794921875], \"score\": 0.32186752557754517, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"^iZ3c0Zg07VZOZOQd0i0k[OYOTd0k0f[OYOXd0R1][OoNbd0X1V[OjNid0a1gZOeNXe0R2O100O11O001O001O00000000000000O1000000002N4L5TO_ZOnNee0d0lZOVOWe0=U[OAnd00a[OM[f0M\\\\ljc0\"}}, {\"image_id\": 152, \"category_id\": 1, \"bbox\": [1232.2840576171875, 707.1939697265625, 336.88623046875, 680.00830078125], \"score\": 1.0, \"association_id\": 1, \"segmentation\": 
{\"size\": [1536, 2048], \"counts\": \"kjki1>\\\\_1=E:F8I5M1N2N2O1N3N1N2O1O1O1O1O101O000000001O0000001O000O10000O101O0O100O10000O2N1O1O2M2O1N2O2N1O1O2N100O2O000O2O001N2O0O2O001O1N2O2M3N3L4M3L4M3L4L3N3Lm0SO5L3L6J6J7I4L3M3M3M3M3N1N2WlN]ISn0e6hQOaITn0b6gQOdIVn0_6dQOhIWn0^6`QOlIZn0Y6\\\\QOSJ]n0R6ZQO[JWNdN[m0W7VTOaJ^MSOYn0`6RTOVMkk0P3nSOTMPl0P3jSOTMSl0Q3gSOSMVl0R3dSOQMZl0T3_SOPM_l0W3WSOnLfl0_3iROiLTm0f3RROgLkm0j:L3M4K4L4L4M3L4L3M4K4M3N2N3M2N2O2M2O1O1O2N1O1O1N2N2O1N3N1O1N20000O10000O10000O10O1000000O1N2O1O1N2N2M4L3N2N2N2N2N2N2M3L4E;D<B?D;L4M3N3M3M3M4L3M4K5K5L3L4M3N2M3O1O2N1O1O2N2N2N2N3M2N2N2O10002O0O3OO2N1N3M3L2N2N1O1O3M2O3L5L3M3M1N2O1N2N1O1O2M3N3L4M3L4M2M2O0O2N1O101N1O2N3M3M3M3N1N2N2N1000000001O00100O1000O1O000O100O10O0100O010O0010O1O1O1O1O1O1O2N2N2N2N2N2N2N2N2N3M4L4L6J4L3N2]FmoNS3VP1hLmoNW3UP1fLnoNX3TP1eLmoNZ3XP1bLhoN^3\\\\P1]LeoNc3aP1WL`oNh3gP1PLZoNP4nP1gKSoNY4RQ1bKnnN]4XQ1]KinNc4[Q1XKgnNg4\\\\Q1UKenNk4^Q1QKcnNi0EnNjQ15dnNi0JlNfQ16cnNl0NgNbQ1:bnNl07`N\\\\Q1>`nNo0c0UNQQ1g0_nNR1o0hMhP1P1[nNV1W1`MdP1S1XnNZ1_1XM`P1X1SnN^1m1iLUP1e1PnN`1\\\\V1^NeiN`1_V1\\\\NbiNc1`V1[NbiNc1aV1ZNaiNd1aV1ZNciNb1`V1ZNdiNd1_V1WNeiNf1^V1UNgiNh1_V1QNeiNl1dV1iM`iNU2hV1bM\\\\iN[2mV1ZMWiNe2SZ1N2N2N2N3K5K5I8Hj0kNa\\\\]f0\"}}, {\"image_id\": 152, \"category_id\": 1, \"bbox\": [0.0, 763.1681518554688, 160.58502197265625, 76.7225341796875], \"score\": 1.0, \"association_id\": 2, \"segmentation\": {\"size\": [1536, 2048], \"counts\": \"Yh0321O0[_1f0H5J4N2O4Kl0UO1O1O001O1N2O1O000000000000000000000000O1000000000000O1000000000000O10000O1000000O1000000O100000000O100000000001O00000000O10000000000O10000O10000O100000000O1000000O2O0001O01O1O00001O0O2O00001O001N10001O001O00001O01O0010O01O01O0000010O0010O01O0000100O002N2N1O001O1O001O001O00001N101O001N2N101N2N2N4L2K7H7L5IlV_h2\"}}, {\"image_id\": 152, \"category_id\": 2, \"bbox\": [947.130615234375, 988.7593383789062, 114.7120361328125, 68.18438720703125], \"score\": 0.9999998807907104, \"association_id\": 4, \"segmentation\": {\"size\": [1536, 2048], \"counts\": 
\"^Pk\\\\1220c_1?E7D;J5N2O1N2O1N2O010O1O0100O010O100O1O1O010O1O100O001O1O10O010O0100O1O010O10O0010O0010O01O1O001N2O1O1N1O2O1O100O100O100O100O2O001N102N1N2O000O2O1O001N2O001N100000001O0O3N2McPQ_1\"}}, {\"image_id\": 152, \"category_id\": 1, \"bbox\": [529.8488159179688, 713.5035400390625, 118.39178466796875, 80.5255126953125], \"score\": 0.9999997615814209, \"association_id\": 3, \"segmentation\": {\"size\": [1536, 2048], \"counts\": \"\\\\gkh0n0P_15L10000000000000000000001O0O1O1N2O100O2N1O2N1N2O1N200O1O1O001O1N1O110O1O1000000O10001O0O1000000M4O00001O0O100000001O00000000000000001O000001O001O0001O000010O001O001O001O10OO2O1O1N101O1O1O001O1N3N1O1N3N3M2N1N2N3M2M3N2N4J4M3M<A[XiQ2\"}}, {\"image_id\": 152, \"category_id\": 1, \"bbox\": [895.841064453125, 733.2713012695312, 102.78173828125, 326.56866455078125], \"score\": 0.9999997615814209, \"association_id\": 4, \"segmentation\": {\"size\": [1536, 2048], \"counts\": \"WiSZ1;b_17J4L5J8I<E8H4L7I:G6H6K7H9VcNSM;KP[1a4Gf0ZOj1VN<E5K6J4K4N2O0O2VOhGhiN[8jU1WHfiNT8WV1n0L2N3N3L3N1O0O2O0000001O00000000000000000001O0O1gF_jNS8bU1iGdjNT8]U1hGhjNU8[U1gGhjNW8aV1O2L4L3N3L5XOg0I8L3M2M4K5H8G8I8L3N3M3L4K6I7B>G8J5oM]eNnM[[1l1`1L4L5K6K6K4L4L4L2N3M3M2O1M4M4K5L3N1O1OfeWa1\"}}, {\"image_id\": 152, \"category_id\": 1, \"bbox\": [294.20928955078125, 750.1854248046875, 137.7459716796875, 61.2515869140625], \"score\": 0.9999788999557495, \"association_id\": 5, \"segmentation\": {\"size\": [1536, 2048], \"counts\": \"nWk=c0[_16K3N2M3N2M2O1N3N2M3M2O1O0O100000001O000O10000O1000O0100O1000O010O010000000O100000O10O100000000000000000000001O00001O0000001O0000001O000000000000000000000000000000O1000000000001O00000O100000001O0O10001O0O10001O00001O001O001N101O001O0O2O001N1O2N1ZO[aNNi^1M_aNJg^12e0OUXV\\\\2\"}}, {\"image_id\": 152, \"category_id\": 2, \"bbox\": [0.0, 826.209716796875, 175.3900909423828, 16.28662109375], \"score\": 0.9999366998672485, \"association_id\": 2, \"segmentation\": {\"size\": [1536, 2048], \"counts\": 
\"VZ23m_11N10000000000000000001O00000000000000000000000000000000O1000000000000O100000000O100000000O10000000O1000000000000000O100000000000O1000000000O100000O10O100000000O10O1000000000O1000000000000000000O10O1000000000000000000O1000001O0000001O000O101O000000001O0000001O0O1000o_:1P`E0O100O1000000O100O1O0100000000000000000O101O0001N10000000001N2NTfhg2\"}}, {\"image_id\": 152, \"category_id\": 2, \"bbox\": [538.2606811523438, 778.794189453125, 110.78094482421875, 18.06256103515625], \"score\": 0.9998693466186523, \"association_id\": 3, \"segmentation\": {\"size\": [1536, 2048], \"counts\": \"ehoi01o_11O0O2O001O0O10001N100O1N200010O0000001O00001O001O0000001O000000000000000000000000001O0000000000000000001O000000000000000000O010000000000000O1000O1000O1000O01000O100O2O0O^WoQ2\"}}, {\"image_id\": 152, \"category_id\": 2, \"bbox\": [1273.0340576171875, 1149.521240234375, 81.7203369140625, 83.5274658203125], \"score\": 0.992195725440979, \"association_id\": 0, \"segmentation\": {\"size\": [1536, 2048], \"counts\": \"mTkk18f_14M2N2N2N1O2O0O2O1N1O1O1O10001O0O10000O2O1O0N200O]OVaN3i^1LZaN2f^1L]aN4b^1K_aN5`^1JcaN6\\\\^1IeaN7Z^1JfaN7Y^1GiaN:U^1FlaN;R^1EPbN:o]1FRbN;l]1FTbN;j]1FWbN:h]1FXbN;f]1FZbN;c]1G]bN;`]1FabN;\\\\]1FdbN;[]1EfbN:Z]1HibNAXO80Jn]1?TcN_OQOOl]1c0[11O001N11O0000000001OO101N10001O1N4J5MjkQQ1\"}}, {\"image_id\": 152, \"category_id\": 2, \"bbox\": [1374.1533203125, 1190.76953125, 232.9739990234375, 144.3377685546875], \"score\": 0.9846373200416565, \"association_id\": 1, \"segmentation\": {\"size\": [1536, 2048], \"counts\": \"[gfQ22d_1=K5I8G7K4M4L3N2M3N2O1N2O2M2O2M2O1N2O1O1O2M2O2N2M2O1O10001O010OO10000O101N200O1O1O2N1O0O2N5KV1kN3L3N2M101N1N5Jh]T3NUbkL:L6J2N2N2N2N3M4M4K9G2O1N2O1O1O4L2N001O001O00O010O100O100O100O100O01000O100O100O1O1O001N2O100O10000O100O1O100O100O1O1O0O200O002O0O2N1N3M2O2M2O2N3L4K5I9Ghind0\"}}, {\"image_id\": 152, \"category_id\": 2, \"bbox\": [1481.9930419921875, 1203.19677734375, 125.433837890625, 108.2713623046875], \"score\": 0.9579361081123352, 
\"association_id\": 0, \"segmentation\": {\"size\": [1536, 2048], \"counts\": \"_VbW27e_1a0B5K3M3M4L4M2M3N1N3N2N2N3M2N2N00001O0000O2O0O1O1000O01000O01O1N2O00100O10O10O001O00100O10O01O1O100O1O00100O010O1O100O1O1O100O2N100O2M3M3L3O2K6J5M3M4Mjikd0\"}}, {\"image_id\": 152, \"category_id\": 2, \"bbox\": [326.3454284667969, 796.8676147460938, 105.553466796875, 14.69952392578125], \"score\": 0.8264955282211304, \"association_id\": 5, \"segmentation\": {\"size\": [1536, 2048], \"counts\": \"UYg?2n_10O10001O00000000000001O0001O000001O0000001O000000001O0000000000000000000000O100000000O10000000000000O0100000000O100000O010000O100000000O100000O1000O10000O0010000O1000O100O10001O1N2NQgn[2\"}}, {\"image_id\": 152, \"category_id\": 1, \"bbox\": [1072.5726318359375, 733.3594360351562, 39.4744873046875, 65.63128662109375], \"score\": 0.23952485620975494, \"association_id\": 0, \"segmentation\": {\"size\": [1536, 2048], \"counts\": \"aWZb1=b_13M10O03M00102M101N11O001O010N1100OO12O0NNj`N\\\\OV_1g00O1N22NN3N102K3O1O3J4O10]XQ\\\\1\"}}, {\"image_id\": 152, \"category_id\": 2, \"bbox\": [1305.2366943359375, 1163.895751953125, 254.7054443359375, 138.369140625], \"score\": 0.1688448041677475, \"association_id\": 0, \"segmentation\": {\"size\": [1536, 2048], \"counts\": \"QUXm1?__16D@n`Ne0m^19O1O1O1O2N3M2N1O1N24K2N4K7H;YOlai3`0m]VL:I3N2N2M3M4M4L3M3M2N2N2N10001O01O0001O00001O010O010O001O010O0010N2O4L3M3L7J2M3hN\\\\aNP1l^1ROXaN?mmd3]ORR[L3^_1:M2O6I2N1O3L3N7I5K6J3L3O1O101O1N100O00100O01O001O00001O0O101O1M5L4K5J`ijf0\"}}, {\"image_id\": 152, \"category_id\": 2, \"bbox\": [1271.5426025390625, 1156.9986572265625, 184.2315673828125, 93.2061767578125], \"score\": 0.15240813791751862, \"association_id\": 0, \"segmentation\": {\"size\": [1536, 2048], \"counts\": 
\"QUkk11m_14J8K2N101N1O101N2N100O101N100O1O100O2O001O001N101O0O1O2N2M2O2O00VO\\\\aN9d^1F]aN:b^1E`aN<^^1BfaN=Y^1BjaN=V^1AlaN?R^1BnaN>Q^1BQbN>n]1ASbNa0k]1_OVbNd0e]1WOkaNOb0k0a]1VOmaNOd0k0]]1VOoaNOe0j0\\\\]1WOPbNNf0g0]]1YOoaNNf00ZO>V^1Dd1Ngbc30S]\\\\L7I7N3N1O1N2O1O1O100001O000000000000O1000000O10000000000O11N101O1O2N1O2N2N2N2N1N4KYYWl0\"}}, {\"image_id\": 153, \"category_id\": 1, \"bbox\": [779.9989624023438, 666.11865234375, 275.90423583984375, 101.90106201171875], \"score\": 1.0, \"association_id\": 3, \"segmentation\": {\"size\": [853, 1280], \"counts\": \"UY`d05\\\\j06J:F8J4M2O1N101O0O2N2N2N2N1O2O1N101O0O2O0O2O0O2O0O2O00001O000O10000000000O1000000000O01000O1000000O100O100O101N10000000000000000000O1000000000000000000O100000000000000O1000000O100O10000O10000O100O1O1O1O1O10000O101N100O2N1O100O2O0O100O2O0O2N2O1N2O1N10001O00001N10001N1000001O0O1000000000000000O100O10000O1000O0100O100O1O100O1O100O100000000O100000000O101O000000000000000000001O01O01O001O000000001O000000001O0000100O002N1O001O1O001O00100O0010000O0010O01O010O1O1000O01O01OO2O001O1O001O001O001O001N1O2M3M2N3N2N1O2N2L5VOQWOBYi0:e0Fi\\\\Q6\"}}, {\"image_id\": 153, \"category_id\": 1, \"bbox\": [344.8204345703125, 647.3338623046875, 435.20880126953125, 137.10577392578125], \"score\": 1.0, \"association_id\": 2, \"segmentation\": {\"size\": [853, 1280], \"counts\": 
\"lg[9175L1gi0o0A5L4O1N2N101N101N10001N2O001N2O001O001N10001O000O101O001N2O0O2O1O0O2O1O000O2O00001O0O101N101O1N10001N10001O00000O1000000000000O1000001N1000000000O100000000000000000000000O10O10000000O100000O100000O0100000000001O00000000000000000O2O0000001O00001O0O2O1O00001O000O2O0000001O001O00001O00001O00000000000000000000000O100000000000000000000000000000000O1000000000000000O10O1000O10O01O1O10O01O1O1O100O1O001O100O100O100O100O1000O01000000000000O101O00000000000O1001O0O10000000000000001O00000001O00000000001O000000001O0000001O0O10000000000000001N100000000000000O1000000O10O1000O10000000000000O10000O1000001O0O100000001O0000001O0O1000001O000000001O0000001O001O00100O1O1O001O001O00000010O0000001O00010O0000001O00000000001O0O100000001N100000000O2O000O2N1O1O1N3N100O1O1O101N1O1O1O1O2N1O1O2M2L5L3M4N1N3N1O2O0O101N101N2N2O1M3M8G7I6J8IUoX=\"}}, {\"image_id\": 153, \"category_id\": 1, \"bbox\": [100.68344116210938, 657.2543334960938, 271.731201171875, 85.27410888671875], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [853, 1280], \"counts\": \"hid2j0ji04L2O001N10001O001N2O1O1O0O2O00001O1O1O2N001O00001O000O2O001O00001O0O101O000O101O000000000000000O100000000000001O00000000000000000000O10O1000000000O100O1O1O1O1O1000O01000000O1000000O100000O100000001O000O10000O101N10000O2O000O100O1O1O100O10000O100O100O100O100O100000000O10000000000O100000000O10000000000000000000000000000000000001O000000000000001O0000000000000010O0001O00010O0001O0001O01O01O010O0001O0001O00001O0010O000001O001O1O1O001O0O2O00001N1O2O0O2O0O101O0O2N1O2N1N2O2O0O10001O001N2O001O000O2M2O2O1N2O2M2N2N2O0O2N2N2N5I5KW[jg0\"}}, {\"image_id\": 153, \"category_id\": 1, \"bbox\": [1026.999755859375, 679.5194091796875, 163.7803955078125, 87.48858642578125], \"score\": 1.0, \"association_id\": 4, \"segmentation\": {\"size\": [853, 1280], \"counts\": 
\"SVRk01cj08H3TVOFD1\\\\i0=nVOIQi09mVOIQi0;kVOFTi0o0O0O101N2O1O0O2O001O001O1O0O101O0O100000001N1000000O100N2O1O1O002N1M3O1O1O101N100O101O0O101O0O2O000000001O0000000000001O0000000000001O00O101O0000000O1000001N100O1O101N100O1O100O2O0O101O00001O00001O010O001O001O001O001N101N2O1N101O001O1O000010O1000000O10O0100O10O010O1O001O001N2O001O1N2N2N2M5EW^Y2\"}}, {\"image_id\": 153, \"category_id\": 2, \"bbox\": [347.8250732421875, 763.8949584960938, 378.73748779296875, 28.11376953125], \"score\": 0.9999994039535522, \"association_id\": 2, \"segmentation\": {\"size\": [853, 1280], \"counts\": \"ed\\\\92cj0001N10001O000O100000000000001N100000000000001O00000000001N100000001O00000O1000000000000O10001O0000000001O00001O000000000000000000001O000000000000000000000O010000O10000O10000000000000000O100000000000000000000000000000000000000000000000000001O001O1O1O001O0000001O0000001O2N2N1O0O100000001O0000]Zb00ce]O0000000000000O1M300O100O10000000O10O1O1N2M30000O100000000000000O010000000O100000O10000000000000000000000O010000000000000000O10O1000000000000000000O1000O100000000000000000O0100000000000000O1000000000O1000000000000000000000000000000O10000000000000000000000O10000000000000001O000O10001O00000000001O0000000O101O00001N2O1Nha[?\"}}, {\"image_id\": 153, \"category_id\": 2, \"bbox\": [1038.1751708984375, 754.6253051757812, 135.8187255859375, 10.7359619140625], \"score\": 0.9999921321868896, \"association_id\": 4, \"segmentation\": {\"size\": [853, 1280], \"counts\": \"i\\\\Vk02bj02O00000O10001O000000000001O0001O0000000000000000000000000000000000000000000000000001O00001O001O1O001O00Ree00nZZO0O2N1O10000O100O1O10000000000001O1O0000000000001O001O0O2O000000000000000000faQ3\"}}, {\"image_id\": 153, \"category_id\": 2, \"bbox\": [790.5421142578125, 753.8029174804688, 256.7362060546875, 14.16778564453125], \"score\": 0.9999802112579346, \"association_id\": 3, \"segmentation\": {\"size\": [853, 1280], \"counts\": 
\"kYce01dj0001O0O1000001N100O100O100O10000O2O000000000O1000000O10000000000001O00000000000000000000000000001O000000001O00001O00O100000000000000001O0000001O0000001O0000001O1O001O00000O2OTe`00kZ_O1O2O0000000O100O1O1O1O10000O1000000000000000000000000000000000000000000000000001O000000000O100000000000001O0000000000001O001O0000001O000000001O001O00001OQPl6\"}}, {\"image_id\": 153, \"category_id\": 2, \"bbox\": [87.17877197265625, 726.1028442382812, 271.4084167480469, 14.50408935546875], \"score\": 0.9999206066131592, \"association_id\": 1, \"segmentation\": {\"size\": [853, 1280], \"counts\": \"_Pn21dj000000O100000001O000000000000000001O0O1000hT\\\\10XkcN00001O000000000000000000000000000000000000000000000000000000000000000O10000000000000000000001O000000000000000000000O100000000000000000000000000000000000000000000001O0000000000000000000000000000001O00000000000000001O0000000000000000001OO10000000000000000O100000000O1000000O1000000000000000000000000001O0O101O0000ZoZh0\"}}, {\"image_id\": 153, \"category_id\": 1, \"bbox\": [412.5743408203125, 472.65264892578125, 20.0096435546875, 16.93402099609375], \"score\": 0.7380703091621399, \"association_id\": 0, \"segmentation\": {\"size\": [853, 1280], \"counts\": \"k_h:6^j03N1O1O1O001O000010O001O1N110N2O1NWhRf0\"}}, {\"image_id\": 153, \"category_id\": 2, \"bbox\": [1042.6719970703125, 754.9571533203125, 70.655029296875, 9.10247802734375], \"score\": 0.05093022435903549, \"association_id\": 0, \"segmentation\": {\"size\": [853, 1280], \"counts\": \"hlXk02bj0101O00001O000000000000000000000000000000001O000000000000000000000000000000000001O001O1O1O1O00d[i4\"}}, {\"image_id\": 154, \"category_id\": 1, \"bbox\": [107.70735931396484, 80.91690826416016, 270.7548522949219, 296.685791015625], \"score\": 0.9999996423721313, \"association_id\": 1, \"segmentation\": {\"size\": [566, 849], \"counts\": 
\"Skk1a0o`07N3M2N2O001N2O2N1O2N1N101O1O00001O00001O001O001O1O010O1O001O001O0001O1O1O2O1N1O1O1O1O1O1O2NcDmN\\\\6n0bIWOd6`0ZIEk63SI1m6MQI6P7HnH;R7DmH=T7BlH>U7@lHa0T7]OmHc0U7YOmHg0Q<010O00000[KYO[Hh0d7YO[Hf0f7ZOYHg0f7\\\\OWHe0i7]OUHc0k7BoG?P8DmG<T8FjG:V8GhG:X8GfG:Z8GdG9]8GbG:_8F_G;a8E^G;c8EUGb0m8^OUF_1k9aNmEf1U:ZNhEg1Y:YNfEh1[:XNcEh1^:YN`Eg1b:XN\\\\Eh1f:YNXEf1j:ZNREf1R;[NhDe1^;ZN_Da1i;`NTD\\\\1R<dNmC[1U<eNjC[1W<fNgC[1Z<dNeC\\\\1\\\\<dNcC[1_<eN`CZ1b<fN\\\\CY1g<gNVCU1Q=kNkBU1Y=jNeBW1]=iNaBW1a=iN^BW1c=iN\\\\BW1d=kNZBU1f=mNXBS1i=nNUBQ1l=ROQBk0m:_NaGi0_Me0o:lN]G_2c8eMZGZ2g8iMVGW2j8lMTGT2k8nMSGR2n8oMjETOMo2X:PNdE[ONe2^:SN\\\\ECO[2d:WNUEJIU2R;S3000O101O01000O10000O1000000O10O100000000000O1000010O00000001O01O0kJcDQ4];nKeDR4[;kKiDS4X;jKlDU4T;gKREW4R;^KWE`4Q<O1O01OO100000001O0001O00000OWL[Cb2e<[McC`2^<[MiCb2Y<WMQDd2S<PM_Df2l=^O1OO01OO10O2N1O4M8G2N3M101N1O101O2N3M:G0O1O0O010O1O001O101N1O1O1O1O0010O00100O1O100O1O10O1O2O0O1O1O1O010O001O1O00101N1O2N100O0001O2N3N5J?A3M2N2N100O10O00010O10O01O1O1O001O00001O001O000O2N3M2J\\\\PT8\"}}, {\"image_id\": 154, \"category_id\": 1, \"bbox\": [530.6642456054688, 97.1822280883789, 216.64727783203125, 418.5982360839844], \"score\": 0.9999995231628418, \"association_id\": 2, \"segmentation\": {\"size\": [566, 849], \"counts\": \"a]o93aa03N3L3N1O2N1O1N2N2XKBVH`0h7DTH?j7CUH=h7GVH:f7JZH6i5>SFMR4F_5S2aJmMY5Y2fJhMV5\\\\2jJdMT5^2lJbMR5a2lJ`MP5d2oJ]Ml4i2RKXMf4Q3QIeLJ;m6X3nHnLMLn6^3QInLJFQ7`3SIoLFDU7a3RIPMBBZ7b3PIRM_O_O^7f3mHRM[O]Od7j3dHhNU7P6K5L4M3O1N2O1O1O1O1O1O1O1N2M3N2N2O1O1O0010^FcIj7]6mG\\\\Jc7e5VHoJ]7P5bHUKY7l4fHXKW7h4hH[KU7e4lH]KQ7c4PI^Km6c4SI`Kh6b4WIaKb6d4]I`K`5`5_JcJ[5a5fJ`JV5b5kJ`JQ5b5nJaJo4`5QKbJk4`5TKcJg4`5WKeJY4i5eK[Jg3V6YLmIb3V6\\\\LmIa3T6_LnI^3T6bLlI]3U6cLkI[3W6dLkIX3X6hLhIT3\\\\6lLeIP3^6oLcIn2`6RM`Im2a6RMaIk2b6SM`Ik2b6SMaIi2b6TMbIg2b6kLYJe2g:E7I4L3M3M4L6J7H7I6G:H7I9Ghc=2W\\\\B5M4K4L3N1N2O1O0O2O001O0O101O0000001N100O100O100O1O1O2N2N2N2N3M3M3N2M100000001O000002L5K8E\\\\Xh1\"}}, {\"image_id\": 154, \"category_id\": 2, \"bbox\": 
[407.5341491699219, 409.359375, 206.30661010742188, 128.65771484375], \"score\": 0.9999995231628418, \"association_id\": 2, \"segmentation\": {\"size\": [566, 849], \"counts\": \"lRS74b?5TB0g=2XB0e=3YBNe=4YBMf=7VBJi=9UBHj=:SBGm=9RBHn=9QBGn=;QBEo=>nABR>c0iA]OW>h0cAYO]>i0aAWO_>k0_AUOa>l0^ATOb>n0[ASOe>o0YAQOg>R1UAnNl>n1000O100O100O10000O1Dl@VNT?V2000RNi@]1W?bNRAV1n>jNSAU1m>jNUAU1k>kNUAU1k>kNUAU1k>kNVAT1j>lNVAT1j>lNVAT1j>lNWAS1i>nNVAS1i>mNXAR1h>nNXAR1h>nNXAS1g>mNZAR1f>nNZAS1e>mN[AT1d>lN\\\\AT1d>lN\\\\AU1c>lN\\\\AT1d>lN\\\\AT1d>lN\\\\AT1d>mN[AS1e>mN[AT1d>lN\\\\AT1d>lN\\\\AT1d>mN[AS1e>mN[AS1e>mN[AS1e>mN[AT1d>lN\\\\AT1d>lN\\\\AT1d>lN\\\\AT1d>lN[AU1d>lN\\\\AU1c>kN]AT1d>lN\\\\AT1d>mN[AR1f>nN[AQ1e>oN[AP1f>POZAP1f>POZAP1f>POZAP1f>POZAo0g>QOYAo0g>QOYAn0h>ROYAl0h>TOXAk0i>UOWAk0i>UOWAj0k>UOUAk0k>UOVAj0j>VOVAj0j>VOVAi0k>WOUAi0k>WOVAh0j>XOVAh0j>XOVAh0j>XOWAg0i>YOWAg0i>YOXAg0g>YOYAg0g>YOZAf0f>ZOZAf0f>ZOZAg0e>YO[Ag0e>YO\\\\Ag0c>YO]Ah0b>XO^Ah0b>XO_Ah0a>WO_Ai0a>WO_Aj0`>VOaAi0_>VObAk0^>SOcAn0]>QOdAo0\\\\>nNfAU1W>jNkAV1U>iNkAX1U>fNlAZ1Y?000O002OO01O010O00100O10O010O01O010O1O1O010O0010O02O0O100O10001O2N1N100O10O010O100O010O010O1O1O100lN\\\\@4e?J]@5c?J`@4a?Ja@5`?Ge@7\\\\?Ei@9X?Dk@;V?Bm@=W`0O00001O001O001O00001O00001O1O00001N10001O001O0O101LbiU4\"}}, {\"image_id\": 154, \"category_id\": 1, \"bbox\": [137.91497802734375, 343.76116943359375, 53.06916809082031, 41.002777099609375], \"score\": 0.999345600605011, \"association_id\": 3, \"segmentation\": {\"size\": [566, 849], \"counts\": \"mc\\\\29\\\\a02N2N2O1O1N2O1O1N2O1O1O1O1O1O1O1O1O001O00010O0010O01O01O00000010O0001O010O0000002N2N001O0O101O001N1O4K:DlZ[;\"}}, {\"image_id\": 154, \"category_id\": 2, \"bbox\": [8.970306396484375, 288.19903564453125, 247.9085693359375, 81.9996337890625], \"score\": 0.996177077293396, \"association_id\": 0, \"segmentation\": {\"size\": [566, 849], \"counts\": 
\"aj57^a02N2O001O001O001O1O00001O0001O01O0001O01O00010O01O001O01O01O001O001O10O01O001O0010O02N1O001O010O001O010O00000010O0001O2M10jN3R12O1c_OJW?8h@HX?8h@HX?8R12O0000000O2i_ODR?<n@EQ?;o@FP?:PAGo>:PAGo>9QAHn>8RAIm>8RAHn>8RAIm>7SAIm>7SAIm>8RAHn>8SAHl>9SAGm>;QAEo>i0c@XO\\\\?i0c@WO^?i0a@WO_?i0a@WO_?i0a@WO_?i0a@WO_?j0`@VO`?k0_@UOa?l0^@TOb?n0\\\\@ROd?n0[@SOe?n0Z@ROf?n0Z@ROf?n0Y@SOg?n0X@ROh?n0W@SOj?\\\\1000O10000O2O01O000000001O0001O0001O00010O00001O01O0000000O10000000001O00000000001O0001O0001O00000000001O001O1O1O1O001O00001O0dNj_OX1[`0O1O1O2N1O1^O__OBO2ONf`08e_OHGOkk30feL0J1d^O0[a06O2N1O1O2O000001O000000001O0000000001O01O0000001O001O1O1O01O0001O00001O1O1O0010O0001O1O01O01O01O0010O0O2N10oj[:\"}}, {\"image_id\": 154, \"category_id\": 2, \"bbox\": [123.34817504882812, 373.23883056640625, 43.17762756347656, 14.379730224609375], \"score\": 0.9953863024711609, \"association_id\": 3, \"segmentation\": {\"size\": [566, 849], \"counts\": \"g^U21da02O001O001O001O0O2O001O00001O0000001O01O000010O0001O001O001O001O00001O00001O0000cTi;\"}}, {\"image_id\": 154, \"category_id\": 2, \"bbox\": [39.81892776489258, 264.75238037109375, 316.73388671875, 121.002685546875], \"score\": 0.8907505869865417, \"association_id\": 0, \"segmentation\": {\"size\": [566, 849], \"counts\": \"_]f01ba08I4N1N2N11O001O001N101O001O001O0O2N3MXc6Mj\\\\I3M3N2O1N2O001N101O1O1O0O2O00001O0O20O00010O01O00100O001O00010O1O01001N6J6K2d_OiNS`0_1N2N4M0O00000010O00001O0010O01O00010O01O1O1O010O00001O01O01O00000010O000000000000001O00000000001N1000001N1O101N100O100O2O0O100O2N101N2J6F[_O@f`0?\\\\_O_Of`0?\\\\_O_Og`08V_OI5Nf`07d0JPW77khH1N3M2O0O2O00000000001O000000000001O01O01O0010O010O01O01O0001O00001O000010O0001O00nMBlB>S=DmB<R=DnB<R=EmB;R=GnB9Q=HnB8R=HoB7P=JPC7o<IRC6n<KQC5o<KQC5P=JQC5o<KQC5o<KRC4o<KQC5o<KQC5P=JQC6n<JRC6P=IPC6Q=JnB6S=JmB5Z?00010O00010O0001O010O0000000001O01O0001O00001O00001O00001O00001O0010O000000010O0000001O0001O0001O00000000001O01O000000O1000000000001O00O2O00000000001O0000001OO100000000O10001O000O102M]ia8\"}}, 
{\"image_id\": 154, \"category_id\": 2, \"bbox\": [131.4737548828125, 315.2299499511719, 227.63580322265625, 73.872802734375], \"score\": 0.8866073489189148, \"association_id\": 1, \"segmentation\": {\"size\": [566, 849], \"counts\": \"nXY2c0o`05I7X@UOc>k0]AUOc>l0\\\\ATOd>l0\\\\ATOd>l0\\\\ATOd>l0\\\\AUOc>k0]AUOc>k0]AUOc>k0]AUOc>k0]AUOc>k0]AUOc>k0]AUOc>k0]AUOc>k0]AUOc>k0]AUOc>k0]AUOc>k0]AUOc>k0]ATOd>l0]ASOc>m0^AROb>n0S10000000000001O0000001O0O11O00000000000000001N2O1M5]OWW7IPXHO[a07L4M3M201N100001O000001O000001O010O1O001O0001O000010O0000010O001O001O0010O0000010O01O1O1O00001O01O01O01O0001O01O01O00001O0000001O01O01O01O00000000000000010O0000000001O00000000010O000001O000000000001O00010O00000001O000010O00000000010O000010OO100O10000O101O000000001O01O0000001O1O001O001O001O00001O1O001O01O0000001O000010OO2O1N4LS_^8\"}}, {\"image_id\": 154, \"category_id\": 1, \"bbox\": [585.8656616210938, 22.702505111694336, 13.49005126953125, 11.578378677368164], \"score\": 0.762496829032898, \"association_id\": 0, \"segmentation\": {\"size\": [566, 849], \"counts\": \"emS:3ca02L3O1O1O00000000O102MjfZ4\"}}, {\"image_id\": 154, \"category_id\": 2, \"bbox\": [127.50254821777344, 201.91860961914062, 247.32408142089844, 187.5953369140625], \"score\": 0.10063160955905914, \"association_id\": 0, \"segmentation\": {\"size\": [566, 849], \"counts\": \"VcW27Ya0o0VO5L1O00000000001O0O101O010O001O00001O00O100O101N1000001N101O10O01O0000001O0O1O101N101N101N101N2N2N2N3D>Ij]90VbF4N2N2N1O101N101N101O0010O0010O0000010O000001O0000001O0000100O001O010O00PNBhB?V=BkB=U=DjB<V=DjB<V=DkB;T=GkB:T=FmB9S=GmB9S=HmB7S=InB6R=JnB7P=JQC5o<KQC5o<LQC3o<MQC4n<LSC3m<MTC2l<NTC2k<OVC0k<OUC1l<NUC1l<MUC3m<HWC7T?00010O000001O01O001O001O001O0000001O00000000001O0000001O001O001O001O00000010O00000001O000000010O000001O001O00001O00001O00001O0000001OO10001O0000001O00001O001N10001O0000000001O0O2O001O0O2O2L3N3M2MiP_8\"}}, {\"image_id\": 155, \"category_id\": 1, \"bbox\": [1347.4298095703125, 708.0000610351562, 195.9532470703125, 
155.58270263671875], \"score\": 1.0, \"association_id\": 2, \"segmentation\": {\"size\": [1536, 1883], \"counts\": \"^XUo1?V_1b1gN5K3N1O1O1N2N2O2N1O1O1M3J6G9M3L4M3L4M201O1O100O1O001O1O100O10001O1O1N2O1O1O1O001O00001O001O0001O0000000O2N1O1O1O1O1O101O0O10000O2O00000000001O00000000001O00000000O100O1000000000000O10000000000O10000000000000000000000O1000000O10000O101O00000000001O00001O00001O1O001O00001O0000000010O000001O00000001O01O0010O0010O01O001O001O1O1O1O001O1O2O0O1O1N4M4L4L2N2N2M2O2M2O2M8H5K3M3M2HbbNkM`]1i1_bNXNn]1d19M3N2L9H5K<B]Wm?\"}}, {\"image_id\": 155, \"category_id\": 1, \"bbox\": [1507.0860595703125, 696.50830078125, 240.617919921875, 138.4981689453125], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [1536, 1883], \"counts\": \"effV2;__1;H8J5L3M1O1O001O00001N1O2N1O6J2N1O100O2O001O001O001N2O1O2N2N001N2O1O2N3M3M001N101O0000001O001N2O1O1O0O2O001O001O1O001O1O1O1O1O2N1O2N3M4L1O1N1010O01O1O001O1O1O000000001O00000000000000O2O00000000000O10000000000O100000000000001O000O010000O10000000000000O10000000000O1000000O10001O00000O100000001O0O10001O001O00001O001O001O1O001O1O1O001O001O00010O001O0000000001O01O0000000010O01O010O01O0010O0001O01O001O01O01O00100O1O10O01O1O2N1O3L3N1O1O001N102N2N3L2O1O0O2O1O1N3L5J6L3L3O1N2N2L7HPX[6\"}}, {\"image_id\": 155, \"category_id\": 1, \"bbox\": [903.877685546875, 754.8798217773438, 95.01171875, 291.53924560546875], \"score\": 1.0, \"association_id\": 4, \"segmentation\": {\"size\": [1536, 1883], \"counts\": \"T^aZ1W1_^1`0D:F<B<G7I6G9YMhLjgN6_N]3\\\\Y1XMVgNh3cX1eLjfNi3nX1i1L3L4L5K5L2N2N3M2M4L3M3M3M3N1O2M3N3N000000O21N3M2N2N2N1O101O000000000000000001O2N1O1O001O002N1N3N4K:Ef0XO6K4J6K5L4L4J7K5J5I7H8I7H9^Oa0K6L5H7J5M2N3M3M5K5K4K5D>oNT1UOQV`Y1\"}}, {\"image_id\": 155, \"category_id\": 2, \"bbox\": [619.8195190429688, 1218.208740234375, 243.9122314453125, 94.537109375], \"score\": 1.0, \"association_id\": 3, \"segmentation\": {\"size\": [1536, 1883], \"counts\": 
\"]hTm01l_15M2N2O0O101O0O100000001O0O10000000000000000001O0000000001O0001O0000010O0000000000000001O000000000O2WOAoaN?Q^1BnaN>R^1GhaN;W^1FhaN:W^1GiaN9W^1HgaN9Y^1GgaN9Y^1HeaN:Z^1K`aN6`^1e00000000001O0O10000O2O000O100O2O0O1O1O1O1O100N2O2K4G9O1O1O100000000001O01O0000000000O10001O000000001O001O000000001O00000000001O0O101O000000001O00000000001O00001O0O101O00001O0O2O1O000O101O00001O00001O000O2O000O101O0O10001O001O1O001O001O000O2O001O001O0O1000TOiNobNW1n\\\\1POnbNo0R]1VOjbNj0U]1YOjbNf0U]1]OibNc0W]1^OhbNb0W]1@hbN?Y]1AgbN?Y]1BebN?[]1BdbN>\\\\]1CbbN>^]1Q1100O10001N101O000O2O000O2O001N2O1N3M2N2O1N10001N2N4K3N2N2N1O2O1N3M3L4K7I4M2O2JiXm_1\"}}, {\"image_id\": 155, \"category_id\": 1, \"bbox\": [545.9840087890625, 769.9006958007812, 206.720458984375, 497.69415283203125], \"score\": 1.0, \"association_id\": 3, \"segmentation\": {\"size\": [1536, 1883], \"counts\": \"m^ni0f0T_1:G8I6L3L6J5I8H8J4L5L3M2N3K5K5L4M3M3M3M3L5J5L4M3M3M3M1O1L5M2M3O2N1N1O2N2N2O1N2O1O2M201N2O1O1O1O1N3N1M5K4L4M4M3M3N3M2M5L3M4K4M2M4K5L6I>A;F5K4M3M3N4K6J4M3L3M2N3L5K8F:G9G7J4L5J7G:H:F8I7I5K6J6J9Gb0^O:G5K4L3N2N3M4L2M4M1O2N2N3M2N2N2N1O1O0000001O00000001O000000001O001O1O2N1O3L4M2N2N2M4M4L6I7J4K4L3`MUnNfFmQ1T9`nNbFdQ1T9PoNZFWQ1`9]oNiElP1S:c2M3M3M2N3L3H9H7J7K5L5J6H8E:G:K4M2N3M2N3M2L4L5L3N3N1M4L4C=\\\\Oe0SNl1G:I6M4L3N2N1O2O1N2N2N2N2N2N2N3N2L6J8Gb0QOcTnd1\"}}, {\"image_id\": 155, \"category_id\": 2, \"bbox\": [927.384765625, 1039.760498046875, 149.80224609375, 47.9730224609375], \"score\": 0.9999998807907104, \"association_id\": 4, \"segmentation\": {\"size\": [1536, 1883], \"counts\": \"g`b[12m_12O1N2O1O0O2O001O001O00001O10O01O00001O1O00001O001O001O000O1O2O0000001O001O00001O00010O0000100O001O001O1O10O01O00100O001O00000000010O000001O00000010O0001O001O0000000O10001O000000001O000000001O00000001O01O00000001O01O000001O000001O0001O00000000001O01O00000000001O000000001N1O2N3M3@a`N010O3VnSV1\"}}, {\"image_id\": 155, \"category_id\": 1, \"bbox\": [1122.0748291015625, 690.3267822265625, 100.2581787109375, 83.8560791015625], 
\"score\": 0.9999998807907104, \"association_id\": 5, \"segmentation\": {\"size\": [1536, 1883], \"counts\": \"[fld12l_1k0UO:G7I6K5J2O1O001N100O100O1O101N10O010O10O101O00O01000O010O010O10O10O10000001O000000000000001O010N010000000010O0000O1001O1O001O010O00001O00001O1O100O1O1O1O1O2N1O2N1O1O1O8H4L8H6I7J5K2M7HWhnn0\"}}, {\"image_id\": 155, \"category_id\": 2, \"bbox\": [1370.8046875, 836.4485473632812, 193.5426025390625, 27.618896484375], \"score\": 0.9999992847442627, \"association_id\": 2, \"segmentation\": {\"size\": [1536, 1883], \"counts\": \"djXQ22n_11N2O0O101N10001N100O1N3N1O100O1O1000000O101O00000000000000000O101O00000000000000001O00000000000000000000000000001O0001O00000O10000001O000000000000000000000001O00000000001O0O2O1O1O001O1O001O001O00001N1000000O2O000000000000001O0O10000000000O100000000000000000O100000000O100O1O010O100O100000O01000000000000000000000000000000000O2O00000O10001N2Lcem>\"}}, {\"image_id\": 155, \"category_id\": 2, \"bbox\": [1554.287109375, 809.8389892578125, 209.4998779296875, 23.12176513671875], \"score\": 0.9999936819076538, \"association_id\": 1, \"segmentation\": {\"size\": [1536, 1883], \"counts\": \"[YmX29f_12O001O0000000000001O00000001O0001O0001O00010O1O1O00100O1O1O001O01O01O00001O01O0000000000001O00000O10001O0000000O10000000000000000000001O000O100000000000000000000O10000000000O100O10000O100000000O10000000000000000000000000000000O100000000000001O00000000001O000000001N1000001O00001O0000001O0000001O0000001O0000001O0000000O1000000000000000O10000O1000000O1000O100000O10000000000O100000000000000000000000000000O101O1O0N^Vc5\"}}, {\"image_id\": 155, \"category_id\": 2, \"bbox\": [1053.0335693359375, 793.6410522460938, 263.82666015625, 94.49688720703125], \"score\": 0.999612033367157, \"association_id\": 5, \"segmentation\": {\"size\": [1536, 1883], \"counts\": 
\"UY]b11l_1301N10001O001N101O000UbNJl[16TdNJl[16TdNJk[17UdNIk[17TdNJl[16TdNJl[16TdNKj[16VdNJj[16VdNJj[16UdNKk[15UdNKk[15UdNKk[14VdNMi[13WdNMi[13WdNMi[12XdNNh[12XdNOg[10YdN1g[1OYdN1f[10ZdN0f[10ZdN1e[1OZdN3e[1M[dN4d[1L\\\\dN4d[1L\\\\dN8_[1IadN8^[1HbdN8^[1HadN:^[1FbdN:^[1EcdN;][1EcdN;][1EcdN;][1EcdN;][1EcdN<][1@fdN`0Z[1_OgdNa0Y[1^OhdNb0X[1^OgdNd0X[1\\\\OhdNd0W[1]OidNc0W[1]OidNc0W[1]OhdNd0W[1]OidNc0W[1]OidNc0W[1^OhdNb0X[1^OhdNb0X[1^OgdNc0Y[1]OgdNc0Y[1]OgdNc0Y[1]OgdNc0Y[1]OfdNd0Z[1]OedNc0[[1]OedNc0[[1]OddNd0\\\\[1\\\\OddNd0\\\\[1]ObdNd0^[1\\\\OadNe0_[1\\\\O[dNHdNl0Q]1]OYdNi0g[1WOXdNj0h[1WOVdNj0j[1VOUdNk0k[1VOSdNj0n[1WOPdNj0P\\\\1WOncNj0S\\\\1VOlcNj0T\\\\1VOlcNj0U\\\\1VOjcNj0W\\\\1VOhcNj0X\\\\1VOhcNj0Y\\\\1VOfcNj0Z\\\\1VOecNk0[\\\\1VOdcNj0\\\\\\\\1VOdcNj0]\\\\1VObcNj0^\\\\1VObcNj0^\\\\1WOacNi0`\\\\1VO_cNk0a\\\\1VO^cNj0c\\\\1UO]cNj0d\\\\1WO[cNi0f\\\\1VOYcNk0g\\\\1VOXcNj0i\\\\1UOWcNk0i\\\\1VOUcNk0k\\\\1VOTcNj0l\\\\1WOScNi0n\\\\1VORcNj0o\\\\1VOPcNj0Q]1UOobNk0Q]1VOnbNj0S]1VOlbNj0U]1UOkbNk0U]1VOjbNj0W]1UOhbNl0X]1TOhbNl0Y]1TOebNm0[]1SOebNm0[]1TOcbNm0^]1RObbNn0^]1SO`bNn0`]1SO^bNn0c]1RO[bNo0e]1f0001O0O2O1O1O001O001O001O1O001O001N101O00001O0O2O1O001O001O001O0O10001O000000001O00001O00001O0000001O0O2O001O00001O000O2O001O001O00001O0O101O1O1O001O001O00001O00001O001O00001O00001O00001O00001O00001O00000O2O00001O00001O000000001O00001O0O101O000O101O0O2O3KPU`j0\"}}, {\"image_id\": 155, \"category_id\": 2, \"bbox\": [753.2446899414062, 933.213134765625, 139.0330810546875, 19.16131591796875], \"score\": 0.9947924017906189, \"association_id\": 0, \"segmentation\": {\"size\": [1536, 1883], \"counts\": \"\\\\]iS15h_14N1N201O00000O100000001O00000001O00000000001O000000001O00000O1000001O000000000O1000000000000000000000000000000001O0000000000000000001O0000000000000000O1000001O00000O100000000000000O100000001O000000O1001O000000000000000O2O00000000000O2Ocbl^1\"}}, {\"image_id\": 155, \"category_id\": 2, \"bbox\": [1145.5257568359375, 761.494873046875, 83.02685546875, 13.84417724609375], \"score\": 
0.08648130297660828, \"association_id\": 0, \"segmentation\": {\"size\": [1536, 1883], \"counts\": \"PX^f12n_1000000001O0O10000001O00000001O00O10000000000000000000001O0O100000oWWP1\"}}, {\"image_id\": 156, \"category_id\": 1, \"bbox\": [147.24728393554688, 35.24049377441406, 205.8394775390625, 484.41253662109375], \"score\": 0.9999996423721313, \"association_id\": 2, \"segmentation\": {\"size\": [533, 665], \"counts\": \"gn_2:n?c0E;F8I6J5M4K5L4L5K4L3eCfMV:]2^EQN^:Q2\\\\EUNa:o1WEWNh:l1oD[No:g3N2N2N2M2M2gKhJXK;TOR5S5iK`J2OZ4_5b4O2N2O001N101N1N2O2N2O0010O01RNZElMg:Q2cEhM\\\\:X2jEbMW:]2oE]MQ:c2RFZMo9f2RFXMn9h2UFUMk9l2WFPMk9o2WFoLi9R3YFhLj9Y3WFbLl9_3WFZLk9h3WFPLn9Q4Y12O0PNoK]GR4_8VL\\\\Gk3`8]L[Gd3b8bLYG`3d8fLXG[3d8QMRGQ3l8XMkFj2S9]2N2O1O1O1O1N2N2O1N2O10000000000001O1O2N4L4L4L5K4M2M4L5K<D?A<D6K4K4M3M3M4L4L3N10O2O0028G6J4L=A3NN2N1O000O2OO0100O1O1O100O1O2N1O1O1O1O1O1UJiLVNX3e1PMWNQ3d1XMWNi2d1`MWNa2c1kMVNV2e1VNSNk1i1\\\\NSNe1i1aNTN`1h1fNUN[1i1hNUNY1j1iNTNY1j1kNRNW1k1ROmMP1Q2ZOdMh0[2KRM7l20mL1Q36iLKV3:dLIY3?_LD_3c0YL@d3g0TL]Oj3g0QL\\\\Om3g0iKCS4a0cKH[4[6N2O1N2N2N2N2N3M2M2O2N1N2O2N3M7I6K6I?BY1fN4M1N3L6K3L5L2L5JY`[5\"}}, {\"image_id\": 156, \"category_id\": 1, \"bbox\": [429.5959777832031, 111.70240783691406, 201.98287963867188, 293.21630859375], \"score\": 0.9999994039535522, \"association_id\": 1, \"segmentation\": {\"size\": [533, 665], \"counts\": \"kPZ71V?;gANk=e1G8N2M200O0100O01000O010O1O1O1O0O2N2N2M4K4J6B>N2N3N100O100O1O1O1O1001O0TL^OeGb0X8FcG:[8JcG6\\\\8LcG5\\\\8LcG4^8LaG4_8M`G3a8N]G2d8OZG1f81XG0h80WG0i81VGOl80SG0o8OPG1S9NkF3W9MfF3\\\\9LcF4^9M`F3a9M^F4b9K^F5d9JZF8f9HXF;>QOU6e0ZI<2E]60]Ia0B0Q7AYIc0_O1X7]OUIf0^O1\\\\7[OSIT1POCm7]OmHU1fNK]8XOZG\\\\O:i1BKj8]500000000O10O10O0100O000O0O1N3K5L5M2O2N2M3G9kNU1N2O002N1N2M3M3M3N2N3N2M4L5J:C:H7J4M4K5F:mNS1E:K5L4M2N2N2L4M3N2O1O0O2O0N200O100000000O1O2O0010O00100N101N2O010OO2N2O001O1O100O001O1N2N1O2O0000N20O1N2010O1O100O1O001N200000000O100001N4L6Ijaa0\"}}, {\"image_id\": 156, \"category_id\": 2, \"bbox\": [410.8359375, 
373.2018737792969, 96.57757568359375, 40.51971435546875], \"score\": 0.9999878406524658, \"association_id\": 1, \"segmentation\": {\"size\": [533, 665], \"counts\": \"T^i64]`04O1O100O2O00000000000@Ef@;Z?Ef@;Z?Fe@:[?Gd@9\\\\?Gd@9\\\\?Hc@8]?Hc@8]?Ib@7_?Ha@8_?I`@7`?I`@7a?H_@8a?H_@8a?I^@7b?I^@7c?I\\\\@7d?I[@8f?GZ@8g?IX@7i?HW@8i?IU@5o?62000001O00001O01O01O00001O00010O010O00010O00010O00010O01O010O00001O1O01O01O01O0001O01O01O00O2O0N201N2M3O1MW]c2\"}}, {\"image_id\": 156, \"category_id\": 2, \"bbox\": [165.32443237304688, 427.0738525390625, 122.19378662109375, 82.46170043945312], \"score\": 0.9999825954437256, \"association_id\": 2, \"segmentation\": {\"size\": [533, 665], \"counts\": \"mjf2;Y`06K01O00m_OCn?>Q@Cn?c0O000O2O00O100O0BV@3j?LY@1h?Ma0O100000000000000000000010O000000001O001O000O10001O000000001N101O0O2O001N2O0O2M3]@]OP?g0h@EQ?Q1M3N4K3N1O2N2O2N1O001O2N001O001O1O1O0000100O0100O1O010O2N4M2M2N5K8H5J3N2M4J5K302M4K8B\\\\f^6\"}}, {\"image_id\": 156, \"category_id\": 1, \"bbox\": [297.74853515625, 149.50595092773438, 76.82855224609375, 227.94528198242188], \"score\": 0.9995695948600769, \"association_id\": 3, \"segmentation\": {\"size\": [533, 665], \"counts\": \"\\\\`Q56\\\\`05L4M6J8H4M3[DSO\\\\7Y1RHUOf7W1kEYNk0k0T9S1]EbNa0_OHS1W:S2cEoNX:j3M4Lj0VO1O1O00000000000002N2N2N3Me0]MfEaM[:^2kE\\\\MV:b2RFWMn9i2VFSMj9n2YFnLh9Q3[FlLe9T3_FhLa9Z3eF]L]9d3jFSLW9m3lFnKW9Q4h1OO3N0O2OO101001O0OiNhCQNW<h1TDVNl;d1\\\\DZNc;a1eDZN];a1iD]NW;^1nDbNV;V1oDiNW;k0QETOP;4eC\\\\O^1a0P;MdE4_:GcE9b:^OaEd0X=1N3NO1O3LbTg4\"}}, {\"image_id\": 156, \"category_id\": 2, \"bbox\": [285.3892822265625, 365.56390380859375, 90.92990112304688, 15.077239990234375], \"score\": 0.9919185042381287, \"association_id\": 3, \"segmentation\": {\"size\": [533, 665], \"counts\": \"XWe41c`02O2M5L1O1O00001O0000000000000001OO1000000000000000000000000000O2N1OiQ>J^nA1O1O000000000000000000000000000000000000000000000001NSRf4\"}}, {\"image_id\": 156, \"category_id\": 2, \"bbox\": [287.9075012207031, 365.452392578125, 
49.46136474609375, 19.03985595703125], \"score\": 0.9109537601470947, \"association_id\": 0, \"segmentation\": {\"size\": [533, 665], \"counts\": \"bXf42b`04L4M1O1O000000001O1O001O1O0000000000000000000000000000N201N1O1M6IcUb5\"}}, {\"image_id\": 156, \"category_id\": 2, \"bbox\": [340.0400695800781, 369.1468200683594, 37.864990234375, 6.997833251953125], \"score\": 0.5633296966552734, \"association_id\": 0, \"segmentation\": {\"size\": [533, 665], \"counts\": \"n_e54a`01O00000000000000000000000000000000000000000000000001O00]ae4\"}}, {\"image_id\": 156, \"category_id\": 2, \"bbox\": [308.3076171875, 367.25531005859375, 67.78512573242188, 9.643768310546875], \"score\": 0.11158136278390884, \"association_id\": 0, \"segmentation\": {\"size\": [533, 665], \"counts\": \"eeP57]`02O000000000001O0000000N3KbP=0aoB2O1O1O000000000000000000000000000000000000000000000001ORRf4\"}}, {\"image_id\": 157, \"category_id\": 1, \"bbox\": [2025.5062255859375, 751.7357788085938, 269.5709228515625, 115.24334716796875], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [1728, 2304], \"counts\": \"ihU[33ie16[ZNH[e1c0J9I4L4L3M2O1J6O1N3M3M3N1N2N2O0O101N101N2O2M2O1N101O001N2O1N3N2M2O1O001O0O101O001O001O000O10000O10O10O1000O1000O1000O100000000O10000O1O100O100O100000000O100000000O100000000001O000000000000001O00000000000000000000001O0000000000010O00001O1O00010O00001O1O1O001O1O00010O001O001O001O010O000000001O0000000O100000000000O100000000O10000O10000O1000000O1000000O100000001O0O10000000001O0O100000010O0000001O0000O10000000000O10001O01O01O01O01O0000010O00001O1O001O001O00000001O001O00001O00000000001O0000001O1O1O1O1N4M4L1O1N2O1O2M3M3M:ES]>\"}}, {\"image_id\": 157, \"category_id\": 1, \"bbox\": [536.3091430664062, 1052.4306640625, 324.75531005859375, 402.816162109375], \"score\": 1.0, \"association_id\": 3, \"segmentation\": {\"size\": [1728, 2304], \"counts\": 
\"^h^l0_1_d17J4L5J5L4K5K6I6U`N[MR\\\\1i2kcNYMS\\\\1j2kcNXMR\\\\1k2lcNVMR\\\\1m2lcNTMS\\\\1n2lcNSMQ\\\\1P3mcNQMR\\\\1P3QdNlLo[1V3UdNdLk[1]3[32N2N2O1N2N2O1N2O1N2N1N3N2M2N3N1N3N1O2O0O2N2M4M2M6I:G=C?A9G6K4K5L3M4K4L4M2M4K4M3M4M2N2N101N2N1O2O2M2N2N2M3N2dcN`GU[1b8ddNeGZ[1\\\\8idN`GW[1`8QeNXGoZ1h8U1O010O0010O01O10O01O001O00010O0001N2O001O10O01O100O100O10000O010O1O1O1N2N3L3M3N2N2N2O1N2O1O1N1O2N2N2M3O1N2O001O100O010O010O0010O010O00010O01O1O001O10O0100O0100000001O00000000000000000000000O1000000O1000001N2O1N102M2N2N2N1O2N1O2O1N101O1N2O1O1O1N3N1O2N1O2N2N2N4aG`cNV7m]1G3N2N1O1O1O001O1O1O1N3N1O2N1O0O2O1O0000000O10001O0000000000000O1000000O10000O100O11O01O01O001O001O01O01O0100O11O1O4L3N4K8GWNgaNTLQ^1c3\\\\bN[Lb]1b3bbN_L\\\\]1^3hbNaLW]1]3mbNcLR]1Z3RcNgLm\\\\1S3YcNmLf\\\\1n2`cNRM`\\\\1g2gcNYMY\\\\1`2ncN_MS\\\\1\\\\2RdNdMm[1Y2VdNhMi[1V2ZdNjMe[1T2]dNmMb[1Q2`dNPN_[1n1cdNoM`[1o1cdNnM^[1Q2fdNkM][1R2fdNdL_Mf0n]1c2YeNWMjZ1e2]eNTMhZ1h2^eNQMgZ1l2X4N2N2N2N2N101M2O2M3M3L4M3M3N2N3M2M4L4J7G9@\\\\^d\\\\2\"}}, {\"image_id\": 157, \"category_id\": 2, \"bbox\": [616.8422241210938, 1390.3458251953125, 462.18243408203125, 136.433349609375], \"score\": 1.0, \"association_id\": 3, \"segmentation\": {\"size\": [1728, 2304], \"counts\": 
\"mojP13je14N2N2O0000000O2O0000000O10001O000O100000001O0000001O00000T]NBV`1>e_NHZ`18d_NK[`15d_NM[`14d_NM[`13e_NMZ`14e_NNZ`12f_NNZ`12f_NM\\\\`12d_NN\\\\`12e_NL\\\\`15c_NK]`15c_NJ^`16b_NI_`17a_NI_`18`_NGa`19__NFb`1:n]N]Og08\\\\a1;l]N@f04_a1<j]NDe0Nba1?h]NEd0Kea1`0f]NGd0Hga1a0d]NId0Eia1b0c]NIgc18W\\\\NJhc16V\\\\NLjc15T\\\\NLmc13Q\\\\N0j0ZOoa1g0U]N1k0XOPb1g0T]N3k0VOQb1j0P]N3m0SOSb1o1i]NQNWb1P2h]NPNYb1Q2e]NoM[b1T2b]NlM^b1V2`]NjM`b1X2]]NiMcb1X2S]NgM11lb1Y2P]NiM3Nmb1b2Q]N_MPc1a2n\\\\N`MRc1g201O001O001O00001O00001O0000001O00001O001O001O0O2O001O001O0000001O0000001O001O001O00001O000000001O000O100000000000000000001O00000O11O000000000001O0000000000000000000000001O0000000000000000000000000001O00001O001O0010O01O1O001O001O0000001O000^M\\\\]Nh1db1aM_]N?1n1hb1hMe]NS2\\\\b1jMj]NR2Tc1N1O1O1O001O00000000eNRNd^Nn1[a1SNe^Nm1[a1TNd^Nl1[a1WNc^Ni1\\\\a1YN^]NMn0k1ca1aN[^N_1ea1cNY^N]1fa1iNU^NX1ja1iNU^NW1ja1kNT^NV1la1jNT^NV1la1kNR^NV1ma1kNS^NU1ma1lNQ^NU1oa1kNg]NRO4T2Sb1ROk]No0Ub1ROi]No0Wb1ROg]No0Yb1\\\\10001N1000000O1000000O2O000O100O1O10000O100000000O1000000000000000lLo]NY2Qb1gMo]NY2Qb1fMQ^NY2oa1gMQ^NY2oa1gMQ^NY2oa1gMR^NX2na1hMR^NX2na1hMR^N]OMd2Qb1oMS^N[OMf2Pb1oMS^NZOOf2na1PNS^NYO0g2la1PNV^NXONh2la1PN\\\\^NP2da1PN\\\\^NP2da1PN\\\\^NP2da1QN[^No1ea1QN\\\\^Nn1da1RN\\\\^Nn1da1RN\\\\^Nn1da1RN\\\\^Nn1da1RN\\\\^Nn1da1RN\\\\^Nn1da1RN\\\\^Nn1da1RN\\\\^Nn1da1RN]^Nm1ca1SN]^Nm1ca1SN]^Nm1ca1SN]^Nm1ca1SN]^Nm1ca1SN]^Nm1ca1SN]^Nm1ca1SN]^Nm1ca1SN]^Nm1ca1SN]^Nm1ca1SN]^Nm1ca1SN]^Nm1ca1SN]^Nl1da1TN\\\\^Nl1da1TN\\\\^Nl1da1TN\\\\^Nl1da1TN\\\\^Nl1da1TN]^Nk1ca1UN]^Nk1ca1UN]^Nk1ca1UN]^Nk1ca1UN]^Nk1ca1VN\\\\^Nj1ea1UN[^Nk1ea1UN[^Nk1ea1UN[^Nk1ea1VNZ^Nj1fa1VNZ^Nj1fa1WNZ^Nh1fa1XNZ^Nh1fa1XNZ^Nh1fa1XNZ^Nh1fa1YNY^Ng1ga1YNZ^Nf1fa1ZNZ^Ne1ga1[NY^Ne1ga1[NY^Ne1ga1\\\\NX^Nd1ha1\\\\NX^Nd1ia1[NW^Ne1ia1[NW^Ne1ia1\\\\NV^Nd1ja1]NT^Nd1la1]NS^Nc1na1]NP^Nd1Pb1]Nm]Ne1Tb1S100001O00001N1000001O0000001O00001O0000001O000000001O00001O0O2O001O001O00001O00001O000O2O001O001O001O1O001O1O001O001O0O2O001O1O1N2O1O3M2M3N1O001N2O2N2N1N3N1O1N6K2N1N3N2M3N
1N2O001N10001O0O2O001O1N2O2N1N2O1O0O2O1O0O2O001O0O2O1N2N2M6Kig\\\\Q2\"}}, {\"image_id\": 157, \"category_id\": 1, \"bbox\": [1294.6346435546875, 1182.4541015625, 351.70458984375, 289.541748046875], \"score\": 1.0, \"association_id\": 4, \"segmentation\": {\"size\": [1728, 2304], \"counts\": \"YnXV2P1ld1e0]O6J5K6K5K3N2N2N1O2M4M3M3M3M2N1O1O0010O001O1O1O001O1O1O1O1O00100O2N101N5L5J5L2M3N000O100000001000O104K8H4Q^NlMi_1W2l_NSNR`1Q2e_NVNX`1T2[_NQNd`1i3L6K2N3Ma0^O5L1N100O10O01O000000000O100000O01000hM``N^L7e0X_1h2faNVMZ^1g2kaNWMT^1f2SbNVMm]1i2XbNSMi]1k2\\\\bNRMc]1m2bbNnL_]1Q3fbNcLc]1Z3b201O1O00100O01O100O1O1J6J6N2N2N101O1O1O1O2M2N2M3M3N2N2O1O1O2N1O1O1O101N1O101N101N101O1N10001N1O100O2N1O1O101N10000O2O00001O001O001N2O1O1O001O001O001O1O001O001O001O0000O100O100O10000O100O100O1O1O001N2O1O1N2O1O1O1O1O100O100O10000O10O10O10000O0100O010O000010O0010O0100000000O10O1000000000000000001N10001O001O001O0O2O1O001O1O1O2N1O3M4L4L3M3M2N2N3M9G2N2maNWIk\\\\1l6mbNbIVOIY]1h6ZcNoIe\\\\1]3VcN]Oi\\\\1`0^cN\\\\Ob\\\\1b0ccN\\\\O\\\\\\\\1b0hcN\\\\OW\\\\1d0lcN[OS\\\\1d0ocN[OR\\\\1c0QdN[Oo[1d0TdNZOl[1d0YdNYOg[1e0^dNXOb[1f0cdNWO][1f0jdNVOV[1g0odNWOQ[1g0TeNVOlZ1h0XeNVOhZ1i0[eNUOfZ1i0]eNUOcZ1j0`eNTO`Z1k0beNTO^Z1k0eeNSO[Z1l0ieNQOWZ1n0neNnNRZ1P1TfNlNlY1S1bfN`N^Y1^1mfNYNTY1d1QgNYNoX1f1UgNWNkX1h1XgNUNiX1j1ZgNTNfX1l1[gNSNeX1l1^gNRNcX1m1^gNRNbX1m1`gNQNbX1m1agNQN_X1n1cgNPN_X1n1egNoM[X1P2kgNjMWX1R2ogNgMVX1V2k5N3M3N1M4L4K5K6K5I:[OdVeR1\"}}, {\"image_id\": 157, \"category_id\": 2, \"bbox\": [6.033168315887451, 1319.7822265625, 316.0465087890625, 92.101318359375], \"score\": 1.0, \"association_id\": 2, \"segmentation\": {\"size\": [1728, 2304], \"counts\": 
\"d^;n0md17L3O1N100O2O00000O100000000O100000000O100O10000O100000000O10000000000O100000000O1000000000000000000O1000000000O10O100000000O1000000O10000000000O1000000O101N100O100O10000O1000000O1000000O10O1001N10000O10000O10000O100000O100000000000O10000000000000000000000001O00001O1O001O1O001O001O00001O2N4La0_O2N1O1O1N2O1O001O1O1O8H3M1O1O0O2O00001O001O2M3N001O001O000O100000WN3d17J2N2N2O1O2M101O0l[N\\\\Olb1e0R]N^Olb1c0R]N^Omb1g0i\\\\N_OWc1e101O000O1001O0000000O10000000001O0000O1000O1000000000001O00000O100000001O0000000O10000O1O100O2O000O100O001O1N2N2N2O1O10000O10O0100N2O1O10000O01000O10000N2M3N2O1O1O1O100O100O100O1O1O1O1O10000O100O2O000O100O100O100O100O2O0O101N100O2N100O2N[RfX3\"}}, {\"image_id\": 157, \"category_id\": 1, \"bbox\": [0.0, 1224.7525634765625, 267.72296142578125, 151.3946533203125], \"score\": 1.0, \"association_id\": 2, \"segmentation\": {\"size\": [1728, 2304], \"counts\": \"UY1f0Pe1?G7I5M3M3L3O2M3N2M2N3N1O100N200O10O010O10O10O1000O010O1000000O0100000O010000O10O10O100001O000O101O0000000000O10000O2O000O1000000000000O100000O100O010O010O0100O0010O100O00100O01000O010O1O001O001O00100O0010O010O000010O10O0100O010O01O010O10O10O0100O10O10O10000O100O2N1O1O2N101N100O2O1N3N1O2M2O001O1O001N2O3M3M3M8H2N1O7I3M3M1O2N2N0010O00000010O0001O001O0000001O0000000WN^^NfM0O0j0ba1_1a^NfM2I1P1\\\\a1`1c^NdM;k0Sa1_1d^NdM:m0Ra1^1f^NcM:n0Pa1^1]_NaNc`1^1a_N^N``1a1d_N\\\\N]`1b1k_NWNU`1h1R`NQNo_1n1T`NnMn_1P2S2N1N2N2N2M3M3M3N2O1O1O10000000O01000000000000001O001O001O1O001O001O1O1O3M3M2N1O100O3N2N1N100O7J0O0010O010O1oMa]N`0_b1[Ok]Nb0Tb1XOR^Nh0cc101O0O1O1O2N2M2O2N4K2O1N2N2Nf`_[3\"}}, {\"image_id\": 157, \"category_id\": 1, \"bbox\": [1858.7410888671875, 751.3934936523438, 50.7650146484375, 138.1826171875], \"score\": 0.9999996423721313, \"association_id\": 0, \"segmentation\": {\"size\": [1728, 2304], \"counts\": \"[bXR3l0md1:I6K6K7Ia0_O:G5K:T]NWMna1f3H00001N2O0O20OO01O100O1HV^NYLka1d3:J7K4K5N3M2O2L3L6L4L5K4K4M4J6K5K5K5L4M2M3N2N2Mciid0\"}}, {\"image_id\": 157, \"category_id\": 2, \"bbox\": 
[92.88887023925781, 925.9801025390625, 510.8155517578125, 51.87457275390625], \"score\": 0.9999995231628418, \"association_id\": 0, \"segmentation\": {\"size\": [1728, 2304], \"counts\": \"]mR5210ie15N1O3N1O0O11O00000O10000O1O10001O0O1000000O1O101N1Oeb]1O]]bN000000000L0WZN1le10001O000000000000PPk00ooTO100O10001O0O1000000O101O00000O101O000O100000001O000O10000000000O100O10001O0O1000000O1000000O1000000O10000O100O1O1O1000001O0O1000000000000O100O10000O1000000O1000000O1000000O101O000O1000000O1O1O2O00000O100000000000000O101O00000O1000001O000000000O1000000000000000000000000000000000000000000000000000001O0000000000000000000000000000000000000000000000O101O01O0000000001O00001O00000000001O00000nNY[Nk0gd1QOY[N01o0fd1POZ[N10o0id1QOX[Nn0nd1O1O1N2O001O001O1O00001O00000000000000O100000O10000000000000000000O1000000O010O1L4K5O100O100000O0100O10000O1000000000000O0100000000000000000O10000000000000O100000000000000000000000000O10000000000000000000000000000000000000O0100000000000000000000O1000000000000O1000000O101O0000000O1000000O10000O10000O101O000O100O10001N1IiZNEWe17iZNE14Ve16kZNEO100Ve1:oZNEK0Xe19<O2N1N4Lld`l2\"}}, {\"image_id\": 157, \"category_id\": 1, \"bbox\": [983.2597045898438, 804.57470703125, 82.72784423828125, 152.30926513671875], \"score\": 0.9999617338180542, \"association_id\": 0, \"segmentation\": {\"size\": [1728, 2304], \"counts\": \"mTTd1;hd17\\\\[N3Yd1P1L4K5L3M5K4I6L4F:J6K5K4L4L4M3M4N001O1O1N3N10O10O1O1O100O10000O100O1000O01O01O010O10000O010O3M2N3M1O1N3N3M2N3N2M4M1N2N4_Mc]NX1db1_Nc]N\\\\1fc1J3N1N3M4K4K7BYheQ2\"}}, {\"image_id\": 157, \"category_id\": 2, \"bbox\": [1444.0535888671875, 1429.0189208984375, 375.657470703125, 92.9156494140625], \"score\": 0.9999599456787109, \"association_id\": 4, \"segmentation\": {\"size\": [1728, 2304], \"counts\": 
\"XlW\\\\21me17I3N2N1O2O00000O10000O10000O100O1jN_Om\\\\Na0Sc1@k\\\\Na0Tc1@l\\\\N?Uc1Aj\\\\N`0Vc1Ah\\\\N`0Xc1@h\\\\N`0Xc1Af\\\\N`0Zc1AY\\\\NJFj0Qd1^OS\\\\Nm0mc1c00O01000000000O10000000000O100000000000000000000O100000000000000000000001O000000001O00000000001O00001O00001O0000001O0000001O0000001O00001O00001O0000001O000000001O000000000010O000001O000000001O00000000010O0001O01O01O00010O00001O2N1O100O7I=D8G3M100O0010O01O00100O10001O0O10O010O000000001O0000001O01O0001O000O10000O100O1N2O1N2O1N3N1O1O100O1J6L4L400O100O2O00000O100000000000000001O0000000000000010O000001O000000001O00000000001O0000000000001O000000001O00001O0000001O0000000O101O00001O00000000000O2O000000000000001O0000001N100000001O00000000001O000O101O000000001O0000000O101O000000000000001O000O100000001O000000001O0000001O00001O00000000001O0000001O00000O2O0000001O0O10001O00001N101O001O0O2O1N2N2N6E]ZNNmdbi0\"}}, {\"image_id\": 157, \"category_id\": 2, \"bbox\": [2029.5948486328125, 848.5979614257812, 266.3768310546875, 18.682373046875], \"score\": 0.9998805522918701, \"association_id\": 1, \"segmentation\": {\"size\": [1728, 2304], \"counts\": \"gTT[35je12O001O0O1000000O1000001O0000000000000000000000000001O0000000001O01O00000000001O0000000000001O0000000000O100000000O2O00000000O01000000000000O2O00000000000000000000000000000O100000000000000010O000000000000000000O1001O00000000000000001O000000001O00001O01O0001N10001O1O1O001O00001O001O000010O000000000000000000000000000000000O1000000000000O10000O1000000O100000000O1000000000000000000000000000000001O00000000000000000000000000001O000000001O0000001O000000001O00000000001O000000000000001O0000000O1000000000oXT1\"}}, {\"image_id\": 157, \"category_id\": 1, \"bbox\": [1123.7723388671875, 777.3067016601562, 82.0850830078125, 169.98675537109375], \"score\": 0.9996334314346313, \"association_id\": 0, \"segmentation\": {\"size\": [1728, 2304], \"counts\": 
\"PaYk1Z2Yc1d0Z]NVM4>Z`1\\\\2]`NeMb_1Z2^`NhMa_1X2]`NkMa_1Y2\\\\`NiMa_1Y2S`NSNm_1n1Q`NSNn_1n1Q`NSNo_1m1Q`NQMYOi0f`1W2P`NoL\\\\Oh0e`1[2n_NkL^Oj0d`1\\\\2X`NbMg_1b2W`N\\\\Mj_1m2`_N^L0c0a`1R3Z_N^L5>b`1a3\\\\_N`Ld`1]41O100001O0000001O001O001O001O0010O00001O1O100O2N2N3M4L2N002N1O1O001O2N2N2N1O2N1O2N001O2N2N1O1O1O1O1O1SOS^NeMna1W2V^NgMla1S1Z^NjNO2ga1R1_^NeN06da1R1`_NhNd`1U1U2N2O1N3M3L5H8Kggmi1\"}}, {\"image_id\": 157, \"category_id\": 1, \"bbox\": [1821.4375, 947.1533813476562, 220.731689453125, 260.83502197265625], \"score\": 0.9990227222442627, \"association_id\": 0, \"segmentation\": {\"size\": [1728, 2304], \"counts\": \"YkXP32nab01nS_O3M3N2M2O1N1000000O10000O1O2K4E;jNXO`\\\\N\\\\1Pc1R1N2M4N10OO2O1M3N1N3O1N2N2\\\\LfLXbN`0>l2Z]1bN]bNc1b]1jNkaN]1T^1iN`aN^1`^1Q3O10O10O1000O010000O1N2O2M2O100O2N100O2O0O2N1O1O1O2N10O0100O1O100O10000O1O2O0O1O100000000O10000000000000001O0O10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001O00000000000000001O00001O00001O001O0O2O00001O0O10000O2O000UMebNPM\\\\]1g2PcNVMQ]1b2YcNZMi\\\\1b2]cNZMd\\\\1X2ncNcMT\\\\1V2VdNeMl[1f1mdNTNV[1]1[eN^NiZ14eaNJh3MhZ1JWbNKW37gZ1HQgNNXY1KPhl=\"}}, {\"image_id\": 157, \"category_id\": 1, \"bbox\": [1312.5277099609375, 812.5801391601562, 100.3184814453125, 136.240966796875], \"score\": 0.9987919926643372, \"association_id\": 0, \"segmentation\": {\"size\": [1728, 2304], \"counts\": \"j`XU21e0`0=@Wb1a0i\\\\NP1;hNfb18o\\\\No0<iNeb1<o\\\\Nc0`0QOab1;Q]Nb0`0RO^b1g1c]NYN[b1i1e]NVN[b1j1f]NUN[b1k1e]NTN[b1l1g]NRNXb1P2i]NnMWb1S2c0nM`\\\\NS2`c141G`\\\\NSNac1l1;O1O1000Y\\\\NTNZc1o1b\\\\NSN]c1o1`\\\\NSN_c1n1^\\\\NUNbc1Z2J2N1O105J4m\\\\N[Mcb1R301N1O1O001O1O001O0O101N1O10O0101N100O10O100O02O0O10000000000O101O000000000000000000010O0001N1010O0001O0000001O010O100N2O1N2O1N101M3L5ZNd^N_Nea1X1P2B`]P_1\"}}, {\"image_id\": 157, \"category_id\": 2, \"bbox\": [1354.4591064453125, 942.6666870117188, 263.3543701171875, 16.8392333984375], \"score\": 
0.9916003942489624, \"association_id\": 0, \"segmentation\": {\"size\": [1728, 2304], \"counts\": \"biXX21ne1101O001O00001O00000O1000000O101O0000001O00001O000000001O000000001O00000000000000001O000000000000000000000000000000001O00000000000000000000000000000000000000000000O1000000000000000000000000000000000000000000000000000000001O0000O10000000001O0001O0000000001O000000000O100000001O0000000000000000000000000001O00000001O000000000000000000000000000000000000000000001O0000000000000O100000000000000000000000001O0000000000000000000000000000000O10001O00000000001O000O2O00X\\\\QU1\"}}, {\"image_id\": 157, \"category_id\": 1, \"bbox\": [1720.562744140625, 956.955078125, 172.181396484375, 243.968017578125], \"score\": 0.96548992395401, \"association_id\": 0, \"segmentation\": {\"size\": [1728, 2304], \"counts\": \"[PPk23Tb12RaNm0X^1BRaNc1S^1cNVaN_OZN]2W`1XNl`Ni2UOfLP_1f0eaNe3S^1aL\\\\aNo3`^1k1L4M3M2O1N101N101O0O1000000000001O0000000000000O1000000000000000000000000000000000000000000001O0000000000000000000000001O0000000000000000001O00000000001O00001O0000001O000000000000000000001O0000000000000000001O0000000000001O000000000000001O0000001O00001O001O1O001N101O0`KUaNk1l^1kMfaNl1[^1PNZbN^1f]1^NdbN[1^]1^NnbN\\\\1S]1cMVaNJo1`2l\\\\1bMUdNZ2m[1bMXdN\\\\2k[1_MYdN_2i[1\\\\M\\\\dNb2f[1ZM]dNd2b_1N4L5K6I7J4K7J2M3N1O2M2O2N3J<BhSfe0\"}}, {\"image_id\": 157, \"category_id\": 2, \"bbox\": [1110.4744873046875, 940.8244018554688, 138.239013671875, 19.88043212890625], \"score\": 0.8856310844421387, \"association_id\": 5, \"segmentation\": {\"size\": [1728, 2304], \"counts\": \"i]ej14le11O000000000001O0000000000000O10O2O0O1O1O1N2N2N200O1O100O2O0000O10000000000000000000001O001O1O002N00001O0000001O001O010O00000000000000001O001O001N10000000000O10M3O1N2O1O1N20O010000O1000000000000000O100000000000000000000000000000000O100N2000000001O1O0000001O00000001O00bhfg1\"}}, {\"image_id\": 157, \"category_id\": 1, \"bbox\": [1096.4471435546875, 770.2871704101562, 81.9803466796875, 184.056396484375], \"score\": 
0.5337004661560059, \"association_id\": 5, \"segmentation\": {\"size\": [1728, 2304], \"counts\": \"]Pli1S3bb1=N1O0O1M3N20O01E_LW^Na3ca1a0O010L5I7M2O2O0N2M4M3N1H9N101N1O101O00000O1O0100O001O0010O01O1O1O1N1N4N100000000O12N2N1O1O2N10N101N2000000O2N3L4N7I1N1O1O1O1O1O3M4K4J4N101oNY^NfMla1l1g^NdMok\\\\k1\"}}, {\"image_id\": 157, \"category_id\": 1, \"bbox\": [21.65567398071289, 759.7781372070312, 67.22390747070312, 161.2451171875], \"score\": 0.3111489713191986, \"association_id\": 0, \"segmentation\": {\"size\": [1728, 2304], \"counts\": \"okU1m07TONm0`c1`1K6K3N0O010000001O1O2N1O1O1O2N1O1O100O2O1O000000Oh0oLa\\\\NR2ic1M2N7J1N4L2O1N2O3M7I1O001O5K3M0000001O000O00100O1N2M8H5K4Kl`[e3\"}}, {\"image_id\": 157, \"category_id\": 1, \"bbox\": [1053.3204345703125, 765.9034423828125, 160.1829833984375, 183.0909423828125], \"score\": 0.30817365646362305, \"association_id\": 0, \"segmentation\": {\"size\": [1728, 2304], \"counts\": \"`XTh1e0We1?C4M3L5K6K4K4M2O1O3M2N001j\\\\NjM`b1V2R]NlM34gb1Q2Q]NRN38bb1k2M3M:F5K2N2N2N1N3N2N101N1O1R_NdKV`1^4c_NiK\\\\`1P5M1O000100O1O2N100O2O4K1O01O0O2K5M2O2N1000001N1000000000001N1000O1000001O00001N101N01000O100O010O100000O011O0O10001N11O00001O000010O010O10O6K4K5K2N2N2N2N2N1O100O001O2N2N2N2N1O1O1O001O001N2O2N1O1PNU^NfN15la1R1W^NeN26ha1S1Z^NbN28fa1P1]_NjNh`1P1X2L3O1N3M4K4J8H7Jdgmi1\"}}, {\"image_id\": 157, \"category_id\": 1, \"bbox\": [1648.4722900390625, 785.6990966796875, 31.6988525390625, 163.675537109375], \"score\": 0.2403469681739807, \"association_id\": 0, \"segmentation\": {\"size\": [1728, 2304], \"counts\": \"jXQg2b0Ue19K7bNXO\\\\]No0]a1a0o]NHka1[2UOXLV_Nl3g`1^Ll^Nh3Ra1d0O2N1O10O02N010000RN[_NaNe`1Z1h_N^NW`1k0e`NoN^_1f0TaNSOk^1k0ZaNoNj^1k0caNmNb^1f0maNQO[iWQ1\"}}, {\"image_id\": 157, \"category_id\": 2, \"bbox\": [24.157020568847656, 930.394287109375, 172.55279541015625, 37.63714599609375], \"score\": 0.19114646315574646, \"association_id\": 0, \"segmentation\": {\"size\": [1728, 2304], \"counts\": 
\"_i\\\\16he15I9J3M3M2N2O000O100O10001O000000O100001O000000000000001O0000000000000000000000000000000000O1000000O1000000000000000000O100000000000000000000000000O100001O0000001O0000O1000000001O000000000000000000GQ[N@Pe1?S[N_Omd1a0S[N_Omd1a0S[N_Omd1`0T[N@ld1`0T[N@ld1?U[NAkd1?U[NAkd1>W[N@jd1?<00O1O100O10000O1O10000O1O1O100O101N1000000O10000O1O10000O100O2OkXe`3\"}}, {\"image_id\": 157, \"category_id\": 2, \"bbox\": [944.9281616210938, 940.1427001953125, 342.27862548828125, 22.31201171875], \"score\": 0.11868377029895782, \"association_id\": 0, \"segmentation\": {\"size\": [1728, 2304], \"counts\": \"dYma111100ee1:N00000NE_ZN<`e12000000000000000000000000001O00000000001O00000001O0000000001O00000000001O001O001O1O001O1O00000000001O001O001O00001O00000000000000000000O1000000000001N1000000O1O100000000O100O100O100N200O1O100000000O10000O10000O100000000000000000000O10000000000000000001O000000000000000000001O001O00001O001O0000001N1000001O00001O00000000001O000000000000001O00O1000000O10O01000000O10000O10000O10000O100O1000000000000000001O0000000001O000000001O001O0000001O0000000O1000001O0000000000000000O1000O1000O100000000O1000000O01000MEaZN;_e1EaZN;_e1E`ZN<`e1D`ZN<`e120000000000O01000000000000000000000000000000000000000000000000E`ZN5ae1JbZN4^e1LcZN3]e1McZNNL3ae1OfZNNI0ae12fZNN\\\\e1290000000001O0000001O00blUg1\"}}, {\"image_id\": 157, \"category_id\": 1, \"bbox\": [916.1583251953125, 850.8192749023438, 51.56024169921875, 97.64801025390625], \"score\": 0.11673988401889801, \"association_id\": 0, \"segmentation\": {\"size\": [1728, 2304], \"counts\": \"hQc`18ce1;G8D?F8E9G=B9M3c\\\\N^MUc1h2N101O1O0000010OO2N2O2N2M2N3M5J5K4SNZ\\\\N[1kc1_NY\\\\N^1Vd1N3M4L2L4L9E7G;JeUPW2\"}}, {\"image_id\": 157, \"category_id\": 2, \"bbox\": [938.5022583007812, 942.8829956054688, 198.27789306640625, 18.9149169921875], \"score\": 0.061046164482831955, \"association_id\": 0, \"segmentation\": {\"size\": [1728, 2304], \"counts\": 
\"gmia12ke1OWZN2he14N101O000000000N20000000000O10000000001O000O101O000000000000000001O01O00000001O001O1O001O00001O1O1O1O00001O001O001O1O00001O00001O0000000000000001O0O100O100O10000O10000O100O10000O1O100N200O10000O10000O10001N1000000O1000000000000000000000000000000000O100000000000000000001N100000001O0000001O000000001O00001N100000000000001O0000001O00001O0000000O2O001O001O1NVZVn1\"}}, {\"image_id\": 157, \"category_id\": 2, \"bbox\": [1041.074951171875, 944.3167114257812, 198.1920166015625, 18.32257080078125], \"score\": 0.06052294373512268, \"association_id\": 0, \"segmentation\": {\"size\": [1728, 2304], \"counts\": \"fcmf18fe13N2N10000000000O2O000000O10000000000000O1000000000000000000001N10000000000000000000001O0000000000001O000000000000000000001O0000000000001O1O00001O00001N1000001O000000O100000000O100O1O1O1O100O100O2O000O10000000000000000000000000000001O000000000000001O001O002N00001O00001O001O0000001O000O100000O1O1O100O100O100O00100O1000O10000000000O100000000000O01010O000000000000O100HZZN6fe1JZZN6fe1JZZN6ee1K\\\\ZN0O0ge1OZZN1O0ge1OZZN010ee107000_f\\\\h1\"}}, {\"image_id\": 158, \"category_id\": 1, \"bbox\": [457.51861572265625, 431.6331481933594, 190.746337890625, 183.57632446289062], \"score\": 0.9999998807907104, \"association_id\": 2, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"oog:i2Re09J4N1O1O1O2N1O0O20O010O000010O0001O001O001O00O100O100O1O100O10O01O1O1O010O01N1O101N012N2N2N2N2M3O0O2O001O0000001OO101O0O2O001N101O0O101O000O2O0O101O1O0O2O001O00001N10000O1O1O1O100O10O00000001N110O001N2O0O2O010O0O100O2O00100O1N2O1O1O100O1O1N2O1O1O1N2L5L3N3M3M3N1O2O1O1O1O2N1O1O100001O2^MW]OHmb02Z]OHjb02Y]ONgb01Z]ONib0NY]O1jb0JY]O5lb0DW]O;nb0^OU]Oa0nb0[OT]Od0mb0ZOU]Oe0lb0YOU]Oh0kb0VOW]Oi0kb0UOU]Ok0Qc0nNP]OR1Sc0kNm\\\\OU1Tc0iNm\\\\OW1Tc0hNl\\\\OX1Tc0gNm\\\\OY1Tc0fNl\\\\OZ1Vc0dNj\\\\O\\\\1Xc0bNh\\\\O^1Zc0_Ng\\\\Oa1Zc0^Nf\\\\Ob1[c0\\\\Nf\\\\Od1\\\\c0YNf\\\\Of1]c0SNg\\\\Om1fd001O00001O1O1O1O1O1O1O001N3N2N3M3L3N2M4Fa0@l]i8\"}}, {\"image_id\": 158, \"category_id\": 1, 
\"bbox\": [664.724365234375, 419.51934814453125, 227.10943603515625, 180.093017578125], \"score\": 0.9999998807907104, \"association_id\": 1, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"cYi?4ig08I3N2N2M2O3oNDVZOa0ee0P1L3N3N1O2O0O2O1N2O1O0O2O001N2O1O2M3N1O1N2O1O001N101O000O1000000O1000000O11O001O00001O001O000O2N2M2L5M2O1O2O000O1000000O10000001O00000O10001N100O2N100O2O1O0O2O001O0O101O0O100O1M3M3N2N2O1O0O2N2M2M4M2gNaLQ^Oa3ka0eLQ^O\\\\3oa0fLo]O[3Qb0fLl]O[3Tb0hLh]OY3Xb0jLd]OX3Zb0lL^]OY3bb0R11N110O1O2O0O2N2N2N1O2O1O1O8I7H100O1O100O2O002N1O1O010N1000001O100O000O10001N2O1N1gMe[Ol0[d0QOi[Om0Xd0QOk[Om0Vd0ROk[On0Td0QOn[On0Td0nNo[OQ1Sd0jNQ\\\\OU1Qd0fNS\\\\OY1oc0cNU\\\\O[1mc0aNV\\\\O^1lc0_NV\\\\O`1lc0\\\\NV\\\\Od1Se001O00000000001O00000000000000000000001O01O000001N2O1O001O001O001O001N2O1O2M2N2L4M3K6H8Ff^X3\"}}, {\"image_id\": 158, \"category_id\": 2, \"bbox\": [177.74232482910156, 606.6630249023438, 288.57684326171875, 24.199951171875], \"score\": 0.9999619722366333, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"_SQ51og0000N4L2M4N1O1O2O0O1O100N2000000O1000000000000000000O100000000000000000000000000001O0000001O0O101O00000000000000000000001O00000000000O1000001O0000000000000000000000000000001O000000000000000000001O00000000000000000000000000000000000000000000001O00000000000O10000000000000000000000000000000000000000O10000000000000000000000000000000000000001O0O1000000000000000000000000000000000001O000000000000000000000000000000000000000000000O100000000000000000000000000000000000000000000O10000000001O00000O2O0000eTT=\"}}, {\"image_id\": 158, \"category_id\": 1, \"bbox\": [144.5557403564453, 398.6746826171875, 167.7925262451172, 214.4140625], \"score\": 0.9999431371688843, \"association_id\": 3, \"segmentation\": {\"size\": [768, 1024], \"counts\": 
\"\\\\Qa31Xg0R1SOe0G8I6I6K6M3M3M4M2O1N2O0O2O1N2O1O2M2O0O2O0O1O1O100O1O1O1O0O2M2O1O4K4J7\\\\\\\\OTLka0d5C=N2M2M3O1O100O10000000000000000000000000O100lNW_OPKj`0j4i_OhJX`0V5Z1O100O1O1N2O1O1000001O000001O1N2O2N001O1O1O2O1N1O1O1O2M5L4L1O2N0001O01O00000000000000O101N2lJk]O[4Xb0YKX^O_4la0[KY^Ob4ja0ZKZ^Oc4cb0N2O2N1N2O1O1O2N3N1N1O2O001O2N2N1N1000002N2O0O001N3M3N;fLk[OQ2Se0O100O1O1O2N1O010O000001O01O001N3N2M2O1O1N3Lo^e`0\"}}, {\"image_id\": 158, \"category_id\": 2, \"bbox\": [472.6575622558594, 604.6875610351562, 171.15652465820312, 15.77227783203125], \"score\": 0.9998049139976501, \"association_id\": 2, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"SSf;3mg0000O10000000000O2O000000000000000O100000000000001O000000000000000000000000000010O00000000000000O10000000000O10000000000O100000000000000000000000001O00000000000000000000000000000001O00000000100O001O00001O001O001O00000O2O1Oile9\"}}, {\"image_id\": 158, \"category_id\": 2, \"bbox\": [620.0040893554688, 599.2055053710938, 400.53375244140625, 27.41217041015625], \"score\": 0.9991825222969055, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": 
\"WcS?1mg00TXO0lg02N200000000O101O00000000000000000000000000000000000000001O000000001O000000000000001O0000000000001O00000000000000000000000000000000000000000000001O00000000000FGmXO9Sg0GnXO8Rg0HnXO8Rg0HnXO8Rg0HnXO8Rg0HnXO8Rg0HnXO8Rg0HnXO9Qg0GPYO8Pg0HPYO8Pg0HPYO8of0IRYO6nf0KQYO5of0KQYO5of0KQYO5of0KQYO5of0KQYO5of0KQYO5of0KRYO4nf0LRYO4nf0LRYO4nf0LRYO4nf0LRYO5mf0KSYO5mf0KTYO4lf0MSYO3mf0MSYO3mf0MSYO3mf0MSYO3mf0MSYO3mf0MSYO3mf0MSYO3mf0MSYO3mf0MSYO3mf0MSYO3mf0MSYO3mf0MSYO3mf0MTYO2lf0NTYO2lf0NTYO2lf0NTYO2lf0NTYO2lf0NTYO2lf0NTYO2lf0NUYO2jf0NVYO2jf0NVYO2jf0NVYO2jf0NVYO2jf0NVYO2jf0NVYO2jf0NVYO2jf0NVYO2jf0NVYO2jf0NVYO2jf0NVYO2jf0NVYO2jf0NVYO2jf0NVYO2jf0NVYO2jf0NVYO2jf0NVYO2jf0NWYO1if0OWYO1if0OWYO1if0OWYO1if0NXYO2hf0NXYO2hf0NXYO2hf0NXYO2hf0NXYO2hf0NXYO2hf0NYYO1gf0OYYO1gf0OYYO1gf0OYYO1gf0OYYO1gf0OYYO1gf0OYYO1gf0OYYO1gf0OYYO1gf0OYYO1gf0OYYO1gf0OYYO1gf0OYYO1gf0OYYO1gf0OYYO1gf0OYYO1gf0OYYO1hf0NXYO2hf0NXYO2hf0NXYO2hf0NXYO2hf0NXYO2if0MWYO3if0MWYO3if0LXYO4[g0O0000000000000000000000000000000000000000000000000000001O000000000000000000000000000000000000000000000000001O]O0VYO0gf04XYOLhf04XYOLhf04XYOKif05WYOKhf06XYOJhf06XYOJhf06XYOJhf06XYOJhf06XYOJhf06a0000000000000000000000000000000000000001O000000000000000000000000000000000000000000000000000000001O0000000000000000000000000001O000000000000000000000000010O00000000000000001O0000000000000000000000000000000000001O0000000000000000000000000000000000001O0000000O2O0O2OR]2\"}}, {\"image_id\": 158, \"category_id\": 2, \"bbox\": [657.7098388671875, 593.8565673828125, 360.61712646484375, 22.132568359375], \"score\": 0.9961466789245605, \"association_id\": 1, \"segmentation\": {\"size\": [768, 1024], \"counts\": 
\"Tka?1ng010000000000O1000000000001O00000000002N0000000000000000000000000000000000000001O0000000000000000000000000000000000001O00EMiXO3Wg0MiXO3Wg0NhXO2Xg0NhXO2Wg0OiXO1Wg0OiXO1Wg0OiXO1Vg00kXONVg03iXOMWg03iXOMVg04jXOLUg05kXOKUg05lXOJSg07;0O1000001O000O1000000000000000000000000000000000000000000000000000000000000000000000000000000000000000O100000000000000000000000000000000000000000001O0000000000000000001O0O105KShT1OmWkN2J500O100000001N10000000000000000000000000000000000000000000001O00000000000001O000000000000000000000000000000000001O0000000000000000000000000001O000000000000000001O01O00000000001O0000000000001O000000000000001O0000000000000000000000000000001O000000000000000000000000000000001O000O2O0OTm3\"}}, {\"image_id\": 158, \"category_id\": 2, \"bbox\": [238.84275817871094, 587.1688232421875, 230.07859802246094, 17.37176513671875], \"score\": 0.9949349761009216, \"association_id\": 3, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"_Ri52mg01O2O000O100000000000O100000000001O000000000001O001O0010O0002MSPh01moWO0O1L4O100O100000000O100000000000001O000000000000000000000000000000000000000000001O000000000001O00000000000000000000000000000001O000001O00000000000000001O00000000001O000000000000001O0000001O0000001O000000001O0000000000001O00000000000000000001O00000000000000000000000001O0O1000001O0O[]\\\\=\"}}, {\"image_id\": 158, \"category_id\": 2, \"bbox\": [480.999267578125, 496.84979248046875, 181.7978515625, 122.7357177734375], \"score\": 0.18828992545604706, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": \"PSU<1og0001N10000000001O00000000000000010O001O000000001OO1000000O2MQh02oWO1O1O0O100001N100O1OQP<OQPD00001N1001O0O100000P]V:\"}}, {\"image_id\": 158, \"category_id\": 2, \"bbox\": [107.54190063476562, 596.3466796875, 377.59014892578125, 32.4193115234375], \"score\": 0.11631376296281815, \"association_id\": 0, \"segmentation\": {\"size\": [768, 1024], \"counts\": 
\"R[b27ig01M201O000000001O0000000O10001O00000000000000000000001O00000000000000001O000001O000001O000000000000001O00000000000001O0000001O002N3M6J2MRhk00lWTO4L3N2N2O1O101N10000O1000000N20000000000000000000000000000000000000000000000000000001O00000000000000000000000O10001O000000000000000000000000001O00000000000O10001O0000000000000000001O0000000O10000000000000001O00000000000000000000000000001O0000000000000000000000000000000000000O10000000000000000000000O2O0000000000000000000000000000000000000000000000001O0O10000000000000000000000000000001O000000000000000000000000000000000001N100000000000000000000000000000000000000000001O000Ofd[=\"}}, {\"image_id\": 159, \"category_id\": 1, \"bbox\": [954.3718872070312, 324.3800354003906, 141.67279052734375, 107.74984741210938], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [1088, 1422], \"counts\": \"hQgo0T16mN\\\\P1`1L4K4L3K5M2O2N2M2N2O100O1M3M3O1O0O1O2O001O1O001O1O1O1O1O101N1O100O100O10O010O0000100O1N2M3O1O1O1O100O10000O10000000000000O20O000000000000000000000O100000000000O10000000001O00001O1O1O001O00100O1O000000001O0000O100000000000000000001O00000000001O00000O100O1O2M2L6@?J7M2N4K;POaoNGhP12h0GVUl:\"}}, {\"image_id\": 159, \"category_id\": 1, \"bbox\": [559.1589965820312, 477.4880065917969, 581.5250854492188, 377.7016296386719], \"score\": 1.0, \"association_id\": 2, \"segmentation\": {\"size\": [1088, 1422], \"counts\": 
\"Ybbb0a3Yn0c0@:F=Dd0\\\\O5L3M3M3N2M3N2M3M3M3M3M3M2N2M3N2M3N2N1O2N2N1O2O0O2O0O2O1N1O2O1N1N3M2N3M2N3L3N2O2N100O101N100O100O100O100O10O01O1O1N2M2N3M3N2N10100O00100O010O01N100O100N101MMM162O1O100000001O0010O01O01O010O001M2M4L3N3N2O1O001O1O010O100O100O100O1O100O1O1O1O100O100O10000O10000O1000001O0O100000001O0O10001O0O10001O0O10001N10001O0O100O2O000O10001N10000O101O00000O100000001O000000000O100000000000001O0000000000000000000000000000000000000000001O0000000000000000000000000000000000000000000000000000000000000000000000000000000000001O000000000000000000000000000000000000000O10001O00001O001O1O001O1O2N1O1O1O1O1O1O001O001O001O001O001O1O1O001O1O001O00001O001O000000000000000000000000001O00000000000000000000O1000000000001O000O1000000000000000000O100000000O10000O1000000O100O10000O100O10000001O001O010O1O00001O0010O0001O000010O000001O001O010O1O001O1O1O1O100O001O001O010O0001O00001O00001O01O01O00001O000000001O0000001O00001O000O2O001O001O1O001O1O1O0O2O001O001N101N1O2O1N1N3M3L4L4L4L4M3M3M3M3N2M2O2N1N3N1N3N2M2N3L4N2M3N3M4L4L4L4L2N2N2N2N2O1O1O1O1O1O1O1O1O2N2O2M3M3M1O1O1O1O1O10O01O001O1O1O001O1O2N2O1N2N2N2N2N2N4L3M3M3M2N3M2N3M3L3N2N2M4M3L4L6J5L4K3M3N1N3M2O1N2N2N2O0O3M4K5]Oc0H9G9G:Fm0bNcnZ9\"}}, {\"image_id\": 159, \"category_id\": 1, \"bbox\": [1084.573974609375, 363.5130920410156, 247.3935546875, 196.33206176757812], \"score\": 1.0, \"association_id\": 3, \"segmentation\": {\"size\": [1088, 1422], \"counts\": \"UYQT1d1WP18K5K3N2N2M3J6J6I7H8M3L3H9L3M4I6N3M2O2N2N1N2N2N2I6M4O1N2O1O1O000010O01O010O10O01O0000001N101O001O001O0O1O1000001O001O100O10000O100000000000000000000000000000O1000000000000001O0000000000000000O1000000000000000000001O000000O100000000000000000000000000000000000000000000000000000000001O00000000000000000000000000000001O00000001O00000001O00000000001O011N2O3L100O10O0100O106I10O01000O00100O1O001O001O01O01O01O010O2N3N1N3L2O1N101O0O1dKeROS4[m0kKgROU4Zm0iKhROU4Ym0kKgROU4Ym0jKhROU4bm0O100O1N200O1O3dLVRO]2mm0[M]RO`2cn0N4J5J5I7\\\\O^PObNio0R1QPOPO_P1m0;M4L3M4K4L5IeaU3\"}}, 
{\"image_id\": 159, \"category_id\": 2, \"bbox\": [1097.7860107421875, 528.947509765625, 253.87255859375, 39.5614013671875], \"score\": 0.9999997615814209, \"association_id\": 3, \"segmentation\": {\"size\": [1088, 1422], \"counts\": \"][bT11oQ10M4O0O101O0O2O0000000O101O00000O10001O000O101N10000O10000N2O2N1N200O100O1000000O10001N100000000O10000000000000000000001O01O0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001O0000000000000000000000001O0000000000000000000000001O00000000001O00000000001O00000000001O00001O00001O00002N1O2N2N1O0000001O001O1O1O00000000001O0000000000000001N1000000O100O10000O1O1N2M3O1O1000000O1O100O010O100000000000000000000000O10001N1O100O10000O10000O100O1O2O000O2N2NolZ2\"}}, {\"image_id\": 159, \"category_id\": 2, \"bbox\": [560.1386108398438, 759.625244140625, 631.1399536132812, 155.2508544921875], \"score\": 0.9999994039535522, \"association_id\": 2, \"segmentation\": {\"size\": [1088, 1422], \"counts\": \"SW[c04hQ19I5L2N2N2N1O2O1O0O2O00001O000O2O001O001O001O001O000O101O0000001O00000O1000000O100O101O0O10000O100O10000O10001N100O100O100O2N100O1N2O1N2M3N2O2N100O1000000O100000000O10000O101O0O1O1O1O1O1O100O10000O1000000O10000000000O10000000000O1000000O10000000000O100000001O000000000000000000000O100000000000000000000000001O000000000000000000010O00000000000000000O101O000000000000000000000000000000000000001O00000000000000000000000000000000001O000000000001O00000000000000001O00000000001O000000001O00001O0000001O00001O001O00001O00001O0000001O00001O001O002N3M2O0O1O001O1O00001O1O001O001O001O001O1O10O01O1O001O1O10O01O001O000001O000000000O1000000O100000000O2O00000O1000000O10000O100O100O1O100N2O1O1O1O101N100O10000O10000001O00000000001O000000001O0001O01O001O001O1O1O1O1O1O002N1O2O4K3M3M2N2N1O001O00010O0000001O00001O00001O001O1O001O00001O001O00001O0000000000000000O100000000000000000000O100000O10000000O10000O100O10O01M3L4L4O1O1O100O100O100O10O01O1O1O1O1O0O2O1H8A?G9M2O2O1O1O10O0100O0100000O010000O10O10O100O1O010O1000O0
100000O010000000000O2O000O10000O1N2N3N1O100O10000O10000O2O00000O10000O1O1O1O1N2N2M3O1O1O100O100O10000O10000O100O1O1O1O1O2M2M3N2N2N2O100O100O10000O2O000O101O0O1O2O1N1O2N2N1N3N2N2N2N2N1O2O1N101N2O1N2K5K5M`Pk7\"}}, {\"image_id\": 159, \"category_id\": 2, \"bbox\": [1322.96337890625, 486.2054748535156, 92.2972412109375, 19.872589111328125], \"score\": 0.9999983310699463, \"association_id\": 4, \"segmentation\": {\"size\": [1088, 1422], \"counts\": \"ZUn[13jQ17L1O1N2O001O000000000000001O00000000001O000000000001O00O2O0001O0000000001O000000000001O0000000001O0000000001O0000001O00001O1O1O1O1O00001O000O2O000000001OO1000000000000O100000O100O3M_n6\"}}, {\"image_id\": 159, \"category_id\": 1, \"bbox\": [1239.725341796875, 334.205322265625, 179.824462890625, 164.438720703125], \"score\": 0.999995231628418, \"association_id\": 4, \"segmentation\": {\"size\": [1088, 1422], \"counts\": \"WieY16iQ13L3K4L5N101N1O2O1N3N2M3N2M1O1O101N100O101N2O1N3N2N2N1O1N2O001O001O1O2N1O1O1O001O2M101O1O001O1O2N1O1O1O006J4L6YROSMok0P3mSOSMQl0o2lSOTMRl0o2iSOTMVl0n2`SO[M_l0g2\\\\SO\\\\Mdl0j2PSO]Mol0j300000000000O10000O2O0O10000O10000000000000000000000000000000000001O00000000000O1000001O0000001O000000001O0000000000001O0001O01O00001O1O001O1O00001O00001O001O1O00000001O01O000001O002N1O1O2M3N5J4K7J5kLTROm1Pn0iM^ROn1Qo0Gd1gM]f1\"}}, {\"image_id\": 159, \"category_id\": 2, \"bbox\": [979.3087158203125, 417.0537109375, 110.2626953125, 14.34307861328125], \"score\": 0.9999301433563232, \"association_id\": 1, \"segmentation\": {\"size\": [1088, 1422], \"counts\": \"]kdP11oQ10O101O00000O100000000O100O1O1O1O10O100000000000000000000O1000000000000000000000000O10O100000000000000000O1000O1000000000000000O10001O00000000001O0000O10000000O1000O100000000O100000O10000000000O101O000OmdU;\"}}, {\"image_id\": 159, \"category_id\": 1, \"bbox\": [0.0, 519.12890625, 399.82647705078125, 568.87109375], \"score\": 0.9966068267822266, \"association_id\": 0, \"segmentation\": {\"size\": [1088, 1422], \"counts\": 
\"c`0U2U3:nf0k0RWO\\\\1dg0ROcWOe1Rh0]4F9H7I5L2N1O2N1O1O2N1O2N1O1O2M2O2N1O2N2N2N2N2N2N3M2N3M2N2N1O1O2N1O1O1O1O1O1O2O0O2N3M3M2N3M2N2N2N2N2N2N2N1O2N1O1O1O2N1O001O100O100O100O3N5K2M3N1O1O000001N1O2O0O2N101N2N2O1N2N1O2O0O1O1O0010O01O001O0010O0001O00001O1O1O2N1O2N3M2M3N1O2N1O001O1O00100O1O1O1O1O101N2N1O2N1O1O010O1O1O1O1O1O1O100O2N1O2N1O1O2O0O1O1O2O0O1O1O101N1O1O100O101O0001O001O2N1N2O1O1N2O0O1O2N1O2N3M2mZOTDnc0o;l[OUDSd0n;f[OXDXd0k;`[O[D_d0h;Y[O^Dfd0`<N1O1O1O001N100O2O0O100O2O0O101N100O2O001O1O001O1O1O001O001O001O00O101N100O2O0O1O1O2N1O100O1O20O1O2N101N1O1O1O010O1O001O010O01O0010O01O1O1O100O1O1O2N1lDb[Od8_d0YGe[Oe8\\\\d0YGg[Oe8Zd0XGk[Of8Ud0WGo[Og8Rd0VGS\\\\Og8nc0VGW\\\\Og8ic0WG\\\\\\\\Of8ec0VGa\\\\Og8`c0VGc\\\\Oi8^c0SGf\\\\Ol8Zc0RGj\\\\Ol8Wc0RGk\\\\Om8Vc0PGl\\\\OP9Uc0nFm\\\\OQ9Tc0mFn\\\\OR9Sc0lFn\\\\OT9Sc0iFo\\\\OW9Sc0eFo\\\\O[9Wc0\\\\Fm\\\\Oc9]c0QFe\\\\Oo9fe00001O000000001O0000000000000000001O0000000000001O000000001O0000001O00001O001O1O001O1O1O001O1O001O1O000000000000000000000000000000001O001O001O00001O001O001O00001O000000001O0000001O00001O001O2N1O3M3M4L4L4L5K2N3M2N2N2N2M3M3M5K7H9Ge0[OnPmQ1\"}}, {\"image_id\": 160, \"category_id\": 2, \"bbox\": [263.30572509765625, 689.2410278320312, 168.48358154296875, 23.20452880859375], \"score\": 1.0, \"association_id\": 3, \"segmentation\": {\"size\": [853, 1280], \"counts\": \"kRT71cj02M3O0O2O00001O00000O2O000000001O000000010O00001O001O001O0000001O0000000100O1O1O0DiUO:[j0N6Hloh01TPWO2O000O100O1O1O100O1O1L4K5O10000O100O1000000000000000000000O10000000000000000000000000000000000000000000000000000000000000000000000000000000000000001O000000001O00001N3MkfQf0\"}}, {\"image_id\": 160, \"category_id\": 1, \"bbox\": [368.27496337890625, 488.5476989746094, 898.8633422851562, 302.8366394042969], \"score\": 1.0, \"association_id\": 1, \"segmentation\": {\"size\": [853, 1280], \"counts\": 
\"V\\\\d:1dj00O2N2O0O2O0O2O00000\\\\YO6ZLLfe0Nh]Om0Wb0ROg\\\\OOYNU1nd0lNe\\\\O7SNV1Ue0cNd\\\\O`2Yc0`Mc\\\\Oi2Yc0VMc\\\\OS3Xc0mLe\\\\OX3Yc0hLd\\\\O\\\\3\\\\c0cLb\\\\Oa3\\\\c0^Lc\\\\Oe3]c0ZLa\\\\Oj3]c0TLd\\\\Oo3od04aNmKV\\\\OU4fc0nKX\\\\OV4ec0jK[\\\\OY4ac0hK^\\\\O[4`c0eK`\\\\O]4^c0dK`\\\\O^4_c0bKa\\\\O`4]c0`Kb\\\\Ob4]c0_Kb\\\\Oc4\\\\c0]Kd\\\\Od4[c0]Kc\\\\Oe4\\\\c0[Kd\\\\Of4[c0ZKe\\\\Of4[c0[Kc\\\\Og4\\\\c0YKd\\\\Og4\\\\c0ZKc\\\\Og4\\\\c0YKc\\\\Oh4]c0XKc\\\\Oh4^c0WKb\\\\Oj4]c0UKc\\\\Ol4^c0SKb\\\\Om4_c0RK`\\\\Oo4`c0PKa\\\\OQ5_c0nJ^\\\\OU5bc0jJ\\\\\\\\OY5dc0fJY\\\\O^5hc0`JW\\\\Ob5ic0^JU\\\\Oe5\\\\d01O100O2O0O10001O0O10001O0000001O000O101O0000O010O10O1O00100O001000O010O10O10O10000O01000O1000000O10O10O10000O10000O01000O1000000O0100000O100000O01000000O100000O0100000000000000000O10000000000000000O100000000000000O1000000000000O1000000O2O000O100O100O2O0O100N2M3N2M4M2O1O1O1O1O2N1O1O1N200O2O0O10000O101O00001N10001O001O1N2O1O1O2N1O1N3N1O1O1O001N2O001O1O001O0O101O001O00001O2M2O2N1O2N2N1O001O1N101O1O001O001N101O1O001O001O0O101O00010O0000001O00000010O000001O0000000010O000001O0000001O0001O01O0000O100000000O10001O000O1000000O100000000O101O00000O1000000O1000000O100O2O0O1O100O1O1O1O1O1N3N1O1N2O1N2O1O2N1O1O1O100O101N10000O101O0O10000O101O0O100O101O0O100O100O2O0O10000O10001N100000000O100000000000001O0000000000000000000000000000000000000000000000000O10001O000000001O000000001O0000001O00001O00001O00001O0000001O00001O0000001O0O10001O00001O0000001O00001O00001O0000001O00001O00001N100000001O000000000000001O000O1000000000001O0000000O10000000001O00O10000000001O00000000000O101O000000000000000O101O000000000000000O101O0O10000O2O0000001N10001O0O101O001O001O0O101O001O00001O0O101O00001O00001O001O001O1N102N1O2N2N2N1O2N1O001N2O001O001O001O000O2O00000000001O00000000001O000000001O0000001O000000001O000000001O00O10000O10000O100O10000O100O100O10000O100O10000O100O10000O2O000O100O100O1O100O1O2N1O1O1O1O1O1O1O100O2N100O100O100O100O10001N1000000O10000O101N100O10000O2O000O10001N10000O101O000O2O00001N1000001O000O101O0
0001N10001O000O2O001N101O1O0O2O001O0O2O00001N10001O000O101O000O101N10000O2O0O101N1O2O0O1O2N1O2O1N3M3M3N2M3N1N1O2O1N1N3L3]MnYO`0Vf0]ORZO:Rf0C_ZOMbe01dZOI_e03gZOH\\\\e0KX[OKkd02^[OEed07d[O^OSe0MPPZ1\"}}, {\"image_id\": 160, \"category_id\": 1, \"bbox\": [127.56358337402344, 547.5831298828125, 159.11534118652344, 136.88531494140625], \"score\": 1.0, \"association_id\": 2, \"segmentation\": {\"size\": [853, 1280], \"counts\": \"Tn\\\\35jh00gXO1Ug05gXOMWg06fXOKYg07fXOJYg07eXOKYg06gXOJYg05hXOKWg06iXOJVg07jXOIUg07lXOIRg09oXOFQg0;nXOEQg0=oXOBPg0a0nXO^ORg0e0mXOYOSg0m0hXOPOYg0U1eXOhN\\\\g0`1\\\\XOfN]g0`1]XOgN[g0c1\\\\XO`Nbg0b1\\\\XO_Nbg0d1\\\\XO^Ncg0c1ZXO_Neg0Y2O2N1N2O1O1O1O100O1O100O001O1O001O010O01O010O10O1O10O0100O100O10000O2O0O100O100O2O0O101O00000O10000000O10O10O0100000O10000O100O100O1O01000O100000000O10O01O1O1000O01O1O1ROlXOYNUg0Z1RYOnMO1cg0]1Z1N2IjVOmNWi0T15O10000O1000000O10000000000000000000000O100000000000001O000000000O2O001O1O1O1O1O1K6Kn^ki0\"}}, {\"image_id\": 160, \"category_id\": 1, \"bbox\": [259.5827941894531, 503.9822692871094, 238.13705444335938, 198.85031127929688], \"score\": 0.9999998807907104, \"association_id\": 3, \"segmentation\": {\"size\": [853, 1280], \"counts\": \"jnb71cj01O1M3G9eXOC]d0>b[OFZd0;e[OIWd09\\\\[O^OnM;cf09_[O^OkM<cf08`[O_OjM<bf08c[O4[d0Mc[O5[d0Ld[O6Zd0Le[O5Zd0Ld[O5[d0Mc[O5[d0Mc[O5[d0M\\\\[O<bd0F\\\\[O<ad0H[[O<ad0G][O=^d0F`[O>[d0K][O9`d0H^[O<^d0E`[O>_d0C_[O>`d0C_[O?_d0C_[O>ad0C^[O>`d0C_[O>`d0D][O>cd0BZ[Oa0ed0AX[Oa0gd0@nZOl0od0^2O1N2N2O1O1O100O1O1O1O1O1O100O10000O10000O10O0100O100O10O0100000O1O1N2L4N1O2N2O1O1O1O1O100O1O100O100O1000000O100000000000000000000000000001O00001O0000000000001O0000000000000000001O00000000O1000000O100O1O1N2M3D<C=L4H8N2N2N2C=F:_Oa0K5M3N2O1O1O100O1O1O1O1O100O1O1O100O1O100O1O100O1O100O10000O100O100O1O1O1O1O100O100O1O1O1O101N100O100O100O1O1O2N2M4K;YO`mZd0\"}}, {\"image_id\": 160, \"category_id\": 2, \"bbox\": [412.18194580078125, 712.54931640625, 703.1792602539062, 84.73931884765625], \"score\": 0.9999744892120361, 
\"association_id\": 1, \"segmentation\": {\"size\": [853, 1280], \"counts\": \"URh<1aj05N1N100O101O0O1000000O10000O2O0O100O100O1O1O1O1O1O1O1O2O0000000O1000000000001O000000000O1000001O0000000O10000000001O0O1000000000000000001O000001O00000000000001O00000000000000000010O00000001O0001O0001O000010O0000000000010O000000000010O0000010O001O100O001O010O0000010O00000010O0000001O00001O001O2N9G5K3M1O3M1O1O00001O0KdUO2_j001O0O2O0OhdW29mZhM4N1O2N2N1O1O2O0O10001N10000O1O1O1O1O1O1N2N2L4M3O1O1O100O100O100000000O1000001O0000000000000000000000001O00000000000000000000000001O0000000000001O000000010O0000000000000000001O000000000000001O0000000000001O0001O0000000000000000000000O10000000000000000000000000000O100000O10000000000000000000000000O1000000000000000000O10000000000000001O00000O10000000000000001O0O10000000000000000O10001O00000000000O1000001O000000000O10000000000000000O100000000000000O10000000000000000000000O1000001O00000000001O1O001O001O00001O000000001O000000001O00001O0O2O001N3N4K7J2L\\\\Zj6\"}}, {\"image_id\": 160, \"category_id\": 2, \"bbox\": [145.03750610351562, 672.2787475585938, 117.36386108398438, 12.31121826171875], \"score\": 0.9996931552886963, \"association_id\": 2, \"segmentation\": {\"size\": [853, 1280], \"counts\": \"bSZ42cj0000O100000000O10000O10001N10000O10O10O10001O00000000000001O000000000000000000000000O10000000000000000O1001O0000000000000000000000000000001O001N3Mo_Rk0\"}}, {\"image_id\": 160, \"category_id\": 1, \"bbox\": [229.25787353515625, 539.295166015625, 88.56784057617188, 151.72882080078125], \"score\": 0.9832351207733154, \"association_id\": 0, \"segmentation\": {\"size\": [853, 1280], \"counts\": 
\"nnU62bj02N2N1QYOJ_c07_\\\\O0[c01d\\\\O5Wc0Mf\\\\O7Uc0Ng\\\\O2Yc00e\\\\O1Zc00e\\\\O0Yc04d\\\\OM[c0:^\\\\OH`c0:^\\\\OGbc0:]\\\\OFcc0:\\\\\\\\OGdc09\\\\\\\\OGdc0:X\\\\OIgc09T\\\\OKlc07o[OLQd05m[OLSd04l[OMTd03l[OMTd04j[OMVd04g[ONYd04b[OO^d04R[O9nd0GP[O;Pe0EP[O;Pe0EoZO<Qe0GkZO:Ue0HgZO9Ze0HcZO:]e0H`ZO8ae0I]ZO8de0HZZO8ge0IVZO8ke0HSZO8oe0InYO7Tf0IkYO7Wf0GjYO8Wf0HjYO7Vf0IjYO7Wf0HiYO8Xf0GgYO9[f0FeYO:\\\\f0FcYO:^f0EXYOcN4g1ef0NYYO2hf0NVYO3kf0GUYO>lf0_OXYO?hf0BVYO?kf0e1001PNRYO7of0HQYO8of0GSYO8nf0GRYO9of0FQYO:Qg0DoXO<Rg0DmXO<of0g1O1O1001O000_LSYOY3Vg0O0O2N2^MdXO@<e1Rg0hNeXO@`0b1lf0lN`YOQ1bf0lNbYOP1Xh0J2N7ZO]VOMhi0J]VO5Sj0NTaQi0\"}}]"
  },
  {
    "path": "projects/LISA/output_light/last_checkpoint",
    "content": "last_checkpoint.pth\n"
  },
  {
    "path": "projects/LISA/predictor.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport atexit\nimport bisect\nimport multiprocessing as mp\nfrom collections import deque\nimport cv2\nimport torch\n\nfrom detectron2.data import MetadataCatalog\nfrom detectron2.engine.defaults import DefaultPredictor\nfrom detectron2.utils.video_visualizer import VideoVisualizer\nfrom detectron2.utils.visualizer import ColorMode, Visualizer\n\nfrom LISA import add_lisa_config \n\nclass VisualizationDemo(object):\n    def __init__(self, cfg, instance_mode=ColorMode.IMAGE, parallel=False):\n        \"\"\"\n        Args:\n            cfg (CfgNode):\n            instance_mode (ColorMode):\n            parallel (bool): whether to run the model in different processes from visualization.\n                Useful since the visualization logic can be slow.\n        \"\"\"\n        self.metadata = MetadataCatalog.get('soba_cast_shadow_val_full')\n        self.cpu_device = torch.device(\"cpu\")\n        self.instance_mode = instance_mode\n\n        self.parallel = parallel\n        if parallel:\n            num_gpu = torch.cuda.device_count()\n            self.predictor = AsyncPredictor(cfg, num_gpus=num_gpu)\n        else:\n            self.predictor = DefaultPredictor(cfg)\n\n    def run_on_image(self, image):\n        \"\"\"\n        Args:\n            image (np.ndarray): an image of shape (H, W, C) (in BGR order).\n                This is the format used by OpenCV.\n\n        Returns:\n            predictions (dict): the output of the model.\n            vis_output (VisImage): the visualized image output.\n        \"\"\"\n        vis_output = None\n        predictions = self.predictor(image)\n        # Convert image from OpenCV BGR format to Matplotlib RGB format.\n        image = image[:, :, ::-1]\n        visualizer = Visualizer(image, self.metadata, instance_mode=self.instance_mode)\n        if \"panoptic_seg\" in predictions:\n            panoptic_seg, segments_info = 
predictions[\"panoptic_seg\"]\n            vis_output = visualizer.draw_panoptic_seg_predictions(\n                panoptic_seg.to(self.cpu_device), segments_info\n            )\n        else:\n            if \"sem_seg\" in predictions:\n                vis_output = visualizer.draw_sem_seg(\n                    predictions[\"sem_seg\"].argmax(dim=0).to(self.cpu_device)\n                )\n           # if \"instances\" in predictions[1][0]:\n            #    instances = predictions[1][0][\"instances\"].to(self.cpu_device)\n             #   if instances.pred_masks[0].shape[2] == 1:\n              #      instances.pred_masks = [mk[:,:,0] for mk in instances.pred_masks]\n               # vis_asso = visualizer.draw_instance_predictions(instances,True,'right')\n            if \"instances\" in predictions[0][0]:\n                \n                instances = predictions[0][0][\"instances\"].to(self.cpu_device)\n                instances.pred_masks = instances.pred_masks.numpy()\n                #instances.pred_boxes = instances.pred_boxes\n               # print(instances.pred_masks.shape)\n                vis_output = visualizer.draw_instance_predictions(predictions=instances)\n\n        return predictions, vis_output\n\n    def _frame_from_video(self, video):\n        while video.isOpened():\n            success, frame = video.read()\n            if success:\n                yield frame\n            else:\n                break\n\n    def run_on_video(self, video):\n        \"\"\"\n        Visualizes predictions on frames of the input video.\n\n        Args:\n            video (cv2.VideoCapture): a :class:`VideoCapture` object, whose source can be\n                either a webcam or a video file.\n\n        Yields:\n            ndarray: BGR visualizations of each video frame.\n        \"\"\"\n        video_visualizer = VideoVisualizer(self.metadata, self.instance_mode)\n\n        def process_predictions(frame, predictions):\n            frame = cv2.cvtColor(frame, 
cv2.COLOR_RGB2BGR)\n            if \"panoptic_seg\" in predictions:\n                panoptic_seg, segments_info = predictions[\"panoptic_seg\"]\n                vis_frame = video_visualizer.draw_panoptic_seg_predictions(\n                    frame, panoptic_seg.to(self.cpu_device), segments_info\n                )\n            elif \"instances\" in predictions:\n                predictions = predictions[\"instances\"].to(self.cpu_device)\n                vis_frame = video_visualizer.draw_instance_predictions(frame, predictions)\n            elif \"sem_seg\" in predictions:\n                vis_frame = video_visualizer.draw_sem_seg(\n                    frame, predictions[\"sem_seg\"].argmax(dim=0).to(self.cpu_device)\n                )\n\n            # Converts Matplotlib RGB format to OpenCV BGR format\n            vis_frame = cv2.cvtColor(vis_frame.get_image(), cv2.COLOR_RGB2BGR)\n            return vis_frame\n\n        frame_gen = self._frame_from_video(video)\n        if self.parallel:\n            buffer_size = self.predictor.default_buffer_size\n\n            frame_data = deque()\n\n            for cnt, frame in enumerate(frame_gen):\n                frame_data.append(frame)\n                self.predictor.put(frame)\n\n                if cnt >= buffer_size:\n                    frame = frame_data.popleft()\n                    predictions = self.predictor.get()\n                    yield process_predictions(frame, predictions)\n\n            while len(frame_data):\n                frame = frame_data.popleft()\n                predictions = self.predictor.get()\n                yield process_predictions(frame, predictions)\n        else:\n            for frame in frame_gen:\n                yield process_predictions(frame, self.predictor(frame))\n\n\nclass AsyncPredictor:\n    \"\"\"\n    A predictor that runs the model asynchronously, possibly on >1 GPUs.\n    Because rendering the visualization takes considerably amount of time,\n    this helps improve 
throughput when rendering videos.\n    \"\"\"\n\n    class _StopToken:\n        pass\n\n    class _PredictWorker(mp.Process):\n        def __init__(self, cfg, task_queue, result_queue):\n            self.cfg = cfg\n            self.task_queue = task_queue\n            self.result_queue = result_queue\n            super().__init__()\n\n        def run(self):\n            predictor = DefaultPredictor(self.cfg)\n\n            while True:\n                task = self.task_queue.get()\n                if isinstance(task, AsyncPredictor._StopToken):\n                    break\n                idx, data = task\n                result = predictor(data)\n                self.result_queue.put((idx, result))\n\n    def __init__(self, cfg, num_gpus: int = 1):\n        \"\"\"\n        Args:\n            cfg (CfgNode):\n            num_gpus (int): if 0, will run on CPU\n        \"\"\"\n        num_workers = max(num_gpus, 1)\n        self.task_queue = mp.Queue(maxsize=num_workers * 3)\n        self.result_queue = mp.Queue(maxsize=num_workers * 3)\n        self.procs = []\n        for gpuid in range(max(num_gpus, 1)):\n            cfg = cfg.clone()\n            cfg.defrost()\n            cfg.MODEL.DEVICE = \"cuda:{}\".format(gpuid) if num_gpus > 0 else \"cpu\"\n            self.procs.append(\n                AsyncPredictor._PredictWorker(cfg, self.task_queue, self.result_queue)\n            )\n\n        self.put_idx = 0\n        self.get_idx = 0\n        self.result_rank = []\n        self.result_data = []\n\n        for p in self.procs:\n            p.start()\n        atexit.register(self.shutdown)\n\n    def put(self, image):\n        self.put_idx += 1\n        self.task_queue.put((self.put_idx, image))\n\n    def get(self):\n        self.get_idx += 1  # the index needed for this request\n        if len(self.result_rank) and self.result_rank[0] == self.get_idx:\n            res = self.result_data[0]\n            del self.result_data[0], self.result_rank[0]\n            return 
res\n\n        while True:\n            # make sure the results are returned in the correct order\n            idx, res = self.result_queue.get()\n            if idx == self.get_idx:\n                return res\n            insert = bisect.bisect(self.result_rank, idx)\n            self.result_rank.insert(insert, idx)\n            self.result_data.insert(insert, res)\n\n    def __len__(self):\n        return self.put_idx - self.get_idx\n\n    def __call__(self, image):\n        self.put(image)\n        return self.get()\n\n    def shutdown(self):\n        for _ in self.procs:\n            self.task_queue.put(AsyncPredictor._StopToken())\n\n    @property\n    def default_buffer_size(self):\n        return len(self.procs) * 5\n"
  },
  {
    "path": "projects/LISA/train_net.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\n\"\"\"\nDetection Training Script.\n\nThis scripts reads a given config file and runs the training or evaluation.\nIt is an entry point that is made to train standard models in detectron2.\n\nIn order to let one script support training of many models,\nthis script contains logic that are specific to these built-in models and therefore\nmay not be suitable for your own project.\nFor example, your research project perhaps only needs a single \"evaluator\".\n\nTherefore, we recommend you to use detectron2 as an library and take\nthis file as an example of how to use the library.\nYou may want to write your own script with your datasets and other customizations.\n\"\"\"\n\nimport logging\nimport os\nfrom collections import OrderedDict\nimport torch\n\nimport detectron2.utils.comm as comm\nfrom detectron2.checkpoint import DetectionCheckpointer\nfrom detectron2.config import get_cfg\nfrom detectron2.data import MetadataCatalog\nfrom detectron2.engine import DefaultTrainer, default_argument_parser, default_setup, hooks, launch\nfrom detectron2.evaluation import (\n    CityscapesEvaluator,\n    SOBAEvaluator,\n    DatasetEvaluators,\n    LVISEvaluator,\n    PascalVOCDetectionEvaluator,\n    SemSegEvaluator,\n    verify_results,\n)\nfrom detectron2.modeling import GeneralizedRCNNWithTTA\n\nimport os\nos.environ['CUDA_LAUNCH_BLOCKING'] = \"1\"\n\nfrom detectron2.data.datasets import register_soba_instances\nregister_soba_instances(\"soba_cast_shadow_train_full\", {}, \"../../../dataset/SOBA/annotations/SOBA_train_relation.json\", \"../../../dataset/SOBA/SOBA/\")\nregister_soba_instances(\"soba_cast_shadow_val_full\",{},\"../../../dataset/SOBA/annotations/SOBA_val_v2.json\",\"../../../dataset/SOBA/SOBA/\")\n\nfrom LISA import add_lisa_config \n\nclass Trainer(DefaultTrainer):\n    \"\"\"\n    We use the \"DefaultTrainer\" which contains a number pre-defined logic for\n    standard 
training workflow. They may not work for you, especially if you\n    are working on a new research project. In that case you can use the cleaner\n    \"SimpleTrainer\", or write your own training loop.\n    \"\"\"\n\n    @classmethod\n    def build_evaluator(cls, cfg, dataset_name, output_folder=None):\n        \"\"\"\n        Create evaluator(s) for a given dataset.\n        This uses the special metadata \"evaluator_type\" associated with each builtin dataset.\n        For your own dataset, you can simply create an evaluator manually in your\n        script and do not have to worry about the hacky if-else logic here.\n        \"\"\"\n        if output_folder is None:\n            output_folder = os.path.join(cfg.OUTPUT_DIR, \"inference\")\n        evaluator_list = []\n        evaluator_type = MetadataCatalog.get(dataset_name).evaluator_type\n        if evaluator_type in [\"sem_seg\", \"soba_panoptic_seg\"]:\n            evaluator_list.append(\n                SemSegEvaluator(\n                    dataset_name,\n                    distributed=True,\n                    num_classes=cfg.MODEL.SEM_SEG_HEAD.NUM_CLASSES,\n                    ignore_label=cfg.MODEL.SEM_SEG_HEAD.IGNORE_VALUE,\n                    output_dir=output_folder,\n                )\n            )\n        if evaluator_type in [\"soba\", \"soba_panoptic_seg\"]:\n            evaluator_list.append(SOBAEvaluator(dataset_name, cfg, True, output_folder))\n        if evaluator_type == \"soba_panoptic_seg\":\n            evaluator_list.append(SOBAPanopticEvaluator(dataset_name, output_folder))\n        if evaluator_type == \"cityscapes\":\n            assert (\n                torch.cuda.device_count() >= comm.get_rank()\n            ), \"CityscapesEvaluator currently do not work with multiple machines.\"\n            return CityscapesEvaluator(dataset_name)\n        if evaluator_type == \"pascal_voc\":\n            return PascalVOCDetectionEvaluator(dataset_name)\n        if evaluator_type == 
\"lvis\":\n            return LVISEvaluator(dataset_name, cfg, True, output_folder)\n        if len(evaluator_list) == 0:\n            raise NotImplementedError(\n                \"no Evaluator for the dataset {} with the type {}\".format(\n                    dataset_name, evaluator_type\n                )\n            )\n        if len(evaluator_list) == 1:\n            return evaluator_list[0]\n        return DatasetEvaluators(evaluator_list)\n\n    @classmethod\n    def test_with_TTA(cls, cfg, model):\n        logger = logging.getLogger(\"detectron2.trainer\")\n        # In the end of training, run an evaluation with TTA\n        # Only support some R-CNN models.\n        logger.info(\"Running inference with test-time augmentation ...\")\n        model = GeneralizedRCNNWithTTA(cfg, model)\n        evaluators = [\n            cls.build_evaluator(\n                cfg, name, output_folder=os.path.join(cfg.OUTPUT_DIR, \"inference_TTA\")\n            )\n            for name in cfg.DATASETS.TEST\n        ]\n        res = cls.test(cfg, model, evaluators)\n        res = OrderedDict({k + \"_TTA\": v for k, v in res.items()})\n        return res\n\n\ndef setup(args):\n    \"\"\"\n    Create configs and perform basic setups.\n    \"\"\"\n    cfg = get_cfg()\n    cfg.merge_from_file(args.config_file)\n    cfg.merge_from_list(args.opts)\n    cfg.freeze()\n    default_setup(cfg, args)\n    return cfg\n\n\ndef main(args):\n    cfg = setup(args)\n\n    if args.eval_only:\n        model = Trainer.build_model(cfg)\n        print(model.state_dict().keys())\n        DetectionCheckpointer(model, save_dir=cfg.OUTPUT_DIR).resume_or_load(\n            cfg.MODEL.WEIGHTS, resume=args.resume\n        )\n        res,rela = Trainer.test(cfg, model) # add rela\n        if comm.is_main_process():\n            verify_results(cfg, res)\n        if cfg.TEST.AUG.ENABLED:\n            res.update(Trainer.test_with_TTA(cfg, model))\n        return res,rela\n\n    \"\"\"\n    If you'd like to do 
anything fancier than the standard training logic,\n    consider writing your own training loop or subclassing the trainer.\n    \"\"\"\n    trainer = Trainer(cfg)\n    trainer.resume_or_load(resume=args.resume)\n    if cfg.TEST.AUG.ENABLED:\n        trainer.register_hooks(\n            [hooks.EvalHook(0, lambda: trainer.test_with_TTA(cfg, trainer.model))]\n        )\n    return trainer.train()\n\n\nif __name__ == \"__main__\":\n    args = default_argument_parser().parse_args()\n    print(\"Command Line Args:\", args)\n    launch(\n        main,\n        args.num_gpus,\n        num_machines=args.num_machines,\n        machine_rank=args.machine_rank,\n        dist_url=args.dist_url,\n        args=(args,),\n    )\n"
  },
  {
    "path": "projects/LISA/utils.py",
    "content": "\"\"\"\nMask R-CNN\nCommon utility functions and classes.\n\nCopyright (c) 2017 Matterport, Inc.\nLicensed under the MIT License (see LICENSE for details)\nWritten by Waleed Abdulla\n\"\"\"\n\nimport sys\nimport os\nimport logging\nimport math\nimport random\nimport numpy as np\n# import tensorflow as tf\nimport scipy\nimport skimage.color\nimport skimage.io\nimport skimage.transform\nimport urllib.request\nimport shutil\nimport warnings\nfrom distutils.version import LooseVersion\n\n# URL from which to download the latest COCO trained weights\nCOCO_MODEL_URL = \"https://github.com/matterport/Mask_RCNN/releases/download/v2.0/mask_rcnn_coco.h5\"\n\n\n############################################################\n#  Bounding Boxes\n############################################################\n\ndef extract_bboxes(mask):\n    \"\"\"Compute bounding boxes from masks.\n    mask: [height, width, num_instances]. Mask pixels are either 1 or 0.\n\n    Returns: bbox array [num_instances, (y1, x1, y2, x2)].\n    \"\"\"\n    boxes = np.zeros([mask.shape[-1], 4], dtype=np.int32)\n    for i in range(mask.shape[-1]):\n        m = mask[:, :, i]\n        # Bounding box.\n        horizontal_indicies = np.where(np.any(m, axis=0))[0]\n        vertical_indicies = np.where(np.any(m, axis=1))[0]\n        if horizontal_indicies.shape[0]:\n            x1, x2 = horizontal_indicies[[0, -1]]\n            y1, y2 = vertical_indicies[[0, -1]]\n            # x2 and y2 should not be part of the box. Increment by 1.\n            x2 += 1\n            y2 += 1\n        else:\n            # No mask for this instance. Might happen due to\n            # resizing or cropping. 
Set bbox to zeros\n            x1, x2, y1, y2 = 0, 0, 0, 0\n        boxes[i] = np.array([y1, x1, y2, x2])\n    return boxes.astype(np.int32)\n\n\ndef compute_iou(box, boxes, box_area, boxes_area):\n    \"\"\"Calculates IoU of the given box with the array of the given boxes.\n    box: 1D vector [y1, x1, y2, x2]\n    boxes: [boxes_count, (y1, x1, y2, x2)]\n    box_area: float. the area of 'box'\n    boxes_area: array of length boxes_count.\n\n    Note: the areas are passed in rather than calculated here for\n    efficiency. Calculate once in the caller to avoid duplicate work.\n    \"\"\"\n    # Calculate intersection areas\n    y1 = np.maximum(box[0], boxes[:, 0])\n    y2 = np.minimum(box[2], boxes[:, 2])\n    x1 = np.maximum(box[1], boxes[:, 1])\n    x2 = np.minimum(box[3], boxes[:, 3])\n    intersection = np.maximum(x2 - x1, 0) * np.maximum(y2 - y1, 0)\n    union = box_area + boxes_area[:] - intersection[:]\n    iou = intersection / union\n    return iou\n\n\ndef compute_overlaps(boxes1, boxes2):\n    \"\"\"Computes IoU overlaps between two sets of boxes.\n    boxes1, boxes2: [N, (y1, x1, y2, x2)].\n\n    For better performance, pass the largest set first and the smaller second.\n    \"\"\"\n    # Areas of anchors and GT boxes\n    area1 = (boxes1[:, 2] - boxes1[:, 0]) * (boxes1[:, 3] - boxes1[:, 1])\n    area2 = (boxes2[:, 2] - boxes2[:, 0]) * (boxes2[:, 3] - boxes2[:, 1])\n\n    # Compute overlaps to generate matrix [boxes1 count, boxes2 count]\n    # Each cell contains the IoU value.\n    overlaps = np.zeros((boxes1.shape[0], boxes2.shape[0]))\n    for i in range(overlaps.shape[1]):\n        box2 = boxes2[i]\n        overlaps[:, i] = compute_iou(box2, boxes1, area2[i], area1)\n    return overlaps\n\n\ndef compute_overlaps_masks(masks1, masks2):\n    \"\"\"Computes IoU overlaps between two sets of masks.\n    masks1, masks2: [Height, Width, instances]\n    \"\"\"\n    \n    # If either set of masks is empty return empty result\n    if masks1.shape[-1] == 0 or 
masks2.shape[-1] == 0:\n        return np.zeros((masks1.shape[-1], masks2.shape[-1]))\n    # flatten masks and compute their areas\n    masks1 = np.reshape(masks1 > .5, (-1, masks1.shape[-1])).astype(np.float32)\n    masks2 = np.reshape(masks2 > .5, (-1, masks2.shape[-1])).astype(np.float32)\n    area1 = np.sum(masks1, axis=0)\n    area2 = np.sum(masks2, axis=0)\n\n    # intersections and union\n    intersections = np.dot(masks1.T, masks2)\n    union = area1[:, None] + area2[None, :] - intersections\n    overlaps = intersections / union\n\n    return overlaps\n\n\ndef non_max_suppression(boxes, scores, threshold):\n    \"\"\"Performs non-maximum suppression and returns indices of kept boxes.\n    boxes: [N, (y1, x1, y2, x2)]. Notice that (y2, x2) lays outside the box.\n    scores: 1-D array of box scores.\n    threshold: Float. IoU threshold to use for filtering.\n    \"\"\"\n    assert boxes.shape[0] > 0\n    if boxes.dtype.kind != \"f\":\n        boxes = boxes.astype(np.float32)\n\n    # Compute box areas\n    y1 = boxes[:, 0]\n    x1 = boxes[:, 1]\n    y2 = boxes[:, 2]\n    x2 = boxes[:, 3]\n    area = (y2 - y1) * (x2 - x1)\n\n    # Get indicies of boxes sorted by scores (highest first)\n    ixs = scores.argsort()[::-1]\n\n    pick = []\n    while len(ixs) > 0:\n        # Pick top box and add its index to the list\n        i = ixs[0]\n        pick.append(i)\n        # Compute IoU of the picked box with the rest\n        iou = compute_iou(boxes[i], boxes[ixs[1:]], area[i], area[ixs[1:]])\n        # Identify boxes with IoU over the threshold. 
This\n        # returns indices into ixs[1:], so add 1 to get\n        # indices into ixs.\n        remove_ixs = np.where(iou > threshold)[0] + 1\n        # Remove indices of the picked and overlapped boxes.\n        ixs = np.delete(ixs, remove_ixs)\n        ixs = np.delete(ixs, 0)\n    return np.array(pick, dtype=np.int32)\n\n\ndef apply_box_deltas(boxes, deltas):\n    \"\"\"Applies the given deltas to the given boxes.\n    boxes: [N, (y1, x1, y2, x2)]. Note that (y2, x2) is outside the box.\n    deltas: [N, (dy, dx, log(dh), log(dw))]\n    \"\"\"\n    boxes = boxes.astype(np.float32)\n    # Convert to y, x, h, w\n    height = boxes[:, 2] - boxes[:, 0]\n    width = boxes[:, 3] - boxes[:, 1]\n    center_y = boxes[:, 0] + 0.5 * height\n    center_x = boxes[:, 1] + 0.5 * width\n    # Apply deltas\n    center_y += deltas[:, 0] * height\n    center_x += deltas[:, 1] * width\n    height *= np.exp(deltas[:, 2])\n    width *= np.exp(deltas[:, 3])\n    # Convert back to y1, x1, y2, x2\n    y1 = center_y - 0.5 * height\n    x1 = center_x - 0.5 * width\n    y2 = y1 + height\n    x2 = x1 + width\n    return np.stack([y1, x1, y2, x2], axis=1)\n\n\n# def box_refinement_graph(box, gt_box):\n#     \"\"\"Compute refinement needed to transform box to gt_box.\n#     box and gt_box are [N, (y1, x1, y2, x2)]\n#     \"\"\"\n#     box = tf.cast(box, tf.float32)\n#     gt_box = tf.cast(gt_box, tf.float32)\n\n#     height = box[:, 2] - box[:, 0]\n#     width = box[:, 3] - box[:, 1]\n#     center_y = box[:, 0] + 0.5 * height\n#     center_x = box[:, 1] + 0.5 * width\n\n#     gt_height = gt_box[:, 2] - gt_box[:, 0]\n#     gt_width = gt_box[:, 3] - gt_box[:, 1]\n#     gt_center_y = gt_box[:, 0] + 0.5 * gt_height\n#     gt_center_x = gt_box[:, 1] + 0.5 * gt_width\n\n#     dy = (gt_center_y - center_y) / height\n#     dx = (gt_center_x - center_x) / width\n#     dh = tf.log(gt_height / height)\n#     dw = tf.log(gt_width / width)\n\n#     result = tf.stack([dy, dx, dh, dw], axis=1)\n#     
return result\n\n\ndef box_refinement(box, gt_box):\n    \"\"\"Compute refinement needed to transform box to gt_box.\n    box and gt_box are [N, (y1, x1, y2, x2)]. (y2, x2) is\n    assumed to be outside the box.\n    \"\"\"\n    box = box.astype(np.float32)\n    gt_box = gt_box.astype(np.float32)\n\n    height = box[:, 2] - box[:, 0]\n    width = box[:, 3] - box[:, 1]\n    center_y = box[:, 0] + 0.5 * height\n    center_x = box[:, 1] + 0.5 * width\n\n    gt_height = gt_box[:, 2] - gt_box[:, 0]\n    gt_width = gt_box[:, 3] - gt_box[:, 1]\n    gt_center_y = gt_box[:, 0] + 0.5 * gt_height\n    gt_center_x = gt_box[:, 1] + 0.5 * gt_width\n\n    dy = (gt_center_y - center_y) / height\n    dx = (gt_center_x - center_x) / width\n    dh = np.log(gt_height / height)\n    dw = np.log(gt_width / width)\n\n    return np.stack([dy, dx, dh, dw], axis=1)\n\n\n############################################################\n#  Dataset\n############################################################\n\nclass Dataset(object):\n    \"\"\"The base class for dataset classes.\n    To use it, create a new class that adds functions specific to the dataset\n    you want to use. 
For example:\n\n    class CatsAndDogsDataset(Dataset):\n        def load_cats_and_dogs(self):\n            ...\n        def load_mask(self, image_id):\n            ...\n        def image_reference(self, image_id):\n            ...\n\n    See COCODataset and ShapesDataset as examples.\n    \"\"\"\n\n    def __init__(self, class_map=None):\n        self._image_ids = []\n        self.image_info = []\n        # Background is always the first class\n        self.class_info = [{\"source\": \"\", \"id\": 0, \"name\": \"BG\"}]\n        self.source_class_ids = {}\n\n    def add_class(self, source, class_id, class_name):\n        assert \".\" not in source, \"Source name cannot contain a dot\"\n        # Does the class exist already?\n        for info in self.class_info:\n            if info['source'] == source and info[\"id\"] == class_id:\n                # source.class_id combination already available, skip\n                return\n        # Add the class\n        self.class_info.append({\n            \"source\": source,\n            \"id\": class_id,\n            \"name\": class_name,\n        })\n\n    def add_image(self, source, image_id, path, **kwargs):\n        image_info = {\n            \"id\": image_id,\n            \"source\": source,\n            \"path\": path,\n        }\n        image_info.update(kwargs)\n        self.image_info.append(image_info)\n\n    def image_reference(self, image_id):\n        \"\"\"Return a link to the image in its source Website or details about\n        the image that help looking it up or debugging it.\n\n        Override for your dataset, but pass to this function\n        if you encounter images not in your dataset.\n        \"\"\"\n        return \"\"\n\n    def prepare(self, class_map=None):\n        \"\"\"Prepares the Dataset class for use.\n\n        TODO: class map is not supported yet. 
When done, it should handle mapping\n              classes from different datasets to the same class ID.\n        \"\"\"\n\n        def clean_name(name):\n            \"\"\"Returns a shorter version of object names for cleaner display.\"\"\"\n            return \",\".join(name.split(\",\")[:1])\n\n        # Build (or rebuild) everything else from the info dicts.\n        self.num_classes = len(self.class_info)\n        self.class_ids = np.arange(self.num_classes)\n        self.class_names = [clean_name(c[\"name\"]) for c in self.class_info]\n        self.num_images = len(self.image_info)\n        self._image_ids = np.arange(self.num_images)\n\n        # Mapping from source class and image IDs to internal IDs\n        self.class_from_source_map = {\"{}.{}\".format(info['source'], info['id']): id\n                                      for info, id in zip(self.class_info, self.class_ids)}\n        self.image_from_source_map = {\"{}.{}\".format(info['source'], info['id']): id\n                                      for info, id in zip(self.image_info, self.image_ids)}\n\n        # Map sources to class_ids they support\n        self.sources = list(set([i['source'] for i in self.class_info]))\n        self.source_class_ids = {}\n        # Loop over datasets\n        for source in self.sources:\n            self.source_class_ids[source] = []\n            # Find classes that belong to this dataset\n            for i, info in enumerate(self.class_info):\n                # Include BG class in all datasets\n                if i == 0 or source == info['source']:\n                    self.source_class_ids[source].append(i)\n\n    def map_source_class_id(self, source_class_id):\n        \"\"\"Takes a source class ID and returns the int class ID assigned to it.\n\n        For example:\n        dataset.map_source_class_id(\"coco.12\") -> 23\n        \"\"\"\n        return self.class_from_source_map[source_class_id]\n\n    def get_source_class_id(self, class_id, source):\n        
\"\"\"Map an internal class ID to the corresponding class ID in the source dataset.\"\"\"\n        info = self.class_info[class_id]\n        assert info['source'] == source\n        return info['id']\n\n    @property\n    def image_ids(self):\n        return self._image_ids\n\n    def source_image_link(self, image_id):\n        \"\"\"Returns the path or URL to the image.\n        Override this to return a URL to the image if it's available online for easy\n        debugging.\n        \"\"\"\n        return self.image_info[image_id][\"path\"]\n\n    def load_image(self, image_id):\n        \"\"\"Load the specified image and return a [H,W,3] Numpy array.\n        \"\"\"\n        # Load image\n        image = skimage.io.imread(self.image_info[image_id]['path'])\n        # If grayscale. Convert to RGB for consistency.\n        if image.ndim != 3:\n            image = skimage.color.gray2rgb(image)\n        # If has an alpha channel, remove it for consistency\n        if image.shape[-1] == 4:\n            image = image[..., :3]\n        return image\n\n    def load_mask(self, image_id):\n        \"\"\"Load instance masks for the given image.\n\n        Different datasets use different ways to store masks. 
Override this\n        method to load instance masks and return them in the form of am\n        array of binary masks of shape [height, width, instances].\n\n        Returns:\n            masks: A bool array of shape [height, width, instance count] with\n                a binary mask per instance.\n            class_ids: a 1D array of class IDs of the instance masks.\n        \"\"\"\n        # Override this function to load a mask from your dataset.\n        # Otherwise, it returns an empty mask.\n        logging.warning(\"You are using the default load_mask(), maybe you need to define your own one.\")\n        mask = np.empty([0, 0, 0])\n        class_ids = np.empty([0], np.int32)\n        return mask, class_ids\n\n\ndef resize_image(image, min_dim=None, max_dim=None, min_scale=None, mode=\"square\"):\n    \"\"\"Resizes an image keeping the aspect ratio unchanged.\n\n    min_dim: if provided, resizes the image such that it's smaller\n        dimension == min_dim\n    max_dim: if provided, ensures that the image longest side doesn't\n        exceed this value.\n    min_scale: if provided, ensure that the image is scaled up by at least\n        this percent even if min_dim doesn't require it.\n    mode: Resizing mode.\n        none: No resizing. Return the image unchanged.\n        square: Resize and pad with zeros to get a square image\n            of size [max_dim, max_dim].\n        pad64: Pads width and height with zeros to make them multiples of 64.\n               If min_dim or min_scale are provided, it scales the image up\n               before padding. max_dim is ignored in this mode.\n               The multiple of 64 is needed to ensure smooth scaling of feature\n               maps up and down the 6 levels of the FPN pyramid (2**6=64).\n        crop: Picks random crops from the image. First, scales the image based\n              on min_dim and min_scale, then picks a random crop of\n              size min_dim x min_dim. 
Can be used in training only.\n              max_dim is not used in this mode.\n\n    Returns:\n    image: the resized image\n    window: (y1, x1, y2, x2). If max_dim is provided, padding might\n        be inserted in the returned image. If so, this window is the\n        coordinates of the image part of the full image (excluding\n        the padding). The x2, y2 pixels are not included.\n    scale: The scale factor used to resize the image\n    padding: Padding added to the image [(top, bottom), (left, right), (0, 0)]\n    \"\"\"\n    # Keep track of image dtype and return results in the same dtype\n    image_dtype = image.dtype\n    # Default window (y1, x1, y2, x2) and default scale == 1.\n    h, w = image.shape[:2]\n    window = (0, 0, h, w)\n    scale = 1\n    padding = [(0, 0), (0, 0), (0, 0)]\n    crop = None\n\n    if mode == \"none\":\n        return image, window, scale, padding, crop\n\n    # Scale?\n    if min_dim:\n        # Scale up but not down\n        scale = max(1, min_dim / min(h, w))\n    if min_scale and scale < min_scale:\n        scale = min_scale\n\n    # Does it exceed max dim?\n    if max_dim and mode == \"square\":\n        image_max = max(h, w)\n        if round(image_max * scale) > max_dim:\n            scale = max_dim / image_max\n\n    # Resize image using bilinear interpolation\n    if scale != 1:\n        image = resize(image, (round(h * scale), round(w * scale)),\n                       preserve_range=True)\n\n    # Need padding or cropping?\n    if mode == \"square\":\n        # Get new height and width\n        h, w = image.shape[:2]\n        top_pad = (max_dim - h) // 2\n        bottom_pad = max_dim - h - top_pad\n        left_pad = (max_dim - w) // 2\n        right_pad = max_dim - w - left_pad\n        padding = [(top_pad, bottom_pad), (left_pad, right_pad), (0, 0)]\n        image = np.pad(image, padding, mode='constant', constant_values=0)\n        window = (top_pad, left_pad, h + top_pad, w + left_pad)\n    elif mode == 
\"pad64\":\n        h, w = image.shape[:2]\n        # Both sides must be divisible by 64\n        assert min_dim % 64 == 0, \"Minimum dimension must be a multiple of 64\"\n        # Height\n        if h % 64 > 0:\n            max_h = h - (h % 64) + 64\n            top_pad = (max_h - h) // 2\n            bottom_pad = max_h - h - top_pad\n        else:\n            top_pad = bottom_pad = 0\n        # Width\n        if w % 64 > 0:\n            max_w = w - (w % 64) + 64\n            left_pad = (max_w - w) // 2\n            right_pad = max_w - w - left_pad\n        else:\n            left_pad = right_pad = 0\n        padding = [(top_pad, bottom_pad), (left_pad, right_pad), (0, 0)]\n        image = np.pad(image, padding, mode='constant', constant_values=0)\n        window = (top_pad, left_pad, h + top_pad, w + left_pad)\n    elif mode == \"crop\":\n        # Pick a random crop\n        h, w = image.shape[:2]\n        y = random.randint(0, (h - min_dim))\n        x = random.randint(0, (w - min_dim))\n        crop = (y, x, min_dim, min_dim)\n        image = image[y:y + min_dim, x:x + min_dim]\n        window = (0, 0, min_dim, min_dim)\n    else:\n        raise Exception(\"Mode {} not supported\".format(mode))\n    return image.astype(image_dtype), window, scale, padding, crop\n\n\ndef resize_mask(mask, scale, padding, crop=None):\n    \"\"\"Resizes a mask using the given scale and padding.\n    Typically, you get the scale and padding from resize_image() to\n    ensure both, the image and the mask, are resized consistently.\n\n    scale: mask scaling factor\n    padding: Padding to add to the mask in the form\n            [(top, bottom), (left, right), (0, 0)]\n    \"\"\"\n    # Suppress warning from scipy 0.13.0, the output shape of zoom() is\n    # calculated with round() instead of int()\n    with warnings.catch_warnings():\n        warnings.simplefilter(\"ignore\")\n        mask = scipy.ndimage.zoom(mask, zoom=[scale, scale, 1], order=0)\n    if crop is not None:\n     
   y, x, h, w = crop\n        mask = mask[y:y + h, x:x + w]\n    else:\n        mask = np.pad(mask, padding, mode='constant', constant_values=0)\n    return mask\n\n\ndef minimize_mask(bbox, mask, mini_shape):\n    \"\"\"Resize masks to a smaller version to reduce memory load.\n    Mini-masks can be resized back to image scale using expand_masks()\n\n    See inspect_data.ipynb notebook for more details.\n    \"\"\"\n    mini_mask = np.zeros(mini_shape + (mask.shape[-1],), dtype=bool)\n    for i in range(mask.shape[-1]):\n        # Pick slice and cast to bool in case load_mask() returned wrong dtype\n        m = mask[:, :, i].astype(bool)\n        y1, x1, y2, x2 = bbox[i][:4]\n        m = m[y1:y2, x1:x2]\n        if m.size == 0:\n            raise Exception(\"Invalid bounding box with area of zero\")\n        # Resize with bilinear interpolation\n        m = resize(m, mini_shape)\n        mini_mask[:, :, i] = np.around(m).astype(np.bool)\n    return mini_mask\n\n\ndef expand_mask(bbox, mini_mask, image_shape):\n    \"\"\"Resizes mini masks back to image size. Reverses the change\n    of minimize_mask().\n\n    See inspect_data.ipynb notebook for more details.\n    \"\"\"\n    mask = np.zeros(image_shape[:2] + (mini_mask.shape[-1],), dtype=bool)\n    for i in range(mask.shape[-1]):\n        m = mini_mask[:, :, i]\n        y1, x1, y2, x2 = bbox[i][:4]\n        h = y2 - y1\n        w = x2 - x1\n        # Resize with bilinear interpolation\n        m = resize(m, (h, w))\n        mask[y1:y2, x1:x2, i] = np.around(m).astype(np.bool)\n    return mask\n\n\n# TODO: Build and use this function to reduce code duplication\ndef mold_mask(mask, config):\n    pass\n\n\ndef unmold_mask(mask, bbox, image_shape):\n    \"\"\"Converts a mask generated by the neural network to a format similar\n    to its original shape.\n    mask: [height, width] of type float. A small, typically 28x28 mask.\n    bbox: [y1, x1, y2, x2]. 
The box to fit the mask in.\n\n    Returns a binary mask with the same size as the original image.\n    \"\"\"\n    threshold = 0.5\n    y1, x1, y2, x2 = bbox\n    mask = resize(mask, (y2 - y1, x2 - x1))\n    mask = np.where(mask >= threshold, 1, 0).astype(np.bool)\n\n    # Put the mask in the right location.\n    full_mask = np.zeros(image_shape[:2], dtype=np.bool)\n    full_mask[y1:y2, x1:x2] = mask\n    return full_mask\n\n\n############################################################\n#  Anchors\n############################################################\n\ndef generate_anchors(scales, ratios, shape, feature_stride, anchor_stride):\n    \"\"\"\n    scales: 1D array of anchor sizes in pixels. Example: [32, 64, 128]\n    ratios: 1D array of anchor ratios of width/height. Example: [0.5, 1, 2]\n    shape: [height, width] spatial shape of the feature map over which\n            to generate anchors.\n    feature_stride: Stride of the feature map relative to the image in pixels.\n    anchor_stride: Stride of anchors on the feature map. 
For example, if the\n        value is 2 then generate anchors for every other feature map pixel.\n    \"\"\"\n    # Get all combinations of scales and ratios\n    scales, ratios = np.meshgrid(np.array(scales), np.array(ratios))\n    scales = scales.flatten()\n    ratios = ratios.flatten()\n\n    # Enumerate heights and widths from scales and ratios\n    heights = scales / np.sqrt(ratios)\n    widths = scales * np.sqrt(ratios)\n\n    # Enumerate shifts in feature space\n    shifts_y = np.arange(0, shape[0], anchor_stride) * feature_stride\n    shifts_x = np.arange(0, shape[1], anchor_stride) * feature_stride\n    shifts_x, shifts_y = np.meshgrid(shifts_x, shifts_y)\n\n    # Enumerate combinations of shifts, widths, and heights\n    box_widths, box_centers_x = np.meshgrid(widths, shifts_x)\n    box_heights, box_centers_y = np.meshgrid(heights, shifts_y)\n\n    # Reshape to get a list of (y, x) and a list of (h, w)\n    box_centers = np.stack(\n        [box_centers_y, box_centers_x], axis=2).reshape([-1, 2])\n    box_sizes = np.stack([box_heights, box_widths], axis=2).reshape([-1, 2])\n\n    # Convert to corner coordinates (y1, x1, y2, x2)\n    boxes = np.concatenate([box_centers - 0.5 * box_sizes,\n                            box_centers + 0.5 * box_sizes], axis=1)\n    return boxes\n\n\ndef generate_pyramid_anchors(scales, ratios, feature_shapes, feature_strides,\n                             anchor_stride):\n    \"\"\"Generate anchors at different levels of a feature pyramid. Each scale\n    is associated with a level of the pyramid, but each ratio is used in\n    all levels of the pyramid.\n\n    Returns:\n    anchors: [N, (y1, x1, y2, x2)]. All generated anchors in one array. Sorted\n        with the same order of the given scales. 
So, anchors of scale[0] come\n        first, then anchors of scale[1], and so on.\n    \"\"\"\n    # Anchors\n    # [anchor_count, (y1, x1, y2, x2)]\n    anchors = []\n    for i in range(len(scales)):\n        anchors.append(generate_anchors(scales[i], ratios, feature_shapes[i],\n                                        feature_strides[i], anchor_stride))\n    return np.concatenate(anchors, axis=0)\n\n\n############################################################\n#  Miscellaneous\n############################################################\n\ndef trim_zeros(x):\n    \"\"\"It's common to have tensors larger than the available data and\n    pad with zeros. This function removes rows that are all zeros.\n\n    x: [rows, columns].\n    \"\"\"\n    assert len(x.shape) == 2\n    return x[~np.all(x == 0, axis=1)]\n\n\ndef compute_matches(gt_boxes, gt_class_ids, gt_masks,\n                    pred_boxes, pred_class_ids, pred_scores, pred_masks,\n                    iou_threshold=0.5, score_threshold=0.0):\n    \"\"\"Finds matches between prediction and ground truth instances.\n\n    Returns:\n        gt_match: 1-D array. For each GT box it has the index of the matched\n                  predicted box.\n        pred_match: 1-D array. 
For each predicted box, it has the index of\n                    the matched ground truth box.\n        overlaps: [pred_boxes, gt_boxes] IoU overlaps.\n    \"\"\"\n    # Trim zero padding\n    # TODO: cleaner to do zero unpadding upstream\n    gt_boxes = trim_zeros(gt_boxes)\n    gt_masks = gt_masks[..., :gt_boxes.shape[0]]\n    pred_boxes = trim_zeros(pred_boxes)\n    pred_scores = pred_scores[:pred_boxes.shape[0]]\n    # Sort predictions by score from high to low\n    indices = np.argsort(pred_scores)[::-1]\n    pred_boxes = pred_boxes[indices]\n    pred_class_ids = pred_class_ids[indices]\n    pred_scores = pred_scores[indices]\n    pred_masks = pred_masks[..., indices]\n\n    # Compute IoU overlaps [pred_masks, gt_masks]\n    overlaps = compute_overlaps_masks(pred_masks, gt_masks)\n\n    # Loop through predictions and find matching ground truth boxes\n    match_count = 0\n    pred_match = -1 * np.ones([pred_boxes.shape[0]])\n    gt_match = -1 * np.ones([gt_boxes.shape[0]])\n    for i in range(len(pred_boxes)):\n        # Find best matching ground truth box\n        # 1. Sort matches by score\n        sorted_ixs = np.argsort(overlaps[i])[::-1]\n        # 2. Remove low scores\n        low_score_idx = np.where(overlaps[i, sorted_ixs] < score_threshold)[0]\n        if low_score_idx.size > 0:\n            sorted_ixs = sorted_ixs[:low_score_idx[0]]\n        # 3. 
Find the match\n        for j in sorted_ixs:\n            # If ground truth box is already matched, go to next one\n            if gt_match[j] > -1:\n                continue\n            # If we reach IoU smaller than the threshold, end the loop\n            iou = overlaps[i, j]\n            if iou < iou_threshold:\n                break\n            # Do we have a match?\n            if pred_class_ids[i] == gt_class_ids[j]:\n                match_count += 1\n                gt_match[j] = i\n                pred_match[i] = j\n                break\n\n    return gt_match, pred_match, overlaps\n\n\ndef compute_ap(gt_boxes, gt_class_ids, gt_masks,\n               pred_boxes, pred_class_ids, pred_scores, pred_masks,\n               iou_threshold=0.5):\n    \"\"\"Compute Average Precision at a set IoU threshold (default 0.5).\n\n    Returns:\n    mAP: Mean Average Precision\n    precisions: List of precisions at different class score thresholds.\n    recalls: List of recall values at different class score thresholds.\n    overlaps: [pred_boxes, gt_boxes] IoU overlaps.\n    \"\"\"\n    # Get matches and overlaps\n    gt_match, pred_match, overlaps = compute_matches(\n        gt_boxes, gt_class_ids, gt_masks,\n        pred_boxes, pred_class_ids, pred_scores, pred_masks,\n        iou_threshold)\n\n    # Compute precision and recall at each prediction box step\n    precisions = np.cumsum(pred_match > -1) / (np.arange(len(pred_match)) + 1)\n    recalls = np.cumsum(pred_match > -1).astype(np.float32) / len(gt_match)\n\n    # Pad with start and end values to simplify the math\n    precisions = np.concatenate([[0], precisions, [0]])\n    recalls = np.concatenate([[0], recalls, [1]])\n\n    # Ensure precision values decrease but don't increase. 
This way, the\n    # precision value at each recall threshold is the maximum it can be\n    # for all following recall thresholds, as specified by the VOC paper.\n    for i in range(len(precisions) - 2, -1, -1):\n        precisions[i] = np.maximum(precisions[i], precisions[i + 1])\n\n    # Compute mean AP over recall range\n    indices = np.where(recalls[:-1] != recalls[1:])[0] + 1\n    mAP = np.sum((recalls[indices] - recalls[indices - 1]) *\n                 precisions[indices])\n\n    return mAP, precisions, recalls, overlaps\n\n\ndef compute_ap_range(gt_box, gt_class_id, gt_mask,\n                     pred_box, pred_class_id, pred_score, pred_mask,\n                     iou_thresholds=None, verbose=1):\n    \"\"\"Compute AP over a range or IoU thresholds. Default range is 0.5-0.95.\"\"\"\n    # Default is 0.5 to 0.95 with increments of 0.05\n    iou_thresholds = iou_thresholds or np.arange(0.5, 1.0, 0.05)\n    \n    # Compute AP over range of IoU thresholds\n    AP = []\n    for iou_threshold in iou_thresholds:\n        ap, precisions, recalls, overlaps =\\\n            compute_ap(gt_box, gt_class_id, gt_mask,\n                        pred_box, pred_class_id, pred_score, pred_mask,\n                        iou_threshold=iou_threshold)\n        if verbose:\n            print(\"AP @{:.2f}:\\t {:.3f}\".format(iou_threshold, ap))\n        AP.append(ap)\n    AP = np.array(AP).mean()\n    if verbose:\n        print(\"AP @{:.2f}-{:.2f}:\\t {:.3f}\".format(\n            iou_thresholds[0], iou_thresholds[-1], AP))\n    return AP\n\n\ndef compute_recall(pred_boxes, gt_boxes, iou):\n    \"\"\"Compute the recall at the given IoU threshold. 
It's an indication\n    of how many GT boxes were found by the given prediction boxes.\n\n    pred_boxes: [N, (y1, x1, y2, x2)] in image coordinates\n    gt_boxes: [N, (y1, x1, y2, x2)] in image coordinates\n    \"\"\"\n    # Measure overlaps\n    overlaps = compute_overlaps(pred_boxes, gt_boxes)\n    iou_max = np.max(overlaps, axis=1)\n    iou_argmax = np.argmax(overlaps, axis=1)\n    positive_ids = np.where(iou_max >= iou)[0]\n    matched_gt_boxes = iou_argmax[positive_ids]\n\n    recall = len(set(matched_gt_boxes)) / gt_boxes.shape[0]\n    return recall, positive_ids\n\n\n# ## Batch Slicing\n# Some custom layers support a batch size of 1 only, and require a lot of work\n# to support batches greater than 1. This function slices an input tensor\n# across the batch dimension and feeds batches of size 1. Effectively,\n# an easy way to support batches > 1 quickly with little code modification.\n# In the long run, it's more efficient to modify the code to support large\n# batches and getting rid of this function. Consider this a temporary solution\n# def batch_slice(inputs, graph_fn, batch_size, names=None):\n#     \"\"\"Splits inputs into slices and feeds each slice to a copy of the given\n#     computation graph and then combines the results. It allows you to run a\n#     graph on a batch of inputs even if the graph is written to support one\n#     instance only.\n\n#     inputs: list of tensors. 
All must have the same first dimension length\n#     graph_fn: A function that returns a TF tensor that's part of a graph.\n#     batch_size: number of slices to divide the data into.\n#     names: If provided, assigns names to the resulting tensors.\n#     \"\"\"\n#     if not isinstance(inputs, list):\n#         inputs = [inputs]\n\n#     outputs = []\n#     for i in range(batch_size):\n#         inputs_slice = [x[i] for x in inputs]\n#         output_slice = graph_fn(*inputs_slice)\n#         if not isinstance(output_slice, (tuple, list)):\n#             output_slice = [output_slice]\n#         outputs.append(output_slice)\n#     # Change outputs from a list of slices where each is\n#     # a list of outputs to a list of outputs and each has\n#     # a list of slices\n#     outputs = list(zip(*outputs))\n\n#     if names is None:\n#         names = [None] * len(outputs)\n\n#     result = [tf.stack(o, axis=0, name=n)\n#               for o, n in zip(outputs, names)]\n#     if len(result) == 1:\n#         result = result[0]\n\n#     return result\n\n\ndef download_trained_weights(coco_model_path, verbose=1):\n    \"\"\"Download COCO trained weights from Releases.\n\n    coco_model_path: local path of COCO trained weights\n    \"\"\"\n    if verbose > 0:\n        print(\"Downloading pretrained model to \" + coco_model_path + \" ...\")\n    with urllib.request.urlopen(COCO_MODEL_URL) as resp, open(coco_model_path, 'wb') as out:\n        shutil.copyfileobj(resp, out)\n    if verbose > 0:\n        print(\"... done downloading pretrained model!\")\n\n\ndef norm_boxes(boxes, shape):\n    \"\"\"Converts boxes from pixel coordinates to normalized coordinates.\n    boxes: [N, (y1, x1, y2, x2)] in pixel coordinates\n    shape: [..., (height, width)] in pixels\n\n    Note: In pixel coordinates (y2, x2) is outside the box. 
But in normalized\n    coordinates it's inside the box.\n\n    Returns:\n        [N, (y1, x1, y2, x2)] in normalized coordinates\n    \"\"\"\n    h, w = shape\n    scale = np.array([h - 1, w - 1, h - 1, w - 1])\n    shift = np.array([0, 0, 1, 1])\n    return np.divide((boxes - shift), scale).astype(np.float32)\n\n\ndef denorm_boxes(boxes, shape):\n    \"\"\"Converts boxes from normalized coordinates to pixel coordinates.\n    boxes: [N, (y1, x1, y2, x2)] in normalized coordinates\n    shape: [..., (height, width)] in pixels\n\n    Note: In pixel coordinates (y2, x2) is outside the box. But in normalized\n    coordinates it's inside the box.\n\n    Returns:\n        [N, (y1, x1, y2, x2)] in pixel coordinates\n    \"\"\"\n    h, w = shape\n    scale = np.array([h - 1, w - 1, h - 1, w - 1])\n    shift = np.array([0, 0, 1, 1])\n    return np.around(np.multiply(boxes, scale) + shift).astype(np.int32)\n\n\ndef resize(image, output_shape, order=1, mode='constant', cval=0, clip=True,\n           preserve_range=False, anti_aliasing=False, anti_aliasing_sigma=None):\n    \"\"\"A wrapper for Scikit-Image resize().\n\n    Scikit-Image generates warnings on every call to resize() if it doesn't\n    receive the right parameters. The right parameters depend on the version\n    of skimage. This solves the problem by using different parameters per\n    version. And it provides a central place to control resizing defaults.\n    \"\"\"\n    if LooseVersion(skimage.__version__) >= LooseVersion(\"0.14\"):\n        # New in 0.14: anti_aliasing. 
Default it to False for backward\n        # compatibility with skimage 0.13.\n        return skimage.transform.resize(\n            image, output_shape,\n            order=order, mode=mode, cval=cval, clip=clip,\n            preserve_range=preserve_range, anti_aliasing=anti_aliasing,\n            anti_aliasing_sigma=anti_aliasing_sigma)\n    else:\n        return skimage.transform.resize(\n            image, output_shape,\n            order=order, mode=mode, cval=cval, clip=clip,\n            preserve_range=preserve_range)\ndef __main__():\n    pass\n"
  },
  {
    "path": "projects/LISA/visualize_data.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport argparse\nimport numpy as np\nimport os\nfrom itertools import chain\nimport cv2\nfrom PIL import Image\n\nfrom detectron2.config import get_cfg\nfrom detectron2.data import DatasetCatalog, MetadataCatalog, build_detection_train_loader\nfrom detectron2.data import detection_utils as utils\nfrom detectron2.data.build import filter_images_with_few_keypoints\nfrom detectron2.utils.logger import setup_logger\nfrom detectron2.utils.visualizer import Visualizer\nfrom detectron2.data.datasets import register_soba_instances\nregister_soba_instances(\"soba_cast_shadow_train_full\", {}, \"../../../SOBA/annotations/SOBA_train.json\", \"../../../SOBA/SOBA/\")\nregister_soba_instances(\"soba_cast_shadow_val_full\",{},\"../../../SOBA/annotations/SOBA_val.json\",\"../../../SOBA/SOBA/\")\ndef setup(args):\n    cfg = get_cfg()\n    if args.config_file:\n        cfg.merge_from_file(args.config_file)\n    cfg.merge_from_list(args.opts)\n    cfg.freeze()\n    return cfg\n\n\ndef parse_args(in_args=None):\n    parser = argparse.ArgumentParser(description=\"Visualize ground-truth data\")\n    parser.add_argument(\n        \"--source\",\n        choices=[\"annotation\", \"dataloader\"],\n        required=True,\n        help=\"visualize the annotations or the data loader (with pre-processing)\",\n    )\n    parser.add_argument(\"--config-file\", default=\"\", metavar=\"FILE\", help=\"path to config file\")\n    parser.add_argument(\"--output-dir\", default=\"./\", help=\"path to output directory\")\n    parser.add_argument(\"--show\", action=\"store_true\", help=\"show output in a window\")\n    parser.add_argument(\n        \"opts\",\n        help=\"Modify config options using the command-line\",\n        default=None,\n        nargs=argparse.REMAINDER,\n    )\n    return parser.parse_args(in_args)\n\n\nif __name__ == \"__main__\":\n    args = parse_args()\n    logger = setup_logger()\n    
logger.info(\"Arguments: \" + str(args))\n    cfg = setup(args)\n\n    dirname = args.output_dir\n    os.makedirs(dirname, exist_ok=True)\n    metadata = MetadataCatalog.get(cfg.DATASETS.TRAIN[0])\n\n    def output(vis, fname):\n        if args.show:\n            print(fname)\n            cv2.imshow(\"window\", vis.get_image()[:, :, ::-1])\n            cv2.waitKey()\n        else:\n            filepath = os.path.join(dirname, fname)\n            print(\"Saving to {} ...\".format(filepath))\n            vis.save(filepath)\n\n    scale = 2.0 if args.show else 1.0\n    if args.source == \"dataloader\":\n        train_data_loader = build_detection_train_loader(cfg)\n        for batch in train_data_loader:\n            for per_image in batch:\n                # Pytorch tensor is in (C, H, W) format\n                img = per_image[\"image\"].permute(1, 2, 0)\n                if cfg.INPUT.FORMAT == \"BGR\":\n                    img = img[:, :, [2, 1, 0]]\n                else:\n                    img = np.asarray(Image.fromarray(img, mode=cfg.INPUT.FORMAT).convert(\"RGB\"))\n\n                visualizer = Visualizer(img, metadata=metadata, scale=scale)\n                target_fields = per_image[\"instances\"].get_fields()\n                labels = [metadata.thing_classes[i] for i in target_fields[\"gt_classes\"]]\n                vis = visualizer.overlay_instances(\n                    labels=labels,\n                    boxes=target_fields.get(\"gt_boxes\", None),\n                    masks=target_fields.get(\"gt_masks\", None),\n                    keypoints=target_fields.get(\"gt_keypoints\", None),\n                )\n                output(vis, str(per_image[\"image_id\"]) + \".jpg\")\n    else:\n        dicts = list(chain.from_iterable([DatasetCatalog.get(k) for k in cfg.DATASETS.TRAIN]))\n        if cfg.MODEL.KEYPOINT_ON:\n            dicts = filter_images_with_few_keypoints(dicts, 1)\n        for dic in dicts:\n            img = 
utils.read_image(dic[\"file_name\"], \"RGB\")\n            visualizer = Visualizer(img, metadata=metadata, scale=scale)\n            vis = visualizer.draw_dataset_dict(dic)\n            output(vis, os.path.basename(dic[\"file_name\"]))\n"
  },
  {
    "path": "projects/LISA/visualize_json_results.py",
    "content": "#!/usr/bin/env python\n# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\n\nimport argparse\nimport json\nimport numpy as np\nimport os\nfrom collections import defaultdict\nimport cv2\nimport tqdm\nfrom fvcore.common.file_io import PathManager\nfrom LISA.matchor import matchor\nfrom detectron2.data import DatasetCatalog, MetadataCatalog\nfrom detectron2.structures import Boxes, BoxMode, Instances\nfrom detectron2.utils.logger import setup_logger\nfrom detectron2.utils.visualizer import Visualizer\n\nfrom detectron2.data.datasets import register_soba_instances\nregister_soba_instances(\"soba_cast_shadow_train_full\", {}, \"../../../SOBA/annotations/SOBA_train.json\", \"../../../SOBA/SOBA/\")\nregister_soba_instances(\"soba_cast_shadow_val_full\",{},\"../../../SOBA/annotations/SOBA_val.json\",\"../../../SOBA/SOBA/\")\n\n\n\ndef create_instances(ins_predictions, ass_predictions,image_size):\n    ret = Instances(image_size)\n\n    ins_score = np.asarray([x[\"score\"] for x in ins_predictions])\n    ins_association = np.asarray([x[\"association_id\"] for x in ins_predictions])\n    chosen = (ins_score > args.conf_threshold).nonzero()[0]\n    ins_score = ins_score[chosen]\n    ins_association = ins_association[chosen]\n    bbox = np.asarray([ins_predictions[i][\"bbox\"] for i in chosen])\n    bbox = BoxMode.convert(bbox, BoxMode.XYWH_ABS, BoxMode.XYXY_ABS)\n\n    labels = np.asarray([dataset_id_map(ins_predictions[i][\"category_id\"]) for i in chosen])\n    ret.scores = ins_score\n    ret.pred_boxes = Boxes(bbox)\n    ret.pred_classes = labels\n    ret.pred_associations = ins_association\n\n    try:\n        ret.pred_masks = [ins_predictions[i][\"segmentation\"] for i in chosen]\n    except KeyError:\n        pass\n    ass_ret = Instances(image_size)\n    ass_score = np.asarray([x[\"score\"] for x in ass_predictions])\n    ass_ass = np.asarray([x[\"association_id\"] for x in ass_predictions])\n    chosen = (ass_score > 
args.conf_threshold).nonzero()[0]\n    ass_score = ass_score[chosen]\n    ass_ass = ass_ass[chosen]\n    bbox = np.asarray([ass_predictions[i][\"bbox\"] for i in chosen])\n    bbox = BoxMode.convert(bbox, BoxMode.XYWH_ABS, BoxMode.XYXY_ABS)\n    \n    labels = np.asarray([dataset_ass_id_map(ass_predictions[i][\"category_id\"]) for i in chosen])\n\n    ass_ret.scores = ass_score\n    ass_ret.pred_boxes = Boxes(bbox)\n    ass_ret.pred_classes = labels\n    ass_ret.pred_associations = ass_ass\n\n\n    return ret,ass_ret\n\n\nif __name__ == \"__main__\":\n    parser = argparse.ArgumentParser(\n        description=\"A script that visualizes the json predictions from COCO or LVIS dataset.\"\n    )\n    parser.add_argument(\"--ins_input\", required=True, help=\"JSON file produced by the model\")\n    parser.add_argument(\"--ass_input\", required=True)\n    parser.add_argument(\"--output\", required=True, help=\"output directory\")\n    parser.add_argument(\"--dataset\", help=\"name of the dataset\", default=\"coco_2017_val\")\n    parser.add_argument(\"--conf-threshold\", default=0.5, type=float, help=\"confidence threshold\")\n    args = parser.parse_args()\n\n    logger = setup_logger()\n    with PathManager.open(args.ins_input, \"r\") as f:\n        ins_predictions = json.load(f)\n    with PathManager.open(args.ass_input,'r') as f:\n        ass_predictions = json.load(f)\n\n    ins_pred_by_image = defaultdict(list)\n    ass_pred_by_image = defaultdict(list)\n    \n    for p in ins_predictions:\n        ins_pred_by_image[p[\"image_id\"]].append(p)\n    for p in ass_predictions:\n        ass_pred_by_image[p[\"image_id\"]].append(p)\n\n    dicts = list(DatasetCatalog.get(args.dataset))\n    metadata = MetadataCatalog.get(args.dataset)\n    if hasattr(metadata, \"thing_dataset_id_to_contiguous_id\"):\n\n        def dataset_id_map(ds_id):\n            return metadata.thing_dataset_id_to_contiguous_id[ds_id]\n        def dataset_ass_id_map(ds_id):\n            return 
metadata.association_dataset_id_to_contiguous_id[ds_id]\n\n    elif \"lvis\" in args.dataset:\n        # LVIS results are in the same format as COCO results, but have a different\n        # mapping from dataset category id to contiguous category id in [0, #categories - 1]\n        def dataset_id_map(ds_id):\n            return ds_id - 1\n\n    else:\n        raise ValueError(\"Unsupported dataset: {}\".format(args.dataset))\n\n    os.makedirs(args.output, exist_ok=True)\n\n    for dic in tqdm.tqdm(dicts):\n        img = cv2.imread(dic[\"file_name\"], cv2.IMREAD_COLOR)[:, :, ::-1]\n        basename = os.path.basename(dic[\"file_name\"])\n        ins_predictions,ass_predictions = create_instances(ins_pred_by_image[dic[\"image_id\"]], ass_pred_by_image[dic[\"image_id\"]],img.shape[:2])\n        # ins_predictions,ass_predictions = matchor(ins_predictions,ass_predictions)\n        if ins_predictions == None:\n            continue\n        vis = Visualizer(img, metadata)\n        # vis = Visualizer(img,metadata)\n\n        vis_assa = vis.draw_instance_predictions(ass_predictions,True,labels_align='right').get_image()\n        # cv2.imwrite(os.path.join(args.output, 'ass_'+basename), vis_assa[:,:,::-1])\n        vis_pred = vis.draw_instance_predictions(ins_predictions).get_image()\n\n        vis = Visualizer(img, metadata)\n        vis_gt = vis.draw_dataset_dict(dic).get_image()\n        h,_,_ = img.shape\n\n        white = np.ones((h,20,3),dtype=('uint8'))*255\n\n        concat = np.concatenate((img,white,vis_gt,white,white,vis_pred), axis=1)\n        cv2.imwrite(os.path.join(args.output, basename), concat[:, :, ::-1])\n\n"
  },
  {
    "path": "setup.cfg",
    "content": "[isort]\nline_length=100\nmulti_line_output=3\ninclude_trailing_comma=True\nknown_standard_library=numpy,setuptools,mock\nskip=datasets,docs\nskip_glob=*/__init__.py\nknown_myself=detectron2\nknown_third_party=fvcore,matplotlib,cv2,torch,torchvision,PIL,pycocotools,yacs,termcolor,cityscapesscripts,tabulate,tqdm,scipy,lvis,psutil,pkg_resources,caffe2,onnx\nno_lines_before=STDLIB,THIRDPARTY\nsections=FUTURE,STDLIB,THIRDPARTY,myself,FIRSTPARTY,LOCALFOLDER\ndefault_section=FIRSTPARTY\n\n[mypy]\npython_version=3.6\nignore_missing_imports = True\nwarn_unused_configs = True\ndisallow_untyped_defs = True\ncheck_untyped_defs = True\nwarn_unused_ignores = True\nwarn_redundant_casts = True\nshow_column_numbers = True\nfollow_imports = silent\nallow_redefinition = True\n; Require all functions to be annotated\ndisallow_incomplete_defs = True\n"
  },
  {
    "path": "setup.py",
    "content": "#!/usr/bin/env python\n# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\n\nimport glob\nimport os\nimport shutil\nfrom os import path\nfrom setuptools import find_packages, setup\nfrom typing import List\nimport torch\nfrom torch.utils.cpp_extension import CUDA_HOME, CppExtension, CUDAExtension\n\ntorch_ver = [int(x) for x in torch.__version__.split(\".\")[:2]]\nassert torch_ver >= [1, 3], \"Requires PyTorch >= 1.3\"\n\n\ndef get_version():\n    init_py_path = path.join(path.abspath(path.dirname(__file__)), \"detectron2\", \"__init__.py\")\n    init_py = open(init_py_path, \"r\").readlines()\n    version_line = [l.strip() for l in init_py if l.startswith(\"__version__\")][0]\n    version = version_line.split(\"=\")[-1].strip().strip(\"'\\\"\")\n\n    # The following is used to build release packages.\n    # Users should never use it.\n    suffix = os.getenv(\"D2_VERSION_SUFFIX\", \"\")\n    version = version + suffix\n    if os.getenv(\"BUILD_NIGHTLY\", \"0\") == \"1\":\n        from datetime import datetime\n\n        date_str = datetime.today().strftime(\"%y%m%d\")\n        version = version + \".dev\" + date_str\n\n        new_init_py = [l for l in init_py if not l.startswith(\"__version__\")]\n        new_init_py.append('__version__ = \"{}\"\\n'.format(version))\n        with open(init_py_path, \"w\") as f:\n            f.write(\"\".join(new_init_py))\n    return version\n\n\ndef get_extensions():\n    this_dir = path.dirname(path.abspath(__file__))\n    extensions_dir = path.join(this_dir, \"detectron2\", \"layers\", \"csrc\")\n\n    main_source = path.join(extensions_dir, \"vision.cpp\")\n    sources = glob.glob(path.join(extensions_dir, \"**\", \"*.cpp\"))\n    source_cuda = glob.glob(path.join(extensions_dir, \"**\", \"*.cu\")) + glob.glob(\n        path.join(extensions_dir, \"*.cu\")\n    )\n\n    sources = [main_source] + sources\n    extension = CppExtension\n\n    extra_compile_args = {\"cxx\": []}\n    
define_macros = []\n\n    if (\n        torch.cuda.is_available() and CUDA_HOME is not None and os.path.isdir(CUDA_HOME)\n    ) or os.getenv(\"FORCE_CUDA\", \"0\") == \"1\":\n        extension = CUDAExtension\n        sources += source_cuda\n        define_macros += [(\"WITH_CUDA\", None)]\n        extra_compile_args[\"nvcc\"] = [\n            \"-DCUDA_HAS_FP16=1\",\n            \"-D__CUDA_NO_HALF_OPERATORS__\",\n            \"-D__CUDA_NO_HALF_CONVERSIONS__\",\n            \"-D__CUDA_NO_HALF2_OPERATORS__\",\n        ]\n\n        # It's better if pytorch can do this by default ..\n        CC = os.environ.get(\"CC\", None)\n        if CC is not None:\n            extra_compile_args[\"nvcc\"].append(\"-ccbin={}\".format(CC))\n\n    include_dirs = [extensions_dir]\n\n    ext_modules = [\n        extension(\n            \"detectron2._C\",\n            sources,\n            include_dirs=include_dirs,\n            define_macros=define_macros,\n            extra_compile_args=extra_compile_args,\n        )\n    ]\n\n    return ext_modules\n\n\ndef get_model_zoo_configs() -> List[str]:\n    \"\"\"\n    Return a list of configs to include in package for model zoo. 
Copy over these configs inside\n    detectron2/model_zoo.\n    \"\"\"\n\n    # Use absolute paths while symlinking.\n    source_configs_dir = path.join(path.dirname(path.realpath(__file__)), \"configs\")\n    destination = path.join(\n        path.dirname(path.realpath(__file__)), \"detectron2\", \"model_zoo\", \"configs\"\n    )\n    # Symlink the config directory inside package to have a cleaner pip install.\n\n    # Remove stale symlink/directory from a previous build.\n    if path.exists(source_configs_dir):\n        if path.islink(destination):\n            os.unlink(destination)\n        elif path.isdir(destination):\n            shutil.rmtree(destination)\n\n    if not path.exists(destination):\n        try:\n            os.symlink(source_configs_dir, destination)\n        except OSError:\n            # Fall back to copying if symlink fails: ex. on Windows.\n            shutil.copytree(source_configs_dir, destination)\n\n    config_paths = glob.glob(\"configs/**/*.yaml\", recursive=True)\n    return config_paths\n\n\nsetup(\n    name=\"detectron2\",\n    version=get_version(),\n    author=\"FAIR\",\n    url=\"https://github.com/facebookresearch/detectron2\",\n    description=\"Detectron2 is FAIR's next-generation research \"\n    \"platform for object detection and segmentation.\",\n    packages=find_packages(exclude=(\"configs\", \"tests\")),\n    package_data={\"detectron2.model_zoo\": get_model_zoo_configs()},\n    python_requires=\">=3.6\",\n    install_requires=[\n        \"termcolor>=1.1\",\n        \"Pillow\",  # you can also use pillow-simd for better performance\n        \"yacs>=0.1.6\",\n        \"tabulate\",\n        \"cloudpickle\",\n        \"matplotlib\",\n        \"tqdm>4.29.0\",\n        \"tensorboard\",\n        \"fvcore\",\n        \"future\",  # used by caffe2\n        \"pydot\",  # used to save caffe2 SVGs\n    ],\n    extras_require={\n        \"all\": [\"shapely\", \"psutil\"],\n        \"dev\": [\"flake8\", \"isort\", 
\"black==19.3b0\", \"flake8-bugbear\", \"flake8-comprehensions\"],\n    },\n    ext_modules=get_extensions(),\n    cmdclass={\"build_ext\": torch.utils.cpp_extension.BuildExtension},\n)\n"
  },
  {
    "path": "tests/README.md",
    "content": "## Unit Tests\n\nTo run the unittests, do:\n```\npython -m unittest discover -v -s tests\n```\n\nThere are also end-to-end inference & training tests, in [dev/run_*_tests.sh](../dev).\n"
  },
  {
    "path": "tests/__init__.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\n"
  },
  {
    "path": "tests/test_anchor_generator.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport logging\nimport unittest\nimport torch\n\nfrom detectron2.config import get_cfg\nfrom detectron2.layers import ShapeSpec\nfrom detectron2.modeling.anchor_generator import DefaultAnchorGenerator, RotatedAnchorGenerator\n\nlogger = logging.getLogger(__name__)\n\n\nclass TestAnchorGenerator(unittest.TestCase):\n    def test_default_anchor_generator(self):\n        cfg = get_cfg()\n        cfg.MODEL.ANCHOR_GENERATOR.SIZES = [[32, 64]]\n        cfg.MODEL.ANCHOR_GENERATOR.ASPECT_RATIOS = [[0.25, 1, 4]]\n\n        anchor_generator = DefaultAnchorGenerator(cfg, [ShapeSpec(stride=4)])\n\n        # only the last two dimensions of features matter here\n        num_images = 2\n        features = {\"stage3\": torch.rand(num_images, 96, 1, 2)}\n        anchors = anchor_generator([features[\"stage3\"]])\n        expected_anchor_tensor = torch.tensor(\n            [\n                [-32.0, -8.0, 32.0, 8.0],\n                [-16.0, -16.0, 16.0, 16.0],\n                [-8.0, -32.0, 8.0, 32.0],\n                [-64.0, -16.0, 64.0, 16.0],\n                [-32.0, -32.0, 32.0, 32.0],\n                [-16.0, -64.0, 16.0, 64.0],\n                [-28.0, -8.0, 36.0, 8.0],  # -28.0 == -32.0 + STRIDE (4)\n                [-12.0, -16.0, 20.0, 16.0],\n                [-4.0, -32.0, 12.0, 32.0],\n                [-60.0, -16.0, 68.0, 16.0],\n                [-28.0, -32.0, 36.0, 32.0],\n                [-12.0, -64.0, 20.0, 64.0],\n            ]\n        )\n\n        for i in range(num_images):\n            assert torch.allclose(anchors[i][0].tensor, expected_anchor_tensor)\n\n    def test_default_anchor_generator_centered(self):\n        cfg = get_cfg()\n        cfg.MODEL.ANCHOR_GENERATOR.SIZES = [[32, 64]]\n        cfg.MODEL.ANCHOR_GENERATOR.ASPECT_RATIOS = [[0.25, 1, 4]]\n        cfg.MODEL.ANCHOR_GENERATOR.OFFSET = 0.5\n\n        anchor_generator = DefaultAnchorGenerator(cfg, 
[ShapeSpec(stride=4)])\n\n        # only the last two dimensions of features matter here\n        num_images = 2\n        features = {\"stage3\": torch.rand(num_images, 96, 1, 2)}\n        anchors = anchor_generator([features[\"stage3\"]])\n        expected_anchor_tensor = torch.tensor(\n            [\n                [-30.0, -6.0, 34.0, 10.0],\n                [-14.0, -14.0, 18.0, 18.0],\n                [-6.0, -30.0, 10.0, 34.0],\n                [-62.0, -14.0, 66.0, 18.0],\n                [-30.0, -30.0, 34.0, 34.0],\n                [-14.0, -62.0, 18.0, 66.0],\n                [-26.0, -6.0, 38.0, 10.0],\n                [-10.0, -14.0, 22.0, 18.0],\n                [-2.0, -30.0, 14.0, 34.0],\n                [-58.0, -14.0, 70.0, 18.0],\n                [-26.0, -30.0, 38.0, 34.0],\n                [-10.0, -62.0, 22.0, 66.0],\n            ]\n        )\n\n        for i in range(num_images):\n            assert torch.allclose(anchors[i][0].tensor, expected_anchor_tensor)\n\n    def test_rrpn_anchor_generator(self):\n        cfg = get_cfg()\n        cfg.MODEL.ANCHOR_GENERATOR.SIZES = [[32, 64]]\n        cfg.MODEL.ANCHOR_GENERATOR.ASPECT_RATIOS = [[0.25, 1, 4]]\n        cfg.MODEL.ANCHOR_GENERATOR.ANGLES = [[0, 45]]\n        anchor_generator = RotatedAnchorGenerator(cfg, [ShapeSpec(stride=4)])\n\n        # only the last two dimensions of features matter here\n        num_images = 2\n        features = {\"stage3\": torch.rand(num_images, 96, 1, 2)}\n        anchors = anchor_generator([features[\"stage3\"]])\n        expected_anchor_tensor = torch.tensor(\n            [\n                [0.0, 0.0, 64.0, 16.0, 0.0],\n                [0.0, 0.0, 64.0, 16.0, 45.0],\n                [0.0, 0.0, 32.0, 32.0, 0.0],\n                [0.0, 0.0, 32.0, 32.0, 45.0],\n                [0.0, 0.0, 16.0, 64.0, 0.0],\n                [0.0, 0.0, 16.0, 64.0, 45.0],\n                [0.0, 0.0, 128.0, 32.0, 0.0],\n                [0.0, 0.0, 128.0, 32.0, 45.0],\n                [0.0, 0.0, 64.0, 
64.0, 0.0],\n                [0.0, 0.0, 64.0, 64.0, 45.0],\n                [0.0, 0.0, 32.0, 128.0, 0.0],\n                [0.0, 0.0, 32.0, 128.0, 45.0],\n                [4.0, 0.0, 64.0, 16.0, 0.0],  # 4.0 == 0.0 + STRIDE (4)\n                [4.0, 0.0, 64.0, 16.0, 45.0],\n                [4.0, 0.0, 32.0, 32.0, 0.0],\n                [4.0, 0.0, 32.0, 32.0, 45.0],\n                [4.0, 0.0, 16.0, 64.0, 0.0],\n                [4.0, 0.0, 16.0, 64.0, 45.0],\n                [4.0, 0.0, 128.0, 32.0, 0.0],\n                [4.0, 0.0, 128.0, 32.0, 45.0],\n                [4.0, 0.0, 64.0, 64.0, 0.0],\n                [4.0, 0.0, 64.0, 64.0, 45.0],\n                [4.0, 0.0, 32.0, 128.0, 0.0],\n                [4.0, 0.0, 32.0, 128.0, 45.0],\n            ]\n        )\n\n        for i in range(num_images):\n            assert torch.allclose(anchors[i][0].tensor, expected_anchor_tensor)\n\n\nif __name__ == \"__main__\":\n    unittest.main()\n"
  },
  {
    "path": "tests/test_box2box_transform.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport logging\nimport unittest\nimport torch\n\nfrom detectron2.modeling.box_regression import Box2BoxTransform, Box2BoxTransformRotated\n\nlogger = logging.getLogger(__name__)\n\n\ndef random_boxes(mean_box, stdev, N):\n    return torch.rand(N, 4) * stdev + torch.tensor(mean_box, dtype=torch.float)\n\n\nclass TestBox2BoxTransform(unittest.TestCase):\n    def test_reconstruction(self):\n        weights = (5, 5, 10, 10)\n        b2b_tfm = Box2BoxTransform(weights=weights)\n        src_boxes = random_boxes([10, 10, 20, 20], 1, 10)\n        dst_boxes = random_boxes([10, 10, 20, 20], 1, 10)\n\n        devices = [torch.device(\"cpu\")]\n        if torch.cuda.is_available():\n            devices.append(torch.device(\"cuda\"))\n        for device in devices:\n            src_boxes = src_boxes.to(device=device)\n            dst_boxes = dst_boxes.to(device=device)\n            deltas = b2b_tfm.get_deltas(src_boxes, dst_boxes)\n            dst_boxes_reconstructed = b2b_tfm.apply_deltas(deltas, src_boxes)\n            assert torch.allclose(dst_boxes, dst_boxes_reconstructed)\n\n\ndef random_rotated_boxes(mean_box, std_length, std_angle, N):\n    return torch.cat(\n        [torch.rand(N, 4) * std_length, torch.rand(N, 1) * std_angle], dim=1\n    ) + torch.tensor(mean_box, dtype=torch.float)\n\n\nclass TestBox2BoxTransformRotated(unittest.TestCase):\n    def test_reconstruction(self):\n        weights = (5, 5, 10, 10, 1)\n        b2b_transform = Box2BoxTransformRotated(weights=weights)\n        src_boxes = random_rotated_boxes([10, 10, 20, 20, -30], 5, 60.0, 10)\n        dst_boxes = random_rotated_boxes([10, 10, 20, 20, -30], 5, 60.0, 10)\n\n        devices = [torch.device(\"cpu\")]\n        if torch.cuda.is_available():\n            devices.append(torch.device(\"cuda\"))\n        for device in devices:\n            src_boxes = src_boxes.to(device=device)\n            dst_boxes = 
dst_boxes.to(device=device)\n            deltas = b2b_transform.get_deltas(src_boxes, dst_boxes)\n            dst_boxes_reconstructed = b2b_transform.apply_deltas(deltas, src_boxes)\n            assert torch.allclose(dst_boxes[:, :4], dst_boxes_reconstructed[:, :4], atol=1e-5)\n            # angle difference has to be normalized\n            assert torch.allclose(\n                (dst_boxes[:, 4] - dst_boxes_reconstructed[:, 4] + 180.0) % 360.0 - 180.0,\n                torch.zeros_like(dst_boxes[:, 4]),\n                atol=1e-4,\n            )\n\n\nif __name__ == \"__main__\":\n    unittest.main()\n"
  },
  {
    "path": "tests/test_boxes.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport json\nimport math\nimport numpy as np\nimport unittest\nimport torch\n\nfrom detectron2.structures import Boxes, BoxMode, pairwise_iou\n\n\nclass TestBoxMode(unittest.TestCase):\n    def _convert_xy_to_wh(self, x):\n        return BoxMode.convert(x, BoxMode.XYXY_ABS, BoxMode.XYWH_ABS)\n\n    def _convert_xywha_to_xyxy(self, x):\n        return BoxMode.convert(x, BoxMode.XYWHA_ABS, BoxMode.XYXY_ABS)\n\n    def _convert_xywh_to_xywha(self, x):\n        return BoxMode.convert(x, BoxMode.XYWH_ABS, BoxMode.XYWHA_ABS)\n\n    def test_box_convert_list(self):\n        for tp in [list, tuple]:\n            box = tp([5, 5, 10, 10])\n            output = self._convert_xy_to_wh(box)\n            self.assertIsInstance(output, tp)\n            self.assertEqual(output, tp([5, 5, 5, 5]))\n\n            with self.assertRaises(Exception):\n                self._convert_xy_to_wh([box])\n\n    def test_box_convert_array(self):\n        box = np.asarray([[5, 5, 10, 10], [1, 1, 2, 3]])\n        output = self._convert_xy_to_wh(box)\n        self.assertEqual(output.dtype, box.dtype)\n        self.assertEqual(output.shape, box.shape)\n        self.assertTrue((output[0] == [5, 5, 5, 5]).all())\n        self.assertTrue((output[1] == [1, 1, 1, 2]).all())\n\n    def test_box_convert_cpu_tensor(self):\n        box = torch.tensor([[5, 5, 10, 10], [1, 1, 2, 3]])\n        output = self._convert_xy_to_wh(box)\n        self.assertEqual(output.dtype, box.dtype)\n        self.assertEqual(output.shape, box.shape)\n        output = output.numpy()\n        self.assertTrue((output[0] == [5, 5, 5, 5]).all())\n        self.assertTrue((output[1] == [1, 1, 1, 2]).all())\n\n    @unittest.skipIf(not torch.cuda.is_available(), \"CUDA unavailable\")\n    def test_box_convert_cuda_tensor(self):\n        box = torch.tensor([[5, 5, 10, 10], [1, 1, 2, 3]]).cuda()\n        output = self._convert_xy_to_wh(box)\n        
self.assertEqual(output.dtype, box.dtype)\n        self.assertEqual(output.shape, box.shape)\n        self.assertEqual(output.device, box.device)\n        output = output.cpu().numpy()\n        self.assertTrue((output[0] == [5, 5, 5, 5]).all())\n        self.assertTrue((output[1] == [1, 1, 1, 2]).all())\n\n    def test_box_convert_xywha_to_xyxy_list(self):\n        for tp in [list, tuple]:\n            box = tp([50, 50, 30, 20, 0])\n            output = self._convert_xywha_to_xyxy(box)\n            self.assertIsInstance(output, tp)\n            self.assertEqual(output, tp([35, 40, 65, 60]))\n\n            with self.assertRaises(Exception):\n                self._convert_xywha_to_xyxy([box])\n\n    def test_box_convert_xywha_to_xyxy_array(self):\n        for dtype in [np.float64, np.float32]:\n            box = np.asarray(\n                [\n                    [50, 50, 30, 20, 0],\n                    [50, 50, 30, 20, 90],\n                    [1, 1, math.sqrt(2), math.sqrt(2), -45],\n                ],\n                dtype=dtype,\n            )\n            output = self._convert_xywha_to_xyxy(box)\n            self.assertEqual(output.dtype, box.dtype)\n            expected = np.asarray([[35, 40, 65, 60], [40, 35, 60, 65], [0, 0, 2, 2]], dtype=dtype)\n            self.assertTrue(np.allclose(output, expected, atol=1e-6), \"output={}\".format(output))\n\n    def test_box_convert_xywha_to_xyxy_tensor(self):\n        for dtype in [torch.float32, torch.float64]:\n            box = torch.tensor(\n                [\n                    [50, 50, 30, 20, 0],\n                    [50, 50, 30, 20, 90],\n                    [1, 1, math.sqrt(2), math.sqrt(2), -45],\n                ],\n                dtype=dtype,\n            )\n            output = self._convert_xywha_to_xyxy(box)\n            self.assertEqual(output.dtype, box.dtype)\n            expected = torch.tensor([[35, 40, 65, 60], [40, 35, 60, 65], [0, 0, 2, 2]], dtype=dtype)\n\n            
self.assertTrue(torch.allclose(output, expected, atol=1e-6), \"output={}\".format(output))\n\n    def test_box_convert_xywh_to_xywha_list(self):\n        for tp in [list, tuple]:\n            box = tp([50, 50, 30, 20])\n            output = self._convert_xywh_to_xywha(box)\n            self.assertIsInstance(output, tp)\n            self.assertEqual(output, tp([65, 60, 30, 20, 0]))\n\n            with self.assertRaises(Exception):\n                self._convert_xywh_to_xywha([box])\n\n    def test_box_convert_xywh_to_xywha_array(self):\n        for dtype in [np.float64, np.float32]:\n            box = np.asarray([[30, 40, 70, 60], [30, 40, 60, 70], [-1, -1, 2, 2]], dtype=dtype)\n            output = self._convert_xywh_to_xywha(box)\n            self.assertEqual(output.dtype, box.dtype)\n            expected = np.asarray(\n                [[65, 70, 70, 60, 0], [60, 75, 60, 70, 0], [0, 0, 2, 2, 0]], dtype=dtype\n            )\n            self.assertTrue(np.allclose(output, expected, atol=1e-6), \"output={}\".format(output))\n\n    def test_box_convert_xywh_to_xywha_tensor(self):\n        for dtype in [torch.float32, torch.float64]:\n            box = torch.tensor([[30, 40, 70, 60], [30, 40, 60, 70], [-1, -1, 2, 2]], dtype=dtype)\n            output = self._convert_xywh_to_xywha(box)\n            self.assertEqual(output.dtype, box.dtype)\n            expected = torch.tensor(\n                [[65, 70, 70, 60, 0], [60, 75, 60, 70, 0], [0, 0, 2, 2, 0]], dtype=dtype\n            )\n\n            self.assertTrue(torch.allclose(output, expected, atol=1e-6), \"output={}\".format(output))\n\n    def test_json_serializable(self):\n        payload = {\"box_mode\": BoxMode.XYWH_REL}\n        try:\n            json.dumps(payload)\n        except Exception:\n            self.fail(\"JSON serialization failed\")\n\n    def test_json_deserializable(self):\n        payload = '{\"box_mode\": 2}'\n        obj = json.loads(payload)\n        try:\n            obj[\"box_mode\"] = 
BoxMode(obj[\"box_mode\"])\n        except Exception:\n            self.fail(\"JSON deserialization failed\")\n\n\nclass TestBoxIOU(unittest.TestCase):\n    def test_pairwise_iou(self):\n        boxes1 = torch.tensor([[0.0, 0.0, 1.0, 1.0], [0.0, 0.0, 1.0, 1.0]])\n\n        boxes2 = torch.tensor(\n            [\n                [0.0, 0.0, 1.0, 1.0],\n                [0.0, 0.0, 0.5, 1.0],\n                [0.0, 0.0, 1.0, 0.5],\n                [0.0, 0.0, 0.5, 0.5],\n                [0.5, 0.5, 1.0, 1.0],\n                [0.5, 0.5, 1.5, 1.5],\n            ]\n        )\n\n        expected_ious = torch.tensor(\n            [\n                [1.0, 0.5, 0.5, 0.25, 0.25, 0.25 / (2 - 0.25)],\n                [1.0, 0.5, 0.5, 0.25, 0.25, 0.25 / (2 - 0.25)],\n            ]\n        )\n\n        ious = pairwise_iou(Boxes(boxes1), Boxes(boxes2))\n\n        self.assertTrue(torch.allclose(ious, expected_ious))\n\n\nif __name__ == \"__main__\":\n    unittest.main()\n"
  },
  {
    "path": "tests/test_checkpoint.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport unittest\nfrom collections import OrderedDict\nimport torch\nfrom torch import nn\n\nfrom detectron2.checkpoint.c2_model_loading import align_and_update_state_dicts\nfrom detectron2.utils.logger import setup_logger\n\n\nclass TestCheckpointer(unittest.TestCase):\n    def setUp(self):\n        setup_logger()\n\n    def create_complex_model(self):\n        m = nn.Module()\n        m.block1 = nn.Module()\n        m.block1.layer1 = nn.Linear(2, 3)\n        m.layer2 = nn.Linear(3, 2)\n        m.res = nn.Module()\n        m.res.layer2 = nn.Linear(3, 2)\n\n        state_dict = OrderedDict()\n        state_dict[\"layer1.weight\"] = torch.rand(3, 2)\n        state_dict[\"layer1.bias\"] = torch.rand(3)\n        state_dict[\"layer2.weight\"] = torch.rand(2, 3)\n        state_dict[\"layer2.bias\"] = torch.rand(2)\n        state_dict[\"res.layer2.weight\"] = torch.rand(2, 3)\n        state_dict[\"res.layer2.bias\"] = torch.rand(2)\n        return m, state_dict\n\n    def test_complex_model_loaded(self):\n        for add_data_parallel in [False, True]:\n            model, state_dict = self.create_complex_model()\n            if add_data_parallel:\n                model = nn.DataParallel(model)\n            model_sd = model.state_dict()\n\n            align_and_update_state_dicts(model_sd, state_dict)\n            for loaded, stored in zip(model_sd.values(), state_dict.values()):\n                # different tensor references\n                self.assertFalse(id(loaded) == id(stored))\n                # same content\n                self.assertTrue(loaded.equal(stored))\n\n\nif __name__ == \"__main__\":\n    unittest.main()\n"
  },
  {
    "path": "tests/test_config.py",
    "content": "#!/usr/bin/env python\n# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\n\n\nimport os\nimport tempfile\nimport unittest\n\nfrom detectron2.config import downgrade_config, get_cfg, upgrade_config\n\n_V0_CFG = \"\"\"\nMODEL:\n  RPN_HEAD:\n    NAME: \"TEST\"\nVERSION: 0\n\"\"\"\n\n_V1_CFG = \"\"\"\nMODEL:\n  WEIGHT: \"/path/to/weight\"\n\"\"\"\n\n\nclass TestConfigVersioning(unittest.TestCase):\n    def test_upgrade_downgrade_consistency(self):\n        cfg = get_cfg()\n        # check that custom is preserved\n        cfg.USER_CUSTOM = 1\n\n        down = downgrade_config(cfg, to_version=0)\n        up = upgrade_config(down)\n        self.assertTrue(up == cfg)\n\n    def _merge_cfg_str(self, cfg, merge_str):\n        f = tempfile.NamedTemporaryFile(mode=\"w\", suffix=\".yaml\", delete=False)\n        try:\n            f.write(merge_str)\n            f.close()\n            cfg.merge_from_file(f.name)\n        finally:\n            os.remove(f.name)\n        return cfg\n\n    def test_auto_upgrade(self):\n        cfg = get_cfg()\n        latest_ver = cfg.VERSION\n        cfg.USER_CUSTOM = 1\n\n        self._merge_cfg_str(cfg, _V0_CFG)\n\n        self.assertEqual(cfg.MODEL.RPN.HEAD_NAME, \"TEST\")\n        self.assertEqual(cfg.VERSION, latest_ver)\n\n    def test_guess_v1(self):\n        cfg = get_cfg()\n        latest_ver = cfg.VERSION\n        self._merge_cfg_str(cfg, _V1_CFG)\n        self.assertEqual(cfg.VERSION, latest_ver)\n"
  },
  {
    "path": "tests/test_data_loader.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.\n\nimport copy\nimport numpy as np\nimport unittest\nimport pycocotools.mask as mask_util\n\nfrom detectron2.data import detection_utils\nfrom detectron2.data import transforms as T\nfrom detectron2.structures import BitMasks, BoxMode\n\n\nclass TestTransformAnnotations(unittest.TestCase):\n    def test_transform_simple_annotation(self):\n        transforms = T.TransformList([T.HFlipTransform(400)])\n        anno = {\n            \"bbox\": np.asarray([10, 10, 200, 300]),\n            \"bbox_mode\": BoxMode.XYXY_ABS,\n            \"category_id\": 3,\n            \"segmentation\": [[10, 10, 100, 100, 100, 10], [150, 150, 200, 150, 200, 200]],\n        }\n\n        output = detection_utils.transform_instance_annotations(anno, transforms, (400, 400))\n        self.assertTrue(np.allclose(output[\"bbox\"], [200, 10, 390, 300]))\n        self.assertEqual(len(output[\"segmentation\"]), len(anno[\"segmentation\"]))\n        self.assertTrue(np.allclose(output[\"segmentation\"][0], [390, 10, 300, 100, 300, 10]))\n\n        detection_utils.annotations_to_instances([output, output], (400, 400))\n\n    def test_flip_keypoints(self):\n        transforms = T.TransformList([T.HFlipTransform(400)])\n        anno = {\n            \"bbox\": np.asarray([10, 10, 200, 300]),\n            \"bbox_mode\": BoxMode.XYXY_ABS,\n            \"keypoints\": np.random.rand(17, 3) * 50 + 15,\n        }\n\n        output = detection_utils.transform_instance_annotations(\n            copy.deepcopy(anno),\n            transforms,\n            (400, 400),\n            keypoint_hflip_indices=detection_utils.create_keypoint_hflip_indices(\n                [\"keypoints_coco_2017_train\"]\n            ),\n        )\n        # The first keypoint is nose\n        self.assertTrue(np.allclose(output[\"keypoints\"][0, 0], 400 - anno[\"keypoints\"][0, 0]))\n        # The last 16 keypoints are 8 left-right pairs\n        
self.assertTrue(\n            np.allclose(\n                output[\"keypoints\"][1:, 0].reshape(-1, 2)[:, ::-1],\n                400 - anno[\"keypoints\"][1:, 0].reshape(-1, 2),\n            )\n        )\n        self.assertTrue(\n            np.allclose(\n                output[\"keypoints\"][1:, 1:].reshape(-1, 2, 2)[:, ::-1, :],\n                anno[\"keypoints\"][1:, 1:].reshape(-1, 2, 2),\n            )\n        )\n\n    def test_transform_RLE(self):\n        transforms = T.TransformList([T.HFlipTransform(400)])\n        mask = np.zeros((300, 400), order=\"F\").astype(\"uint8\")\n        mask[:, :200] = 1\n\n        anno = {\n            \"bbox\": np.asarray([10, 10, 200, 300]),\n            \"bbox_mode\": BoxMode.XYXY_ABS,\n            \"segmentation\": mask_util.encode(mask[:, :, None])[0],\n            \"category_id\": 3,\n        }\n        output = detection_utils.transform_instance_annotations(\n            copy.deepcopy(anno), transforms, (300, 400)\n        )\n        mask = output[\"segmentation\"]\n        self.assertTrue((mask[:, 200:] == 1).all())\n        self.assertTrue((mask[:, :200] == 0).all())\n\n        inst = detection_utils.annotations_to_instances(\n            [output, output], (400, 400), mask_format=\"bitmask\"\n        )\n        self.assertTrue(isinstance(inst.gt_masks, BitMasks))\n\n    def test_transform_RLE_resize(self):\n        transforms = T.TransformList(\n            [T.HFlipTransform(400), T.ScaleTransform(300, 400, 400, 400, \"bilinear\")]\n        )\n        mask = np.zeros((300, 400), order=\"F\").astype(\"uint8\")\n        mask[:, :200] = 1\n\n        anno = {\n            \"bbox\": np.asarray([10, 10, 200, 300]),\n            \"bbox_mode\": BoxMode.XYXY_ABS,\n            \"segmentation\": mask_util.encode(mask[:, :, None])[0],\n            \"category_id\": 3,\n        }\n        output = detection_utils.transform_instance_annotations(\n            copy.deepcopy(anno), transforms, (400, 400)\n        )\n\n        inst 
= detection_utils.annotations_to_instances(\n            [output, output], (400, 400), mask_format=\"bitmask\"\n        )\n        self.assertTrue(isinstance(inst.gt_masks, BitMasks))\n\n    def test_gen_crop(self):\n        instance = {\"bbox\": [10, 10, 100, 100], \"bbox_mode\": BoxMode.XYXY_ABS}\n        t = detection_utils.gen_crop_transform_with_instance((10, 10), (150, 150), instance)\n        # the box center must fall into the cropped region\n        self.assertTrue(t.x0 <= 55 <= t.x0 + t.w)\n\n    def test_gen_crop_outside_boxes(self):\n        instance = {\"bbox\": [10, 10, 100, 100], \"bbox_mode\": BoxMode.XYXY_ABS}\n        with self.assertRaises(AssertionError):\n            detection_utils.gen_crop_transform_with_instance((10, 10), (15, 15), instance)\n"
  },
  {
    "path": "tests/test_data_transform.py",
    "content": "# -*- coding: utf-8 -*-\n# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\n\nimport logging\nimport numpy as np\nimport unittest\n\nfrom detectron2.config import get_cfg\nfrom detectron2.data import detection_utils\nfrom detectron2.data import transforms as T\nfrom detectron2.utils.logger import setup_logger\n\nlogger = logging.getLogger(__name__)\n\n\nclass TestTransforms(unittest.TestCase):\n    def setUp(self):\n        setup_logger()\n\n    def test_apply_rotated_boxes(self):\n        np.random.seed(125)\n        cfg = get_cfg()\n        is_train = True\n        transform_gen = detection_utils.build_transform_gen(cfg, is_train)\n        image = np.random.rand(200, 300)\n        image, transforms = T.apply_transform_gens(transform_gen, image)\n        image_shape = image.shape[:2]  # h, w\n        assert image_shape == (800, 1200)\n        annotation = {\"bbox\": [179, 97, 62, 40, -56]}\n\n        boxes = np.array([annotation[\"bbox\"]], dtype=np.float64)  # boxes.shape = (1, 5)\n        transformed_bbox = transforms.apply_rotated_box(boxes)[0]\n\n        expected_bbox = np.array([484, 388, 248, 160, 56], dtype=np.float64)\n        err_msg = \"transformed_bbox = {}, expected {}\".format(transformed_bbox, expected_bbox)\n        assert np.allclose(transformed_bbox, expected_bbox), err_msg\n\n    def test_apply_rotated_boxes_unequal_scaling_factor(self):\n        np.random.seed(125)\n        h, w = 400, 200\n        newh, neww = 800, 800\n        image = np.random.rand(h, w)\n        transform_gen = []\n        transform_gen.append(T.Resize(shape=(newh, neww)))\n        image, transforms = T.apply_transform_gens(transform_gen, image)\n        image_shape = image.shape[:2]  # h, w\n        assert image_shape == (newh, neww)\n\n        boxes = np.array(\n            [\n                [150, 100, 40, 20, 0],\n                [150, 100, 40, 20, 30],\n                [150, 100, 40, 20, 90],\n                [150, 100, 40, 20, 
-90],\n            ],\n            dtype=np.float64,\n        )\n        transformed_boxes = transforms.apply_rotated_box(boxes)\n\n        expected_bboxes = np.array(\n            [\n                [600, 200, 160, 40, 0],\n                [600, 200, 144.22205102, 52.91502622, 49.10660535],\n                [600, 200, 80, 80, 90],\n                [600, 200, 80, 80, -90],\n            ],\n            dtype=np.float64,\n        )\n        err_msg = \"transformed_boxes = {}, expected {}\".format(transformed_boxes, expected_bboxes)\n        assert np.allclose(transformed_boxes, expected_bboxes), err_msg\n\n    def test_print_transform_gen(self):\n        t = T.RandomCrop(\"relative\", (100, 100))\n        self.assertTrue(str(t) == \"RandomCrop(crop_type='relative', crop_size=(100, 100))\")\n\n        t = T.RandomFlip(prob=0.5)\n        self.assertTrue(str(t) == \"RandomFlip(prob=0.5)\")\n\n        t = T.RandomFlip()\n        self.assertTrue(str(t) == \"RandomFlip()\")\n"
  },
  {
    "path": "tests/test_fast_rcnn.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport logging\nimport unittest\nimport torch\n\nfrom detectron2.config import get_cfg\nfrom detectron2.modeling.box_regression import Box2BoxTransform, Box2BoxTransformRotated\nfrom detectron2.modeling.roi_heads.fast_rcnn import FastRCNNOutputLayers, FastRCNNOutputs\nfrom detectron2.modeling.roi_heads.rotated_fast_rcnn import RotatedFastRCNNOutputs\nfrom detectron2.structures import Boxes, Instances, RotatedBoxes\nfrom detectron2.utils.events import EventStorage\n\nlogger = logging.getLogger(__name__)\n\n\nclass FastRCNNTest(unittest.TestCase):\n    def test_fast_rcnn(self):\n        torch.manual_seed(132)\n        cfg = get_cfg()\n        cfg.MODEL.ROI_BOX_HEAD.BBOX_REG_WEIGHTS = (10, 10, 5, 5)\n        box2box_transform = Box2BoxTransform(weights=cfg.MODEL.ROI_BOX_HEAD.BBOX_REG_WEIGHTS)\n\n        box_head_output_size = 8\n        num_classes = 5\n        cls_agnostic_bbox_reg = False\n\n        box_predictor = FastRCNNOutputLayers(\n            box_head_output_size, num_classes, cls_agnostic_bbox_reg, box_dim=4\n        )\n        feature_pooled = torch.rand(2, box_head_output_size)\n        pred_class_logits, pred_proposal_deltas = box_predictor(feature_pooled)\n        image_shape = (10, 10)\n        proposal_boxes = torch.tensor([[0.8, 1.1, 3.2, 2.8], [2.3, 2.5, 7, 8]], dtype=torch.float32)\n        gt_boxes = torch.tensor([[1, 1, 3, 3], [2, 2, 6, 6]], dtype=torch.float32)\n        result = Instances(image_shape)\n        result.proposal_boxes = Boxes(proposal_boxes)\n        result.gt_boxes = Boxes(gt_boxes)\n        result.gt_classes = torch.tensor([1, 2])\n        proposals = []\n        proposals.append(result)\n        smooth_l1_beta = cfg.MODEL.ROI_BOX_HEAD.SMOOTH_L1_BETA\n\n        outputs = FastRCNNOutputs(\n            box2box_transform, pred_class_logits, pred_proposal_deltas, proposals, smooth_l1_beta\n        )\n        with EventStorage():  # capture events 
in a new storage to discard them\n            losses = outputs.losses()\n\n        expected_losses = {\n            \"loss_cls\": torch.tensor(1.7951188087),\n            \"loss_box_reg\": torch.tensor(4.0357131958),\n        }\n        for name in expected_losses.keys():\n            assert torch.allclose(losses[name], expected_losses[name])\n\n    def test_fast_rcnn_rotated(self):\n        torch.manual_seed(132)\n        cfg = get_cfg()\n        cfg.MODEL.ROI_BOX_HEAD.BBOX_REG_WEIGHTS = (10, 10, 5, 5, 1)\n        box2box_transform = Box2BoxTransformRotated(weights=cfg.MODEL.ROI_BOX_HEAD.BBOX_REG_WEIGHTS)\n\n        box_head_output_size = 8\n        num_classes = 5\n        cls_agnostic_bbox_reg = False\n\n        box_predictor = FastRCNNOutputLayers(\n            box_head_output_size, num_classes, cls_agnostic_bbox_reg, box_dim=5\n        )\n        feature_pooled = torch.rand(2, box_head_output_size)\n        pred_class_logits, pred_proposal_deltas = box_predictor(feature_pooled)\n        image_shape = (10, 10)\n        proposal_boxes = torch.tensor(\n            [[2, 1.95, 2.4, 1.7, 0], [4.65, 5.25, 4.7, 5.5, 0]], dtype=torch.float32\n        )\n        gt_boxes = torch.tensor([[2, 2, 2, 2, 0], [4, 4, 4, 4, 0]], dtype=torch.float32)\n        result = Instances(image_shape)\n        result.proposal_boxes = RotatedBoxes(proposal_boxes)\n        result.gt_boxes = RotatedBoxes(gt_boxes)\n        result.gt_classes = torch.tensor([1, 2])\n        proposals = []\n        proposals.append(result)\n        smooth_l1_beta = cfg.MODEL.ROI_BOX_HEAD.SMOOTH_L1_BETA\n\n        outputs = RotatedFastRCNNOutputs(\n            box2box_transform, pred_class_logits, pred_proposal_deltas, proposals, smooth_l1_beta\n        )\n        with EventStorage():  # capture events in a new storage to discard them\n            losses = outputs.losses()\n\n        # Note: the expected losses are slightly different even if\n        # the boxes are essentially the same as in the FastRCNNOutput 
test, because\n        # bbox_pred in FastRCNNOutputLayers have different Linear layers/initialization\n        # between the two cases.\n        expected_losses = {\n            \"loss_cls\": torch.tensor(1.7920907736),\n            \"loss_box_reg\": torch.tensor(4.0410838127),\n        }\n        for name in expected_losses.keys():\n            assert torch.allclose(losses[name], expected_losses[name])\n\n\nif __name__ == \"__main__\":\n    unittest.main()\n"
  },
  {
    "path": "tests/test_mask_ops.py",
    "content": "# -*- coding: utf-8 -*-\n# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\n\nimport contextlib\nimport io\nimport numpy as np\nimport os\nimport unittest\nfrom collections import defaultdict\nimport torch\nimport tqdm\nfrom fvcore.common.benchmark import benchmark\nfrom pycocotools.coco import COCO\nfrom tabulate import tabulate\nfrom torch.nn import functional as F\n\nfrom detectron2.data import MetadataCatalog\nfrom detectron2.layers.mask_ops import (\n    pad_masks,\n    paste_mask_in_image_old,\n    paste_masks_in_image,\n    scale_boxes,\n)\nfrom detectron2.structures import BitMasks, Boxes, BoxMode, PolygonMasks\nfrom detectron2.structures.masks import polygons_to_bitmask\n\n\ndef iou_between_full_image_bit_masks(a, b):\n    intersect = (a & b).sum()\n    union = (a | b).sum()\n    return intersect / union\n\n\ndef rasterize_polygons_with_grid_sample(full_image_bit_mask, box, mask_size, threshold=0.5):\n    x0, y0, x1, y1 = box[0], box[1], box[2], box[3]\n\n    img_h, img_w = full_image_bit_mask.shape\n\n    mask_y = np.arange(0.0, mask_size) + 0.5  # mask y sample coords in [0.5, mask_size - 0.5]\n    mask_x = np.arange(0.0, mask_size) + 0.5  # mask x sample coords in [0.5, mask_size - 0.5]\n    mask_y = (mask_y) / (mask_size) * (y1 - y0) + y0\n    mask_x = (mask_x) / (mask_size) * (x1 - x0) + x0\n\n    mask_x = (mask_x - 0.5) / (img_w - 1) * 2 + -1\n    mask_y = (mask_y - 0.5) / (img_h - 1) * 2 + -1\n    gy, gx = torch.meshgrid(torch.from_numpy(mask_y), torch.from_numpy(mask_x))\n    ind = torch.stack([gx, gy], dim=-1).to(dtype=torch.float32)\n\n    full_image_bit_mask = torch.from_numpy(full_image_bit_mask)\n    mask = F.grid_sample(\n        full_image_bit_mask[None, None, :, :].to(dtype=torch.float32),\n        ind[None, :, :, :],\n        align_corners=True,\n    )\n\n    return mask[0, 0] >= threshold\n\n\nclass TestMaskCropPaste(unittest.TestCase):\n    def setUp(self):\n        json_file = 
MetadataCatalog.get(\"coco_2017_val_100\").json_file\n        if not os.path.isfile(json_file):\n            raise unittest.SkipTest(\"{} not found\".format(json_file))\n        with contextlib.redirect_stdout(io.StringIO()):\n            self.coco = COCO(json_file)\n\n    def test_crop_paste_consistency(self):\n        \"\"\"\n        rasterize_polygons_within_box (used in training)\n        and\n        paste_masks_in_image (used in inference)\n        should be inverse operations to each other.\n\n        This function runs several implementation of the above two operations and prints\n        the reconstruction error.\n        \"\"\"\n\n        anns = self.coco.loadAnns(self.coco.getAnnIds(iscrowd=False))  # avoid crowd annotations\n\n        selected_anns = anns[:100]\n\n        ious = []\n        for ann in tqdm.tqdm(selected_anns):\n            results = self.process_annotation(ann)\n            ious.append([k[2] for k in results])\n\n        ious = np.array(ious)\n        mean_ious = ious.mean(axis=0)\n        table = []\n        res_dic = defaultdict(dict)\n        for row, iou in zip(results, mean_ious):\n            table.append((row[0], row[1], iou))\n            res_dic[row[0]][row[1]] = iou\n        print(tabulate(table, headers=[\"rasterize\", \"paste\", \"iou\"], tablefmt=\"simple\"))\n        # assert that the reconstruction is good:\n        self.assertTrue(res_dic[\"polygon\"][\"aligned\"] > 0.94)\n        self.assertTrue(res_dic[\"roialign\"][\"aligned\"] > 0.95)\n\n    def process_annotation(self, ann, mask_side_len=28):\n        # Parse annotation data\n        img_info = self.coco.loadImgs(ids=[ann[\"image_id\"]])[0]\n        height, width = img_info[\"height\"], img_info[\"width\"]\n        gt_polygons = [np.array(p, dtype=np.float64) for p in ann[\"segmentation\"]]\n        gt_bbox = BoxMode.convert(np.array(ann[\"bbox\"]), BoxMode.XYWH_ABS, BoxMode.XYXY_ABS)\n        gt_bit_mask = polygons_to_bitmask(gt_polygons, height, width)\n\n        
# Run rasterize ..\n        torch_gt_bbox = torch.from_numpy(gt_bbox[None, :]).to(dtype=torch.float32)\n        box_bitmasks = {\n            \"polygon\": PolygonMasks([gt_polygons]).crop_and_resize(torch_gt_bbox, mask_side_len)[0],\n            \"gridsample\": rasterize_polygons_with_grid_sample(gt_bit_mask, gt_bbox, mask_side_len),\n            \"roialign\": BitMasks(torch.from_numpy(gt_bit_mask[None, :, :])).crop_and_resize(\n                torch_gt_bbox, mask_side_len\n            )[0],\n        }\n\n        # Run paste ..\n        results = defaultdict(dict)\n        for k, box_bitmask in box_bitmasks.items():\n            padded_bitmask, scale = pad_masks(box_bitmask[None, :, :], 1)\n            scaled_boxes = scale_boxes(torch_gt_bbox, scale)\n\n            r = results[k]\n            r[\"old\"] = paste_mask_in_image_old(\n                padded_bitmask[0], scaled_boxes[0], height, width, threshold=0.5\n            )\n            r[\"aligned\"] = paste_masks_in_image(\n                box_bitmask[None, :, :], Boxes(gt_bbox[None, :]), (height, width)\n            )[0]\n\n        table = []\n        for rasterize_method, r in results.items():\n            for paste_method, mask in r.items():\n                mask = np.asarray(mask)\n                iou = iou_between_full_image_bit_masks(gt_bit_mask.astype(\"uint8\"), mask)\n                table.append((rasterize_method, paste_method, iou))\n        return table\n\n    def test_polygon_area(self):\n        # Draw polygon boxes\n        for d in [5.0, 10.0, 1000.0]:\n            polygon = PolygonMasks([[[0, 0, 0, d, d, d, d, 0]]])\n            area = polygon.area()[0]\n            target = d ** 2\n            self.assertEqual(area, target)\n\n        # Draw polygon triangles\n        for d in [5.0, 10.0, 1000.0]:\n            polygon = PolygonMasks([[[0, 0, 0, d, d, d]]])\n            area = polygon.area()[0]\n            target = d ** 2 / 2\n            self.assertEqual(area, target)\n\n\ndef 
benchmark_paste():\n    S = 800\n    H, W = image_shape = (S, S)\n    N = 64\n    torch.manual_seed(42)\n    masks = torch.rand(N, 28, 28)\n\n    center = torch.rand(N, 2) * 600 + 100\n    wh = torch.clamp(torch.randn(N, 2) * 40 + 200, min=50)\n    x0y0 = torch.clamp(center - wh * 0.5, min=0.0)\n    x1y1 = torch.clamp(center + wh * 0.5, max=S)\n    boxes = Boxes(torch.cat([x0y0, x1y1], axis=1))\n\n    def func(device, n=3):\n        m = masks.to(device=device)\n        b = boxes.to(device=device)\n\n        def bench():\n            for _ in range(n):\n                paste_masks_in_image(m, b, image_shape)\n            if device.type == \"cuda\":\n                torch.cuda.synchronize()\n\n        return bench\n\n    specs = [{\"device\": torch.device(\"cpu\"), \"n\": 3}]\n    if torch.cuda.is_available():\n        specs.append({\"device\": torch.device(\"cuda\"), \"n\": 3})\n\n    benchmark(func, \"paste_masks\", specs, num_iters=10, warmup_iters=2)\n\n\nif __name__ == \"__main__\":\n    benchmark_paste()\n    unittest.main()\n"
  },
  {
    "path": "tests/test_model_e2e.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.\n\n\nimport unittest\nimport torch\n\nimport detectron2.model_zoo as model_zoo\nfrom detectron2.config import get_cfg\nfrom detectron2.modeling import build_model\nfrom detectron2.structures import BitMasks, Boxes, Instances\nfrom detectron2.utils.events import EventStorage\n\n\ndef get_model_zoo(config_path):\n    \"\"\"\n    Like model_zoo.get, but do not load any weights (even pretrained)\n    \"\"\"\n    cfg_file = model_zoo.get_config_file(config_path)\n    cfg = get_cfg()\n    cfg.merge_from_file(cfg_file)\n    if not torch.cuda.is_available():\n        cfg.MODEL.DEVICE = \"cpu\"\n    return build_model(cfg)\n\n\ndef create_model_input(img, inst=None):\n    if inst is not None:\n        return {\"image\": img, \"instances\": inst}\n    else:\n        return {\"image\": img}\n\n\ndef get_empty_instance(h, w):\n    inst = Instances((h, w))\n    inst.gt_boxes = Boxes(torch.rand(0, 4))\n    inst.gt_classes = torch.tensor([]).to(dtype=torch.int64)\n    inst.gt_masks = BitMasks(torch.rand(0, h, w))\n    return inst\n\n\ndef get_regular_bitmask_instances(h, w):\n    inst = Instances((h, w))\n    inst.gt_boxes = Boxes(torch.rand(3, 4))\n    inst.gt_boxes.tensor[:, 2:] += inst.gt_boxes.tensor[:, :2]\n    inst.gt_classes = torch.tensor([3, 4, 5]).to(dtype=torch.int64)\n    inst.gt_masks = BitMasks((torch.rand(3, h, w) > 0.5))\n    return inst\n\n\nclass ModelE2ETest(unittest.TestCase):\n    def setUp(self):\n        self.model = get_model_zoo(self.CONFIG_PATH)\n\n    def _test_eval(self, input_sizes):\n        inputs = [create_model_input(torch.rand(3, s[0], s[1])) for s in input_sizes]\n        self.model.eval()\n        self.model(inputs)\n\n    def _test_train(self, input_sizes, instances):\n        assert len(input_sizes) == len(instances)\n        inputs = [\n            create_model_input(torch.rand(3, s[0], s[1]), inst)\n            for s, inst in zip(input_sizes, 
instances)\n        ]\n        self.model.train()\n        with EventStorage():\n            losses = self.model(inputs)\n            sum(losses.values()).backward()\n            del losses\n\n\nclass MaskRCNNE2ETest(ModelE2ETest):\n    CONFIG_PATH = \"COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_1x.yaml\"\n\n    def test_empty_data(self):\n        instances = [get_empty_instance(200, 250), get_empty_instance(200, 249)]\n        self._test_eval([(200, 250), (200, 249)])\n        self._test_train([(200, 250), (200, 249)], instances)\n\n    def test_half_empty_data(self):\n        instances = [get_empty_instance(200, 250), get_regular_bitmask_instances(200, 249)]\n        self._test_train([(200, 250), (200, 249)], instances)\n\n\nclass RetinaNetE2ETest(ModelE2ETest):\n    CONFIG_PATH = \"COCO-Detection/retinanet_R_50_FPN_1x.yaml\"\n\n    def test_empty_data(self):\n        instances = [get_empty_instance(200, 250), get_empty_instance(200, 249)]\n        self._test_eval([(200, 250), (200, 249)])\n        self._test_train([(200, 250), (200, 249)], instances)\n"
  },
  {
    "path": "tests/test_model_zoo.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport logging\nimport unittest\n\nfrom detectron2 import model_zoo\nfrom detectron2.modeling import FPN, GeneralizedRCNN\n\nlogger = logging.getLogger(__name__)\n\n\nclass TestModelZoo(unittest.TestCase):\n    def test_get_returns_model(self):\n        model = model_zoo.get(\"Misc/scratch_mask_rcnn_R_50_FPN_3x_gn.yaml\", trained=False)\n        self.assertIsInstance(model, GeneralizedRCNN)\n        self.assertIsInstance(model.backbone, FPN)\n\n    def test_get_invalid_model(self):\n        self.assertRaises(RuntimeError, model_zoo.get, \"Invalid/config.yaml\")\n\n    def test_get_url(self):\n        url = model_zoo.get_checkpoint_url(\"Misc/scratch_mask_rcnn_R_50_FPN_3x_gn.yaml\")\n        self.assertEqual(\n            url,\n            \"https://dl.fbaipublicfiles.com/detectron2/Misc/scratch_mask_rcnn_R_50_FPN_3x_gn/138602908/model_final_01ca85.pkl\",  # noqa\n        )\n\n\nif __name__ == \"__main__\":\n    unittest.main()\n"
  },
  {
    "path": "tests/test_nms_rotated.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nfrom __future__ import absolute_import, division, print_function, unicode_literals\nimport unittest\nimport torch\nfrom torchvision import ops\n\nfrom detectron2.layers import batched_nms, batched_nms_rotated, nms_rotated\n\n\nclass TestNMSRotated(unittest.TestCase):\n    def reference_horizontal_nms(self, boxes, scores, iou_threshold):\n        \"\"\"\n        Args:\n            box_scores (N, 5): boxes in corner-form and probabilities.\n                (Note here 5 == 4 + 1, i.e., 4-dim horizontal box + 1-dim prob)\n            iou_threshold: intersection over union threshold.\n        Returns:\n             picked: a list of indexes of the kept boxes\n        \"\"\"\n        picked = []\n        _, indexes = scores.sort(descending=True)\n        while len(indexes) > 0:\n            current = indexes[0]\n            picked.append(current.item())\n            if len(indexes) == 1:\n                break\n            current_box = boxes[current, :]\n            indexes = indexes[1:]\n            rest_boxes = boxes[indexes, :]\n            iou = ops.box_iou(rest_boxes, current_box.unsqueeze(0)).squeeze(1)\n            indexes = indexes[iou <= iou_threshold]\n\n        return torch.as_tensor(picked)\n\n    def _create_tensors(self, N):\n        boxes = torch.rand(N, 4) * 100\n        # Note: the implementation of this function in torchvision is:\n        # boxes[:, 2:] += torch.rand(N, 2) * 100\n        # but it does not guarantee non-negative widths/heights constraints:\n        # boxes[:, 2] >= boxes[:, 0] and boxes[:, 3] >= boxes[:, 1]:\n        boxes[:, 2:] += boxes[:, :2]\n        scores = torch.rand(N)\n        return boxes, scores\n\n    def test_batched_nms_rotated_0_degree_cpu(self):\n        # torch.manual_seed(0)\n        N = 2000\n        num_classes = 50\n        boxes, scores = self._create_tensors(N)\n        idxs = torch.randint(0, num_classes, (N,))\n        
rotated_boxes = torch.zeros(N, 5)\n        rotated_boxes[:, 0] = (boxes[:, 0] + boxes[:, 2]) / 2.0\n        rotated_boxes[:, 1] = (boxes[:, 1] + boxes[:, 3]) / 2.0\n        rotated_boxes[:, 2] = boxes[:, 2] - boxes[:, 0]\n        rotated_boxes[:, 3] = boxes[:, 3] - boxes[:, 1]\n        err_msg = \"Rotated NMS with 0 degree is incompatible with horizontal NMS for IoU={}\"\n        for iou in [0.2, 0.5, 0.8]:\n            backup = boxes.clone()\n            keep_ref = batched_nms(boxes, scores, idxs, iou)\n            assert torch.allclose(boxes, backup), \"boxes modified by batched_nms\"\n            backup = rotated_boxes.clone()\n            keep = batched_nms_rotated(rotated_boxes, scores, idxs, iou)\n            assert torch.allclose(\n                rotated_boxes, backup\n            ), \"rotated_boxes modified by batched_nms_rotated\"\n            assert torch.equal(keep, keep_ref), err_msg.format(iou)\n\n    @unittest.skipIf(not torch.cuda.is_available(), \"CUDA unavailable\")\n    def test_batched_nms_rotated_0_degree_cuda(self):\n        # torch.manual_seed(0)\n        N = 2000\n        num_classes = 50\n        boxes, scores = self._create_tensors(N)\n        idxs = torch.randint(0, num_classes, (N,))\n        rotated_boxes = torch.zeros(N, 5)\n        rotated_boxes[:, 0] = (boxes[:, 0] + boxes[:, 2]) / 2.0\n        rotated_boxes[:, 1] = (boxes[:, 1] + boxes[:, 3]) / 2.0\n        rotated_boxes[:, 2] = boxes[:, 2] - boxes[:, 0]\n        rotated_boxes[:, 3] = boxes[:, 3] - boxes[:, 1]\n        err_msg = \"Rotated NMS with 0 degree is incompatible with horizontal NMS for IoU={}\"\n        for iou in [0.2, 0.5, 0.8]:\n            backup = boxes.clone()\n            keep_ref = batched_nms(boxes.cuda(), scores.cuda(), idxs, iou)\n            assert torch.allclose(boxes, backup), \"boxes modified by batched_nms\"\n            backup = rotated_boxes.clone()\n            keep = batched_nms_rotated(rotated_boxes.cuda(), scores.cuda(), idxs, iou)\n            assert 
torch.allclose(\n                rotated_boxes, backup\n            ), \"rotated_boxes modified by batched_nms_rotated\"\n            assert torch.equal(keep, keep_ref), err_msg.format(iou)\n\n    def test_nms_rotated_0_degree_cpu(self):\n        N = 1000\n        boxes, scores = self._create_tensors(N)\n        rotated_boxes = torch.zeros(N, 5)\n        rotated_boxes[:, 0] = (boxes[:, 0] + boxes[:, 2]) / 2.0\n        rotated_boxes[:, 1] = (boxes[:, 1] + boxes[:, 3]) / 2.0\n        rotated_boxes[:, 2] = boxes[:, 2] - boxes[:, 0]\n        rotated_boxes[:, 3] = boxes[:, 3] - boxes[:, 1]\n        err_msg = \"Rotated NMS incompatible between CPU and reference implementation for IoU={}\"\n        for iou in [0.5]:\n            keep_ref = self.reference_horizontal_nms(boxes, scores, iou)\n            keep = nms_rotated(rotated_boxes, scores, iou)\n            assert torch.equal(keep, keep_ref), err_msg.format(iou)\n\n    def test_nms_rotated_90_degrees_cpu(self):\n        N = 1000\n        boxes, scores = self._create_tensors(N)\n        rotated_boxes = torch.zeros(N, 5)\n        rotated_boxes[:, 0] = (boxes[:, 0] + boxes[:, 2]) / 2.0\n        rotated_boxes[:, 1] = (boxes[:, 1] + boxes[:, 3]) / 2.0\n        # Note for rotated_boxes[:, 2] and rotated_boxes[:, 3]:\n        # widths and heights are intentionally swapped here for 90 degrees case\n        # so that the reference horizontal nms could be used\n        rotated_boxes[:, 2] = boxes[:, 3] - boxes[:, 1]\n        rotated_boxes[:, 3] = boxes[:, 2] - boxes[:, 0]\n\n        rotated_boxes[:, 4] = torch.ones(N) * 90\n        err_msg = \"Rotated NMS incompatible between CPU and reference implementation for IoU={}\"\n        for iou in [0.2, 0.5, 0.8]:\n            keep_ref = self.reference_horizontal_nms(boxes, scores, iou)\n            keep = nms_rotated(rotated_boxes, scores, iou)\n            assert torch.equal(keep, keep_ref), err_msg.format(iou)\n\n    def test_nms_rotated_180_degrees_cpu(self):\n        N = 1000\n    
    boxes, scores = self._create_tensors(N)\n        rotated_boxes = torch.zeros(N, 5)\n        rotated_boxes[:, 0] = (boxes[:, 0] + boxes[:, 2]) / 2.0\n        rotated_boxes[:, 1] = (boxes[:, 1] + boxes[:, 3]) / 2.0\n        rotated_boxes[:, 2] = boxes[:, 2] - boxes[:, 0]\n        rotated_boxes[:, 3] = boxes[:, 3] - boxes[:, 1]\n        rotated_boxes[:, 4] = torch.ones(N) * 180\n        err_msg = \"Rotated NMS incompatible between CPU and reference implementation for IoU={}\"\n        for iou in [0.2, 0.5, 0.8]:\n            keep_ref = self.reference_horizontal_nms(boxes, scores, iou)\n            keep = nms_rotated(rotated_boxes, scores, iou)\n            assert torch.equal(keep, keep_ref), err_msg.format(iou)\n\n    @unittest.skipIf(not torch.cuda.is_available(), \"CUDA unavailable\")\n    def test_nms_rotated_0_degree_cuda(self):\n        N = 1000\n        boxes, scores = self._create_tensors(N)\n        rotated_boxes = torch.zeros(N, 5)\n        rotated_boxes[:, 0] = (boxes[:, 0] + boxes[:, 2]) / 2.0\n        rotated_boxes[:, 1] = (boxes[:, 1] + boxes[:, 3]) / 2.0\n        rotated_boxes[:, 2] = boxes[:, 2] - boxes[:, 0]\n        rotated_boxes[:, 3] = boxes[:, 3] - boxes[:, 1]\n        err_msg = \"Rotated NMS incompatible between CPU and CUDA for IoU={}\"\n\n        for iou in [0.2, 0.5, 0.8]:\n            r_cpu = nms_rotated(rotated_boxes, scores, iou)\n            r_cuda = nms_rotated(rotated_boxes.cuda(), scores.cuda(), iou)\n\n            assert torch.equal(r_cpu, r_cuda.cpu()), err_msg.format(iou)\n\n\nif __name__ == \"__main__\":\n    unittest.main()\n"
  },
  {
    "path": "tests/test_roi_align.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport numpy as np\nimport unittest\nimport cv2\nimport torch\n\nfrom detectron2.layers.roi_align import ROIAlign\n\n\nclass ROIAlignTest(unittest.TestCase):\n    def test_forward_output(self):\n        input = np.arange(25).reshape(5, 5).astype(\"float32\")\n        \"\"\"\n        0  1  2   3 4\n        5  6  7   8 9\n        10 11 12 13 14\n        15 16 17 18 19\n        20 21 22 23 24\n        \"\"\"\n\n        output = self._simple_roialign(input, [1, 1, 3, 3], (4, 4), aligned=False)\n        output_correct = self._simple_roialign(input, [1, 1, 3, 3], (4, 4), aligned=True)\n\n        # without correction:\n        old_results = [\n            [7.5, 8, 8.5, 9],\n            [10, 10.5, 11, 11.5],\n            [12.5, 13, 13.5, 14],\n            [15, 15.5, 16, 16.5],\n        ]\n\n        # with 0.5 correction:\n        correct_results = [\n            [4.5, 5.0, 5.5, 6.0],\n            [7.0, 7.5, 8.0, 8.5],\n            [9.5, 10.0, 10.5, 11.0],\n            [12.0, 12.5, 13.0, 13.5],\n        ]\n        # This is an upsampled version of [[6, 7], [11, 12]]\n\n        self.assertTrue(np.allclose(output.flatten(), np.asarray(old_results).flatten()))\n        self.assertTrue(\n            np.allclose(output_correct.flatten(), np.asarray(correct_results).flatten())\n        )\n\n        # Also see similar issues in tensorflow at\n        # https://github.com/tensorflow/tensorflow/issues/26278\n\n    def test_resize(self):\n        H, W = 30, 30\n        input = np.random.rand(H, W).astype(\"float32\") * 100\n        box = [10, 10, 20, 20]\n        output = self._simple_roialign(input, box, (5, 5), aligned=True)\n\n        input2x = cv2.resize(input, (W // 2, H // 2), interpolation=cv2.INTER_LINEAR)\n        box2x = [x / 2 for x in box]\n        output2x = self._simple_roialign(input2x, box2x, (5, 5), aligned=True)\n        diff = np.abs(output2x - output)\n        
self.assertTrue(diff.max() < 1e-4)\n\n    def _simple_roialign(self, img, box, resolution, aligned=True):\n        \"\"\"\n        RoiAlign with scale 1.0 and 0 sample ratio.\n        \"\"\"\n        if isinstance(resolution, int):\n            resolution = (resolution, resolution)\n        op = ROIAlign(resolution, 1.0, 0, aligned=aligned)\n        input = torch.from_numpy(img[None, None, :, :].astype(\"float32\"))\n\n        rois = [0] + list(box)\n        rois = torch.from_numpy(np.asarray(rois)[None, :].astype(\"float32\"))\n        output = op.forward(input, rois)\n        if torch.cuda.is_available():\n            output_cuda = op.forward(input.cuda(), rois.cuda()).cpu()\n            self.assertTrue(torch.allclose(output, output_cuda))\n        return output[0, 0]\n\n    def _simple_roialign_with_grad(self, img, box, resolution, device):\n        if isinstance(resolution, int):\n            resolution = (resolution, resolution)\n\n        op = ROIAlign(resolution, 1.0, 0, aligned=True)\n        input = torch.from_numpy(img[None, None, :, :].astype(\"float32\"))\n\n        rois = [0] + list(box)\n        rois = torch.from_numpy(np.asarray(rois)[None, :].astype(\"float32\"))\n        input = input.to(device=device)\n        rois = rois.to(device=device)\n        input.requires_grad = True\n        output = op.forward(input, rois)\n        return input, output\n\n    def test_empty_box(self):\n        img = np.random.rand(5, 5)\n        box = [3, 4, 5, 4]\n        o = self._simple_roialign(img, box, 7)\n        self.assertTrue(o.shape == (7, 7))\n        self.assertTrue((o == 0).all())\n\n        for dev in [\"cpu\"] + [\"cuda\"] if torch.cuda.is_available() else []:\n            input, output = self._simple_roialign_with_grad(img, box, 7, torch.device(dev))\n            output.sum().backward()\n            self.assertTrue(torch.allclose(input.grad, torch.zeros_like(input)))\n\n    def test_empty_batch(self):\n        input = torch.zeros(0, 3, 10, 10, 
dtype=torch.float32)\n        rois = torch.zeros(0, 5, dtype=torch.float32)\n        op = ROIAlign((7, 7), 1.0, 0, aligned=True)\n        output = op.forward(input, rois)\n        self.assertTrue(output.shape == (0, 3, 7, 7))\n\n\nif __name__ == \"__main__\":\n    unittest.main()\n"
  },
  {
    "path": "tests/test_roi_align_rotated.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport logging\nimport unittest\nimport cv2\nimport torch\nfrom torch.autograd import Variable, gradcheck\n\nfrom detectron2.layers.roi_align import ROIAlign\nfrom detectron2.layers.roi_align_rotated import ROIAlignRotated\n\nlogger = logging.getLogger(__name__)\n\n\nclass ROIAlignRotatedTest(unittest.TestCase):\n    def _box_to_rotated_box(self, box, angle):\n        return [\n            (box[0] + box[2]) / 2.0,\n            (box[1] + box[3]) / 2.0,\n            box[2] - box[0],\n            box[3] - box[1],\n            angle,\n        ]\n\n    def _rot90(self, img, num):\n        num = num % 4  # note: -1 % 4 == 3\n        for _ in range(num):\n            img = img.transpose(0, 1).flip(0)\n        return img\n\n    def test_forward_output_0_90_180_270(self):\n        for i in range(4):\n            # i = 0, 1, 2, 3 corresponding to 0, 90, 180, 270 degrees\n            img = torch.arange(25, dtype=torch.float32).reshape(5, 5)\n            \"\"\"\n            0  1  2   3 4\n            5  6  7   8 9\n            10 11 12 13 14\n            15 16 17 18 19\n            20 21 22 23 24\n            \"\"\"\n            box = [1, 1, 3, 3]\n            rotated_box = self._box_to_rotated_box(box=box, angle=90 * i)\n\n            result = self._simple_roi_align_rotated(img=img, box=rotated_box, resolution=(4, 4))\n\n            # Here's an explanation for 0 degree case:\n            # point 0 in the original input lies at [0.5, 0.5]\n            # (the center of bin [0, 1] x [0, 1])\n            # point 1 in the original input lies at [1.5, 0.5], etc.\n            # since the resolution is (4, 4) that divides [1, 3] x [1, 3]\n            # into 4 x 4 equal bins,\n            # the top-left bin is [1, 1.5] x [1, 1.5], and its center\n            # (1.25, 1.25) lies at the 3/4 position\n            # between point 0 and point 1, point 5 and point 6,\n            # point 0 and point 5, 
point 1 and point 6, so it can be calculated as\n            # 0.25*(0*0.25+1*0.75)+(5*0.25+6*0.75)*0.75 = 4.5\n            result_expected = torch.tensor(\n                [\n                    [4.5, 5.0, 5.5, 6.0],\n                    [7.0, 7.5, 8.0, 8.5],\n                    [9.5, 10.0, 10.5, 11.0],\n                    [12.0, 12.5, 13.0, 13.5],\n                ]\n            )\n            # This is also an upsampled version of [[6, 7], [11, 12]]\n\n            # When the box is rotated by 90 degrees CCW,\n            # the result would be rotated by 90 degrees CW, thus it's -i here\n            result_expected = self._rot90(result_expected, -i)\n\n            assert torch.allclose(result, result_expected)\n\n    def test_resize(self):\n        H, W = 30, 30\n        input = torch.rand(H, W) * 100\n        box = [10, 10, 20, 20]\n        rotated_box = self._box_to_rotated_box(box, angle=0)\n        output = self._simple_roi_align_rotated(img=input, box=rotated_box, resolution=(5, 5))\n\n        input2x = cv2.resize(input.numpy(), (W // 2, H // 2), interpolation=cv2.INTER_LINEAR)\n        input2x = torch.from_numpy(input2x)\n        box2x = [x / 2 for x in box]\n        rotated_box2x = self._box_to_rotated_box(box2x, angle=0)\n        output2x = self._simple_roi_align_rotated(img=input2x, box=rotated_box2x, resolution=(5, 5))\n        assert torch.allclose(output2x, output)\n\n    def _simple_roi_align_rotated(self, img, box, resolution):\n        \"\"\"\n        RoiAlignRotated with scale 1.0 and 0 sample ratio.\n        \"\"\"\n        op = ROIAlignRotated(output_size=resolution, spatial_scale=1.0, sampling_ratio=0)\n        input = img[None, None, :, :]\n\n        rois = [0] + list(box)\n        rois = torch.tensor(rois, dtype=torch.float32)[None, :]\n        result_cpu = op.forward(input, rois)\n        if torch.cuda.is_available():\n            result_cuda = op.forward(input.cuda(), rois.cuda())\n            assert torch.allclose(result_cpu, 
result_cuda.cpu())\n        return result_cpu[0, 0]\n\n    def test_empty_box(self):\n        img = torch.rand(5, 5)\n        out = self._simple_roi_align_rotated(img, [2, 3, 0, 0, 0], (7, 7))\n        self.assertTrue((out == 0).all())\n\n    def test_roi_align_rotated_gradcheck_cpu(self):\n        dtype = torch.float64\n        device = torch.device(\"cpu\")\n        roi_align_rotated_op = ROIAlignRotated(\n            output_size=(5, 5), spatial_scale=0.5, sampling_ratio=1\n        ).to(dtype=dtype, device=device)\n        x = torch.rand(1, 1, 10, 10, dtype=dtype, device=device, requires_grad=True)\n        # roi format is (batch index, x_center, y_center, width, height, angle)\n        rois = torch.tensor(\n            [[0, 4.5, 4.5, 9, 9, 0], [0, 2, 7, 4, 4, 0], [0, 7, 7, 4, 4, 0]],\n            dtype=dtype,\n            device=device,\n        )\n\n        def func(input):\n            return roi_align_rotated_op(input, rois)\n\n        assert gradcheck(func, (x,)), \"gradcheck failed for RoIAlignRotated CPU\"\n        assert gradcheck(func, (x.transpose(2, 3),)), \"gradcheck failed for RoIAlignRotated CPU\"\n\n    @unittest.skipIf(not torch.cuda.is_available(), \"CUDA unavailable\")\n    def test_roi_align_rotated_gradient_cuda(self):\n        \"\"\"\n        Compute gradients for ROIAlignRotated with multiple bounding boxes on the GPU,\n        and compare the result with ROIAlign\n        \"\"\"\n        # torch.manual_seed(123)\n        dtype = torch.float64\n        device = torch.device(\"cuda\")\n        pool_h, pool_w = (5, 5)\n\n        roi_align = ROIAlign(output_size=(pool_h, pool_w), spatial_scale=1, sampling_ratio=2).to(\n            device=device\n        )\n\n        roi_align_rotated = ROIAlignRotated(\n            output_size=(pool_h, pool_w), spatial_scale=1, sampling_ratio=2\n        ).to(device=device)\n\n        x = torch.rand(1, 1, 10, 10, dtype=dtype, device=device, requires_grad=True)\n        # x_rotated = x.clone() won't work (will 
lead to grad_fun=CloneBackward)!\n        x_rotated = Variable(x.data.clone(), requires_grad=True)\n\n        # roi_rotated format is (batch index, x_center, y_center, width, height, angle)\n        rois_rotated = torch.tensor(\n            [[0, 4.5, 4.5, 9, 9, 0], [0, 2, 7, 4, 4, 0], [0, 7, 7, 4, 4, 0]],\n            dtype=dtype,\n            device=device,\n        )\n\n        y_rotated = roi_align_rotated(x_rotated, rois_rotated)\n        s_rotated = y_rotated.sum()\n        s_rotated.backward()\n\n        # roi format is (batch index, x1, y1, x2, y2)\n        rois = torch.tensor(\n            [[0, 0, 0, 9, 9], [0, 0, 5, 4, 9], [0, 5, 5, 9, 9]], dtype=dtype, device=device\n        )\n\n        y = roi_align(x, rois)\n        s = y.sum()\n        s.backward()\n\n        assert torch.allclose(\n            x.grad, x_rotated.grad\n        ), \"gradients for ROIAlign and ROIAlignRotated mismatch on CUDA\"\n\n\nif __name__ == \"__main__\":\n    unittest.main()\n"
  },
  {
    "path": "tests/test_roi_heads.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport logging\nimport unittest\nimport torch\n\nfrom detectron2.config import get_cfg\nfrom detectron2.modeling.backbone import build_backbone\nfrom detectron2.modeling.proposal_generator.build import build_proposal_generator\nfrom detectron2.modeling.roi_heads import build_roi_heads\nfrom detectron2.structures import Boxes, ImageList, Instances, RotatedBoxes\nfrom detectron2.utils.events import EventStorage\n\nlogger = logging.getLogger(__name__)\n\n\nclass ROIHeadsTest(unittest.TestCase):\n    def test_roi_heads(self):\n        torch.manual_seed(121)\n        cfg = get_cfg()\n        cfg.MODEL.ROI_HEADS.NAME = \"StandardROIHeads\"\n        cfg.MODEL.ROI_BOX_HEAD.NAME = \"FastRCNNConvFCHead\"\n        cfg.MODEL.ROI_BOX_HEAD.NUM_FC = 2\n        cfg.MODEL.ROI_BOX_HEAD.POOLER_TYPE = \"ROIAlignV2\"\n        cfg.MODEL.ROI_BOX_HEAD.BBOX_REG_WEIGHTS = (10, 10, 5, 5)\n        backbone = build_backbone(cfg)\n        num_images = 2\n        images_tensor = torch.rand(num_images, 20, 30)\n        image_sizes = [(10, 10), (20, 30)]\n        images = ImageList(images_tensor, image_sizes)\n        num_channels = 1024\n        features = {\"res4\": torch.rand(num_images, num_channels, 1, 2)}\n\n        image_shape = (15, 15)\n        gt_boxes0 = torch.tensor([[1, 1, 3, 3], [2, 2, 6, 6]], dtype=torch.float32)\n        gt_instance0 = Instances(image_shape)\n        gt_instance0.gt_boxes = Boxes(gt_boxes0)\n        gt_instance0.gt_classes = torch.tensor([2, 1])\n        gt_boxes1 = torch.tensor([[1, 5, 2, 8], [7, 3, 10, 5]], dtype=torch.float32)\n        gt_instance1 = Instances(image_shape)\n        gt_instance1.gt_boxes = Boxes(gt_boxes1)\n        gt_instance1.gt_classes = torch.tensor([1, 2])\n        gt_instances = [gt_instance0, gt_instance1]\n\n        proposal_generator = build_proposal_generator(cfg, backbone.output_shape())\n        roi_heads = build_roi_heads(cfg, 
backbone.output_shape())\n\n        with EventStorage():  # capture events in a new storage to discard them\n            proposals, proposal_losses = proposal_generator(images, features, gt_instances)\n            _, detector_losses = roi_heads(images, features, proposals, gt_instances)\n\n        expected_losses = {\n            \"loss_cls\": torch.tensor(4.4236516953),\n            \"loss_box_reg\": torch.tensor(0.0091214813),\n        }\n        for name in expected_losses.keys():\n            assert torch.allclose(detector_losses[name], expected_losses[name])\n\n    def test_rroi_heads(self):\n        torch.manual_seed(121)\n        cfg = get_cfg()\n        cfg.MODEL.PROPOSAL_GENERATOR.NAME = \"RRPN\"\n        cfg.MODEL.ANCHOR_GENERATOR.NAME = \"RotatedAnchorGenerator\"\n        cfg.MODEL.ROI_HEADS.NAME = \"RROIHeads\"\n        cfg.MODEL.ROI_BOX_HEAD.NAME = \"FastRCNNConvFCHead\"\n        cfg.MODEL.ROI_BOX_HEAD.NUM_FC = 2\n        cfg.MODEL.RPN.BBOX_REG_WEIGHTS = (1, 1, 1, 1, 1)\n        cfg.MODEL.RPN.HEAD_NAME = \"StandardRPNHead\"\n        cfg.MODEL.ROI_BOX_HEAD.POOLER_TYPE = \"ROIAlignRotated\"\n        cfg.MODEL.ROI_BOX_HEAD.BBOX_REG_WEIGHTS = (10, 10, 5, 5, 1)\n        backbone = build_backbone(cfg)\n        num_images = 2\n        images_tensor = torch.rand(num_images, 20, 30)\n        image_sizes = [(10, 10), (20, 30)]\n        images = ImageList(images_tensor, image_sizes)\n        num_channels = 1024\n        features = {\"res4\": torch.rand(num_images, num_channels, 1, 2)}\n\n        image_shape = (15, 15)\n        gt_boxes0 = torch.tensor([[2, 2, 2, 2, 30], [4, 4, 4, 4, 0]], dtype=torch.float32)\n        gt_instance0 = Instances(image_shape)\n        gt_instance0.gt_boxes = RotatedBoxes(gt_boxes0)\n        gt_instance0.gt_classes = torch.tensor([2, 1])\n        gt_boxes1 = torch.tensor([[1.5, 5.5, 1, 3, 0], [8.5, 4, 3, 2, -50]], dtype=torch.float32)\n        gt_instance1 = Instances(image_shape)\n        gt_instance1.gt_boxes = 
RotatedBoxes(gt_boxes1)\n        gt_instance1.gt_classes = torch.tensor([1, 2])\n        gt_instances = [gt_instance0, gt_instance1]\n\n        proposal_generator = build_proposal_generator(cfg, backbone.output_shape())\n        roi_heads = build_roi_heads(cfg, backbone.output_shape())\n\n        with EventStorage():  # capture events in a new storage to discard them\n            proposals, proposal_losses = proposal_generator(images, features, gt_instances)\n            _, detector_losses = roi_heads(images, features, proposals, gt_instances)\n\n        expected_losses = {\n            \"loss_cls\": torch.tensor(4.381443977355957),\n            \"loss_box_reg\": torch.tensor(0.0011560433777049184),\n        }\n        for name in expected_losses.keys():\n            err_msg = \"detector_losses[{}] = {}, expected losses = {}\".format(\n                name, detector_losses[name], expected_losses[name]\n            )\n            assert torch.allclose(detector_losses[name], expected_losses[name]), err_msg\n\n\nif __name__ == \"__main__\":\n    unittest.main()\n"
  },
  {
    "path": "tests/test_roi_pooler.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport logging\nimport unittest\nimport torch\n\nfrom detectron2.modeling.poolers import ROIPooler\nfrom detectron2.structures import Boxes, RotatedBoxes\n\nlogger = logging.getLogger(__name__)\n\n\nclass TestROIPooler(unittest.TestCase):\n    def _rand_boxes(self, num_boxes, x_max, y_max):\n        coords = torch.rand(num_boxes, 4)\n        coords[:, 0] *= x_max\n        coords[:, 1] *= y_max\n        coords[:, 2] *= x_max\n        coords[:, 3] *= y_max\n        boxes = torch.zeros(num_boxes, 4)\n        boxes[:, 0] = torch.min(coords[:, 0], coords[:, 2])\n        boxes[:, 1] = torch.min(coords[:, 1], coords[:, 3])\n        boxes[:, 2] = torch.max(coords[:, 0], coords[:, 2])\n        boxes[:, 3] = torch.max(coords[:, 1], coords[:, 3])\n        return boxes\n\n    def _test_roialignv2_roialignrotated_match(self, device):\n        pooler_resolution = 14\n        canonical_level = 4\n        canonical_scale_factor = 2 ** canonical_level\n        pooler_scales = (1.0 / canonical_scale_factor,)\n        sampling_ratio = 0\n\n        N, C, H, W = 2, 4, 10, 8\n        N_rois = 10\n        std = 11\n        mean = 0\n        feature = (torch.rand(N, C, H, W) - 0.5) * 2 * std + mean\n\n        features = [feature.to(device)]\n\n        rois = []\n        rois_rotated = []\n        for _ in range(N):\n            boxes = self._rand_boxes(\n                num_boxes=N_rois, x_max=W * canonical_scale_factor, y_max=H * canonical_scale_factor\n            )\n\n            rotated_boxes = torch.zeros(N_rois, 5)\n            rotated_boxes[:, 0] = (boxes[:, 0] + boxes[:, 2]) / 2.0\n            rotated_boxes[:, 1] = (boxes[:, 1] + boxes[:, 3]) / 2.0\n            rotated_boxes[:, 2] = boxes[:, 2] - boxes[:, 0]\n            rotated_boxes[:, 3] = boxes[:, 3] - boxes[:, 1]\n            rois.append(Boxes(boxes).to(device))\n            rois_rotated.append(RotatedBoxes(rotated_boxes).to(device))\n\n   
     roialignv2_pooler = ROIPooler(\n            output_size=pooler_resolution,\n            scales=pooler_scales,\n            sampling_ratio=sampling_ratio,\n            pooler_type=\"ROIAlignV2\",\n        )\n\n        roialignv2_out = roialignv2_pooler(features, rois)\n\n        roialignrotated_pooler = ROIPooler(\n            output_size=pooler_resolution,\n            scales=pooler_scales,\n            sampling_ratio=sampling_ratio,\n            pooler_type=\"ROIAlignRotated\",\n        )\n\n        roialignrotated_out = roialignrotated_pooler(features, rois_rotated)\n\n        assert torch.allclose(roialignv2_out, roialignrotated_out, atol=1e-4)\n\n    def test_roialignv2_roialignrotated_match_cpu(self):\n        self._test_roialignv2_roialignrotated_match(device=\"cpu\")\n\n    @unittest.skipIf(not torch.cuda.is_available(), \"CUDA unavailable\")\n    def test_roialignv2_roialignrotated_match_cuda(self):\n        self._test_roialignv2_roialignrotated_match(device=\"cuda\")\n\n\nif __name__ == \"__main__\":\n    unittest.main()\n"
  },
  {
    "path": "tests/test_rotated_boxes.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nfrom __future__ import absolute_import, division, print_function, unicode_literals\nimport logging\nimport math\nimport random\nimport unittest\nimport torch\nfrom fvcore.common.benchmark import benchmark\n\nfrom detectron2.layers.rotated_boxes import pairwise_iou_rotated\nfrom detectron2.structures.boxes import Boxes\nfrom detectron2.structures.rotated_boxes import RotatedBoxes, pairwise_iou\n\nlogger = logging.getLogger(__name__)\n\n\nclass TestRotatedBoxesLayer(unittest.TestCase):\n    def test_iou_0_dim_cpu(self):\n        boxes1 = torch.rand(0, 5, dtype=torch.float32)\n        boxes2 = torch.rand(10, 5, dtype=torch.float32)\n        expected_ious = torch.zeros(0, 10, dtype=torch.float32)\n        ious = pairwise_iou_rotated(boxes1, boxes2)\n        assert torch.allclose(ious, expected_ious)\n\n        boxes1 = torch.rand(10, 5, dtype=torch.float32)\n        boxes2 = torch.rand(0, 5, dtype=torch.float32)\n        expected_ious = torch.zeros(10, 0, dtype=torch.float32)\n        ious = pairwise_iou_rotated(boxes1, boxes2)\n        assert torch.allclose(ious, expected_ious)\n\n    @unittest.skipIf(not torch.cuda.is_available(), \"CUDA not available\")\n    def test_iou_0_dim_cuda(self):\n        boxes1 = torch.rand(0, 5, dtype=torch.float32)\n        boxes2 = torch.rand(10, 5, dtype=torch.float32)\n        expected_ious = torch.zeros(0, 10, dtype=torch.float32)\n        ious_cuda = pairwise_iou_rotated(boxes1.cuda(), boxes2.cuda())\n        assert torch.allclose(ious_cuda.cpu(), expected_ious)\n\n        boxes1 = torch.rand(10, 5, dtype=torch.float32)\n        boxes2 = torch.rand(0, 5, dtype=torch.float32)\n        expected_ious = torch.zeros(10, 0, dtype=torch.float32)\n        ious_cuda = pairwise_iou_rotated(boxes1.cuda(), boxes2.cuda())\n        assert torch.allclose(ious_cuda.cpu(), expected_ious)\n\n    def test_iou_half_overlap_cpu(self):\n        boxes1 = 
torch.tensor([[0.5, 0.5, 1.0, 1.0, 0.0]], dtype=torch.float32)\n        boxes2 = torch.tensor([[0.25, 0.5, 0.5, 1.0, 0.0]], dtype=torch.float32)\n        expected_ious = torch.tensor([[0.5]], dtype=torch.float32)\n        ious = pairwise_iou_rotated(boxes1, boxes2)\n        assert torch.allclose(ious, expected_ious)\n\n    @unittest.skipIf(not torch.cuda.is_available(), \"CUDA not available\")\n    def test_iou_half_overlap_cuda(self):\n        boxes1 = torch.tensor([[0.5, 0.5, 1.0, 1.0, 0.0]], dtype=torch.float32)\n        boxes2 = torch.tensor([[0.25, 0.5, 0.5, 1.0, 0.0]], dtype=torch.float32)\n        expected_ious = torch.tensor([[0.5]], dtype=torch.float32)\n        ious_cuda = pairwise_iou_rotated(boxes1.cuda(), boxes2.cuda())\n        assert torch.allclose(ious_cuda.cpu(), expected_ious)\n\n    def test_iou_0_degree_cpu(self):\n        boxes1 = torch.tensor(\n            [[0.5, 0.5, 1.0, 1.0, 0.0], [0.5, 0.5, 1.0, 1.0, 0.0]], dtype=torch.float32\n        )\n        boxes2 = torch.tensor(\n            [\n                [0.5, 0.5, 1.0, 1.0, 0.0],\n                [0.25, 0.5, 0.5, 1.0, 0.0],\n                [0.5, 0.25, 1.0, 0.5, 0.0],\n                [0.25, 0.25, 0.5, 0.5, 0.0],\n                [0.75, 0.75, 0.5, 0.5, 0.0],\n                [1.0, 1.0, 1.0, 1.0, 0.0],\n            ],\n            dtype=torch.float32,\n        )\n        expected_ious = torch.tensor(\n            [\n                [1.0, 0.5, 0.5, 0.25, 0.25, 0.25 / (2 - 0.25)],\n                [1.0, 0.5, 0.5, 0.25, 0.25, 0.25 / (2 - 0.25)],\n            ],\n            dtype=torch.float32,\n        )\n        ious = pairwise_iou_rotated(boxes1, boxes2)\n        assert torch.allclose(ious, expected_ious)\n\n    @unittest.skipIf(not torch.cuda.is_available(), \"CUDA not available\")\n    def test_iou_0_degree_cuda(self):\n        boxes1 = torch.tensor(\n            [[0.5, 0.5, 1.0, 1.0, 0.0], [0.5, 0.5, 1.0, 1.0, 0.0]], dtype=torch.float32\n        )\n        boxes2 = torch.tensor(\n           
 [\n                [0.5, 0.5, 1.0, 1.0, 0.0],\n                [0.25, 0.5, 0.5, 1.0, 0.0],\n                [0.5, 0.25, 1.0, 0.5, 0.0],\n                [0.25, 0.25, 0.5, 0.5, 0.0],\n                [0.75, 0.75, 0.5, 0.5, 0.0],\n                [1.0, 1.0, 1.0, 1.0, 0.0],\n            ],\n            dtype=torch.float32,\n        )\n        expected_ious = torch.tensor(\n            [\n                [1.0, 0.5, 0.5, 0.25, 0.25, 0.25 / (2 - 0.25)],\n                [1.0, 0.5, 0.5, 0.25, 0.25, 0.25 / (2 - 0.25)],\n            ],\n            dtype=torch.float32,\n        )\n        ious_cuda = pairwise_iou_rotated(boxes1.cuda(), boxes2.cuda())\n        assert torch.allclose(ious_cuda.cpu(), expected_ious)\n\n    def test_iou_45_degrees_cpu(self):\n        boxes1 = torch.tensor(\n            [\n                [1, 1, math.sqrt(2), math.sqrt(2), 45],\n                [1, 1, 2 * math.sqrt(2), 2 * math.sqrt(2), -45],\n            ],\n            dtype=torch.float32,\n        )\n        boxes2 = torch.tensor([[1, 1, 2, 2, 0]], dtype=torch.float32)\n        expected_ious = torch.tensor([[0.5], [0.5]], dtype=torch.float32)\n        ious = pairwise_iou_rotated(boxes1, boxes2)\n        assert torch.allclose(ious, expected_ious)\n\n    @unittest.skipIf(not torch.cuda.is_available(), \"CUDA not available\")\n    def test_iou_45_degrees_cuda(self):\n        boxes1 = torch.tensor(\n            [\n                [1, 1, math.sqrt(2), math.sqrt(2), 45],\n                [1, 1, 2 * math.sqrt(2), 2 * math.sqrt(2), -45],\n            ],\n            dtype=torch.float32,\n        )\n        boxes2 = torch.tensor([[1, 1, 2, 2, 0]], dtype=torch.float32)\n        expected_ious = torch.tensor([[0.5], [0.5]], dtype=torch.float32)\n        ious_cuda = pairwise_iou_rotated(boxes1.cuda(), boxes2.cuda())\n        assert torch.allclose(ious_cuda.cpu(), expected_ious)\n\n    def test_iou_perpendicular_cpu(self):\n        boxes1 = torch.tensor([[5, 5, 10.0, 6, 55]], dtype=torch.float32)\n        
boxes2 = torch.tensor([[5, 5, 10.0, 6, -35]], dtype=torch.float32)\n        iou = (6.0 * 6.0) / (6.0 * 6.0 + 4.0 * 6.0 + 4.0 * 6.0)\n        expected_ious = torch.tensor([[iou]], dtype=torch.float32)\n        ious = pairwise_iou_rotated(boxes1, boxes2)\n        assert torch.allclose(ious, expected_ious)\n\n    @unittest.skipIf(not torch.cuda.is_available(), \"CUDA unavailable\")\n    def test_iou_perpendicular_cuda(self):\n        boxes1 = torch.tensor([[5, 5, 10.0, 6, 55]], dtype=torch.float32)\n        boxes2 = torch.tensor([[5, 5, 10.0, 6, -35]], dtype=torch.float32)\n        iou = (6.0 * 6.0) / (6.0 * 6.0 + 4.0 * 6.0 + 4.0 * 6.0)\n        expected_ious = torch.tensor([[iou]], dtype=torch.float32)\n        ious_cuda = pairwise_iou_rotated(boxes1.cuda(), boxes2.cuda())\n        assert torch.allclose(ious_cuda.cpu(), expected_ious)\n\n    def test_iou_large_close_boxes_cpu(self):\n        boxes1 = torch.tensor(\n            [[299.500000, 417.370422, 600.000000, 364.259186, 27.1828]], dtype=torch.float32\n        )\n        boxes2 = torch.tensor(\n            [[299.500000, 417.370422, 600.000000, 364.259155, 27.1828]], dtype=torch.float32\n        )\n        iou = 364.259155 / 364.259186\n        expected_ious = torch.tensor([[iou]], dtype=torch.float32)\n        ious = pairwise_iou_rotated(boxes1, boxes2)\n        assert torch.allclose(ious, expected_ious)\n\n    @unittest.skipIf(not torch.cuda.is_available(), \"CUDA not available\")\n    def test_iou_large_close_boxes_cuda(self):\n        boxes1 = torch.tensor(\n            [[299.500000, 417.370422, 600.000000, 364.259186, 27.1828]], dtype=torch.float32\n        )\n        boxes2 = torch.tensor(\n            [[299.500000, 417.370422, 600.000000, 364.259155, 27.1828]], dtype=torch.float32\n        )\n        iou = 364.259155 / 364.259186\n        expected_ious = torch.tensor([[iou]], dtype=torch.float32)\n        ious_cuda = pairwise_iou_rotated(boxes1.cuda(), boxes2.cuda())\n        assert 
torch.allclose(ious_cuda.cpu(), expected_ious)\n\n    def test_iou_precision_cpu(self):\n        boxes1 = torch.tensor([[565, 565, 10, 10, 0]], dtype=torch.float32)\n        boxes2 = torch.tensor([[565, 565, 10, 8.3, 0]], dtype=torch.float32)\n        iou = 8.3 / 10.0\n        expected_ious = torch.tensor([[iou]], dtype=torch.float32)\n        ious = pairwise_iou_rotated(boxes1, boxes2)\n        assert torch.allclose(ious, expected_ious)\n\n    @unittest.skipIf(not torch.cuda.is_available(), \"CUDA not available\")\n    def test_iou_precision_cuda(self):\n        boxes1 = torch.tensor([[565, 565, 10, 10, 0]], dtype=torch.float32)\n        boxes2 = torch.tensor([[565, 565, 10, 8.3, 0]], dtype=torch.float32)\n        iou = 8.3 / 10.0\n        expected_ious = torch.tensor([[iou]], dtype=torch.float32)\n        ious_cuda = pairwise_iou_rotated(boxes1.cuda(), boxes2.cuda())\n        assert torch.allclose(ious_cuda.cpu(), expected_ious)\n\n    def test_iou_many_boxes_cpu(self):\n        num_boxes1 = 100\n        num_boxes2 = 200\n        boxes1 = torch.stack(\n            [\n                torch.tensor([5 + 20 * i, 5 + 20 * i, 10, 10, 0], dtype=torch.float32)\n                for i in range(num_boxes1)\n            ]\n        )\n        boxes2 = torch.stack(\n            [\n                torch.tensor(\n                    [5 + 20 * i, 5 + 20 * i, 10, 1 + 9 * i / num_boxes2, 0], dtype=torch.float32\n                )\n                for i in range(num_boxes2)\n            ]\n        )\n        expected_ious = torch.zeros(num_boxes1, num_boxes2, dtype=torch.float32)\n        for i in range(min(num_boxes1, num_boxes2)):\n            expected_ious[i][i] = (1 + 9 * i / num_boxes2) / 10.0\n        ious = pairwise_iou_rotated(boxes1, boxes2)\n        assert torch.allclose(ious, expected_ious)\n\n    @unittest.skipIf(not torch.cuda.is_available(), \"CUDA not available\")\n    def test_iou_many_boxes_cuda(self):\n        num_boxes1 = 100\n        num_boxes2 = 200\n        
boxes1 = torch.stack(\n            [\n                torch.tensor([5 + 20 * i, 5 + 20 * i, 10, 10, 0], dtype=torch.float32)\n                for i in range(num_boxes1)\n            ]\n        )\n        boxes2 = torch.stack(\n            [\n                torch.tensor(\n                    [5 + 20 * i, 5 + 20 * i, 10, 1 + 9 * i / num_boxes2, 0], dtype=torch.float32\n                )\n                for i in range(num_boxes2)\n            ]\n        )\n        expected_ious = torch.zeros(num_boxes1, num_boxes2, dtype=torch.float32)\n        for i in range(min(num_boxes1, num_boxes2)):\n            expected_ious[i][i] = (1 + 9 * i / num_boxes2) / 10.0\n        ious_cuda = pairwise_iou_rotated(boxes1.cuda(), boxes2.cuda())\n        assert torch.allclose(ious_cuda.cpu(), expected_ious)\n\n    @unittest.skipIf(not torch.cuda.is_available(), \"CUDA not available\")\n    def test_iou_too_many_boxes_cuda(self):\n        s1, s2 = 5, 1289035\n        boxes1 = torch.zeros(s1, 5)\n        boxes2 = torch.zeros(s2, 5)\n        ious_cuda = pairwise_iou_rotated(boxes1.cuda(), boxes2.cuda())\n        self.assertEqual(tuple(ious_cuda.shape), (s1, s2))\n\n\nclass TestRotatedBoxesStructure(unittest.TestCase):\n    def test_clip_area_0_degree(self):\n        for _ in range(50):\n            num_boxes = 100\n            boxes_5d = torch.zeros(num_boxes, 5)\n            boxes_5d[:, 0] = torch.FloatTensor(num_boxes).uniform_(-100, 500)\n            boxes_5d[:, 1] = torch.FloatTensor(num_boxes).uniform_(-100, 500)\n            boxes_5d[:, 2] = torch.FloatTensor(num_boxes).uniform_(0, 500)\n            boxes_5d[:, 3] = torch.FloatTensor(num_boxes).uniform_(0, 500)\n            # Convert from (x_ctr, y_ctr, w, h, 0) to  (x1, y1, x2, y2)\n            boxes_4d = torch.zeros(num_boxes, 4)\n            boxes_4d[:, 0] = boxes_5d[:, 0] - boxes_5d[:, 2] / 2.0\n            boxes_4d[:, 1] = boxes_5d[:, 1] - boxes_5d[:, 3] / 2.0\n            boxes_4d[:, 2] = boxes_5d[:, 0] + boxes_5d[:, 2] / 2.0\n  
          boxes_4d[:, 3] = boxes_5d[:, 1] + boxes_5d[:, 3] / 2.0\n\n            image_size = (500, 600)\n            test_boxes_4d = Boxes(boxes_4d)\n            test_boxes_5d = RotatedBoxes(boxes_5d)\n            # Before clip\n            areas_4d = test_boxes_4d.area()\n            areas_5d = test_boxes_5d.area()\n            assert torch.allclose(areas_4d, areas_5d, atol=1e-1, rtol=1e-5)\n            # After clip\n            test_boxes_4d.clip(image_size)\n            test_boxes_5d.clip(image_size)\n            areas_4d = test_boxes_4d.area()\n            areas_5d = test_boxes_5d.area()\n            assert torch.allclose(areas_4d, areas_5d, atol=1e-1, rtol=1e-5)\n\n    def test_clip_area_arbitrary_angle(self):\n        num_boxes = 100\n        boxes_5d = torch.zeros(num_boxes, 5)\n        boxes_5d[:, 0] = torch.FloatTensor(num_boxes).uniform_(-100, 500)\n        boxes_5d[:, 1] = torch.FloatTensor(num_boxes).uniform_(-100, 500)\n        boxes_5d[:, 2] = torch.FloatTensor(num_boxes).uniform_(0, 500)\n        boxes_5d[:, 3] = torch.FloatTensor(num_boxes).uniform_(0, 500)\n        boxes_5d[:, 4] = torch.FloatTensor(num_boxes).uniform_(-1800, 1800)\n        clip_angle_threshold = random.uniform(0, 180)\n\n        image_size = (500, 600)\n        test_boxes_5d = RotatedBoxes(boxes_5d)\n        # Before clip\n        areas_before = test_boxes_5d.area()\n        # After clip\n        test_boxes_5d.clip(image_size, clip_angle_threshold)\n        areas_diff = test_boxes_5d.area() - areas_before\n\n        # the areas should only decrease after clipping\n        assert torch.all(areas_diff <= 0)\n        # whenever the box is clipped (thus the area shrinks),\n        # the angle for the box must be within the clip_angle_threshold\n        # Note that the clip function will normalize the angle range\n        # to be within (-180, 180]\n        assert torch.all(\n            torch.abs(boxes_5d[:, 4][torch.where(areas_diff < 0)]) < clip_angle_threshold\n        )\n\n    def 
test_normalize_angles(self):\n        # torch.manual_seed(0)\n        for _ in range(50):\n            num_boxes = 100\n            boxes_5d = torch.zeros(num_boxes, 5)\n            boxes_5d[:, 0] = torch.FloatTensor(num_boxes).uniform_(-100, 500)\n            boxes_5d[:, 1] = torch.FloatTensor(num_boxes).uniform_(-100, 500)\n            boxes_5d[:, 2] = torch.FloatTensor(num_boxes).uniform_(0, 500)\n            boxes_5d[:, 3] = torch.FloatTensor(num_boxes).uniform_(0, 500)\n            boxes_5d[:, 4] = torch.FloatTensor(num_boxes).uniform_(-1800, 1800)\n            rotated_boxes = RotatedBoxes(boxes_5d)\n            normalized_boxes = rotated_boxes.clone()\n            normalized_boxes.normalize_angles()\n            assert torch.all(normalized_boxes.tensor[:, 4] >= -180)\n            assert torch.all(normalized_boxes.tensor[:, 4] < 180)\n            # x, y, w, h should not change\n            assert torch.allclose(boxes_5d[:, :4], normalized_boxes.tensor[:, :4])\n            # the cos/sin values of the angles should stay the same\n\n            assert torch.allclose(\n                torch.cos(boxes_5d[:, 4] * math.pi / 180),\n                torch.cos(normalized_boxes.tensor[:, 4] * math.pi / 180),\n                atol=1e-5,\n            )\n\n            assert torch.allclose(\n                torch.sin(boxes_5d[:, 4] * math.pi / 180),\n                torch.sin(normalized_boxes.tensor[:, 4] * math.pi / 180),\n                atol=1e-5,\n            )\n\n    def test_pairwise_iou_0_degree_cpu(self):\n        device = torch.device(\"cpu\")\n        boxes1 = torch.tensor(\n            [[0.5, 0.5, 1.0, 1.0, 0.0], [0.5, 0.5, 1.0, 1.0, 0.0]],\n            dtype=torch.float32,\n            device=device,\n        )\n        boxes2 = torch.tensor(\n            [\n                [0.5, 0.5, 1.0, 1.0, 0.0],\n                [0.25, 0.5, 0.5, 1.0, 0.0],\n                [0.5, 0.25, 1.0, 0.5, 0.0],\n                [0.25, 0.25, 0.5, 0.5, 0.0],\n                [0.75, 0.75, 
0.5, 0.5, 0.0],\n                [1.0, 1.0, 1.0, 1.0, 0.0],\n            ],\n            dtype=torch.float32,\n            device=device,\n        )\n        expected_ious = torch.tensor(\n            [\n                [1.0, 0.5, 0.5, 0.25, 0.25, 0.25 / (2 - 0.25)],\n                [1.0, 0.5, 0.5, 0.25, 0.25, 0.25 / (2 - 0.25)],\n            ],\n            dtype=torch.float32,\n            device=device,\n        )\n        ious = pairwise_iou(RotatedBoxes(boxes1), RotatedBoxes(boxes2))\n        assert torch.allclose(ious, expected_ious)\n\n    @unittest.skipIf(not torch.cuda.is_available(), \"CUDA not available\")\n    def test_pairwise_iou_0_degree_cuda(self):\n        device = torch.device(\"cuda\")\n        boxes1 = torch.tensor(\n            [[0.5, 0.5, 1.0, 1.0, 0.0], [0.5, 0.5, 1.0, 1.0, 0.0]],\n            dtype=torch.float32,\n            device=device,\n        )\n        boxes2 = torch.tensor(\n            [\n                [0.5, 0.5, 1.0, 1.0, 0.0],\n                [0.25, 0.5, 0.5, 1.0, 0.0],\n                [0.5, 0.25, 1.0, 0.5, 0.0],\n                [0.25, 0.25, 0.5, 0.5, 0.0],\n                [0.75, 0.75, 0.5, 0.5, 0.0],\n                [1.0, 1.0, 1.0, 1.0, 0.0],\n            ],\n            dtype=torch.float32,\n            device=device,\n        )\n        expected_ious = torch.tensor(\n            [\n                [1.0, 0.5, 0.5, 0.25, 0.25, 0.25 / (2 - 0.25)],\n                [1.0, 0.5, 0.5, 0.25, 0.25, 0.25 / (2 - 0.25)],\n            ],\n            dtype=torch.float32,\n            device=device,\n        )\n        ious = pairwise_iou(RotatedBoxes(boxes1), RotatedBoxes(boxes2))\n        assert torch.allclose(ious, expected_ious)\n\n    def test_pairwise_iou_45_degrees_cpu(self):\n        device = torch.device(\"cpu\")\n        boxes1 = torch.tensor(\n            [\n                [1, 1, math.sqrt(2), math.sqrt(2), 45],\n                [1, 1, 2 * math.sqrt(2), 2 * math.sqrt(2), -45],\n            ],\n            
dtype=torch.float32,\n            device=device,\n        )\n        boxes2 = torch.tensor([[1, 1, 2, 2, 0]], dtype=torch.float32, device=device)\n        expected_ious = torch.tensor([[0.5], [0.5]], dtype=torch.float32, device=device)\n        ious = pairwise_iou(RotatedBoxes(boxes1), RotatedBoxes(boxes2))\n        assert torch.allclose(ious, expected_ious)\n\n    @unittest.skipIf(not torch.cuda.is_available(), \"CUDA not available\")\n    def test_pairwise_iou_45_degrees_cuda(self):\n        device = torch.device(\"cuda\")\n        boxes1 = torch.tensor(\n            [\n                [1, 1, math.sqrt(2), math.sqrt(2), 45],\n                [1, 1, 2 * math.sqrt(2), 2 * math.sqrt(2), -45],\n            ],\n            dtype=torch.float32,\n            device=device,\n        )\n        boxes2 = torch.tensor([[1, 1, 2, 2, 0]], dtype=torch.float32, device=device)\n        expected_ious = torch.tensor([[0.5], [0.5]], dtype=torch.float32, device=device)\n        ious = pairwise_iou(RotatedBoxes(boxes1), RotatedBoxes(boxes2))\n        assert torch.allclose(ious, expected_ious)\n\n    def test_pairwise_iou_orthogonal_cpu(self):\n        device = torch.device(\"cpu\")\n        boxes1 = torch.tensor([[5, 5, 10, 6, 55]], dtype=torch.float32, device=device)\n        boxes2 = torch.tensor([[5, 5, 10, 6, -35]], dtype=torch.float32, device=device)\n        iou = (6.0 * 6.0) / (6.0 * 6.0 + 4.0 * 6.0 + 4.0 * 6.0)\n        expected_ious = torch.tensor([[iou]], dtype=torch.float32, device=device)\n        ious = pairwise_iou(RotatedBoxes(boxes1), RotatedBoxes(boxes2))\n        assert torch.allclose(ious, expected_ious)\n\n    @unittest.skipIf(not torch.cuda.is_available(), \"CUDA not available\")\n    def test_pairwise_iou_orthogonal_cuda(self):\n        device = torch.device(\"cuda\")\n        boxes1 = torch.tensor([[5, 5, 10, 6, 55]], dtype=torch.float32, device=device)\n        boxes2 = torch.tensor([[5, 5, 10, 6, -35]], dtype=torch.float32, device=device)\n        iou = (6.0 
* 6.0) / (6.0 * 6.0 + 4.0 * 6.0 + 4.0 * 6.0)\n        expected_ious = torch.tensor([[iou]], dtype=torch.float32, device=device)\n        ious = pairwise_iou(RotatedBoxes(boxes1), RotatedBoxes(boxes2))\n        assert torch.allclose(ious, expected_ious)\n\n    def test_pairwise_iou_large_close_boxes_cpu(self):\n        device = torch.device(\"cpu\")\n        boxes1 = torch.tensor(\n            [[299.500000, 417.370422, 600.000000, 364.259186, 27.1828]],\n            dtype=torch.float32,\n            device=device,\n        )\n        boxes2 = torch.tensor(\n            [[299.500000, 417.370422, 600.000000, 364.259155, 27.1828]],\n            dtype=torch.float32,\n            device=device,\n        )\n        iou = 364.259155 / 364.259186\n        expected_ious = torch.tensor([[iou]], dtype=torch.float32, device=device)\n        ious = pairwise_iou(RotatedBoxes(boxes1), RotatedBoxes(boxes2))\n        assert torch.allclose(ious, expected_ious)\n\n    @unittest.skipIf(not torch.cuda.is_available(), \"CUDA not available\")\n    def test_pairwise_iou_large_close_boxes_cuda(self):\n        device = torch.device(\"cuda\")\n        boxes1 = torch.tensor(\n            [[299.500000, 417.370422, 600.000000, 364.259186, 27.1828]],\n            dtype=torch.float32,\n            device=device,\n        )\n        boxes2 = torch.tensor(\n            [[299.500000, 417.370422, 600.000000, 364.259155, 27.1828]],\n            dtype=torch.float32,\n            device=device,\n        )\n        iou = 364.259155 / 364.259186\n        expected_ious = torch.tensor([[iou]], dtype=torch.float32, device=device)\n        ious = pairwise_iou(RotatedBoxes(boxes1), RotatedBoxes(boxes2))\n        assert torch.allclose(ious, expected_ious)\n\n    def test_pairwise_iou_many_boxes_cpu(self):\n        device = torch.device(\"cpu\")\n        num_boxes1 = 100\n        num_boxes2 = 200\n        boxes1 = torch.stack(\n            [\n                torch.tensor(\n                    [5 + 20 * i, 5 + 20 
* i, 10, 10, 0], dtype=torch.float32, device=device\n                )\n                for i in range(num_boxes1)\n            ]\n        )\n        boxes2 = torch.stack(\n            [\n                torch.tensor(\n                    [5 + 20 * i, 5 + 20 * i, 10, 1 + 9 * i / num_boxes2, 0],\n                    dtype=torch.float32,\n                    device=device,\n                )\n                for i in range(num_boxes2)\n            ]\n        )\n        expected_ious = torch.zeros(num_boxes1, num_boxes2, dtype=torch.float32, device=device)\n        for i in range(min(num_boxes1, num_boxes2)):\n            expected_ious[i][i] = (1 + 9 * i / num_boxes2) / 10.0\n        ious = pairwise_iou(RotatedBoxes(boxes1), RotatedBoxes(boxes2))\n        assert torch.allclose(ious, expected_ious)\n\n    @unittest.skipIf(not torch.cuda.is_available(), \"CUDA not available\")\n    def test_pairwise_iou_many_boxes_cuda(self):\n        device = torch.device(\"cuda\")\n        num_boxes1 = 100\n        num_boxes2 = 200\n        boxes1 = torch.stack(\n            [\n                torch.tensor(\n                    [5 + 20 * i, 5 + 20 * i, 10, 10, 0], dtype=torch.float32, device=device\n                )\n                for i in range(num_boxes1)\n            ]\n        )\n        boxes2 = torch.stack(\n            [\n                torch.tensor(\n                    [5 + 20 * i, 5 + 20 * i, 10, 1 + 9 * i / num_boxes2, 0],\n                    dtype=torch.float32,\n                    device=device,\n                )\n                for i in range(num_boxes2)\n            ]\n        )\n        expected_ious = torch.zeros(num_boxes1, num_boxes2, dtype=torch.float32, device=device)\n        for i in range(min(num_boxes1, num_boxes2)):\n            expected_ious[i][i] = (1 + 9 * i / num_boxes2) / 10.0\n        ious = pairwise_iou(RotatedBoxes(boxes1), RotatedBoxes(boxes2))\n        assert torch.allclose(ious, expected_ious)\n\n\ndef benchmark_rotated_iou():\n    
num_boxes1 = 200\n    num_boxes2 = 500\n    boxes1 = torch.stack(\n        [\n            torch.tensor([5 + 20 * i, 5 + 20 * i, 10, 10, 0], dtype=torch.float32)\n            for i in range(num_boxes1)\n        ]\n    )\n    boxes2 = torch.stack(\n        [\n            torch.tensor(\n                [5 + 20 * i, 5 + 20 * i, 10, 1 + 9 * i / num_boxes2, 0], dtype=torch.float32\n            )\n            for i in range(num_boxes2)\n        ]\n    )\n\n    def func(dev, n=1):\n        b1 = boxes1.to(device=dev)\n        b2 = boxes2.to(device=dev)\n\n        def bench():\n            for _ in range(n):\n                pairwise_iou_rotated(b1, b2)\n            if dev.type == \"cuda\":\n                torch.cuda.synchronize()\n\n        return bench\n\n    # only run it once per timed loop, since it's slow\n    args = [{\"dev\": torch.device(\"cpu\"), \"n\": 1}]\n    if torch.cuda.is_available():\n        args.append({\"dev\": torch.device(\"cuda\"), \"n\": 10})\n\n    benchmark(func, \"rotated_iou\", args, warmup_iters=3)\n\n\nif __name__ == \"__main__\":\n    unittest.main()\n    benchmark_rotated_iou()\n"
  },
  {
    "path": "tests/test_rpn.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\nimport logging\nimport unittest\nimport torch\n\nfrom detectron2.config import get_cfg\nfrom detectron2.modeling.backbone import build_backbone\nfrom detectron2.modeling.proposal_generator.build import build_proposal_generator\nfrom detectron2.structures import Boxes, ImageList, Instances, RotatedBoxes\nfrom detectron2.utils.events import EventStorage\n\nlogger = logging.getLogger(__name__)\n\n\nclass RPNTest(unittest.TestCase):\n    def test_rpn(self):\n        torch.manual_seed(121)\n        cfg = get_cfg()\n        cfg.MODEL.PROPOSAL_GENERATOR.NAME = \"RPN\"\n        cfg.MODEL.ANCHOR_GENERATOR.NAME = \"DefaultAnchorGenerator\"\n        cfg.MODEL.RPN.BBOX_REG_WEIGHTS = (1, 1, 1, 1)\n        backbone = build_backbone(cfg)\n        proposal_generator = build_proposal_generator(cfg, backbone.output_shape())\n        num_images = 2\n        images_tensor = torch.rand(num_images, 20, 30)\n        image_sizes = [(10, 10), (20, 30)]\n        images = ImageList(images_tensor, image_sizes)\n        image_shape = (15, 15)\n        num_channels = 1024\n        features = {\"res4\": torch.rand(num_images, num_channels, 1, 2)}\n        gt_boxes = torch.tensor([[1, 1, 3, 3], [2, 2, 6, 6]], dtype=torch.float32)\n        gt_instances = Instances(image_shape)\n        gt_instances.gt_boxes = Boxes(gt_boxes)\n        with EventStorage():  # capture events in a new storage to discard them\n            proposals, proposal_losses = proposal_generator(\n                images, features, [gt_instances[0], gt_instances[1]]\n            )\n\n        expected_losses = {\n            \"loss_rpn_cls\": torch.tensor(0.0804563984),\n            \"loss_rpn_loc\": torch.tensor(0.0990132466),\n        }\n        for name in expected_losses.keys():\n            self.assertTrue(torch.allclose(proposal_losses[name], expected_losses[name]))\n\n        expected_proposal_boxes = [\n            
Boxes(torch.tensor([[0, 0, 10, 10], [7.3365392685, 0, 10, 10]])),\n            Boxes(\n                torch.tensor(\n                    [\n                        [0, 0, 30, 20],\n                        [0, 0, 16.7862777710, 13.1362524033],\n                        [0, 0, 30, 13.3173446655],\n                        [0, 0, 10.8602609634, 20],\n                        [7.7165775299, 0, 27.3875980377, 20],\n                    ]\n                )\n            ),\n        ]\n\n        expected_objectness_logits = [\n            torch.tensor([0.1225359365, -0.0133192837]),\n            torch.tensor([0.1415634006, 0.0989848152, 0.0565387346, -0.0072308783, -0.0428492837]),\n        ]\n\n        for proposal, expected_proposal_box, im_size, expected_objectness_logit in zip(\n            proposals, expected_proposal_boxes, image_sizes, expected_objectness_logits\n        ):\n            self.assertEqual(len(proposal), len(expected_proposal_box))\n            self.assertEqual(proposal.image_size, im_size)\n            self.assertTrue(\n                torch.allclose(proposal.proposal_boxes.tensor, expected_proposal_box.tensor)\n            )\n            self.assertTrue(torch.allclose(proposal.objectness_logits, expected_objectness_logit))\n\n    def test_rrpn(self):\n        torch.manual_seed(121)\n        cfg = get_cfg()\n        cfg.MODEL.PROPOSAL_GENERATOR.NAME = \"RRPN\"\n        cfg.MODEL.ANCHOR_GENERATOR.NAME = \"RotatedAnchorGenerator\"\n        cfg.MODEL.ANCHOR_GENERATOR.SIZES = [[32, 64]]\n        cfg.MODEL.ANCHOR_GENERATOR.ASPECT_RATIOS = [[0.25, 1]]\n        cfg.MODEL.ANCHOR_GENERATOR.ANGLES = [[0, 60]]\n        cfg.MODEL.RPN.BBOX_REG_WEIGHTS = (1, 1, 1, 1, 1)\n        cfg.MODEL.RPN.HEAD_NAME = \"StandardRPNHead\"\n        backbone = build_backbone(cfg)\n        proposal_generator = build_proposal_generator(cfg, backbone.output_shape())\n        num_images = 2\n        images_tensor = torch.rand(num_images, 20, 30)\n        image_sizes = [(10, 10), (20, 
30)]\n        images = ImageList(images_tensor, image_sizes)\n        image_shape = (15, 15)\n        num_channels = 1024\n        features = {\"res4\": torch.rand(num_images, num_channels, 1, 2)}\n        gt_boxes = torch.tensor([[2, 2, 2, 2, 0], [4, 4, 4, 4, 0]], dtype=torch.float32)\n        gt_instances = Instances(image_shape)\n        gt_instances.gt_boxes = RotatedBoxes(gt_boxes)\n        with EventStorage():  # capture events in a new storage to discard them\n            proposals, proposal_losses = proposal_generator(\n                images, features, [gt_instances[0], gt_instances[1]]\n            )\n\n        expected_losses = {\n            \"loss_rpn_cls\": torch.tensor(0.0432923734),\n            \"loss_rpn_loc\": torch.tensor(0.1552739739),\n        }\n        for name in expected_losses.keys():\n            self.assertTrue(torch.allclose(proposal_losses[name], expected_losses[name]))\n\n        expected_proposal_boxes = [\n            RotatedBoxes(\n                torch.tensor(\n                    [\n                        [0.60189795, 1.24095452, 61.98131943, 18.03621292, -4.07244873],\n                        [15.64940453, 1.69624567, 59.59749603, 16.34339333, 2.62692475],\n                        [-3.02982378, -2.69752932, 67.90952301, 59.62455750, 59.97010040],\n                        [16.71863365, 1.98309708, 35.61507797, 32.81484985, 62.92267227],\n                        [0.49432933, -7.92979717, 67.77606201, 62.93098450, -1.85656738],\n                        [8.00880814, 1.36017394, 121.81007385, 32.74150467, 50.44297409],\n                        [16.44299889, -4.82221127, 63.39775848, 61.22503662, 54.12270737],\n                        [5.00000000, 5.00000000, 10.00000000, 10.00000000, -0.76943970],\n                        [17.64130402, -0.98095351, 61.40377808, 16.28918839, 55.53118134],\n                        [0.13016054, 4.60568953, 35.80157471, 32.30180359, 62.52872086],\n                        [-4.26460743, 0.39604485, 
124.30079651, 31.84611320, -1.58203125],\n                        [7.52815342, -0.91636634, 62.39784622, 15.45565224, 60.79549789],\n                    ]\n                )\n            ),\n            RotatedBoxes(\n                torch.tensor(\n                    [\n                        [0.07734215, 0.81635046, 65.33510590, 17.34688377, -1.51821899],\n                        [-3.41833067, -3.11320257, 64.17595673, 60.55617905, 58.27033234],\n                        [20.67383385, -6.16561556, 63.60531998, 62.52315903, 54.85546494],\n                        [15.00000000, 10.00000000, 30.00000000, 20.00000000, -0.18218994],\n                        [9.22646523, -6.84775209, 62.09895706, 65.46472931, -2.74307251],\n                        [15.00000000, 4.93451595, 30.00000000, 9.86903191, -0.60272217],\n                        [8.88342094, 2.65560246, 120.95362854, 32.45022202, 55.75970078],\n                        [16.39088631, 2.33887148, 34.78761292, 35.61492920, 60.81977463],\n                        [9.78298569, 10.00000000, 19.56597137, 20.00000000, -0.86660767],\n                        [1.28576660, 5.49873352, 34.93610382, 33.22600174, 60.51599884],\n                        [17.58912468, -1.63270092, 62.96052551, 16.45713997, 52.91245270],\n                        [5.64749718, -1.90428460, 62.37649155, 16.19474792, 61.09543991],\n                        [0.82255805, 2.34931135, 118.83985901, 32.83671188, 56.50753784],\n                        [-5.33874989, 1.64404404, 125.28501892, 33.35424042, -2.80731201],\n                    ]\n                )\n            ),\n        ]\n\n        expected_objectness_logits = [\n            torch.tensor(\n                [\n                    0.10111768,\n                    0.09112845,\n                    0.08466332,\n                    0.07589971,\n                    0.06650183,\n                    0.06350251,\n                    0.04299347,\n                    0.01864817,\n                    
0.00986163,\n                    0.00078543,\n                    -0.04573630,\n                    -0.04799230,\n                ]\n            ),\n            torch.tensor(\n                [\n                    0.11373727,\n                    0.09377633,\n                    0.05281663,\n                    0.05143715,\n                    0.04040275,\n                    0.03250912,\n                    0.01307789,\n                    0.01177734,\n                    0.00038105,\n                    -0.00540255,\n                    -0.01194804,\n                    -0.01461012,\n                    -0.03061717,\n                    -0.03599222,\n                ]\n            ),\n        ]\n\n        torch.set_printoptions(precision=8, sci_mode=False)\n\n        for proposal, expected_proposal_box, im_size, expected_objectness_logit in zip(\n            proposals, expected_proposal_boxes, image_sizes, expected_objectness_logits\n        ):\n            self.assertEqual(len(proposal), len(expected_proposal_box))\n            self.assertEqual(proposal.image_size, im_size)\n            # It seems that there's some randomness in the result across different machines:\n            # This test can be run on a local machine for 100 times with exactly the same result,\n            # However, a different machine might produce slightly different results,\n            # thus the atol here.\n            err_msg = \"computed proposal boxes = {}, expected {}\".format(\n                proposal.proposal_boxes.tensor, expected_proposal_box.tensor\n            )\n            self.assertTrue(\n                torch.allclose(\n                    proposal.proposal_boxes.tensor, expected_proposal_box.tensor, atol=1e-5\n                ),\n                err_msg,\n            )\n\n            err_msg = \"computed objectness logits = {}, expected {}\".format(\n                proposal.objectness_logits, expected_objectness_logit\n            )\n            self.assertTrue(\n     
           torch.allclose(proposal.objectness_logits, expected_objectness_logit, atol=1e-5),\n                err_msg,\n            )\n\n\nif __name__ == \"__main__\":\n    unittest.main()\n"
  },
  {
    "path": "tests/test_sampler.py",
    "content": "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.\nimport unittest\nfrom torch.utils.data.sampler import SequentialSampler\n\nfrom detectron2.data.samplers import GroupedBatchSampler\n\n\nclass TestGroupedBatchSampler(unittest.TestCase):\n    def test_missing_group_id(self):\n        sampler = SequentialSampler(list(range(100)))\n        group_ids = [1] * 100\n        s = GroupedBatchSampler(sampler, group_ids, 2)\n\n        for k in s:\n            self.assertEqual(len(k), 2)\n\n    def test_groups(self):\n        sampler = SequentialSampler(list(range(100)))\n        group_ids = [1, 0] * 50\n        s = GroupedBatchSampler(sampler, group_ids, 2)\n\n        for k in s:\n            self.assertTrue((k[0] + k[1]) % 2 == 0)\n"
  },
  {
    "path": "tests/test_visualizer.py",
    "content": "# -*- coding: utf-8 -*-\n# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved\n# File:\n\nimport numpy as np\nimport unittest\nimport torch\n\nfrom detectron2.data import MetadataCatalog\nfrom detectron2.structures import BoxMode, Instances, RotatedBoxes\nfrom detectron2.utils.visualizer import Visualizer\n\n\nclass TestVisualizer(unittest.TestCase):\n    def _random_data(self):\n        H, W = 100, 100\n        N = 10\n        img = np.random.rand(H, W, 3) * 255\n        boxxy = np.random.rand(N, 2) * (H // 2)\n        boxes = np.concatenate((boxxy, boxxy + H // 2), axis=1)\n\n        def _rand_poly():\n            return np.random.rand(3, 2).flatten() * H\n\n        polygons = [[_rand_poly() for _ in range(np.random.randint(1, 5))] for _ in range(N)]\n\n        mask = np.zeros_like(img[:, :, 0], dtype=np.bool)\n        mask[:10, 10:20] = 1\n\n        labels = [str(i) for i in range(N)]\n        return img, boxes, labels, polygons, [mask] * N\n\n    @property\n    def metadata(self):\n        return MetadataCatalog.get(\"coco_2017_train\")\n\n    def test_draw_dataset_dict(self):\n        img = np.random.rand(512, 512, 3) * 255\n        dic = {\n            \"annotations\": [\n                {\n                    \"bbox\": [\n                        368.9946492271106,\n                        330.891438763377,\n                        13.148537455410235,\n                        13.644708680142685,\n                    ],\n                    \"bbox_mode\": BoxMode.XYWH_ABS,\n                    \"category_id\": 0,\n                    \"iscrowd\": 1,\n                    \"segmentation\": {\n                        \"counts\": \"_jh52m?2N2N2N2O100O10O001N1O2MceP2\",\n                        \"size\": [512, 512],\n                    },\n                }\n            ],\n            \"height\": 512,\n            \"image_id\": 1,\n            \"width\": 512,\n        }\n        v = Visualizer(img, self.metadata)\n        
v.draw_dataset_dict(dic)\n\n    def test_overlay_instances(self):\n        img, boxes, labels, polygons, masks = self._random_data()\n\n        v = Visualizer(img, self.metadata)\n        output = v.overlay_instances(masks=polygons, boxes=boxes, labels=labels).get_image()\n        self.assertEqual(output.shape, img.shape)\n\n        # Test 2x scaling\n        v = Visualizer(img, self.metadata, scale=2.0)\n        output = v.overlay_instances(masks=polygons, boxes=boxes, labels=labels).get_image()\n        self.assertEqual(output.shape[0], img.shape[0] * 2)\n\n        # Test overlay masks\n        v = Visualizer(img, self.metadata)\n        output = v.overlay_instances(masks=masks, boxes=boxes, labels=labels).get_image()\n        self.assertEqual(output.shape, img.shape)\n\n    def test_overlay_instances_no_boxes(self):\n        img, boxes, labels, polygons, _ = self._random_data()\n        v = Visualizer(img, self.metadata)\n        v.overlay_instances(masks=polygons, boxes=None, labels=labels).get_image()\n\n    def test_draw_instance_predictions(self):\n        img, boxes, _, _, masks = self._random_data()\n        num_inst = len(boxes)\n        inst = Instances((img.shape[0], img.shape[1]))\n        inst.pred_classes = torch.randint(0, 80, size=(num_inst,))\n        inst.scores = torch.rand(num_inst)\n        inst.pred_boxes = torch.from_numpy(boxes)\n        inst.pred_masks = torch.from_numpy(np.asarray(masks))\n\n        v = Visualizer(img, self.metadata)\n        v.draw_instance_predictions(inst)\n\n    def test_draw_empty_mask_predictions(self):\n        img, boxes, _, _, masks = self._random_data()\n        num_inst = len(boxes)\n        inst = Instances((img.shape[0], img.shape[1]))\n        inst.pred_classes = torch.randint(0, 80, size=(num_inst,))\n        inst.scores = torch.rand(num_inst)\n        inst.pred_boxes = torch.from_numpy(boxes)\n        inst.pred_masks = torch.from_numpy(np.zeros_like(np.asarray(masks)))\n\n        v = Visualizer(img, 
self.metadata)\n        v.draw_instance_predictions(inst)\n\n    def test_correct_output_shape(self):\n        img = np.random.rand(928, 928, 3) * 255\n        v = Visualizer(img, self.metadata)\n        out = v.output.get_image()\n        self.assertEqual(out.shape, img.shape)\n\n    def test_overlay_rotated_instances(self):\n        H, W = 100, 150\n        img = np.random.rand(H, W, 3) * 255\n        num_boxes = 50\n        boxes_5d = torch.zeros(num_boxes, 5)\n        boxes_5d[:, 0] = torch.FloatTensor(num_boxes).uniform_(-0.1 * W, 1.1 * W)\n        boxes_5d[:, 1] = torch.FloatTensor(num_boxes).uniform_(-0.1 * H, 1.1 * H)\n        boxes_5d[:, 2] = torch.FloatTensor(num_boxes).uniform_(0, max(W, H))\n        boxes_5d[:, 3] = torch.FloatTensor(num_boxes).uniform_(0, max(W, H))\n        boxes_5d[:, 4] = torch.FloatTensor(num_boxes).uniform_(-1800, 1800)\n        rotated_boxes = RotatedBoxes(boxes_5d)\n        labels = [str(i) for i in range(num_boxes)]\n\n        v = Visualizer(img, self.metadata)\n        output = v.overlay_instances(boxes=rotated_boxes, labels=labels).get_image()\n        self.assertEqual(output.shape, img.shape)\n\n    def test_draw_no_metadata(self):\n        img, boxes, _, _, masks = self._random_data()\n        num_inst = len(boxes)\n        inst = Instances((img.shape[0], img.shape[1]))\n        inst.pred_classes = torch.randint(0, 80, size=(num_inst,))\n        inst.scores = torch.rand(num_inst)\n        inst.pred_boxes = torch.from_numpy(boxes)\n        inst.pred_masks = torch.from_numpy(np.asarray(masks))\n\n        v = Visualizer(img, MetadataCatalog.get(\"asdfasdf\"))\n        v.draw_instance_predictions(inst)\n"
  }
]