Full Code of enoche/MMRec for AI

master 61098b5aba68 cached
83 files
495.4 KB
136.4k tokens
420 symbols
1 requests
Download .txt
Showing preview only (522K chars total). Download the full file or copy to clipboard to get everything.
Repository: enoche/MMRec
Branch: master
Commit: 61098b5aba68
Files: 83
Total size: 495.4 KB

Directory structure:
gitextract_7sv64gl1/

├── .gitignore
├── .idea/
│   ├── .gitignore
│   ├── MMRec.iml
│   ├── deployment.xml
│   ├── inspectionProfiles/
│   │   ├── Project_Default.xml
│   │   └── profiles_settings.xml
│   ├── misc.xml
│   ├── modules.xml
│   └── vcs.xml
├── LICENSE
├── README.md
├── data/
│   └── README.md
├── evaluation/
│   └── README.md
├── preprocessing/
│   ├── 0rating2inter.ipynb
│   ├── 1splitting.ipynb
│   ├── 2reindex-feat.ipynb
│   ├── 3feat-encoder.ipynb
│   ├── README.md
│   └── dualgnn-gen-u-u-matrix.py
├── requirements.txt
└── src/
    ├── common/
    │   ├── abstract_recommender.py
    │   ├── encoders.py
    │   ├── __init__.py
    │   ├── loss.py
    │   └── trainer.py
    ├── configs/
    │   ├── dataset/
    │   │   ├── baby.yaml
    │   │   ├── clothing.yaml
    │   │   ├── elec.yaml
    │   │   ├── microlens.yaml
    │   │   └── sports.yaml
    │   ├── mg.yaml
    │   ├── model/
    │   │   ├── BM3.yaml
    │   │   ├── BPR.yaml
    │   │   ├── DAMRS.yaml
    │   │   ├── DRAGON.yaml
    │   │   ├── DualGNN.yaml
    │   │   ├── FREEDOM.yaml
    │   │   ├── GRCN.yaml
    │   │   ├── ItemKNNCBF.yaml
    │   │   ├── LATTICE.yaml
    │   │   ├── LGMRec.yaml
    │   │   ├── LayerGCN.yaml
    │   │   ├── LightGCN.yaml
    │   │   ├── MGCN.yaml
    │   │   ├── MMGCN.yaml
    │   │   ├── MVGAE.yaml
    │   │   ├── PGL.yaml
    │   │   ├── SELFCFED_LGN.yaml
    │   │   ├── SLMRec.yaml
    │   │   ├── SMORE.yaml
    │   │   └── VBPR.yaml
    │   └── overall.yaml
    ├── main.py
    ├── models/
    │   ├── bm3.py
    │   ├── bpr.py
    │   ├── damrs.py
    │   ├── dragon.py
    │   ├── dualgnn.py
    │   ├── freedom.py
    │   ├── grcn.py
    │   ├── itemknncbf.py
    │   ├── lattice.py
    │   ├── layergcn.py
    │   ├── lgmrec.py
    │   ├── lightgcn.py
    │   ├── mgcn.py
    │   ├── mmgcn.py
    │   ├── mvgae.py
    │   ├── pgl.py
    │   ├── selfcfed_lgn.py
    │   ├── slmrec.py
    │   ├── smore.py
    │   └── vbpr.py
    └── utils/
        ├── configurator.py
        ├── data_utils.py
        ├── dataloader.py
        ├── dataset.py
        ├── logger.py
        ├── metrics.py
        ├── misc.py
        ├── quick_start.py
        ├── topk_evaluator.py
        └── utils.py

================================================
FILE CONTENTS
================================================

================================================
FILE: .gitignore
================================================
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

/data/baby/

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
#  Usually these files are written by a python script from a template
#  before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
.python-version

# pipenv
#   According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
#   However, in case of collaboration, if having platform-specific dependencies or dependencies
#   having no cross-platform support, pipenv may install dependencies that don't work, or not
#   install all needed dependencies.
#Pipfile.lock

# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/


================================================
FILE: .idea/.gitignore
================================================
# Default ignored files
/shelf/
/workspace.xml
# Editor-based HTTP Client requests
/httpRequests/
# Datasource local storage ignored files
/dataSources/
/dataSources.local.xml


================================================
FILE: .idea/MMRec.iml
================================================
<?xml version="1.0" encoding="UTF-8"?>
<module type="PYTHON_MODULE" version="4">
  <component name="NewModuleRootManager">
    <content url="file://$MODULE_DIR$">
      <sourceFolder url="file://$MODULE_DIR$/src" isTestSource="false" />
    </content>
    <orderEntry type="jdk" jdkName="Python 3.7 (env-test) (2)" jdkType="Python SDK" />
    <orderEntry type="sourceFolder" forTests="false" />
  </component>
  <component name="PyDocumentationSettings">
    <option name="format" value="PLAIN" />
    <option name="myDocStringFormat" value="Plain" />
  </component>
</module>

================================================
FILE: .idea/deployment.xml
================================================
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="PublishConfigData" remoteFilesAllowedToDisappearOnAutoupload="false">
    <serverData>
      <paths name="ecs-user@8.220.208.249:22">
        <serverdata>
          <mappings>
            <mapping local="$PROJECT_DIR$" web="/" />
          </mappings>
        </serverdata>
      </paths>
    </serverData>
  </component>
</project>

================================================
FILE: .idea/inspectionProfiles/Project_Default.xml
================================================
<component name="InspectionProjectProfileManager">
  <profile version="1.0">
    <option name="myName" value="Project Default" />
    <inspection_tool class="DuplicatedCode" enabled="false" level="WEAK WARNING" enabled_by_default="false" />
    <inspection_tool class="PyPackageRequirementsInspection" enabled="true" level="WARNING" enabled_by_default="true">
      <option name="ignoredPackages">
        <value>
          <list size="160">
            <item index="0" class="java.lang.String" itemvalue="numpy" />
            <item index="1" class="java.lang.String" itemvalue="scikit-learn" />
            <item index="2" class="java.lang.String" itemvalue="tensorflow" />
            <item index="3" class="java.lang.String" itemvalue="tables" />
            <item index="4" class="java.lang.String" itemvalue="statsmodels" />
            <item index="5" class="java.lang.String" itemvalue="wrapt" />
            <item index="6" class="java.lang.String" itemvalue="pandas" />
            <item index="7" class="java.lang.String" itemvalue="tqdm" />
            <item index="8" class="java.lang.String" itemvalue="scipy" />
            <item index="9" class="java.lang.String" itemvalue="torch" />
            <item index="10" class="java.lang.String" itemvalue="gensim" />
            <item index="11" class="java.lang.String" itemvalue="numba" />
            <item index="12" class="java.lang.String" itemvalue="pyDeprecate" />
            <item index="13" class="java.lang.String" itemvalue="torchmetrics" />
            <item index="14" class="java.lang.String" itemvalue="bs4" />
            <item index="15" class="java.lang.String" itemvalue="flair" />
            <item index="16" class="java.lang.String" itemvalue="srsly" />
            <item index="17" class="java.lang.String" itemvalue="conllu" />
            <item index="18" class="java.lang.String" itemvalue="mpld3" />
            <item index="19" class="java.lang.String" itemvalue="torchvision" />
            <item index="20" class="java.lang.String" itemvalue="ftfy" />
            <item index="21" class="java.lang.String" itemvalue="elasticsearch" />
            <item index="22" class="java.lang.String" itemvalue="Pygments" />
            <item index="23" class="java.lang.String" itemvalue="bleach" />
            <item index="24" class="java.lang.String" itemvalue="lxml" />
            <item index="25" class="java.lang.String" itemvalue="multiprocess" />
            <item index="26" class="java.lang.String" itemvalue="soupsieve" />
            <item index="27" class="java.lang.String" itemvalue="torchaudio" />
            <item index="28" class="java.lang.String" itemvalue="jsonschema" />
            <item index="29" class="java.lang.String" itemvalue="qtconsole" />
            <item index="30" class="java.lang.String" itemvalue="Janome" />
            <item index="31" class="java.lang.String" itemvalue="terminado" />
            <item index="32" class="java.lang.String" itemvalue="pydantic" />
            <item index="33" class="java.lang.String" itemvalue="transformers" />
            <item index="34" class="java.lang.String" itemvalue="Werkzeug" />
            <item index="35" class="java.lang.String" itemvalue="faiss" />
            <item index="36" class="java.lang.String" itemvalue="segtok" />
            <item index="37" class="java.lang.String" itemvalue="jupyter-client" />
            <item index="38" class="java.lang.String" itemvalue="jupyterlab-pygments" />
            <item index="39" class="java.lang.String" itemvalue="click" />
            <item index="40" class="java.lang.String" itemvalue="ipykernel" />
            <item index="41" class="java.lang.String" itemvalue="nbconvert" />
            <item index="42" class="java.lang.String" itemvalue="psutil" />
            <item index="43" class="java.lang.String" itemvalue="regex" />
            <item index="44" class="java.lang.String" itemvalue="tensorboard" />
            <item index="45" class="java.lang.String" itemvalue="cymem" />
            <item index="46" class="java.lang.String" itemvalue="platformdirs" />
            <item index="47" class="java.lang.String" itemvalue="bpemb" />
            <item index="48" class="java.lang.String" itemvalue="matplotlib" />
            <item index="49" class="java.lang.String" itemvalue="konoha" />
            <item index="50" class="java.lang.String" itemvalue="rank-bm25" />
            <item index="51" class="java.lang.String" itemvalue="murmurhash" />
            <item index="52" class="java.lang.String" itemvalue="lightgbm" />
            <item index="53" class="java.lang.String" itemvalue="jsonlines" />
            <item index="54" class="java.lang.String" itemvalue="pytrec-eval" />
            <item index="55" class="java.lang.String" itemvalue="wasabi" />
            <item index="56" class="java.lang.String" itemvalue="networkx" />
            <item index="57" class="java.lang.String" itemvalue="cffi" />
            <item index="58" class="java.lang.String" itemvalue="wget" />
            <item index="59" class="java.lang.String" itemvalue="antlr4-python3-runtime" />
            <item index="60" class="java.lang.String" itemvalue="datasets" />
            <item index="61" class="java.lang.String" itemvalue="py4j" />
            <item index="62" class="java.lang.String" itemvalue="requests" />
            <item index="63" class="java.lang.String" itemvalue="pyrsistent" />
            <item index="64" class="java.lang.String" itemvalue="pylcs" />
            <item index="65" class="java.lang.String" itemvalue="gdown" />
            <item index="66" class="java.lang.String" itemvalue="Deprecated" />
            <item index="67" class="java.lang.String" itemvalue="stack-data" />
            <item index="68" class="java.lang.String" itemvalue="smart-open" />
            <item index="69" class="java.lang.String" itemvalue="prompt-toolkit" />
            <item index="70" class="java.lang.String" itemvalue="ipywidgets" />
            <item index="71" class="java.lang.String" itemvalue="pyarrow" />
            <item index="72" class="java.lang.String" itemvalue="tornado" />
            <item index="73" class="java.lang.String" itemvalue="dpr" />
            <item index="74" class="java.lang.String" itemvalue="black" />
            <item index="75" class="java.lang.String" itemvalue="SoundFile" />
            <item index="76" class="java.lang.String" itemvalue="overrides" />
            <item index="77" class="java.lang.String" itemvalue="langcodes" />
            <item index="78" class="java.lang.String" itemvalue="importlib-resources" />
            <item index="79" class="java.lang.String" itemvalue="hydra-core" />
            <item index="80" class="java.lang.String" itemvalue="jupyter-console" />
            <item index="81" class="java.lang.String" itemvalue="typing_extensions" />
            <item index="82" class="java.lang.String" itemvalue="cachetools" />
            <item index="83" class="java.lang.String" itemvalue="debugpy" />
            <item index="84" class="java.lang.String" itemvalue="multidict" />
            <item index="85" class="java.lang.String" itemvalue="responses" />
            <item index="86" class="java.lang.String" itemvalue="thinc" />
            <item index="87" class="java.lang.String" itemvalue="yarl" />
            <item index="88" class="java.lang.String" itemvalue="pytz" />
            <item index="89" class="java.lang.String" itemvalue="Pillow" />
            <item index="90" class="java.lang.String" itemvalue="traitlets" />
            <item index="91" class="java.lang.String" itemvalue="protobuf" />
            <item index="92" class="java.lang.String" itemvalue="beir" />
            <item index="93" class="java.lang.String" itemvalue="threadpoolctl" />
            <item index="94" class="java.lang.String" itemvalue="huggingface-hub" />
            <item index="95" class="java.lang.String" itemvalue="nbclient" />
            <item index="96" class="java.lang.String" itemvalue="QtPy" />
            <item index="97" class="java.lang.String" itemvalue="tinycss2" />
            <item index="98" class="java.lang.String" itemvalue="frozenlist" />
            <item index="99" class="java.lang.String" itemvalue="submitit" />
            <item index="100" class="java.lang.String" itemvalue="fsspec" />
            <item index="101" class="java.lang.String" itemvalue="spacy" />
            <item index="102" class="java.lang.String" itemvalue="sqlitedict" />
            <item index="103" class="java.lang.String" itemvalue="filelock" />
            <item index="104" class="java.lang.String" itemvalue="jupyterlab-widgets" />
            <item index="105" class="java.lang.String" itemvalue="pyzmq" />
            <item index="106" class="java.lang.String" itemvalue="sentencepiece" />
            <item index="107" class="java.lang.String" itemvalue="certifi" />
            <item index="108" class="java.lang.String" itemvalue="pyserini" />
            <item index="109" class="java.lang.String" itemvalue="nmslib" />
            <item index="110" class="java.lang.String" itemvalue="pyparsing" />
            <item index="111" class="java.lang.String" itemvalue="Markdown" />
            <item index="112" class="java.lang.String" itemvalue="notebook" />
            <item index="113" class="java.lang.String" itemvalue="xxhash" />
            <item index="114" class="java.lang.String" itemvalue="tokenizers" />
            <item index="115" class="java.lang.String" itemvalue="sacremoses" />
            <item index="116" class="java.lang.String" itemvalue="langdetect" />
            <item index="117" class="java.lang.String" itemvalue="pyjnius" />
            <item index="118" class="java.lang.String" itemvalue="kiwisolver" />
            <item index="119" class="java.lang.String" itemvalue="pathy" />
            <item index="120" class="java.lang.String" itemvalue="Wikipedia-API" />
            <item index="121" class="java.lang.String" itemvalue="catalogue" />
            <item index="122" class="java.lang.String" itemvalue="omegaconf" />
            <item index="123" class="java.lang.String" itemvalue="fonttools" />
            <item index="124" class="java.lang.String" itemvalue="pytorch-lightning" />
            <item index="125" class="java.lang.String" itemvalue="widgetsnbextension" />
            <item index="126" class="java.lang.String" itemvalue="charset-normalizer" />
            <item index="127" class="java.lang.String" itemvalue="matplotlib-inline" />
            <item index="128" class="java.lang.String" itemvalue="async-timeout" />
            <item index="129" class="java.lang.String" itemvalue="spacy-loggers" />
            <item index="130" class="java.lang.String" itemvalue="more-itertools" />
            <item index="131" class="java.lang.String" itemvalue="cloudpickle" />
            <item index="132" class="java.lang.String" itemvalue="llvmlite" />
            <item index="133" class="java.lang.String" itemvalue="spacy-legacy" />
            <item index="134" class="java.lang.String" itemvalue="rouge" />
            <item index="135" class="java.lang.String" itemvalue="importlib-metadata" />
            <item index="136" class="java.lang.String" itemvalue="Jinja2" />
            <item index="137" class="java.lang.String" itemvalue="preshed" />
            <item index="138" class="java.lang.String" itemvalue="onnxruntime" />
            <item index="139" class="java.lang.String" itemvalue="blis" />
            <item index="140" class="java.lang.String" itemvalue="urllib3" />
            <item index="141" class="java.lang.String" itemvalue="Cython" />
            <item index="142" class="java.lang.String" itemvalue="pptree" />
            <item index="143" class="java.lang.String" itemvalue="pymongo" />
            <item index="144" class="java.lang.String" itemvalue="typer" />
            <item index="145" class="java.lang.String" itemvalue="faiss-cpu" />
            <item index="146" class="java.lang.String" itemvalue="pytest" />
            <item index="147" class="java.lang.String" itemvalue="hyperopt" />
            <item index="148" class="java.lang.String" itemvalue="nbformat" />
            <item index="149" class="java.lang.String" itemvalue="ipython" />
            <item index="150" class="java.lang.String" itemvalue="sentence-transformers" />
            <item index="151" class="java.lang.String" itemvalue="dill" />
            <item index="152" class="java.lang.String" itemvalue="fastjsonschema" />
            <item index="153" class="java.lang.String" itemvalue="prometheus-client" />
            <item index="154" class="java.lang.String" itemvalue="pybind11" />
            <item index="155" class="java.lang.String" itemvalue="aiohttp" />
            <item index="156" class="java.lang.String" itemvalue="grpcio" />
            <item index="157" class="java.lang.String" itemvalue="aiosignal" />
            <item index="158" class="java.lang.String" itemvalue="google-auth" />
            <item index="159" class="java.lang.String" itemvalue="recbole" />
          </list>
        </value>
      </option>
    </inspection_tool>
  </profile>
</component>

================================================
FILE: .idea/inspectionProfiles/profiles_settings.xml
================================================
<component name="InspectionProjectProfileManager">
  <settings>
    <option name="USE_PROJECT_PROFILE" value="false" />
    <version value="1.0" />
  </settings>
</component>

================================================
FILE: .idea/misc.xml
================================================
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="ProjectRootManager" version="2" project-jdk-name="Python 3.7 (env-test) (2)" project-jdk-type="Python SDK" />
</project>

================================================
FILE: .idea/modules.xml
================================================
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="ProjectModuleManager">
    <modules>
      <module fileurl="file://$PROJECT_DIR$/.idea/MMRec.iml" filepath="$PROJECT_DIR$/.idea/MMRec.iml" />
    </modules>
  </component>
</project>

================================================
FILE: .idea/vcs.xml
================================================
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="VcsDirectoryMappings">
    <mapping directory="$PROJECT_DIR$" vcs="Git" />
  </component>
</project>

================================================
FILE: LICENSE
================================================
                    GNU GENERAL PUBLIC LICENSE
                       Version 3, 29 June 2007

 Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
 Everyone is permitted to copy and distribute verbatim copies
 of this license document, but changing it is not allowed.

                            Preamble

  The GNU General Public License is a free, copyleft license for
software and other kinds of works.

  The licenses for most software and other practical works are designed
to take away your freedom to share and change the works.  By contrast,
the GNU General Public License is intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains free
software for all its users.  We, the Free Software Foundation, use the
GNU General Public License for most of our software; it applies also to
any other work released this way by its authors.  You can apply it to
your programs, too.

  When we speak of free software, we are referring to freedom, not
price.  Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.

  To protect your rights, we need to prevent others from denying you
these rights or asking you to surrender the rights.  Therefore, you have
certain responsibilities if you distribute copies of the software, or if
you modify it: responsibilities to respect the freedom of others.

  For example, if you distribute copies of such a program, whether
gratis or for a fee, you must pass on to the recipients the same
freedoms that you received.  You must make sure that they, too, receive
or can get the source code.  And you must show them these terms so they
know their rights.

  Developers that use the GNU GPL protect your rights with two steps:
(1) assert copyright on the software, and (2) offer you this License
giving you legal permission to copy, distribute and/or modify it.

  For the developers' and authors' protection, the GPL clearly explains
that there is no warranty for this free software.  For both users' and
authors' sake, the GPL requires that modified versions be marked as
changed, so that their problems will not be attributed erroneously to
authors of previous versions.

  Some devices are designed to deny users access to install or run
modified versions of the software inside them, although the manufacturer
can do so.  This is fundamentally incompatible with the aim of
protecting users' freedom to change the software.  The systematic
pattern of such abuse occurs in the area of products for individuals to
use, which is precisely where it is most unacceptable.  Therefore, we
have designed this version of the GPL to prohibit the practice for those
products.  If such problems arise substantially in other domains, we
stand ready to extend this provision to those domains in future versions
of the GPL, as needed to protect the freedom of users.

  Finally, every program is threatened constantly by software patents.
States should not allow patents to restrict development and use of
software on general-purpose computers, but in those that do, we wish to
avoid the special danger that patents applied to a free program could
make it effectively proprietary.  To prevent this, the GPL assures that
patents cannot be used to render the program non-free.

  The precise terms and conditions for copying, distribution and
modification follow.

                       TERMS AND CONDITIONS

  0. Definitions.

  "This License" refers to version 3 of the GNU General Public License.

  "Copyright" also means copyright-like laws that apply to other kinds of
works, such as semiconductor masks.

  "The Program" refers to any copyrightable work licensed under this
License.  Each licensee is addressed as "you".  "Licensees" and
"recipients" may be individuals or organizations.

  To "modify" a work means to copy from or adapt all or part of the work
in a fashion requiring copyright permission, other than the making of an
exact copy.  The resulting work is called a "modified version" of the
earlier work or a work "based on" the earlier work.

  A "covered work" means either the unmodified Program or a work based
on the Program.

  To "propagate" a work means to do anything with it that, without
permission, would make you directly or secondarily liable for
infringement under applicable copyright law, except executing it on a
computer or modifying a private copy.  Propagation includes copying,
distribution (with or without modification), making available to the
public, and in some countries other activities as well.

  To "convey" a work means any kind of propagation that enables other
parties to make or receive copies.  Mere interaction with a user through
a computer network, with no transfer of a copy, is not conveying.

  An interactive user interface displays "Appropriate Legal Notices"
to the extent that it includes a convenient and prominently visible
feature that (1) displays an appropriate copyright notice, and (2)
tells the user that there is no warranty for the work (except to the
extent that warranties are provided), that licensees may convey the
work under this License, and how to view a copy of this License.  If
the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.

  1. Source Code.

  The "source code" for a work means the preferred form of the work
for making modifications to it.  "Object code" means any non-source
form of a work.

  A "Standard Interface" means an interface that either is an official
standard defined by a recognized standards body, or, in the case of
interfaces specified for a particular programming language, one that
is widely used among developers working in that language.

  The "System Libraries" of an executable work include anything, other
than the work as a whole, that (a) is included in the normal form of
packaging a Major Component, but which is not part of that Major
Component, and (b) serves only to enable use of the work with that
Major Component, or to implement a Standard Interface for which an
implementation is available to the public in source code form.  A
"Major Component", in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system
(if any) on which the executable work runs, or a compiler used to
produce the work, or an object code interpreter used to run it.

  The "Corresponding Source" for a work in object code form means all
the source code needed to generate, install, and (for an executable
work) run the object code and to modify the work, including scripts to
control those activities.  However, it does not include the work's
System Libraries, or general-purpose tools or generally available free
programs which are used unmodified in performing those activities but
which are not part of the work.  For example, Corresponding Source
includes interface definition files associated with source files for
the work, and the source code for shared libraries and dynamically
linked subprograms that the work is specifically designed to require,
such as by intimate data communication or control flow between those
subprograms and other parts of the work.

  The Corresponding Source need not include anything that users
can regenerate automatically from other parts of the Corresponding
Source.

  The Corresponding Source for a work in source code form is that
same work.

  2. Basic Permissions.

  All rights granted under this License are granted for the term of
copyright on the Program, and are irrevocable provided the stated
conditions are met.  This License explicitly affirms your unlimited
permission to run the unmodified Program.  The output from running a
covered work is covered by this License only if the output, given its
content, constitutes a covered work.  This License acknowledges your
rights of fair use or other equivalent, as provided by copyright law.

  You may make, run and propagate covered works that you do not
convey, without conditions so long as your license otherwise remains
in force.  You may convey covered works to others for the sole purpose
of having them make modifications exclusively for you, or provide you
with facilities for running those works, provided that you comply with
the terms of this License in conveying all material for which you do
not control copyright.  Those thus making or running the covered works
for you must do so exclusively on your behalf, under your direction
and control, on terms that prohibit them from making any copies of
your copyrighted material outside their relationship with you.

  Conveying under any other circumstances is permitted solely under
the conditions stated below.  Sublicensing is not allowed; section 10
makes it unnecessary.

  3. Protecting Users' Legal Rights From Anti-Circumvention Law.

  No covered work shall be deemed part of an effective technological
measure under any applicable law fulfilling obligations under article
11 of the WIPO copyright treaty adopted on 20 December 1996, or
similar laws prohibiting or restricting circumvention of such
measures.

  When you convey a covered work, you waive any legal power to forbid
circumvention of technological measures to the extent such circumvention
is effected by exercising rights under this License with respect to
the covered work, and you disclaim any intention to limit operation or
modification of the work as a means of enforcing, against the work's
users, your or third parties' legal rights to forbid circumvention of
technological measures.

  4. Conveying Verbatim Copies.

  You may convey verbatim copies of the Program's source code as you
receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice;
keep intact all notices stating that this License and any
non-permissive terms added in accord with section 7 apply to the code;
keep intact all notices of the absence of any warranty; and give all
recipients a copy of this License along with the Program.

  You may charge any price or no price for each copy that you convey,
and you may offer support or warranty protection for a fee.

  5. Conveying Modified Source Versions.

  You may convey a work based on the Program, or the modifications to
produce it from the Program, in the form of source code under the
terms of section 4, provided that you also meet all of these conditions:

    a) The work must carry prominent notices stating that you modified
    it, and giving a relevant date.

    b) The work must carry prominent notices stating that it is
    released under this License and any conditions added under section
    7.  This requirement modifies the requirement in section 4 to
    "keep intact all notices".

    c) You must license the entire work, as a whole, under this
    License to anyone who comes into possession of a copy.  This
    License will therefore apply, along with any applicable section 7
    additional terms, to the whole of the work, and all its parts,
    regardless of how they are packaged.  This License gives no
    permission to license the work in any other way, but it does not
    invalidate such permission if you have separately received it.

    d) If the work has interactive user interfaces, each must display
    Appropriate Legal Notices; however, if the Program has interactive
    interfaces that do not display Appropriate Legal Notices, your
    work need not make them do so.

  A compilation of a covered work with other separate and independent
works, which are not by their nature extensions of the covered work,
and which are not combined with it such as to form a larger program,
in or on a volume of a storage or distribution medium, is called an
"aggregate" if the compilation and its resulting copyright are not
used to limit the access or legal rights of the compilation's users
beyond what the individual works permit.  Inclusion of a covered work
in an aggregate does not cause this License to apply to the other
parts of the aggregate.

  6. Conveying Non-Source Forms.

  You may convey a covered work in object code form under the terms
of sections 4 and 5, provided that you also convey the
machine-readable Corresponding Source under the terms of this License,
in one of these ways:

    a) Convey the object code in, or embodied in, a physical product
    (including a physical distribution medium), accompanied by the
    Corresponding Source fixed on a durable physical medium
    customarily used for software interchange.

    b) Convey the object code in, or embodied in, a physical product
    (including a physical distribution medium), accompanied by a
    written offer, valid for at least three years and valid for as
    long as you offer spare parts or customer support for that product
    model, to give anyone who possesses the object code either (1) a
    copy of the Corresponding Source for all the software in the
    product that is covered by this License, on a durable physical
    medium customarily used for software interchange, for a price no
    more than your reasonable cost of physically performing this
    conveying of source, or (2) access to copy the
    Corresponding Source from a network server at no charge.

    c) Convey individual copies of the object code with a copy of the
    written offer to provide the Corresponding Source.  This
    alternative is allowed only occasionally and noncommercially, and
    only if you received the object code with such an offer, in accord
    with subsection 6b.

    d) Convey the object code by offering access from a designated
    place (gratis or for a charge), and offer equivalent access to the
    Corresponding Source in the same way through the same place at no
    further charge.  You need not require recipients to copy the
    Corresponding Source along with the object code.  If the place to
    copy the object code is a network server, the Corresponding Source
    may be on a different server (operated by you or a third party)
    that supports equivalent copying facilities, provided you maintain
    clear directions next to the object code saying where to find the
    Corresponding Source.  Regardless of what server hosts the
    Corresponding Source, you remain obligated to ensure that it is
    available for as long as needed to satisfy these requirements.

    e) Convey the object code using peer-to-peer transmission, provided
    you inform other peers where the object code and Corresponding
    Source of the work are being offered to the general public at no
    charge under subsection 6d.

  A separable portion of the object code, whose source code is excluded
from the Corresponding Source as a System Library, need not be
included in conveying the object code work.

  A "User Product" is either (1) a "consumer product", which means any
tangible personal property which is normally used for personal, family,
or household purposes, or (2) anything designed or sold for incorporation
into a dwelling.  In determining whether a product is a consumer product,
doubtful cases shall be resolved in favor of coverage.  For a particular
product received by a particular user, "normally used" refers to a
typical or common use of that class of product, regardless of the status
of the particular user or of the way in which the particular user
actually uses, or expects or is expected to use, the product.  A product
is a consumer product regardless of whether the product has substantial
commercial, industrial or non-consumer uses, unless such uses represent
the only significant mode of use of the product.

  "Installation Information" for a User Product means any methods,
procedures, authorization keys, or other information required to install
and execute modified versions of a covered work in that User Product from
a modified version of its Corresponding Source.  The information must
suffice to ensure that the continued functioning of the modified object
code is in no case prevented or interfered with solely because
modification has been made.

  If you convey an object code work under this section in, or with, or
specifically for use in, a User Product, and the conveying occurs as
part of a transaction in which the right of possession and use of the
User Product is transferred to the recipient in perpetuity or for a
fixed term (regardless of how the transaction is characterized), the
Corresponding Source conveyed under this section must be accompanied
by the Installation Information.  But this requirement does not apply
if neither you nor any third party retains the ability to install
modified object code on the User Product (for example, the work has
been installed in ROM).

  The requirement to provide Installation Information does not include a
requirement to continue to provide support service, warranty, or updates
for a work that has been modified or installed by the recipient, or for
the User Product in which it has been modified or installed.  Access to a
network may be denied when the modification itself materially and
adversely affects the operation of the network or violates the rules and
protocols for communication across the network.

  Corresponding Source conveyed, and Installation Information provided,
in accord with this section must be in a format that is publicly
documented (and with an implementation available to the public in
source code form), and must require no special password or key for
unpacking, reading or copying.

  7. Additional Terms.

  "Additional permissions" are terms that supplement the terms of this
License by making exceptions from one or more of its conditions.
Additional permissions that are applicable to the entire Program shall
be treated as though they were included in this License, to the extent
that they are valid under applicable law.  If additional permissions
apply only to part of the Program, that part may be used separately
under those permissions, but the entire Program remains governed by
this License without regard to the additional permissions.

  When you convey a copy of a covered work, you may at your option
remove any additional permissions from that copy, or from any part of
it.  (Additional permissions may be written to require their own
removal in certain cases when you modify the work.)  You may place
additional permissions on material, added by you to a covered work,
for which you have or can give appropriate copyright permission.

  Notwithstanding any other provision of this License, for material you
add to a covered work, you may (if authorized by the copyright holders of
that material) supplement the terms of this License with terms:

    a) Disclaiming warranty or limiting liability differently from the
    terms of sections 15 and 16 of this License; or

    b) Requiring preservation of specified reasonable legal notices or
    author attributions in that material or in the Appropriate Legal
    Notices displayed by works containing it; or

    c) Prohibiting misrepresentation of the origin of that material, or
    requiring that modified versions of such material be marked in
    reasonable ways as different from the original version; or

    d) Limiting the use for publicity purposes of names of licensors or
    authors of the material; or

    e) Declining to grant rights under trademark law for use of some
    trade names, trademarks, or service marks; or

    f) Requiring indemnification of licensors and authors of that
    material by anyone who conveys the material (or modified versions of
    it) with contractual assumptions of liability to the recipient, for
    any liability that these contractual assumptions directly impose on
    those licensors and authors.

  All other non-permissive additional terms are considered "further
restrictions" within the meaning of section 10.  If the Program as you
received it, or any part of it, contains a notice stating that it is
governed by this License along with a term that is a further
restriction, you may remove that term.  If a license document contains
a further restriction but permits relicensing or conveying under this
License, you may add to a covered work material governed by the terms
of that license document, provided that the further restriction does
not survive such relicensing or conveying.

  If you add terms to a covered work in accord with this section, you
must place, in the relevant source files, a statement of the
additional terms that apply to those files, or a notice indicating
where to find the applicable terms.

  Additional terms, permissive or non-permissive, may be stated in the
form of a separately written license, or stated as exceptions;
the above requirements apply either way.

  8. Termination.

  You may not propagate or modify a covered work except as expressly
provided under this License.  Any attempt otherwise to propagate or
modify it is void, and will automatically terminate your rights under
this License (including any patent licenses granted under the third
paragraph of section 11).

  However, if you cease all violation of this License, then your
license from a particular copyright holder is reinstated (a)
provisionally, unless and until the copyright holder explicitly and
finally terminates your license, and (b) permanently, if the copyright
holder fails to notify you of the violation by some reasonable means
prior to 60 days after the cessation.

  Moreover, your license from a particular copyright holder is
reinstated permanently if the copyright holder notifies you of the
violation by some reasonable means, this is the first time you have
received notice of violation of this License (for any work) from that
copyright holder, and you cure the violation prior to 30 days after
your receipt of the notice.

  Termination of your rights under this section does not terminate the
licenses of parties who have received copies or rights from you under
this License.  If your rights have been terminated and not permanently
reinstated, you do not qualify to receive new licenses for the same
material under section 10.

  9. Acceptance Not Required for Having Copies.

  You are not required to accept this License in order to receive or
run a copy of the Program.  Ancillary propagation of a covered work
occurring solely as a consequence of using peer-to-peer transmission
to receive a copy likewise does not require acceptance.  However,
nothing other than this License grants you permission to propagate or
modify any covered work.  These actions infringe copyright if you do
not accept this License.  Therefore, by modifying or propagating a
covered work, you indicate your acceptance of this License to do so.

  10. Automatic Licensing of Downstream Recipients.

  Each time you convey a covered work, the recipient automatically
receives a license from the original licensors, to run, modify and
propagate that work, subject to this License.  You are not responsible
for enforcing compliance by third parties with this License.

  An "entity transaction" is a transaction transferring control of an
organization, or substantially all assets of one, or subdividing an
organization, or merging organizations.  If propagation of a covered
work results from an entity transaction, each party to that
transaction who receives a copy of the work also receives whatever
licenses to the work the party's predecessor in interest had or could
give under the previous paragraph, plus a right to possession of the
Corresponding Source of the work from the predecessor in interest, if
the predecessor has it or can get it with reasonable efforts.

  You may not impose any further restrictions on the exercise of the
rights granted or affirmed under this License.  For example, you may
not impose a license fee, royalty, or other charge for exercise of
rights granted under this License, and you may not initiate litigation
(including a cross-claim or counterclaim in a lawsuit) alleging that
any patent claim is infringed by making, using, selling, offering for
sale, or importing the Program or any portion of it.

  11. Patents.

  A "contributor" is a copyright holder who authorizes use under this
License of the Program or a work on which the Program is based.  The
work thus licensed is called the contributor's "contributor version".

  A contributor's "essential patent claims" are all patent claims
owned or controlled by the contributor, whether already acquired or
hereafter acquired, that would be infringed by some manner, permitted
by this License, of making, using, or selling its contributor version,
but do not include claims that would be infringed only as a
consequence of further modification of the contributor version.  For
purposes of this definition, "control" includes the right to grant
patent sublicenses in a manner consistent with the requirements of
this License.

  Each contributor grants you a non-exclusive, worldwide, royalty-free
patent license under the contributor's essential patent claims, to
make, use, sell, offer for sale, import and otherwise run, modify and
propagate the contents of its contributor version.

  In the following three paragraphs, a "patent license" is any express
agreement or commitment, however denominated, not to enforce a patent
(such as an express permission to practice a patent or covenant not to
sue for patent infringement).  To "grant" such a patent license to a
party means to make such an agreement or commitment not to enforce a
patent against the party.

  If you convey a covered work, knowingly relying on a patent license,
and the Corresponding Source of the work is not available for anyone
to copy, free of charge and under the terms of this License, through a
publicly available network server or other readily accessible means,
then you must either (1) cause the Corresponding Source to be so
available, or (2) arrange to deprive yourself of the benefit of the
patent license for this particular work, or (3) arrange, in a manner
consistent with the requirements of this License, to extend the patent
license to downstream recipients.  "Knowingly relying" means you have
actual knowledge that, but for the patent license, your conveying the
covered work in a country, or your recipient's use of the covered work
in a country, would infringe one or more identifiable patents in that
country that you have reason to believe are valid.

  If, pursuant to or in connection with a single transaction or
arrangement, you convey, or propagate by procuring conveyance of, a
covered work, and grant a patent license to some of the parties
receiving the covered work authorizing them to use, propagate, modify
or convey a specific copy of the covered work, then the patent license
you grant is automatically extended to all recipients of the covered
work and works based on it.

  A patent license is "discriminatory" if it does not include within
the scope of its coverage, prohibits the exercise of, or is
conditioned on the non-exercise of one or more of the rights that are
specifically granted under this License.  You may not convey a covered
work if you are a party to an arrangement with a third party that is
in the business of distributing software, under which you make payment
to the third party based on the extent of your activity of conveying
the work, and under which the third party grants, to any of the
parties who would receive the covered work from you, a discriminatory
patent license (a) in connection with copies of the covered work
conveyed by you (or copies made from those copies), or (b) primarily
for and in connection with specific products or compilations that
contain the covered work, unless you entered into that arrangement,
or that patent license was granted, prior to 28 March 2007.

  Nothing in this License shall be construed as excluding or limiting
any implied license or other defenses to infringement that may
otherwise be available to you under applicable patent law.

  12. No Surrender of Others' Freedom.

  If conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License.  If you cannot convey a
covered work so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you may
not convey it at all.  For example, if you agree to terms that obligate you
to collect a royalty for further conveying from those to whom you convey
the Program, the only way you could satisfy both those terms and this
License would be to refrain entirely from conveying the Program.

  13. Use with the GNU Affero General Public License.

  Notwithstanding any other provision of this License, you have
permission to link or combine any covered work with a work licensed
under version 3 of the GNU Affero General Public License into a single
combined work, and to convey the resulting work.  The terms of this
License will continue to apply to the part which is the covered work,
but the special requirements of the GNU Affero General Public License,
section 13, concerning interaction through a network will apply to the
combination as such.

  14. Revised Versions of this License.

  The Free Software Foundation may publish revised and/or new versions of
the GNU General Public License from time to time.  Such new versions will
be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.

  Each version is given a distinguishing version number.  If the
Program specifies that a certain numbered version of the GNU General
Public License "or any later version" applies to it, you have the
option of following the terms and conditions either of that numbered
version or of any later version published by the Free Software
Foundation.  If the Program does not specify a version number of the
GNU General Public License, you may choose any version ever published
by the Free Software Foundation.

  If the Program specifies that a proxy can decide which future
versions of the GNU General Public License can be used, that proxy's
public statement of acceptance of a version permanently authorizes you
to choose that version for the Program.

  Later license versions may give you additional or different
permissions.  However, no additional obligations are imposed on any
author or copyright holder as a result of your choosing to follow a
later version.

  15. Disclaimer of Warranty.

  THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
APPLICABLE LAW.  EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE.  THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
IS WITH YOU.  SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.

  16. Limitation of Liability.

  IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
SUCH DAMAGES.

  17. Interpretation of Sections 15 and 16.

  If the disclaimer of warranty and limitation of liability provided
above cannot be given local legal effect according to their terms,
reviewing courts shall apply local law that most closely approximates
an absolute waiver of all civil liability in connection with the
Program, unless a warranty or assumption of liability accompanies a
copy of the Program in return for a fee.

                     END OF TERMS AND CONDITIONS

            How to Apply These Terms to Your New Programs

  If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.

  To do so, attach the following notices to the program.  It is safest
to attach them to the start of each source file to most effectively
state the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.

    <one line to give the program's name and a brief idea of what it does.>
    Copyright (C) <year>  <name of author>

    This program is free software: you can redistribute it and/or modify
    it under the terms of the GNU General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    This program is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    GNU General Public License for more details.

    You should have received a copy of the GNU General Public License
    along with this program.  If not, see <https://www.gnu.org/licenses/>.

Also add information on how to contact you by electronic and paper mail.

  If the program does terminal interaction, make it output a short
notice like this when it starts in an interactive mode:

    <program>  Copyright (C) <year>  <name of author>
    This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
    This is free software, and you are welcome to redistribute it
    under certain conditions; type `show c' for details.

The hypothetical commands `show w' and `show c' should show the appropriate
parts of the General Public License.  Of course, your program's commands
might be different; for a GUI interface, you would use an "about box".

  You should also get your employer (if you work as a programmer) or school,
if any, to sign a "copyright disclaimer" for the program, if necessary.
For more information on this, and how to apply and follow the GNU GPL, see
<https://www.gnu.org/licenses/>.

  The GNU General Public License does not permit incorporating your program
into proprietary programs.  If your program is a subroutine library, you
may consider it more useful to permit linking proprietary applications with
the library.  If this is what you want to do, use the GNU Lesser General
Public License instead of this License.  But first, please read
<https://www.gnu.org/licenses/why-not-lgpl.html>.


================================================
FILE: README.md
================================================
# MMRec

<div align="center">
  <a href="https://github.com/enoche/MultimodalRecSys"><img width="300px" height="auto" src="https://github.com/enoche/MMRec/blob/master/images/logo.png"></a>
</div>


$\text{MMRec}$: A modern <ins>M</ins>ulti<ins>M</ins>odal <ins>Rec</ins>ommendation toolbox that simplifies your research [arXiv](https://arxiv.org/abs/2302.03497).  
:point_right: Check our [comprehensive survey on MMRec, arXiv](https://arxiv.org/abs/2302.04473).   
:point_right: Check the awesome [multimodal recommendation resources](https://github.com/enoche/MultimodalRecSys).  

## Toolbox
<p>
<img src="./images/MMRec.png" width="500">
</p>

## Supported Models
source code at: `src/models`

| **Model**       | **Paper**                                                                                             | **Conference/Journal** | **Code**    |
|------------------|--------------------------------------------------------------------------------------------------------|------------------------|-------------|
| **General models**  |                                                                                                        |                        |             |
| SelfCF              | [SelfCF: A Simple Framework for Self-supervised Collaborative Filtering](https://arxiv.org/abs/2107.03019)                                 | ACM TORS'23            | selfcfed_lgn.py  |
| LayerGCN            | [Layer-refined Graph Convolutional Networks for Recommendation](https://arxiv.org/abs/2207.11088)                                          | ICDE'23                | layergcn.py  |
| **Multimodal models**  |                                                                                                        |                        |             |
| VBPR              | [VBPR: Visual Bayesian Personalized Ranking from Implicit Feedback](https://arxiv.org/abs/1510.01784)                                              | AAAI'16                 | vbpr.py      |
| MMGCN             | [MMGCN: Multi-modal Graph Convolution Network for Personalized Recommendation of Micro-video](https://staff.ustc.edu.cn/~hexn/papers/mm19-MMGCN.pdf)               | MM'19                  | mmgcn.py  |
| ItemKNNCBF             | [Are We Really Making Much Progress? A Worrying Analysis of Recent Neural Recommendation Approaches](https://arxiv.org/abs/1907.06902)               | RecSys'19              | itemknncbf.py  |
| GRCN              | [Graph-Refined Convolutional Network for Multimedia Recommendation with Implicit Feedback](https://arxiv.org/abs/2111.02036)            | MM'20                  | grcn.py    |
| MVGAE             | [Multi-Modal Variational Graph Auto-Encoder for Recommendation Systems](https://ieeexplore.ieee.org/abstract/document/9535249)              | TMM'21                 | mvgae.py   |
| DualGNN           | [DualGNN: Dual Graph Neural Network for Multimedia Recommendation](https://ieeexplore.ieee.org/abstract/document/9662655)                   | TMM'21                 | dualgnn.py   |
| LATTICE           | [Mining Latent Structures for Multimedia Recommendation](https://arxiv.org/abs/2104.09036)                                               | MM'21                  | lattice.py  |
| SLMRec            | [Self-supervised Learning for Multimedia Recommendation](https://ieeexplore.ieee.org/document/9811387) | TMM'22                 |                  slmrec.py |
| **Newly added**  |                                                                                                        |                        |             |
| BM3         | [Bootstrap Latent Representations for Multi-modal Recommendation](https://dl.acm.org/doi/10.1145/3543507.3583251)                                          | WWW'23                 | bm3.py |
| FREEDOM | [A Tale of Two Graphs: Freezing and Denoising Graph Structures for Multimodal Recommendation](https://arxiv.org/abs/2211.06924)                                 | MM'23                  | freedom.py  |
| MGCN     | [Multi-View Graph Convolutional Network for Multimedia Recommendation](https://arxiv.org/abs/2308.03588)                       | MM'23               | mgcn.py          |
| DRAGON  | [Enhancing Dyadic Relations with Homogeneous Graphs for Multimodal Recommendation](https://arxiv.org/abs/2301.12097)                                 | ECAI'23                | dragon.py  |
| MG  | [Mirror Gradient: Towards Robust Multimodal Recommender Systems via Exploring Flat Local Minima](https://arxiv.org/abs/2402.11262)                                 | WWW'24                | common/trainer.py  |
| LGMRec  | [LGMRec: Local and Global Graph Learning for Multimodal Recommendation](https://arxiv.org/abs/2312.16400)                                 | AAAI'24                | lgmrec.py |
| DA-MRS  | [Improving Multi-modal Recommender Systems by Denoising and Aligning Multi-modal Content and User Feedback](https://dl.acm.org/doi/10.1145/3637528.3671703)                   | KDD'24                | damrs.py |
| SMORE   | [Spectrum-based Modality Representation Fusion Graph Convolutional Network for Multimodal Recommendation](https://arxiv.org/abs/2412.14978) | WSDM'25           | smore.py  |
| PGL | [Mind Individual Information! Principal Graph Learning for Multimedia Recommendation](https://ojs.aaai.org/index.php/AAAI/article/view/33429) | AAAI'25 | pgl.py |


#### Please consider to cite our paper if this framework helps you, thanks:
```
@inproceedings{zhou2023bootstrap,
author = {Zhou, Xin and Zhou, Hongyu and Liu, Yong and Zeng, Zhiwei and Miao, Chunyan and Wang, Pengwei and You, Yuan and Jiang, Feijun},
title = {Bootstrap Latent Representations for Multi-Modal Recommendation},
booktitle = {Proceedings of the ACM Web Conference 2023},
pages = {845–854},
year = {2023}
}

@article{zhou2023comprehensive,
      title={A Comprehensive Survey on Multimodal Recommender Systems: Taxonomy, Evaluation, and Future Directions}, 
      author={Hongyu Zhou and Xin Zhou and Zhiwei Zeng and Lingzi Zhang and Zhiqi Shen},
      year={2023},
      journal={arXiv preprint arXiv:2302.04473},
}

@inproceedings{zhou2023mmrec,
  title={Mmrec: Simplifying multimodal recommendation},
  author={Zhou, Xin},
  booktitle={Proceedings of the 5th ACM International Conference on Multimedia in Asia Workshops},
  pages={1--2},
  year={2023}
}
```


================================================
FILE: data/README.md
================================================

## Data
Download from Google Drive: [Baby/Sports/Elec](https://drive.google.com/drive/folders/13cBy1EA_saTUuXxVllKgtfci2A09jyaG?usp=sharing)
The data already contains text and image features extracted from Sentence-Transformers and CNN.

An alternative dataset for short-video recommendations: [MicroLens](https://drive.google.com/drive/folders/14UyTAh_YyDV8vzXteBJiy9jv8TBDK43w?usp=drive_link).
Thanks to @yxni98!

* Please move your downloaded data into this dir for model training.


================================================
FILE: evaluation/README.md
================================================
# EVALUATING THE SOTA MODELS

We validate the effectiveness and efficiency of state-of-the-art multimodal recommendation models by conducting extensive experiments on four public datasets. Furthermore, we investigate the principal determinants of model performance, including the impact of different modality information and of data split methods.

## Statistics of the evaluated datasets.
| Datasets | # Users | # Items | # Interactions |Sparsity|
|----------|--------|---------|---------|---------|
| Baby     | 19,445     | 7,050     |160,792|99.8827%|
| Sports   | 35,598      | 18,357   |296,337|99.9547%|
| FoodRec     | 61,668      | 21,874    |1,654,456|99.8774%|
| Elec     | 192,403      | 63,001     |1,689,188|99.9861%|


## Experimental Results
| Dataset                 | Model    | Recall@10          | Recall@20          | Recall@50          | NDCG@10            | NDCG@20            | NDCG@50            |
|-------------------------|----------|--------------------|--------------------|--------------------|--------------------|--------------------|--------------------|
| **Baby**   | BPR      | 0.0357             | 0.0575             | 0.1054             | 0.0192             | 0.0249             | 0.0345             |
|                         | LightGCN | 0.0479             | 0.0754             | 0.1333             | 0.0257             | 0.0328             | 0.0445             |
|                         | VBPR     | 0.0423             | 0.0663             | 0.1212             | 0.0223             | 0.0284             | 0.0396             |
|                         | MMGCN    | 0.0378             | 0.0615             | 0.1100             | 0.0200             | 0.0261             | 0.0359             |
|                         | DualGNN  | 0.0448             | 0.0716             | 0.1288             | 0.0240             | 0.0309             | 0.0424             |
|                         | GRCN     | 0.0539             | 0.0833             | 0.1464             | 0.0288             | 0.0363             | 0.0490             |
|                         | LATTICE  | 0.0547             | 0.0850             | 0.1477             | 0.0292             | 0.0370             | 0.0497             |
|                         | BM3      | 0.0564             | 0.0883             | 0.1477             | 0.0301             | 0.0383             | 0.0502             |
|                         | SLMRec   | 0.0529             | 0.0775             | 0.1252             | 0.0290             | 0.0353             | 0.0450             |
|                         | ADDVAE   | _0.0598_ | _0.091_  | _0.1508_ | _0.0323_ | _0.0404_ | _0.0525_ |
|                         | FREEDOM  | **0.0627**    | **0.0992**    | **0.1655**    | **0.0330**    | **0.0424**    | **0.0558**    |
| **Sports**  | BPR      | 0.0432             | 0.0653             | 0.1083             | 0.0241             | 0.0298             | 0.0385             |
|                         | LightGCN | 0.0569             | 0.0864             | 0.1414             | 0.0311             | 0.0387             | 0.0498             |
|                         | VBPR     | 0.0558             | 0.0856             | 0.1391             | 0.0307             | 0.0384             | 0.0492             |
|                         | MMGCN    | 0.0370             | 0.0605             | 0.1078             | 0.0193             | 0.0254             | 0.0350             |
|                         | DualGNN  | 0.0568             | 0.0859             | 0.1392             | 0.0310             | 0.0385             | 0.0493             |
|                         | GRCN     | 0.0598             | 0.0915             | 0.1509             | 0.0332             | 0.0414             | 0.0535             |
|                         | LATTICE  | 0.0620             | 0.0953             | 0.1561             | 0.0335             | 0.0421             | 0.0544             |
|                         | BM3      | 0.0656             | 0.0980             | 0.1581             | 0.0355             | 0.0438             | 0.0561             |
|                         | SLMRec   | 0.0663             | 0.0990             | 0.1543             | 0.0365             | 0.0450             | 0.0562             |
|                         | ADDVAE   | _0.0709_ | _0.1035_ | _0.1663_ | _0.0389_    | _0.0473_ | _0.0600_ |
|                         | FREEDOM  | **0.0717**    | **0.1089**    | **0.1768**    | **0.0385** | **0.0481**    | **0.0618**    |
| **FoodRec** | BPR      | 0.0303             | 0.0511             | 0.0948             | 0.0188             | 0.0250             | 0.0356             |
|                         | LightGCN | 0.0331             | 0.0546             | 0.1003             | 0.0210             | 0.0274             | 0.0386             |
|                         | VBPR     | 0.0306             | 0.0516             | 0.0972             | 0.0191             | 0.0254             | 0.0365             |
|                         | MMGCN    | 0.0307             | 0.0510             | 0.0943             | 0.0192             | 0.0253             | 0.0359             |
|                         | DualGNN  | _0.0338_ | 0.0559             | _0.1027_ | _0.0214_ | _0.0280_ | _0.0394_ |
|                         | GRCN     | **0.0356**   | **0.0578**    | **0.1063**    | **0.0226**    | **0.0295**    | **0.0411**    |
|                         | LATTICE  | 0.0336             | _0.0560_| 0.1012             | 0.0211             | 0.0277             | 0.0388             |
|                         | BM3      | 0.0334             | 0.0553             | 0.0994             | 0.0208             | 0.0274             | 0.0381             |
|                         | SLMRec   | 0.0323             | 0.0515             | 0.0907             | 0.0208             | 0.0266             | 0.0362             |
|                         | ADDVAE   | 0.0309             | 0.0508             | 0.093              | 0.0186             | 0.0247             | 0.035              |
|                         | FREEDOM  | 0.0333             | 0.0556             | 0.1009             | 0.0212             | 0.0279             | 0.0389             |
| **Elec**    | BPR      | 0.0235             | 0.0367             | 0.0621             | 0.0127             | 0.0161             | 0.0212             |
|                         | LightGCN | 0.0363             | 0.0540             | 0.0879             | 0.0204             | 0.0250             | 0.0318             |
|                         | VBPR     | 0.0293             | 0.0458             | 0.0778             | 0.0159             | 0.0202             | 0.0267             |
|                         | MMGCN    | 0.0213             | 0.0343             | 0.0610             | 0.0112             | 0.0146             | 0.0200             |
|                         | DualGNN  | 0.0365             | 0.0542             | 0.0875             | 0.0206             | 0.0252             | 0.0319             |
|                         | GRCN     | 0.0389             | 0.0590             | 0.0970             | 0.0216             | 0.0268             | 0.0345             |
|                         | LATTICE  | -                  | -                  | -                  | -                  | -                  | -                  |
|                         | BM3      | 0.0437             | 0.0648             | 0.1021             | 0.0247             | 0.0302             | 0.0378             |
|                         | SLMRec   | _0.0443_ | _0.0651_ | _0.1038_ | _0.0249_ | _0.0303_ | _0.0382_ |
|                         | ADDVAE   | **0.0451**    | **0.0665**    | **0.1066**    | **0.0253**    | **0.0308**    | **0.0390**    |
|                         | FREEDOM  | 0.0396             | 0.0601             | 0.0998             | 0.0220             | 0.0273             | 0.0353             |

### Ablation Study

#### Recommendation performance comparison using different data split methods.:

We evaluate the performance of various recommendation models using different data splitting methods. The offline evaluation is based on the historical item ratings or the implicit item feedback. As this method relies on the user-item interactions and the models are all learning based on the supervised signals, we need to split the interactions into train, validation and test sets. There are three main split strategies that we applied to compare the performance:

• Random split: As the name suggests, this strategy randomly partitions each user's interactions into train and test sets according to the given ratio. Its disadvantage is that the results are hard to reproduce unless the authors publish exactly how the data were split; moreover, ignoring time makes it an unrealistic scenario.

• User time split: This temporal strategy splits the historical interactions based on the interaction timestamp by the given ratio (e.g., train:validation:test = 8:1:1), taking the last portion of each user's interactions as the test set. Although it considers timestamps, it is still not fully realistic, because the split is performed per user and does not consider a global time boundary.

• Global time split: This strategy fixes a global time point shared by all users according to the splitting ratio; interactions after that time point form the test set. Additionally, the users appearing after the global temporal boundary must also exist in the training set, which follows the most realistic and strict setting. The limitation of this strategy is that the number of users is reduced, because users that do not appear in the training set must be removed.

Our experiments on the Sports dataset, using these three splitting strategies, provide insights into their impact on recommendation performance. The table below presents the performance comparison results in terms of Recall@k and NDCG@k where k=10,20, and the second table shows the performance ranking of models based on Recall@20 and NDCG@20.

| Dataset | Model    |          | Recall@10 |             |          | Recall@20 |             |
|---------|----------|----------|-----------|-------------|----------|-----------|-------------|
|         |          | Random   | User Time | Global Time | Random   | User Time | Global Time |
|         | MMGCN    | 0.0384   | 0.0266    | 0.0140      | 0.0611   | 0.0446    | 0.0245      |
|         | BPR      | 0.0444   | 0.0322    | 0.0152      | 0.0663   | 0.0509    | 0.0258      |
|         | VBPR     | 0.0563   | 0.0385    | 0.0176      | 0.0851   | 0.0620    | 0.0298      |
|         | DualGNN  | 0.0576   | 0.0403    | 0.0181      | 0.0859   | 0.0611    | 0.0297      |
| sports  | GRCN     | 0.0604   | 0.0418    | 0.0167      | 0.0915   | 0.0666    | 0.0286      |
|         | LightGCN | 0.0568   | 0.0405    | 0.0205      | 0.0863   | 0.0663    | 0.0336      |
|         | LATTICE  | 0.0641   | 0.0450    | 0.0207      | 0.0964   | 0.0699    | 0.0337      |
|         | BM3      | 0.0646   | 0.0447    | 0.0213      | 0.0955   | 0.0724    | 0.0336      |
|         | SLMRec   | 0.0651   | 0.0470    | 0.0220      | 0.0985   | 0.0733    | 0.0350      |
|         | FREEDOM  | 0.0708   | 0.0490    | 0.0226      | 0.1080   | 0.0782    | 0.0372      |
| Dataset | Model    |          | NDCG@10   |             |          | NDCG@20   |             |
|         |          | Random   | User Time | Global Time | Random   | User Time | Global Time |
|         | MMGCN    | 0.0202   | 0.0134    | 0.0091      | 0.0261   | 0.0180    | 0.0125      |
|         | BPR      | 0.0245   | 0.0169    | 0.0102      | 0.0302   | 0.0218    | 0.0135      |
|         | VBPR     | 0.0304   | 0.0204    | 0.0115      | 0.0378   | 0.0265    | 0.0153      |
|         | DualGNN  | 0.0321   | 0.0214    | 0.0118      | 0.0394   | 0.0268    | 0.0155      |
| sports  | GRCN     | 0.0332   | 0.0219    | 0.0101      | 0.0412   | 0.0282    | 0.0138      |
|         | LightGCN | 0.0315   | 0.0220    | 0.0139      | 0.0391   | 0.0286    | 0.0180      |
|         | LATTICE  | 0.0351   | 0.0238    | 0.0138      | 0.0434   | 0.0302    | 0.0177      |
|         | BM3      | 0.0356   | 0.0237    | 0.0144      | 0.0436   | 0.0308    | 0.0182      |
|         | SLMRec   | 0.0364   | 0.0253    | 0.0148      | 0.0450   | 0.0321    | 0.0189      |
|         | FREEDOM  | 0.0388   | 0.0255    | 0.0151      | 0.0485   | 0.0330    | 0.0197      |

As demonstrated above, different data splitting strategies lead to varied performance outcomes for the same dataset and evaluation metrics. This variability presents a challenge in comparing the effectiveness of different models when they are based on different data split strategies.

|  Model   |        | Sports, NDCG@20   |             |
|----------|--------|-------------------|-------------|
|          | Random | User Time         | Global Time |
| MMGCN    | 10     | 10                | 10          |
| BPR      | 9      | 9                 | 8↑1         |
| VBPR     | 8      | 8                 | 7↑1         |
| LightGCN | 7      | 5↑2               | 4↑3         |
| DualGNN  | 6      | 7↓1               | 6           |
| GRCN     | 5      | 6↓1               | 9↓4         |
| LATTICE  | 4      | 4                 | 5↓1         |
| BM3      | 3      | 3                 | 3           |
| SLMRec   | 2      | 2                 | 2           |
| FREEDOM  | 1      | 1                 | 1           |
| **Model**    |        | **Sports, Recall@20** |             |
|          | Random | User Time         | Global Time |
| MMGCN    | 10     | 10                | 10          |
| BPR      | 9      | 9                 | 9           |
| VBPR     | 8      | 7↑1               | 6↑2         |
| DualGNN  | 7      | 8↓1               | 7           |
| LightGCN | 6      | 6                 | 5↑1         |
| GRCN     | 5      | 5                 | 8↓3         |
| BM3      | 4      | 3↑1               | 4           |
| LATTICE  | 3      | 4↓1               | 3           |
| SLMRec   | 2      | 2                 | 2           |
| FREEDOM  | 1      | 1                 | 1           |

The above table reports the ranks of SOTA models under each splitting strategy. The rows are sorted by the performance of the models under the random splitting strategy, with the up and down arrows indicating the relative rank swaps compared with random splitting. As shown, ranking swaps are observed between the models under different splitting strategies.

#### Recommendation performance comparison using Different Modalities
We are interested in how the modality information benefits the recommendation, and which modality contributes more. We aim to understand the specific benefits of different modalities in recommendation systems and provide guidelines for researchers on selecting appropriate modalities. We evaluate it by feeding the single modality information, and compare the performance between using both modalities and the single modality. 

The following figure uses Recall@20 to visually summarize the impact of different modalities on various models. The orange point represents the performance with both modalities, the green point represents the textual modality, and the blue point the visual modality. The specific numerical values are reported in our GitHub repository.


<img src="https://github.com/hongyurain/Recommendation-with-modality-information/blob/main/IMG/modality-baby.jpg" alt="image-1" height="50%" width="50%" /><img src="https://github.com/hongyurain/Recommendation-with-modality-information/blob/main/IMG/modality-sports.jpg" alt="image-2" height="50%" width="50%" />

## Please consider citing our paper if these results help you, thanks:
```



================================================
FILE: preprocessing/0rating2inter.ipynb
================================================
{
 "cells": [
  {
   "cell_type": "markdown",
   "source": [
    "# 从ratings_Sports_and_Outdoors.csv文件中提取U-I交互图, 5-core后重新编号\n",
    "- Extracting U-I interactions and performing 5-core, re-indexing\n",
    "- dataset located at: http://jmcauley.ucsd.edu/data/amazon/links.html, rating only file in \"Small\" subsets for experimentation"
   ],
   "metadata": {
    "collapsed": false
   }
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "import os, csv\n",
    "import pandas as pd"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "outputs": [],
   "source": [
    "os.chdir('/home/enoche/MMRec/Sports14')\n",
    "os.getcwd()"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n",
     "is_executing": true
    }
   }
  },
  {
   "cell_type": "markdown",
   "source": [
    "## 先5-core过滤\n",
    "## 5-core filtering"
   ],
   "metadata": {
    "collapsed": false
   }
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "shape: (3268695, 4)\n"
     ]
    },
    {
     "data": {
      "text/plain": "           userID      itemID  rating   timestamp\n0  A3PMSRCL80KSA1  0000031852     4.0  1388275200\n1  A1SNLWGLFXD70K  0000031852     4.0  1392940800\n2  A1KJ4CVG87QW09  0000031852     4.0  1389657600\n3    AA9ITO6ZLZW6  0000031852     5.0  1399507200\n4    APJ5ULJ1RMZ4  0000031852     1.0  1398556800",
      "text/html": "<div>\n<style scoped>\n    .dataframe tbody tr th:only-of-type {\n        vertical-align: middle;\n    }\n\n    .dataframe tbody tr th {\n        vertical-align: top;\n    }\n\n    .dataframe thead th {\n        text-align: right;\n    }\n</style>\n<table border=\"1\" class=\"dataframe\">\n  <thead>\n    <tr style=\"text-align: right;\">\n      <th></th>\n      <th>userID</th>\n      <th>itemID</th>\n      <th>rating</th>\n      <th>timestamp</th>\n    </tr>\n  </thead>\n  <tbody>\n    <tr>\n      <th>0</th>\n      <td>A3PMSRCL80KSA1</td>\n      <td>0000031852</td>\n      <td>4.0</td>\n      <td>1388275200</td>\n    </tr>\n    <tr>\n      <th>1</th>\n      <td>A1SNLWGLFXD70K</td>\n      <td>0000031852</td>\n      <td>4.0</td>\n      <td>1392940800</td>\n    </tr>\n    <tr>\n      <th>2</th>\n      <td>A1KJ4CVG87QW09</td>\n      <td>0000031852</td>\n      <td>4.0</td>\n      <td>1389657600</td>\n    </tr>\n    <tr>\n      <th>3</th>\n      <td>AA9ITO6ZLZW6</td>\n      <td>0000031852</td>\n      <td>5.0</td>\n      <td>1399507200</td>\n    </tr>\n    <tr>\n      <th>4</th>\n      <td>APJ5ULJ1RMZ4</td>\n      <td>0000031852</td>\n      <td>1.0</td>\n      <td>1398556800</td>\n    </tr>\n  </tbody>\n</table>\n</div>"
     },
     "execution_count": 3,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "df = pd.read_csv('ratings_Sports_and_Outdoors.csv', names=['userID', 'itemID', 'rating', 'timestamp'], header=None)\n",
    "print(f'shape: {df.shape}')\n",
    "df[:5]"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n"
    }
   }
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "After dropped: (3268695, 4)\n"
     ]
    },
    {
     "data": {
      "text/plain": "           userID      itemID  rating   timestamp\n0  A3PMSRCL80KSA1  0000031852     4.0  1388275200\n1  A1SNLWGLFXD70K  0000031852     4.0  1392940800\n2  A1KJ4CVG87QW09  0000031852     4.0  1389657600",
      "text/html": "<div>\n<style scoped>\n    .dataframe tbody tr th:only-of-type {\n        vertical-align: middle;\n    }\n\n    .dataframe tbody tr th {\n        vertical-align: top;\n    }\n\n    .dataframe thead th {\n        text-align: right;\n    }\n</style>\n<table border=\"1\" class=\"dataframe\">\n  <thead>\n    <tr style=\"text-align: right;\">\n      <th></th>\n      <th>userID</th>\n      <th>itemID</th>\n      <th>rating</th>\n      <th>timestamp</th>\n    </tr>\n  </thead>\n  <tbody>\n    <tr>\n      <th>0</th>\n      <td>A3PMSRCL80KSA1</td>\n      <td>0000031852</td>\n      <td>4.0</td>\n      <td>1388275200</td>\n    </tr>\n    <tr>\n      <th>1</th>\n      <td>A1SNLWGLFXD70K</td>\n      <td>0000031852</td>\n      <td>4.0</td>\n      <td>1392940800</td>\n    </tr>\n    <tr>\n      <th>2</th>\n      <td>A1KJ4CVG87QW09</td>\n      <td>0000031852</td>\n      <td>4.0</td>\n      <td>1389657600</td>\n    </tr>\n  </tbody>\n</table>\n</div>"
     },
     "execution_count": 4,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "k_core = 5\n",
    "learner_id, course_id, tmstmp_str = 'userID', 'itemID', 'timestamp'\n",
    "\n",
    "df.dropna(subset=[learner_id, course_id, tmstmp_str], inplace=True)\n",
    "df.drop_duplicates(subset=[learner_id, course_id, tmstmp_str], inplace=True)\n",
    "print(f'After dropped: {df.shape}')\n",
    "df[:3]"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n"
    }
   }
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "outputs": [],
   "source": [
    "from collections import Counter\n",
    "import numpy as np\n",
    "\n",
    "min_u_num, min_i_num = 5, 5\n",
    "\n",
    "def get_illegal_ids_by_inter_num(df, field, max_num=None, min_num=None):\n",
    "    if field is None:\n",
    "        return set()\n",
    "    if max_num is None and min_num is None:\n",
    "        return set()\n",
    "\n",
    "    max_num = max_num or np.inf\n",
    "    min_num = min_num or -1\n",
    "\n",
    "    ids = df[field].values\n",
    "    inter_num = Counter(ids)\n",
    "    ids = {id_ for id_ in inter_num if inter_num[id_] < min_num or inter_num[id_] > max_num}\n",
    "    print(f'{len(ids)} illegal_ids_by_inter_num, field={field}')\n",
    "\n",
    "    return ids\n",
    "\n",
    "\n",
    "def filter_by_k_core(df):\n",
    "    while True:\n",
    "        ban_users = get_illegal_ids_by_inter_num(df, field=learner_id, max_num=None, min_num=min_u_num)\n",
    "        ban_items = get_illegal_ids_by_inter_num(df, field=course_id, max_num=None, min_num=min_i_num)\n",
    "        if len(ban_users) == 0 and len(ban_items) == 0:\n",
    "            return\n",
    "\n",
    "        dropped_inter = pd.Series(False, index=df.index)\n",
    "        if learner_id:\n",
    "            dropped_inter |= df[learner_id].isin(ban_users)\n",
    "        if course_id:\n",
    "            dropped_inter |= df[course_id].isin(ban_items)\n",
    "        print(f'{len(dropped_inter)} dropped interactions')\n",
    "        df.drop(df.index[dropped_inter], inplace=True)\n",
    "\n"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n"
    }
   }
  },
  {
   "cell_type": "markdown",
   "source": [
    "## k-core"
   ],
   "metadata": {
    "collapsed": false
   }
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "1906153 illegal_ids_by_inter_num, field=userID\n",
      "376127 illegal_ids_by_inter_num, field=itemID\n",
      "3268695 dropped interactions\n",
      "22213 illegal_ids_by_inter_num, field=userID\n",
      "54919 illegal_ids_by_inter_num, field=itemID\n",
      "589029 dropped interactions\n",
      "18323 illegal_ids_by_inter_num, field=userID\n",
      "3743 illegal_ids_by_inter_num, field=itemID\n",
      "422478 dropped interactions\n",
      "2298 illegal_ids_by_inter_num, field=userID\n",
      "4388 illegal_ids_by_inter_num, field=itemID\n",
      "349749 dropped interactions\n",
      "3331 illegal_ids_by_inter_num, field=userID\n",
      "639 illegal_ids_by_inter_num, field=itemID\n",
      "326238 dropped interactions\n",
      "579 illegal_ids_by_inter_num, field=userID\n",
      "1012 illegal_ids_by_inter_num, field=itemID\n",
      "311188 dropped interactions\n",
      "897 illegal_ids_by_inter_num, field=userID\n",
      "169 illegal_ids_by_inter_num, field=itemID\n",
      "305054 dropped interactions\n",
      "155 illegal_ids_by_inter_num, field=userID\n",
      "308 illegal_ids_by_inter_num, field=itemID\n",
      "300866 dropped interactions\n",
      "301 illegal_ids_by_inter_num, field=userID\n",
      "47 illegal_ids_by_inter_num, field=itemID\n",
      "299031 dropped interactions\n",
      "50 illegal_ids_by_inter_num, field=userID\n",
      "79 illegal_ids_by_inter_num, field=itemID\n",
      "297646 dropped interactions\n",
      "87 illegal_ids_by_inter_num, field=userID\n",
      "11 illegal_ids_by_inter_num, field=itemID\n",
      "297132 dropped interactions\n",
      "16 illegal_ids_by_inter_num, field=userID\n",
      "24 illegal_ids_by_inter_num, field=itemID\n",
      "296741 dropped interactions\n",
      "24 illegal_ids_by_inter_num, field=userID\n",
      "1 illegal_ids_by_inter_num, field=itemID\n",
      "296581 dropped interactions\n",
      "1 illegal_ids_by_inter_num, field=userID\n",
      "8 illegal_ids_by_inter_num, field=itemID\n",
      "296481 dropped interactions\n",
      "8 illegal_ids_by_inter_num, field=userID\n",
      "0 illegal_ids_by_inter_num, field=itemID\n",
      "296445 dropped interactions\n",
      "0 illegal_ids_by_inter_num, field=userID\n",
      "5 illegal_ids_by_inter_num, field=itemID\n",
      "296413 dropped interactions\n",
      "5 illegal_ids_by_inter_num, field=userID\n",
      "0 illegal_ids_by_inter_num, field=itemID\n",
      "296393 dropped interactions\n",
      "0 illegal_ids_by_inter_num, field=userID\n",
      "3 illegal_ids_by_inter_num, field=itemID\n",
      "296373 dropped interactions\n",
      "4 illegal_ids_by_inter_num, field=userID\n",
      "0 illegal_ids_by_inter_num, field=itemID\n",
      "296361 dropped interactions\n",
      "0 illegal_ids_by_inter_num, field=userID\n",
      "1 illegal_ids_by_inter_num, field=itemID\n",
      "296345 dropped interactions\n",
      "1 illegal_ids_by_inter_num, field=userID\n",
      "0 illegal_ids_by_inter_num, field=itemID\n",
      "296341 dropped interactions\n",
      "0 illegal_ids_by_inter_num, field=userID\n",
      "0 illegal_ids_by_inter_num, field=itemID\n",
      "k-core shape: (296337, 4)\n",
      "shape after k-core: (296337, 4)\n"
     ]
    },
    {
     "data": {
      "text/plain": "             userID      itemID  rating   timestamp\n564    AIXZKN4ACSKI  1881509818     5.0  1390694400\n565  A1L5P841VIO02V  1881509818     5.0  1328140800",
      "text/html": "<div>\n<style scoped>\n    .dataframe tbody tr th:only-of-type {\n        vertical-align: middle;\n    }\n\n    .dataframe tbody tr th {\n        vertical-align: top;\n    }\n\n    .dataframe thead th {\n        text-align: right;\n    }\n</style>\n<table border=\"1\" class=\"dataframe\">\n  <thead>\n    <tr style=\"text-align: right;\">\n      <th></th>\n      <th>userID</th>\n      <th>itemID</th>\n      <th>rating</th>\n      <th>timestamp</th>\n    </tr>\n  </thead>\n  <tbody>\n    <tr>\n      <th>564</th>\n      <td>AIXZKN4ACSKI</td>\n      <td>1881509818</td>\n      <td>5.0</td>\n      <td>1390694400</td>\n    </tr>\n    <tr>\n      <th>565</th>\n      <td>A1L5P841VIO02V</td>\n      <td>1881509818</td>\n      <td>5.0</td>\n      <td>1328140800</td>\n    </tr>\n  </tbody>\n</table>\n</div>"
     },
     "execution_count": 6,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "filter_by_k_core(df)\n",
    "print(f'k-core shape: {df.shape}')\n",
    "print(f'shape after k-core: {df.shape}')\n",
    "df[:2]"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n"
    }
   }
  },
  {
   "cell_type": "markdown",
   "source": [
    "## Re-index"
   ],
   "metadata": {
    "collapsed": false
   }
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "outputs": [],
   "source": [
    "df.reset_index(drop=True, inplace=True)"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n"
    }
   }
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "mapping dumped...\n"
     ]
    }
   ],
   "source": [
    "\n",
    "i_mapping_file = 'i_id_mapping.csv'\n",
    "u_mapping_file = 'u_id_mapping.csv'\n",
    "\n",
    "splitting = [0.8, 0.1, 0.1]\n",
    "uid_field, iid_field = learner_id, course_id\n",
    "\n",
    "uni_users = pd.unique(df[uid_field])\n",
    "uni_items = pd.unique(df[iid_field])\n",
    "\n",
    "# start from 0\n",
    "u_id_map = {k: i for i, k in enumerate(uni_users)}\n",
    "i_id_map = {k: i for i, k in enumerate(uni_items)}\n",
    "\n",
    "df[uid_field] = df[uid_field].map(u_id_map)\n",
    "df[iid_field] = df[iid_field].map(i_id_map)\n",
    "df[uid_field] = df[uid_field].astype(int)\n",
    "df[iid_field] = df[iid_field].astype(int)\n",
    "\n",
    "# dump\n",
    "rslt_dir = './'\n",
    "u_df = pd.DataFrame(list(u_id_map.items()), columns=['user_id', 'userID'])\n",
    "i_df = pd.DataFrame(list(i_id_map.items()), columns=['asin', 'itemID'])\n",
    "\n",
    "u_df.to_csv(os.path.join(rslt_dir, u_mapping_file), sep='\\t', index=False)\n",
    "i_df.to_csv(os.path.join(rslt_dir, i_mapping_file), sep='\\t', index=False)\n",
    "print(f'mapping dumped...')"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n"
    }
   }
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "outputs": [],
   "source": [
    "\n",
    "# =========2. splitting\n",
    "print(f'splitting ...')\n",
    "tot_ratio = sum(splitting)\n",
    "# remove 0.0 in ratios\n",
    "ratios = [i for i in splitting if i > .0]\n",
    "ratios = [_ / tot_ratio for _ in ratios]\n",
    "split_ratios = np.cumsum(ratios)[:-1]\n",
    "\n",
    "#df[tmstmp_str] = df[tmstmp_str].map(lambda x: datetime.strptime(x, \"%Y-%m-%dT%H:%M:%SZ\"))\n",
    "split_ratios"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n",
     "is_executing": true
    }
   }
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "columns: Index(['userID', 'itemID', 'rating', 'timestamp', 'x_label'], dtype='object')\n"
     ]
    },
    {
     "data": {
      "text/plain": "   userID  itemID  rating   timestamp  x_label\n1       1       0     5.0  1328140800        0\n2       2       0     4.0  1330387200        0\n3       3       0     4.0  1328400000        0\n4       4       0     4.0  1366675200        0\n5       5       0     5.0  1351814400        0",
      "text/html": "<div>\n<style scoped>\n    .dataframe tbody tr th:only-of-type {\n        vertical-align: middle;\n    }\n\n    .dataframe tbody tr th {\n        vertical-align: top;\n    }\n\n    .dataframe thead th {\n        text-align: right;\n    }\n</style>\n<table border=\"1\" class=\"dataframe\">\n  <thead>\n    <tr style=\"text-align: right;\">\n      <th></th>\n      <th>userID</th>\n      <th>itemID</th>\n      <th>rating</th>\n      <th>timestamp</th>\n      <th>x_label</th>\n    </tr>\n  </thead>\n  <tbody>\n    <tr>\n      <th>1</th>\n      <td>1</td>\n      <td>0</td>\n      <td>5.0</td>\n      <td>1328140800</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>2</th>\n      <td>2</td>\n      <td>0</td>\n      <td>4.0</td>\n      <td>1330387200</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>3</th>\n      <td>3</td>\n      <td>0</td>\n      <td>4.0</td>\n      <td>1328400000</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>4</th>\n      <td>4</td>\n      <td>0</td>\n      <td>4.0</td>\n      <td>1366675200</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>5</th>\n      <td>5</td>\n      <td>0</td>\n      <td>5.0</td>\n      <td>1351814400</td>\n      <td>0</td>\n    </tr>\n  </tbody>\n</table>\n</div>"
     },
     "execution_count": 10,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "ts_id = 'timestamp'\n",
    "\n",
    "split_timestamps = list(np.quantile(df[ts_id], split_ratios))\n",
    "# get df training dataset unique users/items\n",
    "df_train = df.loc[df[ts_id] < split_timestamps[0]].copy()\n",
    "df_val = df.loc[(split_timestamps[0] <= df[ts_id]) & (df[ts_id] < split_timestamps[1])].copy()\n",
    "df_test = df.loc[(split_timestamps[1] <= df[ts_id])].copy()\n",
    "\n",
    "x_label, rslt_file = 'x_label', 'sports14-indexed.inter'\n",
    "df_train[x_label] = 0\n",
    "df_val[x_label] = 1\n",
    "df_test[x_label] = 2\n",
    "temp_df = pd.concat([df_train, df_val, df_test])\n",
    "temp_df = temp_df[[learner_id, course_id, 'rating', ts_id, x_label]]\n",
    "print(f'columns: {temp_df.columns}')\n",
    "\n",
    "temp_df.columns = [learner_id, course_id, 'rating', ts_id, x_label]\n",
    "\n",
    "temp_df.to_csv(os.path.join(rslt_dir, rslt_file), sep='\\t', index=False)\n",
    "temp_df[:5]\n",
    "#print('done!')"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n"
    }
   }
  },
  {
   "cell_type": "markdown",
   "source": [],
   "metadata": {
    "collapsed": false
   }
  },
  {
   "cell_type": "markdown",
   "source": [],
   "metadata": {
    "collapsed": false
   }
  },
  {
   "cell_type": "markdown",
   "source": [
    "## Reload"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%% md\n"
    }
   }
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "shape: (296337, 5)\n"
     ]
    },
    {
     "data": {
      "text/plain": "   userID  itemID  rating   timestamp  x_label\n0       1       0     5.0  1328140800        0\n1       2       0     4.0  1330387200        0\n2       3       0     4.0  1328400000        0\n3       4       0     4.0  1366675200        0",
      "text/html": "<div>\n<style scoped>\n    .dataframe tbody tr th:only-of-type {\n        vertical-align: middle;\n    }\n\n    .dataframe tbody tr th {\n        vertical-align: top;\n    }\n\n    .dataframe thead th {\n        text-align: right;\n    }\n</style>\n<table border=\"1\" class=\"dataframe\">\n  <thead>\n    <tr style=\"text-align: right;\">\n      <th></th>\n      <th>userID</th>\n      <th>itemID</th>\n      <th>rating</th>\n      <th>timestamp</th>\n      <th>x_label</th>\n    </tr>\n  </thead>\n  <tbody>\n    <tr>\n      <th>0</th>\n      <td>1</td>\n      <td>0</td>\n      <td>5.0</td>\n      <td>1328140800</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>1</th>\n      <td>2</td>\n      <td>0</td>\n      <td>4.0</td>\n      <td>1330387200</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>2</th>\n      <td>3</td>\n      <td>0</td>\n      <td>4.0</td>\n      <td>1328400000</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>3</th>\n      <td>4</td>\n      <td>0</td>\n      <td>4.0</td>\n      <td>1366675200</td>\n      <td>0</td>\n    </tr>\n  </tbody>\n</table>\n</div>"
     },
     "execution_count": 11,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "indexed_df = pd.read_csv(rslt_file, sep='\\t')\n",
    "print(f'shape: {indexed_df.shape}')\n",
    "indexed_df[:4]"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n"
    }
   }
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "# of unique learners: 35598\n",
      "# of unique courses: 18357\n",
      "min/max of unique learners: 0/35597\n",
      "min/max of unique courses: 0/18356\n"
     ]
    }
   ],
   "source": [
    "u_uni = indexed_df[learner_id].unique()\n",
    "c_uni = indexed_df[course_id].unique()\n",
    "\n",
    "print(f'# of unique learners: {len(u_uni)}')\n",
    "print(f'# of unique courses: {len(c_uni)}')\n",
    "\n",
    "print('min/max of unique learners: {0}/{1}'.format(min(u_uni), max(u_uni)))\n",
    "print('min/max of unique courses: {0}/{1}'.format(min(c_uni), max(c_uni)))\n"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n"
    }
   }
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 2
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython2",
   "version": "2.7.6"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 0
}

================================================
FILE: preprocessing/1splitting.ipynb
================================================
{
 "cells": [
  {
   "cell_type": "markdown",
   "source": [
     "# Train/Validation/Test data splitting, based on the 5-core interaction data generated by 0rating2inter.ipynb\n",
     "- Based on generated interactions, perform data splitting\n"
   ],
   "metadata": {
    "collapsed": false
   }
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "import os, csv\n",
    "import pandas as pd"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "outputs": [],
   "source": [
    "os.chdir('/home/enoche/MMRec/Sports14')\n",
    "os.getcwd()"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n",
     "is_executing": true
    }
   }
  },
  {
   "cell_type": "markdown",
   "source": [
     "## Load the previously generated interactions"
   ],
   "metadata": {
    "collapsed": false
   }
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "shape: (296337, 5)\n"
     ]
    },
    {
     "data": {
      "text/plain": "   userID  itemID  rating   timestamp  x_label\n0       1       0     5.0  1328140800        0\n1       2       0     4.0  1330387200        0\n2       3       0     4.0  1328400000        0\n3       4       0     4.0  1366675200        0",
      "text/html": "<div>\n<style scoped>\n    .dataframe tbody tr th:only-of-type {\n        vertical-align: middle;\n    }\n\n    .dataframe tbody tr th {\n        vertical-align: top;\n    }\n\n    .dataframe thead th {\n        text-align: right;\n    }\n</style>\n<table border=\"1\" class=\"dataframe\">\n  <thead>\n    <tr style=\"text-align: right;\">\n      <th></th>\n      <th>userID</th>\n      <th>itemID</th>\n      <th>rating</th>\n      <th>timestamp</th>\n      <th>x_label</th>\n    </tr>\n  </thead>\n  <tbody>\n    <tr>\n      <th>0</th>\n      <td>1</td>\n      <td>0</td>\n      <td>5.0</td>\n      <td>1328140800</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>1</th>\n      <td>2</td>\n      <td>0</td>\n      <td>4.0</td>\n      <td>1330387200</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>2</th>\n      <td>3</td>\n      <td>0</td>\n      <td>4.0</td>\n      <td>1328400000</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>3</th>\n      <td>4</td>\n      <td>0</td>\n      <td>4.0</td>\n      <td>1366675200</td>\n      <td>0</td>\n    </tr>\n  </tbody>\n</table>\n</div>"
     },
     "execution_count": 3,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "rslt_file = 'sports14-indexed.inter'\n",
    "df = pd.read_csv(rslt_file, sep='\\t')\n",
    "print(f'shape: {df.shape}')\n",
    "df[:4]"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n"
    }
   }
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "outputs": [],
   "source": [
    "import random\n",
    "import numpy as np"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n"
    }
   }
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "outputs": [
    {
     "data": {
      "text/plain": "        userID  itemID  rating   timestamp  x_label\n154667       0   11981     2.0  1390694400        1\n295557       0   15852     5.0  1390694400        1\n189316       0   17787     3.0  1391990400        2\n151302       0       0     5.0  1390694400        1\n1820         0    3369     5.0  1405123200        2\n60040        0   13372     5.0  1391990400        2\n199192       0    5458     5.0  1405123200        2\n163234       0    3327     3.0  1391990400        2\n60837        1    2322     5.0  1337212800        0\n233786       1    4123     5.0  1354838400        0\n163460       1   14212     5.0  1368230400        0\n206628       1    1542     4.0  1302220800        0\n261633       1    8802     4.0  1368230400        0\n99658        1    9198     5.0  1318377600        0\n268935       1    7215     5.0  1285372800        0\n77956        1   13468     5.0  1328140800        0\n105444       1    2374     5.0  1391558400        1\n237889       1    7169     5.0  1302220800        0\n173295       1    6677     5.0  1318377600        0\n50074        1   15278     5.0  1344902400        0",
      "text/html": "<div>\n<style scoped>\n    .dataframe tbody tr th:only-of-type {\n        vertical-align: middle;\n    }\n\n    .dataframe tbody tr th {\n        vertical-align: top;\n    }\n\n    .dataframe thead th {\n        text-align: right;\n    }\n</style>\n<table border=\"1\" class=\"dataframe\">\n  <thead>\n    <tr style=\"text-align: right;\">\n      <th></th>\n      <th>userID</th>\n      <th>itemID</th>\n      <th>rating</th>\n      <th>timestamp</th>\n      <th>x_label</th>\n    </tr>\n  </thead>\n  <tbody>\n    <tr>\n      <th>154667</th>\n      <td>0</td>\n      <td>11981</td>\n      <td>2.0</td>\n      <td>1390694400</td>\n      <td>1</td>\n    </tr>\n    <tr>\n      <th>295557</th>\n      <td>0</td>\n      <td>15852</td>\n      <td>5.0</td>\n      <td>1390694400</td>\n      <td>1</td>\n    </tr>\n    <tr>\n      <th>189316</th>\n      <td>0</td>\n      <td>17787</td>\n      <td>3.0</td>\n      <td>1391990400</td>\n      <td>2</td>\n    </tr>\n    <tr>\n      <th>151302</th>\n      <td>0</td>\n      <td>0</td>\n      <td>5.0</td>\n      <td>1390694400</td>\n      <td>1</td>\n    </tr>\n    <tr>\n      <th>1820</th>\n      <td>0</td>\n      <td>3369</td>\n      <td>5.0</td>\n      <td>1405123200</td>\n      <td>2</td>\n    </tr>\n    <tr>\n      <th>60040</th>\n      <td>0</td>\n      <td>13372</td>\n      <td>5.0</td>\n      <td>1391990400</td>\n      <td>2</td>\n    </tr>\n    <tr>\n      <th>199192</th>\n      <td>0</td>\n      <td>5458</td>\n      <td>5.0</td>\n      <td>1405123200</td>\n      <td>2</td>\n    </tr>\n    <tr>\n      <th>163234</th>\n      <td>0</td>\n      <td>3327</td>\n      <td>3.0</td>\n      <td>1391990400</td>\n      <td>2</td>\n    </tr>\n    <tr>\n      <th>60837</th>\n      <td>1</td>\n      <td>2322</td>\n      <td>5.0</td>\n      <td>1337212800</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>233786</th>\n      <td>1</td>\n      <td>4123</td>\n      <td>5.0</td>\n      <td>1354838400</td>\n      <td>0</td>\n   
 </tr>\n    <tr>\n      <th>163460</th>\n      <td>1</td>\n      <td>14212</td>\n      <td>5.0</td>\n      <td>1368230400</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>206628</th>\n      <td>1</td>\n      <td>1542</td>\n      <td>4.0</td>\n      <td>1302220800</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>261633</th>\n      <td>1</td>\n      <td>8802</td>\n      <td>4.0</td>\n      <td>1368230400</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>99658</th>\n      <td>1</td>\n      <td>9198</td>\n      <td>5.0</td>\n      <td>1318377600</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>268935</th>\n      <td>1</td>\n      <td>7215</td>\n      <td>5.0</td>\n      <td>1285372800</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>77956</th>\n      <td>1</td>\n      <td>13468</td>\n      <td>5.0</td>\n      <td>1328140800</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>105444</th>\n      <td>1</td>\n      <td>2374</td>\n      <td>5.0</td>\n      <td>1391558400</td>\n      <td>1</td>\n    </tr>\n    <tr>\n      <th>237889</th>\n      <td>1</td>\n      <td>7169</td>\n      <td>5.0</td>\n      <td>1302220800</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>173295</th>\n      <td>1</td>\n      <td>6677</td>\n      <td>5.0</td>\n      <td>1318377600</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>50074</th>\n      <td>1</td>\n      <td>15278</td>\n      <td>5.0</td>\n      <td>1344902400</td>\n      <td>0</td>\n    </tr>\n  </tbody>\n</table>\n</div>"
     },
     "execution_count": 4,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "\n",
    "df = df.sample(frac=1).reset_index(drop=True)\n",
    "\n",
    "df.sort_values(by=['userID'], inplace=True)\n",
    "df[:20]"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n"
    }
   }
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "outputs": [],
   "source": [
    "uid_field, iid_field = 'userID', 'itemID'\n",
    "\n",
    "uid_freq = df.groupby(uid_field)[iid_field]\n",
    "u_i_dict = {}\n",
    "for u, u_ls in uid_freq:\n",
    "    u_i_dict[u] = list(u_ls)\n",
    "u_i_dict"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n",
     "is_executing": true
    }
   }
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "outputs": [
    {
     "data": {
      "text/plain": "[0,\n 0,\n 0,\n 0,\n 0,\n 0,\n 1,\n 2,\n 0,\n 0,\n 0,\n 0,\n 0,\n 0,\n 0,\n 0,\n 0,\n 0,\n 0,\n 0,\n 0,\n 0,\n 0,\n 0,\n 1,\n 2,\n 2,\n 0,\n 0,\n 0,\n 0,\n 0,\n 0,\n 0,\n 0,\n 0,\n 0,\n 0,\n 0,\n 0,\n 0,\n 0,\n 0,\n 0,\n 0,\n 0,\n 1,\n 1,\n 2,\n 2,\n 0,\n 0,\n 0,\n 1,\n 2,\n 0,\n 0,\n 0,\n 0,\n 0,\n 0,\n 0,\n 1,\n 2,\n 0,\n 0,\n 0,\n 0,\n 0,\n 0,\n 0,\n 0,\n 0,\n 0,\n 0,\n 0,\n 0,\n 0,\n 0,\n 0,\n 0,\n 0,\n 0,\n 0,\n 0,\n 0,\n 0,\n 0,\n 0,\n 0,\n 0,\n 0,\n 0,\n 0,\n 0,\n 0,\n 0,\n 0,\n 0,\n 0]"
     },
     "execution_count": 6,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "new_label = []\n",
    "u_ids_sorted = sorted(u_i_dict.keys())\n",
    "\n",
    "for u in u_ids_sorted:\n",
    "    items = u_i_dict[u]\n",
    "    n_items = len(items)\n",
    "    if n_items < 10:\n",
    "        tmp_ls = [0] * (n_items - 2) + [1] + [2]\n",
    "    else:\n",
    "        val_test_len = int(n_items * 0.2)\n",
    "        train_len = n_items - val_test_len\n",
    "        val_len = val_test_len // 2\n",
    "        test_len = val_test_len - val_len\n",
    "        tmp_ls = [0] * train_len + [1] * val_len + [2] * test_len\n",
    "    new_label.extend(tmp_ls)\n",
    "\n",
    "new_label[:100]"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n"
    }
   }
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "outputs": [
    {
     "data": {
      "text/plain": "        userID  itemID  rating   timestamp  x_label\n154667       0   11981     2.0  1390694400        0\n295557       0   15852     5.0  1390694400        0\n189316       0   17787     3.0  1391990400        0\n151302       0       0     5.0  1390694400        0\n1820         0    3369     5.0  1405123200        0\n60040        0   13372     5.0  1391990400        0\n199192       0    5458     5.0  1405123200        1\n163234       0    3327     3.0  1391990400        2\n60837        1    2322     5.0  1337212800        0\n233786       1    4123     5.0  1354838400        0\n163460       1   14212     5.0  1368230400        0\n206628       1    1542     4.0  1302220800        0\n261633       1    8802     4.0  1368230400        0\n99658        1    9198     5.0  1318377600        0\n268935       1    7215     5.0  1285372800        0\n77956        1   13468     5.0  1328140800        0\n105444       1    2374     5.0  1391558400        0\n237889       1    7169     5.0  1302220800        0\n173295       1    6677     5.0  1318377600        0\n50074        1   15278     5.0  1344902400        0",
      "text/html": "<div>\n<style scoped>\n    .dataframe tbody tr th:only-of-type {\n        vertical-align: middle;\n    }\n\n    .dataframe tbody tr th {\n        vertical-align: top;\n    }\n\n    .dataframe thead th {\n        text-align: right;\n    }\n</style>\n<table border=\"1\" class=\"dataframe\">\n  <thead>\n    <tr style=\"text-align: right;\">\n      <th></th>\n      <th>userID</th>\n      <th>itemID</th>\n      <th>rating</th>\n      <th>timestamp</th>\n      <th>x_label</th>\n    </tr>\n  </thead>\n  <tbody>\n    <tr>\n      <th>154667</th>\n      <td>0</td>\n      <td>11981</td>\n      <td>2.0</td>\n      <td>1390694400</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>295557</th>\n      <td>0</td>\n      <td>15852</td>\n      <td>5.0</td>\n      <td>1390694400</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>189316</th>\n      <td>0</td>\n      <td>17787</td>\n      <td>3.0</td>\n      <td>1391990400</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>151302</th>\n      <td>0</td>\n      <td>0</td>\n      <td>5.0</td>\n      <td>1390694400</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>1820</th>\n      <td>0</td>\n      <td>3369</td>\n      <td>5.0</td>\n      <td>1405123200</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>60040</th>\n      <td>0</td>\n      <td>13372</td>\n      <td>5.0</td>\n      <td>1391990400</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>199192</th>\n      <td>0</td>\n      <td>5458</td>\n      <td>5.0</td>\n      <td>1405123200</td>\n      <td>1</td>\n    </tr>\n    <tr>\n      <th>163234</th>\n      <td>0</td>\n      <td>3327</td>\n      <td>3.0</td>\n      <td>1391990400</td>\n      <td>2</td>\n    </tr>\n    <tr>\n      <th>60837</th>\n      <td>1</td>\n      <td>2322</td>\n      <td>5.0</td>\n      <td>1337212800</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>233786</th>\n      <td>1</td>\n      <td>4123</td>\n      <td>5.0</td>\n      <td>1354838400</td>\n      <td>0</td>\n   
 </tr>\n    <tr>\n      <th>163460</th>\n      <td>1</td>\n      <td>14212</td>\n      <td>5.0</td>\n      <td>1368230400</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>206628</th>\n      <td>1</td>\n      <td>1542</td>\n      <td>4.0</td>\n      <td>1302220800</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>261633</th>\n      <td>1</td>\n      <td>8802</td>\n      <td>4.0</td>\n      <td>1368230400</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>99658</th>\n      <td>1</td>\n      <td>9198</td>\n      <td>5.0</td>\n      <td>1318377600</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>268935</th>\n      <td>1</td>\n      <td>7215</td>\n      <td>5.0</td>\n      <td>1285372800</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>77956</th>\n      <td>1</td>\n      <td>13468</td>\n      <td>5.0</td>\n      <td>1328140800</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>105444</th>\n      <td>1</td>\n      <td>2374</td>\n      <td>5.0</td>\n      <td>1391558400</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>237889</th>\n      <td>1</td>\n      <td>7169</td>\n      <td>5.0</td>\n      <td>1302220800</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>173295</th>\n      <td>1</td>\n      <td>6677</td>\n      <td>5.0</td>\n      <td>1318377600</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>50074</th>\n      <td>1</td>\n      <td>15278</td>\n      <td>5.0</td>\n      <td>1344902400</td>\n      <td>0</td>\n    </tr>\n  </tbody>\n</table>\n</div>"
     },
     "execution_count": 7,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "df['x_label'] = new_label\n",
    "df[:20]"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n"
    }
   }
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "outputs": [
    {
     "data": {
      "text/plain": "'beauty14-indexed'"
     },
     "execution_count": 12,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "rslt_file[:-6]"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n"
    }
   }
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "done!!!\n"
     ]
    }
   ],
   "source": [
    "new_labeled_file = rslt_file[:-6] + '-v4.inter'\n",
    "df.to_csv(os.path.join('./', new_labeled_file), sep='\\t', index=False)\n",
    "print('done!!!')"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n"
    }
   }
  },
  {
   "cell_type": "markdown",
   "source": [],
   "metadata": {
    "collapsed": false
   }
  },
  {
   "cell_type": "markdown",
   "source": [],
   "metadata": {
    "collapsed": false
   }
  },
  {
   "cell_type": "markdown",
   "source": [
    "## Reload"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%% md\n"
    }
   }
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "shape: (296337, 5)\n"
     ]
    },
    {
     "data": {
      "text/plain": "    userID  itemID  rating   timestamp  x_label\n0        0   11981     2.0  1390694400        0\n1        0   15852     5.0  1390694400        0\n2        0   17787     3.0  1391990400        0\n3        0       0     5.0  1390694400        0\n4        0    3369     5.0  1405123200        0\n5        0   13372     5.0  1391990400        0\n6        0    5458     5.0  1405123200        1\n7        0    3327     3.0  1391990400        2\n8        1    2322     5.0  1337212800        0\n9        1    4123     5.0  1354838400        0\n10       1   14212     5.0  1368230400        0\n11       1    1542     4.0  1302220800        0\n12       1    8802     4.0  1368230400        0\n13       1    9198     5.0  1318377600        0\n14       1    7215     5.0  1285372800        0\n15       1   13468     5.0  1328140800        0\n16       1    2374     5.0  1391558400        0\n17       1    7169     5.0  1302220800        0\n18       1    6677     5.0  1318377600        0\n19       1   15278     5.0  1344902400        0",
      "text/html": "<div>\n<style scoped>\n    .dataframe tbody tr th:only-of-type {\n        vertical-align: middle;\n    }\n\n    .dataframe tbody tr th {\n        vertical-align: top;\n    }\n\n    .dataframe thead th {\n        text-align: right;\n    }\n</style>\n<table border=\"1\" class=\"dataframe\">\n  <thead>\n    <tr style=\"text-align: right;\">\n      <th></th>\n      <th>userID</th>\n      <th>itemID</th>\n      <th>rating</th>\n      <th>timestamp</th>\n      <th>x_label</th>\n    </tr>\n  </thead>\n  <tbody>\n    <tr>\n      <th>0</th>\n      <td>0</td>\n      <td>11981</td>\n      <td>2.0</td>\n      <td>1390694400</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>1</th>\n      <td>0</td>\n      <td>15852</td>\n      <td>5.0</td>\n      <td>1390694400</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>2</th>\n      <td>0</td>\n      <td>17787</td>\n      <td>3.0</td>\n      <td>1391990400</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>3</th>\n      <td>0</td>\n      <td>0</td>\n      <td>5.0</td>\n      <td>1390694400</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>4</th>\n      <td>0</td>\n      <td>3369</td>\n      <td>5.0</td>\n      <td>1405123200</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>5</th>\n      <td>0</td>\n      <td>13372</td>\n      <td>5.0</td>\n      <td>1391990400</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>6</th>\n      <td>0</td>\n      <td>5458</td>\n      <td>5.0</td>\n      <td>1405123200</td>\n      <td>1</td>\n    </tr>\n    <tr>\n      <th>7</th>\n      <td>0</td>\n      <td>3327</td>\n      <td>3.0</td>\n      <td>1391990400</td>\n      <td>2</td>\n    </tr>\n    <tr>\n      <th>8</th>\n      <td>1</td>\n      <td>2322</td>\n      <td>5.0</td>\n      <td>1337212800</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>9</th>\n      <td>1</td>\n      <td>4123</td>\n      <td>5.0</td>\n      <td>1354838400</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>10</th>\n      
<td>1</td>\n      <td>14212</td>\n      <td>5.0</td>\n      <td>1368230400</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>11</th>\n      <td>1</td>\n      <td>1542</td>\n      <td>4.0</td>\n      <td>1302220800</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>12</th>\n      <td>1</td>\n      <td>8802</td>\n      <td>4.0</td>\n      <td>1368230400</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>13</th>\n      <td>1</td>\n      <td>9198</td>\n      <td>5.0</td>\n      <td>1318377600</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>14</th>\n      <td>1</td>\n      <td>7215</td>\n      <td>5.0</td>\n      <td>1285372800</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>15</th>\n      <td>1</td>\n      <td>13468</td>\n      <td>5.0</td>\n      <td>1328140800</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>16</th>\n      <td>1</td>\n      <td>2374</td>\n      <td>5.0</td>\n      <td>1391558400</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>17</th>\n      <td>1</td>\n      <td>7169</td>\n      <td>5.0</td>\n      <td>1302220800</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>18</th>\n      <td>1</td>\n      <td>6677</td>\n      <td>5.0</td>\n      <td>1318377600</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>19</th>\n      <td>1</td>\n      <td>15278</td>\n      <td>5.0</td>\n      <td>1344902400</td>\n      <td>0</td>\n    </tr>\n  </tbody>\n</table>\n</div>"
     },
     "execution_count": 9,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "indexed_df = pd.read_csv(new_labeled_file, sep='\\t')\n",
    "print(f'shape: {indexed_df.shape}')\n",
    "indexed_df[:20]"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n"
    }
   }
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "# of unique learners: 35598\n",
      "# of unique courses: 18357\n",
      "min/max of unique learners: 0/35597\n",
      "min/max of unique courses: 0/18356\n"
     ]
    }
   ],
   "source": [
    "u_id_str, i_id_str = 'userID', 'itemID'\n",
    "u_uni = indexed_df[u_id_str].unique()\n",
    "c_uni = indexed_df[i_id_str].unique()\n",
    "\n",
    "print(f'# of unique learners: {len(u_uni)}')\n",
    "print(f'# of unique courses: {len(c_uni)}')\n",
    "\n",
    "print('min/max of unique learners: {0}/{1}'.format(min(u_uni), max(u_uni)))\n",
    "print('min/max of unique courses: {0}/{1}'.format(min(c_uni), max(c_uni)))\n"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n"
    }
   }
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 2
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython2",
   "version": "2.7.6"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 0
}

================================================
FILE: preprocessing/2reindex-feat.ipynb
================================================
{
 "cells": [
  {
   "cell_type": "markdown",
   "source": [
     "# Reindex item features (meta-text) to match the U/I indices generated in 0rating2inter.ipynb\n",
    "- Reindex item feature ID with IDs generated in 0rating2inter.ipynb"
   ],
   "metadata": {
    "collapsed": false
   }
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "import os\n",
    "import pandas as pd"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "outputs": [
    {
     "data": {
      "text/plain": "'/home/xin/XMMRec/Sports14'"
     },
     "execution_count": 2,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "os.chdir('/home/xin/MMRec/Sports14')\n",
    "os.getcwd()"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n"
    }
   }
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "shape: (18357, 2)\n"
     ]
    },
    {
     "data": {
      "text/plain": "         asin  itemID\n0  1881509818       0\n1  2094869245       1\n2  7245456259       2\n3  7245456313       3",
      "text/html": "<div>\n<style scoped>\n    .dataframe tbody tr th:only-of-type {\n        vertical-align: middle;\n    }\n\n    .dataframe tbody tr th {\n        vertical-align: top;\n    }\n\n    .dataframe thead th {\n        text-align: right;\n    }\n</style>\n<table border=\"1\" class=\"dataframe\">\n  <thead>\n    <tr style=\"text-align: right;\">\n      <th></th>\n      <th>asin</th>\n      <th>itemID</th>\n    </tr>\n  </thead>\n  <tbody>\n    <tr>\n      <th>0</th>\n      <td>1881509818</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>1</th>\n      <td>2094869245</td>\n      <td>1</td>\n    </tr>\n    <tr>\n      <th>2</th>\n      <td>7245456259</td>\n      <td>2</td>\n    </tr>\n    <tr>\n      <th>3</th>\n      <td>7245456313</td>\n      <td>3</td>\n    </tr>\n  </tbody>\n</table>\n</div>"
     },
     "execution_count": 3,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# load item mapping\n",
    "i_id_mapping = 'i_id_mapping.csv'\n",
    "df = pd.read_csv(i_id_mapping, sep='\\t')\n",
    "print(f'shape: {df.shape}')\n",
    "df[:4]"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n"
    }
   }
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0 Extracting U-I interactions.\n",
      "Total records: (532197, 9)\n"
     ]
    },
    {
     "data": {
      "text/plain": "         asin                           title  price  \\\n0  0000032069  Adult Ballet Tutu Cheetah Pink   7.89   \n1  0000031909     Girls Ballet Tutu Neon Pink   7.00   \n2  0000032034        Adult Ballet Tutu Yellow   7.87   \n\n                                               imUrl  \\\n0  http://ecx.images-amazon.com/images/I/51EzU6qu...   \n1  http://ecx.images-amazon.com/images/I/41xBoP0F...   \n2  http://ecx.images-amazon.com/images/I/21GNUNIa...   \n\n                                             related     brand  \\\n0  {'also_bought': ['0000032050', 'B00D0DJAEG', '...  BubuBibi   \n1  {'also_bought': ['B002BZX8Z6', 'B00JHONN1S', '...   Unknown   \n2  {'also_bought': ['B00D2JSRFQ', '0000032042', '...  BubuBibi   \n\n                                          categories  \\\n0  [[Sports & Outdoors, Other Sports, Dance, Clot...   \n1         [[Sports & Outdoors, Other Sports, Dance]]   \n2  [[Sports & Outdoors, Other Sports, Dance, Clot...   \n\n                  salesRank                                        description  \n0                       NaN                                                NaN  \n1  {'Toys & Games': 201847}  High quality 3 layer ballet tutu. 12 inches in...  \n2                       NaN                                                NaN  ",
      "text/html": "<div>\n<style scoped>\n    .dataframe tbody tr th:only-of-type {\n        vertical-align: middle;\n    }\n\n    .dataframe tbody tr th {\n        vertical-align: top;\n    }\n\n    .dataframe thead th {\n        text-align: right;\n    }\n</style>\n<table border=\"1\" class=\"dataframe\">\n  <thead>\n    <tr style=\"text-align: right;\">\n      <th></th>\n      <th>asin</th>\n      <th>title</th>\n      <th>price</th>\n      <th>imUrl</th>\n      <th>related</th>\n      <th>brand</th>\n      <th>categories</th>\n      <th>salesRank</th>\n      <th>description</th>\n    </tr>\n  </thead>\n  <tbody>\n    <tr>\n      <th>0</th>\n      <td>0000032069</td>\n      <td>Adult Ballet Tutu Cheetah Pink</td>\n      <td>7.89</td>\n      <td>http://ecx.images-amazon.com/images/I/51EzU6qu...</td>\n      <td>{'also_bought': ['0000032050', 'B00D0DJAEG', '...</td>\n      <td>BubuBibi</td>\n      <td>[[Sports &amp; Outdoors, Other Sports, Dance, Clot...</td>\n      <td>NaN</td>\n      <td>NaN</td>\n    </tr>\n    <tr>\n      <th>1</th>\n      <td>0000031909</td>\n      <td>Girls Ballet Tutu Neon Pink</td>\n      <td>7.00</td>\n      <td>http://ecx.images-amazon.com/images/I/41xBoP0F...</td>\n      <td>{'also_bought': ['B002BZX8Z6', 'B00JHONN1S', '...</td>\n      <td>Unknown</td>\n      <td>[[Sports &amp; Outdoors, Other Sports, Dance]]</td>\n      <td>{'Toys &amp; Games': 201847}</td>\n      <td>High quality 3 layer ballet tutu. 12 inches in...</td>\n    </tr>\n    <tr>\n      <th>2</th>\n      <td>0000032034</td>\n      <td>Adult Ballet Tutu Yellow</td>\n      <td>7.87</td>\n      <td>http://ecx.images-amazon.com/images/I/21GNUNIa...</td>\n      <td>{'also_bought': ['B00D2JSRFQ', '0000032042', '...</td>\n      <td>BubuBibi</td>\n      <td>[[Sports &amp; Outdoors, Other Sports, Dance, Clot...</td>\n      <td>NaN</td>\n      <td>NaN</td>\n    </tr>\n  </tbody>\n</table>\n</div>"
     },
     "execution_count": 4,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "\n",
    "import gzip, json\n",
    "meta_file = 'meta_Sports_and_Outdoors.json.gz'\n",
    "\n",
    "print('0 Extracting U-I interactions.')\n",
    "\n",
    "def parse(path):\n",
    "  g = gzip.open(path, 'rb')\n",
    "  for l in g:\n",
    "    yield eval(l)\n",
    "\n",
    "def getDF(path):\n",
    "  i = 0\n",
    "  df = {}\n",
    "  for d in parse(path):\n",
    "    df[i] = d\n",
    "    i += 1\n",
    "  return pd.DataFrame.from_dict(df, orient='index')\n",
    "\n",
    "meta_df = getDF(meta_file)\n",
    "\n",
    "print(f'Total records: {meta_df.shape}')\n",
    "meta_df[:3]"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n"
    }
   }
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "shape: (18357, 10)\n"
     ]
    },
    {
     "data": {
      "text/plain": "           asin                                              title  price  \\\n132  1881509818           Ghost Inc Glock Armorers Tool 3/32 Punch   9.99   \n155  2094869245  5 LED Bicycle Rear Tail Red Bike Torch Laser B...   8.26   \n\n                                                 imUrl  \\\n132  http://ecx.images-amazon.com/images/I/21iMxsyD...   \n155  http://ecx.images-amazon.com/images/I/51RtwnJw...   \n\n                                               related  brand  \\\n132  {'also_bought': ['B000U3YWEM', 'B000U401J6', '...  Ghost   \n155  {'also_bought': ['B0081O93N2', 'B00EYTCHJA', '...          \n\n                                            categories  \\\n132  [[Sports & Outdoors, Hunting & Fishing, Huntin...   \n155  [[Sports & Outdoors, Cycling, Lights & Reflect...   \n\n                             salesRank  \\\n132  {'Sports &amp; Outdoors': 172909}   \n155   {'Sports &amp; Outdoors': 14293}   \n\n                                           description  itemID  \n132  Ghost Armorer Tool (1). The GAT is made with a...       0  \n155  This newly-designed Laser tail light can emit ...       1  ",
      "text/html": "<div>\n<style scoped>\n    .dataframe tbody tr th:only-of-type {\n        vertical-align: middle;\n    }\n\n    .dataframe tbody tr th {\n        vertical-align: top;\n    }\n\n    .dataframe thead th {\n        text-align: right;\n    }\n</style>\n<table border=\"1\" class=\"dataframe\">\n  <thead>\n    <tr style=\"text-align: right;\">\n      <th></th>\n      <th>asin</th>\n      <th>title</th>\n      <th>price</th>\n      <th>imUrl</th>\n      <th>related</th>\n      <th>brand</th>\n      <th>categories</th>\n      <th>salesRank</th>\n      <th>description</th>\n      <th>itemID</th>\n    </tr>\n  </thead>\n  <tbody>\n    <tr>\n      <th>132</th>\n      <td>1881509818</td>\n      <td>Ghost Inc Glock Armorers Tool 3/32 Punch</td>\n      <td>9.99</td>\n      <td>http://ecx.images-amazon.com/images/I/21iMxsyD...</td>\n      <td>{'also_bought': ['B000U3YWEM', 'B000U401J6', '...</td>\n      <td>Ghost</td>\n      <td>[[Sports &amp; Outdoors, Hunting &amp; Fishing, Huntin...</td>\n      <td>{'Sports &amp;amp; Outdoors': 172909}</td>\n      <td>Ghost Armorer Tool (1). The GAT is made with a...</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>155</th>\n      <td>2094869245</td>\n      <td>5 LED Bicycle Rear Tail Red Bike Torch Laser B...</td>\n      <td>8.26</td>\n      <td>http://ecx.images-amazon.com/images/I/51RtwnJw...</td>\n      <td>{'also_bought': ['B0081O93N2', 'B00EYTCHJA', '...</td>\n      <td></td>\n      <td>[[Sports &amp; Outdoors, Cycling, Lights &amp; Reflect...</td>\n      <td>{'Sports &amp;amp; Outdoors': 14293}</td>\n      <td>This newly-designed Laser tail light can emit ...</td>\n      <td>1</td>\n    </tr>\n  </tbody>\n</table>\n</div>"
     },
     "execution_count": 5,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# remapping\n",
    "map_dict = dict(zip(df['asin'], df['itemID']))\n",
    "\n",
    "meta_df['itemID'] = meta_df['asin'].map(map_dict)\n",
    "meta_df.dropna(subset=['itemID'], inplace=True)\n",
    "meta_df['itemID'] = meta_df['itemID'].astype('int64')\n",
    "#meta_df['description'] = meta_df['description'].fillna(\" \")\n",
    "meta_df.sort_values(by=['itemID'], inplace=True)\n",
    "\n",
    "print(f'shape: {meta_df.shape}')\n",
    "meta_df[:2]"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n"
    }
   }
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "new column names: ['itemID', 'asin', 'title', 'price', 'imUrl', 'related', 'brand', 'categories', 'salesRank', 'description']\n"
     ]
    }
   ],
   "source": [
    "ori_cols = meta_df.columns.tolist()\n",
    "\n",
    "ret_cols = [ori_cols[-1]] + ori_cols[:-1]\n",
    "print(f'new column names: {ret_cols}')"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n"
    }
   }
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "outputs": [
    {
     "data": {
      "text/plain": "           asin                                              title  price  \\\n132  1881509818           Ghost Inc Glock Armorers Tool 3/32 Punch   9.99   \n155  2094869245  5 LED Bicycle Rear Tail Red Bike Torch Laser B...   8.26   \n201  7245456259  Black Mountain Products Single Resistance Band...  10.49   \n\n                                                 imUrl  \\\n132  http://ecx.images-amazon.com/images/I/21iMxsyD...   \n155  http://ecx.images-amazon.com/images/I/51RtwnJw...   \n201  http://ecx.images-amazon.com/images/I/411Ikpf1...   \n\n                                               related           brand  \\\n132  {'also_bought': ['B000U3YWEM', 'B000U401J6', '...           Ghost   \n155  {'also_bought': ['B0081O93N2', 'B00EYTCHJA', '...                   \n201  {'also_bought': ['B00DDBS2JE', 'B00H1KNHE8', '...  Black Mountain   \n\n                                            categories  \\\n132  [[Sports & Outdoors, Hunting & Fishing, Huntin...   \n155  [[Sports & Outdoors, Cycling, Lights & Reflect...   \n201  [[Sports & Outdoors, Exercise & Fitness, Acces...   \n\n                             salesRank  \\\n132  {'Sports &amp; Outdoors': 172909}   \n155   {'Sports &amp; Outdoors': 14293}   \n201    {'Sports &amp; Outdoors': 1010}   \n\n                                           description  itemID  \n132  Ghost Armorer Tool (1). The GAT is made with a...       0  \n155  This newly-designed Laser tail light can emit ...       1  \n201  Black Mountain Products single resistance band...       2  ",
      "text/html": "<div>\n<style scoped>\n    .dataframe tbody tr th:only-of-type {\n        vertical-align: middle;\n    }\n\n    .dataframe tbody tr th {\n        vertical-align: top;\n    }\n\n    .dataframe thead th {\n        text-align: right;\n    }\n</style>\n<table border=\"1\" class=\"dataframe\">\n  <thead>\n    <tr style=\"text-align: right;\">\n      <th></th>\n      <th>asin</th>\n      <th>title</th>\n      <th>price</th>\n      <th>imUrl</th>\n      <th>related</th>\n      <th>brand</th>\n      <th>categories</th>\n      <th>salesRank</th>\n      <th>description</th>\n      <th>itemID</th>\n    </tr>\n  </thead>\n  <tbody>\n    <tr>\n      <th>132</th>\n      <td>1881509818</td>\n      <td>Ghost Inc Glock Armorers Tool 3/32 Punch</td>\n      <td>9.99</td>\n      <td>http://ecx.images-amazon.com/images/I/21iMxsyD...</td>\n      <td>{'also_bought': ['B000U3YWEM', 'B000U401J6', '...</td>\n      <td>Ghost</td>\n      <td>[[Sports &amp; Outdoors, Hunting &amp; Fishing, Huntin...</td>\n      <td>{'Sports &amp;amp; Outdoors': 172909}</td>\n      <td>Ghost Armorer Tool (1). 
The GAT is made with a...</td>\n      <td>0</td>\n    </tr>\n    <tr>\n      <th>155</th>\n      <td>2094869245</td>\n      <td>5 LED Bicycle Rear Tail Red Bike Torch Laser B...</td>\n      <td>8.26</td>\n      <td>http://ecx.images-amazon.com/images/I/51RtwnJw...</td>\n      <td>{'also_bought': ['B0081O93N2', 'B00EYTCHJA', '...</td>\n      <td></td>\n      <td>[[Sports &amp; Outdoors, Cycling, Lights &amp; Reflect...</td>\n      <td>{'Sports &amp;amp; Outdoors': 14293}</td>\n      <td>This newly-designed Laser tail light can emit ...</td>\n      <td>1</td>\n    </tr>\n    <tr>\n      <th>201</th>\n      <td>7245456259</td>\n      <td>Black Mountain Products Single Resistance Band...</td>\n      <td>10.49</td>\n      <td>http://ecx.images-amazon.com/images/I/411Ikpf1...</td>\n      <td>{'also_bought': ['B00DDBS2JE', 'B00H1KNHE8', '...</td>\n      <td>Black Mountain</td>\n      <td>[[Sports &amp; Outdoors, Exercise &amp; Fitness, Acces...</td>\n      <td>{'Sports &amp;amp; Outdoors': 1010}</td>\n      <td>Black Mountain Products single resistance band...</td>\n      <td>2</td>\n    </tr>\n  </tbody>\n</table>\n</div>"
     },
     "execution_count": 7,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "meta_df[:3]"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n"
    }
   }
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "done!\n"
     ]
    }
   ],
   "source": [
    "ret_df = meta_df[ret_cols]\n",
    "# dump\n",
    "ret_df.to_csv(os.path.join('./', 'meta-sports14.csv'), index=False)\n",
    "print('done!')"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n"
    }
   }
  },
  {
   "cell_type": "markdown",
   "source": [],
   "metadata": {
    "collapsed": false
   }
  },
  {
   "cell_type": "markdown",
   "source": [
    "## Reload"
   ],
   "metadata": {
    "collapsed": false
   }
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "shape: (18357, 10)\n"
     ]
    },
    {
     "data": {
      "text/plain": "   itemID        asin                                              title  \\\n0       0  1881509818           Ghost Inc Glock Armorers Tool 3/32 Punch   \n1       1  2094869245  5 LED Bicycle Rear Tail Red Bike Torch Laser B...   \n2       2  7245456259  Black Mountain Products Single Resistance Band...   \n3       3  7245456313  Black Mountain Products Resistance Band Set wi...   \n\n   price                                              imUrl  \\\n0   9.99  http://ecx.images-amazon.com/images/I/21iMxsyD...   \n1   8.26  http://ecx.images-amazon.com/images/I/51RtwnJw...   \n2  10.49  http://ecx.images-amazon.com/images/I/411Ikpf1...   \n3  32.99  http://ecx.images-amazon.com/images/I/51FdHlZS...   \n\n                                             related           brand  \\\n0  {'also_bought': ['B000U3YWEM', 'B000U401J6', '...           Ghost   \n1  {'also_bought': ['B0081O93N2', 'B00EYTCHJA', '...             NaN   \n2  {'also_bought': ['B00DDBS2JE', 'B00H1KNHE8', '...  Black Mountain   \n3  {'also_bought': ['1612431712', 'B00GSBMW2Y', '...  Black Mountain   \n\n                                          categories  \\\n0  [['Sports & Outdoors', 'Hunting & Fishing', 'H...   \n1  [['Sports & Outdoors', 'Cycling', 'Lights & Re...   \n2  [['Sports & Outdoors', 'Exercise & Fitness', '...   \n3  [['Sports & Outdoors', 'Exercise & Fitness', '...   \n\n                           salesRank  \\\n0  {'Sports &amp; Outdoors': 172909}   \n1   {'Sports &amp; Outdoors': 14293}   \n2    {'Sports &amp; Outdoors': 1010}   \n3      {'Sports &amp; Outdoors': 15}   \n\n                                         description  \n0  Ghost Armorer Tool (1). The GAT is made with a...  \n1  This newly-designed Laser tail light can emit ...  \n2  Black Mountain Products single resistance band...  \n3  [if gte mso 9]><xml> <o:OfficeDocumentSettings...  ",
      "text/html": "<div>\n<style scoped>\n    .dataframe tbody tr th:only-of-type {\n        vertical-align: middle;\n    }\n\n    .dataframe tbody tr th {\n        vertical-align: top;\n    }\n\n    .dataframe thead th {\n        text-align: right;\n    }\n</style>\n<table border=\"1\" class=\"dataframe\">\n  <thead>\n    <tr style=\"text-align: right;\">\n      <th></th>\n      <th>itemID</th>\n      <th>asin</th>\n      <th>title</th>\n      <th>price</th>\n      <th>imUrl</th>\n      <th>related</th>\n      <th>brand</th>\n      <th>categories</th>\n      <th>salesRank</th>\n      <th>description</th>\n    </tr>\n  </thead>\n  <tbody>\n    <tr>\n      <th>0</th>\n      <td>0</td>\n      <td>1881509818</td>\n      <td>Ghost Inc Glock Armorers Tool 3/32 Punch</td>\n      <td>9.99</td>\n      <td>http://ecx.images-amazon.com/images/I/21iMxsyD...</td>\n      <td>{'also_bought': ['B000U3YWEM', 'B000U401J6', '...</td>\n      <td>Ghost</td>\n      <td>[['Sports &amp; Outdoors', 'Hunting &amp; Fishing', 'H...</td>\n      <td>{'Sports &amp;amp; Outdoors': 172909}</td>\n      <td>Ghost Armorer Tool (1). 
The GAT is made with a...</td>\n    </tr>\n    <tr>\n      <th>1</th>\n      <td>1</td>\n      <td>2094869245</td>\n      <td>5 LED Bicycle Rear Tail Red Bike Torch Laser B...</td>\n      <td>8.26</td>\n      <td>http://ecx.images-amazon.com/images/I/51RtwnJw...</td>\n      <td>{'also_bought': ['B0081O93N2', 'B00EYTCHJA', '...</td>\n      <td>NaN</td>\n      <td>[['Sports &amp; Outdoors', 'Cycling', 'Lights &amp; Re...</td>\n      <td>{'Sports &amp;amp; Outdoors': 14293}</td>\n      <td>This newly-designed Laser tail light can emit ...</td>\n    </tr>\n    <tr>\n      <th>2</th>\n      <td>2</td>\n      <td>7245456259</td>\n      <td>Black Mountain Products Single Resistance Band...</td>\n      <td>10.49</td>\n      <td>http://ecx.images-amazon.com/images/I/411Ikpf1...</td>\n      <td>{'also_bought': ['B00DDBS2JE', 'B00H1KNHE8', '...</td>\n      <td>Black Mountain</td>\n      <td>[['Sports &amp; Outdoors', 'Exercise &amp; Fitness', '...</td>\n      <td>{'Sports &amp;amp; Outdoors': 1010}</td>\n      <td>Black Mountain Products single resistance band...</td>\n    </tr>\n    <tr>\n      <th>3</th>\n      <td>3</td>\n      <td>7245456313</td>\n      <td>Black Mountain Products Resistance Band Set wi...</td>\n      <td>32.99</td>\n      <td>http://ecx.images-amazon.com/images/I/51FdHlZS...</td>\n      <td>{'also_bought': ['1612431712', 'B00GSBMW2Y', '...</td>\n      <td>Black Mountain</td>\n      <td>[['Sports &amp; Outdoors', 'Exercise &amp; Fitness', '...</td>\n      <td>{'Sports &amp;amp; Outdoors': 15}</td>\n      <td>[if gte mso 9]&gt;&lt;xml&gt; &lt;o:OfficeDocumentSettings...</td>\n    </tr>\n  </tbody>\n</table>\n</div>"
     },
     "execution_count": 9,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "indexed_df = pd.read_csv('meta-sports14.csv')\n",
    "print(f'shape: {indexed_df.shape}')\n",
    "indexed_df[:4]"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n"
    }
   }
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "# of unique items: 18357\n",
      "min/max of unique learners: 0/18356\n"
     ]
    }
   ],
   "source": [
    "## Reload\n",
    "\n",
    "i_uni = indexed_df['itemID'].unique()\n",
    "\n",
    "print(f'# of unique items: {len(i_uni)}')\n",
    "\n",
    "print('min/max of unique learners: {0}/{1}'.format(min(i_uni), max(i_uni)))"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n"
    }
   }
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 2
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython2",
   "version": "2.7.6"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 0
}

================================================
FILE: preprocessing/3feat-encoder.ipynb
================================================
{
 "cells": [
  {
   "cell_type": "markdown",
   "source": [
    "# Sports14 Text/Image Feature Extraction"
   ],
   "metadata": {
    "collapsed": false
   }
  },
  {
   "cell_type": "markdown",
   "source": [],
   "metadata": {
    "collapsed": false
   }
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "outputs": [],
   "source": [
    "\n",
    "import os\n",
    "import numpy as np\n",
    "import pandas as pd"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n"
    }
   }
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "outputs": [],
   "source": [
    "os.chdir('/home/xin/MMRec/Sports14')\n",
    "os.getcwd()"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n",
     "is_executing": true
    }
   }
  },
  {
   "cell_type": "markdown",
   "source": [
    "## Load text data"
   ],
   "metadata": {
    "collapsed": false
   }
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "data loaded!\n",
      "shape: (18357, 10)\n"
     ]
    },
    {
     "data": {
      "text/plain": "   itemID        asin                                              title  \\\n0       0  1881509818           Ghost Inc Glock Armorers Tool 3/32 Punch   \n1       1  2094869245  5 LED Bicycle Rear Tail Red Bike Torch Laser B...   \n2       2  7245456259  Black Mountain Products Single Resistance Band...   \n\n   price                                              imUrl  \\\n0   9.99  http://ecx.images-amazon.com/images/I/21iMxsyD...   \n1   8.26  http://ecx.images-amazon.com/images/I/51RtwnJw...   \n2  10.49  http://ecx.images-amazon.com/images/I/411Ikpf1...   \n\n                                             related           brand  \\\n0  {'also_bought': ['B000U3YWEM', 'B000U401J6', '...           Ghost   \n1  {'also_bought': ['B0081O93N2', 'B00EYTCHJA', '...             NaN   \n2  {'also_bought': ['B00DDBS2JE', 'B00H1KNHE8', '...  Black Mountain   \n\n                                          categories  \\\n0  [['Sports & Outdoors', 'Hunting & Fishing', 'H...   \n1  [['Sports & Outdoors', 'Cycling', 'Lights & Re...   \n2  [['Sports & Outdoors', 'Exercise & Fitness', '...   \n\n                           salesRank  \\\n0  {'Sports &amp; Outdoors': 172909}   \n1   {'Sports &amp; Outdoors': 14293}   \n2    {'Sports &amp; Outdoors': 1010}   \n\n                                         description  \n0  Ghost Armorer Tool (1). The GAT is made with a...  \n1  This newly-designed Laser tail light can emit ...  \n2  Black Mountain Products single resistance band...  ",
      "text/html": "<div>\n<style scoped>\n    .dataframe tbody tr th:only-of-type {\n        vertical-align: middle;\n    }\n\n    .dataframe tbody tr th {\n        vertical-align: top;\n    }\n\n    .dataframe thead th {\n        text-align: right;\n    }\n</style>\n<table border=\"1\" class=\"dataframe\">\n  <thead>\n    <tr style=\"text-align: right;\">\n      <th></th>\n      <th>itemID</th>\n      <th>asin</th>\n      <th>title</th>\n      <th>price</th>\n      <th>imUrl</th>\n      <th>related</th>\n      <th>brand</th>\n      <th>categories</th>\n      <th>salesRank</th>\n      <th>description</th>\n    </tr>\n  </thead>\n  <tbody>\n    <tr>\n      <th>0</th>\n      <td>0</td>\n      <td>1881509818</td>\n      <td>Ghost Inc Glock Armorers Tool 3/32 Punch</td>\n      <td>9.99</td>\n      <td>http://ecx.images-amazon.com/images/I/21iMxsyD...</td>\n      <td>{'also_bought': ['B000U3YWEM', 'B000U401J6', '...</td>\n      <td>Ghost</td>\n      <td>[['Sports &amp; Outdoors', 'Hunting &amp; Fishing', 'H...</td>\n      <td>{'Sports &amp;amp; Outdoors': 172909}</td>\n      <td>Ghost Armorer Tool (1). 
The GAT is made with a...</td>\n    </tr>\n    <tr>\n      <th>1</th>\n      <td>1</td>\n      <td>2094869245</td>\n      <td>5 LED Bicycle Rear Tail Red Bike Torch Laser B...</td>\n      <td>8.26</td>\n      <td>http://ecx.images-amazon.com/images/I/51RtwnJw...</td>\n      <td>{'also_bought': ['B0081O93N2', 'B00EYTCHJA', '...</td>\n      <td>NaN</td>\n      <td>[['Sports &amp; Outdoors', 'Cycling', 'Lights &amp; Re...</td>\n      <td>{'Sports &amp;amp; Outdoors': 14293}</td>\n      <td>This newly-designed Laser tail light can emit ...</td>\n    </tr>\n    <tr>\n      <th>2</th>\n      <td>2</td>\n      <td>7245456259</td>\n      <td>Black Mountain Products Single Resistance Band...</td>\n      <td>10.49</td>\n      <td>http://ecx.images-amazon.com/images/I/411Ikpf1...</td>\n      <td>{'also_bought': ['B00DDBS2JE', 'B00H1KNHE8', '...</td>\n      <td>Black Mountain</td>\n      <td>[['Sports &amp; Outdoors', 'Exercise &amp; Fitness', '...</td>\n      <td>{'Sports &amp;amp; Outdoors': 1010}</td>\n      <td>Black Mountain Products single resistance band...</td>\n    </tr>\n  </tbody>\n</table>\n</div>"
     },
     "execution_count": 3,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "i_id, desc_str = 'itemID', 'description'\n",
    "\n",
    "file_path = './'\n",
    "file_name = 'meta-sports14.csv'\n",
    "\n",
    "meta_file = os.path.join(file_path, file_name)\n",
    "\n",
    "df = pd.read_csv(meta_file)\n",
    "df.sort_values(by=[i_id], inplace=True)\n",
    "\n",
    "print('data loaded!')\n",
    "print(f'shape: {df.shape}')\n",
    "\n",
    "df[:3]"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n"
    }
   }
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "(91, 10)\n",
      "(2659, 10)\n",
      "(40, 10)\n",
      "(40, 10)\n",
      "(0, 10)\n"
     ]
    }
   ],
   "source": [
    "\n",
    "# sentences: title + brand + category + description | All have title + description\n",
    "\n",
    "title_na_df = df[df['title'].isnull()]\n",
    "print(title_na_df.shape)\n",
    "\n",
    "desc_na_df = df[df['description'].isnull()]\n",
    "print(desc_na_df.shape)\n",
    "\n",
    "na_df = df[df['description'].isnull() & df['title'].isnull()]\n",
    "print(na_df.shape)\n",
    "\n",
    "na3_df = df[df['description'].isnull() & df['title'].isnull() & df['brand'].isnull()]\n",
    "print(na3_df.shape)\n",
    "\n",
    "na4_df = df[df['description'].isnull() & df['title'].isnull() & df['brand'].isnull() & df['categories'].isnull()]\n",
    "print(na4_df.shape)"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n"
    }
   }
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "outputs": [],
   "source": [
    "\n",
    "df[desc_str] = df[desc_str].fillna(\" \")\n",
    "df['title'] = df['title'].fillna(\" \")\n",
    "df['brand'] = df['brand'].fillna(\" \")\n",
    "df['categories'] = df['categories'].fillna(\" \")\n"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n"
    }
   }
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "outputs": [
    {
     "data": {
      "text/plain": "['Ghost Inc Glock Armorers Tool 3/32 Punch Ghost Sports & Outdoors Hunting & Fishing Hunting Gun Maintenance Gunsmithing Tools Ghost Armorer Tool (1). The GAT is made with a spring steel punch. The diameter is 3/32 of an inch or 2.5mm, this is the same as the OEM tool size. The difference is you will be able to press harder without bending the shaft of this punch. Just a better tool to work on your Glock with.',\n '5 LED Bicycle Rear Tail Red Bike Torch Laser Beam Lamp Light   Sports & Outdoors Cycling Lights & Reflectors Taillights This newly-designed Laser tail light can emit two parallel lines, to form a virtual lane together with the moving of bicycle on the road. LED flash light and  two lines not only enhance the waring effect strongly and greatly but also improve the safety of night riding.',\n 'Black Mountain Products Single Resistance Band - Door Anchor and Starter Guide Included Black Mountain Sports & Outdoors Exercise & Fitness Accessories Exercise Bands Black Mountain Products single resistance bands are made out of the highest quality rubber to ensure maximum life and are 99 percent latex free! These bands are ideal for physical therapy, exercise, weight loss, Pilates, muscle toning, muscle strengthening, stretching, rehabilitation, and general health and fitness. B.M.P. resistance bands are great for home use, gym use, offices, and are ideal for travel. B.M.P. single resistance bands are a great alternative to conventional weights and exercise equipment. 
All Black Mountain Products Resistance bands come with a manufactures warranty.',\n 'Black Mountain Products Resistance Band Set with Door Anchor, Ankle Strap, Exercise Chart, and Resistance Band Carrying Case Black Mountain Sports & Outdoors Exercise & Fitness Accessories Exercise Bands [if gte mso 9]><xml> <o:OfficeDocumentSettings> <o:AllowPNG  /> </o:OfficeDocumentSettings> </xml><![endif][if gte mso 9]><xml> <w:WordDocument> <w:View>Normal</w:View> <w:Zoom>0</w:Zoom> <w:TrackMoves  /> <w:TrackFormatting  /> <w:PunctuationKerning  /> <w:ValidateAgainstSchemas  /> <w:SaveIfXMLInvalid>false</w:SaveIfXMLInvalid> <w:IgnoreMixedContent>false</w:IgnoreMixedContent> <w:AlwaysShowPlaceholderText>false</w:AlwaysShowPlaceholderText> <w:DoNotPromoteQF  /> <w:LidThemeOther>EN-US</w:LidThemeOther> <w:LidThemeAsian>X-NONE</w:LidThemeAsian> <w:LidThemeComplexScript>X-NONE</w:LidThemeComplexScript> <w:Compatibility> <w:BreakWrappedTables  /> <w:SnapToGridInCell  /> <w:WrapTextWithPunct  /> <w:UseAsianBreakRules  /> <w:DontGrowAutofit  /> <w:SplitPgBreakAndParaMark  /> <w:EnableOpenTypeKerning  /> <w:DontFlipMirrorIndents  /> <w:OverrideTableStyleHps  /> </w:Compatibility> <m:mathPr> <m:mathFont m:val=\"Cambria Math\"  /> <m:brkBin m:val=\"before\"  /> <m:brkBinSub m:val=\"&#45;-\"  /> <m:smallFrac m:val=\"off\"  /> <m:dispDef  /> <m:lMargin m:val=\"0\"  /> <m:rMargin m:val=\"0\"  /> <m:defJc m:val=\"centerGroup\"  /> <m:wrapIndent m:val=\"1440\"  /> <m:intLim m:val=\"subSup\"  /> <m:naryLim m:val=\"undOvr\"  /> </m:mathPr></w:WordDocument> </xml><![endif][if gte mso 9]><xml> <w:LatentStyles DefLockedState=\"false\" DefUnhideWhenUsed=\"true\"   DefSemiHidden=\"true\" DefQFormat=\"false\" DefPriority=\"99\"   LatentStyleCount=\"267\"> <w:LsdException Locked=\"false\" Priority=\"0\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" QFormat=\"true\" Name=\"Normal\"  /> <w:LsdException Locked=\"false\" Priority=\"9\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" QFormat=\"true\" 
Name=\"heading 1\"  /> <w:LsdException Locked=\"false\" Priority=\"9\" QFormat=\"true\" Name=\"heading 2\"  /> <w:LsdException Locked=\"false\" Priority=\"9\" QFormat=\"true\" Name=\"heading 3\"  /> <w:LsdException Locked=\"false\" Priority=\"9\" QFormat=\"true\" Name=\"heading 4\"  /> <w:LsdException Locked=\"false\" Priority=\"9\" QFormat=\"true\" Name=\"heading 5\"  /> <w:LsdException Locked=\"false\" Priority=\"9\" QFormat=\"true\" Name=\"heading 6\"  /> <w:LsdException Locked=\"false\" Priority=\"9\" QFormat=\"true\" Name=\"heading 7\"  /> <w:LsdException Locked=\"false\" Priority=\"9\" QFormat=\"true\" Name=\"heading 8\"  /> <w:LsdException Locked=\"false\" Priority=\"9\" QFormat=\"true\" Name=\"heading 9\"  /> <w:LsdException Locked=\"false\" Priority=\"39\" Name=\"toc 1\"  /> <w:LsdException Locked=\"false\" Priority=\"39\" Name=\"toc 2\"  /> <w:LsdException Locked=\"false\" Priority=\"39\" Name=\"toc 3\"  /> <w:LsdException Locked=\"false\" Priority=\"39\" Name=\"toc 4\"  /> <w:LsdException Locked=\"false\" Priority=\"39\" Name=\"toc 5\"  /> <w:LsdException Locked=\"false\" Priority=\"39\" Name=\"toc 6\"  /> <w:LsdException Locked=\"false\" Priority=\"39\" Name=\"toc 7\"  /> <w:LsdException Locked=\"false\" Priority=\"39\" Name=\"toc 8\"  /> <w:LsdException Locked=\"false\" Priority=\"39\" Name=\"toc 9\"  /> <w:LsdException Locked=\"false\" Priority=\"35\" QFormat=\"true\" Name=\"caption\"  /> <w:LsdException Locked=\"false\" Priority=\"10\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" QFormat=\"true\" Name=\"Title\"  /> <w:LsdException Locked=\"false\" Priority=\"1\" Name=\"Default Paragraph Font\"  /> <w:LsdException Locked=\"false\" Priority=\"11\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" QFormat=\"true\" Name=\"Subtitle\"  /> <w:LsdException Locked=\"false\" Priority=\"22\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" QFormat=\"true\" Name=\"Strong\"  /> <w:LsdException Locked=\"false\" Priority=\"20\" SemiHidden=\"false\"    
UnhideWhenUsed=\"false\" QFormat=\"true\" Name=\"Emphasis\"  /> <w:LsdException Locked=\"false\" Priority=\"59\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Table Grid\"  /> <w:LsdException Locked=\"false\" UnhideWhenUsed=\"false\" Name=\"Placeholder Text\"  /> <w:LsdException Locked=\"false\" Priority=\"1\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" QFormat=\"true\" Name=\"No Spacing\"  /> <w:LsdException Locked=\"false\" Priority=\"60\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Light Shading\"  /> <w:LsdException Locked=\"false\" Priority=\"61\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Light List\"  /> <w:LsdException Locked=\"false\" Priority=\"62\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Light Grid\"  /> <w:LsdException Locked=\"false\" Priority=\"63\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Medium Shading 1\"  /> <w:LsdException Locked=\"false\" Priority=\"64\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Medium Shading 2\"  /> <w:LsdException Locked=\"false\" Priority=\"65\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Medium List 1\"  /> <w:LsdException Locked=\"false\" Priority=\"66\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Medium List 2\"  /> <w:LsdException Locked=\"false\" Priority=\"67\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Medium Grid 1\"  /> <w:LsdException Locked=\"false\" Priority=\"68\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Medium Grid 2\"  /> <w:LsdException Locked=\"false\" Priority=\"69\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Medium Grid 3\"  /> <w:LsdException Locked=\"false\" Priority=\"70\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Dark List\"  /> <w:LsdException Locked=\"false\" Priority=\"71\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Colorful Shading\"  /> <w:LsdException Locked=\"false\" Priority=\"72\" SemiHidden=\"false\"    
UnhideWhenUsed=\"false\" Name=\"Colorful List\"  /> <w:LsdException Locked=\"false\" Priority=\"73\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Colorful Grid\"  /> <w:LsdException Locked=\"false\" Priority=\"60\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Light Shading Accent 1\"  /> <w:LsdException Locked=\"false\" Priority=\"61\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Light List Accent 1\"  /> <w:LsdException Locked=\"false\" Priority=\"62\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Light Grid Accent 1\"  /> <w:LsdException Locked=\"false\" Priority=\"63\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Medium Shading 1 Accent 1\"  /> <w:LsdException Locked=\"false\" Priority=\"64\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Medium Shading 2 Accent 1\"  /> <w:LsdException Locked=\"false\" Priority=\"65\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Medium List 1 Accent 1\"  /> <w:LsdException Locked=\"false\" UnhideWhenUsed=\"false\" Name=\"Revision\"  /> <w:LsdException Locked=\"false\" Priority=\"34\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" QFormat=\"true\" Name=\"List Paragraph\"  /> <w:LsdException Locked=\"false\" Priority=\"29\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" QFormat=\"true\" Name=\"Quote\"  /> <w:LsdException Locked=\"false\" Priority=\"30\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" QFormat=\"true\" Name=\"Intense Quote\"  /> <w:LsdException Locked=\"false\" Priority=\"66\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Medium List 2 Accent 1\"  /> <w:LsdException Locked=\"false\" Priority=\"67\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Medium Grid 1 Accent 1\"  /> <w:LsdException Locked=\"false\" Priority=\"68\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Medium Grid 2 Accent 1\"  /> <w:LsdException Locked=\"false\" Priority=\"69\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Medium Grid 
3 Accent 1\"  /> <w:LsdException Locked=\"false\" Priority=\"70\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Dark List Accent 1\"  /> <w:LsdException Locked=\"false\" Priority=\"71\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Colorful Shading Accent 1\"  /> <w:LsdException Locked=\"false\" Priority=\"72\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Colorful List Accent 1\"  /> <w:LsdException Locked=\"false\" Priority=\"73\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Colorful Grid Accent 1\"  /> <w:LsdException Locked=\"false\" Priority=\"60\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Light Shading Accent 2\"  /> <w:LsdException Locked=\"false\" Priority=\"61\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Light List Accent 2\"  /> <w:LsdException Locked=\"false\" Priority=\"62\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Light Grid Accent 2\"  /> <w:LsdException Locked=\"false\" Priority=\"63\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Medium Shading 1 Accent 2\"  /> <w:LsdException Locked=\"false\" Priority=\"64\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Medium Shading 2 Accent 2\"  /> <w:LsdException Locked=\"false\" Priority=\"65\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Medium List 1 Accent 2\"  /> <w:LsdException Locked=\"false\" Priority=\"66\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Medium List 2 Accent 2\"  /> <w:LsdException Locked=\"false\" Priority=\"67\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Medium Grid 1 Accent 2\"  /> <w:LsdException Locked=\"false\" Priority=\"68\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Medium Grid 2 Accent 2\"  /> <w:LsdException Locked=\"false\" Priority=\"69\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Medium Grid 3 Accent 2\"  /> <w:LsdException Locked=\"false\" Priority=\"70\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" 
Name=\"Dark List Accent 2\"  /> <w:LsdException Locked=\"false\" Priority=\"71\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Colorful Shading Accent 2\"  /> <w:LsdException Locked=\"false\" Priority=\"72\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Colorful List Accent 2\"  /> <w:LsdException Locked=\"false\" Priority=\"73\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Colorful Grid Accent 2\"  /> <w:LsdException Locked=\"false\" Priority=\"60\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Light Shading Accent 3\"  /> <w:LsdException Locked=\"false\" Priority=\"61\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Light List Accent 3\"  /> <w:LsdException Locked=\"false\" Priority=\"62\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Light Grid Accent 3\"  /> <w:LsdException Locked=\"false\" Priority=\"63\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Medium Shading 1 Accent 3\"  /> <w:LsdException Locked=\"false\" Priority=\"64\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Medium Shading 2 Accent 3\"  /> <w:LsdException Locked=\"false\" Priority=\"65\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Medium List 1 Accent 3\"  /> <w:LsdException Locked=\"false\" Priority=\"66\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Medium List 2 Accent 3\"  /> <w:LsdException Locked=\"false\" Priority=\"67\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Medium Grid 1 Accent 3\"  /> <w:LsdException Locked=\"false\" Priority=\"68\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Medium Grid 2 Accent 3\"  /> <w:LsdException Locked=\"false\" Priority=\"69\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Medium Grid 3 Accent 3\"  /> <w:LsdException Locked=\"false\" Priority=\"70\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Dark List Accent 3\"  /> <w:LsdException Locked=\"false\" Priority=\"71\" SemiHidden=\"false\"    
UnhideWhenUsed=\"false\" Name=\"Colorful Shading Accent 3\"  /> <w:LsdException Locked=\"false\" Priority=\"72\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Colorful List Accent 3\"  /> <w:LsdException Locked=\"false\" Priority=\"73\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Colorful Grid Accent 3\"  /> <w:LsdException Locked=\"false\" Priority=\"60\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Light Shading Accent 4\"  /> <w:LsdException Locked=\"false\" Priority=\"61\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Light List Accent 4\"  /> <w:LsdException Locked=\"false\" Priority=\"62\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Light Grid Accent 4\"  /> <w:LsdException Locked=\"false\" Priority=\"63\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Medium Shading 1 Accent 4\"  /> <w:LsdException Locked=\"false\" Priority=\"64\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Medium Shading 2 Accent 4\"  /> <w:LsdException Locked=\"false\" Priority=\"65\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Medium List 1 Accent 4\"  /> <w:LsdException Locked=\"false\" Priority=\"66\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Medium List 2 Accent 4\"  /> <w:LsdException Locked=\"false\" Priority=\"67\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Medium Grid 1 Accent 4\"  /> <w:LsdException Locked=\"false\" Priority=\"68\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Medium Grid 2 Accent 4\"  /> <w:LsdException Locked=\"false\" Priority=\"69\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Medium Grid 3 Accent 4\"  /> <w:LsdException Locked=\"false\" Priority=\"70\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Dark List Accent 4\"  /> <w:LsdException Locked=\"false\" Priority=\"71\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Colorful Shading Accent 4\"  /> <w:LsdException Locked=\"false\" Priority=\"72\" 
SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Colorful List Accent 4\"  /> <w:LsdException Locked=\"false\" Priority=\"73\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Colorful Grid Accent 4\"  /> <w:LsdException Locked=\"false\" Priority=\"60\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Light Shading Accent 5\"  /> <w:LsdException Locked=\"false\" Priority=\"61\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Light List Accent 5\"  /> <w:LsdException Locked=\"false\" Priority=\"62\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Light Grid Accent 5\"  /> <w:LsdException Locked=\"false\" Priority=\"63\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Medium Shading 1 Accent 5\"  /> <w:LsdException Locked=\"false\" Priority=\"64\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Medium Shading 2 Accent 5\"  /> <w:LsdException Locked=\"false\" Priority=\"65\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Medium List 1 Accent 5\"  /> <w:LsdException Locked=\"false\" Priority=\"66\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Medium List 2 Accent 5\"  /> <w:LsdException Locked=\"false\" Priority=\"67\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Medium Grid 1 Accent 5\"  /> <w:LsdException Locked=\"false\" Priority=\"68\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Medium Grid 2 Accent 5\"  /> <w:LsdException Locked=\"false\" Priority=\"69\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Medium Grid 3 Accent 5\"  /> <w:LsdException Locked=\"false\" Priority=\"70\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Dark List Accent 5\"  /> <w:LsdException Locked=\"false\" Priority=\"71\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Colorful Shading Accent 5\"  /> <w:LsdException Locked=\"false\" Priority=\"72\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Colorful List Accent 5\"  /> <w:LsdException Locked=\"false\" 
Priority=\"73\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Colorful Grid Accent 5\"  /> <w:LsdException Locked=\"false\" Priority=\"60\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Light Shading Accent 6\"  /> <w:LsdException Locked=\"false\" Priority=\"61\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Light List Accent 6\"  /> <w:LsdException Locked=\"false\" Priority=\"62\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Light Grid Accent 6\"  /> <w:LsdException Locked=\"false\" Priority=\"63\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Medium Shading 1 Accent 6\"  /> <w:LsdException Locked=\"false\" Priority=\"64\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Medium Shading 2 Accent 6\"  /> <w:LsdException Locked=\"false\" Priority=\"65\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Medium List 1 Accent 6\"  /> <w:LsdException Locked=\"false\" Priority=\"66\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Medium List 2 Accent 6\"  /> <w:LsdException Locked=\"false\" Priority=\"67\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Medium Grid 1 Accent 6\"  /> <w:LsdException Locked=\"false\" Priority=\"68\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Medium Grid 2 Accent 6\"  /> <w:LsdException Locked=\"false\" Priority=\"69\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Medium Grid 3 Accent 6\"  /> <w:LsdException Locked=\"false\" Priority=\"70\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Dark List Accent 6\"  /> <w:LsdException Locked=\"false\" Priority=\"71\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Colorful Shading Accent 6\"  /> <w:LsdException Locked=\"false\" Priority=\"72\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Colorful List Accent 6\"  /> <w:LsdException Locked=\"false\" Priority=\"73\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" Name=\"Colorful Grid Accent 6\"  /> <w:LsdException 
Locked=\"false\" Priority=\"19\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" QFormat=\"true\" Name=\"Subtle Emphasis\"  /> <w:LsdException Locked=\"false\" Priority=\"21\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" QFormat=\"true\" Name=\"Intense Emphasis\"  /> <w:LsdException Locked=\"false\" Priority=\"31\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" QFormat=\"true\" Name=\"Subtle Reference\"  /> <w:LsdException Locked=\"false\" Priority=\"32\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" QFormat=\"true\" Name=\"Intense Reference\"  /> <w:LsdException Locked=\"false\" Priority=\"33\" SemiHidden=\"false\"    UnhideWhenUsed=\"false\" QFormat=\"true\" Name=\"Book Title\"  /> <w:LsdException Locked=\"false\" Priority=\"37\" Name=\"Bibliography\"  /> <w:LsdException Locked=\"false\" Priority=\"39\" QFormat=\"true\" Name=\"TOC Heading\"  /> </w:LatentStyles> </xml><![endif][if gte mso 10]> <style>  /* Style Definitions */  table.MsoNormalTable {mso-style-name:\"Table Normal\"; mso-tstyle-rowband-size:0; mso-tstyle-colband-size:0; mso-style-noshow:yes; mso-style-priority:99; mso-style-parent:\"\"; mso-padding-alt:0in 5.4pt 0in 5.4pt; mso-para-margin-top:0in; mso-para-margin-right:0in; mso-para-margin-bottom:10.0pt; mso-para-margin-left:0in; line-height:115%; mso-pagination:widow-orphan; font-size:11.0pt; font-family:\"Calibri\",\"sans-serif\"; mso-ascii-font-family:Calibri; mso-ascii-theme-font:minor-latin; mso-hansi-font-family:Calibri; mso-hansi-theme-font:minor-latin; mso-bidi-font-family:\"Times New Roman\"; mso-bidi-theme-font:minor-bidi;} </style> <![endif]Resistance bands, also known as exercise bands, fitness tubes, and resistance cords, are a great way to add variety to your strength training routine. Black Mountain Products resistance bands are made from high quality, natural latex material to ensure maximum life. Each resistance band set comes with a soft cushioned foam handles for comfort during exercise. 
Resistance bands are an excellent alternative to free weights, workout machines, and traditional body weight exercises. Training with Black Mountain Products resistance bands may assist with rehabilitative exercises, fat loss, building muscle and are ideal for travel.What\\'s in the box?This set includes:Cushioned foam handlesDoor anchorCarrying bagAnkle strapStarter guideFive bands of varying resistance:Yellow = 2-to-4 poundsBlue = 4-to-6 poundsGreen = 10-to-12 poundsBlack = 15-to-20 poundsRed = 25-to-30 poundsBenefits of Working with Resistance BandsIn addition to being bulky and immobile, free weights are often limited in the number of exercises you can perform. Resistance bands, on the other hand, offer a surprisingly varied number of training exercises by simply changing your bodily position in order to change the tension on your muscles. Bands take up little space, are mobile, and easy to use on any fitness level.WarrantyThis Black Mountain Products item includes a limited 90-day manufacturer\\'s warranty against defects in materials and workmanship.About Black Mountain ProductsBlack Mountain Products manufactures and distributes high-quality home exercise equipment, with a complete line of resistance bands and doorway chin-up bars designed to deliver results in the comfort and convenience of your home, particularly when paired with home exercise programs such as P90X.',\n 'Outers Universal 32-Piece Blow Molded Gun Cleaning Kit Outers Sports & Outdoors Hunting & Fishing Hunting Gun Maintenance Gun Cleaning Kits Outers now offers this rigid and durable hard case to stow and organize an assortment of gun care products to clean shotguns, rifles, or handguns, quickly and effectively.  Their blow molded design has a specific compartment for each piece within that kit.  
Plus, the kit has additional compartments to hold bottles of gun-cleaning chemicals or any other gear you want to keep nearby.',\n 'Power Hooks (Pair)   Sports & Outdoors Exercise & Fitness Power Hooks (Pair) at Power Systems, Inc.',\n \"Pacific Play Tents Playchute 10' Parachute (Colors and Designs May Vary) PACIFIC PLAY TENTS Sports & Outdoors Other Sports Gymnastics Accessories Parachutes The parachute gambit is a teacher's secret weapon--a tried-and-true way to annihilate the afternoon blahs. At approximately 9 feet in diameter, with handles all the way around for little (or big) hands to grab, this sunny Playchute Parachute is just smaller than the classroom standard. Fling it into the air and watch it drift down. Run underneath it and watch the colors rain down. Throw a few stuffed animals in the center and bounce them around. Simple, satisfying thrills, and worth every penny.--Claire Dederer\",\n 'Find Me 6ft Tunnel PACIFIC PLAY TENTS Sports & Outdoors Leisure Sports & Game Room Trampolines & Accessories Trampolines Find Me 6ft Tunnel. The Pacific Play Tents Find Me Tunnel is perfect for developing cooperative play and enhancing muscle development! This innovative design incorporates a new padded interior system for safer more comfortable playtime. Your little one will enjoy countless hours traveling from one destination to another! Wipes clean with a damp cloth. Features sturdy, hardened spring steel construction. Tunnel collapses flat for easy storage. For children ages 3 years and older. Materials: Polyester Dacron 600 x 300 weave with 600mm PU coating, Steel. 
Dimensions: 72\"L x 19\"W x 19\"H; 4 lbs',\n 'Club Champ Super Sized Electic Putt N\\' Hazzard Putting Mat Club Champ Sports & Outdoors Golf Training Equipment Putting Mats Non-directional turf simulates real grass; regulation-size cup  is \"protected\" by sand trap and water hazard; realistic cup and golf hazards  return ball electronicallyLarge 9-by-16-inch putting surfaceUse at home, office, clubhouse, and partiesNo batteries required for operation --',\n 'Victorinox Swiss Army SwissTool with Pouch Victorinox Sports & Outdoors Outdoor Gear Camping & Hiking Knives & Tools Folding Knives From the renowned company that created the Swiss Army knife a century ago comes this folding tool kit--neatly contained within a single implement that measures just 4-1/2 inches long and 1-1/3 inches thick, weighs just 10 ounces, and comes in a polyester/nylon belt pouch. Its rugged stainless-steel construction and ease of use make this tool kit is a tribute to the precision of Swiss engineering. Tools open individually (no clumping), lock in place, and fold with a push button. The SwissTool carries a lifetime warranty against defects. The tools include pliers; 2-, 3-, 5-, and 7-1/2-mm screwdrivers; Phillips head screwdriver; wire cutter; bottle opener; large knife blade; serrated blade; metal file; metal saw; wood saw; reamer/punch; chisel/scraper; crate opener; wire bender; wire stripper; wire scraper; can opener; 9-inch ruler; 230-cm ruler; electrical crimper; and lanyard hole. --Fred Brack']"
     },
     "execution_count": 6,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "sentences = []\n",
    "for i, row in df.iterrows():\n",
    "    sen = row['title'] + ' ' + row['brand'] + ' '\n",
    "    cates = eval(row['categories'])\n",
    "    if isinstance(cates, list):\n",
    "        for c in cates[0]:\n",
    "            sen = sen + c + ' '\n",
    "    sen += row[desc_str]\n",
    "    sen = sen.replace('\\n', ' ')\n",
    "\n",
    "    sentences.append(sen)\n",
    "\n",
    "sentences[:10]"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n"
    }
   }
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "outputs": [],
   "source": [
    "\n",
    "course_list = df[i_id].tolist()\n",
    "#sentences = df[desc_str].tolist()\n",
    "\n",
    "assert course_list[-1] == len(course_list) - 1"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n"
    }
   }
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {
    "collapsed": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "text encoded!\n",
      "done!\n"
     ]
    }
   ],
   "source": [
    "# should `pip install sentence_transformers` first\n",
    "from sentence_transformers import SentenceTransformer\n",
    "\n",
    "model = SentenceTransformer('all-MiniLM-L6-v2')\n",
    "\n",
    "sentence_embeddings = model.encode(sentences)\n",
    "print('text encoded!')\n",
    "\n",
    "assert sentence_embeddings.shape[0] == df.shape[0]\n",
    "np.save(os.path.join(file_path, 'text_feat.npy'), sentence_embeddings)\n",
    "print('done!')\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "outputs": [
    {
     "data": {
      "text/plain": "array([[-0.12623426,  0.03341388, -0.01948772, ..., -0.1013338 ,\n         0.0514545 ,  0.07334712],\n       [ 0.0068029 ,  0.00055715, -0.03157376, ...,  0.03421347,\n         0.02450724,  0.03113373],\n       [-0.12395922,  0.05546276, -0.00272348, ..., -0.19819073,\n         0.04171506,  0.05105354],\n       ...,\n       [-0.06516663,  0.04306812, -0.00357155, ...,  0.02348825,\n        -0.02514204,  0.06650119],\n       [ 0.05071206,  0.03823141, -0.04340539, ...,  0.00951272,\n         0.05093095,  0.03292951],\n       [-0.13305898,  0.07934257, -0.01714416, ..., -0.11284354,\n        -0.00523037,  0.03694083]], dtype=float32)"
     },
     "execution_count": 9,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "sentence_embeddings[:10]"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n"
    }
   }
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "(18357, 384)\n"
     ]
    },
    {
     "data": {
      "text/plain": "array([[-0.12623426,  0.03341388, -0.01948772, ..., -0.1013338 ,\n         0.0514545 ,  0.07334712],\n       [ 0.0068029 ,  0.00055715, -0.03157376, ...,  0.03421347,\n         0.02450724,  0.03113373],\n       [-0.12395922,  0.05546276, -0.00272348, ..., -0.19819073,\n         0.04171506,  0.05105354],\n       ...,\n       [-0.06516663,  0.04306812, -0.00357155, ...,  0.02348825,\n        -0.02514204,  0.06650119],\n       [ 0.05071206,  0.03823141, -0.04340539, ...,  0.00951272,\n         0.05093095,  0.03292951],\n       [-0.13305898,  0.07934257, -0.01714416, ..., -0.11284354,\n        -0.00523037,  0.03694083]], dtype=float32)"
     },
     "execution_count": 10,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "load_txt_feat = np.load('text_feat.npy', allow_pickle=True)\n",
    "print(load_txt_feat.shape)\n",
    "load_txt_feat[:10]"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n"
    }
   }
  },
  {
   "cell_type": "markdown",
   "source": [],
   "metadata": {
    "collapsed": false
   }
  },
  {
   "cell_type": "markdown",
   "source": [
    "# Image encoder (V0),following LATTICE, averaging over for missed items"
   ],
   "metadata": {
    "collapsed": false
   }
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "outputs": [
    {
     "data": {
      "text/plain": "   itemID        asin                                              title  \\\n0       0  1881509818           Ghost Inc Glock Armorers Tool 3/32 Punch   \n1       1  2094869245  5 LED Bicycle Rear Tail Red Bike Torch Laser B...   \n2       2  7245456259  Black Mountain Products Single Resistance Band...   \n3       3  7245456313  Black Mountain Products Resistance Band Set wi...   \n4       4  B000002NUS  Outers Universal 32-Piece Blow Molded Gun Clea...   \n\n   price                                              imUrl  \\\n0   9.99  http://ecx.images-amazon.com/images/I/21iMxsyD...   \n1   8.26  http://ecx.images-amazon.com/images/I/51RtwnJw...   \n2  10.49  http://ecx.images-amazon.com/images/I/411Ikpf1...   \n3  32.99  http://ecx.images-amazon.com/images/I/51FdHlZS...   \n4  21.99  http://ecx.images-amazon.com/images/I/510GjWgd...   \n\n                                             related           brand  \\\n0  {'also_bought': ['B000U3YWEM', 'B000U401J6', '...           Ghost   \n1  {'also_bought': ['B0081O93N2', 'B00EYTCHJA', '...                   \n2  {'also_bought': ['B00DDBS2JE', 'B00H1KNHE8', '...  Black Mountain   \n3  {'also_bought': ['1612431712', 'B00GSBMW2Y', '...  Black Mountain   \n4  {'also_bought': ['B000PW64JY', 'B0010KHNEU', '...          Outers   \n\n                                          categories  \\\n0  [['Sports & Outdoors', 'Hunting & Fishing', 'H...   \n1  [['Sports & Outdoors', 'Cycling', 'Lights & Re...   \n2  [['Sports & Outdoors', 'Exercise & Fitness', '...   \n3  [['Sports & Outdoors', 'Exercise & Fitness', '...   \n4  [['Sports & Outdoors', 'Hunting & Fishing', 'H...   \n\n                           salesRank  \\\n0  {'Sports &amp; Outdoors': 172909}   \n1   {'Sports &amp; Outdoors': 14293}   \n2    {'Sports &amp; Outdoors': 1010}   \n3      {'Sports &amp; Outdoors': 15}   \n4   {'Sports &amp; Outdoors': 26738}   \n\n                                         description  \n0  Ghost Armorer Tool (1). 
The GAT is made with a...  \n1  This newly-designed Laser tail light can emit ...  \n2  Black Mountain Products single resistance band...  \n3  [if gte mso 9]><xml> <o:OfficeDocumentSettings...  \n4  Outers now offers this rigid and durable hard ...  ",
      "text/html": "<div>\n<style scoped>\n    .dataframe tbody tr th:only-of-type {\n        vertical-align: middle;\n    }\n\n    .dataframe tbody tr th {\n        vertical-align: top;\n    }\n\n    .dataframe thead th {\n        text-align: right;\n    }\n</style>\n<table border=\"1\" class=\"dataframe\">\n  <thead>\n    <tr style=\"text-align: right;\">\n      <th></th>\n      <th>itemID</th>\n      <th>asin</th>\n      <th>title</th>\n      <th>price</th>\n      <th>imUrl</th>\n      <th>related</th>\n      <th>brand</th>\n      <th>categories</th>\n      <th>salesRank</th>\n      <th>description</th>\n    </tr>\n  </thead>\n  <tbody>\n    <tr>\n      <th>0</th>\n      <td>0</td>\n      <td>1881509818</td>\n      <td>Ghost Inc Glock Armorers Tool 3/32 Punch</td>\n      <td>9.99</td>\n      <td>http://ecx.images-amazon.com/images/I/21iMxsyD...</td>\n      <td>{'also_bought': ['B000U3YWEM', 'B000U401J6', '...</td>\n      <td>Ghost</td>\n      <td>[['Sports &amp; Outdoors', 'Hunting &amp; Fishing', 'H...</td>\n      <td>{'Sports &amp;amp; Outdoors': 172909}</td>\n      <td>Ghost Armorer Tool (1). 
The GAT is made with a...</td>\n    </tr>\n    <tr>\n      <th>1</th>\n      <td>1</td>\n      <td>2094869245</td>\n      <td>5 LED Bicycle Rear Tail Red Bike Torch Laser B...</td>\n      <td>8.26</td>\n      <td>http://ecx.images-amazon.com/images/I/51RtwnJw...</td>\n      <td>{'also_bought': ['B0081O93N2', 'B00EYTCHJA', '...</td>\n      <td></td>\n      <td>[['Sports &amp; Outdoors', 'Cycling', 'Lights &amp; Re...</td>\n      <td>{'Sports &amp;amp; Outdoors': 14293}</td>\n      <td>This newly-designed Laser tail light can emit ...</td>\n    </tr>\n    <tr>\n      <th>2</th>\n      <td>2</td>\n      <td>7245456259</td>\n      <td>Black Mountain Products Single Resistance Band...</td>\n      <td>10.49</td>\n      <td>http://ecx.images-amazon.com/images/I/411Ikpf1...</td>\n      <td>{'also_bought': ['B00DDBS2JE', 'B00H1KNHE8', '...</td>\n      <td>Black Mountain</td>\n      <td>[['Sports &amp; Outdoors', 'Exercise &amp; Fitness', '...</td>\n      <td>{'Sports &amp;amp; Outdoors': 1010}</td>\n      <td>Black Mountain Products single resistance band...</td>\n    </tr>\n    <tr>\n      <th>3</th>\n      <td>3</td>\n      <td>7245456313</td>\n      <td>Black Mountain Products Resistance Band Set wi...</td>\n      <td>32.99</td>\n      <td>http://ecx.images-amazon.com/images/I/51FdHlZS...</td>\n      <td>{'also_bought': ['1612431712', 'B00GSBMW2Y', '...</td>\n      <td>Black Mountain</td>\n      <td>[['Sports &amp; Outdoors', 'Exercise &amp; Fitness', '...</td>\n      <td>{'Sports &amp;amp; Outdoors': 15}</td>\n      <td>[if gte mso 9]&gt;&lt;xml&gt; &lt;o:OfficeDocumentSettings...</td>\n    </tr>\n    <tr>\n      <th>4</th>\n      <td>4</td>\n      <td>B000002NUS</td>\n      <td>Outers Universal 32-Piece Blow Molded Gun Clea...</td>\n      <td>21.99</td>\n      <td>http://ecx.images-amazon.com/images/I/510GjWgd...</td>\n      <td>{'also_bought': ['B000PW64JY', 'B0010KHNEU', '...</td>\n      <td>Outers</td>\n      <td>[['Sports &amp; Outdoors', 'Hunting &amp; Fishing', 
'H...</td>\n      <td>{'Sports &amp;amp; Outdoors': 26738}</td>\n      <td>Outers now offers this rigid and durable hard ...</td>\n    </tr>\n  </tbody>\n</table>\n</div>"
     },
     "execution_count": 11,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "df[:5]"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n"
    }
   }
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "outputs": [],
   "source": [
    "import array\n",
    "\n",
    "def readImageFeatures(path):\n",
    "  f = open(path, 'rb')\n",
    "  while True:\n",
    "    asin = f.read(10).decode('UTF-8')\n",
    "    if asin == '': break\n",
    "    a = array.array('f')\n",
    "    a.fromfile(f, 4096)\n",
    "    yield asin, a.tolist()"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n"
    }
   }
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "# of items not in processed image features: 180\n",
      "done!\n"
     ]
    }
   ],
   "source": [
    "\n",
    "img_data = readImageFeatures(\"image_features_Sports_and_Outdoors.b\")\n",
    "item2id = dict(zip(df['asin'], df['itemID']))\n",
    "\n",
    "feats = {}\n",
    "avg = []\n",
    "for d in img_data:\n",
    "    if d[0] in item2id:\n",
    "        feats[int(item2id[d[0]])] = d[1]\n",
    "        avg.append(d[1])\n",
    "avg = np.array(avg).mean(0).tolist()\n",
    "\n",
    "ret = []\n",
    "non_no = []\n",
    "for i in range(len(item2id)):\n",
    "    if i in feats:\n",
    "        ret.append(feats[i])\n",
    "    else:\n",
    "        non_no.append(i)\n",
    "        ret.append(avg)\n",
    "\n",
    "print('# of items not in processed image features:', len(non_no))\n",
    "assert len(ret) == len(item2id)\n",
    "np.save('image_feat.npy', np.array(ret))\n",
    "np.savetxt(\"missed_img_itemIDs.csv\", non_no, delimiter =\",\", fmt ='%d')\n",
    "print('done!')"
   ],
   "metadata": {
    "collapsed": false,
    "pycharm": {
     "name": "#%%\n"
    }
   }
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 2
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython2",
   "version": "2.7.6"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 0
}

================================================
FILE: preprocessing/README.md
================================================
# Preprocessing from raw data 从原始数据处理
- The following preprocessing steps can be quite tedious. Please post issues if you cannot run the scripts.

- datasets: [Amazon](http://jmcauley.ucsd.edu/data/amazon/links.html)  
-- Rating file in `Files/Small subsets for experimentation`  
-- Meta files in `Per-category files`, [metadata], [image features]  

There has been an issue with the dataset site lately, 
as it automatically redirects to an updated version of the dataset. 
Keep pressing `ESC` to stop the redirecting action.

## Step by step
1. Performing 5-core filtering, re-indexing - `run 0rating2inter.ipynb`
2. Train/valid/test data splitting - `run 1splitting.ipynb`
3. Reindexing feature IDs with generated IDs in step 1 - `run 2reindex-feat.ipynb`
4. Encoding text/image features - `run 3feat-encoder.ipynb`
5. Filling your data description file `*.yaml` under `src/configs/dataset` with the generated file names `*.inter`, `*-feat.npy`, etc.
6. Specifying your evaluated dataset by cmd: `python main.py -d sports -m BM3`.


## DualGNN requires additional operation to generate the u-u graph
1. Run `dualgnn-gen-u-u-matrix.py` on a dataset `baby`:  
`python dualgnn-gen-u-u-matrix.py -d baby`
2. The generated u-u graph should be located in the same dir as the dataset.


================================================
FILE: preprocessing/dualgnn-gen-u-u-matrix.py
================================================
# Corresponds to the Preprocess-ml-imdb.py file


import numpy as np
from collections import defaultdict
from tqdm import tqdm
import torch
import pandas as pd
import os
import yaml
import argparse


def gen_user_matrix(all_edge, no_users):
    """Build a symmetric user-user co-interaction matrix.

    Args:
        all_edge: iterable of (user_id, item_id) pairs (training interactions).
        no_users: total number of users; defines the matrix dimension.

    Returns:
        torch.Tensor of shape (no_users, no_users) where entry (u, v) is the
        number of items that users u and v have both interacted with
        (diagonal entries stay 0).
    """
    # Collect the set of items each user interacted with.
    edge_dict = defaultdict(set)
    for user, item in all_edge:
        edge_dict[user].add(item)

    user_graph_matrix = torch.zeros(no_users, no_users)
    key_list = sorted(edge_dict.keys())
    # Pairwise set intersections over all user pairs; O(|U|^2) by design,
    # each unordered pair is visited exactly once.
    for head in tqdm(range(len(key_list))):
        head_key = key_list[head]
        item_head = edge_dict[head_key]
        for rear in range(head + 1, len(key_list)):
            rear_key = key_list[rear]
            inter_len = len(item_head & edge_dict[rear_key])
            if inter_len > 0:
                # Symmetric: co-interaction count is order-independent.
                user_graph_matrix[head_key][rear_key] = inter_len
                user_graph_matrix[rear_key][head_key] = inter_len

    return user_graph_matrix


if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--dataset', '-d', type=str, default='games', help='name of dataset')
    args = parser.parse_args()
    dataset_name = args.dataset
    print(f'Generating u-u matrix for {dataset_name} ...\n')

    # Load the overall config plus the dataset-specific config;
    # keys in later files override earlier ones.
    config = {}
    os.chdir('../src')
    con_dir = os.path.join(os.getcwd(), 'configs')  # config dir
    conf_files = [
        os.path.join(con_dir, "overall.yaml"),
        os.path.join(con_dir, "dataset", "{}.yaml".format(dataset_name)),
    ]
    for file in conf_files:
        if os.path.isfile(file):
            with open(file, 'r', encoding='utf-8') as f:
                config.update(yaml.safe_load(f))

    dataset_path = os.path.abspath(config['data_path'] + dataset_name)
    print('data path:\t', dataset_path)
    uid_field = config['USER_ID_FIELD']
    iid_field = config['ITEM_ID_FIELD']

    # Count users over the FULL interaction file so the matrix covers every
    # user id, then keep only the training split (x_label == 0) for the
    # co-interaction counting.
    train_df = pd.read_csv(os.path.join(dataset_path, config['inter_file_name']), sep='\t')
    num_user = len(pd.unique(train_df[uid_field]))
    train_df = train_df[train_df['x_label'] == 0].copy()
    train_data = train_df[[uid_field, iid_field]].to_numpy()

    user_graph_matrix = gen_user_matrix(train_data, num_user)

    # Number of co-interacting neighbors per user (nonzero row entries).
    user_num = torch.zeros(num_user)
    for i in range(num_user):
        user_num[i] = len(torch.nonzero(user_graph_matrix[i]))

    # For each user keep at most the top-200 neighbors:
    # user_graph_dict[i] = [neighbor indices, co-interaction counts].
    user_graph_dict = {}
    for i in range(num_user):
        k = min(int(user_num[i]), 200)
        user_i = torch.topk(user_graph_matrix[i], k)
        user_graph_dict[i] = [user_i.indices.numpy().tolist(),
                              user_i.values.numpy().tolist()]

    np.save(os.path.join(dataset_path, config['user_graph_dict_file']), user_graph_dict, allow_pickle=True)

================================================
FILE: requirements.txt
================================================
numpy==1.21.5
pandas==1.3.5
# python 3.7.11 (interpreter version, not a pip-installable package)
scipy==1.7.3
torch==1.11.0
pyyaml==6.0


================================================
FILE: src/common/abstract_recommender.py
================================================
# coding: utf-8
# @email  : enoche.chow@gmail.com

import os
import numpy as np
import torch
import torch.nn as nn


class AbstractRecommender(nn.Module):
    r"""Base class for all models."""

    def pre_epoch_processing(self):
        """Hook invoked before each training epoch; override if needed."""
        pass

    def post_epoch_processing(self):
        """Hook invoked after each training epoch; override if needed."""
        pass

    def calculate_loss(self, interaction):
        r"""Calculate the training loss for a batch data.

        Args:
            interaction (Interaction): Interaction class of the batch.

        Returns:
            torch.Tensor: Training loss, shape: []
        """
        raise NotImplementedError

    def predict(self, interaction):
        r"""Predict the scores between users and items.

        Args:
            interaction (Interaction): Interaction class of the batch.

        Returns:
            torch.Tensor: Predicted scores for given users and items, shape: [batch_size]
        """
        raise NotImplementedError

    def full_sort_predict(self, interaction):
        r"""full sort prediction function.
        Given users, calculate the scores between users and all candidate items.

        Args:
            interaction (Interaction): Interaction class of the batch.

        Returns:
            torch.Tensor: Predicted scores for given users and all candidate items,
            shape: [n_batch_users * n_candidate_items]
        """
        raise NotImplementedError

    def __str__(self):
        """
        Model prints with number of trainable parameters.

        Only parameters with ``requires_grad=True`` are counted, matching the
        "Trainable parameters" label (the previous implementation counted
        frozen parameters as well).
        """
        model_parameters = filter(lambda p: p.requires_grad, self.parameters())
        params = sum(np.prod(p.size()) for p in model_parameters)
        return super().__str__() + '\nTrainable parameters: {}'.format(params)


class GeneralRecommender(AbstractRecommender):
    """Abstract general recommender; all general models should subclass this.

    Pulls basic dataset statistics and training parameters from ``config`` and
    ``dataloader``, and — for multimodal models that are not trained
    end-to-end — loads pre-extracted visual/textual item features from disk.
    """
    def __init__(self, config, dataloader):
        super(GeneralRecommender, self).__init__()

        # dataset info
        self.USER_ID = config['USER_ID_FIELD']
        self.ITEM_ID = config['ITEM_ID_FIELD']
        self.NEG_ITEM_ID = config['NEG_PREFIX'] + self.ITEM_ID
        self.n_users = dataloader.dataset.get_user_num()
        self.n_items = dataloader.dataset.get_item_num()

        # training parameters
        self.batch_size = config['train_batch_size']
        self.device = config['device']

        # multimodal features: None when the file is absent (or end-to-end mode)
        self.v_feat = None
        self.t_feat = None
        if not config['end2end'] and config['is_multimodal_model']:
            data_dir = os.path.abspath(config['data_path'] + config['dataset'])

            def _load_feat(file_name):
                # Returns the stored feature matrix as a float tensor on the
                # target device, or None when the file does not exist.
                feat_path = os.path.join(data_dir, file_name)
                if not os.path.isfile(feat_path):
                    return None
                arr = np.load(feat_path, allow_pickle=True)
                return torch.from_numpy(arr).type(torch.FloatTensor).to(self.device)

            self.v_feat = _load_feat(config['vision_feature_file'])
            self.t_feat = _load_feat(config['text_feature_file'])
            assert self.v_feat is not None or self.t_feat is not None, 'Features all NONE'


================================================
FILE: src/common/encoders.py
================================================
import copy
import math

import numpy as np
import torch
import torch.nn as nn
from common.abstract_recommender import GeneralRecommender
import scipy.sparse as sp


class LightGCN_Encoder(GeneralRecommender):
    def __init__(self, config, dataset):
        """Set up the LightGCN encoder: embeddings plus the normalized adjacency.

        Args:
            config: configuration mapping; reads 'embedding_size' and 'n_layers'
                (n_layers defaults to 3 when the key is None).
            dataset: dataset object exposing inter_matrix(form='coo').
        """
        super(LightGCN_Encoder, self).__init__(config, dataset)
        # load dataset info
        # user-item interaction matrix in COO format, cast to float32
        self.interaction_matrix = dataset.inter_matrix(
            form='coo').astype(np.float32)

        # n_users / n_items are set by the GeneralRecommender base class
        self.user_count = self.n_users
        self.item_count = self.n_items
        self.latent_size = config['embedding_size']
        self.n_layers = 3 if config['n_layers'] is None else config['n_layers']
        # one entry per propagation layer, all with the same latent size
        self.layers = [self.latent_size] * self.n_layers

        # NOTE(review): dropout is hard-coded on with full ratio here;
        # presumably consumed by sparse_dropout during forward — verify.
        self.drop_ratio = 1.0
        self.drop_flag = True

        # Xavier-initialized user/item embedding tables
        self.embedding_dict = self._init_model()
        # normalized (user+item) x (user+item) adjacency, kept on the device
        self.sparse_norm_adj = self.get_norm_adj_mat().to(self.device)

    def _init_model(self):
        """Create the Xavier-uniform-initialized user/item embedding tables.

        Returns:
            nn.ParameterDict with keys 'user_emb' (user_count x latent_size)
            and 'item_emb' (item_count x latent_size).
        """
        init_fn = nn.init.xavier_uniform_
        user_table = nn.Parameter(init_fn(torch.empty(self.user_count, self.latent_size)))
        item_table = nn.Parameter(init_fn(torch.empty(self.item_count, self.latent_size)))
        return nn.ParameterDict({'user_emb': user_table, 'item_emb': item_table})

    def get_norm_adj_mat(self):
        r"""Get the normalized interaction matrix of users and items.

        Construct the square matrix from the training data and normalize it
        using the laplace matrix.

        .. math::
            A_{hat} = D^{-0.5} \times A \times D^{-0.5}

        Returns:
            Sparse tensor of the normalized interaction matrix.
        """
        # build adj matrix
        A = sp.dok_matrix((self.n_users + self.n_items,
                           self.n_users + self.n_items), dtype=np.float32)
        inter_M = self.interaction_matrix
        inter_M_t = self.interaction_matrix.transpose()
        data_dict = dict(zip(zip(inter_M.row, inter_M.col+self.n_users),
                             [1]*inter_M.nnz))
        data_dict.updat
Download .txt
gitextract_7sv64gl1/

├── .gitignore
├── .idea/
│   ├── .gitignore
│   ├── MMRec.iml
│   ├── deployment.xml
│   ├── inspectionProfiles/
│   │   ├── Project_Default.xml
│   │   └── profiles_settings.xml
│   ├── misc.xml
│   ├── modules.xml
│   └── vcs.xml
├── LICENSE
├── README.md
├── data/
│   └── README.md
├── evaluation/
│   └── README.md
├── preprocessing/
│   ├── 0rating2inter.ipynb
│   ├── 1splitting.ipynb
│   ├── 2reindex-feat.ipynb
│   ├── 3feat-encoder.ipynb
│   ├── README.md
│   └── dualgnn-gen-u-u-matrix.py
├── requirements.txt
└── src/
    ├── common/
    │   ├── abstract_recommender.py
    │   ├── encoders.py
    │   ├── init.py
    │   ├── loss.py
    │   └── trainer.py
    ├── configs/
    │   ├── dataset/
    │   │   ├── baby.yaml
    │   │   ├── clothing.yaml
    │   │   ├── elec.yaml
    │   │   ├── microlens.yaml
    │   │   └── sports.yaml
    │   ├── mg.yaml
    │   ├── model/
    │   │   ├── BM3.yaml
    │   │   ├── BPR.yaml
    │   │   ├── DAMRS.yaml
    │   │   ├── DRAGON.yaml
    │   │   ├── DualGNN.yaml
    │   │   ├── FREEDOM.yaml
    │   │   ├── GRCN.yaml
    │   │   ├── ItemKNNCBF.yaml
    │   │   ├── LATTICE.yaml
    │   │   ├── LGMRec.yaml
    │   │   ├── LayerGCN.yaml
    │   │   ├── LightGCN.yaml
    │   │   ├── MGCN.yaml
    │   │   ├── MMGCN.yaml
    │   │   ├── MVGAE.yaml
    │   │   ├── PGL.yaml
    │   │   ├── SELFCFED_LGN.yaml
    │   │   ├── SLMRec.yaml
    │   │   ├── SMORE.yaml
    │   │   └── VBPR.yaml
    │   └── overall.yaml
    ├── main.py
    ├── models/
    │   ├── bm3.py
    │   ├── bpr.py
    │   ├── damrs.py
    │   ├── dragon.py
    │   ├── dualgnn.py
    │   ├── freedom.py
    │   ├── grcn.py
    │   ├── itemknncbf.py
    │   ├── lattice.py
    │   ├── layergcn.py
    │   ├── lgmrec.py
    │   ├── lightgcn.py
    │   ├── mgcn.py
    │   ├── mmgcn.py
    │   ├── mvgae.py
    │   ├── pgl.py
    │   ├── selfcfed_lgn.py
    │   ├── slmrec.py
    │   ├── smore.py
    │   └── vbpr.py
    └── utils/
        ├── configurator.py
        ├── data_utils.py
        ├── dataloader.py
        ├── dataset.py
        ├── logger.py
        ├── metrics.py
        ├── misc.py
        ├── quick_start.py
        ├── topk_evaluator.py
        └── utils.py
Download .txt
SYMBOL INDEX (420 symbols across 36 files)

FILE: preprocessing/dualgnn-gen-u-u-matrix.py
  function gen_user_matrix (line 14) | def gen_user_matrix(all_edge, no_users):

FILE: src/common/abstract_recommender.py
  class AbstractRecommender (line 10) | class AbstractRecommender(nn.Module):
    method pre_epoch_processing (line 13) | def pre_epoch_processing(self):
    method post_epoch_processing (line 16) | def post_epoch_processing(self):
    method calculate_loss (line 19) | def calculate_loss(self, interaction):
    method predict (line 30) | def predict(self, interaction):
    method full_sort_predict (line 41) | def full_sort_predict(self, interaction):
    method __str__ (line 62) | def __str__(self):
  class GeneralRecommender (line 71) | class GeneralRecommender(AbstractRecommender):
    method __init__ (line 75) | def __init__(self, config, dataloader):

FILE: src/common/encoders.py
  class LightGCN_Encoder (line 11) | class LightGCN_Encoder(GeneralRecommender):
    method __init__ (line 12) | def __init__(self, config, dataset):
    method _init_model (line 30) | def _init_model(self):
    method get_norm_adj_mat (line 39) | def get_norm_adj_mat(self):
    method sparse_dropout (line 77) | def sparse_dropout(self, x, rate, noise_shape):
    method forward (line 90) | def forward(self, inputs):
    method get_embedding (line 115) | def get_embedding(self):

FILE: src/common/init.py
  function xavier_normal_initialization (line 8) | def xavier_normal_initialization(module):
  function xavier_uniform_initialization (line 27) | def xavier_uniform_initialization(module):

FILE: src/common/loss.py
  class BPRLoss (line 9) | class BPRLoss(nn.Module):
    method __init__ (line 29) | def __init__(self, gamma=1e-10):
    method forward (line 33) | def forward(self, pos_score, neg_score):
  class EmbLoss (line 38) | class EmbLoss(nn.Module):
    method __init__ (line 42) | def __init__(self, norm=2):
    method forward (line 46) | def forward(self, *embeddings):
  class L2Loss (line 54) | class L2Loss(nn.Module):
    method __init__ (line 55) | def __init__(self):
    method forward (line 58) | def forward(self, *embeddings):

FILE: src/common/trainer.py
  class AbstractTrainer (line 23) | class AbstractTrainer(object):
    method __init__ (line 29) | def __init__(self, config, model):
    method fit (line 33) | def fit(self, train_data):
    method evaluate (line 39) | def evaluate(self, eval_data):
  class Trainer (line 47) | class Trainer(AbstractTrainer):
    method __init__ (line 62) | def __init__(self, config, model, mg=False):
    method _build_optimizer (line 111) | def _build_optimizer(self):
    method _train_epoch (line 130) | def _train_epoch(self, train_data, epoch_idx, loss_func=None):
    method _valid_epoch (line 196) | def _valid_epoch(self, valid_data):
    method _check_nan (line 210) | def _check_nan(self, loss):
    method _generate_train_loss_output (line 215) | def _generate_train_loss_output(self, epoch_idx, s_time, e_time, losses):
    method fit (line 223) | def fit(self, train_data, valid_data=None, test_data=None, saved=False...
    method evaluate (line 293) | def evaluate(self, eval_data, is_test=False, idx=0):
    method plot_train_loss (line 313) | def plot_train_loss(self, show=True, save_path=None):

FILE: src/models/bm3.py
  class BM3 (line 23) | class BM3(GeneralRecommender):
    method __init__ (line 24) | def __init__(self, config, dataset):
    method get_norm_adj_mat (line 58) | def get_norm_adj_mat(self, interaction_matrix):
    method forward (line 84) | def forward(self):
    method calculate_loss (line 97) | def calculate_loss(self, interactions):
    method full_sort_predict (line 149) | def full_sort_predict(self, interaction):

FILE: src/models/bpr.py
  class BPR (line 20) | class BPR(GeneralRecommender):
    method __init__ (line 24) | def __init__(self, config, dataset):
    method get_user_embedding (line 40) | def get_user_embedding(self, user):
    method get_item_embedding (line 51) | def get_item_embedding(self, item):
    method forward (line 62) | def forward(self, dropout=0.0):
    method calculate_loss (line 67) | def calculate_loss(self, interaction):
    method full_sort_predict (line 89) | def full_sort_predict(self, interaction):

FILE: src/models/damrs.py
  class DAMRS (line 14) | class DAMRS(GeneralRecommender):
    method __init__ (line 15) | def __init__(self, config, dataset):
    method get_knn_adj_mat (line 59) | def get_knn_adj_mat(self, v_embeddings, t_embeddings):
    method compute_normalized_laplacian (line 110) | def compute_normalized_laplacian(self, indices, adj_size):
    method get_session_adj (line 119) | def get_session_adj(self):
    method label_prediction (line 141) | def label_prediction(self, emb, aug_emb):
    method generate_pesudo_labels (line 149) | def generate_pesudo_labels(self, prob1, prob2, prob3):
    method neighbor_discrimination (line 157) | def neighbor_discrimination(self, mm_positive, s_positive, emb, aug_em...
    method KL (line 182) | def KL(self, p1, p2):
    method get_norm_adj_mat (line 186) | def get_norm_adj_mat(self):
    method forward (line 212) | def forward(self):
    method calculate_loss (line 242) | def calculate_loss(self, interaction):
    method full_sort_predict (line 295) | def full_sort_predict(self, interaction):
    method get_weight_modal (line 305) | def get_weight_modal(self, users, pos_items, neg_items, user_embedding...
    method bpr_loss (line 338) | def bpr_loss(self, users, pos_items, neg_items, p_weight, n_weight):

FILE: src/models/dragon.py
  class DRAGON (line 20) | class DRAGON(GeneralRecommender):
    method __init__ (line 21) | def __init__(self, config, dataset):
    method get_knn_adj_mat (line 158) | def get_knn_adj_mat(self, mm_embeddings):
    method compute_normalized_laplacian (line 172) | def compute_normalized_laplacian(self, indices, adj_size):
    method pre_epoch_processing (line 181) | def pre_epoch_processing(self):
    method pack_edge_index (line 185) | def pack_edge_index(self, inter_mat):
    method forward (line 191) | def forward(self, interaction):
    method calculate_loss (line 262) | def calculate_loss(self, interaction):
    method full_sort_predict (line 279) | def full_sort_predict(self, interaction):
    method topk_sample (line 287) | def topk_sample(self, k):
  class User_Graph_sample (line 327) | class User_Graph_sample(torch.nn.Module):
    method __init__ (line 328) | def __init__(self, num_user, aggr_mode, dim_latent):
    method forward (line 334) | def forward(self, features, user_graph, user_matrix):
  class GCN (line 344) | class GCN(torch.nn.Module):
    method __init__ (line 345) | def __init__(self, datasets, batch_size, num_user, num_item, dim_id, a...
    method forward (line 375) | def forward(self, edge_index_drop, edge_index, features):
  class Base_gcn (line 386) | class Base_gcn(MessagePassing):
    method __init__ (line 387) | def __init__(self, in_channels, out_channels, normalize=True, bias=Tru...
    method forward (line 393) | def forward(self, x, edge_index, size=None):
    method message (line 402) | def message(self, x_j, edge_index, size):
    method update (line 412) | def update(self, aggr_out):
    method __repr (line 415) | def __repr(self):

FILE: src/models/dualgnn.py
  class DualGNN (line 21) | class DualGNN(GeneralRecommender):
    method __init__ (line 22) | def __init__(self, config, dataset):
    method pre_epoch_processing (line 131) | def pre_epoch_processing(self):
    method pack_edge_index (line 135) | def pack_edge_index(self, inter_mat):
    method forward (line 141) | def forward(self, interaction):
    method calculate_loss (line 182) | def calculate_loss(self, interaction):
    method full_sort_predict (line 199) | def full_sort_predict(self, interaction):
    method topk_sample (line 207) | def topk_sample(self, k):
  class User_Graph_sample (line 252) | class User_Graph_sample(torch.nn.Module):
    method __init__ (line 253) | def __init__(self, num_user, aggr_mode,dim_latent):
    method forward (line 259) | def forward(self, features,user_graph,user_matrix):
  class GCN (line 269) | class GCN(torch.nn.Module):
    method __init__ (line 270) | def __init__(self,datasets, batch_size, num_user, num_item, dim_id, ag...
    method forward (line 304) | def forward(self, edge_index_drop,edge_index,features):
  class Base_gcn (line 318) | class Base_gcn(MessagePassing):
    method __init__ (line 319) | def __init__(self, in_channels, out_channels, normalize=True, bias=Tru...
    method forward (line 325) | def forward(self, x, edge_index, size=None):
    method message (line 334) | def message(self, x_j, edge_index, size):
    method update (line 344) | def update(self, aggr_out):
    method __repr (line 347) | def __repr(self):

FILE: src/models/freedom.py
  class FREEDOM (line 22) | class FREEDOM(GeneralRecommender):
    method __init__ (line 23) | def __init__(self, config, dataset):
    method get_knn_adj_mat (line 79) | def get_knn_adj_mat(self, mm_embeddings):
    method compute_normalized_laplacian (line 93) | def compute_normalized_laplacian(self, indices, adj_size):
    method get_norm_adj_mat (line 102) | def get_norm_adj_mat(self):
    method pre_epoch_processing (line 128) | def pre_epoch_processing(self):
    method _normalize_adj_m (line 145) | def _normalize_adj_m(self, indices, adj_size):
    method get_edge_info (line 156) | def get_edge_info(self):
    method forward (line 164) | def forward(self, adj):
    method bpr_loss (line 180) | def bpr_loss(self, users, pos_items, neg_items):
    method calculate_loss (line 189) | def calculate_loss(self, interaction):
    method full_sort_predict (line 212) | def full_sort_predict(self, interaction):

FILE: src/models/grcn.py
  class SAGEConv (line 26) | class SAGEConv(MessagePassing):
    method __init__ (line 27) | def __init__(self, in_channels, out_channels, normalize=True, bias=Tru...
    method forward (line 32) | def forward(self, x, edge_index, weight_vector, size=None):
    method message (line 36) | def message(self, x_j):
    method update (line 39) | def update(self, aggr_out):
    method __repr__ (line 42) | def __repr__(self):
  class GATConv (line 46) | class GATConv(MessagePassing):
    method __init__ (line 47) | def __init__(self, in_channels, out_channels, self_loops=False):
    method forward (line 53) | def forward(self, x, edge_index, size=None):
    method message (line 61) | def message(self,  x_i, x_j, size_i ,edge_index_i):
    method update (line 75) | def update(self, aggr_out):
  class EGCN (line 80) | class EGCN(torch.nn.Module):
    method __init__ (line 81) | def __init__(self, num_user, num_item, dim_E, aggr_mode, has_act, has_...
    method forward (line 93) | def forward(self, edge_index, weight_vector):
  class CGCN (line 112) | class CGCN(torch.nn.Module):
    method __init__ (line 113) | def __init__(self, features, num_user, num_item, dim_C, aggr_mode, num...
    method forward (line 139) | def forward(self, edge_index):
  class GRCN (line 169) | class GRCN(GeneralRecommender):
    method __init__ (line 170) | def __init__(self,  config, dataset):
    method pack_edge_index (line 217) | def pack_edge_index(self, inter_mat):
    method forward (line 224) | def forward(self):
    method calculate_loss (line 300) | def calculate_loss(self, interaction):
    method full_sort_predict (line 335) | def full_sort_predict(self, interaction):

FILE: src/models/itemknncbf.py
  class ItemKNNCBF (line 25) | class ItemKNNCBF(GeneralRecommender):
    method __init__ (line 26) | def __init__(self, config, dataset):
    method build_item_sim_matrix (line 56) | def build_item_sim_matrix(self, features):
    method build_item_sim_matrix_with_blocks (line 67) | def build_item_sim_matrix_with_blocks(self, features, block_size=1000):
    method calculate_loss (line 103) | def calculate_loss(self, interaction):
    method full_sort_predict (line 107) | def full_sort_predict(self, interaction):

FILE: src/models/lattice.py
  class LATTICE (line 26) | class LATTICE(GeneralRecommender):
    method __init__ (line 27) | def __init__(self, config, dataset):
    method pre_epoch_processing (line 97) | def pre_epoch_processing(self):
    method get_adj_mat (line 100) | def get_adj_mat(self):
    method sparse_mx_to_torch_sparse_tensor (line 124) | def sparse_mx_to_torch_sparse_tensor(self, sparse_mx):
    method forward (line 132) | def forward(self, adj, build_item_graph=False):
    method bpr_loss (line 199) | def bpr_loss(self, users, pos_items, neg_items):
    method calculate_loss (line 213) | def calculate_loss(self, interaction):
    method full_sort_predict (line 229) | def full_sort_predict(self, interaction):

FILE: src/models/layergcn.py
  class LayerGCN (line 15) | class LayerGCN(GeneralRecommender):
    method __init__ (line 16) | def __init__(self, config, dataset):
    method pre_epoch_processing (line 51) | def pre_epoch_processing(self):
    method _normalize_adj_m (line 72) | def _normalize_adj_m(self, indices, adj_size):
    method get_edge_info (line 83) | def get_edge_info(self):
    method get_norm_adj_mat (line 91) | def get_norm_adj_mat(self):
    method get_ego_embeddings (line 117) | def get_ego_embeddings(self):
    method forward (line 125) | def forward(self):
    method bpr_loss (line 140) | def bpr_loss(self, u_embeddings, i_embeddings, user, pos_item, neg_item):
    method emb_loss (line 154) | def emb_loss(self, user, pos_item, neg_item):
    method calculate_loss (line 163) | def calculate_loss(self, interaction):
    method full_sort_predict (line 177) | def full_sort_predict(self, interaction):

FILE: src/models/lgmrec.py
  class LGMRec (line 18) | class LGMRec(GeneralRecommender):
    method __init__ (line 19) | def __init__(self, config, dataset):
    method scipy_matrix_to_sparse_tenser (line 63) | def scipy_matrix_to_sparse_tenser(self, matrix, shape):
    method get_norm_adj_mat (line 70) | def get_norm_adj_mat(self):
    method cge (line 89) | def cge(self):
    method mge (line 103) | def mge(self, str='v'):
    method forward (line 115) | def forward(self):
    method bpr_loss (line 153) | def bpr_loss(self, users, pos_items, neg_items):
    method ssl_triple_loss (line 159) | def ssl_triple_loss(self, emb1, emb2, all_emb):
    method reg_loss (line 168) | def reg_loss(self, *embs):
    method calculate_loss (line 175) | def calculate_loss(self, interaction):
    method full_sort_predict (line 196) | def full_sort_predict(self, interaction):
  class HGNNLayer (line 202) | class HGNNLayer(nn.Module):
    method __init__ (line 203) | def __init__(self, n_hyper_layer):
    method forward (line 208) | def forward(self, i_hyper, u_hyper, embeds):

FILE: src/models/lightgcn.py
  class LightGCN (line 23) | class LightGCN(GeneralRecommender):
    method __init__ (line 33) | def __init__(self, config, dataset):
    method _init_model (line 56) | def _init_model(self):
    method get_norm_adj_mat (line 65) | def get_norm_adj_mat(self):
    method get_ego_embeddings (line 103) | def get_ego_embeddings(self):
    method forward (line 115) | def forward(self):
    method calculate_loss (line 130) | def calculate_loss(self, interaction):
    method full_sort_predict (line 156) | def full_sort_predict(self, interaction):

FILE: src/models/mgcn.py
  class MGCN (line 22) | class MGCN(GeneralRecommender):
    method __init__ (line 23) | def __init__(self, config, dataset):
    method pre_epoch_processing (line 106) | def pre_epoch_processing(self):
    method get_adj_mat (line 109) | def get_adj_mat(self):
    method sparse_mx_to_torch_sparse_tensor (line 138) | def sparse_mx_to_torch_sparse_tensor(self, sparse_mx):
    method forward (line 146) | def forward(self, adj, train=False):
    method bpr_loss (line 210) | def bpr_loss(self, users, pos_items, neg_items):
    method InfoNCE (line 224) | def InfoNCE(self, view1, view2, temperature):
    method calculate_loss (line 233) | def calculate_loss(self, interaction):
    method full_sort_predict (line 255) | def full_sort_predict(self, interaction):

FILE: src/models/mmgcn.py
  class MMGCN (line 22) | class MMGCN(GeneralRecommender):
    method __init__ (line 23) | def __init__(self, config, dataset):
    method pack_edge_index (line 58) | def pack_edge_index(self, inter_mat):
    method forward (line 64) | def forward(self):
    method calculate_loss (line 79) | def calculate_loss(self, interaction):
    method full_sort_predict (line 99) | def full_sort_predict(self, interaction):
  class GCN (line 108) | class GCN(torch.nn.Module):
    method __init__ (line 109) | def __init__(self, edge_index, batch_size, num_user, num_item, dim_fea...
    method forward (line 164) | def forward(self, features, id_embedding):
  class BaseModel (line 191) | class BaseModel(MessagePassing):
    method __init__ (line 192) | def __init__(self, in_channels, out_channels, normalize=True, bias=Tru...
    method reset_parameters (line 202) | def reset_parameters(self):
    method forward (line 205) | def forward(self, x, edge_index, size=None):
    method message (line 209) | def message(self, x_j, edge_index, size):
    method update (line 212) | def update(self, aggr_out):
    method __repr (line 215) | def __repr(self):

FILE: src/models/mvgae.py
  class MVGAE (line 27) | class MVGAE(GeneralRecommender):
    method __init__ (line 28) | def __init__(self, config, dataset):
    method pack_edge_index (line 60) | def pack_edge_index(self, inter_mat):
    method reparametrize (line 66) | def reparametrize(self, mu, logvar):
    method dot_product_decode_neg (line 73) | def dot_product_decode_neg(self, z, user, neg_items, sigmoid=True):
    method dot_product_decode (line 87) | def dot_product_decode(self, z, edge_index, sigmoid=True):
    method forward (line 91) | def forward(self):
    method recon_loss (line 121) | def recon_loss(self, z, pos_edge_index, user, neg_items):
    method kl_loss (line 138) | def kl_loss(self, mu, logvar):
    method calculate_loss (line 153) | def calculate_loss(self, interaction):
    method full_sort_predict (line 174) | def full_sort_predict(self, interaction):
  class GCN (line 183) | class GCN(torch.nn.Module):
    method __init__ (line 184) | def __init__(self, device, features, edge_index, batch_size, num_user,...
    method forward (line 247) | def forward(self):
  class ProductOfExperts (line 285) | class ProductOfExperts(torch.nn.Module):
    method __init__ (line 286) | def __init__(self):
    method forward (line 294) | def forward(self, mu, logvar, eps=1e-8):
  class BaseModel (line 304) | class BaseModel(MessagePassing):
    method __init__ (line 305) | def __init__(self, in_channels, out_channels, normalize=True, bias=Tru...
    method reset_parameters (line 318) | def reset_parameters(self):
    method forward (line 322) | def forward(self, x, edge_index, size=None):
    method message (line 331) | def message(self, x_j, edge_index, size):
    method update (line 340) | def update(self, aggr_out):
    method __repr (line 347) | def __repr(self):

FILE: src/models/pgl.py
  class PGL (line 22) | class PGL(GeneralRecommender):
    method __init__ (line 23) | def __init__(self, config, dataset):
    method sparse_mx_to_torch_sparse_tensor (line 78) | def sparse_mx_to_torch_sparse_tensor(self, sparse_mx):
    method get_knn_adj_mat (line 86) | def get_knn_adj_mat(self, mm_embeddings):
    method compute_normalized_laplacian (line 100) | def compute_normalized_laplacian(self, indices, adj_size):
    method get_norm_adj_mat (line 109) | def get_norm_adj_mat(self):
    method global_subgraph_extraction (line 138) | def global_subgraph_extraction(self, adj):
    method alignment (line 156) | def alignment(self, x, y):
    method uniformity (line 161) | def uniformity(self, x, t=2):
    method save (line 165) | def save(self):
    method pre_epoch_processing (line 168) | def pre_epoch_processing(self):
    method _normalize_adj_m (line 183) | def _normalize_adj_m(self, indices, adj_size):
    method get_edge_info (line 194) | def get_edge_info(self):
    method forward (line 202) | def forward(self, adj):
    method bpr_loss (line 227) | def bpr_loss(self, users, pos_items, neg_items):
    method InfoNCE (line 236) | def InfoNCE(self, view1, view2, temperature):
    method calculate_loss (line 245) | def calculate_loss(self, interaction):
    method full_sort_predict (line 261) | def full_sort_predict(self, interaction):

FILE: src/models/selfcfed_lgn.py
  class SELFCFED_LGN (line 28) | class SELFCFED_LGN(GeneralRecommender):
    method __init__ (line 29) | def __init__(self, config, dataset):
    method forward (line 41) | def forward(self, inputs):
    method get_embedding (line 53) | def get_embedding(self):
    method loss_fn (line 57) | def loss_fn(self, p, z):  # negative cosine similarity
    method calculate_loss (line 60) | def calculate_loss(self, interaction):
    method full_sort_predict (line 71) | def full_sort_predict(self, interaction):

FILE: src/models/slmrec.py
  class SLMRec (line 20) | class SLMRec(GeneralRecommender):
    method __init__ (line 21) | def __init__(self, config, dataset):
    method __init_weight (line 28) | def __init_weight(self, dataset):
    method compute (line 73) | def compute(self):
    method feature_dropout (line 120) | def feature_dropout(self, users_idx, items_idx):
    method feature_masking (line 192) | def feature_masking(self, users_idx, items_idx, dropout=False):
    method fac (line 278) | def fac(self, idx):
    method full_sort_predict (line 307) | def full_sort_predict(self, interaction, candidate_items=None):
    method getEmbedding (line 317) | def getEmbedding(self, users, pos_items, neg_items):
    method calculate_loss (line 332) | def calculate_loss(self, interaction):
    method ssl_loss (line 339) | def ssl_loss(self, users, pos):
    method compute_ssl (line 344) | def compute_ssl(self, users, items):
    method forward (line 354) | def forward(self, users, items):
    method mm_fusion (line 362) | def mm_fusion(self, reps: list):
    method infonce (line 369) | def infonce(self, users, pos):
    method create_u_embeding_i (line 380) | def create_u_embeding_i(self):
    method create_adj_mat (line 434) | def create_adj_mat(self, interaction_csr):

FILE: src/models/smore.py
  class SMORE (line 24) | class SMORE(GeneralRecommender):
    method __init__ (line 25) | def __init__(self, config, dataset):
    method pre_epoch_processing (line 129) | def pre_epoch_processing(self):
    method max_pool_fusion (line 132) | def max_pool_fusion(self):
    method get_adj_mat (line 155) | def get_adj_mat(self):
    method sparse_mx_to_torch_sparse_tensor (line 180) | def sparse_mx_to_torch_sparse_tensor(self, sparse_mx):
    method spectrum_convolution (line 188) | def spectrum_convolution(self, image_embeds, text_embeds):
    method forward (line 208) | def forward(self, adj, train=False):
    method bpr_loss (line 293) | def bpr_loss(self, users, pos_items, neg_items):
    method InfoNCE (line 307) | def InfoNCE(self, view1, view2, temperature):
    method calculate_loss (line 316) | def calculate_loss(self, interaction):
    method full_sort_predict (line 338) | def full_sort_predict(self, interaction):

FILE: src/models/vbpr.py
  class VBPR (line 20) | class VBPR(GeneralRecommender):
    method __init__ (line 23) | def __init__(self, config, dataloader):
    method get_user_embedding (line 47) | def get_user_embedding(self, user):
    method get_item_embedding (line 58) | def get_item_embedding(self, item):
    method forward (line 69) | def forward(self, dropout=0.0):
    method calculate_loss (line 77) | def calculate_loss(self, interaction):
    method full_sort_predict (line 100) | def full_sort_predict(self, interaction):

FILE: src/utils/configurator.py
  class Config (line 15) | class Config(object):
    method __init__ (line 46) | def __init__(self, model=None, dataset=None, config_dict=None, mg=False):
    method _load_dataset_model_config (line 68) | def _load_dataset_model_config(self, config_dict, mg):
    method _build_yaml_loader (line 92) | def _build_yaml_loader(self):
    method _set_default_parameters (line 106) | def _set_default_parameters(self):
    method _init_device (line 114) | def _init_device(self):
    method __setitem__ (line 120) | def __setitem__(self, key, value):
    method __getitem__ (line 125) | def __getitem__(self, item):
    method __contains__ (line 131) | def __contains__(self, key):
    method __str__ (line 136) | def __str__(self):
    method __repr__ (line 142) | def __repr__(self):

FILE: src/utils/data_utils.py
  function flat_list_of_lists (line 22) | def flat_list_of_lists(l):
  function mask_batch_text_tokens (line 27) | def mask_batch_text_tokens(
  function image_to_tensor (line 77) | def image_to_tensor(image: np.ndarray, keepdim: bool = True) -> torch.Te...
  function get_padding (line 116) | def get_padding(image, max_w, max_h, pad_all=False):
  class ImagePad (line 140) | class ImagePad(object):
    method __init__ (line 141) | def __init__(self, max_w, max_h, fill=0, padding_mode='constant'):
    method __call__ (line 149) | def __call__(self, img):
    method __repr__ (line 166) | def __repr__(self):
  function get_resize_size (line 171) | def get_resize_size(image, max_size):
  class ImageResize (line 206) | class ImageResize(object):
    method __init__ (line 219) | def __init__(self, max_size, interpolation=Image.BILINEAR):
    method __call__ (line 224) | def __call__(self, img):
    method __repr__ (line 240) | def __repr__(self):
  function get_imagenet_transform (line 246) | def get_imagenet_transform(min_size=600, max_size=1000):
  class ImageNorm (line 260) | class ImageNorm(object):
    method __init__ (line 263) | def __init__(self, mean, std):
    method __call__ (line 270) | def __call__(self, img):
  function chunk_list (line 283) | def chunk_list(examples, chunk_size=2, pad_to_divisible=True):
  function mk_input_group (line 311) | def mk_input_group(key_grouped_examples, max_n_example_per_group=2, is_t...
  function repeat_tensor_rows (line 348) | def repeat_tensor_rows(raw_tensor, row_repeats):
  function load_decompress_img_from_lmdb_value (line 367) | def load_decompress_img_from_lmdb_value(lmdb_value):

FILE: src/utils/dataloader.py
  class AbstractDataLoader (line 15) | class AbstractDataLoader(object):
    method __init__ (line 37) | def __init__(self, config, dataset, additional_dataset=None,
    method pretrain_setup (line 59) | def pretrain_setup(self):
    method data_preprocess (line 65) | def data_preprocess(self):
    method __len__ (line 71) | def __len__(self):
    method __iter__ (line 74) | def __iter__(self):
    method __next__ (line 79) | def __next__(self):
    method pr_end (line 87) | def pr_end(self):
    method _shuffle (line 91) | def _shuffle(self):
    method _next_batch_data (line 96) | def _next_batch_data(self):
  class TrainDataLoader (line 105) | class TrainDataLoader(AbstractDataLoader):
    method __init__ (line 109) | def __init__(self, config, dataset, batch_size=1, shuffle=False):
    method pretrain_setup (line 140) | def pretrain_setup(self):
    method inter_matrix (line 155) | def inter_matrix(self, form='coo', value_field=None):
    method _create_sparse_matrix (line 176) | def _create_sparse_matrix(self, df_feat, source_field, target_field, f...
    method pr_end (line 213) | def pr_end(self):
    method _shuffle (line 218) | def _shuffle(self):
    method _next_batch_data (line 223) | def _next_batch_data(self):
    method _get_neg_sample (line 226) | def _get_neg_sample(self):
    method _get_non_neg_sample (line 252) | def _get_non_neg_sample(self):
    method _get_full_uids_sample (line 262) | def _get_full_uids_sample(self):
    method _sample_neg_ids (line 267) | def _sample_neg_ids(self, u_ids):
    method _get_my_neighbors (line 277) | def _get_my_neighbors(self, id_str):
    method _get_neighborhood_samples (line 289) | def _get_neighborhood_samples(self, ids, id_str):
    method _random (line 307) | def _random(self):
    method _get_history_items_u (line 311) | def _get_history_items_u(self):
    method _get_history_users_i (line 320) | def _get_history_users_i(self):
  class EvalDataLoader (line 330) | class EvalDataLoader(AbstractDataLoader):
    method __init__ (line 334) | def __init__(self, config, dataset, additional_dataset=None,
    method pr_end (line 353) | def pr_end(self):
    method _shuffle (line 356) | def _shuffle(self):
    method _next_batch_data (line 359) | def _next_batch_data(self):
    method _get_pos_items_per_u (line 370) | def _get_pos_items_per_u(self, eval_users):
    method _get_eval_items_per_u (line 393) | def _get_eval_items_per_u(self, eval_users):
    method get_eval_items (line 409) | def get_eval_items(self):
    method get_eval_len_list (line 412) | def get_eval_len_list(self):
    method get_eval_users (line 415) | def get_eval_users(self):

FILE: src/utils/dataset.py
  class RecDataset (line 21) | class RecDataset(object):
    method __init__ (line 22) | def __init__(self, config, df=None):
    method load_inter_graph (line 50) | def load_inter_graph(self, file_name):
    method split (line 57) | def split(self):
    method copy (line 76) | def copy(self, new_df):
    method get_user_num (line 92) | def get_user_num(self):
    method get_item_num (line 95) | def get_item_num(self):
    method shuffle (line 98) | def shuffle(self):
    method __len__ (line 103) | def __len__(self):
    method __getitem__ (line 106) | def __getitem__(self, idx):
    method __repr__ (line 110) | def __repr__(self):
    method __str__ (line 113) | def __str__(self):

FILE: src/utils/logger.py
  function init_logger (line 13) | def init_logger(config):

FILE: src/utils/metrics.py
  function recall_ (line 12) | def recall_(pos_index, pos_len):
  function recall2_ (line 18) | def recall2_(pos_index, pos_len):
  function ndcg_ (line 30) | def ndcg_(pos_index, pos_len):
  function map_ (line 66) | def map_(pos_index, pos_len):
  function precision_ (line 92) | def precision_(pos_index, pos_len):

FILE: src/utils/misc.py
  class NoOp (line 14) | class NoOp(object):
    method __getattr__ (line 16) | def __getattr__(self, name):
    method noop (line 19) | def noop(self, *args, **kwargs):
  function set_random_seed (line 23) | def set_random_seed(seed):
  function zero_none_grad (line 30) | def zero_none_grad(model):

FILE: src/utils/quick_start.py
  function quick_start (line 19) | def quick_start(model, dataset, config_dict, save_model=True, mg=False):

FILE: src/utils/topk_evaluator.py
  class TopKEvaluator (line 19) | class TopKEvaluator(object):
    method __init__ (line 29) | def __init__(self, config):
    method collect (line 36) | def collect(self, interaction, scores_tensor, full=False):
    method evaluate (line 58) | def evaluate(self, batch_matrix_list, eval_data, is_test=False, idx=0):
    method _check_args (line 104) | def _check_args(self):
    method _calculate_metrics (line 129) | def _calculate_metrics(self, pos_len_list, topk_index):
    method __str__ (line 145) | def __str__(self):

FILE: src/utils/utils.py
  function get_local_time (line 16) | def get_local_time():
  function get_model (line 28) | def get_model(model_name):
  function get_trainer (line 44) | def get_trainer():
  function init_seed (line 48) | def init_seed(seed):
  function early_stopping (line 57) | def early_stopping(value, best, cur_step, max_step, bigger=True):
  function dict2str (line 101) | def dict2str(result_dict):
  function build_knn_neighbourhood (line 119) | def build_knn_neighbourhood(adj, topk):
  function compute_normalized_laplacian (line 125) | def compute_normalized_laplacian(adj):
  function build_sim (line 134) | def build_sim(context):
  function get_sparse_laplacian (line 139) | def get_sparse_laplacian(edge_index, edge_weight, num_nodes, normalizati...
  function get_dense_laplacian (line 154) | def get_dense_laplacian(adj, normalization='none'):
  function build_knn_normalized_graph (line 171) | def build_knn_normalized_graph(adj, topk, is_sparse, norm_type):
Condensed preview — 83 files, each showing its path, character count, and a content snippet. Download the .json file or copy it to your clipboard to get the full structured content (533K chars).
[
  {
    "path": ".gitignore",
    "chars": 1812,
    "preview": "# Byte-compiled / optimized / DLL files\n__pycache__/\n*.py[cod]\n*$py.class\n\n/data/baby/\n\n# C extensions\n*.so\n\n# Distribut"
  },
  {
    "path": ".idea/.gitignore",
    "chars": 176,
    "preview": "# Default ignored files\n/shelf/\n/workspace.xml\n# Editor-based HTTP Client requests\n/httpRequests/\n# Datasource local sto"
  },
  {
    "path": ".idea/MMRec.iml",
    "chars": 576,
    "preview": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<module type=\"PYTHON_MODULE\" version=\"4\">\n  <component name=\"NewModuleRootManager"
  },
  {
    "path": ".idea/deployment.xml",
    "chars": 412,
    "preview": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project version=\"4\">\n  <component name=\"PublishConfigData\" remoteFilesAllowedToD"
  },
  {
    "path": ".idea/inspectionProfiles/Project_Default.xml",
    "chars": 13194,
    "preview": "<component name=\"InspectionProjectProfileManager\">\n  <profile version=\"1.0\">\n    <option name=\"myName\" value=\"Project De"
  },
  {
    "path": ".idea/inspectionProfiles/profiles_settings.xml",
    "chars": 174,
    "preview": "<component name=\"InspectionProjectProfileManager\">\n  <settings>\n    <option name=\"USE_PROJECT_PROFILE\" value=\"false\" />\n"
  },
  {
    "path": ".idea/misc.xml",
    "chars": 200,
    "preview": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project version=\"4\">\n  <component name=\"ProjectRootManager\" version=\"2\" project-"
  },
  {
    "path": ".idea/modules.xml",
    "chars": 262,
    "preview": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project version=\"4\">\n  <component name=\"ProjectModuleManager\">\n    <modules>\n   "
  },
  {
    "path": ".idea/vcs.xml",
    "chars": 180,
    "preview": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n<project version=\"4\">\n  <component name=\"VcsDirectoryMappings\">\n    <mapping dire"
  },
  {
    "path": "LICENSE",
    "chars": 35149,
    "preview": "                    GNU GENERAL PUBLIC LICENSE\n                       Version 3, 29 June 2007\n\n Copyright (C) 2007 Free "
  },
  {
    "path": "README.md",
    "chars": 6352,
    "preview": "# MMRec\n\n<div align=\"center\">\n  <a href=\"https://github.com/enoche/MultimodalRecSys\"><img width=\"300px\" height=\"auto\" sr"
  },
  {
    "path": "data/README.md",
    "chars": 486,
    "preview": "\n## Data\nDownload from Google Drive: [Baby/Sports/Elec](https://drive.google.com/drive/folders/13cBy1EA_saTUuXxVllKgtfci"
  },
  {
    "path": "evaluation/README.md",
    "chars": 15983,
    "preview": "# EVALUATING THE SOTA MODELS\n\nwe validate the effectiveness and efficiency of state-of-the-art multimodal recommendation"
  },
  {
    "path": "preprocessing/0rating2inter.ipynb",
    "chars": 20827,
    "preview": "{\n \"cells\": [\n  {\n   \"cell_type\": \"markdown\",\n   \"source\": [\n    \"# 从ratings_Sports_and_Outdoors.csv文件中提取U-I交互图, 5-core后"
  },
  {
    "path": "preprocessing/1splitting.ipynb",
    "chars": 22645,
    "preview": "{\n \"cells\": [\n  {\n   \"cell_type\": \"markdown\",\n   \"source\": [\n    \"# 基于rating2inter.ipynb生成的5-core交互图,Train/Validation/Te"
  },
  {
    "path": "preprocessing/2reindex-feat.ipynb",
    "chars": 22051,
    "preview": "{\n \"cells\": [\n  {\n   \"cell_type\": \"markdown\",\n   \"source\": [\n    \"# 利用rating2inter.ipynb中U/I的index对features进行一一对应(meta-t"
  },
  {
    "path": "preprocessing/3feat-encoder.ipynb",
    "chars": 45660,
    "preview": "{\n \"cells\": [\n  {\n   \"cell_type\": \"markdown\",\n   \"source\": [\n    \"# Sports14 Text/Image Feature Extraction\"\n   ],\n   \"me"
  },
  {
    "path": "preprocessing/README.md",
    "chars": 1273,
    "preview": "# Preprocessing from raw data 从原始数据处理\n- The following preprocessing steps can be quite tedious. Please post issues if yo"
  },
  {
    "path": "preprocessing/dualgnn-gen-u-u-matrix.py",
    "chars": 3868,
    "preview": "# 对应于Preprocess-ml-imdb.py文件\n\n\nimport numpy as np\nfrom collections import defaultdict\nfrom tqdm import tqdm\nimport torch"
  },
  {
    "path": "requirements.txt",
    "chars": 82,
    "preview": "numpy==1.21.5\npandas==1.3.5\npython==3.7.11\nscipy==1.7.3\ntorch==1.11.0\npyyaml==6.0\n"
  },
  {
    "path": "src/common/abstract_recommender.py",
    "chars": 3747,
    "preview": "# coding: utf-8\n# @email  : enoche.chow@gmail.com\n\nimport os\nimport numpy as np\nimport torch\nimport torch.nn as nn\n\n\ncla"
  },
  {
    "path": "src/common/encoders.py",
    "chars": 4898,
    "preview": "import copy\nimport math\n\nimport numpy as np\nimport torch\nimport torch.nn as nn\nfrom common.abstract_recommender import G"
  },
  {
    "path": "src/common/init.py",
    "chars": 1549,
    "preview": "# coding: utf-8\n# @email  : enoche.chow@gmail.com\n\nimport torch.nn as nn\nfrom torch.nn.init import xavier_normal_, xavie"
  },
  {
    "path": "src/common/loss.py",
    "chars": 1604,
    "preview": "# coding: utf-8\n# @email  : enoche.chow@gmail.com\n\n\nimport torch\nimport torch.nn as nn\n\n\nclass BPRLoss(nn.Module):\n\n    "
  },
  {
    "path": "src/common/trainer.py",
    "chars": 14363,
    "preview": "# coding: utf-8\n# @email: enoche.chow@gmail.com\n\nr\"\"\"\n################################\n\"\"\"\n\nimport os\nimport itertools\ni"
  },
  {
    "path": "src/configs/dataset/baby.yaml",
    "chars": 331,
    "preview": "# Common Features\nUSER_ID_FIELD: userID\nITEM_ID_FIELD: itemID\n#RATING_FIELD: rating\nTIME_FIELD: timestamp\n\nfilter_out_co"
  },
  {
    "path": "src/configs/dataset/clothing.yaml",
    "chars": 336,
    "preview": "# Common Features\nUSER_ID_FIELD: userID\nITEM_ID_FIELD: itemID\n#RATING_FIELD: rating\nTIME_FIELD: timestamp\n\nfilter_out_co"
  },
  {
    "path": "src/configs/dataset/elec.yaml",
    "chars": 331,
    "preview": "# Common Features\nUSER_ID_FIELD: userID\nITEM_ID_FIELD: itemID\n#RATING_FIELD: rating\nTIME_FIELD: timestamp\n\nfilter_out_co"
  },
  {
    "path": "src/configs/dataset/microlens.yaml",
    "chars": 336,
    "preview": "# Common Features\nUSER_ID_FIELD: userID\nITEM_ID_FIELD: itemID\n#RATING_FIELD: rating\nTIME_FIELD: timestamp\n\nfilter_out_co"
  },
  {
    "path": "src/configs/dataset/sports.yaml",
    "chars": 333,
    "preview": "# Common Features\nUSER_ID_FIELD: userID\nITEM_ID_FIELD: itemID\n#RATING_FIELD: rating\nTIME_FIELD: timestamp\n\nfilter_out_co"
  },
  {
    "path": "src/configs/mg.yaml",
    "chars": 96,
    "preview": "alpha1: [1.0]\nalpha2: [0.1, 0.2, 0.3]\nbeta: [3]\n\nhyper_parameters: [\"alpha1\", \"alpha2\", \"beta\"]\n"
  },
  {
    "path": "src/configs/model/BM3.yaml",
    "chars": 198,
    "preview": "embedding_size: 64\nfeat_embed_dim: 64\n\nn_layers: [1, 2]\ndropout: [0.3, 0.5]\nreg_weight: [0.1, 0.01]\ncl_weight: 2.0\n\nuse_"
  },
  {
    "path": "src/configs/model/BPR.yaml",
    "chars": 138,
    "preview": "embedding_size: 64\nis_multimodal_model: False\nreg_weight: [2.0, 1.0, 1e-01, 1e-02, 1e-03, 1e-04, 1e-05]\n\nhyper_parameter"
  },
  {
    "path": "src/configs/model/DAMRS.yaml",
    "chars": 507,
    "preview": "embedding_size: 64\nfeat_embed_dim: 64\nweight_size: [64, 64]\n\n\nkl_weight: [1] # [10, 0.1, 0.01] # [1] # [0, 0.1, 1e-02, 1"
  },
  {
    "path": "src/configs/model/DRAGON.yaml",
    "chars": 277,
    "preview": "embedding_size: 64\nfeat_embed_dim: 64\n\nn_mm_layers: 1\nn_layers: 2\nknn_k: 10\nmm_image_weight: 0.1\naggr_mode: ['add']\nlear"
  },
  {
    "path": "src/configs/model/DualGNN.yaml",
    "chars": 213,
    "preview": "embedding_size: 64\nn_layers: 2\naggr_mode: ['add']\nreg_weight: [0.1, 0.01, 0.001, 0.0001, 0.00001]\nlearning_rate: [0.1, 0"
  },
  {
    "path": "src/configs/model/FREEDOM.yaml",
    "chars": 246,
    "preview": "embedding_size: 64\nfeat_embed_dim: 64\nweight_size: [64, 64]\n\nlambda_coeff: 0.9\nreg_weight: [0.0, 1e-05, 1e-04, 1e-03]\n\nn"
  },
  {
    "path": "src/configs/model/GRCN.yaml",
    "chars": 197,
    "preview": "embedding_size: 64\nlatent_embedding: 64\n\nn_layers: 3\nreg_weight: [0.1, 0.01, 0.001, 0.0001, 0.00001]\nlearning_rate: [1, "
  },
  {
    "path": "src/configs/model/ItemKNNCBF.yaml",
    "chars": 95,
    "preview": "\nknn_k: [10]\nshrink: [10]\nreq_training: False\nepochs: 1\nhyper_parameters: ['shrink', 'knn_k']\n\n"
  },
  {
    "path": "src/configs/model/LATTICE.yaml",
    "chars": 320,
    "preview": "embedding_size: 64\nfeat_embed_dim: 64\nweight_size: [64, 64]\n\nlearning_rate_scheduler: [0.96, 50]\nlambda_coeff: 0.9\nreg_w"
  },
  {
    "path": "src/configs/model/LGMRec.yaml",
    "chars": 504,
    "preview": "embedding_size: 64\nfeat_embed_dim: 64\ncf_model: lightgcn\n\nn_ui_layers: [2]\nn_mm_layers: [2]\n\n#baby\nn_hyper_layer: [1]\nhy"
  },
  {
    "path": "src/configs/model/LayerGCN.yaml",
    "chars": 155,
    "preview": "embedding_size: 64\nn_layers: [4]\nreg_weight: [1e-02, 1e-03, 1e-04, 1e-05]\ndropout: [0.0, 0.1, 0.2]\nhyper_parameters: [\"n"
  },
  {
    "path": "src/configs/model/LightGCN.yaml",
    "chars": 163,
    "preview": "embedding_size: 64\nis_multimodal_model: False\nn_layers: [1, 2, 3, 4]\nreg_weight: [1e-02, 1e-03, 1e-04, 1e-05, 1e-06]\nhyp"
  },
  {
    "path": "src/configs/model/MGCN.yaml",
    "chars": 209,
    "preview": "embedding_size: 64\nn_ui_layers: 2\nn_layers: 1\n\nlearning_rate_scheduler: [0.96, 50]\nlambda_coeff: 0.9\nreg_weight: 1e-04\n\n"
  },
  {
    "path": "src/configs/model/MMGCN.yaml",
    "chars": 186,
    "preview": "embedding_size: 64\nn_layers: 2\nreg_weight: [0, 0.00001, 0.0001, 0.001, 0.01, 0.1]\nlearning_rate: [0.0001, 0.0005, 0.001,"
  },
  {
    "path": "src/configs/model/MVGAE.yaml",
    "chars": 189,
    "preview": "embedding_size: 64\n\nn_layers: 1\n#reg_weight: [0.1, 0.01, 0.001, 0.0001, 0.00001]\nlearning_rate: [0.0001, 0.001, 0.01, 0."
  },
  {
    "path": "src/configs/model/PGL.yaml",
    "chars": 298,
    "preview": "embedding_size: 64\nfeat_embed_dim: 64\nweight_size: [64, 64]\n\nlearning_rate_scheduler: [0.96, 50]\nlambda_coeff: 0.9\nlearn"
  },
  {
    "path": "src/configs/model/SELFCFED_LGN.yaml",
    "chars": 195,
    "preview": "embedding_size: 64\nn_layers: [1, 2]\ndropout: [0.1, 0.2, 0.5]\nreg_weight: [1e-01, 1e-02, 1e-03, 1e-04, 1e-05, 0.0]\nuse_ne"
  },
  {
    "path": "src/configs/model/SLMRec.yaml",
    "chars": 355,
    "preview": "recdim: 64\nlayer_num: 3\nreg: [0.0001, 0.001, 0.01, 0.1]\nssl_task: 'FAC'\nlearning_rate: [0.0001, 0.001, 0.01, 0.1]\nweight"
  },
  {
    "path": "src/configs/model/SMORE.yaml",
    "chars": 354,
    "preview": "embedding_size: 64\nn_ui_layers: [3,4]\nn_layers: 1\n\nlearning_rate_scheduler: [0.96, 50]\nlambda_coeff: 0.9\nreg_weight: [1e"
  },
  {
    "path": "src/configs/model/VBPR.yaml",
    "chars": 110,
    "preview": "embedding_size: 64\nreg_weight: [2.0, 1.0, 1e-01, 1e-02, 1e-03, 1e-04, 1e-05]\nhyper_parameters: [\"reg_weight\"]\n"
  },
  {
    "path": "src/configs/overall.yaml",
    "chars": 1127,
    "preview": "# general\ngpu_id: 0\nuse_gpu: True\nseed: [999]\n\n# multi-modal raw features\ndata_path: '../data/'\ninter_splitting_label: '"
  },
  {
    "path": "src/main.py",
    "chars": 678,
    "preview": "# coding: utf-8\n# @email: enoche.chow@gmail.com\n\n\"\"\"\nMain entry\n# UPDATED: 2022-Feb-15\n##########################\n\"\"\"\n\ni"
  },
  {
    "path": "src/models/bm3.py",
    "chars": 6659,
    "preview": "# coding: utf-8\n# @email: enoche.chow@gmail.com\nr\"\"\"\n\n################################################\npaper:  Bootstrap"
  },
  {
    "path": "src/models/bpr.py",
    "chars": 3518,
    "preview": "# coding: utf-8\n# @email: enoche.chow@gmail.com\nr\"\"\"\nBPR, with only u-i graph\n##########################################"
  },
  {
    "path": "src/models/damrs.py",
    "chars": 14823,
    "preview": "# coding: utf-8\n\nimport os\nimport random\nimport numpy as np\nimport scipy.sparse as sp\nimport torch\nimport torch.nn as nn"
  },
  {
    "path": "src/models/dragon.py",
    "chars": 19116,
    "preview": "# coding: utf-8\n#\n# user-graph need to be generated by the following script\n# tools/generate-u-u-matrix.py\nimport os\nimp"
  },
  {
    "path": "src/models/dualgnn.py",
    "chars": 16355,
    "preview": "# coding: utf-8\n# \n\"\"\"\nDualGNN: Dual Graph Neural Network for Multimedia Recommendation, IEEE Transactions on Multimedia"
  },
  {
    "path": "src/models/freedom.py",
    "chars": 9939,
    "preview": "# coding: utf-8\n# @email: enoche.chow@gmail.com\nr\"\"\"\nFREEDOM: A Tale of Two Graphs: Freezing and Denoising Graph Structu"
  },
  {
    "path": "src/models/grcn.py",
    "chars": 12883,
    "preview": "# coding: utf-8\n# \n\"\"\"\nGraph-Refined Convolutional Network for Multimedia Recommendation with Implicit Feedback, MM 2020"
  },
  {
    "path": "src/models/itemknncbf.py",
    "chars": 3768,
    "preview": "# coding: utf-8\n# @email: enoche.chow@gmail.com\nr\"\"\"\nItemKNNCBF\n################################################\nReferen"
  },
  {
    "path": "src/models/lattice.py",
    "chars": 10876,
    "preview": "# coding: utf-8\n# @email: enoche.chow@gmail.com\nr\"\"\"\nLATTICE\n################################################\nReference:"
  },
  {
    "path": "src/models/layergcn.py",
    "chars": 7729,
    "preview": "# -*- coding: utf-8 -*-\n\n\nimport numpy as np\nimport scipy.sparse as sp\nimport math\nimport random\nimport torch\nimport tor"
  },
  {
    "path": "src/models/lgmrec.py",
    "chars": 9780,
    "preview": "# coding: utf-8\n# @email: georgeguo.gzq.cn@gmail.com\nr\"\"\"\nLGMRec\n################################################\nRefere"
  },
  {
    "path": "src/models/lightgcn.py",
    "chars": 6283,
    "preview": "# -*- coding: utf-8 -*-\nr\"\"\"\nLightGCN\n################################################\n\nReference:\n    Xiangnan He et al"
  },
  {
    "path": "src/models/mgcn.py",
    "chars": 11394,
    "preview": "# coding: utf-8\n# @email: y463213402@gmail.com\nr\"\"\"\nMGCN\n################################################\nReference:\n   "
  },
  {
    "path": "src/models/mmgcn.py",
    "chars": 9913,
    "preview": "# coding: utf-8\n\"\"\"\nMMGCN: Multi-modal Graph Convolution Network for Personalized Recommendation of Micro-video. \nIn ACM"
  },
  {
    "path": "src/models/mvgae.py",
    "chars": 15936,
    "preview": "# coding: utf-8\n\"\"\"\nhttps://github.com/jing-1/MVGAE\nPaper: Multi-Modal Variational Graph Auto-Encoder for Recommendation"
  },
  {
    "path": "src/models/pgl.py",
    "chars": 12150,
    "preview": "# coding: utf-8\n# @email: y463213402@gmail.com\nr\"\"\"\nPGL\n################################################\nReference:\n    "
  },
  {
    "path": "src/models/selfcfed_lgn.py",
    "chars": 2636,
    "preview": "# -*- coding: utf-8 -*-\n# @Time   : 2021/05/17\n# @Author : Zhou xin\n# @Email  : enoche.chow@gmail.com\n\nr\"\"\"\n############"
  },
  {
    "path": "src/models/slmrec.py",
    "chars": 23000,
    "preview": "# coding: utf-8\n#\n# Updated by enoche\n# Paper: Self-supervised Learning for Multimedia Recommendation\n# Github: https://"
  },
  {
    "path": "src/models/smore.py",
    "chars": 15425,
    "preview": "# coding: utf-8\n# rongqing001@e.ntu.edu.sg\nr\"\"\"\nSMORE - Multi-modal Recommender System\nReference:\n    ACM WSDM 2025: Spe"
  },
  {
    "path": "src/models/vbpr.py",
    "chars": 4101,
    "preview": "# coding: utf-8\n# @email: enoche.chow@gmail.com\nr\"\"\"\nVBPR -- Recommended version\n#######################################"
  },
  {
    "path": "src/utils/configurator.py",
    "chars": 5750,
    "preview": "# coding: utf-8\n# @email: enoche.chow@gmail.com\n#\n\"\"\"\n################################\n\"\"\"\n\nimport re\nimport os\nimport y"
  },
  {
    "path": "src/utils/data_utils.py",
    "chars": 14359,
    "preview": "import torch\nimport random\nimport torchvision.transforms as transforms\nfrom torchvision.transforms.functional import pad"
  },
  {
    "path": "src/utils/dataloader.py",
    "chars": 16839,
    "preview": "# coding: utf-8\n# @email: enoche.chow@gmail.com\n\"\"\"\nWrap dataset into dataloader\n#######################################"
  },
  {
    "path": "src/utils/dataset.py",
    "chars": 5011,
    "preview": "# coding: utf-8\n# @email: enoche.chow@gmail.com\n#\n# updated: Mar. 25, 2022\n# Filled non-existing raw features with non-z"
  },
  {
    "path": "src/utils/logger.py",
    "chars": 1882,
    "preview": "# coding: utf-8\n# @email: enoche.chow@gmail.com\n\n\"\"\"\n###############################\n\"\"\"\n\nimport logging\nimport os\nfrom "
  },
  {
    "path": "src/utils/metrics.py",
    "chars": 4444,
    "preview": "# encoding: utf-8\n# @email: enoche.chow@gmail.com\n\"\"\"\n############################\n\"\"\"\n\nfrom logging import getLogger\n\ni"
  },
  {
    "path": "src/utils/misc.py",
    "chars": 631,
    "preview": "# coding: utf-8\n# @email: enoche.chow@gmail.com\n\"\"\"\nmodified from UNITER\n\"\"\"\nimport json\nimport random\nimport sys\n\nimpor"
  },
  {
    "path": "src/utils/quick_start.py",
    "chars": 4512,
    "preview": "# coding: utf-8\n# @email: enoche.chow@gmail.com\n\n\"\"\"\nRun application\n##########################\n\"\"\"\nfrom logging import "
  },
  {
    "path": "src/utils/topk_evaluator.py",
    "chars": 6018,
    "preview": "# coding: utf-8\n# @email: enoche.chow@gmail.com\n\"\"\"\n################################\n\"\"\"\nimport os\nimport numpy as np\nim"
  },
  {
    "path": "src/utils/utils.py",
    "chars": 5546,
    "preview": "# coding: utf-8\n# @email  : enoche.chow@gmail.com\n\n\"\"\"\nUtility functions\n##########################\n\"\"\"\n\nimport numpy as"
  }
]

About this extraction

This page contains the full source code of the enoche/MMRec GitHub repository, extracted and formatted as plain text for AI agents and large language models (LLMs). The extraction includes 83 files (495.4 KB), approximately 136.4k tokens, and a symbol index with 420 extracted functions, classes, methods, constants, and types. Use this with OpenClaw, Claude, ChatGPT, Cursor, Windsurf, or any other AI tool that accepts text input. You can copy the full output to your clipboard or download it as a .txt file.

Extracted by GitExtract — a free GitHub-repo-to-text converter for AI. Built by Nikandr Surkov.

Copied to clipboard!